lang
stringclasses
3 values
file_path
stringlengths
5
150
repo_name
stringlengths
6
110
commit
stringlengths
40
40
file_code
stringlengths
1.52k
18.9k
prefix
stringlengths
82
16.5k
suffix
stringlengths
0
15.1k
middle
stringlengths
121
8.18k
strategy
stringclasses
8 values
context_items
listlengths
0
100
Rust
src/core/search/query/term_query.rs
sonic182/rucene
5b55f842c2bb03beb96898d520e880c180c91adf
use error::Result; use std::fmt; use core::codec::Codec; use core::codec::{PostingIterator, PostingIteratorFlags}; use core::doc::Term; use core::index::reader::LeafReaderContext; use core::search::explanation::Explanation; use core::search::query::{Query, Weight}; use core::search::scorer::{Scorer, TermScorer}; use core::search::searcher::SearchPlanBuilder; use core::search::similarity::{SimWeight, Similarity}; use core::search::statistics::{CollectionStatistics, TermStatistics}; use core::search::DocIterator; use core::util::{DocId, KeyedContext}; pub const TERM: &str = "term"; #[derive(Clone, Debug, PartialEq)] pub struct TermQuery { pub term: Term, pub boost: f32, pub ctx: Option<KeyedContext>, } impl TermQuery { pub fn new<T: Into<Option<KeyedContext>>>(term: Term, boost: f32, ctx: T) -> TermQuery { let ctx = ctx.into(); TermQuery { term, boost, ctx } } #[inline] pub fn term(&self) -> &Term { &self.term } } impl<C: Codec> Query<C> for TermQuery { fn create_weight( &self, searcher: &dyn SearchPlanBuilder<C>, needs_scores: bool, ) -> Result<Box<dyn Weight<C>>> { let max_doc = searcher.max_doc() as i64; let term_stats = if needs_scores { vec![searcher.term_statistics(&self.term)?] 
} else { vec![TermStatistics::new(self.term.bytes.clone(), max_doc, -1)] }; let collection_stats = if needs_scores { if let Some(stat) = searcher.collections_statistics(&self.term.field) { stat.clone() } else { CollectionStatistics::new(self.term.field.clone(), 0, max_doc, -1, -1, -1) } } else { CollectionStatistics::new(self.term.field.clone(), 0, max_doc, -1, -1, -1) }; let similarity = searcher.similarity(&self.term.field, needs_scores); let sim_weight = similarity.compute_weight( &collection_stats, &term_stats, self.ctx.as_ref(), self.boost, ); Ok(Box::new(TermWeight::new( self.term.clone(), self.boost, similarity, sim_weight, needs_scores, ))) } fn extract_terms(&self) -> Vec<TermQuery> { vec![self.clone()] } fn as_any(&self) -> &dyn (::std::any::Any) { self } } impl fmt::Display for TermQuery { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "TermQuery(field: {}, term: {}, boost: {})", &self.term.field(), &self.term.text().unwrap(), self.boost ) } } struct TermWeight<C: Codec> { term: Term, boost: f32, similarity: Box<dyn Similarity<C>>, sim_weight: Box<dyn SimWeight<C>>, needs_scores: bool, } impl<C: Codec> TermWeight<C> { pub fn new( term: Term, boost: f32, similarity: Box<dyn Similarity<C>>, sim_weight: Box<dyn SimWeight<C>>, needs_scores: bool, ) -> TermWeight<C> { TermWeight { term, boost, similarity, sim_weight, needs_scores, } } } impl<C: Codec> Weight<C> for TermWeight<C> { fn create_scorer(&self, reader: &LeafReaderContext<'_, C>) -> Result<Option<Box<dyn Scorer>>> { let _norms = reader.reader.norm_values(&self.term.field); let sim_scorer = self.sim_weight.sim_scorer(reader.reader)?; let flags = if self.needs_scores { PostingIteratorFlags::FREQS } else { PostingIteratorFlags::NONE }; if let Some(postings_iterator) = reader.reader.postings(&self.term, flags as i32)? 
{ Ok(Some(Box::new(TermScorer::new( sim_scorer, postings_iterator, )))) } else { Ok(None) } } fn query_type(&self) -> &'static str { TERM } fn normalize(&mut self, norm: f32, boost: f32) { self.sim_weight.normalize(norm, boost * self.boost) } fn value_for_normalization(&self) -> f32 { self.sim_weight.get_value_for_normalization() } fn needs_scores(&self) -> bool { self.needs_scores } fn explain(&self, reader: &LeafReaderContext<'_, C>, doc: DocId) -> Result<Explanation> { let flags = if self.needs_scores { PostingIteratorFlags::FREQS } else { PostingIteratorFlags::NONE }; if let Some(mut postings_iterator) = reader.reader.postings(&self.term, flags as i32)? { let new_doc = postings_iterator.advance(doc)?; if new_doc == doc { let freq = postings_iterator.freq()? as f32; let freq_expl = Explanation::new(true, freq, format!("termFreq={}", freq), vec![]); let score_expl = self.sim_weight.explain(reader.reader, doc, freq_expl)?; return Ok(Explanation::new( true, score_expl.value(), format!( "weight({} in {}) [{}], result of:", self, doc, self.similarity ), vec![score_expl], )); } } Ok(Explanation::new( false, 0f32, "no matching term".to_string(), vec![], )) } } impl<C: Codec> fmt::Display for TermWeight<C> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "TermWeight(field: {}, term: {}, boost: {}, similarity: {}, need_score: {})", &self.term.field(), &self.term.text().unwrap(), self.boost, &self.similarity, self.needs_scores ) } }
use error::Result; use std::fmt; use core::codec::Codec; use core::codec::{PostingIterator, PostingIteratorFlags}; use core::doc::Term; use core::index::reader::LeafReaderContext; use core::search::explanation::Explanation; use core::search::query::{Query, Weight}; use core::search::scorer::{Scorer, TermScorer}; use core::search::searcher::SearchPlanBuilder; use core::search::similarity::{SimWeight, Similarity}; use core::search::statistics::{CollectionStatistics, TermStatistics}; use core::search::DocIterator; use core::util::{DocId, KeyedContext}; pub const TERM: &str = "term"; #[derive(Clone, Debug, PartialEq)] pub struct TermQuery { pub term: Term, pub boost: f32, pub ctx: Option<KeyedContext>, } impl TermQuery { pub fn new<T: Into<Option<KeyedContext>>>(term: Term, boost: f32, ctx: T) -> TermQuery { let ctx = ctx.into(); TermQuery { term, boost, ctx } } #[inline] pub fn term(&self) -> &Term { &self.term } } impl<C: Codec> Query<C> for TermQuery { fn create_weight( &self, searcher: &dyn SearchPlanBuilder<C>, needs_scores: bool, ) -> Result<Box<dyn Weight<C>>> { let max_doc = searcher.max_doc() as i64; let term_stats = if needs_scores { vec![searcher.term_statistics(&self.term)?] } else { vec![TermStatistics::new(self.term.bytes.clone(), max_doc, -1)] }; let collection_stats = if needs_scores { if let Some(stat) = searcher.collections_statistics(&self.term.field) { stat.clone() } else { CollectionStatistics::new(self.term.field.clone(), 0, max_doc, -1, -1, -1) } } else { CollectionStatistics::new(self.term.field.clone(), 0, max_doc, -1, -1, -1) }; let similarity = searcher.similarity(&self.term.field, needs_scores); let sim_weight = similarity.compute_weight( &collection_stats, &term_stats,
fn extract_terms(&self) -> Vec<TermQuery> { vec![self.clone()] } fn as_any(&self) -> &dyn (::std::any::Any) { self } } impl fmt::Display for TermQuery { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "TermQuery(field: {}, term: {}, boost: {})", &self.term.field(), &self.term.text().unwrap(), self.boost ) } } struct TermWeight<C: Codec> { term: Term, boost: f32, similarity: Box<dyn Similarity<C>>, sim_weight: Box<dyn SimWeight<C>>, needs_scores: bool, } impl<C: Codec> TermWeight<C> { pub fn new( term: Term, boost: f32, similarity: Box<dyn Similarity<C>>, sim_weight: Box<dyn SimWeight<C>>, needs_scores: bool, ) -> TermWeight<C> { TermWeight { term, boost, similarity, sim_weight, needs_scores, } } } impl<C: Codec> Weight<C> for TermWeight<C> { fn create_scorer(&self, reader: &LeafReaderContext<'_, C>) -> Result<Option<Box<dyn Scorer>>> { let _norms = reader.reader.norm_values(&self.term.field); let sim_scorer = self.sim_weight.sim_scorer(reader.reader)?; let flags = if self.needs_scores { PostingIteratorFlags::FREQS } else { PostingIteratorFlags::NONE }; if let Some(postings_iterator) = reader.reader.postings(&self.term, flags as i32)? { Ok(Some(Box::new(TermScorer::new( sim_scorer, postings_iterator, )))) } else { Ok(None) } } fn query_type(&self) -> &'static str { TERM } fn normalize(&mut self, norm: f32, boost: f32) { self.sim_weight.normalize(norm, boost * self.boost) } fn value_for_normalization(&self) -> f32 { self.sim_weight.get_value_for_normalization() } fn needs_scores(&self) -> bool { self.needs_scores } fn explain(&self, reader: &LeafReaderContext<'_, C>, doc: DocId) -> Result<Explanation> { let flags = if self.needs_scores { PostingIteratorFlags::FREQS } else { PostingIteratorFlags::NONE }; if let Some(mut postings_iterator) = reader.reader.postings(&self.term, flags as i32)? { let new_doc = postings_iterator.advance(doc)?; if new_doc == doc { let freq = postings_iterator.freq()? 
as f32; let freq_expl = Explanation::new(true, freq, format!("termFreq={}", freq), vec![]); let score_expl = self.sim_weight.explain(reader.reader, doc, freq_expl)?; return Ok(Explanation::new( true, score_expl.value(), format!( "weight({} in {}) [{}], result of:", self, doc, self.similarity ), vec![score_expl], )); } } Ok(Explanation::new( false, 0f32, "no matching term".to_string(), vec![], )) } } impl<C: Codec> fmt::Display for TermWeight<C> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "TermWeight(field: {}, term: {}, boost: {}, similarity: {}, need_score: {})", &self.term.field(), &self.term.text().unwrap(), self.boost, &self.similarity, self.needs_scores ) } }
self.ctx.as_ref(), self.boost, ); Ok(Box::new(TermWeight::new( self.term.clone(), self.boost, similarity, sim_weight, needs_scores, ))) }
function_block-function_prefix_line
[]
Rust
src/geometry.rs
alteous/alteous-three-rs
d09d2699d8909dc38d469d8f564449d2f85c9ce6
use genmesh::{EmitTriangles, Triangulate, Vertex as GenVertex}; use genmesh::generators::{self, IndexedPolygon, SharedVertex}; use mint; use std::collections::HashMap; #[derive(Clone, Debug, Default)] pub struct Shape { pub vertices: Vec<mint::Point3<f32>>, pub normals: Vec<mint::Vector3<f32>>, pub tangents: Vec<mint::Vector4<f32>>, pub tex_coords: Vec<mint::Point2<f32>>, } impl Shape { pub fn empty() -> Self { Default::default() } } #[derive(Clone, Debug, Default)] pub struct Geometry { pub base_shape: Shape, pub shapes: HashMap<String, Shape>, pub faces: Vec<[u32; 3]>, } impl Geometry { pub fn empty() -> Self { Default::default() } pub fn with_vertices(vertices: Vec<mint::Point3<f32>>) -> Self { Geometry { base_shape: Shape { vertices, normals: Vec::new(), ..Shape::empty() }, ..Geometry::empty() } } fn generate<P, G, Fpos, Fnor>( gen: G, fpos: Fpos, fnor: Fnor, ) -> Self where P: EmitTriangles<Vertex = usize>, G: IndexedPolygon<P> + SharedVertex<GenVertex>, Fpos: Fn(GenVertex) -> mint::Point3<f32>, Fnor: Fn(GenVertex) -> mint::Vector3<f32>, { Geometry { base_shape: Shape { vertices: gen.shared_vertex_iter().map(fpos).collect(), normals: gen.shared_vertex_iter().map(fnor).collect(), ..Shape::empty() }, shapes: HashMap::new(), faces: gen.indexed_polygon_iter() .triangulate() .map(|t| [t.x as u32, t.y as u32, t.z as u32]) .collect(), } } pub fn plane( width: f32, height: f32, ) -> Self { Self::generate( generators::Plane::new(), |GenVertex { pos, .. }| [pos[0] * 0.5 * width, pos[1] * 0.5 * height, 0.0].into(), |v| v.normal.into(), ) } pub fn cuboid( width: f32, height: f32, depth: f32, ) -> Self { Self::generate( generators::Cube::new(), |GenVertex { pos, .. }| { [ pos[0] * 0.5 * width, pos[1] * 0.5 * height, pos[2] * 0.5 * depth, ].into() }, |v| v.normal.into(), ) } pub fn cylinder( radius_top: f32, radius_bottom: f32, height: f32, radius_segments: usize, ) -> Self { Self::generate( generators::Cylinder::new(radius_segments), |GenVertex { pos, .. 
}| { let scale = (pos[2] + 1.0) * 0.5 * radius_top + (1.0 - pos[2]) * 0.5 * radius_bottom; [pos[1] * scale, pos[2] * 0.5 * height, pos[0] * scale].into() }, |GenVertex { normal, .. }| [normal[1], normal[2], normal[0]].into(), ) } pub fn uv_sphere( radius: f32, equatorial_segments: usize, meridional_segments: usize, ) -> Self { Self::generate( generators::SphereUV::new(equatorial_segments, meridional_segments), |GenVertex { pos, .. }| [pos[0] * radius, pos[1] * radius, pos[2] * radius].into(), |v| v.normal.into(), ) } }
use genmesh::{EmitTriangles, Triangulate, Vertex as GenVertex}; use genmesh::generators::{self, IndexedPolygon, SharedVertex}; use mint; use std::collections::HashMap; #[derive(Clone, Debug, Default)] pub struct Shape { pub vertices: Vec<mint::Point3<f32>>, pub normals: Vec<mint::Vector3<f32>>, pub tangents: Vec<mint::Vector4<f32>>, pub tex_coords: Vec<mint::Point2<f32>>, } impl Shape { pub fn empty() -> Self { Default::default() } } #[derive(Clone, Debug, Default)] pub struct Geometry { pub base_shape: Shape, pub shapes: HashMap<String, Shape>, pub faces: Vec<[u32; 3]>, } impl Geometry { pub fn empty() -> Self { Default::default() } pub fn with_vertices(vertices: Vec<mint::Point3<f32>>) -> Self { Geometry { base_shape: Shape { vertices, normals: Vec::new(), ..Shape::empty() }, ..Geometry::empty() } } fn generate<P, G, Fpos, Fnor>( gen: G, fpos: Fpos, fnor: Fnor, ) -> Self where P: EmitTriangles<Vertex = usize>, G: IndexedPolygon<P> + SharedVertex<GenVertex>, Fpos: Fn(GenVertex) -> mint::Point3<f32>, Fnor: Fn(GenVertex) -> mint::Vector3<f32>, { Geometry { base_shape: Shape { vertices: gen.shared_vertex_iter().map(fpos).collect(), normals: gen.shared_vertex_iter().map(fnor).collect(), ..Shape::empty() }, shapes: HashMap::new(), faces: gen.indexed_polygon_iter() .triangulate() .map(|t| [t.x as u32, t.y as u32, t.z as u32]) .collect(), } }
pub fn cuboid( width: f32, height: f32, depth: f32, ) -> Self { Self::generate( generators::Cube::new(), |GenVertex { pos, .. }| { [ pos[0] * 0.5 * width, pos[1] * 0.5 * height, pos[2] * 0.5 * depth, ].into() }, |v| v.normal.into(), ) } pub fn cylinder( radius_top: f32, radius_bottom: f32, height: f32, radius_segments: usize, ) -> Self { Self::generate( generators::Cylinder::new(radius_segments), |GenVertex { pos, .. }| { let scale = (pos[2] + 1.0) * 0.5 * radius_top + (1.0 - pos[2]) * 0.5 * radius_bottom; [pos[1] * scale, pos[2] * 0.5 * height, pos[0] * scale].into() }, |GenVertex { normal, .. }| [normal[1], normal[2], normal[0]].into(), ) } pub fn uv_sphere( radius: f32, equatorial_segments: usize, meridional_segments: usize, ) -> Self { Self::generate( generators::SphereUV::new(equatorial_segments, meridional_segments), |GenVertex { pos, .. }| [pos[0] * radius, pos[1] * radius, pos[2] * radius].into(), |v| v.normal.into(), ) } }
pub fn plane( width: f32, height: f32, ) -> Self { Self::generate( generators::Plane::new(), |GenVertex { pos, .. }| [pos[0] * 0.5 * width, pos[1] * 0.5 * height, 0.0].into(), |v| v.normal.into(), ) }
function_block-full_function
[ { "content": "/// Reads the entire contents of a file into a `String`.\n\npub fn read_file_to_string<P: AsRef<path::Path>>(path: P) -> io::Result<String> {\n\n use self::io::Read;\n\n let file = fs::File::open(path)?;\n\n let len = file.metadata()?.len() as usize;\n\n let mut contents = String::with...
Rust
src/reader/error.rs
lo48576/fbx_direct
4f578295cc569df58d541e9cf1e5268cf0d423b0
use std::error; use std::fmt; use std::io; use std::str; use std::string; pub type Result<T> = ::std::result::Result<T, Error>; #[derive(Debug, Clone)] pub struct Error { pos: u64, kind: ErrorKind, } impl Error { pub fn new<K: Into<ErrorKind>>(pos: u64, kind: K) -> Self { Error { pos, kind: kind.into(), } } } impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self.kind { ErrorKind::Utf8Error(ref err) => { write!(f, "UTF-8 conversion error at pos={}: {}", self.pos, err) } ErrorKind::InvalidMagic => write!( f, "Invalid magic header at pos={}: Non-FBX or corrupted data?", self.pos ), ErrorKind::Io(ref err) => write!(f, "I/O error at pos={}: {}", self.pos, err), ErrorKind::DataError(ref err) => write!(f, "Invalid data at pos={}: {}", self.pos, err), ErrorKind::UnexpectedValue(ref err) => { write!(f, "Got an unexpected value at pos={}: {}", self.pos, err) } ErrorKind::UnexpectedEof => write!(f, "Unexpected EOF at pos={}", self.pos), ErrorKind::Unimplemented(ref err) => write!(f, "Unimplemented feature: {}", err), } } } impl error::Error for Error { fn description(&self) -> &str { match self.kind { ErrorKind::Utf8Error(ref err) => err.description(), ErrorKind::InvalidMagic => "Got an invalid magic header", ErrorKind::Io(ref err) => err.description(), ErrorKind::DataError(_) => "Got an invalid data", ErrorKind::UnexpectedValue(_) => "Invalid value in FBX data", ErrorKind::UnexpectedEof => "Unexpected EOF", ErrorKind::Unimplemented(_) => "Attempt to use unimplemented feature", } } fn cause(&self) -> Option<&dyn error::Error> { match self.kind { ErrorKind::Utf8Error(ref err) => Some(err as &dyn error::Error), ErrorKind::Io(ref err) => Some(err as &dyn error::Error), _ => None, } } } #[derive(Debug)] pub enum ErrorKind { Utf8Error(str::Utf8Error), InvalidMagic, Io(io::Error), DataError(String), UnexpectedValue(String), UnexpectedEof, Unimplemented(String), } impl Clone for ErrorKind { fn clone(&self) -> Self { use 
self::ErrorKind::*; use std::error::Error; match *self { Utf8Error(ref e) => Utf8Error(*e), InvalidMagic => InvalidMagic, Io(ref e) => Io(io::Error::new(e.kind(), e.description())), DataError(ref e) => DataError(e.clone()), UnexpectedValue(ref e) => UnexpectedValue(e.clone()), UnexpectedEof => UnexpectedEof, Unimplemented(ref e) => Unimplemented(e.clone()), } } } impl From<string::FromUtf8Error> for ErrorKind { fn from(err: string::FromUtf8Error) -> ErrorKind { ErrorKind::Utf8Error(err.utf8_error()) } } impl From<io::Error> for ErrorKind { fn from(err: io::Error) -> ErrorKind { ErrorKind::Io(err) } }
use std::error; use std::fmt; use std::io; use std::str; use std::string; pub type Result<T> = ::std::result::Result<T, Error>; #[derive(Debug, Clone)] pub struct Error { pos: u64, kind: ErrorKind, } impl Error { pub fn new<K: Into<ErrorKind>>(pos: u64, kind: K) -> Self { Error { pos, kind: kind.into(), } } } impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self.kind { ErrorKind::Utf8Error(ref err) => { write!(f, "UTF-8 conversion error at pos={}: {}", self.pos, err) } ErrorKind::InvalidMagic => write!( f, "Invalid magic header at pos={}: Non-FBX or corrupted data?", self.pos ), ErrorKind::Io(ref err) => write!(f, "I/O error at pos={}: {}", self.pos, err), ErrorKind::DataError(ref err) => write!(f, "Invalid data at pos={}: {}", self.pos, err), ErrorKind::UnexpectedValue(ref err) => { write!(f, "Got an unexpected value at pos={}: {}", self.pos, err) } ErrorKind::UnexpectedEof => write!(f, "Unexpected EOF at pos={}", self.pos), ErrorKind::Unimplemented(ref err) => write!(f, "Unimplemented feature: {}", err), } } } impl error::Error for Error { fn description(&self) -> &str { match self.kind { ErrorKind::Utf8Error(ref err) => err.description(), ErrorKind::InvalidMagic => "Got an invalid magic header", ErrorKind::Io(ref err) => err.description(), ErrorKind::DataError(_) => "Got an invalid data", ErrorKind::UnexpectedValue(_) => "Invalid value in FBX dat
fn cause(&self) -> Option<&dyn error::Error> { match self.kind { ErrorKind::Utf8Error(ref err) => Some(err as &dyn error::Error), ErrorKind::Io(ref err) => Some(err as &dyn error::Error), _ => None, } } } #[derive(Debug)] pub enum ErrorKind { Utf8Error(str::Utf8Error), InvalidMagic, Io(io::Error), DataError(String), UnexpectedValue(String), UnexpectedEof, Unimplemented(String), } impl Clone for ErrorKind { fn clone(&self) -> Self { use self::ErrorKind::*; use std::error::Error; match *self { Utf8Error(ref e) => Utf8Error(*e), InvalidMagic => InvalidMagic, Io(ref e) => Io(io::Error::new(e.kind(), e.description())), DataError(ref e) => DataError(e.clone()), UnexpectedValue(ref e) => UnexpectedValue(e.clone()), UnexpectedEof => UnexpectedEof, Unimplemented(ref e) => Unimplemented(e.clone()), } } } impl From<string::FromUtf8Error> for ErrorKind { fn from(err: string::FromUtf8Error) -> ErrorKind { ErrorKind::Utf8Error(err.utf8_error()) } } impl From<io::Error> for ErrorKind { fn from(err: io::Error) -> ErrorKind { ErrorKind::Io(err) } }
a", ErrorKind::UnexpectedEof => "Unexpected EOF", ErrorKind::Unimplemented(_) => "Attempt to use unimplemented feature", } }
function_block-function_prefixed
[ { "content": "fn indent<W: Write>(sink: &mut W, depth: usize) -> Result<()> {\n\n for _ in 0..depth {\n\n sink.write_all(b\"\\t\")?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/writer/emitter/ascii.rs", "rank": 0, "score": 72160.99466022922 }, { "content": "fn print_prope...
Rust
src/stats_api/mod.rs
Chronophylos/se-stats-exporter
ea58f9bf02b77cc4804368abd566684e95b20937
use chrono::{DateTime, Utc}; use serde::Deserialize; use std::{borrow::Cow, collections::HashMap}; #[derive(Debug, thiserror::Error)] pub enum Error { #[error("Could not build http client")] BuildClientError(#[source] reqwest::Error), #[error("Could not send {method} reqwest to {url}")] SendRequestError { method: &'static str, url: String, source: reqwest::Error, }, #[error("Could not parse json")] ParseJsonError(#[source] reqwest::Error), } #[derive(Debug, Clone, Deserialize)] pub struct Channel<'a> { pub channel: Cow<'a, str>, pub messages: u64, } #[derive(Debug, Clone, Deserialize)] pub struct ChatEmotes<'a> { pub username: Cow<'a, str>, pub emotes: EmoteList<'a>, #[serde(rename = "lastMessage")] pub last_message: DateTime<Utc>, } #[derive(Debug, Clone, Deserialize)] pub struct EmoteList<'a> { #[serde(rename = "bttvGlobalEmotes")] pub bttv_global_emotes: HashMap<Cow<'a, str>, Emote<'a>>, #[serde(rename = "bttvChannelEmotes")] pub bttv_channel_emotes: HashMap<Cow<'a, str>, Emote<'a>>, #[serde(rename = "ffzGlobalEmotes")] pub ffz_global_emotes: HashMap<Cow<'a, str>, Emote<'a>>, #[serde(rename = "ffzChannelEmotes")] pub ffz_channel_emotes: HashMap<Cow<'a, str>, Emote<'a>>, } #[derive(Debug, Clone, Deserialize)] pub struct Emote<'a> { pub name: Cow<'a, str>, #[serde(rename = "_id")] pub id: Cow<'a, str>, #[serde(rename = "type")] pub typ: EmoteType, pub width: u8, pub height: u8, pub gif: bool, } #[derive(Debug, Clone, Copy, Deserialize)] #[serde(rename = "lowercase")] pub enum EmoteType { BTTV, FFZ, } #[derive(Debug, Clone, Deserialize)] pub struct ChatStats<'a> { pub channel: Cow<'a, str>, #[serde(rename = "totalMessages")] pub total_messages: u64, pub chatters: Cow<'a, [ChatterStats<'a>]>, pub hashtags: Cow<'a, [HashtagStats<'a>]>, pub commands: Cow<'a, [CommandStats<'a>]>, #[serde(rename = "bttvEmotes")] pub bttv_emotes: Cow<'a, [EmoteStats<'a>]>, #[serde(rename = "ffzEmotes")] pub ffz_emotes: Cow<'a, [EmoteStats<'a>]>, #[serde(rename = "twitchEmotes")] pub 
twitch_emotes: Cow<'a, [EmoteStats<'a>]>, } #[derive(Debug, Clone, Deserialize)] pub struct ChatterStats<'a> { pub name: Cow<'a, str>, pub amount: u64, } #[derive(Debug, Clone, Deserialize)] pub struct HashtagStats<'a> { pub hashtag: Cow<'a, str>, pub amount: u64, } #[derive(Debug, Clone, Deserialize)] pub struct CommandStats<'a> { pub command: Cow<'a, str>, pub amount: u64, } #[derive(Debug, Clone, Deserialize)] pub struct EmoteStats<'a> { pub id: Cow<'a, str>, pub emote: Cow<'a, str>, pub amount: u64, } #[derive(Debug, Clone)] pub struct ApiClient { client: reqwest::Client, } impl ApiClient { pub fn new() -> Result<ApiClient, Error> { let client = reqwest::ClientBuilder::new() .build() .map_err(|e| Error::BuildClientError(e))?; Ok(ApiClient { client }) } pub async fn get_top_channels<'a>(&self) -> Result<Cow<'a, [Channel<'a>]>, Error> { const URL: &str = "https://api.streamelements.com/kappa/v2/chatstats"; let channels = self .client .get(URL) .send() .await .map_err(|source| Error::SendRequestError { method: "GET", url: URL.to_string(), source, })? .json() .await .map_err(|e| Error::ParseJsonError(e))?; Ok(channels) } pub async fn get_stats<'a, S>(&self, channel: S) -> Result<ChatStats<'a>, Error> where S: AsRef<str>, { let url = format!( "https://api.streamelements.com/kappa/v2/chatstats/{}/stats", channel.as_ref() ); let stats = self .client .get(&url) .send() .await .map_err(|source| Error::SendRequestError { method: "GET", url: url.clone(), source, })? 
.json() .await .map_err(|e| Error::ParseJsonError(e))?; Ok(stats) } } #[cfg(test)] mod tests { use super::{ApiClient, Error}; #[tokio::test] async fn get_top_channels() -> Result<(), Error> { let client = ApiClient::new()?; let channels = client.get_top_channels().await?; assert_eq!(channels.len(), 100); Ok(()) } #[tokio::test] async fn get_global_stats() -> Result<(), Error> { let client = ApiClient::new()?; let stats = client.get_stats("global").await?; assert_eq!(stats.channel, "global"); assert!(stats.total_messages > 67397996744); assert_eq!(stats.commands.len(), 100); assert_eq!(stats.hashtags.len(), 100); assert_eq!(stats.bttv_emotes.len(), 100); assert_eq!(stats.ffz_emotes.len(), 100); assert_eq!(stats.twitch_emotes.len(), 100); Ok(()) } #[test] fn sanity_check_message_count_fits_in_u64() { let _: u64 = 67397996744; } }
use chrono::{DateTime, Utc}; use serde::Deserialize; use std::{borrow::Cow, collections::HashMap}; #[derive(Debug, thiserror::Error)] pub enum Error { #[error("Could not build http client")] BuildClientError(#[source] reqwest::Error), #[error("Could not send {method} reqwest to {url}")] SendRequestError { method: &'static str, url: String, source: reqwest::Error, }, #[error("Could not parse json")] ParseJsonError(#[source] reqwest::Error), } #[derive(Debug, Clone, Deserialize)] pub struct Channel<'a> { pub channel: Cow<'a, str>, pub messages: u64, } #[derive(Debug, Clone, Deserialize)] pub struct ChatEmotes<'a> { pub username: Cow<'a, str>, pub emotes: EmoteList<'a>, #[serde(rename = "lastMessage")] pub last_message: DateTime<Utc>, } #[derive(Debug, Clone, Deserialize)] pub struct EmoteList<'a> { #[serde(rename = "bttvGlobalEmotes")] pub bttv_global_emotes: HashMap<Cow<'a, str>, Emote<'a>>, #[serde(rename = "bttvChannelEmotes")] pub bttv_channel_emotes: HashMap<Cow<'a, str>, Emote<'a>>, #[serde(rename = "ffzGlobalEmotes")] pub ffz_global_emotes: HashMap<Cow<'a, str>, Emote<'a>>, #[serde(rename = "ffzChannelEmotes")] pub ffz_channel_emotes: HashMap<Cow<'a, str>, Emote<'a>>, } #[derive(Debug, Clone, Deserialize)] pub struct Emote<'a> { pub name: Cow<'a, str>, #[serde(rename = "_id")] pub id: Cow<'a, str>, #[serde(rename = "type")] pub typ: EmoteType, pub width: u8, pub height: u8, pub gif: bool, } #[derive(Debug, Clone, Copy, Deserialize)] #[serde(rename = "lowercase")] pub enum EmoteType { BTTV, FFZ, } #[derive(Debug, Clone, Deserialize)] pub struct ChatStats<'a> { pub channel: Cow<'a, str>, #[serde(rename = "totalMessages")] pub total_messages: u64, pub chatters: Cow<'a, [ChatterStats<'a>]>, pub hashtags: Cow<'a, [HashtagStats<'a>]>, pub commands: Cow<'a, [CommandStats<'a>]>, #[serde(rename = "bttvEmotes")] pub bttv_emotes: Cow<'a, [EmoteStats<'a>]>, #[serde(rename = "ffzEmotes")] pub ffz_emotes: Cow<'a, [EmoteStats<'a>]>, #[serde(rename = "twitchEmotes")] pub 
twitch_emotes: Cow<'a, [EmoteStats<'a>]>, } #[derive(Debug, Clone, Deserialize)] pub struct ChatterStats<'a> { pub name: Cow<'a, str>, pub amount: u64, } #[derive(Debug, Clone, Deserialize)] pub struct HashtagStats<'a> { pub hashtag: Cow<'a, str>, pub amount: u64, } #[derive(Debug, Clone, Deserialize)] pub struct CommandStats<'a> { pub command: Cow<'a, str>, pub amount: u64, } #[derive(Debug, Clone, Deserialize)] pub struct EmoteStats<'a> { pub id: Cow<'a, str>, pub emote: Cow<'a, str>, pub amount: u64, } #[derive(Debug, Clone)] pub struct ApiClient { client: reqwest::Client, } impl ApiClient { pub fn new() -> Result<ApiClient, Error> { let client = reqwest::ClientBuilder::new() .build() .map_err(|e| Error::BuildClientError(e))?; Ok(ApiClient { client }) } pub async fn get_top_channels<'a>(&self) -> Result<Cow<'a, [Channel<'a>]>, Error> { const URL: &str = "https://api.streamelements.com/kappa/v2/chatstats"; let channels = self .client .get(URL) .send() .await .map_err(|source| Error::SendRequestError { method: "GET", url: URL.to_string(), source, })? .json() .await .map_err(|e| Error::ParseJsonError(e))?; Ok(channels) } pub async fn get_stats<'a, S>(&self, channel: S) -> Result<ChatStats<'a>, Error> where S: AsRef<str>, { let url = format!( "https://api.streamelements.com/kappa/v2/chatstats/{}/stats", channel.as_ref() ); let stats = self .client .get(&url) .se
:ParseJsonError(e))?; Ok(stats) } } #[cfg(test)] mod tests { use super::{ApiClient, Error}; #[tokio::test] async fn get_top_channels() -> Result<(), Error> { let client = ApiClient::new()?; let channels = client.get_top_channels().await?; assert_eq!(channels.len(), 100); Ok(()) } #[tokio::test] async fn get_global_stats() -> Result<(), Error> { let client = ApiClient::new()?; let stats = client.get_stats("global").await?; assert_eq!(stats.channel, "global"); assert!(stats.total_messages > 67397996744); assert_eq!(stats.commands.len(), 100); assert_eq!(stats.hashtags.len(), 100); assert_eq!(stats.bttv_emotes.len(), 100); assert_eq!(stats.ffz_emotes.len(), 100); assert_eq!(stats.twitch_emotes.len(), 100); Ok(()) } #[test] fn sanity_check_message_count_fits_in_u64() { let _: u64 = 67397996744; } }
nd() .await .map_err(|source| Error::SendRequestError { method: "GET", url: url.clone(), source, })? .json() .await .map_err(|e| Error:
random
[ { "content": "This is a prototype and only does what I need it to.\n\nIf anyone is interested I'll continue developing this\n", "file_path": "README.md", "rank": 0, "score": 16504.059525609697 }, { "content": "fn drain_to_gauge<'a, I, L, ValueF, LabelF>(\n\n name: &'static str,\n\n dat...
Rust
src/merkle_tree/incremental_merkle_tree.rs
stechu/crypto-primitives
fd9a0decc470a7c92d2ed2ef7a5fffa6952c9c83
use crate::crh::TwoToOneCRHScheme; use crate::merkle_tree::{Config, DigestConverter, LeafParam, Path, TwoToOneParam}; use crate::CRHScheme; use ark_std::borrow::Borrow; use ark_std::vec::Vec; #[derive(Derivative)] #[derivative(Clone(bound = "P: Config"))] pub struct IncrementalMerkleTree<P: Config> { leaf_nodes: Vec<P::LeafDigest>, two_to_one_hash_param: TwoToOneParam<P>, leaf_hash_param: LeafParam<P>, height: usize, current_path: Path<P>, root: P::InnerDigest, empty: bool, } impl<P: Config> IncrementalMerkleTree<P> { pub fn is_empty(&self) -> bool { self.empty } pub fn current_index(&self) -> Option<usize> { if self.is_empty() { None } else { Some(self.current_path.leaf_index) } } pub fn next_available(&self) -> Option<usize> { let current_index = self.current_path.leaf_index; if self.is_empty() { Some(0) } else if current_index < (1 << (self.height - 1)) - 1 { Some(current_index + 1) } else { None } } pub fn blank( leaf_hash_param: &LeafParam<P>, two_to_one_hash_param: &TwoToOneParam<P>, height: usize, ) -> Result<Self, crate::Error> { assert!( height > 1, "the height of incremental merkle tree should be at least 2" ); let leaves_digest = vec![]; Ok(IncrementalMerkleTree { current_path: Path { leaf_sibling_hash: P::LeafDigest::default(), auth_path: Vec::new(), leaf_index: 0, }, leaf_nodes: leaves_digest, two_to_one_hash_param: two_to_one_hash_param.clone(), leaf_hash_param: leaf_hash_param.clone(), root: P::InnerDigest::default(), height, empty: true, }) } pub fn append<T: Borrow<P::Leaf>>(&mut self, new_leaf: T) -> Result<(), crate::Error> { assert!(self.next_available() != None, "index out of range"); let leaf_digest = P::LeafHash::evaluate(&self.leaf_hash_param, new_leaf)?; let (path, root) = self.next_path(leaf_digest.clone())?; self.leaf_nodes.push(leaf_digest); self.current_path = path; self.root = root; self.empty = false; Ok(()) } pub fn next_path( &self, new_leaf_digest: P::LeafDigest, ) -> Result<(Path<P>, P::InnerDigest), crate::Error> { 
assert!(self.next_available() != None, "index out of range"); let tree_height = self.height; let hash_of_empty_node: P::InnerDigest = P::InnerDigest::default(); let hash_of_empty_leaf: P::LeafDigest = P::LeafDigest::default(); let mut new_auth_path = Vec::with_capacity(tree_height - 2); if self.is_empty() { let mut current_node = P::TwoToOneHash::evaluate( &self.two_to_one_hash_param, P::LeafInnerDigestConverter::convert(new_leaf_digest)?, P::LeafInnerDigestConverter::convert(P::LeafDigest::default())?, )?; for _ in 0..tree_height - 2 { new_auth_path.push(hash_of_empty_node.clone()); current_node = P::TwoToOneHash::compress( &self.two_to_one_hash_param, current_node, hash_of_empty_node.clone(), )?; } let path = Path { leaf_index: 0, auth_path: new_auth_path, leaf_sibling_hash: hash_of_empty_leaf, }; Ok((path, current_node)) } else { let mut new_index = self.next_available().unwrap(); let mut old_index = self.current_index().unwrap(); let old_leaf = self.leaf_nodes[old_index].clone(); let (old_left_leaf, old_right_leaf) = if is_left_child(old_index) { ( self.leaf_nodes[old_index].clone(), self.current_path.leaf_sibling_hash.clone(), ) } else { ( self.current_path.leaf_sibling_hash.clone(), self.leaf_nodes[old_index].clone(), ) }; let (new_left_leaf, new_right_leaf, leaf_sibling) = if is_left_child(new_index) { ( new_leaf_digest, hash_of_empty_leaf.clone(), hash_of_empty_leaf, ) } else { (old_leaf.clone(), new_leaf_digest, old_leaf) }; let mut old_current_node = P::TwoToOneHash::evaluate( &self.two_to_one_hash_param, P::LeafInnerDigestConverter::convert(old_left_leaf)?, P::LeafInnerDigestConverter::convert(old_right_leaf)?, )?; let mut new_current_node = P::TwoToOneHash::evaluate( &self.two_to_one_hash_param, P::LeafInnerDigestConverter::convert(new_left_leaf)?, P::LeafInnerDigestConverter::convert(new_right_leaf)?, )?; let mut old_auth_path = self.current_path.auth_path.clone(); old_auth_path.reverse(); for x in 0..tree_height - 2 { new_index = 
parent_index_on_level(new_index); old_index = parent_index_on_level(old_index); if new_index == old_index { new_auth_path.push(old_auth_path[x].clone()); let (new_left, new_right) = if is_left_child(new_index) { (new_current_node, hash_of_empty_node.clone()) } else { (old_auth_path[x].clone(), new_current_node) }; new_current_node = P::TwoToOneHash::compress( &self.two_to_one_hash_param, new_left, new_right, )?; } else { let auth_node = if is_left_child(new_index) { hash_of_empty_node.clone() } else { old_current_node.clone() }; new_auth_path.push(auth_node); let (new_left, new_right) = if is_left_child(new_index) { (new_current_node.clone(), hash_of_empty_node.clone()) } else { (old_current_node.clone(), new_current_node) }; new_current_node = P::TwoToOneHash::compress( &self.two_to_one_hash_param, new_left, new_right, )?; if !is_left_child(old_index) { old_current_node = P::TwoToOneHash::compress( &self.two_to_one_hash_param, old_auth_path[x].clone(), old_current_node, )?; } } } new_auth_path.reverse(); let path = Path { leaf_index: self.next_available().unwrap(), auth_path: new_auth_path, leaf_sibling_hash: leaf_sibling, }; Ok((path, new_current_node)) } } pub fn current_proof(&self) -> Path<P> { self.current_path.clone() } pub fn root(&self) -> P::InnerDigest { self.root.clone() } } #[inline] fn is_left_child(index_on_level: usize) -> bool { index_on_level % 2 == 0 } #[inline] fn parent_index_on_level(index_on_level: usize) -> usize { index_on_level >> 1 }
use crate::crh::TwoToOneCRHScheme; use crate::merkle_tree::{Config, DigestConverter, LeafParam, Path, TwoToOneParam}; use crate::CRHScheme; use ark_std::borrow::Borrow; use ark_std::vec::Vec; #[derive(Derivative)] #[derivative(Clone(bound = "P: Config"))] pub struct IncrementalMerkleTree<P: Config> { leaf_nodes: Vec<P::LeafDigest>, two_to_one_hash_param: TwoToOneParam<P>, leaf_hash_param: LeafParam<P>, height: usize, current_path: Path<P>, root: P::InnerDigest, empty: bool, } impl<P: Config> IncrementalMerkleTree<P> { pub fn is_empty(&self) -> bool { self.empty } pub fn current_index(&self) -> Option<usize> { if self.is_empty() { None } else { Some(self.current_path.leaf_index) } } pub fn next_avai
}) } pub fn append<T: Borrow<P::Leaf>>(&mut self, new_leaf: T) -> Result<(), crate::Error> { assert!(self.next_available() != None, "index out of range"); let leaf_digest = P::LeafHash::evaluate(&self.leaf_hash_param, new_leaf)?; let (path, root) = self.next_path(leaf_digest.clone())?; self.leaf_nodes.push(leaf_digest); self.current_path = path; self.root = root; self.empty = false; Ok(()) } pub fn next_path( &self, new_leaf_digest: P::LeafDigest, ) -> Result<(Path<P>, P::InnerDigest), crate::Error> { assert!(self.next_available() != None, "index out of range"); let tree_height = self.height; let hash_of_empty_node: P::InnerDigest = P::InnerDigest::default(); let hash_of_empty_leaf: P::LeafDigest = P::LeafDigest::default(); let mut new_auth_path = Vec::with_capacity(tree_height - 2); if self.is_empty() { let mut current_node = P::TwoToOneHash::evaluate( &self.two_to_one_hash_param, P::LeafInnerDigestConverter::convert(new_leaf_digest)?, P::LeafInnerDigestConverter::convert(P::LeafDigest::default())?, )?; for _ in 0..tree_height - 2 { new_auth_path.push(hash_of_empty_node.clone()); current_node = P::TwoToOneHash::compress( &self.two_to_one_hash_param, current_node, hash_of_empty_node.clone(), )?; } let path = Path { leaf_index: 0, auth_path: new_auth_path, leaf_sibling_hash: hash_of_empty_leaf, }; Ok((path, current_node)) } else { let mut new_index = self.next_available().unwrap(); let mut old_index = self.current_index().unwrap(); let old_leaf = self.leaf_nodes[old_index].clone(); let (old_left_leaf, old_right_leaf) = if is_left_child(old_index) { ( self.leaf_nodes[old_index].clone(), self.current_path.leaf_sibling_hash.clone(), ) } else { ( self.current_path.leaf_sibling_hash.clone(), self.leaf_nodes[old_index].clone(), ) }; let (new_left_leaf, new_right_leaf, leaf_sibling) = if is_left_child(new_index) { ( new_leaf_digest, hash_of_empty_leaf.clone(), hash_of_empty_leaf, ) } else { (old_leaf.clone(), new_leaf_digest, old_leaf) }; let mut old_current_node = 
P::TwoToOneHash::evaluate( &self.two_to_one_hash_param, P::LeafInnerDigestConverter::convert(old_left_leaf)?, P::LeafInnerDigestConverter::convert(old_right_leaf)?, )?; let mut new_current_node = P::TwoToOneHash::evaluate( &self.two_to_one_hash_param, P::LeafInnerDigestConverter::convert(new_left_leaf)?, P::LeafInnerDigestConverter::convert(new_right_leaf)?, )?; let mut old_auth_path = self.current_path.auth_path.clone(); old_auth_path.reverse(); for x in 0..tree_height - 2 { new_index = parent_index_on_level(new_index); old_index = parent_index_on_level(old_index); if new_index == old_index { new_auth_path.push(old_auth_path[x].clone()); let (new_left, new_right) = if is_left_child(new_index) { (new_current_node, hash_of_empty_node.clone()) } else { (old_auth_path[x].clone(), new_current_node) }; new_current_node = P::TwoToOneHash::compress( &self.two_to_one_hash_param, new_left, new_right, )?; } else { let auth_node = if is_left_child(new_index) { hash_of_empty_node.clone() } else { old_current_node.clone() }; new_auth_path.push(auth_node); let (new_left, new_right) = if is_left_child(new_index) { (new_current_node.clone(), hash_of_empty_node.clone()) } else { (old_current_node.clone(), new_current_node) }; new_current_node = P::TwoToOneHash::compress( &self.two_to_one_hash_param, new_left, new_right, )?; if !is_left_child(old_index) { old_current_node = P::TwoToOneHash::compress( &self.two_to_one_hash_param, old_auth_path[x].clone(), old_current_node, )?; } } } new_auth_path.reverse(); let path = Path { leaf_index: self.next_available().unwrap(), auth_path: new_auth_path, leaf_sibling_hash: leaf_sibling, }; Ok((path, new_current_node)) } } pub fn current_proof(&self) -> Path<P> { self.current_path.clone() } pub fn root(&self) -> P::InnerDigest { self.root.clone() } } #[inline] fn is_left_child(index_on_level: usize) -> bool { index_on_level % 2 == 0 } #[inline] fn parent_index_on_level(index_on_level: usize) -> usize { index_on_level >> 1 }
lable(&self) -> Option<usize> { let current_index = self.current_path.leaf_index; if self.is_empty() { Some(0) } else if current_index < (1 << (self.height - 1)) - 1 { Some(current_index + 1) } else { None } } pub fn blank( leaf_hash_param: &LeafParam<P>, two_to_one_hash_param: &TwoToOneParam<P>, height: usize, ) -> Result<Self, crate::Error> { assert!( height > 1, "the height of incremental merkle tree should be at least 2" ); let leaves_digest = vec![]; Ok(IncrementalMerkleTree { current_path: Path { leaf_sibling_hash: P::LeafDigest::default(), auth_path: Vec::new(), leaf_index: 0, }, leaf_nodes: leaves_digest, two_to_one_hash_param: two_to_one_hash_param.clone(), leaf_hash_param: leaf_hash_param.clone(), root: P::InnerDigest::default(), height, empty: true,
random
[ { "content": "#[inline]\n\nfn is_root(index: usize) -> bool {\n\n index == 0\n\n}\n\n\n\n/// Returns the index of the left child, given an index.\n", "file_path": "src/merkle_tree/mod.rs", "rank": 0, "score": 159094.4456264609 }, { "content": "pub trait ConfigGadget<P: Config, ConstraintF...
Rust
rustrat-client/tests/wasm.rs
rustrat/rustrat
28604668d54e5a00aad702ae411dc52ac1458f74
use rustrat_client::ffi::wrappers; use rustrat_client::ffi::FnTable; use rustrat_client::runtime::executor::Environment; use rustrat_client::runtime::CommonUtils; use wasm3; #[test] fn test_adder() { let common_utils = CommonUtils::new(); let mut env = Environment::new( &include_bytes!("wasm-test-bins/wasm_test_bins.wasm")[..], common_utils, |_| {}, ) .expect("Unable to create WASM environment."); let wasm_env = env.get_wasm_environment(); let func = wasm_env .find_function::<(u32, u32), u32>("add") .expect("Unable to find the add function"); assert_eq!(func.call(3, 6).expect("Unable to call the add function"), 9); } static mut IS_EXTERNAL_FN_CALLED: bool = false; fn external_fn(param: u32) -> u32 { unsafe { IS_EXTERNAL_FN_CALLED = true; } param } wasm3::make_func_wrapper!(external_fn_wrapper: external_fn(param: u32) -> u32); #[test] fn test_external_fn() { let common_utils = CommonUtils::new(); let mut env = Environment::new( &include_bytes!("wasm-test-bins/wasm_test_bins.wasm")[..], common_utils, |_| {}, ) .expect("Unable to create WASM environment."); let wasm_env = env.get_wasm_environment(); let mut wasm_module = wasm_env.modules().next().expect("No WASM modules found."); wasm_module .link_function::<(u32,), u32>("env", "external_fn", external_fn_wrapper) .expect("Unable to link external_fn"); let func = wasm_module .find_function::<(u32,), u32>("call_external") .expect("Unable to find the call_external function"); assert_eq!( func.call(9) .expect("Unable to call the call_external function"), 9 ); unsafe { assert_eq!(IS_EXTERNAL_FN_CALLED, true, "external-fn not called"); } } #[test] fn test_wasm_virtualalloc_virtualfree() { let common_utils = CommonUtils::new(); let mut env = Environment::new( &include_bytes!("wasm-test-bins/wasm_test_bins.wasm")[..], common_utils.clone(), |_| {}, ) .expect("Unable to create WASM environment."); let wasm_env = env.get_wasm_environment(); let mut wasm_module = wasm_env.modules().next().expect("No WASM modules found."); 
wrappers::link_ffi_bindings(&mut wasm_module, &common_utils.fn_table) .expect("Unable to link ffi bindings."); let virtualalloc = wasm_module .find_function::<(), u64>("virtualalloc_u64") .expect("Unable to find the virtualalloc_u64 function"); let virtualfree = wasm_module .find_function::<(u64,), u32>("virtualfree") .expect("Unable to find the virtualfree function"); for _ in 0..100 { unsafe { common_utils.fn_table.replace(FnTable::new()); let ptr = virtualalloc .call() .expect("Unable to call the virtualalloc_u64 function."); let actual_ptr: *mut u64 = ptr as *mut u64; assert_ne!(0, ptr, "VirtualAlloc return 0/NULL, indicating an error."); assert_eq!( 0, *actual_ptr, "Allocated pointer does not contain 0, as guaranteed by VirtualAlloc." ); *actual_ptr = 9; assert_eq!( 9, *actual_ptr, "Unable to read and/or write to pointer from VirtualAlloc." ); assert_ne!( 0, virtualfree .call(ptr) .expect("Unable to call the virtualfree function."), "Virtualfree returned 0, indicating an error." ); } } }
use rustrat_client::ffi::wrappers; use rustrat_client::ffi::FnTable; use rustrat_client::runtime::executor::Environment; use rustrat_client::runtime::CommonUtils; use wasm3; #[test] fn test_adder() { let common_utils = CommonUtils::new(); let mut env = Environment::new( &include_bytes!("wasm-test-bins/wasm_test_bins.wasm")[..], common_utils, |_| {}, ) .expect("Unable to create WASM environment."); let wasm_env = env.get_wasm_environment(); let func = wasm_env .find_function::<(u32, u32), u32>("add") .expect("Unable to find the add function"); assert_eq!(func.call(3, 6).expect("Unable to call the add function"), 9); } static mut IS_EXTERNAL_FN_CALLED: bool = false; fn external_fn(param: u32) -> u32 { unsafe { IS_EXTERNAL_FN_CALLED = true; } param } wasm3::make_func_wrapper!(external_fn_wrapper: external_fn(param: u32) -> u32); #[test] fn test_external_fn() { let common_utils = CommonUtils::new(); let mut env = Environment::new( &include_bytes!("wasm-test-bins/wasm_test_bins.wasm")[..], common_utils, |_| {}, ) .expect("Unable to create WASM environment."); let wasm_env = env.get_wasm_environment(); let mut wasm_module = wasm_env.modules().next().expect("No WASM modules found."); wasm_module .link_function::<(u32,), u32>("env", "external_fn", external_fn_wrapper) .expect("Unable to link external_fn"); let func = wasm_module .find_function::<(u32,), u32>("call_external") .expect("Unable to find the call_external function"); assert_eq!( func.call(9) .expect("Unable to call the call_external function"), 9 ); unsafe { assert_eq!(IS_EXTERNAL_FN_CALLED, true, "external-fn not called"); } } #[test]
fn test_wasm_virtualalloc_virtualfree() { let common_utils = CommonUtils::new(); let mut env = Environment::new( &include_bytes!("wasm-test-bins/wasm_test_bins.wasm")[..], common_utils.clone(), |_| {}, ) .expect("Unable to create WASM environment."); let wasm_env = env.get_wasm_environment(); let mut wasm_module = wasm_env.modules().next().expect("No WASM modules found."); wrappers::link_ffi_bindings(&mut wasm_module, &common_utils.fn_table) .expect("Unable to link ffi bindings."); let virtualalloc = wasm_module .find_function::<(), u64>("virtualalloc_u64") .expect("Unable to find the virtualalloc_u64 function"); let virtualfree = wasm_module .find_function::<(u64,), u32>("virtualfree") .expect("Unable to find the virtualfree function"); for _ in 0..100 { unsafe { common_utils.fn_table.replace(FnTable::new()); let ptr = virtualalloc .call() .expect("Unable to call the virtualalloc_u64 function."); let actual_ptr: *mut u64 = ptr as *mut u64; assert_ne!(0, ptr, "VirtualAlloc return 0/NULL, indicating an error."); assert_eq!( 0, *actual_ptr, "Allocated pointer does not contain 0, as guaranteed by VirtualAlloc." ); *actual_ptr = 9; assert_eq!( 9, *actual_ptr, "Unable to read and/or write to pointer from VirtualAlloc." ); assert_ne!( 0, virtualfree .call(ptr) .expect("Unable to call the virtualfree function."), "Virtualfree returned 0, indicating an error." ); } } }
function_block-full_function
[ { "content": "// TODO combine the two link functions?\n\npub fn link_print_closure<F: Fn(&str) + 'static>(\n\n module: &mut wasm3::Module,\n\n print_closure: F,\n\n) -> Result<()> {\n\n module\n\n .link_closure(\n\n \"rustrat\",\n\n \"print\",\n\n move |cc, (str_...
Rust
transformer/src/parsers/doc/etc/primitive_type.rs
ccouzens/vcloud-rest-openapi
b6c772aed95101891eeba529dcf180995be51ac7
use std::str::FromStr; use thiserror::Error; #[derive(Debug, PartialEq, Copy, Clone)] pub enum PrimitiveType { AnyType, AnyUri, Base64Binary, Boolean, DateTime, Decimal, Double, Float, HexBinary, Int, Integer, Long, NormalizedString, Short, String, } #[derive(Error, Debug, PartialEq)] pub enum ParsePrimitiveTypeError { #[error("No match for input: `{0}`")] NoMatch(String), } impl FromStr for PrimitiveType { type Err = ParsePrimitiveTypeError; fn from_str(s: &str) -> Result<Self, Self::Err> { Ok(match s { "xs:anyType" => PrimitiveType::AnyType, "xs:anyURI" => PrimitiveType::AnyUri, "xs:base64Binary" => PrimitiveType::Base64Binary, "xs:boolean" => PrimitiveType::Boolean, "xs:dateTime" => PrimitiveType::DateTime, "xs:decimal" => PrimitiveType::Decimal, "xs:double" => PrimitiveType::Double, "xs:float" => PrimitiveType::Float, "xs:hexBinary" => PrimitiveType::HexBinary, "xs:int" => PrimitiveType::Int, "xs:integer" => PrimitiveType::Integer, "xs:long" => PrimitiveType::Long, "xs:normalizedString" => PrimitiveType::NormalizedString, "xs:short" => PrimitiveType::Short, "xs:string" => PrimitiveType::String, _ => return Err(ParsePrimitiveTypeError::NoMatch(s.to_owned())), }) } } #[derive(Debug, PartialEq)] pub(super) struct RestrictedPrimitiveType<'a> { pub(super) r#type: PrimitiveType, pub(super) pattern: &'a Option<String>, pub(super) enumeration: &'a Vec<String>, pub(super) min_inclusive: &'a Option<String>, } impl<'a> From<&RestrictedPrimitiveType<'a>> for openapiv3::Type { fn from(t: &RestrictedPrimitiveType) -> Self { match &t.r#type { PrimitiveType::AnyType | PrimitiveType::Decimal | PrimitiveType::HexBinary | PrimitiveType::NormalizedString | PrimitiveType::String => Self::String(openapiv3::StringType { enumeration: t.enumeration.clone(), pattern: t.pattern.clone(), ..Default::default() }), PrimitiveType::AnyUri => Self::String(openapiv3::StringType { enumeration: t.enumeration.clone(), pattern: t.pattern.clone(), format: 
openapiv3::VariantOrUnknownOrEmpty::Unknown("uri".to_owned()), ..Default::default() }), PrimitiveType::Base64Binary => Self::String(openapiv3::StringType { enumeration: t.enumeration.clone(), pattern: t.pattern.clone(), format: openapiv3::VariantOrUnknownOrEmpty::Item(openapiv3::StringFormat::Byte), ..Default::default() }), PrimitiveType::Boolean => Self::Boolean {}, PrimitiveType::DateTime => Self::String(openapiv3::StringType { enumeration: t.enumeration.clone(), pattern: t.pattern.clone(), format: openapiv3::VariantOrUnknownOrEmpty::Item(openapiv3::StringFormat::DateTime), ..Default::default() }), PrimitiveType::Double => Self::Number(openapiv3::NumberType { format: openapiv3::VariantOrUnknownOrEmpty::Item(openapiv3::NumberFormat::Double), minimum: t.min_inclusive.as_ref().and_then(|m| m.parse().ok()), enumeration: t .enumeration .iter() .filter_map(|s| s.parse().ok()) .collect(), ..Default::default() }), PrimitiveType::Float => Self::Number(openapiv3::NumberType { format: openapiv3::VariantOrUnknownOrEmpty::Item(openapiv3::NumberFormat::Float), minimum: t.min_inclusive.as_ref().and_then(|m| m.parse().ok()), enumeration: t .enumeration .iter() .filter_map(|s| s.parse().ok()) .collect(), ..Default::default() }), PrimitiveType::Int => Self::Integer(openapiv3::IntegerType { format: openapiv3::VariantOrUnknownOrEmpty::Item(openapiv3::IntegerFormat::Int32), minimum: t.min_inclusive.as_ref().and_then(|m| m.parse().ok()), enumeration: t .enumeration .iter() .filter_map(|s| s.parse().ok()) .collect(), ..Default::default() }), PrimitiveType::Integer | PrimitiveType::Short => { Self::Integer(openapiv3::IntegerType { minimum: t.min_inclusive.as_ref().and_then(|m| m.parse().ok()), enumeration: t .enumeration .iter() .filter_map(|s| s.parse().ok()) .collect(), ..Default::default() }) } PrimitiveType::Long => Self::Integer(openapiv3::IntegerType { format: openapiv3::VariantOrUnknownOrEmpty::Item(openapiv3::IntegerFormat::Int64), minimum: t.min_inclusive.as_ref().and_then(|m| 
m.parse().ok()), enumeration: t .enumeration .iter() .filter_map(|s| s.parse().ok()) .collect(), ..Default::default() }), } } }
use std::str::FromStr; use thiserror::Error; #[derive(Debug, PartialEq, Copy, Clone)] pub enum PrimitiveType { AnyType, AnyUri, Base64Binary, Boolean, DateTime, Decimal, Double, Float, HexBinary, Int, Integer, Long, NormalizedString, Short, String, } #[derive(Error, Debug, PartialEq)] pub enum ParsePrimitiveTypeError { #[error("No match for input: `{0}`")] NoMatch(String), } impl FromStr for PrimitiveType { type Err = ParsePrimitiveTypeError; fn from_str(s: &str) -> Result<Self, Self::Err> { Ok(match s { "xs:anyType" => PrimitiveType::AnyType, "xs:anyURI" => PrimitiveType::AnyUri, "xs:base64Binary" => PrimitiveType::Base64Binary, "xs:boolean" => PrimitiveType::Boolean, "xs:dateTime" => PrimitiveType::DateTime, "xs:decimal" => PrimitiveType::Decimal, "xs:double" => PrimitiveType::Double, "xs:float" => PrimitiveType::Float, "xs:hexBinary" => PrimitiveType::HexBinary, "xs:int" => PrimitiveType::Int, "xs:integer" => PrimitiveType::Integer, "xs:long" => PrimitiveType::Long, "xs:normalizedString" => PrimitiveType::NormalizedString, "xs:short" => PrimitiveType::Short, "xs:string" => PrimitiveType::String, _ => return Err(ParsePrimitiveTypeError::NoMatch(s.to_owned())), }) } } #[derive(Debug, PartialEq)] pub(super) struct RestrictedPrimitiveType<'a> { pub(super) r#type: PrimitiveType, pub(super) pattern: &'a Option<String>, pub(super) enumeration: &'a Vec<String>, pub(super) min_inclusive: &'a Option<String>, } impl<'a> From<&RestrictedPrimitiveType<'a>> for openapiv3::Type { fn from(t: &RestrictedPrimitiveType) -> Self { match &t.r#type { PrimitiveType::AnyType | PrimitiveType::Decimal | PrimitiveType::HexBinary | PrimitiveType::NormalizedString | PrimitiveType::String => Self::String(openapiv3::StringType { enumeration: t.enumeration.clone(), pattern: t.pattern.clone(), ..Default::default() }), PrimitiveType::AnyUri => Self::String(openapiv3::StringType { enumeration: t.enumeration.clone(), pattern: t.pattern.clone(), format: 
openapiv3::VariantOrUnknownOrEmpty::Unknown("uri".to_owned()), ..Default::default() }), PrimitiveType::Base64Binary => Self::String(openapiv3::StringType { enumeration: t.enumeration.clone(), pattern: t.pattern.clone(), format: openapiv3::VariantOrUnknownOrEmpty::Item(openapiv3::StringFormat::Byte), ..Default::default() }), PrimitiveType::Boolean => Self::Boolean {}, PrimitiveType::DateTime => Self::String(openapiv3::StringType { enumeration: t.enumeration.clone(), pattern: t.pattern.clone(), format: openapiv3::VariantOrUnknownOrEmpty::Item(openapiv3::StringFormat::DateTime), ..Default::defaul
}
t() }), PrimitiveType::Double => Self::Number(openapiv3::NumberType { format: openapiv3::VariantOrUnknownOrEmpty::Item(openapiv3::NumberFormat::Double), minimum: t.min_inclusive.as_ref().and_then(|m| m.parse().ok()), enumeration: t .enumeration .iter() .filter_map(|s| s.parse().ok()) .collect(), ..Default::default() }), PrimitiveType::Float => Self::Number(openapiv3::NumberType { format: openapiv3::VariantOrUnknownOrEmpty::Item(openapiv3::NumberFormat::Float), minimum: t.min_inclusive.as_ref().and_then(|m| m.parse().ok()), enumeration: t .enumeration .iter() .filter_map(|s| s.parse().ok()) .collect(), ..Default::default() }), PrimitiveType::Int => Self::Integer(openapiv3::IntegerType { format: openapiv3::VariantOrUnknownOrEmpty::Item(openapiv3::IntegerFormat::Int32), minimum: t.min_inclusive.as_ref().and_then(|m| m.parse().ok()), enumeration: t .enumeration .iter() .filter_map(|s| s.parse().ok()) .collect(), ..Default::default() }), PrimitiveType::Integer | PrimitiveType::Short => { Self::Integer(openapiv3::IntegerType { minimum: t.min_inclusive.as_ref().and_then(|m| m.parse().ok()), enumeration: t .enumeration .iter() .filter_map(|s| s.parse().ok()) .collect(), ..Default::default() }) } PrimitiveType::Long => Self::Integer(openapiv3::IntegerType { format: openapiv3::VariantOrUnknownOrEmpty::Item(openapiv3::IntegerFormat::Int64), minimum: t.min_inclusive.as_ref().and_then(|m| m.parse().ok()), enumeration: t .enumeration .iter() .filter_map(|s| s.parse().ok()) .collect(), ..Default::default() }), } }
function_block-function_prefixed
[ { "content": "fn html_to_mimes(html: &str) -> impl Iterator<Item = String> + '_ {\n\n html.split(\"<br>\")\n\n .filter(|&t| !(t.is_empty() || t == \"None\"))\n\n .map(String::from)\n\n}\n\n\n\nimpl<'a> TryFrom<DetailPage> for Operation {\n\n type Error = OperationParseError;\n\n\n\n fn tr...
Rust
wallpapers/MusicVisualization/res/raw/many.rs
xie-wenjie/AndroidBaseApplicationSourse
931ca8fa907b2e0a19dd35948c37da8d166abf90
#pragma version(1) #pragma stateVertex(PVBackground) #pragma stateRaster(parent) #pragma stateStore(PFSBackground) #define RSID_POINTS 1 void dumpState() { } void drawVU(float* ident) { int i; float mat1[16]; float scale = 0.0041; matrixLoadMat(mat1,ident); matrixRotate(mat1, 0.f, 0.f, 0.f, 1.f); matrixScale(mat1, scale, scale, scale); vpLoadModelMatrix(mat1); bindProgramFragment(NAMED_PFBackgroundMip); bindTexture(NAMED_PFBackgroundMip, 0, NAMED_Tvumeter_background); drawQuadTexCoords( -208.0f, -33.0f, 600.0f, 0.09375f, 0.9551f, 208, -33.0f, 600.0f, 0.90625, 0.9551f, 208, 200.0f, 600.0f, 0.90625, 0.0449f, -208.0f, 200.0f, 600.0f, 0.09375f, 0.0449f); if (State->mPeak > 0) { bindTexture(NAMED_PFBackgroundMip, 0, NAMED_Tvumeter_peak_on); } else { bindTexture(NAMED_PFBackgroundMip, 0, NAMED_Tvumeter_peak_off); } drawQuadTexCoords( 140.0f, 70.0f, 600.0f, 0.0625f, 0.953125, 196, 70.0f, 600.0f, 0.9375f, 0.953125, 196, 128.0f, 600.0f, 0.9375f, 0.046875, 140.0f, 128.0f, 600.0f, 0.0625f, 0.046875); matrixLoadMat(mat1,ident); matrixTranslate(mat1, 0.f, -57.0f * scale, 0.f); matrixRotate(mat1, State->mAngle - 90.f, 0.f, 0.f, 1.f); matrixScale(mat1, scale, scale, scale); vpLoadModelMatrix(mat1); bindTexture(NAMED_PFBackgroundMip, 0, NAMED_Tvumeter_needle); drawQuadTexCoords( -44.0f, -102.0f+57.f, 600.0f, .15625f, 0.755859375f, 44.0f, -102.0f+57.f, 600.0f, 0.84375f, 0.755859375f, 44.0f, 160.0f+57.f, 600.0f, 0.84375f, 0.244140625f, -44.0f, 160.0f+57.f, 600.0f, 0.15625f, 0.244140625f); matrixLoadMat(mat1,ident); matrixRotate(mat1, 0.f, 0.f, 0.f, 1.f); matrixScale(mat1, scale, scale, scale); vpLoadModelMatrix(mat1); bindTexture(NAMED_PFBackgroundMip, 0, NAMED_Tvumeter_black); drawQuad(-100.f, -55.f, 600.f, -100.f, -105.f, 600.f, 100.f, -105.f, 600.f, 100.f, -55.f, 600.f); bindTexture(NAMED_PFBackgroundMip, 0, NAMED_Tvumeter_frame); drawQuadTexCoords( -236.0f, -60.0f, 600.0f, 0.0390625f, 0.783203125f, 236, -60.0f, 600.0f, 0.9609375f, 0.783203125f, 236, 230.0f, 600.0f, 0.9609375f, 
0.216796875f, -236.0f, 230.0f, 600.0f, 0.0390625f, 0.216796875f); } int fadeoutcounter = 0; int fadeincounter = 0; int wave1pos = 0; int wave1amp = 0; int wave2pos = 0; int wave2amp= 0; int wave3pos = 0; int wave3amp= 0; int wave4pos = 0; int wave4amp= 0; float idle[4096]; int waveCounter = 0; int lastuptime = 0; float autorotation = 0; #define FADEOUT_LENGTH 100 #define FADEOUT_FACTOR 0.95f #define FADEIN_LENGTH 15 void makeIdleWave(float *points) { int i; float amp1 = sinf(0.007 * wave1amp) * 120 * 1024; float amp2 = sinf(0.023 * wave2amp) * 80 * 1024; float amp3 = sinf(0.011 * wave3amp) * 40 * 1024; float amp4 = sinf(0.031 * wave4amp) * 20 * 1024; for (i = 0; i < 256; i++) { float val = sinf(0.013 * (wave1pos + i * 4)) * amp1 + sinf(0.029 * (wave2pos + i * 4)) * amp2; float off = sinf(0.005 * (wave3pos + i * 4)) * amp3 + sinf(0.017 * (wave4pos + i * 4)) * amp4; if (val < 2.f && val > -2.f) val = 2.f; points[i*8+1] = val + off; points[i*8+5] = -val + off; } } void drawWave(float *ident) { float scale = .008f; float mat1[16]; matrixLoadMat(mat1, ident); matrixScale(mat1, scale, scale / 2048.f, scale); matrixTranslate(mat1, 0.f, 81920.f, 350.f); vpLoadModelMatrix(mat1); int i; if (State->mIdle) { float *points = loadArrayF(RSID_POINTS, 0); if (fadeoutcounter > 0) { for (i = 0; i < 256; i++) { float val = absf(points[i*8+1]); val = val * FADEOUT_FACTOR; if (val < 2.f) val = 2.f; points[i*8+1] = val; points[i*8+5] = -val; } fadeoutcounter--; if (fadeoutcounter == 0) { wave1amp = 0; wave2amp = 0; wave3amp = 0; wave4amp = 0; } } else { makeIdleWave(points); } fadeincounter = FADEIN_LENGTH; } else { if (fadeincounter > 0 && fadeoutcounter == 0) { makeIdleWave(idle); if (waveCounter != State->mWaveCounter) { waveCounter = State->mWaveCounter; float *points = loadArrayF(RSID_POINTS, 0); for (i = 0; i < 256; i++) { float val = absf(points[i*8+1]); points[i*8+1] = (val * (FADEIN_LENGTH - fadeincounter) + idle[i*8+1] * fadeincounter) / FADEIN_LENGTH; points[i*8+5] = (-val * 
(FADEIN_LENGTH - fadeincounter) + idle[i*8+5] * fadeincounter) / FADEIN_LENGTH; } } fadeincounter--; if (fadeincounter == 0) { fadeoutcounter = FADEOUT_LENGTH; } } else { fadeoutcounter = FADEOUT_LENGTH; } } uploadToBufferObject(NAMED_PointBuffer); bindProgramFragment(NAMED_PFBackgroundNoMip); bindTexture(NAMED_PFBackgroundNoMip, 0, NAMED_Tlinetexture); drawSimpleMesh(NAMED_CubeMesh); } void drawVizLayer(float *ident) { int i; for (i = 0; i < 6; i++) { if (i & 1) { drawVU(ident); } else { drawWave(ident); } matrixRotate(ident, 60.f, 0.f, 1.f, 0.f); } } int main(int launchID) { int i; float ident[16]; int now = uptimeMillis(); int delta = now - lastuptime; lastuptime = now; if (delta > 80) { delta = 80; } autorotation += .3 * delta / 35; while (autorotation > 360.f) autorotation -= 360.f; matrixLoadIdentity(ident); matrixRotate(ident, State->mTilt, 1.f, 0.f, 0.f); matrixRotate(ident, autorotation + State->mRotate, 0.f, 1.f, 0.f); matrixTranslate(ident, 0.f, -1.f, 0.f); matrixScale(ident, 1.f, -1.f, 1.f); drawVizLayer(ident); bindProgramFragment(NAMED_PFBackgroundMip); bindTexture(NAMED_PFBackgroundMip, 0, NAMED_Tvumeter_album); drawQuadTexCoords( -1500.0f, -60.0f, 1500.0f, 0.f, 1.f, 1500, -60.0f, 1500.0f, 1.f, 1.f, 1500, -60.0f, -1500.0f, 1.f, 0.f, -1500.0f, -60.0f, -1500.0f, 0.f, 0.f); matrixScale(ident, 1.f, -1.f, 1.f); matrixTranslate(ident, 0.f, 1.f, 0.f); drawVizLayer(ident); wave1pos++; wave1amp++; wave2pos--; wave2amp++; wave3pos++; wave3amp++; wave4pos++; wave4amp++; return 1; }
#pragma version(1) #pragma stateVertex(PVBackground) #pragma stateRaster(parent) #pragma stateStore(PFSBackground) #define RSID_POINTS 1 void dumpState() { } void drawVU(float* ident) { int i; float mat1[16]; float scale = 0.0041; matrixLoadMat(mat1,ident); matrixRotate(mat1, 0.f, 0.f, 0.f, 1.f); matrixScale(mat1, scale, scale, scale); vpLoadModelMatrix(mat1); bindProgramFragment(NAMED_PFBackgroundMip); bindTexture(NAMED_PFBackgroundMip, 0, NAMED_Tvumeter_background); drawQuadTexCoords( -208.0f, -33.0f, 600.0f, 0.09375f, 0.9551f, 208, -33.0f, 600.0f, 0.90625, 0.9551f, 208, 200.0f, 600.0f, 0.90625, 0.0449f, -208.0f, 200.0f, 600.0f, 0.09375f, 0.0449f); if (State->mPeak > 0) { bindTexture(NAMED_PFBackgroundMip, 0, NAMED_Tvumeter_peak_on); } else { bindTexture(NAMED_PFBackgroundMip, 0, NAMED_Tvumeter_peak_off); } drawQuadTexCoords( 140.0f, 70.0f, 600.0f, 0.0625f, 0.953125, 196, 70.0f, 600.0f, 0.9375f, 0.953125, 196, 128.0f, 600.0f, 0.9375f, 0.046875, 140.0f, 128.0f, 600.0f, 0.0625f, 0.046875); matrixLoadMat(mat1,ident); matrixTranslate(mat1, 0.f, -57.0f * scale, 0.f); matrixRotate(mat1, State->mAngle - 90.f, 0.f, 0.f, 1.f); matrixScale(mat1, scale, scale, scale); vpLoadModelMatrix(mat1); bindTexture(NAMED_PFBackgroundMip, 0, NAMED_Tvumeter_needle); drawQuadTexCoords( -44.0f, -102.0f+57.f, 600.0f, .15625f, 0.755859375f, 44.0f, -102.0f+57.f, 600.0f, 0.84375f, 0.755859375f, 44.0f, 160.0f+57.f, 600.0f, 0.84375f, 0.244140625f, -44.0f, 160.0f+57.f, 600.0f, 0.15625f, 0.244140625f); matrixLoadMat(mat1,ident); matrixRotate(mat1, 0.f, 0.f, 0.f, 1.f); matrixScale(mat1, scale, scale, scale); vpLoadModelMatrix(mat1); bindTexture(NAMED_PFBackgroundMip, 0, NAMED_Tvumeter_black); drawQuad(-100.f, -55.f, 600.f, -100.f, -105.f, 600.f, 100.f, -105.f, 600.f, 100.f, -55.f, 600.f); bindTexture(NAMED_PFBackgroundMip, 0, NAMED_Tvumeter_frame); drawQuadTexCoords( -236.0f, -60.0f, 600.0f, 0.0390625f, 0.783203125f, 236, -60.0f, 600.0f, 0.9609375f, 0.783203125f, 236, 230.0f, 600.0f, 0.9609375f, 
0.216796875f, -236.0f, 230.0f, 600.0f, 0.0390625f, 0.216796875f); } int fadeoutcounter = 0; int fadeincounter = 0; int wave1pos = 0; int wave1amp = 0; int wave2pos = 0; int wave2amp= 0; int wave3pos = 0; int wave3amp= 0; int wave4pos = 0; int wave4amp= 0; float idle[4096]; int waveCounter = 0; int lastuptime = 0; float autorotation = 0; #define FADEOUT_LENGTH 100 #define FADEOUT_FACTOR 0.95f #define FADEIN_LENGTH 15 void makeIdleWave(float *points) { int i; float amp1 = sinf(0.007 * wave1amp) * 120 * 1024; float amp2 = sinf(0.023 * wave2amp) * 80 * 1024; float amp3 = sinf(0.011 * wave3amp) * 40 * 1024; float amp4 = sinf(0.031 * wave4amp) * 20 * 1024; for (i = 0; i < 256; i++) { float val = sinf(0.013 * (wave1pos + i * 4)) * amp1 + sinf(0.029 * (wave2pos + i * 4)) * amp2; float off = sinf(0.005 * (wave3pos + i * 4)) * amp3 + sinf(0.017 * (wave4pos + i * 4)) * amp4; if (val < 2.f && val > -2.f) val = 2.f; points[i*8+1] = val + off; points[i*8+5] = -val + off; } } void drawWave(float *ident) { float scale = .008f; float mat1[16]; matrixLoadMat(mat1, ident); matrixScale(mat1, scale, scale / 2048.f, scale); matrixTranslate(mat1, 0.f, 81920.f, 350.f); vpLoadModelMatrix(mat1); int i; if (State->mIdle) { float *points = loadArrayF(RSID_POINTS, 0); if (fadeoutcounter > 0) { for (i = 0; i < 256; i++) { float val = absf(points[i*8+1]); val = val * FADEOUT_FACTOR; if (val < 2.f) val = 2.f; points[i*8+1] = val; points[i*8+5] = -val; } fadeoutcounter--; if (fadeoutcounter == 0) { wave1amp = 0; wave2amp = 0; wave3amp = 0; wave4amp = 0; } } else { makeIdleWave(points); } fadeincounter = FADEIN_LENGTH; } else { if (fadeincounter > 0 && fadeoutcounter == 0) { makeIdleWave(idle); if (waveCounter != State->mWaveCounter) { waveCounter = State->mWaveCounter; float *points = loadArrayF(RSID_POINTS, 0); for (i = 0; i < 256; i++) { float val = absf(points[i*8+1]); points[i*8+1] = (val * (FADEIN_LENGTH - fadeincounter) + idle[i*8+1] * fadeincounter) / FADEIN_LENGTH; points[i*8+5] = (-val * 
(FADEIN_LENGTH - fadeincounter) + idle[i*8+5] * fadeincounter) / FADEIN_LENGTH; } } fadeincounter--; if (fadeincounter == 0) { fadeoutcounter = FADEOUT_LENGTH; } } else { fadeoutcounter = FADEOUT_LENGTH; } } uploadToBufferObject(NAMED_PointBuffer); bindProgramFragment(NAMED_PFBackgroundNoMip); bindTexture(NAMED_PFBackgroundNoMip, 0, NAMED_Tlinetexture); drawSimpleMesh(NAMED_CubeMesh); } void drawVizLayer(float *ident) { int i; for (i = 0; i < 6; i++) { if (i & 1) { drawVU(ident); } else { drawWave(ident); } matrixRotate(ident, 60.f, 0.f, 1.f, 0.f); } } int main(int launchID) { int i; float ident[16]; int now = uptimeMillis(); int delta = now - lastuptime; lastuptime = now; if (delta > 80) { delta = 80; } autorotation += .3 * delta / 35; while (autorotation > 360.f) autorotation -= 360.f; matrixLoadIdentity(ident); matrixRotate(ident, State->mTilt, 1.f, 0.f, 0.f); matrixRotate(ident, autorota
500.0f, 0.f, 1.f, 1500, -60.0f, 1500.0f, 1.f, 1.f, 1500, -60.0f, -1500.0f, 1.f, 0.f, -1500.0f, -60.0f, -1500.0f, 0.f, 0.f); matrixScale(ident, 1.f, -1.f, 1.f); matrixTranslate(ident, 0.f, 1.f, 0.f); drawVizLayer(ident); wave1pos++; wave1amp++; wave2pos--; wave2amp++; wave3pos++; wave3amp++; wave4pos++; wave4amp++; return 1; }
tion + State->mRotate, 0.f, 1.f, 0.f); matrixTranslate(ident, 0.f, -1.f, 0.f); matrixScale(ident, 1.f, -1.f, 1.f); drawVizLayer(ident); bindProgramFragment(NAMED_PFBackgroundMip); bindTexture(NAMED_PFBackgroundMip, 0, NAMED_Tvumeter_album); drawQuadTexCoords( -1500.0f, -60.0f, 1
random
[ { "content": " private float mDelta;\n", "file_path": "apps/Gallery3D/src/com/cooliris/media/FloatAnim.java", "rank": 0, "score": 156138.519818536 }, { "content": " public static final boolean boundsContainsPoint(float left, float right, float top, float bottom, float posX, float posY)...
Rust
lib/src/lattice/dom_pair.rs
MingweiSamuel/spinach
622fdb841ec1130ff2c896a8ca8f87d0e3b8869b
use std::cmp::Ordering; use super::{Lattice, LatticeRepr, Merge, Compare, Convert, Debottom, Top}; use super::bottom::BottomRepr; use crate::tag; pub struct DomPair<La: Lattice, Lb: Lattice> { _phantom: std::marker::PhantomData<(La, Lb)>, } impl<La: Lattice, Lb: Lattice> Lattice for DomPair<La, Lb> {} pub struct DomPairRepr<Ra: LatticeRepr, Rb: LatticeRepr> { _phantom: std::marker::PhantomData<(Ra, Rb)>, } impl<Ra: LatticeRepr, Rb: LatticeRepr> LatticeRepr for DomPairRepr<Ra, Rb> { type Lattice = DomPair<Ra::Lattice, Rb::Lattice>; type Repr = (Ra::Repr, Rb::Repr); } impl<SelfRA, SelfRB, DeltaRA, DeltaRB, La, Lb> Merge<DomPairRepr<DeltaRA, DeltaRB>> for DomPairRepr<SelfRA, SelfRB> where La: Lattice, Lb: Lattice, SelfRA: LatticeRepr<Lattice = La>, SelfRB: LatticeRepr<Lattice = Lb>, DeltaRA: LatticeRepr<Lattice = La>, DeltaRB: LatticeRepr<Lattice = Lb>, SelfRA: Merge<DeltaRA> + Compare<DeltaRA>, SelfRB: Merge<DeltaRB> + Compare<DeltaRB>, DeltaRA: Convert<SelfRA>, DeltaRB: Convert<SelfRB>, { fn merge(this: &mut <DomPairRepr<SelfRA, SelfRB> as LatticeRepr>::Repr, delta: <DomPairRepr<DeltaRA, DeltaRB> as LatticeRepr>::Repr) -> bool { match SelfRA::compare(&this.0, &delta.0) { None => { SelfRA::merge(&mut this.0, delta.0); SelfRB::merge(&mut this.1, delta.1); true } Some(Ordering::Equal) => { SelfRB::merge(&mut this.1, delta.1) } Some(Ordering::Less) => { *this = ( DeltaRA::convert(delta.0), DeltaRB::convert(delta.1), ); true } Some(Ordering::Greater) => false } } } impl<Ra: LatticeRepr, Rb: LatticeRepr> Convert<DomPairRepr<Ra, Rb>> for DomPairRepr<Ra, Rb> { fn convert(this: <DomPairRepr<Ra, Rb> as LatticeRepr>::Repr) -> <DomPairRepr<Ra, Rb> as LatticeRepr>::Repr { this } } impl<SelfRA, SelfRB, DeltaRA, DeltaRB, La, Lb> Compare<DomPairRepr<DeltaRA, DeltaRB>> for DomPairRepr<SelfRA, SelfRB> where La: Lattice, Lb: Lattice, SelfRA: LatticeRepr<Lattice = La>, SelfRB: LatticeRepr<Lattice = Lb>, DeltaRA: LatticeRepr<Lattice = La>, DeltaRB: LatticeRepr<Lattice = Lb>, SelfRA: 
Compare<DeltaRA>, SelfRB: Compare<DeltaRB>, { fn compare(this: &<DomPairRepr<SelfRA, SelfRB> as LatticeRepr>::Repr, other: &<DomPairRepr<DeltaRA, DeltaRB> as LatticeRepr>::Repr) -> Option<Ordering> { SelfRA::compare(&this.0, &other.0) .or_else(|| SelfRB::compare(&this.1, &other.1)) } } impl<Ra: Debottom, Rb: Debottom> Debottom for DomPairRepr<Ra, Rb> { fn is_bottom(this: &Self::Repr) -> bool { Ra::is_bottom(&this.0) && Rb::is_bottom(&this.1) } type DebottomLr = DomPairRepr<BottomRepr<Ra::DebottomLr>, BottomRepr<Rb::DebottomLr>>; fn debottom(this: Self::Repr) -> Option<<Self::DebottomLr as LatticeRepr>::Repr> { match (Ra::debottom(this.0), Rb::debottom(this.1)) { (None, None) => None, somes => Some(somes), } } } impl<Ra: Top, Rb: Top> Top for DomPairRepr<Ra, Rb> { fn is_top(this: &Self::Repr) -> bool { Ra::is_top(&this.0) && Rb::is_top(&this.1) } fn top() -> Self::Repr { (Ra::top(), Rb::top()) } } fn __assert_merges() { use static_assertions::{assert_impl_all, assert_not_impl_any}; use super::set_union::{SetUnionRepr}; type HashSetHashSet = DomPairRepr<SetUnionRepr<tag::HASH_SET, u32>, SetUnionRepr<tag::HASH_SET, u32>>; type HashSetArraySet = DomPairRepr<SetUnionRepr<tag::HASH_SET, u32>, SetUnionRepr<tag::ARRAY<8>, u32>>; type ArraySetHashSet = DomPairRepr<SetUnionRepr<tag::ARRAY<8>, u32>, SetUnionRepr<tag::HASH_SET, u32>>; type ArraySetArraySet = DomPairRepr<SetUnionRepr<tag::ARRAY<8>, u32>, SetUnionRepr<tag::ARRAY<8>, u32>>; assert_impl_all!(HashSetHashSet: Merge<HashSetHashSet>, Merge<HashSetArraySet>, Merge<ArraySetHashSet>, Merge<ArraySetArraySet>, ); assert_not_impl_any!(HashSetArraySet: Merge<HashSetHashSet>, Merge<HashSetArraySet>, Merge<ArraySetHashSet>, Merge<ArraySetArraySet>, ); assert_not_impl_any!(ArraySetHashSet: Merge<HashSetHashSet>, Merge<HashSetArraySet>, Merge<ArraySetHashSet>, Merge<ArraySetArraySet>, ); assert_not_impl_any!(ArraySetArraySet: Merge<HashSetHashSet>, Merge<HashSetArraySet>, Merge<ArraySetHashSet>, Merge<ArraySetArraySet>, ); }
use std::cmp::Ordering; use super::{Lattice, LatticeRepr, Merge, Compare, Convert, Debottom, Top}; use super::bottom::BottomRepr; use crate::tag; pub struct DomPair<La: Lattice, Lb: Lattice> { _phantom: std::marker::PhantomData<(La, Lb)>, } impl<La: Lattice, Lb: Lattice> Lattice for DomPair<La, Lb> {} pub struct DomPairRepr<Ra: LatticeRepr, Rb: LatticeRepr> { _phantom: std::marker::PhantomData<(Ra, Rb)>, } impl<Ra: LatticeRepr, Rb: LatticeRepr> LatticeRepr for DomPairRepr<Ra, Rb> { type Lattice = DomPair<Ra::Lattice, Rb::Lattice>; type Repr = (Ra::Repr, Rb::Repr); } impl<SelfRA, SelfRB, DeltaRA, DeltaRB, La, Lb> Merge<DomPairRepr<DeltaRA, DeltaRB>> for DomPairRepr<SelfRA, SelfRB> where La: Lattice, Lb: Lattice, SelfRA: LatticeRepr<Lattice = La>, SelfRB: LatticeRepr<Lattice = Lb>, DeltaRA: LatticeRepr<Lattice = La>, DeltaRB: LatticeRepr<Lattice = Lb>, SelfRA: Merge<DeltaRA> + Compare<DeltaRA>, SelfRB: Merge<DeltaRB> + Compare<DeltaRB>, DeltaRA: Convert<SelfRA>, DeltaRB: Convert<SelfRB>, { fn merge(this: &mut <DomPairRepr<SelfRA, SelfRB> as LatticeRepr>::Repr, delta: <DomPairRepr<DeltaRA, DeltaRB> as LatticeRepr>::Repr) -> bool { match SelfRA::compare(&this.0, &delta.0) { None => { SelfRA::merge(&mut this.0, delta.0); SelfRB::merge(&mut this.1, delta.1); true } Some(Ordering::Equal) => { SelfRB::merge(&mut this.1, delta.1) } Some(Ordering::Less) => { *this = ( DeltaRA::convert(delta.0), DeltaRB::convert(delta.1), ); true } Some(Ordering::Greater) => false } } } impl<Ra: LatticeRepr, Rb: LatticeRepr> Convert<DomPairRepr<Ra, Rb>> for DomPairRepr<Ra, Rb> { fn convert(this: <DomPairRepr<Ra, Rb> as LatticeRepr>::Repr) -> <DomPairRepr<Ra, Rb> as LatticeRepr>::Repr { this } } impl<SelfRA, SelfRB, DeltaRA, DeltaRB, La, Lb> Compare<DomPairRepr<DeltaRA, DeltaRB>> for DomPairRepr<SelfRA, SelfRB> where La: Lattice, Lb: Lattice, SelfRA: LatticeRepr<Lattice = La>, SelfRB: LatticeRepr<Lattice = Lb>, DeltaRA: LatticeRepr<Lattice = La>, DeltaRB: LatticeRepr<Lattice = Lb>, SelfRA: 
Compare<DeltaRA>, SelfRB: Compare<DeltaRB>, { fn compare(this: &<DomPairRepr<SelfRA, SelfRB> as LatticeRepr>::Repr, other: &<DomPairRepr<DeltaRA, DeltaRB> as LatticeRepr>::Repr) -> Option<Ordering> { SelfRA::compare(&this.0, &other.0) .or_else(|| SelfRB::compare(&this.1, &other.1)) } } impl<Ra: Debottom, Rb: Debottom> Debottom for DomPairRepr<Ra, Rb> { fn is_bottom(this: &Self::Repr) -> bool { Ra::is_bottom(&this.0) && Rb::is_bottom(&this.1) } type DebottomLr = DomPairRepr<BottomRepr<Ra::DebottomLr>, BottomRepr<Rb::DebottomLr>>; fn debottom(this: Self::Repr) -> Option<<Self::DebottomLr as LatticeRepr>::Repr> { match (Ra::debottom(this.0), Rb::debottom(this.1)) { (None, None) => None, somes => Some(somes), } } } impl<Ra: Top, Rb: Top> Top for DomPairRepr<Ra, Rb> { fn is_top(this: &Self::Repr) -> bool { Ra::is_top(&this.0) && Rb::is_top(&this.1) } fn top() -> Self::Repr { (Ra::top(), Rb::top()) } } fn __assert_merges() { use static_assertions::{assert_impl_all, assert_not_impl_any}; use super::set_union::{SetUnionRepr}; type HashSetHashSet = DomPairRepr<SetUnionRepr<tag::HASH_SET, u32>, SetUnionRepr<tag::HASH_SET, u32>>; type HashSetArraySet = DomPairRepr<SetUnionRepr<tag::HASH_SET, u32>, SetUnionRepr<tag::ARRAY<8>, u32>>; type ArraySetHashSet = DomPairRepr<SetUnionRepr<tag::ARRAY<8>, u32>, SetUnionRepr<tag::HASH_SET, u32>>; type ArraySetArraySet = DomPairRepr<SetUnionRepr<tag::ARRAY<8>, u32>, SetUnionRepr<tag::ARRAY<8>, u32>>; assert_impl_all!(HashSetHashSet: Merge<HashSetHashSet>, Merge<HashSetArraySet>, Merge<ArraySetHashSet>, Merge<ArraySetArraySet>, ); assert_not_impl_any!(HashSetArraySet: Merge<HashSetHashSet>, Merge<HashSetArraySet>, Merge<ArraySetHashSet>, Merge<ArraySetArraySet>, ); assert_not_impl_any!(ArraySetHashSet: Merge<HashSetHashSet>, Merge<HashSetArraySet>, Merge<ArraySetHashSet>,
Merge<ArraySetArraySet>, ); assert_not_impl_any!(ArraySetArraySet: Merge<HashSetHashSet>, Merge<HashSetArraySet>, Merge<ArraySetHashSet>, Merge<ArraySetArraySet>, ); }
function_block-function_prefix_line
[ { "content": "pub trait Merge<Delta: LatticeRepr>: LatticeRepr<Lattice = Delta::Lattice> {\n\n /// Merge DELTA into THIS. Return TRUE if THIS changed, FALSE if THIS was unchanged.\n\n fn merge(this: &mut Self::Repr, delta: Delta::Repr) -> bool;\n\n\n\n fn merge_hide<Y: Qualifier, Z: Qualifier>(this: &m...
Rust
src/iso/file_entry.rs
jmpesp/mkisofs-rs
7ffc9158ace73d7da9ee67a8a9eeaf4ef18a6fc0
use crate::iso::utils; use crate::iso::utils::{LOGIC_SIZE, LOGIC_SIZE_I64, LOGIC_SIZE_U32}; use byteorder::{BigEndian, LittleEndian, WriteBytesExt}; use chrono::prelude::*; use std; use std::fs::File; use std::io; use std::io::prelude::*; use std::io::Cursor; use std::io::SeekFrom; use std::path::PathBuf; #[derive(Debug, Clone)] pub enum FileType { Regular { path: PathBuf }, Buffer { name: String, data: Vec<u8> }, } #[derive(Debug, Clone)] pub struct FileEntry { pub file_type: FileType, pub size: usize, pub lba: u32, pub aligned_size: usize, } impl FileEntry { pub fn get_file_name(&self) -> String { match &self.file_type { FileType::Regular { path } => path.file_name().unwrap().to_str().unwrap().to_string(), FileType::Buffer { name, .. } => name.clone(), } } pub fn open_content_provider(&self) -> Box<dyn Read> { match &self.file_type { FileType::Regular { path } => Box::new(File::open(path).unwrap()), FileType::Buffer { data, .. } => Box::new(Cursor::new(data.clone())), } } pub fn write_entry<T>(&self, output_writter: &mut T) -> std::io::Result<()> where T: Write + Seek, { let current_pos = output_writter.seek(SeekFrom::Current(0))? as i32; let expected_aligned_pos = utils::align_up(current_pos, LOGIC_SIZE_U32 as i32); let diff_size = expected_aligned_pos - current_pos; let file_entry_size = self.get_entry_size() as i32; if file_entry_size > diff_size && diff_size != 0 { let mut padding: Vec<u8> = Vec::new(); padding.resize(diff_size as usize, 0u8); output_writter.write_all(&padding)?; } let old_pos = output_writter.seek(SeekFrom::Current(0))? as i32; let file_name = self.get_file_name(); let file_identifier = utils::convert_name(&file_name); let file_identifier_len = file_identifier.len() + 2; output_writter.write_u8(file_entry_size as u8)?; output_writter.write_u8(0u8)?; write_bothendian! { output_writter.write_u32(self.lba)?; } write_bothendian! 
{ output_writter.write_u32(self.size as u32)?; } let record_datetime: DateTime<Utc> = Utc::now(); output_writter.write_u8((record_datetime.year() - 1900) as u8)?; output_writter.write_u8((record_datetime.month()) as u8)?; output_writter.write_u8((record_datetime.day()) as u8)?; output_writter.write_u8((record_datetime.hour()) as u8)?; output_writter.write_u8((record_datetime.minute()) as u8)?; output_writter.write_u8((record_datetime.second()) as u8)?; output_writter.write_u8(0u8)?; output_writter.write_u8(0x0u8)?; output_writter.write_u8(0x0u8)?; output_writter.write_u8(0x0u8)?; write_bothendian! { output_writter.write_u16(0x1)?; } output_writter.write_u8(file_identifier_len as u8)?; output_writter.write_all(&file_identifier[..])?; output_writter.write_all(b";1")?; if (file_identifier_len % 2) == 0 { output_writter.write_u8(0x0u8)?; } output_writter.write_all(b"PX")?; output_writter.write_u8(0x2c)?; output_writter.write_u8(0x1)?; write_bothendian! { output_writter.write_u32(0o100_644)?; } write_bothendian! { output_writter.write_u32(0x1)?; } write_bothendian! { output_writter.write_u32(0x0)?; } write_bothendian! { output_writter.write_u32(0x0)?; } write_bothendian! { output_writter.write_u32(self.lba)?; } output_writter.write_all(b"NM")?; output_writter.write_u8(0x5 + file_name.len() as u8)?; output_writter.write_u8(0x1)?; output_writter.write_u8(0x0)?; output_writter.write_all(file_name.as_bytes())?; let new_pos = output_writter.seek(SeekFrom::Current(0))? as i32; assert!(old_pos + file_entry_size == new_pos); Ok(()) } pub fn get_entry_size(&self) -> u32 { let file_name = self.get_file_name(); utils::get_entry_size(0x21 + 2, &file_name, 0, 1) } pub fn update(&mut self) { match &self.file_type { FileType::Buffer { data, .. 
} => { self.size = data.len(); self.aligned_size = utils::align_up(self.size as i32, LOGIC_SIZE_U32 as i32) as usize; } _ => unimplemented!(), } } pub fn write_content<T>(&mut self, output_writter: &mut T) -> std::io::Result<()> where T: Write + Seek, { let old_pos = output_writter.seek(SeekFrom::Current(0))?; output_writter.seek(SeekFrom::Start(u64::from(self.lba * LOGIC_SIZE_U32)))?; let mut file: Box<dyn Read> = self.open_content_provider(); io::copy(&mut file, output_writter)?; let current_pos = output_writter.seek(SeekFrom::Current(0))? as usize; let expected_aligned_pos = ((current_pos as i64) & -LOGIC_SIZE_I64) as usize; let diff_size = current_pos - expected_aligned_pos; if diff_size != 0 { let mut padding: Vec<u8> = Vec::new(); padding.resize(LOGIC_SIZE - diff_size, 0u8); output_writter.write_all(&padding)?; } output_writter.seek(SeekFrom::Start(old_pos))?; Ok(()) } pub fn new_buffered(name: String) -> FileEntry { FileEntry { file_type: FileType::Buffer { name, data: Vec::new(), }, lba: 0, size: 0, aligned_size: 0, } } }
use crate::iso::utils; use crate::iso::utils::{LOGIC_SIZE, LOGIC_SIZE_I64, LOGIC_SIZE_U32}; use byteorder::{BigEndian, LittleEndian, WriteBytesExt}; use chrono::prelude::*; use std; use std::fs::File; use std::io; use std::io::prelude::*; use std::io::Cursor; use std::io::SeekFrom; use std::path::PathBuf; #[derive(Debug, Clone)] pub enum FileType { Regular { path: PathBuf }, Buffer { name: String, data: Vec<u8> }, } #[derive(
tput_writter.write_u8(file_identifier_len as u8)?; output_writter.write_all(&file_identifier[..])?; output_writter.write_all(b";1")?; if (file_identifier_len % 2) == 0 { output_writter.write_u8(0x0u8)?; } output_writter.write_all(b"PX")?; output_writter.write_u8(0x2c)?; output_writter.write_u8(0x1)?; write_bothendian! { output_writter.write_u32(0o100_644)?; } write_bothendian! { output_writter.write_u32(0x1)?; } write_bothendian! { output_writter.write_u32(0x0)?; } write_bothendian! { output_writter.write_u32(0x0)?; } write_bothendian! { output_writter.write_u32(self.lba)?; } output_writter.write_all(b"NM")?; output_writter.write_u8(0x5 + file_name.len() as u8)?; output_writter.write_u8(0x1)?; output_writter.write_u8(0x0)?; output_writter.write_all(file_name.as_bytes())?; let new_pos = output_writter.seek(SeekFrom::Current(0))? as i32; assert!(old_pos + file_entry_size == new_pos); Ok(()) } pub fn get_entry_size(&self) -> u32 { let file_name = self.get_file_name(); utils::get_entry_size(0x21 + 2, &file_name, 0, 1) } pub fn update(&mut self) { match &self.file_type { FileType::Buffer { data, .. } => { self.size = data.len(); self.aligned_size = utils::align_up(self.size as i32, LOGIC_SIZE_U32 as i32) as usize; } _ => unimplemented!(), } } pub fn write_content<T>(&mut self, output_writter: &mut T) -> std::io::Result<()> where T: Write + Seek, { let old_pos = output_writter.seek(SeekFrom::Current(0))?; output_writter.seek(SeekFrom::Start(u64::from(self.lba * LOGIC_SIZE_U32)))?; let mut file: Box<dyn Read> = self.open_content_provider(); io::copy(&mut file, output_writter)?; let current_pos = output_writter.seek(SeekFrom::Current(0))? 
as usize; let expected_aligned_pos = ((current_pos as i64) & -LOGIC_SIZE_I64) as usize; let diff_size = current_pos - expected_aligned_pos; if diff_size != 0 { let mut padding: Vec<u8> = Vec::new(); padding.resize(LOGIC_SIZE - diff_size, 0u8); output_writter.write_all(&padding)?; } output_writter.seek(SeekFrom::Start(old_pos))?; Ok(()) } pub fn new_buffered(name: String) -> FileEntry { FileEntry { file_type: FileType::Buffer { name, data: Vec::new(), }, lba: 0, size: 0, aligned_size: 0, } } }
Debug, Clone)] pub struct FileEntry { pub file_type: FileType, pub size: usize, pub lba: u32, pub aligned_size: usize, } impl FileEntry { pub fn get_file_name(&self) -> String { match &self.file_type { FileType::Regular { path } => path.file_name().unwrap().to_str().unwrap().to_string(), FileType::Buffer { name, .. } => name.clone(), } } pub fn open_content_provider(&self) -> Box<dyn Read> { match &self.file_type { FileType::Regular { path } => Box::new(File::open(path).unwrap()), FileType::Buffer { data, .. } => Box::new(Cursor::new(data.clone())), } } pub fn write_entry<T>(&self, output_writter: &mut T) -> std::io::Result<()> where T: Write + Seek, { let current_pos = output_writter.seek(SeekFrom::Current(0))? as i32; let expected_aligned_pos = utils::align_up(current_pos, LOGIC_SIZE_U32 as i32); let diff_size = expected_aligned_pos - current_pos; let file_entry_size = self.get_entry_size() as i32; if file_entry_size > diff_size && diff_size != 0 { let mut padding: Vec<u8> = Vec::new(); padding.resize(diff_size as usize, 0u8); output_writter.write_all(&padding)?; } let old_pos = output_writter.seek(SeekFrom::Current(0))? as i32; let file_name = self.get_file_name(); let file_identifier = utils::convert_name(&file_name); let file_identifier_len = file_identifier.len() + 2; output_writter.write_u8(file_entry_size as u8)?; output_writter.write_u8(0u8)?; write_bothendian! { output_writter.write_u32(self.lba)?; } write_bothendian! 
{ output_writter.write_u32(self.size as u32)?; } let record_datetime: DateTime<Utc> = Utc::now(); output_writter.write_u8((record_datetime.year() - 1900) as u8)?; output_writter.write_u8((record_datetime.month()) as u8)?; output_writter.write_u8((record_datetime.day()) as u8)?; output_writter.write_u8((record_datetime.hour()) as u8)?; output_writter.write_u8((record_datetime.minute()) as u8)?; output_writter.write_u8((record_datetime.second()) as u8)?; output_writter.write_u8(0u8)?; output_writter.write_u8(0x0u8)?; output_writter.write_u8(0x0u8)?; output_writter.write_u8(0x0u8)?; write_bothendian! { output_writter.write_u16(0x1)?; } ou
random
[ { "content": "pub fn create_iso(opt: &mut option::Opt) -> std::io::Result<()> {\n\n let volume_descriptor_list = generate_volume_descriptors(opt);\n\n\n\n let mut out_file = File::create(&opt.output)?;\n\n\n\n let mut current_lba: u32 = 0x10 + 1 + (volume_descriptor_list.len() as u32);\n\n\n\n let p...
Rust
backend/wallet/interface/src/controller/controller.rs
kzmake/osaifu
5b44ad071f201a14dfbde9f13b077acd77a86420
use crate::osaifu_wallet_v1::Wallet as PBWallet; use crate::osaifu_wallet_v1::{CreateRequest, CreateResponse}; use crate::osaifu_wallet_v1::{DeleteRequest, DeleteResponse}; use crate::osaifu_wallet_v1::{GetRequest, GetResponse}; use crate::osaifu_wallet_v1::{ListRequest, ListResponse}; use anyhow::Result; use derive_new::new; use query::port::{ListWalletsInputData, ListWalletsOutputData, QueryPort}; use tonic::{Request, Response, Status}; use usecase::port::{ CreateWalletInputData, CreateWalletOutputData, DeleteWalletInputData, DeleteWalletOutputData, GetWalletInputData, GetWalletOutputData, Port, }; pub trait Controller { fn create(&self, request: Request<CreateRequest>) -> Result<Response<CreateResponse>, Status>; fn list(&self, request: Request<ListRequest>) -> Result<Response<ListResponse>, Status>; fn get(&self, request: Request<GetRequest>) -> Result<Response<GetResponse>, Status>; fn delete(&self, request: Request<DeleteRequest>) -> Result<Response<DeleteResponse>, Status>; } #[derive(new)] pub struct WalletController<Create, List, Get, Delete> where Create: Port<CreateWalletInputData, CreateWalletOutputData>, List: QueryPort<ListWalletsInputData, ListWalletsOutputData>, Get: Port<GetWalletInputData, GetWalletOutputData>, Delete: Port<DeleteWalletInputData, DeleteWalletOutputData>, { create_wallet: Create, list_wallets: List, get_wallet: Get, delete_wallet: Delete, } impl<Create, List, Get, Delete> Controller for WalletController<Create, List, Get, Delete> where Create: Port<CreateWalletInputData, CreateWalletOutputData>, List: QueryPort<ListWalletsInputData, ListWalletsOutputData>, Get: Port<GetWalletInputData, GetWalletOutputData>, Delete: Port<DeleteWalletInputData, DeleteWalletOutputData>, { fn create(&self, request: Request<CreateRequest>) -> Result<Response<CreateResponse>, Status> { let input = CreateWalletInputData::new(request.get_ref().owner.to_string()); match self.create_wallet.handle(input) { Ok(output) => Ok(Response::new(CreateResponse { 
wallet: Some(PBWallet { id: output.wallet.id().to_string(), owner: "alice".to_string(), balance: output.wallet.balance().to_string(), }), })), Err(_) => Err(Status::internal("error")), } } fn list(&self, _request: Request<ListRequest>) -> Result<Response<ListResponse>, Status> { let input = ListWalletsInputData::new("".to_string()); match self.list_wallets.handle(input) { Ok(output) => Ok(Response::new(ListResponse { wallets: output .wallets .iter() .map(|x| PBWallet { id: x.id.clone(), owner: x.owner.clone(), balance: x.balance.clone(), }) .collect(), })), Err(_) => Err(Status::internal("error")), } } fn get(&self, request: Request<GetRequest>) -> Result<Response<GetResponse>, Status> { let input = GetWalletInputData::new(request.get_ref().id.to_string()); match self.get_wallet.handle(input) { Ok(output) => Ok(Response::new(GetResponse { wallet: Some(PBWallet { id: output.wallet.id().to_string(), owner: "alice".to_string(), balance: output.wallet.balance().to_string(), }), })), Err(_) => Err(Status::internal("error")), } } fn delete(&self, request: Request<DeleteRequest>) -> Result<Response<DeleteResponse>, Status> { let input = DeleteWalletInputData::new(request.get_ref().id.to_string()); match self.delete_wallet.handle(input) { Ok(_) => Ok(Response::new(DeleteResponse {})), Err(_) => Err(Status::internal("error")), } } } #[cfg(test)] mod tests { use super::*; use anyhow::bail; use domain::entity::*; use domain::vo::*; use query::port::*; use usecase::port::*; fn new_wallet() -> Wallet { WalletBuilder::default() .id("01F8MECHZX3TBDSZ7XRADM79XE".parse::<Id<Wallet>>().unwrap()) .balance("2000".parse::<Money<JPY>>().unwrap()) .build() .unwrap() } #[test] fn test_create_wallet_handle_ok() { let entity = new_wallet(); let view_model = query::view::Wallet { id: new_wallet().id().to_string(), balance: new_wallet().balance().to_string(), }; let mut create = MockPort::<CreateWalletInputData, CreateWalletOutputData>::new(); let mut list = 
MockQueryPort::<ListWalletsInputData, ListWalletsOutputData>::new(); let mut get = MockPort::<GetWalletInputData, GetWalletOutputData>::new(); let mut delete = MockPort::<DeleteWalletInputData, DeleteWalletOutputData>::new(); create .expect_handle() .returning(|_| Ok(CreateWalletOutputData::new(entity))); list.expect_handle() .returning(|_| Ok(ListWalletsOutputData::new(vec![view_model]))); get.expect_handle() .returning(|_| Ok(GetWalletOutputData::new(entity))); delete .expect_handle() .returning(|_| Ok(DeleteWalletOutputData::new())); let sut = WalletController::new(create, list, get, delete); assert_eq!( sut.create(Request::new(CreateRequest { owner: "alice".to_string(), })) .unwrap() .get_ref(), Response::new(CreateResponse { wallet: Some(PBWallet { id: entity.id().to_string(), owner: "alice".to_string(), balance: entity.balance().to_string(), }), }) .get_ref(), ); } #[test] fn test_create_wallet_handle_err() { let _entity = new_wallet(); let mut create = MockPort::<CreateWalletInputData, CreateWalletOutputData>::new(); let mut list = MockQueryPort::<ListWalletsInputData, ListWalletsOutputData>::new(); let mut get = MockPort::<GetWalletInputData, GetWalletOutputData>::new(); let mut delete = MockPort::<DeleteWalletInputData, DeleteWalletOutputData>::new(); create.expect_handle().returning(|_| bail!("error")); list.expect_handle().returning(|_| bail!("error")); get.expect_handle().returning(|_| bail!("error")); delete.expect_handle().returning(|_| bail!("error")); let sut = WalletController::new(create, list, get, delete); assert!(sut .create(Request::new(CreateRequest { owner: "alice".to_string(), })) .is_err()); } #[test] fn test_get_wallet_handle_ok() { let entity = new_wallet(); let view_model = query::view::Wallet { id: new_wallet().id().to_string(), balance: new_wallet().balance().to_string(), }; let mut create = MockPort::<CreateWalletInputData, CreateWalletOutputData>::new(); let mut list = MockQueryPort::<ListWalletsInputData, 
ListWalletsOutputData>::new(); let mut get = MockPort::<GetWalletInputData, GetWalletOutputData>::new(); let mut delete = MockPort::<DeleteWalletInputData, DeleteWalletOutputData>::new(); create .expect_handle() .returning(|_| Ok(CreateWalletOutputData::new(entity))); list.expect_handle() .returning(|_| Ok(ListWalletsOutputData::new(vec![view_model]))); get.expect_handle() .returning(|_| Ok(GetWalletOutputData::new(entity))); delete .expect_handle() .returning(|_| Ok(DeleteWalletOutputData::new())); let sut = WalletController::new(create, list, get, delete); assert_eq!( sut.get(Request::new(GetRequest { id: entity.id().to_string(), })) .unwrap() .get_ref(), Response::new(GetResponse { wallet: Some(PBWallet { id: entity.id().to_string(), owner: "alice".to_string(), balance: entity.balance().to_string(), }), }) .get_ref(), ); } #[test] fn test_get_wallet_handle_err() { let entity = new_wallet(); let mut create = MockPort::<CreateWalletInputData, CreateWalletOutputData>::new(); let mut list = MockQueryPort::<ListWalletsInputData, ListWalletsOutputData>::new(); let mut get = MockPort::<GetWalletInputData, GetWalletOutputData>::new(); let mut delete = MockPort::<DeleteWalletInputData, DeleteWalletOutputData>::new(); create.expect_handle().returning(|_| bail!("error")); list.expect_handle().returning(|_| bail!("error")); get.expect_handle().returning(|_| bail!("error")); delete.expect_handle().returning(|_| bail!("error")); let sut = WalletController::new(create, list, get, delete); assert!(sut .get(Request::new(GetRequest { id: entity.id().to_string() })) .is_err()); } }
use crate::osaifu_wallet_v1::Wallet as PBWallet; use crate::osaifu_wallet_v1::{CreateRequest, CreateResponse}; use crate::osaifu_wallet_v1::{DeleteRequest, DeleteResponse}; use crate::osaifu_wallet_v1::{GetRequest, GetResponse}; use crate::osaifu_wallet_v1::{ListRequest, ListResponse}; use anyhow::Result; use derive_new::new; use query::port::{ListWalletsInputData, ListWalletsOutputData, QueryPort}; use tonic::{Request, Response, Status}; use usecase::port::{ CreateWalletInputData, CreateWalletOutputData, DeleteWalletInputData, DeleteWalletOutputData, GetWalletInputData, GetWalletOutputData, Port, }; pub trait Controller { fn create(&self, request: Request<CreateRequest>) -> Result<Response<CreateResponse>, Status>; fn list(&self, request: Request<ListRequest>) -> Result<Response<ListResponse>, Status>; fn get(&self, request: Request<GetRequest>) -> Result<Response<GetResponse>, Status>; fn delete(&self, request: Request<DeleteRequest>) -> Result<Response<DeleteResponse>, Status>; } #[derive(new)] pub struct WalletController<Create, List, Get, Delete> where Create: Port<CreateWalletInputData, CreateWalletOutputData>, List: QueryPort<ListWalletsInputData, ListWalletsOutputData>, Get: Port<GetWalletInputData, GetWalletOutputData>, Delete: Port<DeleteWalletInputData, DeleteWalletOutputData>, { create_wallet: Create, list_wallets: List, get_wallet: Get, delete_wallet: Delete, } impl<Create, List, Get, Delete> Controller for WalletController<Create, List, Get, Delete> where Create: Port<CreateWalletInputData, CreateWalletOutputData>, List: QueryPort<ListWalletsInputData, ListWalletsOutputData>, Get: Port<GetWalletInputData, GetWalletOutputData>, Delete: Port<DeleteWalletInputData, DeleteWalletOutputData>, { fn create(&self, request: Request<CreateRequest>) -> Result<Response<CreateResponse>, Status> { let input = CreateWalletInputData::new(request.get_ref().owner.to_string()); match self.create_wallet.handle(input) { Ok(output) => Ok(Response::new(CreateResponse { 
wallet: Some(PBWallet { id: output.wallet.id().to_string(), owner: "alice".to_string(), balance: output.wallet.balance().to_string(), }), })), Err(_) => Err(Status::internal("error")), } } fn list(&self, _request: Request<ListRequest>) -> Result<Response<ListResponse>, Status> { let input = ListWalletsInputData::new("".to_string()); match self.list_wallets.handle(input) { Ok(output) => Ok(Response::new(ListResponse { wallets: output .wallets .iter() .map(|x| PBWallet { id: x.id.clone(), owner: x.owner.clone(), balance: x.balance.clone(), }) .collect(), })), Err(_) => Err(Status::internal("error")), } } fn get(&self, request: Request<GetRequest>) -> Result<Response<GetResponse>, Status> { let input = GetWalletInputData::new(request.get_ref().id.to_string()); match self.get_wallet.handle(input) { Ok(output) => Ok(Response::new(GetResponse { wallet: Some(PBWallet { id: output.wallet.id().to_string(), owner: "alice".to_string(), balance: output.wallet.balance().to_string(), }), })), Err(_) => Err(Status::internal("error")), } } fn delete(&self, request: Request<DeleteRequest>) -> Result<Response<DeleteResponse>, Status> { let input = DeleteWalletInputData::new(request.get_ref().id.to_string()); match self.delete_wallet.handle(input) { Ok(_) => Ok(Response::new(DeleteResponse {})), Err(_) => Err(Status::internal("error")), } } } #[cfg(test)] mod tests { use super::*; use anyhow::bail; use domain::entity::*; use domain::vo::*; use query::port::*; use usecase::port::*; fn new_wallet() -> Wallet { WalletBuilder::default() .id("01F8MECHZX3TBDSZ7XRADM79XE".parse::<Id<Wallet>>().unwrap()) .balance("2000".parse::<Money<JPY>>().unwrap()) .build() .unwrap() } #[test] fn test_create_wallet_handle_ok() { let entity = new_wallet(); let view_model = query::view::Wallet { id: new_wallet().id().to_string(), balance: new_wallet().balance().to_string(),
ng(|_| bail!("error")); get.expect_handle().returning(|_| bail!("error")); delete.expect_handle().returning(|_| bail!("error")); let sut = WalletController::new(create, list, get, delete); assert!(sut .create(Request::new(CreateRequest { owner: "alice".to_string(), })) .is_err()); } #[test] fn test_get_wallet_handle_ok() { let entity = new_wallet(); let view_model = query::view::Wallet { id: new_wallet().id().to_string(), balance: new_wallet().balance().to_string(), }; let mut create = MockPort::<CreateWalletInputData, CreateWalletOutputData>::new(); let mut list = MockQueryPort::<ListWalletsInputData, ListWalletsOutputData>::new(); let mut get = MockPort::<GetWalletInputData, GetWalletOutputData>::new(); let mut delete = MockPort::<DeleteWalletInputData, DeleteWalletOutputData>::new(); create .expect_handle() .returning(|_| Ok(CreateWalletOutputData::new(entity))); list.expect_handle() .returning(|_| Ok(ListWalletsOutputData::new(vec![view_model]))); get.expect_handle() .returning(|_| Ok(GetWalletOutputData::new(entity))); delete .expect_handle() .returning(|_| Ok(DeleteWalletOutputData::new())); let sut = WalletController::new(create, list, get, delete); assert_eq!( sut.get(Request::new(GetRequest { id: entity.id().to_string(), })) .unwrap() .get_ref(), Response::new(GetResponse { wallet: Some(PBWallet { id: entity.id().to_string(), owner: "alice".to_string(), balance: entity.balance().to_string(), }), }) .get_ref(), ); } #[test] fn test_get_wallet_handle_err() { let entity = new_wallet(); let mut create = MockPort::<CreateWalletInputData, CreateWalletOutputData>::new(); let mut list = MockQueryPort::<ListWalletsInputData, ListWalletsOutputData>::new(); let mut get = MockPort::<GetWalletInputData, GetWalletOutputData>::new(); let mut delete = MockPort::<DeleteWalletInputData, DeleteWalletOutputData>::new(); create.expect_handle().returning(|_| bail!("error")); list.expect_handle().returning(|_| bail!("error")); get.expect_handle().returning(|_| bail!("error")); 
delete.expect_handle().returning(|_| bail!("error")); let sut = WalletController::new(create, list, get, delete); assert!(sut .get(Request::new(GetRequest { id: entity.id().to_string() })) .is_err()); } }
}; let mut create = MockPort::<CreateWalletInputData, CreateWalletOutputData>::new(); let mut list = MockQueryPort::<ListWalletsInputData, ListWalletsOutputData>::new(); let mut get = MockPort::<GetWalletInputData, GetWalletOutputData>::new(); let mut delete = MockPort::<DeleteWalletInputData, DeleteWalletOutputData>::new(); create .expect_handle() .returning(|_| Ok(CreateWalletOutputData::new(entity))); list.expect_handle() .returning(|_| Ok(ListWalletsOutputData::new(vec![view_model]))); get.expect_handle() .returning(|_| Ok(GetWalletOutputData::new(entity))); delete .expect_handle() .returning(|_| Ok(DeleteWalletOutputData::new())); let sut = WalletController::new(create, list, get, delete); assert_eq!( sut.create(Request::new(CreateRequest { owner: "alice".to_string(), })) .unwrap() .get_ref(), Response::new(CreateResponse { wallet: Some(PBWallet { id: entity.id().to_string(), owner: "alice".to_string(), balance: entity.balance().to_string(), }), }) .get_ref(), ); } #[test] fn test_create_wallet_handle_err() { let _entity = new_wallet(); let mut create = MockPort::<CreateWalletInputData, CreateWalletOutputData>::new(); let mut list = MockQueryPort::<ListWalletsInputData, ListWalletsOutputData>::new(); let mut get = MockPort::<GetWalletInputData, GetWalletOutputData>::new(); let mut delete = MockPort::<DeleteWalletInputData, DeleteWalletOutputData>::new(); create.expect_handle().returning(|_| bail!("error")); list.expect_handle().returni
random
[ { "content": "#[mockall::automock]\n\npub trait Port<Input: InputData, Output: OutputData> {\n\n fn handle(&self, input: Input) -> Result<Output, Error>;\n\n}\n", "file_path": "backend/wallet/usecase/src/port/port.rs", "rank": 0, "score": 237251.4002721125 }, { "content": "#[mockall::auto...
Rust
src/shapes/sphere.rs
mlunnay/rs_illume
a7e88118d829d24e35673df8a1f49a3b870a36e6
use std::sync::Arc; use crate::core::pbrt::{Float, consts::PI, gamma, radians}; use crate::core::shape::Shape; use crate::core::geometry::{Bounding3, Bounds3f, Ray, Point2f, Point3f, Normal3f, Vector3f, offset_ray_origin, dot_normal_vec, coordinate_system, spherical_direction_vec3}; use crate::core::transform::Transform; use crate::core::material::Material; use crate::core::sampling::{uniform_sample_sphere, uniform_cone_pdf}; use crate::core::interaction::*; use crate::core::interaction::SurfaceInteraction; use num::clamp; use crate::core::efloat::{EFloat, quadratic}; use crate::core::profiler::Profiler; #[derive(Clone)] pub struct Sphere { pub radius: Float, pub z_min: Float, pub z_max: Float, pub theta_min: Float, pub theta_max: Float, pub phi_max: Float, object_to_world: Transform, world_to_object: Transform, reverse_orientation: bool, transform_swaps_handedness: bool, pub material: Option<Arc<dyn Material + Send + Sync>>, } impl Default for Sphere{ fn default() -> Sphere { let object_to_world = Transform::default(); Sphere { object_to_world, world_to_object: Transform::default(), radius: 1.0, z_min: -1.0, z_max: 1.0, theta_min: (-1.0 as Float).acos(), theta_max: (1.0 as Float).acos(), phi_max: PI * 2.0, reverse_orientation: false, transform_swaps_handedness: object_to_world.swaps_handness(), material: None } } } impl Sphere { fn new( object_to_world: Transform, world_to_object: Transform, reverse_orientation: bool, radius: Float, z_min: Float, z_max: Float, phi_max: Float ) -> Sphere { let min = z_min.min(z_max); let max = z_min.max(z_max); Sphere{ object_to_world, world_to_object, reverse_orientation, radius, z_min: clamp(min, -radius, radius), z_max: clamp(max, -radius, radius), theta_min: clamp(min / radius, -1.0, 1.0).acos(), theta_max: clamp(max / radius, -1.0, 1.0).acos(), phi_max: radians(clamp(phi_max, 0.0, 360.0)), transform_swaps_handedness: object_to_world.swaps_handness(), material: None } } } impl Shape for Sphere { fn object_bound(&self) -> 
Box<dyn Bounding3<Float>> { Box::new(Bounds3f{ min: Point3f { x: -self.radius, y: -self.radius, z: self.z_min }, max: Point3f { x: self.radius, y: self.radius, z: self.z_max } }) } fn world_bound(&self) -> Box<dyn Bounding3<Float>> { self.object_to_world.transform_bounds(self.object_bound()) } fn get_reverse_orientation(&self) -> bool { self.reverse_orientation } fn get_transform_swaps_handedness(&self) -> bool { self.transform_swaps_handedness } fn intersect(&self, ray: &Ray, test_alpha_texture: bool) -> Option<(SurfaceInteraction, Float)> { let _p = Profiler::instance().profile("Sphere::intersect"); let mut phi: Float = 0.0; let mut p_hit: Point3f; let mut o_err = Vector3f::default(); let mut d_err = Vector3f::default(); let ray = self.world_to_object.transform_ray_with_error(&ray, &mut o_err, &mut d_err); let ox = EFloat::new(ray.o.x, o_err.x); let oy = EFloat::new(ray.o.y, o_err.y); let oz = EFloat::new(ray.o.z, o_err.z); let dx = EFloat::new(ray.d.x, d_err.x); let dy = EFloat::new(ray.d.y, d_err.y); let dz = EFloat::new(ray.d.z, d_err.z); let a = dx * dx + dy * dy + dz * dz; let b = (dx * ox + dy * oy + dz * oz) * 2.0; let c = ox * ox + oy * oy + oz * oz - EFloat::new(self.radius, 0.0) * EFloat::new(self.radius, 0.0); let mut t0 = EFloat::default(); let mut t1 = EFloat::default(); if !quadratic(a, b, c, &mut t0, &mut t1) { return None; } if t0.upper_bound() > ray.t_max || t1.lower_bound() <= 0.0 { return None; } let mut t_shape_hit = t0; if t_shape_hit.lower_bound() <= 0.0 { t_shape_hit = t1; if t_shape_hit.upper_bound() > ray.t_max { return None; } } p_hit = ray.point_at_time(t_shape_hit.v); p_hit *= self.radius / p_hit.distance(&Point3f::new(0.0, 0.0, 0.0)); if p_hit.x == 0.0 && p_hit.y == 0.0 { p_hit.x = 1e-5 * self.radius; } phi = p_hit.y.atan2(p_hit.x); if phi < 0.0 { phi += 2.0 * PI; } if (self.z_min > -self.radius && p_hit.z < self.z_min) || (self.z_max < self.radius && p_hit.z > self.z_max) || phi > self.phi_max { if t_shape_hit == t1 { return None; } 
if t1.upper_bound() > ray.t_max { return None; } t_shape_hit = t1; p_hit = ray.point_at_time(t_shape_hit.v); p_hit *= self.radius / p_hit.distance(&Point3f::default()); if p_hit.x == 0.0 && p_hit.y == 0.0 { p_hit.x = 1e-5 * self.radius; } phi = p_hit.y.atan2(p_hit.x); if phi < 0.0 { phi += 2.0 * PI; } if self.z_min > -self.radius && p_hit.z < self.z_min || (self.z_max < self.radius && p_hit.z > self.z_max) || phi > self.phi_max { return None; } } let u = phi / self.phi_max; let theta = clamp(p_hit.z / self.radius, -1.0, 1.0).acos(); let v = (theta - self.theta_min) / (self.theta_max - self.theta_min); let z_radius = (p_hit.x * p_hit.x + p_hit.y * p_hit.y).sqrt(); let inv_z_radius = 1.0 / z_radius; let cos_phi = p_hit.x * inv_z_radius; let sin_phi = p_hit.y * inv_z_radius; let dpdu = Vector3f::new(-self.phi_max * p_hit.y, self.phi_max * p_hit.x, 0.0); let dpdv = (self.theta_max - self.theta_min) * Vector3f::new(p_hit.z * cos_phi, p_hit.z * sin_phi, -self.radius * theta.sin()); let d2_pduu = -self.phi_max * self.phi_max * Vector3f::new(p_hit.x, p_hit.y, 0.0); let d2_pduv = (self.theta_max - self.theta_min) * p_hit.z * self.phi_max * Vector3f::new(-sin_phi, cos_phi, 0.); let d2_pdvv = -(self.theta_max - self.theta_min) * (self.theta_max - self.theta_min) * Vector3f::new(p_hit.x, p_hit.y, p_hit.z); let e = dpdu.dot(&dpdu); let f = dpdu.dot(&dpdv); let g = dpdv.dot(&dpdv); let n = dpdu.cross(&dpdv).normalize(); let e = n.dot(&d2_pduu); let f = n.dot(&d2_pduv); let g = n.dot(&d2_pdvv); let inv_e_g_f2 = 1.0 / (e * g - f * f); let dndu = Normal3f::from((f * f - e * g) * inv_e_g_f2 * dpdu + (e * f - f * e) * inv_e_g_f2 * dpdv); let dndv = Normal3f::from((g * f - f * g) * inv_e_g_f2 * dpdu + (f * f - g * e) * inv_e_g_f2 * dpdv); let p_error = gamma(5) * Vector3f::from(p_hit).abs(); let isect = self.object_to_world.tranform_surface_interaction(&SurfaceInteraction::new(p_hit, p_error, Point2f::new(u, v), -ray.d, -dpdu, -dpdv, -dndu, -dndv, ray.time, Some(self), 0)); 
Some((isect, t_shape_hit.v as Float)) } fn intersect_p(&self, ray: &Ray, test_alpha_texture: bool) -> bool { let _p = Profiler::instance().profile("Sphere::intersect_p"); let mut phi: Float = 0.0; let mut p_hit: Point3f; let mut o_err = Vector3f::default(); let mut d_err = Vector3f::default(); let ray = self.world_to_object.transform_ray_with_error(&ray, &mut o_err, &mut d_err); let ox = EFloat::new(ray.o.x, o_err.x); let oy = EFloat::new(ray.o.y, o_err.y); let oz = EFloat::new(ray.o.z, o_err.z); let dx = EFloat::new(ray.d.x, d_err.x); let dy = EFloat::new(ray.d.y, d_err.y); let dz = EFloat::new(ray.d.z, d_err.z); let a = dx * dx + dy * dy + dz * dz; let b = (dx * ox + dy * oy + dz * oz) * 2.0; let c = ox * ox + oy * oy + oz * oz - EFloat::new(self.radius, 0.0) * EFloat::new(self.radius, 0.0); let mut t0 = EFloat::default(); let mut t1 = EFloat::default(); if !quadratic(a, b, c, &mut t0, &mut t1) { return false; } if t0.upper_bound() > ray.t_max || t1.lower_bound() <= 0.0 { return false; } let mut t_shape_hit = t0; if t_shape_hit.lower_bound() <= 0.0 { t_shape_hit = t1; if t_shape_hit.upper_bound() > ray.t_max { return false; } } p_hit = ray.point_at_time(t_shape_hit.v); p_hit *= self.radius / p_hit.distance(&Point3f::new(0.0, 0.0, 0.0)); if p_hit.x == 0.0 && p_hit.y == 0.0 { p_hit.x = 1e-5 * self.radius; } phi = p_hit.y.atan2(p_hit.x); if phi < 0.0 { phi += 2.0 * PI; } if (self.z_min > -self.radius && p_hit.z < self.z_min) || (self.z_max < self.radius && p_hit.z > self.z_max) || phi > self.phi_max { if t_shape_hit == t1 { return false; } if t1.upper_bound() > ray.t_max { return false; } t_shape_hit = t1; p_hit = ray.point_at_time(t_shape_hit.v); p_hit *= self.radius / p_hit.distance(&Point3f::default()); if p_hit.x == 0.0 && p_hit.y == 0.0 { p_hit.x = 1e-5 * self.radius; } phi = p_hit.y.atan2(p_hit.x); if phi < 0.0 { phi += 2.0 * PI; } if self.z_min > -self.radius && p_hit.z < self.z_min || (self.z_max < self.radius && p_hit.z > self.z_max) || phi > self.phi_max { 
return false; } } true } fn area(&self) -> Float { self.phi_max * self.radius * (self.z_max - self.z_min) } fn sample(&self, u: &Point2f, pdf: &mut Float) -> Box<dyn Interaction> { let mut p_obj = Point3f::new(0.0, 0.0, 0.0) + self.radius * uniform_sample_sphere(u); let mut it = SimpleInteraction::default(); it.n = self.object_to_world.transform_normal(&Normal3f::new(p_obj.x, p_obj.y, p_obj.z)).normalize(); if self.reverse_orientation { it.n *= -1.0; } p_obj *= self.radius / p_obj.distance(&Point3f::default()); let p_obj_error = gamma(5) * Vector3f::from(p_obj).abs(); it.p = self.object_to_world.transform_point_with_abs_error(&p_obj, &p_obj_error, &mut it.p_error); *pdf = 1.0 / self.area(); Box::new(it) } fn sample_with_point(&self, iref: &dyn Interaction, u: &Point2f, pdf: &mut Float) -> Box<dyn Interaction> { let p_center = self.object_to_world.transform_point(&Point3f::default()); let p_origin = offset_ray_origin(&iref.get_p(), &iref.get_p_error(), &iref.get_n(), &(p_center - iref.get_p())); if p_origin.distance_squared(&p_center) <= self.radius * self.radius { let intr = self.sample(&u, pdf); let wi = intr.get_p() - iref.get_p(); if wi.length_squared() == 0.0 { *pdf = 0.0; } else { wi = wi.normalize(); *pdf *= iref.get_p().distance_squared(&intr.get_p()) / dot_normal_vec(&intr.get_n(), &-wi).abs(); } if pdf.is_infinite() { *pdf = 0.0; } return intr; } let dc = iref.get_p().distance(&p_center); let inv_dc = 1.0 / dc; let wc = (p_center - iref.get_p()) * inv_dc; let wc_x: Vector3f; let wc_y: Vector3f; coordinate_system(&wc, &mut wc_x, &mut wc_y); let sin_theta_max = self.radius * inv_dc; let sin_theta_max2 = sin_theta_max * sin_theta_max; let inv_sin_theta_max = 1.0 / sin_theta_max; let cos_theta_max = (1.0 - sin_theta_max2).max(0.0).sqrt(); let cos_theta = (cos_theta_max - 1.0) * u.x + 1.0; let sin_theta2 = 1.0 - cos_theta * cos_theta; if sin_theta_max2 < 0.000685230 /* sin^2(1.5 deg) */ { /* Fall back to a Taylor series expansion for small angles, where the 
standard approach suffers from severe cancellation errors */ sin_theta2 = sin_theta_max2 * u.x; cos_theta = (1.0 - sin_theta2).sqrt(); } let cos_alpha = sin_theta2 * inv_sin_theta_max + cos_theta * (1.0 - sin_theta2 * inv_sin_theta_max * inv_sin_theta_max).max(0.0).sqrt(); let sin_alpha = (1.0 - cos_alpha*cos_alpha).max(0.0).sqrt(); let phi = u.y * 2.0 * PI; let n_world = spherical_direction_vec3(sin_alpha, cos_alpha, phi, &-wc_x, &-wc_y, &-wc); let p_world = p_center + Point3f::new(n_world.x, n_world.y, n_world.z) * self.radius; let mut it = SimpleInteraction::default(); it.p = p_world; it.p_error = gamma(5) * Vector3f::from(p_world).abs(); it.n = n_world.into(); if self.reverse_orientation { it.n *= -1.0; } *pdf = 1.0 / (2.0 * PI * (1.0 - cos_theta_max)); Box::new(it) } fn pdf_with_point(&self, iref: &dyn Interaction, wi: &Vector3f) -> Float { let p_center = self.object_to_world.transform_point(&Point3f::default()); let p_origin = offset_ray_origin(&iref.get_p(), &iref.get_p_error(), &iref.get_n(), &(p_center - iref.get_p())); if p_origin.distance_squared(&p_center) <= self.radius * self.radius { return Shape::pdf_with_point(self, iref, wi); } let sin_theta_max2 = self.radius * self.radius / iref.get_p().distance_squared(&p_center); let cos_theta_max = (1.0 - sin_theta_max2).max(0.0).sqrt(); uniform_cone_pdf(cos_theta_max) } fn solid_angle(&self, p: &Point3f, n_samples: i32) -> Float { let p_center = self.object_to_world.transform_point(&Point3f::default()); if p.distance_squared(&p_center) <= self.radius * self.radius { return 4.0 * PI; } let sin_theta2 = self.radius * self.radius / p.distance_squared(&p_center); let cos_theta = (1.0 - sin_theta2).max(0.0).sqrt(); (2.0 * PI * (1.0 - cos_theta)) } }
use std::sync::Arc; use crate::core::pbrt::{Float, consts::PI, gamma, radians}; use crate::core::shape::Shape; use crate::core::geometry::{Bounding3, Bounds3f, Ray, Point2f, Point3f, Normal3f, Vector3f, offset_ray_origin, dot_normal_vec, coordinate_system, spherical_direction_vec3}; use crate::core::transform::Transform; use crate::core::material::Material; use crate::core::sampling::{uniform_sample_sphere, uniform_cone_pdf}; use crate::core::interaction::*; use crate::core::interaction::SurfaceInteraction; use num::clamp; use crate::core::efloat::{EFloat, quadratic}; use crate::core::profiler::Profiler; #[derive(Clone)] pub struct Sphere { pub radius: Float, pub z_min: Float, pub z_max: Float, pub theta_min: Float, pub theta_max: Float, pub phi_max: Float, object_to_world: Transform, world_to_object: Transform, reverse_orientation: bool, transform_swaps_handedness: bool, pub material: Option<Arc<dyn Material + Send + Sync>>, } impl Default for Sphere{ fn default() -> Sphere { let object_to_world = Transform::default(); Sphere { object_to_world, world_to_object: Transform::default(), radius: 1.0, z_min: -1.0, z_max: 1.0, theta_min: (-1.0 as Float).acos(), theta_max: (1.0 as Float).acos(), phi_max: PI * 2.0, reverse_orientation: false, transform_swaps_handedness: object_to_world.swaps_handness(), material: None } } } impl Sphere { fn new( object_to_world: Transform, world_to_object: Transform, reverse_orientation: bool, radius: Float, z_min: Float, z_max: Float, phi_max: Float ) -> Sphere { let min = z_min.min(z_max); let max = z_min.max(z_max); Sphere{ object_to_world, world_to_object, reverse_orientation, radius, z_min: clamp(min, -radius, radius), z_max: clamp(max, -radius, radius), theta_min: clamp(min / radius, -1.0, 1.0).acos(), theta_max: clamp(max / radius, -1.0, 1.0).acos(), phi_max: radians(clamp(phi_max, 0.0, 360.0)), transform_swaps_handedness: object_to_world.swaps_handness(), material: None } } } impl Shape for Sphere { fn object_bound(&self) -> 
Box<dyn Bounding3<Float>> { Box::new(Bounds3f{ min: Point3f { x: -self.radius, y: -self.radius, z: self.z_min }, max: Point3f { x: self.radius, y: self.radius, z: self.z_max } }) } fn world_bound(&self) -> Box<dyn Bounding3<Float>> { self.object_to_world.transform_bounds(self.object_bound()) } fn get_reverse_orientation(&self) -> bool { self.reverse_orientation } fn get_transform_swaps_handedness(&self) -> bool { self.transform_swaps_handedness } fn intersect(&self, ray: &Ray, test_alpha_texture: bool) -> Option<(SurfaceInteraction, Float)> { let _p = Profiler::instance().profile("Sphere::intersect"); let mut phi: Float = 0.0; let mut p_hit: Point3f; let mut o_err = Vector3f::default(); let mut d_err = Vector3f::default(); let ray = self.world_to_object.transform_ray_with_error(&ray, &mut o_err, &mut d_err); let ox = EFloat::new(ray.o.x, o_err.x); let oy = EFloat::new(ray.o.y, o_err.y); let oz = EFloat::new(ray.o.z, o_err.z); let dx = EFloat::new(ray.d.x, d_err.x); let dy = EFloat::new(ray.d.y, d_err.y); let dz = EFloat::new(ray.d.z, d_err.z); let a = dx * dx + dy * dy + dz * dz; let b = (dx * ox + dy * oy + dz * oz) * 2.0; let c = ox * ox + oy * oy + oz * oz - EFloat::new(self.radius, 0.0) * EFloat::new(self.radius, 0.0); let mut t0 = EFloat::default(); let mut t1 = EFloat::default(); if !quadratic(a, b, c, &mut t0, &mut t1) { return None; } if t0.upper_bound() > ray.t_max || t1.lower_bound() <= 0.0 { return None; } let mut t_shape_hit = t0; if t_shape_hit.lower_bound() <= 0.0 { t_shape_hit = t1; if t_shape_hit.upper_bound() > ray.t_max { return None; } } p_hit = ray.point_at_time(t_shape_hit.v); p_hit *= self.radius / p_hit.distance(&Point3f::new(0.0, 0.0, 0.0)); if p_hit.x == 0.0 && p_hit.y == 0.0 { p_hit.x = 1e-5 * self.radius; } phi = p_hit.y.atan2(p_hit.x); if phi < 0.0 { phi += 2.0 * PI; } if (self.z_min > -self.radius && p_hit.z < self.z_min) || (self.z_max < self.radius && p_hit.z > self.z_max) || phi > self.phi_max { if t_shape_hit == t1 { return None; } 
if t1.upper_bound() > ray.t_max { return None; } t_shape_hit = t1; p_hit = ray.point_at_time(t_shape_hit.v); p_hit *= self.radius / p_hit.distance(&Point3f::default()); if p_hit.x == 0.0 && p_hit.y == 0.0 { p_hit.x = 1e-5 * self.radius; } phi = p_hit.y.atan2(p_hit.x); if phi < 0.0 { phi += 2.0 * PI; } if self.z_min > -self.radius && p_hit.z < self.z_min || (self.z_max < self.radius && p_hit.z > self.z_max) || phi > self.phi_max { return None; } } let u = phi / self.phi_max; let theta = clamp(p_hit.z / self.radius, -1.0, 1.0).acos(); let v = (theta - self.theta_min) / (self.theta_max - self.theta_min); let z_radius = (p_hit.x * p_hit.x + p_hit.y * p_hit.y).sqrt(); let inv_z_radius = 1.0 / z_radius; let cos_phi = p_hit.x * inv_z_radius; let sin_phi = p_hit.y * inv_z_radius; let dpdu = Vector3f::new(-self.phi_max * p_hit.y, self.phi_max * p_hit.x, 0.0); let dpdv = (self.theta_max - self.theta_min) * Vector3f::new(p_hit.z * cos_phi, p_hit.z * sin_phi, -self.radius * theta.sin()); let d2_pduu = -self.phi_max * self.phi_max * Vector3f::new(p_hit.x, p_hit.y, 0.0); let d2_pduv = (self.theta_max - self.theta_min) * p_hit.z * self.phi_max * Vector3f::new(-sin_phi, cos_phi, 0.); let d2_pdvv = -(self.theta_max - self.theta_min) * (self.theta_max - self.theta_min) * Vector3f::new(p_hit.x, p_hit.y, p_hit.z); let e = dpdu.dot(&dpdu); let f = dpdu.dot(&dpdv); let g = dpdv.dot(&dpdv); let n = dpdu.cross(&dpdv).normalize(); let e = n.dot(&d2_pduu); let f = n.dot(&d2_pduv); let g = n.dot(&d2_pdvv); let inv_e_g_f2 = 1.0 / (e * g - f * f); let dndu = Normal3f::from((f * f - e * g) * inv_e_g_f2 * dpdu + (e * f - f * e) * inv_e_g_f2 * dpdv); let dndv = Normal3f::from((g * f - f * g) * inv_e_g_f2 * dpdu + (f * f - g * e) * inv_e_g_f2 * dpdv); let p_error = gamma(5) * Vector3f::from(p_hit).abs(); let isect = self.object_to_world.tranform_surface_interaction(&SurfaceInteraction::new(p_hit, p_error, Point2f::new(u, v), -ray.d, -dpdu, -dpdv, -dndu, -dndv, ray.time, Some(self), 0)); 
Some((isect, t_shape_hit.v as Float)) }
fn area(&self) -> Float { self.phi_max * self.radius * (self.z_max - self.z_min) } fn sample(&self, u: &Point2f, pdf: &mut Float) -> Box<dyn Interaction> { let mut p_obj = Point3f::new(0.0, 0.0, 0.0) + self.radius * uniform_sample_sphere(u); let mut it = SimpleInteraction::default(); it.n = self.object_to_world.transform_normal(&Normal3f::new(p_obj.x, p_obj.y, p_obj.z)).normalize(); if self.reverse_orientation { it.n *= -1.0; } p_obj *= self.radius / p_obj.distance(&Point3f::default()); let p_obj_error = gamma(5) * Vector3f::from(p_obj).abs(); it.p = self.object_to_world.transform_point_with_abs_error(&p_obj, &p_obj_error, &mut it.p_error); *pdf = 1.0 / self.area(); Box::new(it) } fn sample_with_point(&self, iref: &dyn Interaction, u: &Point2f, pdf: &mut Float) -> Box<dyn Interaction> { let p_center = self.object_to_world.transform_point(&Point3f::default()); let p_origin = offset_ray_origin(&iref.get_p(), &iref.get_p_error(), &iref.get_n(), &(p_center - iref.get_p())); if p_origin.distance_squared(&p_center) <= self.radius * self.radius { let intr = self.sample(&u, pdf); let wi = intr.get_p() - iref.get_p(); if wi.length_squared() == 0.0 { *pdf = 0.0; } else { wi = wi.normalize(); *pdf *= iref.get_p().distance_squared(&intr.get_p()) / dot_normal_vec(&intr.get_n(), &-wi).abs(); } if pdf.is_infinite() { *pdf = 0.0; } return intr; } let dc = iref.get_p().distance(&p_center); let inv_dc = 1.0 / dc; let wc = (p_center - iref.get_p()) * inv_dc; let wc_x: Vector3f; let wc_y: Vector3f; coordinate_system(&wc, &mut wc_x, &mut wc_y); let sin_theta_max = self.radius * inv_dc; let sin_theta_max2 = sin_theta_max * sin_theta_max; let inv_sin_theta_max = 1.0 / sin_theta_max; let cos_theta_max = (1.0 - sin_theta_max2).max(0.0).sqrt(); let cos_theta = (cos_theta_max - 1.0) * u.x + 1.0; let sin_theta2 = 1.0 - cos_theta * cos_theta; if sin_theta_max2 < 0.000685230 /* sin^2(1.5 deg) */ { /* Fall back to a Taylor series expansion for small angles, where the standard approach suffers 
from severe cancellation errors */ sin_theta2 = sin_theta_max2 * u.x; cos_theta = (1.0 - sin_theta2).sqrt(); } let cos_alpha = sin_theta2 * inv_sin_theta_max + cos_theta * (1.0 - sin_theta2 * inv_sin_theta_max * inv_sin_theta_max).max(0.0).sqrt(); let sin_alpha = (1.0 - cos_alpha*cos_alpha).max(0.0).sqrt(); let phi = u.y * 2.0 * PI; let n_world = spherical_direction_vec3(sin_alpha, cos_alpha, phi, &-wc_x, &-wc_y, &-wc); let p_world = p_center + Point3f::new(n_world.x, n_world.y, n_world.z) * self.radius; let mut it = SimpleInteraction::default(); it.p = p_world; it.p_error = gamma(5) * Vector3f::from(p_world).abs(); it.n = n_world.into(); if self.reverse_orientation { it.n *= -1.0; } *pdf = 1.0 / (2.0 * PI * (1.0 - cos_theta_max)); Box::new(it) } fn pdf_with_point(&self, iref: &dyn Interaction, wi: &Vector3f) -> Float { let p_center = self.object_to_world.transform_point(&Point3f::default()); let p_origin = offset_ray_origin(&iref.get_p(), &iref.get_p_error(), &iref.get_n(), &(p_center - iref.get_p())); if p_origin.distance_squared(&p_center) <= self.radius * self.radius { return Shape::pdf_with_point(self, iref, wi); } let sin_theta_max2 = self.radius * self.radius / iref.get_p().distance_squared(&p_center); let cos_theta_max = (1.0 - sin_theta_max2).max(0.0).sqrt(); uniform_cone_pdf(cos_theta_max) } fn solid_angle(&self, p: &Point3f, n_samples: i32) -> Float { let p_center = self.object_to_world.transform_point(&Point3f::default()); if p.distance_squared(&p_center) <= self.radius * self.radius { return 4.0 * PI; } let sin_theta2 = self.radius * self.radius / p.distance_squared(&p_center); let cos_theta = (1.0 - sin_theta2).max(0.0).sqrt(); (2.0 * PI * (1.0 - cos_theta)) } }
fn intersect_p(&self, ray: &Ray, test_alpha_texture: bool) -> bool { let _p = Profiler::instance().profile("Sphere::intersect_p"); let mut phi: Float = 0.0; let mut p_hit: Point3f; let mut o_err = Vector3f::default(); let mut d_err = Vector3f::default(); let ray = self.world_to_object.transform_ray_with_error(&ray, &mut o_err, &mut d_err); let ox = EFloat::new(ray.o.x, o_err.x); let oy = EFloat::new(ray.o.y, o_err.y); let oz = EFloat::new(ray.o.z, o_err.z); let dx = EFloat::new(ray.d.x, d_err.x); let dy = EFloat::new(ray.d.y, d_err.y); let dz = EFloat::new(ray.d.z, d_err.z); let a = dx * dx + dy * dy + dz * dz; let b = (dx * ox + dy * oy + dz * oz) * 2.0; let c = ox * ox + oy * oy + oz * oz - EFloat::new(self.radius, 0.0) * EFloat::new(self.radius, 0.0); let mut t0 = EFloat::default(); let mut t1 = EFloat::default(); if !quadratic(a, b, c, &mut t0, &mut t1) { return false; } if t0.upper_bound() > ray.t_max || t1.lower_bound() <= 0.0 { return false; } let mut t_shape_hit = t0; if t_shape_hit.lower_bound() <= 0.0 { t_shape_hit = t1; if t_shape_hit.upper_bound() > ray.t_max { return false; } } p_hit = ray.point_at_time(t_shape_hit.v); p_hit *= self.radius / p_hit.distance(&Point3f::new(0.0, 0.0, 0.0)); if p_hit.x == 0.0 && p_hit.y == 0.0 { p_hit.x = 1e-5 * self.radius; } phi = p_hit.y.atan2(p_hit.x); if phi < 0.0 { phi += 2.0 * PI; } if (self.z_min > -self.radius && p_hit.z < self.z_min) || (self.z_max < self.radius && p_hit.z > self.z_max) || phi > self.phi_max { if t_shape_hit == t1 { return false; } if t1.upper_bound() > ray.t_max { return false; } t_shape_hit = t1; p_hit = ray.point_at_time(t_shape_hit.v); p_hit *= self.radius / p_hit.distance(&Point3f::default()); if p_hit.x == 0.0 && p_hit.y == 0.0 { p_hit.x = 1e-5 * self.radius; } phi = p_hit.y.atan2(p_hit.x); if phi < 0.0 { phi += 2.0 * PI; } if self.z_min > -self.radius && p_hit.z < self.z_min || (self.z_max < self.radius && p_hit.z > self.z_max) || phi > self.phi_max { return false; } } true }
function_block-full_function
[ { "content": "/// Find solution(s) of the quadratic equation at<sup>2</sup> + bt + c = 0 using\n\n/// *EFloat* instead of *Float* for error bounds.\n\npub fn quadratic(a: EFloat, b: EFloat, c: EFloat, t0: &mut EFloat, t1: &mut EFloat) -> bool {\n\n let discrim: f64 = b.v as f64 * b.v as f64 - 4.0f64 * a.v as...
Rust
src/macros.rs
MeirShpilraien/ijson
a5dbe78d7f22856f7deaeeeb2bf2df436409243c
macro_rules! value_subtype_impls { ($t:ty, $cf:ident, $rcf:ident, $mcf:ident) => { impl std::convert::AsRef<crate::IValue> for $t { fn as_ref(&self) -> &crate::IValue { &self.0 } } impl std::convert::AsMut<crate::IValue> for $t { fn as_mut(&mut self) -> &mut crate::IValue { &mut self.0 } } impl std::borrow::Borrow<crate::IValue> for $t { fn borrow(&self) -> &crate::IValue { &self.0 } } impl std::borrow::BorrowMut<crate::IValue> for $t { fn borrow_mut(&mut self) -> &mut crate::IValue { &mut self.0 } } impl std::convert::From<$t> for crate::IValue { fn from(other: $t) -> Self { other.0 } } impl std::convert::TryFrom<crate::IValue> for $t { type Error = crate::IValue; fn try_from(other: crate::IValue) -> Result<Self, crate::IValue> { other.$cf() } } impl<'a> std::convert::TryFrom<&'a crate::IValue> for &'a $t { type Error = (); fn try_from(other: &'a crate::IValue) -> Result<Self, ()> { other.$rcf().ok_or(()) } } impl<'a> std::convert::TryFrom<&'a mut crate::IValue> for &'a mut $t { type Error = (); fn try_from(other: &'a mut crate::IValue) -> Result<Self, ()> { other.$mcf().ok_or(()) } } }; } macro_rules! typed_conversions { ($( $interm:ty: $( $src:ty $(where ($($gb:tt)*))* ),*; )*) => { $( $( impl $(<$($gb)*>)* From<$src> for IValue { fn from(other: $src) -> Self { <$interm>::from(other).into() } } )* )* } } #[macro_export(local_inner_macros)] macro_rules! ijson { ($($json:tt)+) => { $crate::ijson_internal!($($json)+) }; } #[macro_export(local_inner_macros)] #[doc(hidden)] macro_rules! 
ijson_internal { (@array $array:ident) => {}; (@array $array:ident ,) => {}; (@array $array:ident , null $($rest:tt)*) => { $array.push(ijson_internal!(null)); ijson_internal!(@array $array $($rest)*) }; (@array $array:ident , true $($rest:tt)*) => { $array.push(ijson_internal!(true)); ijson_internal!(@array $array $($rest)*) }; (@array $array:ident , false $($rest:tt)*) => { $array.push(ijson_internal!(false)); ijson_internal!(@array $array $($rest)*) }; (@array $array:ident , [$($arr:tt)*] $($rest:tt)*) => { $array.push(ijson_internal!([$($arr)*])); ijson_internal!(@array $array $($rest)*) }; (@array $array:ident , {$($obj:tt)*} $($rest:tt)*) => { $array.push(ijson_internal!({$($obj)*})); ijson_internal!(@array $array $($rest)*) }; (@array $array:ident , $next:expr , $($rest:tt)*) => { $array.push(ijson_internal!($next)); ijson_internal!(@array $array , $($rest)*) }; (@array $array:ident , $last:expr) => { $array.push(ijson_internal!($last)); }; (@array $array:ident , $unexpected:tt $($rest:tt)*) => { ijson_unexpected!($unexpected) }; (@array $array:ident $unexpected:tt $($rest:tt)*) => { ijson_unexpected!($unexpected) }; (@object $object:ident () () ()) => {}; (@object $object:ident [$($key:tt)+] ($value:expr) , $($rest:tt)*) => { let _ = $object.insert(($($key)+), $value); ijson_internal!(@object $object () ($($rest)*) ($($rest)*)); }; (@object $object:ident [$($key:tt)+] ($value:expr) $unexpected:tt $($rest:tt)*) => { ijson_unexpected!($unexpected); }; (@object $object:ident [$($key:tt)+] ($value:expr)) => { let _ = $object.insert(($($key)+), $value); }; (@object $object:ident ($($key:tt)+) (: null $($rest:tt)*) $copy:tt) => { ijson_internal!(@object $object [$($key)+] (ijson_internal!(null)) $($rest)*); }; (@object $object:ident ($($key:tt)+) (: true $($rest:tt)*) $copy:tt) => { ijson_internal!(@object $object [$($key)+] (ijson_internal!(true)) $($rest)*); }; (@object $object:ident ($($key:tt)+) (: false $($rest:tt)*) $copy:tt) => { ijson_internal!(@object 
$object [$($key)+] (ijson_internal!(false)) $($rest)*); }; (@object $object:ident ($($key:tt)+) (: [$($array:tt)*] $($rest:tt)*) $copy:tt) => { ijson_internal!(@object $object [$($key)+] (ijson_internal!([$($array)*])) $($rest)*); }; (@object $object:ident ($($key:tt)+) (: {$($map:tt)*} $($rest:tt)*) $copy:tt) => { ijson_internal!(@object $object [$($key)+] (ijson_internal!({$($map)*})) $($rest)*); }; (@object $object:ident ($($key:tt)+) (: $value:expr , $($rest:tt)*) $copy:tt) => { ijson_internal!(@object $object [$($key)+] (ijson_internal!($value)) , $($rest)*); }; (@object $object:ident ($($key:tt)+) (: $value:expr) $copy:tt) => { ijson_internal!(@object $object [$($key)+] (ijson_internal!($value))); }; (@object $object:ident ($($key:tt)+) (:) $copy:tt) => { ijson_internal!(); }; (@object $object:ident ($($key:tt)+) () $copy:tt) => { ijson_internal!(); }; (@object $object:ident () (: $($rest:tt)*) ($colon:tt $($copy:tt)*)) => { ijson_unexpected!($colon); }; (@object $object:ident ($($key:tt)*) (, $($rest:tt)*) ($comma:tt $($copy:tt)*)) => { ijson_unexpected!($comma); }; (@object $object:ident () (($key:expr) : $($rest:tt)*) $copy:tt) => { ijson_internal!(@object $object ($key) (: $($rest)*) (: $($rest)*)); }; (@object $object:ident ($($key:tt)*) (: $($unexpected:tt)+) $copy:tt) => { ijson_expect_expr_comma!($($unexpected)+); }; (@object $object:ident ($($key:tt)*) ($tt:tt $($rest:tt)*) $copy:tt) => { ijson_internal!(@object $object ($($key)* $tt) ($($rest)*) ($($rest)*)); }; (null) => { $crate::IValue::NULL }; (true) => { $crate::IValue::TRUE }; (false) => { $crate::IValue::FALSE }; ([]) => { $crate::IValue::from($crate::IArray::new()) }; ([ $($tt:tt)+ ]) => { $crate::IValue::from({ let mut array = $crate::IArray::new(); ijson_internal!(@array array , $($tt)+); array }) }; ({}) => { $crate::IValue::from($crate::IObject::new()) }; ({ $($tt:tt)+ }) => { $crate::IValue::from({ let mut object = $crate::IObject::new(); ijson_internal!(@object object () ($($tt)+) 
($($tt)+)); object }) }; ($other:expr) => { $crate::to_value(&$other).unwrap() }; } #[macro_export] #[doc(hidden)] macro_rules! ijson_unexpected { () => {}; } #[macro_export] #[doc(hidden)] macro_rules! ijson_expect_expr_comma { ($e:expr , $($tt:tt)*) => {}; }
macro_rules! value_subtype_impls { ($t:ty, $cf:ident, $rcf:ident, $mcf:ident) => { impl std::convert::AsRef<crate::IValue> for $t { fn as_ref(&self) -> &crate::IValue { &self.0 } } impl std::convert::AsMut<crate::IValue> for $t { fn as_mut(&mut self) -> &mut crate::IValue { &mut self.0 } } impl std::borrow::Borrow<crate::IValue> for $t { fn borrow(&self) -> &crate::IValue { &self.0 } } impl std::borrow::BorrowMut<crate::IValue> for $t { fn borrow_mut(&mut self) -> &mut crate::IValue { &mut self.0 } } impl std::convert::From<$t> for crate::IValue { fn from(other: $t) -> Self { other.0 } } impl std::convert::TryFrom<crate::IValue> for $t { type Error = crate::IValue; fn try_from(other: crate::IValue) -> Result<Self, crate::IValue> { other.$cf() } } impl<'a> std::convert::TryFrom<&'a crate::IValue> for &'a $t { type Error = (); fn try_from(other: &'a crate::IValue) -> Result<Self, ()> { o
(@object $object:ident [$($key:tt)+] ($value:expr) , $($rest:tt)*) => { let _ = $object.insert(($($key)+), $value); ijson_internal!(@object $object () ($($rest)*) ($($rest)*)); }; (@object $object:ident [$($key:tt)+] ($value:expr) $unexpected:tt $($rest:tt)*) => { ijson_unexpected!($unexpected); }; (@object $object:ident [$($key:tt)+] ($value:expr)) => { let _ = $object.insert(($($key)+), $value); }; (@object $object:ident ($($key:tt)+) (: null $($rest:tt)*) $copy:tt) => { ijson_internal!(@object $object [$($key)+] (ijson_internal!(null)) $($rest)*); }; (@object $object:ident ($($key:tt)+) (: true $($rest:tt)*) $copy:tt) => { ijson_internal!(@object $object [$($key)+] (ijson_internal!(true)) $($rest)*); }; (@object $object:ident ($($key:tt)+) (: false $($rest:tt)*) $copy:tt) => { ijson_internal!(@object $object [$($key)+] (ijson_internal!(false)) $($rest)*); }; (@object $object:ident ($($key:tt)+) (: [$($array:tt)*] $($rest:tt)*) $copy:tt) => { ijson_internal!(@object $object [$($key)+] (ijson_internal!([$($array)*])) $($rest)*); }; (@object $object:ident ($($key:tt)+) (: {$($map:tt)*} $($rest:tt)*) $copy:tt) => { ijson_internal!(@object $object [$($key)+] (ijson_internal!({$($map)*})) $($rest)*); }; (@object $object:ident ($($key:tt)+) (: $value:expr , $($rest:tt)*) $copy:tt) => { ijson_internal!(@object $object [$($key)+] (ijson_internal!($value)) , $($rest)*); }; (@object $object:ident ($($key:tt)+) (: $value:expr) $copy:tt) => { ijson_internal!(@object $object [$($key)+] (ijson_internal!($value))); }; (@object $object:ident ($($key:tt)+) (:) $copy:tt) => { ijson_internal!(); }; (@object $object:ident ($($key:tt)+) () $copy:tt) => { ijson_internal!(); }; (@object $object:ident () (: $($rest:tt)*) ($colon:tt $($copy:tt)*)) => { ijson_unexpected!($colon); }; (@object $object:ident ($($key:tt)*) (, $($rest:tt)*) ($comma:tt $($copy:tt)*)) => { ijson_unexpected!($comma); }; (@object $object:ident () (($key:expr) : $($rest:tt)*) $copy:tt) => { ijson_internal!(@object 
$object ($key) (: $($rest)*) (: $($rest)*)); }; (@object $object:ident ($($key:tt)*) (: $($unexpected:tt)+) $copy:tt) => { ijson_expect_expr_comma!($($unexpected)+); }; (@object $object:ident ($($key:tt)*) ($tt:tt $($rest:tt)*) $copy:tt) => { ijson_internal!(@object $object ($($key)* $tt) ($($rest)*) ($($rest)*)); }; (null) => { $crate::IValue::NULL }; (true) => { $crate::IValue::TRUE }; (false) => { $crate::IValue::FALSE }; ([]) => { $crate::IValue::from($crate::IArray::new()) }; ([ $($tt:tt)+ ]) => { $crate::IValue::from({ let mut array = $crate::IArray::new(); ijson_internal!(@array array , $($tt)+); array }) }; ({}) => { $crate::IValue::from($crate::IObject::new()) }; ({ $($tt:tt)+ }) => { $crate::IValue::from({ let mut object = $crate::IObject::new(); ijson_internal!(@object object () ($($tt)+) ($($tt)+)); object }) }; ($other:expr) => { $crate::to_value(&$other).unwrap() }; } #[macro_export] #[doc(hidden)] macro_rules! ijson_unexpected { () => {}; } #[macro_export] #[doc(hidden)] macro_rules! ijson_expect_expr_comma { ($e:expr , $($tt:tt)*) => {}; }
ther.$rcf().ok_or(()) } } impl<'a> std::convert::TryFrom<&'a mut crate::IValue> for &'a mut $t { type Error = (); fn try_from(other: &'a mut crate::IValue) -> Result<Self, ()> { other.$mcf().ok_or(()) } } }; } macro_rules! typed_conversions { ($( $interm:ty: $( $src:ty $(where ($($gb:tt)*))* ),*; )*) => { $( $( impl $(<$($gb)*>)* From<$src> for IValue { fn from(other: $src) -> Self { <$interm>::from(other).into() } } )* )* } } #[macro_export(local_inner_macros)] macro_rules! ijson { ($($json:tt)+) => { $crate::ijson_internal!($($json)+) }; } #[macro_export(local_inner_macros)] #[doc(hidden)] macro_rules! ijson_internal { (@array $array:ident) => {}; (@array $array:ident ,) => {}; (@array $array:ident , null $($rest:tt)*) => { $array.push(ijson_internal!(null)); ijson_internal!(@array $array $($rest)*) }; (@array $array:ident , true $($rest:tt)*) => { $array.push(ijson_internal!(true)); ijson_internal!(@array $array $($rest)*) }; (@array $array:ident , false $($rest:tt)*) => { $array.push(ijson_internal!(false)); ijson_internal!(@array $array $($rest)*) }; (@array $array:ident , [$($arr:tt)*] $($rest:tt)*) => { $array.push(ijson_internal!([$($arr)*])); ijson_internal!(@array $array $($rest)*) }; (@array $array:ident , {$($obj:tt)*} $($rest:tt)*) => { $array.push(ijson_internal!({$($obj)*})); ijson_internal!(@array $array $($rest)*) }; (@array $array:ident , $next:expr , $($rest:tt)*) => { $array.push(ijson_internal!($next)); ijson_internal!(@array $array , $($rest)*) }; (@array $array:ident , $last:expr) => { $array.push(ijson_internal!($last)); }; (@array $array:ident , $unexpected:tt $($rest:tt)*) => { ijson_unexpected!($unexpected) }; (@array $array:ident $unexpected:tt $($rest:tt)*) => { ijson_unexpected!($unexpected) }; (@object $object:ident () () ()) => {};
random
[ { "content": "fn key_must_be_a_string() -> Error {\n\n Error::custom(\"Object key must be a string\")\n\n}\n\n\n\nimpl Serializer for ObjectKeySerializer {\n\n type Ok = IString;\n\n type Error = Error;\n\n\n\n type SerializeSeq = Impossible<IString, Error>;\n\n type SerializeTuple = Impossible<I...
Rust
src/generator.rs
zhipeng-jia/flood
d357dea3ef4019ef883ed63f25c31dbd5dc7800e
use std::collections::VecDeque; use std::fmt::{self, Write}; use std::iter; use std::sync::{atomic, Arc, Condvar, Mutex}; use std::thread; use bytes::{BufMut, Bytes, BytesMut}; use log::*; use quick_js::{self, JsValue}; static JS_LIB_CODE: &'static str = include_str!("lib.js"); pub struct Request { pub input: Bytes, pub req_type: u32, } #[derive(Debug)] pub enum Error { JsExecError(quick_js::ExecutionError), InvalidScript(String), } impl std::error::Error for Error {} impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Error::JsExecError(js_err) => write!(f, "JsExecutionError: {}", js_err), Error::InvalidScript(msg) => write!(f, "Invalid user script: {}", msg), } } } type Result<T> = std::result::Result<T, Error>; struct RequestQueue { capacity: usize, queue: Mutex<VecDeque<Request>>, cond: Condvar, waiter: atomic::AtomicUsize, stopped: atomic::AtomicBool, } impl RequestQueue { pub fn new(capacity: usize) -> RequestQueue { Self { capacity: capacity, queue: Mutex::new(VecDeque::<Request>::with_capacity(capacity)), cond: Condvar::new(), waiter: atomic::AtomicUsize::new(0), stopped: atomic::AtomicBool::new(false), } } pub fn push(&self, req: Request) { let mut queue = self.queue.lock().unwrap(); while (*queue).len() >= self.capacity { self.waiter.fetch_add(1, atomic::Ordering::SeqCst); queue = self.cond.wait(queue).unwrap(); self.waiter.fetch_sub(1, atomic::Ordering::SeqCst); if self.stopped.load(atomic::Ordering::SeqCst) { return; } } assert!((*queue).len() < self.capacity); (*queue).push_back(req); } pub fn pop(&self) -> Option<Request> { let mut queue = self.queue.lock().unwrap(); if let Some(req) = (*queue).pop_front() { if self.waiter.load(atomic::Ordering::SeqCst) > 0 { self.cond.notify_one(); } return Some(req); } None } pub fn stop_all_waiters(&self) { self.stopped.store(true, atomic::Ordering::SeqCst); self.cond.notify_all(); } } pub struct Generator { host: String, num_threads: usize, thread_control: 
Arc<atomic::AtomicBool>, threads: Vec<thread::JoinHandle<()>>, queue: Arc<RequestQueue>, js_context: quick_js::Context, } macro_rules! expect_js_int { ($value:expr, $msg:expr) => { match $value { JsValue::Int(num) => num.clone(), _ => { return Err(Error::InvalidScript($msg.to_string())); } } }; } macro_rules! expect_js_str { ($value:expr, $msg:expr) => { match $value { JsValue::String(s) => s, _ => { return Err(Error::InvalidScript($msg.to_string())); } } }; } macro_rules! expect_js_obj { ($value:expr, $msg:expr) => { match $value { JsValue::Object(obj) => obj, _ => { return Err(Error::InvalidScript($msg.to_string())); } } }; } impl Drop for Generator { fn drop(&mut self) { self.thread_control.store(false, atomic::Ordering::SeqCst); self.queue.stop_all_waiters(); while let Some(thread) = self.threads.pop() { thread.join().unwrap(); } } } impl Generator { pub fn new(host: &str, num_threads: usize, max_qsize: usize) -> Generator { let js_context = quick_js::Context::new().unwrap(); js_context.eval(JS_LIB_CODE).unwrap(); Self { host: String::from(host), num_threads: num_threads, thread_control: Arc::new(atomic::AtomicBool::new(false)), threads: Vec::<thread::JoinHandle<()>>::with_capacity(num_threads), queue: Arc::new(RequestQueue::new(max_qsize)), js_context: js_context, } } fn test_user_script(&self, user_script: &str) -> Result<()> { if let Err(js_err) = self.js_context.eval(user_script) { return Err(Error::JsExecError(js_err)); } if let Err(err) = Generator::new_request("test.com", &self.js_context) { return Err(err); } Ok(()) } pub fn load_user_script(&mut self, user_script: &str) -> Result<()> { self.test_user_script(user_script)?; self.thread_control.store(true, atomic::Ordering::SeqCst); for i in 0..self.num_threads { let control = self.thread_control.clone(); let queue = self.queue.clone(); let user_script = String::from(user_script); let host = self.host.clone(); let thread = thread::spawn(move || { info!("{}-th JS thread starts", i); let js_context = 
quick_js::Context::new().unwrap(); js_context.eval(JS_LIB_CODE).unwrap(); js_context.eval(&user_script).unwrap(); while control.load(atomic::Ordering::SeqCst) { let req = Generator::new_request(&host, &js_context).unwrap(); queue.push(req); } }); self.threads.push(thread); } Ok(()) } fn new_request(host: &str, js_context: &quick_js::Context) -> Result<Request> { let empty_args = iter::empty::<JsValue>(); let request = match js_context.call_function("newRequest", empty_args) { Ok(value) => expect_js_obj!(value, "newRequest must return an object"), Err(js_err) => { return Err(Error::JsExecError(js_err)); } }; for &key in ["type", "method", "path", "headers"].iter() { if !request.contains_key(key) { return Err(Error::InvalidScript(format!( "Returned object must contain `{}`", key ))); } } let req_type = expect_js_int!(request.get("type").unwrap(), "`type` must be an integer"); let mut data = BytesMut::with_capacity(256); write!( &mut data, "{} {} HTTP/1.1\r\n", expect_js_str!(request.get("method").unwrap(), "`method` must be a string"), expect_js_str!(request.get("path").unwrap(), "`path` must be a string") ) .unwrap(); write!(&mut data, "Host: {}\r\n", host).unwrap(); write!(&mut data, "Connection: keep-alive\r\n").unwrap(); let mut has_accept = false; let mut has_user_agent = false; let mut has_content_type = false; let headers = expect_js_obj!( request.get("headers").unwrap(), "`headers` must be an object" ); for (key, value) in headers.iter() { if key == "Host" || key == "Connection" || key == "Content-Length" { continue; } if key == "Accept" { has_accept = true; } if key == "User-Agent" { has_user_agent = true; } if key == "Content-Type" { has_content_type = true; } let value_str = expect_js_str!(value, "header value must be a string"); write!(&mut data, "{}: {}\r\n", key, value_str).unwrap(); } if !has_accept { write!(&mut data, "Accept: */*\r\n").unwrap(); } if !has_user_agent { write!(&mut data, "User-Agent: flood\r\n").unwrap(); } if !has_content_type { 
write!(&mut data, "Content-Type: text/plain\r\n").unwrap(); } if request.contains_key("body") { let body = expect_js_str!(request.get("body").unwrap(), "`body` must be a string"); write!(&mut data, "Content-Length: {}\r\n\r\n", body.len()).unwrap(); data.put_slice(body.as_bytes()); } else { write!(&mut data, "\r\n").unwrap(); } Ok(Request { input: data.freeze(), req_type: req_type as u32, }) } pub fn get(&mut self) -> Request { if let Some(req) = self.queue.pop() { return req; } warn!("JS threads failed to generate enough request data"); Generator::new_request(&self.host, &self.js_context).unwrap() } }
use std::collections::VecDeque; use std::fmt::{self, Write}; use std::iter; use std::sync::{atomic, Arc, Condvar, Mutex}; use std::thread; use bytes::{BufMut, Bytes, BytesMut}; use log::*; use quick_js::{self, JsValue}; static JS_LIB_CODE: &'static str = include_str!("lib.js"); pub struct Request { pub input: Bytes, pub req_type: u32, } #[derive(Debug)] pub enum Error { JsExecError(quick_js::ExecutionError), InvalidScript(String), } impl std::error::Error for Error {} impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Error::JsExecError(js_err) => write!(f, "JsExecutionError: {}", js_err), Error::InvalidScript(msg) => write!(f, "Invalid user script: {}", msg), } } } type Result<T> = std::result::Result<T, Error>; struct RequestQueue { capacity: usize, queue: Mutex<VecDeque<Request>>, cond: Condvar, waiter: atomic::AtomicUsize, stopped: atomic::AtomicBool, } impl RequestQueue { pub fn new(capacity: usize) -> RequestQueue { Self { capacity: capacity, queue: Mutex::new(VecDeque::<Request>::with_capacity(capacity)), cond: Condvar::new(), waiter: atomic::AtomicUsize::new(0), stopped: atomic::AtomicBool::new(false), } } pub fn push(&self, req: Request) { let mut queue = self.queue.lock().unwrap(); while (*queue).len() >= self.capacity { self.waiter.fetch_add(1, atomic::Ordering::SeqCst); queue = self.cond.wait(queue).unwrap(); self.waiter.fetch_sub(1, atomic::Ordering::SeqCst); if self.stopped.load(atomic::Ordering::SeqCst) { return; } } assert!((*queue).len() < self.capacity); (*queue).push_back(req); } pub fn pop(&self) -> Option<Request> { let mut queue = self.queue.lock().unwrap(); if let Some(req) = (*queue).pop_front() { if self.waiter.load(atomic::Ordering::SeqCst) > 0 { self.cond.notify_one(); } return Some(req); } None } pub fn stop_all_waiters(&self) { self.stopped.store(true, atomic::Ordering::SeqCst); self.cond.notify_all(); } } pub struct Generator { host: String, num_threads: usize, thread_control: 
Arc<atomic::AtomicBool>, threads: Vec<thread::JoinHandle<()>>, queue: Arc<RequestQueue>, js_context: quick_js::Context, } macro_rules! expect_js_int { ($value:expr, $msg:expr) => { match $value { JsValue::Int(num) => num.clone(), _ => { return Err(Error::InvalidScript($msg.to_string())); } } }; } macro_rules! expect_js_str { ($value:expr, $msg:expr) => { match $value { JsValue::String(s) => s, _ => { return Err(Error::InvalidScript($msg.to_string())); } } }; } macro_rules! expect_js_obj { ($value:expr, $msg:expr) => { match $value { JsValue::Object(obj) => obj, _ => { return Err(Error::InvalidScript($msg.to_string())); } } }; } impl Drop for Generator { fn drop(&mut self) { self.thread_control.store(false, atomic::Ordering::SeqCst); self.queue.stop_all_waiters(); while let Some(thread) = self.threads.pop() { thread.join().unwrap(); } } } impl Generator { pub fn new(host: &str, num_threads: usize, max_qsize: usize) -> Generator { let js_context = quick_js::Context::new().unwrap(); js_context.eval(JS_LIB_CODE).unwrap(); Self { host: String::from(host), num_threads: num_threads, thread_control: Arc::new(atomic::AtomicBool::new(false)), threads: Vec::<thread::JoinHandle<()>>::with_capacity(num_threads), queue: Arc::new(RequestQueue::new(max_qsize)), js_context: js_context, } } fn test_user_script(&self, user_script: &str) -> Result<()> { if let Err(js_err) = self.js_context.eval(user_script) { return Err(Error::JsExecError(js_err)); } if let Err(err) = Generator::new_request("test.com", &self.js_context) { return Err(err); } Ok(()) }
fn new_request(host: &str, js_context: &quick_js::Context) -> Result<Request> { let empty_args = iter::empty::<JsValue>(); let request = match js_context.call_function("newRequest", empty_args) { Ok(value) => expect_js_obj!(value, "newRequest must return an object"), Err(js_err) => { return Err(Error::JsExecError(js_err)); } }; for &key in ["type", "method", "path", "headers"].iter() { if !request.contains_key(key) { return Err(Error::InvalidScript(format!( "Returned object must contain `{}`", key ))); } } let req_type = expect_js_int!(request.get("type").unwrap(), "`type` must be an integer"); let mut data = BytesMut::with_capacity(256); write!( &mut data, "{} {} HTTP/1.1\r\n", expect_js_str!(request.get("method").unwrap(), "`method` must be a string"), expect_js_str!(request.get("path").unwrap(), "`path` must be a string") ) .unwrap(); write!(&mut data, "Host: {}\r\n", host).unwrap(); write!(&mut data, "Connection: keep-alive\r\n").unwrap(); let mut has_accept = false; let mut has_user_agent = false; let mut has_content_type = false; let headers = expect_js_obj!( request.get("headers").unwrap(), "`headers` must be an object" ); for (key, value) in headers.iter() { if key == "Host" || key == "Connection" || key == "Content-Length" { continue; } if key == "Accept" { has_accept = true; } if key == "User-Agent" { has_user_agent = true; } if key == "Content-Type" { has_content_type = true; } let value_str = expect_js_str!(value, "header value must be a string"); write!(&mut data, "{}: {}\r\n", key, value_str).unwrap(); } if !has_accept { write!(&mut data, "Accept: */*\r\n").unwrap(); } if !has_user_agent { write!(&mut data, "User-Agent: flood\r\n").unwrap(); } if !has_content_type { write!(&mut data, "Content-Type: text/plain\r\n").unwrap(); } if request.contains_key("body") { let body = expect_js_str!(request.get("body").unwrap(), "`body` must be a string"); write!(&mut data, "Content-Length: {}\r\n\r\n", body.len()).unwrap(); data.put_slice(body.as_bytes()); } else 
{ write!(&mut data, "\r\n").unwrap(); } Ok(Request { input: data.freeze(), req_type: req_type as u32, }) } pub fn get(&mut self) -> Request { if let Some(req) = self.queue.pop() { return req; } warn!("JS threads failed to generate enough request data"); Generator::new_request(&self.host, &self.js_context).unwrap() } }
pub fn load_user_script(&mut self, user_script: &str) -> Result<()> { self.test_user_script(user_script)?; self.thread_control.store(true, atomic::Ordering::SeqCst); for i in 0..self.num_threads { let control = self.thread_control.clone(); let queue = self.queue.clone(); let user_script = String::from(user_script); let host = self.host.clone(); let thread = thread::spawn(move || { info!("{}-th JS thread starts", i); let js_context = quick_js::Context::new().unwrap(); js_context.eval(JS_LIB_CODE).unwrap(); js_context.eval(&user_script).unwrap(); while control.load(atomic::Ordering::SeqCst) { let req = Generator::new_request(&host, &js_context).unwrap(); queue.push(req); } }); self.threads.push(thread); } Ok(()) }
function_block-full_function
[ { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n env_logger::Builder::from_env(Env::default().default_filter_or(\"info\")).init();\n\n let opt = Opt::from_args();\n\n\n\n let mut resolved_addrs = opt.host.to_socket_addrs()?;\n\n let addr: SocketAddr = resolved_addrs.next().unw...
Rust
gad/src/net_ext.rs
matbd/gad
83f2eb1c46cfb84e979fda940c9d6bf0e226c6d3
use crate::{ arith::ArithAlgebra, array::ArrayAlgebra, core::{CoreAlgebra, HasDims}, error::{check_equal_dimensions, Error, Result}, graph::Value, matrix::MatrixAlgebra, net::{HasGradientId, HasGradientReader, Net, WeightOps}, Graph1, Number, }; use serde::{Deserialize, Serialize}; pub trait SingleOutputNet<Data, Algebra>: Net<Algebra> where Algebra: HasGradientReader + CoreAlgebra<Data, Value = Self::Output>, { fn add_square_loss(self) -> SquareLoss<Self, Data> where Self: Sized, { SquareLoss(self, std::marker::PhantomData) } } impl<Data, Algebra, N> SingleOutputNet<Data, Algebra> for N where N: Net<Algebra>, Algebra: HasGradientReader + CoreAlgebra<Data, Value = Self::Output>, { } pub trait DiffNet<T>: Net<Graph1, Output = Value<T>> where T: Number, Self::Weights: WeightOps<T>, { fn apply_gradient_step(&mut self, lambda: T, batch: Vec<Self::Input>) -> Result<T> { let mut delta: Option<Self::Weights> = None; let mut cumulated_output: Option<T> = None; for example in batch { let mut g = Graph1::new(); let (output, info) = self.eval_with_gradient_info(&mut g, example)?; match &mut cumulated_output { opt @ None => *opt = Some(*output.data()), Some(val) => *val += *output.data(), } let store = g.evaluate_gradients_once(output.gid()?, T::one())?; let gradients = self.read_weight_gradients(info, &store)?; match &mut delta { opt @ None => *opt = Some(gradients.scale(lambda)), Some(val) => val.add_assign(gradients.scale(lambda))?, } } if let Some(delta) = delta { self.update_weights(delta)?; } cumulated_output.ok_or_else(|| Error::empty(func_name!())) } } impl<N, T> DiffNet<T> for N where T: Number, N: Net<Graph1, Output = Value<T>>, N::Weights: WeightOps<T>, { } #[derive(Debug, Clone, Serialize, Deserialize)] pub struct SquareLoss<N, Data>(N, std::marker::PhantomData<Data>); impl<Data, Algebra, N> Net<Algebra> for SquareLoss<N, Data> where Algebra: HasGradientReader + CoreAlgebra<Data, Value = N::Output> + ArrayAlgebra<N::Output> + ArithAlgebra<N::Output> + 
MatrixAlgebra<N::Output>, N: Net<Algebra>, Data: HasDims, N::Output: HasDims<Dims = Data::Dims>, Data::Dims: Clone + PartialEq + std::fmt::Debug, { type Input = (N::Input, Data); type Output = <Algebra as ArrayAlgebra<N::Output>>::Scalar; type Weights = N::Weights; type GradientInfo = N::GradientInfo; fn eval_with_gradient_info( &self, graph: &mut Algebra, input: Self::Input, ) -> Result<(Self::Output, Self::GradientInfo)> { let (output, info) = self.0.eval_with_gradient_info(graph, input.0)?; check_equal_dimensions( "eval_with_gradient_info", &[&output.dims(), &input.1.dims()], )?; let target = graph.constant(input.1); let delta = graph.sub(&target, &output)?; let loss = graph.norm2(&delta); Ok((loss, info)) } fn get_weights(&self) -> Self::Weights { self.0.get_weights() } fn set_weights(&mut self, weights: Self::Weights) -> Result<()> { self.0.set_weights(weights) } fn update_weights(&mut self, delta: Self::Weights) -> Result<()> { self.0.update_weights(delta) } fn read_weight_gradients( &self, info: Self::GradientInfo, store: &Algebra::GradientReader, ) -> Result<Self::Weights> { self.0.read_weight_gradients(info, store) } }
use crate::{ arith::ArithAlgebra, array::ArrayAlgebra, core::{CoreAlgebra, HasDims}, error::{check_equal_dimensions, Error, Result}, graph::Value, matrix::MatrixAlgebra, net::{HasGradientId, HasGradientReader, Net, WeightOps}, Graph1, Number, }; use serde::{Deserialize, Serialize}; pub trait SingleOutputNet<Data, Algebra>: Net<Algebra> where
ut = <Algebra as ArrayAlgebra<N::Output>>::Scalar; type Weights = N::Weights; type GradientInfo = N::GradientInfo; fn eval_with_gradient_info( &self, graph: &mut Algebra, input: Self::Input, ) -> Result<(Self::Output, Self::GradientInfo)> { let (output, info) = self.0.eval_with_gradient_info(graph, input.0)?; check_equal_dimensions( "eval_with_gradient_info", &[&output.dims(), &input.1.dims()], )?; let target = graph.constant(input.1); let delta = graph.sub(&target, &output)?; let loss = graph.norm2(&delta); Ok((loss, info)) } fn get_weights(&self) -> Self::Weights { self.0.get_weights() } fn set_weights(&mut self, weights: Self::Weights) -> Result<()> { self.0.set_weights(weights) } fn update_weights(&mut self, delta: Self::Weights) -> Result<()> { self.0.update_weights(delta) } fn read_weight_gradients( &self, info: Self::GradientInfo, store: &Algebra::GradientReader, ) -> Result<Self::Weights> { self.0.read_weight_gradients(info, store) } }
Algebra: HasGradientReader + CoreAlgebra<Data, Value = Self::Output>, { fn add_square_loss(self) -> SquareLoss<Self, Data> where Self: Sized, { SquareLoss(self, std::marker::PhantomData) } } impl<Data, Algebra, N> SingleOutputNet<Data, Algebra> for N where N: Net<Algebra>, Algebra: HasGradientReader + CoreAlgebra<Data, Value = Self::Output>, { } pub trait DiffNet<T>: Net<Graph1, Output = Value<T>> where T: Number, Self::Weights: WeightOps<T>, { fn apply_gradient_step(&mut self, lambda: T, batch: Vec<Self::Input>) -> Result<T> { let mut delta: Option<Self::Weights> = None; let mut cumulated_output: Option<T> = None; for example in batch { let mut g = Graph1::new(); let (output, info) = self.eval_with_gradient_info(&mut g, example)?; match &mut cumulated_output { opt @ None => *opt = Some(*output.data()), Some(val) => *val += *output.data(), } let store = g.evaluate_gradients_once(output.gid()?, T::one())?; let gradients = self.read_weight_gradients(info, &store)?; match &mut delta { opt @ None => *opt = Some(gradients.scale(lambda)), Some(val) => val.add_assign(gradients.scale(lambda))?, } } if let Some(delta) = delta { self.update_weights(delta)?; } cumulated_output.ok_or_else(|| Error::empty(func_name!())) } } impl<N, T> DiffNet<T> for N where T: Number, N: Net<Graph1, Output = Value<T>>, N::Weights: WeightOps<T>, { } #[derive(Debug, Clone, Serialize, Deserialize)] pub struct SquareLoss<N, Data>(N, std::marker::PhantomData<Data>); impl<Data, Algebra, N> Net<Algebra> for SquareLoss<N, Data> where Algebra: HasGradientReader + CoreAlgebra<Data, Value = N::Output> + ArrayAlgebra<N::Output> + ArithAlgebra<N::Output> + MatrixAlgebra<N::Output>, N: Net<Algebra>, Data: HasDims, N::Output: HasDims<Dims = Data::Dims>, Data::Dims: Clone + PartialEq + std::fmt::Debug, { type Input = (N::Input, Data); type Outp
random
[ { "content": "/// A Neural Network over an algebra of operations.\n\npub trait Net<Algebra: HasGradientReader> {\n\n /// Input of the network.\n\n type Input;\n\n /// Output of the network.\n\n type Output;\n\n /// External representation for the weights of the network.\n\n type Weights;\n\n ...
Rust
src/backend/sprite.rs
skyne98/blackonyx
175e6cccf7fe97539639bb0e0a189400ef6a4eeb
use anyhow::Result; use rayon::iter::{IndexedParallelIterator, IntoParallelIterator, ParallelIterator}; use std::{ sync::mpsc::{channel, Receiver, Sender}, thread::JoinHandle, }; use crate::backend::vertex::Vertex; const BASE_VEC_A: ultraviolet::Vec3 = ultraviolet::Vec3::new(0.0, 1.0, 1.0); const BASE_VEC_B: ultraviolet::Vec3 = ultraviolet::Vec3::new(0.0, 0.0, 1.0); const BASE_VEC_C: ultraviolet::Vec3 = ultraviolet::Vec3::new(1.0, 0.0, 1.0); const BASE_VEC_D: ultraviolet::Vec3 = ultraviolet::Vec3::new(1.0, 1.0, 1.0); const BASE_UV_A: ultraviolet::Vec2 = ultraviolet::Vec2::new(0.0, 0.0); const BASE_UV_B: ultraviolet::Vec2 = ultraviolet::Vec2::new(0.0, 1.0); const BASE_UV_C: ultraviolet::Vec2 = ultraviolet::Vec2::new(1.0, 1.0); const BASE_UV_D: ultraviolet::Vec2 = ultraviolet::Vec2::new(1.0, 0.0); const BASELINE_INDICES: &[u16] = &[0, 1, 3, 3, 1, 2]; #[inline] fn calculate_scale_mat( scale: ultraviolet::Vec2, source_size: ultraviolet::Vec2, ) -> ultraviolet::Mat3 { ultraviolet::Mat3::from_nonuniform_scale(ultraviolet::Vec3::new( source_size.x * scale.x, source_size.y * scale.y, 1.0, )) } #[inline] fn calculate_origin_translation_mat( origin: ultraviolet::Vec2, scale: ultraviolet::Vec2, source_size: ultraviolet::Vec2, ) -> ultraviolet::Mat3 { ultraviolet::Mat3::from_translation(ultraviolet::Vec2::new( -source_size.x * origin.x * scale.x, -source_size.y * origin.y * scale.y, )) } #[inline] fn calculate_rotation_mat(angle: f32) -> ultraviolet::Mat3 { ultraviolet::Mat3::from_rotation_z(angle * std::f32::consts::PI / 180.0) } #[inline] fn calculate_translation_mat(position: ultraviolet::Vec2) -> ultraviolet::Mat3 { ultraviolet::Mat3::from_translation(ultraviolet::Vec2::new((position).x, (position).y)) } pub struct Sprites { length: usize, texture_index: Vec<u32>, source_position: Vec<ultraviolet::Vec2>, source_size: Vec<ultraviolet::Vec2>, position: Vec<ultraviolet::Vec2>, angle: Vec<f32>, scale: Vec<ultraviolet::Vec2>, depth: Vec<f32>, color: Vec<[f32; 4]>, origin: 
Vec<ultraviolet::Vec2>, scale_mat: Vec<ultraviolet::Mat3>, origin_translation_mat: Vec<ultraviolet::Mat3>, rotation_mat: Vec<ultraviolet::Mat3>, translation_mat: Vec<ultraviolet::Mat3>, threads: usize, } impl Sprites { pub fn new() -> Self { Self { length: 0, texture_index: vec![], source_position: vec![], source_size: vec![], position: vec![], angle: vec![], scale: vec![], depth: vec![], color: vec![], origin: vec![], scale_mat: vec![], origin_translation_mat: vec![], rotation_mat: vec![], translation_mat: vec![], threads: 16, } } pub fn add( &mut self, texture_index: u32, source_position: ultraviolet::Vec2, source_size: ultraviolet::Vec2, position: ultraviolet::Vec2, angle: f32, scale: ultraviolet::Vec2, depth: f32, color: [f32; 4], origin: ultraviolet::Vec2, ) -> usize { let index = self.texture_index.len(); self.length += 1; self.texture_index.push(texture_index); self.source_position.push(source_position); self.source_size.push(source_size); self.position.push(position); self.angle.push(angle); self.scale.push(scale); self.depth.push(depth); self.color.push(color); self.origin.push(origin); self.scale_mat.push(calculate_scale_mat(scale, source_size)); self.origin_translation_mat .push(calculate_origin_translation_mat(origin, scale, source_size)); self.rotation_mat.push(calculate_rotation_mat(angle)); self.translation_mat .push(calculate_translation_mat(position)); index } pub fn set_texture_index(&mut self, index: usize, val: u32) -> Option<()> { *(self.texture_index.get_mut(index)?) = val; Some(()) } pub fn set_source_position(&mut self, index: usize, val: ultraviolet::Vec2) -> Option<()> { *(self.source_position.get_mut(index)?) = val; Some(()) } pub fn set_source_size(&mut self, index: usize, val: ultraviolet::Vec2) -> Option<()> { *(self.source_size.get_mut(index)?) = val; let scale = self.scale.get(index)?; let origin = self.origin.get(index)?; *(self.scale_mat.get_mut(index)?) 
= calculate_scale_mat(*scale, val); *(self.origin_translation_mat.get_mut(index)?) = calculate_origin_translation_mat(*origin, *scale, val); Some(()) } pub fn set_position(&mut self, index: usize, val: ultraviolet::Vec2) -> Option<()> { *(self.position.get_mut(index)?) = val; *(self.translation_mat.get_mut(index)?) = calculate_translation_mat(val); Some(()) } pub fn set_angle(&mut self, index: usize, val: f32) -> Option<()> { *(self.angle.get_mut(index)?) = val; *(self.rotation_mat.get_mut(index)?) = calculate_rotation_mat(val); Some(()) } pub fn set_scale(&mut self, index: usize, val: ultraviolet::Vec2) -> Option<()> { *(self.scale.get_mut(index)?) = val; let origin = self.origin.get(index)?; let source_size = self.source_size.get(index)?; *(self.scale_mat.get_mut(index)?) = calculate_scale_mat(val, *source_size); *(self.origin_translation_mat.get_mut(index)?) = calculate_origin_translation_mat(*origin, val, *source_size); Some(()) } pub fn set_depth(&mut self, index: usize, val: f32) -> Option<()> { *(self.depth.get_mut(index)?) = val; Some(()) } pub fn set_color(&mut self, index: usize, val: [f32; 4]) -> Option<()> { *(self.color.get_mut(index)?) = val; Some(()) } pub fn set_origin(&mut self, index: usize, val: ultraviolet::Vec2) -> Option<()> { *(self.origin.get_mut(index)?) = val; let scale = self.scale.get(index)?; let source_size = self.source_size.get(index)?; *(self.origin_translation_mat.get_mut(index)?) 
= calculate_origin_translation_mat(val, *scale, *source_size); Some(()) } pub fn len(&self) -> usize { self.length } pub fn remove(&mut self, index: usize) { self.length -= 1; self.texture_index.remove(index); self.source_position.remove(index); self.source_size.remove(index); self.position.remove(index); self.angle.remove(index); self.scale.remove(index); self.depth.remove(index); self.color.remove(index); self.origin.remove(index); self.scale_mat.remove(index); self.origin_translation_mat.remove(index); self.rotation_mat.remove(index); self.translation_mat.remove(index); } pub fn vertices_indices( &self, texture_width: u32, texture_height: u32, ) -> (Vec<Vertex>, Vec<u16>) { let thread_count = self.threads; let chunk_size = self.length / thread_count; let leftover = self.length - (chunk_size * thread_count); let mut result_vertices = vec![]; let mut result_indices = vec![]; (0..thread_count) .into_par_iter() .map(|index| { let thread_slice_start = if index == 0 { 0 } else { leftover + chunk_size * index }; let thread_slice_range = if index == 0 { thread_slice_start..thread_slice_start + chunk_size + leftover } else { thread_slice_start..thread_slice_start + chunk_size }; let thread_source_positions = &self.source_position[thread_slice_range.clone()]; let thread_source_size = &self.source_size[thread_slice_range.clone()]; let thread_scale_mat = &self.scale_mat[thread_slice_range.clone()]; let thread_origin_translation_mat = &self.origin_translation_mat[thread_slice_range.clone()]; let thread_rotation_mat = &self.rotation_mat[thread_slice_range.clone()]; let thread_translation = &self.translation_mat[thread_slice_range.clone()]; let range = 0..thread_slice_range.len(); let mut result_vertices = vec![]; let mut result_indices = vec![]; for local_index in range { let source_position = thread_source_positions.get(local_index).unwrap().clone(); let source_size = thread_source_size.get(local_index).unwrap().clone(); let scale_mat = 
thread_scale_mat.get(local_index).unwrap().clone(); let origin_translation_mat = thread_origin_translation_mat .get(local_index) .unwrap() .clone(); let rotation_mat = thread_rotation_mat.get(local_index).unwrap().clone(); let translation_mat = thread_translation.get(local_index).unwrap().clone(); /* let _src_relative_min_x: f32 = source_position.x / texture_width as f32; let _src_relative_min_y: f32 = source_position.y / texture_height as f32; let _src_relative_max_x: f32 = source_position.x + source_size.x / texture_width as f32; let _src_relative_max_y: f32 = source_position.y + source_size.y / texture_height as f32; */ let transformation = translation_mat * rotation_mat * origin_translation_mat * scale_mat; let vec_a = transformation * BASE_VEC_A; let vec_b = transformation * BASE_VEC_B; let vec_c = transformation * BASE_VEC_C; let vec_d = transformation * BASE_VEC_D; let uv_a = BASE_UV_A; let uv_b = BASE_UV_B; let uv_c = BASE_UV_C; let uv_d = BASE_UV_D; let indices = BASELINE_INDICES .iter() .map(|i| *i + (4 * (thread_slice_start as u16 + local_index as u16))) .collect::<Vec<_>>(); let vertices = vec![ Vertex { position: [vec_a.x, vec_a.y, vec_a.z], tex_coords: [uv_a.x, uv_a.y], }, Vertex { position: [vec_b.x, vec_b.y, vec_b.z], tex_coords: [uv_b.x, uv_b.y], }, Vertex { position: [vec_c.x, vec_c.y, vec_c.z], tex_coords: [uv_c.x, uv_c.y], }, Vertex { position: [vec_d.x, vec_d.y, vec_d.z], tex_coords: [uv_d.x, uv_d.y], }, ]; result_vertices.extend_from_slice(&vertices[..]); result_indices.extend_from_slice(&indices[..]); } (result_vertices, result_indices) }) .unzip_into_vecs(&mut result_vertices, &mut result_indices); let result_vertices = result_vertices.into_iter().flatten().collect(); let result_indices = result_indices.into_iter().flatten().collect(); (result_vertices, result_indices) } }
use anyhow::Result; use rayon::iter::{IndexedParallelIterator, IntoParallelIterator, ParallelIterator}; use std::{ sync::mpsc::{channel, Receiver, Sender}, thread::JoinHandle, }; use crate::backend::vertex::Vertex; const BASE_VEC_A: ultraviolet::Vec3 = ultraviolet::Vec3::new(0.0, 1.0, 1.0); const BASE_VEC_B: ultraviolet::Vec3 = ultraviolet::Vec3::new(0.0, 0.0, 1.0); const BASE_VEC_C: ultraviolet::Vec3 = ultraviolet::Vec3::new(1.0, 0.0, 1.0); const BASE_VEC_D: ultraviolet::Vec3 = ultraviolet::Vec3::new(1.0, 1.0, 1.0); const BASE_UV_A: ultraviolet::Vec2 = ultraviolet::Vec2::new(0.0, 0.0); const BASE_UV_B: ultraviolet::Vec2 = ultraviolet::Vec2::new(0.0, 1.0); const BASE_UV_C: ultraviolet::Vec2 = ultraviolet::Vec2::new(1.0, 1.0); const BASE_UV_D: ultraviolet::Vec2 = ultraviolet::Vec2::new(1.0, 0.0); const BASELINE_INDICES: &[u16] = &[0, 1, 3, 3, 1, 2]; #[inline] fn calculate_scale_mat( scale: ultraviolet::Vec2, source_size: ultraviolet::Vec2, ) -> ultraviolet::Mat3 { ultraviolet::Mat3::from_nonuniform_scale(ultraviolet::Vec3::new( source_size.x * scale.x, source_size.y * scale.y, 1.0, )) } #[inline] fn calculate_origin_translation_mat( origin: ultraviolet::Vec2, scale: ultraviolet::Vec2, source_size: ultraviolet::Vec2, ) -> ultraviolet::Mat3 { ultraviolet::Mat3::from_translation(ultraviolet::Vec2::new( -source_size.x * origin.x * scale.x, -source_size.y * origin.y * scale.y, )) } #[inline] fn calculate_rotation_mat(angle: f32) -> ultraviolet::Mat3 { ultraviolet::Mat3::from_rotation_z(angle * std::f32::consts::PI / 180.0) } #[inline] fn calculate_translation_mat(position: ultraviolet::Vec2) -> ultraviolet::Mat3 { ultraviolet::Mat3::from_translation(ultraviolet::Vec2::new((position).x, (position).y)) } pub struct Sprites { length: usize, texture_index: Vec<u32>, source_position: Vec<ultraviolet::Vec2>, source_size: Vec<ultraviolet::Vec2>, position: Vec<ultraviolet::Vec2>, angle: Vec<f32>, scale: Vec<ultraviolet::Vec2>, depth: Vec<f32>, color: Vec<[f32; 4]>, origin: 
Vec<ultraviolet::Vec2>, scale_mat: Vec<ultraviolet::Mat3>, origin_translation_mat: Vec<ultraviolet::Mat3>, rotation_mat: Vec<ultraviolet::Mat3>, translation_mat: Vec<ultraviolet::Mat3>, threads: usize, } impl Sprites { pub fn new() -> Self { Self { length: 0, texture_index: vec![], source_position: vec![], source_size: vec![], position: vec![], angle: vec![], scale: vec![], depth: vec![], color: vec![], origin: vec![], scale_mat: vec![], origin_translation_mat: vec![], rotation_mat: vec![], translation_mat: vec![], threads: 16, } } pub fn add( &mut self, texture_index: u32, source_position: ultraviolet::Vec2, source_size: ultraviolet::Vec2, position: ultraviolet::Vec2, angle: f32, scale: ultraviolet::Vec2, depth: f32, color: [f32; 4], origin: ultraviolet::Vec2, ) -> usize { let index = self.texture_index.len(); self.length += 1; self.texture_index.push(texture_index); self.source_position.push(source_position); self.source_size.push(source_size); self.position.push(position); self.angle.push(angle); self.scale.push(scale); self.depth.push(depth); self.color.push(color); self.origin.push(origin); self.scale_mat.push(calculate_scale_mat(scale, source_size)); self.origin_translation_mat .push(calculate_origin_translation_mat(origin, scale, source_size)); self.rotation_mat.push(calculate_rotation_mat(angle)); self.translation_mat .push(calculate_translation_mat(position)); index } pub fn set_texture_index(&mut self, index: usize, val: u32) -> Option<()> { *(self.texture_index.get_mut(index)?) = val; Some(()) } pub fn set_source_position(&mut self, index: usize, val: ultraviolet::Vec2) -> Option<()> { *(self.source_position.get_mut(index)?) = val; Some(()) } pub fn set_source_size(&mut self, index: usize, val: ultraviolet::Vec2) -> Option<()> { *(self.source_size.get_mut(index)?) = val; let scale = self.scale.get(index)?; let origin = self.origin.get(index)?; *(self.scale_mat.get_mut(index)?) 
= calculate_scale_mat(*scale, val); *(self.origin_translation_mat.get_mut(index)?) = calculate_origin_translation_mat(*origin, *scale, val); Some(()) } pub fn set_position(&mut self, index: usize, val: ultraviolet::Vec2) -> Option<()> { *(self.position.get_mut(index)?) = val; *(self.translation_mat.get_mut(index)?) = calculate_translation_mat(val); Some(()) } pub fn set_angle(&mut self, index: usize, val: f32) -> Option<()> { *(self.angle.get_mut(index)?) = val; *(self.rotation_mat.get_mut(index)?) = calculate_rotation_mat(val); Some(()) } pub fn set_scale(&mut self, index: usize, val: ultraviolet::Vec2) -> Option<()> { *(self.scale.get_mut(index)?) = val; let origin = self.origin.get(index)?; let source_size = self.source_size.get(index)?; *(self.scale_mat.get_mut(index)?) = calculate_scale_mat(val, *source_size); *(self.origin_translation_mat.get_mut(index)?) = calculate_origin_translation_mat(*origin, val, *source_size); Some(()) } pub fn set_depth(&mut self, index: usize, val: f32) -> Option<()> { *(self.depth.get_mut(index)?) = val; Some(()) } pub fn set_color(&mut self, index: usize, val: [f32; 4]) -> Option<()> { *(self.color.get_mut(index)?) = val; Some(()) }
pub fn len(&self) -> usize { self.length } pub fn remove(&mut self, index: usize) { self.length -= 1; self.texture_index.remove(index); self.source_position.remove(index); self.source_size.remove(index); self.position.remove(index); self.angle.remove(index); self.scale.remove(index); self.depth.remove(index); self.color.remove(index); self.origin.remove(index); self.scale_mat.remove(index); self.origin_translation_mat.remove(index); self.rotation_mat.remove(index); self.translation_mat.remove(index); } pub fn vertices_indices( &self, texture_width: u32, texture_height: u32, ) -> (Vec<Vertex>, Vec<u16>) { let thread_count = self.threads; let chunk_size = self.length / thread_count; let leftover = self.length - (chunk_size * thread_count); let mut result_vertices = vec![]; let mut result_indices = vec![]; (0..thread_count) .into_par_iter() .map(|index| { let thread_slice_start = if index == 0 { 0 } else { leftover + chunk_size * index }; let thread_slice_range = if index == 0 { thread_slice_start..thread_slice_start + chunk_size + leftover } else { thread_slice_start..thread_slice_start + chunk_size }; let thread_source_positions = &self.source_position[thread_slice_range.clone()]; let thread_source_size = &self.source_size[thread_slice_range.clone()]; let thread_scale_mat = &self.scale_mat[thread_slice_range.clone()]; let thread_origin_translation_mat = &self.origin_translation_mat[thread_slice_range.clone()]; let thread_rotation_mat = &self.rotation_mat[thread_slice_range.clone()]; let thread_translation = &self.translation_mat[thread_slice_range.clone()]; let range = 0..thread_slice_range.len(); let mut result_vertices = vec![]; let mut result_indices = vec![]; for local_index in range { let source_position = thread_source_positions.get(local_index).unwrap().clone(); let source_size = thread_source_size.get(local_index).unwrap().clone(); let scale_mat = thread_scale_mat.get(local_index).unwrap().clone(); let origin_translation_mat = thread_origin_translation_mat 
.get(local_index) .unwrap() .clone(); let rotation_mat = thread_rotation_mat.get(local_index).unwrap().clone(); let translation_mat = thread_translation.get(local_index).unwrap().clone(); /* let _src_relative_min_x: f32 = source_position.x / texture_width as f32; let _src_relative_min_y: f32 = source_position.y / texture_height as f32; let _src_relative_max_x: f32 = source_position.x + source_size.x / texture_width as f32; let _src_relative_max_y: f32 = source_position.y + source_size.y / texture_height as f32; */ let transformation = translation_mat * rotation_mat * origin_translation_mat * scale_mat; let vec_a = transformation * BASE_VEC_A; let vec_b = transformation * BASE_VEC_B; let vec_c = transformation * BASE_VEC_C; let vec_d = transformation * BASE_VEC_D; let uv_a = BASE_UV_A; let uv_b = BASE_UV_B; let uv_c = BASE_UV_C; let uv_d = BASE_UV_D; let indices = BASELINE_INDICES .iter() .map(|i| *i + (4 * (thread_slice_start as u16 + local_index as u16))) .collect::<Vec<_>>(); let vertices = vec![ Vertex { position: [vec_a.x, vec_a.y, vec_a.z], tex_coords: [uv_a.x, uv_a.y], }, Vertex { position: [vec_b.x, vec_b.y, vec_b.z], tex_coords: [uv_b.x, uv_b.y], }, Vertex { position: [vec_c.x, vec_c.y, vec_c.z], tex_coords: [uv_c.x, uv_c.y], }, Vertex { position: [vec_d.x, vec_d.y, vec_d.z], tex_coords: [uv_d.x, uv_d.y], }, ]; result_vertices.extend_from_slice(&vertices[..]); result_indices.extend_from_slice(&indices[..]); } (result_vertices, result_indices) }) .unzip_into_vecs(&mut result_vertices, &mut result_indices); let result_vertices = result_vertices.into_iter().flatten().collect(); let result_indices = result_indices.into_iter().flatten().collect(); (result_vertices, result_indices) } }
pub fn set_origin(&mut self, index: usize, val: ultraviolet::Vec2) -> Option<()> { *(self.origin.get_mut(index)?) = val; let scale = self.scale.get(index)?; let source_size = self.source_size.get(index)?; *(self.origin_translation_mat.get_mut(index)?) = calculate_origin_translation_mat(val, *scale, *source_size); Some(()) }
function_block-full_function
[ { "content": "fn generate_sprites(n: usize) -> Sprites {\n\n let mut sprites = Sprites::new();\n\n for i in 0..n {\n\n sprites.add(\n\n 0,\n\n ultraviolet::Vec2::new(0.0, 0.0),\n\n ultraviolet::Vec2::new(128.0, 128.0),\n\n ultraviolet::Vec2::new(128.0 * i...
Rust
pulsar-binary-protocol-spec/src/frame/renderer.rs
bk-rs/pulsar-rs
c20d7558a0d4603ded5a473e687ce06fafce8760
use std::{cmp::Ordering, convert::TryFrom}; use crc32c::crc32c; use protobuf::{Message, ProtobufEnum as _, ProtobufError}; use thiserror::Error; use crate::{ command::{Command, PayloadCommandPayload}, types::CompressionType, }; use super::{MAGIC_NUMBER, MAX_FRAME_SIZE_DEFAULT}; #[derive(Default, Debug, Clone)] pub struct FrameRendererConfig { max_frame_size: Option<u32>, compression_buf_capacity: Option<u32>, } impl FrameRendererConfig { pub fn new() -> Self { Self::default() } pub fn set_max_frame_size(&mut self, value: u32) -> &mut Self { self.max_frame_size = Some(value); self } fn get_max_frame_size(&self) -> u32 { self.max_frame_size.unwrap_or(MAX_FRAME_SIZE_DEFAULT) } pub fn set_compression_buf_capacity(&mut self, value: u32) -> &mut Self { self.compression_buf_capacity = Some(value); self } fn get_compression_buf_capacity(&self) -> u32 { self.compression_buf_capacity.unwrap_or(2 * 1024 * 1024) } } #[derive(Error, Debug)] pub enum FrameRenderError { #[error("PayloadTooLarge current:{current} max:{max}")] PayloadTooLarge { current: u32, max: u32 }, #[error("SerializeMessageFailed {0}")] SerializeMessageFailed(ProtobufError), #[error("CompressionUnsupported type:{type_code}")] CompressionUnsupported { type_code: i32 }, #[cfg(feature = "with-compression-lz4")] #[error("CompressionLZ4CompressError {0}")] CompressionLZ4CompressError(std::io::Error), #[cfg(feature = "with-compression-zlib")] #[error("CompressionZlibCompressError {0}")] CompressionZlibCompressError(std::io::Error), #[error("PayloadUnsupported")] PayloadUnsupported, } #[derive(Default, Debug, Clone)] pub struct FrameRenderer { config: FrameRendererConfig, compression_buf: Vec<u8>, } impl FrameRenderer { pub fn new() -> Self { Self::with_config(Default::default()) } pub fn with_config(config: FrameRendererConfig) -> Self { let compression_buf_capacity = config.get_compression_buf_capacity(); Self { config, compression_buf: Vec::with_capacity(compression_buf_capacity as usize), } } pub fn 
get_mut_config(&mut self) -> &mut FrameRendererConfig { &mut self.config } pub fn render<C>(&mut self, command: C, buf: &mut Vec<u8>) -> Result<(), FrameRenderError> where C: Into<Command>, { let command: Command = command.into(); let n_start_with_total_size = buf.len(); buf.extend_from_slice(&0u32.to_be_bytes()[..]); match command { Command::Simple(c) => { render_protobuf_message(c.message, buf)?; } Command::Payload(mut c) => { let compression_type = c.metadata.get_compression(); let compression_type = CompressionType::try_from(compression_type).map_err(|_| { FrameRenderError::CompressionUnsupported { type_code: compression_type.value(), } })?; render_protobuf_message(c.message, buf)?; buf.extend_from_slice(&MAGIC_NUMBER.to_be_bytes()[..]); let n_start_with_checksum = buf.len(); buf.extend_from_slice(&0u32.to_be_bytes()[..]); if compression_type != CompressionType::NONE { if let PayloadCommandPayload::Single(bytes) = &c.payload { c.metadata.set_uncompressed_size(bytes.len() as u32) } } let n_start_with_old_metadata = buf.len(); render_protobuf_message(c.metadata.to_owned(), buf)?; let n_old_metadata = buf.len() - n_start_with_old_metadata; match &c.payload { PayloadCommandPayload::Single(bytes) => match compression_type { CompressionType::NONE => { buf.extend_from_slice(&bytes[..]); } #[cfg(feature = "with-compression-lz4")] CompressionType::LZ4 => { self.compression_buf.clear(); crate::compression::lz4::compress( &bytes[..], &mut self.compression_buf, ) .map_err(FrameRenderError::CompressionLZ4CompressError)?; buf.extend_from_slice(&self.compression_buf[..]); self.compression_buf.clear(); } #[cfg(feature = "with-compression-zlib")] CompressionType::ZLIB => { self.compression_buf.clear(); crate::compression::zlib::compress( &bytes[..], &mut self.compression_buf, ) .map_err(FrameRenderError::CompressionZlibCompressError)?; buf.extend_from_slice(&self.compression_buf[..]); self.compression_buf.clear(); } }, PayloadCommandPayload::Batch(list) => { let 
n_start_with_batch = buf.len(); for (single_message_metadata, bytes) in list { render_protobuf_message(single_message_metadata.to_owned(), buf)?; buf.extend_from_slice(&bytes[..]); } if compression_type != CompressionType::NONE { let uncompressed_size = buf[n_start_with_batch..].len(); match compression_type { CompressionType::NONE => {} #[cfg(feature = "with-compression-lz4")] CompressionType::LZ4 => { self.compression_buf.clear(); crate::compression::lz4::compress( &buf[n_start_with_batch..], &mut self.compression_buf, ) .map_err(FrameRenderError::CompressionLZ4CompressError)?; buf.drain(n_start_with_batch..); buf.extend_from_slice(&self.compression_buf[..]); self.compression_buf.clear(); } #[cfg(feature = "with-compression-zlib")] CompressionType::ZLIB => { self.compression_buf.clear(); crate::compression::zlib::compress( &buf[n_start_with_batch..], &mut self.compression_buf, ) .map_err(FrameRenderError::CompressionZlibCompressError)?; buf.drain(n_start_with_batch..); buf.extend_from_slice(&self.compression_buf[..]); self.compression_buf.clear(); } } c.metadata.set_uncompressed_size(uncompressed_size as u32); let n_start_with_new_metadata = buf.len(); render_protobuf_message(c.metadata, buf)?; let n_new_metadata = buf.len() - n_start_with_new_metadata; let slice_new_metadata = buf[n_start_with_new_metadata ..n_start_with_new_metadata + n_new_metadata] .to_vec(); buf.drain(n_start_with_new_metadata..); match n_new_metadata.cmp(&n_old_metadata) { Ordering::Less => { for i in 0..n_old_metadata - n_new_metadata { buf.remove(n_start_with_old_metadata + n_old_metadata - i); } } Ordering::Equal => {} Ordering::Greater => { for i in 0..n_new_metadata - n_old_metadata { buf.insert( n_start_with_old_metadata + n_old_metadata + i, 0, ); } } } buf.splice( n_start_with_old_metadata ..n_start_with_old_metadata + n_new_metadata, slice_new_metadata.iter().cloned(), ); } } } let n_end = buf.len(); let checksum = crc32c(&buf[n_start_with_checksum + 4..n_end]); buf.splice( 
n_start_with_checksum..n_start_with_checksum + 4, checksum.to_be_bytes().to_vec(), ); } } let n_end = buf.len(); let total_size = (n_end - n_start_with_total_size - 4) as u32; buf.splice( n_start_with_total_size..n_start_with_total_size + 4, total_size.to_be_bytes().to_vec(), ); if total_size > self.config.get_max_frame_size() - 4 { return Err(FrameRenderError::PayloadTooLarge { current: total_size, max: self.config.get_max_frame_size(), }); } Ok(()) } } fn render_protobuf_message<M: Message>( message: M, buf: &mut Vec<u8>, ) -> Result<(), FrameRenderError> { let n_start = buf.len(); buf.extend_from_slice(&0u32.to_be_bytes()[..]); message .write_to_vec(buf) .map_err(FrameRenderError::SerializeMessageFailed)?; let n_end = buf.len(); let size = (n_end - n_start - 4) as u32; buf.splice(n_start..n_start + 4, size.to_be_bytes().to_vec()); Ok(()) }
use std::{cmp::Ordering, convert::TryFrom}; use crc32c::crc32c; use protobuf::{Message, ProtobufEnum as _, ProtobufError}; use thiserror::Error; use crate::{ command::{Command, PayloadCommandPayload}, types::CompressionType, }; use super::{MAGIC_NUMBER, MAX_FRAME_SIZE_DEFAULT}; #[derive(Default, Debug, Clone)] pub struct FrameRendererConfig { max_frame_size: Option<u32>, compression_buf_capacity: Option<u32>, } impl FrameRendererConfig { pub fn new() -> Self { Self::default() } pub fn set_max_frame_size(&mut self, value: u32) -> &mut Self { self.max_frame_size = Some(value); self } fn get_max_frame_size(&self) -> u32 { self.max_frame_size.unwrap_or(MAX_FRAME_SIZE_DEFAULT) } pub fn set_compression_buf_capacity(&mut self, value: u32) -> &mut Self { self.compression_buf_capacity = Some(value); self } fn get_compression_buf_capacity(&self) -> u32 { self.compression_buf_capacity.unwrap_or(2 * 1024 * 1024) } } #[derive(Error, Debug)] pub enum FrameRenderError { #[error("PayloadTooLarge current:{current} max:{max}")] PayloadTooLarge { current: u32, max: u32 }, #[error("SerializeMessageFailed {0}")] SerializeMessageFailed(ProtobufError), #[error("CompressionUnsupported type:{type_code}")] CompressionUnsupported { type_code: i32 }, #[cfg(feature = "with-compression-lz4")] #[error("CompressionLZ4CompressError {0}")] CompressionLZ4CompressError(std::io::Error), #[cfg(feature = "with-compression-zlib")] #[error("CompressionZlibCompressError {0}")] CompressionZlibCompressError(std::io::Error), #[error("PayloadUnsupported")] PayloadUnsupported, } #[derive(Default, Debug, Clone)] pub struct FrameRenderer { config: FrameRendererConfig, compression_buf: Vec<u8>, } impl FrameRenderer { pub fn new() -> Self { Self::with_config(Default::default()) }
pub fn get_mut_config(&mut self) -> &mut FrameRendererConfig { &mut self.config } pub fn render<C>(&mut self, command: C, buf: &mut Vec<u8>) -> Result<(), FrameRenderError> where C: Into<Command>, { let command: Command = command.into(); let n_start_with_total_size = buf.len(); buf.extend_from_slice(&0u32.to_be_bytes()[..]); match command { Command::Simple(c) => { render_protobuf_message(c.message, buf)?; } Command::Payload(mut c) => { let compression_type = c.metadata.get_compression(); let compression_type = CompressionType::try_from(compression_type).map_err(|_| { FrameRenderError::CompressionUnsupported { type_code: compression_type.value(), } })?; render_protobuf_message(c.message, buf)?; buf.extend_from_slice(&MAGIC_NUMBER.to_be_bytes()[..]); let n_start_with_checksum = buf.len(); buf.extend_from_slice(&0u32.to_be_bytes()[..]); if compression_type != CompressionType::NONE { if let PayloadCommandPayload::Single(bytes) = &c.payload { c.metadata.set_uncompressed_size(bytes.len() as u32) } } let n_start_with_old_metadata = buf.len(); render_protobuf_message(c.metadata.to_owned(), buf)?; let n_old_metadata = buf.len() - n_start_with_old_metadata; match &c.payload { PayloadCommandPayload::Single(bytes) => match compression_type { CompressionType::NONE => { buf.extend_from_slice(&bytes[..]); } #[cfg(feature = "with-compression-lz4")] CompressionType::LZ4 => { self.compression_buf.clear(); crate::compression::lz4::compress( &bytes[..], &mut self.compression_buf, ) .map_err(FrameRenderError::CompressionLZ4CompressError)?; buf.extend_from_slice(&self.compression_buf[..]); self.compression_buf.clear(); } #[cfg(feature = "with-compression-zlib")] CompressionType::ZLIB => { self.compression_buf.clear(); crate::compression::zlib::compress( &bytes[..], &mut self.compression_buf, ) .map_err(FrameRenderError::CompressionZlibCompressError)?; buf.extend_from_slice(&self.compression_buf[..]); self.compression_buf.clear(); } }, PayloadCommandPayload::Batch(list) => { let 
n_start_with_batch = buf.len(); for (single_message_metadata, bytes) in list { render_protobuf_message(single_message_metadata.to_owned(), buf)?; buf.extend_from_slice(&bytes[..]); } if compression_type != CompressionType::NONE { let uncompressed_size = buf[n_start_with_batch..].len(); match compression_type { CompressionType::NONE => {} #[cfg(feature = "with-compression-lz4")] CompressionType::LZ4 => { self.compression_buf.clear(); crate::compression::lz4::compress( &buf[n_start_with_batch..], &mut self.compression_buf, ) .map_err(FrameRenderError::CompressionLZ4CompressError)?; buf.drain(n_start_with_batch..); buf.extend_from_slice(&self.compression_buf[..]); self.compression_buf.clear(); } #[cfg(feature = "with-compression-zlib")] CompressionType::ZLIB => { self.compression_buf.clear(); crate::compression::zlib::compress( &buf[n_start_with_batch..], &mut self.compression_buf, ) .map_err(FrameRenderError::CompressionZlibCompressError)?; buf.drain(n_start_with_batch..); buf.extend_from_slice(&self.compression_buf[..]); self.compression_buf.clear(); } } c.metadata.set_uncompressed_size(uncompressed_size as u32); let n_start_with_new_metadata = buf.len(); render_protobuf_message(c.metadata, buf)?; let n_new_metadata = buf.len() - n_start_with_new_metadata; let slice_new_metadata = buf[n_start_with_new_metadata ..n_start_with_new_metadata + n_new_metadata] .to_vec(); buf.drain(n_start_with_new_metadata..); match n_new_metadata.cmp(&n_old_metadata) { Ordering::Less => { for i in 0..n_old_metadata - n_new_metadata { buf.remove(n_start_with_old_metadata + n_old_metadata - i); } } Ordering::Equal => {} Ordering::Greater => { for i in 0..n_new_metadata - n_old_metadata { buf.insert( n_start_with_old_metadata + n_old_metadata + i, 0, ); } } } buf.splice( n_start_with_old_metadata ..n_start_with_old_metadata + n_new_metadata, slice_new_metadata.iter().cloned(), ); } } } let n_end = buf.len(); let checksum = crc32c(&buf[n_start_with_checksum + 4..n_end]); buf.splice( 
n_start_with_checksum..n_start_with_checksum + 4, checksum.to_be_bytes().to_vec(), ); } } let n_end = buf.len(); let total_size = (n_end - n_start_with_total_size - 4) as u32; buf.splice( n_start_with_total_size..n_start_with_total_size + 4, total_size.to_be_bytes().to_vec(), ); if total_size > self.config.get_max_frame_size() - 4 { return Err(FrameRenderError::PayloadTooLarge { current: total_size, max: self.config.get_max_frame_size(), }); } Ok(()) } } fn render_protobuf_message<M: Message>( message: M, buf: &mut Vec<u8>, ) -> Result<(), FrameRenderError> { let n_start = buf.len(); buf.extend_from_slice(&0u32.to_be_bytes()[..]); message .write_to_vec(buf) .map_err(FrameRenderError::SerializeMessageFailed)?; let n_end = buf.len(); let size = (n_end - n_start - 4) as u32; buf.splice(n_start..n_start + 4, size.to_be_bytes().to_vec()); Ok(()) }
pub fn with_config(config: FrameRendererConfig) -> Self { let compression_buf_capacity = config.get_compression_buf_capacity(); Self { config, compression_buf: Vec::with_capacity(compression_buf_capacity as usize), } }
function_block-full_function
[ { "content": "pub fn decompress(slice: &[u8], w: &mut Vec<u8>) -> io::Result<()> {\n\n let mut decoder = Decoder::new(Cursor::new(slice.to_vec()))?;\n\n io::copy(&mut decoder, w)?;\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n use std::error;\n\n\n\n #[test]\n\n ...
Rust
examples/pingpong_client.rs
agemocui/ruyi
28c2b8be016d06b24926f87310b91385ed61b183
extern crate structopt; #[macro_use] extern crate structopt_derive; extern crate env_logger; #[macro_use] extern crate log; extern crate chrono; extern crate futures; extern crate num_cpus; extern crate ruyi; use std::env; use std::mem; use std::net::{IpAddr, SocketAddr}; use std::sync::Arc; use std::sync::atomic::{AtomicUsize, Ordering}; use std::thread; use std::time::Duration; use chrono::prelude::Utc; use env_logger::LogBuilder; use futures::{stream, Future, Sink, Stream}; use structopt::StructOpt; use ruyi::buf::ByteBuf; use ruyi::sync::spsc; use ruyi::net::TcpStream; use ruyi::net::tcp::connect; use ruyi::reactor::{self, Timer}; use ruyi::{IntoTask, Task}; #[derive(StructOpt, Debug)] #[structopt(name = "pingpong_client", about = "Ping-pong client.")] struct Conf { #[structopt(short = "t", long = "threads", help = "Number of threads", default_value = "0")] threads: usize, #[structopt(short = "b", long = "bytes", help = "Number of bytes to send", default_value = "16384")] bytes: usize, #[structopt(short = "c", long = "connections", help = "Concurrent connections per thread", default_value = "25")] conns: usize, #[structopt(short = "s", long = "seconds", help = "Seconds to run", default_value = "60")] secs: usize, #[structopt(help = "Server IP to connect to", default_value = "127.0.0.1")] host: Option<IpAddr>, #[structopt(help = "Server port to connect to", default_value = "10007")] port: Option<u16>, } struct Vars { msgs: usize, bytes: usize, } fn ping_pong( addr: &SocketAddr, len: usize, vars: &'static mut Vars, conns: &'static mut usize, ) -> Task { let n = *conns; connect::<TcpStream>(addr) .and_then(move |s| { *conns -= 1; if *conns == 0 { info!("All {} connections are established", n); } s.as_ref().set_nodelay(true)?; Ok(s) }) .and_then(move |s| { let (r, w) = s.into_twoway(); let mut data = Vec::<u8>::with_capacity(len); unsafe { data.set_len(len) }; w.send_all( stream::once(Ok(ByteBuf::from(data))).chain(r.filter(move |b| { vars.msgs += 1; vars.bytes += 
b.len(); true })), ) }) .map_err(|e| error!("{}", e)) .into_task() } fn run(conf: &Conf) { info!("Start - {:?}", conf); let addr = SocketAddr::new(conf.host.unwrap(), conf.port.unwrap()); ruyi::net::init(); let timer = Timer::new(Duration::from_secs(conf.secs as u64)); let mut threads = Vec::with_capacity(conf.threads); let total_msgs = Arc::new(AtomicUsize::new(0)); let total_bytes = Arc::new(AtomicUsize::new(0)); for _ in 0..conf.threads { let (tx, rx) = spsc::sync_channel(1).unwrap(); let total_msgs = total_msgs.clone(); let total_bytes = total_bytes.clone(); let conns = conf.conns; let handle = thread::spawn(move || { let mut vars = Vars { msgs: 0, bytes: 0 }; { let mut n = conns; let task = rx.recv().unwrap().for_each(|(addr, bytes)| { for _ in 0..conns { let s_vars: &'static mut Vars = unsafe { mem::transmute(&mut vars) }; let s_conns: &'static mut usize = unsafe { mem::transmute(&mut n) }; reactor::spawn(ping_pong(&addr, bytes, s_vars, s_conns)); } Ok(()) }); reactor::run(task).unwrap(); } total_msgs.as_ref().fetch_add(vars.msgs, Ordering::Relaxed); total_bytes .as_ref() .fetch_add(vars.bytes, Ordering::Relaxed); }); tx.send((addr, conf.bytes)).unwrap(); threads.push((Some(handle), Some(tx))); } reactor::run(timer).unwrap(); for thread in threads.iter_mut() { thread.1 = None; } for thread in threads.iter_mut() { thread.0.take().unwrap().join().unwrap(); } let bytes = total_bytes.as_ref().load(Ordering::Relaxed); let msgs = total_msgs.as_ref().load(Ordering::Relaxed); info!("Total bytes read: {}", bytes); info!("Total messages read: {}", msgs); info!("Average message size: {}", bytes as f64 / msgs as f64); info!( "Throughput: {} MiB/s", bytes as f64 / (conf.secs * 1024 * 1024) as f64 ); } fn main() { let mut conf = Conf::from_args(); let mut builder = LogBuilder::new(); builder.format(|r| { format!( "{} {:<5} {}", Utc::now().format("%Y-%m-%d %H:%M:%S.%f"), r.level(), r.args() ) }); if let Ok(v) = env::var("RUST_LOG") { builder.parse(&v); } 
builder.init().unwrap(); if conf.threads < 1 { conf.threads = num_cpus::get(); } run(&conf); }
extern crate structopt; #[macro_use] extern crate structopt_derive; extern crate env_logger; #[macro_use] extern crate log; extern crate chrono; extern crate futures; extern crate num_cpus; extern crate ruyi; use std::env; use std::mem; use std::net::{IpAddr, SocketAddr}; use std::sync::Arc; use std::sync::atomic::{AtomicUsize, Ordering}; use std::thread; use std::time::Duration; use chrono::prelude::Utc; use env_logger::LogBuilder; use futures::{stream, Future, Sink, Stream}; use structopt::StructOpt; use ruyi::buf::ByteBuf; use ruyi::sync::spsc; use ruyi::net::TcpStream; use ruyi::net::tcp::connect; use ruyi::reactor::{self, Timer}; use ruyi::{IntoTask, Task}; #[derive(StructOpt, Debug)] #[structopt(name = "pingpong_client", about = "Ping-pong client.")] struct Conf { #[structopt(short = "t", long = "threads", help = "Number of threads", default_value = "0")] threads: usize, #[structopt(short = "b", long = "bytes", help = "Number of bytes to send", default_value = "16384")] bytes: usize, #[structopt(short = "c", long = "connections", help = "Concurrent connections per thread", default_value = "25")] conns: usize, #[structopt(short = "s", long = "seconds", help = "Seconds to run", default_value = "60")] secs: usize, #[structopt(help = "Server IP to connect to", default_value = "127.0.0.1")] host: Option<IpAddr>, #[structopt(help = "Server port to connect to", default_value = "10007")] port: Option<u16>, } struct Vars { msgs: usize, bytes: usize, } fn ping_pong( addr: &SocketAddr, len: usize, vars: &'static mut Vars, conns: &'static mut usize, ) -> Task { let n = *conns; connect::<TcpStream>(addr) .and_then(move |s| { *conns -= 1; if *conns == 0 { info!("All {} connections are established", n); } s.as_ref().set_nodelay(true)?; Ok(s) }) .and_then(move |s| { let (r, w) = s.into_twoway(); let mut data = Vec::<u8>::with_capacity(len); unsafe { data.set_len(len) }; w.send_all( stream::once(Ok(ByteBuf::from(data))).chain(r.filter(move |b| { vars.msgs += 1; vars.bytes += 
b.len(); true })), ) }) .map_err(|e| error!("{}", e)) .into_task() }
fn main() { let mut conf = Conf::from_args(); let mut builder = LogBuilder::new(); builder.format(|r| { format!( "{} {:<5} {}", Utc::now().format("%Y-%m-%d %H:%M:%S.%f"), r.level(), r.args() ) }); if let Ok(v) = env::var("RUST_LOG") { builder.parse(&v); } builder.init().unwrap(); if conf.threads < 1 { conf.threads = num_cpus::get(); } run(&conf); }
fn run(conf: &Conf) { info!("Start - {:?}", conf); let addr = SocketAddr::new(conf.host.unwrap(), conf.port.unwrap()); ruyi::net::init(); let timer = Timer::new(Duration::from_secs(conf.secs as u64)); let mut threads = Vec::with_capacity(conf.threads); let total_msgs = Arc::new(AtomicUsize::new(0)); let total_bytes = Arc::new(AtomicUsize::new(0)); for _ in 0..conf.threads { let (tx, rx) = spsc::sync_channel(1).unwrap(); let total_msgs = total_msgs.clone(); let total_bytes = total_bytes.clone(); let conns = conf.conns; let handle = thread::spawn(move || { let mut vars = Vars { msgs: 0, bytes: 0 }; { let mut n = conns; let task = rx.recv().unwrap().for_each(|(addr, bytes)| { for _ in 0..conns { let s_vars: &'static mut Vars = unsafe { mem::transmute(&mut vars) }; let s_conns: &'static mut usize = unsafe { mem::transmute(&mut n) }; reactor::spawn(ping_pong(&addr, bytes, s_vars, s_conns)); } Ok(()) }); reactor::run(task).unwrap(); } total_msgs.as_ref().fetch_add(vars.msgs, Ordering::Relaxed); total_bytes .as_ref() .fetch_add(vars.bytes, Ordering::Relaxed); }); tx.send((addr, conf.bytes)).unwrap(); threads.push((Some(handle), Some(tx))); } reactor::run(timer).unwrap(); for thread in threads.iter_mut() { thread.1 = None; } for thread in threads.iter_mut() { thread.0.take().unwrap().join().unwrap(); } let bytes = total_bytes.as_ref().load(Ordering::Relaxed); let msgs = total_msgs.as_ref().load(Ordering::Relaxed); info!("Total bytes read: {}", bytes); info!("Total messages read: {}", msgs); info!("Average message size: {}", bytes as f64 / msgs as f64); info!( "Throughput: {} MiB/s", bytes as f64 / (conf.secs * 1024 * 1024) as f64 ); }
function_block-full_function
[ { "content": "#[inline]\n\npub fn connect<T>(addr: &SocketAddr) -> Connect<T>\n\nwhere\n\n T: AsRef<TcpStream> + AsMut<TcpStream> + From<TcpStream>,\n\n{\n\n Connect {\n\n inner: tcp::Connect::from(addr),\n\n }\n\n}\n\n\n", "file_path": "src/net/tcp.rs", "rank": 2, "score": 240904.23...
Rust
src/main.rs
Awarua-/Distributed-H264-to-H265-File-encoder
2634c4fe177fb6e12444ab49d30c75d5ae052be1
#[macro_use] extern crate log; extern crate log4rs; #[macro_use] extern crate clap; extern crate time; use clap::App; use std::process::{Command, Output, Stdio, ExitStatus}; use std::fs::{read_dir, DirEntry, copy, remove_file}; use std::path::Path; use std::io::{BufReader, BufRead, Read, Result}; use std::ffi::OsStr; static NVENC_CHECK_STRING: &'static str = "supports NVENC"; static EXTENSION: &'static str = "mkv"; static H264_CHECK_STRING: &'static str = "h264"; fn run(command: String, args: Vec<String>) -> Output { let error_message = format!("{} failed :(", command); Command::new(command) .args(args.as_slice()) .output() .expect(error_message.as_str()) } fn consume_stdio<R: Read>(mut buffered_reader: BufReader<R>) { let mut buffer = String::new(); while buffered_reader.read_line(&mut buffer).unwrap() > 0 { let b = buffer.to_owned(); buffer.clear(); println!("{}", b.as_str()); } } fn run_with_stdio(command: String, args: Vec<String>) -> Result<ExitStatus> { let mut cmd = Command::new(command) .args(args.as_slice()) .stderr(Stdio::piped()) .stdout(Stdio::piped()) .spawn() .unwrap(); consume_stdio(BufReader::new(cmd.stderr.take().unwrap())); consume_stdio(BufReader::new(cmd.stdout.take().unwrap())); cmd.wait() } fn visit_dirs(dir: &Path, files: &mut Vec<DirEntry>) -> Result<()> { if dir.is_dir() { for entry in try!(read_dir(dir)) { let entry = try!(entry); let path = entry.path(); if path.is_dir() { try!(visit_dirs(&path, files)); } else { files.push(entry) } } } Ok(()) } fn timestamp() -> i64 { let timespec = time::get_time(); let mills: f64 = timespec.sec as f64 + (timespec.nsec as f64 / 1000.0 / 1000.0 / 1000.0); mills.trunc() as i64 } fn main() { log4rs::init_file("log4rs.yaml", Default::default()).unwrap(); println!("starting up"); info!("starting up"); let yaml = load_yaml!("cli.yaml"); let matches = App::from_yaml(yaml).get_matches(); let ffmpeg = run("ffmpeg".to_string(), vec![String::from("-f"), String::from("lavfi"), String::from("-i"), 
String::from("nullsrc"), String::from("-c:v"), String::from("nvenc"), String::from("-gpu"), String::from("list"), String::from("-f"), String::from("null"), String::from("-")]); let supports_nvenc = String::from_utf8_lossy(ffmpeg.stderr.as_slice()) .contains(NVENC_CHECK_STRING); if supports_nvenc { println!("Supports NVENC"); warn!("Supports NVENC"); } else { return; } let path = Path::new(matches.value_of("SRC_DIR").unwrap()); if !path.is_dir() { println!("SRC_DIR is not a path, exiting"); warn!("SRC_DIR is not a path, exiting"); return; } let temp_path = Path::new(matches.value_of("TEMP_DIR").unwrap()); if !temp_path.is_dir() { println!("TEMP_DIR is not a path, exiting"); warn!("TEMP_DIR is not a path, exiting"); return; } let mut directories: Vec<DirEntry> = Vec::new(); let result = visit_dirs(path, &mut directories); if result.is_err() { let result_err = result.unwrap_err(); println!("Something went wrong"); println!("{}", result_err); warn!("Something went wrong"); warn!("{}", result_err); return; } if matches.is_present("reverse") { directories.reverse(); println!("directories reversed"); info!("directories reversed"); } for (index, file) in directories.iter().enumerate() { let path_buf = file.path(); let file_path = path_buf.as_path(); if file_path.extension().unwrap_or(OsStr::new("")) != EXTENSION { continue; } let percentage = ((index as f64) / (directories.len() as f64)) * 100 as f64; info!("percentage complete: {}", percentage); println!("percentage complete: {}", percentage); let file_path_string = file_path.as_os_str().to_os_string().into_string().unwrap(); println!("processing {}", file_path_string); info!("processing {}", file_path_string); let file_path_string_2 = file_path_string.to_owned(); let ffprobe = run("ffprobe".to_string(), vec![String::from("-v"), String::from("quiet"), String::from("-show_entries"), String::from("stream=codec_name"), String::from("-select_streams"), String::from("v:0"), String::from("-of"), 
String::from("default=noprint_wrappers=1"), file_path_string_2]); let output = String::from_utf8_lossy(ffprobe.stdout.as_slice()); let is_h264 = output.contains(H264_CHECK_STRING); if !is_h264 { println!("File was not {}, but was {}", H264_CHECK_STRING, output); warn!("File was not {}, but was {}", H264_CHECK_STRING, output); continue; } let copy_file_name = file_path.file_name().unwrap(); let copy_file_path = temp_path.join(Path::new(copy_file_name.to_str().unwrap())); let copy_file_path_copy = copy_file_path.to_owned(); let copy_file_path_string = copy_file_path.into_os_string().into_string().unwrap(); let bytes_copied = copy(&file_path_string, &copy_file_path_copy).unwrap(); println!("file copied size of {}", bytes_copied); info!("file copied size of {}", bytes_copied); let file_stem = file_path.file_stem().unwrap().to_str().unwrap(); let now = timestamp().to_string(); let mut file_name = String::from(file_stem); file_name.push_str("_"); file_name.push_str(now.as_str()); file_name.push_str("."); file_name.push_str(EXTENSION); let temp_file_path = temp_path.join(Path::new(file_name.as_str())); let temp_file_path_string = temp_file_path.into_os_string().into_string().unwrap(); println!("creating {}", temp_file_path_string); info!("creating {}", temp_file_path_string); let temp_file_path_string_copy = temp_file_path_string.to_owned(); let ffmpeg_session = run_with_stdio("ffmpeg".to_string(), vec![String::from("-c:v"), String::from("h264_cuvid"), String::from("-i"), copy_file_path_string, String::from("-map"), String::from("0"), String::from("-c"), String::from("copy"), String::from("-c:v"), String::from("hevc_nvenc"), String::from("-preset"), String::from("slow"), temp_file_path_string]); let exit_code = ffmpeg_session.unwrap().code().unwrap(); if exit_code != 0 { println!("something went wrong processing file {}", file_path_string); warn!("something went wrong processing file {}", file_path_string); continue; } let bytes_copied = copy(&temp_file_path_string_copy, 
file_path_string).unwrap(); println!("file copied size of {}", bytes_copied); info!("file copied size of {}", bytes_copied); let result1 = remove_file(&temp_file_path_string_copy); if result1.is_err() { println!("could not remove file {}", temp_file_path_string_copy); warn!("could not remove file {}", temp_file_path_string_copy); continue; } let result2 = remove_file(&copy_file_path_copy); if result2.is_err() { let string = copy_file_path_copy.into_os_string().into_string().unwrap(); println!("could not remove file {}", string); warn!("could not remove file {}", string); } } }
#[macro_use] extern crate log; extern crate log4rs; #[macro_use] extern crate clap; extern crate time; use clap::App; use std::process::{Command, Output, Stdio, ExitStatus}; use std::fs::{read_dir, DirEntry, copy, remove_file}; use std::path::Path; use std::io::{BufReader, BufRead, Read, Result}; use std::ffi::OsStr; static NVENC_CHECK_STRING: &'static str = "supports NVENC"; static EXTENSION: &'static str = "mkv"; static H264_CHECK_STRING: &'static str = "h264"; fn run(command: String, args: Vec<String>) -> Output { let error_message = format!("{} failed :(", command); Command::new(command) .args(args.as_slice()) .output() .expect(error_message.as_str()) } fn consume_stdio<R: Read>(mut buffered_reader: BufReader<R>) { let mut buffer = String::new(); while buffered_reader.read_line(&mut buffer).unwrap() > 0 { let b = buffer.to_owned(); buffer.clear(); println!("{}", b.as_str()); } } fn run_with_stdio(command: String, args: Vec<String>) -> Result<ExitStatus> { let mut cmd = Command::new(command) .args(args.as_slice()) .stderr(Stdio::piped()) .stdout(Stdio::piped()) .spawn() .unwrap(); consume_stdio(BufReader::new(cmd.stderr.take().unwrap())); consume_stdio(BufReader::new(cmd.stdout.take().unwrap())); cmd.wait() } fn visit_dirs(dir: &Path, files: &mut Vec<DirEntry>) -> Result<()> { if dir.is_dir() { for entry in try!(read_dir(dir)) { let entry = try!(entry); let path = entry.path(); if path.is_dir() { try!(visit_dirs(&path, files)); } else { files.push(entry) } } } Ok(()) } fn timestamp() -> i64 { let timespec = time::get_time(); let mills: f64 = timespec.sec as f64 + (timespec.nsec as f64 / 1000.0 / 1000.0 / 1000.0); mills.trunc() as i64 } fn main() { log4rs::init_file("log4rs.yaml", Default::default()).unwrap(); println!("starting up"); info!("starting up"); let yaml = load_yaml!("cli.yaml"); let matches = App::from_yaml(yaml).get_matches(); let ffmpeg =
; let supports_nvenc = String::from_utf8_lossy(ffmpeg.stderr.as_slice()) .contains(NVENC_CHECK_STRING); if supports_nvenc { println!("Supports NVENC"); warn!("Supports NVENC"); } else { return; } let path = Path::new(matches.value_of("SRC_DIR").unwrap()); if !path.is_dir() { println!("SRC_DIR is not a path, exiting"); warn!("SRC_DIR is not a path, exiting"); return; } let temp_path = Path::new(matches.value_of("TEMP_DIR").unwrap()); if !temp_path.is_dir() { println!("TEMP_DIR is not a path, exiting"); warn!("TEMP_DIR is not a path, exiting"); return; } let mut directories: Vec<DirEntry> = Vec::new(); let result = visit_dirs(path, &mut directories); if result.is_err() { let result_err = result.unwrap_err(); println!("Something went wrong"); println!("{}", result_err); warn!("Something went wrong"); warn!("{}", result_err); return; } if matches.is_present("reverse") { directories.reverse(); println!("directories reversed"); info!("directories reversed"); } for (index, file) in directories.iter().enumerate() { let path_buf = file.path(); let file_path = path_buf.as_path(); if file_path.extension().unwrap_or(OsStr::new("")) != EXTENSION { continue; } let percentage = ((index as f64) / (directories.len() as f64)) * 100 as f64; info!("percentage complete: {}", percentage); println!("percentage complete: {}", percentage); let file_path_string = file_path.as_os_str().to_os_string().into_string().unwrap(); println!("processing {}", file_path_string); info!("processing {}", file_path_string); let file_path_string_2 = file_path_string.to_owned(); let ffprobe = run("ffprobe".to_string(), vec![String::from("-v"), String::from("quiet"), String::from("-show_entries"), String::from("stream=codec_name"), String::from("-select_streams"), String::from("v:0"), String::from("-of"), String::from("default=noprint_wrappers=1"), file_path_string_2]); let output = String::from_utf8_lossy(ffprobe.stdout.as_slice()); let is_h264 = output.contains(H264_CHECK_STRING); if !is_h264 { 
println!("File was not {}, but was {}", H264_CHECK_STRING, output); warn!("File was not {}, but was {}", H264_CHECK_STRING, output); continue; } let copy_file_name = file_path.file_name().unwrap(); let copy_file_path = temp_path.join(Path::new(copy_file_name.to_str().unwrap())); let copy_file_path_copy = copy_file_path.to_owned(); let copy_file_path_string = copy_file_path.into_os_string().into_string().unwrap(); let bytes_copied = copy(&file_path_string, &copy_file_path_copy).unwrap(); println!("file copied size of {}", bytes_copied); info!("file copied size of {}", bytes_copied); let file_stem = file_path.file_stem().unwrap().to_str().unwrap(); let now = timestamp().to_string(); let mut file_name = String::from(file_stem); file_name.push_str("_"); file_name.push_str(now.as_str()); file_name.push_str("."); file_name.push_str(EXTENSION); let temp_file_path = temp_path.join(Path::new(file_name.as_str())); let temp_file_path_string = temp_file_path.into_os_string().into_string().unwrap(); println!("creating {}", temp_file_path_string); info!("creating {}", temp_file_path_string); let temp_file_path_string_copy = temp_file_path_string.to_owned(); let ffmpeg_session = run_with_stdio("ffmpeg".to_string(), vec![String::from("-c:v"), String::from("h264_cuvid"), String::from("-i"), copy_file_path_string, String::from("-map"), String::from("0"), String::from("-c"), String::from("copy"), String::from("-c:v"), String::from("hevc_nvenc"), String::from("-preset"), String::from("slow"), temp_file_path_string]); let exit_code = ffmpeg_session.unwrap().code().unwrap(); if exit_code != 0 { println!("something went wrong processing file {}", file_path_string); warn!("something went wrong processing file {}", file_path_string); continue; } let bytes_copied = copy(&temp_file_path_string_copy, file_path_string).unwrap(); println!("file copied size of {}", bytes_copied); info!("file copied size of {}", bytes_copied); let result1 = remove_file(&temp_file_path_string_copy); if 
result1.is_err() { println!("could not remove file {}", temp_file_path_string_copy); warn!("could not remove file {}", temp_file_path_string_copy); continue; } let result2 = remove_file(&copy_file_path_copy); if result2.is_err() { let string = copy_file_path_copy.into_os_string().into_string().unwrap(); println!("could not remove file {}", string); warn!("could not remove file {}", string); } } }
run("ffmpeg".to_string(), vec![String::from("-f"), String::from("lavfi"), String::from("-i"), String::from("nullsrc"), String::from("-c:v"), String::from("nvenc"), String::from("-gpu"), String::from("list"), String::from("-f"), String::from("null"), String::from("-")])
call_expression
[ { "content": "# Distributed-H264-to-H265-File-encoder\n\n\n\nOne day this will hopefully be distributed, but for now its really only a script written in Rust. Until I have enough hardware to justify becoming distributed.\n", "file_path": "README.md", "rank": 6, "score": 15258.789288423348 } ]
Rust
sqlite3ffi/src/test_placeholders.rs
sqlite-mpi/sqlite-mpi-rust
e55a4ae3ebf25c1f7818963b57404af79f878ce8
use super::*; use placeholder::PlaceholderTypes; use serde_json::json; use crate::stmt::ErrorBind; static TEST_OUTPUT_DIR: &'static str = "/tmp"; #[test] fn test_is_valid_handle() { } fn get_test_file() -> String { let now: DateTime<Utc> = Utc::now(); format!("{}/del-test-{:?}.sqlite3", TEST_OUTPUT_DIR, now) } #[test] fn test_get_db_handle() { let file = get_test_file(); for _ in 0..5 { let h = DbHandle::new(file.clone()).unwrap(); } } #[test] fn test_get_stmt_handle_err() { let file = get_test_file(); let h = DbHandle::new(file).unwrap(); for _ in 0..5 { let s = StmtHandle::new(&h, "SELECT * FROM table_does_not_exist"); match s { Err(e) => { match e.primary.id { PrimaryRC::SQLITE_ERROR => { match e.err_msg { Some(m) => assert!(m.len() > 0, "Should be a msg"), None => assert!(false, "Should have msg") } } _ => assert!(false, "Should be SQLITE_ERROR") } } Ok(_) => assert!(false, "Should be Err") } } } #[test] fn test_get_stmt_handle_ok() { let file = get_test_file(); let h = DbHandle::new(file).unwrap(); { let s = StmtHandle::new(&h, "CREATE TABLE t1(a PRIMARY KEY, b);").expect("Syntax Ok"); let rset = s.run().expect("Creates table"); assert!(!rset.is_read_only, "Q modifies db file"); } { let s = StmtHandle::new(&h, "CREATE TABLE t1(a PRIMARY KEY, b);"); match s { Err(e) => { if let PrimaryRC::SQLITE_ERROR = e.primary.id { assert!(true, "Correct error ID"); } else { assert!(false, "Incorrect error ID"); } } Ok(_) => assert!(false) } } } fn assert_bind_kv_fails_with(s: &StmtHandle, kind: ErrorBindType) { let data: HashMap<String, Val> = [ ("abc".to_string(), Val::I64(1)), ].iter().cloned().collect(); let kv = KeyVal { data }; let e = s.bind_kv(&kv); assert!(e.is_err()); match e { Err(eb) => { assert!(&eb.kind == &kind); } _ => {} } } fn assert_bind_err_eq(e: &Result<(), ErrorBind>, kind: ErrorBindType) { assert!(e.is_err()); match e { Err(eb) => { assert!(&eb.kind == &kind); } _ => {} } } fn assert_bind_index_fails_with(s: &StmtHandle, kind: ErrorBindType) { let e = 
s.bind_index(&vec![]); assert_bind_err_eq(&e, kind); } #[test] fn test_get_stmt_params_kv_ok() { { let h = DbHandle::new(":memory:".to_string()).unwrap(); { let q = r#" SELECT :keyA, @keyA, $keyA "#; let s = StmtHandle::new(&h, q).expect("Syntax Ok"); let t = &s.placeholder_meta.types_used; assert_eq!(PlaceholderTypes::Key, *t); assert_bind_kv_fails_with(&s, ErrorBindType::MissingKeysInData); let data: HashMap<String, Val> = [ ("keyA".to_string(), Val::I64(1)), (":keyA".to_string(), Val::I64(12)), ("@keyA".to_string(), Val::I64(13)), ("$keyA".to_string(), Val::I64(14)), ("extraKeyIsIgnored".to_string(), Val::I64(15)) ].iter().cloned().collect(); let kv = KeyVal { data }; assert!(&s.bind_kv(&kv).is_ok()); if let Ok(r) = &s.run() { let target = Rows { data: vec![ vec![Val::I64(1), Val::I64(1), Val::I64(1)], ] }; assert_eq!(target, r.rows); } else { assert!(false); } } { let q = r#" SELECT :111, @222, $333 "#; let s = StmtHandle::new(&h, q).expect("Syntax Ok"); let t = &s.placeholder_meta.types_used; assert_eq!(PlaceholderTypes::Key, *t); let data: HashMap<String, Val> = [ ("111".to_string(), Val::I64(111)), ("222".to_string(), Val::I64(222)), ("333".to_string(), Val::I64(333)) ].iter().cloned().collect(); let kv = KeyVal { data }; assert!(&s.bind_kv(&kv).is_ok()); if let Ok(r) = &s.run() { let target = Rows { data: vec![ vec![Val::I64(111), Val::I64(222), Val::I64(333)], ] }; assert_eq!(target, r.rows); } else { assert!(false); } } { let q = r#" SELECT :i64, :f64, :string, :null, :blob "#; let s = StmtHandle::new(&h, q).expect("Syntax Ok"); let t = &s.placeholder_meta.types_used; assert_eq!(PlaceholderTypes::Key, *t); let data: HashMap<String, Val> = [ ("i64".to_string(), Val::I64(123)), ("f64".to_string(), Val::F64(567.567)), ("string".to_string(), Val::Null), ("null".to_string(), Val::String("example string".to_string())), ("blob".to_string(), Val::Blob("A string as bytes".to_string().into_bytes())) ].iter().cloned().collect(); let kv = KeyVal { data }; 
assert!(&s.bind_kv(&kv).is_ok()); if let Ok(r) = &s.run() { let target = Rows { data: vec![ vec![ Val::I64(123), Val::F64(567.567), Val::Null, Val::String("example string".to_string()), Val::Blob("A string as bytes".to_string().into_bytes()) ], ] }; assert_eq!(target, r.rows); } else { assert!(false); } } } } #[test] fn test_get_stmt_params_index_ok() { { let h = DbHandle::new(":memory:".to_string()).unwrap(); { let q = r#" SELECT ?, ?, ? "#; let s = StmtHandle::new(&h, q).expect("Syntax Ok"); let t = &s.placeholder_meta.types_used; assert_eq!(PlaceholderTypes::Index, *t); assert_bind_kv_fails_with(&s, ErrorBindType::PlaceholderDataTypeNotCompatible); let vals: Vec<Val> = vec![ Val::I64(1), Val::I64(2), Val::I64(3) ]; assert!(&s.bind_index(&vals).is_ok()); if let Ok(r) = &s.run() { let target = Rows { data: vec![ vec![Val::I64(1), Val::I64(2), Val::I64(3)], ] }; assert_eq!(target, r.rows); } else { assert!(false); } } { let q = r#" SELECT ?, ?, ?, ?, ? "#; let s = StmtHandle::new(&h, q).expect("Syntax Ok"); let vals: Vec<Val> = vec![ Val::I64(123), Val::F64(567.567), Val::Null, Val::String("example string".to_string()), Val::Blob("A string as bytes".to_string().into_bytes()) ]; assert!(&s.bind_index(&vals).is_ok()); if let Ok(r) = &s.run() { let target = Rows { data: vec![ vec![ Val::I64(123), Val::F64(567.567), Val::Null, Val::String("example string".to_string()), Val::Blob("A string as bytes".to_string().into_bytes()) ], ] }; assert_eq!(target, r.rows); } else { assert!(false); } } } } #[test] fn test_get_stmt_params_index_error() { let h = DbHandle::new(":memory:".to_string()).unwrap(); { let q = r#" SELECT ?, ?, ? 
"#; let s = StmtHandle::new(&h, q).expect("Syntax Ok"); let vals: Vec<Val> = vec![ Val::I64(1), Val::I64(2), ]; assert_bind_err_eq(&s.bind_index(&vals), ErrorBindType::MissingIndexesInData); } { let q = r#" SELECT ?99 "#; let s = StmtHandle::new(&h, q).expect("Syntax Ok"); let vals: Vec<Val> = vec![ Val::I64(1), Val::I64(2), ]; assert_bind_err_eq(&s.bind_index(&vals), ErrorBindType::MissingIndexesInData); } } #[test] fn test_get_stmt_params_error() { { let h = DbHandle::new(":memory:".to_string()).unwrap(); { let q = r#" SELECT 1 "#; let s = StmtHandle::new(&h, q).expect("Syntax Ok"); let t = &s.placeholder_meta.types_used; assert_eq!(PlaceholderTypes::None, *t); assert_bind_kv_fails_with(&s, ErrorBindType::PlaceholderDataTypeNotCompatible); assert_bind_index_fails_with(&s, ErrorBindType::PlaceholderDataTypeNotCompatible); } { let q = r#" SELECT ?, ?10 "#; let s = StmtHandle::new(&h, q).expect("Syntax Ok"); let t = &s.placeholder_meta.types_used; assert_eq!(PlaceholderTypes::Index, *t); assert_bind_kv_fails_with(&s, ErrorBindType::PlaceholderDataTypeNotCompatible); } { let q = r#" SELECT ?, ? "#; let s = StmtHandle::new(&h, q).expect("Syntax Ok"); let t = &s.placeholder_meta.types_used; assert_eq!(PlaceholderTypes::Index, *t); assert_bind_kv_fails_with(&s, ErrorBindType::PlaceholderDataTypeNotCompatible); } { let q = r#" SELECT ?10, ?20 "#; let s = StmtHandle::new(&h, q).expect("Syntax Ok"); let t = &s.placeholder_meta.types_used; assert_eq!(PlaceholderTypes::Index, *t); assert_bind_kv_fails_with(&s, ErrorBindType::PlaceholderDataTypeNotCompatible); } { let q = r#" SELECT ?five "#; let s = StmtHandle::new(&h, q).expect("Syntax Ok"); let t = &s.placeholder_meta.types_used; assert_eq!(PlaceholderTypes::Index, *t); assert_bind_kv_fails_with(&s, ErrorBindType::PlaceholderDataTypeNotCompatible); } { let q = r#" SELECT :keyA, @keyA, $keyA, ? 
"#; let s = StmtHandle::new(&h, q).expect("Syntax Ok"); let t = &s.placeholder_meta.types_used; assert_eq!(PlaceholderTypes::IndexAndKey, *t); assert_bind_kv_fails_with(&s, ErrorBindType::PlaceholderDataTypeNotCompatible); assert_bind_index_fails_with(&s, ErrorBindType::PlaceholderDataTypeNotCompatible); } { let q = r#" SELECT :keyA, @keyA, $keyA, ?10 "#; let s = StmtHandle::new(&h, q).expect("Syntax Ok"); let t = &s.placeholder_meta.types_used; assert_eq!(PlaceholderTypes::IndexAndKey, *t); assert_bind_kv_fails_with(&s, ErrorBindType::PlaceholderDataTypeNotCompatible); assert_bind_index_fails_with(&s, ErrorBindType::PlaceholderDataTypeNotCompatible); } { let q = r#" SELECT ?, ?, ?, :keyA "#; let s = StmtHandle::new(&h, q).expect("Syntax Ok"); let t = &s.placeholder_meta.types_used; assert_eq!(PlaceholderTypes::IndexAndKey, *t); assert_bind_kv_fails_with(&s, ErrorBindType::PlaceholderDataTypeNotCompatible); assert_bind_index_fails_with(&s, ErrorBindType::PlaceholderDataTypeNotCompatible); } { let q = r#" SELECT :keyA, ?, ?, ?, :keyA "#; let s = StmtHandle::new(&h, q).expect("Syntax Ok"); let t = &s.placeholder_meta.types_used; assert_eq!(PlaceholderTypes::IndexAndKey, *t); assert_bind_kv_fails_with(&s, ErrorBindType::PlaceholderDataTypeNotCompatible); assert_bind_index_fails_with(&s, ErrorBindType::PlaceholderDataTypeNotCompatible); } { let q = r#" SELECT :keyA, ?10, :keyA, ? "#; let s = StmtHandle::new(&h, q).expect("Syntax Ok"); let t = &s.placeholder_meta.types_used; assert_eq!(PlaceholderTypes::IndexAndKey, *t); assert_bind_kv_fails_with(&s, ErrorBindType::PlaceholderDataTypeNotCompatible); assert_bind_index_fails_with(&s, ErrorBindType::PlaceholderDataTypeNotCompatible); } } }
use super::*; use placeholder::PlaceholderTypes; use serde_json::json; use crate::stmt::ErrorBind; static TEST_OUTPUT_DIR: &'static str = "/tmp"; #[test] fn test_is_valid_handle() { } fn get_test_file() -> String { let now: DateTime<Utc> = Utc::now(); format!("{}/del-test-{:?}.sqlite3", TEST_OUTPUT_DIR, now) } #[test] fn test_get_db_handle() { let file = get_test_file(); for _ in 0..5 { let h = DbHandle::new(file.clone()).unwrap(); } } #[test] fn test_get_stmt_handle_err() { let file = get_test_file(); let h = DbHandle::new(file).unwrap(); for _ in 0..5 { let s = StmtHandle::new(&h, "SELECT * FROM table_does_not_exist"); match s { Err(e) => { match e.primary.id { PrimaryRC::SQLITE_ERROR => { match e.err_msg { Some(m) => assert!(m.len() > 0, "Should be a msg"), None => assert!(false, "Should have msg") } } _ => assert!(false, "Should be SQLITE_ERROR") } } Ok(_) => assert!(false, "Should be Err") } } } #[test] fn test_get_stmt_handle_ok() { let file = get_test_file(); let h = DbHandle::new(file).unwrap(); { let s = StmtHandle::new(&h, "CREATE TABLE t1(a PRIMARY KEY, b);").expect("Syntax Ok"); let rset = s.run().expect("Creates table"); assert!(!rset.is_read_only, "Q modifies db file"); } { let s = StmtHandle::new(&h, "CREATE TABLE t1(a PRIMARY KEY, b);"); match s { Err(e) => { if let PrimaryRC::SQLITE_ERROR = e.primary.id { assert!(true, "Correct error ID"); } else { assert!(false, "Incorrect error ID"); } } Ok(_) => assert!(false) } } } fn assert_bind_kv_fails_with(s: &StmtHandle, kind: ErrorBindType) { let data: HashMap<String, Val> = [ ("abc".to_string(), Val::I64(1)), ].iter().cloned().collect(); let kv = KeyVal { data }; let e = s.bind_kv(&kv); assert!(e.is_err()); match e { Err(eb) => { assert!(&eb.kind == &kind); } _ => {} } } fn assert_bind_err_eq(e: &Result<(), ErrorBind>, kind: ErrorBindType) { assert!(e.is_err()); match e { Err(eb) => { assert!(&eb.kind == &kind); } _ => {} } } fn assert_bind_index_fails_with(s: &StmtHandle, kind: ErrorBindType) { let e = 
s.bind_index(&vec![]); assert_bind_err_eq(&e, kind); } #[test] fn test_get_stmt_params_kv_ok() { { let h = DbHandle::new(":memory:".to_string()).unwrap(); { let q = r#" SELECT :keyA, @keyA, $keyA "#; let s = StmtHandle::new(&h, q).expect("Syntax Ok"); let t = &s.placeholder_meta.types_used; assert_eq!(PlaceholderTypes::Key, *t); assert_bind_kv_fails_with(&s, ErrorBindType::MissingKeysInData); let data: HashMap<String, Val> = [ ("keyA".to_string(), Val::I64(1)), (":keyA".to_string(), Val::I64(12)), ("@keyA".to_string(), Val::I64(13)), ("$keyA".to_string(), Val::I64(14)), ("extraKeyIsIgnored".to_string(), Val::I64(15)) ].iter().cloned().collect(); let kv = KeyVal { data }; assert!(&s.bind_kv(&kv).is_ok()); if let Ok(r) = &s.run() { let target = Rows { data: vec![ vec![Val::I64(1), Val::I64(1), Val::I64(1)], ] }; assert_eq!(target, r.rows); } else { assert!(false); } } { let q = r#" SELECT :111, @222, $333 "#; let s = StmtHandle::new(&h, q).expect("Syntax Ok"); let t = &s.placeholder_meta.types_used; assert_eq!(PlaceholderTypes::Key, *t); let data: HashMap<String, Val> = [ ("111".to_string(), Val::I64(111)), ("222".to_string(), Val::I64(222)), ("333".to_string(), Val::I64(333)) ].iter().cloned().collect(); let kv = KeyVal { data }; assert!(&s.bind_kv(&kv).is_ok()); if let Ok(r) = &s.run() { let target = Rows { data: vec![ vec![Val::I64(111), Val::I64(222), Val::I64(333)], ] }; assert_eq!(target, r.rows); } else { assert!(false); } } { let q = r#" SELECT :i64, :f64, :string, :null, :blob "#; let s = StmtHandle::new(&h, q).expect("Syntax Ok"); let t = &s.placeholder_meta.types_used; assert_eq!(PlaceholderTypes::Key, *t); let data: HashMap<String, Val> = [ ("i64".to_string(), Val::I64(123)), ("f64".to_string(), Val::F64(567.567)), ("string".to_string(), Val::Null), ("null".to_string(), Val::String("example string".to_string())), ("blob".to_string(), Val::Blob("A string as bytes".to_string().into_bytes())) ].iter().cloned().collect(); let kv = KeyVal { data }; 
assert!(&s.bind_kv(&kv).is_ok()); if let Ok(r) = &s.run() { let target = Rows { data: vec![ vec![ Val::I64(123), Val::F64(567.567), Val::Null, Val::String("example string".to_string()), Val::Blob("A string as bytes".to_string().into_bytes()) ], ] }; assert_eq!(target, r.rows); } else { assert!(false); } } } } #[test]
#[test] fn test_get_stmt_params_index_error() { let h = DbHandle::new(":memory:".to_string()).unwrap(); { let q = r#" SELECT ?, ?, ? "#; let s = StmtHandle::new(&h, q).expect("Syntax Ok"); let vals: Vec<Val> = vec![ Val::I64(1), Val::I64(2), ]; assert_bind_err_eq(&s.bind_index(&vals), ErrorBindType::MissingIndexesInData); } { let q = r#" SELECT ?99 "#; let s = StmtHandle::new(&h, q).expect("Syntax Ok"); let vals: Vec<Val> = vec![ Val::I64(1), Val::I64(2), ]; assert_bind_err_eq(&s.bind_index(&vals), ErrorBindType::MissingIndexesInData); } } #[test] fn test_get_stmt_params_error() { { let h = DbHandle::new(":memory:".to_string()).unwrap(); { let q = r#" SELECT 1 "#; let s = StmtHandle::new(&h, q).expect("Syntax Ok"); let t = &s.placeholder_meta.types_used; assert_eq!(PlaceholderTypes::None, *t); assert_bind_kv_fails_with(&s, ErrorBindType::PlaceholderDataTypeNotCompatible); assert_bind_index_fails_with(&s, ErrorBindType::PlaceholderDataTypeNotCompatible); } { let q = r#" SELECT ?, ?10 "#; let s = StmtHandle::new(&h, q).expect("Syntax Ok"); let t = &s.placeholder_meta.types_used; assert_eq!(PlaceholderTypes::Index, *t); assert_bind_kv_fails_with(&s, ErrorBindType::PlaceholderDataTypeNotCompatible); } { let q = r#" SELECT ?, ? 
"#; let s = StmtHandle::new(&h, q).expect("Syntax Ok"); let t = &s.placeholder_meta.types_used; assert_eq!(PlaceholderTypes::Index, *t); assert_bind_kv_fails_with(&s, ErrorBindType::PlaceholderDataTypeNotCompatible); } { let q = r#" SELECT ?10, ?20 "#; let s = StmtHandle::new(&h, q).expect("Syntax Ok"); let t = &s.placeholder_meta.types_used; assert_eq!(PlaceholderTypes::Index, *t); assert_bind_kv_fails_with(&s, ErrorBindType::PlaceholderDataTypeNotCompatible); } { let q = r#" SELECT ?five "#; let s = StmtHandle::new(&h, q).expect("Syntax Ok"); let t = &s.placeholder_meta.types_used; assert_eq!(PlaceholderTypes::Index, *t); assert_bind_kv_fails_with(&s, ErrorBindType::PlaceholderDataTypeNotCompatible); } { let q = r#" SELECT :keyA, @keyA, $keyA, ? "#; let s = StmtHandle::new(&h, q).expect("Syntax Ok"); let t = &s.placeholder_meta.types_used; assert_eq!(PlaceholderTypes::IndexAndKey, *t); assert_bind_kv_fails_with(&s, ErrorBindType::PlaceholderDataTypeNotCompatible); assert_bind_index_fails_with(&s, ErrorBindType::PlaceholderDataTypeNotCompatible); } { let q = r#" SELECT :keyA, @keyA, $keyA, ?10 "#; let s = StmtHandle::new(&h, q).expect("Syntax Ok"); let t = &s.placeholder_meta.types_used; assert_eq!(PlaceholderTypes::IndexAndKey, *t); assert_bind_kv_fails_with(&s, ErrorBindType::PlaceholderDataTypeNotCompatible); assert_bind_index_fails_with(&s, ErrorBindType::PlaceholderDataTypeNotCompatible); } { let q = r#" SELECT ?, ?, ?, :keyA "#; let s = StmtHandle::new(&h, q).expect("Syntax Ok"); let t = &s.placeholder_meta.types_used; assert_eq!(PlaceholderTypes::IndexAndKey, *t); assert_bind_kv_fails_with(&s, ErrorBindType::PlaceholderDataTypeNotCompatible); assert_bind_index_fails_with(&s, ErrorBindType::PlaceholderDataTypeNotCompatible); } { let q = r#" SELECT :keyA, ?, ?, ?, :keyA "#; let s = StmtHandle::new(&h, q).expect("Syntax Ok"); let t = &s.placeholder_meta.types_used; assert_eq!(PlaceholderTypes::IndexAndKey, *t); assert_bind_kv_fails_with(&s, 
ErrorBindType::PlaceholderDataTypeNotCompatible); assert_bind_index_fails_with(&s, ErrorBindType::PlaceholderDataTypeNotCompatible); } { let q = r#" SELECT :keyA, ?10, :keyA, ? "#; let s = StmtHandle::new(&h, q).expect("Syntax Ok"); let t = &s.placeholder_meta.types_used; assert_eq!(PlaceholderTypes::IndexAndKey, *t); assert_bind_kv_fails_with(&s, ErrorBindType::PlaceholderDataTypeNotCompatible); assert_bind_index_fails_with(&s, ErrorBindType::PlaceholderDataTypeNotCompatible); } } }
fn test_get_stmt_params_index_ok() { { let h = DbHandle::new(":memory:".to_string()).unwrap(); { let q = r#" SELECT ?, ?, ? "#; let s = StmtHandle::new(&h, q).expect("Syntax Ok"); let t = &s.placeholder_meta.types_used; assert_eq!(PlaceholderTypes::Index, *t); assert_bind_kv_fails_with(&s, ErrorBindType::PlaceholderDataTypeNotCompatible); let vals: Vec<Val> = vec![ Val::I64(1), Val::I64(2), Val::I64(3) ]; assert!(&s.bind_index(&vals).is_ok()); if let Ok(r) = &s.run() { let target = Rows { data: vec![ vec![Val::I64(1), Val::I64(2), Val::I64(3)], ] }; assert_eq!(target, r.rows); } else { assert!(false); } } { let q = r#" SELECT ?, ?, ?, ?, ? "#; let s = StmtHandle::new(&h, q).expect("Syntax Ok"); let vals: Vec<Val> = vec![ Val::I64(123), Val::F64(567.567), Val::Null, Val::String("example string".to_string()), Val::Blob("A string as bytes".to_string().into_bytes()) ]; assert!(&s.bind_index(&vals).is_ok()); if let Ok(r) = &s.run() { let target = Rows { data: vec![ vec![ Val::I64(123), Val::F64(567.567), Val::Null, Val::String("example string".to_string()), Val::Blob("A string as bytes".to_string().into_bytes()) ], ] }; assert_eq!(target, r.rows); } else { assert!(false); } } } }
function_block-full_function
[ { "content": "fn create_table_a(c1: DbFile) -> Result<DbFile, ReturnStatus> {\n\n let wtx1 = c1.get_write_tx().expect(\"Ok\");\n\n wtx1.q(\"CREATE TABLE t1(a PRIMARY KEY, b);\").expect(\"Ok\");\n\n let p = Params::Index(vec![Val::I64(3), Val::I64(4)]);\n\n wtx1.q_params(\"INSERT INTO t1 (a, b) VALUE...
Rust
http-service-h1/src/lib.rs
chrisdickinson/http-service
430a1c6a675a9301d553fb61a76dc5d4b6bb5613
#![forbid(future_incompatible, rust_2018_idioms)] #![deny(missing_debug_implementations, nonstandard_style)] #![warn(missing_docs, missing_doc_code_examples)] #![cfg_attr(test, deny(warnings))] use std::future::Future; use std::pin::Pin; use std::task::{Context, Poll}; use http_service::{Error, HttpService}; use async_std::io; use async_std::net::{SocketAddr, TcpStream}; use async_std::prelude::*; use async_std::stream::Stream; use async_std::sync::Arc; #[derive(Debug)] pub struct Server<I, S: HttpService> { incoming: I, service: Arc<S>, addr: String, } impl<I: Stream<Item = io::Result<TcpStream>>, S: HttpService> Server<I, S> where <<S as HttpService>::ResponseFuture as Future>::Output: Send, <S as HttpService>::Connection: Sync, I: Unpin + Send + Sync, { pub fn new(addr: String, incoming: I, service: S) -> Self { Server { service: Arc::new(service), incoming, addr, } } pub async fn run(&mut self) -> io::Result<()> { while let Some(stream) = self.incoming.next().await { let stream = stream?; async_std::task::spawn(accept(self.addr.clone(), self.service.clone(), stream)); } Ok(()) } } async fn accept<S>(addr: String, service: Arc<S>, stream: TcpStream) -> Result<(), Error> where S: HttpService, <<S as HttpService>::ResponseFuture as Future>::Output: Send, <S as HttpService>::Connection: Sync, { let stream = WrapStream(Arc::new(stream)); let conn = service .clone() .connect() .await .map_err(|_| io::Error::from(io::ErrorKind::Other))?; async_h1::accept(&addr, stream.clone(), |req| async { let conn = conn.clone(); let service = service.clone(); async move { let res = service .respond(conn, req) .await .map_err(|_| io::Error::from(io::ErrorKind::Other))?; Ok(res) } .await }) .await?; Ok(()) } pub async fn serve<S: HttpService>(service: S, addr: SocketAddr) -> io::Result<()> where <<S as HttpService>::ResponseFuture as Future>::Output: Send, <S as HttpService>::Connection: Sync, { let listener = async_std::net::TcpListener::bind(addr).await?; let addr = 
format!("http://{}", listener.local_addr()?); let mut server = Server::<_, S>::new(addr, listener.incoming(), service); server.run().await } #[derive(Clone)] struct WrapStream(Arc<TcpStream>); impl io::Read for WrapStream { fn poll_read( self: Pin<&mut Self>, cx: &mut Context<'_>, buf: &mut [u8], ) -> Poll<io::Result<usize>> { Pin::new(&mut &*self.0).poll_read(cx, buf) } } impl io::Write for WrapStream { fn poll_write( self: Pin<&mut Self>, cx: &mut Context<'_>, buf: &[u8], ) -> Poll<io::Result<usize>> { Pin::new(&mut &*self.0).poll_write(cx, buf) } fn poll_flush(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<()>> { Pin::new(&mut &*self.0).poll_flush(cx) } fn poll_close(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<()>> { Pin::new(&mut &*self.0).poll_close(cx) } }
#![forbid(future_incompatible, rust_2018_idioms)] #![deny(missing_debug_implementations, nonstandard_style)] #![warn(missing_docs, missing_doc_code_examples)] #![cfg_attr(test, deny(warnings))] use std::future::Future; use std::pin::Pin; use std::task::{Context, Poll}; use http_service::{Error, HttpService}; use async_std::io; use async_std::net::{SocketAddr, TcpStream}; use async_std::prelude::*; use async_std::stream::Stream; use async_std::sync::Arc; #[derive(Debug)] pub struct Server<I, S: HttpService> { incoming: I, service: Arc<S>, addr: String, } impl<I: Stream<Item = io::Result<TcpStream>>, S: HttpService> Server<I, S> where <<S as HttpService>::ResponseFuture as Future>::Output: Send, <S as HttpService>::Connection: Sync, I: Unpin + Send + Sync, { pub fn new(addr: String, incoming: I, service: S) -> Self { Server { service: Arc::new(service), incoming, addr, } } pub async fn run(&mut self) -> io::Result<()> { while let Some(stream) = self.incoming.next().await { let stream = stream?; async_std::task::spawn(accept(self.addr.clone(), self.service.clone(), stream)); } Ok(()) } } async fn accept<S>(addr: String, service: Arc<S>, stream: TcpStream) -> Result<(), Error> where S: HttpService, <<S as HttpService>::ResponseFuture as Future>::Output: Send, <S as HttpService>::Connection: Sync, { let stream = WrapStream(Arc::new(stream)); let conn = service .clone() .connect() .await .map_err(|_| io::Error::from(io::ErrorKind::Other))?; async_h1::accept(&addr, stream.clone(), |req| async { let conn = conn.clone(); let service = service.clone(); async move {
Ok(res) } .await }) .await?; Ok(()) } pub async fn serve<S: HttpService>(service: S, addr: SocketAddr) -> io::Result<()> where <<S as HttpService>::ResponseFuture as Future>::Output: Send, <S as HttpService>::Connection: Sync, { let listener = async_std::net::TcpListener::bind(addr).await?; let addr = format!("http://{}", listener.local_addr()?); let mut server = Server::<_, S>::new(addr, listener.incoming(), service); server.run().await } #[derive(Clone)] struct WrapStream(Arc<TcpStream>); impl io::Read for WrapStream { fn poll_read( self: Pin<&mut Self>, cx: &mut Context<'_>, buf: &mut [u8], ) -> Poll<io::Result<usize>> { Pin::new(&mut &*self.0).poll_read(cx, buf) } } impl io::Write for WrapStream { fn poll_write( self: Pin<&mut Self>, cx: &mut Context<'_>, buf: &[u8], ) -> Poll<io::Result<usize>> { Pin::new(&mut &*self.0).poll_write(cx, buf) } fn poll_flush(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<()>> { Pin::new(&mut &*self.0).poll_flush(cx) } fn poll_close(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<()>> { Pin::new(&mut &*self.0).poll_close(cx) } }
let res = service .respond(conn, req) .await .map_err(|_| io::Error::from(io::ErrorKind::Other))?;
assignment_statement
[ { "content": "/// An async HTTP service\n\n///\n\n/// An instance represents a service as a whole. The associated `Conn` type\n\n/// represents a particular connection, and may carry connection-specific state.\n\npub trait HttpService: Send + Sync + 'static {\n\n /// An individual connection.\n\n ///\n\n ...
Rust
src/ui.rs
AidoP/blockade-recon
4116a6ff6e01ca040ca377719f6a6a9c2ff198b8
use std::{thread, sync::mpsc::{self, Receiver}, ops::{Deref, DerefMut}}; use termion::{event::Key, input::{MouseTerminal, TermRead}, raw::{IntoRawMode, RawTerminal}, screen::AlternateScreen}; use tui::{ backend::TermionBackend, layout::{Alignment, Constraint, Layout}, widgets::{Block, Borders, Paragraph}, style::{Style, Modifier, Color}, text::{Spans, Span} }; pub type Backend = TermionBackend<AlternateScreen<MouseTerminal<RawTerminal<std::io::Stdout>>>>; pub type Terminal = tui::Terminal<Backend>; pub struct Ui { pub input: Input, pub terminal: Terminal } impl Ui { pub fn new() -> Self { let backend = TermionBackend::new( AlternateScreen::from( MouseTerminal::from( std::io::stdout().into_raw_mode().expect("Unable to switch stdout to raw mode") ) ) ); let terminal = tui::Terminal::new(backend).expect("Unable to create TUI"); let input = Input::new(); Self { input, terminal } } pub fn error(&mut self, location: String, message: &str, error: &dyn std::fmt::Display) { let spans = vec![ Spans::from(vec![ Span::styled("Error", Style::default().fg(Color::Red)), Span::from(" @ "), Span::styled(location, Style::default().fg(Color::Blue)) ]), Spans::from(vec![ Span::styled(message, Style::default().add_modifier(Modifier::BOLD)) ]), Spans::from(vec![ Span::from("Reason: "), Span::styled(format!("\"{}\"", error), Style::default().fg(Color::LightRed)) ]) ]; self.terminal.draw(|frame| { frame.render_widget( Paragraph::new(spans) .style(Style::reset()) .alignment(Alignment::Center), Layout::default().margin(3).constraints(vec![Constraint::Percentage(100)]).split(frame.size())[0] ); }).expect("Unable to draw to stdout"); let _ = self.input.stdin.try_iter().count(); let _ = self.input.stdin.recv(); } } pub struct ListState{ state: tui::widgets::ListState, item_count: usize } impl ListState { pub fn with_item_count(item_count: usize) -> Self { let mut state = tui::widgets::ListState::default(); state.select(Some(0)); Self { state, item_count } } pub fn set_item_count(&mut self, 
item_count: usize) { if let Some(selected) = self.state.selected() { if selected >= item_count { self.state.select(Some(selected.saturating_sub(1))) } } self.item_count = item_count; } pub fn up(&mut self) { if let Some(selected) = self.state.selected() { if selected <= 0 { self.state.select(Some(self.item_count.saturating_sub(1))) } else { self.state.select(Some(selected.saturating_sub(1))) } } } pub fn down(&mut self) { if let Some(selected) = self.state.selected() { if selected >= self.item_count.saturating_sub(1) { self.state.select(Some(0)) } else { self.state.select(Some(selected.saturating_add(1))) } } } pub fn top(&mut self) { self.state.select(Some(0)) } pub fn bottom(&mut self) { self.state.select(Some(self.item_count.saturating_sub(1))) } } impl Default for ListState { fn default() -> Self { let mut state = tui::widgets::ListState::default(); state.select(Some(0)); Self { state, item_count: 0 } } } impl Deref for ListState { type Target = tui::widgets::ListState; fn deref(&self) -> &Self::Target { &self.state } } impl DerefMut for ListState { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.state } } pub struct TabState<'a> { pub titles: Vec<Spans<'a>>, pub index: usize } impl<'a> TabState<'a> { pub fn new(titles: Vec<Spans<'a>>) -> Self { Self { titles, index: 0 } } pub fn select(&mut self, index: usize) { self.index = index.clamp(0, self.titles.len() - 1) } pub fn next(&mut self) { if self.index >= self.titles.len() - 1 { self.index = 0 } else { self.index += 1 } } pub fn previous(&mut self) { if self.index <= 0 { self.index = self.titles.len() - 1 } else { self.index -= 1 } } } pub struct Input { pub stdin: Receiver<Key>, } impl Input { pub fn new() -> Input { let (tx, rx) = mpsc::channel(); thread::spawn(move || { let mut keys = std::io::stdin().keys(); while let Some(key) = keys.next() { if let Ok(key) = key { tx.send(key).expect("Input channel unexpectedly closed") } } }); Self { stdin: rx } } } #[macro_export] macro_rules! 
expect { ($ui:expr => $result:expr, $msg:expr) => { match $result { Ok(t) => t, Err(e) => { let location = format!("{}:{}:{}", file!(), line!(), column!()); $ui.error(location, $msg, &e); Err(e).expect($msg) } } }; }
use std::{thread, sync::mpsc::{self, Receiver}, ops::{Deref, DerefMut}}; use termion::{event::Key, input::{MouseTerminal, TermRead}, raw::{IntoRawMode, RawTerminal}, screen::AlternateScreen}; use tui::{ backend::TermionBackend, layout::{Alignment, Constraint, Layout}, widgets::{Block, Borders, Paragraph}, style::{Style, Modifier, Color}, text::{Spans, Span} }; pub type Backend = TermionBackend<AlternateScreen<MouseTerminal<RawTerminal<std::io::Stdout>>>>; pub type Terminal = tui::Terminal<Backend>; pub struct Ui { pub input: Input, pub terminal: Terminal } impl Ui { pub fn new() -> Self { let backend = TermionBackend::new( AlternateScreen::from( MouseTerminal::from( std::io::stdout().into_raw_mode().expect("Unable to switch stdout to raw mode") ) ) ); let terminal = tui::Terminal::new(backend).expect("Unable to create TUI"); let input = Input::new(); Self { input, terminal } } pub fn error(&mut self, location: String, message: &str, error: &dyn std::fmt::Display) { let spans = vec![ Spans::from(vec![ Span::styled("Error", Style::default().fg(Color::Red)), Span::from(" @ "), Span::styled(location, Style::default().fg(Color::Blue)) ]), Spans::from(vec![ Span::styled(message, Style::default().add_modifier(Modifier::BOLD)) ]), Spans::from(vec![ Span::from("Reason: "), Span::styled(format!("\"{}\"", error), Style::default().fg(Color::LightRed)) ]) ]; self.terminal.draw(|frame| { frame.render_widget( Paragraph::new(spans) .style(Style::reset()) .alignment(Alignment::Center), Layout::default().margin(3).constraints(vec![Constraint::Percentage(100)]).split(frame.size())[0] ); }).expect("Unable to draw to stdout"); let _ = self.input.stdin.try_iter().count(); let _ = self.input.stdin.recv(); } } pub struct ListState{ state: tui::widgets::ListState, item_count: usize } impl ListState { pub fn with_item_count(item_count: usize) -> Self { let mut state = tui::widgets::ListState::default(); state.select(Some(0)); Self { state, item_count } } pub fn set_item_count(&mut self, 
item_count: usize) { if let Some(selected) = self.state.selected() { if selected >= item_count { self.state.select(Some(selected.saturating_sub(1))) } } self.item_count = item_count; } pub fn up(&mut self) { if let Some(selected) = self.state.selected() { if selected <= 0 { self.state.select(Some(self.item_count.saturating_sub(1))) } else { self.state.select(Some(selected.saturating_sub(1))) } } } pub fn down(&mut self) { if let Some(selected) = self.state.selected() { if selected >= self.item_count.saturating_sub(1) { self.state.select(Some(0)) } else { self.state.select(Some(selected.saturating_add(1))) } } } pub fn top(&mut self) { self.state.select(Some(0)) } pub fn bottom(&mut self) { self.state.select(Some(self.item_count.saturating_sub(1))) } } impl Default for ListState { fn default() -> Self { let mut state = tui::widgets::ListState::default(); state.select(Some(0)); Self { state, item_count: 0 } } } impl Deref for ListState { type Target = tui::widgets::ListState; fn deref(&self) -> &Self::Target { &self.state } } impl DerefMut for ListState { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.state } } pub struct TabState<'a> { pub titles: Vec<Spans<'a>>, pub index: usize } impl<'a> TabState<'a> { pub fn new(titles: Vec<Spans<'a>>) -> Self { Self { titles, index: 0 } } pub fn select(&mut self, index: usize) { self.index = index.clamp(0, self.titles.len() - 1) } pub fn next(&mut self) { if self.index >= self.titles.len() - 1 { self.index = 0 } else { self.index += 1 } } pub fn previous(&mut self) { if self.index <= 0 { self.index = self.titles.len() - 1 } else { self.index -= 1 } } } pub struct Input { pub stdin: Receiver<Key>, } impl Input { pub fn new() -> Input { let (tx, rx) = mpsc::channel(); thread::spawn(move || { let mut keys = std::io::stdin().keys(); while let Some(key) = keys.nex
} #[macro_export] macro_rules! expect { ($ui:expr => $result:expr, $msg:expr) => { match $result { Ok(t) => t, Err(e) => { let location = format!("{}:{}:{}", file!(), line!(), column!()); $ui.error(location, $msg, &e); Err(e).expect($msg) } } }; }
t() { if let Ok(key) = key { tx.send(key).expect("Input channel unexpectedly closed") } } }); Self { stdin: rx } }
function_block-function_prefixed
[ { "content": "type Result<T> = std::result::Result<T, Error>;\n\n#[derive(Debug)]\n\npub enum Error {\n\n UnexpectedEof,\n\n InvalidVersion(u8),\n\n UnrecognisedFrameType,\n\n MissingTag(&'static str),\n\n}\n\nimpl From<eui48::ParseError> for Error {\n\n fn from(_: eui48::ParseError) -> Self {\n\...
Rust
deuterium-plugin/src/model/macro_ext.rs
s-panferov/deuterium-orm
ee8b98773005dfda5271a3720cc1166744f1f2a7
#[macro_export] macro_rules! define_model { ( $model:ident, $model_meta:ident, $table:ident, $many_select_query_ext:ident, $one_select_query_ext:ident, $table_name:expr, [ $(( $field_name:ident, $field_type:ty, $field_name_f:ident, $field_get:ident, $field_set:ident, $field_changed_flag:ident, $field_changed_accessor:ident, $($vis:tt)*)),+ ], [ $($before_create:ident),* ], [ $($before_save:ident),* ] ) => ( #[derive(Default, Debug, Clone)] #[allow(dead_code)] pub struct $model_meta { $( $field_changed_flag: bool, )+ changed: bool, } impl $model_meta { pub fn new() -> $model_meta { $model_meta { $( $field_changed_flag: !true, )+ changed: false, } } } #[derive(Default, Debug, Clone)] #[allow(dead_code)] pub struct $model { $( $field_name: Option<$field_type>, )+ __meta: $model_meta } impl $model { $( #[allow(dead_code)] pub fn $field_get(&self) -> &$field_type { return self.$field_name.as_ref().unwrap(); } #[allow(dead_code)] pub fn $field_set(&mut self, value: $field_type) { self.$field_name = Some(value); self.__meta.changed = true; self.__meta.$field_changed_flag = true; } #[allow(dead_code)] pub fn $field_changed_accessor(&self) -> bool { self.__meta.$field_changed_flag } )+ fn empty() -> $model { $model { $( $field_name: None, )+ __meta: $model_meta::new() } } } #[cfg(feature = "postgres")] impl ::deuterium_orm::adapter::postgres::FromRow for $model { fn from_row<T, L>(query: &::deuterium::SelectQuery<T, L, $model>, row: &::postgres::Row) -> $model { match query.get_select() { &::deuterium::Select::All => { $model { $( $field_name: Some(row.get(stringify!($field_name))), )+ __meta: $model_meta::new() } }, &::deuterium::Select::Only(_) => { let mut model = $model::empty(); $( model.$field_name = match row.get_opt(stringify!($field_name)) { Ok(val) => Some(val), Err(_) => None }; )+ model } } } } #[derive(Clone)] pub struct $table(::deuterium::TableDef); #[allow(dead_code)] impl $model { pub fn table_name() -> &'static str { $table_name } pub fn table() -> $table 
{ $table(::deuterium::TableDef::new($model::table_name())) } pub fn alias(alias: &str) -> $table { $table(::deuterium::TableDef::new_with_alias($model::table_name(), alias)) } $( pub fn $field_name_f() -> ::deuterium::NamedField<$field_type> { ::deuterium::NamedField::<$field_type>::new(stringify!($field_name), $model::table_name()) } )+ fn call_before_create_hooks(&mut self) { $( $before_create(self); )* } fn call_after_create_hooks(&mut self) { unimplemented!() } fn call_before_save_hooks(&mut self) { $( $before_save(self); )* } fn call_after_save_hooks(&mut self) { unimplemented!() } fn call_before_update_hooks(&mut self) { unimplemented!() } fn call_after_update_hooks(&mut self) { unimplemented!() } fn call_before_destroy_hooks(&mut self) { unimplemented!() } fn call_after_destroy_hooks(&mut self) { unimplemented!() } pub fn create_query(&mut self) -> ::deuterium::InsertQuery<(), (), $model, (), ()> { let query = { let mut fields: Vec<::deuterium::BoxedField> = vec![]; let mut values: Vec<&::deuterium::Expression<::deuterium::RawExpression>> = vec![]; self.call_before_create_hooks(); self.call_before_save_hooks(); $( let $field_name; if self.__meta.$field_changed_flag == true { $field_name = $model::$field_name_f(); fields.push(Box::new($field_name)); values.push(self.$field_get().as_expr()); } )+ let mut query = $model::table().insert_fields(&fields.iter().map(|f| &**f).collect::<Vec<&::deuterium::Field>>()); query.push_untyped(&values); query }; $( if self.__meta.$field_changed_flag == true { self.__meta.$field_changed_flag = false; } )+ query } pub fn update_query(&mut self) -> ::deuterium::UpdateQuery<(), ::deuterium::NoResult, $model> { self.call_before_save_hooks(); let mut query = $model::table().update(); $( if self.__meta.$field_changed_flag == true { let field = $model::$field_name_f().set(self.$field_get()); query = query.field(field); self.__meta.$field_changed_flag = false; } )+ query.where_(self.lookup_predicate()) } pub fn delete_query(&mut self) 
-> ::deuterium::DeleteQuery<(), ::deuterium::NoResult, $model> { $model::table().delete().where_(self.lookup_predicate()) } } impl ::deuterium::Table for $table { fn upcast_table(&self) -> ::deuterium::SharedTable { ::std::rc::Rc::new(Box::new(self.clone()) as ::deuterium::BoxedTable) } fn get_table_name(&self) -> &String { self.0.get_table_name() } fn get_table_alias(&self) -> &Option<String> { self.0.get_table_alias() } } #[allow(dead_code)] impl $table { $( pub fn $field_name_f(&self) -> ::deuterium::NamedField<$field_type> { ::deuterium::NamedField::<$field_type>::field_of(stringify!($field_name), self) } )+ } impl ::deuterium::From for $table { fn as_sql(&self) -> &::deuterium::FromToSql { &self.0 } fn upcast_from(&self) -> ::deuterium::SharedFrom { ::std::rc::Rc::new(Box::new(self.clone()) as ::deuterium::BoxedFrom) } } impl ::deuterium::Selectable<$model> for $table { } impl ::deuterium::Updatable<$model> for $table { } impl ::deuterium::Deletable<$model> for $table { } impl ::deuterium::Insertable<$model> for $table { } ) } #[macro_export] macro_rules! primary_key { ($s:ident, $model:ident, $body:block) => ( impl $model { #[allow(dead_code)] pub fn lookup_predicate(&$s) -> ::deuterium::SharedPredicate { $body } } ) } #[macro_export] macro_rules! create_model { ($model:ident, $($field_name:ident: $field_value:expr),+) => ( $model { $( $field_name: Some($field_value), )+ ..std::default::Default::default() } ) }
#[macro_export] macro_rules! define_model { ( $model:ident, $model_meta:ident, $table:ident, $many_select_query_ext:ident, $one_select_query_ext:ident, $table_name:expr, [ $(( $field_name:ident, $field_type:ty, $field_name_f:ident, $field_get:ident, $field_set:ident, $field_changed_flag:ident, $field_changed_accessor:ident, $($vis:tt)*)),+ ], [ $($before_create:ident),* ], [ $($before_save:ident),* ] ) => ( #[derive(Default, Debug, Clone)] #[allow(dead_code)] pub struct $model_meta { $( $field_changed_flag: bool, )+ changed: bool, } impl $model_meta { pub fn new() -> $model_meta { $model_meta { $( $field_changed_flag: !true, )+ changed: false, } } } #[derive(Default, Debug, Clone)] #[allow(dead_code)] pub struct $model { $( $field_name: Option<$field_type>, )+ __meta: $model_meta } impl $model { $( #[allow(dead_code)] pub fn $field_get(&self) -> &$field_type { return self.$field_name.as_ref().unwrap(); } #[allow(dead_code)] pub fn $field_set(&mut self, value: $field_type) { self.$field_name = Some(value); self.__meta.changed = true; self.__meta.$field_changed_flag = true; } #[allow(dead_code)] pub fn $field_changed_accessor(&self) -> bool { self.__meta.$field_changed_flag } )+ fn empty() -> $model { $model { $( $field_name: None, )+ __meta: $model_meta::new() } } } #[cfg(feature = "postgres")] impl ::deuterium_orm::adapter::postgres::FromRow for $model { fn from_row<T, L>(query: &::deuterium::SelectQuery<T, L, $model>, row: &::postgres::Row) -> $model { match query.get_select() { &::deuterium::Select::All => { $model { $( $field_name: Some(row.get(stringify!($field_name))), )+ __meta: $model_meta::new() } }, &::deuterium::Select::Only(_) => { let mut model = $model::empty(); $( model.$field_name = match row.get_opt(stringify!($field_name)) { Ok(val) => Some(val), Err(_) => None }; )+ model } } } } #[derive(Clone)] pub struct $table(::deuterium::TableDef); #[allow(dead_code)] impl $model {
gify!($field_name), self) } )+ } impl ::deuterium::From for $table { fn as_sql(&self) -> &::deuterium::FromToSql { &self.0 } fn upcast_from(&self) -> ::deuterium::SharedFrom { ::std::rc::Rc::new(Box::new(self.clone()) as ::deuterium::BoxedFrom) } } impl ::deuterium::Selectable<$model> for $table { } impl ::deuterium::Updatable<$model> for $table { } impl ::deuterium::Deletable<$model> for $table { } impl ::deuterium::Insertable<$model> for $table { } ) } #[macro_export] macro_rules! primary_key { ($s:ident, $model:ident, $body:block) => ( impl $model { #[allow(dead_code)] pub fn lookup_predicate(&$s) -> ::deuterium::SharedPredicate { $body } } ) } #[macro_export] macro_rules! create_model { ($model:ident, $($field_name:ident: $field_value:expr),+) => ( $model { $( $field_name: Some($field_value), )+ ..std::default::Default::default() } ) }
pub fn table_name() -> &'static str { $table_name } pub fn table() -> $table { $table(::deuterium::TableDef::new($model::table_name())) } pub fn alias(alias: &str) -> $table { $table(::deuterium::TableDef::new_with_alias($model::table_name(), alias)) } $( pub fn $field_name_f() -> ::deuterium::NamedField<$field_type> { ::deuterium::NamedField::<$field_type>::new(stringify!($field_name), $model::table_name()) } )+ fn call_before_create_hooks(&mut self) { $( $before_create(self); )* } fn call_after_create_hooks(&mut self) { unimplemented!() } fn call_before_save_hooks(&mut self) { $( $before_save(self); )* } fn call_after_save_hooks(&mut self) { unimplemented!() } fn call_before_update_hooks(&mut self) { unimplemented!() } fn call_after_update_hooks(&mut self) { unimplemented!() } fn call_before_destroy_hooks(&mut self) { unimplemented!() } fn call_after_destroy_hooks(&mut self) { unimplemented!() } pub fn create_query(&mut self) -> ::deuterium::InsertQuery<(), (), $model, (), ()> { let query = { let mut fields: Vec<::deuterium::BoxedField> = vec![]; let mut values: Vec<&::deuterium::Expression<::deuterium::RawExpression>> = vec![]; self.call_before_create_hooks(); self.call_before_save_hooks(); $( let $field_name; if self.__meta.$field_changed_flag == true { $field_name = $model::$field_name_f(); fields.push(Box::new($field_name)); values.push(self.$field_get().as_expr()); } )+ let mut query = $model::table().insert_fields(&fields.iter().map(|f| &**f).collect::<Vec<&::deuterium::Field>>()); query.push_untyped(&values); query }; $( if self.__meta.$field_changed_flag == true { self.__meta.$field_changed_flag = false; } )+ query } pub fn update_query(&mut self) -> ::deuterium::UpdateQuery<(), ::deuterium::NoResult, $model> { self.call_before_save_hooks(); let mut query = $model::table().update(); $( if self.__meta.$field_changed_flag == true { let field = $model::$field_name_f().set(self.$field_get()); query = query.field(field); self.__meta.$field_changed_flag = 
false; } )+ query.where_(self.lookup_predicate()) } pub fn delete_query(&mut self) -> ::deuterium::DeleteQuery<(), ::deuterium::NoResult, $model> { $model::table().delete().where_(self.lookup_predicate()) } } impl ::deuterium::Table for $table { fn upcast_table(&self) -> ::deuterium::SharedTable { ::std::rc::Rc::new(Box::new(self.clone()) as ::deuterium::BoxedTable) } fn get_table_name(&self) -> &String { self.0.get_table_name() } fn get_table_alias(&self) -> &Option<String> { self.0.get_table_alias() } } #[allow(dead_code)] impl $table { $( pub fn $field_name_f(&self) -> ::deuterium::NamedField<$field_type> { ::deuterium::NamedField::<$field_type>::field_of(strin
random
[ { "content": "pub fn from_row<T, L, M: FromRow>(query: &::deuterium::SelectQuery<T, L, M>, row: &::postgres::rows::Row) -> M {\n\n FromRow::from_row(query, row)\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! to_sql_string_pg {\n\n ($query:expr) => ({\n\n let mut ctx = ::deuterium::SqlContext::new(Box:...
Rust
components/dada-parse/src/parser/items.rs
ciyer/dada
af6ee8aa4c6eacbdf843b9a0805c6ce2cb4667a7
use crate::{parser::Parser, token_test::SpannedIdentifier}; use dada_ir::{ class::Class, code::{ syntax::{op::Op, Spans, Tables}, UnparsedCode, }, effect::Effect, function::Function, item::Item, kw::Keyword, return_type::{ReturnType, ReturnTypeKind}, source_file::{self, SourceFile}, span::Span, word::{SpannedWord, Word}, }; use super::OrReportError; impl<'db> Parser<'db> { pub(crate) fn parse_source_file(&mut self) -> SourceFile { let mut items = vec![]; let mut exprs = vec![]; let mut tables = Tables::default(); let mut spans = Spans::default(); while self.tokens.peek().is_some() { if let Some(item) = self.parse_item() { items.push(item); } else if let Some(expr) = self.parse_top_level_expr(&mut tables, &mut spans) { exprs.push(expr); } else { let span = self.tokens.last_span(); self.tokens.consume(); dada_ir::error!(span.in_file(self.filename), "unexpected token").emit(self.db); } } let main_fn = if !exprs.is_empty() { let start_span = spans[exprs[0]]; let end_span = spans[*exprs.last().unwrap()]; let main_span = start_span.to(end_span).in_file(self.filename); let main_name = Word::from(self.db, source_file::TOP_LEVEL_FN); let main_name = SpannedWord::new(self.db, main_name, main_span); let return_type = ReturnType::new(self.db, ReturnTypeKind::Unit, main_span); let function = Function::new( self.db, main_name, Effect::Async, main_span, return_type, None, main_span, ); let syntax_tree = self.create_syntax_tree(start_span, vec![], tables, spans, exprs); crate::code_parser::parse_function_body::set(self.db, function, syntax_tree); crate::parameter_parser::parse_function_parameters::set(self.db, function, vec![]); items.push(Item::Function(function)); Some(function) } else { None }; SourceFile::new(self.db, self.filename, items, main_fn) } fn parse_item(&mut self) -> Option<Item> { if let Some(class) = self.parse_class() { Some(Item::Class(class)) } else if let Some(func) = self.parse_function() { Some(Item::Function(func)) } else { None } } fn parse_class(&mut 
self) -> Option<Class> { let (class_span, _) = self.eat(Keyword::Class)?; let (_, class_name) = self .eat(SpannedIdentifier) .or_report_error(self, || "expected a class name")?; let (_, field_tokens) = self .delimited('(') .or_report_error(self, || "expected class parameters")?; Some(Class::new( self.db, class_name, field_tokens, self.span_consumed_since(class_span).in_file(self.filename), )) } fn parse_function(&mut self) -> Option<Function> { let is_fn = self.testahead(|parser| { let _ = parser.eat(Keyword::Async); parser.eat(Keyword::Fn).is_some() }); if !is_fn { return None; } let (effect_span, effect) = if let Some((span, _)) = self.eat(Keyword::Async) { (Some(span), Effect::Async) } else { (None, Effect::Default) }; let (fn_span, _) = self.eat(Keyword::Fn).unwrap(); let (_, func_name) = self .eat(SpannedIdentifier) .or_report_error(self, || "expected function name".to_string())?; let (_, parameter_tokens) = self .delimited('(') .or_report_error(self, || "expected function parameters".to_string())?; let return_type = { let right_arrow = self.eat_op(Op::RightArrow); let span = right_arrow .unwrap_or_else(|| Span { start: self.tokens.last_span().end, end: self.tokens.peek_span().start, }) .in_file(self.filename); ReturnType::new( self.db, if right_arrow.is_some() { ReturnTypeKind::Value } else { ReturnTypeKind::Unit }, span, ) }; let (_, body_tokens) = self .delimited('{') .or_report_error(self, || "expected function body".to_string())?; let code = UnparsedCode::new(parameter_tokens, body_tokens); let start_span = effect_span.unwrap_or(fn_span); Some(Function::new( self.db, func_name, effect, effect_span.unwrap_or(fn_span).in_file(self.filename), return_type, Some(code), self.span_consumed_since(start_span).in_file(self.filename), )) } }
use crate::{parser::Parser, token_test::SpannedIdentifier}; use dada_ir::{ class::Class, code::{ syntax::{op::Op, Spans, Tables}, UnparsedCode, }, effect::Effect, function::Function, item::Item, kw::Keyword, return_type::{ReturnType, ReturnTypeKind}, source_file::{self, SourceFile}, span::Span, word::{SpannedWord, Word}, }; use super::OrReportError; impl<'db> Parser<'db> { pub(crate) fn parse_source_file(&mut self) -> SourceFile { let mut items = vec![]; let mut exprs = vec![]; let mut tables = Tables::default(); let mut spans = Spans::default(); while self.tokens.peek().is_some() { if let Some(item) = self.parse_item() { items.push(item); } else if let Some(expr) = self.parse_top_level_expr(&mut tables, &mut spans) { exprs.push(expr); } else { let span = self.tokens.last_span(); self.tokens.consume(); dada_ir::error!(span.in_file(self.filename), "unexpected token").emit(self.db); } } let main_fn = if !exprs.is_empty() { let start_span = spans[exprs[0]]; let end_span = spans[*exprs.last().unwrap()]; let main_span = start_span.to(end_span).in_file(self.filename); let main_name = Word::from(self.db, source_file::TOP_LEVEL_FN); let main_name = SpannedWord::new(self.db, main_name, main_span); let return_type = ReturnType::new(self.db, ReturnTypeKind::Unit, main_span); let function = Function::new( self.db, main_nam
fn parse_item(&mut self) -> Option<Item> { if let Some(class) = self.parse_class() { Some(Item::Class(class)) } else if let Some(func) = self.parse_function() { Some(Item::Function(func)) } else { None } } fn parse_class(&mut self) -> Option<Class> { let (class_span, _) = self.eat(Keyword::Class)?; let (_, class_name) = self .eat(SpannedIdentifier) .or_report_error(self, || "expected a class name")?; let (_, field_tokens) = self .delimited('(') .or_report_error(self, || "expected class parameters")?; Some(Class::new( self.db, class_name, field_tokens, self.span_consumed_since(class_span).in_file(self.filename), )) } fn parse_function(&mut self) -> Option<Function> { let is_fn = self.testahead(|parser| { let _ = parser.eat(Keyword::Async); parser.eat(Keyword::Fn).is_some() }); if !is_fn { return None; } let (effect_span, effect) = if let Some((span, _)) = self.eat(Keyword::Async) { (Some(span), Effect::Async) } else { (None, Effect::Default) }; let (fn_span, _) = self.eat(Keyword::Fn).unwrap(); let (_, func_name) = self .eat(SpannedIdentifier) .or_report_error(self, || "expected function name".to_string())?; let (_, parameter_tokens) = self .delimited('(') .or_report_error(self, || "expected function parameters".to_string())?; let return_type = { let right_arrow = self.eat_op(Op::RightArrow); let span = right_arrow .unwrap_or_else(|| Span { start: self.tokens.last_span().end, end: self.tokens.peek_span().start, }) .in_file(self.filename); ReturnType::new( self.db, if right_arrow.is_some() { ReturnTypeKind::Value } else { ReturnTypeKind::Unit }, span, ) }; let (_, body_tokens) = self .delimited('{') .or_report_error(self, || "expected function body".to_string())?; let code = UnparsedCode::new(parameter_tokens, body_tokens); let start_span = effect_span.unwrap_or(fn_span); Some(Function::new( self.db, func_name, effect, effect_span.unwrap_or(fn_span).in_file(self.filename), return_type, Some(code), self.span_consumed_since(start_span).in_file(self.filename), )) } }
e, Effect::Async, main_span, return_type, None, main_span, ); let syntax_tree = self.create_syntax_tree(start_span, vec![], tables, spans, exprs); crate::code_parser::parse_function_body::set(self.db, function, syntax_tree); crate::parameter_parser::parse_function_parameters::set(self.db, function, vec![]); items.push(Item::Function(function)); Some(function) } else { None }; SourceFile::new(self.db, self.filename, items, main_fn) }
function_block-function_prefixed
[ { "content": "#[salsa::component(in crate::Jar ref)]\n\n#[allow(clippy::needless_lifetimes)]\n\npub fn parse_function_parameters(db: &dyn crate::Db, function: Function) -> Vec<Parameter> {\n\n if let Some(unparsed_code) = function.unparsed_code(db) {\n\n parse_parameters(db, unparsed_code.parameter_to...
Rust
src/main.rs
flattiverse/connector-rust
59693f71a3525a983e3abc84428201e4428e59c1
#![deny(intra_doc_link_resolution_failure)] #[macro_use] extern crate log; #[macro_use] extern crate num_derive; extern crate num_traits; use log::{LevelFilter, SetLoggerError}; use log4rs::append::console::ConsoleAppender; use log4rs::config::{Appender, Config, Logger, Root}; use log4rs::encode::pattern::PatternEncoder; use crate::connector::Connector; use crate::entity::{Privilege, Privileges, Universe}; use crate::players::Account; use std::time::Duration; #[macro_use] pub mod macros; pub mod com; pub mod command; pub mod connector; pub mod crypt; pub mod entity; pub mod io; pub mod packet; pub mod players; pub mod requesting; pub mod requests; pub mod state; #[tokio::main] async fn main() { init_logger(Some(LevelFilter::Info)).unwrap(); debug!("Logger init"); let env = std::env::args().collect::<Vec<String>>(); info!("Reaching out to the flattiverse..."); let mut connector = Connector::login(&env[1], &env[2]).await.unwrap(); info!("Successfully logged in!"); info!("Available universes:"); for universe in connector.universes() { info!(" - {} ({})", universe.name(), universe.id()); info!(" Teams: "); for team in universe.teams() { info!(" » {}", team.name()); } info!(" Galaxies: "); for galaxy in universe.galaxies() { info!(" » {} ({})", galaxy.name(), galaxy.id()); } info!(" Components: "); for system in universe.systems() { info!( " » {:?} [{}, {}]", system.kind(), system.level_start(), system.level_end() ); } } { let connector = connector.clone(); tokio::spawn(async move { let mut connector = connector.await; let request = connector.universe(1).map(|u| u.join_with_team(0)); if let Some(request) = request { match connector .send_request(request) .await .await .expect("Connector disconnected") { Ok(_) => info!("Joined successfully"), Err(e) => error!("{}", e), } } /* while let Some(event) = connector.update(Duration::from_millis(1000)).await { info!("Processed event: {:?}", event); } let request = connector.universe(1).map(|u| u.part()); if let Some(request) = 
request { match connector.send_request(request).await.await.expect("Connector disconnected") { Ok(_) => info!("Parted successfully"), Err(e) => error!("{}", e) } } while let Some(event) = connector.update(Duration::from_millis(1000)).await { info!("Processed event: {:?}", event); }*/ }); } tokio::spawn(connector.with_clone(query_all_accounts)); tokio::spawn(connector.with_clone(|mut connector| async move { tokio::time::delay_for(Duration::from_secs(2)).await; info!( "Your({}) account info: {:?}", &env[1], connector .query_account_by_name(&env[1]) .await .expect("Failed to query") ); info!( "Random(asdf) account info: {:?}", connector .query_account_by_name("asdf") .await .expect("Failed to query") ); query_print_universe_privileges(&mut connector, 0).await; query_print_universe_privileges(&mut connector, 15).await; query_xml_stuff(&mut connector).await; alter_privileges(&mut connector, &env[1]).await; connector.disconnect().await; })); while let Some(event) = connector.update().await { info!("Processed event: {:?}", event); } } async fn alter_privileges(connector: &mut Connector, acc: &str) { let acc = connector .query_account_by_name(acc) .await .expect("Failed to query account") .expect("Account does not exist"); let pvs = Privileges::from(&[Privilege::Join, Privilege::ManageUniverse][..]); connector .alter_privileges_of_universe(0, &acc, pvs) .await .expect("Failed to alter privileges"); let mut stream = connector .query_privileges_of_universe(0) .await .expect("Failed to query privileges"); while let Some(Ok((acc, p))) = stream.next().await { info!( "{}: {:?}", acc.as_ref().map(|a| a.name()).unwrap_or_default(), p ); } connector .reset_privileges_of_universe(0, &acc) .await .expect("Failed to reset privileges"); } async fn query_xml_stuff(connector: &mut Connector) { connector.update_unit_xml( 15, 0, "<Sun Name=\"RustUnit\" Radius=\"300\" PositionX=\"0\" PositionY=\"0\" Gravity=\"0.7\" Radiation=\"2\" PowerOutput=\"150\" />" ).await.expect("Failed to update"); 
info!( "RustUnit: {}", connector .query_unit_xml_by_name(15, 0, "RustUnit") .await .expect("Failed to query RustUnit details") ); connector .delete_unit_by_name(15, 0, "RustUnit") .await .expect("Failed to delete RustUnit"); } async fn query_all_accounts(mut connector: Connector) { info!("Sending account query"); let mut stream = connector .query_accounts_by_name_pattern(None, false) .await .expect("Account query failed"); info!("Accounts:"); while let Some(Ok(account)) = stream.next().await { info!(" - {:?}", account); } info!("Accounts done"); } async fn query_print_universe_privileges(connector: &mut Connector, universe: u16) { info!( "Querying {:?} privileges", connector .universe(usize::from(universe)) .map(Universe::name) .expect("Invalid universe") ); let mut stream = connector .query_privileges_of_universe(universe) .await .expect("Failed to query universe for privileges"); info!("Privileges:"); while let Some(result) = stream.next().await { match result { Ok((account, privileges)) => info!( " - {:?}: {:?}", account.as_ref().map(Account::name), privileges ), Err(e) => error!("{:?}", e), } } info!("Privileges done"); } pub fn init_logger(level: Option<LevelFilter>) -> Result<::log4rs::Handle, SetLoggerError> { let stdout = ConsoleAppender::builder() .encoder(Box::new(PatternEncoder::new( "{h({d(%Y-%m-%d %H:%M:%S%.3f)} {M:>30.30}:{L:>03} {T:>25.25} {l:>5} {m})}{n}", ))) .build(); let config = Config::builder() .appender(Appender::builder().build("stdout", Box::new(stdout))) .logger(Logger::builder().build(env!("CARGO_PKG_NAME"), level.unwrap_or(LevelFilter::Info))) .build(Root::builder().appender("stdout").build(LevelFilter::Info)) .expect("Failed to create logger config"); ::log4rs::init_config(config) }
#![deny(intra_doc_link_resolution_failure)] #[macro_use] extern crate log; #[macro_use] extern crate num_derive; extern crate num_traits; use log::{LevelFilter, SetLoggerError}; use log4rs::append::console::ConsoleAppender; use log4rs::config::{Appender, Config, Logger, Root}; use log4rs::encode::pattern::PatternEncoder; use crate::connector::Connector; use crate::entity::{Privilege, Privileges, Universe}; use crate::players::Account; use std::time::Duration; #[macro_use] pub mod macros; pub mod com; pub mod command; pub mod connector; pub mod crypt; pub mod entity; pub mod io; pub mod packet; pub mod players; pub mod requesting; pub mod requests; pub mod state; #[tokio::main] async fn main() { init_logger(Some(LevelFilter::Info)).unwrap(); debug!("Logger init"); let env = std::env::args().collect::<Vec<String>>(); info!("Reaching out to the flattiverse..."); let mut connector = Connector::login(&env[1], &env[2]).await.unwrap(); info!("Successfully logged in!"); info!("Available universes:"); for universe in connector.universes() { info!(" - {} ({})", universe.name(), universe.id()); info!(" Teams: "); for team in universe.teams() { info!(" » {}", team.name()); } info!(" Galaxies: "); for galaxy in universe.galaxies() { info!(" » {} ({})", galaxy.name(), galaxy.id()); } info!(" Components: "); for system in universe.systems() { info!( " » {:?} [{}, {}]", system.kind(), system.level_start(), system.level_end() ); } } { let connector = connector.clone(); tokio::spawn(async move { let mut connector = connector.await; let request = connector.universe(1).map(|u| u.join_with_team(0)); if let Some(request) = request { match connector .send_request(request) .await .await .expect("Connector disconnected") { Ok(_) => info!("Joined successfully"), Err(e) => error!("{}", e), } } /* while let Some(event) = connector.update(Duration::from_millis(1000)).await { info!("Processed event: {:?}", event); } let request = connector.universe(1).map(|u| u.part()); if let Some(request) = 
request { match connector.send_request(request).await.await.expect("Connector disconnected") { Ok(_) => info!("Parted successfully"), Err(e) => error!("{}", e) } } while let Some(event) = connector.update(Duration::from_millis(1000)).await { info!("Processed event: {:?}", event); }*/ }); } tokio::spawn(connector.with_clone(query_all_accounts)); tokio::spawn(connector.with_clone(|mut connector| async move { tokio::time::delay_for(Duration::from_secs(2)).await; info!( "Your({}) account info: {:?}", &env[1], connector .query_account_by_name(&env[1]) .await .expect("Failed to query") ); info!( "Random(asdf) account info: {:?}", connector .query_account_by_name("asdf") .await .expect("Failed to query") ); query_print_universe_privileges(&mut connector, 0).await; query_print_universe_privileges(&mut connector, 15).await; query_xml_stuff(&mut connector).await; alter_privileges(&mut connector, &env[1]).await; connector.disconnect().await; })); while let Some(event) = connector.update().await { info!("Processed event: {:?}", event); } } async fn alter_privileges(connector: &mut Connector, acc: &str) { let acc = connector .query_account_by_name(acc) .await .expect("Failed to query account") .expect("Account does not exist"); let pvs = Privileges::from(&[Privilege::Join, Privilege::ManageUniverse][..]); connector .alter_privileges_of_universe(0, &acc, pvs) .await .expect("Failed to alter privileges"); let mut stream = connector .query_privileges_of_universe(0) .await .expect("Failed to query privileges"); while let Some(Ok((acc, p))) = stream.next().await { info!( "{}: {:?}", acc.as_ref().map(|a| a.name()).unwrap_or_default(), p ); } connector .reset_privileges_of_universe(0, &acc) .await .expect("Failed to reset privileges"); } async fn query_xml_stuff(connector: &mut Connector) { connector.update_unit_xml( 15, 0, "<Sun Name=\"RustUnit\" Radius=\"300\" PositionX=\"0\" PositionY=\"0\" Gravity=\"0.7\" Radiation=\"2\" PowerOutput=\"150\" />" ).await.expect("Failed to update"); 
info!( "RustUnit: {}", connector .query_unit_xml_by_name(15, 0, "RustUnit") .await .expect("Failed to query RustUnit details") ); connector .delete_unit_by_name(15, 0, "RustUnit") .await .expect("Failed to delete RustUnit"); }
async fn query_print_universe_privileges(connector: &mut Connector, universe: u16) { info!( "Querying {:?} privileges", connector .universe(usize::from(universe)) .map(Universe::name) .expect("Invalid universe") ); let mut stream = connector .query_privileges_of_universe(universe) .await .expect("Failed to query universe for privileges"); info!("Privileges:"); while let Some(result) = stream.next().await { match result { Ok((account, privileges)) => info!( " - {:?}: {:?}", account.as_ref().map(Account::name), privileges ), Err(e) => error!("{:?}", e), } } info!("Privileges done"); } pub fn init_logger(level: Option<LevelFilter>) -> Result<::log4rs::Handle, SetLoggerError> { let stdout = ConsoleAppender::builder() .encoder(Box::new(PatternEncoder::new( "{h({d(%Y-%m-%d %H:%M:%S%.3f)} {M:>30.30}:{L:>03} {T:>25.25} {l:>5} {m})}{n}", ))) .build(); let config = Config::builder() .appender(Appender::builder().build("stdout", Box::new(stdout))) .logger(Logger::builder().build(env!("CARGO_PKG_NAME"), level.unwrap_or(LevelFilter::Info))) .build(Root::builder().appender("stdout").build(LevelFilter::Info)) .expect("Failed to create logger config"); ::log4rs::init_config(config) }
async fn query_all_accounts(mut connector: Connector) { info!("Sending account query"); let mut stream = connector .query_accounts_by_name_pattern(None, false) .await .expect("Account query failed"); info!("Accounts:"); while let Some(Ok(account)) = stream.next().await { info!(" - {:?}", account); } info!("Accounts done"); }
function_block-full_function
[ { "content": "pub fn sha256(salt: &str) -> [u8; 32] {\n\n let mut sha = Sha256::default();\n\n sha.input(salt.as_bytes());\n\n sha.fixed_result().into()\n\n}\n\n\n\npub(crate) fn to_blocks<N>(data: &mut [u8]) -> &mut [GenericArray<u8, N>]\n\nwhere\n\n N: ArrayLength<u8>,\n\n{\n\n let n = N::to_us...
Rust
src/shakespeare.rs
MarcoIeni/poke-speare
1aabb22ee49c8c0268f497d4573cf93b695ad61a
use crate::ps_error::{PSError, PSResult}; use log::error; use reqwest::Client; use serde::Deserialize; use std::collections::HashMap; const SHAKESPEARE_API_PATH: &str = "/translate/shakespeare.json"; const FUNTRANSLATIONS_API_SECRET: &str = "X-Funtranslations-Api-Secret"; #[derive(Deserialize, Debug)] struct Translation { success: Success, contents: Contents, } #[derive(Deserialize, Debug)] struct Contents { translated: String, } #[derive(Deserialize, Debug)] struct Success { total: u32, } impl Translation { fn translated_text(&self) -> PSResult<String> { match self.success.total { 1 => Ok(self.contents.translated.clone()), _ => Err(PSError::ShakespeareError), } } } fn request_url(server_uri: &str) -> String { format!("{}{}", server_uri, SHAKESPEARE_API_PATH) } pub async fn translate(text: &str, shakespeare_api_secret: Option<&str>) -> PSResult<String> { let request_url = request_url("https://api.funtranslations.com"); retrieve_translation(&request_url, text, shakespeare_api_secret).await } async fn retrieve_translation( request_url: &str, text: &str, shakespeare_api_secret: Option<&str>, ) -> PSResult<String> { let mut json_param = HashMap::new(); json_param.insert("text", text); let post = Client::new().post(request_url); let client = match shakespeare_api_secret { Some(secret) => post.header(FUNTRANSLATIONS_API_SECRET, secret), None => post, }; let response = client.json(&json_param).send().await.map_err(|e| { error!("while making shakespeare request: {}", e); PSError::ShakespeareError })?; let status = response.status(); match status.as_u16() { 200 => { let translation: Translation = response.json().await.map_err(|e| { error!("while interpreting shakespeare json payload: {}", e); PSError::ShakespeareError })?; translation.translated_text() } 429 => Err(PSError::QuotaError), _ => { error!( "shakespeare response: unexpected status code. 
request_url: {}, text: {}, response: {:#?}", request_url, text, response ); Err(PSError::ShakespeareError) } } } #[cfg(test)] mod tests { use super::*; use serde_json::json; use wiremock::{ matchers::{body_json, header, method, path}, Mock, MockServer, ResponseTemplate, }; async fn check_shakespeare_translation(api_secret: Option<&str>) { let mock_server = MockServer::start().await; let shakespeare_response = json!({ "success": { "total": 1 }, "contents": { "translated": "Thee did giveth mr. Tim a hearty meal, but unfortunately what he did doth englut did maketh him kicketh the bucket.", "text": "You gave Mr. Tim a hearty meal, but unfortunately what he ate made him die.", "translation": "shakespeare" } }); let input_text = "You gave Mr. Tim a hearty meal, but unfortunately what he ate made him die."; let expected_json_body = body_json(json!({ "text": input_text })); let response = ResponseTemplate::new(200).set_body_json(shakespeare_response); let mock = Mock::given(method("POST")); let mock = match api_secret { Some(secret) => mock.and(header(FUNTRANSLATIONS_API_SECRET, secret)), None => mock, }; mock.and(path(SHAKESPEARE_API_PATH)) .and(expected_json_body) .respond_with(response) .mount(&mock_server) .await; let expected_translation = "Thee did giveth mr. 
Tim a hearty meal, but unfortunately what he did doth englut did maketh him kicketh the bucket."; let request_url = request_url(&mock_server.uri()); dbg!(&request_url); let actual_translation = retrieve_translation(&request_url, input_text, api_secret) .await .unwrap(); assert_eq!(expected_translation, actual_translation); } #[tokio::test] async fn shakespeare_translation_is_correctly_retrieved_with_api_secret() { let api_secret = Some("secret"); check_shakespeare_translation(api_secret).await; } #[tokio::test] async fn shakespeare_translation_is_correctly_retrieved_without_api_secret() { let api_secret = None; check_shakespeare_translation(api_secret).await; } #[tokio::test] async fn report_error_if_shakespeare_quota_limits_reached() { let mock_server = MockServer::start().await; let shakespeare_response = json!({ "error": { "code": 429, "message": "Too Many Requests: Rate limit of 5 requests per hour exceeded. Please wait for 59 minutes and 54 seconds." } }); let response = ResponseTemplate::new(429).set_body_json(shakespeare_response); let input_text = "Irrelevant. This should not be translated."; let request_url = request_url(&mock_server.uri()); Mock::given(method("POST")) .and(path(SHAKESPEARE_API_PATH)) .respond_with(response) .mount(&mock_server) .await; let response = retrieve_translation(&request_url, input_text, None).await; let expected_err = Err(PSError::QuotaError); assert_eq!(expected_err, response); } }
use crate::ps_error::{PSError, PSResult}; use log::error; use reqwest::Client; use serde::Deserialize; use std::collections::HashMap; const SHAKESPEARE_API_PATH: &str = "/translate/shakespeare.json"; const FUNTRANSLATIONS_API_SECRET: &str = "X-Funtranslations-Api-Secret"; #[derive(Deserialize, Debug)] struct Translation { success: Success, contents: Contents, } #[derive(Deserialize, Debug)] struct Contents { translated: String, } #[derive(Deserialize, Debug)] struct Success { total: u32, } impl Translation { fn translated_text(&self) -> PSResult<String> { match self.success.total { 1 => Ok(self.contents.translated.clone()), _ => Err(PSError::ShakespeareError), } } } fn request_url(server_uri: &str) -> String { format!("{}{}", server_uri, SHAKESPEARE_API_PATH) } pub async fn translate(text: &str, shakespeare_api_secret: Option<&str>) -> PSResult<String> { let request_url = request_url("https://api.funtranslations.com"); retrieve_translation(&request_url, text, shakespeare_api_secret).await } async fn retrieve_translation( request_url: &str, text: &str, shake
let expected_translation = "Thee did giveth mr. Tim a hearty meal, but unfortunately what he did doth englut did maketh him kicketh the bucket."; let request_url = request_url(&mock_server.uri()); dbg!(&request_url); let actual_translation = retrieve_translation(&request_url, input_text, api_secret) .await .unwrap(); assert_eq!(expected_translation, actual_translation); } #[tokio::test] async fn shakespeare_translation_is_correctly_retrieved_with_api_secret() { let api_secret = Some("secret"); check_shakespeare_translation(api_secret).await; } #[tokio::test] async fn shakespeare_translation_is_correctly_retrieved_without_api_secret() { let api_secret = None; check_shakespeare_translation(api_secret).await; } #[tokio::test] async fn report_error_if_shakespeare_quota_limits_reached() { let mock_server = MockServer::start().await; let shakespeare_response = json!({ "error": { "code": 429, "message": "Too Many Requests: Rate limit of 5 requests per hour exceeded. Please wait for 59 minutes and 54 seconds." } }); let response = ResponseTemplate::new(429).set_body_json(shakespeare_response); let input_text = "Irrelevant. This should not be translated."; let request_url = request_url(&mock_server.uri()); Mock::given(method("POST")) .and(path(SHAKESPEARE_API_PATH)) .respond_with(response) .mount(&mock_server) .await; let response = retrieve_translation(&request_url, input_text, None).await; let expected_err = Err(PSError::QuotaError); assert_eq!(expected_err, response); } }
speare_api_secret: Option<&str>, ) -> PSResult<String> { let mut json_param = HashMap::new(); json_param.insert("text", text); let post = Client::new().post(request_url); let client = match shakespeare_api_secret { Some(secret) => post.header(FUNTRANSLATIONS_API_SECRET, secret), None => post, }; let response = client.json(&json_param).send().await.map_err(|e| { error!("while making shakespeare request: {}", e); PSError::ShakespeareError })?; let status = response.status(); match status.as_u16() { 200 => { let translation: Translation = response.json().await.map_err(|e| { error!("while interpreting shakespeare json payload: {}", e); PSError::ShakespeareError })?; translation.translated_text() } 429 => Err(PSError::QuotaError), _ => { error!( "shakespeare response: unexpected status code. request_url: {}, text: {}, response: {:#?}", request_url, text, response ); Err(PSError::ShakespeareError) } } } #[cfg(test)] mod tests { use super::*; use serde_json::json; use wiremock::{ matchers::{body_json, header, method, path}, Mock, MockServer, ResponseTemplate, }; async fn check_shakespeare_translation(api_secret: Option<&str>) { let mock_server = MockServer::start().await; let shakespeare_response = json!({ "success": { "total": 1 }, "contents": { "translated": "Thee did giveth mr. Tim a hearty meal, but unfortunately what he did doth englut did maketh him kicketh the bucket.", "text": "You gave Mr. Tim a hearty meal, but unfortunately what he ate made him die.", "translation": "shakespeare" } }); let input_text = "You gave Mr. 
Tim a hearty meal, but unfortunately what he ate made him die."; let expected_json_body = body_json(json!({ "text": input_text })); let response = ResponseTemplate::new(200).set_body_json(shakespeare_response); let mock = Mock::given(method("POST")); let mock = match api_secret { Some(secret) => mock.and(header(FUNTRANSLATIONS_API_SECRET, secret)), None => mock, }; mock.and(path(SHAKESPEARE_API_PATH)) .and(expected_json_body) .respond_with(response) .mount(&mock_server) .await;
random
[ { "content": "fn pokemon_path(pokemon_name: &str) -> String {\n\n format!(\"/api/v2/pokemon-species/{}\", pokemon_name)\n\n}\n\n\n\npub async fn get_description(pokemon_name: &str) -> PSResult<String> {\n\n let pokemon_path = pokemon_path(pokemon_name);\n\n let request_url = format!(\"https://pokeapi.c...
Rust
src/hash_ioc.rs
oliverdaff/ripioc
227c814ba9d5953cd7afa23fddd34857ee182064
#[cfg(feature = "serde_support")] use serde::Serialize; use crate::regex_builder::compile_re; use std::boxed::Box; use regex::Regex; use regex::RegexSet; use regex::RegexSetBuilder; #[cfg_attr(feature = "serde_support", derive(Serialize))] #[derive(Debug, PartialEq, Eq)] pub enum HashIOC<'a> { MD5(&'a str), SHA1(&'a str), SHA256(&'a str), SHA512(&'a str), SSDEEP(&'a str), } #[cfg_attr(feature = "serde_support", derive(Serialize))] #[derive(Debug, PartialEq, Eq)] pub struct HashIOCS<'a> { pub md5s: Vec<HashIOC<'a>>, pub sha1s: Vec<HashIOC<'a>>, pub sha256s: Vec<HashIOC<'a>>, pub sha512s: Vec<HashIOC<'a>>, pub ssdeeps: Vec<HashIOC<'a>>, } const MD5_PATTERN: &str = r#"\b[A-Fa-f0-9]{32}\b"#; const SHA1_PATTERN: &str = r#"\b[A-Fa-f0-9]{40}\b"#; const SHA256_PATTERN: &str = r#"\b[A-Fa-f0-9]{64}\b"#; const SHA512_PATTERN: &str = r#"\b[A-Fa-f0-9]{128}\b"#; const SSDEEP_PATTERN: &str = r#"\d{2}:[A-Za-z0-9/+]{3,}:[A-Za-z0-9/+]{3,}"#; pub fn parse_md5(input: &str) -> Vec<HashIOC> { lazy_static! { static ref MD5_RE: Box<Regex> = compile_re(MD5_PATTERN); } MD5_RE .find_iter(input) .map(|x| HashIOC::MD5(x.as_str())) .collect() } pub fn parse_sha1(input: &str) -> Vec<HashIOC> { lazy_static! { static ref SHA1_RE: Box<Regex> = compile_re(SHA1_PATTERN); } SHA1_RE .find_iter(input) .map(|x| HashIOC::SHA1(x.as_str())) .collect() } pub fn parse_sha256(input: &str) -> Vec<HashIOC> { lazy_static! { static ref SHA256_RE: Box<Regex> = compile_re(SHA256_PATTERN); } SHA256_RE .find_iter(input) .map(|x| x.as_str()) .map(HashIOC::SHA256) .collect() } pub fn parse_sha512(input: &str) -> Vec<HashIOC> { lazy_static! { static ref SHA512_RE: Box<Regex> = compile_re(SHA512_PATTERN); } SHA512_RE .find_iter(input) .map(|x| x.as_str()) .map(HashIOC::SHA512) .collect() } pub fn parse_ssdeep(input: &str) -> Vec<HashIOC> { lazy_static! 
{ static ref SSDEEP_RE: Box<Regex> = compile_re(SSDEEP_PATTERN); } SSDEEP_RE .find_iter(input) .map(|x| x.as_str()) .map(HashIOC::SSDEEP) .collect() } pub fn parse_hash_iocs(input: &str) -> HashIOCS { lazy_static! { static ref HASH_PATTERNS: RegexSet = RegexSetBuilder::new( vec![ MD5_PATTERN, SHA1_PATTERN, SHA256_PATTERN, SHA512_PATTERN, SSDEEP_PATTERN ] ).case_insensitive(true) .ignore_whitespace(true) .build().unwrap(); } let matches = HASH_PATTERNS.matches(input); HashIOCS { md5s: if matches.matched(0) { parse_md5(input) } else { vec![] }, sha1s: if matches.matched(1) { parse_sha1(input) } else { vec![] }, sha256s: if matches.matched(2) { parse_sha256(input) } else { vec![] }, sha512s: if matches.matched(3) { parse_sha512(input) } else { vec![] }, ssdeeps: if matches.matched(4) { parse_ssdeep(input) } else { vec![] }, } } #[cfg(test)] mod tests { use super::*; #[test] fn test_parse_md5() { assert_eq!( parse_md5("this ioc 08f2eb5f1bcbaf25ba97aef26593ed96"), vec![HashIOC::MD5("08f2eb5f1bcbaf25ba97aef26593ed96")] ) } #[test] fn test_parse_sha1() { assert_eq!( parse_sha1("this is a ioc a6b2fa823815336bb7352b02a93c970df51f66e8"), vec![HashIOC::SHA1("a6b2fa823815336bb7352b02a93c970df51f66e8")] ); } #[test] fn test_parse_sha256() { assert_eq!( parse_sha256( "this is a 05cc5051bfa5c2c356422f930e3f78dd63dd1252c98bf5e154c0e1a64a4b5532" ), vec![HashIOC::SHA256( "05cc5051bfa5c2c356422f930e3f78dd63dd1252c98bf5e154c0e1a64a4b5532" )] ) } #[test] fn test_parse_sha512() { assert_eq!( parse_sha512( " this is a 5671025d77521321db8be6e150d66d67c79d2ce43b203207a03710fbff10e1\ 7800179803b4f974c75816a9dd8c3697a2f32fbb2d2b1cff2933f6a9e575061a32" ), vec![HashIOC::SHA512( "5671025d77521321db8be6e150d66d67c79d2ce43b203207a\ 03710fbff10e17800179803b4f974c75816a9dd8c3697a2f32fbb2d2b1cff2933f6a9e575061a32" )] ) } #[test] fn test_parse_ssdeep() { assert_eq!( parse_ssdeep(" this is a 96:s4Ud1Lj96tHHlZDrwciQmA+4uy1I0G4HYuL8N3TzS8QsO/wqWXLcMSx:sF1LjEtHHlZDrJzrhuyZvHYm8tKp/RWO xxx"), 
vec![HashIOC::SSDEEP("96:s4Ud1Lj96tHHlZDrwciQmA+4uy1I0G4HYuL8N3TzS8QsO/wqWXLcMSx:sF1LjEtHHlZDrJzrhuyZvHYm8tKp/RWO")] ) } #[test] fn test_parse_hash_iocs() { assert_eq!(parse_hash_iocs(" 08f2eb5f1bcbaf25ba97aef26593ed96 a6b2fa823815336bb7352b02a93c970df51f66e8 05cc5051bfa5c2c356422f930e3f78dd63dd1252c98bf5e154c0e1a64a4b5532 5671025d77521321db8be6e150d66d67c79d2ce43b203207a03710fbff10e1\ 7800179803b4f974c75816a9dd8c3697a2f32fbb2d2b1cff2933f6a9e575061a32 96:s4Ud1Lj96tHHlZDrwciQmA+4uy1I0G4HYuL8N3TzS8QsO/wqWXLcMSx:sF1LjEtHHlZDrJzrhuyZvHYm8tKp/RWO "), HashIOCS{ md5s : vec![HashIOC::MD5("08f2eb5f1bcbaf25ba97aef26593ed96")], sha1s : vec![HashIOC::SHA1("a6b2fa823815336bb7352b02a93c970df51f66e8")], sha256s : vec![HashIOC::SHA256("05cc5051bfa5c2c356422f930e3f78dd63dd1252c98bf5e154c0e1a64a4b5532")], sha512s : vec![HashIOC::SHA512("5671025d77521321db8be6e150d66d67c79d2ce43b203207a03710fbff10e1\ 7800179803b4f974c75816a9dd8c3697a2f32fbb2d2b1cff2933f6a9e575061a32")], ssdeeps : vec![HashIOC::SSDEEP("96:s4Ud1Lj96tHHlZDrwciQmA+4uy1I0G4HYuL8N3TzS8QsO/wqWXLcMSx:sF1LjEtHHlZDrJzrhuyZvHYm8tKp/RWO")] } ) } }
#[cfg(feature = "serde_support")] use serde::Serialize; use crate::regex_builder::compile_re; use std::boxed::Box; use regex::Regex; use regex::RegexSet; use regex::RegexSetBuilder; #[cfg_attr(feature = "serde_support", derive(Serialize))] #[derive(Debug, PartialEq, Eq)] pub enum HashIOC<'a> { MD5(&'a str), SHA1(&'a str), SHA256(&'a str), SHA512(&'a str), SSDEEP(&'a str), } #[cfg_attr(feature = "serde_support", derive(Serialize))] #[derive(Debug, PartialEq, Eq)] pub struct HashIOCS<'a> { pub md5s: Vec<HashIOC<'a>>, pub sha1s: Vec<HashIOC<'a>>, pub sha256s: Vec<HashIOC<'a>>, pub sha512s: Vec<HashIOC<'a>>, pub ssdeeps: Vec<HashIOC<'a>>, } const MD5_PATTERN: &str = r#"\b[A-Fa-f0-9]{32}\b"#; const SHA1_PATTERN: &str = r#"\b[A-Fa-f0-9]{40}\b"#; const SHA256_PATTERN: &str = r#"\b[A-Fa-f0-9]{64}\b"#; const SHA512_PATTERN: &str = r#"\b[A-Fa-f0-9]{128}\b"#; const SSDEEP_PATTERN: &str = r#"\d{2}:[A-Za-z0-9/+]{3,}:[A-Za-z0-9/+]{3,}"#; pub fn parse_md5(input: &str) -> Vec<HashIOC> { lazy_static! { static ref MD5_RE: Box<Regex> = compile_re(MD5_PATTERN); } MD5_RE .find_iter(input) .map(|x| HashIOC::MD5(x.as_str())) .collect() } pub fn parse_sha1(input: &str) -> Vec<HashIOC> { lazy_static! { static ref SHA1_RE: Box<Regex> = compile_re(SHA1_PATTERN); } SHA1_RE .find_iter(input) .map(|x| HashIOC::SHA1(x.as_str())) .collect() } pub fn parse_sha256(input: &str) -> Vec<HashIOC> { lazy_static! { static ref SHA256_RE: Box<Rege
pub fn parse_sha512(input: &str) -> Vec<HashIOC> { lazy_static! { static ref SHA512_RE: Box<Regex> = compile_re(SHA512_PATTERN); } SHA512_RE .find_iter(input) .map(|x| x.as_str()) .map(HashIOC::SHA512) .collect() } pub fn parse_ssdeep(input: &str) -> Vec<HashIOC> { lazy_static! { static ref SSDEEP_RE: Box<Regex> = compile_re(SSDEEP_PATTERN); } SSDEEP_RE .find_iter(input) .map(|x| x.as_str()) .map(HashIOC::SSDEEP) .collect() } pub fn parse_hash_iocs(input: &str) -> HashIOCS { lazy_static! { static ref HASH_PATTERNS: RegexSet = RegexSetBuilder::new( vec![ MD5_PATTERN, SHA1_PATTERN, SHA256_PATTERN, SHA512_PATTERN, SSDEEP_PATTERN ] ).case_insensitive(true) .ignore_whitespace(true) .build().unwrap(); } let matches = HASH_PATTERNS.matches(input); HashIOCS { md5s: if matches.matched(0) { parse_md5(input) } else { vec![] }, sha1s: if matches.matched(1) { parse_sha1(input) } else { vec![] }, sha256s: if matches.matched(2) { parse_sha256(input) } else { vec![] }, sha512s: if matches.matched(3) { parse_sha512(input) } else { vec![] }, ssdeeps: if matches.matched(4) { parse_ssdeep(input) } else { vec![] }, } } #[cfg(test)] mod tests { use super::*; #[test] fn test_parse_md5() { assert_eq!( parse_md5("this ioc 08f2eb5f1bcbaf25ba97aef26593ed96"), vec![HashIOC::MD5("08f2eb5f1bcbaf25ba97aef26593ed96")] ) } #[test] fn test_parse_sha1() { assert_eq!( parse_sha1("this is a ioc a6b2fa823815336bb7352b02a93c970df51f66e8"), vec![HashIOC::SHA1("a6b2fa823815336bb7352b02a93c970df51f66e8")] ); } #[test] fn test_parse_sha256() { assert_eq!( parse_sha256( "this is a 05cc5051bfa5c2c356422f930e3f78dd63dd1252c98bf5e154c0e1a64a4b5532" ), vec![HashIOC::SHA256( "05cc5051bfa5c2c356422f930e3f78dd63dd1252c98bf5e154c0e1a64a4b5532" )] ) } #[test] fn test_parse_sha512() { assert_eq!( parse_sha512( " this is a 5671025d77521321db8be6e150d66d67c79d2ce43b203207a03710fbff10e1\ 7800179803b4f974c75816a9dd8c3697a2f32fbb2d2b1cff2933f6a9e575061a32" ), vec![HashIOC::SHA512( 
"5671025d77521321db8be6e150d66d67c79d2ce43b203207a\ 03710fbff10e17800179803b4f974c75816a9dd8c3697a2f32fbb2d2b1cff2933f6a9e575061a32" )] ) } #[test] fn test_parse_ssdeep() { assert_eq!( parse_ssdeep(" this is a 96:s4Ud1Lj96tHHlZDrwciQmA+4uy1I0G4HYuL8N3TzS8QsO/wqWXLcMSx:sF1LjEtHHlZDrJzrhuyZvHYm8tKp/RWO xxx"), vec![HashIOC::SSDEEP("96:s4Ud1Lj96tHHlZDrwciQmA+4uy1I0G4HYuL8N3TzS8QsO/wqWXLcMSx:sF1LjEtHHlZDrJzrhuyZvHYm8tKp/RWO")] ) } #[test] fn test_parse_hash_iocs() { assert_eq!(parse_hash_iocs(" 08f2eb5f1bcbaf25ba97aef26593ed96 a6b2fa823815336bb7352b02a93c970df51f66e8 05cc5051bfa5c2c356422f930e3f78dd63dd1252c98bf5e154c0e1a64a4b5532 5671025d77521321db8be6e150d66d67c79d2ce43b203207a03710fbff10e1\ 7800179803b4f974c75816a9dd8c3697a2f32fbb2d2b1cff2933f6a9e575061a32 96:s4Ud1Lj96tHHlZDrwciQmA+4uy1I0G4HYuL8N3TzS8QsO/wqWXLcMSx:sF1LjEtHHlZDrJzrhuyZvHYm8tKp/RWO "), HashIOCS{ md5s : vec![HashIOC::MD5("08f2eb5f1bcbaf25ba97aef26593ed96")], sha1s : vec![HashIOC::SHA1("a6b2fa823815336bb7352b02a93c970df51f66e8")], sha256s : vec![HashIOC::SHA256("05cc5051bfa5c2c356422f930e3f78dd63dd1252c98bf5e154c0e1a64a4b5532")], sha512s : vec![HashIOC::SHA512("5671025d77521321db8be6e150d66d67c79d2ce43b203207a03710fbff10e1\ 7800179803b4f974c75816a9dd8c3697a2f32fbb2d2b1cff2933f6a9e575061a32")], ssdeeps : vec![HashIOC::SSDEEP("96:s4Ud1Lj96tHHlZDrwciQmA+4uy1I0G4HYuL8N3TzS8QsO/wqWXLcMSx:sF1LjEtHHlZDrJzrhuyZvHYm8tKp/RWO")] } ) } }
x> = compile_re(SHA256_PATTERN); } SHA256_RE .find_iter(input) .map(|x| x.as_str()) .map(HashIOC::SHA256) .collect() }
function_block-function_prefixed
[ { "content": "/// Matches all IOCs against the input and returns\n\n/// the matches in a [`IOCS`](struct.IOCS.html).\n\n///\n\n/// # Arguments\n\n///\n\n/// * `input` - A string slice that contains the text to find IOCs in\n\n///\n\n/// ```\n\n/// use ripioc::parse_all_iocs;\n\n///\n\n/// let iocs = parse_all_...
Rust
src/router/mod.rs
verkehrsministerium/autobahnkreuz-rs
03d194257709ad9ef1226141b052bf9c0f2408d6
mod handshake; mod messaging; mod pubsub; mod machine; use crate::messages::{ErrorDetails, Message, Reason, URI}; use rand::distributions::{Distribution, Uniform}; use rand::thread_rng; use crate::router::pubsub::SubscriptionPatternNode; use crate::router::machine::send_message_json; use crate::router::machine::send_message_msgpack; use std::collections::HashMap; use std::marker::Sync; use std::sync::{Arc, Mutex}; use std::thread; use std::time::Duration; use std::env; use ws::{Result as WSResult, Sender, Builder, Settings}; use simple_raft_node::{RequestManager, RequestError, Node, Config, transports::TcpConnectionManager, storages::MemStorage}; use regex::Regex; use crate::{ID, Error, ErrorType, ErrorKind, MatchingPolicy, WampResult}; use serde::{Serialize, Deserialize}; use std::net::ToSocketAddrs; #[derive(Debug, Clone, Default)] struct SubscriptionManager { subscriptions: Arc<Mutex<SubscriptionPatternNode<u64>>>, subscription_ids_to_uris: HashMap<u64, (String, bool)>, } pub struct Router { node: Node<RouterInfo>, } #[derive(Debug, Clone)] pub struct RouterCore { subscription_manager: SubscriptionManager, connections: Arc<Mutex<HashMap<u64, Arc<Mutex<ConnectionInfo>>>>>, senders: Arc<Mutex<HashMap<u64, Sender>>>, } #[derive(Debug, Clone, Default)] struct RouterInfo { request_manager: Option<RequestManager<RouterCore>>, senders: Arc<Mutex<HashMap<u64, Sender>>>, } struct ConnectionHandler { info_id: u64, router: RouterInfo, subscribed_topics: Vec<(ID, ID)>, subscriptions: Arc<Mutex<SubscriptionPatternNode<u64>>>, } #[derive(Debug)] pub struct ConnectionInfo { state: ConnectionState, protocol: String, id: u64, } #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] pub enum ConnectionState { Initializing, Connected, ShuttingDown, Disconnected, } unsafe impl Sync for Router {} static WAMP_JSON: &'static str = "wamp.2.json"; static WAMP_MSGPACK: &'static str = "wamp.2.msgpack"; fn random_id() -> u64 { let mut rng = thread_rng(); let between = Uniform::new(0, 
1u64.rotate_left(56) - 1); between.sample(&mut rng) } impl Default for Router { fn default() -> Self { Self::new() } } impl Router { #[inline] pub fn new() -> Router { let node_id_msg = "Please specify a NODE_ID >= 0 via an environment variable!"; let re = Regex::new(r"\d+").unwrap(); let node_id_str = env::var("NODE_ID").expect(node_id_msg); let mut node_id = re.find(node_id_str.as_str()) .expect(node_id_msg) .as_str() .parse::<u64>() .expect(node_id_msg); node_id += 1; let node_address = env::var("NODE_ADDRESS").ok() .map(|address| { loop { log::info!("Trying to resolve binding IP address {}...", address); match address.to_socket_addrs() { Ok(mut addr) => { return addr .next() .expect("The binding address does not resolve to a valid IP or port!"); }, Err(e) => { log::warn!("Could not resolve binding address {}: {}", address, e); }, } } }).expect("Please specify a NODE_ADDRESS (domain:port) via an environment variable!"); let gateway = env::var("NODE_GATEWAY").ok() .map(|gateway| { loop { log::info!("Trying to resolve gateway address {}...", gateway); match gateway.to_socket_addrs() { Ok(mut addr) => { return addr .next() .expect("The gateway address does not resolve to a valid IP or port!"); }, Err(e) => { log::warn!("Could not resolve gateway {}: {}", gateway, e); }, } } }).expect("The gateway address environment variable NODE_GATEWAY is not specified!"); let config = Config { id: node_id, election_tick: 10, heartbeat_tick: 3, ..Default::default() }; let machine = RouterInfo::default(); let storage = MemStorage::new(); let mgr = TcpConnectionManager::new(node_address).unwrap(); let node = Node::new( config, gateway, machine, storage, mgr, ); let stop = node.stop_handler(); ctrlc::set_handler(move || { stop(); thread::sleep(Duration::from_micros(200)); std::process::exit(0); }).expect("error setting Ctrl-C handler"); Router { node, } } pub fn listen<A>(&self, url: A) where A: ToSocketAddrs + std::fmt::Debug + Send + Sync + 'static { let router_info = 
self.node.machine().clone(); let ws = Builder::new().with_settings(Settings { ..Settings::default() }).build(|sender| { let id = random_id(); router_info.add_connection(id, sender); ConnectionHandler { info_id: id, subscribed_topics: Vec::new(), router: router_info.clone(), subscriptions: router_info.subscriptions(), } }).expect("websocket to be built"); ws.listen(url).unwrap(); } pub fn shutdown(&self) { let info = self.node.machine(); let arc = info.connections(); let connections = arc.lock().unwrap(); for id in connections.keys() { info.send_message( *id, Message::Goodbye(ErrorDetails::new(), Reason::SystemShutdown), ); info.set_state(*id, ConnectionState::ShuttingDown); } log::info!("Goodbye messages sent. Waiting 5 seconds for response"); thread::sleep(Duration::from_secs(5)); for id in connections.keys() { info.shutdown_sender(id); } } } impl RouterCore { pub fn send_message( &self, connection_id: u64, protocol: String, message: Message, ) -> WampResult<()> { log::debug!("handling send_message"); if let Some(sender) = self.senders.lock().unwrap().get(&connection_id) { log::debug!("Sending message {:?} via {}", message, protocol); let send_result = if protocol == WAMP_JSON { send_message_json(sender, &message) } else { send_message_msgpack(sender, &message) }; log::debug!("sending succeeded"); match send_result { Ok(()) => Ok(()), Err(e) => Err(Error::new(ErrorKind::WSError(e))), } } else { log::debug!("connection {} is not on this node, dropping message", connection_id); Ok(()) } } pub fn shutdown_sender(&self, id: &u64) { if let Some(sender) = self.senders.lock().unwrap().get_mut(id) { sender.shutdown().ok(); } } pub fn set_state(&self, connection_id: u64, state: ConnectionState) { let connections = self.connections.lock().unwrap(); let connection = connections.get(&connection_id).unwrap(); connection.lock().unwrap().state = state; } pub fn set_protocol(&self, connection_id: u64, protocol: String) { let connections = self.connections.lock().unwrap(); let 
connection = connections.get(&connection_id).unwrap(); connection.lock().unwrap().protocol = protocol; } pub fn add_connection(&mut self, connection_id: u64) { self.connections.lock().unwrap().insert(connection_id, Arc::new(Mutex::new(ConnectionInfo { state: ConnectionState::Initializing, protocol: String::new(), id: connection_id, }))); } pub fn remove_connection(&mut self, connection_id: u64) { self.connections.lock().unwrap().remove(&connection_id); } pub fn remove_subscription(&mut self, connection_id: &u64, subscription_id: &u64, request_id: &u64) -> WampResult<()> { if let Some(&(ref topic_uri, is_prefix)) = self.subscription_manager.subscription_ids_to_uris.get(subscription_id) { log::trace!("Removing subscription to {:?}", topic_uri); self.subscription_manager .subscriptions .lock().unwrap() .unsubscribe_with(topic_uri, connection_id, is_prefix) .map_err(|e| Error::new(ErrorKind::ErrorReason( ErrorType::Unsubscribe, *request_id, e.reason(), )))?; log::trace!("Subscription tree: {:?}", self.subscription_manager.subscriptions); } else { return Err(Error::new(ErrorKind::ErrorReason( ErrorType::Unsubscribe, *request_id, Reason::NoSuchSubscription, ))); } let connections = self.connections.lock().unwrap(); let connection = connections.get(connection_id).unwrap().lock().unwrap(); self.send_message( *connection_id, connection.protocol.clone(), Message::Unsubscribed(*request_id), )?; Ok(()) } pub fn add_subscription( &mut self, connection_id: u64, request_id: u64, topic: URI, matching_policy: MatchingPolicy, id: ID, prefix_id: ID, ) -> WampResult<()> { log::debug!( "machine is adding subscription ({}, {}, {:?}, {:?})", connection_id, request_id, topic, matching_policy, ); let topic_id = match self.subscription_manager.subscriptions.lock().unwrap().subscribe_with( &topic, connection_id, matching_policy, id, prefix_id, ) { Ok(topic_id) => topic_id, Err(e) => { return Err(Error::new(ErrorKind::ErrorReason( ErrorType::Subscribe, request_id, e.reason(), ))) } }; 
log::debug!("subscription for {} on {} got id {}", topic.uri, connection_id, topic_id); self.subscription_manager.subscription_ids_to_uris.insert( topic_id, (topic.uri, matching_policy == MatchingPolicy::Prefix), ); let connections = self.connections.lock().unwrap(); let connection = connections.get(&connection_id).unwrap().lock().unwrap(); self.send_message( connection_id, connection.protocol.clone(), Message::Subscribed(request_id, topic_id), )?; Ok(()) } } impl ConnectionHandler { fn remove(&self) { log::trace!( "Removing subscriptions for client {}", self.info_id, ); for (topic_id, request_id) in &self.subscribed_topics { log::trace!("Looking for subscription {}", topic_id); self.router.remove_subscription(self.info_id, *topic_id, *request_id); } self.router.remove_connection(self.info_id); } fn terminate_connection(&self) -> WSResult<()> { self.remove(); Ok(()) } fn info(&self) -> Result<Arc<Mutex<ConnectionInfo>>, RequestError> { self.router.connection(self.info_id) } }
mod handshake; mod messaging; mod pubsub; mod machine; use crate::messages::{ErrorDetails, Message, Reason, URI}; use rand::distributions::{Distribution, Uniform}; use rand::thread_rng; use crate::router::pubsub::SubscriptionPatternNode; use crate::router::machine::send_message_json; use crate::router::machine::send_message_msgpack; use std::collections::HashMap; use std::marker::Sync; use std::sync::{Arc, Mutex}; use std::thread; use std::time::Duration; use std::env; use ws::{Result as WSResult, Sender, Builder, Settings}; use simple_raft_node::{RequestManager, RequestError, Node, Config, transports::TcpConnectionManager, storages::MemStorage}; use regex::Regex; use crate::{ID, Error, ErrorType, ErrorKind, MatchingPolicy, WampResult}; use serde::{Serialize, Deserialize}; use std::net::ToSocketAddrs; #[derive(Debug, Clone, Default)] struct SubscriptionManager { subscriptions: Arc<Mutex<SubscriptionPatternNode<u64>>>, subscription_ids_to_uris: HashMap<u64, (String, bool)>, } pub struct Router { node: Node<RouterInfo>, } #[derive(Debug, Clone)] pub struct RouterCore { subscription_manager: SubscriptionManager, connections: Arc<Mutex<HashMap<u64, Arc<Mutex<ConnectionInfo>>>>>, senders: Arc<Mutex<HashMap<u64, Sender>>>, } #[derive(Debug, Clone, Default)] struct RouterInfo { request_manager: Option<Re
listen<A>(&self, url: A) where A: ToSocketAddrs + std::fmt::Debug + Send + Sync + 'static { let router_info = self.node.machine().clone(); let ws = Builder::new().with_settings(Settings { ..Settings::default() }).build(|sender| { let id = random_id(); router_info.add_connection(id, sender); ConnectionHandler { info_id: id, subscribed_topics: Vec::new(), router: router_info.clone(), subscriptions: router_info.subscriptions(), } }).expect("websocket to be built"); ws.listen(url).unwrap(); } pub fn shutdown(&self) { let info = self.node.machine(); let arc = info.connections(); let connections = arc.lock().unwrap(); for id in connections.keys() { info.send_message( *id, Message::Goodbye(ErrorDetails::new(), Reason::SystemShutdown), ); info.set_state(*id, ConnectionState::ShuttingDown); } log::info!("Goodbye messages sent. Waiting 5 seconds for response"); thread::sleep(Duration::from_secs(5)); for id in connections.keys() { info.shutdown_sender(id); } } } impl RouterCore { pub fn send_message( &self, connection_id: u64, protocol: String, message: Message, ) -> WampResult<()> { log::debug!("handling send_message"); if let Some(sender) = self.senders.lock().unwrap().get(&connection_id) { log::debug!("Sending message {:?} via {}", message, protocol); let send_result = if protocol == WAMP_JSON { send_message_json(sender, &message) } else { send_message_msgpack(sender, &message) }; log::debug!("sending succeeded"); match send_result { Ok(()) => Ok(()), Err(e) => Err(Error::new(ErrorKind::WSError(e))), } } else { log::debug!("connection {} is not on this node, dropping message", connection_id); Ok(()) } } pub fn shutdown_sender(&self, id: &u64) { if let Some(sender) = self.senders.lock().unwrap().get_mut(id) { sender.shutdown().ok(); } } pub fn set_state(&self, connection_id: u64, state: ConnectionState) { let connections = self.connections.lock().unwrap(); let connection = connections.get(&connection_id).unwrap(); connection.lock().unwrap().state = state; } pub fn 
set_protocol(&self, connection_id: u64, protocol: String) { let connections = self.connections.lock().unwrap(); let connection = connections.get(&connection_id).unwrap(); connection.lock().unwrap().protocol = protocol; } pub fn add_connection(&mut self, connection_id: u64) { self.connections.lock().unwrap().insert(connection_id, Arc::new(Mutex::new(ConnectionInfo { state: ConnectionState::Initializing, protocol: String::new(), id: connection_id, }))); } pub fn remove_connection(&mut self, connection_id: u64) { self.connections.lock().unwrap().remove(&connection_id); } pub fn remove_subscription(&mut self, connection_id: &u64, subscription_id: &u64, request_id: &u64) -> WampResult<()> { if let Some(&(ref topic_uri, is_prefix)) = self.subscription_manager.subscription_ids_to_uris.get(subscription_id) { log::trace!("Removing subscription to {:?}", topic_uri); self.subscription_manager .subscriptions .lock().unwrap() .unsubscribe_with(topic_uri, connection_id, is_prefix) .map_err(|e| Error::new(ErrorKind::ErrorReason( ErrorType::Unsubscribe, *request_id, e.reason(), )))?; log::trace!("Subscription tree: {:?}", self.subscription_manager.subscriptions); } else { return Err(Error::new(ErrorKind::ErrorReason( ErrorType::Unsubscribe, *request_id, Reason::NoSuchSubscription, ))); } let connections = self.connections.lock().unwrap(); let connection = connections.get(connection_id).unwrap().lock().unwrap(); self.send_message( *connection_id, connection.protocol.clone(), Message::Unsubscribed(*request_id), )?; Ok(()) } pub fn add_subscription( &mut self, connection_id: u64, request_id: u64, topic: URI, matching_policy: MatchingPolicy, id: ID, prefix_id: ID, ) -> WampResult<()> { log::debug!( "machine is adding subscription ({}, {}, {:?}, {:?})", connection_id, request_id, topic, matching_policy, ); let topic_id = match self.subscription_manager.subscriptions.lock().unwrap().subscribe_with( &topic, connection_id, matching_policy, id, prefix_id, ) { Ok(topic_id) => topic_id, 
Err(e) => { return Err(Error::new(ErrorKind::ErrorReason( ErrorType::Subscribe, request_id, e.reason(), ))) } }; log::debug!("subscription for {} on {} got id {}", topic.uri, connection_id, topic_id); self.subscription_manager.subscription_ids_to_uris.insert( topic_id, (topic.uri, matching_policy == MatchingPolicy::Prefix), ); let connections = self.connections.lock().unwrap(); let connection = connections.get(&connection_id).unwrap().lock().unwrap(); self.send_message( connection_id, connection.protocol.clone(), Message::Subscribed(request_id, topic_id), )?; Ok(()) } } impl ConnectionHandler { fn remove(&self) { log::trace!( "Removing subscriptions for client {}", self.info_id, ); for (topic_id, request_id) in &self.subscribed_topics { log::trace!("Looking for subscription {}", topic_id); self.router.remove_subscription(self.info_id, *topic_id, *request_id); } self.router.remove_connection(self.info_id); } fn terminate_connection(&self) -> WSResult<()> { self.remove(); Ok(()) } fn info(&self) -> Result<Arc<Mutex<ConnectionInfo>>, RequestError> { self.router.connection(self.info_id) } }
questManager<RouterCore>>, senders: Arc<Mutex<HashMap<u64, Sender>>>, } struct ConnectionHandler { info_id: u64, router: RouterInfo, subscribed_topics: Vec<(ID, ID)>, subscriptions: Arc<Mutex<SubscriptionPatternNode<u64>>>, } #[derive(Debug)] pub struct ConnectionInfo { state: ConnectionState, protocol: String, id: u64, } #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] pub enum ConnectionState { Initializing, Connected, ShuttingDown, Disconnected, } unsafe impl Sync for Router {} static WAMP_JSON: &'static str = "wamp.2.json"; static WAMP_MSGPACK: &'static str = "wamp.2.msgpack"; fn random_id() -> u64 { let mut rng = thread_rng(); let between = Uniform::new(0, 1u64.rotate_left(56) - 1); between.sample(&mut rng) } impl Default for Router { fn default() -> Self { Self::new() } } impl Router { #[inline] pub fn new() -> Router { let node_id_msg = "Please specify a NODE_ID >= 0 via an environment variable!"; let re = Regex::new(r"\d+").unwrap(); let node_id_str = env::var("NODE_ID").expect(node_id_msg); let mut node_id = re.find(node_id_str.as_str()) .expect(node_id_msg) .as_str() .parse::<u64>() .expect(node_id_msg); node_id += 1; let node_address = env::var("NODE_ADDRESS").ok() .map(|address| { loop { log::info!("Trying to resolve binding IP address {}...", address); match address.to_socket_addrs() { Ok(mut addr) => { return addr .next() .expect("The binding address does not resolve to a valid IP or port!"); }, Err(e) => { log::warn!("Could not resolve binding address {}: {}", address, e); }, } } }).expect("Please specify a NODE_ADDRESS (domain:port) via an environment variable!"); let gateway = env::var("NODE_GATEWAY").ok() .map(|gateway| { loop { log::info!("Trying to resolve gateway address {}...", gateway); match gateway.to_socket_addrs() { Ok(mut addr) => { return addr .next() .expect("The gateway address does not resolve to a valid IP or port!"); }, Err(e) => { log::warn!("Could not resolve gateway {}: {}", gateway, e); }, } } }).expect("The gateway 
address environment variable NODE_GATEWAY is not specified!"); let config = Config { id: node_id, election_tick: 10, heartbeat_tick: 3, ..Default::default() }; let machine = RouterInfo::default(); let storage = MemStorage::new(); let mgr = TcpConnectionManager::new(node_address).unwrap(); let node = Node::new( config, gateway, machine, storage, mgr, ); let stop = node.stop_handler(); ctrlc::set_handler(move || { stop(); thread::sleep(Duration::from_micros(200)); std::process::exit(0); }).expect("error setting Ctrl-C handler"); Router { node, } } pub fn
random
[ { "content": "pub fn send_message_msgpack(sender: &Sender, message: &Message) -> WSResult<()> {\n\n // Send the message\n\n let mut buf: Vec<u8> = Vec::new();\n\n message\n\n .serialize(&mut Serializer::with(&mut buf, StructMapWriter))\n\n .unwrap();\n\n sender.send(WSMessage::Binary(b...
Rust
src/main.rs
weltimperator/rust_portforwarder
2885b0c5153b7809476520f32a335d94c03fec8f
use mio::*; use mio::net::{TcpListener, TcpStream, UdpSocket}; use std::net::{SocketAddr, ToSocketAddrs}; use std::collections::HashMap; use multi_map::MultiMap; use std::*; use getopts::Options; struct TcpConnection { src: TcpStream, dst_id: usize, } struct UdpConnection { src: UdpSocket, addr: SocketAddr, } fn get_ipv4_socket_addr(input :&String) -> Result<SocketAddr, io::Error> { let addrs_iter = input.to_socket_addrs()?; for addr in addrs_iter { if addr.is_ipv4() { return Ok(addr); } } Err(io::Error::new(io::ErrorKind::InvalidInput, "Can't resolve input to IPv4 socket address")) } fn forward(src: SocketAddr, dst: SocketAddr) -> Result<(), io::Error> { const TCP_SERVER: Token = Token(0); const UDP_SERVER: Token = Token(1); let mut next_token = 2; let mut tcp_conns = HashMap::with_capacity(32); let mut udp_conns = MultiMap::with_capacity(32); let poll = Poll::new()?; let tcp_server = TcpListener::bind(&src).unwrap(); poll.register(&tcp_server, TCP_SERVER, Ready::readable(), PollOpt::level())?; let udp_server = UdpSocket::bind(&src).unwrap(); poll.register(&udp_server, UDP_SERVER, Ready::readable(), PollOpt::level())?; let mut events = Events::with_capacity(1024); let mut buf = [0; 8192]; loop { poll.poll(&mut events, None)?; for event in events.iter() { match event.token() { TCP_SERVER => { let (stream1, _) = tcp_server.accept()?; poll.register(&stream1, Token(next_token), Ready::readable(), PollOpt::level())?; next_token += 1; let stream2 = TcpStream::connect(&dst)?; poll.register(&stream2, Token(next_token), Ready::readable(), PollOpt::level())?; next_token += 1; let conn1 = TcpConnection{src: stream1, dst_id: next_token - 1}; let conn2 = TcpConnection{src: stream2, dst_id: next_token - 2}; tcp_conns.insert(next_token - 2, conn1); tcp_conns.insert(next_token - 1, conn2); } UDP_SERVER => { if let Ok((len, from)) = udp_server.recv_from(&mut buf) { if !udp_conns.contains_key_alt(&from) { let addr = "0.0.0.0:0".parse().unwrap(); let dst_sock = 
UdpSocket::bind(&addr)?; poll.register(&dst_sock, Token(next_token), Ready::readable(), PollOpt::edge())?; let conn = UdpConnection{src: dst_sock, addr: from}; udp_conns.insert(next_token, from, conn); next_token += 1; } if let Some(dst_conn) = udp_conns.get_alt(&from) { let dst_sock = &dst_conn.src; dst_sock.send_to(&buf[..len], &dst)?; } } } Token(port) => { let mut to_remove = None; if let Some(c) = tcp_conns.get(&port) { let buffer_ref: &mut [u8] = &mut buf; let mut buffers: [&mut IoVec; 1] = [buffer_ref.into()]; match c.src.read_bufs(&mut buffers) { Ok(0) => {} Ok(len) => { if let Some(d) = tcp_conns.get(&c.dst_id) { let d_buffers: [&IoVec; 1] = [buf[..len].into()]; let _ = d.src.write_bufs(&d_buffers); } } Err(e) => { if e.kind() != io::ErrorKind::WouldBlock { to_remove = Some((port, c.dst_id)); } } } } if let Some((port1, port2)) = to_remove { println!("Clossing TCP connections {} and {}", port1, port2); tcp_conns.remove(&port1); tcp_conns.remove(&port2); } if let Some(c) = udp_conns.get(&port) { if let Ok((len, _)) = c.src.recv_from(&mut buf) { let _ = udp_server.send_to(&buf[..len], &c.addr); } } }, } } } } fn print_usage(program: &str, opts: Options) { let brief = format!("Usage: {} [options]", program); print!("{}", opts.usage(&brief)); } fn main() -> Result<(), std::io::Error> { let args: Vec<String> = env::args().collect(); let program = args[0].clone(); let mut opts = Options::new(); opts.optopt("s", "src", "where to listen on (default 0.0.0.0:815", "HOST:PORT"); opts.optopt("d", "dst", "where to forward to (default zm.tolao.de:815", "HOST:PORT"); opts.optflag("h", "help", "print this help"); let matches = match opts.parse(&args[1..]) { Ok(m) => { m } Err(f) => { panic!(f.to_string()) } }; if matches.opt_present("h") { print_usage(&program, opts); return Ok(()); } let src_str = matches.opt_str("src").unwrap_or("0.0.0.0:1815".to_string()); let dst_str = matches.opt_str("dst").unwrap_or("127.0.0.1:2815".to_string()); let src = 
get_ipv4_socket_addr(&src_str)?; let dst = get_ipv4_socket_addr(&dst_str)?; loop { if let Err(e) = forward(src, dst) { println!("Forwarding failed: {}", e); } } }
use mio::*; use mio::net::{TcpListener, TcpStream, UdpSocket}; use std::net::{SocketAddr, ToSocketAddrs}; use std::collections::HashMap; use multi_map::MultiMap; use std::*; use getopts::Options; struct TcpConnection { src: TcpStream, dst_id: usize, } struct UdpConnection { src: UdpSocket, addr: SocketAddr, } fn get_ipv4_socket_addr(input :&String) -> Result<SocketAddr, io::Error> { let addrs_iter = input.to_socket_addrs()?; for addr in addrs_iter { if addr.is_ipv4() { return Ok(addr); } } Err(io::Error::new(io::ErrorKind::InvalidInput, "Can't resolve input to IPv4 socket address")) } fn forward(src: SocketAddr, dst: SocketAddr) -> Result<(), io::Error> { const TCP_SERVER: Token = Token(0); const UDP_SERVER: Token = Token(1); let mut next_token = 2; let mut tcp_conns = HashMap::with_capacity(32); let mut udp_conns = MultiMap::with_capacity(32); let poll = Poll::new()?; let tcp_server = TcpListener::bind(&src).unwrap(); poll.register(&tcp_server, TCP_SERVER, Ready::readable(), PollOpt::level())?; let udp_server = UdpSocket::bind(&src).unwrap(); poll.register(&udp_server, UDP_SERVER, Ready::readable(), PollOpt::level())?; let mut events = Events::with_capacity(1024); let mut buf = [0; 8192]; loop { poll.poll(&mut events, None)?; for event in events.iter() { match event.token() { TCP_SERVER => { let (stream1, _) = tcp_server.accept()?; poll.register(&stream1, Token(next_token), Ready::readable(), PollOpt::level())?; next_token += 1; let stream2 = TcpStream::connect(&dst)?; poll.register(&stream2, Token(next_token), Ready::readable(), PollOpt::level())?; next_token += 1; let conn1 = TcpConnection{src: stream1, dst_id: next_token - 1}; let conn2 = TcpConnection{src: stream2, dst_id: next_token - 2}; tcp_conns.insert(next_token - 2, conn1); tcp_conns.insert(next_token - 1, conn2); } UDP_SERVER => { if let Ok((len, from)) = udp_server.recv_from(&mut buf) { if !udp_conns.contains_key_alt(&from) { let addr = "0.0.0.0:0".parse().unwrap(); let dst_sock = 
UdpSocket::bind(&addr)?; poll.register(&dst_sock, Token(next_token), Ready::readable(), PollOpt::edge())?; let conn = UdpConnection{src: dst_sock, addr: from}; udp_conns.insert(next_token, from, conn); next_token += 1; } if let Some(dst_conn) = udp_conns.get_alt(&from) { let dst_sock = &dst_conn.src; dst_sock.send_to(&buf[..len], &dst)?; } } } Token(port) => { let mut to_remove = None; if let Some(c) = tcp_conns.get(&port) { let buffer_ref: &mut [u8] = &mut buf; let mut buffers: [&mut IoVec; 1] = [buffer_ref.into()]; match c.src.read_bufs(&mut buffers) { Ok(0) => {} Ok(len) => { if let Some(d) = tcp_conns.get(&c.dst_id) { let d_buffers: [&IoVec; 1] = [buf[..len].into()]; let _ = d.src.write_bufs(&d_buffers); } } Err(e) => { if e.kind() != io::ErrorKind::WouldBlock { to_remove = Some((port, c.dst_id)); } } } } if let Some((port1, port2)) = to_remove { println!("Clossing TCP connections {} and {}", port1, port2); tcp_conns.remove(&port1); tcp_conns.remove(&port2); } if let Some(c) = udp_conns.get(&port) { if let Ok((len, _)) = c.src.recv_from(&mut buf) { let _ = udp_server.send_to(&buf[..len], &c.addr); } } }, } } } } fn print_usage(program: &str, opts: Options) { let brief = format!("Usage: {} [options]", program); print!("{}", opts.usage(&brief)); } fn main() -> Result<(), std::io::Error> { let args: Vec<String> = env::args().collect(); let program = args[0].clone(); let mut opts = Options::new(); opts.optopt("s", "src", "where to listen on (default 0.0.0.0:815", "HOST:PORT"); opts.optopt("d", "dst", "where to forward to (default zm.tolao.de:815", "HOST:PORT"); opts.optflag("h", "help", "print this help"); let matches = match opts.parse(&args[1..]) { Ok(m) => { m } Err(f) => { panic!(f.to_string()) } }; if matches.opt_present("h") { print_usage(&program, opts); return Ok(()); } let src_str = matches.opt_str("src").unwrap_or("0.0.0.0:1815".to_string());
let dst_str = matches.opt_str("dst").unwrap_or("127.0.0.1:2815".to_string()); let src = get_ipv4_socket_addr(&src_str)?; let dst = get_ipv4_socket_addr(&dst_str)?; loop { if let Err(e) = forward(src, dst) { println!("Forwarding failed: {}", e); } } }
function_block-function_prefix_line
[]
Rust
src/futures_02.rs
overdrivenpotato/futures-compat
17ab0fa0eb895254a7ed0200591ea2e23a73978e
use std::io; use std::sync::Arc; use futures_v01x::{Async as Async01, Future as Future01, Poll as Poll01, Stream as Stream01}; use futures_v01x::task::{self as task01, Task as Task01}; use futures_v02x::{Async as Async02, Future as Future02, Never, Stream as Stream02}; use futures_v02x::task::{Context, LocalMap, Wake, Waker}; use futures_v02x::executor::{Executor as Executor02, SpawnError}; use futures_v02x::io::{AsyncRead as AsyncRead02, AsyncWrite as AsyncWrite02}; use tokio_io::{AsyncRead as AsyncReadTk, AsyncWrite as AsyncWriteTk}; #[derive(Debug)] #[must_use = "futures do nothing unless polled"] pub struct Future02As01<E, F> { exec: E, v02: F, } #[derive(Debug)] #[must_use = "futures do nothing unless polled"] pub struct Future02NeverAs01Unit<E, F> { exec: E, v02: F, } #[derive(Debug)] #[must_use = "streams do nothing unless polled"] pub struct Stream02As01<E, S> { exec: E, v02: S, } #[derive(Debug)] pub struct AsyncIo02AsTokio<E, S> { exec: E, v02: S, } #[allow(missing_debug_implementations)] pub struct BoxedExecutor02(pub(crate) Box<Executor02 + Send>); impl Executor02 for BoxedExecutor02 { fn spawn(&mut self, f: Box<Future02<Item=(), Error=Never> + Send>) -> Result<(), SpawnError> { (&mut *self.0).spawn(f) } } pub trait FutureInto01: Future02 { fn into_01_compat<E>(self, exec: E) -> Future02As01<E, Self> where Self: Sized, E: Executor02; fn into_01_compat_never_unit<E>(self, exec: E) -> Future02NeverAs01Unit<E, Self> where Self: Future02<Error=Never> + Sized, E: Executor02; } pub trait StreamInto01: Stream02 { fn into_01_compat<E>(self, exec: E) -> Stream02As01<E, Self> where Self: Sized, E: Executor02; } pub trait AsyncIoIntoTokio { fn into_tokio_compat<E>(self, exec: E) -> AsyncIo02AsTokio<E, Self> where Self: AsyncRead02 + AsyncWrite02 + Sized, E: Executor02; } impl<F> FutureInto01 for F where F: Future02, { fn into_01_compat<E>(self, exec: E) -> Future02As01<E, Self> where Self: Sized, E: Executor02, { Future02As01 { exec, v02: self, } } fn 
into_01_compat_never_unit<E>(self, exec: E) -> Future02NeverAs01Unit<E, Self> where Self: Sized, E: Executor02, { Future02NeverAs01Unit { exec, v02: self, } } } impl<E, F> Future01 for Future02As01<E, F> where F: Future02, E: Executor02, { type Item = F::Item; type Error = F::Error; fn poll(&mut self) -> Poll01<Self::Item, Self::Error> { let mut locals = LocalMap::new(); let waker = current_as_waker(); let mut cx = Context::new(&mut locals, &waker, &mut self.exec); match self.v02.poll(&mut cx) { Ok(Async02::Ready(val)) => Ok(Async01::Ready(val)), Ok(Async02::Pending) => Ok(Async01::NotReady), Err(err) => Err(err), } } } impl<E, F> Future01 for Future02NeverAs01Unit<E, F> where F: Future02<Error=Never>, E: Executor02, { type Item = F::Item; type Error = (); fn poll(&mut self) -> Poll01<Self::Item, Self::Error> { let mut locals = LocalMap::new(); let waker = current_as_waker(); let mut cx = Context::new(&mut locals, &waker, &mut self.exec); match self.v02.poll(&mut cx) { Ok(Async02::Ready(val)) => Ok(Async01::Ready(val)), Ok(Async02::Pending) => Ok(Async01::NotReady), Err(never) => match never {} } } } impl<S> StreamInto01 for S where S: Stream02, { fn into_01_compat<E>(self, exec: E) -> Stream02As01<E, Self> where Self: Sized, E: Executor02, { Stream02As01 { exec, v02: self, } } } impl<E, S> Stream01 for Stream02As01<E, S> where S: Stream02, E: Executor02, { type Item = S::Item; type Error = S::Error; fn poll(&mut self) -> Poll01<Option<Self::Item>, Self::Error> { let mut locals = LocalMap::new(); let waker = current_as_waker(); let mut cx = Context::new(&mut locals, &waker, &mut self.exec); match self.v02.poll_next(&mut cx) { Ok(Async02::Ready(val)) => Ok(Async01::Ready(val)), Ok(Async02::Pending) => Ok(Async01::NotReady), Err(err) => Err(err), } } } impl<I> AsyncIoIntoTokio for I { fn into_tokio_compat<E>(self, exec: E) -> AsyncIo02AsTokio<E, Self> where Self: AsyncRead02 + AsyncWrite02 + Sized, E: Executor02, { AsyncIo02AsTokio { exec, v02: self, } } } impl<E: 
Executor02, I: AsyncRead02> io::Read for AsyncIo02AsTokio<E, I> { fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { let mut locals = LocalMap::new(); let waker = current_as_waker(); let mut cx = Context::new(&mut locals, &waker, &mut self.exec); match self.v02.poll_read(&mut cx, buf) { Ok(Async02::Ready(val)) => Ok(val), Ok(Async02::Pending) => Err(would_block()), Err(err) => Err(err), } } } impl<E: Executor02, I: AsyncWrite02> io::Write for AsyncIo02AsTokio<E, I> { fn write(&mut self, buf: &[u8]) -> io::Result<usize> { let mut locals = LocalMap::new(); let waker = current_as_waker(); let mut cx = Context::new(&mut locals, &waker, &mut self.exec); match self.v02.poll_write(&mut cx, buf) { Ok(Async02::Ready(val)) => Ok(val), Ok(Async02::Pending) => Err(would_block()), Err(err) => Err(err), } } fn flush(&mut self) -> io::Result<()> { let mut locals = LocalMap::new(); let waker = current_as_waker(); let mut cx = Context::new(&mut locals, &waker, &mut self.exec); match self.v02.poll_flush(&mut cx) { Ok(Async02::Ready(val)) => Ok(val), Ok(Async02::Pending) => Err(would_block()), Err(err) => Err(err), } } } fn would_block() -> io::Error { io::Error::from(io::ErrorKind::WouldBlock) } impl<E: Executor02, I: AsyncRead02> AsyncReadTk for AsyncIo02AsTokio<E, I> { unsafe fn prepare_uninitialized_buffer(&self, buf: &mut [u8]) -> bool { let init = self.v02.initializer(); if init.should_initialize() { init.initialize(buf); true } else { false } } } impl<E: Executor02, I: AsyncWrite02> AsyncWriteTk for AsyncIo02AsTokio<E, I> { fn shutdown(&mut self) -> Poll01<(), io::Error> { let mut locals = LocalMap::new(); let waker = current_as_waker(); let mut cx = Context::new(&mut locals, &waker, &mut self.exec); match self.v02.poll_close(&mut cx) { Ok(Async02::Ready(val)) => Ok(Async01::Ready(val)), Ok(Async02::Pending) => Ok(Async01::NotReady), Err(err) => Err(err), } } } fn current_as_waker() -> Waker { Waker::from(Arc::new(Current(task01::current()))) } struct Current(Task01); 
impl Wake for Current { fn wake(arc_self: &Arc<Self>) { arc_self.0.notify(); } }
use std::io; use std::sync::Arc; use futures_v01x::{Async as Async01, Future as Future01, Poll as Poll01, Stream as Stream01}; use futures_v01x::task::{self as task01, Task as Task01}; use futures_v02x::{Async as Async02, Future as Future02, Never, Stream as Stream02}; use futures_v02x::task::{Context, LocalMap, Wake, Waker}; use futures_v02x::executor::{Executor as Executor02, SpawnError}; use futures_v02x::io::{AsyncRead as AsyncRead02, AsyncWrite as AsyncWrite02}; use tokio_io::{AsyncRead as AsyncReadTk, AsyncWrite as AsyncWriteTk}; #[derive(Debug)] #[must_use = "futures do nothing unless polled"] pub struct Future02As01<E, F> { exec: E, v02: F, } #[derive(Debug)] #[must_use = "futures do nothing unless polled"] pub struct Future02NeverAs01Unit<E, F> { exec: E, v02: F, } #[derive(Debug)] #[must_use = "streams do nothing unless polled"] pub struct Stream02As01<E, S> { exec: E, v02: S, } #[derive(Debug)] pub struct AsyncIo02AsTokio<E, S> { exec: E, v02: S, } #[allow(missing_debug_implementations)] pub struct BoxedExecutor02(pub(crate) Box<Executor02 + Send>); impl Executor02 for BoxedExecutor02 { fn spawn(&mut self, f: Box<Future02<Item=(), Error=Never> + Send>) -> Result<(), SpawnError> { (&mut *self.0).spawn(f) } } pub trait FutureInto01: Future02 { fn into_01_compat<E>(self, exec: E) -> Future02As01<E, Self> where Self: Sized, E: Executor02; fn into_01_compat_never_unit<E>(self, exec: E) -> Future02NeverAs01Unit<E, Self> where Self: Future02<Error=Never> + Sized, E: Executor02; } pub trait StreamInto01: Stream02 { fn into_01_compat<E>(self, exec: E) -> Stream02As01<E, Self> where Self: Sized, E: Executor02; } pub trait AsyncIoIntoTokio { fn into_tokio_compat<E>(self, exec: E) -> AsyncIo02AsTokio<E, Self> where Self: AsyncRead02 + AsyncWrite02 + Sized, E: Executor02; } impl<F> FutureInto01 for F where F: Future02, {
fn into_01_compat_never_unit<E>(self, exec: E) -> Future02NeverAs01Unit<E, Self> where Self: Sized, E: Executor02, { Future02NeverAs01Unit { exec, v02: self, } } } impl<E, F> Future01 for Future02As01<E, F> where F: Future02, E: Executor02, { type Item = F::Item; type Error = F::Error; fn poll(&mut self) -> Poll01<Self::Item, Self::Error> { let mut locals = LocalMap::new(); let waker = current_as_waker(); let mut cx = Context::new(&mut locals, &waker, &mut self.exec); match self.v02.poll(&mut cx) { Ok(Async02::Ready(val)) => Ok(Async01::Ready(val)), Ok(Async02::Pending) => Ok(Async01::NotReady), Err(err) => Err(err), } } } impl<E, F> Future01 for Future02NeverAs01Unit<E, F> where F: Future02<Error=Never>, E: Executor02, { type Item = F::Item; type Error = (); fn poll(&mut self) -> Poll01<Self::Item, Self::Error> { let mut locals = LocalMap::new(); let waker = current_as_waker(); let mut cx = Context::new(&mut locals, &waker, &mut self.exec); match self.v02.poll(&mut cx) { Ok(Async02::Ready(val)) => Ok(Async01::Ready(val)), Ok(Async02::Pending) => Ok(Async01::NotReady), Err(never) => match never {} } } } impl<S> StreamInto01 for S where S: Stream02, { fn into_01_compat<E>(self, exec: E) -> Stream02As01<E, Self> where Self: Sized, E: Executor02, { Stream02As01 { exec, v02: self, } } } impl<E, S> Stream01 for Stream02As01<E, S> where S: Stream02, E: Executor02, { type Item = S::Item; type Error = S::Error; fn poll(&mut self) -> Poll01<Option<Self::Item>, Self::Error> { let mut locals = LocalMap::new(); let waker = current_as_waker(); let mut cx = Context::new(&mut locals, &waker, &mut self.exec); match self.v02.poll_next(&mut cx) { Ok(Async02::Ready(val)) => Ok(Async01::Ready(val)), Ok(Async02::Pending) => Ok(Async01::NotReady), Err(err) => Err(err), } } } impl<I> AsyncIoIntoTokio for I { fn into_tokio_compat<E>(self, exec: E) -> AsyncIo02AsTokio<E, Self> where Self: AsyncRead02 + AsyncWrite02 + Sized, E: Executor02, { AsyncIo02AsTokio { exec, v02: self, } } } impl<E: 
Executor02, I: AsyncRead02> io::Read for AsyncIo02AsTokio<E, I> { fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { let mut locals = LocalMap::new(); let waker = current_as_waker(); let mut cx = Context::new(&mut locals, &waker, &mut self.exec); match self.v02.poll_read(&mut cx, buf) { Ok(Async02::Ready(val)) => Ok(val), Ok(Async02::Pending) => Err(would_block()), Err(err) => Err(err), } } } impl<E: Executor02, I: AsyncWrite02> io::Write for AsyncIo02AsTokio<E, I> { fn write(&mut self, buf: &[u8]) -> io::Result<usize> { let mut locals = LocalMap::new(); let waker = current_as_waker(); let mut cx = Context::new(&mut locals, &waker, &mut self.exec); match self.v02.poll_write(&mut cx, buf) { Ok(Async02::Ready(val)) => Ok(val), Ok(Async02::Pending) => Err(would_block()), Err(err) => Err(err), } } fn flush(&mut self) -> io::Result<()> { let mut locals = LocalMap::new(); let waker = current_as_waker(); let mut cx = Context::new(&mut locals, &waker, &mut self.exec); match self.v02.poll_flush(&mut cx) { Ok(Async02::Ready(val)) => Ok(val), Ok(Async02::Pending) => Err(would_block()), Err(err) => Err(err), } } } fn would_block() -> io::Error { io::Error::from(io::ErrorKind::WouldBlock) } impl<E: Executor02, I: AsyncRead02> AsyncReadTk for AsyncIo02AsTokio<E, I> { unsafe fn prepare_uninitialized_buffer(&self, buf: &mut [u8]) -> bool { let init = self.v02.initializer(); if init.should_initialize() { init.initialize(buf); true } else { false } } } impl<E: Executor02, I: AsyncWrite02> AsyncWriteTk for AsyncIo02AsTokio<E, I> { fn shutdown(&mut self) -> Poll01<(), io::Error> { let mut locals = LocalMap::new(); let waker = current_as_waker(); let mut cx = Context::new(&mut locals, &waker, &mut self.exec); match self.v02.poll_close(&mut cx) { Ok(Async02::Ready(val)) => Ok(Async01::Ready(val)), Ok(Async02::Pending) => Ok(Async01::NotReady), Err(err) => Err(err), } } } fn current_as_waker() -> Waker { Waker::from(Arc::new(Current(task01::current()))) } struct Current(Task01); 
impl Wake for Current { fn wake(arc_self: &Arc<Self>) { arc_self.0.notify(); } }
fn into_01_compat<E>(self, exec: E) -> Future02As01<E, Self> where Self: Sized, E: Executor02, { Future02As01 { exec, v02: self, } }
function_block-full_function
[ { "content": "/// Execute a function with the context used as a v0.1 `Notifier`, converting\n\n/// v0.1 `Poll` into v0.2 version.\n\npub fn with_context_poll<F, R, E>(cx: &mut Context, f: F) -> Poll02<R, E>\n\nwhere\n\n F: FnOnce() -> Poll01<R, E>,\n\n{\n\n with_context(cx, move || {\n\n match f() ...
Rust
src/c_interface.rs
henriquegemignani/randomprime
c2b31c0996d3c003d586be865c0a930f48619a54
use crate::patches; use crate::patch_config::PatchConfig; use std::{ cell::Cell, ffi::{CStr, CString}, panic, path::Path, os::raw::c_char, }; use serde::{Serialize}; #[derive(Serialize)] #[serde(tag = "type")] #[serde(rename_all = "camelCase")] enum CbMessage<'a> { Success, Error { msg: &'a str, }, Progress { percent: f64, msg: &'a str, }, } impl<'a> CbMessage<'a> { fn success_json() -> CString { CString::new(serde_json::to_string(&CbMessage::Success).unwrap()).unwrap() } fn error_json(msg: &str) -> CString { let msg = CbMessage::fix_msg(msg); let cbmsg = CbMessage::Error { msg }; CString::new(serde_json::to_string(&cbmsg).unwrap()).unwrap() } fn progress_json(percent: f64, msg: &str) -> CString { let msg = CbMessage::fix_msg(msg); let cbmsg = CbMessage::Progress { percent, msg }; CString::new(serde_json::to_string(&cbmsg).unwrap()).unwrap() } fn fix_msg(msg: &str) -> &str { if let Some(pos) = msg.bytes().position(|i| i == b'\0') { &msg[..pos] } else { msg } } } struct ProgressNotifier { total_size: usize, bytes_so_far: usize, cb_data: *const (), cb: extern fn(*const (), *const c_char) } impl ProgressNotifier { fn new(cb_data: *const (), cb: extern fn(*const (), *const c_char)) -> ProgressNotifier { ProgressNotifier { total_size: 0, bytes_so_far: 0, cb, cb_data } } } impl structs::ProgressNotifier for ProgressNotifier { fn notify_total_bytes(&mut self, total_size: usize) { self.total_size = total_size } fn notify_writing_file(&mut self, file_name: &reader_writer::CStr, file_bytes: usize) { let percent = self.bytes_so_far as f64 / self.total_size as f64 * 100.; let msg = format!("Writing file {:?}", file_name); (self.cb)(self.cb_data, CbMessage::progress_json(percent, &msg).as_ptr()); self.bytes_so_far += file_bytes; } fn notify_writing_header(&mut self) { let percent = self.bytes_so_far as f64 / self.total_size as f64 * 100.; (self.cb)(self.cb_data, CbMessage::progress_json(percent, "Writing ISO header").as_ptr()); } fn notify_flushing_to_disk(&mut self) { 
(self.cb)( self.cb_data, CbMessage::progress_json(100., "Flushing written data to the disk").as_ptr(), ); } } fn inner(config_json: *const c_char, cb_data: *const (), cb: extern fn(*const (), *const c_char)) -> Result<(), String> { let config_json = unsafe { CStr::from_ptr(config_json) }.to_str() .map_err(|e| format!("JSON parse failed: {}", e))?; let patch_config = PatchConfig::from_json(config_json)?; let pn = ProgressNotifier::new(cb_data, cb); patches::patch_iso(patch_config, pn)?; Ok(()) } #[no_mangle] pub extern fn randomprime_patch_iso(config_json: *const c_char , cb_data: *const (), cb: extern fn(*const (), *const c_char)) { thread_local! { static PANIC_DETAILS: Cell<Option<(String, u32)>> = Cell::new(None); } panic::set_hook(Box::new(|pinfo| { PANIC_DETAILS.with(|pd| { pd.set(pinfo.location().map(|l| (l.file().to_owned(), l.line()))); }); })); let r = panic::catch_unwind(|| inner(config_json, cb_data, cb)) .map_err(|e| { let msg = if let Some(e) = e.downcast_ref::<&'static str>() { e.to_string() } else if let Some(e) = e.downcast_ref::<String>() { e.clone() } else { format!("{:?}", e) }; if let Some(pd) = PANIC_DETAILS.with(|pd| pd.replace(None)) { let path = Path::new(&pd.0); let mut comp = path.components(); let found = path.components() .skip(1) .zip(&mut comp) .find(|(c, _)| c.as_os_str() == "randomprime") .is_some(); let shortened_path = if found { comp.as_path().as_os_str() } else { path.file_name().unwrap_or("".as_ref()) }; format!("{} at {}:{}", msg, shortened_path.to_string_lossy(), pd.1) } else { msg } }) .and_then(|i| i); match r { Ok(()) => cb(cb_data, CbMessage::success_json().as_ptr()), Err(msg) => cb(cb_data, CbMessage::error_json(&msg).as_ptr()), }; }
use crate::patches; use crate::patch_config::PatchConfig; use std::{ cell::Cell, ffi::{CStr, CString}, panic, path::Path, os::raw::c_char, }; use serde::{Serialize}; #[derive(Serialize)] #[serde(tag = "type")] #[serde(rename_all = "camelCase")] enum CbMessage<'a> { Success, Error { msg: &'a str, }, Progress { percent: f64, msg: &'a str, }, } impl<'a> CbMessage<'a> { fn success_json() -> CString { CString::new(serde_json::to_string(&CbMessage::Success).unwrap()).unwrap() } fn error_json(msg: &str) -> CString { let msg = CbMessage::fix_msg(msg); let cbmsg = CbMessage::Error { msg }; CString::new(serde_json::to_string(&cbmsg).unwrap()).unwrap() } fn progress_json(percent: f64, msg: &str) -> CString { let msg = CbMessage::fix_msg(msg); let cbmsg = CbMessage::Progress { percent, msg }; CString::new(serde_json::to_string(&cbmsg).unwrap()).unwrap() } fn fix_msg(msg: &str) -> &str { if let Some(pos) = msg.bytes().position(|i| i == b'\0') { &msg[..pos] } else { msg } } } struct ProgressNotifier { total_size: usize, bytes_so_far: usize, cb_data: *const (), cb: extern fn(*const (), *const c_char) } impl ProgressNotifier { fn new(cb_data: *const (), cb: extern fn(*const (), *const c_char)) -> ProgressNotifier { ProgressNotifier { total_size: 0, bytes_so_far: 0, cb, cb_data } } } impl structs::ProgressNotifier for ProgressNotifier { fn notify_total_bytes(&mut self, total_size: usize) { self.total_size = total_size } fn notify_writing_file(&mut self, file_name: &reader_writer::CStr, file_bytes: usize) { let percent = self.bytes_so_far as f64 / self.total_size as f64 * 100.; let msg = format!("Writing file {:?}", file_name); (self.cb)(self.cb_data, CbMessage::progress_json(percent, &msg).as_ptr()); self.bytes_so_far += file_bytes; } fn notify_writing_header(&mut self) { let percent = self.bytes_so_far as f64 / self.total_size as f64 * 100.; (self.cb)(self.cb_data, CbMessage::progress_json(percent, "Writing ISO header").as_ptr()); } fn notify_flushing_to_disk(&mut self) { 
(self.cb)( self.cb_data, CbMessage::progress_json(100., "Flushing written data to the disk").as_ptr(), ); } } fn inner(config_json: *const c_char, cb_data: *const (), cb: extern fn(*const (), *const c_char)) -> Result<(), String> {
#[no_mangle] pub extern fn randomprime_patch_iso(config_json: *const c_char , cb_data: *const (), cb: extern fn(*const (), *const c_char)) { thread_local! { static PANIC_DETAILS: Cell<Option<(String, u32)>> = Cell::new(None); } panic::set_hook(Box::new(|pinfo| { PANIC_DETAILS.with(|pd| { pd.set(pinfo.location().map(|l| (l.file().to_owned(), l.line()))); }); })); let r = panic::catch_unwind(|| inner(config_json, cb_data, cb)) .map_err(|e| { let msg = if let Some(e) = e.downcast_ref::<&'static str>() { e.to_string() } else if let Some(e) = e.downcast_ref::<String>() { e.clone() } else { format!("{:?}", e) }; if let Some(pd) = PANIC_DETAILS.with(|pd| pd.replace(None)) { let path = Path::new(&pd.0); let mut comp = path.components(); let found = path.components() .skip(1) .zip(&mut comp) .find(|(c, _)| c.as_os_str() == "randomprime") .is_some(); let shortened_path = if found { comp.as_path().as_os_str() } else { path.file_name().unwrap_or("".as_ref()) }; format!("{} at {}:{}", msg, shortened_path.to_string_lossy(), pd.1) } else { msg } }) .and_then(|i| i); match r { Ok(()) => cb(cb_data, CbMessage::success_json().as_ptr()), Err(msg) => cb(cb_data, CbMessage::error_json(&msg).as_ptr()), }; }
let config_json = unsafe { CStr::from_ptr(config_json) }.to_str() .map_err(|e| format!("JSON parse failed: {}", e))?; let patch_config = PatchConfig::from_json(config_json)?; let pn = ProgressNotifier::new(cb_data, cb); patches::patch_iso(patch_config, pn)?; Ok(()) }
function_block-function_prefix_line
[ { "content": "fn patch_main_strg(res: &mut structs::Resource, msg: &str) -> Result<(), String>\n\n{\n\n let strings = res.kind.as_strg_mut().unwrap()\n\n .string_tables\n\n .as_mut_vec()\n\n .iter_mut()\n\n .find(|table| table.lang == b\"ENGL\".into())\n\n .unwrap()\n\n ...
Rust
cst/src/lex_string.rs
ydzz/EternalR
8168f6b77a013f550a76b7639c4370d04c7dccf1
use core::str::{Chars}; use std::collections::{VecDeque}; static BACK_LEN:usize = 3usize; #[derive(Debug)] pub struct LexString<'a> { source:&'a str, chars:Chars<'a>, cache_list:VecDeque<char>, ahead_count:usize, cur_index:usize } impl<'a> LexString<'a> { pub fn new(str:&'a str) -> Self { LexString { source:str, chars:str.chars(), cache_list:VecDeque::default(), ahead_count:0, cur_index:0 } } pub fn next(&mut self) -> Option<char> { if self.ahead_count > 0 { self.cur_index += 1; let sub_count = self.sub_ahead_len(); let chr = self.cache_list[sub_count as usize]; self.ahead_count -= 1; return Some(chr); } else { let next = self.chars.next(); if let Some(chr) = next { if self.cache_list.len() >= BACK_LEN { self.cache_list.pop_front(); } self.cache_list.push_back(chr); self.cur_index += 1; }; return next; } } pub fn cur_index(&self) -> usize { self.cur_index } pub fn lookahead(&mut self,count:usize) -> Option<char> { if self.ahead_count > count { let sub_len = self.sub_ahead_len() as usize; return Some(self.cache_list[sub_len + count - 1]); } else { let add_count = count - self.ahead_count; for _ in 0..add_count { if let Some(chr) = self.chars.next() { self.ahead_count += 1; self.cache_list.push_back(chr); } else { return None; } } let sub_len = self.sub_ahead_len() as usize; let idx:i32 = sub_len as i32 + count as i32 - 1; if idx >= self.cache_list.len() as i32 || idx < 0 { return None; } return Some(self.cache_list[idx as usize]); } } pub fn lookback(&mut self,count:usize) -> Option<char> { let sub_ahead:i32 = self.sub_ahead_len(); if sub_ahead < 0 { return None; } if sub_ahead - count as i32 >= 0 { return Some(self.cache_list[sub_ahead as usize - count - 1]); } None } fn sub_ahead_len(&self) -> i32 { self.cache_list.len() as i32 - self.ahead_count as i32 } pub fn take_while<F>(&mut self,mut f:F) -> Option<&str> where F:FnMut(char) -> bool { let starti = self.cur_index; while let Some(chr) = self.lookahead(1) { if f(chr) { self.next(); } else { if starti == 
self.cur_index { return None; } return Some(self.slice(starti,self.cur_index)); } } if self.cur_index != starti { return Some(self.slice(starti,self.cur_index)); } None } pub fn put_rollback(&mut self,str:&str) { for chr in str.chars() { self.cache_list.push_back(chr); self.ahead_count += 1; } } pub fn put_back(&mut self,chr:char) { self.cache_list.push_back(chr); self.ahead_count += 1; } pub fn slice(&self,s:usize,e:usize) -> &str { let mut idx = 0usize; let mut u8idx_e = 0usize; let mut u8idx_s = 0usize; for chr in self.source.chars() { if idx < s { u8idx_s += chr.len_utf8(); } if idx < e { u8idx_e += chr.len_utf8(); } else { break; } idx += 1 } return &self.source[u8idx_s..u8idx_e]; } } #[test] fn test_string() { let mut lex = LexString::new(" 1234∀56"); let aa = lex.take_while(|chr| chr.is_whitespace()); dbg!(aa.map(|c| c.len())); dbg!(lex.next()); }
use core::str::{Chars}; use std::collections::{VecDeque}; static BACK_LEN:usize = 3usize; #[derive(Debug)] pub struct LexString<'a> { source:&'a str, chars:Chars<'a>, cache_list:VecDeque<char>, ahead_count:usize, cur_index:usize } impl<'a> LexString<'a> { pub fn new(str:&'a str) -> Self { LexString { source:str, chars:str.chars(), cache_list:VecDeque::default(), ahead_count:0, cur_index:0 } } pub fn next(&mut self) -> Option<char> { if self.ahead_count > 0 { self.cur_index += 1; let sub_count = self.sub_ahead_len(); let chr = self.cache_list[sub_count as usize]; self.ahead_count -= 1; return Some(chr); } else { let next = self.chars.next(); if let Some(chr) = next { if self.cache_list.len() >= BACK_LEN { self.cache_list.pop_front(); } self.cache_list.push_back(chr); self.cur_index += 1; }; return next; } } pub fn cur_index(&self) -> usize { self.cur_index } pub fn lookahead(&mut self,count:usize) -> Option<char> { if self.ahead_count > count { let sub_len = self.sub_ahead_len() as usize; return Some(self.cache_list[sub_len + count - 1]); } else { let add_count = count - self.ahead_count; for _ in 0..add_count { if let Some(chr) = self.chars.next() { self.ahead_count += 1; self.cache_list.push_back(chr); } else { return None; } } let sub_len = self.sub_ahead_len() as usize; let idx:i32 = sub_len as i32 + count as i32 - 1; if idx >= self.cache_list.len() as i32 || idx < 0 { return None; }
slice(&self,s:usize,e:usize) -> &str { let mut idx = 0usize; let mut u8idx_e = 0usize; let mut u8idx_s = 0usize; for chr in self.source.chars() { if idx < s { u8idx_s += chr.len_utf8(); } if idx < e { u8idx_e += chr.len_utf8(); } else { break; } idx += 1 } return &self.source[u8idx_s..u8idx_e]; } } #[test] fn test_string() { let mut lex = LexString::new(" 1234∀56"); let aa = lex.take_while(|chr| chr.is_whitespace()); dbg!(aa.map(|c| c.len())); dbg!(lex.next()); }
return Some(self.cache_list[idx as usize]); } } pub fn lookback(&mut self,count:usize) -> Option<char> { let sub_ahead:i32 = self.sub_ahead_len(); if sub_ahead < 0 { return None; } if sub_ahead - count as i32 >= 0 { return Some(self.cache_list[sub_ahead as usize - count - 1]); } None } fn sub_ahead_len(&self) -> i32 { self.cache_list.len() as i32 - self.ahead_count as i32 } pub fn take_while<F>(&mut self,mut f:F) -> Option<&str> where F:FnMut(char) -> bool { let starti = self.cur_index; while let Some(chr) = self.lookahead(1) { if f(chr) { self.next(); } else { if starti == self.cur_index { return None; } return Some(self.slice(starti,self.cur_index)); } } if self.cur_index != starti { return Some(self.slice(starti,self.cur_index)); } None } pub fn put_rollback(&mut self,str:&str) { for chr in str.chars() { self.cache_list.push_back(chr); self.ahead_count += 1; } } pub fn put_back(&mut self,chr:char) { self.cache_list.push_back(chr); self.ahead_count += 1; } pub fn
random
[ { "content": "pub fn text_delta(tex:&str) -> (i32,i32) {\n\n let mut l = 0;\n\n let mut c = 0;\n\n for chr in tex.chars() {\n\n if chr == '\\n' {\n\n l += 1;\n\n c = 1;\n\n } else {\n\n c += 1;\n\n }\n\n }\n\n (l,c)\n\n}\n\n\n", "file_path...
Rust
mayastor/tests/replica_uri.rs
payes/mayastor
917c4f7cd8a3d8642307a7ea9165e2403bc4b028
use composer::RpcHandle; use rpc::mayastor::{ Bdev, CreateNexusRequest, CreatePoolRequest, CreateReplicaRequest, Null, Replica, ShareProtocolReplica, ShareReplicaRequest, }; use std::str::FromStr; use tracing::info; pub mod common; use common::compose::Builder; const DISKSIZE_KB: u64 = 96 * 1024; const VOLUME_SIZE_MB: u64 = (DISKSIZE_KB / 1024) / 2; const VOLUME_SIZE_B: u64 = VOLUME_SIZE_MB * 1024 * 1024; const VOLUME_UUID: &str = "cb9e1a5c-7af8-44a7-b3ae-05390be75d83"; fn pool_name(handle_index: usize) -> String { format!("pool{}", handle_index) } #[tokio::test] async fn replica_uri() { let test = Builder::new() .name("replica_uri") .network("10.1.0.0/16") .add_container("ms1") .add_container("ms2") .with_clean(true) .with_default_tracing() .build() .await .unwrap(); let mut hdls = test.grpc_handles().await.unwrap(); for (i, hdl) in hdls.iter_mut().enumerate() { hdl.mayastor .create_pool(CreatePoolRequest { name: pool_name(i), disks: vec![format!( "malloc:///disk0?size_mb={}", DISKSIZE_KB / 1024 )], }) .await .unwrap(); } let replica_nvmf = hdls[1] .mayastor .create_replica(CreateReplicaRequest { uuid: VOLUME_UUID.to_string(), pool: pool_name(1), size: VOLUME_SIZE_B, thin: false, share: ShareProtocolReplica::ReplicaNvmf as i32, }) .await .unwrap() .into_inner(); info!("Replica: {:?}", replica_nvmf); check_replica_uri(&mut hdls[1], &replica_nvmf).await; let replica_loopback = hdls[0] .mayastor .create_replica(CreateReplicaRequest { uuid: VOLUME_UUID.to_string(), pool: pool_name(0), size: VOLUME_SIZE_B, thin: false, share: ShareProtocolReplica::ReplicaNone as i32, }) .await .unwrap() .into_inner(); info!("Replica: {:?}", replica_loopback); check_replica_uri(&mut hdls[0], &replica_loopback).await; let replica_uri = hdls[0] .mayastor .share_replica(ShareReplicaRequest { uuid: VOLUME_UUID.to_string(), share: ShareProtocolReplica::ReplicaNvmf as i32, }) .await; info!("Replica: {:?}", replica_uri); assert!(replica_uri.unwrap().into_inner().uri.contains("uuid=")); let 
replica_uri = hdls[0] .mayastor .share_replica(ShareReplicaRequest { uuid: VOLUME_UUID.to_string(), share: ShareProtocolReplica::ReplicaNone as i32, }) .await; info!("Replica: {:?}", replica_uri); assert!(replica_uri.unwrap().into_inner().uri.contains("uuid=")); hdls[0] .mayastor .create_nexus(CreateNexusRequest { uuid: VOLUME_UUID.to_string(), size: VOLUME_SIZE_B, children: [replica_loopback.uri, replica_nvmf.uri].to_vec(), }) .await .unwrap(); } async fn get_bdev(handle: &mut RpcHandle) -> Bdev { let bdevs = handle.bdev.list(Null {}).await.unwrap().into_inner().bdevs; bdevs .iter() .find(|b| b.name == VOLUME_UUID) .expect("Should find our replica as a bdev") .clone() } async fn check_replica_uri(handle: &mut RpcHandle, replica: &Replica) { let bdev = get_bdev(handle).await; let replica_url = url::Url::from_str(&replica.uri).unwrap(); assert_eq!( replica_url.query().unwrap().replace("uuid=", ""), bdev.uuid ); assert_ne!(VOLUME_UUID, bdev.uuid); }
use composer::RpcHandle; use rpc::mayastor::{ Bdev, CreateNexusRequest, CreatePoolRequest, CreateReplicaRequest, Null, Replica, ShareProtocolReplica, ShareReplicaRequest, }; use std::str::FromStr; use tracing::info; pub mod common; use common::compose::Builder; const DISKSIZE_KB: u64 = 96 * 1024; const VOLUME_SIZE_MB: u64 = (DISKSIZE_KB / 1024) / 2; const VOLUME_SIZE_B: u64 = VOLUME_SIZE_MB * 1024 * 1024; const VOLUME_UUID: &str = "cb9e1a5c-7af8-44a7-b3ae-05390be75d83"; fn pool_name(handle_index: usize) -> String { format!("pool{}", handle_index) } #[tokio::test]
async fn get_bdev(handle: &mut RpcHandle) -> Bdev { let bdevs = handle.bdev.list(Null {}).await.unwrap().into_inner().bdevs; bdevs .iter() .find(|b| b.name == VOLUME_UUID) .expect("Should find our replica as a bdev") .clone() } async fn check_replica_uri(handle: &mut RpcHandle, replica: &Replica) { let bdev = get_bdev(handle).await; let replica_url = url::Url::from_str(&replica.uri).unwrap(); assert_eq!( replica_url.query().unwrap().replace("uuid=", ""), bdev.uuid ); assert_ne!(VOLUME_UUID, bdev.uuid); }
async fn replica_uri() { let test = Builder::new() .name("replica_uri") .network("10.1.0.0/16") .add_container("ms1") .add_container("ms2") .with_clean(true) .with_default_tracing() .build() .await .unwrap(); let mut hdls = test.grpc_handles().await.unwrap(); for (i, hdl) in hdls.iter_mut().enumerate() { hdl.mayastor .create_pool(CreatePoolRequest { name: pool_name(i), disks: vec![format!( "malloc:///disk0?size_mb={}", DISKSIZE_KB / 1024 )], }) .await .unwrap(); } let replica_nvmf = hdls[1] .mayastor .create_replica(CreateReplicaRequest { uuid: VOLUME_UUID.to_string(), pool: pool_name(1), size: VOLUME_SIZE_B, thin: false, share: ShareProtocolReplica::ReplicaNvmf as i32, }) .await .unwrap() .into_inner(); info!("Replica: {:?}", replica_nvmf); check_replica_uri(&mut hdls[1], &replica_nvmf).await; let replica_loopback = hdls[0] .mayastor .create_replica(CreateReplicaRequest { uuid: VOLUME_UUID.to_string(), pool: pool_name(0), size: VOLUME_SIZE_B, thin: false, share: ShareProtocolReplica::ReplicaNone as i32, }) .await .unwrap() .into_inner(); info!("Replica: {:?}", replica_loopback); check_replica_uri(&mut hdls[0], &replica_loopback).await; let replica_uri = hdls[0] .mayastor .share_replica(ShareReplicaRequest { uuid: VOLUME_UUID.to_string(), share: ShareProtocolReplica::ReplicaNvmf as i32, }) .await; info!("Replica: {:?}", replica_uri); assert!(replica_uri.unwrap().into_inner().uri.contains("uuid=")); let replica_uri = hdls[0] .mayastor .share_replica(ShareReplicaRequest { uuid: VOLUME_UUID.to_string(), share: ShareProtocolReplica::ReplicaNone as i32, }) .await; info!("Replica: {:?}", replica_uri); assert!(replica_uri.unwrap().into_inner().uri.contains("uuid=")); hdls[0] .mayastor .create_nexus(CreateNexusRequest { uuid: VOLUME_UUID.to_string(), size: VOLUME_SIZE_B, children: [replica_loopback.uri, replica_nvmf.uri].to_vec(), }) .await .unwrap(); }
function_block-full_function
[ { "content": "pub fn dd_urandom_file_size(device: &str, size: u64) -> String {\n\n let (exit, stdout, _stderr) = run_script::run(\n\n r#\"\n\n dd if=/dev/urandom of=$1 conv=fsync,nocreat,notrunc iflag=count_bytes count=$2\n\n \"#,\n\n &vec![device.into(), size.to_string()],\n\n ...
Rust
packages/profiler/src/instrumentation.rs
brew0722/cosmwasm-1
a0cf296c43aa092b81457d96a9c6bc2ab223f6d3
use std::{ path::Path, sync::{Arc, Mutex}, }; use cosmwasm_vm::{ testing::{MockApi, MockQuerier, MockStorage}, Backend, Instance, }; use loupe::MemoryUsage; use wasmer::{ internals::WithEnv, wasmparser::Operator, Exports, Function, FunctionMiddleware, HostFunction, LocalFunctionIndex, ModuleMiddleware, WasmerEnv, }; use wasmer_types::{FunctionIndex, ImportIndex}; use crate::{code_blocks::BlockStore, operators::OperatorSymbol}; pub enum Module<'d> { Path(&'d Path), #[cfg(test)] Bytes(&'d [u8]), } impl<'d> Module<'d> { pub fn from_path<P: AsRef<Path> + ?Sized>(path: &'d P) -> Self { Self::Path(path.as_ref()) } #[cfg(test)] pub fn from_bytes(bytes: &'d [u8]) -> Self { Self::Bytes(bytes) } pub fn instrument<Env, F1, F2>( &self, block_store: Arc<Mutex<BlockStore>>, env: Env, start_measurement_fn: F1, take_measurement_fn: F2, ) -> InstrumentedInstance where Env: WasmerEnv + 'static, F1: HostFunction<(u32, u32), (), WithEnv, Env>, F2: HostFunction<(u32, u32, u64), (), WithEnv, Env>, { let profiling = Arc::new(Profiling::new(block_store)); let mut walrus_module = match self { Module::Path(path) => walrus::Module::from_file(path).unwrap(), #[cfg(test)] Module::Bytes(bytes) => walrus::Module::from_buffer(bytes).unwrap(), }; add_imports(&mut walrus_module); let wasm = walrus_module.emit_wasm(); let wasmer_module = cosmwasm_vm::internals::compile(&wasm, None, &[profiling.clone()]).unwrap(); let store = wasmer_module.store(); let mut fns_to_import = Exports::new(); fns_to_import.insert( "start_measurement", Function::new_native_with_env(store, env.clone(), start_measurement_fn), ); fns_to_import.insert( "take_measurement", Function::new_native_with_env(store, env, take_measurement_fn), ); let backend = Backend { api: MockApi::default(), storage: MockStorage::default(), querier: MockQuerier::new(&[]), }; let instance = cosmwasm_vm::internals::instance_from_module( &wasmer_module, backend, 999999999, false, Some(vec![("profiling", fns_to_import)].into_iter().collect()), ) 
.unwrap(); InstrumentedInstance { profiling, instance, } } } type MockInstance = Instance<MockApi, MockStorage, MockQuerier>; pub struct InstrumentedInstance { #[allow(dead_code)] profiling: Arc<Profiling>, instance: MockInstance, } impl InstrumentedInstance { pub fn vm_instance(&mut self) -> &mut MockInstance { &mut self.instance } } fn add_imports(module: &mut walrus::Module) -> (usize, usize) { use walrus::ValType::*; let start_type = module.types.add(&[I32, I32], &[]); let take_type = module.types.add(&[I32, I32, I64], &[]); let (fn1, _) = module.add_import_func("profiling", "start_measurement", start_type); let (fn2, _) = module.add_import_func("profiling", "take_measurement", take_type); (fn1.index(), fn2.index()) } #[non_exhaustive] #[derive(Debug, MemoryUsage)] pub struct Profiling { block_store: Arc<Mutex<BlockStore>>, indexes: Mutex<Option<ProfilingIndexes>>, } impl Profiling { pub fn new(block_store: Arc<Mutex<BlockStore>>) -> Self { Self { block_store, indexes: Mutex::new(None), } } } impl ModuleMiddleware for Profiling { fn generate_function_middleware( &self, local_function_index: wasmer::LocalFunctionIndex, ) -> Box<dyn wasmer::FunctionMiddleware> { Box::new(FunctionProfiling::new( self.block_store.clone(), self.indexes.lock().unwrap().clone().unwrap(), local_function_index, )) } fn transform_module_info(&self, module_info: &mut wasmer_vm::ModuleInfo) { let mut indexes = self.indexes.lock().unwrap(); if indexes.is_some() { panic!("Profiling::transform_module_info: Attempting to use a `Profiling` middleware from multiple modules."); } let fn1 = module_info .imports .iter() .find_map(|((module, field, _), index)| { if (module.as_str(), field.as_str()) == ("profiling", "start_measurement") { if let ImportIndex::Function(fn_index) = index { return Some(fn_index); } } None }) .unwrap(); let fn2 = module_info .imports .iter() .find_map(|((module, field, _), index)| { if (module.as_str(), field.as_str()) == ("profiling", "take_measurement") { if let 
ImportIndex::Function(fn_index) = index { return Some(fn_index); } } None }) .unwrap(); *indexes = Some(ProfilingIndexes { start_measurement: *fn1, take_measurement: *fn2, }); } } #[derive(Debug)] struct FunctionProfiling { block_store: Arc<Mutex<BlockStore>>, accumulated_ops: Vec<OperatorSymbol>, indexes: ProfilingIndexes, block_count: u32, fn_index: LocalFunctionIndex, } impl FunctionProfiling { fn new( block_store: Arc<Mutex<BlockStore>>, indexes: ProfilingIndexes, fn_index: LocalFunctionIndex, ) -> Self { Self { block_store, accumulated_ops: Vec::new(), indexes, block_count: 0, fn_index, } } } impl FunctionMiddleware for FunctionProfiling { fn feed<'a>( &mut self, operator: wasmer::wasmparser::Operator<'a>, state: &mut wasmer::MiddlewareReaderState<'a>, ) -> Result<(), wasmer::MiddlewareError> { match operator { Operator::Loop { .. } | Operator::End | Operator::Else | Operator::Br { .. } | Operator::BrTable { .. } | Operator::BrIf { .. } | Operator::Call { .. } | Operator::CallIndirect { .. 
} | Operator::Return => { if !self.accumulated_ops.is_empty() { let mut store = self.block_store.lock().unwrap(); let block_id = store.register_block(std::mem::take(&mut self.accumulated_ops)); state.extend(&[ Operator::I32Const { value: self.fn_index.as_u32() as i32 }, Operator::I32Const { value: self.block_count as i32 }, Operator::I64Const { value: block_id.as_u64() as i64 }, Operator::Call{ function_index: self.indexes.take_measurement.as_u32() }, ]); } } _ => { if self.accumulated_ops.is_empty() { state.extend(&[ Operator::I32Const { value: self.fn_index.as_u32() as i32 }, Operator::I32Const { value: self.block_count as i32 }, Operator::Call{ function_index: self.indexes.start_measurement.as_u32() }, ]); } self.accumulated_ops.push((&operator).into()); } } state.push_operator(operator); Ok(()) } } #[derive(Debug, MemoryUsage, Clone)] struct ProfilingIndexes { start_measurement: FunctionIndex, take_measurement: FunctionIndex, } #[cfg(test)] mod tests { use super::*; use crate::code_blocks::CodeBlock; use std::sync::Arc; use wasmer::{wat2wasm, WasmerEnv}; const WAT: &[u8] = br#" (module (type $t0 (func (param i32) (result i32))) (func $add_one (export "add_one") (type $t0) (param $p0 i32) (result i32) get_local $p0 i32.const 1 i32.add) (func $multisub (export "multisub") (type $t0) (param $p0 i32) (result i32) get_local $p0 i32.const 2 i32.mul call $sub_one i32.const 1 i32.sub) (func $sub_one (type $t0) (param $p0 i32) (result i32) get_local $p0 i32.const 1 i32.sub)) "#; struct Fixture { instance: InstrumentedInstance, } #[derive(Debug, Clone, WasmerEnv)] struct FixtureEnv { start_calls: Arc<Mutex<Vec<(u32, u32)>>>, end_calls: Arc<Mutex<Vec<(u32, u32, u64)>>>, } impl FixtureEnv { fn new() -> Self { Self { start_calls: Arc::new(Mutex::new(Vec::new())), end_calls: Arc::new(Mutex::new(Vec::new())), } } } impl Fixture { fn new() -> Self { let wasm = wat2wasm(WAT).unwrap(); let module = Module::from_bytes(&wasm); let env = FixtureEnv::new(); let start_measurement_fn 
= |env: &FixtureEnv, fun: u32, block: u32| { env.start_calls.lock().unwrap().push((fun, block)); }; let take_measurement_fn = |env: &FixtureEnv, fun: u32, block: u32, hash: u64| { env.end_calls.lock().unwrap().push((fun, block, hash)); }; let block_store = Arc::new(Mutex::new(BlockStore::new())); Self { instance: module.instrument( block_store.clone(), env, start_measurement_fn, take_measurement_fn, ), } } } #[test] fn instrumentation_registers_code_blocks() { let fixture = Fixture::new(); let block_store = fixture.instance.profiling.block_store.lock().unwrap(); assert_eq!(block_store.len(), 4); println!("{:?}", block_store); let expected_block = CodeBlock::from(vec![ OperatorSymbol::LocalGet, OperatorSymbol::I32Const, OperatorSymbol::I32Add, ]); let block = block_store.get_block(expected_block.get_hash()); assert_eq!(block, Some(&expected_block)); let expected_block = CodeBlock::from(vec![ OperatorSymbol::LocalGet, OperatorSymbol::I32Const, OperatorSymbol::I32Sub, ]); let block = block_store.get_block(expected_block.get_hash()); assert_eq!(block, Some(&expected_block)); let expected_block = CodeBlock::from(vec![ OperatorSymbol::LocalGet, OperatorSymbol::I32Const, OperatorSymbol::I32Mul, ]); let block = block_store.get_block(expected_block.get_hash()); assert_eq!(block, Some(&expected_block)); let expected_block = CodeBlock::from(vec![OperatorSymbol::I32Const, OperatorSymbol::I32Sub]); let block = block_store.get_block(expected_block.get_hash()); assert_eq!(block, Some(&expected_block)); } }
use std::{ path::Path, sync::{Arc, Mutex}, }; use cosmwasm_vm::{ testing::{MockApi, MockQuerier, MockStorage}, Backend, Instance, }; use loupe::MemoryUsage; use wasmer::{ internals::WithEnv, wasmparser::Operator, Exports, Function, FunctionMiddleware, HostFunction, LocalFunctionIndex, ModuleMiddleware, WasmerEnv, }; use wasmer_types::{FunctionIndex, ImportIndex}; use crate::{code_blocks::BlockStore, operators::OperatorSymbol}; pub enum Module<'d> { Path(&'d Path), #[cfg(test)] Bytes(&'d [u8]), } impl<'d> Module<'d> { pub fn from_path<P: AsRef<Path> + ?Sized>(path: &'d P) -> Self { Self::Path(path.as_ref()) } #[cfg(test)] pub fn from_bytes(bytes: &'d [u8]) -> Self { Self::Bytes(bytes) }
} type MockInstance = Instance<MockApi, MockStorage, MockQuerier>; pub struct InstrumentedInstance { #[allow(dead_code)] profiling: Arc<Profiling>, instance: MockInstance, } impl InstrumentedInstance { pub fn vm_instance(&mut self) -> &mut MockInstance { &mut self.instance } } fn add_imports(module: &mut walrus::Module) -> (usize, usize) { use walrus::ValType::*; let start_type = module.types.add(&[I32, I32], &[]); let take_type = module.types.add(&[I32, I32, I64], &[]); let (fn1, _) = module.add_import_func("profiling", "start_measurement", start_type); let (fn2, _) = module.add_import_func("profiling", "take_measurement", take_type); (fn1.index(), fn2.index()) } #[non_exhaustive] #[derive(Debug, MemoryUsage)] pub struct Profiling { block_store: Arc<Mutex<BlockStore>>, indexes: Mutex<Option<ProfilingIndexes>>, } impl Profiling { pub fn new(block_store: Arc<Mutex<BlockStore>>) -> Self { Self { block_store, indexes: Mutex::new(None), } } } impl ModuleMiddleware for Profiling { fn generate_function_middleware( &self, local_function_index: wasmer::LocalFunctionIndex, ) -> Box<dyn wasmer::FunctionMiddleware> { Box::new(FunctionProfiling::new( self.block_store.clone(), self.indexes.lock().unwrap().clone().unwrap(), local_function_index, )) } fn transform_module_info(&self, module_info: &mut wasmer_vm::ModuleInfo) { let mut indexes = self.indexes.lock().unwrap(); if indexes.is_some() { panic!("Profiling::transform_module_info: Attempting to use a `Profiling` middleware from multiple modules."); } let fn1 = module_info .imports .iter() .find_map(|((module, field, _), index)| { if (module.as_str(), field.as_str()) == ("profiling", "start_measurement") { if let ImportIndex::Function(fn_index) = index { return Some(fn_index); } } None }) .unwrap(); let fn2 = module_info .imports .iter() .find_map(|((module, field, _), index)| { if (module.as_str(), field.as_str()) == ("profiling", "take_measurement") { if let ImportIndex::Function(fn_index) = index { return Some(fn_index); } 
} None }) .unwrap(); *indexes = Some(ProfilingIndexes { start_measurement: *fn1, take_measurement: *fn2, }); } } #[derive(Debug)] struct FunctionProfiling { block_store: Arc<Mutex<BlockStore>>, accumulated_ops: Vec<OperatorSymbol>, indexes: ProfilingIndexes, block_count: u32, fn_index: LocalFunctionIndex, } impl FunctionProfiling { fn new( block_store: Arc<Mutex<BlockStore>>, indexes: ProfilingIndexes, fn_index: LocalFunctionIndex, ) -> Self { Self { block_store, accumulated_ops: Vec::new(), indexes, block_count: 0, fn_index, } } } impl FunctionMiddleware for FunctionProfiling { fn feed<'a>( &mut self, operator: wasmer::wasmparser::Operator<'a>, state: &mut wasmer::MiddlewareReaderState<'a>, ) -> Result<(), wasmer::MiddlewareError> { match operator { Operator::Loop { .. } | Operator::End | Operator::Else | Operator::Br { .. } | Operator::BrTable { .. } | Operator::BrIf { .. } | Operator::Call { .. } | Operator::CallIndirect { .. } | Operator::Return => { if !self.accumulated_ops.is_empty() { let mut store = self.block_store.lock().unwrap(); let block_id = store.register_block(std::mem::take(&mut self.accumulated_ops)); state.extend(&[ Operator::I32Const { value: self.fn_index.as_u32() as i32 }, Operator::I32Const { value: self.block_count as i32 }, Operator::I64Const { value: block_id.as_u64() as i64 }, Operator::Call{ function_index: self.indexes.take_measurement.as_u32() }, ]); } } _ => { if self.accumulated_ops.is_empty() { state.extend(&[ Operator::I32Const { value: self.fn_index.as_u32() as i32 }, Operator::I32Const { value: self.block_count as i32 }, Operator::Call{ function_index: self.indexes.start_measurement.as_u32() }, ]); } self.accumulated_ops.push((&operator).into()); } } state.push_operator(operator); Ok(()) } } #[derive(Debug, MemoryUsage, Clone)] struct ProfilingIndexes { start_measurement: FunctionIndex, take_measurement: FunctionIndex, } #[cfg(test)] mod tests { use super::*; use crate::code_blocks::CodeBlock; use std::sync::Arc; use 
wasmer::{wat2wasm, WasmerEnv}; const WAT: &[u8] = br#" (module (type $t0 (func (param i32) (result i32))) (func $add_one (export "add_one") (type $t0) (param $p0 i32) (result i32) get_local $p0 i32.const 1 i32.add) (func $multisub (export "multisub") (type $t0) (param $p0 i32) (result i32) get_local $p0 i32.const 2 i32.mul call $sub_one i32.const 1 i32.sub) (func $sub_one (type $t0) (param $p0 i32) (result i32) get_local $p0 i32.const 1 i32.sub)) "#; struct Fixture { instance: InstrumentedInstance, } #[derive(Debug, Clone, WasmerEnv)] struct FixtureEnv { start_calls: Arc<Mutex<Vec<(u32, u32)>>>, end_calls: Arc<Mutex<Vec<(u32, u32, u64)>>>, } impl FixtureEnv { fn new() -> Self { Self { start_calls: Arc::new(Mutex::new(Vec::new())), end_calls: Arc::new(Mutex::new(Vec::new())), } } } impl Fixture { fn new() -> Self { let wasm = wat2wasm(WAT).unwrap(); let module = Module::from_bytes(&wasm); let env = FixtureEnv::new(); let start_measurement_fn = |env: &FixtureEnv, fun: u32, block: u32| { env.start_calls.lock().unwrap().push((fun, block)); }; let take_measurement_fn = |env: &FixtureEnv, fun: u32, block: u32, hash: u64| { env.end_calls.lock().unwrap().push((fun, block, hash)); }; let block_store = Arc::new(Mutex::new(BlockStore::new())); Self { instance: module.instrument( block_store.clone(), env, start_measurement_fn, take_measurement_fn, ), } } } #[test] fn instrumentation_registers_code_blocks() { let fixture = Fixture::new(); let block_store = fixture.instance.profiling.block_store.lock().unwrap(); assert_eq!(block_store.len(), 4); println!("{:?}", block_store); let expected_block = CodeBlock::from(vec![ OperatorSymbol::LocalGet, OperatorSymbol::I32Const, OperatorSymbol::I32Add, ]); let block = block_store.get_block(expected_block.get_hash()); assert_eq!(block, Some(&expected_block)); let expected_block = CodeBlock::from(vec![ OperatorSymbol::LocalGet, OperatorSymbol::I32Const, OperatorSymbol::I32Sub, ]); let block = 
block_store.get_block(expected_block.get_hash()); assert_eq!(block, Some(&expected_block)); let expected_block = CodeBlock::from(vec![ OperatorSymbol::LocalGet, OperatorSymbol::I32Const, OperatorSymbol::I32Mul, ]); let block = block_store.get_block(expected_block.get_hash()); assert_eq!(block, Some(&expected_block)); let expected_block = CodeBlock::from(vec![OperatorSymbol::I32Const, OperatorSymbol::I32Sub]); let block = block_store.get_block(expected_block.get_hash()); assert_eq!(block, Some(&expected_block)); } }
pub fn instrument<Env, F1, F2>( &self, block_store: Arc<Mutex<BlockStore>>, env: Env, start_measurement_fn: F1, take_measurement_fn: F2, ) -> InstrumentedInstance where Env: WasmerEnv + 'static, F1: HostFunction<(u32, u32), (), WithEnv, Env>, F2: HostFunction<(u32, u32, u64), (), WithEnv, Env>, { let profiling = Arc::new(Profiling::new(block_store)); let mut walrus_module = match self { Module::Path(path) => walrus::Module::from_file(path).unwrap(), #[cfg(test)] Module::Bytes(bytes) => walrus::Module::from_buffer(bytes).unwrap(), }; add_imports(&mut walrus_module); let wasm = walrus_module.emit_wasm(); let wasmer_module = cosmwasm_vm::internals::compile(&wasm, None, &[profiling.clone()]).unwrap(); let store = wasmer_module.store(); let mut fns_to_import = Exports::new(); fns_to_import.insert( "start_measurement", Function::new_native_with_env(store, env.clone(), start_measurement_fn), ); fns_to_import.insert( "take_measurement", Function::new_native_with_env(store, env, take_measurement_fn), ); let backend = Backend { api: MockApi::default(), storage: MockStorage::default(), querier: MockQuerier::new(&[]), }; let instance = cosmwasm_vm::internals::instance_from_module( &wasmer_module, backend, 999999999, false, Some(vec![("profiling", fns_to_import)].into_iter().collect()), ) .unwrap(); InstrumentedInstance { profiling, instance, } }
function_block-full_function
[ { "content": "#[allow(dead_code)] // used in Wasm and tests only\n\npub fn encode_sections(sections: &[&[u8]]) -> Vec<u8> {\n\n let mut out_len: usize = sections.iter().map(|section| section.len()).sum();\n\n out_len += 4 * sections.len();\n\n let mut out_data = Vec::with_capacity(out_len);\n\n for ...
Rust
src/lib.rs
newpavlov/parstream
6f55caa8187ba5b7a635f26a6996a5585bb5a00e
use std::collections::BinaryHeap; use std::collections::binary_heap::PeekMut; use std::cmp; use std::sync::atomic::AtomicIsize; use std::sync::atomic::Ordering; use crossbeam_channel as channel; use crossbeam_utils::thread as cb_thread; struct State<T> { pos: usize, payload: T, } impl<T> PartialEq for State<T> { fn eq(&self, other: &Self) -> bool { self.pos == other.pos } } impl<T> Eq for State<T> { } impl<T> Ord for State<T> { fn cmp(&self, other: &Self) -> cmp::Ordering { other.pos.cmp(&self.pos) } } impl<T> PartialOrd for State<T> { fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> { Some(self.cmp(other)) } } enum ReportMsg<T, E> { None, NewResult((usize, Result<T, E>)), } fn run_report<T, E>( rx: channel::Receiver<ReportMsg<T, E>>, mut f: impl FnMut(T) -> Result<(), E>, flag: &AtomicIsize, ) -> Result<(), E> { let mut buf: BinaryHeap<State<T>> = BinaryHeap::new(); let mut n = 0; use self::ReportMsg::*; for val in rx.iter() { let target = flag.load(Ordering::Acquire); if target < 0 { break } match val { NewResult((i, payload)) => { let payload = payload.map_err(Into::into)?; if i != n { buf.push(State { pos: i, payload: payload }); continue; } f(payload).map_err(Into::into)?; n += 1; while let Some(pm) = buf.peek_mut() { assert!(pm.pos >= n); if pm.pos != n { break } f(PeekMut::pop(pm).payload).map_err(Into::into)?; n += 1 } }, None => (), } if target as usize == n { break; } } Ok(()) } const FLAG_INIT: isize = 0; const FLAG_ERROR: isize = -1; const FLAG_WORKER_PANIC: isize = -2; const FLAG_REPORT_PANIC: isize = -3; pub fn run<X: Send, Y: Send, E: Send>( xs: impl IntoIterator<Item=X>, threads: usize, f: impl Fn(X) -> Result<Y, E> + Sync, report: impl FnMut(Y) -> Result<(), E> + Send ) -> Result<usize, E> { let (tx, rx) = channel::bounded(2*threads); let (tx2, rx2) = channel::bounded(2*threads); let flag = &AtomicIsize::new(FLAG_INIT); let mut result = Ok(0); cb_thread::scope(|scope| { for _ in 0..threads { let rxc = rx.clone(); let txc = tx2.clone(); 
let fp = &f; scope.spawn(move |_| { for x in rxc.iter() { if flag.load(Ordering::Acquire) < 0 { break } match x { Some((i, x)) => { let res = (i, fp(x)) ; let r = txc.send(ReportMsg::NewResult(res)); if r.is_err() { break; } }, None => break, } } }); } let res = &mut result; scope.spawn(move |_| { if let Err(err) = run_report(rx2, report, flag) { flag.store(FLAG_ERROR, Ordering::Release); *res = Err(err); } }); let mut n = 0; for val in xs.into_iter().enumerate() { if flag.load(Ordering::Acquire) < 0 { break } n += 1; tx.send(Some(val)).unwrap(); } if flag.load(Ordering::Acquire) >= 0 { flag.store(n as isize, Ordering::Release); tx2.send(ReportMsg::None).unwrap(); } else { while let Ok(_) = rx.try_recv() {} } for _ in 0..threads { tx.send(None).unwrap(); } }).unwrap(); match flag.load(Ordering::Acquire) { n if n >= 0 => { Ok(n as usize) }, FLAG_ERROR => result, FLAG_WORKER_PANIC => panic!("worker thread has panicked"), FLAG_REPORT_PANIC => panic!("report thread has panicked"), _ => unreachable!(), } }
use std::collections::BinaryHeap; use std::collections::binary_heap::PeekMut; use std::cmp; use std::sync::atomic::AtomicIsize; use std::sync::atomic::Ordering; use crossbeam_channel as channel; use crossbeam_utils::thread as cb_thread; struct State<T> { pos: usize, payload: T, } impl<T> PartialEq for State<T> { fn eq(&self, other: &Self) -> bool { self.pos == other.pos } } impl<T> Eq for State<T> { } impl<T> Ord for State<T> { fn cmp(&self, other: &Self) -> cmp::Ordering { other.pos.cmp(&self.pos) } } impl<T> PartialOrd for State<T> { fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> { Some(self.cmp(other)) } } enum ReportMsg<T, E> { None, NewResult((usize, Result<T, E>)), } fn run_report<T, E>( rx: channel::Receiver<ReportMsg<T, E>>, mut f: impl FnMut(T) -> Result<(), E>, flag: &AtomicIsize, ) -> Result<(), E> { let mut buf: BinaryHeap<State<T>> = BinaryHeap::new(); let mut n = 0; use self::ReportMsg::*; for val in rx.iter() { let target = flag.load(Ordering::Acquire); if target < 0 { break } match val { NewResult((i, payload)) => { let payload = payload.map_err(Into::into)?; if i != n { buf.push(State { pos: i, payload: payload }); continue; } f(payload).map_err(Into::into)?; n += 1; while let Some(pm) = buf.peek_mut() { assert!(pm.pos >= n); if pm.pos != n { break } f(PeekMut::pop(pm).payload).map_err(Into::into)?; n += 1 } }, None => (), } if target as usize == n { break; } } Ok(()) } const FLAG_INIT: isize = 0; const FLAG_ERROR: isize = -1; const FLAG_WORKER_PANIC: isize = -2; const FLAG_REPORT_PANIC: isize = -3;
pub fn run<X: Send, Y: Send, E: Send>( xs: impl IntoIterator<Item=X>, threads: usize, f: impl Fn(X) -> Result<Y, E> + Sync, report: impl FnMut(Y) -> Result<(), E> + Send ) -> Result<usize, E> { let (tx, rx) = channel::bounded(2*threads); let (tx2, rx2) = channel::bounded(2*threads); let flag = &AtomicIsize::new(FLAG_INIT); let mut result = Ok(0); cb_thread::scope(|scope| { for _ in 0..threads { let rxc = rx.clone(); let txc = tx2.clone(); let fp = &f; scope.spawn(move |_| { for x in rxc.iter() { if flag.load(Ordering::Acquire) < 0 { break } match x { Some((i, x)) => { let res = (i, fp(x)) ; let r = txc.send(ReportMsg::NewResult(res)); if r.is_err() { break; } }, None => break, } } }); } let res = &mut result; scope.spawn(move |_| { if let Err(err) = run_report(rx2, report, flag) { flag.store(FLAG_ERROR, Ordering::Release); *res = Err(err); } }); let mut n = 0; for val in xs.into_iter().enumerate() { if flag.load(Ordering::Acquire) < 0 { break } n += 1; tx.send(Some(val)).unwrap(); } if flag.load(Ordering::Acquire) >= 0 { flag.store(n as isize, Ordering::Release); tx2.send(ReportMsg::None).unwrap(); } else { while let Ok(_) = rx.try_recv() {} } for _ in 0..threads { tx.send(None).unwrap(); } }).unwrap(); match flag.load(Ordering::Acquire) { n if n >= 0 => { Ok(n as usize) }, FLAG_ERROR => result, FLAG_WORKER_PANIC => panic!("worker thread has panicked"), FLAG_REPORT_PANIC => panic!("report thread has panicked"), _ => unreachable!(), } }
function_block-full_function
[ { "content": "# parstream [![crates.io](https://img.shields.io/crates/v/parstream.svg)](https://crates.io/crates/parstream) [![Documentation](https://docs.rs/parstream/badge.svg)](https://docs.rs/parstream) [![Build Status](https://travis-ci.org/newpavlov/parstream.svg?branch=master)](https://travis-ci.org/newp...
Rust
src/tests/data_type/interval.rs
Snowapril/gluesql
eaca157102aca8ad05c4f73c0d25349169d9fb8f
use crate::*; test_case!(interval, async move { run!( r#" CREATE TABLE IntervalLog ( id INTEGER, interval1 INTERVAL, interval2 INTERVAL, )"# ); run!( r#" INSERT INTO IntervalLog VALUES (1, INTERVAL "1-2" YEAR TO MONTH, INTERVAL "30" MONTH), (2, INTERVAL "12" DAY, INTERVAL "35" HOUR), (3, INTERVAL "12" MINUTE, INTERVAL "300" SECOND), (4, INTERVAL "-3 14" DAY TO HOUR, INTERVAL "3 12:30" DAY TO MINUTE), (5, INTERVAL "3 14:00:00" DAY TO SECOND, INTERVAL "3 12:30:12.1324" DAY TO SECOND), (6, INTERVAL "12:00" HOUR TO MINUTE, INTERVAL "-12:30:12" HOUR TO SECOND), (7, INTERVAL "-1000-11" YEAR TO MONTH, INTERVAL "-30:11" MINUTE TO SECOND); "# ); use data::Interval as I; use Value::*; test!( Ok(select!( id | interval1 | interval2 I64 | Interval | Interval; 1 I::months(14) I::months(30); 2 I::days(12) I::hours(35); 3 I::minutes(12) I::minutes(5); 4 I::hours(-86) I::minutes(84 * 60 + 30); 5 I::minutes(86 * 60) I::microseconds((((84 * 60) + 30) * 60 + 12) * 1_000_000 + 132_400); 6 I::hours(12) I::seconds(-(12 * 3600 + 30 * 60 + 12)); 7 I::months(-12_011) I::seconds(-(30 * 60 + 11)) )), "SELECT * FROM IntervalLog;" ); test!( Ok(select!( id | i1 | i2 I64 | Interval | Interval; 1 I::months(28) I::months(66) )), r#"SELECT id, interval1 * 2 AS i1, interval2 - INTERVAL "-3" YEAR AS i2 FROM IntervalLog WHERE id = 1"# ); test!( Ok(select!( id | i1 | i2 | i3 I64 | Interval | Interval | Interval; 2 I::days(4) I::hours(34) I::minutes(1) )), r#"SELECT id, interval1 / 3 AS i1, interval2 - INTERVAL "3600" SECOND AS i2, INTERVAL "30" SECOND + INTERVAL "10" SECOND * 3 AS i3 FROM IntervalLog WHERE id = 2;"# ); test!( Err(IntervalError::UnsupportedRange("Minute".to_owned(), "Hour".to_owned()).into()), r#"INSERT INTO IntervalLog VALUES (1, INTERVAL "20:00" MINUTE TO HOUR, INTERVAL "1-2" YEAR TO MONTH)"# ); test!( Err(IntervalError::AddBetweenYearToMonthAndHourToSecond.into()), r#"SELECT INTERVAL "1" YEAR + INTERVAL "1" HOUR FROM IntervalLog;"# ); test!( 
Err(IntervalError::SubtractBetweenYearToMonthAndHourToSecond.into()), r#"SELECT INTERVAL "1" YEAR - INTERVAL "1" HOUR FROM IntervalLog;"# ); test!( Err(IntervalError::FailedToParseInteger("1.4".to_owned()).into()), r#"SELECT INTERVAL "1.4" YEAR FROM IntervalLog;"# ); test!( Err(IntervalError::FailedToParseDecimal("1.4ab".to_owned()).into()), r#"SELECT INTERVAL "1.4ab" HOUR FROM IntervalLog;"# ); test!( Err(IntervalError::FailedToParseTime("111:34".to_owned()).into()), r#"SELECT INTERVAL "111:34" HOUR TO MINUTE FROM IntervalLog;"# ); test!( Err(IntervalError::FailedToParseYearToMonth("111".to_owned()).into()), r#"SELECT INTERVAL "111" YEAR TO MONTH FROM IntervalLog;"# ); test!( Err(IntervalError::FailedToParseDayToHour("111".to_owned()).into()), r#"SELECT INTERVAL "111" DAY TO HOUR FROM IntervalLog;"# ); test!( Err(IntervalError::FailedToParseDayToHour("111".to_owned()).into()), r#"SELECT INTERVAL "111" DAY TO HOUR FROM IntervalLog;"# ); test!( Err(IntervalError::FailedToParseDayToMinute("111".to_owned()).into()), r#"SELECT INTERVAL "111" DAY TO MINUTE FROM IntervalLog;"# ); test!( Err(IntervalError::FailedToParseDayToSecond("111".to_owned()).into()), r#"SELECT INTERVAL "111" DAY TO Second FROM IntervalLog;"# ); });
use crate::*; test_case!(interval, async move { run!( r#" CREATE TABLE IntervalLog ( id INTEGER, interval1 INTERVAL, interval2 INTERVAL, )"# ); run!( r#" INSERT INTO IntervalLog VALUES (1, INTERVAL "1-2" YEAR TO MONTH, INTERVAL "30" MONTH), (2, INTERVAL "12" DAY, INTERVAL "35" HOUR), (3, INTERVAL "12" MINUTE, INTERVAL "300" SECOND), (4, INTERVAL "-3 14" DAY TO HOUR, INTERVAL "3 12:30" DAY TO MINUTE), (5, INTERVAL "3 14:00:00" DAY TO SECOND, INTERVAL "3 12:30:12.1324" DAY TO SECOND), (6, INTERVAL "12:00" HOUR TO MINUTE, INTERVAL "-12:30:12" HOUR TO SECOND), (7, INTERVAL "-1000-11" YEAR TO MONTH, INTERVAL "-30:11" MINUTE TO SECOND); "# ); use data::Interval as I; use Value::*; test!( Ok(select!( id | interval1 | interval2 I64 | Interval | Interval; 1 I::months(14) I::months(30); 2 I::days(12) I::hours(35); 3 I::minutes(12) I::minutes(5); 4 I::hours(-86) I::minutes(84 * 60 + 30); 5 I::minutes(86 * 60) I::microseconds((((84 * 60) + 30) * 60 + 12) * 1_000_000 + 132_400); 6 I::hours(12) I::seconds(-(12 * 3600 + 30 * 60 + 12)); 7
Err(IntervalError::FailedToParseDecimal("1.4ab".to_owned()).into()), r#"SELECT INTERVAL "1.4ab" HOUR FROM IntervalLog;"# ); test!( Err(IntervalError::FailedToParseTime("111:34".to_owned()).into()), r#"SELECT INTERVAL "111:34" HOUR TO MINUTE FROM IntervalLog;"# ); test!( Err(IntervalError::FailedToParseYearToMonth("111".to_owned()).into()), r#"SELECT INTERVAL "111" YEAR TO MONTH FROM IntervalLog;"# ); test!( Err(IntervalError::FailedToParseDayToHour("111".to_owned()).into()), r#"SELECT INTERVAL "111" DAY TO HOUR FROM IntervalLog;"# ); test!( Err(IntervalError::FailedToParseDayToHour("111".to_owned()).into()), r#"SELECT INTERVAL "111" DAY TO HOUR FROM IntervalLog;"# ); test!( Err(IntervalError::FailedToParseDayToMinute("111".to_owned()).into()), r#"SELECT INTERVAL "111" DAY TO MINUTE FROM IntervalLog;"# ); test!( Err(IntervalError::FailedToParseDayToSecond("111".to_owned()).into()), r#"SELECT INTERVAL "111" DAY TO Second FROM IntervalLog;"# ); });
I::months(-12_011) I::seconds(-(30 * 60 + 11)) )), "SELECT * FROM IntervalLog;" ); test!( Ok(select!( id | i1 | i2 I64 | Interval | Interval; 1 I::months(28) I::months(66) )), r#"SELECT id, interval1 * 2 AS i1, interval2 - INTERVAL "-3" YEAR AS i2 FROM IntervalLog WHERE id = 1"# ); test!( Ok(select!( id | i1 | i2 | i3 I64 | Interval | Interval | Interval; 2 I::days(4) I::hours(34) I::minutes(1) )), r#"SELECT id, interval1 / 3 AS i1, interval2 - INTERVAL "3600" SECOND AS i2, INTERVAL "30" SECOND + INTERVAL "10" SECOND * 3 AS i3 FROM IntervalLog WHERE id = 2;"# ); test!( Err(IntervalError::UnsupportedRange("Minute".to_owned(), "Hour".to_owned()).into()), r#"INSERT INTO IntervalLog VALUES (1, INTERVAL "20:00" MINUTE TO HOUR, INTERVAL "1-2" YEAR TO MONTH)"# ); test!( Err(IntervalError::AddBetweenYearToMonthAndHourToSecond.into()), r#"SELECT INTERVAL "1" YEAR + INTERVAL "1" HOUR FROM IntervalLog;"# ); test!( Err(IntervalError::SubtractBetweenYearToMonthAndHourToSecond.into()), r#"SELECT INTERVAL "1" YEAR - INTERVAL "1" HOUR FROM IntervalLog;"# ); test!( Err(IntervalError::FailedToParseInteger("1.4".to_owned()).into()), r#"SELECT INTERVAL "1.4" YEAR FROM IntervalLog;"# ); test!(
random
[ { "content": "CREATE TABLE IdxValue (\n\n id INTEGER NULL,\n\n time TIME NULL,\n\n flag BOOLEAN\n\n)\"#\n\n );\n\n\n\n run!(\n\n r#\"\n\n INSERT INTO IdxValue\n\n VALUES\n\n (NULL, \"01:30 PM\", True),\n\n (1, \"12:10 AM\", False),\n\n (2, ...
Rust
wiz/wiz_syntax/src/syntax/expression.rs
ChanTsune/wiz
199d0f4698822a177ede8015bf8e04f190f39934
mod array_syntax; mod binary_operation_syntax; mod call_syntax; mod if_syntax; mod member_syntax; mod name_syntax; mod parenthesized_syntax; mod return_syntax; mod subscript_syntax; mod type_cast_syntax; mod unary_operation_syntax; pub use crate::syntax::expression::array_syntax::{ArrayElementSyntax, ArraySyntax}; pub use crate::syntax::expression::binary_operation_syntax::BinaryOperationSyntax; pub use crate::syntax::expression::call_syntax::{ ArgLabelSyntax, CallArg, CallArgElementSyntax, CallArgListSyntax, CallExprSyntax, LambdaSyntax, }; pub use crate::syntax::expression::if_syntax::{ElseSyntax, IfExprSyntax}; pub use crate::syntax::expression::member_syntax::MemberSyntax; pub use crate::syntax::expression::name_syntax::NameExprSyntax; pub use crate::syntax::expression::parenthesized_syntax::ParenthesizedExprSyntax; pub use crate::syntax::expression::return_syntax::ReturnSyntax; pub use crate::syntax::expression::subscript_syntax::{ SubscriptIndexElementSyntax, SubscriptIndexListSyntax, SubscriptSyntax, }; pub use crate::syntax::expression::type_cast_syntax::TypeCastSyntax; pub use crate::syntax::expression::unary_operation_syntax::{ PostfixUnaryOperationSyntax, PrefixUnaryOperationSyntax, UnaryOperationSyntax, }; use crate::syntax::literal::LiteralSyntax; use crate::syntax::token::TokenSyntax; use crate::syntax::trivia::Trivia; use crate::syntax::type_name::TypeArgumentListSyntax; use crate::syntax::Syntax; #[derive(Debug, Eq, PartialEq, Clone)] pub enum Expr { Name(NameExprSyntax), Literal(LiteralSyntax), BinOp(BinaryOperationSyntax), UnaryOp(UnaryOperationSyntax), Subscript(SubscriptSyntax), Member(MemberSyntax), Array(ArraySyntax), Tuple { values: Vec<Expr>, }, Dict { }, StringBuilder { }, Call(CallExprSyntax), If(IfExprSyntax), When { }, Lambda(LambdaSyntax), Return(ReturnSyntax), TypeCast(TypeCastSyntax), Parenthesized(ParenthesizedExprSyntax), } impl Syntax for Expr { fn with_leading_trivia(self, trivia: Trivia) -> Self { match self { Expr::Name(n) => 
Expr::Name(n.with_leading_trivia(trivia)), Expr::Literal(l) => Expr::Literal(l.with_leading_trivia(trivia)), Expr::BinOp(b) => Expr::BinOp(b.with_leading_trivia(trivia)), Expr::UnaryOp(u) => Expr::UnaryOp(u.with_leading_trivia(trivia)), Expr::Subscript(s) => Expr::Subscript(s.with_leading_trivia(trivia)), Expr::Member(m) => Expr::Member(m.with_leading_trivia(trivia)), Expr::Array(a) => Expr::Array(a.with_leading_trivia(trivia)), Expr::Tuple { .. } => { todo!() } Expr::Dict { .. } => { todo!() } Expr::StringBuilder { .. } => { todo!() } Expr::Call(c) => Expr::Call(c.with_leading_trivia(trivia)), Expr::If(i) => Expr::If(i.with_leading_trivia(trivia)), Expr::When { .. } => { todo!() } Expr::Lambda(_) => { todo!() } Expr::Return(r) => Expr::Return(r.with_leading_trivia(trivia)), Expr::TypeCast(t) => Expr::TypeCast(t.with_leading_trivia(trivia)), Expr::Parenthesized(p) => Expr::Parenthesized(p.with_leading_trivia(trivia)), } } fn with_trailing_trivia(self, trivia: Trivia) -> Self { match self { Expr::Name(n) => Expr::Name(n.with_trailing_trivia(trivia)), Expr::Literal(l) => Expr::Literal(l.with_trailing_trivia(trivia)), Expr::BinOp(b) => Expr::BinOp(b.with_trailing_trivia(trivia)), Expr::UnaryOp(u) => Expr::UnaryOp(u.with_trailing_trivia(trivia)), Expr::Subscript(s) => Expr::Subscript(s.with_trailing_trivia(trivia)), Expr::Member(m) => Expr::Member(m.with_trailing_trivia(trivia)), Expr::Array(a) => Expr::Array(a.with_trailing_trivia(trivia)), Expr::Tuple { .. } => { todo!() } Expr::Dict { .. } => { todo!() } Expr::StringBuilder { .. } => { todo!() } Expr::Call(c) => Expr::Call(c.with_trailing_trivia(trivia)), Expr::If(i) => Expr::If(i.with_trailing_trivia(trivia)), Expr::When { .. 
} => { todo!() } Expr::Lambda(_) => { todo!() } Expr::Return(r) => Expr::Return(r.with_trailing_trivia(trivia)), Expr::TypeCast(t) => Expr::TypeCast(t.with_trailing_trivia(trivia)), Expr::Parenthesized(p) => Expr::Parenthesized(p.with_trailing_trivia(trivia)), } } } #[derive(Debug, Eq, PartialEq, Clone)] pub enum PostfixSuffix { Operator(String), TypeArgumentSuffix(TypeArgumentListSyntax), CallSuffix { args: Option<CallArgListSyntax>, tailing_lambda: Option<LambdaSyntax>, }, IndexingSuffix(SubscriptIndexListSyntax), NavigationSuffix { navigation: TokenSyntax, name: TokenSyntax, }, }
mod array_syntax; mod binary_operation_syntax; mod call_syntax; mod if_syntax; mod member_syntax; mod name_syntax; mod parenthesized_syntax; mod return_syntax; mod subscript_syntax; mod type_cast_syntax; mod unary_operation_syntax; pub use crate::syntax::expression::array_syntax::{ArrayElementSyntax, ArraySyntax}; pub use crate::syntax::expression::binary_operation_syntax::BinaryOperationSyntax; pub use crate::syntax::expression::call_syntax::{ ArgLabelSyntax, CallArg, CallArgElementSyntax, CallArgListSyntax, CallExprSyntax, LambdaSyntax, }; pub use crate::syntax::expression::if_syntax::{ElseSyntax, IfExprSyntax}; pub use crate::syntax::expression::member_syntax::MemberSyntax; pub use crate::syntax::expression::name_syntax::NameExprSyntax; pub use crate::syntax::expression::parenthesized_syntax::ParenthesizedExprSyntax; pub use crate::syntax::expression::return_syntax::ReturnSyntax; pub use crate::syntax::expression::subscript_syntax::{ SubscriptIndexElementSyntax, SubscriptIndexListSyntax, SubscriptSyntax, }; pub use crate::syntax::expression::type_cast_syntax::TypeCastSyntax; pub use crate::syntax::expression::unary_operation_syntax::{ PostfixUnaryOperationSyntax, PrefixUnaryOperationSyntax, UnaryOperationSyntax, }; use crate::syntax::literal::LiteralSyntax; use crate::syntax::token::TokenSyntax; use crate::syntax::trivia::Trivia; use crate::syntax::type_name::TypeArgumentListSyntax; use crate::syntax::Syntax; #[derive(Debug, Eq, PartialEq, Clone)] pub enum Expr { Name(NameExprSyntax), Literal(LiteralSyntax), BinOp(BinaryOperationSyntax), UnaryOp(UnaryOperationSyntax), Subscript(SubscriptSyntax), Member(MemberSyntax), Array(ArraySyntax), Tuple { values: Vec<Expr>, }, Dict { }, StringBuilder { }, Call(CallExprSyntax), If(IfExprSyntax), When { }, Lambda(LambdaSyntax), Return(ReturnSyntax), TypeCast(TypeCastSyntax), Parenthesized(ParenthesizedExprSyntax), } impl Syntax for Expr { fn with_leading_trivia(self, trivia: Trivia) -> Self { match self { Expr::Name(n) => 
Expr::Name(n.with_leading_trivia(trivia)), Expr::Literal(l) => Expr::Literal(l.with_leading_trivia(trivia)), Expr::BinOp(b) => Expr::BinOp(b.with_leading_trivia(trivia)), Expr::UnaryOp(u) => Expr::UnaryOp(u.with_leading_trivia(trivia)), Expr::Subscript(s) => Expr::Subscript(s.with_leading_trivia(trivia)), Expr::Member(m) => Expr::Member(m.with_leading_trivia(trivia)), Expr::Array(a) => Expr::Array(a.with_leading_trivia(trivia)), Expr::Tuple { .. } => { todo!() } Expr::Dict { .. } => { todo!() } Expr::StringBuilder { .. } => { todo!() } Expr::Call(c) => Expr::Call(c.with_leading_trivia(trivia)), Expr::If(i) => Expr::If(i.with_leading_trivia(trivia)),
fn with_trailing_trivia(self, trivia: Trivia) -> Self { match self { Expr::Name(n) => Expr::Name(n.with_trailing_trivia(trivia)), Expr::Literal(l) => Expr::Literal(l.with_trailing_trivia(trivia)), Expr::BinOp(b) => Expr::BinOp(b.with_trailing_trivia(trivia)), Expr::UnaryOp(u) => Expr::UnaryOp(u.with_trailing_trivia(trivia)), Expr::Subscript(s) => Expr::Subscript(s.with_trailing_trivia(trivia)), Expr::Member(m) => Expr::Member(m.with_trailing_trivia(trivia)), Expr::Array(a) => Expr::Array(a.with_trailing_trivia(trivia)), Expr::Tuple { .. } => { todo!() } Expr::Dict { .. } => { todo!() } Expr::StringBuilder { .. } => { todo!() } Expr::Call(c) => Expr::Call(c.with_trailing_trivia(trivia)), Expr::If(i) => Expr::If(i.with_trailing_trivia(trivia)), Expr::When { .. } => { todo!() } Expr::Lambda(_) => { todo!() } Expr::Return(r) => Expr::Return(r.with_trailing_trivia(trivia)), Expr::TypeCast(t) => Expr::TypeCast(t.with_trailing_trivia(trivia)), Expr::Parenthesized(p) => Expr::Parenthesized(p.with_trailing_trivia(trivia)), } } } #[derive(Debug, Eq, PartialEq, Clone)] pub enum PostfixSuffix { Operator(String), TypeArgumentSuffix(TypeArgumentListSyntax), CallSuffix { args: Option<CallArgListSyntax>, tailing_lambda: Option<LambdaSyntax>, }, IndexingSuffix(SubscriptIndexListSyntax), NavigationSuffix { navigation: TokenSyntax, name: TokenSyntax, }, }
Expr::When { .. } => { todo!() } Expr::Lambda(_) => { todo!() } Expr::Return(r) => Expr::Return(r.with_leading_trivia(trivia)), Expr::TypeCast(t) => Expr::TypeCast(t.with_leading_trivia(trivia)), Expr::Parenthesized(p) => Expr::Parenthesized(p.with_leading_trivia(trivia)), } }
function_block-function_prefix_line
[ { "content": "pub trait Syntax: Debug + Eq + PartialEq + Clone {\n\n fn with_leading_trivia(self, trivia: Trivia) -> Self;\n\n fn with_trailing_trivia(self, trivia: Trivia) -> Self;\n\n fn span(&self) -> Location {\n\n Location::default()\n\n }\n\n fn id(&self) -> NodeId {\n\n NodeI...
Rust
src/thread_pool.rs
tom-heimbrodt/stateful_async_worker
fc7f9994a0a8112ea599232359151fe7a07ebdc2
use std::thread; use std::future::Future; use std::sync::Arc; use crossbeam::channel::{self, Sender}; use crate::*; type TaskFunc<ThreadState, Result> = dyn FnOnce(&ThreadState) -> Result + Send + 'static; type BoxedTaskFunc<ThreadState, Result> = Box<TaskFunc<ThreadState, Result>>; pub struct ThreadPool<ThreadState, Result> { sender: Sender<Task<BoxedTaskFunc<ThreadState, Result>, Result>>, } impl<ThreadState, Result> Clone for ThreadPool<ThreadState, Result> { fn clone(&self) -> Self { Self { sender: self.sender.clone(), } } } impl<ThreadState, Result> ThreadPool<ThreadState, Result> where ThreadState: Default + Send + Sync + 'static, Result: Send + 'static { pub fn spawn() -> Self { Self::spawn_with(Arc::new(Default::default())) } pub fn spawn_exactly(thread_count: usize) -> Self { Self::spawn_exactly_with(Arc::new(Default::default()), thread_count) } } impl<ThreadState, Result> ThreadPool<ThreadState, Result> where ThreadState: Sync + Send + 'static, Result: Send + 'static { pub fn spawn_with<T>(data: T) -> Self where T: Into<Arc<ThreadState>> { Self::spawn_exactly_with(data, num_cpus::get()) } pub fn spawn_exactly_with<T>(data: T, thread_count: usize) -> Self where T: Into<Arc<ThreadState>> { let (input_tx, input_rx) = channel::unbounded(); let data = data.into(); for _ in 0..thread_count { let input_rx = input_rx.clone(); let data = Arc::clone(&data); thread::spawn(move || { loop { if let Ok(task) = input_rx.recv() { let task: Task<BoxedTaskFunc<ThreadState, Result>, Result> = task; let result = (task.func)(&*data); task.future.complete(result); } else { return; } } }); } Self { sender: input_tx, } } pub async fn work_on<F>(&self, func: F) -> Result where F: FnOnce(&ThreadState) -> Result + Send + 'static { self.work_on_boxed_inner(Box::new(func)).await } pub async fn work_on_boxed(&self, func: BoxedTaskFunc<ThreadState, Result>) -> Result { self.work_on_boxed_inner(func).await } fn work_on_boxed_inner(&self, func: BoxedTaskFunc<ThreadState, Result>) -> impl 
Future<Output = Result> { let future = MutexFuture::new(); let future_ = future.clone(); self.sender.send(Task { func, future }).unwrap(); future_ } } #[cfg(test)] mod tests { use super::*; use std::time::{Duration, Instant}; #[test] fn test_thread_pool_concurrency() { futures::executor::block_on(test_concurrency()); } async fn test_concurrency() { let worker = ThreadPool::spawn_exactly(3); let long_computation1 = worker.work_on(|num: &i64| { thread::sleep(Duration::from_millis(100)); *num }); let long_computation2 = worker.work_on(|num: &i64| { thread::sleep(Duration::from_millis(100)); *num }); let long_computation3 = worker.work_on(|num: &i64| { thread::sleep(Duration::from_millis(50)); *num }); let start = Instant::now(); let (a, b, c) = futures::future::join3(long_computation1, long_computation2, long_computation3).await; assert_eq!(a, 0); assert_eq!(b, 0); assert_eq!(c, 0); let elapsed = start.elapsed(); assert!(elapsed.as_millis() < 120); } #[test] fn test_thread_pool_rwlock() { futures::executor::block_on(test_rwlock()); } use std::sync::RwLock; async fn test_rwlock() { let worker = ThreadPool::spawn_exactly_with(RwLock::new(0), 4); let long_computation1 = worker.work_on(|num: &RwLock<u64>| { thread::sleep(Duration::from_millis(80)); let mut num = num.write().unwrap(); *num += 1; *num }); let long_computation2 = worker.work_on(|num| { thread::sleep(Duration::from_millis(100)); let mut num = num.write().unwrap(); *num += 1; *num }); let long_computation3 = worker.work_on(|num| { thread::sleep(Duration::from_millis(60)); let mut num = num.write().unwrap(); *num += 1; *num }); let long_computation4 = worker.work_on(|num| { thread::sleep(Duration::from_millis(20)); let mut num = num.write().unwrap(); *num += 1; *num }); let (a, b, c, d) = futures::future::join4( long_computation1, long_computation2, long_computation3, long_computation4).await; println!("{:?}", (a, b, c, d)); assert_eq!(a, 3); assert_eq!(b, 4); assert_eq!(c, 2); assert_eq!(d, 1); } }
use std::thread; use std::future::Future; use std::sync::Arc; use crossbeam::channel::{self, Sender}; use crate::*; type TaskFunc<ThreadState, Result> = dyn FnOnce(&ThreadState) -> Result + Send + 'static; type BoxedTaskFunc<ThreadState, Result> = Box<TaskFunc<ThreadState, Result>>; pub struct ThreadPool<ThreadState, Result> { sender: Sender<Task<BoxedTaskFunc<ThreadState, Result>, Result>>, } impl<ThreadState, Result> Clone for ThreadPool<ThreadState, Result> { fn clone(&self) -> Self { Self { sender: self.sender.clone(), } } } impl<ThreadState, Result> ThreadPool<ThreadState, Result> where ThreadState: Default + Send + Sync + 'static, Result: Send + 'static { pub fn spawn() -> Self { Self::spawn_with(Arc::new(Default::default())) } pub fn spawn_exactly(thread_count: usize) -> Self { Self::spawn_exactly_with(Arc::new(Default::default()), thread_count) } } impl<ThreadState, Result> ThreadPool<ThreadState, Result> where ThreadState: Sync + Send + 'static, Result: Send + 'static { pub fn spawn_with<T>(data: T) -> Self where T: Into<Arc<ThreadState>> { Self::spawn_exactly_with(data, num_cpus::get()) } pub fn spawn_exactly_with<T>(data: T, thread_count: usize) -> Self where T: Into<Arc<ThreadState>> { let (input_tx, input_rx) = channel::unbounded(); let data = data.into(); for _ in 0..thread_count { let input_rx = input_rx.clone(); let data = Arc::clone(&data); thread::spawn(move || { loop { if let Ok(task) = input_rx.recv() { let task: Task<BoxedTaskFunc<ThreadState, Result>, Result> = task; let result = (task.func)(&*data); task.future.complete(result); } else { return; } } }); } Self { sender: input_tx, } } pub async fn work_on<F>(&self, func: F) -> Result where F: FnOnce(&ThreadState) -> Result + Send + 'static { self.work_on_boxed_inner(Box::new(func)).await } pub async fn work_on_boxed(&self, func: BoxedTaskFunc<ThreadState, Result>) -> Result { self.work_on_boxed_inner(func).await } fn work_on_boxed_inner(&self, func: BoxedTaskFunc<ThreadState, Result>) -> impl 
Future<Output = Result> { let future = MutexFuture::new(); let future_ = future.clone(); self.sender.send(Task { func, future }).unwrap(); future_ } } #[cfg(test)] mod tests { use super::*; use std::time::{Duration, Instant}; #[test] fn test_thread_pool_concurrency() { futures::executor::block_on(test_concurrency()); } async fn test_concurrency() { let worker = ThreadPool::spawn_exactly(3); let long_computation1 = worker.work_on(|num: &i64| { thread::sleep(Duration::from_millis(100)); *num }); let long_computation2 = worker.work_on(|num: &i64| { thread::sleep(Duration::from_millis(100)); *num }); let long_computation3 = worker.work_on(|num: &i64| { thread::sleep(Duration::from_millis(50)); *num }); let start = Instant::now(); let (a, b, c) = futures::future::join3(long_computation1, long_computation2, long_computation3).await; assert_eq!(a, 0); assert_eq!(b, 0); assert_eq!(c, 0); let elapsed = start.elapsed(); assert!(elapsed.as_millis() < 120); } #[test] fn test_thread_pool_rwlock() { futures::executor::block_on(test_rwlock()); } use std::sync::RwLock;
}
async fn test_rwlock() { let worker = ThreadPool::spawn_exactly_with(RwLock::new(0), 4); let long_computation1 = worker.work_on(|num: &RwLock<u64>| { thread::sleep(Duration::from_millis(80)); let mut num = num.write().unwrap(); *num += 1; *num }); let long_computation2 = worker.work_on(|num| { thread::sleep(Duration::from_millis(100)); let mut num = num.write().unwrap(); *num += 1; *num }); let long_computation3 = worker.work_on(|num| { thread::sleep(Duration::from_millis(60)); let mut num = num.write().unwrap(); *num += 1; *num }); let long_computation4 = worker.work_on(|num| { thread::sleep(Duration::from_millis(20)); let mut num = num.write().unwrap(); *num += 1; *num }); let (a, b, c, d) = futures::future::join4( long_computation1, long_computation2, long_computation3, long_computation4).await; println!("{:?}", (a, b, c, d)); assert_eq!(a, 3); assert_eq!(b, 4); assert_eq!(c, 2); assert_eq!(d, 1); }
function_block-full_function
[ { "content": "type TaskFunc<ThreadState, Result> = dyn FnOnce(&mut ThreadState) -> Result + Send + 'static;\n", "file_path": "src/worker_thread.rs", "rank": 0, "score": 128334.61925333255 }, { "content": "struct Task<F, Result> {\n\n func: F,\n\n future: MutexFuture<Result>,\n\n}\n", ...
Rust
rive-rs/src/shapes/metrics_path.rs
Kangz/cassia
e003a0b7109b039a76e3d40103a2ad198a350222
use std::num::NonZeroUsize; use crate::{ math::{self, Mat}, shapes::command_path::{Command, CommandPath, CommandPathBuilder}, }; #[derive(Clone, Copy, Debug)] struct CubicSegment { t: f32, length: f32, } #[derive(Clone, Copy, Debug, Eq, PartialEq)] enum PathPartType { Line, Cubic(NonZeroUsize), } #[derive(Clone, Copy, Debug)] struct PathPart { r#type: PathPartType, offset: usize, num_segments: usize, } impl PathPart { pub fn line(offset: usize) -> Self { Self { r#type: PathPartType::Line, offset, num_segments: 0, } } pub fn cubic(offset: usize) -> Self { Self { r#type: PathPartType::Cubic(NonZeroUsize::new(1).unwrap()), offset, num_segments: 0, } } } fn compute_hull( from: math::Vec, from_out: math::Vec, to_in: math::Vec, to: math::Vec, t: f32, hull: &mut [math::Vec; 6], ) { hull[0] = from.lerp(from_out, t); hull[1] = from_out.lerp(to_in, t); hull[2] = to_in.lerp(to, t); hull[3] = hull[0].lerp(hull[1], t); hull[4] = hull[1].lerp(hull[2], t); hull[5] = hull[3].lerp(hull[4], t); } fn too_far(a: math::Vec, b: math::Vec) -> bool { const TOO_FAR: f32 = 1.0; (a.x - b.x).abs().max((a.y - b.y).abs()) > TOO_FAR } fn should_split_cubic( from: math::Vec, from_out: math::Vec, to_in: math::Vec, to: math::Vec, ) -> bool { let one_third = from.lerp(to, 1.0 / 3.0); let two_thirds = from.lerp(to, 2.0 / 3.0); too_far(from_out, one_third) || too_far(to_in, two_thirds) } fn segment_cubic( from: math::Vec, from_out: math::Vec, to_in: math::Vec, to: math::Vec, mut running_length: f32, t1: f32, t2: f32, segments: &mut Vec<CubicSegment>, ) -> f32 { const MIN_SEGMENT_LENGTH: f32 = 0.05; if should_split_cubic(from, from_out, to_in, to) { let half_t = (t1 + t2) / 2.0; let mut hull = [math::Vec::default(); 6]; compute_hull(from, from_out, to_in, to, 0.5, &mut hull); running_length = segment_cubic( from, hull[0], hull[3], hull[5], running_length, t1, half_t, segments, ); running_length = segment_cubic( hull[5], hull[4], hull[2], to, running_length, t1, half_t, segments, ); } else { let length 
= from.distance(to); running_length += length; if length > MIN_SEGMENT_LENGTH { segments.push(CubicSegment { t: t2, length: running_length, }); } } running_length } #[derive(Debug)] pub struct MetricsPath { points: Vec<math::Vec>, transformed_points: Vec<math::Vec>, cubic_segments: Vec<CubicSegment>, parts: Vec<PathPart>, lengths: Vec<f32>, computed_length: f32, computed_length_transform: Mat, } impl MetricsPath { pub fn new(command_path: &CommandPath) -> Self { let mut points = Vec::new(); let mut parts = Vec::new(); for command in &command_path.commands { match *command { Command::MoveTo(p) => points.push(p), Command::LineTo(p) => { parts.push(PathPart::line(points.len())); points.push(p); } Command::CubicTo(c0, c1, p) => { parts.push(PathPart::cubic(points.len())); points.push(c0); points.push(c1); points.push(p); } Command::Close => { if parts.last().map(|part| part.r#type) == Some(PathPartType::Line) { parts.push(PathPart::line(points.len())); points.push(points[0]); } } } } Self { points, transformed_points: Vec::new(), cubic_segments: Vec::new(), parts, lengths: Vec::new(), computed_length: 0.0, computed_length_transform: Mat::default(), } } pub fn compute_length(&mut self, transform: Mat) -> f32 { if !self.lengths.is_empty() && transform == self.computed_length_transform { return self.computed_length; } self.computed_length_transform = transform; self.transformed_points.clear(); self.cubic_segments.clear(); self.lengths.clear(); self.transformed_points .extend(self.points.iter().map(|&point| transform * point)); let mut i = 0; let mut length = 0.0; for part in &mut self.parts { match part.r#type { PathPartType::Line => { let from = self.transformed_points[i]; let to = self.transformed_points[i + 1]; i += 1; let part_length = from.distance(to); self.lengths.push(part_length); length += part_length; } PathPartType::Cubic(ref mut ci) => { let from = self.transformed_points[i]; let from_out = self.transformed_points[i + 1]; let to_in = self.transformed_points[i 
+ 2]; let to = self.transformed_points[i + 3]; i += 3; let index = self.cubic_segments.len(); *ci = NonZeroUsize::new(index + 1).unwrap(); let part_length = segment_cubic( from, from_out, to_in, to, 0.0, 0.0, 1.0, &mut self.cubic_segments, ); self.lengths.push(part_length); length += part_length; part.num_segments = self.cubic_segments.len() - index; } } } self.computed_length = length; length } pub fn trimmed(&self, start_length: f32, end_length: f32, move_to: bool) -> CommandPath { assert!(end_length >= start_length); let mut builder = CommandPathBuilder::new(); if start_length == end_length || self.parts.is_empty() { return builder.build(); } let parts_and_lengths = self.lengths.iter().scan(0.0, |length, &part_length| { let old_length = *length; *length += part_length; Some((old_length, part_length)) }); let first_part = parts_and_lengths .clone() .enumerate() .find(|(_, (length, part_length))| length + part_length > start_length) .map(|(i, (length, part_length))| (i, (start_length - length) / part_length)); match first_part { None => return builder.build(), Some((first_part_index, start_t)) => { let (last_part_index, end_t) = parts_and_lengths .enumerate() .skip(first_part_index) .find(|(_, (length, part_length))| length + part_length > end_length) .map(|(i, (length, part_length))| (i, (end_length - length) / part_length)) .unwrap_or_else(|| (self.parts.len(), 1.0)); if first_part_index == last_part_index { self.extract_sub_part(first_part_index, start_t, end_t, move_to, &mut builder); } else { self.extract_sub_part(first_part_index, start_t, 1.0, move_to, &mut builder); for part in &self.parts[first_part_index..last_part_index] { match part.r#type { PathPartType::Line => { builder.line_to(self.transformed_points[part.offset]); } PathPartType::Cubic(_) => { builder.cubic_to( self.transformed_points[part.offset], self.transformed_points[part.offset + 1], self.transformed_points[part.offset + 2], ); } } } self.extract_sub_part(last_part_index, 0.0, end_t, false, 
&mut builder); } } } builder.build() } fn extract_sub_part( &self, i: usize, mut start_t: f32, mut end_t: f32, move_to: bool, builder: &mut CommandPathBuilder, ) { assert!(start_t >= 0.0); assert!(start_t <= 1.0); assert!(end_t >= 0.0); assert!(end_t <= 1.0); let part = self.parts[i]; match part.r#type { PathPartType::Line => { let from = self.transformed_points[part.offset - 1]; let to = self.transformed_points[part.offset]; let dir = to - from; if move_to { builder.move_to(from + dir * start_t); } builder.line_to(from + dir * end_t); } PathPartType::Cubic(ci) => { let starting_segment_index = ci.get() - 1; let mut start_end_segment_index = starting_segment_index; let ending_segment_index = starting_segment_index + part.num_segments; let length = self.lengths[i]; if start_t != 0.0 { let start_length = start_t * length; for si in starting_segment_index..ending_segment_index { let segment = self.cubic_segments[si]; if segment.length >= start_length { if si == starting_segment_index { start_t = segment.t * (start_length / segment.length); } else { let previous_length = self.cubic_segments[si - 1].length; let t = (start_length - previous_length) / (segment.length - previous_length); start_t = math::lerp(self.cubic_segments[si - 1].t, segment.t, t); } start_end_segment_index = si; } } } if end_t != 1.0 { let end_length = end_t * length; for si in start_end_segment_index..ending_segment_index { let segment = self.cubic_segments[si]; if segment.length >= end_length { if si == starting_segment_index { end_t = segment.t * (end_length / segment.length); } else { let previous_length = self.cubic_segments[si - 1].length; let t = (end_length - previous_length) / (segment.length - previous_length); end_t = math::lerp(self.cubic_segments[si - 1].t, segment.t, t); } } } } let mut hull = [math::Vec::default(); 6]; let from = self.transformed_points[part.offset - 1]; let from_out = self.transformed_points[part.offset]; let to_in = self.transformed_points[part.offset + 1]; let to = 
self.transformed_points[part.offset + 2]; if start_t == 0.0 { compute_hull(from, from_out, to_in, to, end_t, &mut hull); if move_to { builder.move_to(from); } builder.cubic_to(hull[0], hull[3], hull[5]); } else { compute_hull(from, from_out, to_in, to, start_t, &mut hull); if move_to { builder.move_to(hull[5]); } if end_t == 1.0 { builder.cubic_to(hull[4], hull[2], to); } else { compute_hull( hull[5], hull[4], hull[2], to, (end_t - start_t) / (1.0 - start_t), &mut hull, ); builder.cubic_to(hull[0], hull[3], hull[5]); } } } } } }
use std::num::NonZeroUsize; use crate::{ math::{self, Mat}, shapes::command_path::{Command, CommandPath, CommandPathBuilder}, }; #[derive(Clone, Copy, Debug)] struct CubicSegment { t: f32, length: f32, } #[derive(Clone, Copy, Debug, Eq, PartialEq)] enum PathPartType { Line, Cubic(NonZeroUsize), } #[derive(Clone, Copy, Debug)] struct PathPart { r#type: PathPartType, offset: usize, num_segments: usize, } impl PathPart { pub fn line(offset: usize) -> Self { Self { r#type: PathPartType::Line, offset, num_segments: 0, } } pub fn cubic(offset: usize) -> Self { Self { r#type: PathPartType::Cubic(NonZeroUsize::new(1).unwrap()), offset, num_segments: 0, } } } fn compute_hull( from: math::Vec, from_out: math::Vec, to_in: math::Vec, to: math::Vec, t: f32, hull: &mut [math::Vec; 6], ) { hull[0] = from.lerp(from_out, t); hull[1] = from_out.lerp(to_in, t); hull[2] = to_in.lerp(to, t); hull[3] = hull[0].lerp(hull[1], t); hull[4] = hull[1].lerp(hull[2], t); hull[5] = hull[3].lerp(hull[4], t); } fn too_far(a: math::Vec, b: math::Vec) -> bool { const TOO_FAR: f32 = 1.0; (a.x - b.x).abs().max((a.y - b.y).abs()) > TOO_FAR } fn should_split_cubic( from: math::Vec, from_out: math::Vec, to_in: math::Vec, to: math::Vec, ) -> bool { let one_third = from.lerp(to, 1.0 / 3.0); let two_thirds = from.lerp(to, 2.0 / 3.0); too_far(from_out, one_third) || too_far(to_in, two_thirds) } fn segment_cubic( from: math::Vec, from_out: math::Vec, to_in: math::Vec, to: math::Vec, mut running_length: f32, t1: f32, t2: f32, segments: &mut Vec<CubicSegment>, ) -> f32 { const MI
#[derive(Debug)] pub struct MetricsPath { points: Vec<math::Vec>, transformed_points: Vec<math::Vec>, cubic_segments: Vec<CubicSegment>, parts: Vec<PathPart>, lengths: Vec<f32>, computed_length: f32, computed_length_transform: Mat, } impl MetricsPath { pub fn new(command_path: &CommandPath) -> Self { let mut points = Vec::new(); let mut parts = Vec::new(); for command in &command_path.commands { match *command { Command::MoveTo(p) => points.push(p), Command::LineTo(p) => { parts.push(PathPart::line(points.len())); points.push(p); } Command::CubicTo(c0, c1, p) => { parts.push(PathPart::cubic(points.len())); points.push(c0); points.push(c1); points.push(p); } Command::Close => { if parts.last().map(|part| part.r#type) == Some(PathPartType::Line) { parts.push(PathPart::line(points.len())); points.push(points[0]); } } } } Self { points, transformed_points: Vec::new(), cubic_segments: Vec::new(), parts, lengths: Vec::new(), computed_length: 0.0, computed_length_transform: Mat::default(), } } pub fn compute_length(&mut self, transform: Mat) -> f32 { if !self.lengths.is_empty() && transform == self.computed_length_transform { return self.computed_length; } self.computed_length_transform = transform; self.transformed_points.clear(); self.cubic_segments.clear(); self.lengths.clear(); self.transformed_points .extend(self.points.iter().map(|&point| transform * point)); let mut i = 0; let mut length = 0.0; for part in &mut self.parts { match part.r#type { PathPartType::Line => { let from = self.transformed_points[i]; let to = self.transformed_points[i + 1]; i += 1; let part_length = from.distance(to); self.lengths.push(part_length); length += part_length; } PathPartType::Cubic(ref mut ci) => { let from = self.transformed_points[i]; let from_out = self.transformed_points[i + 1]; let to_in = self.transformed_points[i + 2]; let to = self.transformed_points[i + 3]; i += 3; let index = self.cubic_segments.len(); *ci = NonZeroUsize::new(index + 1).unwrap(); let part_length = 
segment_cubic( from, from_out, to_in, to, 0.0, 0.0, 1.0, &mut self.cubic_segments, ); self.lengths.push(part_length); length += part_length; part.num_segments = self.cubic_segments.len() - index; } } } self.computed_length = length; length } pub fn trimmed(&self, start_length: f32, end_length: f32, move_to: bool) -> CommandPath { assert!(end_length >= start_length); let mut builder = CommandPathBuilder::new(); if start_length == end_length || self.parts.is_empty() { return builder.build(); } let parts_and_lengths = self.lengths.iter().scan(0.0, |length, &part_length| { let old_length = *length; *length += part_length; Some((old_length, part_length)) }); let first_part = parts_and_lengths .clone() .enumerate() .find(|(_, (length, part_length))| length + part_length > start_length) .map(|(i, (length, part_length))| (i, (start_length - length) / part_length)); match first_part { None => return builder.build(), Some((first_part_index, start_t)) => { let (last_part_index, end_t) = parts_and_lengths .enumerate() .skip(first_part_index) .find(|(_, (length, part_length))| length + part_length > end_length) .map(|(i, (length, part_length))| (i, (end_length - length) / part_length)) .unwrap_or_else(|| (self.parts.len(), 1.0)); if first_part_index == last_part_index { self.extract_sub_part(first_part_index, start_t, end_t, move_to, &mut builder); } else { self.extract_sub_part(first_part_index, start_t, 1.0, move_to, &mut builder); for part in &self.parts[first_part_index..last_part_index] { match part.r#type { PathPartType::Line => { builder.line_to(self.transformed_points[part.offset]); } PathPartType::Cubic(_) => { builder.cubic_to( self.transformed_points[part.offset], self.transformed_points[part.offset + 1], self.transformed_points[part.offset + 2], ); } } } self.extract_sub_part(last_part_index, 0.0, end_t, false, &mut builder); } } } builder.build() } fn extract_sub_part( &self, i: usize, mut start_t: f32, mut end_t: f32, move_to: bool, builder: &mut 
CommandPathBuilder, ) { assert!(start_t >= 0.0); assert!(start_t <= 1.0); assert!(end_t >= 0.0); assert!(end_t <= 1.0); let part = self.parts[i]; match part.r#type { PathPartType::Line => { let from = self.transformed_points[part.offset - 1]; let to = self.transformed_points[part.offset]; let dir = to - from; if move_to { builder.move_to(from + dir * start_t); } builder.line_to(from + dir * end_t); } PathPartType::Cubic(ci) => { let starting_segment_index = ci.get() - 1; let mut start_end_segment_index = starting_segment_index; let ending_segment_index = starting_segment_index + part.num_segments; let length = self.lengths[i]; if start_t != 0.0 { let start_length = start_t * length; for si in starting_segment_index..ending_segment_index { let segment = self.cubic_segments[si]; if segment.length >= start_length { if si == starting_segment_index { start_t = segment.t * (start_length / segment.length); } else { let previous_length = self.cubic_segments[si - 1].length; let t = (start_length - previous_length) / (segment.length - previous_length); start_t = math::lerp(self.cubic_segments[si - 1].t, segment.t, t); } start_end_segment_index = si; } } } if end_t != 1.0 { let end_length = end_t * length; for si in start_end_segment_index..ending_segment_index { let segment = self.cubic_segments[si]; if segment.length >= end_length { if si == starting_segment_index { end_t = segment.t * (end_length / segment.length); } else { let previous_length = self.cubic_segments[si - 1].length; let t = (end_length - previous_length) / (segment.length - previous_length); end_t = math::lerp(self.cubic_segments[si - 1].t, segment.t, t); } } } } let mut hull = [math::Vec::default(); 6]; let from = self.transformed_points[part.offset - 1]; let from_out = self.transformed_points[part.offset]; let to_in = self.transformed_points[part.offset + 1]; let to = self.transformed_points[part.offset + 2]; if start_t == 0.0 { compute_hull(from, from_out, to_in, to, end_t, &mut hull); if move_to { 
builder.move_to(from); } builder.cubic_to(hull[0], hull[3], hull[5]); } else { compute_hull(from, from_out, to_in, to, start_t, &mut hull); if move_to { builder.move_to(hull[5]); } if end_t == 1.0 { builder.cubic_to(hull[4], hull[2], to); } else { compute_hull( hull[5], hull[4], hull[2], to, (end_t - start_t) / (1.0 - start_t), &mut hull, ); builder.cubic_to(hull[0], hull[3], hull[5]); } } } } } }
N_SEGMENT_LENGTH: f32 = 0.05; if should_split_cubic(from, from_out, to_in, to) { let half_t = (t1 + t2) / 2.0; let mut hull = [math::Vec::default(); 6]; compute_hull(from, from_out, to_in, to, 0.5, &mut hull); running_length = segment_cubic( from, hull[0], hull[3], hull[5], running_length, t1, half_t, segments, ); running_length = segment_cubic( hull[5], hull[4], hull[2], to, running_length, t1, half_t, segments, ); } else { let length = from.distance(to); running_length += length; if length > MIN_SEGMENT_LENGTH { segments.push(CubicSegment { t: t2, length: running_length, }); } } running_length }
function_block-function_prefixed
[ { "content": "pub fn lerp(a: f32, b: f32, ratio: f32) -> f32 {\n\n a + (b - a) * ratio\n\n}\n\n\n", "file_path": "rive-rs/src/math/mod.rs", "rank": 0, "score": 197363.3596401754 }, { "content": "fn lerp(t: f32, a: f32, b: f32) -> f32 {\n\n a * (1.0 - t) + b * t\n\n}\n\n\n\nmacro_rules!...
Rust
github/src/user.rs
gregnazario/bors
74537e15dac8136122b6d5d201cea649226a2e7b
use super::{DateTime, NodeId}; use serde::Deserialize; #[derive(Clone, Debug, PartialEq, Deserialize)] pub enum UserType { Bot, Organization, User, } #[derive(Clone, Debug, Deserialize)] pub struct User { pub login: String, pub id: u64, pub node_id: NodeId, pub avatar_url: String, pub gravatar_id: String, pub url: String, pub html_url: String, pub followers_url: String, pub following_url: String, pub gists_url: String, pub starred_url: String, pub subscriptions_url: String, pub organizations_url: String, pub repos_url: String, pub events_url: String, pub received_events_url: String, #[serde(rename = "type")] pub user_type: UserType, pub site_admin: bool, } #[derive(Clone, Debug, Deserialize)] pub struct Team { id: u64, node_id: NodeId, url: String, html_url: String, name: String, slug: String, description: Option<String>, privacy: String, permission: String, members_url: String, repositories_url: String, parent: Option<Box<Team>>, } #[derive(Clone, Debug, Deserialize)] pub struct Pusher { name: String, email: String, } #[derive(Clone, Debug, Deserialize)] pub struct Key { id: u64, key: String, url: String, title: String, read_only: bool, created_at: DateTime, verified: bool, } #[cfg(test)] mod test { use super::{User, UserType}; #[test] fn user() { const USER_JSON: &str = r#" { "login": "Codertocat", "id": 21031067, "node_id": "MDQ6VXNlcjIxMDMxMDY3", "avatar_url": "https://avatars1.githubusercontent.com/u/21031067?v=4", "gravatar_id": "", "url": "https://api.github.com/users/Codertocat", "html_url": "https://github.com/Codertocat", "followers_url": "https://api.github.com/users/Codertocat/followers", "following_url": "https://api.github.com/users/Codertocat/following{/other_user}", "gists_url": "https://api.github.com/users/Codertocat/gists{/gist_id}", "starred_url": "https://api.github.com/users/Codertocat/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/Codertocat/subscriptions", "organizations_url": 
"https://api.github.com/users/Codertocat/orgs", "repos_url": "https://api.github.com/users/Codertocat/repos", "events_url": "https://api.github.com/users/Codertocat/events{/privacy}", "received_events_url": "https://api.github.com/users/Codertocat/received_events", "type": "User", "site_admin": false } "#; let user: User = serde_json::from_str(USER_JSON).unwrap(); assert_eq!(user.user_type, UserType::User); } #[test] fn org() { const ORGANIZATION_JSON: &str = r#" { "login": "Octocoders", "id": 38302899, "node_id": "MDEyOk9yZ2FuaXphdGlvbjM4MzAyODk5", "avatar_url": "https://avatars1.githubusercontent.com/u/38302899?v=4", "gravatar_id": "", "url": "https://api.github.com/users/Octocoders", "html_url": "https://github.com/Octocoders", "followers_url": "https://api.github.com/users/Octocoders/followers", "following_url": "https://api.github.com/users/Octocoders/following{/other_user}", "gists_url": "https://api.github.com/users/Octocoders/gists{/gist_id}", "starred_url": "https://api.github.com/users/Octocoders/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/Octocoders/subscriptions", "organizations_url": "https://api.github.com/users/Octocoders/orgs", "repos_url": "https://api.github.com/users/Octocoders/repos", "events_url": "https://api.github.com/users/Octocoders/events{/privacy}", "received_events_url": "https://api.github.com/users/Octocoders/received_events", "type": "Organization", "site_admin": false } "#; let user: User = serde_json::from_str(ORGANIZATION_JSON).unwrap(); assert_eq!(user.user_type, UserType::Organization); } }
use super::{DateTime, NodeId}; use serde::Deserialize; #[derive(Clone, Debug, PartialEq, Deserialize)] pub enum UserType { Bot, Organization, User, } #[derive(Clone, Debug, Deserialize)] pub struct User { pub login: String, pub id: u64, pub node_id: NodeId, pub avatar_url: String, pub gravatar_id: String, pub url: String, pub html_url: String, pub followers_url: String, pub following_url: String, pub gists_url: String, pub starred_url: String, pub subscriptions_url: String, pub organizations_url: String, pub repos_url: String, pub events_url: String, pub received_events_url: String, #[serde(rename = "type")] pub user_type: UserType, pub site_admin: bool, } #[derive(Clone, Debug, Deserialize)] pub struct Team { id: u64, node_id: NodeId, url: String, html_url: String, name: String, slug: String, description: Option<String>, privacy: String, permission: String, members_url: String, repositories_url: String, parent: Option<Box<Team>>, } #[derive(Clone, Debug, Deserialize)] pub struct Pusher { name: String, email: String, } #[derive(Clone, Debug, Deserialize)] pub struct Key { id: u64, key: String, url: String, title: String, read_only: bool, created_at: DateTime, verified: bool, } #[cfg(test)] mod test { use super::{User, UserType}; #[test] fn user() { const USER_JSON: &str = r#" { "login": "Codertocat", "id": 21031067, "node_id": "MDQ6VXNlcjIxMDMxMDY3", "avatar_url": "https://avatars1.githubusercontent.com/u/21031067?v=4", "gravatar_id": "", "url": "https://api.github.com/users/Codertocat", "html_url": "https://github.com/Codertocat", "followers_url": "https://api.github.com/users/Codertocat/followers", "following_url": "https://api.github.com/users/Codertocat/following{/other_user}", "gists_url": "https://api.github.com/users/Codertocat/gists{/gist_id}", "starred_url": "https://api.github.com/users/Codertocat/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/Codertocat/subscriptions", "organizations_url": 
"https://api.github.com/users/Codertocat/orgs", "repos_url": "https://api.github.com/users/Codertocat/repos", "events_url": "https://api.github.com/users/Codertocat/events{/privacy}", "received_events_url": "https://api.github.com/users/Codertocat/received_events", "type": "User", "site_admin": false } "#; let user: User = serde_json::from_str(USER_JSON).unwrap(); assert_eq!(user.user_type, UserType::User); } #[test]
}
fn org() { const ORGANIZATION_JSON: &str = r#" { "login": "Octocoders", "id": 38302899, "node_id": "MDEyOk9yZ2FuaXphdGlvbjM4MzAyODk5", "avatar_url": "https://avatars1.githubusercontent.com/u/38302899?v=4", "gravatar_id": "", "url": "https://api.github.com/users/Octocoders", "html_url": "https://github.com/Octocoders", "followers_url": "https://api.github.com/users/Octocoders/followers", "following_url": "https://api.github.com/users/Octocoders/following{/other_user}", "gists_url": "https://api.github.com/users/Octocoders/gists{/gist_id}", "starred_url": "https://api.github.com/users/Octocoders/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/Octocoders/subscriptions", "organizations_url": "https://api.github.com/users/Octocoders/orgs", "repos_url": "https://api.github.com/users/Octocoders/repos", "events_url": "https://api.github.com/users/Octocoders/events{/privacy}", "received_events_url": "https://api.github.com/users/Octocoders/received_events", "type": "Organization", "site_admin": false } "#; let user: User = serde_json::from_str(ORGANIZATION_JSON).unwrap(); assert_eq!(user.user_type, UserType::Organization); }
function_block-full_function
[ { "content": "// Super quick and dirty parsing of raw http into a `Request<Body>` type.\n\n// This assumes that the content is JSON\n\nfn request_from_raw_http(raw: &'static str) -> Request<Body> {\n\n let (headers, payload) = raw.split_at(raw.find(\"{\\n\").unwrap());\n\n\n\n let mut headers = headers.li...
Rust
src/pagination/recent.rs
MaxOhn/Bathbot
609a9367ac60e86b0e54a333375d3a8eb3bd3e56
use super::{create_collector, Pages, Pagination}; use crate::{ embeds::{EmbedData, RecentEmbed}, MySQL, Osu, }; use failure::Error; use rosu::models::{Beatmap, Score, User}; use serenity::{ async_trait, cache::Cache, collector::ReactionCollector, http::Http, model::{channel::Message, id::UserId}, prelude::{Context, RwLock, TypeMap}, }; use std::{ collections::{HashMap, HashSet}, sync::Arc, }; pub struct RecentPagination { msg: Message, collector: ReactionCollector, pages: Pages, user: User, scores: Vec<Score>, maps: HashMap<u32, Beatmap>, best: Vec<Score>, global: HashMap<u32, Vec<Score>>, maps_in_db: HashSet<u32>, embed_data: RecentEmbed, cache: Arc<Cache>, data: Arc<RwLock<TypeMap>>, } impl RecentPagination { #[allow(clippy::too_many_arguments)] pub async fn new( ctx: &Context, msg: Message, author: UserId, user: User, scores: Vec<Score>, maps: HashMap<u32, Beatmap>, best: Vec<Score>, global: HashMap<u32, Vec<Score>>, maps_in_db: HashSet<u32>, embed_data: RecentEmbed, ) -> Self { let collector = create_collector(ctx, &msg, author, 60).await; let cache = Arc::clone(&ctx.cache); let data = Arc::clone(&ctx.data); Self { msg, collector, pages: Pages::new(5, scores.len()), user, scores, maps, best, global, maps_in_db, embed_data, cache, data, } } } #[async_trait] impl Pagination for RecentPagination { type PageData = RecentEmbed; fn msg(&mut self) -> &mut Message { &mut self.msg } fn collector(&mut self) -> &mut ReactionCollector { &mut self.collector } fn pages(&self) -> Pages { self.pages } fn pages_mut(&mut self) -> &mut Pages { &mut self.pages } fn reactions() -> &'static [&'static str] { &["⏮️", "⏪", "◀️", "▶️", "⏩", "⏭️"] } fn process_data(&mut self, data: &Self::PageData) { self.embed_data = data.clone(); } fn content(&self) -> Option<String> { Some(format!("Recent score #{}", self.pages.index + 1)) } async fn final_processing(mut self, cache: Arc<Cache>, http: Arc<Http>) -> Result<(), Error> { let mut msg = self.msg.clone(); msg.edit((&cache, &*http), |m| { 
m.embed(|e| self.embed_data.minimize(e)) }) .await?; if self.maps.len() > self.maps_in_db.len() { let data = Arc::clone(&self.data); let map_ids = self.maps_in_db.clone(); let maps: Vec<_> = self .maps .into_iter() .filter(|(id, _)| !map_ids.contains(&id)) .map(|(_, map)| map) .collect(); let data = data.read().await; let mysql = data.get::<MySQL>().unwrap(); let len = maps.len(); match mysql.insert_beatmaps(&maps).await { Ok(_) if len == 1 => {} Ok(_) => info!("Added {} maps to DB", len), Err(why) => warn!("Error while adding maps to DB: {}", why), } } Ok(()) } async fn build_page(&mut self) -> Result<Self::PageData, Error> { let score = self.scores.get(self.pages.index).unwrap(); let map_id = score.beatmap_id.unwrap(); #[allow(clippy::clippy::map_entry)] if !self.maps.contains_key(&map_id) { let data = self.data.read().await; let osu = data.get::<Osu>().unwrap(); let map = score.get_beatmap(osu).await?; self.maps.insert(map_id, map); } let map = self.maps.get(&map_id).unwrap(); #[allow(clippy::clippy::map_entry)] if !self.global.contains_key(&map.beatmap_id) { let data = self.data.read().await; let osu = data.get::<Osu>().unwrap(); let global_lb = map.get_global_leaderboard(&osu, 50).await?; self.global.insert(map.beatmap_id, global_lb); }; let global_lb = self .global .get(&map.beatmap_id) .map(|global| global.as_slice()); RecentEmbed::new( &self.user, score, map, &self.best, global_lb, (&self.cache, &self.data), ) .await } }
use super::{create_collector, Pages, Pagination}; use crate::{ embeds::{EmbedData, RecentEmbed}, MySQL, Osu, }; use failure::Error; use rosu::models::{Beatmap, Score, User}; use serenity::{ async_trait, cache::Cache, collector::ReactionCollector, http::Http, model::{channel::Message, id::UserId}, prelude::{Context, RwLock, TypeMap}, }; use std::{ collections::{HashMap, HashSet}, sync::Arc, }; pub struct RecentPagination { msg: Message, collector: ReactionCollector, pages: Pages, user: User, scores: Vec<Score>, maps: HashMap<u32, Beatmap>, best: Vec<Score>, global: HashMap<u32, Vec<Score>>, maps_in_db: HashSet<u32>, embed_data: RecentEmbed, cache: Arc<Cache>, data: Arc<RwLock<TypeMap>>, } impl RecentPagination { #[allow(clippy::too_many_arguments)] pub async fn new( ctx: &Context, msg: Message, author: UserId, user: User, scores: Vec<Score>, maps: HashMap<u32, Beatmap>, best: Vec<Score>, global: HashMap<u32, Vec<Score>>, maps_in_db: HashSet<u32>, embed_data: RecentEmbed, ) -> Self { let collector = create_collector(ctx, &msg, author, 60).await; let cache = Arc::clone(&ctx.cache); let data = Arc::clone(&ctx.data); Self { msg, collector, pages: Pages::new(5, scores.len()), user, scores, maps, best, global, maps_in_db, embed_data, cache, data, } } } #[async_trait] impl Pagination for RecentPagination { type PageData = RecentEmbed; fn msg(&mut self) -> &mut Message { &mut self.msg } fn collector(&mut self) -> &mut ReactionCollector { &mut self.collector } fn pages(&self) -> Pages { self.pages } fn pages_mut(&mut self) -> &mut Pages { &mut self.pages } fn reactions() -> &'static [&'static str] { &["⏮️", "⏪", "◀️", "▶️", "⏩", "⏭️"] } fn process_data(&mut self, data: &Self::PageData) { self.embed_data = data.clone(); } fn content(&self) -> Option<String> { Some(format!("Recent score #{}", self.pages.index + 1)) } async fn final_processing(mut self, cache: Arc<Cache>, http: Arc<Http>) -> Result<(), Error> { let mut msg = self.msg.clone(); msg.edit((&cache, &*http), |m| { 
m.embed(|e| self.embed_data.minimize(e)) }) .await?; if self.maps.len() > self.maps_in_db.len() { let data = Arc::clone(&self.data); let map_ids = self.maps_in_db.clone(); let maps: Vec<_> = self .maps .into_iter() .filter(|(id, _)| !map_ids.contains(&id)) .map(|(_, map)| map) .collect(); let data = data.read().await; let mysql = data.get::<MySQL>().unwrap(); let len = maps.len(); match mysql.insert_beatmaps(&maps).await { Ok(_) if len == 1 => {} Ok(_) => info!("Added {} maps to DB", len), Err(why) => warn!("Error while adding maps to DB: {}", why), } } Ok(()) } async fn build_page(&mut self) -> Result<Self::PageData, Error> { let score = self.scores.get(self.pages.index).unwrap(); let map_id = score.beatmap_id.unwrap(); #[allow(clippy::clippy::map_entry)] if !self.maps.contains_key(&map_id) { let data = self.data.read().await; let osu = data.get::<Osu>().unwrap(); let map = score.get_beatmap(osu).await?; self.maps.insert(map_id, map); } let map = self.maps.get(&map_id).unwrap(); #[allow(clippy::clippy::map_entry)] if !self.global.contains_key(&map.beatmap_id) { let data = self.data.read().await; let osu = data.get::<Osu>().u
}
nwrap(); let global_lb = map.get_global_leaderboard(&osu, 50).await?; self.global.insert(map.beatmap_id, global_lb); }; let global_lb = self .global .get(&map.beatmap_id) .map(|global| global.as_slice()); RecentEmbed::new( &self.user, score, map, &self.best, global_lb, (&self.cache, &self.data), ) .await }
function_block-function_prefixed
[ { "content": "pub fn unchoke_score(score: &mut Score, map: &Beatmap) {\n\n score.pp = None;\n\n match map.mode {\n\n GameMode::STD => {\n\n let max_combo = map\n\n .max_combo\n\n .unwrap_or_else(|| panic!(\"Max combo of beatmap not found\"));\n\n ...
Rust
embedded-graphics/src/image/image_bmp.rs
wjakobczyk/embedded-graphics
c58e96a39d45f6ca5c5b13da2c671a36ba2db2cc
use super::ImageFile; use crate::drawable::{Drawable, Pixel}; use crate::geometry::{Dimensions, Point, Size}; use crate::pixelcolor::raw::{LittleEndian, RawData, RawDataIter}; use crate::pixelcolor::PixelColor; use crate::transform::Transform; use crate::DrawTarget; use core::marker::PhantomData; use tinybmp::Bmp; #[derive(Debug, Clone)] pub struct ImageBmp<'a, C> where C: PixelColor + From<<C as PixelColor>::Raw>, { bmp: Bmp<'a>, pub offset: Point, pixel_type: PhantomData<C>, } impl<'a, C> ImageBmp<'a, C> where C: PixelColor + From<<C as PixelColor>::Raw>, { fn bytes_per_row(&self) -> usize { let bits_per_row = self.bmp.width() as usize * self.bmp.bpp() as usize; (bits_per_row + 31) / 32 * (32 / 8) } } impl<'a, C> ImageFile<'a> for ImageBmp<'a, C> where C: PixelColor + From<<C as PixelColor>::Raw>, { fn new(image_data: &'a [u8]) -> Result<Self, ()> { let im = Self { bmp: Bmp::from_slice(image_data)?, offset: Point::zero(), pixel_type: PhantomData, }; Ok(im) } fn width(&self) -> u32 { self.bmp.width() } fn height(&self) -> u32 { self.bmp.height() } } impl<'a, C> Dimensions for ImageBmp<'a, C> where C: PixelColor + From<<C as PixelColor>::Raw>, { fn top_left(&self) -> Point { self.offset } fn bottom_right(&self) -> Point { self.top_left() + self.size() } fn size(&self) -> Size { Size::new(self.bmp.width(), self.bmp.height()) } } impl<'a, C> Transform for ImageBmp<'a, C> where C: PixelColor + From<<C as PixelColor>::Raw>, { fn translate(&self, by: Point) -> Self { Self { offset: self.offset + by, ..self.clone() } } fn translate_mut(&mut self, by: Point) -> &mut Self { self.offset += by; self } } impl<'a, 'b, C> IntoIterator for &'b ImageBmp<'a, C> where 'b: 'a, C: PixelColor + From<<C as PixelColor>::Raw>, { type Item = Pixel<C>; type IntoIter = ImageBmpIterator<'a, C>; fn into_iter(self) -> Self::IntoIter { if self.bmp.bpp() as usize != C::Raw::BITS_PER_PIXEL { panic!("invalid bits per pixel"); } ImageBmpIterator { data: RawDataIter::new(self.bmp.image_data()), x: 
0, y: 0, image: self, } } } #[derive(Debug)] pub struct ImageBmpIterator<'a, C> where C: PixelColor + From<<C as PixelColor>::Raw>, { data: RawDataIter<'a, C::Raw, LittleEndian>, x: u32, y: u32, image: &'a ImageBmp<'a, C>, } impl<'a, C> Iterator for ImageBmpIterator<'a, C> where C: PixelColor + From<<C as PixelColor>::Raw>, { type Item = Pixel<C>; fn next(&mut self) -> Option<Self::Item> { if self.y < self.image.bmp.height() { if self.x == 0 { let row_index = (self.image.height() - 1) - self.y; let row_start = self.image.bytes_per_row() * row_index as usize; self.data.set_byte_position(row_start); } let data = self.data.next()?; let mut point = Point::new(self.x as i32, self.y as i32); point += self.image.offset; self.x += 1; if self.x >= self.image.bmp.width() { self.y += 1; self.x = 0; } Some(Pixel(point, data.into())) } else { None } } } impl<'a, C: 'a> Drawable<C> for &ImageBmp<'a, C> where C: PixelColor + From<<C as PixelColor>::Raw>, { fn draw<D: DrawTarget<C>>(self, display: &mut D) { display.draw_iter(self.into_iter()); } } #[cfg(test)] mod tests { use super::*; use crate::mock_display::MockDisplay; use crate::pixelcolor::{BinaryColor, Gray8, GrayColor, Rgb555, Rgb565, Rgb888, RgbColor}; #[test] fn negative_top_left() { let image: ImageBmp<Rgb565> = ImageBmp::new(include_bytes!( "../../tests/chessboard-4px-colour-16bit.bmp" )) .unwrap() .translate(Point::new(-1, -1)); assert_eq!(image.top_left(), Point::new(-1, -1)); assert_eq!(image.bottom_right(), Point::new(3, 3)); assert_eq!(image.size(), Size::new(4, 4)); } #[test] fn dimensions() { let image: ImageBmp<Rgb565> = ImageBmp::new(include_bytes!( "../../tests/chessboard-4px-colour-16bit.bmp" )) .unwrap() .translate(Point::new(100, 200)); assert_eq!(image.top_left(), Point::new(100, 200)); assert_eq!(image.bottom_right(), Point::new(104, 204)); assert_eq!(image.size(), Size::new(4, 4)); } #[test] #[ignore] fn it_can_have_negative_offsets() { let image: ImageBmp<Rgb565> = ImageBmp::new(include_bytes!( 
"../../tests/chessboard-4px-colour-16bit.bmp" )) .unwrap() .translate(Point::new(-1, -1)); assert_eq!(image.into_iter().count(), 9); let it = image.into_iter(); let expected: [Pixel<Rgb565>; 9] = [ Pixel(Point::new(0, 0), Rgb565::RED), Pixel(Point::new(1, 0), Rgb565::BLACK), Pixel(Point::new(2, 0), Rgb565::GREEN), Pixel(Point::new(0, 1), Rgb565::BLACK), Pixel(Point::new(1, 1), Rgb565::BLUE), Pixel(Point::new(2, 1), Rgb565::BLACK), Pixel(Point::new(0, 2), Rgb565::WHITE), Pixel(Point::new(1, 2), Rgb565::BLACK), Pixel(Point::new(2, 2), Rgb565::WHITE), ]; for (idx, pixel) in it.enumerate() { assert_eq!(pixel, expected[idx]); } } fn create_color_pattern<C>() -> [[C; 4]; 2] where C: RgbColor, { [ [C::BLACK, C::RED, C::GREEN, C::YELLOW], [C::BLUE, C::MAGENTA, C::CYAN, C::WHITE], ] } macro_rules! test_pattern { ($color_type:ident, $image_data:expr) => { let image: ImageBmp<$color_type> = ImageBmp::new($image_data).unwrap(); let pattern = create_color_pattern(); assert_eq!(image.size(), Size::new(4, 2)); let mut iter = image.into_iter(); for (y, row) in pattern.iter().enumerate() { for (x, &expected_color) in row.iter().enumerate() { let pos = Point::new(x as i32, y as i32); let pixel = iter.next().unwrap(); assert_eq!(pixel, Pixel(pos, expected_color)); } } assert!(iter.next().is_none()); }; } #[test] fn colors_rgb555() { test_pattern!(Rgb555, include_bytes!("../../tests/colors_rgb555.bmp")); } #[test] fn colors_rgb565() { test_pattern!(Rgb565, include_bytes!("../../tests/colors_rgb565.bmp")); } #[test] fn colors_rgb888_24bit() { test_pattern!( Rgb888, include_bytes!("../../tests/colors_rgb888_24bit.bmp") ); } #[test] #[ignore] fn colors_rgb888_32bit() { test_pattern!( Rgb888, include_bytes!("../../tests/colors_rgb888_32bit.bmp") ); } #[test] fn colors_grey8() { let image: ImageBmp<Gray8> = ImageBmp::new(include_bytes!("../../tests/colors_grey8.bmp")).unwrap(); assert_eq!(image.size(), Size::new(3, 1)); let mut iter = image.into_iter(); let p = iter.next().unwrap(); 
assert_eq!(p.0, Point::new(0, 0)); assert_eq!(p.1, Gray8::BLACK); let p = iter.next().unwrap(); assert_eq!(p.0, Point::new(1, 0)); assert_eq!(p.1, Gray8::new(128)); let p = iter.next().unwrap(); assert_eq!(p.0, Point::new(2, 0)); assert_eq!(p.1, Gray8::WHITE); assert!(iter.next().is_none()); } #[test] fn issue_136_row_size_is_multiple_of_4_bytes() { let image: ImageBmp<Rgb565> = ImageBmp::new(include_bytes!("../../tests/issue_136.bmp")).unwrap(); let mut display = MockDisplay::new(); image .into_iter() .map(|Pixel(p, c)| { Pixel( p, match c { Rgb565::BLACK => BinaryColor::Off, Rgb565::WHITE => BinaryColor::On, _ => panic!("Unexpected color in image"), }, ) }) .draw(&mut display); assert_eq!( display, MockDisplay::from_pattern(&[ "####.####", "#....#...", "####.#.##", "#....#..#", "####.####", ]) ); } }
use super::ImageFile; use crate::drawable::{Drawable, Pixel}; use crate::geometry::{Dimensions, Point, Size}; use crate::pixelcolor::raw::{LittleEndian, RawData, RawDataIter}; use crate::pixelcolor::PixelColor; use crate::transform::Transform; use crate::DrawTarget; use core::marker::PhantomData; use tinybmp::Bmp; #[derive(Debug, Clone)] pub struct ImageBmp<'a, C> where C: PixelColor + From<<C as PixelColor>::Raw>, { bmp: Bmp<'a>, pub offset: Point, pixel_type: PhantomData<C>, } impl<'a, C> ImageBmp<'a, C> where C: PixelColor + From<<C as PixelColor>::Raw>, { fn bytes_per_row(&self) -> usize { let bits_per_row = self.bmp.width() as usize * self.bmp.bpp() as usize; (bits_per_row + 31) / 32 * (32 / 8) } } impl<'a, C> ImageFile<'a> for ImageBmp<'a, C> where C: PixelColor + From<<C as PixelColor>::Raw>, { fn new(image_data: &'a [u8]) -> Result<Self, ()> { let im = Self { bmp: Bmp::from_slice(image_data)?, offset: Point::zero(), pixel_type: PhantomData, }; Ok(im) } fn width(&self) -> u32 { self.bmp.width() } fn height(&self) -> u32 { self.bmp.height() } } impl<'a, C> Dimensions for ImageBmp<'a, C> where C: PixelColor + From<<C as PixelColor>::Raw>, { fn top_left(&self) -> Point { self.offset } fn bottom_right(&self) -> Point { self.top_left() + self.size() } fn size(&self) -> Size { Size::new(self.bmp.width(), self.bmp.height()) } } impl<'a, C> Transform for ImageBmp<'a, C> where C: PixelColor + From<<C as PixelColor>::Raw>, { fn translate(&self, by: Point) -> Self { Self { offset: self.offset + by, ..self.clone() } } fn translate_mut(&mut self, by: Point) -> &mut Self { self.offset += by; self } } impl<'a, 'b, C> IntoIterator for &'b ImageBmp<'a, C> where 'b: 'a, C: PixelColor + From<<C as PixelColor>::Raw>, { type Item = Pixel<C>; type IntoIter = ImageBmpIterator<'a, C>; fn into_iter(self) -> Self::IntoIter { if self.bmp.bpp() as usize != C::Raw::BITS_PER_PIXEL { panic!("invalid bits per pixel"); } ImageBmpIterator { data: RawDataIter::new(self.bmp.image_data()), x: 
0, y: 0, image: self, } } } #[derive(Debug)] pub struct ImageBmpIterator<'a, C> where C: PixelColor + From<<C as PixelColor>::Raw>, { data: RawDataIter<'a, C::Raw, LittleEndian>, x: u32, y: u32, image: &'a ImageBmp<'a, C>, } impl<'a, C> Iterator for ImageBmpIterator<'a, C> where C: PixelColor + From<<C as PixelColor>::Raw>, { type Item = Pixel<C>; fn next(&mut self) -> Option<Self::Item> { if self.y < self.image.bmp.height() { if self.x == 0 { let row_index = (self.image.height() - 1) - self.y; let row_start = self.image.bytes_per_row() * row_index as usize; self.data.set_byte_position(row_star
} impl<'a, C: 'a> Drawable<C> for &ImageBmp<'a, C> where C: PixelColor + From<<C as PixelColor>::Raw>, { fn draw<D: DrawTarget<C>>(self, display: &mut D) { display.draw_iter(self.into_iter()); } } #[cfg(test)] mod tests { use super::*; use crate::mock_display::MockDisplay; use crate::pixelcolor::{BinaryColor, Gray8, GrayColor, Rgb555, Rgb565, Rgb888, RgbColor}; #[test] fn negative_top_left() { let image: ImageBmp<Rgb565> = ImageBmp::new(include_bytes!( "../../tests/chessboard-4px-colour-16bit.bmp" )) .unwrap() .translate(Point::new(-1, -1)); assert_eq!(image.top_left(), Point::new(-1, -1)); assert_eq!(image.bottom_right(), Point::new(3, 3)); assert_eq!(image.size(), Size::new(4, 4)); } #[test] fn dimensions() { let image: ImageBmp<Rgb565> = ImageBmp::new(include_bytes!( "../../tests/chessboard-4px-colour-16bit.bmp" )) .unwrap() .translate(Point::new(100, 200)); assert_eq!(image.top_left(), Point::new(100, 200)); assert_eq!(image.bottom_right(), Point::new(104, 204)); assert_eq!(image.size(), Size::new(4, 4)); } #[test] #[ignore] fn it_can_have_negative_offsets() { let image: ImageBmp<Rgb565> = ImageBmp::new(include_bytes!( "../../tests/chessboard-4px-colour-16bit.bmp" )) .unwrap() .translate(Point::new(-1, -1)); assert_eq!(image.into_iter().count(), 9); let it = image.into_iter(); let expected: [Pixel<Rgb565>; 9] = [ Pixel(Point::new(0, 0), Rgb565::RED), Pixel(Point::new(1, 0), Rgb565::BLACK), Pixel(Point::new(2, 0), Rgb565::GREEN), Pixel(Point::new(0, 1), Rgb565::BLACK), Pixel(Point::new(1, 1), Rgb565::BLUE), Pixel(Point::new(2, 1), Rgb565::BLACK), Pixel(Point::new(0, 2), Rgb565::WHITE), Pixel(Point::new(1, 2), Rgb565::BLACK), Pixel(Point::new(2, 2), Rgb565::WHITE), ]; for (idx, pixel) in it.enumerate() { assert_eq!(pixel, expected[idx]); } } fn create_color_pattern<C>() -> [[C; 4]; 2] where C: RgbColor, { [ [C::BLACK, C::RED, C::GREEN, C::YELLOW], [C::BLUE, C::MAGENTA, C::CYAN, C::WHITE], ] } macro_rules! 
test_pattern { ($color_type:ident, $image_data:expr) => { let image: ImageBmp<$color_type> = ImageBmp::new($image_data).unwrap(); let pattern = create_color_pattern(); assert_eq!(image.size(), Size::new(4, 2)); let mut iter = image.into_iter(); for (y, row) in pattern.iter().enumerate() { for (x, &expected_color) in row.iter().enumerate() { let pos = Point::new(x as i32, y as i32); let pixel = iter.next().unwrap(); assert_eq!(pixel, Pixel(pos, expected_color)); } } assert!(iter.next().is_none()); }; } #[test] fn colors_rgb555() { test_pattern!(Rgb555, include_bytes!("../../tests/colors_rgb555.bmp")); } #[test] fn colors_rgb565() { test_pattern!(Rgb565, include_bytes!("../../tests/colors_rgb565.bmp")); } #[test] fn colors_rgb888_24bit() { test_pattern!( Rgb888, include_bytes!("../../tests/colors_rgb888_24bit.bmp") ); } #[test] #[ignore] fn colors_rgb888_32bit() { test_pattern!( Rgb888, include_bytes!("../../tests/colors_rgb888_32bit.bmp") ); } #[test] fn colors_grey8() { let image: ImageBmp<Gray8> = ImageBmp::new(include_bytes!("../../tests/colors_grey8.bmp")).unwrap(); assert_eq!(image.size(), Size::new(3, 1)); let mut iter = image.into_iter(); let p = iter.next().unwrap(); assert_eq!(p.0, Point::new(0, 0)); assert_eq!(p.1, Gray8::BLACK); let p = iter.next().unwrap(); assert_eq!(p.0, Point::new(1, 0)); assert_eq!(p.1, Gray8::new(128)); let p = iter.next().unwrap(); assert_eq!(p.0, Point::new(2, 0)); assert_eq!(p.1, Gray8::WHITE); assert!(iter.next().is_none()); } #[test] fn issue_136_row_size_is_multiple_of_4_bytes() { let image: ImageBmp<Rgb565> = ImageBmp::new(include_bytes!("../../tests/issue_136.bmp")).unwrap(); let mut display = MockDisplay::new(); image .into_iter() .map(|Pixel(p, c)| { Pixel( p, match c { Rgb565::BLACK => BinaryColor::Off, Rgb565::WHITE => BinaryColor::On, _ => panic!("Unexpected color in image"), }, ) }) .draw(&mut display); assert_eq!( display, MockDisplay::from_pattern(&[ "####.####", "#....#...", "####.#.##", "#....#..#", "####.####", ]) 
); } }
t); } let data = self.data.next()?; let mut point = Point::new(self.x as i32, self.y as i32); point += self.image.offset; self.x += 1; if self.x >= self.image.bmp.width() { self.y += 1; self.x = 0; } Some(Pixel(point, data.into())) } else { None } }
function_block-function_prefixed
[ { "content": "/// Draw the seconds hand given a seconds value (0 - 59)\n\nfn draw_seconds_hand(seconds: u32) -> impl Iterator<Item = Pixel<BinaryColor>> {\n\n // Convert seconds into a position around the circle in radians\n\n let seconds_radians = ((seconds as f32 / 60.0) * 2.0 * PI) + START;\n\n\n\n ...
Rust
proc-macro/builder/src/lib.rs
2shiori17/learn
569a47494032a0a87196a0173f7036acd384b0cd
#![feature(if_let_guard)] #![feature(let_chains)] use proc_macro::TokenStream; use proc_macro2::TokenStream as TokenStream2; use quote::{format_ident, quote}; use syn::{ parse_macro_input, Attribute, Data, DataStruct, DeriveInput, Error as SynError, Expr, ExprAssign, Field, GenericArgument, Ident, Lit, PathArguments, Type, }; #[proc_macro_derive(Builder, attributes(builder))] pub fn derive(input: TokenStream) -> TokenStream { let input = parse_macro_input!(input as DeriveInput); TokenStream::from(match input.data { Data::Struct(ref data) => StructBuilder::derive(&input, data), _ => unimplemented!(), }) } struct StructBuilder<'a> { input: &'a DeriveInput, fields: Vec<FieldType<'a>>, } impl<'a> StructBuilder<'a> { fn derive(input: &DeriveInput, data: &DataStruct) -> TokenStream2 { let generator = StructBuilder::analyze(input, data); match generator { Ok(gen) => gen.generate(), Err(err) => err.to_compile_error(), } } fn analyze(input: &'a DeriveInput, data: &'a DataStruct) -> Result<Self, SynError> { let fields = data .fields .iter() .map(FieldType::new) .collect::<Result<Vec<_>, _>>()?; Ok(Self { input, fields }) } fn generate(&self) -> TokenStream2 { let target = &self.input.ident; let builder = format_ident!("{}Builder", target); let partial = self.gen_partial(); let init = self.gen_init(); let setters = self.gen_setters(); let build = self.gen_build(); quote! { impl #target { pub fn builder() -> #builder { #builder { #init } } } pub struct #builder { #partial } impl #builder { #setters pub fn build(&mut self) -> std::result::Result<#target, std::boxed::Box<dyn std::error::Error>> { #build } } } } fn gen_partial(&self) -> TokenStream2 { fields_map(&self.fields, |field| match field { FieldType::Normal { ident, ty } => quote! { #ident: std::option::Option<#ty>, }, FieldType::Option { ident, ty } => quote! { #ident: std::option::Option<#ty>, }, FieldType::Each { ident, ty, .. } => quote! 
{ #ident: std::vec::Vec<#ty>, }, }) } fn gen_init(&self) -> TokenStream2 { fields_map(&self.fields, |field| match field { FieldType::Normal { ident, .. } => quote! { #ident: std::option::Option::None, }, FieldType::Option { ident, .. } => quote! { #ident: std::option::Option::None, }, FieldType::Each { ident, .. } => quote! { #ident: std::vec::Vec::new(), }, }) } fn gen_setters(&self) -> TokenStream2 { fields_map(&self.fields, |field| match field { FieldType::Normal { ident, ty } => quote! { pub fn #ident(&mut self, #ident: #ty) -> &mut Self { self.#ident = std::option::Option::Some(#ident); self } }, FieldType::Option { ident, ty } => quote! { pub fn #ident(&mut self, #ident: #ty) -> &mut Self { self.#ident = std::option::Option::Some(#ident); self } }, FieldType::Each { ident, ty, each } => quote! { pub fn #each(&mut self, #each: #ty) -> &mut Self { self.#ident.push(#each); self } }, }) } fn gen_build(&self) -> TokenStream2 { let unwrapped = fields_map(&self.fields, |field| { let ident = field.ident(); let err_msg = format!("{:?} is required", &ident); match field { FieldType::Normal { ident, .. } => quote! { let #ident = if let std::option::Option::Some(x) = &self.#ident { x.clone() } else { return std::result::Result::Err(#err_msg.into()); }; }, FieldType::Option { ident, .. } => quote! { let #ident = self.#ident.clone(); }, FieldType::Each { ident, .. } => quote! { let #ident = self.#ident.clone(); }, } }); let target = &self.input.ident; let idents = fields_map(&self.fields, |field| { let ident = field.ident(); quote! { #ident, } }); quote! 
{ #unwrapped std::result::Result::Ok(#target { #idents }) } } } enum FieldType<'a> { Normal { ident: &'a Option<Ident>, ty: &'a Type, }, Option { ident: &'a Option<Ident>, ty: &'a Type, }, Each { ident: &'a Option<Ident>, ty: &'a Type, each: Ident, }, } impl<'a> FieldType<'a> { fn new(field: &'a Field) -> Result<Self, SynError> { Self::check_option(field) .or(Self::check_vec(field)) .unwrap_or({ let (ident, ty) = (&field.ident, &field.ty); Ok(Self::Normal { ident, ty }) }) } fn check_option(field: &'a Field) -> Option<Result<Self, SynError>> { let (ident, ty) = (&field.ident, &field.ty); first_generic_arg(ty, "Option").map(|ty| Ok(Self::Option { ident, ty })) } fn check_vec(field: &'a Field) -> Option<Result<Self, SynError>> { let (ident, ty) = (&field.ident, &field.ty); first_generic_arg(ty, "Vec").map(|gen_arg| { if let Some(attr) = field.attrs.first() && let Some(each) = attribute_each(attr) { match each { Ok(each) => Ok(Self::Each { ident, ty: gen_arg, each, }), Err(err) => Err(err), } } else { Ok(Self::Normal { ident, ty }) } }) } fn ident(&self) -> &'a Option<Ident> { match self { Self::Normal { ident, .. } => ident, Self::Option { ident, .. } => ident, Self::Each { ident, .. } => ident, } } } fn fields_map<F>(fields: &[FieldType], f: F) -> TokenStream2 where F: FnMut(&FieldType) -> TokenStream2, { fields.iter().flat_map(f).collect() } fn first_generic_arg<'a>(target: &'a Type, ty: &str) -> Option<&'a Type> { match target { Type::Path(path) => path.path.segments.first(), _ => None, } .and_then(|seg| (seg.ident == ty).then(|| &seg.arguments)) .and_then(|args| match args { PathArguments::AngleBracketed(args) => args.args.first(), _ => None, }) .and_then(|arg| match arg { GenericArgument::Type(ty) => Some(ty), _ => None, }) } fn attribute_each(attr: &Attribute) -> Option<Result<Ident, SynError>> { attr.parse_args::<ExprAssign>() .ok() .and_then(|ExprAssign { left, right, .. 
}| { if let Expr::Path(path) = *left && let Some(ident) = path.path.segments.first().map(|seg| &seg.ident) { if ident != "each" { return Some(Err(SynError::new_spanned(attr.parse_meta().unwrap(), "expected `builder(each = \"...\")`"))) } } else { return None } if let Expr::Lit(lit) = *right && let Lit::Str(lit) = lit.lit { Some(Ok(format_ident!("{}", lit.value()))) } else { None } }) }
#![feature(if_let_guard)] #![feature(let_chains)] use proc_macro::TokenStream; use proc_macro2::TokenStream as TokenStream2; use quote::{format_ident, quote}; use syn::{ parse_macro_input, Attribute, Data, DataStruct, DeriveInput, Error as SynError, Expr, ExprAssign, Field, GenericArgument, Ident, Lit, PathArguments, Type, }; #[proc_macro_derive(Builder, attributes(builder))] pub fn derive(input: TokenStream) -> TokenStream { let input = parse_macro_input!(input as DeriveInput); TokenStream::from(match input.data { Data::Struct(ref data) => StructBuilder::derive(&input, data), _ => unimplemented!(), }) } struct StructBuilder<'a> { input: &'a DeriveInput, fields: Vec<FieldType<'a>>, } impl<'a> StructBuilder<'a> { fn derive(input: &DeriveInput, data: &DataStruct) -> TokenStream2 { let generator = StructBuilder::analyze(input, data); match generator { Ok(gen) => gen.generate(), Err(err) => err.to_compile_error(), } } fn analyze(input: &'a DeriveInput, data: &'a DataStruct) -> Result<Self, SynError> { let fields = data .fields .iter() .map(FieldType::new) .collect::<Result<Vec<_>, _>>()?; Ok(Self { input, fields }) } fn generate(&self) -> TokenStream2 { let target = &self.input.ident;
d::vec::Vec::new(), }, }) } fn gen_setters(&self) -> TokenStream2 { fields_map(&self.fields, |field| match field { FieldType::Normal { ident, ty } => quote! { pub fn #ident(&mut self, #ident: #ty) -> &mut Self { self.#ident = std::option::Option::Some(#ident); self } }, FieldType::Option { ident, ty } => quote! { pub fn #ident(&mut self, #ident: #ty) -> &mut Self { self.#ident = std::option::Option::Some(#ident); self } }, FieldType::Each { ident, ty, each } => quote! { pub fn #each(&mut self, #each: #ty) -> &mut Self { self.#ident.push(#each); self } }, }) } fn gen_build(&self) -> TokenStream2 { let unwrapped = fields_map(&self.fields, |field| { let ident = field.ident(); let err_msg = format!("{:?} is required", &ident); match field { FieldType::Normal { ident, .. } => quote! { let #ident = if let std::option::Option::Some(x) = &self.#ident { x.clone() } else { return std::result::Result::Err(#err_msg.into()); }; }, FieldType::Option { ident, .. } => quote! { let #ident = self.#ident.clone(); }, FieldType::Each { ident, .. } => quote! { let #ident = self.#ident.clone(); }, } }); let target = &self.input.ident; let idents = fields_map(&self.fields, |field| { let ident = field.ident(); quote! { #ident, } }); quote! 
{ #unwrapped std::result::Result::Ok(#target { #idents }) } } } enum FieldType<'a> { Normal { ident: &'a Option<Ident>, ty: &'a Type, }, Option { ident: &'a Option<Ident>, ty: &'a Type, }, Each { ident: &'a Option<Ident>, ty: &'a Type, each: Ident, }, } impl<'a> FieldType<'a> { fn new(field: &'a Field) -> Result<Self, SynError> { Self::check_option(field) .or(Self::check_vec(field)) .unwrap_or({ let (ident, ty) = (&field.ident, &field.ty); Ok(Self::Normal { ident, ty }) }) } fn check_option(field: &'a Field) -> Option<Result<Self, SynError>> { let (ident, ty) = (&field.ident, &field.ty); first_generic_arg(ty, "Option").map(|ty| Ok(Self::Option { ident, ty })) } fn check_vec(field: &'a Field) -> Option<Result<Self, SynError>> { let (ident, ty) = (&field.ident, &field.ty); first_generic_arg(ty, "Vec").map(|gen_arg| { if let Some(attr) = field.attrs.first() && let Some(each) = attribute_each(attr) { match each { Ok(each) => Ok(Self::Each { ident, ty: gen_arg, each, }), Err(err) => Err(err), } } else { Ok(Self::Normal { ident, ty }) } }) } fn ident(&self) -> &'a Option<Ident> { match self { Self::Normal { ident, .. } => ident, Self::Option { ident, .. } => ident, Self::Each { ident, .. } => ident, } } } fn fields_map<F>(fields: &[FieldType], f: F) -> TokenStream2 where F: FnMut(&FieldType) -> TokenStream2, { fields.iter().flat_map(f).collect() } fn first_generic_arg<'a>(target: &'a Type, ty: &str) -> Option<&'a Type> { match target { Type::Path(path) => path.path.segments.first(), _ => None, } .and_then(|seg| (seg.ident == ty).then(|| &seg.arguments)) .and_then(|args| match args { PathArguments::AngleBracketed(args) => args.args.first(), _ => None, }) .and_then(|arg| match arg { GenericArgument::Type(ty) => Some(ty), _ => None, }) } fn attribute_each(attr: &Attribute) -> Option<Result<Ident, SynError>> { attr.parse_args::<ExprAssign>() .ok() .and_then(|ExprAssign { left, right, .. 
}| { if let Expr::Path(path) = *left && let Some(ident) = path.path.segments.first().map(|seg| &seg.ident) { if ident != "each" { return Some(Err(SynError::new_spanned(attr.parse_meta().unwrap(), "expected `builder(each = \"...\")`"))) } } else { return None } if let Expr::Lit(lit) = *right && let Lit::Str(lit) = lit.lit { Some(Ok(format_ident!("{}", lit.value()))) } else { None } }) }
let builder = format_ident!("{}Builder", target); let partial = self.gen_partial(); let init = self.gen_init(); let setters = self.gen_setters(); let build = self.gen_build(); quote! { impl #target { pub fn builder() -> #builder { #builder { #init } } } pub struct #builder { #partial } impl #builder { #setters pub fn build(&mut self) -> std::result::Result<#target, std::boxed::Box<dyn std::error::Error>> { #build } } } } fn gen_partial(&self) -> TokenStream2 { fields_map(&self.fields, |field| match field { FieldType::Normal { ident, ty } => quote! { #ident: std::option::Option<#ty>, }, FieldType::Option { ident, ty } => quote! { #ident: std::option::Option<#ty>, }, FieldType::Each { ident, ty, .. } => quote! { #ident: std::vec::Vec<#ty>, }, }) } fn gen_init(&self) -> TokenStream2 { fields_map(&self.fields, |field| match field { FieldType::Normal { ident, .. } => quote! { #ident: std::option::Option::None, }, FieldType::Option { ident, .. } => quote! { #ident: std::option::Option::None, }, FieldType::Each { ident, .. } => quote! { #ident: st
random
[]
Rust
src/event/mod.rs
not-a-seagull/beetle
87e90ab8c71c4ae1787ae02ee5a7809c8bed62a0
/* ----------------------------------------------------------------------------------- * src/event/mod.rs - An event from the event loop. * beetle - Pull-based GUI framework. * Copyright © 2020 not_a_seagull * * This project is licensed under either the Apache 2.0 license or the MIT license, at * your option. For more information, please consult the LICENSE-APACHE or LICENSE-MIT * files in the repository root. * ----------------------------------------------------------------------------------- * MIT License: * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the “Software”), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. * ----------------------------------------------------------------------------------- * Apache 2.0 License Declaration: * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ---------------------------------------------------------------------------------- */ use crate::{Graphics, KeyInfo, MouseButton, Texture, Window}; use alloc::{string::String, sync::Arc, vec, vec::Vec}; use core::{any::Any, fmt, option::Option}; use euclid::default::{Point2D, Rect}; #[cfg(target_os = "linux")] mod flutter; #[cfg(windows)] mod porc; #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum EventType { NoOp, KeyDown, KeyUp, AboutToPaint, Paint, TextChanging, TextChanged, Quit, Close, BoundsChanging, BoundsChanged, BackgroundChanging, BackgroundChanged, MouseButtonDown, MouseButtonUp, Integer(usize), Str(&'static str), } #[derive(Debug)] pub enum EventData { NoOp, KeyDown(KeyInfo, Option<Point2D<u32>>), KeyUp(KeyInfo, Option<Point2D<u32>>), AboutToPaint, Paint(Graphics), TextChanging { old: String, new: String }, TextChanged { old: String, new: String }, Quit, Close, BoundsChanging { old: Rect<u32>, new: Rect<u32> }, BoundsChanged { old: Rect<u32>, new: Rect<u32> }, BackgroundChanging { old: Option<Texture>, new: Option<Texture>, }, BackgroundChanged, MouseButtonDown(Point2D<u32>, MouseButton), MouseButtonUp(Point2D<u32>, MouseButton), Integer(usize), Str(&'static str), } impl EventData { #[inline] pub fn ty(&self) -> EventType { match self { EventData::NoOp => EventType::NoOp, EventData::KeyDown(ref _k, ref _o) => EventType::KeyDown, EventData::KeyUp(ref _k, ref _o) => EventType::KeyUp, EventData::AboutToPaint => EventType::AboutToPaint, EventData::Paint(ref _g) => EventType::Paint, EventData::TextChanging { old: _, new: _ } => 
EventType::TextChanging, EventData::TextChanged { old: _, new: _ } => EventType::TextChanged, EventData::Quit => EventType::Quit, EventData::Close => EventType::Close, EventData::BoundsChanging { old: _, new: _ } => EventType::BoundsChanging, EventData::BoundsChanged { old: _, new: _ } => EventType::BoundsChanged, EventData::BackgroundChanging { old: _, new: _ } => EventType::BackgroundChanging, EventData::BackgroundChanged => EventType::BackgroundChanged, EventData::MouseButtonDown(ref _p, ref _b) => EventType::MouseButtonDown, EventData::MouseButtonUp(ref _p, ref _b) => EventType::MouseButtonUp, EventData::Integer(id) => EventType::Integer(*id), EventData::Str(id) => EventType::Str(id), } } } pub struct Event { target_window: Window, data: EventData, arguments: Vec<Arc<dyn Any + Send + Sync + 'static>>, hidden_data: Option<Arc<dyn Any + Send + Sync + 'static>>, needs_quit: bool, } impl fmt::Debug for Event { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("Event") .field("target_window", &self.target_window) .field("data", &self.data) .field("needs_quit", &self.needs_quit) .finish() } } impl Event { #[inline] pub fn new(target_window: &Window, data: EventData) -> Self { Self { target_window: target_window.clone(), data, arguments: vec![], hidden_data: None, needs_quit: false, } } #[inline] pub(crate) fn set_hidden_data<T: Any + Send + Sync + 'static>(&mut self, data: T) { self.hidden_data = Some(Arc::new(data)); } #[inline] pub(crate) fn hidden_data<T: Any + Send + Sync + 'static>(&self) -> Option<Arc<T>> { match self.hidden_data { None => None, Some(ref hd) => Arc::downcast(hd.clone()).ok(), } } #[inline] pub fn ty(&self) -> EventType { self.data.ty() } #[inline] pub fn data(&self) -> &EventData { &self.data } #[inline] pub fn window(&self) -> &Window { &self.target_window } #[inline] pub fn arguments(&self) -> &[Arc<dyn Any + Send + Sync + 'static>] { &self.arguments } #[inline] pub fn push_argument<T>(&mut self, arg: T) where T: Any + 
Send + Sync + 'static, { self.arguments.push(Arc::new(arg)); } #[inline] pub fn dispatch(&self) -> crate::Result<()> { self.window().handle_event(self) } #[inline] pub fn is_exit_event(&self) -> bool { self.needs_quit } #[inline] pub fn set_is_exit_event(&mut self, is_quit: bool) { self.needs_quit = is_quit; } }
/* ----------------------------------------------------------------------------------- * src/event/mod.rs - An event from the event loop. * beetle - Pull-based GUI framework. * Copyright © 2020 not_a_seagull * * This project is licensed under either the Apache 2.0 license or the MIT license, at * your option. For more information, please consult the LICENSE-APACHE or LICENSE-MIT * files in the repository root. * ----------------------------------------------------------------------------------- * MIT License: * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the “Software”), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. * ----------------------------------------------------------------------------------- * Apache 2.0 License Declaration: * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ---------------------------------------------------------------------------------- */ use crate::{Graphics, KeyInfo, MouseButton, Texture, Window}; use alloc::{string::String, sync::Arc, vec, vec::Vec}; use core::{any::Any, fmt, option::Option}; use euclid::default::{Point2D, Rect}; #[cfg(target_os = "linux")] mod flutter; #[cfg(windows)] mod porc; #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum EventType { NoOp, KeyDown, KeyUp, AboutToPaint, Paint, TextChanging, TextChanged, Quit, Close, BoundsChanging, BoundsChanged, BackgroundChanging, BackgroundChanged, MouseButtonDown, MouseButtonUp, Integer(usize), Str(&'static str), } #[derive(Debug)] pub enum EventData { NoOp, KeyDown(KeyInfo, Option<Point2D<u32>>), KeyUp(KeyInfo, Option<Point2D<u32>>), AboutToPaint, Paint(Graphics), TextChanging { old: String, new: String }, TextChanged { old: String, new: String }, Quit, Close, BoundsChanging { old: Rect<u32>, new: Rect<u32> }, BoundsChanged { old: Rect<u32>, new: Rect<u32> }, BackgroundChanging { old: Option<Texture>, new: Option<Texture>, }, BackgroundChanged, MouseButtonDown(Point2D<u32>, MouseButton), MouseButtonUp(Point2D<u32>, MouseButton), Integer(usize), Str(&'static str), } impl EventData { #[inline] pub fn ty(&self) -> EventType { match self { EventData::NoOp => EventType::NoOp, EventData::KeyDown(ref _k, ref _o) => EventType::KeyDown, EventData::KeyUp(ref _k, ref _o) => EventType::KeyUp, EventData::AboutToPaint => EventType::AboutToPaint, EventData::Paint(ref _g) => EventType::Paint, EventData::TextChanging { old: _, new: _ } => 
EventType::TextChanging, EventData::TextChanged { old: _, new: _ } => EventType::TextChanged, EventData::Quit => EventType::Quit, EventData::Close => EventType::Close, EventData::BoundsChanging { old: _, new: _ } => EventType::BoundsChanging, EventData::BoundsChanged { old: _, new: _ } => EventType::BoundsChanged, EventData::BackgroundChanging { old: _, new: _ } => EventType::BackgroundChanging, EventData::BackgroundChanged => EventType::BackgroundChanged, EventData::MouseButtonDown(ref _p, ref _b) => EventType::MouseButtonDown, EventData::MouseButtonUp(ref _p, ref _b) => EventType::MouseButtonUp, EventData::Integer(id) => EventType::Integer(*id), EventData::Str(id) => EventType::Str(id), } } } pub struct Event { target_window: Window, data: EventData, arguments: Vec<Arc<dyn Any + Send + Sync + 'static>>, hidden_data: Option<Arc<dyn Any + Send + Sync + 'static>>, needs_quit: bool, } impl fmt::Debug for Event {
} impl Event { #[inline] pub fn new(target_window: &Window, data: EventData) -> Self { Self { target_window: target_window.clone(), data, arguments: vec![], hidden_data: None, needs_quit: false, } } #[inline] pub(crate) fn set_hidden_data<T: Any + Send + Sync + 'static>(&mut self, data: T) { self.hidden_data = Some(Arc::new(data)); } #[inline] pub(crate) fn hidden_data<T: Any + Send + Sync + 'static>(&self) -> Option<Arc<T>> { match self.hidden_data { None => None, Some(ref hd) => Arc::downcast(hd.clone()).ok(), } } #[inline] pub fn ty(&self) -> EventType { self.data.ty() } #[inline] pub fn data(&self) -> &EventData { &self.data } #[inline] pub fn window(&self) -> &Window { &self.target_window } #[inline] pub fn arguments(&self) -> &[Arc<dyn Any + Send + Sync + 'static>] { &self.arguments } #[inline] pub fn push_argument<T>(&mut self, arg: T) where T: Any + Send + Sync + 'static, { self.arguments.push(Arc::new(arg)); } #[inline] pub fn dispatch(&self) -> crate::Result<()> { self.window().handle_event(self) } #[inline] pub fn is_exit_event(&self) -> bool { self.needs_quit } #[inline] pub fn set_is_exit_event(&mut self, is_quit: bool) { self.needs_quit = is_quit; } }
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("Event") .field("target_window", &self.target_window) .field("data", &self.data) .field("needs_quit", &self.needs_quit) .finish() }
function_block-full_function
[ { "content": "pub trait EventHandler = Fn(&Event) -> crate::Result<()> + Sync + Send + 'static;\n\n\n\npub(crate) fn default_event_handler(_ev: &Event) -> crate::Result<()> {\n\n log::debug!(\"Found event: {:?}\", _ev);\n\n Ok(())\n\n}\n\n\n", "file_path": "src/window/internal/mod.rs", "rank": 0, ...
Rust
src/connectivity/bluetooth/profiles/bt-a2dp-manager/src/inspect.rs
EnderNightLord-ChromeBook/fuchsia-pine64-pinephone
05e2c059b57b6217089090a0315971d1735ecf57
use { fidl_fuchsia_bluetooth_a2dp::Role, fuchsia_async as fasync, fuchsia_inspect::{self as inspect, Node, NumericProperty, Property}, fuchsia_inspect_contrib::nodes::NodeExt, fuchsia_inspect_derive::{AttachError, Inspect}, fuchsia_zircon as zx, futures::FutureExt, std::sync::Arc, }; use crate::util; #[derive(Default)] pub struct A2dpManagerInspect { role: inspect::StringProperty, role_set_count: inspect::UintProperty, role_set_at: Option<fuchsia_inspect_contrib::nodes::TimeProperty>, set_at: Arc<futures::lock::Mutex<Option<fasync::Time>>>, inspect_node: inspect::Node, } impl Inspect for &mut A2dpManagerInspect { fn iattach<'a>(self, parent: &'a Node, name: impl AsRef<str>) -> Result<(), AttachError> { self.inspect_node = parent.create_child(name); self.role = self.inspect_node.create_string("role", "Not Set"); self.role_set_count = self.inspect_node.create_uint("role_set_count", 0); self.set_at = Arc::new(futures::lock::Mutex::new(None)); let set_at_reader = self.set_at.clone(); self.inspect_node.record_lazy_values("time_since_role_set", move || { let set_at_reader = set_at_reader.clone(); async move { let inspector = inspect::Inspector::new(); if let Some(set_at) = *set_at_reader.lock().await { let time = duration_to_formatted_seconds(fasync::Time::now() - set_at); inspector.root().record_string("time_since_role_set", time); } Ok(inspector) } .boxed() }); Ok(()) } } impl A2dpManagerInspect { pub async fn set_role(&mut self, role: Role) { self.role.set(util::to_display_str(role)); self.role_set_count.add(1); let now = fasync::Time::now(); if let Some(prop) = &self.role_set_at { prop.set_at(now.into()); } else { self.role_set_at = Some(self.inspect_node.create_time_at("role_set_at_time", now.into())); } *self.set_at.lock().await = Some(now); } } fn duration_to_formatted_seconds(duration: zx::Duration) -> String { let seconds = duration.into_seconds(); let millis = duration.into_millis() % 1000; format!("{}.{:03}", seconds, millis) } #[cfg(test)] mod tests { use 
super::*; use async_utils::PollExt; use fuchsia_async::DurationExt; use fuchsia_inspect::assert_inspect_tree; use fuchsia_inspect_derive::WithInspect; use fuchsia_zircon::DurationNum; use futures::pin_mut; #[test] fn inspect_tree() { let mut exec = fasync::Executor::new_with_fake_time().unwrap(); exec.set_fake_time(fasync::Time::from_nanos(1_234500000)); let inspector = inspect::component::inspector(); let root = inspector.root(); let mut inspect = A2dpManagerInspect::default().with_inspect(&root, "operating_mode").unwrap(); assert_inspect_tree!(inspector, root: { operating_mode: { role: "Not Set", role_set_count: 0u64, } }); { let fut = inspect.set_role(Role::Sink); pin_mut!(fut); exec.run_until_stalled(&mut fut).unwrap(); } assert_inspect_tree!(inspector, root: { operating_mode: { role: "Sink", role_set_count: 1u64, role_set_at_time: 1_234500000i64, time_since_role_set: "0.000", } }); exec.set_fake_time(5.seconds().after_now()); assert_inspect_tree!(inspector, root: { operating_mode: { role: "Sink", role_set_count: 1u64, role_set_at_time: 1_234500000i64, time_since_role_set: "5.000", } }); exec.set_fake_time(1.seconds().after_now()); { let fut = inspect.set_role(Role::Source); pin_mut!(fut); exec.run_until_stalled(&mut fut).unwrap(); } exec.set_fake_time(2_123.millis().after_now()); assert_inspect_tree!(inspector, root: { operating_mode: { role: "Source", role_set_count: 2u64, role_set_at_time: 7_234500000i64, time_since_role_set: "2.123", } }); } }
use { fidl_fuchsia_bluetooth_a2dp::Role, fuchsia_async as fasync, fuchsia_inspect::{self as inspect, Node, NumericProperty, Property}, fuchsia_inspect_contrib::nodes::NodeExt, fuchsia_inspect_derive::{AttachError, Inspect}, fuchsia_zircon as zx, futures::FutureExt, std::sync::Arc, }; use crate::util; #[derive(Default)] pub struct A2dpManagerInspect { role: inspect::StringProperty, role_set_count: inspect::UintProperty, role_set_at: Option<fuchsia_inspect_contrib::nodes::TimeProperty>, set_at: Arc<futures::lock::Mutex<Option<fasync::Time>>>, inspect_node: inspect::Node, } impl Inspect for &mut A2dpManagerInspect {
:now(); if let Some(prop) = &self.role_set_at { prop.set_at(now.into()); } else { self.role_set_at = Some(self.inspect_node.create_time_at("role_set_at_time", now.into())); } *self.set_at.lock().await = Some(now); } } fn duration_to_formatted_seconds(duration: zx::Duration) -> String { let seconds = duration.into_seconds(); let millis = duration.into_millis() % 1000; format!("{}.{:03}", seconds, millis) } #[cfg(test)] mod tests { use super::*; use async_utils::PollExt; use fuchsia_async::DurationExt; use fuchsia_inspect::assert_inspect_tree; use fuchsia_inspect_derive::WithInspect; use fuchsia_zircon::DurationNum; use futures::pin_mut; #[test] fn inspect_tree() { let mut exec = fasync::Executor::new_with_fake_time().unwrap(); exec.set_fake_time(fasync::Time::from_nanos(1_234500000)); let inspector = inspect::component::inspector(); let root = inspector.root(); let mut inspect = A2dpManagerInspect::default().with_inspect(&root, "operating_mode").unwrap(); assert_inspect_tree!(inspector, root: { operating_mode: { role: "Not Set", role_set_count: 0u64, } }); { let fut = inspect.set_role(Role::Sink); pin_mut!(fut); exec.run_until_stalled(&mut fut).unwrap(); } assert_inspect_tree!(inspector, root: { operating_mode: { role: "Sink", role_set_count: 1u64, role_set_at_time: 1_234500000i64, time_since_role_set: "0.000", } }); exec.set_fake_time(5.seconds().after_now()); assert_inspect_tree!(inspector, root: { operating_mode: { role: "Sink", role_set_count: 1u64, role_set_at_time: 1_234500000i64, time_since_role_set: "5.000", } }); exec.set_fake_time(1.seconds().after_now()); { let fut = inspect.set_role(Role::Source); pin_mut!(fut); exec.run_until_stalled(&mut fut).unwrap(); } exec.set_fake_time(2_123.millis().after_now()); assert_inspect_tree!(inspector, root: { operating_mode: { role: "Source", role_set_count: 2u64, role_set_at_time: 7_234500000i64, time_since_role_set: "2.123", } }); } }
fn iattach<'a>(self, parent: &'a Node, name: impl AsRef<str>) -> Result<(), AttachError> { self.inspect_node = parent.create_child(name); self.role = self.inspect_node.create_string("role", "Not Set"); self.role_set_count = self.inspect_node.create_uint("role_set_count", 0); self.set_at = Arc::new(futures::lock::Mutex::new(None)); let set_at_reader = self.set_at.clone(); self.inspect_node.record_lazy_values("time_since_role_set", move || { let set_at_reader = set_at_reader.clone(); async move { let inspector = inspect::Inspector::new(); if let Some(set_at) = *set_at_reader.lock().await { let time = duration_to_formatted_seconds(fasync::Time::now() - set_at); inspector.root().record_string("time_since_role_set", time); } Ok(inspector) } .boxed() }); Ok(()) } } impl A2dpManagerInspect { pub async fn set_role(&mut self, role: Role) { self.role.set(util::to_display_str(role)); self.role_set_count.add(1); let now = fasync::Time:
random
[]
Rust
src/systems/gsm.rs
thebluefish/bevy_sandbox
de175f89cdfd04045c82b41988d50cd9ea743fd5
use crate::resources::*; use super::states; use bevy::{prelude::*, ecs::{Schedule, ThreadLocalExecution, SystemId, TypeAccess, ArchetypeAccess, ParallelExecutor}}; use std::borrow::Cow; pub struct GamePlugin; impl Plugin for GamePlugin { fn build(&self, app: &mut AppBuilder) { app .add_resource(Loader::default()) .add_resource(GameState::Stage0) .add_resource(GameTick::default()) .add_stage_before("pre_update", "game_update") .add_system_to_stage("game_update", Box::new(GameStateManager::new())) ; } } pub struct StateSchedule(pub Schedule, pub ParallelExecutor); impl Default for StateSchedule { fn default() -> Self { StateSchedule(Default::default(), ParallelExecutor::without_tracker_clears()) } } impl StateSchedule { fn run(&mut self, mut world: &mut World, mut resources: &mut Resources) { self.0.initialize(world, resources); self.1.run(&mut self.0, &mut world, &mut resources); } } pub struct GameStateManager { pub resource_access: TypeAccess, pub id: SystemId, pub archetype_access: ArchetypeAccess, pub acc: f64, pub stage_0: StateSchedule, pub stage_1: StateSchedule, pub stage_2: StateSchedule, pub stage_3: StateSchedule, pub game_start: StateSchedule, pub game: StateSchedule, } impl GameStateManager { pub fn new() -> Self { GameStateManager { resource_access: Default::default(), id: SystemId::new(), archetype_access: Default::default(), acc: 0.0, stage_0: Default::default(), stage_1: Default::default(), stage_2: Default::default(), stage_3: Default::default(), game_start: Default::default(), game: Default::default() } } } impl System for GameStateManager { fn name(&self) -> Cow<'static, str> { "GSM".into() } fn id(&self) -> SystemId { self.id } fn update_archetype_access(&mut self, _world: &World) { self.archetype_access.clear(); } fn archetype_access(&self) -> &ArchetypeAccess { &self.archetype_access } fn resource_access(&self) -> &TypeAccess { &self.resource_access } fn thread_local_execution(&self) -> ThreadLocalExecution { ThreadLocalExecution::Immediate } 
fn run(&mut self, _world: &World, _resources: &Resources) { } fn run_thread_local(&mut self, mut world: &mut World, mut resources: &mut Resources) { let state = *resources.get::<GameState>().unwrap(); match state { GameState::Stage0 => self.stage_0.run(&mut world, &mut resources), GameState::Stage1 => self.stage_1.run(&mut world, &mut resources), GameState::Stage2 => self.stage_2.run(&mut world, &mut resources), GameState::Stage3 => self.stage_3.run(&mut world, &mut resources), GameState::GameStart => self.game_start.run(&mut world, &mut resources), GameState::Game => { let rate = match (resources.get::<Time>(), resources.get::<GameTick>()) { (Some(time), Some(rate)) => { self.acc += time.delta_seconds_f64; Some(rate.rate) }, _ => { None }, }; match rate { Some(rate) => { while self.acc >= rate { self.game.run(world, resources); self.acc -= rate; } }, _ => {}, } }, } } fn initialize(&mut self, _world: &mut World, _resources: &mut Resources) { states::stage_0::initialize(&mut self.stage_0.0); states::stage_1::initialize(&mut self.stage_1.0); states::stage_2::initialize(&mut self.stage_2.0); states::stage_3::initialize(&mut self.stage_3.0); states::game_start::initialize(&mut self.game_start.0); states::game::initialize(&mut self.game.0); } }
use crate::resources::*; use super::states; use bevy::{prelude::*, ecs::{Schedule, ThreadLoc
peAccess { &self.resource_access } fn thread_local_execution(&self) -> ThreadLocalExecution { ThreadLocalExecution::Immediate } fn run(&mut self, _world: &World, _resources: &Resources) { } fn run_thread_local(&mut self, mut world: &mut World, mut resources: &mut Resources) { let state = *resources.get::<GameState>().unwrap(); match state { GameState::Stage0 => self.stage_0.run(&mut world, &mut resources), GameState::Stage1 => self.stage_1.run(&mut world, &mut resources), GameState::Stage2 => self.stage_2.run(&mut world, &mut resources), GameState::Stage3 => self.stage_3.run(&mut world, &mut resources), GameState::GameStart => self.game_start.run(&mut world, &mut resources), GameState::Game => { let rate = match (resources.get::<Time>(), resources.get::<GameTick>()) { (Some(time), Some(rate)) => { self.acc += time.delta_seconds_f64; Some(rate.rate) }, _ => { None }, }; match rate { Some(rate) => { while self.acc >= rate { self.game.run(world, resources); self.acc -= rate; } }, _ => {}, } }, } } fn initialize(&mut self, _world: &mut World, _resources: &mut Resources) { states::stage_0::initialize(&mut self.stage_0.0); states::stage_1::initialize(&mut self.stage_1.0); states::stage_2::initialize(&mut self.stage_2.0); states::stage_3::initialize(&mut self.stage_3.0); states::game_start::initialize(&mut self.game_start.0); states::game::initialize(&mut self.game.0); } }
alExecution, SystemId, TypeAccess, ArchetypeAccess, ParallelExecutor}}; use std::borrow::Cow; pub struct GamePlugin; impl Plugin for GamePlugin { fn build(&self, app: &mut AppBuilder) { app .add_resource(Loader::default()) .add_resource(GameState::Stage0) .add_resource(GameTick::default()) .add_stage_before("pre_update", "game_update") .add_system_to_stage("game_update", Box::new(GameStateManager::new())) ; } } pub struct StateSchedule(pub Schedule, pub ParallelExecutor); impl Default for StateSchedule { fn default() -> Self { StateSchedule(Default::default(), ParallelExecutor::without_tracker_clears()) } } impl StateSchedule { fn run(&mut self, mut world: &mut World, mut resources: &mut Resources) { self.0.initialize(world, resources); self.1.run(&mut self.0, &mut world, &mut resources); } } pub struct GameStateManager { pub resource_access: TypeAccess, pub id: SystemId, pub archetype_access: ArchetypeAccess, pub acc: f64, pub stage_0: StateSchedule, pub stage_1: StateSchedule, pub stage_2: StateSchedule, pub stage_3: StateSchedule, pub game_start: StateSchedule, pub game: StateSchedule, } impl GameStateManager { pub fn new() -> Self { GameStateManager { resource_access: Default::default(), id: SystemId::new(), archetype_access: Default::default(), acc: 0.0, stage_0: Default::default(), stage_1: Default::default(), stage_2: Default::default(), stage_3: Default::default(), game_start: Default::default(), game: Default::default() } } } impl System for GameStateManager { fn name(&self) -> Cow<'static, str> { "GSM".into() } fn id(&self) -> SystemId { self.id } fn update_archetype_access(&mut self, _world: &World) { self.archetype_access.clear(); } fn archetype_access(&self) -> &ArchetypeAccess { &self.archetype_access } fn resource_access(&self) -> &Ty
random
[ { "content": "use bevy::prelude::*;\n\nuse crate::components::AnimSprite;\n\n\n", "file_path": "src/systems/anim_sprite.rs", "rank": 0, "score": 2.126389271913369 }, { "content": "use bevy::prelude::*;\n\nuse crate::{assets, core::loader::*, resources::LoadingScreen};\n\nuse crate::component...
Rust
src/zksolc/main.rs
matter-labs/compiler-solidity
4bd2b9c9786a234660f2012d8ee49874031ac9ff
pub mod arguments; use self::arguments::Arguments; fn main() { std::process::exit(match main_inner() { Ok(()) => compiler_common::EXIT_CODE_SUCCESS, Err(error) => { eprintln!("{}", error); compiler_common::EXIT_CODE_FAILURE } }) } fn main_inner() -> anyhow::Result<()> { let mut arguments = Arguments::new(); arguments.validate()?; let dump_flags = compiler_solidity::DumpFlag::from_booleans( arguments.dump_yul, arguments.dump_ethir, arguments.dump_evm, arguments.dump_llvm, arguments.dump_assembly, ); for path in arguments.input_files.iter_mut() { *path = path.canonicalize()?; } let solc = compiler_solidity::SolcCompiler::new(arguments.solc.unwrap_or_else(|| { compiler_solidity::SolcCompiler::DEFAULT_EXECUTABLE_NAME.to_owned() })); let solc_version = solc.version()?; let pipeline = if solc_version.minor >= 8 { compiler_solidity::SolcPipeline::Yul } else { compiler_solidity::SolcPipeline::EVM }; compiler_llvm_context::initialize_target(); if let Some(llvm_options) = arguments.llvm_options { let llvm_options = shell_words::split(llvm_options.as_str()) .map_err(|error| anyhow::anyhow!("LLVM options parsing error: {}", error))?; let llvm_options = Vec::from_iter(llvm_options.iter().map(String::as_str)); inkwell::support::parse_command_line_options( llvm_options.len() as i32, llvm_options.as_slice(), "", ); } let build = if arguments.yul { let path = match arguments.input_files.len() { 1 => arguments.input_files.remove(0), 0 => anyhow::bail!("The input file is missing"), length => anyhow::bail!( "Only one input file is allowed in the Yul mode, but found {}", length ), }; let project = compiler_solidity::Project::try_from_default_yul(&path, &solc_version)?; let optimizer_settings = if arguments.optimize { compiler_llvm_context::OptimizerSettings::cycles() } else { compiler_llvm_context::OptimizerSettings::none() }; project.compile_all(optimizer_settings, dump_flags) } else { let output_selection = compiler_solidity::SolcStandardJsonInputSettings::get_output_selection( 
arguments .input_files .iter() .map(|path| path.to_string_lossy().to_string()) .collect(), pipeline, ); let solc_input = if arguments.standard_json { let mut input: compiler_solidity::SolcStandardJsonInput = serde_json::from_reader(std::io::BufReader::new(std::io::stdin()))?; input.settings.output_selection = output_selection; input } else { compiler_solidity::SolcStandardJsonInput::try_from_paths( compiler_solidity::SolcStandardJsonInputLanguage::Solidity, arguments.input_files.as_slice(), arguments.libraries, output_selection, true, )? }; let libraries = solc_input.settings.libraries.clone().unwrap_or_default(); let optimize = if arguments.standard_json { solc_input.settings.optimizer.enabled } else { arguments.optimize }; let mut solc_output = solc.standard_json( solc_input, arguments.base_path, arguments.include_paths, arguments.allow_paths, )?; if let Some(errors) = solc_output.errors.as_deref() { let mut cannot_compile = false; for error in errors.iter() { if error.severity.as_str() == "error" { cannot_compile = true; if arguments.standard_json { serde_json::to_writer(std::io::stdout(), &solc_output)?; return Ok(()); } } if !arguments.standard_json && arguments.combined_json.is_none() { eprintln!("{}", error); } } if cannot_compile { anyhow::bail!("Error(s) found. Compilation aborted"); } } let project = solc_output.try_to_project(libraries, pipeline, solc_version, dump_flags.as_slice())?; let optimizer_settings = if optimize { compiler_llvm_context::OptimizerSettings::cycles() } else { compiler_llvm_context::OptimizerSettings::none() }; let build = project.compile_all(optimizer_settings, dump_flags)?; if arguments.standard_json { build.write_to_standard_json(&mut solc_output)?; serde_json::to_writer(std::io::stdout(), &solc_output)?; return Ok(()); } Ok(build) }?; let combined_json = if let Some(combined_json) = arguments.combined_json { Some(solc.combined_json(arguments.input_files.as_slice(), combined_json.as_str())?) 
} else { None }; if let Some(output_directory) = arguments.output_directory { std::fs::create_dir_all(&output_directory)?; if let Some(mut combined_json) = combined_json { build.write_to_combined_json(&mut combined_json)?; combined_json.write_to_directory(&output_directory, arguments.overwrite)?; } else { build.write_to_directory( &output_directory, arguments.output_assembly, arguments.output_binary, arguments.output_abi, arguments.overwrite, )?; } eprintln!( "Compiler run successful. Artifact(s) can be found in directory {:?}.", output_directory ); } else if let Some(mut combined_json) = combined_json { build.write_to_combined_json(&mut combined_json)?; println!( "{}", serde_json::to_string(&combined_json).expect("Always valid") ); } else if arguments.output_assembly || arguments.output_binary || arguments.output_hashes || arguments.output_abi { for (path, contract) in build.contracts.into_iter() { if arguments.output_assembly { println!( "Contract `{}` assembly:\n\n{}", path, contract.build.assembly_text ); } if arguments.output_binary { println!( "Contract `{}` bytecode: 0x{}", path, hex::encode(contract.build.bytecode) ); } } if arguments.output_abi || arguments.output_hashes { let extra_output = solc.extra_output( arguments.input_files.as_slice(), arguments.output_abi, arguments.output_hashes, )?; print!("{}", extra_output); } } else { eprintln!("Compiler run successful. No output requested. Use --asm and --bin flags."); } Ok(()) }
pub mod arguments; use self::arguments::Arguments; fn main() { std::process::exit(match main_inner() { Ok(()) => compiler_common::EXIT_CODE_SUCCESS, Err(error) => { eprintln!("{}", error); compiler_common::EXIT_CODE_FAILURE } }) }
fn main_inner() -> anyhow::Result<()> { let mut arguments = Arguments::new(); arguments.validate()?; let dump_flags = compiler_solidity::DumpFlag::from_booleans( arguments.dump_yul, arguments.dump_ethir, arguments.dump_evm, arguments.dump_llvm, arguments.dump_assembly, ); for path in arguments.input_files.iter_mut() { *path = path.canonicalize()?; } let solc = compiler_solidity::SolcCompiler::new(arguments.solc.unwrap_or_else(|| { compiler_solidity::SolcCompiler::DEFAULT_EXECUTABLE_NAME.to_owned() })); let solc_version = solc.version()?; let pipeline = if solc_version.minor >= 8 { compiler_solidity::SolcPipeline::Yul } else { compiler_solidity::SolcPipeline::EVM }; compiler_llvm_context::initialize_target(); if let Some(llvm_options) = arguments.llvm_options { let llvm_options = shell_words::split(llvm_options.as_str()) .map_err(|error| anyhow::anyhow!("LLVM options parsing error: {}", error))?; let llvm_options = Vec::from_iter(llvm_options.iter().map(String::as_str)); inkwell::support::parse_command_line_options( llvm_options.len() as i32, llvm_options.as_slice(), "", ); } let build = if arguments.yul { let path = match arguments.input_files.len() { 1 => arguments.input_files.remove(0), 0 => anyhow::bail!("The input file is missing"), length => anyhow::bail!( "Only one input file is allowed in the Yul mode, but found {}", length ), }; let project = compiler_solidity::Project::try_from_default_yul(&path, &solc_version)?; let optimizer_settings = if arguments.optimize { compiler_llvm_context::OptimizerSettings::cycles() } else { compiler_llvm_context::OptimizerSettings::none() }; project.compile_all(optimizer_settings, dump_flags) } else { let output_selection = compiler_solidity::SolcStandardJsonInputSettings::get_output_selection( arguments .input_files .iter() .map(|path| path.to_string_lossy().to_string()) .collect(), pipeline, ); let solc_input = if arguments.standard_json { let mut input: compiler_solidity::SolcStandardJsonInput = 
serde_json::from_reader(std::io::BufReader::new(std::io::stdin()))?; input.settings.output_selection = output_selection; input } else { compiler_solidity::SolcStandardJsonInput::try_from_paths( compiler_solidity::SolcStandardJsonInputLanguage::Solidity, arguments.input_files.as_slice(), arguments.libraries, output_selection, true, )? }; let libraries = solc_input.settings.libraries.clone().unwrap_or_default(); let optimize = if arguments.standard_json { solc_input.settings.optimizer.enabled } else { arguments.optimize }; let mut solc_output = solc.standard_json( solc_input, arguments.base_path, arguments.include_paths, arguments.allow_paths, )?; if let Some(errors) = solc_output.errors.as_deref() { let mut cannot_compile = false; for error in errors.iter() { if error.severity.as_str() == "error" { cannot_compile = true; if arguments.standard_json { serde_json::to_writer(std::io::stdout(), &solc_output)?; return Ok(()); } } if !arguments.standard_json && arguments.combined_json.is_none() { eprintln!("{}", error); } } if cannot_compile { anyhow::bail!("Error(s) found. Compilation aborted"); } } let project = solc_output.try_to_project(libraries, pipeline, solc_version, dump_flags.as_slice())?; let optimizer_settings = if optimize { compiler_llvm_context::OptimizerSettings::cycles() } else { compiler_llvm_context::OptimizerSettings::none() }; let build = project.compile_all(optimizer_settings, dump_flags)?; if arguments.standard_json { build.write_to_standard_json(&mut solc_output)?; serde_json::to_writer(std::io::stdout(), &solc_output)?; return Ok(()); } Ok(build) }?; let combined_json = if let Some(combined_json) = arguments.combined_json { Some(solc.combined_json(arguments.input_files.as_slice(), combined_json.as_str())?) 
} else { None }; if let Some(output_directory) = arguments.output_directory { std::fs::create_dir_all(&output_directory)?; if let Some(mut combined_json) = combined_json { build.write_to_combined_json(&mut combined_json)?; combined_json.write_to_directory(&output_directory, arguments.overwrite)?; } else { build.write_to_directory( &output_directory, arguments.output_assembly, arguments.output_binary, arguments.output_abi, arguments.overwrite, )?; } eprintln!( "Compiler run successful. Artifact(s) can be found in directory {:?}.", output_directory ); } else if let Some(mut combined_json) = combined_json { build.write_to_combined_json(&mut combined_json)?; println!( "{}", serde_json::to_string(&combined_json).expect("Always valid") ); } else if arguments.output_assembly || arguments.output_binary || arguments.output_hashes || arguments.output_abi { for (path, contract) in build.contracts.into_iter() { if arguments.output_assembly { println!( "Contract `{}` assembly:\n\n{}", path, contract.build.assembly_text ); } if arguments.output_binary { println!( "Contract `{}` bytecode: 0x{}", path, hex::encode(contract.build.bytecode) ); } } if arguments.output_abi || arguments.output_hashes { let extra_output = solc.extra_output( arguments.input_files.as_slice(), arguments.output_abi, arguments.output_hashes, )?; print!("{}", extra_output); } } else { eprintln!("Compiler run successful. No output requested. Use --asm and --bin flags."); } Ok(()) }
function_block-full_function
[]
Rust
lib/interface-types/src/interpreter/instructions/strings.rs
terra-money/wasmer
567a790dae9e428650a5c85e6b4dae82a06bce47
use super::to_native; use crate::{ errors::{InstructionError, InstructionErrorKind}, interpreter::Instruction, types::InterfaceType, values::InterfaceValue, }; use std::{cell::Cell, convert::TryInto}; executable_instruction!( string_lift_memory(instruction: Instruction) -> _ { move |runtime| -> _ { let inputs = runtime.stack.pop(2).ok_or_else(|| { InstructionError::new( instruction, InstructionErrorKind::StackIsTooSmall { needed: 2 }, ) })?; let memory_index: u32 = 0; let memory = runtime .wasm_instance .memory(memory_index as usize) .ok_or_else(|| { InstructionError::new( instruction, InstructionErrorKind::MemoryIsMissing { memory_index }, ) })?; let pointer: usize = to_native::<i32>(&inputs[0], instruction)? .try_into() .map_err(|e| (e, "pointer").into()) .map_err(|k| InstructionError::new(instruction, k))?; let length: usize = to_native::<i32>(&inputs[1], instruction)? .try_into() .map_err(|e| (e, "length").into()) .map_err(|k| InstructionError::new(instruction, k))?; let memory_view = memory.view(); if length == 0 { runtime.stack.push(InterfaceValue::String("".into())); return Ok(()) } if memory_view.len() <= pointer + length - 1 { return Err(InstructionError::new( instruction, InstructionErrorKind::MemoryOutOfBoundsAccess { index: pointer + length, length: memory_view.len(), }, )); } let data: Vec<u8> = (&memory_view[pointer..=pointer + length - 1]) .iter() .map(Cell::get) .collect(); let string = String::from_utf8(data) .map_err(|error| InstructionError::new(instruction, InstructionErrorKind::String(error)))?; runtime.stack.push(InterfaceValue::String(string)); Ok(()) } } ); executable_instruction!( string_lower_memory(instruction: Instruction) -> _ { move |runtime| -> _ { let inputs = runtime.stack.pop(2).ok_or_else(|| { InstructionError::new( instruction, InstructionErrorKind::StackIsTooSmall { needed: 2 }, ) })?; let string_pointer: usize = to_native::<i32>(&inputs[0], instruction)? 
.try_into() .map_err(|e| (e, "pointer").into()) .map_err(|k| InstructionError::new(instruction, k))?; let string: String = to_native(&inputs[1], instruction)?; let string_bytes = string.as_bytes(); let string_length: i32 = string_bytes.len().try_into().map_err(|_| { InstructionError::new( instruction, InstructionErrorKind::NegativeValue { subject: "string_length" }, ) })?; let instance = &mut runtime.wasm_instance; let memory_index: u32 = 0; let memory_view = instance .memory(memory_index as usize) .ok_or_else(|| { InstructionError::new( instruction, InstructionErrorKind::MemoryIsMissing { memory_index }, ) })? .view(); for (nth, byte) in string_bytes.iter().enumerate() { memory_view[string_pointer as usize + nth].set(*byte); } runtime.stack.push(InterfaceValue::I32(string_pointer as i32)); runtime.stack.push(InterfaceValue::I32(string_length)); Ok(()) } } ); executable_instruction!( string_size(instruction: Instruction) -> _ { move |runtime| -> _ { match runtime.stack.pop1() { Some(InterfaceValue::String(string)) => { let length = string.len() as i32; runtime.stack.push(InterfaceValue::I32(length)); Ok(()) }, Some(value) => Err(InstructionError::new( instruction, InstructionErrorKind::InvalidValueOnTheStack { expected_type: InterfaceType::String, received_type: (&value).into(), }, )), None => Err(InstructionError::new( instruction, InstructionErrorKind::StackIsTooSmall { needed: 1 }, )), } } } ); #[cfg(test)] mod tests { test_executable_instruction!( test_string_lift_memory = instructions: [ Instruction::ArgumentGet { index: 0 }, Instruction::ArgumentGet { index: 1 }, Instruction::StringLiftMemory, ], invocation_inputs: [ InterfaceValue::I32(0), InterfaceValue::I32(13), ], instance: Instance { memory: Memory::new("Hello, World!".as_bytes().iter().map(|u| Cell::new(*u)).collect()), ..Default::default() }, stack: [InterfaceValue::String("Hello, World!".into())], ); test_executable_instruction!( test_string_lift_memory__empty_string = instructions: [ 
Instruction::ArgumentGet { index: 0 }, Instruction::ArgumentGet { index: 1 }, Instruction::StringLiftMemory, ], invocation_inputs: [ InterfaceValue::I32(0), InterfaceValue::I32(0), ], instance: Instance { memory: Memory::new(vec![]), ..Default::default() }, stack: [InterfaceValue::String("".into())], ); test_executable_instruction!( test_string_lift_memory__negative_pointer = instructions: [ Instruction::ArgumentGet { index: 0 }, Instruction::ArgumentGet { index: 1 }, Instruction::StringLiftMemory, ], invocation_inputs: [ InterfaceValue::I32(-42), InterfaceValue::I32(13), ], instance: Instance { memory: Memory::new("Hello!".as_bytes().iter().map(|u| Cell::new(*u)).collect()), ..Default::default() }, error: r#"`string.lift_memory` attempted to convert `pointer` but it appears to be a negative value"#, ); test_executable_instruction!( test_string_lift_memory__negative_length = instructions: [ Instruction::ArgumentGet { index: 0 }, Instruction::ArgumentGet { index: 1 }, Instruction::StringLiftMemory, ], invocation_inputs: [ InterfaceValue::I32(0), InterfaceValue::I32(-1), ], instance: Instance { memory: Memory::new("Hello!".as_bytes().iter().map(|u| Cell::new(*u)).collect()), ..Default::default() }, error: r#"`string.lift_memory` attempted to convert `length` but it appears to be a negative value"#, ); test_executable_instruction!( test_string_lift_memory__read_out_of_memory = instructions: [ Instruction::ArgumentGet { index: 0 }, Instruction::ArgumentGet { index: 1 }, Instruction::StringLiftMemory, ], invocation_inputs: [ InterfaceValue::I32(0), InterfaceValue::I32(13), ], instance: Instance { memory: Memory::new("Hello!".as_bytes().iter().map(|u| Cell::new(*u)).collect()), ..Default::default() }, error: r#"`string.lift_memory` read out of the memory bounds (index 13 > memory length 6)"#, ); test_executable_instruction!( test_string_lift_memory__invalid_encoding = instructions: [ Instruction::ArgumentGet { index: 0 }, Instruction::ArgumentGet { index: 1 }, 
Instruction::StringLiftMemory, ], invocation_inputs: [ InterfaceValue::I32(0), InterfaceValue::I32(4), ], instance: Instance { memory: Memory::new(vec![0, 159, 146, 150].iter().map(|b| Cell::new(*b)).collect::<Vec<Cell<u8>>>()), ..Default::default() }, error: r#"`string.lift_memory` invalid utf-8 sequence of 1 bytes from index 1"#, ); test_executable_instruction!( test_string_lift_memory__stack_is_too_small = instructions: [ Instruction::ArgumentGet { index: 0 }, Instruction::StringLiftMemory, ], invocation_inputs: [ InterfaceValue::I32(0), InterfaceValue::I32(13), ], instance: Instance::new(), error: r#"`string.lift_memory` needed to read `2` value(s) from the stack, but it doesn't contain enough data"#, ); test_executable_instruction!( test_string_lower_memory = instructions: [ Instruction::ArgumentGet { index: 0 }, Instruction::StringSize, Instruction::CallCore { function_index: 43 }, Instruction::ArgumentGet { index: 0 }, Instruction::StringLowerMemory, ], invocation_inputs: [InterfaceValue::String("Hello, World!".into())], instance: Instance::new(), stack: [ InterfaceValue::I32(0), InterfaceValue::I32(13), ] ); test_executable_instruction!( test_string__roundtrip = instructions: [ Instruction::ArgumentGet { index: 0 }, Instruction::StringSize, Instruction::CallCore { function_index: 43 }, Instruction::ArgumentGet { index: 0 }, Instruction::StringLowerMemory, Instruction::StringLiftMemory, ], invocation_inputs: [InterfaceValue::String("Hello, World!".into())], instance: Instance::new(), stack: [InterfaceValue::String("Hello, World!".into())], ); test_executable_instruction!( test_string_lower_memory__stack_is_too_small = instructions: [ Instruction::StringLowerMemory, ], invocation_inputs: [], instance: Instance::new(), error: r#"`string.lower_memory` needed to read `2` value(s) from the stack, but it doesn't contain enough data"#, ); test_executable_instruction!( test_string_size = instructions: [ Instruction::ArgumentGet { index: 0 }, Instruction::StringSize, 
], invocation_inputs: [InterfaceValue::String("Hello, World!".into())], instance: Instance::new(), stack: [InterfaceValue::I32(13)], ); test_executable_instruction!( test_string_size__stack_is_too_small = instructions: [ Instruction::StringSize, ], invocation_inputs: [], instance: Instance::new(), error: r#"`string.size` needed to read `1` value(s) from the stack, but it doesn't contain enough data"#, ); test_executable_instruction!( test_string_size__invalid_value_on_the_stack = instructions: [ Instruction::ArgumentGet { index: 0 }, Instruction::StringSize, ], invocation_inputs: [InterfaceValue::I32(42)], instance: Instance::new(), error: r#"`string.size` read a value of type `I32` from the stack, but the type `String` was expected"#, ); }
use super::to_native; use crate::{ errors::{InstructionError, InstructionErrorKind}, interpreter::Instruction, types::InterfaceType, values::InterfaceValue, }; use std::{cell::Cell, convert::TryInto}; executable_instruction!( string_lift_memory(instruction: Instruction) -> _ { move |runtime| -> _ { let inputs = runtime.stack.pop(2).ok_or_else(|| { InstructionError::new( instruction, InstructionErrorKind::StackIsTooSmall { needed: 2 }, ) })?; let memory_index: u32 = 0; let memory = runtime .wasm_instance .memory(memory_index as usize) .ok_or_else(|| { InstructionError::new( instruction,
ew(instruction, k))?; let memory_view = memory.view(); if length == 0 { runtime.stack.push(InterfaceValue::String("".into())); return Ok(()) } if memory_view.len() <= pointer + length - 1 { return Err(InstructionError::new( instruction, InstructionErrorKind::MemoryOutOfBoundsAccess { index: pointer + length, length: memory_view.len(), }, )); } let data: Vec<u8> = (&memory_view[pointer..=pointer + length - 1]) .iter() .map(Cell::get) .collect(); let string = String::from_utf8(data) .map_err(|error| InstructionError::new(instruction, InstructionErrorKind::String(error)))?; runtime.stack.push(InterfaceValue::String(string)); Ok(()) } } ); executable_instruction!( string_lower_memory(instruction: Instruction) -> _ { move |runtime| -> _ { let inputs = runtime.stack.pop(2).ok_or_else(|| { InstructionError::new( instruction, InstructionErrorKind::StackIsTooSmall { needed: 2 }, ) })?; let string_pointer: usize = to_native::<i32>(&inputs[0], instruction)? .try_into() .map_err(|e| (e, "pointer").into()) .map_err(|k| InstructionError::new(instruction, k))?; let string: String = to_native(&inputs[1], instruction)?; let string_bytes = string.as_bytes(); let string_length: i32 = string_bytes.len().try_into().map_err(|_| { InstructionError::new( instruction, InstructionErrorKind::NegativeValue { subject: "string_length" }, ) })?; let instance = &mut runtime.wasm_instance; let memory_index: u32 = 0; let memory_view = instance .memory(memory_index as usize) .ok_or_else(|| { InstructionError::new( instruction, InstructionErrorKind::MemoryIsMissing { memory_index }, ) })? 
.view(); for (nth, byte) in string_bytes.iter().enumerate() { memory_view[string_pointer as usize + nth].set(*byte); } runtime.stack.push(InterfaceValue::I32(string_pointer as i32)); runtime.stack.push(InterfaceValue::I32(string_length)); Ok(()) } } ); executable_instruction!( string_size(instruction: Instruction) -> _ { move |runtime| -> _ { match runtime.stack.pop1() { Some(InterfaceValue::String(string)) => { let length = string.len() as i32; runtime.stack.push(InterfaceValue::I32(length)); Ok(()) }, Some(value) => Err(InstructionError::new( instruction, InstructionErrorKind::InvalidValueOnTheStack { expected_type: InterfaceType::String, received_type: (&value).into(), }, )), None => Err(InstructionError::new( instruction, InstructionErrorKind::StackIsTooSmall { needed: 1 }, )), } } } ); #[cfg(test)] mod tests { test_executable_instruction!( test_string_lift_memory = instructions: [ Instruction::ArgumentGet { index: 0 }, Instruction::ArgumentGet { index: 1 }, Instruction::StringLiftMemory, ], invocation_inputs: [ InterfaceValue::I32(0), InterfaceValue::I32(13), ], instance: Instance { memory: Memory::new("Hello, World!".as_bytes().iter().map(|u| Cell::new(*u)).collect()), ..Default::default() }, stack: [InterfaceValue::String("Hello, World!".into())], ); test_executable_instruction!( test_string_lift_memory__empty_string = instructions: [ Instruction::ArgumentGet { index: 0 }, Instruction::ArgumentGet { index: 1 }, Instruction::StringLiftMemory, ], invocation_inputs: [ InterfaceValue::I32(0), InterfaceValue::I32(0), ], instance: Instance { memory: Memory::new(vec![]), ..Default::default() }, stack: [InterfaceValue::String("".into())], ); test_executable_instruction!( test_string_lift_memory__negative_pointer = instructions: [ Instruction::ArgumentGet { index: 0 }, Instruction::ArgumentGet { index: 1 }, Instruction::StringLiftMemory, ], invocation_inputs: [ InterfaceValue::I32(-42), InterfaceValue::I32(13), ], instance: Instance { memory: 
Memory::new("Hello!".as_bytes().iter().map(|u| Cell::new(*u)).collect()), ..Default::default() }, error: r#"`string.lift_memory` attempted to convert `pointer` but it appears to be a negative value"#, ); test_executable_instruction!( test_string_lift_memory__negative_length = instructions: [ Instruction::ArgumentGet { index: 0 }, Instruction::ArgumentGet { index: 1 }, Instruction::StringLiftMemory, ], invocation_inputs: [ InterfaceValue::I32(0), InterfaceValue::I32(-1), ], instance: Instance { memory: Memory::new("Hello!".as_bytes().iter().map(|u| Cell::new(*u)).collect()), ..Default::default() }, error: r#"`string.lift_memory` attempted to convert `length` but it appears to be a negative value"#, ); test_executable_instruction!( test_string_lift_memory__read_out_of_memory = instructions: [ Instruction::ArgumentGet { index: 0 }, Instruction::ArgumentGet { index: 1 }, Instruction::StringLiftMemory, ], invocation_inputs: [ InterfaceValue::I32(0), InterfaceValue::I32(13), ], instance: Instance { memory: Memory::new("Hello!".as_bytes().iter().map(|u| Cell::new(*u)).collect()), ..Default::default() }, error: r#"`string.lift_memory` read out of the memory bounds (index 13 > memory length 6)"#, ); test_executable_instruction!( test_string_lift_memory__invalid_encoding = instructions: [ Instruction::ArgumentGet { index: 0 }, Instruction::ArgumentGet { index: 1 }, Instruction::StringLiftMemory, ], invocation_inputs: [ InterfaceValue::I32(0), InterfaceValue::I32(4), ], instance: Instance { memory: Memory::new(vec![0, 159, 146, 150].iter().map(|b| Cell::new(*b)).collect::<Vec<Cell<u8>>>()), ..Default::default() }, error: r#"`string.lift_memory` invalid utf-8 sequence of 1 bytes from index 1"#, ); test_executable_instruction!( test_string_lift_memory__stack_is_too_small = instructions: [ Instruction::ArgumentGet { index: 0 }, Instruction::StringLiftMemory, ], invocation_inputs: [ InterfaceValue::I32(0), InterfaceValue::I32(13), ], instance: Instance::new(), error: 
r#"`string.lift_memory` needed to read `2` value(s) from the stack, but it doesn't contain enough data"#, ); test_executable_instruction!( test_string_lower_memory = instructions: [ Instruction::ArgumentGet { index: 0 }, Instruction::StringSize, Instruction::CallCore { function_index: 43 }, Instruction::ArgumentGet { index: 0 }, Instruction::StringLowerMemory, ], invocation_inputs: [InterfaceValue::String("Hello, World!".into())], instance: Instance::new(), stack: [ InterfaceValue::I32(0), InterfaceValue::I32(13), ] ); test_executable_instruction!( test_string__roundtrip = instructions: [ Instruction::ArgumentGet { index: 0 }, Instruction::StringSize, Instruction::CallCore { function_index: 43 }, Instruction::ArgumentGet { index: 0 }, Instruction::StringLowerMemory, Instruction::StringLiftMemory, ], invocation_inputs: [InterfaceValue::String("Hello, World!".into())], instance: Instance::new(), stack: [InterfaceValue::String("Hello, World!".into())], ); test_executable_instruction!( test_string_lower_memory__stack_is_too_small = instructions: [ Instruction::StringLowerMemory, ], invocation_inputs: [], instance: Instance::new(), error: r#"`string.lower_memory` needed to read `2` value(s) from the stack, but it doesn't contain enough data"#, ); test_executable_instruction!( test_string_size = instructions: [ Instruction::ArgumentGet { index: 0 }, Instruction::StringSize, ], invocation_inputs: [InterfaceValue::String("Hello, World!".into())], instance: Instance::new(), stack: [InterfaceValue::I32(13)], ); test_executable_instruction!( test_string_size__stack_is_too_small = instructions: [ Instruction::StringSize, ], invocation_inputs: [], instance: Instance::new(), error: r#"`string.size` needed to read `1` value(s) from the stack, but it doesn't contain enough data"#, ); test_executable_instruction!( test_string_size__invalid_value_on_the_stack = instructions: [ Instruction::ArgumentGet { index: 0 }, Instruction::StringSize, ], invocation_inputs: 
[InterfaceValue::I32(42)], instance: Instance::new(), error: r#"`string.size` read a value of type `I32` from the stack, but the type `String` was expected"#, ); }
InstructionErrorKind::MemoryIsMissing { memory_index }, ) })?; let pointer: usize = to_native::<i32>(&inputs[0], instruction)? .try_into() .map_err(|e| (e, "pointer").into()) .map_err(|k| InstructionError::new(instruction, k))?; let length: usize = to_native::<i32>(&inputs[1], instruction)? .try_into() .map_err(|e| (e, "length").into()) .map_err(|k| InstructionError::n
random
[ { "content": "#[inline(always)]\n\nfn align_pointer(ptr: usize, align: usize) -> usize {\n\n // clears bits below aligment amount (assumes power of 2) to align pointer\n\n debug_assert!(align.count_ones() == 1);\n\n ptr & !(align - 1)\n\n}\n\n\n\n/// Methods for `WasmPtr`s to data that can be dereferen...
Rust
sources/rust/relm/html/src/parsers.rs
xunilrj/sandbox
f92c12f83433cac01a885585e41c02bb5826a01f
use syn::{ parse::{Parse, ParseStream}, token::{CustomToken, Token}, }; pub enum Or<A, B> { A(A), B(B), } impl<TA: Token, TB: Token> CustomToken for Or<TA, TB> { fn peek(cursor: syn::buffer::Cursor) -> bool { TA::peek(cursor) || TB::peek(cursor) } fn display() -> &'static str { "OR" } } impl<TA: Parse, TB: Parse> Parse for Or<TA, TB> { fn parse(input: ParseStream) -> syn::Result<Self> { if let Ok(ta) = TA::parse(input) { Ok(Or::A(ta)) } else if let Ok(tb) = TB::parse(input) { Ok(Or::B(tb)) } else { Err(input.error("Nor A nor B")) } } } pub fn parse_seq1<T1: Parse>(stream: &mut ParseStream) -> syn::Result<T1> { let fork = stream.fork(); match T1::parse(&fork) { Ok(a) => { let _ = T1::parse(&stream); Ok(a) } Err(e) => Err(e), } } pub fn parse_seq2<T1: Parse, T2: Parse>(stream: &mut ParseStream) -> syn::Result<(T1, T2)> { let fork = stream.fork(); match (T1::parse(&fork), T2::parse(&fork)) { (Ok(a), Ok(b)) => { let _ = T1::parse(&stream); let _ = T2::parse(&stream); Ok((a, b)) } (Err(e), _) => Err(e), (_, Err(e)) => Err(e), } } #[allow(dead_code)] pub fn parse_seq3<T1: Parse, T2: Parse, T3: Parse>( stream: &mut ParseStream, ) -> syn::Result<(T1, T2, T3)> { let fork = stream.fork(); match (T1::parse(&fork), T2::parse(&fork), T3::parse(&fork)) { (Ok(a), Ok(b), Ok(c)) => { let _ = T1::parse(&stream); let _ = T2::parse(&stream); let _ = T3::parse(&stream); Ok((a, b, c)) } (Err(e), _, _) => Err(e), (_, Err(e), _) => Err(e), (_, _, Err(e)) => Err(e), } } pub fn parse_seq4<T1: Parse, T2: Parse, T3: Parse, T4: Parse>( stream: &mut ParseStream, ) -> syn::Result<(T1, T2, T3, T4)> { let fork = stream.fork(); match ( T1::parse(&fork), T2::parse(&fork), T3::parse(&fork), T4::parse(&fork), ) { (Ok(a), Ok(b), Ok(c), Ok(d)) => { let _ = T1::parse(&stream); let _ = T2::parse(&stream); let _ = T3::parse(&stream); let _ = T4::parse(&stream); Ok((a, b, c, d)) } (Err(e), _, _, _) => Err(e), (_, Err(e), _, _) => Err(e), (_, _, Err(e), _) => Err(e), (_, _, _, Err(e)) => Err(e), } } 
#[allow(dead_code)] pub fn parse_seq5<T1: Parse, T2: Parse, T3: Parse, T4: Parse, T5: Parse>( stream: &mut ParseStream, ) -> syn::Result<(T1, T2, T3, T4, T5)> { let fork = stream.fork(); match ( T1::parse(&fork), T2::parse(&fork), T3::parse(&fork), T4::parse(&fork), T5::parse(&fork), ) { (Ok(a), Ok(b), Ok(c), Ok(d), Ok(e)) => { let _ = T1::parse(&stream); let _ = T2::parse(&stream); let _ = T3::parse(&stream); let _ = T4::parse(&stream); let _ = T5::parse(&stream); Ok((a, b, c, d, e)) } (Err(e), _, _, _, _) => Err(e), (_, Err(e), _, _, _) => Err(e), (_, _, Err(e), _, _) => Err(e), (_, _, _, Err(e), _) => Err(e), (_, _, _, _, Err(e)) => Err(e), } } #[allow(dead_code)] pub fn parse_seq6<T1: Parse, T2: Parse, T3: Parse, T4: Parse, T5: Parse, T6: Parse>( stream: &mut ParseStream, ) -> syn::Result<(T1, T2, T3, T4, T5, T6)> { let fork = stream.fork(); match ( T1::parse(&fork), T2::parse(&fork), T3::parse(&fork), T4::parse(&fork), T5::parse(&fork), T6::parse(&fork), ) { (Ok(a), Ok(b), Ok(c), Ok(d), Ok(e), Ok(f)) => { let _ = T1::parse(&stream); let _ = T2::parse(&stream); let _ = T3::parse(&stream); let _ = T4::parse(&stream); let _ = T5::parse(&stream); let _ = T6::parse(&stream); Ok((a, b, c, d, e, f)) } (Err(e), _, _, _, _, _) => Err(e), (_, Err(e), _, _, _, _) => Err(e), (_, _, Err(e), _, _, _) => Err(e), (_, _, _, Err(e), _, _) => Err(e), (_, _, _, _, Err(e), _) => Err(e), (_, _, _, _, _, Err(e)) => Err(e), } } pub struct OpenBrace {} impl syn::parse::Parse for OpenBrace { fn parse(input: ParseStream) -> syn::Result<Self> { if input.peek(syn::token::Brace) { input.parse() } else { Err(syn::Error::new(input.span(), "Expected [{]")) } } } pub struct CloseBrace {} impl syn::parse::Parse for CloseBrace { fn parse(input: ParseStream) -> syn::Result<Self> { if input.peek(syn::token::Brace) { input.parse() } else { Err(syn::Error::new(input.span(), "Expected [{]")) } } } pub fn braced_map<'a, T, F>(stream: &mut ParseStream, f: F) -> syn::Result<T> where F: Fn(&mut 
ParseStream) -> syn::Result<T> + 'a, { let content; syn::braced!(content in stream); let mut stream: ParseStream = &content; f(&mut stream) } pub fn braced<T: Parse>(stream: &mut ParseStream) -> syn::Result<T> { let content; syn::braced!(content in stream); content.parse::<T>() }
use syn::{ parse::{Parse, ParseStream}, token::{CustomToken, Token}, }; pub enum Or<A, B> { A(A), B(B), } impl<TA: Token, TB: Token> CustomToken for Or<TA, TB> { fn peek(cursor: syn::buffer::Cursor) -> bool { TA::peek(cursor) || TB::peek(cursor) } fn display() -> &'static str { "OR" } } impl<TA: Parse, TB: Parse> Parse for Or<TA, TB> { fn parse(input: ParseStream) -> syn::Result<Self> { if let Ok(ta) = TA::parse(input) { Ok(Or::A(ta)) } else if let Ok(tb) = TB::parse(input) { Ok(Or::B(tb)) } else { Err(input.error("Nor A nor B")) } } } pub fn parse_seq1<T1: Parse>(stream: &mut ParseStream) -> syn::Result<T1> { let fork = stream.fork(); match T1::parse(&fork) { Ok(a) => { let _ = T1::parse(&stream); Ok(a) } Err(e) => Err(e), } } pub fn parse_seq2<T1: Parse, T2: Parse>(stream: &mut ParseStream) -> syn::Result<(T1, T2)> { let fork = stream.fork(); match (T1::parse(&fork), T2::parse(&fork)) { (Ok(a), Ok(b)) => { let _ = T1::parse(&stream); let _ = T2::parse(&stream); Ok((a, b)) } (Err(e), _) => Err(e), (_, Err(e)) => Err(e), } } #[allow(dead_code)] pub fn parse_seq3<T1: Parse, T2: Parse, T3: Parse>( stream: &mut ParseStream, ) -> syn::Result<(T1, T2, T3)> { let fork = stream.fork(); match (T1::parse(&fork), T2::parse(&fork), T3::parse(&fork)) { (Ok(a), Ok(b), Ok(c)) => { let _ = T1::parse(&stream); let _ = T2::parse(&stream); let _ = T3::parse(&stream); Ok((a, b, c)) } (Err(e), _, _) => Err(e), (_, Err(e), _) => Err(e), (_, _, Err(e)) => Err(e), } } pub fn parse_seq4<T1: Parse, T2: Parse, T3: Parse, T4: Parse>( stream: &mut ParseStream, ) -> syn::Result<(T1, T2, T3, T4)> { let fork = stream.fork(); match ( T1::parse(&fork), T2::parse(&fork), T3::parse(&fork), T4::parse(&fork), ) { (Ok(a), Ok(b), Ok(c), Ok(d)) => { let _ = T1::parse(&str
(_, Err(e), _, _) => Err(e), (_, _, Err(e), _) => Err(e), (_, _, _, Err(e)) => Err(e), } } #[allow(dead_code)] pub fn parse_seq5<T1: Parse, T2: Parse, T3: Parse, T4: Parse, T5: Parse>( stream: &mut ParseStream, ) -> syn::Result<(T1, T2, T3, T4, T5)> { let fork = stream.fork(); match ( T1::parse(&fork), T2::parse(&fork), T3::parse(&fork), T4::parse(&fork), T5::parse(&fork), ) { (Ok(a), Ok(b), Ok(c), Ok(d), Ok(e)) => { let _ = T1::parse(&stream); let _ = T2::parse(&stream); let _ = T3::parse(&stream); let _ = T4::parse(&stream); let _ = T5::parse(&stream); Ok((a, b, c, d, e)) } (Err(e), _, _, _, _) => Err(e), (_, Err(e), _, _, _) => Err(e), (_, _, Err(e), _, _) => Err(e), (_, _, _, Err(e), _) => Err(e), (_, _, _, _, Err(e)) => Err(e), } } #[allow(dead_code)] pub fn parse_seq6<T1: Parse, T2: Parse, T3: Parse, T4: Parse, T5: Parse, T6: Parse>( stream: &mut ParseStream, ) -> syn::Result<(T1, T2, T3, T4, T5, T6)> { let fork = stream.fork(); match ( T1::parse(&fork), T2::parse(&fork), T3::parse(&fork), T4::parse(&fork), T5::parse(&fork), T6::parse(&fork), ) { (Ok(a), Ok(b), Ok(c), Ok(d), Ok(e), Ok(f)) => { let _ = T1::parse(&stream); let _ = T2::parse(&stream); let _ = T3::parse(&stream); let _ = T4::parse(&stream); let _ = T5::parse(&stream); let _ = T6::parse(&stream); Ok((a, b, c, d, e, f)) } (Err(e), _, _, _, _, _) => Err(e), (_, Err(e), _, _, _, _) => Err(e), (_, _, Err(e), _, _, _) => Err(e), (_, _, _, Err(e), _, _) => Err(e), (_, _, _, _, Err(e), _) => Err(e), (_, _, _, _, _, Err(e)) => Err(e), } } pub struct OpenBrace {} impl syn::parse::Parse for OpenBrace { fn parse(input: ParseStream) -> syn::Result<Self> { if input.peek(syn::token::Brace) { input.parse() } else { Err(syn::Error::new(input.span(), "Expected [{]")) } } } pub struct CloseBrace {} impl syn::parse::Parse for CloseBrace { fn parse(input: ParseStream) -> syn::Result<Self> { if input.peek(syn::token::Brace) { input.parse() } else { Err(syn::Error::new(input.span(), "Expected [{]")) } } } pub fn 
braced_map<'a, T, F>(stream: &mut ParseStream, f: F) -> syn::Result<T> where F: Fn(&mut ParseStream) -> syn::Result<T> + 'a, { let content; syn::braced!(content in stream); let mut stream: ParseStream = &content; f(&mut stream) } pub fn braced<T: Parse>(stream: &mut ParseStream) -> syn::Result<T> { let content; syn::braced!(content in stream); content.parse::<T>() }
eam); let _ = T2::parse(&stream); let _ = T3::parse(&stream); let _ = T4::parse(&stream); Ok((a, b, c, d)) } (Err(e), _, _, _) => Err(e),
function_block-random_span
[ { "content": "fn parse_seq2<T1: Parse, T2: Parse>(stream: &mut ParseStream) -> syn::Result<(T1, T2)> {\n\n let fork = stream.fork();\n\n match (T1::parse(&fork), T2::parse(&fork)) {\n\n (Ok(a), Ok(b)) => {\n\n let _ = T1::parse(&stream);\n\n let _ = T2::parse(&stream);\n\n ...
Rust
07-rust/stm32f446/stm32f446_pac/src/tim5/ccmr2_output.rs
aaronhktan/stm32-exploration
dcd7674424cd17b02b85c6b3ce533456d5037d65
#[doc = "Reader of register CCMR2_Output"] pub type R = crate::R<u32, super::CCMR2_OUTPUT>; #[doc = "Writer for register CCMR2_Output"] pub type W = crate::W<u32, super::CCMR2_OUTPUT>; #[doc = "Register CCMR2_Output `reset()`'s with value 0"] impl crate::ResetValue for super::CCMR2_OUTPUT { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `O24CE`"] pub type O24CE_R = crate::R<bool, bool>; #[doc = "Write proxy for field `O24CE`"] pub struct O24CE_W<'a> { w: &'a mut W, } impl<'a> O24CE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 15)) | (((value as u32) & 0x01) << 15); self.w } } #[doc = "Reader of field `OC4M`"] pub type OC4M_R = crate::R<u8, u8>; #[doc = "Write proxy for field `OC4M`"] pub struct OC4M_W<'a> { w: &'a mut W, } impl<'a> OC4M_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x07 << 12)) | (((value as u32) & 0x07) << 12); self.w } } #[doc = "Reader of field `OC4PE`"] pub type OC4PE_R = crate::R<bool, bool>; #[doc = "Write proxy for field `OC4PE`"] pub struct OC4PE_W<'a> { w: &'a mut W, } impl<'a> OC4PE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11); self.w } } #[doc = "Reader of field `OC4FE`"] pub type OC4FE_R = crate::R<bool, bool>; #[doc = "Write 
proxy for field `OC4FE`"] pub struct OC4FE_W<'a> { w: &'a mut W, } impl<'a> OC4FE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10); self.w } } #[doc = "Reader of field `CC4S`"] pub type CC4S_R = crate::R<u8, u8>; #[doc = "Write proxy for field `CC4S`"] pub struct CC4S_W<'a> { w: &'a mut W, } impl<'a> CC4S_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 8)) | (((value as u32) & 0x03) << 8); self.w } } #[doc = "Reader of field `OC3CE`"] pub type OC3CE_R = crate::R<bool, bool>; #[doc = "Write proxy for field `OC3CE`"] pub struct OC3CE_W<'a> { w: &'a mut W, } impl<'a> OC3CE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7); self.w } } #[doc = "Reader of field `OC3M`"] pub type OC3M_R = crate::R<u8, u8>; #[doc = "Write proxy for field `OC3M`"] pub struct OC3M_W<'a> { w: &'a mut W, } impl<'a> OC3M_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x07 << 4)) | (((value as u32) & 0x07) << 4); self.w } } #[doc = "Reader of field `OC3PE`"] pub type OC3PE_R = crate::R<bool, bool>; #[doc = "Write proxy for field `OC3PE`"] pub struct OC3PE_W<'a> { w: &'a mut W, } impl<'a> OC3PE_W<'a> { 
#[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3); self.w } } #[doc = "Reader of field `OC3FE`"] pub type OC3FE_R = crate::R<bool, bool>; #[doc = "Write proxy for field `OC3FE`"] pub struct OC3FE_W<'a> { w: &'a mut W, } impl<'a> OC3FE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2); self.w } } #[doc = "Reader of field `CC3S`"] pub type CC3S_R = crate::R<u8, u8>; #[doc = "Write proxy for field `CC3S`"] pub struct CC3S_W<'a> { w: &'a mut W, } impl<'a> CC3S_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !0x03) | ((value as u32) & 0x03); self.w } } impl R { #[doc = "Bit 15 - O24CE"] #[inline(always)] pub fn o24ce(&self) -> O24CE_R { O24CE_R::new(((self.bits >> 15) & 0x01) != 0) } #[doc = "Bits 12:14 - OC4M"] #[inline(always)] pub fn oc4m(&self) -> OC4M_R { OC4M_R::new(((self.bits >> 12) & 0x07) as u8) } #[doc = "Bit 11 - OC4PE"] #[inline(always)] pub fn oc4pe(&self) -> OC4PE_R { OC4PE_R::new(((self.bits >> 11) & 0x01) != 0) } #[doc = "Bit 10 - OC4FE"] #[inline(always)] pub fn oc4fe(&self) -> OC4FE_R { OC4FE_R::new(((self.bits >> 10) & 0x01) != 0) } #[doc = "Bits 8:9 - CC4S"] #[inline(always)] pub fn cc4s(&self) -> CC4S_R { CC4S_R::new(((self.bits >> 8) & 0x03) as u8) } #[doc = "Bit 7 - OC3CE"] 
#[inline(always)] pub fn oc3ce(&self) -> OC3CE_R { OC3CE_R::new(((self.bits >> 7) & 0x01) != 0) } #[doc = "Bits 4:6 - OC3M"] #[inline(always)] pub fn oc3m(&self) -> OC3M_R { OC3M_R::new(((self.bits >> 4) & 0x07) as u8) } #[doc = "Bit 3 - OC3PE"] #[inline(always)] pub fn oc3pe(&self) -> OC3PE_R { OC3PE_R::new(((self.bits >> 3) & 0x01) != 0) } #[doc = "Bit 2 - OC3FE"] #[inline(always)] pub fn oc3fe(&self) -> OC3FE_R { OC3FE_R::new(((self.bits >> 2) & 0x01) != 0) } #[doc = "Bits 0:1 - CC3S"] #[inline(always)] pub fn cc3s(&self) -> CC3S_R { CC3S_R::new((self.bits & 0x03) as u8) } } impl W { #[doc = "Bit 15 - O24CE"] #[inline(always)] pub fn o24ce(&mut self) -> O24CE_W { O24CE_W { w: self } } #[doc = "Bits 12:14 - OC4M"] #[inline(always)] pub fn oc4m(&mut self) -> OC4M_W { OC4M_W { w: self } } #[doc = "Bit 11 - OC4PE"] #[inline(always)] pub fn oc4pe(&mut self) -> OC4PE_W { OC4PE_W { w: self } } #[doc = "Bit 10 - OC4FE"] #[inline(always)] pub fn oc4fe(&mut self) -> OC4FE_W { OC4FE_W { w: self } } #[doc = "Bits 8:9 - CC4S"] #[inline(always)] pub fn cc4s(&mut self) -> CC4S_W { CC4S_W { w: self } } #[doc = "Bit 7 - OC3CE"] #[inline(always)] pub fn oc3ce(&mut self) -> OC3CE_W { OC3CE_W { w: self } } #[doc = "Bits 4:6 - OC3M"] #[inline(always)] pub fn oc3m(&mut self) -> OC3M_W { OC3M_W { w: self } } #[doc = "Bit 3 - OC3PE"] #[inline(always)] pub fn oc3pe(&mut self) -> OC3PE_W { OC3PE_W { w: self } } #[doc = "Bit 2 - OC3FE"] #[inline(always)] pub fn oc3fe(&mut self) -> OC3FE_W { OC3FE_W { w: self } } #[doc = "Bits 0:1 - CC3S"] #[inline(always)] pub fn cc3s(&mut self) -> CC3S_W { CC3S_W { w: self } } }
#[doc = "Reader of register CCMR2_Output"] pub type R = crate::R<u32, super::CCMR2_OUTPUT>; #[doc = "Writer for register CCMR2_Output"] pub type W = crate::W<u32, super::CCMR2_OUTPUT>; #[doc = "Register CCMR2_Output `reset()`'s with value 0"] impl crate::ResetValue for super::CCMR2_OUTPUT { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `O24CE`"] pub type O24CE_R = crate::R<bool, bool>; #[doc = "Write proxy for field `O24CE`"] pub struct O24CE_W<'a> { w: &'a mut W, } impl<'a> O24CE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 15)) | (((value as u32) & 0x01) << 15); self.w } } #[doc = "Reader of field `OC4M`"] pub type OC4M_R = crate::R<u8, u8>; #[doc = "Write proxy for field `OC4M`"] pub struct OC4M_W<'a> { w: &'a mut W, } impl<'a> OC4M_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x07 << 12)) | (((value as u32) & 0x07) << 12); self.w } } #[doc = "Reader of field `OC4PE`"] pub type OC4PE_R = crate::R<bool, bool>; #[doc = "Write proxy for field `OC4PE`"] pub struct OC4PE_W<'a> { w: &'a mut W, } impl<'a> OC4PE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 <<
alse) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10); self.w } } #[doc = "Reader of field `CC4S`"] pub type CC4S_R = crate::R<u8, u8>; #[doc = "Write proxy for field `CC4S`"] pub struct CC4S_W<'a> { w: &'a mut W, } impl<'a> CC4S_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 8)) | (((value as u32) & 0x03) << 8); self.w } } #[doc = "Reader of field `OC3CE`"] pub type OC3CE_R = crate::R<bool, bool>; #[doc = "Write proxy for field `OC3CE`"] pub struct OC3CE_W<'a> { w: &'a mut W, } impl<'a> OC3CE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7); self.w } } #[doc = "Reader of field `OC3M`"] pub type OC3M_R = crate::R<u8, u8>; #[doc = "Write proxy for field `OC3M`"] pub struct OC3M_W<'a> { w: &'a mut W, } impl<'a> OC3M_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x07 << 4)) | (((value as u32) & 0x07) << 4); self.w } } #[doc = "Reader of field `OC3PE`"] pub type OC3PE_R = crate::R<bool, bool>; #[doc = "Write proxy for field `OC3PE`"] pub struct OC3PE_W<'a> { w: &'a mut W, } impl<'a> OC3PE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: 
bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3); self.w } } #[doc = "Reader of field `OC3FE`"] pub type OC3FE_R = crate::R<bool, bool>; #[doc = "Write proxy for field `OC3FE`"] pub struct OC3FE_W<'a> { w: &'a mut W, } impl<'a> OC3FE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2); self.w } } #[doc = "Reader of field `CC3S`"] pub type CC3S_R = crate::R<u8, u8>; #[doc = "Write proxy for field `CC3S`"] pub struct CC3S_W<'a> { w: &'a mut W, } impl<'a> CC3S_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !0x03) | ((value as u32) & 0x03); self.w } } impl R { #[doc = "Bit 15 - O24CE"] #[inline(always)] pub fn o24ce(&self) -> O24CE_R { O24CE_R::new(((self.bits >> 15) & 0x01) != 0) } #[doc = "Bits 12:14 - OC4M"] #[inline(always)] pub fn oc4m(&self) -> OC4M_R { OC4M_R::new(((self.bits >> 12) & 0x07) as u8) } #[doc = "Bit 11 - OC4PE"] #[inline(always)] pub fn oc4pe(&self) -> OC4PE_R { OC4PE_R::new(((self.bits >> 11) & 0x01) != 0) } #[doc = "Bit 10 - OC4FE"] #[inline(always)] pub fn oc4fe(&self) -> OC4FE_R { OC4FE_R::new(((self.bits >> 10) & 0x01) != 0) } #[doc = "Bits 8:9 - CC4S"] #[inline(always)] pub fn cc4s(&self) -> CC4S_R { CC4S_R::new(((self.bits >> 8) & 0x03) as u8) } #[doc = "Bit 7 - OC3CE"] #[inline(always)] pub fn oc3ce(&self) -> OC3CE_R { OC3CE_R::new(((self.bits >> 7) & 0x01) != 0) } #[doc = "Bits 4:6 - OC3M"] #[inline(always)] pub fn oc3m(&self) -> OC3M_R { OC3M_R::new(((self.bits >> 4) & 0x07) as u8) } #[doc = "Bit 3 - OC3PE"] #[inline(always)] pub fn oc3pe(&self) -> OC3PE_R 
{ OC3PE_R::new(((self.bits >> 3) & 0x01) != 0) } #[doc = "Bit 2 - OC3FE"] #[inline(always)] pub fn oc3fe(&self) -> OC3FE_R { OC3FE_R::new(((self.bits >> 2) & 0x01) != 0) } #[doc = "Bits 0:1 - CC3S"] #[inline(always)] pub fn cc3s(&self) -> CC3S_R { CC3S_R::new((self.bits & 0x03) as u8) } } impl W { #[doc = "Bit 15 - O24CE"] #[inline(always)] pub fn o24ce(&mut self) -> O24CE_W { O24CE_W { w: self } } #[doc = "Bits 12:14 - OC4M"] #[inline(always)] pub fn oc4m(&mut self) -> OC4M_W { OC4M_W { w: self } } #[doc = "Bit 11 - OC4PE"] #[inline(always)] pub fn oc4pe(&mut self) -> OC4PE_W { OC4PE_W { w: self } } #[doc = "Bit 10 - OC4FE"] #[inline(always)] pub fn oc4fe(&mut self) -> OC4FE_W { OC4FE_W { w: self } } #[doc = "Bits 8:9 - CC4S"] #[inline(always)] pub fn cc4s(&mut self) -> CC4S_W { CC4S_W { w: self } } #[doc = "Bit 7 - OC3CE"] #[inline(always)] pub fn oc3ce(&mut self) -> OC3CE_W { OC3CE_W { w: self } } #[doc = "Bits 4:6 - OC3M"] #[inline(always)] pub fn oc3m(&mut self) -> OC3M_W { OC3M_W { w: self } } #[doc = "Bit 3 - OC3PE"] #[inline(always)] pub fn oc3pe(&mut self) -> OC3PE_W { OC3PE_W { w: self } } #[doc = "Bit 2 - OC3FE"] #[inline(always)] pub fn oc3fe(&mut self) -> OC3FE_W { OC3FE_W { w: self } } #[doc = "Bits 0:1 - CC3S"] #[inline(always)] pub fn cc3s(&mut self) -> CC3S_W { CC3S_W { w: self } } }
11)) | (((value as u32) & 0x01) << 11); self.w } } #[doc = "Reader of field `OC4FE`"] pub type OC4FE_R = crate::R<bool, bool>; #[doc = "Write proxy for field `OC4FE`"] pub struct OC4FE_W<'a> { w: &'a mut W, } impl<'a> OC4FE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(f
random
[ { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Res...
Rust
src/peripetyd/collector.rs
cathay4t/eureka
135da097feabc08fb395a1f7b9b3563b4d670519
use chrono::{Local, SecondsFormat, TimeZone}; use nix; use nix::sys::select::FdSet; use peripety::{LogSeverity, StorageEvent, StorageSubSystem, BlkInfo}; use sdjournal; use std::collections::HashMap; use std::os::unix::io::AsRawFd; use std::sync::mpsc::{Receiver, Sender}; use uuid::Uuid; use buildin_regex::BUILD_IN_REGEX_CONFS; use conf::ConfCollector; use data::RegexConf; fn process_journal_entry( entry: &HashMap<String, String>, sender: &Sender<StorageEvent>, buildin_regex_confs: &[RegexConf], user_regex_confs: &[RegexConf], ) { let msg = match entry.get("MESSAGE") { Some(m) => { if m.is_empty() { return; } m } None => return, }; if !entry.contains_key("SYSLOG_IDENTIFIER") { return; } if entry.get("IS_PERIPETY") == Some(&"TRUE".to_string()) { return; } if entry.get("SYSLOG_IDENTIFIER") != Some(&"kernel".to_string()) { return; } let mut event: StorageEvent = Default::default(); if let Some(s) = entry.get("_KERNEL_SUBSYSTEM") { if let Ok(s) = s.parse::<StorageSubSystem>() { event.sub_system = s; } } if let Some(d) = entry.get("_KERNEL_DEVICE") { event.kdev = d.to_string(); } for regex_conf in buildin_regex_confs.iter().chain(user_regex_confs.iter()) { if event.sub_system != StorageSubSystem::Unknown && regex_conf.sub_system != event.sub_system { continue; } if let Some(ref s) = regex_conf.starts_with { if !msg.starts_with(s) { continue; } } if let Some(cap) = regex_conf.regex.captures(msg) { if let Some(m) = cap.name("kdev") { event.kdev = m.as_str().to_string(); } if event.kdev.is_empty() { continue; } match BlkInfo::new_hierarchy(&event.kdev) { Ok(i) => event.hierarchy_blk_info = i, Err(e) => { println!("collector: {}", e); return; } } match BlkInfo::new_current(&event.kdev) { Ok(i) => event.cur_blk_info = i, Err(e) => { println!("collector: {}", e); return; } } if regex_conf.sub_system != StorageSubSystem::Unknown { event.sub_system = regex_conf.sub_system; } if !regex_conf.event_type.is_empty() { event.event_type = regex_conf.event_type.to_string(); } for name 
in regex_conf.regex.capture_names() { if let Some(name) = name { if name == "kdev" { continue; } if let Some(m) = cap.name(name) { event .extension .insert(name.to_string(), m.as_str().to_string()); } } } break; } } if event.sub_system == StorageSubSystem::Unknown || event.kdev.is_empty() { return; } event.hostname = entry .get("_HOSTNAME") .unwrap_or(&"".to_string()) .to_string(); if let Some(t) = entry.get("__REALTIME_TIMESTAMP") { let tp = match t.parse::<i64>() { Ok(t) => t, Err(_) => return, }; event.timestamp = Local .timestamp(tp / 10i64.pow(6), (tp % 10i64.pow(6)) as u32) .to_rfc3339_opts(SecondsFormat::Micros, false) } else { return; } if let Some(p) = entry.get("PRIORITY") { event.severity = match p.parse::<LogSeverity>() { Ok(s) => s, Err(e) => { println!("collector: {}", e); LogSeverity::Unknown } } } event.raw_msg = msg.to_string(); event.event_id = Uuid::new_v4().hyphenated().to_string(); if let Err(e) = sender.send(event) { println!("collector: Failed to send event: {}", e); } } pub fn new( sender: &Sender<StorageEvent>, config_changed: &Receiver<ConfCollector>, ) { let mut journal = sdjournal::Journal::new().expect("Failed to open systemd journal"); journal.timeout_us = 0; journal .seek_tail() .expect("Unable to seek to end of journal!"); let mut buildin_regex_confs: Vec<RegexConf> = Vec::new(); let mut user_regex_confs: Vec<RegexConf> = Vec::new(); for regex_conf_str in BUILD_IN_REGEX_CONFS { let regex_conf = regex_conf_str.to_regex_conf(); buildin_regex_confs.push(regex_conf); } loop { let mut fds = FdSet::new(); fds.insert(journal.as_raw_fd()); if let Err(e) = nix::sys::select::select(None, Some(&mut fds), None, None, None) { println!("collector: Failed select against journal fd: {}", e); continue; } if !fds.contains(journal.as_raw_fd()) { continue; } for entry in &mut journal { match entry { Ok(entry) => { if let Ok(conf) = config_changed.try_recv() { user_regex_confs.clear(); for regex in conf.regexs { match regex.to_regex_conf() { Ok(r) => 
user_regex_confs.push(r), Err(e) => { println!( "collector: Invalid config: {}", e ); continue; } } } } process_journal_entry( &entry, sender, &buildin_regex_confs, &user_regex_confs, ) } Err(e) => { println!("Error retrieving the journal entry: {:?}", e) } } } } }
use chrono::{Local, SecondsFormat, TimeZone}; use nix; use nix::sys::select::FdSet; use peripety::{LogSeverity, StorageEvent, StorageSubSystem, BlkInfo}; use sdjournal; use std::collections::HashMap; use std::os::unix::io::AsRawFd; use std::sync::mpsc::{Receiver, Sender}; use uuid::Uuid; use buildin_regex::BUILD_IN_REGEX_CONFS; use conf::ConfCollector; use data::RegexConf;
pub fn new( sender: &Sender<StorageEvent>, config_changed: &Receiver<ConfCollector>, ) { let mut journal = sdjournal::Journal::new().expect("Failed to open systemd journal"); journal.timeout_us = 0; journal .seek_tail() .expect("Unable to seek to end of journal!"); let mut buildin_regex_confs: Vec<RegexConf> = Vec::new(); let mut user_regex_confs: Vec<RegexConf> = Vec::new(); for regex_conf_str in BUILD_IN_REGEX_CONFS { let regex_conf = regex_conf_str.to_regex_conf(); buildin_regex_confs.push(regex_conf); } loop { let mut fds = FdSet::new(); fds.insert(journal.as_raw_fd()); if let Err(e) = nix::sys::select::select(None, Some(&mut fds), None, None, None) { println!("collector: Failed select against journal fd: {}", e); continue; } if !fds.contains(journal.as_raw_fd()) { continue; } for entry in &mut journal { match entry { Ok(entry) => { if let Ok(conf) = config_changed.try_recv() { user_regex_confs.clear(); for regex in conf.regexs { match regex.to_regex_conf() { Ok(r) => user_regex_confs.push(r), Err(e) => { println!( "collector: Invalid config: {}", e ); continue; } } } } process_journal_entry( &entry, sender, &buildin_regex_confs, &user_regex_confs, ) } Err(e) => { println!("Error retrieving the journal entry: {:?}", e) } } } } }
fn process_journal_entry( entry: &HashMap<String, String>, sender: &Sender<StorageEvent>, buildin_regex_confs: &[RegexConf], user_regex_confs: &[RegexConf], ) { let msg = match entry.get("MESSAGE") { Some(m) => { if m.is_empty() { return; } m } None => return, }; if !entry.contains_key("SYSLOG_IDENTIFIER") { return; } if entry.get("IS_PERIPETY") == Some(&"TRUE".to_string()) { return; } if entry.get("SYSLOG_IDENTIFIER") != Some(&"kernel".to_string()) { return; } let mut event: StorageEvent = Default::default(); if let Some(s) = entry.get("_KERNEL_SUBSYSTEM") { if let Ok(s) = s.parse::<StorageSubSystem>() { event.sub_system = s; } } if let Some(d) = entry.get("_KERNEL_DEVICE") { event.kdev = d.to_string(); } for regex_conf in buildin_regex_confs.iter().chain(user_regex_confs.iter()) { if event.sub_system != StorageSubSystem::Unknown && regex_conf.sub_system != event.sub_system { continue; } if let Some(ref s) = regex_conf.starts_with { if !msg.starts_with(s) { continue; } } if let Some(cap) = regex_conf.regex.captures(msg) { if let Some(m) = cap.name("kdev") { event.kdev = m.as_str().to_string(); } if event.kdev.is_empty() { continue; } match BlkInfo::new_hierarchy(&event.kdev) { Ok(i) => event.hierarchy_blk_info = i, Err(e) => { println!("collector: {}", e); return; } } match BlkInfo::new_current(&event.kdev) { Ok(i) => event.cur_blk_info = i, Err(e) => { println!("collector: {}", e); return; } } if regex_conf.sub_system != StorageSubSystem::Unknown { event.sub_system = regex_conf.sub_system; } if !regex_conf.event_type.is_empty() { event.event_type = regex_conf.event_type.to_string(); } for name in regex_conf.regex.capture_names() { if let Some(name) = name { if name == "kdev" { continue; } if let Some(m) = cap.name(name) { event .extension .insert(name.to_string(), m.as_str().to_string()); } } } break; } } if event.sub_system == StorageSubSystem::Unknown || event.kdev.is_empty() { return; } event.hostname = entry .get("_HOSTNAME") .unwrap_or(&"".to_string()) 
.to_string(); if let Some(t) = entry.get("__REALTIME_TIMESTAMP") { let tp = match t.parse::<i64>() { Ok(t) => t, Err(_) => return, }; event.timestamp = Local .timestamp(tp / 10i64.pow(6), (tp % 10i64.pow(6)) as u32) .to_rfc3339_opts(SecondsFormat::Micros, false) } else { return; } if let Some(p) = entry.get("PRIORITY") { event.severity = match p.parse::<LogSeverity>() { Ok(s) => s, Err(e) => { println!("collector: {}", e); LogSeverity::Unknown } } } event.raw_msg = msg.to_string(); event.event_id = Uuid::new_v4().hyphenated().to_string(); if let Err(e) = sender.send(event) { println!("collector: Failed to send event: {}", e); } }
function_block-full_function
[ { "content": "pub fn parser_start(sender: Sender<StorageEvent>) -> ParserInfo {\n\n let (event_in_sender, event_in_recver) = mpsc::channel();\n\n\n\n spawn(move || loop {\n\n match event_in_recver.recv() {\n\n Ok(event) => parse_event(&event, &sender),\n\n Err(e) => println!(\...
Rust
src/gc/src/heap/freelist/freelist_space.rs
qinsoon/zebu
6f6475568e38696b129cf81ff4d5ab0ea1f688cb
use common::ptr::*; use heap::*; use objectmodel::sidemap::*; use utils::mem::memsec::memzero; use utils::mem::*; use std::mem; use std::sync::Mutex; const LOG_BYTES_IN_PAGE: usize = 12; const BYTES_IN_PAGE: ByteSize = 1 << LOG_BYTES_IN_PAGE; const PAGES_IN_SPACE: usize = 1 << (LOG_BYTES_PREALLOC_SPACE - LOG_BYTES_IN_PAGE); #[repr(C)] pub struct FreelistSpace { desc: SpaceDescriptor, start: Address, end: Address, size: ByteSize, cur_end: Address, cur_size: ByteSize, cur_pages: usize, total_pages: usize, usable_nodes: Mutex<Vec<FreelistNode>>, used_nodes: Mutex<Vec<FreelistNode>>, pub last_gc_free_pages: usize, pub last_gc_used_pages: usize, mmap_start: Address, mmap_size: ByteSize, padding: [u64; (BYTES_IN_PAGE - 32 - 24 - 88 - 32) >> 3], page_encode_table: [LargeObjectEncode; PAGES_IN_SPACE], page_mark_table: [PageMark; PAGES_IN_SPACE], mem: [u8; 0], } impl RawMemoryMetadata for FreelistSpace { #[inline(always)] fn addr(&self) -> Address { Address::from_ptr(self as *const FreelistSpace) } #[inline(always)] fn mem_start(&self) -> Address { self.start } } impl Space for FreelistSpace { #[inline(always)] fn start(&self) -> Address { self.start } #[inline(always)] fn end(&self) -> Address { self.cur_end } #[inline(always)] #[allow(unused_variables)] fn is_valid_object(&self, addr: Address) -> bool { true } fn destroy(&mut self) { munmap(self.mmap_start, self.mmap_size); } fn prepare_for_gc(&mut self) { unsafe { memzero( &mut self.page_mark_table[0] as *mut PageMark, self.cur_pages, ); } } fn sweep(&mut self) { debug!("=== {:?} Sweep ===", self.desc); debug_assert_eq!(self.n_used_pages() + self.n_usable_pages(), self.cur_pages); let mut free_pages = 0; let mut used_pages = 0; { let mut used_nodes = self.used_nodes.lock().unwrap(); let mut usable_nodes = self.usable_nodes.lock().unwrap(); let mut all_nodes: Vec<FreelistNode> = { let mut ret = vec![]; ret.append(&mut used_nodes); ret.append(&mut usable_nodes); ret }; debug_assert_eq!(all_nodes.len(), self.cur_pages); 
while !all_nodes.is_empty() { let node: FreelistNode = all_nodes.pop().unwrap(); let index = self.get_page_index(node.addr); if self.page_mark_table[index] == PageMark::Live { used_pages += node.size >> LOG_BYTES_IN_PAGE; used_nodes.push(node); } else { free_pages += node.size >> LOG_BYTES_IN_PAGE; usable_nodes.push(node); } } } if cfg!(debug_assertions) { debug!("free pages = {} of {} total", free_pages, self.cur_pages); debug!("used pages = {} of {} total", used_pages, self.cur_pages); } self.last_gc_free_pages = free_pages; self.last_gc_used_pages = used_pages; if self.n_used_pages() == self.total_pages && self.total_pages != 0 { use std::process; println!("Out of memory in Freelist Space"); process::exit(1); } debug_assert_eq!(self.n_used_pages() + self.n_usable_pages(), self.cur_pages); trace!("======================="); } #[inline(always)] fn mark_object_traced(&mut self, obj: ObjectReference) { let index = self.get_page_index(obj.to_address()); self.page_mark_table[index] = PageMark::Live; } #[inline(always)] fn is_object_traced(&self, obj: ObjectReference) -> bool { let index = self.get_page_index(obj.to_address()); self.page_mark_table[index] == PageMark::Live } } impl FreelistSpace { pub fn new(desc: SpaceDescriptor, space_size: ByteSize) -> Raw<FreelistSpace> { let mmap_size = BYTES_PREALLOC_SPACE * 2; let mmap_start = mmap_large(mmap_size); trace!(" mmap ptr: {}", mmap_start); let space_size = math::align_up(space_size, BYTES_IN_PAGE); let meta_start = mmap_start.align_up(SPACE_ALIGN); let mem_start = meta_start + BYTES_IN_PAGE + mem::size_of::<LargeObjectEncode>() * PAGES_IN_SPACE + mem::size_of::<PageMark>() * PAGES_IN_SPACE; let mem_end = mem_start + space_size; trace!(" space metadata: {}", meta_start); trace!(" space: {} ~ {}", mem_start, mem_end); let mut space: Raw<FreelistSpace> = unsafe { Raw::from_addr(meta_start) }; trace!(" acquired Raw<FreelistSpace>"); space.desc = desc; space.start = mem_start; space.end = mem_end; space.size = 
space_size; trace!(" initialized desc/start/end/size"); space.cur_end = space.start; space.cur_size = 0; space.cur_pages = 0; trace!(" initialized cur_end/size/pages"); space.total_pages = space_size >> LOG_BYTES_IN_PAGE; unsafe { use std::ptr; ptr::write( &mut space.usable_nodes as *mut Mutex<Vec<FreelistNode>>, Mutex::new(Vec::new()), ); ptr::write( &mut space.used_nodes as *mut Mutex<Vec<FreelistNode>>, Mutex::new(Vec::new()), ); } trace!(" initialized total/usable/used_nodes"); space.mmap_start = mmap_start; space.mmap_size = mmap_size; trace!(" store mmap"); debug_assert_eq!(Address::from_ptr(&space.mem as *const [u8; 0]), mem_start); space.trace_details(); space } #[inline(always)] pub fn get_page_index(&self, obj: Address) -> usize { (obj - self.mem_start()) >> LOG_BYTES_IN_PAGE } pub fn alloc(&mut self, size: ByteSize, align: ByteSize) -> Address { assert!(align <= BYTES_IN_PAGE); let size = math::align_up(size, BYTES_IN_PAGE); let mut usable_nodes = self.usable_nodes.lock().unwrap(); let mut candidate = None; for i in 0..usable_nodes.len() { let ref node = usable_nodes[i]; if node.size >= size { candidate = Some(i); break; } } let opt_node = if let Some(index) = candidate { Some(usable_nodes.remove(index)) } else { let pages_required = size >> LOG_BYTES_IN_PAGE; if self.cur_pages + pages_required <= self.total_pages { let start = self.cur_end; self.cur_end += size; self.cur_size += size; self.cur_pages += pages_required; Some(FreelistNode { size, addr: start }) } else { None } }; if let Some(node) = opt_node { let res = node.addr; self.used_nodes.lock().unwrap().push(node); unsafe { memzero(res.to_ptr_mut::<u8>(), size); } res } else { unsafe { Address::zero() } } } pub fn n_used_pages(&self) -> usize { let lock = self.used_nodes.lock().unwrap(); let mut ret = 0; for node in lock.iter() { ret += node.size; } ret = ret >> LOG_BYTES_IN_PAGE; ret } pub fn n_usable_pages(&self) -> usize { let lock = self.usable_nodes.lock().unwrap(); let mut ret = 0; for node 
in lock.iter() { ret += node.size; } ret = ret >> LOG_BYTES_IN_PAGE; ret } pub fn get_type_encode(&self, obj: ObjectReference) -> LargeObjectEncode { let index = self.get_page_index(obj.to_address()); self.page_encode_table[index] } pub fn get_type_encode_slot(&self, addr: Address) -> Address { let index = self.get_page_index(addr); Address::from_ptr(&self.page_encode_table[index] as *const LargeObjectEncode) } fn trace_details(&self) { trace!("=== {:?} ===", self.desc); trace!( "-range: {} ~ {} (size: {})", self.start, self.end, self.size ); trace!( "-cur : {} ~ {} (size: {})", self.start, self.cur_end, self.cur_size ); trace!( "-pages: current {} (usable: {}, used: {}), total {}", self.cur_pages, self.usable_nodes.lock().unwrap().len(), self.used_nodes.lock().unwrap().len(), self.total_pages ); trace!( "-page type encode starts at {}", Address::from_ptr(&self.page_encode_table as *const LargeObjectEncode) ); trace!( "-page mark table starts at {}", Address::from_ptr(&self.page_mark_table as *const PageMark) ); trace!("-memory starts at {}", self.mem_start()); trace!("=== {:?} ===", self.desc); } } #[repr(C)] pub struct FreelistNode { size: ByteSize, addr: Address, } #[repr(u8)] #[derive(Copy, Clone, Debug, PartialEq)] #[allow(dead_code)] pub enum PageMark { Free = 0, Live, }
use common::ptr::*; use heap::*; use objectmodel::sidemap::*; use utils::mem::memsec::memzero; use utils::mem::*; use std::mem; use std::sync::Mutex; const LOG_BYTES_IN_PAGE: usize = 12; const BYTES_IN_PAGE: ByteSize = 1 << LOG_BYTES_IN_PAGE; const PAGES_IN_SPACE: usize = 1 << (LOG_BYTES_PREALLOC_SPACE - LOG_BYTES_IN_PAGE); #[repr(C)] pub struct FreelistSpace { desc: SpaceDescriptor, start: Address, end: Address, size: ByteSize, cur_end: Address, cur_size: ByteSize, cur_pages: usize, total_pages: usize, usable_nodes: Mutex<Vec<FreelistNode>>, used_nodes: Mutex<Vec<FreelistNode>>, pub last_gc_free_pages: usize, pub last_gc_used_pages: usize, mmap_start: Address, mmap_size: ByteSize, padding: [u64; (BYTES_IN_PAGE - 32 - 24 - 88 - 32) >> 3], page_encode_table: [LargeObjectEncode; PAGES_IN_SPACE], page_mark_table: [PageMark; PAGES_IN_SPACE], mem: [u8; 0], } impl RawMemoryMetadata for FreelistSpace { #[inline(always)] fn addr(&self) -> Address { Address::from_ptr(self as *const FreelistSpace) } #[inline(always)] fn mem_start(&self) -> Address { self.start } } impl Space for FreelistSpace { #[inline(always)] fn start(&self) -> Address { self.start } #[inline(always)] fn end(&self) -> Address { self.cur_end } #[inline(always)] #[allow(unused_variables)] fn is_valid_object(&self, addr: Address) -> bool { true } fn destroy(&mut self) { munmap(self.mmap_start, self.mmap_size); } fn prepare_for_gc(&mut self) { unsafe { memzero( &mut self.page_mark_table[0] as *mut PageMark, self.cur_pages, ); } } fn sweep(&mut self) { debug!("=== {:?} Sweep ===", self.desc); debug_assert_eq!(self.n_used_pages() + self.n_usable_pages(), self.cur_pages); let mut free_pages = 0; let mut used_pages = 0; { let mut used_nodes = self.used_nodes.lock().unwrap(); let mut usable_nodes = self.usable_nodes.lock().unwrap(); let mut all_nodes: Vec<FreelistNode> = { let mut ret = vec![]; ret.append(&mut used_nodes); ret.append(&mut usable_nodes); ret }; debug_assert_eq!(all_nodes.len(), self.cur_pages); 
while !all_nodes.is_empty() { let node: FreelistNode = all_nodes.pop().unwrap(); let index = self.get_page_index(node.addr); if self.page_mark_table[index] == PageMark::Live { used_pages += node.size >> LOG_BYTES_IN_PAGE; used_nodes.push(node); } else { free_pages += node.size >> LOG_BYTES_IN_PAGE; usable_nodes.push(node); } } } if cfg!(debug_assertions) { debug!("free pages = {} of {} total", free_pages, self.cur_pages); debug!("used pages = {} of {} total", used_pages, self.cur_pages); } self.last_gc_free_pages = free_pages; self.last_gc_used_pages = used_pages; if self.n_used_pages() == self.total_pages && self.total_pages != 0 { use std::process; println!("Out of memory in Freelist Space"); process::exit(1); } debug_assert_eq!(self.n_used_pages() + self.n_usable_pages(), self.cur_pages); trace!("======================="); } #[inline(always)] fn mark_object_traced(&mut self, obj: ObjectReference) { let index = self.get_page_index(obj.to_address()); self.page_mark_table[index] = PageMark::Live; } #[inline(always)] fn is_object_traced(&self, obj: ObjectReference) -> bool { let index = self.get_page_index(obj.to_address()); self.page_mark_table[index] == PageMark::Live } } impl FreelistSpace { pub fn new(desc: SpaceDescriptor, space_size: ByteSize) -> Raw<FreelistSpace> { let mmap_size = BYTES_PREALLOC_SPACE * 2; let mmap_start = mmap_large(mmap_size); trace!(" mmap ptr: {}", mmap_start); let space_size = math::align_up(space_size, BYTES_IN_PAGE); let meta_start = mmap_start.align_up(SPACE_ALIGN); let mem_start = meta_start + BYTES_IN_PAGE + mem::size_of::<LargeObjectEncode>() * PAGES_IN_SPACE + mem::size_of::<PageMark>() * PAGES_IN_SPACE; let mem_end = mem_start + space_size; trace!(" space metadata: {}", meta_start); trace!(" space: {} ~ {}", mem_start, mem_end); let mut space: Raw<FreelistSpace> = unsafe { Raw::from_addr(meta_start) }; trace!(" acquired Raw<FreelistSpace>"); space.desc = desc; space.start = mem_start; space.end = mem_end; space.size = 
space_size; trace!(" initialized desc/start/end/size"); space.cur_end = space.start; space.cur_size = 0; space.cur_pages = 0; trace!(" initialized cur_end/size/pages"); space.total_pages = space_size >> LOG_BYTES_IN_PAGE; unsafe { use std::ptr; ptr::write( &mut space.usable_nodes as *mut Mutex<Vec<FreelistNode>>, Mutex::new(Vec::new()), ); ptr::write( &mut space.used_nodes as *mut Mutex<Vec<FreelistNode>>, Mutex::new(Vec::new()), ); } trace!(" initialized total/usable/used_nodes"); space.mmap_start = mmap_start; space.mmap_size = mmap_size; trace!(" store mmap"); debug_assert_eq!(Address::from_ptr(&space.mem as *const [u8; 0]), mem_start); space.trace_details(); space } #[inline(always)] pub fn get_page_index(&self, obj: Address) -> usize { (obj - self.mem_start()) >> LOG_BYTES_IN_PAGE } pub fn alloc(&mut self, size: ByteSize, ali
).len(), self.total_pages ); trace!( "-page type encode starts at {}", Address::from_ptr(&self.page_encode_table as *const LargeObjectEncode) ); trace!( "-page mark table starts at {}", Address::from_ptr(&self.page_mark_table as *const PageMark) ); trace!("-memory starts at {}", self.mem_start()); trace!("=== {:?} ===", self.desc); } } #[repr(C)] pub struct FreelistNode { size: ByteSize, addr: Address, } #[repr(u8)] #[derive(Copy, Clone, Debug, PartialEq)] #[allow(dead_code)] pub enum PageMark { Free = 0, Live, }
gn: ByteSize) -> Address { assert!(align <= BYTES_IN_PAGE); let size = math::align_up(size, BYTES_IN_PAGE); let mut usable_nodes = self.usable_nodes.lock().unwrap(); let mut candidate = None; for i in 0..usable_nodes.len() { let ref node = usable_nodes[i]; if node.size >= size { candidate = Some(i); break; } } let opt_node = if let Some(index) = candidate { Some(usable_nodes.remove(index)) } else { let pages_required = size >> LOG_BYTES_IN_PAGE; if self.cur_pages + pages_required <= self.total_pages { let start = self.cur_end; self.cur_end += size; self.cur_size += size; self.cur_pages += pages_required; Some(FreelistNode { size, addr: start }) } else { None } }; if let Some(node) = opt_node { let res = node.addr; self.used_nodes.lock().unwrap().push(node); unsafe { memzero(res.to_ptr_mut::<u8>(), size); } res } else { unsafe { Address::zero() } } } pub fn n_used_pages(&self) -> usize { let lock = self.used_nodes.lock().unwrap(); let mut ret = 0; for node in lock.iter() { ret += node.size; } ret = ret >> LOG_BYTES_IN_PAGE; ret } pub fn n_usable_pages(&self) -> usize { let lock = self.usable_nodes.lock().unwrap(); let mut ret = 0; for node in lock.iter() { ret += node.size; } ret = ret >> LOG_BYTES_IN_PAGE; ret } pub fn get_type_encode(&self, obj: ObjectReference) -> LargeObjectEncode { let index = self.get_page_index(obj.to_address()); self.page_encode_table[index] } pub fn get_type_encode_slot(&self, addr: Address) -> Address { let index = self.get_page_index(addr); Address::from_ptr(&self.page_encode_table[index] as *const LargeObjectEncode) } fn trace_details(&self) { trace!("=== {:?} ===", self.desc); trace!( "-range: {} ~ {} (size: {})", self.start, self.end, self.size ); trace!( "-cur : {} ~ {} (size: {})", self.start, self.cur_end, self.cur_size ); trace!( "-pages: current {} (usable: {}, used: {}), total {}", self.cur_pages, self.usable_nodes.lock().unwrap().len(), self.used_nodes.lock().unwrap(
random
[ { "content": "pub fn munmap(addr: Address, size: ByteSize) {\n\n use self::libc::*;\n\n unsafe {\n\n munmap(addr.to_ptr_mut() as *mut c_void, size as size_t);\n\n }\n\n}\n\n\n\n/// malloc's and zeroes the memory\n\npub unsafe fn malloc_zero(size: usize) -> *mut u8 {\n\n use self::memsec;\n\n ...
Rust
src/core/compression/zlib.rs
BlockProject3D/bpx-rs
e4e06ce6160ea5ed90e93db8f801d1abfc4006cd
use std::io::{Read, Write}; use libz_sys::{ deflate, deflateEnd, deflateInit_, inflate, inflateEnd, inflateInit_, z_stream, Z_DATA_ERROR, Z_DEFAULT_COMPRESSION, Z_FINISH, Z_MEM_ERROR, Z_NEED_DICT, Z_NO_FLUSH, Z_OK, Z_STREAM_ERROR, Z_VERSION_ERROR }; use crate::{ core::{ compression::{Checksum, Deflater, Inflater}, error::{DeflateError, InflateError} }, utils::ReadFill }; const ENCODER_BUF_SIZE: usize = 8192; const DECODER_BUF_SIZE: usize = ENCODER_BUF_SIZE * 2; unsafe fn zstream_zeroed() -> z_stream { let arr: [u8; std::mem::size_of::<z_stream>()] = [0; std::mem::size_of::<z_stream>()]; std::mem::transmute(arr) } fn new_encoder() -> Result<z_stream, DeflateError> { unsafe { let mut stream: z_stream = zstream_zeroed(); let err = deflateInit_( &mut stream as _, Z_DEFAULT_COMPRESSION, "1.1.3".as_ptr() as _, std::mem::size_of::<z_stream>() as _ ); if err == Z_OK { return Ok(stream); } match err { Z_MEM_ERROR => Err(DeflateError::Memory), Z_STREAM_ERROR => Err(DeflateError::Unsupported("compression level")), Z_VERSION_ERROR => Err(DeflateError::Unsupported("version")), _ => Err(DeflateError::Unknown) } } } fn new_decoder() -> Result<z_stream, InflateError> { unsafe { let mut stream: z_stream = zstream_zeroed(); let err = inflateInit_( &mut stream as _, "1.1.3".as_ptr() as _, std::mem::size_of::<z_stream>() as _ ); if err == Z_OK { return Ok(stream); } match err { Z_MEM_ERROR => Err(InflateError::Memory), Z_DATA_ERROR => Err(InflateError::Data), Z_VERSION_ERROR => Err(InflateError::Unsupported("version")), _ => Err(InflateError::Unknown) } } } fn do_deflate<TRead: Read, TWrite: Write, TChecksum: Checksum>( stream: &mut z_stream, mut input: TRead, mut output: TWrite, inflated_size: usize, chksum: &mut TChecksum ) -> Result<usize, DeflateError> { let mut inbuf: [u8; ENCODER_BUF_SIZE] = [0; ENCODER_BUF_SIZE]; let mut outbuf: [u8; ENCODER_BUF_SIZE] = [0; ENCODER_BUF_SIZE]; let mut count: usize = 0; let mut csize: usize = 0; loop { let len = input.read_fill(&mut inbuf)?; 
count += len; chksum.push(&inbuf[0..len]); stream.avail_in = len as _; let action = { if count == inflated_size { Z_FINISH } else { Z_NO_FLUSH } }; stream.next_in = inbuf.as_mut_ptr(); loop { stream.avail_out = ENCODER_BUF_SIZE as _; stream.next_out = outbuf.as_mut_ptr(); unsafe { let err = deflate(stream, action); if err != Z_OK { return match err { Z_MEM_ERROR => Err(DeflateError::Memory), Z_STREAM_ERROR => Err(DeflateError::Unsupported("compression level")), Z_VERSION_ERROR => Err(DeflateError::Unsupported("version")), _ => Err(DeflateError::Unknown) }; } } let len = ENCODER_BUF_SIZE - stream.avail_out as usize; output.write_all(&outbuf[0..len])?; csize += len; if stream.avail_out == 0 { break; } } if action == Z_FINISH { break; } } Ok(csize) } fn do_inflate<TRead: Read, TWrite: Write, TChecksum: Checksum>( stream: &mut z_stream, mut input: TRead, mut output: TWrite, deflated_size: usize, chksum: &mut TChecksum ) -> Result<(), InflateError> { let mut inbuf: [u8; DECODER_BUF_SIZE] = [0; DECODER_BUF_SIZE]; let mut outbuf: [u8; DECODER_BUF_SIZE] = [0; DECODER_BUF_SIZE]; let mut remaining = deflated_size; loop { let len = input.read_fill(&mut inbuf)?; remaining -= len; if remaining == 0 && len == 0 { break; } stream.avail_in = len as _; stream.next_in = inbuf.as_mut_ptr(); loop { stream.avail_out = DECODER_BUF_SIZE as _; stream.next_out = outbuf.as_mut_ptr(); unsafe { let err = inflate(stream, Z_NO_FLUSH); match err { Z_MEM_ERROR => return Err(InflateError::Memory), Z_DATA_ERROR => return Err(InflateError::Data), Z_NEED_DICT => return Err(InflateError::Data), Z_VERSION_ERROR => return Err(InflateError::Unsupported("version")), _ => () } } let len = DECODER_BUF_SIZE - stream.avail_out as usize; chksum.push(&outbuf[0..len]); output.write_all(&outbuf[0..len])?; if stream.avail_out == 0 { break; } } } Ok(()) } pub struct ZlibCompressionMethod {} impl Deflater for ZlibCompressionMethod { fn deflate<TRead: Read, TWrite: Write, TChecksum: Checksum>( input: TRead, output: 
TWrite, inflated_size: usize, chksum: &mut TChecksum ) -> Result<usize, DeflateError> { let mut encoder = new_encoder()?; let res = do_deflate(&mut encoder, input, output, inflated_size, chksum); unsafe { deflateEnd(&mut encoder); } res } } impl Inflater for ZlibCompressionMethod { fn inflate<TRead: Read, TWrite: Write, TChecksum: Checksum>( input: TRead, output: TWrite, deflated_size: usize, chksum: &mut TChecksum ) -> Result<(), InflateError> { let mut decoder = new_decoder()?; let res = do_inflate(&mut decoder, input, output, deflated_size, chksum); unsafe { inflateEnd(&mut decoder); } res } }
use std::io::{Read, Write}; use libz_sys::{ deflate, deflateEnd, deflateInit_, inflate, inflateEnd, inflateInit_, z_stream, Z_DATA_ERROR, Z_DEFAULT_COMPRESSION, Z_FINISH, Z_MEM_ERROR, Z_NEED_DICT, Z_NO_FLUSH, Z_OK, Z_STREAM_ERROR, Z_VERSION_ERROR }; use crate::{ core::{ compression::{Checksum, Deflater, Inflater}, error::{DeflateError, InflateError} }, utils::ReadFill }; const ENCODER_BUF_SIZE: usize = 8192; const DECODER_BUF_SIZE: usize = ENCODER_BUF_SIZE * 2; unsafe fn zstream_zeroed() -> z_stream { let arr: [u8; std::mem::size_of::<z_stream>()] = [0; std::mem::size_of::<z_stream>()]; std::mem::transmute(arr) } fn new_encoder() -> Result<z_stream, DeflateError> { unsafe { let mut stream: z_stream = zstream_zeroed(); let err = deflateInit_( &mut stream as _, Z_DEFAULT_COMPRESSION, "1.1.3".as_ptr() as _, std::mem::size_of::<z_stream>() as _ ); if err == Z_OK { return Ok(stream); } match err { Z_MEM_ERROR => Err(DeflateError::Memory), Z_STREAM_ERROR => Err(DeflateError::Unsupported("compression level")), Z_VERSION_ERROR => Err(DeflateError::Unsupported("version")), _ => Err(DeflateError::Unknown) } } } fn new_decoder() -> Result<z_stream, InflateError> { unsafe { let mut stream: z_stream = zstream_zeroed(); let err = inflateInit_( &mut stream as _, "1.1.3".as_ptr() as _, std::mem::size_of::<z_stream>() as _ ); if err == Z_OK { return Ok(stream); } match err { Z_MEM_ERROR => Err(InflateError::Memory), Z_DATA_ERROR => Err(InflateError::Data), Z_VERSION_ERROR => Err(InflateError::Unsupported("version")), _ => Err(InflateError::Unknown) } } } fn do_deflate<TRead: Read, TWrite: Write, TChecksum: Checksum>( stream: &mut z_stream, mut input: TRead, mut output: TWrite, inflated_size: usize, chksum: &mut TChecksum ) -> Result<usize, DeflateError> { let mut inbuf: [u8; ENCODER_BUF_SIZE] = [0; ENCODER_BUF_SIZE]; let mut outbuf: [u8; ENCODER_BUF_SIZE] = [0; ENCODER_BUF_SIZE]; let mut count: usize = 0; let mut csize: usize = 0; loop { let len = input.read_fill(&mut inbuf)?; 
count += len; chksum.push(&inbuf[0..len]); stream.avail_in = len as _; let action = { if count == inflated_size { Z_FINISH } else { Z_NO_FLUSH } }; stream.next_in = inbuf.as_mut_ptr(); loop { stream.avail_out = ENCODER_BUF_SIZE as _; stream.next_out = outbuf.as_mut_ptr(); unsafe { let err = deflate(stream, action); if err != Z_OK { return match err { Z_MEM_ERROR => Err(DeflateError::Memory), Z_STREAM_ERROR => Err(DeflateError::Unsupported("compression level")), Z_VERSION_ERROR => Err(DeflateError::Unsupported("version")), _ => Err(DeflateError::Unknown) }; } } let len = ENCODER_BUF_SIZE - stream.avail_out as usize; output.write_all(&outbuf[0..len])?; csize += len; if stream.avail_out == 0 { break; } } if action == Z_FINISH { break; } } Ok(csize) } fn do_inflate<TRead: Read, TWrite: Write, TChecksum: Checksum>( stream: &mut z_stream, mut input: TRead, mut output: TWrite, deflated_size: usize, chksum: &mut TChecksum ) -> Result<(), InflateError> { let mut inbuf: [u8; DECODER_BUF_SIZE] = [0; DECODER_BUF_SIZE]; let mut outbuf: [u8; DECODER_BUF_SIZE] = [0; DECODER_BUF_SIZE]; let mut remaining = deflated_size; loop { let len = input.read_fill(&mut inbuf)?; remaining -= len; if remaining == 0 && len == 0 { break; } stream.avail_in = len as _; stream.next_in = inbuf.as_mut_ptr(); loop { stream.avail_out = DECODER_BUF_SIZE as _; stream.next_out = outbuf.as_mut_ptr(); unsafe { let err = inflate(stream, Z_NO_FLUSH); match err { Z_MEM_ERROR => return Err(InflateError::Memory), Z_DATA_ERROR => return Err(InflateError::Data), Z_NEED_DICT => return Err(InflateError::Data), Z_VERSION_ERROR => return Err(InflateError::Unsupported("version")), _ => () } } let len = DECODER_BUF_SIZE - stream.avail_out as usize; chksum.push(&outbuf[0..len]); output.write_all(&outbuf[0..len])?; if stream.avail_out == 0 { break; } } } Ok(()) } pub struct ZlibCompressionMethod {} impl Deflater for ZlibCompressionMethod { fn deflate<TRead: Read, TWrite: Write, TChecksum: Checksum>( input: TRead,
deflated_size: usize, chksum: &mut TChecksum ) -> Result<(), InflateError> { let mut decoder = new_decoder()?; let res = do_inflate(&mut decoder, input, output, deflated_size, chksum); unsafe { inflateEnd(&mut decoder); } res } }
output: TWrite, inflated_size: usize, chksum: &mut TChecksum ) -> Result<usize, DeflateError> { let mut encoder = new_encoder()?; let res = do_deflate(&mut encoder, input, output, inflated_size, chksum); unsafe { deflateEnd(&mut encoder); } res } } impl Inflater for ZlibCompressionMethod { fn inflate<TRead: Read, TWrite: Write, TChecksum: Checksum>( input: TRead, output: TWrite,
random
[]
Rust
src/prepared_email.rs
mikedilger/mailstrom
0253dabc0c304f43991943b54e5b73da07192414
use crate::delivery_result::DeliveryResult; use email_format::rfc5322::headers::Bcc; use email_format::rfc5322::types::{Address, GroupList, Mailbox}; use email_format::Email; use crate::error::Error; use lettre::{EmailAddress, SendableEmail, Envelope}; use crate::message_status::InternalMessageStatus; use crate::recipient_status::InternalRecipientStatus; use uuid::Uuid; #[derive(Debug, Clone, Serialize, Deserialize, Default)] pub struct PreparedEmail { pub to: Vec<String>, pub from: String, pub message_id: String, pub message: Vec<u8>, } impl PreparedEmail { pub fn as_sendable_email(&self) -> Result<SendableEmail, lettre::error::Error> { let to: Result<Vec<EmailAddress>, lettre::error::Error> = self.to.iter().map(|s| EmailAddress::new(s.clone())).collect(); let to = to?; Ok(SendableEmail::new( Envelope::new( Some(EmailAddress::new(self.from.clone())?), to)?, self.message_id.clone(), self.message.clone() )) } } pub fn prepare_email( mut email: Email, helo_name: &str, ) -> Result<(PreparedEmail, InternalMessageStatus), Error> { let recipients = determine_recipients(&email); email.clear_bcc(); let message_id = match email.get_message_id() { Some(mid) => format!("{}@{}", mid.0.id_left, mid.0.id_right), None => { let message_id = format!("{}@{}", Uuid::new_v4().hyphenated().to_string(), helo_name); email.set_message_id(&*format!("<{}>", message_id))?; message_id } }; let prepared_email = PreparedEmail { to: recipients .iter() .map(|r| r.smtp_email_addr.clone()) .collect(), from: format!("{}", email.get_from().0), message_id: message_id.clone(), message: format!("{}", email).into_bytes(), }; let _ = ::lettre::EmailAddress::new(prepared_email.from.clone())?; prepared_email.to.iter() .try_for_each(|s| ::lettre::EmailAddress::new(s.clone()).map(|_|()))?; let internal_message_status = InternalMessageStatus { message_id, recipients, attempts_remaining: 3, }; Ok((prepared_email, internal_message_status)) } fn determine_recipients(email: &Email) -> Vec<InternalRecipientStatus> 
{ let mut addresses: Vec<Address> = Vec::new(); if let Some(to) = email.get_to() { addresses.extend((to.0).0); } if let Some(cc) = email.get_cc() { addresses.extend((cc.0).0); } if let Some(bcc) = email.get_bcc() { if let Bcc::AddressList(al) = bcc { addresses.extend(al.0); } } addresses.dedup(); let mut recipients: Vec<InternalRecipientStatus> = Vec::new(); for address in addresses { match address { Address::Mailbox(mb) => { recipients.push(recipient_from_mailbox(mb)); } Address::Group(grp) => { if let Some(gl) = grp.group_list { match gl { GroupList::MailboxList(mbl) => { for mb in mbl.0 { recipients.push(recipient_from_mailbox(mb)); } } GroupList::CFWS(_) => continue, } } } } } recipients } fn recipient_from_mailbox(mb: Mailbox) -> InternalRecipientStatus { let (email_addr, smtp_email_addr, domain) = match mb { Mailbox::NameAddr(na) => ( format!("{}", na), format!("{}", na.angle_addr.addr_spec), format!("{}", na.angle_addr.addr_spec.domain), ), Mailbox::AddrSpec(ads) => ( format!("{}", ads), format!("{}", ads), format!("{}", ads.domain), ), }; InternalRecipientStatus { email_addr: email_addr.trim().to_owned(), smtp_email_addr: smtp_email_addr.trim().to_owned(), domain: domain.trim().to_owned(), mx_servers: None, current_mx: 0, result: DeliveryResult::Queued, } }
use crate::delivery_result::DeliveryResult; use email_format::rfc5322::headers::Bcc; use email_format::rfc5322::types::{Address, GroupList, Mailbox}; use email_format::Email; use crate::error::Error; use lettre::{EmailAddress, SendableEmail, Envelope}; use crate::message_status::InternalMessageStatus; use crate::recipient_status::InternalRecipientStatus; use uuid::Uuid; #[derive(Debug, Clone, Serialize, Deserialize, Default)] pub struct PreparedEmail { pub to: Vec<String>, pub from: String, pub message_id: String, pub message: Vec<u8>, } impl PreparedEmail { pub fn as_sendable_email(&self) -> Result<SendableEmail, lettre::error::Erro
} pub fn prepare_email( mut email: Email, helo_name: &str, ) -> Result<(PreparedEmail, InternalMessageStatus), Error> { let recipients = determine_recipients(&email); email.clear_bcc(); let message_id = match email.get_message_id() { Some(mid) => format!("{}@{}", mid.0.id_left, mid.0.id_right), None => { let message_id = format!("{}@{}", Uuid::new_v4().hyphenated().to_string(), helo_name); email.set_message_id(&*format!("<{}>", message_id))?; message_id } }; let prepared_email = PreparedEmail { to: recipients .iter() .map(|r| r.smtp_email_addr.clone()) .collect(), from: format!("{}", email.get_from().0), message_id: message_id.clone(), message: format!("{}", email).into_bytes(), }; let _ = ::lettre::EmailAddress::new(prepared_email.from.clone())?; prepared_email.to.iter() .try_for_each(|s| ::lettre::EmailAddress::new(s.clone()).map(|_|()))?; let internal_message_status = InternalMessageStatus { message_id, recipients, attempts_remaining: 3, }; Ok((prepared_email, internal_message_status)) } fn determine_recipients(email: &Email) -> Vec<InternalRecipientStatus> { let mut addresses: Vec<Address> = Vec::new(); if let Some(to) = email.get_to() { addresses.extend((to.0).0); } if let Some(cc) = email.get_cc() { addresses.extend((cc.0).0); } if let Some(bcc) = email.get_bcc() { if let Bcc::AddressList(al) = bcc { addresses.extend(al.0); } } addresses.dedup(); let mut recipients: Vec<InternalRecipientStatus> = Vec::new(); for address in addresses { match address { Address::Mailbox(mb) => { recipients.push(recipient_from_mailbox(mb)); } Address::Group(grp) => { if let Some(gl) = grp.group_list { match gl { GroupList::MailboxList(mbl) => { for mb in mbl.0 { recipients.push(recipient_from_mailbox(mb)); } } GroupList::CFWS(_) => continue, } } } } } recipients } fn recipient_from_mailbox(mb: Mailbox) -> InternalRecipientStatus { let (email_addr, smtp_email_addr, domain) = match mb { Mailbox::NameAddr(na) => ( format!("{}", na), format!("{}", na.angle_addr.addr_spec), 
format!("{}", na.angle_addr.addr_spec.domain), ), Mailbox::AddrSpec(ads) => ( format!("{}", ads), format!("{}", ads), format!("{}", ads.domain), ), }; InternalRecipientStatus { email_addr: email_addr.trim().to_owned(), smtp_email_addr: smtp_email_addr.trim().to_owned(), domain: domain.trim().to_owned(), mx_servers: None, current_mx: 0, result: DeliveryResult::Queued, } }
r> { let to: Result<Vec<EmailAddress>, lettre::error::Error> = self.to.iter().map(|s| EmailAddress::new(s.clone())).collect(); let to = to?; Ok(SendableEmail::new( Envelope::new( Some(EmailAddress::new(self.from.clone())?), to)?, self.message_id.clone(), self.message.clone() )) }
function_block-function_prefixed
[ { "content": "// Deliver an email to an SMTP server\n\npub fn smtp_delivery(\n\n prepared_email: &PreparedEmail,\n\n smtp_server_domain: &str,\n\n port: u16,\n\n config: &Config\n\n) -> DeliveryResult {\n\n\n\n // lettre::EmailAddress checks validity. But we checked that when we created\n\n /...
Rust
src/main.rs
pandaman64/qopter
698240f06c530f767359b847a0bdfedb168d063e
#![feature(duration_as_u128)] extern crate clap; #[macro_use] extern crate failure; extern crate openqasm; extern crate qopter; extern crate rayon; use clap::{App, Arg, SubCommand}; use openqasm::Qasm; use qopter::*; use rayon::prelude::*; use std::str::FromStr; #[derive(Debug)] struct CommandLineOption { topology: topology::ConnectionGraph, qasm: Qasm, beams: usize, initial_mappings: usize, paths: usize, edge_to_edge: bool, is_etequal: bool, } #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Fail)] #[fail(display = "not enough arguments")] struct NotEnoughArgumentsError; fn command_line() -> Result<CommandLineOption, failure::Error> { let matches = App::new("QOPTER") .arg( Arg::with_name("topology") .long("topology") .value_name("TOPOLOGY_FILE") .help("file of topology"), ) .arg( Arg::with_name("qasm") .long("qasm") .value_name("QASM_FILE") .help("file to compile"), ) .arg( Arg::with_name("beams") .short("b") .long("beams") .value_name("NUM_BEAMS") .help("number of beams to use"), ) .arg( Arg::with_name("mapping size") .short("s") .long("mapping_size") .value_name("NUM_MAPPING") .help("number of initial mappings"), ) .arg( Arg::with_name("number of paths") .short("p") .long("path") .value_name("NUM_PATHS") .help("number of paths"), ) .arg( Arg::with_name("edge-to-edge mapping") .short("e") .long("edge-to-edge") .value_name("EDGE_TO_EDGE") .help("use edge-to-edge mapping") .takes_value(false), ) .subcommand(SubCommand::with_name("etequal")) .get_matches(); let (topology, qasm) = match (matches.value_of("topology"), matches.value_of("qasm")) { (Some(topology), Some(qasm)) => { let topology = parse_topology(topology)?; let qasm = parse_qasm(qasm)?; (topology, qasm) } _ => return Err(NotEnoughArgumentsError.into()), }; let beams = match matches.value_of("beams") { Some(s) => FromStr::from_str(s)?, None => 10, }; let initial_mappings = match matches.value_of("mapping size") { Some(s) => FromStr::from_str(s)?, None => 0, }; let paths = match 
matches.value_of("number of paths") { Some(s) => FromStr::from_str(s)?, None => 1, }; Ok(CommandLineOption { topology, qasm, beams, initial_mappings, paths, edge_to_edge: matches.is_present("edge-to-edge mapping"), is_etequal: matches.subcommand_name() == Some("etequal"), }) } fn main() -> Result<(), failure::Error> { let options = command_line()?; if !options.is_etequal { let solution = run_solve( &options.topology, options.initial_mappings, options.edge_to_edge, options.beams, options.paths, options.qasm, None, false, ); println!("{:?}", solution); println!("{}", solution.qasm); println!("{}", solution.fidelity.into_inner().exp()); } else { const STEP: usize = 50; const SHOTS: usize = 100; (1..options.initial_mappings) .step_by(STEP) .map(|x| std::iter::repeat(x).take(SHOTS).collect::<Vec<_>>()) .flatten() .collect::<Vec<_>>() .par_iter() .map(|&initial_mappings| { let start = std::time::Instant::now(); let fidelity = run_solve( &options.topology, initial_mappings, options.edge_to_edge, options.beams, options.paths, options.qasm.clone(), None, false, ) .fidelity; let elapsed = start.elapsed(); (initial_mappings, fidelity, elapsed) }) .for_each(|(initial_mappings, f, elapsed)| { println!("{}, {}, {}", initial_mappings, f, elapsed.as_nanos()) }); } /* const SHOTS: usize = 50; (3..100000).step_by(500) .map(|x| std::iter::repeat(x).take(SHOTS).collect::<Vec<_>>()) .flatten() .collect::<Vec<_>>() .par_iter() .map(|&size| { let answer = beam_solve(&connection, &gates, RandomMapper::new(connection.size, size), 40); (size, answer.scheduler.fidelity) }) .for_each(|(size, fidelity)| println!("{},{}", size, fidelity)); */ /* (1..100).step_by(5) .map(|x| std::iter::repeat(x).take(SHOTS)) .flatten() .collect::<Vec<_>>() .par_iter() .map(|&beams| { let answer = beam_solve(&connection, &gates, RandomMapper::new(connection.size, 50000), beams); (beams, answer.scheduler.fidelity) }) .for_each(|(beams, fidelity)| println!("{}, {}", beams, fidelity)); */ Ok(()) }
#![feature(duration_as_u128)] extern crate clap; #[macro_use] extern crate failure; extern crate openqasm; extern crate qopter; extern crate rayon; use clap::{App, Arg, SubCommand}; use openqasm::Qasm; use qopter::*; use rayon::prelude::*; use std::str::FromStr; #[derive(Debug)] struct CommandLineOption { topology: topology::ConnectionGraph, qasm: Qasm, beams: usize, initial_mappings: usize, paths: usize, edge_to_edge: bool, is_etequal: bool, } #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Fail)] #[fail(display = "not enough arguments")] struct NotEnoughArgumentsError; fn command_line() -> Result<CommandLineOption, failure::Error> { let matches = App::new("QOPTER") .arg( Arg::with_name("topology") .long("topology") .value_name("TOPOLOGY_FILE") .help("file of topology"), ) .arg( Arg::with_name("qasm") .long("qasm") .value_name("QASM_FILE") .help("file to compile"), ) .arg( Arg::with_name("beams") .short("b") .long("beams") .value_name("NUM_BEAMS") .help("number of beams to use"), ) .arg( Arg::with_name("mapping size") .short("s") .long("mapping_size")
e_qasm(qasm)?; (topology, qasm) } _ => return Err(NotEnoughArgumentsError.into()), }; let beams = match matches.value_of("beams") { Some(s) => FromStr::from_str(s)?, None => 10, }; let initial_mappings = match matches.value_of("mapping size") { Some(s) => FromStr::from_str(s)?, None => 0, }; let paths = match matches.value_of("number of paths") { Some(s) => FromStr::from_str(s)?, None => 1, }; Ok(CommandLineOption { topology, qasm, beams, initial_mappings, paths, edge_to_edge: matches.is_present("edge-to-edge mapping"), is_etequal: matches.subcommand_name() == Some("etequal"), }) } fn main() -> Result<(), failure::Error> { let options = command_line()?; if !options.is_etequal { let solution = run_solve( &options.topology, options.initial_mappings, options.edge_to_edge, options.beams, options.paths, options.qasm, None, false, ); println!("{:?}", solution); println!("{}", solution.qasm); println!("{}", solution.fidelity.into_inner().exp()); } else { const STEP: usize = 50; const SHOTS: usize = 100; (1..options.initial_mappings) .step_by(STEP) .map(|x| std::iter::repeat(x).take(SHOTS).collect::<Vec<_>>()) .flatten() .collect::<Vec<_>>() .par_iter() .map(|&initial_mappings| { let start = std::time::Instant::now(); let fidelity = run_solve( &options.topology, initial_mappings, options.edge_to_edge, options.beams, options.paths, options.qasm.clone(), None, false, ) .fidelity; let elapsed = start.elapsed(); (initial_mappings, fidelity, elapsed) }) .for_each(|(initial_mappings, f, elapsed)| { println!("{}, {}, {}", initial_mappings, f, elapsed.as_nanos()) }); } /* const SHOTS: usize = 50; (3..100000).step_by(500) .map(|x| std::iter::repeat(x).take(SHOTS).collect::<Vec<_>>()) .flatten() .collect::<Vec<_>>() .par_iter() .map(|&size| { let answer = beam_solve(&connection, &gates, RandomMapper::new(connection.size, size), 40); (size, answer.scheduler.fidelity) }) .for_each(|(size, fidelity)| println!("{},{}", size, fidelity)); */ /* (1..100).step_by(5) .map(|x| 
std::iter::repeat(x).take(SHOTS)) .flatten() .collect::<Vec<_>>() .par_iter() .map(|&beams| { let answer = beam_solve(&connection, &gates, RandomMapper::new(connection.size, 50000), beams); (beams, answer.scheduler.fidelity) }) .for_each(|(beams, fidelity)| println!("{}, {}", beams, fidelity)); */ Ok(()) }
.value_name("NUM_MAPPING") .help("number of initial mappings"), ) .arg( Arg::with_name("number of paths") .short("p") .long("path") .value_name("NUM_PATHS") .help("number of paths"), ) .arg( Arg::with_name("edge-to-edge mapping") .short("e") .long("edge-to-edge") .value_name("EDGE_TO_EDGE") .help("use edge-to-edge mapping") .takes_value(false), ) .subcommand(SubCommand::with_name("etequal")) .get_matches(); let (topology, qasm) = match (matches.value_of("topology"), matches.value_of("qasm")) { (Some(topology), Some(qasm)) => { let topology = parse_topology(topology)?; let qasm = pars
function_block-random_span
[ { "content": "fn count_gates_on_same_qubits(size: usize, operations: &[openqasm::Operation]) -> usize {\n\n let mut last_target = vec![vec![]; size];\n\n let mut count = 0;\n\n for op in operations {\n\n use openqasm::Operation::*;\n\n match op {\n\n Unitary(_, _, target) => {\...
Rust
src/ra_proc_macro/tt.rs
qiaoruntao/cargo-equip
0f09aa7df4ea00fad38bcea3843251c86bb3fc81
use serde::{Deserialize, Serialize}; use smol_str::SmolStr; #[derive(Clone, Deserialize, Serialize)] enum TokenTree { Leaf(Leaf), Subtree(Subtree), } impl From<proc_macro2::TokenTree> for TokenTree { fn from(tt: proc_macro2::TokenTree) -> Self { match tt { proc_macro2::TokenTree::Group(group) => TokenTree::Subtree(group.into()), proc_macro2::TokenTree::Ident(ident) => TokenTree::Leaf(Leaf::Ident(ident.into())), proc_macro2::TokenTree::Punct(punct) => TokenTree::Leaf(Leaf::Punct(punct.into())), proc_macro2::TokenTree::Literal(lit) => TokenTree::Leaf(Leaf::Literal(lit.into())), } } } impl From<TokenTree> for proc_macro2::TokenTree { fn from(tt: TokenTree) -> Self { match tt { TokenTree::Subtree(group) => proc_macro2::TokenTree::Group(group.into()), TokenTree::Leaf(Leaf::Ident(ident)) => proc_macro2::TokenTree::Ident(ident.into()), TokenTree::Leaf(Leaf::Punct(punct)) => proc_macro2::TokenTree::Punct(punct.into()), TokenTree::Leaf(Leaf::Literal(lit)) => proc_macro2::TokenTree::Literal(lit.into()), } } } #[derive(Clone, Deserialize, Serialize)] pub(super) struct Subtree { delimiter: Option<Delimiter>, token_trees: Vec<TokenTree>, } impl From<proc_macro2::Group> for Subtree { fn from(group: proc_macro2::Group) -> Self { return Subtree { delimiter: match group.delimiter() { proc_macro2::Delimiter::Parenthesis => Some(delimiter(DelimiterKind::Parenthesis)), proc_macro2::Delimiter::Brace => Some(delimiter(DelimiterKind::Brace)), proc_macro2::Delimiter::Bracket => Some(delimiter(DelimiterKind::Bracket)), proc_macro2::Delimiter::None => None, }, token_trees: group.stream().into_iter().map(Into::into).collect(), }; fn delimiter(kind: DelimiterKind) -> Delimiter { Delimiter { kind } } } } impl From<Subtree> for proc_macro2::Group { fn from(subtree: Subtree) -> Self { let delimiter = match subtree.delimiter.map(|Delimiter { kind, .. 
}| kind) { Some(DelimiterKind::Parenthesis) => proc_macro2::Delimiter::Parenthesis, Some(DelimiterKind::Brace) => proc_macro2::Delimiter::Brace, Some(DelimiterKind::Bracket) => proc_macro2::Delimiter::Bracket, None => proc_macro2::Delimiter::None, }; let token_stream = subtree .token_trees .iter() .cloned() .map(proc_macro2::TokenTree::from) .collect(); proc_macro2::Group::new(delimiter, token_stream) } } #[derive(Clone, Copy, Deserialize, Serialize)] struct Delimiter { kind: DelimiterKind, } #[derive(Clone, Copy, Deserialize, Serialize)] enum DelimiterKind { Parenthesis, Brace, Bracket, } #[derive(Clone, Deserialize, Serialize)] enum Leaf { Literal(Literal), Punct(Punct), Ident(Ident), } #[derive(Clone, Deserialize, Serialize)] struct Literal { text: SmolStr, } impl From<proc_macro2::Literal> for Literal { fn from(lit: proc_macro2::Literal) -> Self { Self { text: lit.to_string().into(), } } } impl From<Literal> for proc_macro2::Literal { fn from(lit: Literal) -> Self { syn::parse_str(&lit.text) .unwrap_or_else(|e| panic!("could not parse {:?} as a literal: {}", &lit.text, e)) } } #[derive(Clone, Copy, Deserialize, Serialize)] struct Punct { char: char, spacing: Spacing, } impl From<proc_macro2::Punct> for Punct { fn from(punct: proc_macro2::Punct) -> Self { Self { char: punct.as_char(), spacing: punct.spacing().into(), } } } impl From<Punct> for proc_macro2::Punct { fn from(punct: Punct) -> Self { proc_macro2::Punct::new(punct.char, punct.spacing.into()) } } #[derive(Clone, Copy, Deserialize, Serialize)] enum Spacing { Alone, Joint, } impl From<proc_macro2::Spacing> for Spacing { fn from(spacing: proc_macro2::Spacing) -> Self { match spacing { proc_macro2::Spacing::Alone => Spacing::Alone, proc_macro2::Spacing::Joint => Spacing::Joint, } } } impl From<Spacing> for proc_macro2::Spacing { fn from(spacing: Spacing) -> Self { match spacing { Spacing::Alone => proc_macro2::Spacing::Alone, Spacing::Joint => proc_macro2::Spacing::Joint, } } } #[derive(Clone, Deserialize, 
Serialize)] struct Ident { text: SmolStr, } impl From<proc_macro2::Ident> for Ident { fn from(ident: proc_macro2::Ident) -> Self { Self { text: ident.to_string().into(), } } } impl From<Ident> for proc_macro2::Ident { fn from(ident: Ident) -> Self { proc_macro2::Ident::new(&ident.text, proc_macro2::Span::call_site()) } }
use serde::{Deserialize, Serialize}; use smol_str::SmolStr; #[derive(Clone, Deserialize, Serialize)] enum TokenTree { Leaf(Leaf), Subtree(Subtree), } impl From<proc_macro2::TokenTree> for TokenTree { fn from(tt: proc_macro2::TokenTree) -> Self { match tt { proc_macro2::TokenTree::Group(group) => TokenTree::Subtree(group.into()), proc_macro2::TokenTree::Ident(ident) => TokenTree::Leaf(Leaf::Ident(ident.into())), proc_macro2::TokenTree::Punct(punct) => TokenTree::Leaf(Leaf::Punct(punct.into())), proc_macro2::TokenTree::Literal(lit) => TokenTree::Leaf(Leaf::Literal(lit.into())), } } } impl From<TokenTree> for proc_macro2::TokenTree { fn from(tt: TokenTree) -> Self { match tt { TokenTree::Subtree(group) => proc_macro2::TokenTree::Group(group.into()), TokenTree::Leaf(Leaf::Ident(ident)) => proc_macro2::TokenTree::Ident(ident.into()), TokenTree::Leaf(Leaf::Punct(punct)) => proc_macro2::TokenTree::Punct(punct.into()), TokenTree::Leaf(Leaf::Literal(lit)) => proc_macro2::TokenTree::Literal(lit.into()), } } } #[derive(Clone, Deserialize, Serialize)] pub(super) struct Subtree { delimiter: Option<Delimiter>, token_trees: Vec<TokenTree>, } impl From<proc_macro2::Group> for Subtree { fn from(group: proc_macro2::Group) -> Self { retur
} impl From<Subtree> for proc_macro2::Group { fn from(subtree: Subtree) -> Self { let delimiter = match subtree.delimiter.map(|Delimiter { kind, .. }| kind) { Some(DelimiterKind::Parenthesis) => proc_macro2::Delimiter::Parenthesis, Some(DelimiterKind::Brace) => proc_macro2::Delimiter::Brace, Some(DelimiterKind::Bracket) => proc_macro2::Delimiter::Bracket, None => proc_macro2::Delimiter::None, }; let token_stream = subtree .token_trees .iter() .cloned() .map(proc_macro2::TokenTree::from) .collect(); proc_macro2::Group::new(delimiter, token_stream) } } #[derive(Clone, Copy, Deserialize, Serialize)] struct Delimiter { kind: DelimiterKind, } #[derive(Clone, Copy, Deserialize, Serialize)] enum DelimiterKind { Parenthesis, Brace, Bracket, } #[derive(Clone, Deserialize, Serialize)] enum Leaf { Literal(Literal), Punct(Punct), Ident(Ident), } #[derive(Clone, Deserialize, Serialize)] struct Literal { text: SmolStr, } impl From<proc_macro2::Literal> for Literal { fn from(lit: proc_macro2::Literal) -> Self { Self { text: lit.to_string().into(), } } } impl From<Literal> for proc_macro2::Literal { fn from(lit: Literal) -> Self { syn::parse_str(&lit.text) .unwrap_or_else(|e| panic!("could not parse {:?} as a literal: {}", &lit.text, e)) } } #[derive(Clone, Copy, Deserialize, Serialize)] struct Punct { char: char, spacing: Spacing, } impl From<proc_macro2::Punct> for Punct { fn from(punct: proc_macro2::Punct) -> Self { Self { char: punct.as_char(), spacing: punct.spacing().into(), } } } impl From<Punct> for proc_macro2::Punct { fn from(punct: Punct) -> Self { proc_macro2::Punct::new(punct.char, punct.spacing.into()) } } #[derive(Clone, Copy, Deserialize, Serialize)] enum Spacing { Alone, Joint, } impl From<proc_macro2::Spacing> for Spacing { fn from(spacing: proc_macro2::Spacing) -> Self { match spacing { proc_macro2::Spacing::Alone => Spacing::Alone, proc_macro2::Spacing::Joint => Spacing::Joint, } } } impl From<Spacing> for proc_macro2::Spacing { fn from(spacing: Spacing) -> 
Self { match spacing { Spacing::Alone => proc_macro2::Spacing::Alone, Spacing::Joint => proc_macro2::Spacing::Joint, } } } #[derive(Clone, Deserialize, Serialize)] struct Ident { text: SmolStr, } impl From<proc_macro2::Ident> for Ident { fn from(ident: proc_macro2::Ident) -> Self { Self { text: ident.to_string().into(), } } } impl From<Ident> for proc_macro2::Ident { fn from(ident: Ident) -> Self { proc_macro2::Ident::new(&ident.text, proc_macro2::Span::call_site()) } }
n Subtree { delimiter: match group.delimiter() { proc_macro2::Delimiter::Parenthesis => Some(delimiter(DelimiterKind::Parenthesis)), proc_macro2::Delimiter::Brace => Some(delimiter(DelimiterKind::Brace)), proc_macro2::Delimiter::Bracket => Some(delimiter(DelimiterKind::Bracket)), proc_macro2::Delimiter::None => None, }, token_trees: group.stream().into_iter().map(Into::into).collect(), }; fn delimiter(kind: DelimiterKind) -> Delimiter { Delimiter { kind } } }
function_block-function_prefixed
[ { "content": "fn targets_in_ws(metadata: &cm::Metadata) -> impl Iterator<Item = (&cm::Target, &cm::Package)> {\n\n metadata\n\n .packages\n\n .iter()\n\n .filter(move |cm::Package { id, .. }| metadata.workspace_members.contains(id))\n\n .flat_map(|p| p.targets.iter().map(move |t| ...
Rust
src/qqhtd.rs
signupsi/qht-rs
1d363d755b7797e05e3c44cbef8cef8e2f08e184
use crate::basicqht::*; use crate::filter::Filter; pub use rand::rngs::StdRng; pub use rand::{FromEntropy, Rng}; pub use std::collections::hash_map::DefaultHasher; pub use std::hash::{Hash, Hasher}; pub use rust_dense_bitset::DenseBitSetExtended; const FINGERPRINT_SIZE_LIMIT: usize = 8; pub struct QQuotientHashTableD { n_cells: usize, n_buckets: usize, fingerprint_size: usize, pow_fingerprint_size: u64, qht: DenseBitSetExtended, } impl QQuotientHashTableD { pub fn new(memory_size: usize, n_buckets: usize, fingerprint_size: usize) -> Self { if fingerprint_size > FINGERPRINT_SIZE_LIMIT { panic!("[qQHTcd Filter] Incorrect parameters, fingerprint_size cannot exceed 8."); } else if fingerprint_size == 0 { panic!("[QHTc Filter] Incorrect parameters, fingerprint_size cannot be zero."); } if n_buckets == 0 { panic!("[QHTc Filter] Incorrect parameters, n_buckets cannot be zero."); } let pow_fingerprint_size = 2u64.pow(fingerprint_size as u32); let n_cells = memory_size / (n_buckets * fingerprint_size); let qht = DenseBitSetExtended::with_capacity(n_cells * n_buckets * fingerprint_size); if n_cells == 0 { panic!("[QHT Filter] Incorrect parameters, memory size should be at least n_buckets * fingerprint_size"); } Self { n_cells, n_buckets, fingerprint_size, pow_fingerprint_size, qht, } } fn insert_fingerprint_in_last_bucket(&mut self, address: usize, fingerprint: Fingerprint) { for prev in 0..(self.n_buckets - 1) { let idx = prev + 1; let fg = self.get_fingerprint_from_bucket(address, idx); self.insert_fingerprint_in_bucket(address, prev, fg); } let last_bucket = self.n_buckets - 1; self.insert_fingerprint_in_bucket(address, last_bucket, fingerprint) } } impl_basicqht!(QQuotientHashTableD); impl Filter for QQuotientHashTableD { fn lookup(&self, e: impl Hash) -> bool { let fingerprint = self.get_fingerprint(&e); let address = (get_hash(&e, 1, 0) as usize) % self.n_cells; self.in_cell(address, fingerprint) } fn insert(&mut self, e: impl Hash) -> bool { let fingerprint = 
self.get_fingerprint(&e); let address = (get_hash(&e, 1, 0) as usize) % self.n_cells; let detected = self.in_cell(address, fingerprint); self.insert_fingerprint_in_last_bucket(address, fingerprint); detected } }
use crate::basicqht::*; use crate::filter::Filter; pub use rand::rngs::StdRng; pub use rand::{FromEntropy, Rng}; pub use std::collections::hash_map::DefaultHasher; pub use std::hash::{Hash, Hasher}; pub use rust_dense_bitset::DenseBitSetExtended; const FINGERPRINT_SIZE_LIMIT: usize = 8; pub struct QQuotientHashTableD { n_cells: usize, n_buckets: usize, fingerprint_size: usize, pow_fingerprint_size: u64, qht: DenseBitSetExtended, } impl QQuotientHashTableD { pub fn new(memory_size: usize, n_buckets: usize, fingerprint_size: usize) -> Self { if fingerprint_size > FINGERPRINT_SIZE_LIMIT { panic!("[qQHTcd Filter] Incorrect parameters, fingerprint_size cannot exceed 8."); } else if fingerprint_size == 0 { panic!("[QHTc Filter] Incorrect parameters, fingerprint_size cannot be zero."); } if n_buckets == 0 { panic!("[QHTc Filter] Incorrect parameters, n_buckets cannot be zero."); } let pow_fingerprint_size = 2u64.pow(fingerprint_size as u32); let n_cells = memory_size / (n_buckets * fingerprint_size); let qht = DenseBitSetExtended::with_capacity(n_cells * n_buckets * finger
Hash) -> bool { let fingerprint = self.get_fingerprint(&e); let address = (get_hash(&e, 1, 0) as usize) % self.n_cells; self.in_cell(address, fingerprint) } fn insert(&mut self, e: impl Hash) -> bool { let fingerprint = self.get_fingerprint(&e); let address = (get_hash(&e, 1, 0) as usize) % self.n_cells; let detected = self.in_cell(address, fingerprint); self.insert_fingerprint_in_last_bucket(address, fingerprint); detected } }
print_size); if n_cells == 0 { panic!("[QHT Filter] Incorrect parameters, memory size should be at least n_buckets * fingerprint_size"); } Self { n_cells, n_buckets, fingerprint_size, pow_fingerprint_size, qht, } } fn insert_fingerprint_in_last_bucket(&mut self, address: usize, fingerprint: Fingerprint) { for prev in 0..(self.n_buckets - 1) { let idx = prev + 1; let fg = self.get_fingerprint_from_bucket(address, idx); self.insert_fingerprint_in_bucket(address, prev, fg); } let last_bucket = self.n_buckets - 1; self.insert_fingerprint_in_bucket(address, last_bucket, fingerprint) } } impl_basicqht!(QQuotientHashTableD); impl Filter for QQuotientHashTableD { fn lookup(&self, e: impl
random
[ { "content": "/// Returns the hash of (e, base, counter)\n\npub fn get_hash(e: impl Hash, base: u64, counter: u64) -> u64 {\n\n let mut s = DefaultHasher::new();\n\n e.hash(&mut s);\n\n base.hash(&mut s);\n\n counter.hash(&mut s);\n\n s.finish()\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! impl_ba...
Rust
sdk/cosmos/src/requests/execute_stored_procedure_builder.rs
extrawurst/azure-sdk-for-rust
bc74cad7cc5f9226cf39c4abae0d902a85765e74
use crate::prelude::*; use crate::resources::stored_procedure::Parameters; use crate::responses::ExecuteStoredProcedureResponse; use http::StatusCode; use serde::de::DeserializeOwned; use std::convert::TryInto; #[derive(Debug, Clone)] pub struct ExecuteStoredProcedureBuilder<'a, 'b> { stored_procedure_client: &'a StoredProcedureClient, parameters: Option<&'b Parameters>, user_agent: Option<azure_core::UserAgent<'b>>, activity_id: Option<azure_core::ActivityId<'b>>, consistency_level: Option<ConsistencyLevel>, allow_tentative_writes: TenativeWritesAllowance, partition_keys: Option<&'b PartitionKeys>, } impl<'a, 'b> ExecuteStoredProcedureBuilder<'a, 'b> { pub(crate) fn new(stored_procedure_client: &'a StoredProcedureClient) -> Self { Self { stored_procedure_client, parameters: None, user_agent: None, activity_id: None, consistency_level: None, allow_tentative_writes: TenativeWritesAllowance::Deny, partition_keys: None, } } fn stored_procedure_client(&self) -> &'a StoredProcedureClient { self.stored_procedure_client } fn parameters(&self) -> Option<&'b Parameters> { self.parameters } fn user_agent(&self) -> Option<azure_core::UserAgent<'b>> { self.user_agent } fn activity_id(&self) -> Option<azure_core::ActivityId<'b>> { self.activity_id } fn partition_keys(&self) -> Option<&'b PartitionKeys> { self.partition_keys } fn consistency_level(&self) -> Option<ConsistencyLevel> { self.consistency_level.clone() } fn allow_tentative_writes(&self) -> TenativeWritesAllowance { self.allow_tentative_writes } pub fn with_parameters(self, parameters: &'b Parameters) -> Self { Self { parameters: Some(parameters), ..self } } pub fn with_user_agent(self, user_agent: &'b str) -> Self { Self { user_agent: Some(azure_core::UserAgent::new(user_agent)), ..self } } pub fn with_activity_id(self, activity_id: &'b str) -> Self { ExecuteStoredProcedureBuilder { activity_id: Some(azure_core::ActivityId::new(activity_id)), ..self } } pub fn with_consistency_level(self, consistency_level: 
ConsistencyLevel) -> Self { Self { consistency_level: Some(consistency_level), ..self } } pub fn with_allow_tentative_writes( self, allow_tentative_writes: TenativeWritesAllowance, ) -> Self { Self { allow_tentative_writes, ..self } } pub fn with_partition_keys(self, partition_keys: &'b PartitionKeys) -> Self { Self { partition_keys: Some(partition_keys), ..self } } pub async fn execute<T>(&self) -> Result<ExecuteStoredProcedureResponse<T>, CosmosError> where T: DeserializeOwned, { trace!("ExecuteStoredProcedureBuilder::execute called"); let request = self .stored_procedure_client() .prepare_request_with_stored_procedure_name(http::Method::POST); let request = crate::headers::add_header(self.user_agent(), request); let request = crate::headers::add_header(self.activity_id(), request); let request = crate::headers::add_header(self.consistency_level(), request); let request = crate::headers::add_header(Some(self.allow_tentative_writes()), request); let request = crate::headers::add_header(self.partition_keys(), request); let request = request.header(http::header::CONTENT_TYPE, "application/json"); let body = if let Some(parameters) = self.parameters() { parameters.to_json() } else { String::from("[]") }; let request = request.body(body.as_bytes())?; Ok(self .stored_procedure_client() .http_client() .execute_request_check_status(request, StatusCode::OK) .await? .try_into()?) } }
use crate::prelude::*; use crate::resources::stored_procedure::Parameters; use crate::responses::ExecuteStoredProcedureResponse; use http::StatusCode; use serde::de::DeserializeOwned; use std::convert::TryInto; #[derive(Debug, Clone)] pub struct ExecuteStoredProcedureBuilder<'a, 'b> { stored_procedure_client: &'a StoredProcedureClient, parameters: Option<&'b Parameters>, user_agent: Option<azure_core::UserAgent<'b>>, activity_id: Option<azure_core::ActivityId<'b>>, consistency_level: Option<ConsistencyLevel>, allow_tentative_writes: TenativeWritesAllowance, partition_keys: Option<&'b PartitionKeys>, } impl<'a, 'b> ExecuteStoredProcedureBuilder<'a, 'b> { pub(crate) fn new(stored_procedure_client: &'a StoredProcedureClient) -> Self { Self { stored_procedure_client, parameters: None, user_agent: None, activity_id: None, consistency_level: None, allow_tentative_writes: TenativeWritesAllowance::Deny, partition_keys: None, } } fn stored_procedure_client(&self) -> &'a StoredProcedureClient { self.stored_procedure_client } fn parameters(&self) -> Option<&'b Parameters> { self.parameters } fn user_agent(&self) -> Option<azure_core::UserAgent<'b>> { self.user_agent } fn activity_id(&self) -> Option<azure_core::ActivityId<'b>> { self.activity_id } fn partition_keys(&self) -> Option<&'b PartitionKeys> { self.partition_keys } fn consistency_level(&self) -> Option<ConsistencyLevel> { self.consistency_level.clone() } fn allow_tentative_writes(&self) -> TenativeWritesAllowance { self.allow_tentative_writes } pub fn with_parameters(self, parameters: &'b Parameters) -> Self { Self { parameters: Some(parameters), ..self } } pub fn with_user_agent(self, user_agent: &'b str) -> Self { Self { user_agent: Some(azure_core::UserAgent::new(user_agent)), ..self } } pub fn with_activity_id(self, activity_id: &'b str) -> Self { ExecuteStoredProcedureBuilder { activity_id: Some(azure_core::ActivityId::new(activity_id)), ..self } } pub fn with_consistency_level(self, consistency_level: 
ConsistencyLevel) -> Self { Self { consistency_level: Some(consistency_level), ..self } }
pub fn with_partition_keys(self, partition_keys: &'b PartitionKeys) -> Self { Self { partition_keys: Some(partition_keys), ..self } } pub async fn execute<T>(&self) -> Result<ExecuteStoredProcedureResponse<T>, CosmosError> where T: DeserializeOwned, { trace!("ExecuteStoredProcedureBuilder::execute called"); let request = self .stored_procedure_client() .prepare_request_with_stored_procedure_name(http::Method::POST); let request = crate::headers::add_header(self.user_agent(), request); let request = crate::headers::add_header(self.activity_id(), request); let request = crate::headers::add_header(self.consistency_level(), request); let request = crate::headers::add_header(Some(self.allow_tentative_writes()), request); let request = crate::headers::add_header(self.partition_keys(), request); let request = request.header(http::header::CONTENT_TYPE, "application/json"); let body = if let Some(parameters) = self.parameters() { parameters.to_json() } else { String::from("[]") }; let request = request.body(body.as_bytes())?; Ok(self .stored_procedure_client() .http_client() .execute_request_check_status(request, StatusCode::OK) .await? .try_into()?) } }
pub fn with_allow_tentative_writes( self, allow_tentative_writes: TenativeWritesAllowance, ) -> Self { Self { allow_tentative_writes, ..self } }
function_block-full_function
[]
Rust
nannou-apps/src/bin/check.rs
reedrosenbluth/oscen
722829dd35beccaa141893a43d6c3d91e3622b62
use crossbeam::crossbeam_channel::{unbounded, Receiver, Sender}; use nannou::prelude::*; use nannou_audio as audio; use nannou_audio::Buffer; use oscen::envelopes::*; use oscen::filters::*; use oscen::operators::*; use oscen::oscillators::*; use oscen::rack::*; use std::sync::Arc; fn main() { nannou::app(model).update(update).run(); } struct Model { pub stream: audio::Stream<Synth>, receiver: Receiver<f32>, samples: Vec<f32>, } struct Synth { sender: Sender<f32>, rack: Rack, controls: Box<Controls>, state: Box<State>, outputs: Box<Outputs>, buffers: Box<Buffers>, union: Arc<Union>, adsr: Arc<Adsr>, names: Vec<&'static str>, } fn model(app: &App) -> Model { let (sender, receiver) = unbounded(); let mut names = vec![]; app.new_window() .key_pressed(key_pressed) .size(700, 350) .view(view) .build() .unwrap(); let audio_host = audio::Host::new(); let (mut rack, mut controls, mut state, outputs, mut buffers) = tables(); let mut oscs = vec![]; let freq = 220.0; let sine = OscBuilder::new(sine_osc) .hz(freq) .rack(&mut rack, &mut controls, &mut state); oscs.push(sine.tag()); names.push("Sine"); let square = OscBuilder::new(square_osc) .hz(freq) .rack(&mut rack, &mut controls, &mut state); oscs.push(square.tag()); names.push("Square"); let saw = OscBuilder::new(saw_osc) .hz(freq) .rack(&mut rack, &mut controls, &mut state); oscs.push(saw.tag()); names.push("Saw"); let tri = OscBuilder::new(triangle_osc) .hz(freq) .rack(&mut rack, &mut controls, &mut state); oscs.push(tri.tag()); names.push("Triangle"); let mut builder = square_wave(8); builder.hz(freq); let sq8 = builder.rack(&mut rack, &mut controls); oscs.push(sq8.tag()); names.push("Fourier Square 8"); let mut builder = triangle_wave(8); builder.hz(freq); let tri8 = builder.rack(&mut rack, &mut controls); oscs.push(tri8.tag()); names.push("Fourier Triangle 8"); let wn = WhiteNoiseBuilder::new() .amplitude(0.5) .rack(&mut rack, &mut controls); oscs.push(wn.tag()); names.push("White Noise"); let pn = 
PinkNoiseBuilder::new() .amplitude(0.5) .rack(&mut rack, &mut controls); oscs.push(pn.tag()); names.push("Pink Noise"); let mix = MixerBuilder::new(vec![sine.tag(), square.tag()]).rack(&mut rack, &mut controls); oscs.push(mix.tag()); names.push("Mixer Sine & Square"); let prod = ProductBuilder::new(vec![sine.tag(), pn.tag()]).rack(&mut rack, &mut controls); oscs.push(prod.tag()); names.push("Product Sine & Square"); let lfo = OscBuilder::new(sine_osc) .hz(2.0) .rack(&mut rack, &mut controls, &mut state); let vca = VcaBuilder::new(sine.tag()) .level(lfo.tag()) .rack(&mut rack, &mut controls); oscs.push(vca.tag()); names.push("Vca amp controlled by sine"); let cf = CrossFadeBuilder::new(sine.tag(), square.tag()).rack(&mut rack, &mut controls); cf.set_alpha(&mut controls, Control::V(lfo.tag(), 0)); oscs.push(cf.tag()); names.push("CrossFade Sine & Square, alpha is sine lfo"); let adsr = AdsrBuilder::linear() .attack(0.5) .decay(0.5) .sustain(0.75) .release(1.0) .rack(&mut rack, &mut controls); let adsr_vca = VcaBuilder::new(sine.tag()) .level(adsr.tag()) .rack(&mut rack, &mut controls); oscs.push(adsr_vca.tag()); names.push("Adsr - . 
= on , = off"); let modulator = ModulatorBuilder::new(sine_osc) .hz(220.0) .ratio(2.0) .index(4.0) .rack(&mut rack, &mut controls, &mut state); let fm = OscBuilder::new(triangle_osc).hz(modulator.tag()).rack( &mut rack, &mut controls, &mut state, ); oscs.push(fm.tag()); names.push("FM synthesis"); let lpf = LpfBuilder::new(square.tag()) .cut_off(440.0) .rack(&mut rack, &mut controls); oscs.push(lpf.tag()); names.push("Low Pass Filter"); let delay = DelayBuilder::new(sine.tag(), 0.02.into()).rack(&mut rack, &mut controls, &mut buffers); let d = CrossFadeBuilder::new(sine.tag(), delay.tag()).rack(&mut rack, &mut controls); oscs.push(d.tag()); names.push("Delay Line"); let union = UnionBuilder::new(oscs).rack(&mut rack, &mut controls); let _out = VcaBuilder::new(union.tag()) .level(0.25) .rack(&mut rack, &mut controls); let synth = Synth { sender, rack, controls, state, outputs, buffers, union, adsr, names, }; let stream = audio_host .new_output_stream(synth) .render(audio) .build() .unwrap(); Model { stream, receiver, samples: vec![], } } fn audio(synth: &mut Synth, buffer: &mut Buffer) { let sample_rate = buffer.sample_rate() as f32; for frame in buffer.frames_mut() { let amp = synth.rack.mono( &synth.controls, &mut synth.state, &mut synth.outputs, &mut synth.buffers, sample_rate, ); for channel in frame { *channel = amp; } synth.sender.send(amp).unwrap(); } } fn update(_app: &App, model: &mut Model, _update: Update) { let samples: Vec<f32> = model.receiver.try_iter().collect(); model.samples = samples; } fn view(app: &App, model: &Model, frame: Frame) { if frame.nth() == 0 { println!("Active module: 0 - Sine"); }; use nannou_apps::scope; scope(app, &model.samples, frame); } fn key_pressed(_app: &App, model: &mut Model, key: Key) { match key { Key::Space => { model .stream .send(|synth| { let active = synth.union.active(&synth.controls, &synth.outputs); let n = synth.names.len(); println!( "Active module: {} - {}", (active + 1) % n, synth.names[(active + 1) % n] ); 
synth .union .set_active(&mut synth.controls, Control::I((active + 1) % n)); }) .unwrap(); } Key::Period => { model .stream .send(|synth| { synth.adsr.on(&mut synth.controls, &mut synth.state); }) .unwrap(); } Key::Comma => { model .stream .send(|synth| { synth.adsr.off(&mut synth.controls); }) .unwrap(); } _ => {} } }
use crossbeam::crossbeam_channel::{unbounded, Receiver, Sender}; use nannou::prelude::*; use nannou_audio as audio; use nannou_audio::Buffer; use oscen::envelopes::*; use oscen::filters::*; use oscen::operators::*; use oscen::oscillators::*; use oscen::rack::*; use std::sync::Arc; fn main() { nannou::app(model).update(update).run(); } struct Model { pub stream: audio::Stream<Synth>, receiver: Receiver<f32>, samples: Vec<f32>, } struct Synth { sender: Sender<f32>, rack: Rack, controls: Box<Controls>, state: Box<State>, outputs: Box<Outputs>, buffers: Box<Buffers>, union: Arc<Union>, adsr: Arc<Adsr>, names: Vec<&'static str>, } fn model(app: &App) -> Model { let (sender, receiver) = unbounded(); let mut names = vec![]; app.new_window() .key_pressed(key_pressed) .size(700, 350) .view(view) .build() .unwrap(); let audio_host = audio::Host::new(); let (mut rack, mut controls, mut state, outputs, mut buffers) = tables(); let mut oscs = vec![]; let freq = 220.0; let sine = OscBuilder::new(sine_osc) .hz(freq) .rack(&mut rack, &mut controls, &mut state); oscs.push(sine.tag()); names.push("Sine"); let square = OscBuilder::new(square_osc) .hz(freq) .rack(&mut rack, &mut controls, &mut state); oscs.push(square.tag()); names.push("Square"); let saw = OscBuilder::new(saw_osc) .hz(freq) .rack(&mut rack, &mut controls, &mut state); oscs.push(saw.tag()); names.push("Saw"); let tri = OscBuilder::new(triangle_osc) .hz(freq) .rack(&mut rack, &mut controls, &mut state); oscs.push(tri.tag()); names.push("Triangle"); let mut builder = square_wave(8); builder.hz(freq); let sq8 = builder.rack(&mut rack, &mut controls); oscs.push(sq8.tag()); names.push("Fourier Square 8"); let mut builder = triangle_wave(8); builder.hz(freq); let tri8 = builder.rack(&mut rack, &mut controls); oscs.push(tri8.tag()); names.push("Fourier Triangle 8"); let wn = WhiteNoiseBuilder::new() .amplitude(0.5) .rack(&mut rack, &mut controls); oscs.push(wn.tag()); names.push("White Noise"); let pn = 
PinkNoiseBuilder::new() .amplitude(0.5) .rack(&mut rack, &mut controls); oscs.push(pn.tag()); names.push("Pink Noise"); let mix = MixerBuilder::new(vec![sine.tag(), square.tag()]).rack(&mut rack, &mut controls); oscs.push(mix.tag()); names.push("Mixer Sine & Square"); let prod = ProductBuilder::new(vec![sine.tag(), pn.tag()]).rack(&mut rack, &mut controls); oscs.push(prod.tag()); names.push("Product Sine & Square"); let lfo = OscBuilder::new(sine_osc) .hz(2.0) .rack(&mut rack, &mut controls, &mut state); let vca = VcaBuilder::new(sine.tag()) .level(lfo.tag()) .rack(&mut rack, &mut controls); oscs.push(vca.tag()); names.push("Vca amp controlled by sine"); let cf = CrossFadeBuilder::new(sine.tag(), square.tag()).rack(&mut rack, &mut controls); cf.set_alpha(&mut controls, Control::V(lfo.tag(), 0)); oscs.push(cf.tag()); names.push("CrossFade Sine & Square, alpha is sine lfo"); let adsr = AdsrBuilder::linear() .attack(0.5) .decay(0.5) .sustain(0.75) .release(1.0) .rack(&mut rack, &mut controls); let adsr_vca = VcaBuilder::new(sine.tag()) .level(adsr.tag()) .rack(&mut rack, &mut controls); oscs.push(adsr_vca.tag()); names.push("Adsr - . 
= on , = off"); let modulator = ModulatorBuilder::new(sine_osc) .hz(220.0) .ratio(2.0) .index(4.0) .rack(&mut rack, &mut controls, &mut state); let fm = OscBuilder::new(triangle_osc).hz(modulator.tag()).rack( &mut rack, &mut controls, &mut state, ); oscs.push(fm.tag()); names.push("FM synthesis"); let lpf = LpfBuilder::new(square.tag()) .cut_off(440.0) .rack(&mut rack, &mut controls); oscs.push(lpf.tag()); names.push("Low Pass Filter"); let delay = DelayBuilder::new(sine.tag(), 0.02.into()).rack(&mut rack, &mut controls, &mut buffers); let d = CrossFadeBuilder::new(sine.tag(), delay.tag()).rack(&mut rack, &mut controls); oscs.push(d.tag()); names.push("Delay Line"); let union = UnionBuilder::new(oscs).rack(&mut rack, &mut controls); let _out = VcaBuilder::new(union.tag()) .level(0.25) .rack(&mut rack, &mut controls); let synth = Synth { sender, rack, controls, state, outputs, buffers, union, adsr, names, }; let stream = audio_host .new_output_stream(synth) .render(audio) .build() .unwrap(); Model { stream, receiver, samples: vec![], } } fn audio(synth: &mut Synth, buffer: &mut Buffer) { let sample_rate = buffer.sample_rate() as f32; for frame in buffer.frames_mut() { let amp =
amp).unwrap(); } } fn update(_app: &App, model: &mut Model, _update: Update) { let samples: Vec<f32> = model.receiver.try_iter().collect(); model.samples = samples; } fn view(app: &App, model: &Model, frame: Frame) { if frame.nth() == 0 { println!("Active module: 0 - Sine"); }; use nannou_apps::scope; scope(app, &model.samples, frame); } fn key_pressed(_app: &App, model: &mut Model, key: Key) { match key { Key::Space => { model .stream .send(|synth| { let active = synth.union.active(&synth.controls, &synth.outputs); let n = synth.names.len(); println!( "Active module: {} - {}", (active + 1) % n, synth.names[(active + 1) % n] ); synth .union .set_active(&mut synth.controls, Control::I((active + 1) % n)); }) .unwrap(); } Key::Period => { model .stream .send(|synth| { synth.adsr.on(&mut synth.controls, &mut synth.state); }) .unwrap(); } Key::Comma => { model .stream .send(|synth| { synth.adsr.off(&mut synth.controls); }) .unwrap(); } _ => {} } }
synth.rack.mono( &synth.controls, &mut synth.state, &mut synth.outputs, &mut synth.buffers, sample_rate, ); for channel in frame { *channel = amp; } synth.sender.send(
function_block-random_span
[ { "content": "/// Generate the Environment variables needed for the synth.\n\npub fn tables() -> (Rack, Box<Controls>, Box<State>, Box<Outputs>, Box<Buffers>) {\n\n (\n\n Rack::new(),\n\n Box::new(Controls::new()),\n\n Box::new(State::new()),\n\n Box::new(Outputs::new()),\n\n ...
Rust
metalos/lib/package_download/src/lib.rs
facebookincubator/fs_image
3515a24bb0e93176a5584bdc8839464fa28390d7
/* * Copyright (c) Meta Platforms, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ use std::fs::Permissions; use std::io::Write; use std::os::unix::fs::PermissionsExt; use std::path::PathBuf; use anyhow::Context; use async_trait::async_trait; use bytes::Bytes; use futures::{Stream, StreamExt}; use slog::{debug, Logger}; use tempfile::NamedTempFile; use thiserror::Error; use metalos_host_configs::packages::{self, Format}; mod https; pub use https::HttpsDownloader; use btrfs::sendstream::Zstd; use btrfs::{Sendstream, SendstreamExt, Subvolume}; #[derive(Error, Debug)] pub enum Error { #[error("package '{package}' was not found", package = .0.identifier())] NotFound(packages::generic::Package), #[error( "failed to download '{package}': {error}", package = package.identifier(), )] Download { package: packages::generic::Package, error: anyhow::Error, }, #[error( "failed to install package '{package}': {error}", package = package.identifier(), )] Install { package: packages::generic::Package, error: anyhow::Error, }, } pub type Result<T> = std::result::Result<T, Error>; #[async_trait] pub trait PackageDownloader { type BytesStream: Stream<Item = std::io::Result<Bytes>> + Unpin + Send; async fn open_bytes_stream( &self, log: Logger, package: &packages::generic::Package, ) -> Result<Self::BytesStream>; } pub trait PackageExt: Clone + Into<packages::generic::Package> { type Artifact; fn on_disk(&self) -> Option<Self::Artifact>; } macro_rules! subvol_package { ($p:ty) => { impl PackageExt for $p { type Artifact = Subvolume; fn on_disk(&self) -> Option<Self::Artifact> { Subvolume::get(self.path()).ok() } } }; } macro_rules! 
file_package { ($p:ty) => { impl PackageExt for $p { type Artifact = PathBuf; fn on_disk(&self) -> Option<Self::Artifact> { let dest = self.path(); if dest.exists() { Some(dest) } else { None } } } }; } subvol_package!(packages::Rootfs); subvol_package!(packages::Kernel); subvol_package!(packages::Service); file_package!(packages::Initrd); file_package!(packages::ImagingInitrd); file_package!(packages::GptRootDisk); pub async fn ensure_package_on_disk<D, P, A>(log: Logger, dl: D, pkgext: P) -> Result<A> where D: PackageDownloader, P: PackageExt<Artifact = A>, { if let Some(artifacts) = pkgext.on_disk() { return Ok(artifacts); } let pkg: packages::generic::Package = pkgext.clone().into(); ensure_package_on_disk_ignoring_artifacts(log, dl, &pkg).await?; pkgext .on_disk() .context("package supposedly downloaded but was not on disk") .map_err(|error| Error::Install { error, package: pkg, }) } pub async fn ensure_package_on_disk_ignoring_artifacts<D>( log: Logger, dl: D, pkg: &packages::generic::Package, ) -> Result<()> where D: PackageDownloader, { let dest = pkg.path(); if dest.exists() { return Ok(()); } let map_install_err = |error: anyhow::Error| Error::Install { error, package: pkg.clone(), }; std::fs::create_dir_all(dest.parent().unwrap()) .with_context(|| format!("while creating parent directory for {}", dest.display())) .map_err(|error| Error::Install { error, package: pkg.clone(), })?; let mut stream = dl.open_bytes_stream(log.clone(), pkg).await?; match pkg.format { Format::Sendstream => { debug!(log, "receiving {:?} into {}", pkg, dest.display()); let sendstream = Sendstream::<Zstd, _>::new(stream); let mut subvol = sendstream .receive_into(&dest) .await .map_err(anyhow::Error::msg) .map_err(map_install_err)?; subvol .set_readonly(false) .context("while setting subvolume rw") .map_err(map_install_err)?; } Format::File => { let mut tmp_dest = NamedTempFile::new_in(dest.parent().unwrap()) .with_context(|| { format!( "while creating temporary file in {}", 
dest.parent().unwrap().display() ) }) .map_err(map_install_err)?; debug!( log, "downloading {:?} to {}", pkg, tmp_dest.path().display() ); while let Some(item) = stream.next().await { tmp_dest .write_all( &item .context("while reading chunk from downloader") .map_err(|error| Error::Download { error, package: pkg.clone(), })?, ) .with_context(|| { format!("while writing chunk to {}", tmp_dest.path().display()) }) .map_err(map_install_err)?; } let tmp_dest_path = tmp_dest.path().to_path_buf(); tmp_dest .persist(&dest) .with_context(|| { format!( "while moving {} -> {}", tmp_dest_path.display(), dest.display() ) }) .map_err(map_install_err)?; } }; xattr::set( &dest, "user.metalos.package", &fbthrift::simplejson_protocol::serialize(&pkg), ) .with_context(|| { format!( "while writing user.metalos.package xattr on {}", dest.display() ) }) .map_err(map_install_err)?; match pkg.format { Format::Sendstream => { Subvolume::get(&dest) .context("while getting subvol") .map_err(map_install_err)? .set_readonly(true) .context("while setting subvol ro") .map_err(map_install_err)?; } Format::File => { let perm = Permissions::from_mode(0o444); tokio::fs::set_permissions(&dest, perm) .await .with_context(|| format!("while setting {} readonly", dest.display())) .map_err(map_install_err)?; } } Ok(()) }
/* * Copyright (c) Meta Platforms, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ use std::fs::Permissions; use std::io::Write; use std::os::unix::fs::PermissionsExt; use std::path::PathBuf; use anyhow::Context; use async_trait::async_trait; use bytes::Bytes; use futures::{Stream, StreamExt}; use slog::{debug, Logger}; use tempfile::NamedTempFile; use thiserror::Error; use metalos_host_configs::packages::{self, Format}; mod https; pub use https::HttpsDownloader; use btrfs::sendstream::Zstd; use btrfs::{Sendstream, SendstreamExt, Subvolume}; #[derive(Error, Debug)] pub enum Error { #[error("package '{package}' was not found", package = .0.identifier())] NotFound(packages::generic::Package), #[error( "failed to download '{package}': {error}", package = package.identifier(), )] Download { package: packages::generic::Package, error: anyhow::Error, }, #[error( "failed to install package '{package}': {error}", package = package.identifier(), )] Install { package: packages::generic::Package, error: anyhow::Error, }, } pub type Result<T> = std::result::Result<T, Error>; #[async_trait] pub trait PackageDownloader { type BytesStream: Stream<Item = std::io::Result<Bytes>> + Unpin + Send; async fn open_bytes_stream( &self, log: Logger, package: &packages::generic::Package, ) -> Result<Self::BytesStream>; } pub trait PackageExt: Clone + Into<packages::generic::Package> { type Artifact; fn on_disk(&self) -> Option<Self::Artifact>; } macro_rules! subvol_package { ($p:ty) => { impl PackageExt for $p { type Artifact = Subvolume; fn on_disk(&self) -> Option<Self::Artifact> { Subvolume::get(self.path()).ok() } } }; } macro_rules! 
file_package { ($p:ty) => { impl PackageExt for $p { type Artifact = PathBuf; fn on_disk(&self) -> Option<Self::Artifact> { let dest = self.path(); if dest.exists() { Some(dest) } else { None } } } }; } subvol_package!(packages::Rootfs); subvol_package!(packages::Kernel); subvol_package!(packages::Service); file_package!(packages::Initrd); file_package!(packages::ImagingInitrd); file_package!(packages::GptRootDisk); pub async fn ensure_package_on_disk<D, P, A>(log: Logger, dl: D, pkgext: P) -> Result<A> where D: PackageDownloader, P: PackageExt<Artifact = A>, { if let Some(artifacts) = pkgext.on_disk() { return Ok(artifacts); } let pkg: packages::generic::Package = pkgext.clone().into(); ensure_package_on_disk_ignoring_artifacts(log, dl, &pkg).await?; pkgext .on_disk() .context("package supposedly downloaded but was not on disk") .map_err(|error| Error::Install { error, package: pkg, }) }
pub async fn ensure_package_on_disk_ignoring_artifacts<D>( log: Logger, dl: D, pkg: &packages::generic::Package, ) -> Result<()> where D: PackageDownloader, { let dest = pkg.path(); if dest.exists() { return Ok(()); } let map_install_err = |error: anyhow::Error| Error::Install { error, package: pkg.clone(), }; std::fs::create_dir_all(dest.parent().unwrap()) .with_context(|| format!("while creating parent directory for {}", dest.display())) .map_err(|error| Error::Install { error, package: pkg.clone(), })?; let mut stream = dl.open_bytes_stream(log.clone(), pkg).await?; match pkg.format { Format::Sendstream => { debug!(log, "receiving {:?} into {}", pkg, dest.display()); let sendstream = Sendstream::<Zstd, _>::new(stream); let mut subvol = sendstream .receive_into(&dest) .await .map_err(anyhow::Error::msg) .map_err(map_install_err)?; subvol .set_readonly(false) .context("while setting subvolume rw") .map_err(map_install_err)?; } Format::File => { let mut tmp_dest = NamedTempFile::new_in(dest.parent().unwrap()) .with_context(|| { format!( "while creating temporary file in {}", dest.parent().unwrap().display() ) }) .map_err(map_install_err)?; debug!( log, "downloading {:?} to {}", pkg, tmp_dest.path().display() ); while let Some(item) = stream.next().await { tmp_dest .write_all( &item .context("while reading chunk from downloader") .map_err(|error| Error::Download { error, package: pkg.clone(), })?, ) .with_context(|| { format!("while writing chunk to {}", tmp_dest.path().display()) }) .map_err(map_install_err)?; } let tmp_dest_path = tmp_dest.path().to_path_buf(); tmp_dest .persist(&dest) .with_context(|| { format!( "while moving {} -> {}", tmp_dest_path.display(), dest.display() ) }) .map_err(map_install_err)?; } }; xattr::set( &dest, "user.metalos.package", &fbthrift::simplejson_protocol::serialize(&pkg), ) .with_context(|| { format!( "while writing user.metalos.package xattr on {}", dest.display() ) }) .map_err(map_install_err)?; match pkg.format { 
Format::Sendstream => { Subvolume::get(&dest) .context("while getting subvol") .map_err(map_install_err)? .set_readonly(true) .context("while setting subvol ro") .map_err(map_install_err)?; } Format::File => { let perm = Permissions::from_mode(0o444); tokio::fs::set_permissions(&dest, perm) .await .with_context(|| format!("while setting {} readonly", dest.display())) .map_err(map_install_err)?; } } Ok(()) }
function_block-full_function
[ { "content": "pub trait Kind: Debug + Copy + Clone + PartialEq + Eq + Sync + Send + __private::Sealed {\n\n const NAME: &'static str;\n\n const THRIFT: ThriftKind;\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\npub struct PackageTag(String);\n\n\n\n#[derive(Clone, PartialEq, Eq)]\n\npub struct Pack...
Rust
crates/nu-cli/src/commands/xpath.rs
wcarss/nushell
2573441e2875cb9c2075efe1f6ade35aeb87ebe3
extern crate sxd_document; extern crate sxd_xpath; use crate::commands::WholeStreamCommand; use crate::prelude::*; use bigdecimal::FromPrimitive; use nu_errors::ShellError; use nu_protocol::{ReturnSuccess, Signature, SyntaxShape, TaggedDictBuilder, UntaggedValue, Value}; use nu_source::Tagged; use sxd_document::parser; use sxd_xpath::{Context, Factory}; pub struct XPath; #[derive(Deserialize)] struct XPathArgs { query: Tagged<String>, } #[async_trait] impl WholeStreamCommand for XPath { fn name(&self) -> &str { "xpath" } fn signature(&self) -> Signature { Signature::build("xpath").required("query", SyntaxShape::String, "xpath query") } fn usage(&self) -> &str { "execute xpath query on xml" } fn examples(&self) -> Vec<Example> { vec![Example { description: "find items with name attribute", example: r#"echo '<?xml version="1.0" encoding="UTF-8"?><main><nushell rocks="true"/></main>' | from xml | to xml | xpath '//nushell/@rocks'"#, result: None, }] } async fn run( &self, args: CommandArgs, registry: &CommandRegistry, ) -> Result<OutputStream, ShellError> { let tag = args.call_info.name_tag.clone(); let (XPathArgs { query }, input) = args.process(&registry).await?; let query_string = query.as_str(); let input_string = input.collect_string(tag.clone()).await?.item; let result_string = execute_xpath_query(input_string, query_string.to_string()); match result_string { Some(r) => Ok( futures::stream::iter(r.into_iter().map(ReturnSuccess::value)).to_output_stream(), ), None => Err(ShellError::labeled_error( "xpath query error", "xpath query error", query.tag(), )), } } } pub fn execute_xpath_query(input_string: String, query_string: String) -> Option<Vec<Value>> { let xpath = build_xpath(&query_string); let package = parser::parse(&input_string).expect("failed to parse xml"); let document = package.as_document(); let context = Context::new(); let res = xpath.evaluate(&context, document.root()); let mut key = query_string.clone(); if query_string.len() >= 20 { 
key.truncate(17); key += "..."; } else { key = query_string; }; match res { Ok(r) => { let rows: Vec<Value> = match r { sxd_xpath::Value::Nodeset(ns) => ns .into_iter() .map(|a| { let mut row = TaggedDictBuilder::new(Tag::unknown()); row.insert_value(&key, UntaggedValue::string(a.string_value())); row.into_value() }) .collect::<Vec<Value>>(), sxd_xpath::Value::Boolean(b) => { let mut row = TaggedDictBuilder::new(Tag::unknown()); row.insert_value(&key, UntaggedValue::boolean(b)); vec![row.into_value()] } sxd_xpath::Value::Number(n) => { let mut row = TaggedDictBuilder::new(Tag::unknown()); row.insert_value( &key, UntaggedValue::decimal(BigDecimal::from_f64(n).expect("error with f64")) .into_untagged_value(), ); vec![row.into_value()] } sxd_xpath::Value::String(s) => { let mut row = TaggedDictBuilder::new(Tag::unknown()); row.insert_value(&key, UntaggedValue::string(s)); vec![row.into_value()] } }; if !rows.is_empty() { Some(rows) } else { None } } Err(_) => None, } } fn build_xpath(xpath_str: &str) -> sxd_xpath::XPath { let factory = Factory::new(); factory .build(xpath_str) .unwrap_or_else(|e| panic!("Unable to compile XPath {}: {}", xpath_str, e)) .expect("error with building the xpath factory") } #[cfg(test)] mod tests { use super::ShellError; use super::XPath; #[test] fn examples_work_as_expected() -> Result<(), ShellError> { use crate::examples::test as test_examples; Ok(test_examples(XPath {})?) } }
extern crate sxd_document; extern crate sxd_xpath; use crate::commands::WholeStreamCommand; use crate::prelude::*; use bigdecimal::FromPrimitive; use nu_errors::ShellError; use nu_protocol::{ReturnSuccess, Signature, SyntaxShape, TaggedDictBuilder, UntaggedValue, Value}; use nu_source::Tagged; use sxd_document::parser; use sxd_xpath::{Context, Factory}; pub struct XPath; #[derive(Deserialize)] struct XPathArgs { query: Tagged<String>, } #[async_trait] impl WholeStreamCommand for XPath { fn name(&self) -> &str { "xpath" } fn signature(&self) -> Signature { Signature::build("xpath").required("query", SyntaxShape::String, "xpath query") } fn usage(&self) -> &str { "execute xpath query on xml" } fn examples(&self) -> Vec<Example> { vec![Example { description: "find items with name attribute", example: r#"echo '<?xml version="1.0" encoding="UTF-8"?><main><nushell rocks="true"/></main>' | from xml | to xml | xpath '//nushell/@rocks'"#, result: None, }] } async fn run( &self, args: CommandArgs, registry: &CommandRegistry, ) -> Result<OutputStream, ShellError> { let tag = args.call_info.name_tag.clone(); let (XPathArgs { query }, input) = args.process(&registry).await?; let query_string = query.as_str(); let input_string = input.collect_string(tag.clone()).await?.item; let result_string = execute_xpath_query(input_string, query_string.to_string()); match result_string { Some(r) => Ok( futures::stream::iter(r.into_iter().map(ReturnSuccess::value)).to_output_stream(), ), None => Err(ShellError::labeled_error( "xpath query error", "xpath query error", query.tag(), )), } } } pub fn execute_xpath_query(input_string: String, query_string: String) -> Option<Vec<Value>> { let xpath = build_xpath(&query_string); let package = parser::parse(&input_string).expect("failed to parse xml"); let document = package.as_document(); let context = Context::new(); let res = xpath.evaluate(&context, document.root()); let mut key = query_string.clone(); if query_string.len() >= 20 { 
key.truncate(17); key += "..."; } else { key = query_string; }; match res { Ok(r) => { let rows: Vec<Value> = match r { sxd_xpath::Value::Nodeset(ns) => ns .into_iter() .map(|a| { let mut row = TaggedDictBuilder::new(Tag::unknown()); row.insert_value(&key, UntaggedValue::string(a.string_value())); row.into_value() }) .collect::<Vec<Value>>(), sxd_xpath::Value::Boolean(b) => { let mut row = TaggedDictBuilder::new(Tag::unknown()); row.insert_value(&key, UntaggedValue::boolean(b)); vec![row.into_value()] } sxd_xpath::Value::Number(n) => { let mut row = TaggedDictBuilder::new(Tag::unknown()); row.insert_value( &key, UntaggedValue::decimal(BigDecimal::from_f64(n).expect("error with f64")) .into_untagged_value(), ); vec![row.into_value()] } sxd_xpath::Value::String(s) => { let mut row = TaggedDictBuilder::new(Tag::unknown()); row.insert_value(&key, UntaggedValue::string(s)); vec![row.into_value()] } }; if !rows.is_empty() { Some(rows) } else { None } } Err(_) => None, } }
#[cfg(test)] mod tests { use super::ShellError; use super::XPath; #[test] fn examples_work_as_expected() -> Result<(), ShellError> { use crate::examples::test as test_examples; Ok(test_examples(XPath {})?) } }
fn build_xpath(xpath_str: &str) -> sxd_xpath::XPath { let factory = Factory::new(); factory .build(xpath_str) .unwrap_or_else(|e| panic!("Unable to compile XPath {}: {}", xpath_str, e)) .expect("error with building the xpath factory") }
function_block-function_prefixed
[ { "content": "pub fn from_xml_string_to_value(s: String, tag: impl Into<Tag>) -> Result<Value, roxmltree::Error> {\n\n let parsed = roxmltree::Document::parse(&s)?;\n\n Ok(from_document_to_value(&parsed, tag))\n\n}\n\n\n\nasync fn from_xml(\n\n args: CommandArgs,\n\n registry: &CommandRegistry,\n\n)...
Rust
yatta/src/windows_event.rs
limethyst/yatta
50e26a4d6ce38bdfc02eb43c4855663c93657222
use std::{ sync::{ atomic::{AtomicIsize, Ordering}, Arc, Mutex, }, thread, time::Duration, }; use crossbeam_channel::{unbounded, Receiver, Sender}; use lazy_static::lazy_static; use log::{error, info}; use strum::Display; use bindings::Windows::Win32::{ Foundation::HWND, Graphics::Gdi::{MonitorFromWindow, MONITOR_DEFAULTTOPRIMARY}, UI::{ Accessibility::{SetWinEventHook, HWINEVENTHOOK}, WindowsAndMessaging::{EVENT_MAX, EVENT_MIN}, }, }; use crate::{ message_loop, window::{exe_name_from_path, Window}, Message, YATTA_CHANNEL, }; lazy_static! { static ref WINDOWS_EVENT_CHANNEL: Arc<Mutex<(Sender<WindowsEvent>, Receiver<WindowsEvent>)>> = Arc::new(Mutex::new(unbounded())); } #[derive(Debug, Clone)] pub struct WindowsEventListener { hook: Arc<AtomicIsize>, } impl Default for WindowsEventListener { fn default() -> Self { Self { hook: Arc::new(AtomicIsize::new(0)), } } } impl WindowsEventListener { pub fn start(&self) { let hook = self.hook.clone(); let yatta_sender = YATTA_CHANNEL.lock().unwrap().0.clone(); thread::spawn(move || unsafe { let hook_ref = SetWinEventHook( EVENT_MIN as u32, EVENT_MAX as u32, None, Some(handler), 0, 0, 0, ); hook.store(hook_ref.0, Ordering::SeqCst); info!("starting windows event listener"); message_loop::start(|_| { if let Ok(event) = WINDOWS_EVENT_CHANNEL.lock().unwrap().1.try_recv() { match yatta_sender.send(Message::WindowsEvent(event)) { Ok(_) => {} Err(error) => { error!("could not send windows event to yatta channel: {}", error) } } } thread::sleep(Duration::from_millis(10)); true }); }); } } extern "system" fn handler( _h_win_event_hook: HWINEVENTHOOK, event: u32, hwnd: HWND, id_object: i32, _id_child: i32, _id_event_thread: u32, _dwms_event_time: u32, ) { if id_object != 0 { return; } let hmonitor = unsafe { MonitorFromWindow(hwnd, MONITOR_DEFAULTTOPRIMARY) }; let window = Window { hwnd, hmonitor, tile: true, resize: None, }; let event_code = unsafe { ::std::mem::transmute(event) }; let event_type = match 
WindowsEventType::from_event_code(event_code) { Some(event) => event, None => { let object_name_change_on_launch = vec!["firefox.exe", "idea64.exe"]; if let Ok(path) = window.exe_path() { if event_code == WinEventCode::ObjectNameChange { if object_name_change_on_launch.contains(&&*exe_name_from_path(&path)) { WindowsEventType::Show } else { return; } } else { return; } } else { return; } } }; if window.should_manage(Option::from(event_type)) { let event = WindowsEvent { event_type, event_code, window, title: window.title(), }; WINDOWS_EVENT_CHANNEL .lock() .unwrap() .0 .send(event) .expect("Failed to forward WindowsEvent"); } } #[derive(Clone, Copy, Debug, Display, PartialEq)] pub enum WindowsEventType { Destroy, FocusChange, Hide, Show, MoveResizeStart, MoveResizeEnd, } impl WindowsEventType { pub fn from_event_code(event_code: WinEventCode) -> Option<Self> { match event_code { WinEventCode::ObjectDestroy => Some(Self::Destroy), WinEventCode::ObjectCloaked | WinEventCode::ObjectHide | WinEventCode::SystemMinimizeStart => Some(Self::Hide), WinEventCode::ObjectShow | WinEventCode::ObjectUncloaked | WinEventCode::SystemMinimizeEnd => Some(Self::Show), WinEventCode::ObjectFocus | WinEventCode::SystemForeground => Some(Self::FocusChange), WinEventCode::SystemMoveSizeStart => Some(Self::MoveResizeStart), WinEventCode::SystemMoveSizeEnd => Some(Self::MoveResizeEnd), _ => None, } } } #[derive(Clone, Debug)] pub struct WindowsEvent { pub event_type: WindowsEventType, pub event_code: WinEventCode, pub window: Window, pub title: Option<String>, } #[derive(Clone, Copy, FromPrimitive, ToPrimitive, PartialEq, Display, Debug)] #[repr(u32)] #[allow(dead_code)] pub enum WinEventCode { ObjectAcceleratorChange = 0x8012, ObjectCloaked = 0x8017, ObjectContentScrolled = 0x8015, ObjectCreate = 0x8000, ObjectDefActionChange = 0x8011, ObjectDescriptionChange = 0x800D, ObjectDestroy = 0x8001, ObjectDragStart = 0x8021, ObjectDragCancel = 0x8022, ObjectDragComplete = 0x8023, ObjectDragEnter 
= 0x8024, ObjectDragLeave = 0x8025, ObjectDragDropped = 0x8026, ObjectEnd = 0x80FF, ObjectFocus = 0x8005, ObjectHelpChange = 0x8010, ObjectHide = 0x8003, ObjectHostedObjectsInvalidated = 0x8020, ObjectImeHide = 0x8028, ObjectImeShow = 0x8027, ObjectImeChange = 0x8029, ObjectInvoked = 0x8013, ObjectLiveRegionChanged = 0x8019, ObjectLocationChange = 0x800B, ObjectNameChange = 0x800C, ObjectParentChange = 0x800F, ObjectReorder = 0x8004, ObjectSelection = 0x8006, ObjectSelectionAdd = 0x8007, ObjectSelectionRemove = 0x8008, ObjectSelectionWithin = 0x8009, ObjectShow = 0x8002, ObjectStateChange = 0x800A, ObjectTextEditConversionTargetChanged = 0x8030, ObjectTextSelectionChanged = 0x8014, ObjectUncloaked = 0x8018, ObjectValueChange = 0x800E, SystemAlert = 0x0002, SystemArrangementPreview = 0x8016, SystemCaptureEnd = 0x0009, SystemCaptureStart = 0x0008, SystemContextHelpEnd = 0x000D, SystemContextHelpStart = 0x000C, SystemDesktopSwitch = 0x0020, SystemDialogEnd = 0x0011, SystemDialogStart = 0x0010, SystemDragDropEnd = 0x000F, SystemDragDropStart = 0x000E, SystemEnd = 0x00FF, SystemForeground = 0x0003, SystemMenuPopupEnd = 0x0007, SystemMenuPopupStart = 0x0006, SystemMenuEnd = 0x0005, SystemMenuStart = 0x0004, SystemMinimizeEnd = 0x0017, SystemMinimizeStart = 0x0016, SystemMoveSizeEnd = 0x000B, SystemMoveSizeStart = 0x000A, SystemScrollingEnd = 0x0013, SystemScrollingStart = 0x0012, SystemSound = 0x0001, SystemSwitchEnd = 0x0015, SystemSwitchStart = 0x0014, }
use std::{ sync::{ atomic::{AtomicIsize, Ordering}, Arc, Mutex, }, thread, time::Duration, }; use crossbeam_channel::{unbounded, Receiver, Sender}; use lazy_static::lazy_static; use log::{error, info}; use strum::Display; use bindings::Windows::Win32::{ Foundation::HWND, Graphics::Gdi::{MonitorFromWindow, MONITOR_DEFAULTTOPRIMARY}, UI::{ Accessibility::{SetWinEventHook, HWINEVENTHOOK}, WindowsAndMessaging::{EVENT_MAX, EVENT_MIN}, }, }; use crate::{ message_loop, window::{exe_name_from_path, Window}, Message, YATTA_CHANNEL, }; lazy_static! { static ref WINDOWS_EVENT_CHANNEL: Arc<Mutex<(Sender<WindowsEvent>, Receiver<WindowsEvent>)>> = Arc::new(Mutex::new(unbounded())); } #[derive(Debug, Clone)] pub struct WindowsEventListener { hook: Arc<AtomicIsize>, } impl Default for WindowsEventListener { fn default() -> Self { Self { hook: Arc::new(AtomicIsize::new(0)), } } } impl WindowsEventListener { pub fn start(&self) { let hook = self.hook.clone(); let yatta_sender = YATTA_CHANNEL.lock().unwrap().0.clone(); thread::spawn(move || unsafe { let hook_ref = SetWinEventHook( EVENT_MIN as u32, EVENT_MAX as u32, None, Some(handler), 0, 0, 0, ); hook.store(hook_ref.0, Ordering::SeqCst); info!("starting windows event listener"); message_loop::start(|_| { if let Ok(event) = WINDOWS_EVENT_CHANNEL.lock().unwrap().1.try_recv() { match yatta_sender.send(Message::WindowsEvent(event)) { Ok(_) => {} Err(error) => { error!("could not send windows event to yatta channel: {}", error) } } } thread::sleep(Duration::from_millis(10)); true }); }); } } extern "system" fn handler( _h_win_event_hook: HWINEVENTHOOK, event: u32, hwnd: HWND, id_object: i32, _id_child: i32, _id_event_thread: u32, _dwms_event_time: u32, ) { if id_object != 0 { return; } let hmonitor = unsafe { MonitorFromWindow(hwnd, MONITOR_DEFAULTTOPRIMARY) }; let window = Window { hwnd, hmonitor, tile: true, resize: None, }; let event_code = unsafe { ::std::mem::transmute(event) }; let event_type = match 
WindowsEventType::from_event_code(event_code) { Some(event) => event, None => { let object_name_change_on_launch = vec!["firefox.exe", "idea64.exe"]; if let Ok(path) = window.exe_path() { if event_code == WinEventCode::ObjectNameChange { if object_name_change_on_launch.contains(&&*exe_name_from_path(&path)) { WindowsEventType::Show } else { return; } } else { return; } } else { return; } } }; if window.should_manage(Option::from(event_type)) { let event = WindowsEvent { event_type, event_code, window, title: window.title(), }; WINDOWS_EVENT_CHANNEL .lock() .unwrap() .0 .send(event) .expect("Failed to forward WindowsEvent"); } } #[derive(Clone, Copy, Debug, Display, PartialEq)] pub enum WindowsEventType { Destroy, FocusChange, Hide, Show, MoveResizeStart, MoveResizeEnd, } impl WindowsEventType { pub fn from_event_code(eve
} #[derive(Clone, Debug)] pub struct WindowsEvent { pub event_type: WindowsEventType, pub event_code: WinEventCode, pub window: Window, pub title: Option<String>, } #[derive(Clone, Copy, FromPrimitive, ToPrimitive, PartialEq, Display, Debug)] #[repr(u32)] #[allow(dead_code)] pub enum WinEventCode { ObjectAcceleratorChange = 0x8012, ObjectCloaked = 0x8017, ObjectContentScrolled = 0x8015, ObjectCreate = 0x8000, ObjectDefActionChange = 0x8011, ObjectDescriptionChange = 0x800D, ObjectDestroy = 0x8001, ObjectDragStart = 0x8021, ObjectDragCancel = 0x8022, ObjectDragComplete = 0x8023, ObjectDragEnter = 0x8024, ObjectDragLeave = 0x8025, ObjectDragDropped = 0x8026, ObjectEnd = 0x80FF, ObjectFocus = 0x8005, ObjectHelpChange = 0x8010, ObjectHide = 0x8003, ObjectHostedObjectsInvalidated = 0x8020, ObjectImeHide = 0x8028, ObjectImeShow = 0x8027, ObjectImeChange = 0x8029, ObjectInvoked = 0x8013, ObjectLiveRegionChanged = 0x8019, ObjectLocationChange = 0x800B, ObjectNameChange = 0x800C, ObjectParentChange = 0x800F, ObjectReorder = 0x8004, ObjectSelection = 0x8006, ObjectSelectionAdd = 0x8007, ObjectSelectionRemove = 0x8008, ObjectSelectionWithin = 0x8009, ObjectShow = 0x8002, ObjectStateChange = 0x800A, ObjectTextEditConversionTargetChanged = 0x8030, ObjectTextSelectionChanged = 0x8014, ObjectUncloaked = 0x8018, ObjectValueChange = 0x800E, SystemAlert = 0x0002, SystemArrangementPreview = 0x8016, SystemCaptureEnd = 0x0009, SystemCaptureStart = 0x0008, SystemContextHelpEnd = 0x000D, SystemContextHelpStart = 0x000C, SystemDesktopSwitch = 0x0020, SystemDialogEnd = 0x0011, SystemDialogStart = 0x0010, SystemDragDropEnd = 0x000F, SystemDragDropStart = 0x000E, SystemEnd = 0x00FF, SystemForeground = 0x0003, SystemMenuPopupEnd = 0x0007, SystemMenuPopupStart = 0x0006, SystemMenuEnd = 0x0005, SystemMenuStart = 0x0004, SystemMinimizeEnd = 0x0017, SystemMinimizeStart = 0x0016, SystemMoveSizeEnd = 0x000B, SystemMoveSizeStart = 0x000A, SystemScrollingEnd = 0x0013, SystemScrollingStart = 0x0012, 
SystemSound = 0x0001, SystemSwitchEnd = 0x0015, SystemSwitchStart = 0x0014, }
nt_code: WinEventCode) -> Option<Self> { match event_code { WinEventCode::ObjectDestroy => Some(Self::Destroy), WinEventCode::ObjectCloaked | WinEventCode::ObjectHide | WinEventCode::SystemMinimizeStart => Some(Self::Hide), WinEventCode::ObjectShow | WinEventCode::ObjectUncloaked | WinEventCode::SystemMinimizeEnd => Some(Self::Show), WinEventCode::ObjectFocus | WinEventCode::SystemForeground => Some(Self::FocusChange), WinEventCode::SystemMoveSizeStart => Some(Self::MoveResizeStart), WinEventCode::SystemMoveSizeEnd => Some(Self::MoveResizeEnd), _ => None, } }
function_block-function_prefixed
[]
Rust
rust tezos+everscale/tezos_everscale/src/bin/bridge_without_comments.rs
everscale-experts/hackathon_new
dafaaea8b7f70cfdb9162a1b6752ac31d6a4a5df
mod tezos_send_transaction; use lib::functions::*; use ureq::Agent; use std::sync::Arc; use std::fs; use serde_json::Value; use tezos_send_transaction::transfer as tezos_transfer; fn tezos_get_transactions() -> Value { let agent = Agent::new(); let path = format!("https://api.hangzhounet.tzkt.io/v1/accounts/{}/operations", serde_json::from_str::<Value>(fs::read_to_string(PATH) .unwrap() .as_str()) .unwrap()[1]["address"] .as_str() .unwrap() ); let res = agent.get(&path) .call() .unwrap() .into_string() .unwrap(); let res_json = serde_json::from_str::<Value>(res.as_str()).unwrap(); res_json } const PATH: &str = "./dependencies/json/tezos_accounts.json"; async fn everscale_transaction34(amount: &str, ton: &Arc<ClientContext>, config: Config) { let ever_accs = get_json_field("./dependencies/json/everscale_accounts.json", None, None); let address = ever_accs[2]["address"].clone(); let abi = std::fs::read_to_string("./dependencies/json/SetcodeMultisigWallet.abi.json") .map_err(|e| format!("failed to read ABI file: {}", e.to_string())).unwrap(); let trans_id = submit_transaction( ton.clone(), config.clone(), address.as_str().unwrap(), abi.as_str(), Some("./dependencies/json/wallet3.scmsig1.json".to_string()), "".to_string(), amount, ever_accs[3]["address"].as_str().unwrap() ).await; println!("Transaction created with id: {}", trans_id); for i in 2..4 { println!( "{}", confirm_transaction( ton.clone(), config.clone(), address.as_str().unwrap(), abi.as_str(), Some(format!("./dependencies/json/wallet3.scmsig{}.json", i)), trans_id.to_string(), ).await, ); } } #[tokio::main] async fn main() { let config = Config::from_json( serde_json::from_str( std::fs::read_to_string( "./dependencies/json/run_config.json" ).unwrap().as_str() ).expect("failed to parse json") ); let ton = create_client_verbose(&config).unwrap(); let mut last_len = tezos_get_transactions().as_array().unwrap().len(); let context = Arc::new( ton_client::ClientContext::new(ton_client::ClientConfig { network: 
ton_client::net::NetworkConfig { server_address: Some("net.ton.dev".to_string()), ..Default::default() }, ..Default::default() }) .unwrap(), ); ton_client::net::subscribe_collection( context.clone(), ton_client::net::ParamsOfSubscribeCollection { collection: "transactions".to_owned(), filter: None, result: "id in_message {value} account_addr".to_owned(), }, |result| async { match result { Ok(result) => { if let Some(address) = result.result["account_addr"].as_str() { if let Some(v) = result.result["in_message"]["value"].as_str() { if address == get_json_field("./dependencies/json/everscale_accounts.json", None, None)[1]["address"] .as_str().unwrap().to_owned() { let v_u64 = hex_to_dec(v); println!("{}", v_u64); let sender = get_json_field("./dependencies/json/tezos_accounts.json", None, Some(2)); let receiver = get_json_field("./dependencies/json/tezos_accounts.json", None, Some(3)); tezos_transfer( sender["address"].as_str().unwrap(), receiver["address"].as_str().unwrap(), sender["public"].as_str().unwrap(), sender["secret"].as_str().unwrap(), format!("{}", v_u64 as f64 / 1000000000.0).as_str(), ); } } } } Err(err) => { println!("(Everscale listener) Error: {}", err); } } }, ).await.unwrap(); loop { let res = tezos_get_transactions(); let len = res.as_array().unwrap().len(); if len > last_len { println!("{:#}", res[0]["amount"]); everscale_transaction34((res[0]["amount"].as_i64().unwrap() * 1000).to_string().as_str(), &ton, config.clone()).await; last_len = len; } } }
mod tezos_send_transaction; use lib::functions::*; use ureq::Agent; use std::sync::Arc; use std::fs; use serde_json::Value; use tezos_send_transaction::transfer as tezos_transfer; fn tezos_get_transactions() -> Value { let agent = Agent::new(); let path = format!("https://api.hangzhounet.tzkt.io/v1/accounts/{}/operations", serde_json::from_str::<Value>(fs::read_to_string(PATH) .unwrap() .as_str()) .unwrap()[1]["address"] .as_str() .unwrap() ); let res = agent.get(&path) .call() .unwrap() .into_string() .unwrap(); let res_json = serde_json::from_str::<Value>(res.as_str()).unwrap(); res_json } const PATH: &str = "./dependencies/json/tezos_accounts.json"; async fn everscale_transaction34(amount: &str, ton: &Arc<ClientContext>, config: Config) { let ever_accs = get_json_field("./dependencies/json/everscale_accounts.json", None, None); let address = ever_accs[2]["address"].clone(); let abi = std::fs::read_to_string("./dependencies/json/SetcodeMultisigWallet.abi.json") .map_err(|e| format!("failed to read ABI file: {}", e.to_string())).unwrap(); let trans_id = submit_transaction( ton.clone(), config.clone(), address.as_str().unwrap(), abi.as_str(), Some("./dependencies/json/wallet3.scmsig1.json".to_string()), "".to_string(), amount, ever_accs[3]["address"].as_str().unwrap() ).await; println!("Transaction created with id: {}", trans_id); for i in 2..4 { println!( "{}", confirm_transaction( ton.clone(), config.clone(), address.as_str().unwrap(), abi.as_str(), Some(format!("./dependencies/json/wallet3.scmsig{}.json", i)), trans_id.to_string(), ).await, ); } } #[tokio::main] async fn main() { let config = Config::from_json( serde_json::from_str( std::fs::read_to_string( "./dependencies/json/run_config.json" ).unwrap
} } } Err(err) => { println!("(Everscale listener) Error: {}", err); } } }, ).await.unwrap(); loop { let res = tezos_get_transactions(); let len = res.as_array().unwrap().len(); if len > last_len { println!("{:#}", res[0]["amount"]); everscale_transaction34((res[0]["amount"].as_i64().unwrap() * 1000).to_string().as_str(), &ton, config.clone()).await; last_len = len; } } }
().as_str() ).expect("failed to parse json") ); let ton = create_client_verbose(&config).unwrap(); let mut last_len = tezos_get_transactions().as_array().unwrap().len(); let context = Arc::new( ton_client::ClientContext::new(ton_client::ClientConfig { network: ton_client::net::NetworkConfig { server_address: Some("net.ton.dev".to_string()), ..Default::default() }, ..Default::default() }) .unwrap(), ); ton_client::net::subscribe_collection( context.clone(), ton_client::net::ParamsOfSubscribeCollection { collection: "transactions".to_owned(), filter: None, result: "id in_message {value} account_addr".to_owned(), }, |result| async { match result { Ok(result) => { if let Some(address) = result.result["account_addr"].as_str() { if let Some(v) = result.result["in_message"]["value"].as_str() { if address == get_json_field("./dependencies/json/everscale_accounts.json", None, None)[1]["address"] .as_str().unwrap().to_owned() { let v_u64 = hex_to_dec(v); println!("{}", v_u64); let sender = get_json_field("./dependencies/json/tezos_accounts.json", None, Some(2)); let receiver = get_json_field("./dependencies/json/tezos_accounts.json", None, Some(3)); tezos_transfer( sender["address"].as_str().unwrap(), receiver["address"].as_str().unwrap(), sender["public"].as_str().unwrap(), sender["secret"].as_str().unwrap(), format!("{}", v_u64 as f64 / 1000000000.0).as_str(), ); }
random
[ { "content": "fn get_config_field(name: &str) -> serde_json::Value {\n\n serde_json::from_str::<serde_json::Value>(std::fs::read_to_string(\"config.json\").unwrap().as_str())\n\n .unwrap()[name].clone()\n\n}\n\n\n\n#[tokio::main]\n\nasync fn main() {\n\n let context = Arc::new(\n\n ton_clien...
Rust
src/msg.rs
JACKAL-DAO/RNS-Name-Service
132fc3eea6e6dcd3a378382992da783d192d76a9
use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use cosmwasm_std::Addr; use crate::state::{ Name, Operator } ; use cw_utils::Expiration; use cosmwasm_std::Binary; #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct InstantiateMsg { pub blocks_per_year: u64, pub meta_url: String, pub denom: String, pub cost_for_6: Option<u64>, pub cost_for_5: Option<u64>, pub cost_for_4: Option<u64>, pub cost_for_3: Option<u64>, pub cost_for_2: Option<u64>, pub cost_for_1: Option<u64> } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum ExecuteMsg { SetBlocksPerYear { blocks_per_year: u64 }, SetOwner { owner: Addr }, RegisterName { name: String, years: u64, avatar_url: Option<String>, secret_address: Option<String>, crypto_org_address: Option<String>, starname_address: Option<String>, persistence_address: Option<String>, kava_address: Option<String>, terra_address: Option<String>, website: Option<String>, email: Option<String>, twitter: Option<String>, telegram: Option<String>, discord: Option<String>, instagram: Option<String>, reddit: Option<String>, }, AddTime { name : String, years: u64}, UpdateParams { name: String, avatar_url: Option<String>, secret_address: Option<String>, crypto_org_address: Option<String>, starname_address: Option<String>, persistence_address: Option<String>, kava_address: Option<String>, terra_address: Option<String>, website: Option<String>, email: Option<String>, twitter: Option<String>, telegram: Option<String>, discord: Option<String>, instagram: Option<String>, reddit: Option<String>, }, /** * ALL THE CW721 STANDARD FUNCTIONS */ TransferNft { recipient: String, token_id: String }, SendNft { contract: String, token_id: String, message: Binary, }, Approve { spender: String, token_id: String, expires: Option<Expiration>, }, Revoke { spender: String, token_id: String }, ApproveAll { operator: String, expires: Option<Expiration>, }, RevokeAll { 
operator: String }, WithdrawBalance{ }, } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum QueryMsg { GetOwner {}, GetBlocksPerYear {}, ResolveName { name : String }, ResolveAttributes { name : String }, OwnerOf { token_id: String, }, ApprovedForAll { owner: String, start_after: Option<String>, limit: Option<u32>, }, NumTokens {}, ContractInfo {}, NftInfo { token_id: String, }, } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct BlocksResponse { pub blocks_per_year: u64, } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct OwnerResponse { pub owner: Addr, } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct NameResponse { pub name: Name, } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct ApprovedForAllResponse { pub operators: Vec<Operator>, } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct NumTokensResponse { pub tokens: u32, } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct ContractInfoResponse { pub name: String, pub symbol: String, } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct NftInfoResponse { pub name: String, pub description: String, pub image: String, }
use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use cosmwasm_std::Addr; use crate::state::{ Name, Operator } ; use cw_utils::Expiration; use cosmwasm_std::Binary; #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct InstantiateMsg { pub blocks_per_year: u64, pub meta_url: String, pub denom: String, pub cost_for_6: Option<u64>, pub cost_for_5: Option<u64>, pub cost_for_4: Option<u64>, pub cost_for_3: Option<u64>, pub cost_for_2: Option<u64>, pub cost_for_1: Option<u64> } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum ExecuteMsg { SetBlocksPerYear { blocks_per_year: u64 }, SetOwner { owner: Addr }, RegisterName { name: String, years: u64, avatar_url: Option<String>, secret_address: Option<String>, crypto_org_address: Option<String>, starname_address: Option<String>, persistence_address: Option<String>, kava_address: Option<String>, terra_address: Option<String>, website: Option<String>, email: Option<String>, twitter: Option<String>, telegram: Option<String>, discord: Option<String>, instagram: Option<String>, reddit: Option<String>, }, AddTime { name : String, years: u64}, UpdateParams { name: String, avatar_url: Option<String>, secret_address: Option<String>, crypto_org_address: Option<String>, starname_address: Option<String>, persistence_address: Option<String>, kava_address: Option<String>, terra_address: Option<String>, website: Option<String>, email: Option<String>, twitter: Option<String>, telegram: Option<String>, discord: Option<String>, instagram: Option<String>, reddit: Option<String>, }, /** * ALL THE CW721 STANDARD FUNCTIONS */ TransferNft { recipient: String, token_id: String }, SendNft { contract: String, token_id: String, message: Binary, }, Approve { spender: String, token_id: String, expires: Option<Expiration>, }, Revoke { spender: String, token_id: String }, ApproveAll { operator: String, expires: Option<Expiration>, }, RevokeAll { 
operator: String }, WithdrawBalance{ }, } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] #[serde(rename_all = "snake_case")]
}, NumTokens {}, ContractInfo {}, NftInfo { token_id: String, }, } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct BlocksResponse { pub blocks_per_year: u64, } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct OwnerResponse { pub owner: Addr, } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct NameResponse { pub name: Name, } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct ApprovedForAllResponse { pub operators: Vec<Operator>, } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct NumTokensResponse { pub tokens: u32, } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct ContractInfoResponse { pub name: String, pub symbol: String, } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct NftInfoResponse { pub name: String, pub description: String, pub image: String, }
pub enum QueryMsg { GetOwner {}, GetBlocksPerYear {}, ResolveName { name : String }, ResolveAttributes { name : String }, OwnerOf { token_id: String, }, ApprovedForAll { owner: String, start_after: Option<String>, limit: Option<u32>,
random
[ { "content": "pub fn try_set_owner(deps: DepsMut, info: MessageInfo, owner: Addr) -> Result<Response, ContractError> {\n\n STATE.update(deps.storage, |mut state| -> Result<_, ContractError> {\n\n if info.sender != state.owner {\n\n return Err(ContractError::Unauthorized {});\n\n }\n\...
Rust
benchmark/src/lib.rs
mrobakowski/faster-rs
ab33be278ae752ba95e6563949672774174e5919
extern crate hwloc; extern crate libc; extern crate regex; use faster_rs::FasterKv; use hwloc::{CpuSet, ObjectType, Topology, CPUBIND_THREAD}; use regex::Regex; use std::fs::File; use std::io::{BufRead, BufReader, Write}; use std::os::unix::prelude::FileExt; use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering}; use std::sync::mpsc::Receiver; use std::sync::{Arc, Barrier, Mutex}; use std::time::{Duration, Instant}; const K_CHECKPOINT_SECONDS: u64 = 30; const K_COMPLETE_PENDING_INTERVAL: usize = 1600; const K_REFRESH_INTERVAL: usize = 64; const K_RUN_TIME: u64 = 360; const K_CHUNK_SIZE: usize = 3200; const K_FILE_CHUNK_SIZE: usize = 131072; const K_INIT_COUNT: usize = 250000000; const K_TXN_COUNT: usize = 1000000000; const K_NANOS_PER_SECOND: usize = 1000000000; const K_THREAD_STACK_SIZE: usize = 4 * 1024 * 1024; pub enum Operation { Read, Upsert, Rmw, } fn cpuset_for_core(topology: &Topology, idx: usize) -> CpuSet { let cores = (*topology).objects_with_type(&ObjectType::Core).unwrap(); match cores.get(idx) { Some(val) => val.cpuset().unwrap(), None => panic!("No Core found with id {}", idx), } } pub fn process_ycsb(input_file: &str, output_file: &str) { let input = File::open(input_file).expect("Unable to open input file for reading"); let mut output = File::create(output_file).expect("Unable to create output file"); let re = Regex::new(r".*usertable user(\d+).*").unwrap(); let reader = BufReader::new(input); for line in reader.lines().map(|l| l.unwrap()) { for cap in re.captures_iter(&line) { let num: u64 = cap[1].parse().expect("Unable to parse uid"); output.write(&num.to_be_bytes()).unwrap(); } } } pub fn generate_sequential_keys(out_file: &str, workload: &str) { let mut output = File::create(out_file).expect("Unable to create output file"); let num_keys = match workload { "load" => K_INIT_COUNT, "run" => K_TXN_COUNT, _ => panic!("Must specify load or run for generating sequential keys"), }; for i in 0..num_keys { output.write(&((i % K_INIT_COUNT) as 
u64).to_be_bytes()).unwrap(); } } pub fn read_upsert5050(key: usize) -> Operation { match key % 2 { 0 => Operation::Read, 1 => Operation::Upsert, _ => panic!(), } } pub fn rmw_100(_key: usize) -> Operation { Operation::Rmw } pub fn upsert_100(_key: usize) -> Operation { Operation::Upsert } pub fn load_files(load_file: &str, run_file: &str) -> (Vec<u64>, Vec<u64>) { let load_file = File::open(load_file).expect("Unable to open load file"); let run_file = File::open(run_file).expect("Unable to open run file"); let mut buffer = [0; K_FILE_CHUNK_SIZE]; let mut count = 0; let mut offset = 0; let mut init_keys = Vec::with_capacity(K_INIT_COUNT); println!("Loading keys into memory"); loop { let bytes_read = load_file.read_at(&mut buffer, offset).unwrap(); for i in 0..(bytes_read / 8) { let mut num = [0; 8]; num.copy_from_slice(&buffer[i..i + 8]); init_keys.insert(count, u64::from_be_bytes(num)); count += 1; } if bytes_read == K_FILE_CHUNK_SIZE { offset += K_FILE_CHUNK_SIZE as u64; } else { break; } } if K_INIT_COUNT != count { panic!("Init file load fail!"); } println!("Loaded {} keys", count); let mut count = 0; let mut offset = 0; let mut run_keys = Vec::with_capacity(K_TXN_COUNT); println!("Loading txns into memory"); loop { let bytes_read = run_file.read_at(&mut buffer, offset).unwrap(); for i in 0..(bytes_read / 8) { let mut num = [0; 8]; num.copy_from_slice(&buffer[i..i + 8]); run_keys.insert(count, u64::from_be_bytes(num)); count += 1; } if bytes_read == K_FILE_CHUNK_SIZE { offset += K_FILE_CHUNK_SIZE as u64; } else { break; } } if K_TXN_COUNT != count { panic!("Txn file load fail!"); } println!("Loaded {} txns", count); (init_keys, run_keys) } pub fn populate_store(store: &Arc<FasterKv>, keys: &Arc<Vec<u64>>, num_threads: u8) { let topo = Arc::new(Mutex::new(Topology::new())); let idx = Arc::new(AtomicUsize::new(0)); let mut threads = vec![]; for thread_idx in 0..num_threads { let store = Arc::clone(store); let idx = Arc::clone(&idx); let keys = Arc::clone(&keys); 
let child_topo = topo.clone(); threads.push(std::thread::spawn(move || { { let tid = unsafe { libc::pthread_self() }; let mut locked_topo = child_topo.lock().unwrap(); let bind_to = cpuset_for_core(&*locked_topo, thread_idx as usize); locked_topo .set_cpubind_for_thread(tid, bind_to, CPUBIND_THREAD) .unwrap(); } let _session = store.start_session(); let mut chunk_idx = idx.fetch_add(K_CHUNK_SIZE, Ordering::SeqCst); while chunk_idx < K_INIT_COUNT { for i in chunk_idx..(chunk_idx + K_CHUNK_SIZE) { if i % K_REFRESH_INTERVAL == 0 { store.refresh(); if i % K_COMPLETE_PENDING_INTERVAL == 0 { store.complete_pending(false); } } store.upsert(&*keys.get(i as usize).unwrap(), &42, i as u64); } chunk_idx = idx.fetch_add(K_CHUNK_SIZE, Ordering::SeqCst); } store.complete_pending(true); store.stop_session(); })); } for t in threads { t.join().expect("Something went wrong in a thread"); } } pub fn run_benchmark<F: Fn(usize) -> Operation + Send + Copy + 'static>( store: &Arc<FasterKv>, keys: &Arc<Vec<u64>>, num_threads: u8, op_allocator: F, ) { let topo = Arc::new(Mutex::new(Topology::new())); let idx = Arc::new(AtomicUsize::new(0)); let done = Arc::new(AtomicBool::new(false)); let barrier = Arc::new(Barrier::new((num_threads + 1) as usize)); let mut threads = vec![]; for thread_id in 0..num_threads { let store = Arc::clone(&store); let keys = Arc::clone(&keys); let idx = Arc::clone(&idx); let done = Arc::clone(&done); let barrier = Arc::clone(&barrier); let topo = Arc::clone(&topo); threads.push( std::thread::Builder::new() .stack_size(K_THREAD_STACK_SIZE) .spawn(move || { { let tid = unsafe { libc::pthread_self() }; let mut locked_topo = topo.lock().unwrap(); let bind_to = cpuset_for_core(&*locked_topo, thread_id as usize); locked_topo .set_cpubind_for_thread(tid, bind_to, CPUBIND_THREAD) .unwrap(); } let mut reads = 0; let mut upserts = 0; let mut rmws = 0; let _session = store.start_session(); barrier.wait(); let start = Instant::now(); while !done.load(Ordering::SeqCst) { let 
mut chunk_idx = idx.fetch_add(K_CHUNK_SIZE, Ordering::SeqCst); while chunk_idx >= K_TXN_COUNT { if chunk_idx == K_TXN_COUNT { idx.store(0, Ordering::SeqCst); } chunk_idx = idx.fetch_add(K_CHUNK_SIZE, Ordering::SeqCst); } for i in chunk_idx..(chunk_idx + K_CHUNK_SIZE) { if i % K_REFRESH_INTERVAL == 0 { store.refresh(); if i % K_COMPLETE_PENDING_INTERVAL == 0 { store.complete_pending(false); } } match op_allocator(i) { Operation::Read => { let (_, _): (u8, Receiver<i32>) = store.read(&*keys.get(i).unwrap(), 1); reads += 1; } Operation::Upsert => { store.upsert(&*keys.get(i).unwrap(), &42, 1); upserts += 1; } Operation::Rmw => { store.rmw(&*keys.get(i).unwrap(), &5, 1); rmws += 1; } } } } store.complete_pending(true); store.stop_session(); let duration = Instant::now().duration_since(start); println!( "Thread {} completed {} reads, {} upserts and {} rmws in {}ms", thread_id, reads, upserts, rmws, duration.as_millis() ); (reads, upserts, rmws, duration.as_nanos()) }) .unwrap(), ) } barrier.wait(); let start = Instant::now(); let mut last_checkpoint = Instant::now(); let mut num_checkpoints = 0; while Instant::now().duration_since(start).as_secs() < K_RUN_TIME { std::thread::sleep(Duration::from_secs(1)); if Instant::now().duration_since(last_checkpoint).as_secs() > K_CHECKPOINT_SECONDS { println!("Checkpointing..."); store.checkpoint(); num_checkpoints += 1; last_checkpoint = Instant::now(); } } done.store(true, Ordering::SeqCst); let mut total_counts = (0, 0, 0, 0); for t in threads { let (reads, upserts, rmws, duration) = t.join().expect("Something went wrong in a thread"); total_counts.0 += reads; total_counts.1 += upserts; total_counts.2 += rmws; total_counts.3 += duration; } println!( "Finished benchmark: {} checkpoints, {} reads, {} writes, {} rmws. {} ops/second/thread", num_checkpoints, total_counts.0, total_counts.1, total_counts.2, (total_counts.0 + total_counts.1 + total_counts.2) / (total_counts.3 as usize / K_NANOS_PER_SECOND) ) }
extern crate hwloc; extern crate libc; extern crate regex; use faster_rs::FasterKv; use hwloc::{CpuSet, ObjectType, Topology, CPUBIND_THREAD}; use regex::Regex; use std::fs::File; use std::io::{BufRead, BufReader, Write}; use std::os::unix::prelude::FileExt; use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering}; use std::sync::mpsc::Receiver; use std::sync::{Arc, Barrier, Mutex}; use std::time::{Duration, Instant}; const K_CHECKPOINT_SECONDS: u64 = 30; const K_COMPLETE_PENDING_INTERVAL: usize = 1600; cons
println!("Loaded {} txns", count); (init_keys, run_keys) } pub fn populate_store(store: &Arc<FasterKv>, keys: &Arc<Vec<u64>>, num_threads: u8) { let topo = Arc::new(Mutex::new(Topology::new())); let idx = Arc::new(AtomicUsize::new(0)); let mut threads = vec![]; for thread_idx in 0..num_threads { let store = Arc::clone(store); let idx = Arc::clone(&idx); let keys = Arc::clone(&keys); let child_topo = topo.clone(); threads.push(std::thread::spawn(move || { { let tid = unsafe { libc::pthread_self() }; let mut locked_topo = child_topo.lock().unwrap(); let bind_to = cpuset_for_core(&*locked_topo, thread_idx as usize); locked_topo .set_cpubind_for_thread(tid, bind_to, CPUBIND_THREAD) .unwrap(); } let _session = store.start_session(); let mut chunk_idx = idx.fetch_add(K_CHUNK_SIZE, Ordering::SeqCst); while chunk_idx < K_INIT_COUNT { for i in chunk_idx..(chunk_idx + K_CHUNK_SIZE) { if i % K_REFRESH_INTERVAL == 0 { store.refresh(); if i % K_COMPLETE_PENDING_INTERVAL == 0 { store.complete_pending(false); } } store.upsert(&*keys.get(i as usize).unwrap(), &42, i as u64); } chunk_idx = idx.fetch_add(K_CHUNK_SIZE, Ordering::SeqCst); } store.complete_pending(true); store.stop_session(); })); } for t in threads { t.join().expect("Something went wrong in a thread"); } } pub fn run_benchmark<F: Fn(usize) -> Operation + Send + Copy + 'static>( store: &Arc<FasterKv>, keys: &Arc<Vec<u64>>, num_threads: u8, op_allocator: F, ) { let topo = Arc::new(Mutex::new(Topology::new())); let idx = Arc::new(AtomicUsize::new(0)); let done = Arc::new(AtomicBool::new(false)); let barrier = Arc::new(Barrier::new((num_threads + 1) as usize)); let mut threads = vec![]; for thread_id in 0..num_threads { let store = Arc::clone(&store); let keys = Arc::clone(&keys); let idx = Arc::clone(&idx); let done = Arc::clone(&done); let barrier = Arc::clone(&barrier); let topo = Arc::clone(&topo); threads.push( std::thread::Builder::new() .stack_size(K_THREAD_STACK_SIZE) .spawn(move || { { let tid = unsafe { 
libc::pthread_self() }; let mut locked_topo = topo.lock().unwrap(); let bind_to = cpuset_for_core(&*locked_topo, thread_id as usize); locked_topo .set_cpubind_for_thread(tid, bind_to, CPUBIND_THREAD) .unwrap(); } let mut reads = 0; let mut upserts = 0; let mut rmws = 0; let _session = store.start_session(); barrier.wait(); let start = Instant::now(); while !done.load(Ordering::SeqCst) { let mut chunk_idx = idx.fetch_add(K_CHUNK_SIZE, Ordering::SeqCst); while chunk_idx >= K_TXN_COUNT { if chunk_idx == K_TXN_COUNT { idx.store(0, Ordering::SeqCst); } chunk_idx = idx.fetch_add(K_CHUNK_SIZE, Ordering::SeqCst); } for i in chunk_idx..(chunk_idx + K_CHUNK_SIZE) { if i % K_REFRESH_INTERVAL == 0 { store.refresh(); if i % K_COMPLETE_PENDING_INTERVAL == 0 { store.complete_pending(false); } } match op_allocator(i) { Operation::Read => { let (_, _): (u8, Receiver<i32>) = store.read(&*keys.get(i).unwrap(), 1); reads += 1; } Operation::Upsert => { store.upsert(&*keys.get(i).unwrap(), &42, 1); upserts += 1; } Operation::Rmw => { store.rmw(&*keys.get(i).unwrap(), &5, 1); rmws += 1; } } } } store.complete_pending(true); store.stop_session(); let duration = Instant::now().duration_since(start); println!( "Thread {} completed {} reads, {} upserts and {} rmws in {}ms", thread_id, reads, upserts, rmws, duration.as_millis() ); (reads, upserts, rmws, duration.as_nanos()) }) .unwrap(), ) } barrier.wait(); let start = Instant::now(); let mut last_checkpoint = Instant::now(); let mut num_checkpoints = 0; while Instant::now().duration_since(start).as_secs() < K_RUN_TIME { std::thread::sleep(Duration::from_secs(1)); if Instant::now().duration_since(last_checkpoint).as_secs() > K_CHECKPOINT_SECONDS { println!("Checkpointing..."); store.checkpoint(); num_checkpoints += 1; last_checkpoint = Instant::now(); } } done.store(true, Ordering::SeqCst); let mut total_counts = (0, 0, 0, 0); for t in threads { let (reads, upserts, rmws, duration) = t.join().expect("Something went wrong in a thread"); 
total_counts.0 += reads; total_counts.1 += upserts; total_counts.2 += rmws; total_counts.3 += duration; } println!( "Finished benchmark: {} checkpoints, {} reads, {} writes, {} rmws. {} ops/second/thread", num_checkpoints, total_counts.0, total_counts.1, total_counts.2, (total_counts.0 + total_counts.1 + total_counts.2) / (total_counts.3 as usize / K_NANOS_PER_SECOND) ) }
t K_REFRESH_INTERVAL: usize = 64; const K_RUN_TIME: u64 = 360; const K_CHUNK_SIZE: usize = 3200; const K_FILE_CHUNK_SIZE: usize = 131072; const K_INIT_COUNT: usize = 250000000; const K_TXN_COUNT: usize = 1000000000; const K_NANOS_PER_SECOND: usize = 1000000000; const K_THREAD_STACK_SIZE: usize = 4 * 1024 * 1024; pub enum Operation { Read, Upsert, Rmw, } fn cpuset_for_core(topology: &Topology, idx: usize) -> CpuSet { let cores = (*topology).objects_with_type(&ObjectType::Core).unwrap(); match cores.get(idx) { Some(val) => val.cpuset().unwrap(), None => panic!("No Core found with id {}", idx), } } pub fn process_ycsb(input_file: &str, output_file: &str) { let input = File::open(input_file).expect("Unable to open input file for reading"); let mut output = File::create(output_file).expect("Unable to create output file"); let re = Regex::new(r".*usertable user(\d+).*").unwrap(); let reader = BufReader::new(input); for line in reader.lines().map(|l| l.unwrap()) { for cap in re.captures_iter(&line) { let num: u64 = cap[1].parse().expect("Unable to parse uid"); output.write(&num.to_be_bytes()).unwrap(); } } } pub fn generate_sequential_keys(out_file: &str, workload: &str) { let mut output = File::create(out_file).expect("Unable to create output file"); let num_keys = match workload { "load" => K_INIT_COUNT, "run" => K_TXN_COUNT, _ => panic!("Must specify load or run for generating sequential keys"), }; for i in 0..num_keys { output.write(&((i % K_INIT_COUNT) as u64).to_be_bytes()).unwrap(); } } pub fn read_upsert5050(key: usize) -> Operation { match key % 2 { 0 => Operation::Read, 1 => Operation::Upsert, _ => panic!(), } } pub fn rmw_100(_key: usize) -> Operation { Operation::Rmw } pub fn upsert_100(_key: usize) -> Operation { Operation::Upsert } pub fn load_files(load_file: &str, run_file: &str) -> (Vec<u64>, Vec<u64>) { let load_file = File::open(load_file).expect("Unable to open load file"); let run_file = File::open(run_file).expect("Unable to open run file"); let mut 
buffer = [0; K_FILE_CHUNK_SIZE]; let mut count = 0; let mut offset = 0; let mut init_keys = Vec::with_capacity(K_INIT_COUNT); println!("Loading keys into memory"); loop { let bytes_read = load_file.read_at(&mut buffer, offset).unwrap(); for i in 0..(bytes_read / 8) { let mut num = [0; 8]; num.copy_from_slice(&buffer[i..i + 8]); init_keys.insert(count, u64::from_be_bytes(num)); count += 1; } if bytes_read == K_FILE_CHUNK_SIZE { offset += K_FILE_CHUNK_SIZE as u64; } else { break; } } if K_INIT_COUNT != count { panic!("Init file load fail!"); } println!("Loaded {} keys", count); let mut count = 0; let mut offset = 0; let mut run_keys = Vec::with_capacity(K_TXN_COUNT); println!("Loading txns into memory"); loop { let bytes_read = run_file.read_at(&mut buffer, offset).unwrap(); for i in 0..(bytes_read / 8) { let mut num = [0; 8]; num.copy_from_slice(&buffer[i..i + 8]); run_keys.insert(count, u64::from_be_bytes(num)); count += 1; } if bytes_read == K_FILE_CHUNK_SIZE { offset += K_FILE_CHUNK_SIZE as u64; } else { break; } } if K_TXN_COUNT != count { panic!("Txn file load fail!"); }
random
[ { "content": "fn populate(num_threads: usize) -> () {\n\n if let Ok(store) = FasterKvBuilder::new(TABLE_SIZE, LOG_SIZE).with_disk(STORAGE_DIR).build() {\n\n let store = Arc::new(store);\n\n let mut threads = vec![];\n\n let num_active_threads = Arc::new(AtomicUsize::new(0));\n\n f...
Rust
src/opts.rs
robatipoor/shell-hist
b931b50feb5608af38fb0991dbbef4f29e2a1cb0
use crate::eject; use dirs::home_dir; use regex::Regex; use std::env; use std::fmt; use std::path::PathBuf; use std::slice::Iter; use structopt::StructOpt; #[derive(StructOpt)] pub struct Options { #[structopt(flatten)] pub display: DisplayOpts, #[structopt(flatten)] pub shell: ShellOpts, #[structopt(short = "f", parse(from_os_str))] pub file: Option<PathBuf>, #[structopt(short = "n", default_value = "10")] pub count: usize, } #[derive(StructOpt)] pub struct DisplayOpts { #[structopt(short = "z", long = "display-fuzzy")] pub fuzzy: bool, #[structopt(short = "e", long = "display-exact")] pub exact: bool, #[structopt(short = "t", long = "display-heat")] pub heat: bool, } #[derive(StructOpt)] pub struct ShellOpts { #[structopt(long = "flavor-zsh")] pub zsh: bool, #[structopt(long = "flavor-bash")] pub bash: bool, } #[derive(Copy, Clone)] pub enum HistoryFlavor { Zsh, Bash, } impl fmt::Display for HistoryFlavor { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { HistoryFlavor::Bash => write!(f, "bash"), HistoryFlavor::Zsh => write!(f, "zsh"), } } } impl ShellOpts { pub fn detect_shell() -> Option<HistoryFlavor> { let shell_path = env::var("SHELL").ok()?; for sh in HistoryFlavor::iter() { if shell_path.contains(sh.to_string().as_str()) { return Some(*sh); } } None } pub fn validate(self) -> HistoryFlavor { match (self.zsh, self.bash) { (false, false) => { if let Some(sh) = Self::detect_shell() { sh } else { eject("Unable to detect shell, please manually select a shell flavor"); } } (true, false) => HistoryFlavor::Zsh, (false, true) => HistoryFlavor::Bash, (true, true) => { eject("Multiple shell modes selected, please select one or none"); } } } } impl HistoryFlavor { pub fn iter() -> Iter<'static, HistoryFlavor> { use HistoryFlavor::*; const HISTORY_FLAVOR: [HistoryFlavor; 2] = [Bash, Zsh]; HISTORY_FLAVOR.iter() } pub fn history_path(&self) -> PathBuf { use HistoryFlavor::*; let name = match self { Zsh => ".zsh_history", Bash => ".bash_history", }; let 
mut dir = home_dir().unwrap_or_else(|| { eject("Unable to determine home path. Please specify history file path"); }); dir.push(name); dir } pub fn regex_and_capture_idx(&self) -> (Regex, usize) { use HistoryFlavor::*; let (re_res, idx) = match self { Zsh => (Regex::new(r"^.*;(sudo )?(.*)$"), 2), Bash => (Regex::new(r"^(sudo )?(.*)$"), 2), }; ( re_res.unwrap_or_else(|_| eject("Failed to compile regex!")), idx, ) } } pub enum DisplayMode { Fuzzy, Exact, Heat, } impl DisplayOpts { pub fn validate(self) -> DisplayMode { match (self.fuzzy, self.exact, self.heat) { (false, false, false) => DisplayMode::Fuzzy, (true, false, false) => DisplayMode::Fuzzy, (false, true, false) => DisplayMode::Exact, (false, false, true) => DisplayMode::Heat, _ => { eject("Multiple display modes selected, please select one or none"); } } } }
use crate::eject; use dirs::home_dir; use regex::Regex; use std::env; use std::fmt; use std::path::PathBuf; use std::slice::Iter; use structopt::StructOpt; #[derive(StructOpt)] pub struct Options { #[structopt(flatten)] pub display: DisplayOpts, #[structopt(flatten)] pub shell: ShellOpts, #[structopt(short = "f", parse(from_os_str))] pub file: Option<PathBuf>, #[structopt(short = "n", default_value = "10")] pub count: usize, } #[derive(StructOpt)] pub struct DisplayOpts { #[structopt(short = "z", long = "display-fuzzy")] pub fuzzy: bool, #[structopt(short = "e", long = "display-exact")] pub exact: bool, #[structopt(short = "t", long = "display-heat")] pub heat: bool, } #[derive(StructOpt)] pub struct ShellOpts { #[structopt(long = "flavor-zsh")] pub zsh: bool, #[structopt(long = "flavor-bash")] pub bash: bool, } #[derive(Copy, Clone)] pub enum HistoryFlavor { Zsh, Bash, } impl fmt::Display for HistoryFlavor { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { HistoryFlavor::Bash => write!(f, "bash"), HistoryFlavor::Zsh => write!(f, "zsh"), } } } impl ShellOpts { pub fn detect_shell() -> Option<HistoryFlavor> { let shell_path = env::var("SHELL").ok()?; for sh in HistoryFlavor::iter() { if shell_path.contains(sh.to_string().as_str()) { return Some(*sh); } } None } pub fn validate(self) -> HistoryFlavor { match (self.zsh, self.bash) { (false, false) =
lf.exact, self.heat) { (false, false, false) => DisplayMode::Fuzzy, (true, false, false) => DisplayMode::Fuzzy, (false, true, false) => DisplayMode::Exact, (false, false, true) => DisplayMode::Heat, _ => { eject("Multiple display modes selected, please select one or none"); } } } }
> { if let Some(sh) = Self::detect_shell() { sh } else { eject("Unable to detect shell, please manually select a shell flavor"); } } (true, false) => HistoryFlavor::Zsh, (false, true) => HistoryFlavor::Bash, (true, true) => { eject("Multiple shell modes selected, please select one or none"); } } } } impl HistoryFlavor { pub fn iter() -> Iter<'static, HistoryFlavor> { use HistoryFlavor::*; const HISTORY_FLAVOR: [HistoryFlavor; 2] = [Bash, Zsh]; HISTORY_FLAVOR.iter() } pub fn history_path(&self) -> PathBuf { use HistoryFlavor::*; let name = match self { Zsh => ".zsh_history", Bash => ".bash_history", }; let mut dir = home_dir().unwrap_or_else(|| { eject("Unable to determine home path. Please specify history file path"); }); dir.push(name); dir } pub fn regex_and_capture_idx(&self) -> (Regex, usize) { use HistoryFlavor::*; let (re_res, idx) = match self { Zsh => (Regex::new(r"^.*;(sudo )?(.*)$"), 2), Bash => (Regex::new(r"^(sudo )?(.*)$"), 2), }; ( re_res.unwrap_or_else(|_| eject("Failed to compile regex!")), idx, ) } } pub enum DisplayMode { Fuzzy, Exact, Heat, } impl DisplayOpts { pub fn validate(self) -> DisplayMode { match (self.fuzzy, se
random
[ { "content": "pub fn parse<'a>(path: Option<PathBuf>, flavor: HistoryFlavor) -> Node {\n\n let mut tree = Node::new();\n\n\n\n let path = path.unwrap_or_else(|| {\n\n flavor.history_path()\n\n });\n\n let (re, idx) = flavor.regex_and_capture_idx();\n\n\n\n let f = File::open(&path).unwrap...
Rust
src/coincheck/mock.rs
canpok1/trading-bot-rust
e45c4a0d2af25fdbacf05dfdedf20b7f29d51e75
use crate::bot::model::TradeInfo; use crate::bot::model::TradeInfoParam; use crate::coincheck::client::Client; use crate::coincheck::model::Pair; use crate::coincheck::model::{Balance, NewOrder, OpenOrder, Order, OrderBooks, OrderType}; use crate::config::Config; use crate::error::MyError::{EmptyCollection, KeyNotFound}; use crate::error::MyResult; use crate::mysql::model::Market; use async_trait::async_trait; use chrono::FixedOffset; use chrono::TimeZone; use std::collections::HashMap; #[derive(Debug)] pub struct SimulationClient { markets: HashMap<String, Vec<Market>>, } impl SimulationClient { pub fn new() -> MyResult<SimulationClient> { Ok(SimulationClient { markets: HashMap::new(), }) } pub fn add_market(&mut self, market: &Market) -> MyResult<()> { let pair = market.pair.clone(); if !self.markets.contains_key(&pair) { self.markets.insert(pair.to_string(), vec![]); } self.markets .get_mut(&pair.to_string()) .unwrap() .push(market.clone()); Ok(()) } pub fn get_market(&self, pair: &str) -> MyResult<Option<Market>> { if !self.markets.contains_key(pair) { return Err(Box::new(KeyNotFound { key: pair.to_owned(), collection_name: "markets".to_owned(), })); } if let Some(market) = self.markets.get(pair).unwrap().iter().last() { Ok(Some(market.clone())) } else { Ok(None) } } pub fn make_info(&self, pair: &str, config: &Config) -> MyResult<TradeInfo> { if let Some(market) = self.get_market(pair)? 
{ let mut param = TradeInfoParam::default(); param.pair = Pair::new(pair)?; param.support_line_period_long = config.support_line_period_long; param.support_line_period_short = config.support_line_period_short; param.support_line_offset = config.support_line_offset; param.resistance_line_period = config.resistance_line_period; param.resistance_line_offset = config.resistance_line_offset; param .sell_rates .insert(pair.to_string(), market.ex_rate_sell); param.buy_rate = market.ex_rate_buy; param.sell_rate_histories.push(market.ex_rate_sell); param.sell_volumes.push(market.ex_volume_sell); param.buy_volumes.push(market.ex_volume_buy); Ok(param.build()?) } else { Err(Box::new(EmptyCollection("markets".to_string()))) } } } #[async_trait] impl Client for SimulationClient { async fn get_order_books(&self, _pair: &str) -> MyResult<OrderBooks> { Ok(OrderBooks::default()) } async fn get_exchange_orders_rate( &self, t: OrderType, pair: &str, _amount: f64, ) -> MyResult<f64> { if let Some(market) = self.get_market(pair)? { if t == OrderType::Buy || t == OrderType::MarketBuy { Ok(market.ex_rate_buy) } else { Ok(market.ex_rate_sell) } } else { Err(Box::new(EmptyCollection("markets".to_string()))) } } async fn post_exchange_orders(&self, req: &NewOrder) -> MyResult<Order> { let tz = FixedOffset::east(9 * 60 * 60); if let Some(market) = self.get_market(&req.pair)? { Ok(Order { id: 0, rate: None, amount: None, order_type: req.order_type.clone(), pair: Pair::new(&req.pair)?, created_at: tz.from_utc_datetime(&market.recorded_at), }) } else { Err(Box::new(EmptyCollection("markets".to_string()))) } } async fn get_exchange_orders_opens(&self) -> MyResult<Vec<OpenOrder>> { Ok(vec![]) } async fn delete_exchange_orders(&self, _id: u64) -> MyResult<u64> { Ok(0) } async fn get_exchange_orders_cancel_status(&self, _id: u64) -> MyResult<bool> { Ok(true) } async fn get_accounts_balance(&self) -> MyResult<HashMap<String, Balance>> { Ok(HashMap::new()) } }
use crate::bot::model::TradeInfo; use crate::bot::model::TradeInfoParam; use crate::coincheck::client::Client; use crate::coincheck::model::Pair; use crate::coincheck::model::{Balance, NewOrder, OpenOrder, Order, OrderBooks, OrderType}; use crate::config::Config; use crate::error::MyError::{EmptyCollection, KeyNotFound}; use crate::error::MyResult; use crate::mysql::model::Market; use async_trait::async_trait; use chrono::FixedOffset; use chrono::TimeZone; use std::collections::HashMap; #[derive(Debug)] pub struct SimulationClient { markets: HashMap<String, Vec<Market>>, } impl SimulationClient { pub fn new() -> MyResult<SimulationClient> { Ok(SimulationClient { markets: HashMap::new(), }) } pub fn add_market(&mut self, market: &Market) -> MyResult<()> { let pair = market.pair.clone(); if !self.markets.contains_key(&pair) { self.markets.insert(pair.to_string(), vec![]); } self.markets .get_mut(&pair.to_string()) .unwrap() .push(market.clone()); Ok(()) } pub fn get_market(&self, pair: &str) -> MyResult<Option<Market>> { if !self.markets.contains_key(pair) { return Err(Box::new(KeyNotFound {
wrap().iter().last() { Ok(Some(market.clone())) } else { Ok(None) } } pub fn make_info(&self, pair: &str, config: &Config) -> MyResult<TradeInfo> { if let Some(market) = self.get_market(pair)? { let mut param = TradeInfoParam::default(); param.pair = Pair::new(pair)?; param.support_line_period_long = config.support_line_period_long; param.support_line_period_short = config.support_line_period_short; param.support_line_offset = config.support_line_offset; param.resistance_line_period = config.resistance_line_period; param.resistance_line_offset = config.resistance_line_offset; param .sell_rates .insert(pair.to_string(), market.ex_rate_sell); param.buy_rate = market.ex_rate_buy; param.sell_rate_histories.push(market.ex_rate_sell); param.sell_volumes.push(market.ex_volume_sell); param.buy_volumes.push(market.ex_volume_buy); Ok(param.build()?) } else { Err(Box::new(EmptyCollection("markets".to_string()))) } } } #[async_trait] impl Client for SimulationClient { async fn get_order_books(&self, _pair: &str) -> MyResult<OrderBooks> { Ok(OrderBooks::default()) } async fn get_exchange_orders_rate( &self, t: OrderType, pair: &str, _amount: f64, ) -> MyResult<f64> { if let Some(market) = self.get_market(pair)? { if t == OrderType::Buy || t == OrderType::MarketBuy { Ok(market.ex_rate_buy) } else { Ok(market.ex_rate_sell) } } else { Err(Box::new(EmptyCollection("markets".to_string()))) } } async fn post_exchange_orders(&self, req: &NewOrder) -> MyResult<Order> { let tz = FixedOffset::east(9 * 60 * 60); if let Some(market) = self.get_market(&req.pair)? 
{ Ok(Order { id: 0, rate: None, amount: None, order_type: req.order_type.clone(), pair: Pair::new(&req.pair)?, created_at: tz.from_utc_datetime(&market.recorded_at), }) } else { Err(Box::new(EmptyCollection("markets".to_string()))) } } async fn get_exchange_orders_opens(&self) -> MyResult<Vec<OpenOrder>> { Ok(vec![]) } async fn delete_exchange_orders(&self, _id: u64) -> MyResult<u64> { Ok(0) } async fn get_exchange_orders_cancel_status(&self, _id: u64) -> MyResult<bool> { Ok(true) } async fn get_accounts_balance(&self) -> MyResult<HashMap<String, Balance>> { Ok(HashMap::new()) } }
key: pair.to_owned(), collection_name: "markets".to_owned(), })); } if let Some(market) = self.markets.get(pair).un
function_block-random_span
[ { "content": "pub fn has_near_rate_order(\n\n buy_rate: f64,\n\n profit_ratio: f64,\n\n open_orders: &Vec<OpenOrder>,\n\n entry_skip_rate_ratio: f64,\n\n) -> (bool, String) {\n\n if open_orders.is_empty() {\n\n return (\n\n false,\n\n \"has not near rate order, open_o...
Rust
src/request.rs
rustysec/hasty-rs
0e7014c4288ac97b284c4b074ecdf54ed46685dd
extern crate mime; use std::collections::HashMap; use url::Url; use constants::HttpMethods; #[derive(Clone)] pub struct Request { host: String, path: String, method: HttpMethods, headers: HashMap<String,String>, body: Option<Vec<u8>>, body_type: mime::Mime, url: Option<Url>, } impl Request { pub fn new() -> Request { Request { host: "".to_string(), path: "/".to_string(), method: HttpMethods::Get, headers: HashMap::new(), body: None, body_type: mime::TEXT_PLAIN, url: None } } pub fn from_url(url: Url) -> Request { Request { host: url.host_str().unwrap().to_string(), path: url.path().to_owned(), method: HttpMethods::Get, headers: HashMap::new(), body: None, body_type: mime::TEXT_PLAIN, url: Some(url), } } pub fn set_url(&mut self, url: Url) { self.host = url.host_str().unwrap().to_string(); self.path = url.path().to_owned(); self.url = Some(url); } pub fn url(&self) -> Option<Url> { self.url.clone() } pub fn set_method(&mut self, method: HttpMethods) { self.method = method; } pub fn set_content_type(&mut self, content_type: mime::Mime) { self.body_type = content_type; } pub fn set_body(&mut self, body: Option<Vec<u8>>) { self.body = body; } pub fn add_raw_header(&mut self, name: String, value: String) { self.headers.insert(name, value); } pub fn with_raw_header(mut self, name: String, value: String) -> Self { self.headers.insert(name, value); self } pub fn with_url(mut self, url: Url) -> Self { self.host = url.host().unwrap().to_string(); self.path = url.path().to_owned(); self.url = Some(url); self } pub fn with_method(mut self, method: HttpMethods) -> Self { self.method = method; self } pub fn with_content_type(mut self, content_type: mime::Mime) -> Self { self.body_type = content_type; self } pub fn with_body(mut self, body: Option<Vec<u8>>) -> Self { self.body = body; self } pub fn to_payload(self) -> Vec<u8> { let mut payload = Vec::new(); payload.extend( format!( "{} {} HTTP/1.1\r\n", self.method.to_string(), self.path ).as_bytes() ); payload.extend( format!( 
"Host: {}\r\n", self.host ).as_bytes() ); for header in self.headers { payload.extend( format!( "{}: {}\r\n", header.0, header.1 ).as_bytes() ); } if let Some(content) = self.body { payload.extend( format!( "content-length: {}\r\ncontent-type: {};charset=UTF-8\r\n\r\n", content.len(), self.body_type ).as_bytes() ); payload.extend(content); } payload.extend( "\r\n".to_owned().into_bytes() ); payload } }
extern crate mime; use std::collections::HashMap; use url::Url; use constants::HttpMethods; #[derive(Clone)] pub struct Request { host: String, path: String, method: HttpMethods, headers: HashMap<String,String>, body: Option<Vec<u8>>, body_type: mime::Mime, url: Option<Url>, } impl Request { pub fn new() -> Request { Request { host: "".to_string(), path: "/".to_string(), method: HttpMethods::Get, headers: HashMap::new(), body: None, body_type: mime::TEXT_PLAIN, url: None } } pub fn from_url(url: Url) -> Request { Request { host: url.host_str().unwrap().to_string(), path: url.path().to_owned(), method: HttpMethods::Get, headers: HashMap::new(), body: None, body_type: mime::TEXT_PLAIN, url: Some(url), } } pub fn set_url(&mut self, url: Url) { self.host = url.host_str().unwrap().to_string(); self.path = url.path().to_owned(); self.url = Some(url); } pub fn url(&self) -> Option<Url> { self.url.clone() } pub fn set_method(&mut self, method: HttpMethods) { self.method = method; } pub fn set_content_type(&mut self
pub fn add_raw_header(&mut self, name: String, value: String) { self.headers.insert(name, value); } pub fn with_raw_header(mut self, name: String, value: String) -> Self { self.headers.insert(name, value); self } pub fn with_url(mut self, url: Url) -> Self { self.host = url.host().unwrap().to_string(); self.path = url.path().to_owned(); self.url = Some(url); self } pub fn with_method(mut self, method: HttpMethods) -> Self { self.method = method; self } pub fn with_content_type(mut self, content_type: mime::Mime) -> Self { self.body_type = content_type; self } pub fn with_body(mut self, body: Option<Vec<u8>>) -> Self { self.body = body; self } pub fn to_payload(self) -> Vec<u8> { let mut payload = Vec::new(); payload.extend( format!( "{} {} HTTP/1.1\r\n", self.method.to_string(), self.path ).as_bytes() ); payload.extend( format!( "Host: {}\r\n", self.host ).as_bytes() ); for header in self.headers { payload.extend( format!( "{}: {}\r\n", header.0, header.1 ).as_bytes() ); } if let Some(content) = self.body { payload.extend( format!( "content-length: {}\r\ncontent-type: {};charset=UTF-8\r\n\r\n", content.len(), self.body_type ).as_bytes() ); payload.extend(content); } payload.extend( "\r\n".to_owned().into_bytes() ); payload } }
, content_type: mime::Mime) { self.body_type = content_type; } pub fn set_body(&mut self, body: Option<Vec<u8>>) { self.body = body; }
random
[ { "content": "#[test]\n\npub fn methods_post() {\n\n let s = HttpMethods::Post.to_string();\n\n assert_eq!(s, \"POST\".to_owned());\n\n}\n\n\n", "file_path": "src/constants.rs", "rank": 0, "score": 89164.04834883794 }, { "content": "#[test]\n\npub fn methods_get() {\n\n let s = Http...
Rust
tests/it/env.rs
oberblastmeister/xshell
f98f45a6a34a2dc3f526039685ba0972e9794c21
use std::collections::BTreeMap; use xshell::{cmd, pushenv}; use crate::setup; #[test] fn test_env() { setup(); let v1 = "xshell_test_123"; let v2 = "xshell_test_456"; assert_env(cmd!("echo_env {v1}").env(v1, "123"), &[(v1, Some("123"))]); assert_env( cmd!("echo_env {v1} {v2}").envs([(v1, "123"), (v2, "456")].iter().copied()), &[(v1, Some("123")), (v2, Some("456"))], ); assert_env( cmd!("echo_env {v1} {v2}").envs([(v1, "123"), (v2, "456")].iter().copied()).env_remove(v2), &[(v1, Some("123")), (v2, None)], ); assert_env( cmd!("echo_env {v1} {v2}") .envs([(v1, "123"), (v2, "456")].iter().copied()) .env_remove("nothing"), &[(v1, Some("123")), (v2, Some("456"))], ); let _g1 = pushenv(v1, "foobar"); let _g2 = pushenv(v2, "quark"); assert_env(cmd!("echo_env {v1} {v2}"), &[(v1, Some("foobar")), (v2, Some("quark"))]); assert_env( cmd!("echo_env {v1} {v2}").env(v1, "wombo"), &[(v1, Some("wombo")), (v2, Some("quark"))], ); assert_env(cmd!("echo_env {v1} {v2}").env_remove(v1), &[(v1, None), (v2, Some("quark"))]); assert_env( cmd!("echo_env {v1} {v2}").env_remove(v1).env(v1, "baz"), &[(v1, Some("baz")), (v2, Some("quark"))], ); assert_env( cmd!("echo_env {v1} {v2}").env(v1, "baz").env_remove(v1), &[(v1, None), (v2, Some("quark"))], ); } #[test] #[cfg(not(windows))] fn test_env_clear() { setup(); let v1 = "xshell_test_123"; let v2 = "xshell_test_456"; let echo_env = format!("./mock_bin/echo_env{}", std::env::consts::EXE_SUFFIX); assert_env( cmd!("{echo_env} {v1} {v2}").envs([(v1, "123"), (v2, "456")].iter().copied()).env_clear(), &[(v1, None), (v2, None)], ); assert_env( cmd!("{echo_env} {v1} {v2}") .envs([(v1, "123"), (v2, "456")].iter().copied()) .env_clear() .env(v1, "789"), &[(v1, Some("789")), (v2, None)], ); let _g1 = pushenv(v1, "foobar"); let _g2 = pushenv(v2, "quark"); assert_env(cmd!("{echo_env} {v1} {v2}").env_clear(), &[(v1, None), (v2, None)]); assert_env( cmd!("{echo_env} {v1} {v2}").env_clear().env(v1, "baz"), &[(v1, Some("baz")), (v2, None)], ); 
assert_env(cmd!("{echo_env} {v1} {v2}").env(v1, "baz").env_clear(), &[(v1, None), (v2, None)]); } #[track_caller] fn assert_env(echo_env_cmd: xshell::Cmd, want_env: &[(&str, Option<&str>)]) { let output = echo_env_cmd.output().unwrap(); let env = String::from_utf8_lossy(&output.stdout) .lines() .filter(|line| !line.is_empty()) .map(|line| { let (key, val) = split_once(line, '=').unwrap_or_else(|| { panic!("failed to parse line from `echo_env` output: {:?}", line) }); (key.to_owned(), val.to_owned()) }) .collect::<BTreeMap<_, _>>(); check_env(&env, want_env); } #[track_caller] fn check_env(env: &BTreeMap<String, String>, wanted_env: &[(&str, Option<&str>)]) { let mut failed = false; let mut seen = env.clone(); for &(k, val) in wanted_env { match (seen.remove(k), val) { (Some(env_v), Some(want_v)) if env_v == want_v => {} (None, None) => {} (have, want) => { eprintln!("mismatch on env var {:?}: have `{:?}`, want `{:?}` ", k, have, want); failed = true; } } } for (k, v) in seen { eprintln!("Unexpected env key {:?} (value: {:?})", k, v); failed = true; } assert!( !failed, "env didn't match (see stderr for cleaner output):\nsaw: {:?}\n\nwanted: {:?}", env, wanted_env, ); } fn split_once(line: &str, arg: char) -> Option<(&str, &str)> { let idx = line.find(arg)?; Some((&line[..idx], &line[idx + arg.len_utf8()..])) }
use std::collections::BTreeMap; use xshell::{cmd, pushenv}; use crate::setup; #[test] fn test_env() { setup(); let v1 = "xshell_test_123"; let v2 = "xshell_test_456"; assert_env(cmd!("echo_env {v1}").env(v1, "123"), &[(v1, Some("123"))]); assert_env( cmd!("echo_env {v1} {v2}").envs([(v1, "123"), (v2, "456")].iter().copied()), &[(v1, Some("123")), (v2, Some("456"))], ); assert_env( cmd!("echo_env {v1} {v2}").envs([(v1, "123"), (v2, "456")].iter().copied()).env_remove(v2), &[(v1, Some("123")), (v2, None)], ); assert_env( cmd!("echo_env {v1} {v2}") .envs([(v1, "123"), (v2, "456")].iter().copied()) .env_remove("nothing"), &[(v1, Some("123")), (v2, Some("456"))], ); let _g1 = pushenv(v1, "foobar"); let _g2 = pushenv(v2, "quark");
, "quark"); assert_env(cmd!("{echo_env} {v1} {v2}").env_clear(), &[(v1, None), (v2, None)]); assert_env( cmd!("{echo_env} {v1} {v2}").env_clear().env(v1, "baz"), &[(v1, Some("baz")), (v2, None)], ); assert_env(cmd!("{echo_env} {v1} {v2}").env(v1, "baz").env_clear(), &[(v1, None), (v2, None)]); } #[track_caller] fn assert_env(echo_env_cmd: xshell::Cmd, want_env: &[(&str, Option<&str>)]) { let output = echo_env_cmd.output().unwrap(); let env = String::from_utf8_lossy(&output.stdout) .lines() .filter(|line| !line.is_empty()) .map(|line| { let (key, val) = split_once(line, '=').unwrap_or_else(|| { panic!("failed to parse line from `echo_env` output: {:?}", line) }); (key.to_owned(), val.to_owned()) }) .collect::<BTreeMap<_, _>>(); check_env(&env, want_env); } #[track_caller] fn check_env(env: &BTreeMap<String, String>, wanted_env: &[(&str, Option<&str>)]) { let mut failed = false; let mut seen = env.clone(); for &(k, val) in wanted_env { match (seen.remove(k), val) { (Some(env_v), Some(want_v)) if env_v == want_v => {} (None, None) => {} (have, want) => { eprintln!("mismatch on env var {:?}: have `{:?}`, want `{:?}` ", k, have, want); failed = true; } } } for (k, v) in seen { eprintln!("Unexpected env key {:?} (value: {:?})", k, v); failed = true; } assert!( !failed, "env didn't match (see stderr for cleaner output):\nsaw: {:?}\n\nwanted: {:?}", env, wanted_env, ); } fn split_once(line: &str, arg: char) -> Option<(&str, &str)> { let idx = line.find(arg)?; Some((&line[..idx], &line[idx + arg.len_utf8()..])) }
assert_env(cmd!("echo_env {v1} {v2}"), &[(v1, Some("foobar")), (v2, Some("quark"))]); assert_env( cmd!("echo_env {v1} {v2}").env(v1, "wombo"), &[(v1, Some("wombo")), (v2, Some("quark"))], ); assert_env(cmd!("echo_env {v1} {v2}").env_remove(v1), &[(v1, None), (v2, Some("quark"))]); assert_env( cmd!("echo_env {v1} {v2}").env_remove(v1).env(v1, "baz"), &[(v1, Some("baz")), (v2, Some("quark"))], ); assert_env( cmd!("echo_env {v1} {v2}").env(v1, "baz").env_remove(v1), &[(v1, None), (v2, Some("quark"))], ); } #[test] #[cfg(not(windows))] fn test_env_clear() { setup(); let v1 = "xshell_test_123"; let v2 = "xshell_test_456"; let echo_env = format!("./mock_bin/echo_env{}", std::env::consts::EXE_SUFFIX); assert_env( cmd!("{echo_env} {v1} {v2}").envs([(v1, "123"), (v2, "456")].iter().copied()).env_clear(), &[(v1, None), (v2, None)], ); assert_env( cmd!("{echo_env} {v1} {v2}") .envs([(v1, "123"), (v2, "456")].iter().copied()) .env_clear() .env(v1, "789"), &[(v1, Some("789")), (v2, None)], ); let _g1 = pushenv(v1, "foobar"); let _g2 = pushenv(v2
random
[ { "content": "fn setup() {\n\n static ONCE: std::sync::Once = std::sync::Once::new();\n\n ONCE.call_once(|| {\n\n if let Err(err) = install_mock_binaries() {\n\n panic!(\"failed to install binaries from mock_bin: {}\", err)\n\n }\n\n });\n\n\n\n fn install_mock_binaries() ->...
Rust
src/cluster/bootstrap.rs
FridgeSeal/blip
0a202eb0061d8e2ac34a4bd501cfa08051cb4087
use super::{ cut::{self, MultiNodeCut, Subscription}, proto::{ membership_client::MembershipClient, Endpoint, EndpointError, Join, JoinReq, JoinResp, NodeId, NodeMetadata, PreJoinReq, PreJoinResp, }, Cluster, State, }; use futures::{ future::TryFutureExt, stream::{FuturesUnordered, StreamExt}, }; use log::{info, warn}; use std::{borrow::Cow, cmp, sync::Arc, time::Duration}; use thiserror::Error; use tokio::time::{error::Elapsed, sleep, timeout}; use tonic::transport; #[derive(Debug, Error)] enum JoinError { #[error("timed out: {}", .0)] TimedOut(#[from] Elapsed), #[error("endpoint resolution failed: {}", .0)] Resolution(#[from] EndpointError), #[error("phase 1 failed: {}", .0)] Phase1(GrpcError), #[error("phase 2 failed: {}", .0)] Phase2(GrpcError), #[error("phase 2 failed: no observers")] NoObservers, } #[derive(Debug, Error)] enum GrpcError { #[error("connection failed: {}", .0)] Connect(#[from] transport::Error), #[error("call failed: {}", .0)] Call(#[from] tonic::Status), } impl Cluster { pub(crate) async fn handle_parts(self: Arc<Self>, mut cuts: Subscription) -> cut::Result { self.initialize().await; loop { let cut = cuts.recv().await?; if !cut.is_degraded() { continue; } if cut.members().is_empty() { self.initialize().await; continue; } self.join_via_backoff(|| Cow::Owned(cut.random_member().into())) .await; } } async fn initialize(&self) { if let Some(seed) = self.cfg.seed.as_ref() { self.join_via_backoff(|| Cow::Borrowed(seed)).await; } else { let mut state = self.state.write().await; state.clear_consensus(); state.clear_membership(); self.bootstrap(&mut state); } } fn bootstrap(&self, state: &mut State) { assert!(state.nodes.is_empty()); assert!(state.uuids.is_empty()); assert!(state.metadata.is_empty()); assert!(state.last_cut.is_none()); let node = Endpoint::from(self.addr).tls(self.cfg.server_tls); let uuid = state.uuid.clone(); let meta = self.cfg.meta.clone(); let members: Arc<[_]> = vec![self .resolve_member_meta(self.cfg.meta.clone(), &node) 
.unwrap()] .into(); state.join_node(node, Join { uuid, meta }); let cut = MultiNodeCut { skipped: 0, local_addr: self.addr, conf_id: state.rehash_config(), degraded: false, members: members.clone(), joined: members, kicked: vec![].into(), }; state.last_cut = Some(cut.clone()); self.propagate_cut(cut); info!("bootstrapped: conf_id={}", state.conf_id); } async fn join_via_backoff<'a, F: FnMut() -> Cow<'a, Endpoint>>(&self, mut seed: F) { const RETRY_MAX: Duration = Duration::from_secs(4); const JOIN_MAX: Duration = Duration::from_secs(15); let mut retry_backoff = Duration::from_millis(200); let mut join_backoff = Duration::from_secs(5); while let Err(e) = self.join_via(&seed(), join_backoff).await { warn!("join failed: {}", e); sleep(retry_backoff).await; retry_backoff = cmp::min(retry_backoff * 2, RETRY_MAX); join_backoff = cmp::min(join_backoff + (join_backoff / 2), JOIN_MAX); } } async fn join_via(&self, seed: &Endpoint, max_wait: Duration) -> Result<(), JoinError> { let mut state = self.state.write().await; state.uuid = NodeId::generate(); info!("requesting join: timeout={:?}", max_wait); let JoinResp { nodes, uuids, .. 
} = timeout(max_wait, self.request_join(&state, seed)).await??; state.clear_consensus(); state.clear_membership(); let mut joined = Vec::with_capacity(nodes.len()); for NodeMetadata { node, meta } in nodes { joined.push(self.resolve_member_meta(meta.clone(), &node).unwrap()); assert!(state.nodes.insert(node.clone())); assert!(state.metadata.insert(node, meta).is_none()); } for uuid in uuids { assert!(state.uuids.insert(uuid)); } joined.sort_by_key(|m| m.addr()); let mut members: Vec<_> = (state.nodes.iter()) .map(|node| self.resolve_member(&state, node).unwrap()) .collect(); members.sort_by_key(|m| m.addr()); let cut = MultiNodeCut { skipped: 0, local_addr: self.addr, degraded: !state.nodes.contains(&self.local_node()), conf_id: state.rehash_config(), members: members.into(), joined: joined.into(), kicked: vec![].into(), }; state.last_cut = Some(cut.clone()); self.propagate_cut(cut); info!("joined: conf_id={}", state.conf_id); Ok(()) } async fn request_join(&self, state: &State, seed: &Endpoint) -> Result<JoinResp, JoinError> { let p1j_req = PreJoinReq { sender: self.local_node(), uuid: state.uuid.clone(), }; let r1 = self.join_phase1(p1j_req, seed).await?; let conf_id = r1.conf_id; let p2j_req = |ring| JoinReq { sender: self.local_node(), ring: ring as u64, uuid: state.uuid.clone(), conf_id, meta: self.cfg.meta.clone(), }; let mut joins = (r1.contact.into_iter()) .enumerate() .map(move |(ring, observer)| (p2j_req(ring), observer)) .map(|(req, observer)| self.join_phase2(req, observer)) .collect::<FuturesUnordered<_>>(); let mut e = None; while let Some(resp) = joins.next().await { match resp { Ok(resp) => return Ok(resp), Err(err) => e = Some(err), } } Err(e.unwrap_or(JoinError::NoObservers)) } async fn join_phase1(&self, req: PreJoinReq, via: &Endpoint) -> Result<PreJoinResp, JoinError> { let seed = self.resolve_endpoint(via)?; let mut c = MembershipClient::connect(seed) .map_err(|e| JoinError::Phase1(e.into())) .await?; (c.pre_join(req).map_ok(|r| 
r.into_inner())) .map_err(|e| JoinError::Phase1(e.into())) .await } async fn join_phase2(&self, req: JoinReq, observer: Endpoint) -> Result<JoinResp, JoinError> { let observer = self.resolve_endpoint(&observer)?; let mut c = MembershipClient::connect(observer) .map_err(|e| JoinError::Phase2(e.into())) .await?; (c.join(req).map_ok(|r| r.into_inner())) .map_err(|e| JoinError::Phase2(e.into())) .await } }
use super::{ cut::{self, MultiNodeCut, Subscription}, proto::{ membership_client::MembershipClient, Endpoint, EndpointError, Join, JoinReq, JoinResp, NodeId, NodeMetadata, PreJoinReq, PreJoinResp, }, Cluster, State, }; use futures::{ future::TryFutureExt, stream::{FuturesUnordered, StreamExt}, }; use log::{info, warn}; use std::{borrow::Cow, cmp, sync::Arc, time::Duration}; use thiserror::Error; use tokio::time::{error::Elapsed, sleep, timeout}; use tonic::transport; #[derive(Debug, Error)] enum JoinError { #[error("timed out: {}", .0)] TimedOut(#[from] Elapsed), #[error("endpoint resolution failed: {}", .0)] Resolution(#[from] EndpointError), #[error("phase 1 failed: {}", .0)] Phase1(GrpcError), #[error("phase 2 failed: {}", .0)] Phase2(GrpcError), #[error("phase 2 failed: no observers")] NoObservers, } #[derive(Debug, Error)] enum GrpcError { #[error("connection failed: {}", .0)] Connect(#[from] transport::Error), #[error("call failed: {}", .0)] Call(#[from] tonic::Status), } impl Cluster { pub(crate) async fn handle_parts(self: Arc<Self>, mut cuts: Subscription) -> cut::Result { self.initialize().await; loop { let cut = cuts.recv().await?; if !cut.is_degraded() { continue; } if cut.members().is_empty() { self.initialize().await; continue; } self.join_via_backoff(|| Cow::Owned(cut.random_member().into())) .await; } } async fn initialize(&self) { if let Some(seed) = self.cfg.seed.as_ref() { self.join_via_backoff(|| Cow::Borrowed(seed)).await; } else { let mut state = self.state.write().await; state.clear_consensus(); state.clear_membership(); self.bootstrap(&mut state); } } fn bootstrap(&self, state: &mut State) { assert!(state.nodes.is_empty()); assert!(state.uuids.is_empty()); assert!(state.metadata.is_empty()); assert!(state.last_cut.is_none()); let node = Endpoint::from(self.addr).tls(self.cfg.server_tls); let uuid = state.uuid.clone(); let meta = self.cfg.meta.clone(); let members: Arc<[_]> = vec![self .resolve_member_meta(self.cfg.meta.clone(), &node) 
.unwrap()] .into(); state.join_node(node, Join { uuid, meta }); let cut = MultiNodeCut { skipped: 0, local_addr: self.addr, conf_id: state.rehash_config(), degraded: false, members: members.clone(), joined: members, kicked: vec![].into(), }; state.last_cut = Some(cut.clone()); self.propagate_cut(cut); info!("bootstrapped: conf_id={}", state.conf_id); } async fn join_via_backoff<'a, F: FnMut() -> Cow<'a, Endpoint>>(&self, mut seed: F) { const RETRY_MAX: Duration = Duration::from_secs(4); const JOIN_MAX: Duration = Duration::from_secs(15); let mut retry_backoff = Duration::from_millis(200); let mut join_backoff = Duration::from_secs(5); while let Err(e) = self.join_via(&seed(), join_backoff).await { warn!("join failed: {}", e); sleep(retry_backoff).await; retry_backoff = cmp::min(retry_backoff * 2, RETRY_MAX); join_backoff = cmp::min(join_backoff + (join_backoff / 2), JOIN_MAX); } } async fn join_via(&self, seed: &Endpoint, max_wait: Duration) -> Result<(), JoinError> { let mut state = self.state.write().await; state.uuid = NodeId::generate(); info!("requesting join: timeout={:?}", max_wait); let JoinResp { nodes, uuids, .. 
} = timeout(max_wait, self.request_join(&state, seed)).await??; state.clear_consensus(); state.clear_membership(); let mut joined = Vec::with_capacity(nodes.len()); for NodeMetadata { node, meta } in nodes { joined.push(self.resolve_member_meta(meta.clone(), &node).unwrap()); assert!(state.nodes.insert(node.clone())); assert!(state.metadata.insert(node, meta).is_none()); } for uuid in uuids { assert!(state.uuids.insert(uuid)); } joined.sort_by_key(|m| m.addr()); let mut members: Vec<_> = (state.nodes.iter()) .map(|node| self.resolve_member(&state, node).unwrap()) .collect(); members.sort_by_key(|m| m.addr()); let cut = MultiNodeCut { skipped: 0, local_addr: self.addr, degraded: !state.nodes.contains(&self.local_node()), conf_id: state.rehash_config(), members: members.into(), joined: joined.into(), kicked: vec![].into(), }; state.last_cut = Some(cut.clone()); self.propagate_cut(cut); info!("joined: conf_id={}", state.conf_id); Ok(()) } async fn request_join(&self, state: &State, seed: &Endpoint) -> Result<JoinResp, JoinError> { let p1j_req = PreJoinReq { sender: self.local_node(), uuid: state.uuid.clone(), }; let r1 = self.join_phase1(p1j_req, seed).await?; let conf_id = r1.conf_id; let p2j_req = |ring| JoinReq { sender: self.local_node(), ring: ring as u64, uuid: state.uuid.clone(), conf_id, meta: self.cfg.meta.clone(), }; let mut joins = (r1.contact.into_iter()) .enumerate() .map(move |(ring, observer)| (p2j_req(ring), observer)) .map(|(req, observer)| self.join_phase2(req, observer)) .collect::<FuturesUnordered<_>>(); let mut e = None; while let Some(resp) = joins.next().await { match resp { Ok(resp) => return Ok(resp), Err(err) => e = Some(err), } } Err(e.unwrap_or(JoinError::NoObservers)) }
async fn join_phase2(&self, req: JoinReq, observer: Endpoint) -> Result<JoinResp, JoinError> { let observer = self.resolve_endpoint(&observer)?; let mut c = MembershipClient::connect(observer) .map_err(|e| JoinError::Phase2(e.into())) .await?; (c.join(req).map_ok(|r| r.into_inner())) .map_err(|e| JoinError::Phase2(e.into())) .await } }
async fn join_phase1(&self, req: PreJoinReq, via: &Endpoint) -> Result<PreJoinResp, JoinError> { let seed = self.resolve_endpoint(via)?; let mut c = MembershipClient::connect(seed) .map_err(|e| JoinError::Phase1(e.into())) .await?; (c.pre_join(req).map_ok(|r| r.into_inner())) .map_err(|e| JoinError::Phase1(e.into())) .await }
function_block-full_function
[ { "content": "#[derive(Copy, Clone, Debug, Error)]\n\nenum MemberResolutionError {\n\n #[error(\"invalid socketaddr: {}\", .0)]\n\n InvalidSocketAddr(#[from] SocketAddrError),\n\n\n\n #[error(\"missing metadata\")]\n\n MissingMetadata,\n\n}\n\n\n", "file_path": "src/cluster/mod.rs", "rank": ...
Rust
decadog_core/src/lib.rs
tommilligan/decadog
b8137b682ca7d9dbcbffb8f7efbb925b143b7417
#![deny(clippy::all)] use std::collections::hash_map::DefaultHasher; use std::fmt; use std::hash::Hasher; use chrono::{DateTime, FixedOffset}; mod core; pub mod error; pub mod github; pub mod secret; pub mod zenhub; pub use crate::core::{AssignedTo, Sprint}; pub use error::Error; use github::{ paginate::PaginatedSearch, Direction, Issue, IssueUpdate, Milestone, MilestoneUpdate, OrganisationMember, Repository, SearchIssues, SearchQueryBuilder, State, }; use zenhub::{Board, Pipeline, PipelinePosition, StartDate, Workspace}; pub struct Client<'a> { owner: &'a str, repo: &'a str, github: &'a github::Client, zenhub: &'a zenhub::Client, id: u64, } impl<'a> fmt::Debug for Client<'a> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "Decadog client {}", self.id) } } impl<'a> Client<'a> { pub fn new( owner: &'a str, repo: &'a str, github: &'a github::Client, zenhub: &'a zenhub::Client, ) -> Result<Client<'a>, Error> { let mut hasher = DefaultHasher::new(); hasher.write(owner.as_bytes()); hasher.write(repo.as_bytes()); hasher.write(&github.id().to_be_bytes()); hasher.write(&zenhub.id().to_be_bytes()); let id = hasher.finish(); Ok(Client { id, owner, repo, github, zenhub, }) } pub fn owner(&self) -> &str { self.owner } pub fn repo(&self) -> &str { self.repo } pub fn get_start_date( &self, repository: &Repository, milestone: &Milestone, ) -> Result<StartDate, Error> { self.zenhub.get_start_date(repository.id, milestone.number) } pub fn get_first_workspace(&self, repository: &Repository) -> Result<Workspace, Error> { self.zenhub.get_first_workspace(repository.id) } pub fn get_board( &self, repository: &Repository, workspace: &Workspace, ) -> Result<Board, Error> { self.zenhub.get_board(repository.id, &workspace.id) } pub fn get_zenhub_issue( &self, repository: &Repository, issue: &Issue, ) -> Result<zenhub::Issue, Error> { self.zenhub.get_issue(repository.id, issue.number) } pub fn set_estimate( &self, repository: &Repository, issue: &Issue, estimate: u32, 
) -> Result<(), Error> { self.zenhub .set_estimate(repository.id, issue.number, estimate) } pub fn get_sprint( &self, repository: &Repository, milestone: Milestone, ) -> Result<Sprint, Error> { let start_date = self.get_start_date(repository, &milestone)?; Ok(Sprint { milestone, start_date, }) } pub fn create_sprint( &self, repository: &Repository, sprint_number: &str, start_date: DateTime<FixedOffset>, due_on: DateTime<FixedOffset>, ) -> Result<Sprint, Error> { let mut milestone_spec = MilestoneUpdate::default(); milestone_spec.title = Some(format!("Sprint {}", sprint_number)); milestone_spec.due_on = Some(due_on); let milestone = self .github .create_milestone(self.owner, self.repo, &milestone_spec)?; let start_date = start_date.into(); let start_date = self.zenhub .set_start_date(repository.id, milestone.number, &start_date)?; Ok(Sprint { milestone, start_date, }) } pub fn move_issue_to_pipeline( &self, repository: &Repository, workspace: &Workspace, issue: &Issue, pipeline: &Pipeline, ) -> Result<(), Error> { let mut position = PipelinePosition::default(); position.pipeline_id = pipeline.id.clone(); self.zenhub .move_issue(repository.id, &workspace.id, issue.number, &position) } pub fn get_repository(&self) -> Result<Repository, Error> { self.github.get_repository(self.owner, self.repo) } pub fn get_issue(&self, issue_number: u32) -> Result<Issue, Error> { self.github.get_issue(self.owner, self.repo, issue_number) } pub fn get_milestones(&self) -> Result<Vec<Milestone>, Error> { self.github.get_milestones(self.owner, self.repo) } pub fn assign_issue_to_milestone( &self, issue: &Issue, milestone: Option<&Milestone>, ) -> Result<Issue, Error> { let mut update = IssueUpdate::default(); update.milestone = Some(milestone.map(|milestone| milestone.number)); self.github .patch_issue(&self.owner, &self.repo, issue.number, &update) } pub fn assign_member_to_issue( &self, member: &OrganisationMember, issue: &Issue, ) -> Result<Issue, Error> { let mut update = 
IssueUpdate::default(); update.assignees = Some(vec![member.login.clone()]); self.github .patch_issue(&self.owner, &self.repo, issue.number, &update) } pub fn search_issues( &self, query_builder: &mut SearchQueryBuilder, ) -> Result<PaginatedSearch<Issue>, Error> { let query = SearchIssues { q: query_builder .owner_repo(self.owner, self.repo) .issue() .build(), sort: Some("updated"), order: Some(Direction::Ascending), per_page: Some(100), }; self.github.search_issues(&query) } pub fn get_members(&self) -> Result<Vec<OrganisationMember>, Error> { self.github.get_members(self.owner) } pub fn update_milestone_title( &self, milestone: &Milestone, new_title: String, ) -> Result<Milestone, Error> { let mut update = MilestoneUpdate::default(); update.title = Some(new_title); self.github .patch_milestone(&self.owner, &self.repo, milestone.number, &update) } pub fn close_milestone(&self, milestone: &Milestone) -> Result<Milestone, Error> { let mut update = MilestoneUpdate::default(); update.state = Some(State::Closed); self.github .patch_milestone(&self.owner, &self.repo, milestone.number, &update) } } #[cfg(test)] mod tests { use chrono::{FixedOffset, NaiveDate, TimeZone}; use lazy_static::lazy_static; use mockito::mock; use pretty_assertions::assert_eq; use super::github::{tests::MOCK_GITHUB_CLIENT, State}; use super::zenhub::tests::MOCK_ZENHUB_CLIENT; use super::*; const OWNER: &str = "tommilligan"; const REPO: &str = "decadog"; lazy_static! 
{ pub static ref MOCK_CLIENT: Client<'static> = Client::new(OWNER, REPO, &MOCK_GITHUB_CLIENT, &MOCK_ZENHUB_CLIENT) .expect("Couldn't create mock client"); } #[test] fn test_get_issues_closed_after() { let body = r#"{ "incomplete_results": false, "items": [] }"#; let mock = mock("GET", "/search/issues?q=state%3Aclosed+closed%3A%3E%3D2011-04-22+repo%3Atommilligan%2Fdecadog+type%3Aissue&sort=updated&order=asc&per_page=100") .match_header("authorization", "token mock_token") .with_status(200) .with_body(body) .create(); let issues = MOCK_CLIENT .search_issues( &mut SearchQueryBuilder::new().closed_on_or_after( &FixedOffset::east(0) .from_utc_datetime(&NaiveDate::from_ymd(2011, 4, 22).and_hms(13, 33, 48)), ), ) .unwrap() .collect::<Result<Vec<Issue>, _>>() .unwrap(); mock.assert(); assert_eq!(issues, vec![]); } #[test] fn test_get_milestone_open_issues() { let body = r#"{ "incomplete_results": false, "items": [] }"#; let mock = mock("GET", "/search/issues?q=state%3Aopen+milestone%3A%22Sprint+2%22+repo%3Atommilligan%2Fdecadog+type%3Aissue&sort=updated&order=asc&per_page=100") .match_header("authorization", "token mock_token") .with_status(200) .with_body(body) .create(); let issues = MOCK_CLIENT .search_issues( SearchQueryBuilder::new() .state(&State::Open) .milestone("Sprint 2"), ) .unwrap() .collect::<Result<Vec<Issue>, _>>() .unwrap(); mock.assert(); assert_eq!(issues, vec![]); } }
#![deny(clippy::all)] use std::collections::hash_map::DefaultHasher; use std::fmt; use std::hash::Hasher; use chrono::{DateTime, FixedOffset}; mod core; pub mod error; pub mod github; pub mod secret; pub mod zenhub; pub use crate::core::{AssignedTo, Sprint}; pub use error::Error; use github::{ paginate::PaginatedSearch, Direction, Issue, IssueUpdate, Milestone, MilestoneUpdate, OrganisationMember, Repository, SearchIssues, SearchQueryBuilder, State, }; use zenhub::{Board, Pipeline, PipelinePosition, StartDate, Workspace}; pub struct Client<'a> { owner: &'a str, repo: &'a str, github: &'a github::Client, zenhub: &'a zenhub::Client, id: u64, } impl<'a> fmt::Debug for Client<'a> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "Decadog client {}", self.id) } } impl<'a> Client<'a> { pub fn new( owner: &'a str, repo: &'a str, github: &'a github::Client, zenhub: &'a zenhub::Client, ) -> Result<Client<'a>, Error> { let mut hasher = DefaultHasher::new(); hasher.write(owner.as_bytes()); hasher.write(repo.as_bytes()); hasher.write(&github.id().to_be_bytes()); hasher.write(&zenhub.id().to_be_bytes()); let id = hasher.finish(); Ok(Client { id, owner, repo, github, zenhub, }) } pub fn owner(&self) -> &str { self.owner } pub fn repo(&self) -> &str { self.repo } pub fn get_start_date( &self, repository: &Repository, milestone: &Milestone, ) -> Result<StartDate, Error> { self.zenhub.get_start_date(repository.id, milestone.number) } pub fn get_first_workspace(&self, repository: &Repository) -> Result<Workspace, Error> { self.zenhub.get_first_workspace(repository.id) } pub fn get_board( &self, repository: &Repository, workspace: &Workspace, ) -> Result<Board, Error> { self.zenhub.get_board(repository.id, &workspace.id) } pub fn get_zenhub_issue( &self, repository: &Repository, issue: &Issue, ) -> Result<zenhub::Issue, Error> { self.zenhub.get_issue(repository.id, issue.number) } pub fn set_estimate( &self, repository: &Repository, issue: &Issue, estimate: u32, 
) -> Result<(), Error> { self.zenhub .set_estimate(repository.id, issue.number, estimate) } pub fn get_sprint( &self, repository: &Repository, milestone: Milestone, ) -> Result<Sprint, Error> { let start_date = self.get_start_date(repository, &milestone)?; Ok(Sprint { milestone, start_date, }) } pub fn create_sprint( &self, repository: &Repository, sprint_number: &str, start_date: DateTime<FixedOffset>, due_on: DateTime<FixedOffset>, ) -> Result<Sprint, Error> { let mut milestone_spec = MilestoneUpdate::default(); milestone_spec.title = Some(format!("Sprint {}", sprint_number)); milestone_spec.due_on = Some(due_on); let milestone = self .github .create_milestone(self.owner, self.repo, &milestone_spec)?; let start_date = start_date.into(); let start_date = self.zenhub .set_start_date(repository.id, milestone.number, &start_date)?; Ok(Sprint { milestone, start_date, }) } pub fn move_issue_to_pipeline( &self, repository: &Repository, workspace: &Workspace, issue: &Issue, pipeline: &Pipeline, ) -> Result<(), Error> { let mut position = PipelinePosition::default(); position.pipeline_id = pipeline.id.clone(); self.zenhub .move_issue(repository.id, &workspace.id, issue.number, &position) } pub fn get_repository(&self) -> Result<Repository, Error> { self.github.get_repository(self.owner, self.repo) } pub fn get_issue(&self, issue_number: u32) -> Result<Issue, Error> { self.github.get_issue(self.owner, self.repo, issue_number) } pub fn get_milestones(&self) -> Result<Vec<Milestone>, Error> { self.github.get_milestones(self.owner, self.repo) } pub fn assign_issue_to_milestone( &self, issue: &Issue, milestone: Option<&Milestone>, ) -> Result<Issue, Error> { let mut update = IssueUpdate::default(); update.milestone = Some(milestone.map(|milestone| milestone.number)); self.github .patch_issue(&self.owner, &self.repo, issue.number, &update) } pub fn assign_member_to_issue( &self, member: &OrganisationMember, issue: &Issue, ) -> Result<Issue, Error> { let mut update = 
IssueUpdate::default(); update.assignees = Some(vec![member.login.clone()]); self.github .patch_issue(&self.owner, &self.repo, issue.number, &update) } pub fn search_issues( &self, query_builder: &mut SearchQueryBuilder, ) -> Result<PaginatedSearch<Issue>, Error> { let query = SearchIssues { q: query_builder .owner_repo(self.owner, self.repo) .issue() .build(), sort: Some("updated"), order: Some(Direction::Ascending), per_page: Some(100), }; self.github.search_issues(&query) } pub fn get_members(&self) -> Result<Vec<OrganisationMember>, Error> { self.github.get_members(self.owner) } pub fn update_mil
fault(); update.title = Some(new_title); self.github .patch_milestone(&self.owner, &self.repo, milestone.number, &update) } pub fn close_milestone(&self, milestone: &Milestone) -> Result<Milestone, Error> { let mut update = MilestoneUpdate::default(); update.state = Some(State::Closed); self.github .patch_milestone(&self.owner, &self.repo, milestone.number, &update) } } #[cfg(test)] mod tests { use chrono::{FixedOffset, NaiveDate, TimeZone}; use lazy_static::lazy_static; use mockito::mock; use pretty_assertions::assert_eq; use super::github::{tests::MOCK_GITHUB_CLIENT, State}; use super::zenhub::tests::MOCK_ZENHUB_CLIENT; use super::*; const OWNER: &str = "tommilligan"; const REPO: &str = "decadog"; lazy_static! { pub static ref MOCK_CLIENT: Client<'static> = Client::new(OWNER, REPO, &MOCK_GITHUB_CLIENT, &MOCK_ZENHUB_CLIENT) .expect("Couldn't create mock client"); } #[test] fn test_get_issues_closed_after() { let body = r#"{ "incomplete_results": false, "items": [] }"#; let mock = mock("GET", "/search/issues?q=state%3Aclosed+closed%3A%3E%3D2011-04-22+repo%3Atommilligan%2Fdecadog+type%3Aissue&sort=updated&order=asc&per_page=100") .match_header("authorization", "token mock_token") .with_status(200) .with_body(body) .create(); let issues = MOCK_CLIENT .search_issues( &mut SearchQueryBuilder::new().closed_on_or_after( &FixedOffset::east(0) .from_utc_datetime(&NaiveDate::from_ymd(2011, 4, 22).and_hms(13, 33, 48)), ), ) .unwrap() .collect::<Result<Vec<Issue>, _>>() .unwrap(); mock.assert(); assert_eq!(issues, vec![]); } #[test] fn test_get_milestone_open_issues() { let body = r#"{ "incomplete_results": false, "items": [] }"#; let mock = mock("GET", "/search/issues?q=state%3Aopen+milestone%3A%22Sprint+2%22+repo%3Atommilligan%2Fdecadog+type%3Aissue&sort=updated&order=asc&per_page=100") .match_header("authorization", "token mock_token") .with_status(200) .with_body(body) .create(); let issues = MOCK_CLIENT .search_issues( SearchQueryBuilder::new() .state(&State::Open) 
.milestone("Sprint 2"), ) .unwrap() .collect::<Result<Vec<Issue>, _>>() .unwrap(); mock.assert(); assert_eq!(issues, vec![]); } }
estone_title( &self, milestone: &Milestone, new_title: String, ) -> Result<Milestone, Error> { let mut update = MilestoneUpdate::de
function_block-random_span
[ { "content": "fn create_sprint(settings: &Settings) -> Result<(), Error> {\n\n let github = github::Client::new(&settings.github_url, &settings.github_token.value())?;\n\n let zenhub = zenhub::Client::new(\n\n settings\n\n .zenhub_url\n\n .as_ref()\n\n .ok_or(Error:...
Rust
kernel-rs/src/kernel.rs
coolofficials/rv6
2b93a86a7c9f332607670b545a219f40e5a216fe
use core::cell::UnsafeCell; use core::fmt::{self, Write}; use core::hint::spin_loop; use core::mem::MaybeUninit; use core::ops::Deref; use core::pin::Pin; use core::sync::atomic::{AtomicBool, Ordering}; use array_macro::array; use pin_project::pin_project; use crate::{ bio::Bcache, console::{consoleinit, Console, Printer}, file::{Devsw, FileTable}, fs::{FileSystem, Itable}, kalloc::Kmem, lock::{Sleepablelock, Spinlock}, page::Page, param::{NCPU, NDEV}, plic::{plicinit, plicinithart}, println, proc::{cpuid, scheduler, Cpu, Procs, ProcsBuilder}, trap::{trapinit, trapinithart}, uart::Uart, vm::KernelMemory, }; static mut KERNEL: KernelBuilder = KernelBuilder::zero(); #[inline] pub fn kernel_builder() -> &'static KernelBuilder { unsafe { &KERNEL } } #[inline] pub unsafe fn kernel() -> &'static Kernel { unsafe { &*(kernel_builder() as *const _ as *const _) } } #[inline] unsafe fn kernel_builder_unchecked_pin() -> Pin<&'static mut KernelBuilder> { unsafe { Pin::new_unchecked(&mut KERNEL) } } #[pin_project] pub struct KernelBuilder { panicked: AtomicBool, pub console: Sleepablelock<Console>, pub uart: Uart, pub printer: Spinlock<Printer>, kmem: Spinlock<Kmem>, memory: MaybeUninit<KernelMemory>, pub ticks: Sleepablelock<u32>, #[pin] pub procs: ProcsBuilder, cpus: [UnsafeCell<Cpu>; NCPU], #[pin] bcache: Bcache, pub devsw: [Devsw; NDEV], pub ftable: FileTable, pub itable: Itable, pub file_system: FileSystem, } #[repr(transparent)] pub struct Kernel { inner: KernelBuilder, } impl Kernel { pub fn procs(&self) -> &Procs { unsafe { self.inner.procs.as_procs_unchecked() } } } impl Deref for Kernel { type Target = KernelBuilder; fn deref(&self) -> &Self::Target { &self.inner } } impl KernelBuilder { const fn zero() -> Self { Self { panicked: AtomicBool::new(false), console: Sleepablelock::new("CONS", Console::new()), uart: Uart::new(), printer: Spinlock::new("PRINTLN", Printer::new()), kmem: Spinlock::new("KMEM", Kmem::new()), memory: MaybeUninit::uninit(), ticks: 
Sleepablelock::new("time", 0), procs: ProcsBuilder::zero(), cpus: array![_ => UnsafeCell::new(Cpu::new()); NCPU], bcache: unsafe { Bcache::zero() }, devsw: [Devsw { read: None, write: None, }; NDEV], ftable: FileTable::zero(), itable: Itable::zero(), file_system: FileSystem::zero(), } } fn panic(&self) { self.panicked.store(true, Ordering::Release); } pub fn is_panicked(&self) -> bool { self.panicked.load(Ordering::Acquire) } pub fn free(&self, mut page: Page) { page.write_bytes(1); self.kmem.lock().free(page); } pub fn alloc(&self) -> Option<Page> { let mut page = self.kmem.lock().alloc()?; page.write_bytes(5); Some(page) } pub fn printer_write_fmt(&self, args: fmt::Arguments<'_>) -> fmt::Result { if self.is_panicked() { unsafe { (*self.printer.get_mut_raw()).write_fmt(args) } } else { let mut lock = self.printer.lock(); lock.write_fmt(args) } } pub fn current_cpu(&self) -> *mut Cpu { let id: usize = cpuid(); self.cpus[id].get() } pub unsafe fn get_bcache(&self) -> &Bcache { &self.bcache } } #[macro_export] macro_rules! print { ($($arg:tt)*) => { $crate::kernel::kernel_builder().printer_write_fmt(format_args!($($arg)*)).unwrap(); }; } #[macro_export] macro_rules! println { () => ($crate::print!("\n")); ($($arg:tt)*) => ($crate::print!("{}\n", format_args!($($arg)*))); } #[cfg(not(test))] #[panic_handler] fn panic_handler(info: &core::panic::PanicInfo<'_>) -> ! { kernel_builder().panic(); println!("{}", info); crate::utils::spin_loop() } pub unsafe fn kernel_main() -> ! 
{ static STARTED: AtomicBool = AtomicBool::new(false); if cpuid() == 0 { Uart::init(); unsafe { consoleinit(kernel_builder_unchecked_pin().project().devsw) }; println!(); println!("rv6 kernel is booting"); println!(); unsafe { kernel_builder_unchecked_pin() .project() .kmem .get_mut() .init() }; let memory = KernelMemory::new().expect("PageTable::new failed"); unsafe { kernel_builder_unchecked_pin() .project() .memory .write(memory) .init_hart() }; let procs = unsafe { kernel_builder_unchecked_pin().project().procs.init() }; trapinit(); unsafe { trapinithart() }; unsafe { plicinit() }; unsafe { plicinithart() }; unsafe { kernel_builder_unchecked_pin() .project() .bcache .get_pin_mut() .init() }; unsafe { kernel_builder_unchecked_pin() .project() .file_system .log .disk .get_mut() .init() }; procs.user_proc_init(); STARTED.store(true, Ordering::Release); } else { while !STARTED.load(Ordering::Acquire) { spin_loop(); } println!("hart {} starting", cpuid()); unsafe { kernel_builder().memory.assume_init_ref().init_hart() }; unsafe { trapinithart() }; unsafe { plicinithart() }; } unsafe { scheduler() } }
use core::cell::UnsafeCell; use core::fmt::{self, Write}; use core::hint::spin_loop; use core::mem::MaybeUninit; use core::ops::Deref; use core::pin::Pin; use core::sync::atomic::{AtomicBool, Ordering}; use array_macro::array; use pin_project::pin_project; use crate::{ bio::Bcache, console::{consoleinit, Console, Printer}, file::{Devsw, FileTable}, fs::{FileSystem, Itable}, kalloc::Kmem, lock::{Sleepablelock, Spinlock}, page::Page, param::{NCPU, NDEV}, plic::{plicinit, plicinithart}, println, proc::{cpuid, scheduler, Cpu, Procs, ProcsBuilder}, trap::{trapinit, trapinithart}, uart::Uart, vm::KernelMemory, }; static mut KERNEL: KernelBuilder = KernelBuilder::zero(); #[inline] pub fn kernel_builder() -> &'static KernelBuilder { unsafe { &KERNEL } } #[inline] pub unsafe fn kernel() -> &'static Kernel { unsafe { &*(kernel_builder() as *const _ as *const _) } } #[inline] unsafe fn kernel_builder_unchecked_pin() -> Pin<&'static mut KernelBuilder> { unsafe { Pin::new_unchecked(&mut KERNEL) } } #[pin_project] pub struct KernelBuilder { panicked: AtomicBool, pub console: Sleepablelock<Console>, pub uart: Uart, pub printer: Spinlock<Printer>, kmem: Spinlock<Kmem>, memory: MaybeUninit<KernelMemory>, pub ticks: Sleepablelock<u32>, #[pin] pub procs: ProcsBuilder, cpus: [UnsafeCell<Cpu>; NCPU], #[pin] bcache: Bcache, pub devsw: [Devsw; NDEV], pub ftable: FileTable, pub itable: Itable, pub file_system: FileSystem, } #[repr(transparent)] pub struct Kernel { inner: KernelBuilder, } impl Kernel { pub fn procs(&self) -> &Procs { unsafe { self.inner.procs.as_procs_unchecked() } } } impl Deref for Kernel { type Target = KernelBuilder; fn deref(&self) -> &Self::Target { &self.inner } } impl KernelBuilder { const fn zero() -> Self { Self { panicked: AtomicBool::new(false), console: Sleepablelock::new("CONS", Console::new()), uart: Uart::new(), printer: Spinlock::new("PRINTLN", Printer::new()), kmem: Spinlock::new("KMEM", Kmem::new()), memory: MaybeUninit::uninit(), ticks: 
Sleepablelock::new("time", 0), procs: ProcsBuilder::zero(), cpus: array![_ => UnsafeCell::new(Cpu::new()); NCPU], bcache: unsafe { Bcache::zero() }, devsw: [Devsw { read: None, write: None, }; NDEV], ftable: FileTable::zero(), itable: Itable::zero(), file_system: FileSystem::zero(), } } fn panic(&self) { self.panicked.store(true, Ordering::Release); } pub fn is_panicked(&self) -> bool { self.panicked.load(Ordering::Acquire) } pub fn free(&self, mut page: Page) { page.write_bytes(1); self.kmem.lock().free(page); } pub fn alloc(&self) -> Option<Page> { let mut page = self.kmem.lock().alloc()?; page.write_bytes(5); Some(page) } pub fn printer_write_fmt(&self, args: fmt::Arguments<'_>) -> fmt::Result {
} pub fn current_cpu(&self) -> *mut Cpu { let id: usize = cpuid(); self.cpus[id].get() } pub unsafe fn get_bcache(&self) -> &Bcache { &self.bcache } } #[macro_export] macro_rules! print { ($($arg:tt)*) => { $crate::kernel::kernel_builder().printer_write_fmt(format_args!($($arg)*)).unwrap(); }; } #[macro_export] macro_rules! println { () => ($crate::print!("\n")); ($($arg:tt)*) => ($crate::print!("{}\n", format_args!($($arg)*))); } #[cfg(not(test))] #[panic_handler] fn panic_handler(info: &core::panic::PanicInfo<'_>) -> ! { kernel_builder().panic(); println!("{}", info); crate::utils::spin_loop() } pub unsafe fn kernel_main() -> ! { static STARTED: AtomicBool = AtomicBool::new(false); if cpuid() == 0 { Uart::init(); unsafe { consoleinit(kernel_builder_unchecked_pin().project().devsw) }; println!(); println!("rv6 kernel is booting"); println!(); unsafe { kernel_builder_unchecked_pin() .project() .kmem .get_mut() .init() }; let memory = KernelMemory::new().expect("PageTable::new failed"); unsafe { kernel_builder_unchecked_pin() .project() .memory .write(memory) .init_hart() }; let procs = unsafe { kernel_builder_unchecked_pin().project().procs.init() }; trapinit(); unsafe { trapinithart() }; unsafe { plicinit() }; unsafe { plicinithart() }; unsafe { kernel_builder_unchecked_pin() .project() .bcache .get_pin_mut() .init() }; unsafe { kernel_builder_unchecked_pin() .project() .file_system .log .disk .get_mut() .init() }; procs.user_proc_init(); STARTED.store(true, Ordering::Release); } else { while !STARTED.load(Ordering::Acquire) { spin_loop(); } println!("hart {} starting", cpuid()); unsafe { kernel_builder().memory.assume_init_ref().init_hart() }; unsafe { trapinithart() }; unsafe { plicinithart() }; } unsafe { scheduler() } }
if self.is_panicked() { unsafe { (*self.printer.get_mut_raw()).write_fmt(args) } } else { let mut lock = self.printer.lock(); lock.write_fmt(args) }
if_condition
[ { "content": "/// Trys to return a pinned mutable reference of the array's element at index `index`.\n\n/// Returns `Some(pin_mut)` if index is not out of bounds.\n\n/// Otherwise, returns `None`.\n\npub fn get_pin_mut<T, const N: usize>(arr: Pin<&mut [T; N]>, index: usize) -> Option<Pin<&mut T>> {\n\n if in...
Rust
logos/src/source.rs
mikolajpp/logos
7ebfe8aaae03df38db2be01b611882d1b576ba4b
use std::ops::Range; use std::fmt::Debug; pub trait Slice<'source>: Sized + PartialEq + Eq + Debug { fn as_bytes(&self) -> &'source [u8]; } impl<'source> Slice<'source> for &'source str { fn as_bytes(&self) -> &'source [u8] { (*self).as_bytes() } } impl<'source> Slice<'source> for &'source [u8] { fn as_bytes(&self) -> &'source [u8] { *self } } pub trait ByteArray<'source>: Sized { const SIZE: usize; unsafe fn from_ptr(ptr: *const u8) -> &'source Self { &*(ptr as *const Self) } } impl<'source> ByteArray<'source> for u8 { const SIZE: usize = 1; } impl<'source> ByteArray<'source> for [u8; 2] { const SIZE: usize = 2; } impl<'source> ByteArray<'source> for [u8; 3] { const SIZE: usize = 3; } impl<'source> ByteArray<'source> for [u8; 4] { const SIZE: usize = 4; } impl<'source> ByteArray<'source> for [u8; 5] { const SIZE: usize = 5; } impl<'source> ByteArray<'source> for [u8; 6] { const SIZE: usize = 6; } impl<'source> ByteArray<'source> for [u8; 7] { const SIZE: usize = 7; } impl<'source> ByteArray<'source> for [u8; 8] { const SIZE: usize = 8; } pub trait Source<'source> { type Slice: self::Slice<'source>; fn len(&self) -> usize; unsafe fn read(&self, offset: usize) -> u8; fn read_bytes<Array>(&self, offset: usize) -> Option<&'source Array> where Array: ByteArray<'source>; fn slice(&self, range: Range<usize>) -> Option<Self::Slice>; unsafe fn slice_unchecked(&self, range: Range<usize>) -> Self::Slice; } impl<'source> Source<'source> for &'source str { type Slice = &'source str; #[inline] fn len(&self) -> usize { (*self).len() } #[inline] unsafe fn read(&self, offset: usize) -> u8 { debug_assert!(offset <= self.len(), "Reading out founds!"); match self.as_bytes().get(offset) { Some(byte) => *byte, None => 0, } } #[inline] fn read_bytes<Array>(&self, offset: usize) -> Option<&'source Array> where Array: ByteArray<'source> { if offset + (Array::SIZE - 1) < (*self).len() { Some(unsafe { Array::from_ptr((*self).as_ptr().add(offset)) }) } else { None } } #[inline] fn 
slice(&self, range: Range<usize>) -> Option<&'source str> { self.get(range) } #[inline] unsafe fn slice_unchecked(&self, range: Range<usize>) -> &'source str { debug_assert!( range.start <= self.len() && range.end <= self.len(), "Reading out of bounds {:?} for {}!", range, self.len() ); self.get_unchecked(range) } } impl<'source> Source<'source> for &'source [u8] { type Slice = &'source [u8]; #[inline] fn len(&self) -> usize { (*self).len() } #[inline] unsafe fn read(&self, offset: usize) -> u8 { debug_assert!(offset <= self.len(), "Reading out founds!"); match self.as_bytes().get(offset) { Some(byte) => *byte, None => 0, } } #[inline] fn read_bytes<Array>(&self, offset: usize) -> Option<&'source Array> where Array: ByteArray<'source> { if offset + (Array::SIZE - 1) < (*self).len() { Some(unsafe { Array::from_ptr((*self).as_ptr().add(offset)) }) } else { None } } #[inline] fn slice(&self, range: Range<usize>) -> Option<&'source [u8]> { self.get(range) } #[inline] unsafe fn slice_unchecked(&self, range: Range<usize>) -> &'source [u8] { debug_assert!( range.start <= self.len() && range.end <= self.len(), "Reading out of bounds {:?} for {}!", range, self.len() ); self.get_unchecked(range) } } #[cfg(feature = "nul_term_source")] impl<'source> Source<'source> for toolshed::NulTermStr<'source> { type Slice = &'source str; #[inline] fn len(&self) -> usize { (**self).len() } #[inline] unsafe fn read(&self, offset: usize) -> u8 { debug_assert!(offset <= self.len(), "Reading out founds!"); self.byte_unchecked(offset) } #[inline] fn read_bytes<Array>(&self, offset: usize) -> Option<&'source Array> where Array: ByteArray<'source> { if offset + (Array::SIZE - 1) < (**self).len() { Some(unsafe { Array::from_ptr((**self).as_ptr().add(offset)) }) } else { None } } #[inline] fn slice(&self, range: Range<usize>) -> Option<&'source str> { if range.start <= self.len() && range.end <= self.len() { Some(unsafe { self.get_unchecked(range) }) } else { None } } #[inline] unsafe fn 
slice_unchecked(&self, range: Range<usize>) -> &'source str { debug_assert!( range.start <= self.len() && range.end <= self.len(), "Reading out of bounds {:?} for {}!", range, self.len() ); self.get_unchecked(range) } }
use std::ops::Range; use std::fmt::Debug; pub trait Slice<'source>: Sized + PartialEq + Eq + Debug { fn as_bytes(&self) -> &'source [u8]; } impl<'source> Slice<'source> for &'source str { fn as_bytes(&self) -> &'source [u8] { (*self).as_bytes() } } impl<'source> Slice<'source> for &'source [u8] { fn as_bytes(&self) -> &'source [u8] { *self } } pub trait ByteArray<'source>: Sized { const SIZE: usize; unsafe fn from_ptr(ptr: *const u8) -> &'source Self { &*(ptr as *const Self) } } impl<'source> ByteArray<'source> for u8 { const SI
1) < (*self).len() { Some(unsafe { Array::from_ptr((*self).as_ptr().add(offset)) }) } else { None } } #[inline] fn slice(&self, range: Range<usize>) -> Option<&'source [u8]> { self.get(range) } #[inline] unsafe fn slice_unchecked(&self, range: Range<usize>) -> &'source [u8] { debug_assert!( range.start <= self.len() && range.end <= self.len(), "Reading out of bounds {:?} for {}!", range, self.len() ); self.get_unchecked(range) } } #[cfg(feature = "nul_term_source")] impl<'source> Source<'source> for toolshed::NulTermStr<'source> { type Slice = &'source str; #[inline] fn len(&self) -> usize { (**self).len() } #[inline] unsafe fn read(&self, offset: usize) -> u8 { debug_assert!(offset <= self.len(), "Reading out founds!"); self.byte_unchecked(offset) } #[inline] fn read_bytes<Array>(&self, offset: usize) -> Option<&'source Array> where Array: ByteArray<'source> { if offset + (Array::SIZE - 1) < (**self).len() { Some(unsafe { Array::from_ptr((**self).as_ptr().add(offset)) }) } else { None } } #[inline] fn slice(&self, range: Range<usize>) -> Option<&'source str> { if range.start <= self.len() && range.end <= self.len() { Some(unsafe { self.get_unchecked(range) }) } else { None } } #[inline] unsafe fn slice_unchecked(&self, range: Range<usize>) -> &'source str { debug_assert!( range.start <= self.len() && range.end <= self.len(), "Reading out of bounds {:?} for {}!", range, self.len() ); self.get_unchecked(range) } }
ZE: usize = 1; } impl<'source> ByteArray<'source> for [u8; 2] { const SIZE: usize = 2; } impl<'source> ByteArray<'source> for [u8; 3] { const SIZE: usize = 3; } impl<'source> ByteArray<'source> for [u8; 4] { const SIZE: usize = 4; } impl<'source> ByteArray<'source> for [u8; 5] { const SIZE: usize = 5; } impl<'source> ByteArray<'source> for [u8; 6] { const SIZE: usize = 6; } impl<'source> ByteArray<'source> for [u8; 7] { const SIZE: usize = 7; } impl<'source> ByteArray<'source> for [u8; 8] { const SIZE: usize = 8; } pub trait Source<'source> { type Slice: self::Slice<'source>; fn len(&self) -> usize; unsafe fn read(&self, offset: usize) -> u8; fn read_bytes<Array>(&self, offset: usize) -> Option<&'source Array> where Array: ByteArray<'source>; fn slice(&self, range: Range<usize>) -> Option<Self::Slice>; unsafe fn slice_unchecked(&self, range: Range<usize>) -> Self::Slice; } impl<'source> Source<'source> for &'source str { type Slice = &'source str; #[inline] fn len(&self) -> usize { (*self).len() } #[inline] unsafe fn read(&self, offset: usize) -> u8 { debug_assert!(offset <= self.len(), "Reading out founds!"); match self.as_bytes().get(offset) { Some(byte) => *byte, None => 0, } } #[inline] fn read_bytes<Array>(&self, offset: usize) -> Option<&'source Array> where Array: ByteArray<'source> { if offset + (Array::SIZE - 1) < (*self).len() { Some(unsafe { Array::from_ptr((*self).as_ptr().add(offset)) }) } else { None } } #[inline] fn slice(&self, range: Range<usize>) -> Option<&'source str> { self.get(range) } #[inline] unsafe fn slice_unchecked(&self, range: Range<usize>) -> &'source str { debug_assert!( range.start <= self.len() && range.end <= self.len(), "Reading out of bounds {:?} for {}!", range, self.len() ); self.get_unchecked(range) } } impl<'source> Source<'source> for &'source [u8] { type Slice = &'source [u8]; #[inline] fn len(&self) -> usize { (*self).len() } #[inline] unsafe fn read(&self, offset: usize) -> u8 { debug_assert!(offset <= self.len(), 
"Reading out founds!"); match self.as_bytes().get(offset) { Some(byte) => *byte, None => 0, } } #[inline] fn read_bytes<Array>(&self, offset: usize) -> Option<&'source Array> where Array: ByteArray<'source> { if offset + (Array::SIZE -
random
[ { "content": "/// Trait implemented for an enum representing all tokens. You should never have\n\n/// to implement it manually, use the `#[derive(Logos)]` attribute on your enum.\n\npub trait Logos: Sized {\n\n /// Associated type `Extras` for the particular lexer. Those can handle things that\n\n /// are...
Rust
day-08/part-2/enizor.rs
lypnol/adventofcode-2021
8ba277d698e8c59ca9cd554acc135473f5964b87
use std::env::args; use std::time::Instant; fn main() { let now = Instant::now(); let output = run(&args().nth(1).expect("Please provide an input")); let elapsed = now.elapsed(); println!("_duration:{}", elapsed.as_secs_f64() * 1000.); println!("{}", output); } #[allow(unused)] fn dbg_deductions(deductions: &[u8]) { println!("Deductions:"); for d in deductions.iter() { println!(" {:8b}", d); } } fn solve_clue(clue: u8, bd: u8, cf: u8, n: u8) -> usize { let count_cf = (clue & cf).count_ones(); let count_bd = (clue & bd).count_ones(); match (n, count_bd, count_cf) { (2, _, _) => 1, (3, _, _) => 7, (4, _, _) => 4, (5, 1, 1) => 2, (5, 1, 2) => 3, (5, 2, 1) => 5, (6, 2, 2) => 9, (6, 1, 2) => 0, (6, 2, 1) => 6, (7, _, _) => 8, _ => panic!("{} {} {}", n, count_bd, count_cf), } } fn solve_line(input: &str) -> usize { let mut clues = input.split(' '); let mut cf = 0; let mut bd = 0; for _ in 0..10 { let c = clues.next().unwrap(); let mut clue_byte = 0; let n = c.len(); if n == 2 || n == 4 { for c in c.as_bytes() { clue_byte |= 1 << (c - b'a'); } if n == 2 { cf = clue_byte; bd &= !clue_byte; } else if n == 4 { bd = clue_byte & !cf; } } } let mut res = 0; for c in clues.skip(1) { res *= 10; let mut clue_byte = 0; let n = c.len() as u8; for c in c.as_bytes() { clue_byte |= 1 << (c - b'a'); } res += solve_clue(clue_byte, bd, cf, n) } res } fn run(input: &str) -> usize { let mut res = 0; for line in input.lines() { res += solve_line(line); } res } #[cfg(test)] mod tests { use super::*; #[test] fn test_solving() { assert_eq!(solve_clue(0b01011111, 0b00110000, 0b00000011, 6), 0); } #[test] fn line_test() { let input = "be cfbegad cbdgef fgaecd cgeb fdcge agebfd fecdb fabcd edb | fdgacbe cefdb cefbgd gcbe"; assert_eq!(solve_line(input), 8394); let input = "acedgfb cdfbe gcdfa fbcad dab cefabd cdfgeb eafb cagedb ab | cdfeb fcadb cdfeb cdbaf"; assert_eq!(solve_line(input), 5353); } #[test] fn run_test() { let input = "be cfbegad cbdgef fgaecd cgeb fdcge agebfd fecdb fabcd edb | 
fdgacbe cefdb cefbgd gcbe edbfga begcd cbg gc gcadebf fbgde acbgfd abcde gfcbed gfec | fcgedb cgb dgebacf gc fgaebd cg bdaec gdafb agbcfd gdcbef bgcad gfac gcb cdgabef | cg cg fdcagb cbg fbegcd cbd adcefb dageb afcb bc aefdc ecdab fgdeca fcdbega | efabcd cedba gadfec cb aecbfdg fbg gf bafeg dbefa fcge gcbea fcaegb dgceab fcbdga | gecf egdcabf bgf bfgea fgeab ca afcebg bdacfeg cfaedg gcfdb baec bfadeg bafgc acf | gebdcfa ecba ca fadegcb dbcfg fgd bdegcaf fgec aegbdf ecdfab fbedc dacgb gdcebf gf | cefg dcbef fcge gbcadfe bdfegc cbegaf gecbf dfcage bdacg ed bedf ced adcbefg gebcd | ed bcgafe cdgba cbgef egadfb cdbfeg cegd fecab cgb gbdefca cg fgcdab egfdb bfceg | gbdfcae bgc cg cgb gcafb gcf dcaebfg ecagb gf abcdeg gaef cafbge fdbac fegbdc | fgae cfgab fg bagce"; assert_eq!(run(input), 61229) } }
use std::env::args; use std::time::Instant; fn main() { let now = Instant::now(); let output = run(&args().nth(1).expect("Please provide an input")); let elapsed = now.elapsed(); println!("_duration:{}", elapsed.as_secs_f64() * 1000.); println!("{}", output); } #[allow(unused)] fn dbg_deductions(deductions: &[u8]) { println!("Deductions:"); for d in deductions.iter() { println!(" {:8b}", d); } } fn solve_clue(clue: u8, bd: u8, cf: u8, n: u8) -> usize { let count_cf = (clue & cf).count_ones(); let count_bd = (clue & bd).count_ones(); match (n, count_bd, count_cf) { (2, _, _) => 1, (3, _, _) => 7, (4, _, _) => 4, (5, 1, 1) => 2, (5, 1, 2) => 3, (5, 2, 1) => 5, (6, 2, 2) => 9, (6, 1, 2) => 0, (6, 2, 1) => 6, (7, _, _) => 8, _ => panic!("{} {} {}", n, count_bd, count_cf), } } fn solve_line(input: &str) -> usize { let mut clues = input.split(' '); let mut cf = 0; let mut bd = 0; for _ in 0..10 { let c = clues.next().unwrap(); let mut clue_byte = 0; let n = c.len(); if n == 2 || n == 4 { for c in c.as_bytes() { clue_byte |= 1 << (c - b'a'); } if n == 2 { cf = clue_byte; bd &= !clue_byte; } else if n == 4 { bd = clue_byte & !cf; } } } let mut res = 0; for c in clues.skip(1) { res *= 10; let mut clue_byte = 0; let n = c.len() as u8; for c in c.as_bytes() { clue_byte |= 1 << (c - b'a'); } res += solve_clue(clue_byte, bd, cf, n) } res } fn run(input: &str) -> usize { let mut res = 0; for line in input.lines() { res += solve_line(line); } res } #[cfg(test)] mod tests { use super::*; #[test] fn test_solving() { assert_eq!(solve_clue(0b01011111, 0b00110000, 0b00000011, 6), 0); } #[test]
#[test] fn run_test() { let input = "be cfbegad cbdgef fgaecd cgeb fdcge agebfd fecdb fabcd edb | fdgacbe cefdb cefbgd gcbe edbfga begcd cbg gc gcadebf fbgde acbgfd abcde gfcbed gfec | fcgedb cgb dgebacf gc fgaebd cg bdaec gdafb agbcfd gdcbef bgcad gfac gcb cdgabef | cg cg fdcagb cbg fbegcd cbd adcefb dageb afcb bc aefdc ecdab fgdeca fcdbega | efabcd cedba gadfec cb aecbfdg fbg gf bafeg dbefa fcge gcbea fcaegb dgceab fcbdga | gecf egdcabf bgf bfgea fgeab ca afcebg bdacfeg cfaedg gcfdb baec bfadeg bafgc acf | gebdcfa ecba ca fadegcb dbcfg fgd bdegcaf fgec aegbdf ecdfab fbedc dacgb gdcebf gf | cefg dcbef fcge gbcadfe bdfegc cbegaf gecbf dfcage bdacg ed bedf ced adcbefg gebcd | ed bcgafe cdgba cbgef egadfb cdbfeg cegd fecab cgb gbdefca cg fgcdab egfdb bfceg | gbdfcae bgc cg cgb gcafb gcf dcaebfg ecagb gf abcdeg gaef cafbge fdbac fegbdc | fgae cfgab fg bagce"; assert_eq!(run(input), 61229) } }
fn line_test() { let input = "be cfbegad cbdgef fgaecd cgeb fdcge agebfd fecdb fabcd edb | fdgacbe cefdb cefbgd gcbe"; assert_eq!(solve_line(input), 8394); let input = "acedgfb cdfbe gcdfa fbcad dab cefabd cdfgeb eafb cagedb ab | cdfeb fcadb cdfeb cdbaf"; assert_eq!(solve_line(input), 5353); }
function_block-full_function
[ { "content": "fn line_score(line: &str) -> usize {\n\n let mut stack = Vec::<u8>::new();\n\n\n\n for c in line.as_bytes() {\n\n match c {\n\n b'(' | b'{' | b'[' | b'<' => stack.push(*c),\n\n b')' => {\n\n if stack.pop().unwrap() != b'(' {\n\n ...
Rust
executor/src/executor.rs
alyssaverkade/neuromancer
7d1d03e32a3c46ab90f0c1fb0f2e8e7f341a89e7
use std::collections::BTreeMap; use std::collections::HashSet; use conhash::{ConsistentHash, Node}; use crossbeam_utils::sync::ShardedLock; use smol_str::SmolStr; use uuid::Uuid; use neuromancer::{read_lock, write_lock, DefaultHasher}; pub(crate) struct Executor { pub(crate) identifier_mappings: ShardedLock<BTreeMap<Librarian, Vec<Uuid>>>, pub(crate) librarians: ShardedLock<KnownLibrarians>, } pub(crate) trait ToLibrarian { fn to_librarian(&self) -> Librarian; } #[derive(Clone, Default, Debug, Eq, Hash, PartialEq)] pub(crate) struct Librarian { address: SmolStr, } pub(crate) struct KnownLibrarians { set: HashSet<Librarian, DefaultHasher>, ring: ConsistentHash<Librarian>, } impl Executor { pub(crate) fn new() -> Self { Self { identifier_mappings: ShardedLock::new(BTreeMap::default()), librarians: ShardedLock::new(KnownLibrarians::new()), } } pub(crate) fn rebalance(&self, deleted: Vec<Librarian>) { let unbalanced = self.unbalanced_identifiers(deleted); if unbalanced.is_empty() { return; } let mut identifier_mappings = write_lock!(self.identifier_mappings); let librarians = read_lock!(self.librarians); for uuid in unbalanced { if let Some(librarian) = librarians.mapping_for(uuid) { let entry = identifier_mappings .entry(librarian.clone()) .or_insert_with(|| Vec::new()); entry.push(uuid); } } } fn unbalanced_identifiers(&self, deleted: Vec<Librarian>) -> Vec<Uuid> { let mut result = Vec::new(); let mut deleted = deleted; let identifier_mappings = read_lock!(self.identifier_mappings); for librarian in deleted.drain(..) 
{ let uuids = identifier_mappings.get(&librarian); if uuids.is_none() { continue; } let uuids = uuids.unwrap(); result.extend_from_slice(uuids); } result } } impl KnownLibrarians { pub(crate) fn new() -> Self { let ring = ConsistentHash::new(); Self { set: HashSet::default(), ring, } } fn mapping_for(&self, id: Uuid) -> Option<&Librarian> { self.ring.get(id.as_bytes()) } #[must_use = "You must remap the values that have been assigned to the deleted servers"] pub(crate) fn modify_membership(&mut self, librarians: &[Librarian]) -> Vec<Librarian> { let mut removed = Vec::new(); let mut added = Vec::new(); let new_membership_list: HashSet<Librarian, _> = librarians.into_iter().cloned().collect(); if new_membership_list.is_superset(&self.set) { for member in new_membership_list.difference(&self.set) { added.push(member.clone()); } } else { for member in new_membership_list.symmetric_difference(&self.set) { if self.set.contains(member) { removed.push(member.clone()); } else { added.push(member.clone()); } } } for deleted in &removed { self.ring.remove(deleted); self.set.remove(deleted); } for new in added { self.ring.add(&new, 1); self.set.insert(new); } removed } } impl Librarian { pub fn new(s: impl AsRef<str>) -> Self { let address = SmolStr::new(s); Self { address } } } impl Ord for Librarian { fn cmp(&self, other: &Self) -> std::cmp::Ordering { self.address.cmp(&other.address) } } impl PartialOrd for Librarian { fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { Some(self.cmp(other)) } } impl Node for Librarian { fn name(&self) -> String { self.address.to_string() } } impl<T> ToLibrarian for T where T: AsRef<str>, { fn to_librarian(&self) -> Librarian { Librarian::new(self) } } #[cfg(test)] mod tests { use super::*; #[test] fn superset_modify_membership() { let input: Vec<Librarian> = ["foo", "bar"] .iter() .map(<_ as ToLibrarian>::to_librarian) .collect(); let mut known = KnownLibrarians::new(); let removed = known.modify_membership(&input); 
assert_eq!(removed, Vec::new()); assert_eq!(known.set, input.into_iter().collect()); assert_eq!(known.ring.len(), 2); } #[test] fn added_and_deleted_modify_membership() { let baseline: Vec<Librarian> = ["foo", "bar"] .iter() .map(<_ as ToLibrarian>::to_librarian) .collect(); let request: Vec<Librarian> = ["foo", "baz"] .iter() .map(<_ as ToLibrarian>::to_librarian) .collect(); let mut known = KnownLibrarians::new(); let first_removed = known.modify_membership(&baseline); let removed = known.modify_membership(&request); let removed_predicate = vec![Librarian::new("bar")]; assert_eq!(first_removed, Vec::new()); assert_eq!(removed_predicate, removed); assert_eq!(known.set, request.into_iter().collect()); assert_eq!(known.ring.len(), 2); } }
use std::collections::BTreeMap; use std::collections::HashSet; use conhash::{ConsistentHash, Node}; use crossbeam_utils::sync::ShardedLock; use smol_str::SmolStr; use uuid::Uuid; use neuromancer::{read_lock, write_lock, DefaultHasher}; pub(crate) struct Executor { pub(crate) identifier_mappings: ShardedLock<BTreeMap<Librarian, Vec<Uuid>>>, pub(crate) librarians: ShardedLock<KnownLibrarians>, } pub(crate) trait ToLibrarian { fn to_librarian(&self) -> Librarian; } #[derive(Clone, Default, Debug, Eq, Hash, PartialEq)] pub(crate) struct Librarian { address: SmolStr, } pub(crate) struct KnownLibrarians { set: HashSet<Librarian, DefaultHasher>, ring: ConsistentHash<Librarian>, } impl Executor { pub(crate) fn new() -> Self { Self { identifier_mappings: ShardedLock::new(BTreeMap::default()), librarians: ShardedLock::new(KnownLibrarians::new()), } } pub(crate) fn rebalance(&self, deleted: Vec<Librarian>) { let unbalanced = self.unbalanced_identifiers(deleted); if unbalanced.is_empty() { return; } let mut identifier_mappings = write_lock!(self.identifier_mappings); let librarians = read_lock!(self.librarians); for uuid in unbalanced { if let Some(librarian) = librarians.mapping_for(uuid) { let entry = identifier_mappings .entry(librarian.clone()) .or_insert_with(|| Vec::new()); entry.push(uuid); } } } fn unbalanced_identifiers(&self, deleted: Vec<Librarian>) -> Vec<Uuid> { let mut result = Vec::new(); let mut deleted = deleted; let identifier_mappings = read_lock!(self.identifier_mappings); for librarian in deleted.drain(..) 
{ let uuids = identifier_mappings.get(&librarian); if uuids.is_none() { continue; } let uuids = uuids.unwrap(); result.extend_from_slice(uuids); } result } } impl KnownLibrarians { pub(crate) fn new() -> Self { let ring = ConsistentHash::new(); Self { set: HashSet::default(), ring, } } fn mapping_for(&self, id: Uuid) -> Option<&Librarian> { self.ring.get(id.as_bytes()) } #[must_use = "You must remap the values that have been assigned to the deleted servers"] pub(crate) fn modify_membership(&mut self, librarians: &[Librarian]) -> Vec<Librarian> { let mut removed = Vec::new(); let mut added = Vec::new(); let new_membership_list: HashSet<Librarian, _> = librarians.into_iter().cloned().collect(); if new_membership_list.is_superset(&self.set) { for member in new_membership_list.difference(&self.set) { added.push(member.clone()); } } else { for member in new_membership_list.symmetric_difference(&self.set) { if self.set.contains(member) { removed.push(member.clone()); } else { added.push(member.clone()); } } } for deleted in &removed { self.ring.remove(deleted); self.set.remove(deleted); } for new in added { self.ring.add(&new, 1); self.set.insert(new); } removed } } impl Librarian { pub fn new(s: impl AsRef<str>) -> Self { let address = SmolStr::new(s); Self { address } } } impl Ord for Librarian { fn cmp(&self, other: &Self) -> std::cmp::Ordering { self.address.cmp(&other.address) } } impl PartialOrd for Librarian { fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { Some(self.cmp(other)) } } impl Node for Librarian { fn name(&self) -> String { self.address.to_string() } } impl<T> ToLibrarian for T where T: AsRef<str>, { fn to_librarian(&self) -> Librarian { Librarian::new(self) } } #[cfg(test)] mod tests { use super::*; #[test] fn superset_modify_membership() { let input: Vec<Librarian> = ["foo", "bar"] .iter() .map(<_ as ToLibrarian>::to_librarian) .collect(); let mut known = KnownLibrarians::new(); let removed = known.modify_membership(&input); 
assert_eq!(removed, Vec::new()); assert_eq!(known.set, input.into_iter().collect()); assert_eq!(known.ring.len(), 2); } #[test] fn added_and_deleted_modify_membership() { let baseline: Vec<Librarian> = ["foo", "bar"] .iter() .map(<_ as ToLibrarian>::to_librarian) .collect(); let request: Vec<Librarian> = ["foo", "baz"] .iter() .map(<_ as ToLibrarian>::to_librarian) .collect(); let mut known = KnownLibrarians::ne
}
w(); let first_removed = known.modify_membership(&baseline); let removed = known.modify_membership(&request); let removed_predicate = vec![Librarian::new("bar")]; assert_eq!(first_removed, Vec::new()); assert_eq!(removed_predicate, removed); assert_eq!(known.set, request.into_iter().collect()); assert_eq!(known.ring.len(), 2); }
function_block-function_prefixed
[]
Rust
src/node.rs
h4sh3d/monero-swap-lib
882159ca97dae811351f2bbb45e659af326af36d
use crate::{Protocol, Phase, Btc, Xmr}; use crate::types::{common, xmr, btc, RelativeLocktime}; use rand::rngs::OsRng; use bitcoin_hashes::hex::FromHex; use wasm_bindgen::prelude::*; use wasm_bindgen::JsValue; #[wasm_bindgen] pub struct Seller { parameters: Option<xmr::SetupParams>, setup: Option<xmr::Setup>, setup_is_ready: bool, } #[wasm_bindgen] impl Seller { pub fn new() -> Seller { Seller { parameters: None, setup: None, setup_is_ready: false, } } pub fn generate_params(&mut self, t0: u16, t1: u16) -> Result<(), JsValue> { let mut rng = OsRng::new().expect("OsRng"); let params = common::Params::new( RelativeLocktime::Blocks(t0), RelativeLocktime::Blocks(t1), ); self.parameters = Some(Xmr::setup(params.clone(), &mut rng)?); Ok(()) } pub fn export_setup(&mut self) -> Result<xmr::ExportedSetupParams, JsValue> { match &self.parameters { Some(params) => Ok(xmr::ExportedSetupParams::from(params)), None => Err("Parameters is missing".into()), } } pub fn verify_setup(&mut self, buyer_params: &btc::ExportedSetupParams) -> Result<(), JsValue> { match &self.parameters { Some(params) => { let xmr_setup = Xmr::verify_setup(params, buyer_params)?; self.setup = Some(xmr_setup); self.setup_is_ready = true; Ok(()) }, None => Err("Parameters is missing".into()), } } pub fn is_setup_ready(&self) -> bool { self.setup_is_ready } } #[wasm_bindgen] pub struct Buyer { parameters: Option<btc::SetupParams>, setup: Option<btc::Setup>, setup_is_ready: bool, } #[wasm_bindgen] impl Buyer { pub fn new() -> Buyer { Buyer { parameters: None, setup: None, setup_is_ready: false, } } pub fn generate_params(&mut self, t0: u16, t1: u16) -> Result<(), JsValue> { let mut rng = OsRng::new().expect("OsRng"); let params = common::Params::new( RelativeLocktime::Blocks(t0), RelativeLocktime::Blocks(t1), ); self.parameters = Some(Btc::setup(params.clone(), &mut rng)?); Ok(()) } pub fn export_setup(&mut self) -> Result<btc::ExportedSetupParams, JsValue> { match &self.parameters { Some(params) => 
Ok(btc::ExportedSetupParams::from(params)), None => Err("Parameters is missing".into()), } } pub fn verify_setup(&mut self, seller_params: &xmr::ExportedSetupParams) -> Result<(), JsValue> { match &self.parameters { Some(params) => { let btc_setup = Btc::verify_setup(params, seller_params)?; self.setup = Some(btc_setup); self.setup_is_ready = true; Ok(()) }, None => Err("Parameters is missing".into()), } } pub fn is_setup_ready(&self) -> bool { self.setup_is_ready } pub fn create_transactions(&mut self, txid: &str, vout: u32, amount: u32) -> Result<String, JsValue> { match &self.setup { Some(setup) => { let txid = bitcoin_hashes::sha256d::Hash::from_hex(txid) .map_err(|e| format!("{:?}", e))?; let init_txs = Btc::execute(setup, &btc::CreateTransactions { utxo: btc::Utxo { txid, vout, amount: amount as u64, }, })?; Ok(init_txs.btx_1) }, None => Err("Setup is missing".into()), } } }
use crate::{Protocol, Phase, Btc, Xmr}; use crate::types::{common, xmr, btc, RelativeLocktime}; use rand::rngs::OsRng; use bitcoin_hashes::hex::FromHex; use wasm_bindgen::prelude::*; use wasm_bindgen::JsValue; #[wasm_bindgen] pub struct Seller { parameters: Option<xmr::SetupParams>, setup: Option<xmr::Setup>, setup_is_ready: bool, } #[wasm_bindgen] impl Seller { pub fn new() -> Seller { Seller { parameters: None, setup: None, setup_is_ready: false, } } pub fn generate_params(&mut self, t0: u16, t1: u16) -> Result<(), JsValue> { let mut rng = OsRng::new().expect("OsRng"); let params = common::Params::new( RelativeLocktime::Blocks(t0), RelativeLocktime::Blocks(t1), ); self.parameters = Some(Xmr::setup(params.clone(), &mut rng)?); Ok(()) } pub fn export_setup(&mut self) -> Result<xmr::ExportedSetupParams, JsValue> { match &self.parameters { Some(params) => Ok(xmr::ExportedSetupParams::from(params)), None => Err("Parameters is missing".into()), } } pub fn verify_setup(&mut self, buyer_params: &btc::ExportedSetupParams) -> Result<(), JsValue> { match &self.parameters { Some(params) => { let xmr_setup = Xmr::verify_setup(params, buyer_params)?; self.setup = Some(xmr_setup); self.setup_is_ready = true; Ok(()) }, None => Err("Parameters is missing".into()), } } pub fn is_setup_ready(&self) -> bool { self.setup_is_ready } } #[wasm_bindgen] pub struct Buyer { parameters: Option<btc::SetupParams>, setup: Option<btc::Setup>, setup_is_ready: bool, } #[wasm_bindgen] impl Buyer { pub fn new() -> Buyer { Buyer { parameters: None, setup: None, setup_is_ready: false, } }
pub fn export_setup(&mut self) -> Result<btc::ExportedSetupParams, JsValue> { match &self.parameters { Some(params) => Ok(btc::ExportedSetupParams::from(params)), None => Err("Parameters is missing".into()), } } pub fn verify_setup(&mut self, seller_params: &xmr::ExportedSetupParams) -> Result<(), JsValue> { match &self.parameters { Some(params) => { let btc_setup = Btc::verify_setup(params, seller_params)?; self.setup = Some(btc_setup); self.setup_is_ready = true; Ok(()) }, None => Err("Parameters is missing".into()), } } pub fn is_setup_ready(&self) -> bool { self.setup_is_ready } pub fn create_transactions(&mut self, txid: &str, vout: u32, amount: u32) -> Result<String, JsValue> { match &self.setup { Some(setup) => { let txid = bitcoin_hashes::sha256d::Hash::from_hex(txid) .map_err(|e| format!("{:?}", e))?; let init_txs = Btc::execute(setup, &btc::CreateTransactions { utxo: btc::Utxo { txid, vout, amount: amount as u64, }, })?; Ok(init_txs.btx_1) }, None => Err("Setup is missing".into()), } } }
pub fn generate_params(&mut self, t0: u16, t1: u16) -> Result<(), JsValue> { let mut rng = OsRng::new().expect("OsRng"); let params = common::Params::new( RelativeLocktime::Blocks(t0), RelativeLocktime::Blocks(t1), ); self.parameters = Some(Btc::setup(params.clone(), &mut rng)?); Ok(()) }
function_block-full_function
[ { "content": "fn setup() -> (xmr::Setup, btc::Setup, Client, String) {\n\n let mut rng = OsRng::new().expect(\"OsRng\");\n\n let params = common::Params::new(\n\n RelativeLocktime::Blocks(T_0),\n\n RelativeLocktime::Blocks(T_1),\n\n );\n\n let xmr_params = Xmr::setup(params.clone(), &m...
Rust
src/parser/parser.rs
origis-lang/origis-lang
dc9e3dc93901e2791587a222ca5127a958a5e166
use std::cell::UnsafeCell; use crate::parser::lexer::Lexer; use crate::parser::rollbackable::{rollbackable, Rollbackable}; use crate::parser::token::{Token, TokenStream}; pub struct Parser<'s> { pub token_stream: UnsafeCell<Rollbackable<TokenStream<'s>>>, pub current: UnsafeCell<Token<'s>>, source: &'s str, } impl<'s> Parser<'s> { pub fn from_source(src: &'s str) -> Self { Parser { token_stream: UnsafeCell::new(rollbackable( TokenStream { lexer: Lexer::from_source(src), }, )), current: UnsafeCell::new(Token::eof((0..0).into())), source: src, } } #[inline] #[allow(clippy::mut_from_ref)] fn fuck(&self) -> &mut Rollbackable<TokenStream<'s>> { unsafe { &mut *self.token_stream.get() } } pub fn next_token(&self) -> Token<'s> { unsafe { *self.fuck().next().unwrap_unchecked() } } pub fn peek_token(&self) -> Token<'s> { unsafe { *self.fuck().peek().unwrap_unchecked() } } pub fn last_token(&self) -> Token<'s> { unsafe { *self.fuck().last().unwrap_unchecked() } } pub fn current_point(&self) -> usize { self.fuck().current() } pub fn back_to(&self, point: usize) { self.fuck().to(point).unwrap() } pub fn back(&self) { self.fuck().back().unwrap() } } pub macro expect_token($parser:expr; { $($(@ $a:ident)? $token_inner:pat_param => $e:expr),* $(,)? }) { match $parser.next_token() { $($($a @)? crate::parser::token::Token { inner: $token_inner, .. } => $e),*, token => Err(crate::parser::parse::error::Error::expected_but_found(&[ $(stringify!($token_inner)),* ], token)) }.map_err(|x| { $parser.back(); x }) } pub macro expect_one_token($parser: expr, $token_inner: pat_param) { match $parser.peek_token() { token @ crate::parser::token::Token { inner: $token_inner, .. 
} => Ok(token), token => Err( crate::parser::parse::error::Error::expected_but_found( &[stringify!($token_inner)], token, ), ), } .map(|x| { $parser.next_token(); x }) } #[cfg(test)] mod tests { use crate::parser::parse::decl::Decl; use std::assert_matches::assert_matches; use crate::parser::parse::expr::{Expr, LiteralExpr}; use crate::parser::parse::ident::{ MaybeTypedIdent, TypedIdent, UntypedIdent, }; use crate::parser::parse::stmt::Stmt; use crate::parser::parser::Parser; use crate::parser::token::TokenInner::Ident; use crate::parser::token::{Token, TokenInner}; #[test] fn fn_decl() { let parser = Parser::from_source( "pub fn sum(a: int, b: int,) int { 123 }", ); assert!(parser.parse_module().is_ok()); } #[test] fn struct_decl() { let parser = Parser::from_source("struct Test { x: int b: float }"); assert!(parser.parse_module().is_ok()); } #[test] fn use_decl() { let parser = Parser::from_source("use a::b::c;"); assert_matches!(parser.parse_decl(), Ok(Decl::Import(_))); } #[test] fn assign_stmt() { let parser = Parser::from_source("x: int = 1;"); assert_matches!( parser.parse_stmt(), Ok(Stmt::Assign( MaybeTypedIdent::Typed(TypedIdent { tok: Token { inner: Ident("x"), .. }, .. }), Expr::Literal(LiteralExpr::Integer(1,),) )) ); } #[test] fn expr_ident() { let parser = Parser::from_source("id"); assert_matches!( parser.parse_expr(), Ok(Expr::Ident(UntypedIdent(Token { inner: TokenInner::Ident("id"), .. }))) ) } #[test] fn expr_literal() { let parser = Parser::from_source(r#""abc""#); assert_matches!( parser.parse_expr(), Ok(Expr::Literal(LiteralExpr::String("abc"))) ) } #[test] fn expr_call_fn() { let parser = Parser::from_source(r#"print("abc",)"#); assert_matches!( parser.parse_expr(), Ok(Expr::CallFunc { name: UntypedIdent(Token { inner: TokenInner::Ident("print"), .. 
}), args: _, }) ) } #[test] fn op_prec() { let parser = Parser::from_source("1 + 2 * (3 - 4) / 5"); assert!(parser.parse_expr().is_ok()); } #[test] fn example_fib() { let parser = Parser::from_source(include_str!( "../../examples/fib.gs" )); assert!(parser.parse_module().is_ok()); } }
use std::cell::UnsafeCell; use crate::parser::lexer::Lexer; use crate::parser::rollbackable::{rollbackable, Rollbackable}; use crate::parser::token::{Token, TokenStream}; pub struct Parser<'s> { pub token_stream: UnsafeCell<Rollbackable<TokenStream<'s>>>, pub current: UnsafeCell<Token<'s>>, source: &'s str, } impl<'s> Parser<'s> { pub fn from_source(src: &'s str) -> Self { Parser { token_stream: UnsafeCell::new(rollbackable( TokenStream { lexer: Lexer::from_source(src), }, )), current: UnsafeCell::new(Token::eof((0..0).into())), source: src, } } #[inline] #[allow(clippy::mut_from_ref)] fn fuck(&self) -> &mut Rollbackable<TokenStream<'s>> { unsafe { &mut *self.token_stream.get() } } pub fn next_token(&self) -> Token<'s> { unsafe { *self.fuck().next().unwrap_unchecked() } } pub fn peek_token(&self) -> Token<'s> { unsafe { *self.fuck().peek().unwrap_unchecked() } } pub fn last_token(&self) -> Token<'s> { unsafe { *self.fuck().last().unwrap_unchecked() } } pub fn current_point(&self) -> usize { self.fuck().current() } pub fn back_to(&self, point: usize) { self.fuck().to(point).unwrap() } pub fn back(&self) { self.fuck().back().unwrap() } } pub macro expect_token($parser:expr; { $($(@ $a:ident)? $token_inner:pat_param => $e:expr),* $(,)? }) { match $parser.next_token() { $($($a @)? crate::parser::token::Token { inner: $token_inner, .. } => $e),*, token => Err(crate::parser::parse::error::Error::expected_but_found(&[ $(stringify!($token_inner)),* ], token)) }.map_err(|x| { $parser.back(); x }) } pub macro expect_one_token($parser: expr, $token_inner: pat_param) {
.map(|x| { $parser.next_token(); x }) } #[cfg(test)] mod tests { use crate::parser::parse::decl::Decl; use std::assert_matches::assert_matches; use crate::parser::parse::expr::{Expr, LiteralExpr}; use crate::parser::parse::ident::{ MaybeTypedIdent, TypedIdent, UntypedIdent, }; use crate::parser::parse::stmt::Stmt; use crate::parser::parser::Parser; use crate::parser::token::TokenInner::Ident; use crate::parser::token::{Token, TokenInner}; #[test] fn fn_decl() { let parser = Parser::from_source( "pub fn sum(a: int, b: int,) int { 123 }", ); assert!(parser.parse_module().is_ok()); } #[test] fn struct_decl() { let parser = Parser::from_source("struct Test { x: int b: float }"); assert!(parser.parse_module().is_ok()); } #[test] fn use_decl() { let parser = Parser::from_source("use a::b::c;"); assert_matches!(parser.parse_decl(), Ok(Decl::Import(_))); } #[test] fn assign_stmt() { let parser = Parser::from_source("x: int = 1;"); assert_matches!( parser.parse_stmt(), Ok(Stmt::Assign( MaybeTypedIdent::Typed(TypedIdent { tok: Token { inner: Ident("x"), .. }, .. }), Expr::Literal(LiteralExpr::Integer(1,),) )) ); } #[test] fn expr_ident() { let parser = Parser::from_source("id"); assert_matches!( parser.parse_expr(), Ok(Expr::Ident(UntypedIdent(Token { inner: TokenInner::Ident("id"), .. }))) ) } #[test] fn expr_literal() { let parser = Parser::from_source(r#""abc""#); assert_matches!( parser.parse_expr(), Ok(Expr::Literal(LiteralExpr::String("abc"))) ) } #[test] fn expr_call_fn() { let parser = Parser::from_source(r#"print("abc",)"#); assert_matches!( parser.parse_expr(), Ok(Expr::CallFunc { name: UntypedIdent(Token { inner: TokenInner::Ident("print"), .. }), args: _, }) ) } #[test] fn op_prec() { let parser = Parser::from_source("1 + 2 * (3 - 4) / 5"); assert!(parser.parse_expr().is_ok()); } #[test] fn example_fib() { let parser = Parser::from_source(include_str!( "../../examples/fib.gs" )); assert!(parser.parse_module().is_ok()); } }
match $parser.peek_token() { token @ crate::parser::token::Token { inner: $token_inner, .. } => Ok(token), token => Err( crate::parser::parse::error::Error::expected_but_found( &[stringify!($token_inner)], token, ), ), }
if_condition
[]
Rust
src/software/control/src/display/screen.rs
ejuliot/makair
a024a4cec0b7e3ff4faf177410e1c2f1d696149c
use conrod_core::color::{self, Color}; use telemetry::structures::MachineStateSnapshot; use crate::config::environment::DISPLAY_WIDGET_SPACING_FROM_BOTTOM; use super::fonts::Fonts; use super::widget::{ BackgroundWidgetConfig, BrandingWidgetConfig, ControlWidget, ControlWidgetType, ErrorWidgetConfig, GraphWidgetConfig, NoDataWidgetConfig, StopWidgetConfig, TelemetryWidgetConfig, }; widget_ids!(pub struct Ids { background, branding, alarms, pressure_graph, cycles_parent, cycles_title, cycles_value, cycles_unit, peak_parent, peak_title, peak_value, peak_unit, plateau_parent, plateau_title, plateau_value, plateau_unit, peep_parent, peep_title, peep_value, peep_unit, ratio_parent, ratio_title, ratio_value, ratio_unit, tidal_parent, tidal_title, tidal_value, tidal_unit, no_data, stopped, error }); pub struct Screen<'a> { ids: &'a Ids, machine_snapshot: Option<&'a MachineStateSnapshot>, widgets: ControlWidget<'a>, } pub struct ScreenDataBranding { pub image_id: conrod_core::image::Id, pub width: f64, pub height: f64, } pub struct ScreenDataGraph { pub image_id: conrod_core::image::Id, pub width: f64, pub height: f64, } impl<'a> Screen<'a> { pub fn new( ui: conrod_core::UiCell<'a>, ids: &'a Ids, fonts: &'a Fonts, machine_snapshot: Option<&'a MachineStateSnapshot>, ) -> Screen<'a> { Screen { ids, machine_snapshot, widgets: ControlWidget::new(ui, fonts), } } pub fn render_with_data( &mut self, branding_data: ScreenDataBranding, graph_data: ScreenDataGraph, ) { self.render_background(); self.render_branding( "?.?.?".to_string(), "?.?.?".to_string(), branding_data.image_id, branding_data.width, branding_data.height, ); self.render_graph(graph_data.image_id, graph_data.width, graph_data.height); self.render_telemetry(); } pub fn render_background(&mut self) { let config = BackgroundWidgetConfig::new(color::BLACK, self.ids.background); self.widgets.render(ControlWidgetType::Background(config)); } pub fn render_branding( &mut self, version_firmware: String, version_control: 
String, image_id: conrod_core::image::Id, width: f64, height: f64, ) { let config = BrandingWidgetConfig::new( version_firmware, version_control, width, height, image_id, self.ids.branding, ); self.widgets.render(ControlWidgetType::Branding(config)); } pub fn render_graph(&mut self, image_id: conrod_core::image::Id, width: f64, height: f64) { let config = GraphWidgetConfig::new(width, height, image_id, self.ids.pressure_graph); self.widgets.render(ControlWidgetType::Graph(config)); } pub fn render_stop(&mut self) { let config = StopWidgetConfig::new(self.ids.stopped); self.widgets.render(ControlWidgetType::Stop(config)); } pub fn render_no_data(&mut self) { let config = NoDataWidgetConfig::new(self.ids.no_data); self.widgets.render(ControlWidgetType::NoData(config)); } pub fn render_error(&mut self, error: String) { let config = ErrorWidgetConfig::new(error, self.ids.error); self.render_background(); self.widgets.render(ControlWidgetType::Error(config)); } pub fn render_telemetry(&mut self) { let mut last_widget_position = 0.0; let machine_snapshot = self.machine_snapshot.unwrap(); let peak_config = TelemetryWidgetConfig { title: "P(peak)", value: format!( "{} ← ({})", (machine_snapshot.previous_peak_pressure as f64 / 10.0).round(), machine_snapshot.peak_command ), unit: "cmH20", ids: ( self.ids.peak_parent, self.ids.peak_title, self.ids.peak_value, self.ids.peak_unit, ), x_position: last_widget_position, y_position: DISPLAY_WIDGET_SPACING_FROM_BOTTOM, background_color: Color::Rgba(39.0 / 255.0, 66.0 / 255.0, 100.0 / 255.0, 1.0), }; last_widget_position = self .widgets .render(ControlWidgetType::Telemetry(peak_config)); let plateau_config = TelemetryWidgetConfig { title: "P(plateau)", value: format!( "{} ← ({})", (machine_snapshot.previous_plateau_pressure as f64 / 10.0).round(), machine_snapshot.plateau_command ), unit: "cmH20", ids: ( self.ids.plateau_parent, self.ids.plateau_title, self.ids.plateau_value, self.ids.plateau_unit, ), x_position: 
last_widget_position, y_position: 0.0, background_color: Color::Rgba(66.0 / 255.0, 44.0 / 255.0, 85.0 / 255.0, 1.0), }; last_widget_position = self .widgets .render(ControlWidgetType::Telemetry(plateau_config)); let peep_config = TelemetryWidgetConfig { title: "P(expiratory)", value: format!( "{} ← ({})", (machine_snapshot.previous_peep_pressure as f64 / 10.0).round(), machine_snapshot.peep_command ), unit: "cmH20", ids: ( self.ids.peep_parent, self.ids.peep_title, self.ids.peep_value, self.ids.peep_unit, ), x_position: last_widget_position, y_position: 0.0, background_color: Color::Rgba(76.0 / 255.0, 73.0 / 255.0, 25.0 / 255.0, 1.0), }; last_widget_position = self .widgets .render(ControlWidgetType::Telemetry(peep_config)); let cycles_config = TelemetryWidgetConfig { title: "Cycles/minute", value: format!("{}", machine_snapshot.cpm_command), unit: "/minute", ids: ( self.ids.cycles_parent, self.ids.cycles_title, self.ids.cycles_value, self.ids.cycles_unit, ), x_position: last_widget_position, y_position: 0.0, background_color: Color::Rgba(47.0 / 255.0, 74.0 / 255.0, 16.0 / 255.0, 1.0), }; last_widget_position = self .widgets .render(ControlWidgetType::Telemetry(cycles_config)); let ratio_config = TelemetryWidgetConfig { title: "Insp-exp ratio", value: "0:0".to_string(), unit: "insp:exp.", ids: ( self.ids.ratio_parent, self.ids.ratio_title, self.ids.ratio_value, self.ids.ratio_unit, ), x_position: last_widget_position, y_position: 0.0, background_color: Color::Rgba(52.0 / 255.0, 52.0 / 255.0, 52.0 / 255.0, 1.0), }; last_widget_position = self .widgets .render(ControlWidgetType::Telemetry(ratio_config)); let tidal_config = TelemetryWidgetConfig { title: "Tidal volume", value: "0".to_string(), unit: "mL (milliliters)", ids: ( self.ids.tidal_parent, self.ids.tidal_title, self.ids.tidal_value, self.ids.tidal_unit, ), x_position: last_widget_position, y_position: 0.0, background_color: Color::Rgba(52.0 / 255.0, 52.0 / 255.0, 52.0 / 255.0, 1.0), }; self.widgets 
.render(ControlWidgetType::Telemetry(tidal_config)); } }
use conrod_core::color::{self, Color}; use telemetry::structures::MachineStateSnapshot; use crate::config::environment::DISPLAY_WIDGET_SPACING_FROM_BOTTOM; use super::fonts::Fonts; use super::widget::{ BackgroundWidgetConfig, BrandingWidgetConfig, ControlWidget, ControlWidgetType, ErrorWidgetConfig, GraphWidgetConfig, NoDataWidgetConfig, StopWidgetConfig, TelemetryWidgetConfig, }; widget_ids!(pub struct Ids { background, branding, alarms, pressure_graph, cycles_parent, cycles_title, cycles_value, cycles_unit, peak_parent, peak_title, peak_value, peak_unit, plateau_parent, plateau_title, plateau_value, plateau_unit, peep_parent, peep_title, peep_value, peep_unit, ratio_parent, ratio_title, ratio_value, ratio_unit, tidal_parent, tidal_title, tidal_value, tidal_unit, no_data, stopped, error }); pub struct Screen<'a> { ids: &'a Ids, machine_snapshot: Option<&'a MachineStateSnapshot>, widgets: ControlWidget<'a>, } pub struct ScreenDataBranding { pub image_id: conrod_core::image::Id, pub width: f64, pub height: f64, } pub struct ScreenDataGraph { pub image_id: conrod_core::image::Id, pub width: f64, pub height: f64, } impl<'a> Screen<'a> { pub fn new( ui: conrod_core::UiCell<'a>, ids: &'a Ids, fonts: &'a Fonts, machine_snapshot: Option<&'a MachineStateSnapshot>, ) -> Screen<'a> { Screen { ids, machine_snapshot, widgets: ControlWidget::new(ui, fonts), } } pub fn render_with_data( &mut self, branding_data: ScreenDataBranding, graph_data: ScreenDataGraph, ) { self.render_background(); self.render_branding( "?.?.?".to_string(), "?.?.?".to_string(), branding_data.image_id, branding_data.width, branding_data.height, ); self.render_graph(graph_data.image_id, graph_data.width, graph_data.height); self.render_telemetry(); } pub fn render_background(&mut self) { let config = BackgroundWidgetConfig::new(color::BLACK, self.ids.background); self.widgets.render(ControlWidgetType::Background(config)); } pub fn render_branding( &mut self, version_firmware: String, version_control: 
String, image_id: conrod_core::image::Id, width: f64, height: f64, ) { let config = BrandingWidgetConfig::new( version_firmware, version_control, width, height, image_id, self.ids.branding, ); self.widgets.render(ControlWidgetType::Branding(config)); } pub fn render_graph(&mut self, image_id: conrod_core::image::Id, width: f64, height: f64) { let config = GraphWidgetConfig::new(width, height, image_id, self.ids.pressure_graph); self.widgets.render(ControlWidgetType::Graph(config)); } pub fn render_stop(&mut self) { let config = StopWidgetConfig::new(self.ids.stopped); self.widgets.render(ControlWidgetType::Stop(config)); } pub fn render_no_data(&mut self) { let config = NoDataWidgetConfig::new(self.ids.no_data); self.widgets.render(ControlWidgetType::NoData(config)); } pub fn render_error(&mut self, error: String) { let config = ErrorWidgetConfig::new(error, self.ids.error); self.render_background(); self.widgets.render(ControlWidgetType::Error(config)); } pub fn render_telemetry(&mut self) { let mut last_widget_position = 0.0; let machine_snapshot = self.machine_snapshot.unwrap(); let peak_config = TelemetryWidgetConfig { title: "P(peak)", value: format!( "{} ← ({})", (machine_snapshot.previous_peak_pressure as f64 / 10.0).round(), machine_snapshot.peak_command ), unit: "cmH20", ids: ( self.ids.peak_parent, self.ids.peak_title, self.ids.peak_value, self.ids.peak_unit, ), x_position: last_widget_position, y_position: DISPLAY_WIDGET_SPACING_FROM_BOTTOM, background_color: Color::Rgba(39.0 / 255.0, 66.0 / 255.0, 100.0 / 255.0, 1.0), }; last_widget_position = self .widgets .render(ControlWidgetType::Telemetry(peak_config)); let plateau_config = TelemetryWidgetConfig { title: "P(plateau)", value: format!( "{} ← ({})", (machine_snapshot.previous_plateau_pressure as f64 / 10.0).round(), machine_snapshot.plateau_command ), unit: "cmH20", ids: ( self.ids.plateau_parent, self.ids.plateau_title, self.ids.plateau_value, self.ids.plateau_unit, ), x_position: 
last_widget_position, y_position: 0.0, background_color: Color::Rgba(66.0 / 255.0, 44.0 / 255.0, 85.0 / 255.0, 1.0), }; last_widget_position = self .widgets .render(ControlWidgetType::Telemetry(plateau_config)); let peep_config = TelemetryWidgetConfig { title: "P(expiratory)", value: format!( "{} ← ({})", (machine_snapshot.previous_peep_pressure as f64 / 10.0).round(), machine_snapshot.peep_command ), unit: "cmH20", ids: ( self.ids.peep_parent, self.ids.peep_title, self.ids.peep_value, self.ids.peep_unit, ), x_position: last_widget_position, y_position: 0.0, background_color: Color::Rgba(76.0 / 255.0, 73.0 / 255.0, 25.0 / 255.0, 1.0), }; last_widget_position = self .widgets .render(ControlWidgetType::Telemetry(peep_config)); let cycles_config = TelemetryWidgetConfig { title: "Cycles/minute", value: format!("{}", machine_snapshot.cpm_command), unit: "/minute", ids: ( self.ids.cycles_parent, self.ids.cycles_title, self.ids.cycles_value, self.ids.cycles_unit, ), x_position: last_widget_position, y_position: 0.0, background_color: Color::Rgba(47.0 / 255.0, 74.0 / 255.0, 16.0 / 255.0, 1.0), }; last_widget_position = self .widgets .render(ControlWidgetType::Telemetry(cycles_config));
last_widget_position = self .widgets .render(ControlWidgetType::Telemetry(ratio_config)); let tidal_config = TelemetryWidgetConfig { title: "Tidal volume", value: "0".to_string(), unit: "mL (milliliters)", ids: ( self.ids.tidal_parent, self.ids.tidal_title, self.ids.tidal_value, self.ids.tidal_unit, ), x_position: last_widget_position, y_position: 0.0, background_color: Color::Rgba(52.0 / 255.0, 52.0 / 255.0, 52.0 / 255.0, 1.0), }; self.widgets .render(ControlWidgetType::Telemetry(tidal_config)); } }
let ratio_config = TelemetryWidgetConfig { title: "Insp-exp ratio", value: "0:0".to_string(), unit: "insp:exp.", ids: ( self.ids.ratio_parent, self.ids.ratio_title, self.ids.ratio_value, self.ids.ratio_unit, ), x_position: last_widget_position, y_position: 0.0, background_color: Color::Rgba(52.0 / 255.0, 52.0 / 255.0, 52.0 / 255.0, 1.0), };
assignment_statement
[ { "content": "pub fn gather_telemetry(\n\n port_id: &str,\n\n tx: Sender<TelemetryChannelType>,\n\n mut file_buf: Option<BufWriter<File>>,\n\n) {\n\n loop {\n\n info!(\"Opening {}\", &port_id);\n\n match serial::open(&port_id) {\n\n Err(e) => {\n\n error!(\"{:...
Rust
src/limiting.rs
Icelk/kvarn
9b3e0f0909b73712ad2057aa2875bcb3da01d3c8
use crate::prelude::*; use threading::atomic; #[inline] #[must_use] pub fn get_too_many_requests() -> Response<Bytes> { let body = Bytes::from_static("<html>\ <head>\ <title>429 Too Many Requests</title>\ </head>\ <body>\ <center>\ <h1>429 Too Many Requests</h1>\ <hr>\ <p>You have requested resources from this server too many times. <i>Please Enhance Your Calm.</i></p>\ <p>Try to access this page again in a minute. If this error persists, please contact the website administrator.</p>\ </center>\ </body>\ </html>".as_bytes()); Response::builder() .status(StatusCode::TOO_MANY_REQUESTS) .header( "content-type", HeaderValue::from_static("text/html; charset=utf-8"), ) .header("content-length", body.len().to_string()) .header("content-encoding", "identity") .body(body) .unwrap() } #[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq)] pub enum Action { Passed, Send, Drop, } #[derive(Debug, Clone)] #[must_use] pub struct Manager { connection_map_and_time: Arc<Mutex<(HashMap<IpAddr, usize>, std::time::Instant)>>, max_requests: usize, check_every: usize, reset_seconds: u64, iteration: Arc<atomic::AtomicUsize>, } impl LimitManager { pub fn new(max_requests: usize, check_every: usize, reset_seconds: u64) -> Self { Self { connection_map_and_time: Arc::new(Mutex::new(( HashMap::new(), std::time::Instant::now(), ))), max_requests, check_every, reset_seconds, iteration: Arc::new(atomic::AtomicUsize::new(0)), } } pub fn disable(&mut self) -> &mut Self { self.set_check_every(usize::MAX) } pub fn set_check_every(&mut self, check_every: usize) -> &mut Self { self.check_every = check_every; self } pub fn set_max_requests(&mut self, max_requests: usize) -> &mut Self { self.max_requests = max_requests; self } pub fn set_reset_seconds(&mut self, reset_seconds: u64) -> &mut Self { self.reset_seconds = reset_seconds; self } pub async fn register(&self, addr: IpAddr) -> Action { if self.check_every == usize::MAX || self.iteration.fetch_add(1, atomic::Ordering::Relaxed) + 1 < 
self.check_every { Action::Passed } else { self.iteration.store(0, atomic::Ordering::Release); let mut lock = self.connection_map_and_time.lock().await; let (map, time) = &mut *lock; if time.elapsed().as_secs() >= self.reset_seconds { *time = std::time::Instant::now(); map.clear(); Action::Passed } else { let requests = *map.entry(addr).and_modify(|count| *count += 1).or_insert(1); if requests <= self.max_requests { Action::Passed } else if requests <= self.max_requests * 10 { Action::Send } else { Action::Drop } } } } } impl Default for LimitManager { #[inline] fn default() -> Self { Self::new(10, 10, 10) } }
use crate::prelude::*; use threading::atomic; #[inline] #[must_use] pub fn get_too_many_requests() -> Response<Bytes> { let body = Bytes::from_static("<html>\ <head>\ <title>429 Too Many Requests</title>\ </head>\ <body>\ <center>\ <h1>429 Too Many Requests</h1>\ <hr>\ <p>You have requested resources from this ser
ing()) .header("content-encoding", "identity") .body(body) .unwrap() } #[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq)] pub enum Action { Passed, Send, Drop, } #[derive(Debug, Clone)] #[must_use] pub struct Manager { connection_map_and_time: Arc<Mutex<(HashMap<IpAddr, usize>, std::time::Instant)>>, max_requests: usize, check_every: usize, reset_seconds: u64, iteration: Arc<atomic::AtomicUsize>, } impl LimitManager { pub fn new(max_requests: usize, check_every: usize, reset_seconds: u64) -> Self { Self { connection_map_and_time: Arc::new(Mutex::new(( HashMap::new(), std::time::Instant::now(), ))), max_requests, check_every, reset_seconds, iteration: Arc::new(atomic::AtomicUsize::new(0)), } } pub fn disable(&mut self) -> &mut Self { self.set_check_every(usize::MAX) } pub fn set_check_every(&mut self, check_every: usize) -> &mut Self { self.check_every = check_every; self } pub fn set_max_requests(&mut self, max_requests: usize) -> &mut Self { self.max_requests = max_requests; self } pub fn set_reset_seconds(&mut self, reset_seconds: u64) -> &mut Self { self.reset_seconds = reset_seconds; self } pub async fn register(&self, addr: IpAddr) -> Action { if self.check_every == usize::MAX || self.iteration.fetch_add(1, atomic::Ordering::Relaxed) + 1 < self.check_every { Action::Passed } else { self.iteration.store(0, atomic::Ordering::Release); let mut lock = self.connection_map_and_time.lock().await; let (map, time) = &mut *lock; if time.elapsed().as_secs() >= self.reset_seconds { *time = std::time::Instant::now(); map.clear(); Action::Passed } else { let requests = *map.entry(addr).and_modify(|count| *count += 1).or_insert(1); if requests <= self.max_requests { Action::Passed } else if requests <= self.max_requests * 10 { Action::Send } else { Action::Drop } } } } } impl Default for LimitManager { #[inline] fn default() -> Self { Self::new(10, 10, 10) } }
ver too many times. <i>Please Enhance Your Calm.</i></p>\ <p>Try to access this page again in a minute. If this error persists, please contact the website administrator.</p>\ </center>\ </body>\ </html>".as_bytes()); Response::builder() .status(StatusCode::TOO_MANY_REQUESTS) .header( "content-type", HeaderValue::from_static("text/html; charset=utf-8"), ) .header("content-length", body.len().to_str
function_block-random_span
[ { "content": "#[inline]\n\npub fn get_body_length_request<T>(request: &Request<T>) -> usize {\n\n use std::str::FromStr;\n\n if method_has_request_body(request.method()) {\n\n request\n\n .headers()\n\n .get(\"content-length\")\n\n .map(HeaderValue::to_str)\n\n ...
Rust
src/geom/bound.rs
nwtnni/photon
60c15cca7509a8b43a8ab9aa4cb444c29775ad78
use std::iter; use std::ops::BitOr; use std::ops::BitOrAssign; use crate::math; use crate::math::{Axis, Ray, Vec3}; use crate::geom; #[readonly::make] #[derive(Copy, Clone, Debug)] pub struct Box3 { pub min: Vec3, pub max: Vec3, } impl Default for Box3 { fn default() -> Self { let min = std::f32::NEG_INFINITY; let max = std::f32::INFINITY; Box3 { min: Vec3::new(max, max, max), max: Vec3::new(min, min, min), } } } impl Box3 { pub fn new(a: Vec3, b: Vec3) -> Self { let min = a.min(&b); let max = a.max(&b); let eps = Vec3::broadcast(math::EPSILON); let zero = (max - min).lt(&eps); let min = zero.blend(&min, &(min - eps)); let max = zero.blend(&max, &(max + eps)); Box3 { min, max } } pub fn max_extent(&self) -> Axis { let extent = (self.max - self.min).abs(); let x = extent.x(); let y = extent.y(); let z = extent.z(); if x > y { if x > z { Axis::X } else { Axis::Z } } else { if y > z { Axis::Y } else { Axis::Z } } } pub fn intersect(&self, rhs: &Self) -> Self { let min = self.min.max(&rhs.min); let max = self.max.min(&rhs.max); Box3 { min, max } } pub fn union_b(&self, rhs: &Self) -> Self { let min = self.min.min(&rhs.min); let max = self.max.max(&rhs.max); Box3 { min, max } } pub fn union_v(&self, rhs: &Vec3) -> Self { let min = self.min.min(rhs); let max = self.max.max(rhs); Box3 { min, max } } pub fn scale(&self, c: f32) -> Self { let min = self.min * c; let max = self.max * c; Box3 { min, max } } pub fn translate(&self, v: &Vec3) -> Self { let min = self.min + v; let max = self.max + v; Box3 { min, max } } pub fn offset(&self, v: &Vec3) -> Vec3 { let m = self.max.gt(&self.min); let p = v - self.min; m.blend(&p, &(p / (self.max - self.min))) } pub fn surface_area(&self) -> f32 { 2.0 * (self.max - self.min).len_sq() } } impl std::ops::Index<usize> for Box3 { type Output = Vec3; fn index(&self, index: usize) -> &Self::Output { match index { | 0 => &self.min, | 1 => &self.max, | _ => unreachable!(), } } } impl<'scene> geom::Surface<'scene> for Box3 { fn bound(&self) -> 
Box3 { *self } fn hit(&self, ray: &mut Ray, _: &mut geom::Hit<'scene>) -> bool { self.hit_any(&*ray) } fn hit_any(&self, ray: &Ray) -> bool { if cfg!(feature = "stats") { crate::stats::INTERSECTION_TESTS.inc(); crate::stats::BOUNDING_BOX_INTERSECTION_TESTS.inc(); } let t_0 = (self.min - ray.p) * ray.inv; let t_1 = (self.max - ray.p) * ray.inv; let t_min = t_0.min(&t_1).max_horizontal(); let t_max = t_0.max(&t_1).min_horizontal(); t_min < t_max && t_min < ray.max && t_max > ray.min } } impl iter::FromIterator<Box3> for Box3 { fn from_iter<T>(iter: T) -> Self where T: IntoIterator<Item = Box3> { iter.into_iter().fold(Box3::default(), |a, b| a.union_b(&b)) } } impl<'b> iter::FromIterator<&'b Box3> for Box3 { fn from_iter<T>(iter: T) -> Self where T: IntoIterator<Item = &'b Box3> { iter.into_iter().fold(Box3::default(), |a, b| a.union_b(b)) } } impl iter::FromIterator<Vec3> for Box3 { fn from_iter<T>(iter: T) -> Self where T: IntoIterator<Item = Vec3> { iter.into_iter().fold(Box3::default(), |a, b| a.union_v(&b)) } } impl<'v> iter::FromIterator<&'v Vec3> for Box3 { fn from_iter<T>(iter: T) -> Self where T: IntoIterator<Item = &'v Vec3> { iter.into_iter().fold(Box3::default(), |a, b| a.union_v(b)) } } impl BitOr<Box3> for Box3 { type Output = Box3; fn bitor(self, rhs: Box3) -> Self::Output { self.union_b(&rhs) } } impl BitOrAssign<Box3> for Box3 { fn bitor_assign(&mut self, rhs: Box3) { *self = self.union_b(&rhs); } } impl BitOrAssign<Box3> for &'_ mut Box3 { fn bitor_assign(&mut self, rhs: Box3) { **self = self.union_b(&rhs); } } impl BitOrAssign<&'_ Box3> for Box3 { fn bitor_assign(&mut self, rhs: &'_ Box3) { *self = self.union_b(rhs); } } impl BitOrAssign<&'_ Box3> for &'_ mut Box3 { fn bitor_assign(&mut self, rhs: &'_ Box3) { **self = self.union_b(rhs); } } impl BitOrAssign<Vec3> for Box3 { fn bitor_assign(&mut self, rhs: Vec3) { *self = self.union_v(&rhs); } } impl BitOrAssign<Vec3> for &'_ mut Box3 { fn bitor_assign(&mut self, rhs: Vec3) { **self = 
self.union_v(&rhs); } } impl BitOrAssign<&'_ Vec3> for Box3 { fn bitor_assign(&mut self, rhs: &'_ Vec3) { *self = self.union_v(rhs); } } impl BitOrAssign<&'_ Vec3> for &'_ mut Box3 { fn bitor_assign(&mut self, rhs: &'_ Vec3) { **self = self.union_v(rhs); } }
use std::iter; use std::ops::BitOr; use std::ops::BitOrAssign; use crate::math; use crate::math::{Axis, Ray, Vec3}; use crate::geom; #[readonly::make] #[derive(Copy, Clone, Debug)] pub struct Box3 { pub min: Vec3, pub max: Vec3, } impl Default for Box3 { fn default() -> Self { let min = std::f32::NEG_INFINITY; let max = std::f32::INFINITY; Box3 { min: Vec3::new(max, max, max), max: Vec3::new(min, min, min), } } } impl Box3 { pub fn new(a: Vec3, b: Vec3) -> Self { let min = a.min(&b); let max = a.max(&b); let eps = Vec3::broadcast(math::EPSILON); let zero = (max - min).lt(&eps); let min = zero.blend(&min, &(min - eps)); let max = zero.blend(&max, &(max + eps)); Box3 { min, max } } pub fn max_extent(&self) -> Axis { let extent = (self.max - self.min).abs(); let x = extent.x(); let y = extent.y(); let z = extent.z(); if x > y { if x > z { Axis::X } else { Axis::Z }
rhs: &'_ Box3) { *self = self.union_b(rhs); } } impl BitOrAssign<&'_ Box3> for &'_ mut Box3 { fn bitor_assign(&mut self, rhs: &'_ Box3) { **self = self.union_b(rhs); } } impl BitOrAssign<Vec3> for Box3 { fn bitor_assign(&mut self, rhs: Vec3) { *self = self.union_v(&rhs); } } impl BitOrAssign<Vec3> for &'_ mut Box3 { fn bitor_assign(&mut self, rhs: Vec3) { **self = self.union_v(&rhs); } } impl BitOrAssign<&'_ Vec3> for Box3 { fn bitor_assign(&mut self, rhs: &'_ Vec3) { *self = self.union_v(rhs); } } impl BitOrAssign<&'_ Vec3> for &'_ mut Box3 { fn bitor_assign(&mut self, rhs: &'_ Vec3) { **self = self.union_v(rhs); } }
} else { if y > z { Axis::Y } else { Axis::Z } } } pub fn intersect(&self, rhs: &Self) -> Self { let min = self.min.max(&rhs.min); let max = self.max.min(&rhs.max); Box3 { min, max } } pub fn union_b(&self, rhs: &Self) -> Self { let min = self.min.min(&rhs.min); let max = self.max.max(&rhs.max); Box3 { min, max } } pub fn union_v(&self, rhs: &Vec3) -> Self { let min = self.min.min(rhs); let max = self.max.max(rhs); Box3 { min, max } } pub fn scale(&self, c: f32) -> Self { let min = self.min * c; let max = self.max * c; Box3 { min, max } } pub fn translate(&self, v: &Vec3) -> Self { let min = self.min + v; let max = self.max + v; Box3 { min, max } } pub fn offset(&self, v: &Vec3) -> Vec3 { let m = self.max.gt(&self.min); let p = v - self.min; m.blend(&p, &(p / (self.max - self.min))) } pub fn surface_area(&self) -> f32 { 2.0 * (self.max - self.min).len_sq() } } impl std::ops::Index<usize> for Box3 { type Output = Vec3; fn index(&self, index: usize) -> &Self::Output { match index { | 0 => &self.min, | 1 => &self.max, | _ => unreachable!(), } } } impl<'scene> geom::Surface<'scene> for Box3 { fn bound(&self) -> Box3 { *self } fn hit(&self, ray: &mut Ray, _: &mut geom::Hit<'scene>) -> bool { self.hit_any(&*ray) } fn hit_any(&self, ray: &Ray) -> bool { if cfg!(feature = "stats") { crate::stats::INTERSECTION_TESTS.inc(); crate::stats::BOUNDING_BOX_INTERSECTION_TESTS.inc(); } let t_0 = (self.min - ray.p) * ray.inv; let t_1 = (self.max - ray.p) * ray.inv; let t_min = t_0.min(&t_1).max_horizontal(); let t_max = t_0.max(&t_1).min_horizontal(); t_min < t_max && t_min < ray.max && t_max > ray.min } } impl iter::FromIterator<Box3> for Box3 { fn from_iter<T>(iter: T) -> Self where T: IntoIterator<Item = Box3> { iter.into_iter().fold(Box3::default(), |a, b| a.union_b(&b)) } } impl<'b> iter::FromIterator<&'b Box3> for Box3 { fn from_iter<T>(iter: T) -> Self where T: IntoIterator<Item = &'b Box3> { iter.into_iter().fold(Box3::default(), |a, b| a.union_b(b)) } } impl 
iter::FromIterator<Vec3> for Box3 { fn from_iter<T>(iter: T) -> Self where T: IntoIterator<Item = Vec3> { iter.into_iter().fold(Box3::default(), |a, b| a.union_v(&b)) } } impl<'v> iter::FromIterator<&'v Vec3> for Box3 { fn from_iter<T>(iter: T) -> Self where T: IntoIterator<Item = &'v Vec3> { iter.into_iter().fold(Box3::default(), |a, b| a.union_v(b)) } } impl BitOr<Box3> for Box3 { type Output = Box3; fn bitor(self, rhs: Box3) -> Self::Output { self.union_b(&rhs) } } impl BitOrAssign<Box3> for Box3 { fn bitor_assign(&mut self, rhs: Box3) { *self = self.union_b(&rhs); } } impl BitOrAssign<Box3> for &'_ mut Box3 { fn bitor_assign(&mut self, rhs: Box3) { **self = self.union_b(&rhs); } } impl BitOrAssign<&'_ Box3> for Box3 { fn bitor_assign(&mut self,
random
[ { "content": "pub fn basis(w: &Vec3) -> (Vec3, Vec3) {\n\n let x = w.x().abs();\n\n let y = w.y().abs();\n\n let z = w.z().abs();\n\n let v = if x <= y && x <= z {\n\n Vec3::new(1.0, 0.0, 0.0)\n\n } else if y <= x && y <= z {\n\n Vec3::new(0.0, 1.0, 0.0)\n\n } else {\n\n V...
Rust
src/apis/fsa_results_api.rs
cholcombe973/isilon
928234ce6928a47e061c8afac4e4e589d3b22ea9
/* * Isilon SDK * * Isilon SDK - Language bindings for the OneFS API * * OpenAPI spec version: 5 * Contact: sdk@isilon.com * Generated by: https://github.com/swagger-api/swagger-codegen.git */ use std::borrow::Borrow; use std::rc::Rc; use futures; use futures::Future; use hyper; use super::{configuration, query, Error}; pub struct FsaResultsApiClient<C: hyper::client::connect::Connect> { configuration: Rc<configuration::Configuration<C>>, } impl<C: hyper::client::connect::Connect + 'static> FsaResultsApiClient<C> { pub fn new(configuration: Rc<configuration::Configuration<C>>) -> FsaResultsApiClient<C> { FsaResultsApiClient { configuration: configuration, } } } pub trait FsaResultsApi { fn get_histogram_stat_by( &self, id: &str, stat: &str, ) -> Box<dyn Future<Item = crate::models::HistogramStatBy, Error = Error>>; fn get_histogram_stat_by_breakout( &self, histogram_stat_by_breakout: &str, id: &str, stat: &str, directory_filter: &str, attribute_filter: &str, node_pool_filter: &str, disk_pool_filter: &str, tier_filter: &str, comp_report: i32, log_size_filter: i32, phys_size_filter: i32, limit: i32, path_ext_filter: &str, ctime_filter: i32, atime_filter: i32, ) -> Box<dyn Future<Item = crate::models::HistogramStatBy, Error = Error>>; fn get_result_directories( &self, id: &str, sort: &str, path: &str, limit: i32, comp_report: i32, dir: &str, ) -> Box<dyn Future<Item = crate::models::ResultDirectories, Error = Error>>; fn get_result_directory( &self, result_directory_id: i32, id: &str, sort: &str, limit: i32, comp_report: i32, dir: &str, ) -> Box<dyn Future<Item = crate::models::ResultDirectories, Error = Error>>; fn get_result_histogram( &self, id: &str, ) -> Box<dyn Future<Item = crate::models::ResultHistogram, Error = Error>>; fn get_result_histogram_stat( &self, result_histogram_stat: &str, id: &str, directory_filter: &str, attribute_filter: &str, node_pool_filter: &str, disk_pool_filter: &str, tier_filter: &str, comp_report: i32, log_size_filter: i32, 
phys_size_filter: i32, path_ext_filter: &str, ctime_filter: i32, atime_filter: i32, ) -> Box<dyn Future<Item = crate::models::ResultHistogram, Error = Error>>; fn get_result_top_dir( &self, result_top_dir_id: &str, id: &str, sort: &str, start: i32, limit: i32, comp_report: i32, dir: &str, ) -> Box<dyn Future<Item = crate::models::ResultTopDirs, Error = Error>>; fn get_result_top_dirs( &self, id: &str, ) -> Box<dyn Future<Item = crate::models::ResultTopDirs, Error = Error>>; fn get_result_top_file( &self, result_top_file_id: &str, id: &str, sort: &str, start: i32, limit: i32, comp_report: i32, dir: &str, ) -> Box<dyn Future<Item = crate::models::ResultTopFiles, Error = Error>>; fn get_result_top_files( &self, id: &str, ) -> Box<dyn Future<Item = crate::models::ResultTopFiles, Error = Error>>; } impl<C: hyper::client::connect::Connect + 'static> FsaResultsApi for FsaResultsApiClient<C> { fn get_histogram_stat_by( &self, id: &str, stat: &str, ) -> Box<dyn Future<Item = crate::models::HistogramStatBy, Error = Error>> { let uri_str = format!( "{}/platform/3/fsa/results/{Id}/histogram/{Stat}/by", self.configuration.base_path, Id = id, Stat = stat ); query( self.configuration.borrow(), &uri_str, &"", hyper::Method::GET, ) } fn get_histogram_stat_by_breakout( &self, histogram_stat_by_breakout: &str, id: &str, stat: &str, directory_filter: &str, attribute_filter: &str, node_pool_filter: &str, disk_pool_filter: &str, tier_filter: &str, comp_report: i32, log_size_filter: i32, phys_size_filter: i32, limit: i32, path_ext_filter: &str, ctime_filter: i32, atime_filter: i32, ) -> Box<dyn Future<Item = crate::models::HistogramStatBy, Error = Error>> { let q = ::url::form_urlencoded::Serializer::new(String::new()) .append_pair("directory_filter", &directory_filter.to_string()) .append_pair("attribute_filter", &attribute_filter.to_string()) .append_pair("node_pool_filter", &node_pool_filter.to_string()) .append_pair("disk_pool_filter", &disk_pool_filter.to_string()) 
.append_pair("tier_filter", &tier_filter.to_string()) .append_pair("comp_report", &comp_report.to_string()) .append_pair("log_size_filter", &log_size_filter.to_string()) .append_pair("phys_size_filter", &phys_size_filter.to_string()) .append_pair("limit", &limit.to_string()) .append_pair("path_ext_filter", &path_ext_filter.to_string()) .append_pair("ctime_filter", &ctime_filter.to_string()) .append_pair("atime_filter", &atime_filter.to_string()) .finish(); let uri_str = format!( "{}/platform/3/fsa/results/{Id}/histogram/{Stat}/by/{HistogramStatByBreakout}?{}", self.configuration.base_path, q, HistogramStatByBreakout = histogram_stat_by_breakout, Id = id, Stat = stat ); query( self.configuration.borrow(), &uri_str, &"", hyper::Method::GET, ) } fn get_result_directories( &self, id: &str, sort: &str, path: &str, limit: i32, comp_report: i32, dir: &str, ) -> Box<dyn Future<Item = crate::models::ResultDirectories, Error = Error>> { let q = ::url::form_urlencoded::Serializer::new(String::new()) .append_pair("sort", &sort.to_string()) .append_pair("path", &path.to_string()) .append_pair("limit", &limit.to_string()) .append_pair("comp_report", &comp_report.to_string()) .append_pair("dir", &dir.to_string()) .finish(); let uri_str = format!( "{}/platform/3/fsa/results/{Id}/directories?{}", self.configuration.base_path, q, Id = id ); query( self.configuration.borrow(), &uri_str, &"", hyper::Method::GET, ) } fn get_result_directory( &self, result_directory_id: i32, id: &str, sort: &str, limit: i32, comp_report: i32, dir: &str, ) -> Box<dyn Future<Item = crate::models::ResultDirectories, Error = Error>> { let q = ::url::form_urlencoded::Serializer::new(String::new()) .append_pair("sort", &sort.to_string()) .append_pair("limit", &limit.to_string()) .append_pair("comp_report", &comp_report.to_string()) .append_pair("dir", &dir.to_string()) .finish(); let uri_str = format!( "{}/platform/3/fsa/results/{Id}/directories/{ResultDirectoryId}?{}", self.configuration.base_path, q, 
ResultDirectoryId = result_directory_id, Id = id ); query( self.configuration.borrow(), &uri_str, &"", hyper::Method::GET, ) } fn get_result_histogram( &self, id: &str, ) -> Box<dyn Future<Item = crate::models::ResultHistogram, Error = Error>> { let uri_str = format!( "{}/platform/3/fsa/results/{Id}/histogram", self.configuration.base_path, Id = id ); query( self.configuration.borrow(), &uri_str, &"", hyper::Method::GET, ) } fn get_result_histogram_stat( &self, result_histogram_stat: &str, id: &str, directory_filter: &str, attribute_filter: &str, node_pool_filter: &str, disk_pool_filter: &str, tier_filter: &str, comp_report: i32, log_size_filter: i32, phys_size_filter: i32, path_ext_filter: &str, ctime_filter: i32, atime_filter: i32, ) -> Box<dyn Future<Item = crate::models::ResultHistogram, Error = Error>> { let q = ::url::form_urlencoded::Serializer::new(String::new()) .append_pair("directory_filter", &directory_filter.to_string()) .append_pair("attribute_filter", &attribute_filter.to_string()) .append_pair("node_pool_filter", &node_pool_filter.to_string()) .append_pair("disk_pool_filter", &disk_pool_filter.to_string()) .append_pair("tier_filter", &tier_filter.to_string()) .append_pair("comp_report", &comp_report.to_string()) .append_pair("log_size_filter", &log_size_filter.to_string()) .append_pair("phys_size_filter", &phys_size_filter.to_string()) .append_pair("path_ext_filter", &path_ext_filter.to_string()) .append_pair("ctime_filter", &ctime_filter.to_string()) .append_pair("atime_filter", &atime_filter.to_string()) .finish(); let uri_str = format!( "{}/platform/3/fsa/results/{Id}/histogram/{ResultHistogramStat}?{}", self.configuration.base_path, q, ResultHistogramStat = result_histogram_stat, Id = id ); query( self.configuration.borrow(), &uri_str, &"", hyper::Method::GET, ) } fn get_result_top_dir( &self, result_top_dir_id: &str, id: &str, sort: &str, start: i32, limit: i32, comp_report: i32, dir: &str, ) -> Box<dyn Future<Item = 
crate::models::ResultTopDirs, Error = Error>> { let q = ::url::form_urlencoded::Serializer::new(String::new()) .append_pair("sort", &sort.to_string()) .append_pair("start", &start.to_string()) .append_pair("limit", &limit.to_string()) .append_pair("comp_report", &comp_report.to_string()) .append_pair("dir", &dir.to_string()) .finish(); let uri_str = format!( "{}/platform/3/fsa/results/{Id}/top-dirs/{ResultTopDirId}?{}", self.configuration.base_path, q, ResultTopDirId = result_top_dir_id, Id = id ); query( self.configuration.borrow(), &uri_str, &"", hyper::Method::GET, ) } fn get_result_top_dirs( &self, id: &str, ) -> Box<dyn Future<Item = crate::models::ResultTopDirs, Error = Error>> { let uri_str = format!( "{}/platform/3/fsa/results/{Id}/top-dirs", self.configuration.base_path, Id = id ); query( self.configuration.borrow(), &uri_str, &"", hyper::Method::GET, ) } fn get_result_top_file( &self, result_top_file_id: &str, id: &str, sort: &str, start: i32, limit: i32, comp_report: i32, dir: &str, ) -> Box<dyn Future<Item = crate::models::ResultTopFiles, Error = Error>> { let q = ::url::form_urlencoded::Serializer::new(String::new()) .append_pair("sort", &sort.to_string()) .append_pair("start", &start.to_string()) .append_pair("limit", &limit.to_string()) .append_pair("comp_report", &comp_report.to_string()) .append_pair("dir", &dir.to_string()) .finish(); let uri_str = format!( "{}/platform/3/fsa/results/{Id}/top-files/{ResultTopFileId}?{}", self.configuration.base_path, q, ResultTopFileId = result_top_file_id, Id = id ); query( self.configuration.borrow(), &uri_str, &"", hyper::Method::GET, ) } fn get_result_top_files( &self, id: &str, ) -> Box<dyn Future<Item = crate::models::ResultTopFiles, Error = Error>> { let uri_str = format!( "{}/platform/3/fsa/results/{Id}/top-files", self.configuration.base_path, Id = id ); query( self.configuration.borrow(), &uri_str, &"", hyper::Method::GET, ) } }
/* * Isilon SDK * * Isilon SDK - Language bindings for the OneFS API * * OpenAPI spec version: 5 * Contact: sdk@isilon.com * Generated by: https://github.com/swagger-api/swagger-codegen.git */ use std::borrow::Borrow; use std::rc::Rc; use futures; use futures::Future; use hyper; use super::{configuration, query, Error}; pub struct FsaResultsApiClient<C: hyper::client::connect::Connect> { configuration: Rc<configuration::Configuration<C>>, } impl<C: hyper::client::connect::Connect + 'static> FsaResultsApiClient<C> { pub fn new(configuration: Rc<configuration::Configuration<C>>) -> FsaResultsApiClient<C> { FsaResultsApiClient { configuration: configuration, } } } pub trait FsaResultsApi { fn get_histogram_stat_by( &self, id: &str, stat: &str, ) -> Box<dyn Future<Item = crate::models::HistogramStatBy, Error = Error>>; fn get_histogram_stat_by_breakout( &self, histogram_stat_by_breakout: &str, id: &str, stat: &str, directory_filter: &str, attribute_filter: &str, node_pool_filter: &str, disk_pool_filter: &str, tier_filter: &str, comp_report: i32, log_size_filter: i32, phys_size_filter: i32, limit: i32, path_ext_filter: &str, ctime_filter: i32, atime_filter: i32, ) -> Box<dyn Future<Item = crate::models::HistogramStatBy, Error = Error>>; fn get_result_directories( &self, id: &str, sort: &str, path: &str, limit: i32, comp_report: i32, dir: &str, ) -> Box<dyn Future<Item = crate::models::ResultDirectories, Error = Error>>; fn get_result_directory( &self, result_directory_id: i32, id: &str, sort: &str, limit: i32, comp_report: i32, dir: &str, ) -> Box<dyn Future<Item = crate::models::ResultDirectories, Error = Error>>; fn get_result_histogram( &self, id: &str, ) -> Box<dyn Future<Item = crate::models::ResultHistogram, Error = Error>>; fn get_result_histogram_stat( &self, result_histogram_stat: &str, id: &str, directory_filter: &str, attribute_filter: &str, node_pool_filter: &str, disk_pool_filter: &str, tier_filter: &str, comp_report: i32, log_size_filter: i32, 
phys_size_filter: i32, path_ext_filter: &str, ctime_filter: i32, atime_filter: i32, ) -> Box<dyn Future<Item = crate::models::ResultHistogram, Error = Error>>; fn get_result_top_dir( &self, result_top_dir_id: &str, id: &str, sort: &str, start: i32, limit: i32, comp_report: i32, dir: &str, ) -> Box<dyn Future<Item = crate::models::ResultTopDirs, Error = Error>>; fn get_result_top_dirs( &self, id: &str, ) -> Box<dyn Future<Item = crate::models::ResultTopDirs, Error = Error>>; fn get_result_top_file( &self, result_top_file_id: &str, id: &str, sort: &str, start: i32, limit: i32, comp_report: i32, dir: &str, ) -> Box<dyn Future<Item = crate::models::ResultTopFiles, Error = Error>>; fn get_result_top_files( &self, id: &str, ) -> Box<dyn Future<Item = crate::models::ResultTopFiles, Error = Error>>; } impl<C: hyper::client::connect::Connect + 'static> FsaResultsApi for FsaResultsApiClient<C> { fn get_histogram_stat_by( &self, id: &str, stat: &str, ) -> Box<dyn Future<Item = crate::models::HistogramStatBy, Error = Error>> { let uri_str = format!( "{}/platform/3/fsa/results/{Id}/histogram/{Stat}/by", self.configuration.base_path, Id = id, Stat = stat ); query( self.configuration.borrow(), &uri_str, &"", hyper::Method::GET, ) } fn get_histogram_stat_by_breakout( &self, histogram_stat_by_breakout: &str, id: &str, stat: &str, directory_filter: &str, attribute_filter: &str, node_pool_filter: &str, disk_pool_filter: &str, tier_filter: &str, comp_report: i32, log_size_filter: i32, phys_size_filter: i32, limit: i32, path_ext_filter: &str, ctime_filter: i32, atime_filter: i32, ) -> Box<dyn Future<Item = crate::models::HistogramStatBy, Error = Error>> { let q = ::url::form_urlencoded::Serializer::new(String::new()) .append_pair("directory_filter", &directory_filter.to_string()) .append_pair("attribute_filter", &attribute_filter.to_string()) .append_pair("node_pool_filter", &node_pool_filter.to_string()) .append_pair("disk_pool_filter", &disk_pool_filter.to_string()) 
.append_pair("tier_filter", &tier_filter.to_string()) .append_pair("comp_report", &comp_report.to_string()) .append_pair("log_size_filter", &log_size_filter.to_string()) .append_pair("phys_size_filter", &phys_size_filter.to_string()) .append_pair("limit", &limit.to_string()) .append_pair("path_ext_filter", &path_ext_filter.to_string()) .append_pair("ctime_filter", &ctime_filter.to_string()) .append_pair("atime_filter", &atime_filter.to_string()) .finish(); let uri_str = format!( "{}/platform/3/fsa/results/{Id}/histogram/{Stat}/by/{HistogramStatByBreakout}?{}", self.configuration.base_path, q, HistogramStatByBreakout = histogram_stat_by_breakout, Id = id, Stat = stat ); query( self.configuration.borrow(), &uri_str, &"", hyper::Method::GET, ) } fn get_result_directories( &self, id: &str, sort: &str, path: &str, limit: i32, comp_report: i32, dir: &str, ) -> Box<dyn Future<Item = crate::models::ResultDirectories, Error = Error>> { let q = ::url::form_urlencoded::Serializer::new(String::new()) .append_pair("sort", &sort.to_string()) .append_pair("path", &path.to_string()) .append_pair("limit", &limit.to_string()) .append_pair("comp_report", &comp_report.to_string()) .append_pair("dir", &dir.to_string()) .finish(); let uri_str = format!( "{}/platform/3/fsa/results/{Id}/directories?{}", self.configuration.base_path, q, Id = id );
} fn get_result_directory( &self, result_directory_id: i32, id: &str, sort: &str, limit: i32, comp_report: i32, dir: &str, ) -> Box<dyn Future<Item = crate::models::ResultDirectories, Error = Error>> { let q = ::url::form_urlencoded::Serializer::new(String::new()) .append_pair("sort", &sort.to_string()) .append_pair("limit", &limit.to_string()) .append_pair("comp_report", &comp_report.to_string()) .append_pair("dir", &dir.to_string()) .finish(); let uri_str = format!( "{}/platform/3/fsa/results/{Id}/directories/{ResultDirectoryId}?{}", self.configuration.base_path, q, ResultDirectoryId = result_directory_id, Id = id ); query( self.configuration.borrow(), &uri_str, &"", hyper::Method::GET, ) } fn get_result_histogram( &self, id: &str, ) -> Box<dyn Future<Item = crate::models::ResultHistogram, Error = Error>> { let uri_str = format!( "{}/platform/3/fsa/results/{Id}/histogram", self.configuration.base_path, Id = id ); query( self.configuration.borrow(), &uri_str, &"", hyper::Method::GET, ) } fn get_result_histogram_stat( &self, result_histogram_stat: &str, id: &str, directory_filter: &str, attribute_filter: &str, node_pool_filter: &str, disk_pool_filter: &str, tier_filter: &str, comp_report: i32, log_size_filter: i32, phys_size_filter: i32, path_ext_filter: &str, ctime_filter: i32, atime_filter: i32, ) -> Box<dyn Future<Item = crate::models::ResultHistogram, Error = Error>> { let q = ::url::form_urlencoded::Serializer::new(String::new()) .append_pair("directory_filter", &directory_filter.to_string()) .append_pair("attribute_filter", &attribute_filter.to_string()) .append_pair("node_pool_filter", &node_pool_filter.to_string()) .append_pair("disk_pool_filter", &disk_pool_filter.to_string()) .append_pair("tier_filter", &tier_filter.to_string()) .append_pair("comp_report", &comp_report.to_string()) .append_pair("log_size_filter", &log_size_filter.to_string()) .append_pair("phys_size_filter", &phys_size_filter.to_string()) .append_pair("path_ext_filter", 
&path_ext_filter.to_string()) .append_pair("ctime_filter", &ctime_filter.to_string()) .append_pair("atime_filter", &atime_filter.to_string()) .finish(); let uri_str = format!( "{}/platform/3/fsa/results/{Id}/histogram/{ResultHistogramStat}?{}", self.configuration.base_path, q, ResultHistogramStat = result_histogram_stat, Id = id ); query( self.configuration.borrow(), &uri_str, &"", hyper::Method::GET, ) } fn get_result_top_dir( &self, result_top_dir_id: &str, id: &str, sort: &str, start: i32, limit: i32, comp_report: i32, dir: &str, ) -> Box<dyn Future<Item = crate::models::ResultTopDirs, Error = Error>> { let q = ::url::form_urlencoded::Serializer::new(String::new()) .append_pair("sort", &sort.to_string()) .append_pair("start", &start.to_string()) .append_pair("limit", &limit.to_string()) .append_pair("comp_report", &comp_report.to_string()) .append_pair("dir", &dir.to_string()) .finish(); let uri_str = format!( "{}/platform/3/fsa/results/{Id}/top-dirs/{ResultTopDirId}?{}", self.configuration.base_path, q, ResultTopDirId = result_top_dir_id, Id = id ); query( self.configuration.borrow(), &uri_str, &"", hyper::Method::GET, ) } fn get_result_top_dirs( &self, id: &str, ) -> Box<dyn Future<Item = crate::models::ResultTopDirs, Error = Error>> { let uri_str = format!( "{}/platform/3/fsa/results/{Id}/top-dirs", self.configuration.base_path, Id = id ); query( self.configuration.borrow(), &uri_str, &"", hyper::Method::GET, ) } fn get_result_top_file( &self, result_top_file_id: &str, id: &str, sort: &str, start: i32, limit: i32, comp_report: i32, dir: &str, ) -> Box<dyn Future<Item = crate::models::ResultTopFiles, Error = Error>> { let q = ::url::form_urlencoded::Serializer::new(String::new()) .append_pair("sort", &sort.to_string()) .append_pair("start", &start.to_string()) .append_pair("limit", &limit.to_string()) .append_pair("comp_report", &comp_report.to_string()) .append_pair("dir", &dir.to_string()) .finish(); let uri_str = format!( 
"{}/platform/3/fsa/results/{Id}/top-files/{ResultTopFileId}?{}", self.configuration.base_path, q, ResultTopFileId = result_top_file_id, Id = id ); query( self.configuration.borrow(), &uri_str, &"", hyper::Method::GET, ) } fn get_result_top_files( &self, id: &str, ) -> Box<dyn Future<Item = crate::models::ResultTopFiles, Error = Error>> { let uri_str = format!( "{}/platform/3/fsa/results/{Id}/top-files", self.configuration.base_path, Id = id ); query( self.configuration.borrow(), &uri_str, &"", hyper::Method::GET, ) } }
query( self.configuration.borrow(), &uri_str, &"", hyper::Method::GET, )
call_expression
[ { "content": "fn query<T, R, C: hyper::client::connect::Connect + 'static>(\n\n config: &configuration::Configuration<C>,\n\n url: &str,\n\n body: &T,\n\n method: hyper::Method,\n\n) -> Box<dyn Future<Item = R, Error = Error>>\n\nwhere\n\n T: Serialize,\n\n R: DeserializeOwned + 'static,\n\n{\...
Rust
src/day14.rs
runfalk/advent-of-code-2021
0be69d30f5037d59a7364beecbaaffe59ce66666
use anyhow::{anyhow, Result}; use std::collections::HashMap; use std::path::Path; struct PolymerExpander { rules: HashMap<(char, char), char>, cache: HashMap<(char, char, usize), HashMap<char, usize>>, } impl PolymerExpander { fn new(rules: &HashMap<(char, char), char>) -> Self { Self { rules: rules.clone(), cache: rules .keys() .map(|&(a, b)| { let mut counts = HashMap::new(); counts.insert(b, 1); ((a, b, 0), counts) }) .collect(), } } fn expand_pair(&mut self, a: char, b: char, depth: usize) -> HashMap<char, usize> { if let Some(cached) = self.cache.get(&(a, b, depth)) { return cached.clone(); } let insertion = self.rules.get(&(a, b)).cloned().unwrap(); let left = self.expand_pair(a, insertion, depth - 1); let right = self.expand_pair(insertion, b, depth - 1); let mut counts = left; right .into_iter() .for_each(|(k, v)| *counts.entry(k).or_default() += v); self.cache.insert((a, b, depth), counts.clone()); counts } fn expand_template(&mut self, template: &str, depth: usize) -> HashMap<char, usize> { let mut counts = HashMap::new(); counts.insert(template.chars().next().unwrap(), 1); for (p, c) in template.chars().zip(template.chars().skip(1)) { self.expand_pair(p, c, depth) .into_iter() .for_each(|(k, v)| *counts.entry(k).or_default() += v); } counts } } fn part_a(template: &str, rules: &HashMap<(char, char), char>) -> usize { let mut polymer_expander = PolymerExpander::new(rules); let counts = polymer_expander.expand_template(template, 10); let most_common = counts.values().copied().max().unwrap(); let least_common = counts.values().copied().min().unwrap(); most_common - least_common } fn part_b(template: &str, rules: &HashMap<(char, char), char>) -> usize { let mut polymer_expander = PolymerExpander::new(rules); let counts = polymer_expander.expand_template(template, 40); let most_common = counts.values().copied().max().unwrap(); let least_common = counts.values().copied().min().unwrap(); most_common - least_common } fn parse_insertion_rule(rule: &str) -> 
Option<((char, char), char)> { let (pair, insertion) = rule.split_once(" -> ")?; if pair.len() != 2 || insertion.len() != 1 { return None; } Some(( (pair.chars().next()?, pair.chars().nth(1)?), insertion.chars().next()?, )) } pub fn main(path: &Path) -> Result<(usize, Option<usize>)> { let input = std::fs::read_to_string(path)?; let (template, rules_str) = input .split_once("\n\n") .ok_or_else(|| anyhow!("Unable to find insertion rules"))?; let rules = rules_str .lines() .map(|l| parse_insertion_rule(l).ok_or_else(|| anyhow!("{:?} is not a valid rule", l))) .collect::<Result<HashMap<(char, char), char>>>()?; Ok((part_a(template, &rules), Some(part_b(template, &rules)))) } #[cfg(test)] mod tests { use super::*; #[test] fn test_example() -> Result<()> { let template = "NNCB"; let mut rules = HashMap::new(); rules.insert(('C', 'H'), 'B'); rules.insert(('H', 'H'), 'N'); rules.insert(('C', 'B'), 'H'); rules.insert(('N', 'H'), 'C'); rules.insert(('H', 'B'), 'C'); rules.insert(('H', 'C'), 'B'); rules.insert(('H', 'N'), 'C'); rules.insert(('N', 'N'), 'C'); rules.insert(('B', 'H'), 'H'); rules.insert(('N', 'C'), 'B'); rules.insert(('N', 'B'), 'B'); rules.insert(('B', 'N'), 'B'); rules.insert(('B', 'B'), 'N'); rules.insert(('B', 'C'), 'B'); rules.insert(('C', 'C'), 'N'); rules.insert(('C', 'N'), 'C'); assert_eq!(part_a(template, &rules), 1588); assert_eq!(part_b(template, &rules), 2188189693529); Ok(()) } }
use anyhow::{anyhow, Result}; use std::collections::HashMap; use std::path::Path; struct PolymerExpander { rules: HashMap<(char, char), char>, cache: HashMap<(char, char, usize), HashMap<char, usize>>, } impl PolymerExpander { fn new(rules: &HashMap<(char, char), char>) -> Self { Self { rules: rules.clone(), cache: rules .keys() .map(|&(a, b)| { let mut counts = HashMap::new(); counts.insert(b, 1); ((a, b, 0), counts) }) .collect(), } } fn expand_pair(&mut self, a: char, b: char, depth: usize) -> HashMap<char, usize> { if let Some(cached) = self.cache.get(&(a, b, depth)) { return cached.clone(); } let insertion = self.rules.get(&(a, b)).cloned().unwrap(); let left = self.expand_pair(a, insertion, depth - 1); let right = self.expand_pair(insertion, b, depth - 1); let mut counts = left; right .into_iter() .for_each(|(k, v)| *counts.entry(k).or_default() += v); self.cache.insert((a, b, depth), counts.clone()); counts } fn expand_template(&mut self, template: &str, depth: usize) -> HashMap<char, usize> { let mut counts = HashMap::new(); counts.insert(template.chars().next().unwrap(), 1); for (p, c) in template.chars().zip(template.chars().skip(1)) { self.expand_pair(p, c, depth) .into_iter() .for_each(|(k, v)| *counts.entry(k).or_default() += v); } counts } } fn part_a(template: &str, rules: &HashMap<(char, char), char>) -> usize { let mut polymer_expander = PolymerExpander::new(rules); let counts = polymer_expander.expand_template(template, 10); let most_common = counts.values().copied().max().unwrap(); let least_common = counts.values().copied().min().unwrap(); most_common - least_common } fn part_b(template: &str, rules: &H
fn parse_insertion_rule(rule: &str) -> Option<((char, char), char)> { let (pair, insertion) = rule.split_once(" -> ")?; if pair.len() != 2 || insertion.len() != 1 { return None; } Some(( (pair.chars().next()?, pair.chars().nth(1)?), insertion.chars().next()?, )) } pub fn main(path: &Path) -> Result<(usize, Option<usize>)> { let input = std::fs::read_to_string(path)?; let (template, rules_str) = input .split_once("\n\n") .ok_or_else(|| anyhow!("Unable to find insertion rules"))?; let rules = rules_str .lines() .map(|l| parse_insertion_rule(l).ok_or_else(|| anyhow!("{:?} is not a valid rule", l))) .collect::<Result<HashMap<(char, char), char>>>()?; Ok((part_a(template, &rules), Some(part_b(template, &rules)))) } #[cfg(test)] mod tests { use super::*; #[test] fn test_example() -> Result<()> { let template = "NNCB"; let mut rules = HashMap::new(); rules.insert(('C', 'H'), 'B'); rules.insert(('H', 'H'), 'N'); rules.insert(('C', 'B'), 'H'); rules.insert(('N', 'H'), 'C'); rules.insert(('H', 'B'), 'C'); rules.insert(('H', 'C'), 'B'); rules.insert(('H', 'N'), 'C'); rules.insert(('N', 'N'), 'C'); rules.insert(('B', 'H'), 'H'); rules.insert(('N', 'C'), 'B'); rules.insert(('N', 'B'), 'B'); rules.insert(('B', 'N'), 'B'); rules.insert(('B', 'B'), 'N'); rules.insert(('B', 'C'), 'B'); rules.insert(('C', 'C'), 'N'); rules.insert(('C', 'N'), 'C'); assert_eq!(part_a(template, &rules), 1588); assert_eq!(part_b(template, &rules), 2188189693529); Ok(()) } }
ashMap<(char, char), char>) -> usize { let mut polymer_expander = PolymerExpander::new(rules); let counts = polymer_expander.expand_template(template, 40); let most_common = counts.values().copied().max().unwrap(); let least_common = counts.values().copied().min().unwrap(); most_common - least_common }
function_block-function_prefixed
[ { "content": "fn count_ones<R: AsRef<str>>(report: impl Iterator<Item = R>) -> Result<Vec<usize>> {\n\n let mut iter = report.peekable();\n\n\n\n let num_digits = match iter.peek() {\n\n Some(line) => line.as_ref().len(),\n\n None => return Ok(Vec::new()),\n\n };\n\n\n\n let mut ones =...
Rust
src/inline_object/mod.rs
ngirard/directwrite-rs
c453b8a3793c0bfc4e7db0203674b4878b7cb56d
use crate::effects::ClientEffect; use crate::enums::BreakCondition; use crate::error::DWResult; use crate::factory::Factory; use crate::inline_object::custom::CustomInlineObject; use crate::metrics::overhang::OverhangMetrics; use crate::metrics::InlineObjectMetrics; use crate::text_format::TextFormat; use crate::text_renderer::DrawContext; use crate::text_renderer::TextRenderer; use checked_enum::UncheckedEnum; use com_wrapper::ComWrapper; use dcommon::helpers::unwrap_opt_com; use math2d::Point2f; use winapi::shared::winerror::SUCCEEDED; use winapi::um::dwrite::IDWriteInlineObject; use wio::com::ComPtr; pub mod custom; #[repr(transparent)] #[derive(Clone, ComWrapper)] #[com(send, sync)] pub struct InlineObject { ptr: ComPtr<IDWriteInlineObject>, } impl InlineObject { pub fn create_custom(object: impl CustomInlineObject) -> InlineObject { let ptr = custom::com_obj::ComInlineObject::new(object); unsafe { InlineObject::from_ptr(ptr) } } pub fn create_trimming_ellipsis( factory: &Factory, format: &TextFormat, ) -> DWResult<InlineObject> { unsafe { let mut ptr = std::ptr::null_mut(); let hr = (*factory.get_raw()).CreateEllipsisTrimmingSign(format.get_raw(), &mut ptr); if SUCCEEDED(hr) { Ok(InlineObject::from_raw(ptr)) } else { Err(hr.into()) } } } pub fn metrics(&self) -> InlineObjectMetrics { unsafe { let mut metrics = std::mem::zeroed(); self.ptr.GetMetrics(&mut metrics); metrics.into() } } pub fn overhang_metrics(&self) -> OverhangMetrics { unsafe { let mut metrics = std::mem::zeroed(); self.ptr.GetOverhangMetrics(&mut metrics); metrics.into() } } pub fn break_conditions(&self) -> BreakConditions { unsafe { let (mut before, mut after) = std::mem::zeroed(); self.ptr.GetBreakConditions(&mut before, &mut after); BreakConditions { preceding: before.into(), following: after.into(), } } } pub fn draw(&self, context: &DrawingContext) -> DWResult<()> { unsafe { let hr = self.ptr.Draw( context.client_context.ptr(), context.renderer.get_raw(), context.origin.x, 
context.origin.y, context.is_sideways as i32, context.is_right_to_left as i32, unwrap_opt_com(context.client_effect), ); if SUCCEEDED(hr) { Ok(()) } else { Err(hr.into()) } } } } pub struct BreakConditions { pub preceding: UncheckedEnum<BreakCondition>, pub following: UncheckedEnum<BreakCondition>, } pub struct DrawingContext<'a> { pub client_context: DrawContext, pub renderer: &'a mut TextRenderer, pub origin: Point2f, pub is_sideways: bool, pub is_right_to_left: bool, pub client_effect: Option<&'a ClientEffect>, }
use crate::effects::ClientEffect; use crate::enums::BreakCondition; use crate::error::DWResult; use crate::factory::Factory; use crate::inline_object::custom::CustomInlineObject; use crate::metrics::overhang::OverhangMetrics; use crate::metrics::InlineObjectMetrics; use crate::text_format::TextFormat; use crate::text_renderer::DrawContext; use crate::text_renderer::TextRenderer; use checked_enum::UncheckedEnum; use com_wrapper::ComWrapper; use dcommon::helpers::unwrap_opt_com; use math2d::Point2f; use winapi::shared::winerror::SUCCEEDED; use winapi::um::dwrite::IDWriteInlineObject; use wio::com::ComPtr; pub mod custom; #[repr(transparent)] #[derive(Clone, ComWrapper)] #[com(send, sync)] pub struct InlineObject { ptr: ComPtr<IDWriteInlineObject>, } impl InlineObject { pub fn create_custom(object: impl CustomInlineObject) -> InlineObject { let ptr = custom::com_obj::ComInlineObject::new(object); unsafe { InlineObject::from_ptr(ptr) } } pub fn create_trimming_ellipsis( factory: &Factory, format: &TextFormat, ) -> DWResult<InlineObject> { unsafe { let mut ptr = std::ptr::null_mut(); let hr = (*factory.get_raw()).CreateEllipsisTrimmingSign(format.get_raw(), &mut ptr); if SUCCEEDED(hr) { Ok(InlineObject::from_raw(ptr)) } else { Err(hr.into()) } } } pub fn metrics(&self) -> InlineObjectMetrics { unsafe { let mut metrics = std::mem::zeroed(); self.ptr.GetMetrics(&mut metrics); metrics.into() } } pub fn overhang_metrics(&self) -> OverhangMetrics { unsafe { let mut metrics = std::mem::zeroed(); self.ptr.GetOverhangMetrics(&mut metrics); metrics.into() } }
pub fn draw(&self, context: &DrawingContext) -> DWResult<()> { unsafe { let hr = self.ptr.Draw( context.client_context.ptr(), context.renderer.get_raw(), context.origin.x, context.origin.y, context.is_sideways as i32, context.is_right_to_left as i32, unwrap_opt_com(context.client_effect), ); if SUCCEEDED(hr) { Ok(()) } else { Err(hr.into()) } } } } pub struct BreakConditions { pub preceding: UncheckedEnum<BreakCondition>, pub following: UncheckedEnum<BreakCondition>, } pub struct DrawingContext<'a> { pub client_context: DrawContext, pub renderer: &'a mut TextRenderer, pub origin: Point2f, pub is_sideways: bool, pub is_right_to_left: bool, pub client_effect: Option<&'a ClientEffect>, }
pub fn break_conditions(&self) -> BreakConditions { unsafe { let (mut before, mut after) = std::mem::zeroed(); self.ptr.GetBreakConditions(&mut before, &mut after); BreakConditions { preceding: before.into(), following: after.into(), } } }
function_block-full_function
[ { "content": "/// Custom implementation of an inline text object in Rust.\n\npub trait CustomInlineObject: Send + Sync + 'static {\n\n /// Report metrics about your inline object to the runtime.\n\n fn metrics(&self) -> InlineObjectMetrics;\n\n\n\n /// Report your object's overhang values to the runtim...
Rust
rng/src/lib.rs
undecidedzogvisvitalispotent8stars360/stm32wl-hal
e0ecfee11c36be1a3ff1545113e0ac361a49572b
#![cfg_attr(not(test), no_std)] use core::{ mem::transmute, num::NonZeroU32, sync::atomic::{compiler_fence, Ordering::SeqCst}, }; pub use rand_core; cfg_if::cfg_if! { if #[cfg(feature = "stm32wl5x_cm0p")] { pub use stm32wl::stm32wl5x_cm0p as pac; } else if #[cfg(feature = "stm32wl5x_cm4")] { pub use stm32wl::stm32wl5x_cm4 as pac; } else if #[cfg(feature = "stm32wle5")] { pub use stm32wl::stm32wle5 as pac; } else { core::compile_error!("You must select your hardware with a feature flag"); } } #[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] pub enum Error { UncorrectableNoise, Clock, Timeout, } impl From<Error> for rand_core::Error { fn from(e: Error) -> Self { match e { Error::UncorrectableNoise => NonZeroU32::new(1).unwrap().into(), Error::Clock => NonZeroU32::new(2).unwrap().into(), Error::Timeout => NonZeroU32::new(3).unwrap().into(), } } } pub struct Rng { rng: pac::RNG, err_cnt: u32, } impl Rng { pub fn new(rng: pac::RNG, rcc: &mut pac::RCC) -> Rng { rcc.ahb3rstr.modify(|_, w| w.rngrst().set_bit()); rcc.ahb3rstr.modify(|_, w| w.rngrst().clear_bit()); #[rustfmt::skip] rng.cr.write(|w| unsafe { w .condrst().set_bit() .nistc().set_bit() .rng_config1().bits(0x0F) .clkdiv().bits(0x0) .rng_config2().bits(0x0) .rng_config3().bits(0xD) .ced().clear_bit() .ie().clear_bit() .rngen().set_bit() }); #[rustfmt::skip] rng.cr.write(|w| unsafe { w .condrst().clear_bit() .nistc().set_bit() .rng_config1().bits(0x0F) .clkdiv().bits(0x0) .rng_config2().bits(0x0) .rng_config3().bits(0xD) .ced().clear_bit() .ie().clear_bit() .rngen().set_bit() }); while rng.cr.read().condrst().bit_is_set() { compiler_fence(SeqCst); } Rng { rng, err_cnt: 0 } } pub fn free(self) -> pac::RNG { self.rng } pub unsafe fn steal() -> Rng { let dp: pac::Peripherals = pac::Peripherals::steal(); Rng { rng: dp.RNG, err_cnt: 0, } } pub fn noise_error_stat(&self) -> u32 { self.err_cnt } pub fn rest_noise_error_stat(&mut self) { self.err_cnt = 0 } fn wait_for_new_entropy(&mut self) -> Result<(), Error> { let mut 
timeout: u32 = 0; loop { let sr = self.rng.sr.read(); if sr.seis().bit_is_set() { self.recover_from_noise_error()?; } else if sr.ceis().bit_is_set() { return Err(Error::Clock); } else if sr.drdy().bit_is_set() { return Ok(()); } timeout = timeout.saturating_add(1); if timeout > 100_000 { return Err(Error::Timeout); } } } pub fn try_fill_u32(&mut self, dest: &mut [u32]) -> Result<(), Error> { for chunk in dest.chunks_mut(4) { self.wait_for_new_entropy()?; for dw in chunk { *dw = self.rng.dr.read().bits(); } } Ok(()) } fn recover_from_noise_error(&mut self) -> Result<(), Error> { self.rng.cr.modify(|_, w| w.condrst().set_bit()); self.rng.cr.modify(|_, w| w.condrst().clear_bit()); while self.rng.cr.read().condrst().bit_is_set() { compiler_fence(SeqCst); } loop { let sr = self.rng.sr.read(); if sr.seis().bit_is_set() { return Err(Error::UncorrectableNoise); } if sr.secs().bit_is_clear() { self.err_cnt = self.err_cnt.saturating_add(1); return Ok(()); } } } } impl rand_core::RngCore for Rng { fn next_u32(&mut self) -> u32 { let mut bytes: [u8; 4] = [0; 4]; self.try_fill_bytes(&mut bytes).unwrap(); u32::from_le_bytes(bytes) } fn next_u64(&mut self) -> u64 { let mut bytes: [u8; 8] = [0; 8]; self.try_fill_bytes(&mut bytes).unwrap(); u64::from_le_bytes(bytes) } fn fill_bytes(&mut self, dest: &mut [u8]) { self.try_fill_bytes(dest).unwrap() } fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), rand_core::Error> { for chunk in dest.chunks_mut(16) { self.wait_for_new_entropy()?; let mut block: [u32; 4] = [0; 4]; self.try_fill_u32(&mut block)?; let data: [u8; 16] = unsafe { transmute::<[u32; 4], [u8; 16]>(block) }; chunk .iter_mut() .zip(data.iter()) .for_each(|(buffer_byte, &data_byte)| *buffer_byte = data_byte); } Ok(()) } } impl rand_core::CryptoRng for Rng {}
#![cfg_attr(not(test), no_std)] use core::{ mem::transmute, num::NonZeroU32, sync::atomic::{compiler_fence, Ordering::SeqCst}, }; pub use rand_core; cfg_if::cfg_if! { if #[cfg(feature = "stm32wl5x_cm0p")] { pub use stm32wl::stm32wl5x_cm0p as pac; } else if #[cfg(feature = "stm32wl5x_cm4")] { pub use stm32wl::stm32wl5x_cm4 as pac; } else if #[cfg(feature = "stm32wle5")] { pub use stm32wl::stm32wle5 as pac; } else { core::compile_error!("You must select your hardware with a feature flag"); } } #[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] pub enum Error { UncorrectableNoise, Clock, Timeout, } impl From<Error> for rand_core::Error { fn from(e: Error) -> Self { match e { Error::UncorrectableNoise => NonZeroU32::new(1).unwrap().into(), Error::Clock => NonZeroU32::new(2).unwrap().into(), Error::Timeout => NonZeroU32::new(3).unwrap().into(), } } } pub struct Rng { rng: pac::RNG, err_cnt: u32, } impl Rng { pub fn new(rng: pac::RNG, rcc: &mut pac::RCC) -> Rng { rcc.ahb3rstr.modify(|_, w| w.rngrst().set_bit()); rcc.ahb3rstr.modify(|_, w| w.rngrst().clear_bit()); #[rustfmt::skip] rng.cr.write(|w| unsafe { w .condrst().set_bit() .nistc().set_bit() .rng_config1().bits(0x0F) .clkdiv().bits(0x0) .rng_config2().bits(0x0) .rng_config3().bits(0xD) .ced().clear_bit() .ie().clear_bit() .rngen().set_bit() }); #[rustfmt::skip] rng.cr.write(|w| unsafe { w .condrst().clear_bit() .nistc().set_bit() .rng_config1().bits(0x0F) .clkdiv().bits(0x0) .rng_config2().bits(0x0) .rng_config3().bits(0xD) .ced().clear_bit() .ie().clear_bit() .rngen().set_bit() }); while rng.cr.read().condrst().bit_is_set() { compiler_fence(SeqCst); } Rng { rng, err_cnt: 0 } } pub fn free(self) -> pac::RNG { self.rng } pub unsafe fn steal() -> Rng { let dp: pac::Peripherals = pac::Peripherals::steal(); Rng { rng: dp.RNG, err_cnt: 0, } } pub fn noise_error_stat(&self) -> u32 { self.err_cnt } pub fn rest_noise_error_stat(&mut self) { self.err_cnt = 0 } fn wait_for_new_entropy(&mut self) -> Result<(), Error> { let mut 
timeout: u32 = 0; loop { let sr = self.rng.sr.read(); if sr.seis().bit_is_set() { self.recover_from_noise_error()?; } else if sr.ceis().bit_is_set() { return Err(Error::Clock); } else if sr.drdy().bit_is_set() { return Ok(()); } timeout = timeout.saturating_add(1); if timeout > 100_000 { return Err(Error::Timeout); } } } pub fn try_fill_u32(&mut self, dest: &mut [u32]) -> Result<(), Error> { for chunk in dest.chunks_mut(4) {
fn recover_from_noise_error(&mut self) -> Result<(), Error> { self.rng.cr.modify(|_, w| w.condrst().set_bit()); self.rng.cr.modify(|_, w| w.condrst().clear_bit()); while self.rng.cr.read().condrst().bit_is_set() { compiler_fence(SeqCst); } loop { let sr = self.rng.sr.read(); if sr.seis().bit_is_set() { return Err(Error::UncorrectableNoise); } if sr.secs().bit_is_clear() { self.err_cnt = self.err_cnt.saturating_add(1); return Ok(()); } } } } impl rand_core::RngCore for Rng { fn next_u32(&mut self) -> u32 { let mut bytes: [u8; 4] = [0; 4]; self.try_fill_bytes(&mut bytes).unwrap(); u32::from_le_bytes(bytes) } fn next_u64(&mut self) -> u64 { let mut bytes: [u8; 8] = [0; 8]; self.try_fill_bytes(&mut bytes).unwrap(); u64::from_le_bytes(bytes) } fn fill_bytes(&mut self, dest: &mut [u8]) { self.try_fill_bytes(dest).unwrap() } fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), rand_core::Error> { for chunk in dest.chunks_mut(16) { self.wait_for_new_entropy()?; let mut block: [u32; 4] = [0; 4]; self.try_fill_u32(&mut block)?; let data: [u8; 16] = unsafe { transmute::<[u32; 4], [u8; 16]>(block) }; chunk .iter_mut() .zip(data.iter()) .for_each(|(buffer_byte, &data_byte)| *buffer_byte = data_byte); } Ok(()) } } impl rand_core::CryptoRng for Rng {}
self.wait_for_new_entropy()?; for dw in chunk { *dw = self.rng.dr.read().bits(); } } Ok(()) }
function_block-function_prefix_line
[ { "content": "fn hclk3_prescaler_div(rcc: &pac::RCC) -> u16 {\n\n match rcc.extcfgr.read().shdhpre().bits() {\n\n 0b0001 => 3,\n\n 0b0010 => 5,\n\n 0b0101 => 6,\n\n 0b0110 => 10,\n\n 0b0111 => 32,\n\n 0b1000 => 2,\n\n 0b1001 => 4,\n\n 0b1010 => 8,\n\n ...
Rust
src/process/thread.rs
maroider/winproc-rs
a702364bcd96b2508a9ff8b4e607ea7f49a01d0c
use crate::{Error, Handle, Process, WinResult}; use std::{ mem, ops::Deref, os::windows::io::{AsRawHandle, FromRawHandle, IntoRawHandle, RawHandle}, }; use winapi::{ shared::{ basetsd::{DWORD_PTR, ULONG64}, minwindef::DWORD, }, um::{ processthreadsapi::{ GetCurrentThread, GetThreadId, GetThreadIdealProcessorEx, GetThreadPriority, OpenThread, ResumeThread, SetThreadIdealProcessor, SetThreadPriority, SuspendThread, TerminateThread, }, realtimeapiset::QueryThreadCycleTime, tlhelp32::{Thread32Next, THREADENTRY32}, winbase::{ SetThreadAffinityMask, THREAD_MODE_BACKGROUND_BEGIN, THREAD_MODE_BACKGROUND_END, THREAD_PRIORITY_ABOVE_NORMAL, THREAD_PRIORITY_BELOW_NORMAL, THREAD_PRIORITY_HIGHEST, THREAD_PRIORITY_IDLE, THREAD_PRIORITY_LOWEST, THREAD_PRIORITY_NORMAL, THREAD_PRIORITY_TIME_CRITICAL, }, winnt::{self, PROCESSOR_NUMBER, THREAD_ALL_ACCESS}, }, }; #[derive(Debug)] pub struct Thread { handle: Handle, } impl Thread { pub fn from_id(id: u32) -> WinResult<Thread> { unsafe { let handle = OpenThread(THREAD_ALL_ACCESS, 0, id); if handle.is_null() { Err(Error::last_os_error()) } else { Ok(Thread { handle: Handle::new(handle), }) } } } pub fn current() -> Thread { unsafe { Thread { handle: Handle::from_raw_handle(GetCurrentThread() as RawHandle), } } } pub fn handle(&self) -> &Handle { &self.handle } pub fn id(&self) -> u32 { unsafe { GetThreadId(self.handle.as_raw_handle() as winnt::HANDLE) } } pub fn cycle_time(&self) -> WinResult<u64> { unsafe { let mut cycles: ULONG64 = 0; let ret = QueryThreadCycleTime(self.handle.as_raw_handle() as winnt::HANDLE, &mut cycles); if ret == 0 { Err(Error::last_os_error()) } else { Ok(cycles as u64) } } } pub fn priority(&self) -> WinResult<PriorityLevel> { unsafe { let ret = GetThreadPriority(self.handle.as_raw_handle() as winnt::HANDLE); if ret == 0 { Err(Error::last_os_error()) } else { Ok(PriorityLevel::from_code(ret as _)) } } } pub fn set_priority(&mut self, priority: PriorityLevel) -> WinResult { unsafe { let ret = SetThreadPriority( 
self.handle.as_raw_handle() as winnt::HANDLE, priority.as_code() as _, ); if ret == 0 { Err(Error::last_os_error()) } else { Ok(()) } } } pub fn start_background_mode(&mut self) -> WinResult { unsafe { let ret = SetThreadPriority( self.handle.as_raw_handle() as winnt::HANDLE, THREAD_MODE_BACKGROUND_BEGIN as _, ); if ret == 0 { Err(Error::last_os_error()) } else { Ok(()) } } } pub fn end_background_mode(&mut self) -> WinResult { unsafe { let ret = SetThreadPriority( self.handle.as_raw_handle() as winnt::HANDLE, THREAD_MODE_BACKGROUND_END as _, ); if ret == 0 { Err(Error::last_os_error()) } else { Ok(()) } } } pub fn suspend(&mut self) -> WinResult<u32> { unsafe { let ret = SuspendThread(self.handle.as_raw_handle() as winnt::HANDLE); if ret == u32::max_value() { Err(Error::last_os_error()) } else { Ok(ret) } } } pub fn resume(&mut self) -> WinResult<u32> { unsafe { let ret = ResumeThread(self.handle.as_raw_handle() as winnt::HANDLE); if ret == u32::max_value() { Err(Error::last_os_error()) } else { Ok(ret) } } } pub fn terminate(&mut self, exit_code: u32) -> WinResult { unsafe { let ret = TerminateThread(self.handle.as_raw_handle() as winnt::HANDLE, exit_code); if ret == 0 { Err(Error::last_os_error()) } else { Ok(()) } } } pub fn ideal_processor(&self) -> WinResult<u32> { unsafe { let mut ideal: PROCESSOR_NUMBER = mem::zeroed(); let ret = GetThreadIdealProcessorEx(self.handle.as_raw_handle() as winnt::HANDLE, &mut ideal); if ret == 0 { Err(Error::last_os_error()) } else { Ok(ideal.Number as u32) } } } pub fn set_ideal_processor(&mut self, processor: u32) -> WinResult<u32> { unsafe { let ret = SetThreadIdealProcessor( self.handle.as_raw_handle() as winnt::HANDLE, processor as DWORD, ); if ret == DWORD::max_value() { Err(Error::last_os_error()) } else { Ok(ret) } } } pub fn affinity_mask(&self) -> WinResult<usize> { unsafe { let affinity = SetThreadAffinityMask( self.handle.as_raw_handle() as winnt::HANDLE, DWORD_PTR::max_value(), ); if affinity == 0 { 
Err(Error::last_os_error()) } else { let ret = SetThreadAffinityMask(self.handle.as_raw_handle() as winnt::HANDLE, affinity); if ret == 0 { Err(Error::last_os_error()) } else { Ok(affinity) } } } } pub fn set_affinity_mask(&mut self, mask: usize) -> WinResult<usize> { unsafe { let ret = SetThreadAffinityMask( self.handle.as_raw_handle() as winnt::HANDLE, mask as DWORD_PTR, ); if ret == 0 { Err(Error::last_os_error()) } else { Ok(ret) } } } pub fn set_affinity(&mut self, processor: u8) -> WinResult<usize> { let processor = processor as usize; if processor >= mem::size_of::<usize>() * 8 { self.affinity_mask() } else { self.set_affinity_mask(1 << processor) } } } impl AsRawHandle for Thread { fn as_raw_handle(&self) -> RawHandle { self.handle.as_raw_handle() } } impl Deref for Thread { type Target = winnt::HANDLE; fn deref(&self) -> &winnt::HANDLE { &*self.handle } } impl FromRawHandle for Thread { unsafe fn from_raw_handle(handle: RawHandle) -> Thread { Thread { handle: Handle::new(handle as winnt::HANDLE), } } } impl IntoRawHandle for Thread { fn into_raw_handle(self) -> RawHandle { self.handle.into_raw_handle() } } #[derive(Debug)] pub struct ThreadIter<'a> { pub(crate) process: &'a Process, pub(crate) snapshot: Handle, } impl<'a> Iterator for ThreadIter<'a> { type Item = WinResult<Thread>; fn next(&mut self) -> Option<WinResult<Thread>> { unsafe { loop { let mut entry: THREADENTRY32 = mem::zeroed(); entry.dwSize = mem::size_of::<THREADENTRY32>() as DWORD; let ret = Thread32Next(self.snapshot.as_raw_handle() as winnt::HANDLE, &mut entry); if ret == 0 { return None; } else { if entry.th32OwnerProcessID == self.process.id() { return Some(Thread::from_id(entry.th32ThreadID)); } } } } } } #[derive(Debug)] pub struct ThreadIdIter<'a> { pub(crate) process: &'a Process, pub(crate) snapshot: Handle, } impl<'a> Iterator for ThreadIdIter<'a> { type Item = u32; fn next(&mut self) -> Option<u32> { unsafe { loop { let mut entry: THREADENTRY32 = mem::zeroed(); entry.dwSize = 
mem::size_of::<THREADENTRY32>() as DWORD; let ret = Thread32Next(self.snapshot.as_raw_handle() as winnt::HANDLE, &mut entry); if ret == 0 { return None; } else { if entry.th32OwnerProcessID == self.process.id() { return Some(entry.th32ThreadID); } } } } } } #[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)] pub enum PriorityLevel { Idle, Lowest, BelowNormal, Normal, AboveNormal, Highest, TimeCritical, } impl PriorityLevel { fn from_code(code: DWORD) -> PriorityLevel { match code { THREAD_PRIORITY_IDLE => PriorityLevel::Idle, THREAD_PRIORITY_LOWEST => PriorityLevel::Lowest, THREAD_PRIORITY_BELOW_NORMAL => PriorityLevel::BelowNormal, THREAD_PRIORITY_NORMAL => PriorityLevel::Normal, THREAD_PRIORITY_ABOVE_NORMAL => PriorityLevel::AboveNormal, THREAD_PRIORITY_HIGHEST => PriorityLevel::Highest, THREAD_PRIORITY_TIME_CRITICAL => PriorityLevel::TimeCritical, _ => panic!("Unexpected priority code: {}", code), } } fn as_code(&self) -> DWORD { match self { PriorityLevel::Idle => THREAD_PRIORITY_IDLE, PriorityLevel::Lowest => THREAD_PRIORITY_LOWEST, PriorityLevel::BelowNormal => THREAD_PRIORITY_BELOW_NORMAL, PriorityLevel::Normal => THREAD_PRIORITY_NORMAL, PriorityLevel::AboveNormal => THREAD_PRIORITY_ABOVE_NORMAL, PriorityLevel::Highest => THREAD_PRIORITY_HIGHEST, PriorityLevel::TimeCritical => THREAD_PRIORITY_TIME_CRITICAL, } } } impl Default for PriorityLevel { fn default() -> PriorityLevel { PriorityLevel::Normal } }
use crate::{Error, Handle, Process, WinResult}; use std::{ mem, ops::Deref, os::windows::io::{AsRawHandle, FromRawHandle, IntoRawHandle, RawHandle}, }; use winapi::{ shared::{ basetsd::{DWORD_PTR, ULONG64}, minwindef::DWORD, }, um::{ processthreadsapi::{ GetCurrentThread, GetThreadId, GetThreadIdealProcessorEx, GetThreadPriority, OpenThread, ResumeThread, SetThreadIdealProcessor, SetThreadPriority, SuspendThread, TerminateThread, }, realtimeapiset::QueryThreadCycleTime, tlhelp32::{Thread32Next, THREADENTRY32}, winbase::{ SetThreadAffinityMask, THREAD_MODE_BACKGROUND_BEGIN, THREAD_MODE_BACKGROUND_END, THREAD_PRIORITY_ABOVE_NORMAL, THREAD_PRIORITY_BELOW_NORMAL, THREAD_PRIORITY_HIGHEST, THREAD_PRIORITY_IDLE, THREAD_PRIORITY_LOWEST, THREAD_PRIORITY_NORMAL, THREAD_PRIORITY_TIME_CRITICAL, }, winnt::{self, PROCESSOR_NUMBER, THREAD_ALL_ACCESS}, }, }; #[derive(Debug)] pub struct Thread { handle: Handle, } impl Thread { pub fn from_id(id: u32) -> WinResult<Thread> { unsafe { let handle = OpenThread(THREAD_ALL_ACCESS, 0, id); if handle.is_null() { Err(Error::last_os_error()) } else { Ok(Thread { handle: Handle::new(handle), }) } } } pub fn current() -> Thread { unsafe { Thread { handle: Handle::from_raw_handle(GetCurrentThread() as RawHandle), } } } pub fn handle(&self) -> &Handle { &self.handle } pub fn id(&self) -> u32 { unsafe { GetThreadId(self.handle.as_raw_handle() as winnt::HANDLE) } } pub fn cycle_time(&self) -> WinResult<u64> { unsafe { let mut cycles: ULONG64 = 0; let ret = QueryThreadCycleTime(self.handle.as_raw_handle() as winnt::HANDLE, &mut cycles); if ret == 0 { Err(Error::last_os_error()) } else { Ok(cycles as u64) } } } pub fn priority(&self) -> WinResult<PriorityLevel> { unsafe { let ret = GetThreadPriority(self.handle.as_raw_handle() as winnt::HANDLE); if ret == 0 { Err(Error::last_os_error()) } else { Ok(PriorityLevel::from_code(ret as _)) } } } pub fn set_priority(&mut self, priority: PriorityLevel) -> WinResult { unsafe { let ret = SetThreadPriority( 
self.handle.as_raw_handle() as winnt::HANDLE, priority.as_code() as _, ); if ret == 0 { Err(Error::last_os_error()) } else { Ok(()) } } } pub fn start_background_mode(&mut self) -> WinResult { unsafe { let ret = SetThreadPriority( self.handle.as_raw_handle() as winnt::HANDLE, THREAD_MODE_BACKGROUND_BEGIN as _, ); if ret == 0 { Err(Error::last_os_error()) } else { Ok(()) } } } pub fn end_background_mode(&mut self) -> WinResult { unsafe { let ret = SetThreadPriority( self.handle.as_raw_handle() as winnt::HANDLE, THREAD_MODE_BACKGROUND_END as _, ); if ret == 0 { Err(Error::last_os_error()) } else { Ok(()) } } } pub fn suspend(&mut self) -> WinResult<u32> { unsafe { let ret = SuspendThread(self.handle.as_raw_handle() as winnt::HANDLE); if ret == u32::max_value() { Err(Error::last_os_error()) } else { Ok(ret) } } } pub fn resume(&mut self) -> WinResult<u32> { unsafe { let ret = ResumeThread(self.handle.as_raw_handle() as winnt::HANDLE); if ret == u32::max_value() { Err(Error::last_os_error()) } else { Ok(ret) } } } pub fn terminate(&mut self, exit_code: u32) -> WinResult { unsafe { let ret = TerminateThread(self.handle.as_raw_handle() as winnt::HANDLE, exit_code); if ret == 0 { Err(Error::last_os_error()) } else { Ok(()) } } } pub fn ideal_processor(&self) -> WinResult<u32> { unsafe { let mut ideal: PROCESSOR_NUMBER = mem::zeroed(); let ret = GetThreadIdealProcessorEx(self.handle.as_raw_handle() as winnt::HANDLE, &mut ideal); if ret == 0 { Err(Error::last_os_error()) } else { Ok(ideal.Number as u32) } } } pub fn set_ideal_processor(&mut self, processor: u32) -> WinResult<u32> { unsafe { let ret = SetThreadIdealProcessor( self.handle.as_raw_handle() as winnt::HANDLE, processor as DWORD, ); if ret == DWORD::max_value() { Err(Error::last_os_error()) } else { Ok(ret) } } } pub fn affinity_mask(&self) -> WinResult<usize> { unsafe { let affinity = SetThreadAffinityMask( self.handle.as_raw_handle() as winnt::HANDLE, DWORD_PTR::max_value(), ); if affinity == 0 { 
Err(Error::last_os_error()) } else { let ret = SetThreadAffinityMask(self.handle.as_raw_handle() as winnt::HANDLE, affinity); if ret == 0 { Err(Error::last_os_error()) } else { Ok(affinity) } } } } pub fn set_affinity_mask(&mut self, mask: usize) -> WinResult<usize> { unsafe { let ret = SetThreadAffinityMask( self.handle.as_raw_handle() as winnt::HANDLE, mask as DWORD_PTR, ); if ret == 0 { Err(Error::last_os_error()) } else { Ok(ret) } } } pub fn set_affinity(&mut self, processor: u8) -> WinResult<usize> { let processor = processor as usize; if processor >= mem::size_of::<usize>() * 8 { self.affinity_mask() } else { self.set_affinity_mask(1 << processor) } } } impl AsRawHandle for Thread { fn as_raw_handle(&self) -> RawHandle { self.handle.as_raw_handle() } } impl Deref for Thread { type Target = winnt::HANDLE; fn deref(&self) -> &winnt::HANDLE { &*self.handle } } impl FromRawHandle for Thread { unsafe fn from_raw_handle(handle: RawHandle) -> Thread { Thread { handle: Handle::new(handle as winnt::HANDLE), } } } impl IntoRawHandle for Thread { fn into_raw_handle(self) -> RawHandle { self.handle.into_raw_handle() } } #[derive(Debug)] pub struct ThreadIter<'a> { pub(crate) process: &'a Process, pub(crate) snapshot: Handle, } impl<'a> Iterator for ThreadIter<'a> { type Item = WinResult<Thread>; fn next(&mut self) -> Option<WinResult<Thread>> { unsafe { loop { let mut entry: THREADENTRY32 = mem::zeroed(); entry.dwSize = mem::size_of::<THREADENTRY32>() as DWORD; let ret = Thread32Next(self.snapshot.as_raw_handle() as winnt::HANDLE, &mut entry); if ret == 0 { return None; } else { if entry.th32OwnerProcessID == self.process.id() { return Some(Thread::from_id(entry.th32ThreadID)); } } } } } } #[derive(Debug)] pub struct ThreadIdIter<'a> { pub(crate) process: &'a Process, pub(crate) snapshot: Handle, } impl<'a> Iterator for ThreadIdIter<'a> { type Item = u32; fn next(&mut self) -> Option<u32> { unsafe { loop { let mut entry: THREADENTRY32 = mem::zeroed(); entry.dwSize = 
mem::size_of::<THREADENTRY32>() as DWORD; let ret = Thread32Next(self.snapshot.as_raw_handle() as winnt::HANDLE, &mut entry); if ret == 0 { return None; } else { if entry.th32OwnerProcessID == self.process.id() { return Some(entry.th32ThreadID); } } } } } } #[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)] pub enum PriorityLevel { Idle, Lowest, BelowNormal, Normal, AboveNormal, Highest, TimeCritical, } impl PriorityLevel { fn from_code(code: DWORD) -> PriorityLevel { match code { THREAD_PRIORITY_IDLE => PriorityLevel::Idle, THREAD_PRIORITY_LOWEST => PriorityLevel::Lowest, THREAD_PRIORITY_BELOW_NORMAL => PriorityLevel::BelowNormal, THREAD_PRIORITY_NORMAL => PriorityLevel::Normal, THREAD_PRIORITY_ABOVE_NORMAL => PriorityLevel::AboveNormal, THREAD_PRIORITY_HIGHEST => PriorityLevel::Highest, THREAD_PRIORITY_TIME_CRITICAL => PriorityLevel::TimeCritical, _ => panic!("Unexpected priority code: {}", code), } } fn as_code(&self) -> DWORD { match self { PriorityLevel::Idle => THREAD_PRIORITY_IDLE, P
} impl Default for PriorityLevel { fn default() -> PriorityLevel { PriorityLevel::Normal } }
riorityLevel::Lowest => THREAD_PRIORITY_LOWEST, PriorityLevel::BelowNormal => THREAD_PRIORITY_BELOW_NORMAL, PriorityLevel::Normal => THREAD_PRIORITY_NORMAL, PriorityLevel::AboveNormal => THREAD_PRIORITY_ABOVE_NORMAL, PriorityLevel::Highest => THREAD_PRIORITY_HIGHEST, PriorityLevel::TimeCritical => THREAD_PRIORITY_TIME_CRITICAL, } }
function_block-function_prefixed
[ { "content": "#[derive(Debug)]\n\nstruct ProcessIter {\n\n snapshot: Handle,\n\n access: Access,\n\n}\n\n\n\nimpl Iterator for ProcessIter {\n\n type Item = WinResult<Process>;\n\n\n\n fn next(&mut self) -> Option<WinResult<Process>> {\n\n unsafe {\n\n let mut entry: PROCESSENTRY32...
Rust
crates/storage/src/oauth2/access_token.rs
matrix-org/matrix-authentication-service
0c2950a160e4a831c4d18047412d007df06909ec
use anyhow::Context; use chrono::{DateTime, Duration, Utc}; use mas_data_model::{ AccessToken, Authentication, BrowserSession, Client, Session, User, UserEmail, }; use sqlx::PgExecutor; use thiserror::Error; use crate::{DatabaseInconsistencyError, IdAndCreationTime, PostgresqlBackend}; pub async fn add_access_token( executor: impl PgExecutor<'_>, session: &Session<PostgresqlBackend>, token: &str, expires_after: Duration, ) -> anyhow::Result<AccessToken<PostgresqlBackend>> { let expires_after_seconds = i32::try_from(expires_after.num_seconds()).unwrap_or(i32::MAX); let res = sqlx::query_as!( IdAndCreationTime, r#" INSERT INTO oauth2_access_tokens (oauth2_session_id, token, expires_after) VALUES ($1, $2, $3) RETURNING id, created_at "#, session.data, token, expires_after_seconds, ) .fetch_one(executor) .await .context("could not insert oauth2 access token")?; Ok(AccessToken { data: res.id, expires_after, token: token.to_string(), jti: format!("{}", res.id), created_at: res.created_at, }) } #[derive(Debug)] pub struct OAuth2AccessTokenLookup { access_token_id: i64, access_token: String, access_token_expires_after: i32, access_token_created_at: DateTime<Utc>, session_id: i64, client_id: String, scope: String, user_session_id: i64, user_session_created_at: DateTime<Utc>, user_id: i64, user_username: String, user_session_last_authentication_id: Option<i64>, user_session_last_authentication_created_at: Option<DateTime<Utc>>, user_email_id: Option<i64>, user_email: Option<String>, user_email_created_at: Option<DateTime<Utc>>, user_email_confirmed_at: Option<DateTime<Utc>>, } #[derive(Debug, Error)] #[error("failed to lookup access token")] pub enum AccessTokenLookupError { Database(#[from] sqlx::Error), Inconsistency(#[from] DatabaseInconsistencyError), } impl AccessTokenLookupError { #[must_use] pub fn not_found(&self) -> bool { matches!(self, Self::Database(sqlx::Error::RowNotFound)) } } pub async fn lookup_active_access_token( executor: impl PgExecutor<'_>, token: &str, 
) -> Result<(AccessToken<PostgresqlBackend>, Session<PostgresqlBackend>), AccessTokenLookupError> { let res = sqlx::query_as!( OAuth2AccessTokenLookup, r#" SELECT at.id AS "access_token_id", at.token AS "access_token", at.expires_after AS "access_token_expires_after", at.created_at AS "access_token_created_at", os.id AS "session_id!", os.client_id AS "client_id!", os.scope AS "scope!", us.id AS "user_session_id!", us.created_at AS "user_session_created_at!", u.id AS "user_id!", u.username AS "user_username!", usa.id AS "user_session_last_authentication_id?", usa.created_at AS "user_session_last_authentication_created_at?", ue.id AS "user_email_id?", ue.email AS "user_email?", ue.created_at AS "user_email_created_at?", ue.confirmed_at AS "user_email_confirmed_at?" FROM oauth2_access_tokens at INNER JOIN oauth2_sessions os ON os.id = at.oauth2_session_id INNER JOIN user_sessions us ON us.id = os.user_session_id INNER JOIN users u ON u.id = us.user_id LEFT JOIN user_session_authentications usa ON usa.session_id = us.id LEFT JOIN user_emails ue ON ue.id = u.primary_email_id WHERE at.token = $1 AND at.created_at + (at.expires_after * INTERVAL '1 second') >= now() AND us.active AND os.ended_at IS NULL ORDER BY usa.created_at DESC LIMIT 1 "#, token, ) .fetch_one(executor) .await?; let access_token = AccessToken { data: res.access_token_id, jti: format!("{}", res.access_token_id), token: res.access_token, created_at: res.access_token_created_at, expires_after: Duration::seconds(res.access_token_expires_after.into()), }; let client = Client { data: (), client_id: res.client_id, }; let primary_email = match ( res.user_email_id, res.user_email, res.user_email_created_at, res.user_email_confirmed_at, ) { (Some(id), Some(email), Some(created_at), confirmed_at) => Some(UserEmail { data: id, email, created_at, confirmed_at, }), (None, None, None, None) => None, _ => return Err(DatabaseInconsistencyError.into()), }; let user = User { data: res.user_id, username: res.user_username, 
sub: format!("fake-sub-{}", res.user_id), primary_email, }; let last_authentication = match ( res.user_session_last_authentication_id, res.user_session_last_authentication_created_at, ) { (None, None) => None, (Some(id), Some(created_at)) => Some(Authentication { data: id, created_at, }), _ => return Err(DatabaseInconsistencyError.into()), }; let browser_session = BrowserSession { data: res.user_session_id, created_at: res.user_session_created_at, user, last_authentication, }; let scope = res.scope.parse().map_err(|_e| DatabaseInconsistencyError)?; let session = Session { data: res.session_id, client, browser_session, scope, }; Ok((access_token, session)) } pub async fn revoke_access_token( executor: impl PgExecutor<'_>, access_token: &AccessToken<PostgresqlBackend>, ) -> anyhow::Result<()> { let res = sqlx::query!( r#" DELETE FROM oauth2_access_tokens WHERE id = $1 "#, access_token.data, ) .execute(executor) .await .context("could not revoke access tokens")?; if res.rows_affected() == 1 { Ok(()) } else { Err(anyhow::anyhow!("no row were affected when revoking token")) } } pub async fn cleanup_expired(executor: impl PgExecutor<'_>) -> anyhow::Result<u64> { let res = sqlx::query!( r#" DELETE FROM oauth2_access_tokens WHERE created_at + (expires_after * INTERVAL '1 second') + INTERVAL '15 minutes' < now() "#, ) .execute(executor) .await .context("could not cleanup expired access tokens")?; Ok(res.rows_affected()) }
use anyhow::Context; use chrono::{DateTime, Duration, Utc}; use mas_data_model::{ AccessToken, Authentication, BrowserSession, Client, Session, User, UserEmail, }; use sqlx::PgExecutor; use thiserror::Error; use crate::{DatabaseInconsistencyError, IdAndCreationTime, PostgresqlBackend}; pub async fn add_access_token( executor: impl PgExecutor<'_>, session: &Session<PostgresqlBackend>, token: &str, expires_after: Duration, ) -> anyhow::Result<AccessToken<PostgresqlBackend>> { let expires_after_seconds = i32::try_from(expires_after.num_seconds()).unwrap_or(i32::MAX); let res = sqlx::query_as!( IdAndCreationTime, r#" INSERT INTO oauth2_access_tokens (oauth2_session_id, token, expires_after) VALUES ($1, $2, $3) RETURNING id, created_at "#, session.data, token, expires_after_seconds, ) .fetch_one(executor) .await .context("could not insert oauth2 access token")?; Ok(AccessToken { data: res.id, expires_after, token: token.to_string(), jti: format!("{}", res.id), created_at: res.created_at, }) } #[derive(Debug)] pub struct OAuth2AccessTokenLookup { access_token_id: i64, access_token: String, access_token_expires_after: i32, access_token_created_at: DateTime<Utc>, session_id: i64, client_id: String, scope: String, user_session_id: i64, user_session_created_at: DateTime<Utc>, user_id: i64, user_username: String, user_session_last_authentication_id: Option<i64>, user_session_last_authentication_created_at: Option<DateTime<Utc>>, user_email_id: Option<i64>, user_email: Option<String>, user_email_created_at: Option<DateTime<Utc>>, user_email_confirmed_at: Option<DateTime<Utc>>, } #[derive(Debug, Error)] #[error("failed to lookup access token")] pub enum AccessTokenLookupError { Database(#[from] sqlx::Error), Inconsistency(#[from] DatabaseInconsistencyError), } impl AccessTokenLookupError { #[must_use] pub fn not_found(&self) -> bool { matches!(self, Self::Database(sqlx::Error::RowNotFound)) } } pub async fn lookup_active_access_token( executor: impl PgExecutor<'_>, token: &str, 
) -> Result<(AccessToken<PostgresqlBackend>, Session<PostgresqlBackend>), AccessTokenLookupError> { let res = sqlx::query_as!( OAuth2AccessTokenLookup, r#" SELECT at.id AS "access_token_id", at.token AS "access_token", at.expires_after AS "access_token_expires_after", at.created_at AS "access_token_created_at", os.id AS "session_id!", os.client_id AS "client_id!", os.scope AS "scope!", us.id AS "user_session_id!", us.created_at AS "user_session_created_at!", u.id AS "user_id!", u.username AS "user_username!", usa.id AS "user_session_last_authentication_id?", usa.created_at AS "user_session_last_authentication_created_at?", ue.id AS "user_email_id?", ue.email AS "user_email?", ue.created_at AS "user_email_created_at?", ue.confirmed_at AS "user_email_confirmed_at?" FROM oauth2_access_tokens at INNER JOIN oauth2_sessions os ON os.id = at.oauth2_session_id INNER JOIN user_sessions us ON us.id = os.user_session_id INNER JOIN users u ON u.id = us.user_id LEFT JOIN user_session_authentications usa ON usa.session_id = us.id LEFT JOIN user_emails ue ON ue.id = u.primary_email_id WHERE at.token = $1 AND at.created_at + (at.expires_after * INTERVAL '1 second') >= now() AND us.active AND os.ended_at IS NULL ORDER BY usa.created_at DESC LIMIT 1 "#, token, ) .fetch_one(executor) .await?; let access_token = AccessToken { data: res.access_token_id, jti: format!("{}", res.access_token_id), token: res.access_token, created_at: res.access_token_created_at, expires_after: Duration::seconds(res.access_token_expires_after.into()), }; let client = Client { data: (), client_id: res.client_id, }; let primary_email = match ( res.user_email_id, res.user_email, res.user_email_created_at, res.user_email_confirmed_at, ) { (Some(id), Some(email), Some(created_at), confirmed_at) => Some(UserEmail { data: id, email, created_at, confirmed_at, }), (None, None, None, None) => None, _ => return Err(DatabaseInconsistencyError.into()), }; let user = User { data: res.user_id, username: res.user_username, 
sub: format!("fake-sub-{}", res.user_id), primary_email, }; let last_authentication = match ( res.user_session_last_authentication_id, res.user_session_last_authentication_created_at, ) { (None, None) => None, (Some(id), Some(created_at)) => Some(Authentication { data: id, created_at, }), _ => return Err(DatabaseInconsistencyError.into()), }; let browser_session = BrowserSession { data: res.user_session_id, created_at: res.user_session_created_at, user, last_authentication, }; let scope = res.scope.parse().map_err(|_e| DatabaseInconsistencyError)?; let session = Session { data: res.session_id, client, browser_session, scope, }; Ok((access_token, session)) } pub async fn revoke_access_token( executor: impl PgExecutor<'_>, access_token: &AccessToken<PostgresqlBackend>, ) -> anyhow::Result<()> { let res = sqlx::query!( r#" DELETE FROM oauth2_access_tokens WHERE id = $1 "#, access_token.data, ) .execute(executor) .await .context("could not revoke access tokens")?; if res.rows_affected() == 1 { Ok(()) } else { Err(anyhow::anyhow!("no row were affected when revoking token")) } }
pub async fn cleanup_expired(executor: impl PgExecutor<'_>) -> anyhow::Result<u64> { let res = sqlx::query!( r#" DELETE FROM oauth2_access_tokens WHERE created_at + (expires_after * INTERVAL '1 second') + INTERVAL '15 minutes' < now() "#, ) .execute(executor) .await .context("could not cleanup expired access tokens")?; Ok(res.rows_affected()) }
function_block-full_function
[ { "content": "fn hash<H: Digest>(mut hasher: H, token: &str) -> anyhow::Result<String> {\n\n hasher.update(token);\n\n let hash = hasher.finalize();\n\n // Left-most 128bit\n\n let bits = hash\n\n .get(..16)\n\n .context(\"failed to get first 128 bits of hash\")?;\n\n Ok(BASE64URL_N...
Rust
roller_web/src/lib.rs
jacobh/roller
4640ac9e2feec6aa211ba0b47b77f287a439f67b
use async_std::sync::{Mutex, Receiver, Sender}; use broadcaster::BroadcastChannel; use futures::prelude::*; use rustc_hash::FxHashMap; use std::sync::Arc; use warp::{ ws::{self, WebSocket, Ws}, Filter, }; use roller_protocol::{ control::{ButtonCoordinate, ButtonGridLocation, ButtonState, InputEvent}, fixture::{FixtureGroupId, FixtureId, FixtureParams}, lighting_engine::FixtureGroupState, ClientMessage, ServerMessage, }; async fn browser_session( websocket: WebSocket, fixture_params: FxHashMap<FixtureId, FixtureParams>, initial_button_states: FxHashMap<(ButtonGridLocation, ButtonCoordinate), (String, ButtonState)>, initial_fixture_group_states: ( FixtureGroupState, FxHashMap<FixtureGroupId, FixtureGroupState>, ), server_message_recv: impl Stream<Item = ServerMessage> + Unpin, event_sender: Sender<InputEvent>, ) { let (mut tx, rx) = websocket.split(); let initial_messages = &[ ServerMessage::ButtonStatesUpdated( initial_button_states .iter() .map(|((loc, coord), (_, state))| (*loc, *coord, *state)) .collect(), ), ServerMessage::ButtonLabelsUpdated( initial_button_states .iter() .map(|((loc, coord), (label, _))| (*loc, *coord, label.clone())) .collect(), ), ServerMessage::FixtureParamsUpdated(fixture_params.into_iter().collect()), ServerMessage::FixtureGroupStatesUpdated({ let (base_state, group_states) = initial_fixture_group_states; vec![(None, base_state)] .into_iter() .chain( group_states .into_iter() .map(|(id, state)| (Some(id), state)), ) .collect() }), ]; for message in initial_messages { let msg = bincode::serialize::<ServerMessage>(&message).unwrap(); match tx.send(ws::Message::binary(msg)).await { Ok(()) => {} Err(_) => { dbg!("Client has hung up"); return; } } } enum Event { ServerMessage(ServerMessage), ClientMessage(Result<ws::Message, warp::Error>), } let mut events = stream::select( rx.map(Event::ClientMessage), server_message_recv.map(Event::ServerMessage), ); while let Some(event) = events.next().await { match event { Event::ServerMessage(msg) => { 
let msg = bincode::serialize::<ServerMessage>(&msg).unwrap(); match tx.send(ws::Message::binary(msg)).await { Ok(()) => {} Err(_) => { dbg!("Client has hung up"); return; } } } Event::ClientMessage(Err(e)) => { println!("error reading from client: {:?}", e); return; } Event::ClientMessage(Ok(msg)) => { if !msg.is_binary() { continue; } let msg = bincode::deserialize::<ClientMessage>(msg.as_bytes()) .expect("bincode::deserialize"); println!("{:?}", msg); match msg { ClientMessage::Input(input_event) => { event_sender.send(input_event).await; } }; } } } } pub fn serve_frontend( initial_button_states: FxHashMap<(ButtonGridLocation, ButtonCoordinate), (String, ButtonState)>, fixture_params: FxHashMap<FixtureId, FixtureParams>, mut server_message_recv: Receiver<ServerMessage>, event_sender: Sender<InputEvent>, ) { let initial_button_states = Arc::new(Mutex::new(initial_button_states)); let initial_fixture_group_states = Arc::new(Mutex::new(( FixtureGroupState::default(), FxHashMap::default(), ))); let server_message_channel: BroadcastChannel<ServerMessage> = BroadcastChannel::new(); let initial_button_states2 = initial_button_states.clone(); let initial_fixture_group_states2 = initial_fixture_group_states.clone(); let (mut server_message_sender, _) = server_message_channel.clone().split(); async_std::task::spawn(async move { while let Some(server_message) = server_message_recv.next().await { match &server_message { ServerMessage::ButtonStatesUpdated(coord_states) => { let mut states = initial_button_states2.lock().await; for (loc, coord, state) in coord_states.clone() { states .entry((loc, coord)) .and_modify(|(_label, prev_state)| *prev_state = state) .or_insert_with(|| (String::new(), state)); } } ServerMessage::FixtureGroupStatesUpdated(updates) => { let mut states = initial_fixture_group_states2.lock().await; for (id, state) in updates.clone() { if let Some(id) = id { states.1.insert(id, state); } else { states.0 = state; } } } _ => {} } match 
server_message_sender.send(server_message).await { Ok(()) => {}, Err(_) => { dbg!("unable to broadcast server message"); return; } } } }); let index = warp::get().and(warp::fs::file("web_ui/index.html")); let assets = warp::get().and(warp::fs::dir("web_ui")); let websocket = warp::get() .and(warp::path("ws")) .and(warp::ws()) .map(move |ws: Ws| { let fixture_params = fixture_params.clone(); let event_sender = event_sender.clone(); let initial_button_states = async_std::task::block_on(initial_button_states.lock()).clone(); let initial_fixture_group_states = async_std::task::block_on(initial_fixture_group_states.lock()).clone(); let (_, server_message_recv) = server_message_channel.clone().split(); ws.on_upgrade(move |websocket| { browser_session( websocket, fixture_params, initial_button_states, initial_fixture_group_states, server_message_recv, event_sender, ) }) }); let app = warp::path::end().and(index).or(websocket).or(assets); let mut rt = tokio::runtime::Runtime::new().unwrap(); std::thread::spawn(move || { rt.block_on(async { warp::serve(app).bind(([0, 0, 0, 0], 8888)).await; }); }); }
use async_std::sync::{Mutex, Receiver, Sender}; use broadcaster::BroadcastChannel; use futures::prelude::*; use rustc_hash::FxHashMap; use std::sync::Arc; use warp::{ ws::{self, WebSocket, Ws}, Filter, }; use roller_protocol::{ control::{ButtonCoordinate, ButtonGridLocation, ButtonState, InputEvent}, fixture::{FixtureGroupId, FixtureId, FixtureParams}, lighting_engine::FixtureGroupState, ClientMessage, ServerMessage, }; async fn browser_session( websocket: WebSocket, fixture_params: FxHashMap<FixtureId, FixtureParams>, initial_button_states: FxHashMap<(ButtonGridLocation, ButtonCoordinate), (String, ButtonState)>, initial_fixture_group_states: ( FixtureGroupState, FxHashMap<FixtureGroupId, FixtureGroupState>, ), server_message_recv: impl Stream<Item = ServerMessage> + Unpin, event_sender: Sender<InputEvent>, ) { let (mut tx, rx) = websocket.split(); let initial_messages = &[ ServerMessage::ButtonStatesUpdated( initial_button_states .iter() .map(|((loc, coord), (_, state))| (*loc, *coord, *state)) .collect(), ), ServerMessage::ButtonLabelsUpdated( initial_button_states .iter() .map(|((loc, coord), (label, _))| (*loc, *coord, label.clone())) .collect(), ), ServerMessage::FixtureParamsUpdated(fixture_params.into_iter().collect()), ServerMessage::FixtureGroupStatesUpdated({ let (base_state, group_states) = initial_fixture_group_states; vec![(None, base_state)] .into_iter() .chain( group_states .into_iter() .map(|(id, state)| (Some(id), state)), ) .collect() }), ]; for message in initial_messages { let msg = bincode::serialize::<ServerMessage>(&message).unwrap(); match tx.send(ws::Message::binary(msg)).await { Ok(()) => {} Err(_) => { dbg!("Client has hung up"); return; } } } enum Event { ServerMessage(ServerMessage), ClientMessage(Result<ws::Message, warp::Error>), } let mut events = stream::select( rx.map(Event::ClientMessage), server_message_recv.map(Event::ServerMessage), ); while let Some(event) = events.next().await { match event { Event::ServerMessage(msg) => { 
let msg = bincode::serialize::<ServerMessage>(&msg).unwrap(); match tx.send(ws::Message::binary(msg)).await { Ok(()) => {} Err(_) => { dbg!("Client has hung up"); return; } } } Event::ClientMessage(Err(e)) => { println!("error reading from client: {:?}", e); return; } Event::ClientMessage(Ok(msg)) => { if !msg.is_binary() { continue; } let msg = bincode::deserialize::<ClientMessage>(msg.as_bytes()) .expect("bincode::deserialize"); println!("{:?}", msg); match msg { ClientMessage::Input(input_event) => { event_sender.send(input_event).await; } }; } } } } pub fn serve_frontend( initial_button_states: FxHashMap<(ButtonGridLocation, ButtonCoordinate), (String, ButtonState)>, fixture_params: FxHashMap<FixtureId, FixtureParams>, mut server_message_recv: Receiver<ServerMessage>, event_sender: Sender<InputEvent>, ) { let initial_button_states = Arc::new(Mutex::new(initial_button_states)); let initial_fixture_group_states = Arc::new(Mutex::new(( FixtureGroupState::default(), FxHashMap::default(), ))); let server_message_channel: BroadcastChannel<ServerMessage> = BroadcastChannel::new(); let initial_button_states2 = initial_button_states.clone(); let initial_fixture_group_states2 = initial_fixture_group_states.clone(); let (mut server_message_sender, _) = server_message_channel.clone().split(); async_std::task::spawn(async move { while let Some(server_message) = server_message_recv.next().await { match &server_message { ServerMessage::ButtonStatesUpdated(coord_states) => { let mut states = initial_button_states2.lock().await; for (loc, coord, state) in coord_states.clone() { states .entry((loc, coord)) .and_modify(|(_label, prev_state)| *prev_state = state) .or_insert_with(|| (String::new(), state)); } } ServerMessage::FixtureGroupStatesUpdated(updates) => { let mut states = initial_fixture_group_states2.lock().await; for (id, state) in updates.clone() { if let Some(id) = id { states.1.insert(id, state); } else { states.0 = state; } } } _ => {} } match 
server_message_sender.send(server_message).await { Ok(()) => {}, Err(_) => { dbg!("unable to broadcast server message"); return; } } } }); let index = warp::get().and(warp::fs::file("web_ui/index.html")); let assets = warp::get().and(warp::fs::dir("web_ui")); let websocket = warp::get() .and(warp::path("ws")) .and(warp::ws()) .map(move |ws: Ws| { let fixture_params = fixture_params.clone(); let event_sender = event_sender.clone(); let initial_button_states = async_std::task::block_on(initial_button_states.lock()).clone(); let initial_fixture_group_states = async_std::task::block_on(initial_fixture_group_states.lock()).clone(); let (_, server_message_recv) = server_message_channel.clone().split(); ws.on_upgrade(move |websocket| {
}) }); let app = warp::path::end().and(index).or(websocket).or(assets); let mut rt = tokio::runtime::Runtime::new().unwrap(); std::thread::spawn(move || { rt.block_on(async { warp::serve(app).bind(([0, 0, 0, 0], 8888)).await; }); }); }
browser_session( websocket, fixture_params, initial_button_states, initial_fixture_group_states, server_message_recv, event_sender, )
call_expression
[]
Rust
particle-node/src/node.rs
ktzanida/fluence
51c44cbdb9a530aa986eb9f3af1d84508877dcec
/* * Copyright 2020 Fluence Labs Limited * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ use std::io::Error; use std::sync::Arc; use std::{io, iter::once, net::SocketAddr}; use async_std::task; use eyre::WrapErr; use futures::{ channel::{mpsc::unbounded, oneshot}, select, stream::{self, StreamExt}, FutureExt, }; use libp2p::core::either::EitherError; use libp2p::ping::Failure; use libp2p::swarm::{ProtocolsHandlerUpgrErr, SwarmEvent}; use libp2p::{ core::{muxing::StreamMuxerBox, transport::Boxed, Multiaddr}, identity::Keypair, swarm::AddressScore, PeerId, Swarm, TransportError, }; use libp2p_metrics::{Metrics, Recorder}; use open_metrics_client::registry::Registry; use aquamarine::{ AquaRuntime, AquamarineApi, AquamarineApiError, AquamarineBackend, DataStoreError, NetworkEffects, VmConfig, VmPoolConfig, AVM, }; use builtins_deployer::BuiltinsDeployer; use config_utils::to_peer_id; use connection_pool::ConnectionPoolApi; use fluence_libp2p::types::{BackPressuredInlet, Inlet}; use fluence_libp2p::{ build_transport, types::{OneshotOutlet, Outlet}, }; use particle_builtins::{Builtins, NodeInfo}; use particle_protocol::Particle; use peer_metrics::{ ConnectionPoolMetrics, ConnectivityMetrics, DispatcherMetrics, ParticleExecutorMetrics, VmPoolMetrics, }; use script_storage::{ScriptStorageApi, ScriptStorageBackend, ScriptStorageConfig}; use server_config::{NetworkConfig, ResolvedConfig, ServicesConfig}; use crate::dispatcher::Dispatcher; use crate::effectors::Effectors; use 
crate::metrics::start_metrics_endpoint; use crate::Connectivity; use super::behaviour::NetworkBehaviour; pub struct Node<RT: AquaRuntime> { particle_stream: BackPressuredInlet<Particle>, effects_stream: Inlet<Result<NetworkEffects, AquamarineApiError>>, pub swarm: Swarm<NetworkBehaviour>, pub connectivity: Connectivity, pub dispatcher: Dispatcher, aquavm_pool: AquamarineBackend<RT, Arc<Builtins<Connectivity>>>, script_storage: ScriptStorageBackend, builtins_deployer: BuiltinsDeployer, registry: Option<Registry>, metrics_listen_addr: SocketAddr, pub local_peer_id: PeerId, pub builtins_management_peer_id: PeerId, } impl<RT: AquaRuntime> Node<RT> { pub fn new( config: ResolvedConfig, vm_config: RT::Config, node_version: &'static str, ) -> eyre::Result<Box<Self>> { let key_pair: Keypair = config.node_config.root_key_pair.clone().into(); let local_peer_id = to_peer_id(&key_pair); let transport = config.transport_config.transport; let transport = build_transport( transport, key_pair.clone(), config.transport_config.socket_timeout, ); let builtins_peer_id = to_peer_id(&config.builtins_key_pair.clone().into()); let services_config = ServicesConfig::new( local_peer_id, config.dir_config.services_base_dir.clone(), config_utils::particles_vault_dir(&config.dir_config.avm_base_dir), config.services_envs.clone(), config.management_peer_id, builtins_peer_id, config.node_config.module_max_heap_size, config.node_config.module_default_heap_size, ) .expect("create services config"); let mut metrics_registry = if config.metrics_config.metrics_enabled { Some(Registry::default()) } else { None }; let libp2p_metrics = metrics_registry.as_mut().map(Metrics::new); let connectivity_metrics = metrics_registry.as_mut().map(ConnectivityMetrics::new); let connection_pool_metrics = metrics_registry.as_mut().map(ConnectionPoolMetrics::new); let plumber_metrics = metrics_registry.as_mut().map(ParticleExecutorMetrics::new); let vm_pool_metrics = metrics_registry.as_mut().map(VmPoolMetrics::new); 
let network_config = NetworkConfig::new( libp2p_metrics, connectivity_metrics, connection_pool_metrics, key_pair, &config, node_version, ); let (swarm, connectivity, particle_stream) = Self::swarm( local_peer_id, network_config, transport, config.external_addresses(), ); let (particle_failures_out, particle_failures_in) = unbounded(); let (script_storage_api, script_storage_backend) = { let script_storage_config = ScriptStorageConfig { timer_resolution: config.script_storage_timer_resolution, max_failures: config.script_storage_max_failures, particle_ttl: config.script_storage_particle_ttl, peer_id: local_peer_id, }; let pool: &ConnectionPoolApi = connectivity.as_ref(); ScriptStorageBackend::new(pool.clone(), particle_failures_in, script_storage_config) }; let builtins = Self::builtins( connectivity.clone(), config.external_addresses(), services_config, script_storage_api, ); let (effects_out, effects_in) = unbounded(); let pool_config = VmPoolConfig::new(config.aquavm_pool_size, config.particle_execution_timeout); let (aquavm_pool, aquamarine_api) = AquamarineBackend::new( pool_config, vm_config, Arc::new(builtins), effects_out, plumber_metrics, vm_pool_metrics, ); let effectors = Effectors::new(connectivity.clone()); let dispatcher = { let failures = particle_failures_out; let parallelism = config.particle_processor_parallelism; Dispatcher::new( local_peer_id, aquamarine_api.clone(), effectors, failures, parallelism, metrics_registry.as_mut(), ) }; let builtins_deployer = BuiltinsDeployer::new( builtins_peer_id, local_peer_id, aquamarine_api, config.dir_config.builtins_base_dir.clone(), config.node_config.autodeploy_particle_ttl, config.node_config.force_builtins_redeploy, config.node_config.autodeploy_retry_attempts, ); Ok(Self::with( particle_stream, effects_in, swarm, connectivity, dispatcher, aquavm_pool, script_storage_backend, builtins_deployer, metrics_registry, config.metrics_listen_addr(), local_peer_id, builtins_peer_id, )) } pub fn swarm( 
local_peer_id: PeerId, network_config: NetworkConfig, transport: Boxed<(PeerId, StreamMuxerBox)>, external_addresses: Vec<Multiaddr>, ) -> ( Swarm<NetworkBehaviour>, Connectivity, BackPressuredInlet<Particle>, ) { let (behaviour, connectivity, particle_stream) = NetworkBehaviour::new(network_config); let mut swarm = Swarm::new(transport, behaviour, local_peer_id); external_addresses.iter().cloned().for_each(|addr| { Swarm::add_external_address(&mut swarm, addr, AddressScore::Finite(1)); }); (swarm, connectivity, particle_stream) } pub fn builtins( connectivity: Connectivity, external_addresses: Vec<Multiaddr>, services_config: ServicesConfig, script_storage_api: ScriptStorageApi, ) -> Builtins<Connectivity> { let node_info = NodeInfo { external_addresses, node_version: env!("CARGO_PKG_VERSION"), air_version: air_interpreter_wasm::VERSION, }; Builtins::new(connectivity, script_storage_api, node_info, services_config) } } impl<RT: AquaRuntime> Node<RT> { #[allow(clippy::too_many_arguments)] pub fn with( particle_stream: BackPressuredInlet<Particle>, effects_stream: Inlet<Result<NetworkEffects, AquamarineApiError>>, swarm: Swarm<NetworkBehaviour>, connectivity: Connectivity, dispatcher: Dispatcher, aquavm_pool: AquamarineBackend<RT, Arc<Builtins<Connectivity>>>, script_storage: ScriptStorageBackend, builtins_deployer: BuiltinsDeployer, registry: Option<Registry>, metrics_listen_addr: SocketAddr, local_peer_id: PeerId, builtins_management_peer_id: PeerId, ) -> Box<Self> { let node_service = Self { particle_stream, effects_stream, swarm, connectivity, dispatcher, aquavm_pool, script_storage, builtins_deployer, registry, metrics_listen_addr, local_peer_id, builtins_management_peer_id, }; Box::new(node_service) } pub fn start(self: Box<Self>) -> eyre::Result<OneshotOutlet<()>> { let (exit_outlet, exit_inlet) = oneshot::channel(); let mut exit_inlet = exit_inlet.into_stream().fuse(); let particle_stream = self.particle_stream; let effects_stream = self.effects_stream; let 
swarm = self.swarm; let connectivity = self.connectivity; let dispatcher = self.dispatcher; let aquavm_pool = self.aquavm_pool; let script_storage = self.script_storage; let mut registry = self.registry; let metrics_listen_addr = self.metrics_listen_addr; let local_peer_id = self.local_peer_id; let builtins_management_peer_id = self.builtins_management_peer_id; task::spawn(async move { let (metrics_fut, libp2p_metrics) = if let Some(mut registry) = registry { let libp2p_metrics = Metrics::new(&mut registry); let fut = start_metrics_endpoint(registry, metrics_listen_addr); (fut, Some(libp2p_metrics)) } else { (futures::future::ready(Ok(())).boxed(), None) }; let mut metrics_fut = metrics_fut.fuse(); let script_storage = script_storage.start(); let pool = aquavm_pool.start(); let mut connectivity = connectivity.start(); let mut dispatcher = dispatcher.start(particle_stream, effects_stream); let stopped = stream::iter(once(Err(()))); let mut swarm = swarm .map(|e| { libp2p_metrics.as_ref().map(|m| m.record(&e)); Ok(()) }) .chain(stopped) .fuse(); loop { select!( e = swarm.select_next_some() => { if e.is_err() { log::error!("Swarm has terminated"); break; } }, e = metrics_fut => { if let Err(err) = e { log::warn!("Metrics returned error: {}", err) } }, _ = connectivity => {}, _ = dispatcher => {}, event = exit_inlet.next() => { if let Some(Ok(_)) = event { break } } ) } log::info!("Stopping node"); script_storage.cancel().await; dispatcher.cancel().await; connectivity.cancel().await; pool.cancel().await; }); let mut builtins_deployer = self.builtins_deployer; builtins_deployer .deploy_builtin_services() .wrap_err("builtins deploy failed")?; Ok(exit_outlet) } #[inline] pub fn listen( &mut self, addrs: impl Into<Vec<Multiaddr>>, ) -> Result<(), TransportError<io::Error>> { let addrs = addrs.into(); log::info!("Fluence listening on {:?}", addrs); for addr in addrs { Swarm::listen_on(&mut self.swarm, addr)?; } Ok(()) } } #[cfg(test)] mod tests { use eyre::WrapErr; use 
libp2p::core::Multiaddr; use maplit::hashmap; use serde_json::json; use air_interpreter_fs::{air_interpreter_path, write_default_air_interpreter}; use aquamarine::{VmConfig, AVM}; use config_utils::to_peer_id; use connected_client::ConnectedClient; use fluence_libp2p::RandomPeerId; use server_config::{builtins_base_dir, default_base_dir, deserialize_config}; use crate::Node; #[test] fn run_node() { let base_dir = default_base_dir(); fs_utils::create_dir(&base_dir).unwrap(); fs_utils::create_dir(builtins_base_dir(&base_dir)).unwrap(); write_default_air_interpreter(&air_interpreter_path(&base_dir)).unwrap(); let mut config = deserialize_config(&<_>::default(), &[]).expect("deserialize config"); config.aquavm_pool_size = 1; let vm_config = VmConfig::new( to_peer_id(&config.root_key_pair.clone().into()), config.dir_config.avm_base_dir.clone(), config.dir_config.air_interpreter_path.clone(), None, ); let mut node: Box<Node<AVM<_>>> = Node::new(config, vm_config, "some version").expect("create node"); let listening_address: Multiaddr = "/ip4/127.0.0.1/tcp/7777".parse().unwrap(); node.listen(vec![listening_address.clone()]).unwrap(); node.start().expect("start node"); let mut client = ConnectedClient::connect_to(listening_address).expect("connect client"); let data = hashmap! { "name" => json!("folex"), "client" => json!(client.peer_id.to_string()), "relay" => json!(client.node.to_string()), }; client.send_particle( r#" (seq (call relay ("op" "identity") []) (call client ("return" "") [name]) ) "#, data.clone(), ); client.receive_args().wrap_err("receive args").unwrap(); } }
/* * Copyright 2020 Fluence Labs Limited * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ use std::io::Error; use std::sync::Arc; use std::{io, iter::once, net::SocketAddr}; use async_std::task; use eyre::WrapErr; use futures::{ channel::{mpsc::unbounded, oneshot}, select, stream::{self, StreamExt}, FutureExt, }; use libp2p::core::either::EitherError; use libp2p::ping::Failure; use libp2p::swarm::{ProtocolsHandlerUpgrErr, SwarmEvent}; use libp2p::{ core::{muxing::StreamMuxerBox, transport::Boxed, Multiaddr}, identity::Keypair, swarm::AddressScore, PeerId, Swarm, TransportError, }; use libp2p_metrics::{Metrics, Recorder}; use open_metrics_client::registry::Registry; use aquamarine::{ AquaRuntime, AquamarineApi, AquamarineApiError, AquamarineBackend, DataStoreError, NetworkEffects, VmConfig, VmPoolConfig, AVM, }; use builtins_deployer::BuiltinsDeployer; use config_utils::to_peer_id; use connection_pool::ConnectionPoolApi; use fluence_libp2p::types::{BackPressuredInlet, Inlet}; use fluence_libp2p::{ build_transport, types::{OneshotOutlet, Outlet}, }; use particle_builtins::{Builtins, NodeInfo}; use particle_protocol::Particle; use peer_metrics::{ ConnectionPoolMetrics, ConnectivityMetrics, DispatcherMetrics, ParticleExecutorMetrics, VmPoolMetrics, }; use script_storage::{ScriptStorageApi, ScriptStorageBackend, ScriptStorageConfig}; use server_config::{NetworkConfig, ResolvedConfig, ServicesConfig}; use crate::dispatcher::Dispatcher; use crate::effectors::Effectors; use 
crate::metrics::start_metrics_endpoint; use crate::Connectivity; use super::behaviour::NetworkBehaviour; pub struct Node<RT: AquaRuntime> { particle_stream: BackPressuredInlet<Particle>, effects_stream: Inlet<Result<NetworkEffects, AquamarineApiError>>, pub swarm: Swarm<NetworkBehaviour>, pub connectivity: Connectivity, pub dispatcher: Dispatcher, aquavm_pool: AquamarineBackend<RT, Arc<Builtins<Connectivity>>>, script_storage: ScriptStorageBackend, builtins_deployer: BuiltinsDeployer, registry: Option<Registry>, metrics_listen_addr: SocketAddr, pub local_peer_id: PeerId, pub builtins_management_peer_id: PeerId, } impl<RT: AquaRuntime> Node<RT> { pub fn new( config: ResolvedConfig, vm_config: RT::Config, node_version: &'static str, ) -> eyre::Result<Box<Self>> { let key_pair: Keypair = config.node_config.root_key_pair.clone().into(); let local_peer_id = to_peer_id(&key_pair); let transport = config.transport_config.transport; let transport = build_transport( transport, key_pair.clone(), config.transport_config.socket_timeout, ); let builtins_peer_id = to_peer_id(&config.builtins_key_pair.clone().into()); let services_config = ServicesConfig::new( local_peer_id, config.dir_config.services_base_dir.clone(), config_utils::particles_vault_dir(&config.dir_config.avm_base_dir), config.services_envs.clone(), config.management_peer_id, builtins_peer_id, config.node_config.module_max_heap_size, config.node_config.module_default_heap_size, ) .expect("create services config"); let mut metrics_registry =
; let libp2p_metrics = metrics_registry.as_mut().map(Metrics::new); let connectivity_metrics = metrics_registry.as_mut().map(ConnectivityMetrics::new); let connection_pool_metrics = metrics_registry.as_mut().map(ConnectionPoolMetrics::new); let plumber_metrics = metrics_registry.as_mut().map(ParticleExecutorMetrics::new); let vm_pool_metrics = metrics_registry.as_mut().map(VmPoolMetrics::new); let network_config = NetworkConfig::new( libp2p_metrics, connectivity_metrics, connection_pool_metrics, key_pair, &config, node_version, ); let (swarm, connectivity, particle_stream) = Self::swarm( local_peer_id, network_config, transport, config.external_addresses(), ); let (particle_failures_out, particle_failures_in) = unbounded(); let (script_storage_api, script_storage_backend) = { let script_storage_config = ScriptStorageConfig { timer_resolution: config.script_storage_timer_resolution, max_failures: config.script_storage_max_failures, particle_ttl: config.script_storage_particle_ttl, peer_id: local_peer_id, }; let pool: &ConnectionPoolApi = connectivity.as_ref(); ScriptStorageBackend::new(pool.clone(), particle_failures_in, script_storage_config) }; let builtins = Self::builtins( connectivity.clone(), config.external_addresses(), services_config, script_storage_api, ); let (effects_out, effects_in) = unbounded(); let pool_config = VmPoolConfig::new(config.aquavm_pool_size, config.particle_execution_timeout); let (aquavm_pool, aquamarine_api) = AquamarineBackend::new( pool_config, vm_config, Arc::new(builtins), effects_out, plumber_metrics, vm_pool_metrics, ); let effectors = Effectors::new(connectivity.clone()); let dispatcher = { let failures = particle_failures_out; let parallelism = config.particle_processor_parallelism; Dispatcher::new( local_peer_id, aquamarine_api.clone(), effectors, failures, parallelism, metrics_registry.as_mut(), ) }; let builtins_deployer = BuiltinsDeployer::new( builtins_peer_id, local_peer_id, aquamarine_api, 
config.dir_config.builtins_base_dir.clone(), config.node_config.autodeploy_particle_ttl, config.node_config.force_builtins_redeploy, config.node_config.autodeploy_retry_attempts, ); Ok(Self::with( particle_stream, effects_in, swarm, connectivity, dispatcher, aquavm_pool, script_storage_backend, builtins_deployer, metrics_registry, config.metrics_listen_addr(), local_peer_id, builtins_peer_id, )) } pub fn swarm( local_peer_id: PeerId, network_config: NetworkConfig, transport: Boxed<(PeerId, StreamMuxerBox)>, external_addresses: Vec<Multiaddr>, ) -> ( Swarm<NetworkBehaviour>, Connectivity, BackPressuredInlet<Particle>, ) { let (behaviour, connectivity, particle_stream) = NetworkBehaviour::new(network_config); let mut swarm = Swarm::new(transport, behaviour, local_peer_id); external_addresses.iter().cloned().for_each(|addr| { Swarm::add_external_address(&mut swarm, addr, AddressScore::Finite(1)); }); (swarm, connectivity, particle_stream) } pub fn builtins( connectivity: Connectivity, external_addresses: Vec<Multiaddr>, services_config: ServicesConfig, script_storage_api: ScriptStorageApi, ) -> Builtins<Connectivity> { let node_info = NodeInfo { external_addresses, node_version: env!("CARGO_PKG_VERSION"), air_version: air_interpreter_wasm::VERSION, }; Builtins::new(connectivity, script_storage_api, node_info, services_config) } } impl<RT: AquaRuntime> Node<RT> { #[allow(clippy::too_many_arguments)] pub fn with( particle_stream: BackPressuredInlet<Particle>, effects_stream: Inlet<Result<NetworkEffects, AquamarineApiError>>, swarm: Swarm<NetworkBehaviour>, connectivity: Connectivity, dispatcher: Dispatcher, aquavm_pool: AquamarineBackend<RT, Arc<Builtins<Connectivity>>>, script_storage: ScriptStorageBackend, builtins_deployer: BuiltinsDeployer, registry: Option<Registry>, metrics_listen_addr: SocketAddr, local_peer_id: PeerId, builtins_management_peer_id: PeerId, ) -> Box<Self> { let node_service = Self { particle_stream, effects_stream, swarm, connectivity, dispatcher, 
aquavm_pool, script_storage, builtins_deployer, registry, metrics_listen_addr, local_peer_id, builtins_management_peer_id, }; Box::new(node_service) } pub fn start(self: Box<Self>) -> eyre::Result<OneshotOutlet<()>> { let (exit_outlet, exit_inlet) = oneshot::channel(); let mut exit_inlet = exit_inlet.into_stream().fuse(); let particle_stream = self.particle_stream; let effects_stream = self.effects_stream; let swarm = self.swarm; let connectivity = self.connectivity; let dispatcher = self.dispatcher; let aquavm_pool = self.aquavm_pool; let script_storage = self.script_storage; let mut registry = self.registry; let metrics_listen_addr = self.metrics_listen_addr; let local_peer_id = self.local_peer_id; let builtins_management_peer_id = self.builtins_management_peer_id; task::spawn(async move { let (metrics_fut, libp2p_metrics) = if let Some(mut registry) = registry { let libp2p_metrics = Metrics::new(&mut registry); let fut = start_metrics_endpoint(registry, metrics_listen_addr); (fut, Some(libp2p_metrics)) } else { (futures::future::ready(Ok(())).boxed(), None) }; let mut metrics_fut = metrics_fut.fuse(); let script_storage = script_storage.start(); let pool = aquavm_pool.start(); let mut connectivity = connectivity.start(); let mut dispatcher = dispatcher.start(particle_stream, effects_stream); let stopped = stream::iter(once(Err(()))); let mut swarm = swarm .map(|e| { libp2p_metrics.as_ref().map(|m| m.record(&e)); Ok(()) }) .chain(stopped) .fuse(); loop { select!( e = swarm.select_next_some() => { if e.is_err() { log::error!("Swarm has terminated"); break; } }, e = metrics_fut => { if let Err(err) = e { log::warn!("Metrics returned error: {}", err) } }, _ = connectivity => {}, _ = dispatcher => {}, event = exit_inlet.next() => { if let Some(Ok(_)) = event { break } } ) } log::info!("Stopping node"); script_storage.cancel().await; dispatcher.cancel().await; connectivity.cancel().await; pool.cancel().await; }); let mut builtins_deployer = self.builtins_deployer; 
builtins_deployer .deploy_builtin_services() .wrap_err("builtins deploy failed")?; Ok(exit_outlet) } #[inline] pub fn listen( &mut self, addrs: impl Into<Vec<Multiaddr>>, ) -> Result<(), TransportError<io::Error>> { let addrs = addrs.into(); log::info!("Fluence listening on {:?}", addrs); for addr in addrs { Swarm::listen_on(&mut self.swarm, addr)?; } Ok(()) } } #[cfg(test)] mod tests { use eyre::WrapErr; use libp2p::core::Multiaddr; use maplit::hashmap; use serde_json::json; use air_interpreter_fs::{air_interpreter_path, write_default_air_interpreter}; use aquamarine::{VmConfig, AVM}; use config_utils::to_peer_id; use connected_client::ConnectedClient; use fluence_libp2p::RandomPeerId; use server_config::{builtins_base_dir, default_base_dir, deserialize_config}; use crate::Node; #[test] fn run_node() { let base_dir = default_base_dir(); fs_utils::create_dir(&base_dir).unwrap(); fs_utils::create_dir(builtins_base_dir(&base_dir)).unwrap(); write_default_air_interpreter(&air_interpreter_path(&base_dir)).unwrap(); let mut config = deserialize_config(&<_>::default(), &[]).expect("deserialize config"); config.aquavm_pool_size = 1; let vm_config = VmConfig::new( to_peer_id(&config.root_key_pair.clone().into()), config.dir_config.avm_base_dir.clone(), config.dir_config.air_interpreter_path.clone(), None, ); let mut node: Box<Node<AVM<_>>> = Node::new(config, vm_config, "some version").expect("create node"); let listening_address: Multiaddr = "/ip4/127.0.0.1/tcp/7777".parse().unwrap(); node.listen(vec![listening_address.clone()]).unwrap(); node.start().expect("start node"); let mut client = ConnectedClient::connect_to(listening_address).expect("connect client"); let data = hashmap! 
{ "name" => json!("folex"), "client" => json!(client.peer_id.to_string()), "relay" => json!(client.node.to_string()), }; client.send_particle( r#" (seq (call relay ("op" "identity") []) (call client ("return" "") [name]) ) "#, data.clone(), ); client.receive_args().wrap_err("receive args").unwrap(); } }
if config.metrics_config.metrics_enabled { Some(Registry::default()) } else { None }
if_condition
[ { "content": "pub fn create_swarm(config: SwarmConfig) -> (PeerId, Box<Node<AVM>>, KeyPair, SwarmConfig) {\n\n create_swarm_with_runtime(config, aqua_vm_config)\n\n}\n", "file_path": "crates/created-swarm/src/swarm.rs", "rank": 0, "score": 359754.6415028821 }, { "content": "/// Retrieves ...
Rust
arch/x86/src/instr/arith.rs
ConsoleLogLuke/Firn
d3cebbd7403a790e19f412940a016c24ceec4cdf
use crate::GeneralByteReg::{Ah, Al}; use crate::GeneralWordReg::{Ax, Dx}; use crate::{arith, GeneralByteReg, GeneralWordReg, RegMem, System}; use firn_arch_x86_macros::{arith_instr, instr}; arith_instr!(ADD); arith_instr!(ADC); arith_instr!(SUB); arith_instr!(SBB); #[instr("CMP AL, imm8")] pub fn cmp_al_imm8(sys: &mut System, imm: u8) { let old = sys.cpu.reg_8(Al); arith::sub_8(sys, old, imm); } #[instr("CMP AX, imm16")] pub fn cmp_ax_imm16(sys: &mut System, imm: u16) { let old = sys.cpu.reg_16(Ax.into()); arith::sub_16(sys, old, imm); } #[instr("CMP r/m8, imm8")] pub fn cmp_rm8_imm8(sys: &mut System, rm: RegMem, imm: u8) { let old = rm.get_8(sys); arith::sub_8(sys, old, imm); } #[instr("CMP r/m16, imm16")] pub fn cmp_rm16_imm16(sys: &mut System, rm: RegMem, imm: u16) { let old = rm.get_16(sys); arith::sub_16(sys, old, imm); } #[instr("CMP r/m16, imm8")] pub fn cmp_rm16_imm8(sys: &mut System, rm: RegMem, imm: u8) { let old = rm.get_16(sys); arith::sub_16(sys, old, imm as u16); } #[instr("CMP r/m8, r8")] pub fn cmp_rm8_r8(sys: &mut System, rm: RegMem, reg: GeneralByteReg) { let old = rm.get_8(sys); let reg = sys.cpu.reg_8(reg); arith::sub_8(sys, old, reg); } #[instr("CMP r/m16, r16")] pub fn cmp_rm16_r16(sys: &mut System, rm: RegMem, reg: GeneralWordReg) { let old = rm.get_16(sys); let reg = sys.cpu.reg_16(reg.into()); arith::sub_16(sys, old, reg); } #[instr("CMP r8, r/m8")] pub fn cmp_r8_rm8(sys: &mut System, reg: GeneralByteReg, rm: RegMem) { let old = sys.cpu.reg_8(reg); let rm = rm.get_8(sys); arith::sub_8(sys, old, rm); } #[instr("CMP r16, r/m16")] pub fn cmp_r16_rm16(sys: &mut System, reg: GeneralWordReg, rm: RegMem) { let old = sys.cpu.reg_16(reg.into()); let rm = rm.get_16(sys); arith::sub_16(sys, old, rm); } arith_instr!(OR); arith_instr!(AND); arith_instr!(XOR); #[instr("NOT r/m8")] pub fn not_rm8(sys: &mut System, rm: RegMem) { let old = rm.get_8(sys); rm.set_8(sys, !old); } #[instr("NOT r/m16")] pub fn not_rm16(sys: &mut System, rm: RegMem) { let old = 
rm.get_16(sys); rm.set_16(sys, !old); } #[instr("NEG r/m8")] pub fn neg_rm8(sys: &mut System, rm: RegMem) { let old = rm.get_8(sys); let overflow = old != 0; let (value, signed_overflow) = 0i8.overflowing_sub(old as i8); let value = value as u8; arith::set_all_flags_8(sys, value, overflow, signed_overflow); rm.set_8(sys, value); } #[instr("NEG r/m16")] pub fn neg_rm16(sys: &mut System, rm: RegMem) { let old = rm.get_16(sys); let overflow = old != 0; let (value, signed_overflow) = 0i16.overflowing_sub(old as i16); let value = value as u16; arith::set_all_flags_16(sys, value, overflow, signed_overflow); rm.set_16(sys, value); } #[instr("INC r/m8")] pub fn inc_rm8(sys: &mut System, rm: RegMem) { let old = rm.get_8(sys); let value = arith::add_8(sys, old, 1); rm.set_8(sys, value); } #[instr("INC r/m16")] pub fn inc_rm16(sys: &mut System, rm: RegMem) { let old = rm.get_16(sys); let value = arith::add_16(sys, old, 1); rm.set_16(sys, value); } #[instr("INC r16")] pub fn inc_r16(sys: &mut System, reg: GeneralWordReg) { let old = sys.cpu.reg_16(reg.into()); let value = arith::add_16(sys, old, 1); sys.cpu.set_reg_16(reg.into(), value); } #[instr("DEC r/m8")] pub fn dec_rm8(sys: &mut System, rm: RegMem) { let old = rm.get_8(sys); let value = arith::sub_8(sys, old, 1); rm.set_8(sys, value); } #[instr("DEC r/m16")] pub fn dec_rm16(sys: &mut System, rm: RegMem) { let old = rm.get_16(sys); let value = arith::sub_16(sys, old, 1); rm.set_16(sys, value); } #[instr("DEC r16")] pub fn dec_r16(sys: &mut System, reg: GeneralWordReg) { let old = sys.cpu.reg_16(reg.into()); let value = arith::sub_16(sys, old, 1); sys.cpu.set_reg_16(reg.into(), value); } #[instr("TEST AL, imm8")] pub fn test_al_imm8(sys: &mut System, imm: u8) { let old = sys.cpu.reg_8(Al); arith::and_8(sys, old, imm); } #[instr("TEST AX, imm16")] pub fn test_ax_imm16(sys: &mut System, imm: u16) { let old = sys.cpu.reg_16(Ax.into()); arith::and_16(sys, old, imm); } #[instr("TEST r/m8, imm8")] pub fn test_rm8_imm8(sys: &mut 
System, rm: RegMem, imm: u8) { let old = rm.get_8(sys); arith::and_8(sys, old, imm); } #[instr("TEST r/m16, imm16")] pub fn test_rm16_imm16(sys: &mut System, rm: RegMem, imm: u16) { let old = rm.get_16(sys); arith::and_16(sys, old, imm); } #[instr("TEST r/m8, r8")] pub fn test_rm8_r8(sys: &mut System, rm: RegMem, reg: GeneralByteReg) { let old = rm.get_8(sys); let reg = sys.cpu.reg_8(reg); arith::and_8(sys, old, reg); } #[instr("TEST r/m16, r16")] pub fn test_rm16_r16(sys: &mut System, rm: RegMem, reg: GeneralWordReg) { let old = rm.get_16(sys); let reg = sys.cpu.reg_16(reg.into()); arith::and_16(sys, old, reg); } #[instr("MUL r/m8")] pub fn mul_rm8(sys: &mut System, rm: RegMem) { let multiplicand = rm.get_8(sys); let multiplier = sys.cpu.reg_8(Al); let value = multiplicand as u16 * multiplier as u16; sys.cpu.set_reg_16(Ax.into(), value); let extended = value > u8::MAX as u16; sys.cpu.flags.carry = extended; sys.cpu.flags.overflow = extended; } #[instr("MUL r/m16")] pub fn mul_rm16(sys: &mut System, rm: RegMem) { let multiplicand = rm.get_16(sys); let multiplier = sys.cpu.reg_16(Ax.into()); let value = multiplicand as u32 * multiplier as u32; let low = (value & 0xff) as u16; let high = (value >> 8) as u16; sys.cpu.set_reg_16(Ax.into(), low); sys.cpu.set_reg_16(Dx.into(), high); let extended = high != 0; sys.cpu.flags.carry = extended; sys.cpu.flags.overflow = extended; } macro_rules! check_div_8 { ($sys:ident, $value:ident, $dividend:ident, $divisor:ident) => { if let Ok($value) = u8::try_from($value) { let remainder = $dividend % $divisor; $sys.cpu.set_reg_8(Ah, remainder as u8); $sys.cpu.set_reg_8(Al, $value); } else { crate::ExtSystem::interrupt($sys, 0); } }; } macro_rules! 
check_div_16 { ($sys:ident, $value:ident, $dividend:ident, $divisor:ident) => { if let Ok($value) = u16::try_from($value) { let remainder = $dividend % $divisor; $sys.cpu.set_reg_16(Dx.into(), remainder as u16); $sys.cpu.set_reg_16(Ax.into(), $value); } else { crate::ExtSystem::interrupt($sys, 0); } }; } #[instr("DIV r/m8")] pub fn div_rm8(sys: &mut System, rm: RegMem) { let dividend = sys.cpu.reg_16(Ax.into()); let divisor = rm.get_8(sys) as u16; let value = dividend / divisor; check_div_8!(sys, value, dividend, divisor); } #[instr("DIV r/m16")] pub fn div_rm16(sys: &mut System, rm: RegMem) { let dx = sys.cpu.reg_16(Dx.into()); let ax = sys.cpu.reg_16(Ax.into()); let dividend = ((dx as u32) << 16) | ax as u32; let divisor = rm.get_16(sys) as u32; let value = dividend / divisor; check_div_16!(sys, value, dividend, divisor); } #[instr("IDIV r/m8")] pub fn idiv_rm8(sys: &mut System, rm: RegMem) { let dividend = sys.cpu.reg_16(Ax.into()) as i16; let divisor = rm.get_8(sys) as i16; let value = dividend / divisor; check_div_8!(sys, value, dividend, divisor); } #[instr("IDIV r/m16")] pub fn idiv_rm16(sys: &mut System, rm: RegMem) { let dx = sys.cpu.reg_16(Dx.into()); let ax = sys.cpu.reg_16(Ax.into()); let dividend = ((dx as i32) << 16) | ax as i32; let divisor = rm.get_16(sys) as i32; let value = dividend / divisor; check_div_16!(sys, value, dividend, divisor); }
use crate::GeneralByteReg::{Ah, Al}; use crate::GeneralWordReg::{Ax, Dx}; use crate::{arith, GeneralByteReg, GeneralWordReg, RegMem, System}; use firn_arch_x86_macros::{arith_instr, instr}; arith_instr!(ADD); arith_instr!(ADC); arith_instr!(SUB); arith_instr!(SBB); #[instr("CMP AL, imm8")] pub fn cmp_al_imm8(sys: &mut System, imm: u8) { let old = sys.cpu.reg_8(Al); arith::sub_8(sys, old, imm); } #[instr("CMP AX, imm16")] pub fn cmp_ax_imm16(sys: &mut System, imm: u16) { let old = sys.cpu.reg_16(Ax.into()); arith::sub_16(sys, old, imm); } #[instr("CMP r/m8, imm8")] pub fn cmp_rm8_imm8(sys: &mut System, rm: RegMem, imm: u8) { let old = rm.get_8(sys); arith::sub_8(sys, old, imm); } #[instr("CMP r/m16, imm16")] pub fn cmp_rm16_imm16(sys: &mut System, rm: RegMem, imm: u16) { let old = rm.get_16(sys); arith::sub_16(sys, old, imm); } #[instr("CMP r/m16, imm8")] pub fn cmp_rm16_imm8(sys: &mut System, rm: RegMem, imm: u8) { let old = rm.get_16(sys); arith::sub_16(sys, old, imm as u16); } #[instr("CMP r/m8, r8")] pub fn cmp_rm8_r8(sys: &mut System, rm: RegMem, reg: GeneralByteReg) { let old = rm.get_8(sys); let reg = sys.cpu.reg_8(reg); arith::sub_8(sys, old, reg); } #[instr("CMP r/m16, r16")] pub fn cmp_rm16_r16(sys: &mut System, rm: RegMem, reg: GeneralWordReg) { let old = rm.get_16(sys); let reg = sys.cpu.reg_16(reg.into()); arith::sub_16(sys, old, reg); } #[instr("CMP r8, r/m8")] pub fn cmp_r8_rm8(sys: &mut System, reg: GeneralByteReg, rm: RegMem) { let old = sys.cpu.reg_8(reg); let rm = rm.get_8(sys); arith::sub_8(sys, old, rm); } #[instr("CMP r16, r/m16")] pub fn cmp_r16_rm16(sys: &mut System, reg: GeneralWordReg, rm: RegMem) { let old = sys.cpu.reg_16(reg.into()); let rm = rm.get_16(sys); arith::sub_16(sys, old, rm); } arith_instr!(OR); arith_instr!(AND); arith_instr!(XOR); #[instr("NOT r/m8")] pub fn not_rm8(sys: &mut System, rm: RegMem) { let old = rm.get_8(sys); rm.set_8(sys, !old); } #[instr("NOT r/m16")] pub fn not_rm16(sys: &mut System, rm: RegMem) { let old = 
rm.get_16(sys); rm.set_16(sys, !old); } #[instr("NEG r/m8")] pub fn neg_rm8(sys: &mut System, rm: RegMem) { let old = rm.get_8(sys); let overflow = old != 0; let (value, signed_overflow) = 0i8.overflowing_sub(old as i8); let value = value as u8; arith::set_all_flags_8(sys, value, overflow, signed_overflow); rm.set_8(sys, value); } #[instr("NEG r/m16")] pub fn neg_rm16(sys: &mut System, rm: RegMem) { let old = rm.get_16(sys); let overflow = old != 0; let (value, signed_overflow) = 0i16.overflowing_sub(old as i16); let value = value as u16; arith::set_all_flags_16(sys, value, overflow, signed_overflow); rm.set_16(sys, value); } #[instr("INC r/m8")] pub fn inc_rm8(sys: &mut System, rm: RegMem) { let old = rm.get_8(sys); let value = arith::add_8(sys, old, 1); rm.set_8(sys, value); } #[instr("INC r/m16")] pub fn inc_rm16(sys: &mut System, rm: RegMem) { let old = rm.get_16(sys); let value = arith::add_16(sys, old, 1); rm.set_16(sys, value); } #[instr("INC r16")] pub fn inc_r16(sys: &mut System, reg: GeneralWordReg) { let old = sys.cpu.reg_16(reg.into()); let value = arith::add_16(sys, old, 1); sys.cpu.set_reg_16(reg.into(), value); } #[instr("DEC r/m8")] pub fn dec_rm8(sys: &mut System, rm: RegMem) { let old = rm.get_8(sys); let value = arith::sub_8(sys, old, 1); rm.set_8(sys, value); } #[instr("DEC r/m16")] pub fn dec_rm16(sys: &mut System, rm: RegMem) { let old = rm.get_16(sys); let value = arith::sub_16(sys, old, 1); rm.set_16(sys, value); } #[instr("DEC r16")] pub fn dec_r16(sys: &mut System, reg: GeneralWordReg) { let old = sys.cpu.reg_16(reg.into()); let value = arith::sub_16(sys, old, 1); sys.cpu.set_reg_16(reg.into(), value); } #[instr("TEST AL, imm8")] pub fn test_al_imm8(sys: &mut System, imm: u8) { let old = sys.cpu.reg_8(Al); arith::and_8(sys, old, imm); } #[instr("TEST AX, imm16")] pub fn test_ax_imm16(sys: &mut System, imm: u16) { let old = sys.cpu.reg_16(Ax.into()); arith::and_16(sys, old, imm); } #[instr("TEST r/m8, imm8")] pub fn test_rm8_imm8(sys: &mut 
System, rm: RegMem, imm: u8) { let old = rm.get_8(sys); arith::and_8(sys, old, imm); } #[instr("TEST r/m16, imm16")] pub fn test_rm16_imm16(sys: &mut System, rm: RegMem, imm: u16) { let old = rm.get_16(sys); arith::and_16(sys, old, imm); } #[instr("TEST r/m8, r8")] pub fn test_rm8_r8(sys: &mut System, rm: RegMem, reg: GeneralByteReg) { let old = rm.get_8(sys); let reg = sys.cpu.reg_8(reg); arith::and_8(sys, old, reg); } #[instr("TEST r/m16, r16")] pub fn test_rm16_r16(sys: &mut System, rm: RegMem, reg: GeneralWordReg) { let old = rm.get_16(sys); let reg = sys.cpu.reg_16(reg.into()); arith::and_16(sys, old, reg); } #[instr("MUL r/m8")]
#[instr("MUL r/m16")] pub fn mul_rm16(sys: &mut System, rm: RegMem) { let multiplicand = rm.get_16(sys); let multiplier = sys.cpu.reg_16(Ax.into()); let value = multiplicand as u32 * multiplier as u32; let low = (value & 0xff) as u16; let high = (value >> 8) as u16; sys.cpu.set_reg_16(Ax.into(), low); sys.cpu.set_reg_16(Dx.into(), high); let extended = high != 0; sys.cpu.flags.carry = extended; sys.cpu.flags.overflow = extended; } macro_rules! check_div_8 { ($sys:ident, $value:ident, $dividend:ident, $divisor:ident) => { if let Ok($value) = u8::try_from($value) { let remainder = $dividend % $divisor; $sys.cpu.set_reg_8(Ah, remainder as u8); $sys.cpu.set_reg_8(Al, $value); } else { crate::ExtSystem::interrupt($sys, 0); } }; } macro_rules! check_div_16 { ($sys:ident, $value:ident, $dividend:ident, $divisor:ident) => { if let Ok($value) = u16::try_from($value) { let remainder = $dividend % $divisor; $sys.cpu.set_reg_16(Dx.into(), remainder as u16); $sys.cpu.set_reg_16(Ax.into(), $value); } else { crate::ExtSystem::interrupt($sys, 0); } }; } #[instr("DIV r/m8")] pub fn div_rm8(sys: &mut System, rm: RegMem) { let dividend = sys.cpu.reg_16(Ax.into()); let divisor = rm.get_8(sys) as u16; let value = dividend / divisor; check_div_8!(sys, value, dividend, divisor); } #[instr("DIV r/m16")] pub fn div_rm16(sys: &mut System, rm: RegMem) { let dx = sys.cpu.reg_16(Dx.into()); let ax = sys.cpu.reg_16(Ax.into()); let dividend = ((dx as u32) << 16) | ax as u32; let divisor = rm.get_16(sys) as u32; let value = dividend / divisor; check_div_16!(sys, value, dividend, divisor); } #[instr("IDIV r/m8")] pub fn idiv_rm8(sys: &mut System, rm: RegMem) { let dividend = sys.cpu.reg_16(Ax.into()) as i16; let divisor = rm.get_8(sys) as i16; let value = dividend / divisor; check_div_8!(sys, value, dividend, divisor); } #[instr("IDIV r/m16")] pub fn idiv_rm16(sys: &mut System, rm: RegMem) { let dx = sys.cpu.reg_16(Dx.into()); let ax = sys.cpu.reg_16(Ax.into()); let dividend = ((dx as i32) << 16) 
| ax as i32; let divisor = rm.get_16(sys) as i32; let value = dividend / divisor; check_div_16!(sys, value, dividend, divisor); }
pub fn mul_rm8(sys: &mut System, rm: RegMem) { let multiplicand = rm.get_8(sys); let multiplier = sys.cpu.reg_8(Al); let value = multiplicand as u16 * multiplier as u16; sys.cpu.set_reg_16(Ax.into(), value); let extended = value > u8::MAX as u16; sys.cpu.flags.carry = extended; sys.cpu.flags.overflow = extended; }
function_block-full_function
[ { "content": "#[instr(\"MOV r/m16, imm16\")]\n\npub fn mov_rm16_imm16(sys: &mut System, rm: RegMem, imm: u16) {\n\n rm.set_16(sys, imm);\n\n}\n\n\n", "file_path": "arch/x86/src/instr/transfer.rs", "rank": 2, "score": 441492.8412339837 }, { "content": "#[instr(\"MOV r/m8, imm8\")]\n\npub f...
Rust
battleships_engine/src/main.rs
r-o-b-o-t-o/quantum-battleships
3c2aaeeb08830634bcac23c411d504f5d168010c
#[macro_use] extern crate serde_derive; use q1tsim::circuit::Circuit; use std::io::{ Error, ErrorKind }; pub mod ship; pub mod player; pub mod coords; use player::Player; macro_rules! arr { ( $( $n:expr )? ) => { { $( let mut array = Vec::with_capacity($n); for i in 0..$n { array.push(i); } array )* } }; } #[derive(Default, Serialize, Deserialize, Debug)] struct GameState { pub players: [ Player; 2 ], pub winner: i32, } #[derive(Deserialize, Debug)] struct Query { pub shots: usize, #[serde(alias = "gameState")] pub game_state: GameState, } fn read_line() -> std::io::Result<String> { let mut s = String::new(); std::io::stdin().read_line(&mut s)?; Ok(s.trim().into()) } fn game_loop(query: &mut Query) -> Result<(), q1tsim::error::Error> { let game_state = &mut query.game_state; for player in 0..2 { game_state.players[player].reset_damage(); for ship_idx in 0..game_state.players[player].ships.len() { let qbits: usize = game_state.players[player].ships[ship_idx].blocks.len(); let cbits: usize = qbits; let mut circuit = Circuit::new(qbits, cbits); for bomb in game_state.players[(player + 1) % 2].bombs.iter() { let ship = &game_state.players[player].ships[ship_idx]; if ship.contains_coords(bomb) { let bit = ship.get_block_index_at_pos(bomb); let frac = 1.0_f64 / (ship.health as f64); circuit.u3(frac * std::f64::consts::PI, 0.0, 0.0, bit as usize)?; } } circuit.measure_all(&arr![qbits])?; circuit.execute(query.shots)?; let result = circuit.histogram()?; for (key, val) in result { for power in find_powers_of_two(key as i32) { let block_idx = which_power_of_two(power) as usize; let dmg = ((val as f32 / query.shots as f32) * 100.0).round() as i32; let ship = &mut game_state.players[player].ships[ship_idx]; ship.add_damage(block_idx, dmg); } } } } for (player_idx, player) in game_state.players.iter().enumerate() { if player.ships.iter().all(|ship| ship.is_sunk()) { game_state.winner = ((player_idx as i32 + 1) % 2) + 1 } } Ok(()) } fn main() -> Result<(), Box<dyn 
std::error::Error>> { loop { let command = read_line()?; if command == "blankGameState" { let game_state = GameState::default(); println!("{}", serde_json::to_string(&game_state)?); } else if command == "updateGameState" { let query_string = read_line()?; let mut q = serde_json::from_str(&query_string)?; game_loop(&mut q).map_err(|e| Error::new(ErrorKind::Other, e.to_string()))?; println!("{}", serde_json::to_string(&q.game_state)?); } else if command == "quit" { break; } } Ok(()) } fn find_powers_of_two(x: i32) -> Vec<i32> { let mut powers = Vec::new(); let mut i = 1; while i <= x { if i & x > 0 { powers.push(i); } i <<= 1; } powers } fn which_power_of_two(n: i32) -> i32 { (n as f32).log2() as i32 }
#[macro_use] extern crate serde_derive; use q1tsim::circuit::Circuit; use std::io::{ Error, ErrorKind }; pub mod ship; pub mod player; pub mod coords; use player::Player; macro_rules! arr { ( $( $n:expr )? ) => { { $( let mut array = Vec::with_capacity($n); for i in 0..$n { array.push(i); } array )* } }; } #[derive(Default, Serialize, Deserialize, Debug)] struct GameState { pub players: [ Player; 2 ], pub winner: i32, } #[derive(Deserialize, Debug)] struct Query { pub shots: usize, #[serde(alias = "gameState")] pub game_state: GameState, } fn read_line() -> std::io::Result<String> { let mut s = String::new(); std::io::stdin().read_line(&mut s)?; Ok(s.trim().into()) } fn game_loop(query: &mut Query) -> Result<(), q1tsim::error::Error> { let game_state = &mut query.game_state; for player in 0..2 { game_state.players[player].reset_damage(); for ship_idx in 0..game_state.players[player].ships.len() { let qbits: usize = game_state.players[player].ships[ship_idx].blocks.len(); let cbits: usize = qbits; let mut circuit = Circuit::new(qbits, cbits); for bomb in game_state.players[(player + 1) % 2].bombs.iter() { let ship = &game_state.players[player].ships[ship_idx]; if ship.contains_coords(bomb) { let bit = ship.get_block_index_at_pos(bomb); let frac = 1.0_f64 / (ship.health as f64); circuit.u3(frac * std::f64::consts::PI, 0.0, 0.0, bit as usize)?; } } circuit.measure_all(&arr![qbits])?; circuit.execute(query.shots)?; let result = circuit.histogram()?; for (key, val) in result {
fn main() -> Result<(), Box<dyn std::error::Error>> { loop { let command = read_line()?; if command == "blankGameState" { let game_state = GameState::default(); println!("{}", serde_json::to_string(&game_state)?); } else if command == "updateGameState" { let query_string = read_line()?; let mut q = serde_json::from_str(&query_string)?; game_loop(&mut q).map_err(|e| Error::new(ErrorKind::Other, e.to_string()))?; println!("{}", serde_json::to_string(&q.game_state)?); } else if command == "quit" { break; } } Ok(()) } fn find_powers_of_two(x: i32) -> Vec<i32> { let mut powers = Vec::new(); let mut i = 1; while i <= x { if i & x > 0 { powers.push(i); } i <<= 1; } powers } fn which_power_of_two(n: i32) -> i32 { (n as f32).log2() as i32 }
for power in find_powers_of_two(key as i32) { let block_idx = which_power_of_two(power) as usize; let dmg = ((val as f32 / query.shots as f32) * 100.0).round() as i32; let ship = &mut game_state.players[player].ships[ship_idx]; ship.add_damage(block_idx, dmg); } } } } for (player_idx, player) in game_state.players.iter().enumerate() { if player.ships.iter().all(|ship| ship.is_sunk()) { game_state.winner = ((player_idx as i32 + 1) % 2) + 1 } } Ok(()) }
function_block-function_prefix_line
[ { "content": "#[derive(Default, Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub struct Coords {\n\n pub x: i32,\n\n pub y: i32,\n\n}\n\n\n\nimpl Coords {\n\n pub fn new(x: i32, y: i32) -> Self {\n\n Self { x, y }\n\n }\n\n}\n", "file_path": "battleships_engine/src/coords.rs...
Rust
sources/rust/epoll/src/platform/api.rs
xunilrj/sandbox
f92c12f83433cac01a885585e41c02bb5826a01f
use super::EpollEvent;
use crate::platform::linux_x86_64::{
    accept4, bind, close, epoll_ctl, listen, read, socket, write, Accept4Flags::NonBlock,
    EpollEventBuilder, EpollOperation, Errno, Protocol, SinFamily, SockType, SocketAddress,
    SocketAddressInet, SocketAddressInetBuilder,
};
use log::trace;
use std::{
    fmt::Debug,
    pin::Pin,
    sync::{
        atomic::{AtomicPtr, AtomicUsize, Ordering},
        Arc, Mutex, RwLock,
    },
    task::{Poll, RawWaker, Waker},
};

/// Owning wrapper around a raw socket file descriptor; the descriptor is
/// closed on drop unless it was moved out by `attach` (which zeroes it).
pub struct Socket(pub u16);

impl Debug for Socket {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(&format!("Socket({})", self.0))
    }
}

impl Drop for Socket {
    fn drop(&mut self) {
        // fd 0 serves as the "already moved out / invalid" marker.
        if self.0 > 0 {
            let _ = close(self.0);
        }
    }
}

impl Socket {
    /// Creates a socket, binds it to `port` and starts listening
    /// (fixed backlog of 1000).
    pub fn new(
        domain: SinFamily,
        sock_type: SockType,
        protocol: Protocol,
        port: u16,
    ) -> Result<Self, Errno> {
        let socket = socket(domain, sock_type, protocol)?;
        let socket = Socket(socket);
        let addr = SocketAddressInetBuilder::default()
            .port(port)
            .build()
            .unwrap();
        socket.bind(&addr)?;
        socket.listen(1000)?;
        Ok(socket)
    }

    /// Binds the descriptor to `addr`.
    pub fn bind<T: SocketAddress>(&self, addr: &T) -> Result<(), Errno> {
        bind(self.0, addr)
    }

    /// Marks the socket as listening with the given backlog.
    pub fn listen(&self, backlog: u16) -> Result<(), Errno> {
        listen(self.0, backlog)
    }

    /// Transfers ownership of the descriptor into an `EpollSocket`
    /// registered with `epoll`.
    pub fn attach<'a>(mut self, epoll: &'a Epoll) -> EpollSocket<'a> {
        // Swap the fd out so this Socket's Drop no longer closes it.
        let mut fd = 0;
        std::mem::swap(&mut fd, &mut self.0);
        // NOTE(review): `reserve` is declared `&mut self` but is called
        // here through a shared `&Epoll` — looks like this cannot
        // borrow-check as written; confirm against the `Epoll` actually
        // in use.
        let idx = epoll.reserve();
        let mut event = EpollEvent::new(idx);
        epoll_ctl(epoll.fd, EpollOperation::Add, fd, &mut event).unwrap();
        EpollSocket { fd, epoll, idx }
    }
}

/// One waker slot: `free` marks availability; when occupied, `data` holds
/// a raw `*const Waker` cast to usize (see `Epoll::store`/`take`).
#[derive(Copy, Clone)]
struct EpollData {
    free: bool,
    data: usize,
}

/// Wrapper around an epoll instance plus a fixed table (100 slots) of
/// waker pointers, keyed by the index embedded in each epoll event.
pub struct Epoll {
    pub fd: u16,
    ptrs: Mutex<Vec<EpollData>>,
    nextptr: usize,
}

impl Drop for Epoll {
    fn drop(&mut self) {
        let _ = close(self.fd);
    }
}

impl Epoll {
    pub fn new(fd: u16) -> Self {
        Self {
            fd,
            ptrs: Mutex::new(vec![
                EpollData { free: true, data: 0 };
                100
            ]),
            nextptr: 0,
        }
    }

    /// Claims the next free slot (linear scan from `nextptr`, wrapping)
    /// and returns its index.
    /// NOTE(review): spins forever when every slot is taken — confirm
    /// callers can never exceed the 100-slot table.
    pub fn reserve(&mut self) -> usize {
        let mut g = self.ptrs.lock().unwrap();
        let mut idx = self.nextptr;
        while !g[idx].free {
            idx += 1;
            if idx >= g.len() {
                idx = 0;
            }
        }
        g[idx] = EpollData {
            free: false,
            data: 0,
        };
        self.nextptr += 1;
        if self.nextptr >= g.len() {
            self.nextptr = 0;
        }
        idx
    }

    /// Frees slot `idx` and returns the waker stored in it, if any.
    pub fn take(&mut self, idx: usize) -> Option<&Waker> {
        let mut g = self.ptrs.lock().unwrap();
        if g[idx].free {
            None
        } else {
            g[idx].free = true;
            // NOTE(review): reconstitutes a `&Waker` from the raw pointer
            // saved by `store`; this dangles if the original waker was
            // dropped in the meantime — consider storing `waker.clone()`.
            Some(unsafe { &*(g[idx].data as *const Waker) })
        }
    }

    /// Records a raw pointer to `waker` in slot `idx` (paired with `take`).
    pub fn store(&mut self, idx: usize, waker: &Waker) {
        let mut g = self.ptrs.lock().unwrap();
        g[idx].data = waker as *const Waker as usize;
    }
}

/// A non-blocking socket registered with an `Epoll`; deregisters and
/// closes the descriptor on drop.
pub struct EpollSocket<'a> {
    fd: u16,
    epoll: &'a Epoll,
    idx: usize,
}

impl<'a> Debug for EpollSocket<'a> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(&format!(
            "EpollSocket(fd: {},epollfd: {})",
            self.fd, self.epoll.fd
        ))
    }
}

impl<'a> Drop for EpollSocket<'a> {
    fn drop(&mut self) {
        if self.fd > 0 {
            trace!(target:"SocketApi", "Socket closing {}", self.fd);
            epoll_ctl(
                self.epoll.fd,
                EpollOperation::Delete,
                self.fd,
                &mut EpollEventBuilder::uninitialized(),
            )
            .unwrap();
            close(self.fd).unwrap();
        }
    }
}

impl<'a> EpollSocket<'a> {
    /// Accepts one incoming connection, re-arms this socket's epoll
    /// registration, and registers the accepted fd under a fresh slot.
    pub async fn accept_async<'b>(
        &'b mut self,
    ) -> Result<(EpollSocket<'a>, SocketAddressInet), Errno> {
        let (fd, addr) = Accept4Awaitable(&self.fd, &self.epoll, &self.idx).await;
        // Re-register interest for the listening socket itself.
        let mut event = EpollEvent::new(self.idx);
        epoll_ctl(self.epoll.fd, EpollOperation::Modify, self.fd, &mut event)?;
        // Register the newly accepted connection.
        let idx = self.epoll.reserve();
        let mut event = EpollEvent::new(idx);
        epoll_ctl(self.epoll.fd, EpollOperation::Add, fd, &mut event)?;
        Ok((
            EpollSocket {
                fd,
                epoll: self.epoll,
                idx,
            },
            addr,
        ))
    }

    /// Reads up to `size` bytes; resolves to (buffer, bytes_read).
    pub async fn read<'b>(&'b mut self, size: usize) -> Result<(Vec<u8>, usize), Errno> {
        ReadAwaitable(size, &self.fd, &self.epoll, &self.idx).await
    }

    /// Writes `buffer`; resolves to the number of bytes written.
    pub async fn write<'b>(&'b mut self, buffer: &'a [u8]) -> Result<usize, Errno> {
        WriteAwaitable(buffer, &self.fd, &self.epoll, &self.idx).await
    }
}

/// Future that polls `accept4` until a connection is available.
struct Accept4Awaitable<'a>(&'a u16, &'a Epoll, &'a usize);

impl<'a> std::future::Future for Accept4Awaitable<'a> {
    type Output = (u16, SocketAddressInet);

    fn poll(
        self: std::pin::Pin<&mut Self>,
        ctx: &mut std::task::Context<'_>,
    ) -> std::task::Poll<Self::Output> {
        let Accept4Awaitable(fd, epoll, idx) = self.get_mut();
        trace!(target:"SocketApi", "Accept4Awaitable {}", **fd);
        // Save the waker so the epoll loop can wake this task later.
        epoll.store(**idx, ctx.waker());
        let mut addr: SocketAddressInet = SocketAddressInetBuilder::build_default();
        match accept4(**fd, &mut addr, NonBlock) {
            Ok(fd) => Poll::Ready((fd, addr)),
            Err(Errno::WouldBlock) => Poll::Pending,
            // NOTE(review): hard errors are swallowed as Pending, so the
            // future can hang on a real failure — confirm intended.
            Err(_) => Poll::Pending,
        }
    }
}

/// Future that polls `read` until data is available; a zero-length read
/// (peer closed) is reported as `BadFileDescriptor`.
struct ReadAwaitable<'a>(usize, &'a u16, &'a Epoll, &'a usize);

impl<'a> std::future::Future for ReadAwaitable<'a> {
    type Output = Result<(Vec<u8>, usize), Errno>;

    fn poll(
        self: std::pin::Pin<&mut Self>,
        ctx: &mut std::task::Context<'_>,
    ) -> std::task::Poll<Self::Output> {
        let ReadAwaitable(size, fd, epoll, idx) = self.get_mut();
        trace!(target:"SocketApi", "ReadAwaitable {}", **fd);
        epoll.store(**idx, ctx.waker());
        let mut buf = vec![0; *size];
        match read(**fd, &mut buf) {
            Ok(size) => {
                if size == 0 {
                    // 0 bytes read means the peer closed the connection.
                    Poll::Ready(Err(Errno::BadFileDescriptor))
                } else {
                    Poll::Ready(Ok((buf, size as usize)))
                }
            }
            Err(Errno::WouldBlock) => Poll::Pending,
            Err(err) => Poll::Ready(Err(err)),
        }
    }
}

/// Future that polls `write` until the kernel accepts the buffer.
struct WriteAwaitable<'a>(&'a [u8], &'a u16, &'a Epoll, &'a usize);

impl<'a> std::future::Future for WriteAwaitable<'a> {
    type Output = Result<usize, Errno>;

    fn poll(
        self: std::pin::Pin<&mut Self>,
        ctx: &mut std::task::Context<'_>,
    ) -> std::task::Poll<Self::Output> {
        let WriteAwaitable(buffer, fd, epoll, idx) = self.get_mut();
        trace!(target:"SocketApi", "WriteAwaitable {}", **fd);
        epoll.store(**idx, ctx.waker());
        match write(**fd, buffer) {
            Ok(size) => Poll::Ready(Ok(size as usize)),
            Err(Errno::WouldBlock) => Poll::Pending,
            Err(err) => Poll::Ready(Err(err)),
        }
    }
}
use super::EpollEvent; use crate::platform::linux_x86_64::{ accept4, bind, close, epoll_ctl, listen, read, socket, write, Accept4Flags::NonBlock, EpollEventBuilder, EpollOperation, Errno, Protocol, SinFamily, SockType, SocketAddress, SocketAddressInet, SocketAddressInetBuilder, }; use log::trace; use std::{ fmt::Debug, pin::Pin, sync::{ atomic::{AtomicPtr, AtomicUsize, Ordering}, Arc, Mutex, RwLock, }, task::{Poll, RawWaker, Waker}, }; pub struct Socket(pub u16); impl Debug for Socket { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.write_str(&format!("Socket({})", self.0)) } } impl Drop for Socket { fn drop(&mut self) { if self.0 > 0 { let _ = close(self.0); } } } impl Socket { pub fn new( domain: SinFamily, sock_type: SockType, protocol: Protocol, port: u16, ) -> Result<Self, Errno> { let socket = socket(domain, sock_type, protocol)?; let socket = Socket(socket); let addr = SocketAddressInetBuilder::default() .port(port) .build() .unwrap(); socket.bind(&addr)?; socket.listen(1000)?; Ok(socket) } pub fn bind<T: SocketAddress>(&self, addr: &T) -> Result<(), Errno> { bind(self.0, addr) } pub fn listen(&self, backlog: u16) -> Result<(), Errno> { listen(self.0, backlog) } pub fn attach<'a>(mut self, epoll: &'a Epoll) -> EpollSocke
EpollEvent::new(idx); epoll_ctl(epoll.fd, EpollOperation::Add, fd, &mut event).unwrap(); EpollSocket { fd, epoll, idx } } } #[derive(Copy, Clone)] struct EpollData { free: bool, data: usize, } pub struct Epoll { pub fd: u16, ptrs: Mutex<Vec<EpollData>>, nextptr: usize, } impl Drop for Epoll { fn drop(&mut self) { let _ = close(self.fd); } } impl Epoll { pub fn new(fd: u16) -> Self { Self { fd, ptrs: Mutex::new(vec![ EpollData { free: true, data: 0 }; 100 ]), nextptr: 0, } } pub fn reserve(&mut self) -> usize { let mut g = self.ptrs.lock().unwrap(); let mut idx = self.nextptr; while !g[idx].free { idx += 1; if idx >= g.len() { idx = 0; } } g[idx] = EpollData { free: false, data: 0, }; self.nextptr += 1; if self.nextptr >= g.len() { self.nextptr = 0; } idx } pub fn take(&mut self, idx: usize) -> Option<&Waker> { let mut g = self.ptrs.lock().unwrap(); if g[idx].free { None } else { g[idx].free = true; Some(unsafe { &*(g[idx].data as *const Waker) }) } } pub fn store(&mut self, idx: usize, waker: &Waker) { let mut g = self.ptrs.lock().unwrap(); g[idx].data = waker as *const Waker as usize; } } pub struct EpollSocket<'a> { fd: u16, epoll: &'a Epoll, idx: usize, } impl<'a> Debug for EpollSocket<'a> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.write_str(&format!( "EpollSocket(fd: {},epollfd: {})", self.fd, self.epoll.fd )) } } impl<'a> Drop for EpollSocket<'a> { fn drop(&mut self) { if self.fd > 0 { trace!(target:"SocketApi", "Socket closing {}", self.fd); epoll_ctl( self.epoll.fd, EpollOperation::Delete, self.fd, &mut EpollEventBuilder::uninitialized(), ) .unwrap(); close(self.fd).unwrap(); } } } impl<'a> EpollSocket<'a> { pub async fn accept_async<'b>( &'b mut self, ) -> Result<(EpollSocket<'a>, SocketAddressInet), Errno> { let (fd, addr) = Accept4Awaitable(&self.fd, &self.epoll, &self.idx).await; let mut event = EpollEvent::new(self.idx); epoll_ctl(self.epoll.fd, EpollOperation::Modify, self.fd, &mut event)?; let idx = self.epoll.reserve(); 
let mut event = EpollEvent::new(idx); epoll_ctl(self.epoll.fd, EpollOperation::Add, fd, &mut event)?; Ok(( EpollSocket { fd, epoll: self.epoll, idx, }, addr, )) } pub async fn read<'b>(&'b mut self, size: usize) -> Result<(Vec<u8>, usize), Errno> { ReadAwaitable(size, &self.fd, &self.epoll, &self.idx).await } pub async fn write<'b>(&'b mut self, buffer: &'a [u8]) -> Result<usize, Errno> { WriteAwaitable(buffer, &self.fd, &self.epoll, &self.idx).await } } struct Accept4Awaitable<'a>(&'a u16, &'a Epoll, &'a usize); impl<'a> std::future::Future for Accept4Awaitable<'a> { type Output = (u16, SocketAddressInet); fn poll( self: std::pin::Pin<&mut Self>, ctx: &mut std::task::Context<'_>, ) -> std::task::Poll<Self::Output> { let Accept4Awaitable(fd, epoll, idx) = self.get_mut(); trace!(target:"SocketApi", "Accept4Awaitable {}", **fd); epoll.store(**idx, ctx.waker()); let mut addr: SocketAddressInet = SocketAddressInetBuilder::build_default(); match accept4(**fd, &mut addr, NonBlock) { Ok(fd) => Poll::Ready((fd, addr)), Err(Errno::WouldBlock) => Poll::Pending, Err(_) => Poll::Pending, } } } struct ReadAwaitable<'a>(usize, &'a u16, &'a Epoll, &'a usize); impl<'a> std::future::Future for ReadAwaitable<'a> { type Output = Result<(Vec<u8>, usize), Errno>; fn poll( self: std::pin::Pin<&mut Self>, ctx: &mut std::task::Context<'_>, ) -> std::task::Poll<Self::Output> { let ReadAwaitable(size, fd, epoll, idx) = self.get_mut(); trace!(target:"SocketApi", "ReadAwaitable {}", **fd); epoll.store(**idx, ctx.waker()); let mut buf = vec![0; *size]; match read(**fd, &mut buf) { Ok(size) => { if size == 0 { Poll::Ready(Err(Errno::BadFileDescriptor)) } else { Poll::Ready(Ok((buf, size as usize))) } } Err(Errno::WouldBlock) => Poll::Pending, Err(err) => Poll::Ready(Err(err)), } } } struct WriteAwaitable<'a>(&'a [u8], &'a u16, &'a Epoll, &'a usize); impl<'a> std::future::Future for WriteAwaitable<'a> { type Output = Result<usize, Errno>; fn poll( self: std::pin::Pin<&mut Self>, ctx: &mut 
std::task::Context<'_>, ) -> std::task::Poll<Self::Output> { let WriteAwaitable(buffer, fd, epoll, idx) = self.get_mut(); trace!(target:"SocketApi", "WriteAwaitable {}", **fd); epoll.store(**idx, ctx.waker()); match write(**fd, buffer) { Ok(size) => Poll::Ready(Ok(size as usize)), Err(Errno::WouldBlock) => Poll::Pending, Err(err) => Poll::Ready(Err(err)), } } }
t<'a> { let mut fd = 0; std::mem::swap(&mut fd, &mut self.0); let idx = epoll.reserve(); let mut event =
function_block-random_span
[ { "content": "#[inline(always)]\n\npub fn listen(sockfd: u16, backlog: u16) -> Result<(), Errno> {\n\n syscall2::<usize>(Syscall::Listen, sockfd as usize, backlog as usize).map(|_| {})\n\n}\n\n\n\n#[derive(Debug, Clone, Copy)]\n\n#[allow(dead_code)]\n\npub enum SockType {\n\n Stream = 1,\n\n NonBlock =...
Rust
src/mir/trees.rs
nystrom/rivo-mu
494a99e96842ce445386d0a0b95e4639b5d4f6a1
use crate::common::names::Name; use crate::mir::ops::*; #[derive(Clone, Debug)] pub struct Root { pub externs: Vec<Param>, pub data: Vec<Data>, pub procs: Vec<Proc> } #[derive(Clone, Debug)] pub struct Data { pub ty: Type, pub name: Name, pub init: Lit, } #[derive(Clone, Debug, PartialEq, Eq)] pub struct Param { pub ty: Type, pub name: Name, } #[derive(Clone, Debug)] pub struct Proc { pub ret_type: Type, pub name: Name, pub params: Vec<Param>, pub body: Box<Exp> } #[derive(Clone, Debug)] pub enum Stm { Nop, CJump { cond: Box<Exp>, if_true: Name, if_false: Name }, Jump { label: Name }, Label { label: Name }, Move { ty: Type, lhs: Name, rhs: Box<Exp> }, Store { ty: Type, ptr: Box<Exp>, value: Box<Exp> }, Return { exp: Box<Exp> }, } #[derive(Clone, Debug)] pub enum Exp { Block { body: Vec<Stm>, exp: Box<Exp> }, Call { fun_type: Type, fun: Box<Exp>, args: Vec<Exp> }, Load { ty: Type, ptr: Box<Exp> }, Binary { op: Bop, e1: Box<Exp>, e2: Box<Exp> }, Unary { op: Uop, exp: Box<Exp> }, Cast { ty: Type, exp: Box<Exp> }, Lit { lit: Lit }, FunctionAddr { name: Name, ty: Type }, GlobalAddr { name: Name, ty: Type }, Temp { name: Name, ty: Type }, GetStructElementAddr { struct_ty: Type, ptr: Box<Exp>, field: usize }, GetArrayElementAddr { base_ty: Type, ptr: Box<Exp>, index: Box<Exp> }, New { ty: Type }, NewHybrid { ty: Type, length: Box<Exp> }, } #[derive(Clone, Debug, PartialEq, Eq, Hash)] pub enum Type { I1, I8, I16, I32, I64, F32, F64, Void, IRef { ty: Box<Type> }, Ref { ty: Box<Type> }, Ptr { ty: Box<Type> }, Hybrid { fields: Vec<Type>, variant: Box<Type> }, Struct { fields: Vec<Type> }, Union { variants: Vec<Type> }, Fun { ret: Box<Type>, args: Vec<Type> }, } impl Type { pub fn word() -> Type { Type::I64 } } #[derive(Clone, Debug, PartialEq)] pub enum Lit { Void, Null { ty: Type }, I1 { value: bool }, I8 { value: i8 }, I16 { value: i16 }, I32 { value: i32 }, I64 { value: i64 }, F32 { value: f32 }, F64 { value: f64 }, Sizeof { ty: Type }, }
use crate::common::names::Name; use crate::mir::ops::*; #[derive(Clone, Debug)] pub struct Root { pub externs: Vec<Param>, pub data: Vec<Data>, pub procs: Vec<Proc> } #[derive(Clone, Debug)] pub struct Data { pub ty: Type, pub name: Name, pub init: Lit, } #[derive(Clone, Debug, PartialEq, Eq)] pub struct Param { pub ty: Type, pub name: Name, } #[derive(Clone, Debug)] pub struct Proc { pub ret_type: Type, pub name: Name, pub params: Vec<Param>, pub body: Box<Exp> } #[derive(Clone, De
Return { exp: Box<Exp> }, } #[derive(Clone, Debug)] pub enum Exp { Block { body: Vec<Stm>, exp: Box<Exp> }, Call { fun_type: Type, fun: Box<Exp>, args: Vec<Exp> }, Load { ty: Type, ptr: Box<Exp> }, Binary { op: Bop, e1: Box<Exp>, e2: Box<Exp> }, Unary { op: Uop, exp: Box<Exp> }, Cast { ty: Type, exp: Box<Exp> }, Lit { lit: Lit }, FunctionAddr { name: Name, ty: Type }, GlobalAddr { name: Name, ty: Type }, Temp { name: Name, ty: Type }, GetStructElementAddr { struct_ty: Type, ptr: Box<Exp>, field: usize }, GetArrayElementAddr { base_ty: Type, ptr: Box<Exp>, index: Box<Exp> }, New { ty: Type }, NewHybrid { ty: Type, length: Box<Exp> }, } #[derive(Clone, Debug, PartialEq, Eq, Hash)] pub enum Type { I1, I8, I16, I32, I64, F32, F64, Void, IRef { ty: Box<Type> }, Ref { ty: Box<Type> }, Ptr { ty: Box<Type> }, Hybrid { fields: Vec<Type>, variant: Box<Type> }, Struct { fields: Vec<Type> }, Union { variants: Vec<Type> }, Fun { ret: Box<Type>, args: Vec<Type> }, } impl Type { pub fn word() -> Type { Type::I64 } } #[derive(Clone, Debug, PartialEq)] pub enum Lit { Void, Null { ty: Type }, I1 { value: bool }, I8 { value: i8 }, I16 { value: i16 }, I32 { value: i32 }, I64 { value: i64 }, F32 { value: f32 }, F64 { value: f64 }, Sizeof { ty: Type }, }
bug)] pub enum Stm { Nop, CJump { cond: Box<Exp>, if_true: Name, if_false: Name }, Jump { label: Name }, Label { label: Name }, Move { ty: Type, lhs: Name, rhs: Box<Exp> }, Store { ty: Type, ptr: Box<Exp>, value: Box<Exp> },
random
[ { "content": "pub fn translate_lir(name: &str, h: &hir::Root) -> lir::Root {\n\n println!(\"HIR {:#?}\", h);\n\n\n\n let m = mir_gen::Translate::translate(h);\n\n println!(\"MIR {:#?}\", m);\n\n\n\n let l = lir_gen::Translate::translate(&m);\n\n println!(\"LIR {:#?}\", l);\n\n\n\n l\n\n}\n\n\n...
Rust
src/event/tank.rs
zKevz/GrowRust
c76d21f28a38d70d450a5e1223e09b6c11f8cef2
use std::{cell::RefMut, time::Instant};

use super::event::EventContext;
use crate::{
    consts::{item_type, items, packet::TankUpdatePacket, packet_type},
    item::iteminfo_manager::ITEM_MANAGER,
    utils::{
        error::{Error, Result},
        math::Vec2f,
        variant_function::VariantFunction::*,
    },
    world::world::World,
};

// Bit flags returned by on_tile_build_request telling `handle` what
// follow-up work to perform on the player's inventory.
const RETURN_STATE_NONE: u32 = 0;
const RETURN_STATE_REMOVE_ITEM: u32 = 1 << 0;
const RETURN_STATE_SEND_MODIFY_ITEM_VISUAL: u32 = 1 << 1;

/// Dispatches one incoming tank-update packet for the player's current
/// world: movement state, tile punch/build, item and tile activation.
pub fn handle(mut ctx: EventContext) -> Result<()> {
    if let Ok(world) = ctx.player.get_world(ctx.world_manager) {
        let mut world = world.borrow_mut();
        let mut tankpacket = TankUpdatePacket::from(&ctx.packet_data[4..])?;
        tankpacket.net_id = ctx.player.net_id;
        match tankpacket.packet_type {
            packet_type::STATE => {
                // Movement/flags update; broadcast only when another peer
                // is present to see it.
                ctx.player.pos = Vec2f {
                    x: tankpacket.pos_x,
                    y: tankpacket.pos_y,
                };
                ctx.player.char_flags = tankpacket.flags;
                if world.peers.len() > 1 {
                    world.push_tankpacket(tankpacket);
                    world.send_all();
                }
            }
            packet_type::TILE_CHANGE_REQUEST => {
                let item = ITEM_MANAGER.get_item_safe(tankpacket.int_val as u32)?;
                let result: Result<()>;
                // Fist = punch, wrench = tile interaction, anything else
                // is a build attempt with that item.
                match item.id {
                    items::FIST => result = on_tile_punch_request(&mut ctx, tankpacket, &mut world),
                    items::WRENCH => {
                        result = on_tile_wrench_request(&mut ctx, tankpacket, &mut world)
                    }
                    _ => match on_tile_build_request(&mut ctx, tankpacket, &mut world) {
                        Ok(state) => {
                            result = Ok(());
                            // Consume the placed item when the build
                            // handler says so.
                            if state & RETURN_STATE_REMOVE_ITEM != 0 {
                                ctx.player.remove_item(
                                    item.id,
                                    1,
                                    state & RETURN_STATE_SEND_MODIFY_ITEM_VISUAL != 0,
                                )?;
                            }
                        }
                        Err(e) => result = Err(e),
                    },
                }
                world.send_all();
                return result;
            }
            packet_type::ITEM_ACTIVATE_REQUEST => {
                let item = ITEM_MANAGER.get_item_safe(tankpacket.int_val as u32)?;
                if ctx.player.has_item(item.id) {
                    match item.item_type {
                        item_type::ANCES | item_type::CLOTHES => {
                            // Equip clothing and broadcast the new look.
                            ctx.player.equip(item.id);
                            world.push_varfn_v(
                                OnSetClothing(ctx.player.cloth, true),
                                ctx.player.net_id,
                                -1,
                            );
                            world.send_all();
                        }
                        _ => {}
                    }
                } else {
                    // Activating an item the player doesn't own is a
                    // malformed (or cheating) packet.
                    return Err(Error::InvalidPacketError);
                }
            }
            packet_type::TILE_ACTIVATE_REQUEST => {
                let tile = world.get_tile_safe(tankpacket.tile_x as u32, tankpacket.tile_y as u32)?;
                let fore = ITEM_MANAGER.get_item_safe(tile.fore as u32)?;
                match fore.item_type {
                    item_type::MAIN_DOOR => {
                        // Entering the main door leaves the world and
                        // shows the world menu again.
                        ctx.world_manager.exit_world(ctx.player, world);
                        ctx.player.send_world_menu(ctx.host);
                    }
                    _ => {}
                }
            }
            packet_type::SET_ICON_STATE => {
                world.push_tankpacket(tankpacket);
                world.send_all();
            }
            _ => println!("Unhandled packet type: {}", tankpacket.packet_type),
        }
    }
    Ok(())
}

/// Applies one punch to the targeted tile; resets the hit counter after
/// the tile's heal window and clears the tile once enough hits land.
fn on_tile_punch_request(
    _: &mut EventContext,
    mut tankpacket: TankUpdatePacket,
    world: &mut RefMut<World>,
) -> Result<()> {
    let tile = world.get_tile_safe(tankpacket.tile_x as u32, tankpacket.tile_y as u32)?;
    let base = tile.get_base()?;
    if base.id == items::BLANK {
        // Nothing to punch on an empty tile.
        return Ok(());
    }
    // Past the heal window the previous hits no longer count.
    if (Instant::now() - tile.last_punch).as_secs() > base.heal_time as u64 {
        tile.hit_count = 0;
        tile.last_punch = Instant::now();
    }
    tile.hit_count += 1;
    tankpacket.packet_type = packet_type::TILE_APPLY_DAMAGE;
    tankpacket.int_val = 6;
    if tile.hit_count >= base.hits_to_destroy {
        // Tile breaks: broadcast as a tile change back to empty.
        tankpacket.packet_type = packet_type::TILE_CHANGE_REQUEST;
        tankpacket.int_val = items::FIST as i32;
        tile.remove_base();
    }
    world.push_tankpacket(tankpacket);
    Ok(())
}

/// Wrench interaction — not implemented yet; intentionally a no-op.
fn on_tile_wrench_request(
    _: &mut EventContext,
    _: TankUpdatePacket,
    _: &mut RefMut<World>,
) -> Result<()> {
    Ok(())
}

/// Attempts to place the item in `tankpacket.int_val` on the targeted
/// tile. Returns RETURN_STATE_* flags telling the caller whether to
/// consume the item from the player's inventory.
fn on_tile_build_request(
    ctx: &mut EventContext,
    tankpacket: TankUpdatePacket,
    world: &mut RefMut<World>,
) -> Result<u32> {
    let tile = world.get_tile_safe(tankpacket.tile_x as u32, tankpacket.tile_y as u32)?;
    let fore = tile.get_fore()?;
    let base = tile.get_base()?;
    let item = ITEM_MANAGER.get_item_safe(tankpacket.int_val as u32)?;
    if !ctx.player.has_item(item.id) {
        // Building with an item the player doesn't own.
        return Err(Error::InvalidPacketError);
    }
    // NOTE(review): empty branch — looks like an unfinished occupancy
    // check; confirm intended behavior before relying on it.
    if base.id != items::BLANK {}
    if item.item_type == item_type::BACKGROUND
        || item.item_type == item_type::BACKGD_SFX_EXTRA_FRAME
        || item.item_type == item_type::MUSIC_NOTE
    {
        // Background-layer items always go to the back layer.
        tile.set_back(item.id);
    } else {
        if fore.id != items::BLANK {
            // Foreground already occupied: reject silently, keep the item.
            return Ok(RETURN_STATE_NONE);
        }
        match item.item_type {
            _ => {
                if item.extra {
                    ctx.player.send_log("`4Oops!`` This item is not handled yet. Please report to one of the developers.");
                    return Ok(RETURN_STATE_NONE);
                }
            }
        }
        tile.set_fore(item.id);
    }
    world.push_tankpacket(tankpacket);
    Ok(RETURN_STATE_REMOVE_ITEM)
}
use std::{cell::RefMut, time::Instant}; use super::event::EventContext; use crate::{ consts::{item_type, items, packet::TankUpdatePacket, packet_type}, item::iteminfo_manager::ITEM_MANAGER, utils::{ error::{Error, Result}, math::Vec2f, variant_function::VariantFunction::*, }, world::world::World, }; const RETURN_STATE_NONE: u32 = 0; const RETURN_STATE_REMOVE_ITEM: u32 = 1 << 0; const RETURN_STATE_SEND_MODIFY_ITEM_VISUAL: u32 = 1 << 1; pub fn handle(mut ctx: EventContext) -> Result<()> { if let Ok(world) = ctx.player.get_world(ctx.world_manager) { let mut world = world.borrow_mut(); let mut tankpacket = TankUpdatePacket::from(&ctx.packet_data[4..])?; tankpacket.net_id = ctx.player.net_id; match tankpacket.packet_type { packet_type::STATE => { ctx.player.pos = Vec2f { x: tankpacket.pos_x, y: tankpacket.pos_y, };
fn on_tile_punch_request( _: &mut EventContext, mut tankpacket: TankUpdatePacket, world: &mut RefMut<World>, ) -> Result<()> { let tile = world.get_tile_safe(tankpacket.tile_x as u32, tankpacket.tile_y as u32)?; let base = tile.get_base()?; if base.id == items::BLANK { return Ok(()); } if (Instant::now() - tile.last_punch).as_secs() > base.heal_time as u64 { tile.hit_count = 0; tile.last_punch = Instant::now(); } tile.hit_count += 1; tankpacket.packet_type = packet_type::TILE_APPLY_DAMAGE; tankpacket.int_val = 6; if tile.hit_count >= base.hits_to_destroy { tankpacket.packet_type = packet_type::TILE_CHANGE_REQUEST; tankpacket.int_val = items::FIST as i32; tile.remove_base(); } world.push_tankpacket(tankpacket); Ok(()) } fn on_tile_wrench_request( _: &mut EventContext, _: TankUpdatePacket, _: &mut RefMut<World>, ) -> Result<()> { Ok(()) } fn on_tile_build_request( ctx: &mut EventContext, tankpacket: TankUpdatePacket, world: &mut RefMut<World>, ) -> Result<u32> { let tile = world.get_tile_safe(tankpacket.tile_x as u32, tankpacket.tile_y as u32)?; let fore = tile.get_fore()?; let base = tile.get_base()?; let item = ITEM_MANAGER.get_item_safe(tankpacket.int_val as u32)?; if !ctx.player.has_item(item.id) { return Err(Error::InvalidPacketError); } if base.id != items::BLANK {} if item.item_type == item_type::BACKGROUND || item.item_type == item_type::BACKGD_SFX_EXTRA_FRAME || item.item_type == item_type::MUSIC_NOTE { tile.set_back(item.id); } else { if fore.id != items::BLANK { return Ok(RETURN_STATE_NONE); } match item.item_type { _ => { if item.extra { ctx.player.send_log("`4Oops!`` This item is not handled yet. Please report to one of the developers."); return Ok(RETURN_STATE_NONE); } } } tile.set_fore(item.id); } world.push_tankpacket(tankpacket); Ok(RETURN_STATE_REMOVE_ITEM) }
ctx.player.char_flags = tankpacket.flags; if world.peers.len() > 1 { world.push_tankpacket(tankpacket); world.send_all(); } } packet_type::TILE_CHANGE_REQUEST => { let item = ITEM_MANAGER.get_item_safe(tankpacket.int_val as u32)?; let result: Result<()>; match item.id { items::FIST => result = on_tile_punch_request(&mut ctx, tankpacket, &mut world), items::WRENCH => { result = on_tile_wrench_request(&mut ctx, tankpacket, &mut world) } _ => match on_tile_build_request(&mut ctx, tankpacket, &mut world) { Ok(state) => { result = Ok(()); if state & RETURN_STATE_REMOVE_ITEM != 0 { ctx.player.remove_item( item.id, 1, state & RETURN_STATE_SEND_MODIFY_ITEM_VISUAL != 0, )?; } } Err(e) => result = Err(e), }, } world.send_all(); return result; } packet_type::ITEM_ACTIVATE_REQUEST => { let item = ITEM_MANAGER.get_item_safe(tankpacket.int_val as u32)?; if ctx.player.has_item(item.id) { match item.item_type { item_type::ANCES | item_type::CLOTHES => { ctx.player.equip(item.id); world.push_varfn_v( OnSetClothing(ctx.player.cloth, true), ctx.player.net_id, -1, ); world.send_all(); } _ => {} } } else { return Err(Error::InvalidPacketError); } } packet_type::TILE_ACTIVATE_REQUEST => { let tile = world.get_tile_safe(tankpacket.tile_x as u32, tankpacket.tile_y as u32)?; let fore = ITEM_MANAGER.get_item_safe(tile.fore as u32)?; match fore.item_type { item_type::MAIN_DOOR => { ctx.world_manager.exit_world(ctx.player, world); ctx.player.send_world_menu(ctx.host); } _ => {} } } packet_type::SET_ICON_STATE => { world.push_tankpacket(tankpacket); world.send_all(); } _ => println!("Unhandled packet type: {}", tankpacket.packet_type), } } Ok(()) }
function_block-function_prefix_line
[ { "content": "pub fn save_world(mut world: RefMut<World>) -> Result<()> {\n\n if !Path::new(\"data/worlds\").exists() {\n\n fs::create_dir(\"data/worlds\")?;\n\n }\n\n\n\n let mut data = Vec::<u8>::with_capacity((world.width * world.height * 20) as usize);\n\n world.serialize(&mut data, None)...
Rust
src/protocol.rs
Sliman4/bevy_slinet
32f7a3a95f029bf16f7fab1fd665a7ad84fe2f4d
use io::Write; use std::fmt::{Debug, Formatter}; use std::io; use std::net::SocketAddr; use std::sync::atomic::Ordering; use async_trait::async_trait; use crate::connection::MAX_PACKET_SIZE; use crate::packet_length_serializer::PacketLengthDeserializationError; use crate::{PacketLengthSerializer, Serializer}; #[async_trait] pub trait Protocol: Send + Sync + 'static { type Listener: Listener<Stream = Self::ServerStream>; type ServerStream: ServerStream; type ClientStream: ClientStream; async fn bind(addr: SocketAddr) -> io::Result<Self::Listener>; async fn connect_to_server(addr: SocketAddr) -> io::Result<Self::ClientStream> { let stream = Self::ClientStream::connect(addr).await?; log::debug!("Connected to a server at {:?}", stream.peer_addr()); Ok(stream) } } #[async_trait] pub trait Listener { type Stream: ServerStream; async fn accept(&self) -> io::Result<Self::Stream>; fn address(&self) -> SocketAddr; fn handle_disconnection(&self, #[allow(unused_variables)] peer_addr: SocketAddr) {} } #[async_trait] pub trait ClientStream: NetworkStream { async fn connect(addr: SocketAddr) -> io::Result<Self> where Self: Sized; } pub trait ServerStream: NetworkStream {} #[async_trait] pub trait NetworkStream: Send + Sync + 'static { type ReadHalf: ReadStream; type WriteHalf: WriteStream; async fn into_split(self) -> io::Result<(Self::ReadHalf, Self::WriteHalf)>; fn peer_addr(&self) -> SocketAddr; fn local_addr(&self) -> SocketAddr; } #[async_trait] pub trait ReadStream: Send + Sync + 'static { async fn read_exact(&mut self, buffer: &mut [u8]) -> io::Result<()>; async fn receive<ReceivingPacket, SendingPacket, S, LS>( &mut self, serializer: &S, length_serializer: &LS, ) -> Result<ReceivingPacket, ReceiveError<ReceivingPacket, SendingPacket, S, LS>> where ReceivingPacket: Send + Sync + Debug + 'static, SendingPacket: Send + Sync + Debug + 'static, S: Serializer<ReceivingPacket, SendingPacket>, LS: PacketLengthSerializer, { let mut buf = Vec::new(); let mut length = 
Err(PacketLengthDeserializationError::NeedMoreBytes(LS::SIZE)); while let Err(PacketLengthDeserializationError::NeedMoreBytes(amt)) = length { let mut tmp = vec![0; amt]; self.read_exact(&mut tmp).await.map_err(ReceiveError::Io)?; buf.extend(tmp); length = length_serializer.deserialize_packet_length(&buf); } match length { Ok(length) => { if length > MAX_PACKET_SIZE.load(Ordering::Relaxed) { Err(ReceiveError::PacketTooBig) } else { let mut buf = vec![0; length]; self.read_exact(&mut buf).await.map_err(ReceiveError::Io)?; Ok(serializer .deserialize(&buf) .map_err(ReceiveError::Deserialization)?) } } Err(PacketLengthDeserializationError::Err(err)) => { Err(ReceiveError::LengthDeserialization(err)) } Err(PacketLengthDeserializationError::NeedMoreBytes(_)) => unreachable!(), } } } pub enum ReceiveError<ReceivingPacket, SendingPacket, S, LS> where ReceivingPacket: Send + Sync + Debug + 'static, SendingPacket: Send + Sync + Debug + 'static, S: Serializer<ReceivingPacket, SendingPacket>, LS: PacketLengthSerializer, { Io(io::Error), Deserialization(S::Error), LengthDeserialization(LS::Error), PacketTooBig, NoConnection(io::Error), IntentionalDisconnection, } impl<ReceivingPacket, SendingPacket, S, LS> Debug for ReceiveError<ReceivingPacket, SendingPacket, S, LS> where ReceivingPacket: Send + Sync + Debug + 'static, SendingPacket: Send + Sync + Debug + 'static, S: Serializer<ReceivingPacket, SendingPacket>, LS: PacketLengthSerializer, { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match self { ReceiveError::Io(error) => write!(f, "ReceiveError::Io({error:?})"), ReceiveError::Deserialization(error) => { write!(f, "ReceiveError::Deserialization({error:?})") } ReceiveError::LengthDeserialization(error) => { write!(f, "ReceiveError::LengthDeserialization({error:?})") } ReceiveError::PacketTooBig => write!(f, "ReceiveError::PacketTooBig"), ReceiveError::NoConnection(error) => write!(f, "ReceiveError::NoConnection({error:?})"), ReceiveError::IntentionalDisconnection 
=> write!(f, "IntentionalDisconnection"), } } } #[async_trait] pub trait WriteStream: Send + Sync + 'static { async fn write_all(&mut self, buffer: &[u8]) -> io::Result<()>; async fn send<ReceivingPacket, SendingPacket, S, LS>( &mut self, packet: SendingPacket, serializer: &S, length_serializer: &LS, ) -> io::Result<()> where ReceivingPacket: Send + Sync + Debug + 'static, SendingPacket: Send + Sync + Debug + 'static, S: Serializer<ReceivingPacket, SendingPacket>, LS: PacketLengthSerializer, { let serialized = serializer .serialize(packet) .expect("Error serializing packet"); let mut buf = length_serializer .serialize_packet_length(serialized.len()) .expect("Error serializing packet length"); buf.write_all(&serialized)?; self.write_all(&buf).await?; Ok(()) } }
use io::Write; use std::fmt::{Debug, Formatter}; use std::io; use std::net::SocketAddr; use std::sync::atomic::Ordering; use async_trait::async_trait; use crate::connection::MAX_PACKET_SIZE; use crate::packet_length_serializer::PacketLengthDeserializationError; use crate::{PacketLengthSerializer, Serializer}; #[async_trait] pub trait Protocol: Send + Sync + 'static { type Listener: Listener<Stream = Self::ServerStream>; type ServerStream: ServerStream; type ClientStream: ClientStream; async fn bind(addr: SocketAddr) -> io::Result<Self::Listener>; async fn connect_to_server(addr: SocketAddr) -> io::Result<Self::ClientStream> { let stream = Self::ClientStream::connect(addr).await?; log::debug!("Connected to a server at {:?}", stream.peer_addr()); Ok(stream) } } #[async_trait] pub trait Listener { type Stream: ServerStream; async fn accept(&self) -> io::Result<Self::Stream>; fn address(&self) -> SocketAddr; fn handle_disconnection(&self, #[allow(unused_variables)] peer_addr: SocketAddr) {} } #[async_trait] pub trait ClientStream: NetworkStream { async fn connect(addr: SocketAddr) -> io::Result<Self> where Self: Sized; } pub trait ServerStream: NetworkStream {} #[async_trait] pub trait NetworkStream: Send + Sync + 'static { type ReadHalf: ReadStream; type WriteHalf: WriteStream; async fn into_split(self) -> io::Result<(Self::ReadHalf, Self::WriteHalf)>; fn peer_addr(&self) -> SocketAddr; fn local_addr(&self) -> SocketAddr; } #[async_trait] pub
et, serializer: &S, length_serializer: &LS, ) -> io::Result<()> where ReceivingPacket: Send + Sync + Debug + 'static, SendingPacket: Send + Sync + Debug + 'static, S: Serializer<ReceivingPacket, SendingPacket>, LS: PacketLengthSerializer, { let serialized = serializer .serialize(packet) .expect("Error serializing packet"); let mut buf = length_serializer .serialize_packet_length(serialized.len()) .expect("Error serializing packet length"); buf.write_all(&serialized)?; self.write_all(&buf).await?; Ok(()) } }
trait ReadStream: Send + Sync + 'static { async fn read_exact(&mut self, buffer: &mut [u8]) -> io::Result<()>; async fn receive<ReceivingPacket, SendingPacket, S, LS>( &mut self, serializer: &S, length_serializer: &LS, ) -> Result<ReceivingPacket, ReceiveError<ReceivingPacket, SendingPacket, S, LS>> where ReceivingPacket: Send + Sync + Debug + 'static, SendingPacket: Send + Sync + Debug + 'static, S: Serializer<ReceivingPacket, SendingPacket>, LS: PacketLengthSerializer, { let mut buf = Vec::new(); let mut length = Err(PacketLengthDeserializationError::NeedMoreBytes(LS::SIZE)); while let Err(PacketLengthDeserializationError::NeedMoreBytes(amt)) = length { let mut tmp = vec![0; amt]; self.read_exact(&mut tmp).await.map_err(ReceiveError::Io)?; buf.extend(tmp); length = length_serializer.deserialize_packet_length(&buf); } match length { Ok(length) => { if length > MAX_PACKET_SIZE.load(Ordering::Relaxed) { Err(ReceiveError::PacketTooBig) } else { let mut buf = vec![0; length]; self.read_exact(&mut buf).await.map_err(ReceiveError::Io)?; Ok(serializer .deserialize(&buf) .map_err(ReceiveError::Deserialization)?) 
} } Err(PacketLengthDeserializationError::Err(err)) => { Err(ReceiveError::LengthDeserialization(err)) } Err(PacketLengthDeserializationError::NeedMoreBytes(_)) => unreachable!(), } } } pub enum ReceiveError<ReceivingPacket, SendingPacket, S, LS> where ReceivingPacket: Send + Sync + Debug + 'static, SendingPacket: Send + Sync + Debug + 'static, S: Serializer<ReceivingPacket, SendingPacket>, LS: PacketLengthSerializer, { Io(io::Error), Deserialization(S::Error), LengthDeserialization(LS::Error), PacketTooBig, NoConnection(io::Error), IntentionalDisconnection, } impl<ReceivingPacket, SendingPacket, S, LS> Debug for ReceiveError<ReceivingPacket, SendingPacket, S, LS> where ReceivingPacket: Send + Sync + Debug + 'static, SendingPacket: Send + Sync + Debug + 'static, S: Serializer<ReceivingPacket, SendingPacket>, LS: PacketLengthSerializer, { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match self { ReceiveError::Io(error) => write!(f, "ReceiveError::Io({error:?})"), ReceiveError::Deserialization(error) => { write!(f, "ReceiveError::Deserialization({error:?})") } ReceiveError::LengthDeserialization(error) => { write!(f, "ReceiveError::LengthDeserialization({error:?})") } ReceiveError::PacketTooBig => write!(f, "ReceiveError::PacketTooBig"), ReceiveError::NoConnection(error) => write!(f, "ReceiveError::NoConnection({error:?})"), ReceiveError::IntentionalDisconnection => write!(f, "IntentionalDisconnection"), } } } #[async_trait] pub trait WriteStream: Send + Sync + 'static { async fn write_all(&mut self, buffer: &[u8]) -> io::Result<()>; async fn send<ReceivingPacket, SendingPacket, S, LS>( &mut self, packet: SendingPack
random
[ { "content": "/// A server plugin config.\n\npub trait ServerConfig: Send + Sync + 'static {\n\n /// A client-side packet type.\n\n type ClientPacket: Send + Sync + Debug + 'static;\n\n /// A server-side packet type.\n\n type ServerPacket: Send + Sync + Debug + 'static;\n\n /// The connection's p...
Rust
src/ipfs.rs
sne-os3-rp2/routinator
7a53dabf5013c206b6f0d3d1e376fb2682a60332
use std::path::{PathBuf, Path}; use crate::config::Config; use crate::operation::Error; use std::{fs, env, io, process}; use log::{error, info, warn}; use std::sync::{RwLock, Mutex, Arc}; use std::collections::{HashSet, HashMap}; use bytes::Bytes; use rpki::uri; use crate::metrics::IpfsModuleMetrics; use std::time::SystemTime; #[derive(Clone, Debug, PartialEq, Eq)] pub struct IpnsPubkey(pub String); impl IpnsPubkey { pub fn value(&self) -> &String { &self.0 } } #[derive(Clone, Debug, PartialEq, Eq)] pub struct TalPubkey(pub String); impl TalPubkey { pub fn value(&self) -> &String { &self.0 } } #[derive(Clone, Debug, PartialEq, Eq)] pub struct IpfsPath(pub PathBuf); impl IpfsPath { pub fn value(&self) -> &PathBuf { &self.0 } pub fn to_string(&self) -> String { String::from(&self.0.display().to_string()) } } #[derive(Clone, Debug)] pub struct Cid(pub String); impl Cid { pub fn to_string(&self) -> String { String::from(&self.0) } } #[derive(Clone, Debug)] pub struct KeyName(pub String); impl KeyName { pub fn to_string(&self) -> String { String::from(&self.0) } } #[derive(Debug)] pub struct Cache { base_dir: CacheDir, ipfs_path: Option<IpfsPath>, command: Option<Command>, } impl Cache { pub fn init(config: &Config) -> Result<(), Error> { let ipfs_dir = Self::cache_dir(config); if let Err(err) = fs::create_dir_all(&ipfs_dir) { error!( "Failed to create IPFS cache directory {}: {}.", ipfs_dir.display(), err ); return Err(Error); } Ok(()) } pub fn new(config: &Config, update: bool) -> Result<Option<Self>, Error> { if config.disable_ipfs { Ok(None) } else { Self::init(config)?; Ok(Some(Cache { base_dir: CacheDir::new(Self::cache_dir(config)), command: if update { Some(Command::new(config)?) 
} else { None }, ipfs_path: config.ipfs_path.clone() })) } } pub fn start(&self) -> Result<Run, Error> { Run::new(self) } pub fn ipfs_path(&self) -> Option<IpfsPath> { self.ipfs_path.clone() } pub fn local_repo_dir(&self, uri: &uri::Ipns) -> PathBuf { let mut res = self.base_dir.base.clone(); res.push(uri.get_repo_publish_key()); res } pub fn local_ta_cer_dir(&self, uri: &uri::Ipns) -> PathBuf { let mut res = self.base_dir.base.clone(); res.push(uri.get_ta_publish_key()); res } fn cache_dir(config: &Config) -> PathBuf { config.cache_dir.join("ipfs") } } #[derive(Clone, Debug)] struct CacheDir { base: PathBuf } impl CacheDir { fn new(base: PathBuf) -> Self { CacheDir { base } } } #[derive(Debug)] pub struct Run<'a> { cache: &'a Cache, updated: RwLock<HashSet<uri::RsyncModule>>, running: RwLock<HashMap<uri::RsyncModule, Arc<Mutex<()>>>>, metrics: Mutex<Vec<IpfsModuleMetrics>> } impl<'a> Run<'a> { pub fn new(cache: &'a Cache) -> Result<Self, Error> { Ok(Run { cache, updated: Default::default(), running: Default::default(), metrics: Default::default(), }) } pub fn cache(&self) -> &Cache { self.cache } pub fn sync(&self, ipfs_path: &IpfsPath, uri: &uri::Ipns) { println!("Starting syncing IPFS..."); env::set_var("IPFS_PATH", ipfs_path.to_string()); let source = format!("/ipns/{}", &uri.get_repo_publish_key()); let destination = &self.cache.local_repo_dir(uri); let destination = format!("--output={}", destination.display().to_string()); let start_time = SystemTime::now(); let result = std::process::Command::new("ipfs") .arg("get") .arg(source) .arg(destination) .output().expect("could not sync ipfs"); for metric in self.metrics.lock().unwrap().iter_mut() { metric.duration = SystemTime::now().duration_since(start_time); } println!("Finished syncing IPFS..."); env::set_var("IPFS_PATH", ""); println!("{:?}", result); } pub fn load_ta(&self, uri: &uri::Ipns) { let command = match self.cache.command.as_ref() { Some(command) => command, None => return, }; let metrics = 
command.fetch_ta_cer_to_local( uri, &self.cache ); self.metrics.lock().unwrap().push(metrics); } pub fn do_load_file_from_cache(&self, rsync_uri: &uri::Rsync, ipns_uri: &uri::Ipns) -> Option<Bytes> { let source = self.cache .base_dir .base .join(&rsync_uri.to_ipns_repo_path(ipns_uri)); match fs::File::open(&source) { Ok(mut file) => { let mut data = Vec::new(); if let Err(err) = io::Read::read_to_end(&mut file, &mut data) { warn!( "Failed to read file '{}': {}", source.display(), err ); None } else { Some(data.into()) } } Err(err) => { if err.kind() == io::ErrorKind::NotFound { info!("{}: not found in local ipfs repository", source.display()); } else { warn!( "Failed to open file '{}': {}", source.display(), err ); } None } } } pub fn load_ta_file_from_cache( &self, uri: &uri::Ipns, ) -> Option<Bytes> { let path = self.cache.local_ta_cer_dir(uri).join("ta.cer"); match fs::File::open(&path) { Ok(mut file) => { let mut data = Vec::new(); if let Err(err) = io::Read::read_to_end(&mut file, &mut data) { warn!( "Failed to read file '{}': {}", path.display(), err ); None } else { Some(data.into()) } } Err(err) => { if err.kind() == io::ErrorKind::NotFound { info!("{}: not found in local ipfs repository", uri); } else { warn!( "Failed to open file '{}': {}", path.display(), err ); } None } } } pub fn into_metrics(self) -> Vec<IpfsModuleMetrics> { self.metrics.into_inner().unwrap() } } #[derive(Debug)] struct Command { command: String, args: Vec<String>, } impl Command { pub fn new(config: &Config) -> Result<Self, Error> { let command = config.ipfs_command.clone(); let args = vec![String::from("")]; Ok(Command { command, args, }) } pub fn fetch_ta_cer_to_local( &self, source: &uri::Ipns, destination: &Cache ) -> IpfsModuleMetrics { let start = SystemTime::now(); let status = { match self.fetch_ta_cer(&source, destination) { Ok(mut command) => match command.output() { Ok(output) => Ok(Self::log_output(&source.as_str().to_string(), output)), Err(err) => Err(err) } Err(err) => 
Err(err) } }; IpfsModuleMetrics { ipns: source.clone(), status, duration: SystemTime::now().duration_since(start), } } fn fetch_ta_cer( &self, source: &uri::Ipns, cache: &Cache ) -> Result<process::Command, io::Error> { let destination = cache.local_ta_cer_dir(source); fs::create_dir_all(&destination)?; let mut cmd = process::Command::new(&self.command); let destination = format!("--output={}/ta.cer", &destination.display().to_string()); let source = format!("/ipns/{}", source.get_ta_publish_key()); dbg!(&source); dbg!(&destination); cmd.arg("get") .arg(source) .arg(destination); info!("ipfs Running command {:?}", cmd); Ok(cmd) } fn log_output( source: &String, output: process::Output ) -> process::ExitStatus { if !output.status.success() { warn!( "ipfs to retrieve cid {} failed with status {}", source, output.status ); } else { info!( "successfully completed {}.", source, ); } output.status } #[cfg(not(windows))] fn format_destination(path: &Path) -> Result<String, Error> { let mut destination = format!("{}", path.display()); if !destination.ends_with('/') { destination.push('/') } Ok(destination) } }
use std::path::{PathBuf, Path}; use crate::config::Config; use crate::operation::Error; use std::{fs, env, io, process}; use log::{error, info, warn}; use s
pub fn value(&self) -> &String { &self.0 } } #[derive(Clone, Debug, PartialEq, Eq)] pub struct IpfsPath(pub PathBuf); impl IpfsPath { pub fn value(&self) -> &PathBuf { &self.0 } pub fn to_string(&self) -> String { String::from(&self.0.display().to_string()) } } #[derive(Clone, Debug)] pub struct Cid(pub String); impl Cid { pub fn to_string(&self) -> String { String::from(&self.0) } } #[derive(Clone, Debug)] pub struct KeyName(pub String); impl KeyName { pub fn to_string(&self) -> String { String::from(&self.0) } } #[derive(Debug)] pub struct Cache { base_dir: CacheDir, ipfs_path: Option<IpfsPath>, command: Option<Command>, } impl Cache { pub fn init(config: &Config) -> Result<(), Error> { let ipfs_dir = Self::cache_dir(config); if let Err(err) = fs::create_dir_all(&ipfs_dir) { error!( "Failed to create IPFS cache directory {}: {}.", ipfs_dir.display(), err ); return Err(Error); } Ok(()) } pub fn new(config: &Config, update: bool) -> Result<Option<Self>, Error> { if config.disable_ipfs { Ok(None) } else { Self::init(config)?; Ok(Some(Cache { base_dir: CacheDir::new(Self::cache_dir(config)), command: if update { Some(Command::new(config)?) 
} else { None }, ipfs_path: config.ipfs_path.clone() })) } } pub fn start(&self) -> Result<Run, Error> { Run::new(self) } pub fn ipfs_path(&self) -> Option<IpfsPath> { self.ipfs_path.clone() } pub fn local_repo_dir(&self, uri: &uri::Ipns) -> PathBuf { let mut res = self.base_dir.base.clone(); res.push(uri.get_repo_publish_key()); res } pub fn local_ta_cer_dir(&self, uri: &uri::Ipns) -> PathBuf { let mut res = self.base_dir.base.clone(); res.push(uri.get_ta_publish_key()); res } fn cache_dir(config: &Config) -> PathBuf { config.cache_dir.join("ipfs") } } #[derive(Clone, Debug)] struct CacheDir { base: PathBuf } impl CacheDir { fn new(base: PathBuf) -> Self { CacheDir { base } } } #[derive(Debug)] pub struct Run<'a> { cache: &'a Cache, updated: RwLock<HashSet<uri::RsyncModule>>, running: RwLock<HashMap<uri::RsyncModule, Arc<Mutex<()>>>>, metrics: Mutex<Vec<IpfsModuleMetrics>> } impl<'a> Run<'a> { pub fn new(cache: &'a Cache) -> Result<Self, Error> { Ok(Run { cache, updated: Default::default(), running: Default::default(), metrics: Default::default(), }) } pub fn cache(&self) -> &Cache { self.cache } pub fn sync(&self, ipfs_path: &IpfsPath, uri: &uri::Ipns) { println!("Starting syncing IPFS..."); env::set_var("IPFS_PATH", ipfs_path.to_string()); let source = format!("/ipns/{}", &uri.get_repo_publish_key()); let destination = &self.cache.local_repo_dir(uri); let destination = format!("--output={}", destination.display().to_string()); let start_time = SystemTime::now(); let result = std::process::Command::new("ipfs") .arg("get") .arg(source) .arg(destination) .output().expect("could not sync ipfs"); for metric in self.metrics.lock().unwrap().iter_mut() { metric.duration = SystemTime::now().duration_since(start_time); } println!("Finished syncing IPFS..."); env::set_var("IPFS_PATH", ""); println!("{:?}", result); } pub fn load_ta(&self, uri: &uri::Ipns) { let command = match self.cache.command.as_ref() { Some(command) => command, None => return, }; let metrics = 
command.fetch_ta_cer_to_local( uri, &self.cache ); self.metrics.lock().unwrap().push(metrics); } pub fn do_load_file_from_cache(&self, rsync_uri: &uri::Rsync, ipns_uri: &uri::Ipns) -> Option<Bytes> { let source = self.cache .base_dir .base .join(&rsync_uri.to_ipns_repo_path(ipns_uri)); match fs::File::open(&source) { Ok(mut file) => { let mut data = Vec::new(); if let Err(err) = io::Read::read_to_end(&mut file, &mut data) { warn!( "Failed to read file '{}': {}", source.display(), err ); None } else { Some(data.into()) } } Err(err) => { if err.kind() == io::ErrorKind::NotFound { info!("{}: not found in local ipfs repository", source.display()); } else { warn!( "Failed to open file '{}': {}", source.display(), err ); } None } } } pub fn load_ta_file_from_cache( &self, uri: &uri::Ipns, ) -> Option<Bytes> { let path = self.cache.local_ta_cer_dir(uri).join("ta.cer"); match fs::File::open(&path) { Ok(mut file) => { let mut data = Vec::new(); if let Err(err) = io::Read::read_to_end(&mut file, &mut data) { warn!( "Failed to read file '{}': {}", path.display(), err ); None } else { Some(data.into()) } } Err(err) => { if err.kind() == io::ErrorKind::NotFound { info!("{}: not found in local ipfs repository", uri); } else { warn!( "Failed to open file '{}': {}", path.display(), err ); } None } } } pub fn into_metrics(self) -> Vec<IpfsModuleMetrics> { self.metrics.into_inner().unwrap() } } #[derive(Debug)] struct Command { command: String, args: Vec<String>, } impl Command { pub fn new(config: &Config) -> Result<Self, Error> { let command = config.ipfs_command.clone(); let args = vec![String::from("")]; Ok(Command { command, args, }) } pub fn fetch_ta_cer_to_local( &self, source: &uri::Ipns, destination: &Cache ) -> IpfsModuleMetrics { let start = SystemTime::now(); let status = { match self.fetch_ta_cer(&source, destination) { Ok(mut command) => match command.output() { Ok(output) => Ok(Self::log_output(&source.as_str().to_string(), output)), Err(err) => Err(err) } Err(err) => 
Err(err) } }; IpfsModuleMetrics { ipns: source.clone(), status, duration: SystemTime::now().duration_since(start), } } fn fetch_ta_cer( &self, source: &uri::Ipns, cache: &Cache ) -> Result<process::Command, io::Error> { let destination = cache.local_ta_cer_dir(source); fs::create_dir_all(&destination)?; let mut cmd = process::Command::new(&self.command); let destination = format!("--output={}/ta.cer", &destination.display().to_string()); let source = format!("/ipns/{}", source.get_ta_publish_key()); dbg!(&source); dbg!(&destination); cmd.arg("get") .arg(source) .arg(destination); info!("ipfs Running command {:?}", cmd); Ok(cmd) } fn log_output( source: &String, output: process::Output ) -> process::ExitStatus { if !output.status.success() { warn!( "ipfs to retrieve cid {} failed with status {}", source, output.status ); } else { info!( "successfully completed {}.", source, ); } output.status } #[cfg(not(windows))] fn format_destination(path: &Path) -> Result<String, Error> { let mut destination = format!("{}", path.display()); if !destination.ends_with('/') { destination.push('/') } Ok(destination) } }
td::sync::{RwLock, Mutex, Arc}; use std::collections::{HashSet, HashMap}; use bytes::Bytes; use rpki::uri; use crate::metrics::IpfsModuleMetrics; use std::time::SystemTime; #[derive(Clone, Debug, PartialEq, Eq)] pub struct IpnsPubkey(pub String); impl IpnsPubkey { pub fn value(&self) -> &String { &self.0 } } #[derive(Clone, Debug, PartialEq, Eq)] pub struct TalPubkey(pub String); impl TalPubkey {
random
[ { "content": "/// Creates a new path name.\n\npub fn random_path(path: &Path) -> PathBuf {\n\n path.join(format!(\"{}\", random::<u32>()))\n\n}\n\n\n", "file_path": "src/rrdp/utils.rs", "rank": 0, "score": 70571.3919880939 }, { "content": "fn process_line<B: io::BufRead, T: FromStr>(\n\n ...
Rust
2021/src/day_nine.rs
Majekdor/AdventOfCode
561942ae3200faf55e1ce166a30fac482f5bcbf4
use std::collections::HashSet; use std::fs; use std::ops::Index; pub fn run() { println!("----------\n Day Nine\n----------"); let contents = fs::read_to_string("inputs/day_nine.txt").expect("File not found!"); let lines:Vec<String> = contents.split("\n").map(|s| s.parse().unwrap()).collect(); const COLUMNS: usize = 100; const ROWS: usize = 100; let mut matrix:Matrix<ROWS,COLUMNS> = Matrix::new(); for i in 0..ROWS { let line = lines.get(i).unwrap(); for j in 0..COLUMNS { matrix.data[i][j] = (line.as_bytes()[j] as usize) - 48; } } let mut count = 0; for i in 0..ROWS { for j in 0..COLUMNS { if matrix.low_point(i, j) { count += 1 + matrix.data[i][j]; } } } assert_eq!(count, 423); println!("Part One: {}", count); let mut basin_sizes:Vec<usize> = vec![]; let mut seen:HashSet<Point> = HashSet::new(); for i in 0..ROWS { for j in 0..COLUMNS { if matrix.low_point(i, j) { basin_sizes.push(matrix.basin_size(Point{ row: i, column: j }, &mut seen)); } } } basin_sizes.sort(); let count:usize = basin_sizes.iter().rev().take(3).product(); assert_eq!(count, 1198704); println!("Part Two: {}", count); } #[derive(Debug, Clone, Eq, PartialEq, Hash)] struct Point { row: usize, column: usize, } pub struct Matrix<const ROWS: usize, const COLS: usize> { data: [[usize; COLS]; ROWS], } impl<const ROWS: usize, const COLS: usize> Matrix<ROWS, COLS> { pub fn new() -> Self { Self { data: [[0; COLS]; ROWS] } } fn num_above_greater(&mut self, row: usize, column: usize) -> bool { return if row == 0 { true } else { if self.data[row - 1][column] > self.data[row][column] { true } else { false } } } fn num_below_greater(&mut self, row: usize, column: usize) -> bool { return if row == 99 { true } else { if self.data[row + 1][column] > self.data[row][column] { true } else { false } } } fn num_right_greater(&mut self, row: usize, column: usize) -> bool { return if column == 99 { true } else { if self.data[row][column + 1] > self.data[row][column] { true } else { false } } } fn num_left_greater(&mut self, 
row: usize, column: usize) -> bool { return if column == 0 { true } else { if self.data[row][column - 1] > self.data[row][column] { true } else { false } } } fn low_point(&mut self, row: usize, column: usize) -> bool { return self.num_below_greater(row, column) && self.num_above_greater(row, column) && self.num_left_greater(row, column) && self.num_right_greater(row, column); } fn neighbors(&mut self, row: usize, column: usize) -> Vec<Point> { let mut neighbors:Vec<Point> = vec![]; if row != 0 { neighbors.push(Point { row: row - 1, column }); } if row != 99 { neighbors.push(Point { row: row + 1, column }); } if column != 0 { neighbors.push(Point { row, column: column - 1 }); } if column != 99 { neighbors.push(Point { row, column: column + 1 }); } neighbors } fn basin_size(&mut self, point: Point, seen: &mut HashSet<Point>) -> usize { if seen.contains(&point) { return 0; } if self.data[point.row][point.column] == 9 { return 0; } let row = point.row; let column = point.column; seen.insert(point); let mut total = 1; for neighbor in self.neighbors(row, column) { total += self.basin_size(neighbor, seen); } total } } impl<const ROWS: usize, const COLS: usize> Index<(usize, usize)> for Matrix<ROWS, COLS> { type Output = usize; fn index(&self, index: (usize, usize)) -> &Self::Output { &self.data[index.0][index.1] } }
use std::collections::HashSet; use std::fs; use std::ops::Index; pub fn run() { println!("----------\n Day Nine\n----------"); let contents = fs::read_to_string("inputs/day_nine.txt").expect("File not found!"); let lines:Vec<String> = contents.split("\n").map(|s| s.parse().unwrap()).collect(); const COLUMNS: usize = 100; const ROWS: usize = 100; let mut matrix:Matrix<ROWS,COLUMNS> = Matrix::new(); for i in 0..ROWS { let line = lines.get(i).unwrap(); for j in 0..COLUMNS { matrix.data[i][j] = (line.as_bytes()[j] as usize) - 48; } } let mut count = 0; for i in 0..ROWS { for j in 0..COLUMNS { if matrix.low_point(i, j) { count += 1 + matrix.data[i][j]; } } } assert_eq!(count, 423); println!("Part One: {}", count); let mut basin_sizes:Vec<usize> = vec![]; let mut seen:HashSet<Point> = HashSet::new(); for i in 0..ROWS { for j in 0..COLUMNS { if matrix.low_point(i, j) { basin_sizes.push(matrix.basin_size(Point{ row: i, column: j }, &mut seen)); } } } basin_sizes.sort(); let count:usize = basin_sizes.iter().rev().take(3).product(); assert_eq!(count, 1198704); println!("Part Two: {}", count); } #[derive(Debug, Clone, Eq, PartialEq, Hash)] struct Point { row: usize, column: usize, } pub struct Matrix<const ROWS: usize, const COLS: usize> { data: [[usize; COLS]; ROWS], } impl<const ROWS: usize, const COLS: usize> Matrix<ROWS, COLS> { pub fn new() -> Self { Self { data: [[0; COLS]; ROWS] } } fn num_above_grea
} else { false } } } fn num_below_greater(&mut self, row: usize, column: usize) -> bool { return if row == 99 { true } else { if self.data[row + 1][column] > self.data[row][column] { true } else { false } } } fn num_right_greater(&mut self, row: usize, column: usize) -> bool { return if column == 99 { true } else { if self.data[row][column + 1] > self.data[row][column] { true } else { false } } } fn num_left_greater(&mut self, row: usize, column: usize) -> bool { return if column == 0 { true } else { if self.data[row][column - 1] > self.data[row][column] { true } else { false } } } fn low_point(&mut self, row: usize, column: usize) -> bool { return self.num_below_greater(row, column) && self.num_above_greater(row, column) && self.num_left_greater(row, column) && self.num_right_greater(row, column); } fn neighbors(&mut self, row: usize, column: usize) -> Vec<Point> { let mut neighbors:Vec<Point> = vec![]; if row != 0 { neighbors.push(Point { row: row - 1, column }); } if row != 99 { neighbors.push(Point { row: row + 1, column }); } if column != 0 { neighbors.push(Point { row, column: column - 1 }); } if column != 99 { neighbors.push(Point { row, column: column + 1 }); } neighbors } fn basin_size(&mut self, point: Point, seen: &mut HashSet<Point>) -> usize { if seen.contains(&point) { return 0; } if self.data[point.row][point.column] == 9 { return 0; } let row = point.row; let column = point.column; seen.insert(point); let mut total = 1; for neighbor in self.neighbors(row, column) { total += self.basin_size(neighbor, seen); } total } } impl<const ROWS: usize, const COLS: usize> Index<(usize, usize)> for Matrix<ROWS, COLS> { type Output = usize; fn index(&self, index: (usize, usize)) -> &Self::Output { &self.data[index.0][index.1] } }
ter(&mut self, row: usize, column: usize) -> bool { return if row == 0 { true } else { if self.data[row - 1][column] > self.data[row][column] { true
function_block-random_span
[ { "content": "// Life times are weird...\n\nfn part_one<'a>(path: &HashMap<&'a str, Vec<&'a str>>, location: &'a str, seen: &mut HashSet<&'a str>) -> usize {\n\n if location == \"end\" {\n\n return 1;\n\n }\n\n\n\n if location.chars().all(|c| c.is_lowercase()) && !seen.insert(location) {\n\n ...
Rust
src/http/server/request.rs
ragagno/bwf
87f417df9d818e0f0917fbe3ddf88d10f5eed918
use std::fmt; use std::io; use super::Result; use super::Error; use super::super::{MIN_LENGTH_METHOD, LENGTH_SPACE, MIN_LENGTH_TARGET, LENGTH_PROTOCOL, LENGTH_EOL}; use crate::http::Method; const MAX_LENGTH_HEADER: usize = 4096usize; const MIN_LENGTH_REQUEST_LINE_FULL: usize = MIN_LENGTH_METHOD + LENGTH_SPACE + MIN_LENGTH_TARGET + LENGTH_SPACE + LENGTH_PROTOCOL + LENGTH_EOL + LENGTH_EOL; const MIN_LENGTH_REQUEST_LINE_SLICED_1: usize = MIN_LENGTH_TARGET + LENGTH_SPACE + LENGTH_PROTOCOL + LENGTH_EOL + LENGTH_EOL; const MIN_LENGTH_REQUEST_LINE_SLICED_2: usize = LENGTH_PROTOCOL + LENGTH_EOL + LENGTH_EOL; const MIN_LENGTH_REQUEST_LINE_SLICED_3: usize = LENGTH_EOL; pub struct Request { buffer: [u8; MAX_LENGTH_HEADER], buffer_length: usize, buffer_finger: usize, method: Method, target: String, } impl Request { pub fn parse(reader: &mut dyn io::Read) -> Result<Self> { let mut request = Self { buffer: [0u8; MAX_LENGTH_HEADER], buffer_length: 0usize, buffer_finger: 0usize, method: Method::GET, target: String::from("/"), }; request.buffer_length = reader.read(&mut request.buffer)?; if MIN_LENGTH_REQUEST_LINE_FULL > request.buffer_length { return Err(Error::BadRequest); } { let space_index = request.buffer_finger + match request.buffer[request.buffer_finger..].iter().position(|&byte| byte == b' ') { Some(index) => index, None => return Err(Error::BadRequest), }; request.method = match Method::from_text(&request.buffer[..space_index]) { Some(method) => method, None => return Err(Error::BadRequest), }; request.buffer_finger = space_index + LENGTH_SPACE; } if request.buffer_finger + MIN_LENGTH_REQUEST_LINE_SLICED_1 > request.buffer_length { return Err(Error::BadRequest); } { let space_index = request.buffer_finger + match request.buffer[request.buffer_finger..].iter().position(|&byte| byte == b' ') { Some(index) => index, None => return Err(Error::BadRequest), }; request.target = if request.buffer[request.buffer_finger] == b'/' { if request.buffer_finger + 1usize == space_index { 
String::from("/") } else if request.buffer[space_index - 1usize] == b'/' { String::from(std::str::from_utf8(&request.buffer[request.buffer_finger..space_index - 1usize])?) } else { String::from(std::str::from_utf8(&request.buffer[request.buffer_finger..space_index])?) } } else { return Err(Error::BadRequest); }; request.buffer_finger = space_index + LENGTH_SPACE; } if request.buffer_finger + MIN_LENGTH_REQUEST_LINE_SLICED_2 > request.buffer_length { return Err(Error::BadRequest); } if !request.buffer[request.buffer_finger..].starts_with(b"HTTP/1.1\r\n") { return Err(Error::BadRequest); } request.buffer_finger += LENGTH_PROTOCOL + LENGTH_EOL; if request.buffer_finger + MIN_LENGTH_REQUEST_LINE_SLICED_3 > request.buffer_length { return Err(Error::BadRequest); } if !request.buffer[request.buffer_finger..].starts_with(b"\r\n") { return Err(Error::BadRequest); } request.buffer_finger += LENGTH_EOL; if request.buffer_finger != request.buffer_length { return Err(Error::BadRequest); } return Ok(request); } pub fn get_method(&self) -> &Method { return &self.method; } } impl fmt::Display for Request { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { return formatter.write_fmt(format_args!("{} {} HTTP/1.1\r\n\r\n", self.method, self.target)); } } #[cfg(test)] mod tests { use super::*; const REQUEST_GET_HOMEPAGE: &str = "GET / HTTP/1.1\r\n\r\n"; const REQUEST_DELETE_HOMEPAGE: &str = "DELETE / HTTP/1.1\r\n\r\n"; const REQUEST_GET_PATH_1: &str = "GET /lorem HTTP/1.1\r\n\r\n"; const REQUEST_GET_PATH_1_TRAILING: &str = "GET /lorem/ HTTP/1.1\r\n\r\n"; const REQUEST_GET_PATH_2: &str = "GET /lorem/ipsum HTTP/1.1\r\n\r\n"; struct StringRead<'a> { iter: std::slice::Iter<'a, u8>, } impl<'a> StringRead<'a> { pub fn new(data: &'a str) -> Self { Self { iter: data.as_bytes().iter(), } } } impl<'a> std::io::Read for StringRead<'a> { fn read(&mut self, buffer: &mut [u8]) -> std::io::Result<usize> { for i in 0..buffer.len() { if let Some(&x) = self.iter.next() { buffer[i] = x; } 
else { return Ok(i); } } return Ok(buffer.len()); } } mod assertions { #[test] fn parse_reader() { use super::StringRead; use super::Request; let mut reader_request_get_homepage = StringRead::new(super::REQUEST_GET_HOMEPAGE); let mut reader_request_delete_homepage = StringRead::new(super::REQUEST_DELETE_HOMEPAGE); let mut reader_request_get_path_1 = StringRead::new(super::REQUEST_GET_PATH_1); let mut reader_request_get_path_1_trailing = StringRead::new(super::REQUEST_GET_PATH_1_TRAILING); let mut reader_request_get_path_2 = StringRead::new(super::REQUEST_GET_PATH_2); let request_get_homepage = Request::parse(&mut reader_request_get_homepage); assert!(request_get_homepage.is_ok()); if request_get_homepage.is_ok() { let request = request_get_homepage.unwrap(); assert_eq!(super::Method::GET, request.method); assert_eq!(String::from("/"), request.target); } let request_delete_homepage = Request::parse(&mut reader_request_delete_homepage); assert!(request_delete_homepage.is_ok()); if request_delete_homepage.is_ok() { let request = request_delete_homepage.unwrap(); assert_eq!(super::Method::DELETE, request.method); assert_eq!(String::from("/"), request.target); } let request_get_path_1 = Request::parse(&mut reader_request_get_path_1); assert!(request_get_path_1.is_ok()); if request_get_path_1.is_ok() { let request = request_get_path_1.unwrap(); assert_eq!(super::Method::GET, request.method); assert_eq!("/lorem", request.target); } let request_get_path_1_trailing = Request::parse(&mut reader_request_get_path_1_trailing); assert!(request_get_path_1_trailing.is_ok()); if request_get_path_1_trailing.is_ok() { let request = request_get_path_1_trailing.unwrap(); assert_eq!(super::Method::GET, request.method); assert_eq!("/lorem", request.target); } let request_get_path_2 = Request::parse(&mut reader_request_get_path_2); assert!(request_get_path_2.is_ok()); if request_get_path_2.is_ok() { let request = request_get_path_2.unwrap(); assert_eq!(super::Method::GET, request.method); 
assert_eq!("/lorem/ipsum", request.target); } } } mod benchmarks { use test::Bencher; const REQUEST_LINE: &str = "HEAD /lorem/ipsum/dolor/sit/amet HTTP/1.1\r\n\r\n"; #[bench] fn parse_reader(b: &mut Bencher) { use super::Request; use super::StringRead; let mut reader = StringRead::new(REQUEST_LINE); b.iter(|| test::black_box(Request::parse(&mut reader))); } } }
use std::fmt; use std::io; use super::Result; use super::Error; use super::super::{MIN_LENGTH_METHOD, LENGTH_SPACE, MIN_LENGTH_TARGET, LENGTH_PROTOCOL, LENGTH_EOL}; use crate::http::Method; const MAX_LENGTH_HEADER: usize = 4096usize; const MIN_LENGTH_REQUEST_LINE_FULL: usize = MIN_LENGTH_METHOD + LENGTH_SPACE + MIN_LENGTH_TARGET + LENGTH_SPACE + LENGTH_PROTOCOL + LENGTH_EOL + LENGTH_EOL; const MIN_LENGTH_REQUEST_LINE_SLICED_1: usize = MIN_LENGTH_TARGET + LENGTH_SPACE + LENGTH_PROTOCOL + LENGTH_EOL + LENGTH_EOL; const MIN_LENGTH_REQUEST_LINE_SLICED_2: usize = LENGTH_PROTOCOL + LENGTH_EOL + LENGTH_EOL; const MIN_LENGTH_REQUEST_LINE_SLICED_3: usize = LENGTH_EOL; pub struct Request { buffer: [u8; MAX_LENGTH_HEADER], buffer_length: usize, buffer_finger: usize, method: Method, target: String, } impl Request { pub fn parse(reader: &mut dyn io::Read) -> Result<Self> { let mut request = Self { buffer: [0u8; MAX_LENGTH_HEADER], buffer_length: 0usize, buffer_finger: 0usize, method: Method::GET, target: String::from("/"), }; request.buffer_length = reader.read(&mut request.buffer)?; if MIN_LENGTH_REQUEST_LINE_FULL > request.buffer_length { return Err(Error::BadRequest); } { let space_index = request.buffer_finger + match request.buffer[request.buffer_finger..].iter().position(|&byte| byte == b' ') { Some(index) => index, None => return Err(Error::BadRequest), }; request.method = match Method::from_text(&request.buffer[..space_index]) { Some(method) => method, None => return Err(Error::BadRequest), }; request.buffer_finger = space_index + LENGTH_SPACE; } if request.buffer_finger + MIN_LENGTH_REQUEST_LINE_SLICED_1 > request.buffer_length { return Err(Error::BadRequest); } { let space_index = request.buffer_finger + match request.buffer[request.buffer_finger..].iter().position(|&byte| byte == b' ') { Some(index) => index, None => return Err(Error::BadRequest), }; request.target = if request.buffer[request.buffer_finger] == b'/' { if request.buffer_finger + 1usize == space_index { 
String::from("/") } else if request.buffer[space_index - 1usize] == b'/' { String::from(std::str::from_utf8(&request.buffer[request.buffer_finger..space_index - 1usize])?) } else { String::from(std::str::from_utf8(&request.buffer[request.buffer_finger..space_index])?) } } else { return Err(Error::BadRequest); }; request.buffer_finger = space_index + LENGTH_SPACE; } if request.buffer_finger + MIN_LENGTH_REQUEST_LINE_SLICED_2 > request.buffer_length { return Err(Error::BadRequest); } if !request.buffer[request.buffer_finger..].starts_with(b"HTTP/1.1\r\n") { return Err(Error::BadRequest); } request.buffer_finger += LENGTH_PROTOCOL + LENGTH_EOL; if request.buffer_finger + MIN_LENGTH_REQUEST_LINE_SLICED_3 > request.buffer_length { return Err(Error::BadRequest); } if !request.buffer[request.buffer_finger..].starts_with(b"\r\n") { return Err(Error::BadRequest); } request.buffer_finger += LENGTH_EOL; if request.buffer_finger != request.buffer_length { return Err(Error::BadRequest); } return Ok(request); } pub fn get_method(&self) -> &Method { return &self.method; } } impl fmt::Display for Request { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { return formatter.write_fmt(format_args!("{} {} HTTP/1.1\r\n\r\n", self.method, self.target)); } } #[cfg(test)] mod tests { use super::*; const REQUEST_GET_HOMEPAGE: &str = "GET / HTTP/1.1\r\n\r\n"; const REQUEST_DELETE_HOMEPAGE: &str = "DELETE / HTTP/1.1\r\n\r\n"; const REQUEST_GET_PATH_1: &str = "GET /lorem HTTP/1.1\r\n\r\n"; const REQUEST_GET_PATH_1_TRAILING: &str = "GET /lorem/ HTTP/1.1\r\n\r\n"; const REQUEST_GET_PATH_2: &str = "GET /lorem/ipsum HTTP/1.1\r\n\r\n"; struct StringRead<'a> { iter: std::slice::Iter<'a, u8>, } impl<'a> StringRead<'a> { pub fn new(data: &'a str) -> Self { Self { iter: data.as_bytes().iter(), } } } impl<'a> std::io::Read for StringRead<'a> { fn read(&mut self, buffer: &mut [u8]) -> std::io::Result<usize> { for i in 0..buffer.len() { if let Some(&x) = self.iter.next() {
} mod assertions { #[test] fn parse_reader() { use super::StringRead; use super::Request; let mut reader_request_get_homepage = StringRead::new(super::REQUEST_GET_HOMEPAGE); let mut reader_request_delete_homepage = StringRead::new(super::REQUEST_DELETE_HOMEPAGE); let mut reader_request_get_path_1 = StringRead::new(super::REQUEST_GET_PATH_1); let mut reader_request_get_path_1_trailing = StringRead::new(super::REQUEST_GET_PATH_1_TRAILING); let mut reader_request_get_path_2 = StringRead::new(super::REQUEST_GET_PATH_2); let request_get_homepage = Request::parse(&mut reader_request_get_homepage); assert!(request_get_homepage.is_ok()); if request_get_homepage.is_ok() { let request = request_get_homepage.unwrap(); assert_eq!(super::Method::GET, request.method); assert_eq!(String::from("/"), request.target); } let request_delete_homepage = Request::parse(&mut reader_request_delete_homepage); assert!(request_delete_homepage.is_ok()); if request_delete_homepage.is_ok() { let request = request_delete_homepage.unwrap(); assert_eq!(super::Method::DELETE, request.method); assert_eq!(String::from("/"), request.target); } let request_get_path_1 = Request::parse(&mut reader_request_get_path_1); assert!(request_get_path_1.is_ok()); if request_get_path_1.is_ok() { let request = request_get_path_1.unwrap(); assert_eq!(super::Method::GET, request.method); assert_eq!("/lorem", request.target); } let request_get_path_1_trailing = Request::parse(&mut reader_request_get_path_1_trailing); assert!(request_get_path_1_trailing.is_ok()); if request_get_path_1_trailing.is_ok() { let request = request_get_path_1_trailing.unwrap(); assert_eq!(super::Method::GET, request.method); assert_eq!("/lorem", request.target); } let request_get_path_2 = Request::parse(&mut reader_request_get_path_2); assert!(request_get_path_2.is_ok()); if request_get_path_2.is_ok() { let request = request_get_path_2.unwrap(); assert_eq!(super::Method::GET, request.method); assert_eq!("/lorem/ipsum", request.target); } } } 
mod benchmarks { use test::Bencher; const REQUEST_LINE: &str = "HEAD /lorem/ipsum/dolor/sit/amet HTTP/1.1\r\n\r\n"; #[bench] fn parse_reader(b: &mut Bencher) { use super::Request; use super::StringRead; let mut reader = StringRead::new(REQUEST_LINE); b.iter(|| test::black_box(Request::parse(&mut reader))); } } }
buffer[i] = x; } else { return Ok(i); } } return Ok(buffer.len()); }
function_block-function_prefix_line
[]
Rust
src/main.rs
zikani03/git-down
8612a59cb8e4beaf467ec900dd5b4afdbfe36da0
use std::fs; use std::process::Command; use std::path::{Path, PathBuf}; const COLON: &'static str = ":"; const DOT_GIT: &'static str = ".git"; #[derive(Debug, Clone)] struct GitDir { repo_url: String, repo_name: String, dirs: Vec<String>, } impl GitDir { fn url(&self) -> &str { self.repo_url.as_str() } fn name(&self) -> &str { self.repo_name.as_str() } fn dirs(&self) -> Vec<String> { self.dirs.clone() } } fn main() { let arg = std::env::args().nth(1); let arg_dest = std::env::args().nth(2); let git_url_dir = arg.unwrap(); let git_dir = parse_source(&git_url_dir); let dest_dir = arg_dest.unwrap(); let tmp_dir = create_tmp_name(git_dir.name()); if !download_repo(&git_dir, tmp_dir.as_str()) { panic!("Failed to download repository"); } let dest_path = PathBuf::from(dest_dir.clone()); if !dest_path.exists() { match fs::create_dir(dest_dir) { Ok(_) => (), Err(e) => { panic!("Cannot create destination directory {}", e); } } } let mut source_path: PathBuf = PathBuf::from(tmp_dir.clone()); let dirs = git_dir.dirs(); for d in dirs.iter() { source_path.push(d.clone()); move_directory(source_path.as_path(), dest_path.as_path()); source_path.pop(); } match fs::remove_dir_all(tmp_dir.clone()) { Ok(_) => (), Err(_) => { panic!("Failed to remove tmp directory, you can remove it from here: {}", tmp_dir); } }; } fn download_repo(git_dir: &GitDir, tmp_dir: &str) -> bool { let mut git_command = Command::new("git") .arg("clone") .arg("--depth") .arg("1") .arg(git_dir.url()) .arg(tmp_dir) .spawn() .expect("Failed to download directory/files from repository"); let exit_code = git_command.wait() .expect("Failed to download directory/files from repository"); if exit_code.success() { true } else { false } } fn parse_source(source_uri: &str) -> GitDir { let mut colon_pos = 0; match source_uri.rfind(COLON) { Some(n) => { colon_pos = n; }, None => (), }; if colon_pos > 0 { return from_shortcut_url(source_uri); } from_url(source_uri) } fn from_shortcut_url<'a>(shortcut_composite: &str) -> 
GitDir { let parts: Vec<&str> = shortcut_composite.split(COLON).collect(); let num_parts: usize = parts.len(); if num_parts != 3 { panic!("Invalid shortcut string"); } let service = parts[0]; let repo = parts[1]; let full_url = service_url(service, repo); let url_opt = full_url.clone(); let url = url_opt.unwrap(); let git_dir = GitDir { repo_name: parts[1].to_string(), repo_url: url, dirs: parse_dirs(parts[2]) }; git_dir } fn from_url<'a>(url_composite: &str) -> GitDir { let len_git = DOT_GIT.len(); let mut pos = 0; match url_composite.rfind(DOT_GIT) { Some(n) => { pos = n; }, None => { panic!("Url must contain a .git extension after the repo name"); } } let pos_git = pos + len_git; let (url, _) = url_composite.split_at(pos_git); let url_len = url.len() + 1; let pos_slash = url.rfind("/"); let (_, name_dot_git) = url.split_at(pos_slash.unwrap() + 1); let (name, _) = name_dot_git.split_at(name_dot_git.len() - len_git); let (_, dir_part) = url_composite.split_at(url_len); GitDir { repo_url: String::from(url), repo_name: String::from(name), dirs: parse_dirs(dir_part), } } fn parse_dirs(dir_spec: &str) -> Vec<String> { let dirs: Vec<_> = dir_spec.split("+") .map(|s| s.to_string()) .collect(); dirs } #[cfg(windows)] fn create_tmp_name(dir_name: &str) -> String { match std::env::var("TMP") { Ok(val) => { let mut p: PathBuf = PathBuf::from(val); p.push(dir_name); return String::from(p.as_path().to_str().unwrap()) }, Err(err) => { let tmp_dir = format!("/tmp/git-down/{}", dir_name); tmp_dir } } } #[cfg(not(windows))] fn create_tmp_name(dir_name: &str) -> String { let tmp_dir = format!("/tmp/git-down/{}", dir_name); tmp_dir } #[cfg(windows)] fn move_directory(source: &Path , dest: &Path) { Command::new("move") .arg(source.to_str().unwrap()) .arg(dest.to_str().unwrap()) .output() .expect(&format!("Failed to copy files to directory. 
Find the files here: {}.", source.display())); } #[cfg(not(windows))] fn move_directory(source: &Path , dest: &Path) { Command::new("mv") .arg(source.to_str().unwrap()) .arg(dest.to_str().unwrap()) .output() .expect(&format!("Failed to copy files to directory. Find the files here: {}.", source.display())); } fn service_url<'a>(service: &'a str, repo: &'a str) -> Option<String> { match service { "gh" => Some(github_url(repo)), "bb" => Some(bitbucket_url(repo)), "gl" => Some(gitlab_url(repo)), "sf" => Some(sourceforge_url(repo)), _ => None, } } fn github_url(repo: &str) -> String { let mut url = String::from("https://github.com/"); url.push_str(repo.clone()); url } fn bitbucket_url(repo: &str) -> String { let mut url = String::from("https://bitbucket.org/"); url.push_str(repo); url } fn gitlab_url(repo: &str) -> String { let mut url = String::from("https://gitlab.com/"); url.push_str(repo); url } fn sourceforge_url(repo: &str) -> String { let mut url = String::from("https://git.code.sf.net/p/"); url.push_str(repo); url }
use std::fs; use std::process::Command; use std::path::{Path, PathBuf}; const COLON: &'static str = ":"; const DOT_GIT: &'static str = ".git"; #[derive(Debug, Clone)] struct GitDir { repo_url: String, repo_name: String, dirs: Vec<String>, } impl GitDir { fn url(&self) -> &str { self.repo_url.as_str() } fn name(&self) -> &str { self.repo_name.as_str() } fn dirs(&self) -> Vec<String> { self.dirs.clone() } } fn main() { let arg = std::env::args().nth(1); let arg_dest = std::env::args().nth(2); let git_url_dir = arg.unwrap(); let git_dir = parse_source(&git_url_dir); let dest_dir = arg_dest.unwrap(); let tmp_dir = create_tmp_name(git_dir.name()); if !download_repo(&git_dir, tmp_dir.as_str()) { panic!("Failed to download repository"); } let dest_path = PathBuf::from(dest_dir.clone()); if !dest_path.exists() { match fs::create_dir(dest_dir) { Ok(_) => (), Err(e) => { panic!("Cannot create destination directory {}", e); } } } let mut source_path: PathBuf = PathBuf::from(tmp_dir.clone()); let dirs = git_dir.dirs(); for d in dirs.iter() { source_path.push(d.clone()); move_directory(source_path.as_path(), dest_path.as_path()); source_path.pop(); } match fs::remove_dir_all(tmp_dir.clone()) { Ok(_) => (), Err(_) => { panic!("Failed to remove tmp directory, you can remove it from here: {}", tmp_dir); } }; } fn download_repo(git_dir: &GitDir, tmp_dir: &str) -> bool { let mut git_command = Command::new("git") .arg("clone") .arg("--depth") .arg("1") .arg(git_dir.url()) .arg(tmp_dir) .spawn() .expect("Failed to download directory/files from repository");
if exit_code.success() { true } else { false } } fn parse_source(source_uri: &str) -> GitDir { let mut colon_pos = 0; match source_uri.rfind(COLON) { Some(n) => { colon_pos = n; }, None => (), }; if colon_pos > 0 { return from_shortcut_url(source_uri); } from_url(source_uri) } fn from_shortcut_url<'a>(shortcut_composite: &str) -> GitDir { let parts: Vec<&str> = shortcut_composite.split(COLON).collect(); let num_parts: usize = parts.len(); if num_parts != 3 { panic!("Invalid shortcut string"); } let service = parts[0]; let repo = parts[1]; let full_url = service_url(service, repo); let url_opt = full_url.clone(); let url = url_opt.unwrap(); let git_dir = GitDir { repo_name: parts[1].to_string(), repo_url: url, dirs: parse_dirs(parts[2]) }; git_dir } fn from_url<'a>(url_composite: &str) -> GitDir { let len_git = DOT_GIT.len(); let mut pos = 0; match url_composite.rfind(DOT_GIT) { Some(n) => { pos = n; }, None => { panic!("Url must contain a .git extension after the repo name"); } } let pos_git = pos + len_git; let (url, _) = url_composite.split_at(pos_git); let url_len = url.len() + 1; let pos_slash = url.rfind("/"); let (_, name_dot_git) = url.split_at(pos_slash.unwrap() + 1); let (name, _) = name_dot_git.split_at(name_dot_git.len() - len_git); let (_, dir_part) = url_composite.split_at(url_len); GitDir { repo_url: String::from(url), repo_name: String::from(name), dirs: parse_dirs(dir_part), } } fn parse_dirs(dir_spec: &str) -> Vec<String> { let dirs: Vec<_> = dir_spec.split("+") .map(|s| s.to_string()) .collect(); dirs } #[cfg(windows)] fn create_tmp_name(dir_name: &str) -> String { match std::env::var("TMP") { Ok(val) => { let mut p: PathBuf = PathBuf::from(val); p.push(dir_name); return String::from(p.as_path().to_str().unwrap()) }, Err(err) => { let tmp_dir = format!("/tmp/git-down/{}", dir_name); tmp_dir } } } #[cfg(not(windows))] fn create_tmp_name(dir_name: &str) -> String { let tmp_dir = format!("/tmp/git-down/{}", dir_name); tmp_dir } #[cfg(windows)] fn 
move_directory(source: &Path , dest: &Path) { Command::new("move") .arg(source.to_str().unwrap()) .arg(dest.to_str().unwrap()) .output() .expect(&format!("Failed to copy files to directory. Find the files here: {}.", source.display())); } #[cfg(not(windows))] fn move_directory(source: &Path , dest: &Path) { Command::new("mv") .arg(source.to_str().unwrap()) .arg(dest.to_str().unwrap()) .output() .expect(&format!("Failed to copy files to directory. Find the files here: {}.", source.display())); } fn service_url<'a>(service: &'a str, repo: &'a str) -> Option<String> { match service { "gh" => Some(github_url(repo)), "bb" => Some(bitbucket_url(repo)), "gl" => Some(gitlab_url(repo)), "sf" => Some(sourceforge_url(repo)), _ => None, } } fn github_url(repo: &str) -> String { let mut url = String::from("https://github.com/"); url.push_str(repo.clone()); url } fn bitbucket_url(repo: &str) -> String { let mut url = String::from("https://bitbucket.org/"); url.push_str(repo); url } fn gitlab_url(repo: &str) -> String { let mut url = String::from("https://gitlab.com/"); url.push_str(repo); url } fn sourceforge_url(repo: &str) -> String { let mut url = String::from("https://git.code.sf.net/p/"); url.push_str(repo); url }
let exit_code = git_command.wait() .expect("Failed to download directory/files from repository");
assignment_statement
[ { "content": "Git Down\n\n========\n\n\n\n`git-down` lets you download one or multiple directories from a Git repository without the\n\nhassle of cloning or downloading the whole repository, with one simple command.\n\n\n\n## Usage\n\n\n\nIt's really easy to use.\n\n\n\n```sh\n\n$ git-down <REPO_URL.git/DIRECTO...
Rust
src/lib.rs
colstrom/lockpipes
ad774d6950225f1973bfc6f058a2efe93bf66016
pub mod command; pub mod logging; pub use command::Command; pub use nix; use nix::errno::errno; use nix::errno::Errno; use nix::sys::stat; use nix::unistd; use nix::Error as NixError; use std::fs; use std::io; use std::path::PathBuf; use std::process; #[derive(Debug)] pub struct LockPipe { path: PathBuf, } impl LockPipe { pub fn new<P: Into<PathBuf>>(path: P) -> Self { Self { path: path.into() } } pub fn read(&self) -> io::Result<()> { match fs::read(&self.path) { Ok(_) => Ok(()), Err(error) => Err(error), } } pub fn write(&self) -> io::Result<()> { fs::write(&self.path, "") } pub fn exists(&self) -> io::Result<()> { match fs::metadata(&self.path) { Ok(_) => Ok(()), Err(error) => Err(error), } } pub fn create(&self) -> nix::Result<()> { unistd::mkfifo(&self.path, stat::Mode::S_IRWXU) } pub fn delete(&self) -> io::Result<()> { fs::remove_file(&self.path) } } #[derive(Debug)] pub struct Program { pipe: LockPipe, } impl Program { pub fn new(pipe: LockPipe) -> Self { Self { pipe } } pub fn create(&self) -> i32 { log::debug!("creating pipe at {:?}", &self.pipe.path); match self.pipe.create() { Ok(_) => { log::info!("created pipe at {:?}", &self.pipe.path); 0 } Err(error) => match error { NixError::Sys(Errno::EEXIST) => { log::warn!("pipe already exists at {:?}", &self.pipe.path); 0 } _ => { log::error!( "failed to create pipe at {:?}: {:?}", &self.pipe.path, error ); errno() } }, } } pub fn exists(&self) -> i32 { log::debug!("checking if pipe exists at {:?}", &self.pipe.path); match self.pipe.exists() { Ok(_) => { log::info!("pipe exists at {:?}", &self.pipe.path); 0 } Err(error) => match error.kind() { io::ErrorKind::NotFound => { log::info!("pipe does not exist at {:?}", &self.pipe.path); 1 } _ => { log::error!("failed checking if pipe exists at {:?}", &self.pipe.path); errno() } }, } } fn ensure_exists(&self) { log::debug!("ensuring pipe exists at {:?}", &self.pipe.path); match self.pipe.exists() { Ok(_) => log::info!("pipe exists at {:?}", &self.pipe.path), 
Err(error) => match error.kind() { io::ErrorKind::NotFound => { log::warn!("pipe does not exist at {:?}", &self.pipe.path); self.create(); } _ => { log::error!("failed checking if pipe exists at {:?}", &self.pipe.path); process::exit(errno()); } }, } } pub fn read(&self) -> i32 { self.ensure_exists(); log::debug!("reading from pipe at {:?}", &self.pipe.path); match self.pipe.read() { Ok(_) => { log::info!("read from pipe at {:?}", &self.pipe.path); 0 } Err(error) => { log::error!( "failed reading from pipe at {:?}: {:?}", &self.pipe.path, error ); errno() } } } pub fn write(&self) -> i32 { self.ensure_exists(); log::debug!("writing to pipe at {:?}", &self.pipe.path); match self.pipe.write() { Ok(_) => { log::info!("wrote to pipe at {:?}", &self.pipe.path); 0 } Err(error) => { log::error!( "failed writing to pipe at {:?}: {:?}", &self.pipe.path, error ); errno() } } } pub fn delete(&self) -> i32 { match self.pipe.delete() { Ok(_) => { log::info!("deleted pipe at {:?}", &self.pipe.path); 0 } Err(error) => match error.kind() { io::ErrorKind::NotFound => { log::warn!("pipe does not exist at {:?}", &self.pipe.path); 0 } _ => { log::error!( "failed to delete pipe at {:?}: {:?}", &self.pipe.path, error ); errno() } }, } } }
pub mod command; pub mod logging; pub use command::Command; pub use nix; use nix::errno::errno; use nix::errno::Errno; use nix::sys::stat; use nix::unistd; use nix::Error as NixError; use std::fs; use std::io; use std::path::PathBuf; use std::process; #[derive(Debug)] pub struct LockPipe { path: PathBuf, } impl LockPipe { pub fn new<P: Into<PathBuf>>(path: P) -> Self { Self { path: path.into() } } pub fn read(&self) -> io::Result<()> { match fs::read(&self.path) { Ok(_) => Ok(()), Err(error) => Err(error), } } pub fn write(&self) -> io::Result<()> { fs::write(&self.path, "") } pub fn exists(&self) -> io::Result<()> { match fs::metadata(&self.path) { Ok(_) => Ok(()), Err(error) => Err(error), } } pub fn create(&self) -> nix::Result<()> { unistd::mkfifo(&self.path, stat::Mode::S_IRWXU) } pub fn delete(&self) -> io::Result<()> { fs::remove_file(&self.path) } } #[derive(Debug)] pub struct Program { pipe: LockPipe, } impl Program { pub fn new(pipe: LockPipe) -> Self { Self { pipe } } pub fn create(&self) -> i32 { log::debug!("creating pipe
; log::debug!("writing to pipe at {:?}", &self.pipe.path); match self.pipe.write() { Ok(_) => { log::info!("wrote to pipe at {:?}", &self.pipe.path); 0 } Err(error) => { log::error!( "failed writing to pipe at {:?}: {:?}", &self.pipe.path, error ); errno() } } } pub fn delete(&self) -> i32 { match self.pipe.delete() { Ok(_) => { log::info!("deleted pipe at {:?}", &self.pipe.path); 0 } Err(error) => match error.kind() { io::ErrorKind::NotFound => { log::warn!("pipe does not exist at {:?}", &self.pipe.path); 0 } _ => { log::error!( "failed to delete pipe at {:?}: {:?}", &self.pipe.path, error ); errno() } }, } } }
at {:?}", &self.pipe.path); match self.pipe.create() { Ok(_) => { log::info!("created pipe at {:?}", &self.pipe.path); 0 } Err(error) => match error { NixError::Sys(Errno::EEXIST) => { log::warn!("pipe already exists at {:?}", &self.pipe.path); 0 } _ => { log::error!( "failed to create pipe at {:?}: {:?}", &self.pipe.path, error ); errno() } }, } } pub fn exists(&self) -> i32 { log::debug!("checking if pipe exists at {:?}", &self.pipe.path); match self.pipe.exists() { Ok(_) => { log::info!("pipe exists at {:?}", &self.pipe.path); 0 } Err(error) => match error.kind() { io::ErrorKind::NotFound => { log::info!("pipe does not exist at {:?}", &self.pipe.path); 1 } _ => { log::error!("failed checking if pipe exists at {:?}", &self.pipe.path); errno() } }, } } fn ensure_exists(&self) { log::debug!("ensuring pipe exists at {:?}", &self.pipe.path); match self.pipe.exists() { Ok(_) => log::info!("pipe exists at {:?}", &self.pipe.path), Err(error) => match error.kind() { io::ErrorKind::NotFound => { log::warn!("pipe does not exist at {:?}", &self.pipe.path); self.create(); } _ => { log::error!("failed checking if pipe exists at {:?}", &self.pipe.path); process::exit(errno()); } }, } } pub fn read(&self) -> i32 { self.ensure_exists(); log::debug!("reading from pipe at {:?}", &self.pipe.path); match self.pipe.read() { Ok(_) => { log::info!("read from pipe at {:?}", &self.pipe.path); 0 } Err(error) => { log::error!( "failed reading from pipe at {:?}: {:?}", &self.pipe.path, error ); errno() } } } pub fn write(&self) -> i32 { self.ensure_exists()
random
[ { "content": "pub fn init() {\n\n Builder::from_env(\n\n Env::new()\n\n .filter_or(LOG_FILTER_ENVIRONMENT_VARIABLE, LOG_FILTER_DEFAULT_VALUE)\n\n .write_style_or(LOG_STYLE_ENVIRONMENT_VARIABLE, LOG_STYLE_DEFAULT_VALUE),\n\n )\n\n .format_timestamp(None)\n\n .init()\n\n}\n", "file_path": "sr...
Rust
src/windows.rs
Vlad-Shcherbina/whoami
14f0e4a4cebb5debf7284da5b83152689161857c
use crate::{DesktopEnv, Platform}; use std::{ convert::TryInto, ffi::OsString, os::{ raw::{c_char, c_int, c_uchar, c_ulong}, windows::ffi::OsStringExt, }, ptr, }; #[allow(unused)] #[repr(C)] enum ExtendedNameFormat { Unknown, FullyQualifiedDN, SamCompatible, Display, UniqueId, Canonical, UserPrincipal, CanonicalEx, ServicePrincipal, DnsDomain, GivenName, Surname, } #[allow(unused)] #[repr(C)] enum ComputerNameFormat { NetBIOS, DnsHostname, DnsDomain, DnsFullyQualified, PhysicalNetBIOS, PhysicalDnsHostname, PhysicalDnsDomain, PhysicalDnsFullyQualified, Max, } #[link(name = "secur32")] extern "system" { fn GetLastError() -> c_ulong; fn GetUserNameExW( a: ExtendedNameFormat, b: *mut c_char, c: *mut c_ulong, ) -> c_uchar; fn GetUserNameW(a: *mut c_char, b: *mut c_ulong) -> c_int; fn GetComputerNameExW( a: ComputerNameFormat, b: *mut c_char, c: *mut c_ulong, ) -> c_int; } fn string_from_os(string: OsString) -> String { match string.into_string() { Ok(string) => string, Err(string) => string.to_string_lossy().to_string(), } } pub fn username() -> String { string_from_os(username_os()) } pub fn username_os() -> OsString { let mut size = 0; let fail = unsafe { GetUserNameW(ptr::null_mut(), &mut size) == 0 }; debug_assert_eq!(fail, true); let mut name: Vec<u16> = Vec::with_capacity(size.try_into().unwrap()); let orig_size = size; let fail = unsafe { GetUserNameW(name.as_mut_ptr().cast(), &mut size) == 0 }; if fail { panic!( "Failed to get username: {}, report at https://github.com/libcala/whoami/issues", unsafe { GetLastError() } ); } debug_assert_eq!(orig_size, size); unsafe { name.set_len(size.try_into().unwrap()); } debug_assert_eq!(name.pop(), Some(0u16)); OsString::from_wide(&name) } #[inline(always)] pub fn realname() -> String { string_from_os(realname_os()) } #[inline(always)] pub fn realname_os() -> OsString { let mut size = 0; let fail = unsafe { GetUserNameExW(ExtendedNameFormat::Display, ptr::null_mut(), &mut size) == 0 }; debug_assert_eq!(fail, true); match 
unsafe { GetLastError() } { 0x00EA /* more data */ => { /* Success, continue */ } 0x054B /* no such domain */ => { return "Unknown".into() } 0x0534 /* none mapped */ => { return username_os(); } u => { eprintln!("Unknown error code: {}, report at https://github.com/libcala/whoami/issues", u); unreachable!(); } } let mut name: Vec<u16> = Vec::with_capacity(size.try_into().unwrap()); let orig_size = size; let fail = unsafe { GetUserNameExW( ExtendedNameFormat::Display, name.as_mut_ptr().cast(), &mut size, ) == 0 }; if fail { panic!( "Failed to get username: {}, report at https://github.com/libcala/whoami/issues", unsafe { GetLastError() } ); } debug_assert_eq!(orig_size, size); unsafe { name.set_len(size.try_into().unwrap()); } OsString::from_wide(&name) } #[inline(always)] pub fn devicename() -> String { string_from_os(devicename_os()) } #[inline(always)] pub fn devicename_os() -> OsString { let mut size = 0; let fail = unsafe { GetComputerNameExW( ComputerNameFormat::DnsHostname, ptr::null_mut(), &mut size, ) == 0 }; debug_assert_eq!(fail, true); let mut name: Vec<u16> = Vec::with_capacity(size.try_into().unwrap()); let fail = unsafe { GetComputerNameExW( ComputerNameFormat::DnsHostname, name.as_mut_ptr().cast(), &mut size, ) == 0 }; if fail { panic!( "Failed to get computer name: {}, report at https://github.com/libcala/whoami/issues", unsafe { GetLastError() } ); } unsafe { name.set_len(size.try_into().unwrap()); } OsString::from_wide(&name) } pub fn hostname() -> String { string_from_os(hostname_os()) } pub fn hostname_os() -> OsString { let mut size = 0; let fail = unsafe { GetComputerNameExW( ComputerNameFormat::NetBIOS, ptr::null_mut(), &mut size, ) == 0 }; debug_assert_eq!(fail, true); let mut name: Vec<u16> = Vec::with_capacity(size.try_into().unwrap()); let fail = unsafe { GetComputerNameExW( ComputerNameFormat::NetBIOS, name.as_mut_ptr().cast(), &mut size, ) == 0 }; if fail { panic!( "Failed to get computer name: {}, report at 
https://github.com/libcala/whoami/issues", unsafe { GetLastError() } ); } unsafe { name.set_len(size.try_into().unwrap()); } OsString::from_wide(&name) } pub fn distro_os() -> Option<OsString> { distro().map(|a| a.into()) } pub fn distro() -> Option<String> { extern "system" { fn GetVersion() -> usize; } let bits = unsafe { GetVersion() } as u32; let mut out = "Windows ".to_string(); let major: u8 = (bits & 0b00000000_00000000_00000000_11111111) as u8; let minor: u8 = ((bits & 0b00000000_00000000_11111111_00000000) >> 8) as u8; let build: u16 = ((bits & 0b11111111_11111111_00000000_00000000) >> 16) as u16; match major { 5 => out.push_str("XP"), 6 => match minor { 0 => out.push_str("Vista"), 1 => out.push_str("7"), 2 => match build { 9200 => out.push_str("10"), _ => out.push_str("8"), }, _ => out.push_str("8"), }, _ => out.push_str("Unknown"), } Some(out) } #[inline(always)] pub const fn desktop_env() -> DesktopEnv { DesktopEnv::Windows } #[inline(always)] pub const fn platform() -> Platform { Platform::Windows }
use crate::{DesktopEnv, Platform}; use std::{ convert::TryInto, ffi::OsString, os::{ raw::{c_char, c_int, c_uchar, c_ulong}, windows::ffi::OsStringExt, }, ptr, }; #[allow(unused)] #[repr(C)] enum ExtendedNameFormat { Unknown, FullyQualifiedDN, SamCompatible, Display, UniqueId, Canonical, UserPrincipal, CanonicalEx, ServicePrincipal, DnsDomain, GivenName, Surname, } #[allow(unused)] #[repr(C)] enum ComputerNameFormat { NetBIOS, DnsHostname, DnsDomain, DnsFullyQualified, PhysicalNetBIOS, PhysicalDnsHostname, PhysicalDnsDomain, PhysicalDnsFullyQualified, Max, } #[link(name = "secur32")] extern "system" { fn GetLastError() -> c_ulong; fn GetUserNameExW( a: ExtendedNameFormat, b: *mut c_char, c: *mut c_ulong, ) -> c_uchar; fn GetUserNameW(a: *mut c_char, b: *mut c_ulong) -> c_int; fn GetComputerNameExW( a: ComputerNameFormat, b: *mut c_char, c: *mut c_ulong, ) -> c_int; } fn string_from_os(string: OsString) -> String { match string.into_string() { Ok(string) => string, Err(string) => string.to_string_lossy().to_string(), } } pub fn username() -> String { string_from_os(username_os()) } pub fn username_os() -> OsString { let mut size = 0; let fail = unsafe { GetUserNameW(ptr::null_mut(), &mut size) == 0 }; debug_assert_eq!(fail, true); let mut name: Vec<u16> = Vec::with_capacity(size.try_into().unwrap()); let orig_size = size; let fail = unsafe { GetUserNameW(name.as_mut_ptr()
); } debug_assert_eq!(orig_size, size); unsafe { name.set_len(size.try_into().unwrap()); } debug_assert_eq!(name.pop(), Some(0u16)); OsString::from_wide(&name) } #[inline(always)] pub fn realname() -> String { string_from_os(realname_os()) } #[inline(always)] pub fn realname_os() -> OsString { let mut size = 0; let fail = unsafe { GetUserNameExW(ExtendedNameFormat::Display, ptr::null_mut(), &mut size) == 0 }; debug_assert_eq!(fail, true); match unsafe { GetLastError() } { 0x00EA /* more data */ => { /* Success, continue */ } 0x054B /* no such domain */ => { return "Unknown".into() } 0x0534 /* none mapped */ => { return username_os(); } u => { eprintln!("Unknown error code: {}, report at https://github.com/libcala/whoami/issues", u); unreachable!(); } } let mut name: Vec<u16> = Vec::with_capacity(size.try_into().unwrap()); let orig_size = size; let fail = unsafe { GetUserNameExW( ExtendedNameFormat::Display, name.as_mut_ptr().cast(), &mut size, ) == 0 }; if fail { panic!( "Failed to get username: {}, report at https://github.com/libcala/whoami/issues", unsafe { GetLastError() } ); } debug_assert_eq!(orig_size, size); unsafe { name.set_len(size.try_into().unwrap()); } OsString::from_wide(&name) } #[inline(always)] pub fn devicename() -> String { string_from_os(devicename_os()) } #[inline(always)] pub fn devicename_os() -> OsString { let mut size = 0; let fail = unsafe { GetComputerNameExW( ComputerNameFormat::DnsHostname, ptr::null_mut(), &mut size, ) == 0 }; debug_assert_eq!(fail, true); let mut name: Vec<u16> = Vec::with_capacity(size.try_into().unwrap()); let fail = unsafe { GetComputerNameExW( ComputerNameFormat::DnsHostname, name.as_mut_ptr().cast(), &mut size, ) == 0 }; if fail { panic!( "Failed to get computer name: {}, report at https://github.com/libcala/whoami/issues", unsafe { GetLastError() } ); } unsafe { name.set_len(size.try_into().unwrap()); } OsString::from_wide(&name) } pub fn hostname() -> String { string_from_os(hostname_os()) } pub fn 
hostname_os() -> OsString { let mut size = 0; let fail = unsafe { GetComputerNameExW( ComputerNameFormat::NetBIOS, ptr::null_mut(), &mut size, ) == 0 }; debug_assert_eq!(fail, true); let mut name: Vec<u16> = Vec::with_capacity(size.try_into().unwrap()); let fail = unsafe { GetComputerNameExW( ComputerNameFormat::NetBIOS, name.as_mut_ptr().cast(), &mut size, ) == 0 }; if fail { panic!( "Failed to get computer name: {}, report at https://github.com/libcala/whoami/issues", unsafe { GetLastError() } ); } unsafe { name.set_len(size.try_into().unwrap()); } OsString::from_wide(&name) } pub fn distro_os() -> Option<OsString> { distro().map(|a| a.into()) } pub fn distro() -> Option<String> { extern "system" { fn GetVersion() -> usize; } let bits = unsafe { GetVersion() } as u32; let mut out = "Windows ".to_string(); let major: u8 = (bits & 0b00000000_00000000_00000000_11111111) as u8; let minor: u8 = ((bits & 0b00000000_00000000_11111111_00000000) >> 8) as u8; let build: u16 = ((bits & 0b11111111_11111111_00000000_00000000) >> 16) as u16; match major { 5 => out.push_str("XP"), 6 => match minor { 0 => out.push_str("Vista"), 1 => out.push_str("7"), 2 => match build { 9200 => out.push_str("10"), _ => out.push_str("8"), }, _ => out.push_str("8"), }, _ => out.push_str("Unknown"), } Some(out) } #[inline(always)] pub const fn desktop_env() -> DesktopEnv { DesktopEnv::Windows } #[inline(always)] pub const fn platform() -> Platform { Platform::Windows }
.cast(), &mut size) == 0 }; if fail { panic!( "Failed to get username: {}, report at https://github.com/libcala/whoami/issues", unsafe { GetLastError() }
random
[ { "content": "pub fn username_os() -> OsString {\n\n // Unwrap never fails\n\n getpwuid(false).unwrap()\n\n}\n\n\n", "file_path": "src/unix.rs", "rank": 0, "score": 139876.23296355503 }, { "content": "#[inline(always)]\n\npub fn username_os() -> OsString {\n\n native::username_os()\...