blob_id
stringlengths
40
40
language
stringclasses
1 value
repo_name
stringlengths
5
140
path
stringlengths
5
183
src_encoding
stringclasses
6 values
length_bytes
int64
12
5.32M
score
float64
2.52
4.94
int_score
int64
3
5
detected_licenses
listlengths
0
47
license_type
stringclasses
2 values
text
stringlengths
12
5.32M
download_success
bool
1 class
0ed5243ae7a422136a61d0b133969f7d1e390877
Rust
aylei/leetcode-rust
/src/solution/s0238_product_of_array_except_self.rs
UTF-8
1,501
3.515625
4
[ "Apache-2.0" ]
permissive
/** * [238] Product of Array Except Self * * Given an array nums of n integers where n > 1, return an array output such that output[i] is equal to the product of all the elements of nums except nums[i]. * * Example: * * * Input: [1,2,3,4] * Output: [24,12,8,6] * * * Note: Please solve it without division and in O(n). * * Follow up:<br /> * Could you solve it with constant space complexity? (The output array does not count as extra space for the purpose of space complexity analysis.) * */ pub struct Solution {} // problem: https://leetcode.com/problems/product-of-array-except-self/ // discuss: https://leetcode.com/problems/product-of-array-except-self/discuss/?currentPage=1&orderBy=most_votes&query= // submission codes start here /* x 2 3 4 = 24 1 x 3 4 = 12 1 2 x 4 = 8 1 2 3 x = 6 */ impl Solution { pub fn product_except_self(nums: Vec<i32>) -> Vec<i32> { if nums.len() < 2 { return vec![]; } let mut res = vec![1; nums.len()]; let mut n = 1; for i in (0..nums.len() - 1).rev() { n *= nums[i + 1]; res[i] = n; } n = 1; for i in 1..nums.len() { n *= nums[i - 1]; res[i] *= n; } res } } // submission codes end #[cfg(test)] mod tests { use super::*; #[test] fn test_238() { assert_eq!( Solution::product_except_self(vec![1, 2, 3, 4]), vec![24, 12, 8, 6] ); } }
true
06f1ce0fdc648310c63c2e5865a4a7a6f003d390
Rust
Lehona/Parsiphae
/src/parsers/base.rs
UTF-8
2,179
2.984375
3
[ "MIT" ]
permissive
use inner_errors::ParserError; use types::*; fn is_not_quote(input: u8) -> bool { input != b'\"' } fn convert_string_literal(input: Input) -> StringLiteral { StringLiteral::new(&input.0) } named!(pub string_parser<Input, StringLiteral, ParserError>, fix_error!(ParserError, map!( delimited!( tag!("\""), take_while!(is_not_quote), tag!("\"") ), convert_string_literal ))); named!(pub number_parser<Input, i64, ParserError>, fix_error!(ParserError, flat_map!( recognize!( tuple!( opt!(char!('-')), is_a!("0123456789") )), parse_to!(i64) ))); named!(pub float_parser<Input, f32, ParserError>, fix_error!(ParserError, flat_map!( recognize!( tuple!( opt!(tag!("-")), is_a!("0123456789"), char!('.'), is_a!("0123456789") ) ), parse_to!(f32) ))); #[cfg(test)] mod tests { use super::*; use nom::ErrorKind; use tests::utility::*; #[test] pub fn test_string_parser() { test_parser_done( string_parser, b"\"hello world\"", StringLiteral::new(b"hello world"), b"", ); test_parser_done(string_parser, b"\"\"", StringLiteral::new(b""), b""); test_parser_done( string_parser, b"\"hello\"world", StringLiteral::new(b"hello"), b"world", ); test_parser_error(string_parser, b"\"hello", incomplete_result()); test_parser_error(string_parser, b"", incomplete_result()); } #[test] pub fn test_number_parser() { test_parser_done(number_parser, b"-1", -1, b""); test_parser_done(number_parser, b"15", 15, b""); test_parser_error( number_parser, b"xxx", failure_result(b"xxx", ErrorKind::IsA), ); } #[test] pub fn test_float_parser() { test_parser_done(float_parser, b"-1.0", -1.0, b""); test_parser_done(float_parser, b"15.775", 15.775, b""); test_parser_error(float_parser, b"xxx", failure_result(b"xxx", ErrorKind::IsA)); } }
true
4fc731ef620f8fa0ab1bb739a964d1b7ca683606
Rust
RustWorks/potw-rs-actix-web-csrf
/src/middleware/liquid.rs
UTF-8
2,320
2.890625
3
[]
no_license
use std::path::PathBuf; use std::fs::File; use std::io::Read; use actix_web::{ error::ErrorInternalServerError, Responder, HttpRequest, HttpResponse, Error, Result, http::StatusCode, middleware::{ Middleware, Response }, }; use liquid::{ Parser, ParserBuilder, Object, compiler::FilesystemInclude }; use failure; pub struct Liquid { path: PathBuf, parser: Parser, } impl Liquid { pub fn new<P>(path: P) -> Self where P: Into<PathBuf> { let path = path.into(); let parser = ParserBuilder::with_liquid() .include_source(Box::new(FilesystemInclude::new(&path))) .build(); Self { path: path, parser: parser, } } fn render(&self, tmpl: Template) -> Result<String, failure::Error> { let mut file = File::open(self.path.join(tmpl.tmpl))?; let mut buf = String::new(); file.read_to_string(&mut buf)?; Ok(self.parser.parse(&buf)?.render(&tmpl.globals)?) } } impl<S> Middleware<S> for Liquid { fn response(&self, req: &HttpRequest<S>, mut resp: HttpResponse) -> Result<Response> { // If there is a template object we want to parse and render that template. if let Some(tmpl) = req.extensions_mut().remove::<Template>() { use actix_web::http::header::CONTENT_TYPE; let render = self.render(tmpl).map_err(ErrorInternalServerError)?; resp.headers_mut().insert(CONTENT_TYPE, "text/html; charset=UTF-8".parse().unwrap()); resp.set_body(render); } Ok(Response::Done(resp)) } } pub struct Template { pub tmpl: String, pub globals: Object, } impl Template { pub fn render<T, O>(tmpl: T, globals: O) -> Self where T: Into<String>, O: Into<Option<Object>>, { Template { tmpl: tmpl.into(), globals: globals.into().unwrap_or_else(|| Object::new()), } } } impl Responder for Template { type Item = HttpResponse; type Error = Error; fn respond_to<S>(self, req: &HttpRequest<S>) -> Result<Self::Item, Self::Error> { // Add the template to the request so middleware can add to it. req.extensions_mut().insert(self); Ok(HttpResponse::new(StatusCode::OK)) } }
true
abc96a836879a25074e7222fab7e61d7cddcc65d
Rust
Orlha/bright-red
/src/engine.rs
UTF-8
2,244
2.640625
3
[]
no_license
use std::io; use std::io::Write; use std::thread; use std::time; //use std::fmt::Write; use termion; use termion::input::TermRead; use termion::raw::IntoRawMode; use termion::raw::RawTerminal; use termion::AsyncReader; use termion::input::MouseTerminal; use crate::ext::Result; use crate::game::*; use log::{info, warn}; use simplelog::WriteLogger; use simplelog::LevelFilter; use simplelog::Config; use std::fs::File; use std::boxed::Box; pub struct Engine { stdin: termion::input::Events<termion::AsyncReader>, stdout: termion::input::MouseTerminal<termion::raw::RawTerminal<std::io::Stdout>>, cmd: termion::event::Event, on: bool, game: Game, //log: std::boxed::Box<WriteLogger<std::fs::File>>, } impl Engine { pub fn new() -> Engine { let _ = WriteLogger::init(LevelFilter::Info, Config::default(), File::create("vamp.log").unwrap()); let mut n = Engine{..Default::default()}; use std::fmt::Write; write!(n.stdout, "{}", termion::cursor::Hide).ok(); return n; } pub fn read_cmd(&mut self) -> Result<()> { let input = self.stdin.next(); if let Some(Ok(key)) = input { self.cmd = key; Ok(()) } else { thread::sleep(time::Duration::from_millis(50)); self.cmd = termion::event::Event::Key(termion::event::Key::Null); Ok(()) } } pub fn process(&mut self) -> Result<()> { use termion::event::Key; use termion::event::Event; use std::fmt::Write; match self.cmd { Event::Key(Key::Null) => (), Event::Key(Key::Char('q')) | Event::Key(Key::Char('Q')) => self.quit(), Event::Key(Key::Esc) => self.quit(), _ => { let mut s = String::new(); write!(s, "{:?}", self.cmd)?; self.game.process(&self.cmd); (); } } Ok(()) } pub fn output(&mut self) -> Result<()> { self.game.display(&mut self.stdout)?; Ok(()) } pub fn active(&self) -> bool { self.on } pub fn quit(&mut self) { self.on = false; write!(self.stdout, "{}", termion::cursor::Show).ok(); } } impl Default for Engine { fn default() -> Engine { Engine{ stdin: termion::async_stdin().events(), stdout: 
MouseTerminal::from(io::stdout().into_raw_mode().unwrap()), cmd: termion::event::Event::Key(termion::event::Key::Null), on: true, game: Default::default(), } } }
true
6e7302927fd80e59259b902434f731f87b55ecbc
Rust
Zerthox/finite
/src/nfa.rs
UTF-8
6,807
2.875
3
[ "MIT" ]
permissive
use super::{Automaton, AutomatonError, DFA}; use serde::{Deserialize, Serialize}; use std::{ collections::{BTreeSet, HashMap, HashSet}, fmt, hash::Hash, }; #[derive(Default, Debug, Serialize, Deserialize)] #[serde(default, deny_unknown_fields)] struct State<S, I> where S: Eq + Hash, I: Eq + Hash, { accepts: bool, transitions: HashMap<I, HashSet<S>>, } impl<S, I> State<S, I> where S: Eq + Hash, I: Eq + Hash, { pub fn new(accepts: bool, transitions: HashMap<I, HashSet<S>>) -> Self { Self { accepts, transitions, } } } #[derive(Default, Debug, Serialize, Deserialize)] #[serde(default, deny_unknown_fields)] pub struct NFA<S, I> where S: Default + Clone + Eq + Hash + fmt::Debug, I: Default + Eq + Hash, { current: HashSet<S>, states: HashMap<S, State<S, I>>, } impl<S, I> NFA<S, I> where S: Default + Clone + Eq + Hash + fmt::Debug, I: Default + Eq + Hash, { /// Creates a new NFA with a given map of states. pub fn from_map<M>(initial: HashSet<S>, states: M) -> Self where M: Into<HashMap<S, (bool, HashMap<I, HashSet<S>>)>>, { let map = states.into(); Self { current: initial, states: map .into_iter() .map(|(state, (accepts, transitions))| (state, State::new(accepts, transitions))) .collect(), } } /// Returns a reference to the requested state or an `AutomatonError::InexistentState` error otherwise. fn get_state(&self, id: &S) -> Result<&State<S, I>, AutomatonError<S>> { self.states .get(id) .ok_or_else(|| AutomatonError::InexistentState(id.clone())) } /// Returns a mutable reference to the requested state or an `AutomatonError::InexistentState` error otherwise. 
fn get_state_mut(&mut self, id: &S) -> Result<&mut State<S, I>, AutomatonError<S>> { self.states .get_mut(id) .ok_or_else(|| AutomatonError::InexistentState(id.clone())) } } impl<S, I> Automaton<S, I> for NFA<S, I> where S: Default + Clone + Eq + Hash + fmt::Debug, I: Default + Eq + Hash, { type State = HashSet<S>; type Transition = (S, I, S); fn new_state(id: S) -> Self::State { let mut state = HashSet::with_capacity(1); state.insert(id); state } fn has_state(&self, id: &S) -> bool { self.states.contains_key(id) } fn add_state(&mut self, id: S, accept: bool) { self.states.insert(id, State::new(accept, HashMap::new())); } fn add_transition(&mut self, transition: Self::Transition) -> Result<(), AutomatonError<S>> { let (prev, input, next) = transition; if !self.has_state(&next) { Err(AutomatonError::InexistentState(next)) } else { let State { transitions, .. } = self.get_state_mut(&prev)?; if let Some(set) = transitions.get_mut(&input) { set.insert(next); } else { transitions.insert(input, Self::new_state(next)); } Ok(()) } } fn get_current(&self) -> Option<&Self::State> { if !self.current.is_empty() { Some(&self.current) } else { None } } fn set_current(&mut self, state: Self::State) { if state.iter().all(|el| self.has_state(el)) { self.current = state; } else { self.current = HashSet::new(); } } fn accepts(&self) -> bool { self.current .iter() .any(|el| self.get_state(el).unwrap().accepts) } fn step(&mut self, input: &I) { let mut new = HashSet::with_capacity(self.current.len()); for el in &self.current { if let Some(states) = self.get_state(el).unwrap().transitions.get(input) { new = new.union(&states).cloned().collect(); } } new.shrink_to_fit(); self.current = new; } } impl<S, I> Into<DFA<BTreeSet<S>, I>> for NFA<S, I> where S: Default + Clone + Eq + Ord + Hash + fmt::Debug, I: Default + Clone + Eq + Hash, { fn into(self) -> DFA<BTreeSet<S>, I> { let size = 1 << self.states.len(); let mut states = HashMap::with_capacity(size - 1); for i in 1..size { let iter = 
self .states .iter() .enumerate() .filter(|(j, _)| i & (1 << j) != 0) .map(|(_, el)| el); let state_set = iter.clone().map(|(id, _)| id.clone()).collect(); let accepts = iter.clone().any(|(_, State { accepts, .. })| *accepts); let mut transition_map: HashMap<I, BTreeSet<S>> = HashMap::new(); for (_, State { transitions, .. }) in iter { for (input, next) in transitions { if let Some(states) = transition_map.get_mut(input) { states.append(&mut next.iter().cloned().collect()); } else { transition_map.insert(input.clone(), next.iter().cloned().collect()); } } } states.insert(state_set, (accepts, transition_map)); } DFA::from_map(self.current.into_iter().collect(), states) } } #[cfg(test)] mod tests { use super::*; use maplit::{btreeset, hashmap, hashset}; #[test] fn construct() { // construct a simple DFA let mut nfa = NFA::<u32, char>::with_state(0, false); nfa.add_state(1, true); nfa.add_transition((0, 'a', 0)).unwrap(); nfa.add_transition((0, 'a', 1)).unwrap(); // check states assert!(nfa.has_state(&0), "Initially added state missing"); assert!(nfa.has_state(&1), "Later added state missing"); assert!(!nfa.accepts(), "Initial state incorrectly accepting"); assert_eq!( Some(&hashset![0]), nfa.get_current(), "Initial state not set correctly" ); } #[test] fn run() { let mut nfa = NFA::<u8, char>::with_state(0, false); nfa.add_state(1, false); nfa.add_state(2, true); nfa.add_transition((0, 'a', 1)).unwrap(); nfa.add_transition((0, 'a', 2)).unwrap(); nfa.add_transition((1, 'b', 1)).unwrap(); nfa.set_current(hashset![0, 1]); assert_eq!( Some(&hashset![0, 1]), nfa.get_current(), "Incorrect state after valid state set" ); nfa.set_current(hashset![2, 4]); assert_eq!( None, nfa.get_current(), "Incorrect state after invalid state set" ); nfa.set_current(hashset![0]); assert!( nfa.run(&"a".chars().collect::<Vec<_>>()), "Incorrect result on accepting run" ); assert_eq!( Some(&hashset![0]), nfa.get_current(), "Incorrect state after run" ); } #[test] fn deserialize() { let yaml = 
r"{states: {0: {accepts: false, transitions: {a: [0, 1], b: [1]}}, 1: {accepts: true}}, current: [0]}"; let mut nfa: NFA<u8, char> = serde_yaml::from_str(yaml).unwrap(); assert!(nfa.has_state(&0), "Deserialized DFA is missing state 0"); assert!( nfa.run(&"aaa".chars().collect::<Vec<_>>()), "Incorrect result after run" ); } #[test] fn convert() { let nfa = NFA::from_map( hashset![0, 1], hashmap!( 0 => (true, hashmap!( 'a' => hashset![0, 1], 'b' => hashset![] )), 1 => (false, hashmap!( 'a' => hashset![1], 'b' => hashset![0, 1] )) ), ); let mut dfa: DFA<_, _> = nfa.into(); assert!( dfa.has_state(&btreeset![0, 1]), "Converted DFA is missing state {0, 1}" ); assert!(dfa.run(&['a', 'b', 'b']), "Incorrect result after run"); } }
true
50a391c59f003296db97c73c0a0faf7becd5116b
Rust
Rrogntudju/warp-mds
/mmds/src/lib.rs
UTF-8
7,590
2.65625
3
[]
no_license
// Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 use serde_json::{Map, Value}; use static_init::dynamic; mod data_store; use data_store::{Error as MmdsError, Mmds}; #[dynamic] static mut MMDS: Mmds = Mmds::default(); /// Patch provided JSON document (given as `serde_json::Value`) in-place with JSON Merge Patch /// [RFC 7396](https://tools.ietf.org/html/rfc7396). pub fn json_patch(target: &mut Value, patch: &Value) { if patch.is_object() { if !target.is_object() { // Replace target with a serde_json object so we can recursively copy patch values. *target = Value::Object(Map::new()); } // This is safe since we make sure patch and target are objects beforehand. let doc = target.as_object_mut().unwrap(); for (key, value) in patch.as_object().unwrap() { if value.is_null() { // If the value in the patch is null we remove the entry. doc.remove(key.as_str()); } else { // Recursive call to update target document. // If `key` is not in the target document (it's a new field defined in `patch`) // insert a null placeholder and pass it as the new target // so we can insert new values recursively. 
json_patch(doc.entry(key.as_str()).or_insert(Value::Null), value); } } } else { *target = patch.clone(); } } pub mod filters { use super::*; use warp::Filter; pub fn get_mds() -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone { warp::path("mds").and(warp::get()).and(warp::path::full()).and_then(handlers::get_mds) } pub fn put_mds() -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone { warp::path("mds") .and(warp::path::end()) .and(warp::put()) .and(json_body()) .and_then(handlers::put_mds) } pub fn patch_mds() -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone { warp::path("mds") .and(warp::path::end()) .and(warp::patch()) .and(json_body()) .and_then(handlers::patch_mds) } fn json_body() -> impl Filter<Extract = (Value,), Error = warp::Rejection> + Clone { warp::body::content_length_limit(10240).and(warp::body::json()) } } pub mod handlers { use super::*; use std::convert::Infallible; use warp::filters::path::FullPath; use warp::http::{Response, StatusCode}; pub async fn get_mds(fpath: FullPath) -> Result<impl warp::Reply, Infallible> { let path = fpath.as_str().strip_prefix("/mds").unwrap(); let result = MMDS.write().get_value(path.to_string()); let response = match result { Ok(value) => Response::builder().status(StatusCode::OK).body(value.join("\n")), Err(e) => match e { MmdsError::NotFound => Response::builder().status(StatusCode::NOT_FOUND).body(format!("{}", e)), MmdsError::UnsupportedValueType => Response::builder().status(StatusCode::INTERNAL_SERVER_ERROR).body(format!("{}", e)), }, }; Ok(response) } pub async fn put_mds(data: Value) -> Result<impl warp::Reply, Infallible> { let result = MMDS.write().put_data(data); let response = match result { Ok(()) => Response::builder().status(StatusCode::NO_CONTENT).body(String::new()), Err(e) => Response::builder().status(StatusCode::INTERNAL_SERVER_ERROR).body(format!("{}", e)), }; Ok(response) } pub async fn patch_mds(patch: Value) -> 
Result<impl warp::Reply, Infallible> { let result = MMDS.write().patch_data(patch); let response = match result { Ok(()) => Response::builder().status(StatusCode::NO_CONTENT).body(String::new()), Err(e) => Response::builder().status(StatusCode::INTERNAL_SERVER_ERROR).body(format!("{}", e)), }; Ok(response) } } #[cfg(test)] mod tests { use super::*; use serde_json; #[test] fn test_parse_request() { let data = r#"{ "name": { "first": "John", "second": "Doe" }, "age": "43", "phones": { "home": { "RO": "+40 1234567", "UK": "+44 1234567" }, "mobile": "+44 2345678" } }"#; MMDS.write().put_data(serde_json::from_str(data).unwrap()).unwrap(); let data = r#"{ "name": { "first": "John", "second": "Doe" }, "age": 43 }"#; assert_eq!( MMDS.write().put_data(serde_json::from_str(data).unwrap()), Err(MmdsError::UnsupportedValueType) ); } #[test] fn test_json_patch() { let mut data = serde_json::json!({ "name": { "first": "John", "second": "Doe" }, "age": "43", "phones": { "home": { "RO": "+40 1234567", "UK": "+44 1234567" }, "mobile": "+44 2345678" } }); let patch = serde_json::json!({ "name": { "second": null, "last": "Kennedy" }, "age": "44", "phones": { "home": "+44 1234567", "mobile": { "RO": "+40 2345678", "UK": "+44 2345678" } } }); json_patch(&mut data, &patch); // Test value replacement in target document. assert_eq!(data["age"], patch["age"]); // Test null value removal from target document. assert_eq!(data["name"]["second"], Value::Null); // Test add value to target document. 
assert_eq!(data["name"]["last"], patch["name"]["last"]); assert!(!data["phones"]["home"].is_object()); assert_eq!(data["phones"]["home"], patch["phones"]["home"]); assert!(data["phones"]["mobile"].is_object()); assert_eq!(data["phones"]["mobile"]["RO"], patch["phones"]["mobile"]["RO"]); assert_eq!(data["phones"]["mobile"]["UK"], patch["phones"]["mobile"]["UK"]); } use warp::http::StatusCode; use warp::test::request; #[tokio::test] async fn put_patch_get_ok() { let resp = request() .method("PUT") .path("/mds") .body(r#"{"c0":{"c1":"12345","c2":"6789"}}"#) .reply(&filters::put_mds()) .await; assert_eq!(resp.status(), StatusCode::NO_CONTENT); let resp = request() .method("PATCH") .path("/mds") .body(r#"{"c0":{"c3":["67890","a"]}}"#) .reply(&filters::patch_mds()) .await; assert_eq!(resp.status(), StatusCode::NO_CONTENT); let resp = request().method("GET").path("/mds/c0").reply(&filters::get_mds()).await; assert_eq!(resp.status(), StatusCode::OK); assert_eq!(resp.body(), "c1\nc2\nc3"); let resp = request().method("GET").path("/mds/c0/c3/0").reply(&filters::get_mds()).await; assert_eq!(resp.status(), StatusCode::OK); assert_eq!(resp.body(), "67890"); } }
true
48183a8a0650361e7cdb512c625323c097488fdd
Rust
mbrobbel/advent-of-code
/2019/day6/src/main.rs
UTF-8
2,301
3.4375
3
[ "Unlicense" ]
permissive
use std::{ collections::{HashMap, HashSet}, error::Error, io::Read, iter::FromIterator, }; fn count(graph: &HashMap<String, Vec<String>>, node: &str) -> u64 { graph.get(node).map_or(0, |values| { values.iter().fold(1, |mut acc, x| { acc += count(graph, x); acc }) }) } fn get_graph(input: &str) -> HashMap<String, Vec<String>> { input .lines() .map(|line| { let sep = line.find(')').unwrap(); (line[0..sep].to_string(), line[sep + 1..].to_string()) }) .fold( HashMap::new(), |mut map: HashMap<String, Vec<String>>, (a, b)| { map.entry(b) .and_modify(|v| v.push(a.clone())) .or_insert_with(|| vec![a]); map }, ) } fn part_one(input: &str) -> u64 { let graph = get_graph(input); graph.keys().fold(0, |mut acc, x| { acc += count(&graph, x); acc }) } fn path_to_center(graph: &HashMap<String, Vec<String>>, node: &str) -> Vec<String> { std::iter::successors(Some(node.to_owned()), |x| { if let Some(y) = graph.get(x) { y.get(0).cloned() } else { None } }) .collect() } fn part_two(input: &str) -> usize { let graph = get_graph(input); let you: HashSet<String> = HashSet::from_iter(path_to_center(&graph, "YOU").into_iter()); let san = HashSet::from_iter(path_to_center(&graph, "SAN").into_iter()); you.difference(&san).count() + san.difference(&you).count() - 2 } fn main() -> Result<(), Box<dyn Error>> { let mut input = String::new(); std::io::stdin().read_to_string(&mut input)?; println!("part_one: {}", part_one(&input)); println!("part_two: {}", part_two(&input)); Ok(()) } #[cfg(test)] mod tests { use super::*; #[test] fn part_one_examples() { assert_eq!( part_one( r#"COM)B B)C C)D D)E E)F B)G G)H D)I E)J J)K K)L"# ), 42 ) } #[test] fn part_two_examples() { assert_eq!( part_two( r#"COM)B B)C C)D D)E E)F B)G G)H D)I E)J J)K K)L K)YOU I)SAN"# ), 4 ) } }
true
5c9d9173537ad4427da0cca521d1c2c1c40a4243
Rust
happenslol/rem
/src/main.rs
UTF-8
10,896
2.6875
3
[ "MIT" ]
permissive
use crate::{ config::{save_config, Config}, repo::Repo, }; use anyhow::{anyhow, bail, Context, Result}; use clap::{AppSettings, Clap}; use lazy_static::lazy_static; use regex::Regex; use std::env; use std::io::{self, Read}; use url::Url; mod config; mod git; mod github; mod gitlab; mod repo; lazy_static! { static ref API_SOURCE_REGEX: Regex = Regex::new(r"(?P<alias>^\w+)(@(?P<ref>\w+))?:(?P<script>.+)$").unwrap(); static ref GIT_SOURCE_REGEX: Regex = Regex::new(r"^(?P<repo>((git|ssh|http(s)?)|(git@[\w\.]+))(:(//)?)([\w\./\-~]+)(\.git)?(/)?)(@(?P<ref>\w+))?:(?P<script>.+)$") .unwrap(); } #[derive(Clap, Debug)] #[clap(author, about, version)] #[clap(global_setting = AppSettings::ColoredHelp)] #[clap(setting = AppSettings::DeriveDisplayOrder)] #[clap(setting = AppSettings::SubcommandRequiredElseHelp)] struct Opts { #[clap(subcommand)] command: Command, } const SCRIPT_HELP: &'static str = r"Script identifier for a script from a repository For saved repos: `<repo>[@ref]:<script_path>` Example: `myscripts:hello.bash` Example (w/ ref): `myscripts@v1.0:hello.bash` For git repos: `git@<repo_url>[@ref]:<script_path>` Example: `git@github.com:user/myscripts:hello.bash` Example (w/ ref): `git@github.com:user/myscripts@main:hello.bash` "; #[derive(Clap, Debug)] enum Command { /// Read and modify locally saved repositories Repo { #[clap(subcommand)] command: RepoCommand, }, /// Run a script using the locally installed bash shell Run { /// Force a fresh download of the script (only for raw git repositories) #[clap(short, long)] fresh: bool, #[clap(about = "Script to run", long_about = SCRIPT_HELP)] script: String, /// Args to be passed to the script #[clap(about = "Args to be passed to the script")] args: Vec<String>, }, /// Import a script and print it to stdout Import { #[clap(short, long)] fresh: bool, #[clap(about = "Script to import", long_about = SCRIPT_HELP)] script: String, }, } #[derive(Clap, Debug)] enum RepoCommand { /// List all locally saved repositories #[clap(alias 
= "ls")] List, /// Add a repository to the local repository list Add { /// Local alias for the repository to add name: String, /// URI of the repository to add uri: String, /// Username for the repository (if required) #[clap(long, short)] username: Option<String>, /// Password or token for the repository (if required) #[clap(long, short)] password: Option<String>, /// Reads the password from the given environment variable when the repo is used #[clap(long)] password_env: Option<String>, /// Reads the password or token from stdin #[clap(long)] password_stdin: bool, }, /// Remove a repository from the local repository list #[clap(alias = "rm")] Remove { /// Local alias for the repository to remove name: String, }, } #[derive(PartialEq)] pub enum Password { Saved(String), FromEnv(String, String), None, } #[tokio::main] async fn main() -> Result<()> { openssl_probe::init_ssl_cert_env_vars(); let mut config = config::load_config().await?; match Opts::parse().command { Command::Repo { command } => match command { RepoCommand::List => { if config.repo.is_empty() { println!("No Saved repositories."); return Ok(()); } println!("Saved repositories:"); for (k, v) in config.repo { println!(" {} ({} | {})", k, v.provider(), v.readable()); } } RepoCommand::Add { name, uri, username, password, password_env, password_stdin, } => { if config.repo.contains_key(&name) { bail!("A repository with the name `{}` already exists", &name); } let password_for_parse = match (password, password_env, password_stdin) { (Some(pass), _, _) => Password::Saved(pass), (_, Some(var), _) => Password::FromEnv(var.clone(), env::var(var)?), (_, _, true) => { let mut buf = String::new(); io::stdin().read_to_string(&mut buf)?; Password::Saved(buf) } _ => Password::None, }; let repo = validate_api_repo(&uri, username, password_for_parse).await?; config.repo.insert(name.clone(), repo); save_config(&config) .await .context("Failed to save updated config")?; println!("Repo `{}` was successfully added", &name); 
} RepoCommand::Remove { name } => { if !config.repo.contains_key(&name) { bail!("Repo `{}` was not found", &name); } config.repo.remove(&name); save_config(&config) .await .context("Failed to save updated config")?; println!("Repo `{}` was removed", &name); } }, Command::Run { script, args, fresh, } => { let src = ScriptSource::parse(&script, ScriptAction::Run)?; src.validate_script_name(&config)?; let contents = src.fetch_script_contents(&config, fresh).await?; let args = args.iter().map(|s| &**s).collect(); // TODO(happens): Find a way to propagate the actual exit code // instead of simply returning 0/1 depending on the script. // This should cover most use cases if you just want to know // if the script failed, but until `std::process::Termination` // is stabilized, it seems unsafe to use `std::process::exit` // since we're using a tokio main. let exit = repo::run_script(&contents, args).await?; if !exit.success() { bail!(""); } } Command::Import { script, fresh } => { let src = ScriptSource::parse(&script, ScriptAction::Import)?; src.validate_script_name(&config)?; let contents = src.fetch_script_contents(&config, fresh).await?; repo::import_script(&contents).await?; } }; Ok(()) } enum ScriptAction { Run, Import, } pub struct ScriptSource { repo: String, source_type: SourceType, script_name: String, rref: Option<String>, action: ScriptAction, } enum SourceType { Git, Saved, } impl ScriptSource { fn parse(script: &str, action: ScriptAction) -> Result<ScriptSource> { if let Some(matches) = API_SOURCE_REGEX.captures(script) { let repo = matches .name("alias") .expect("No alias matched") .as_str() .to_owned(); let script_name = matches .name("script") .expect("No script name matched") .as_str() .to_owned(); let rref = matches.name("ref").map(|rref| rref.as_str().to_owned()); return Ok(Self { source_type: SourceType::Saved, repo, script_name, rref, action, }); } if let Some(matches) = GIT_SOURCE_REGEX.captures(script) { let repo = matches .name("repo") .expect("No 
repo matched") .as_str() .to_owned(); let script_name = matches .name("script") .expect("No script name matched") .as_str() .to_owned(); let rref = matches.name("ref").map(|rref| rref.as_str().to_owned()); return Ok(Self { source_type: SourceType::Git, repo, script_name, rref, action, }); } bail!("Script source could not be parsed") } fn validate_script_name(&self, config: &Config) -> Result<()> { if config.require_bash_extension.is_none() && config.require_lib_extension.is_none() { return Ok(()); } let expected = match ( &config.require_bash_extension, &config.require_lib_extension, &self.action, ) { (Some(ref ext), _, &ScriptAction::Run) => ext, (_, Some(ext), &ScriptAction::Import) => ext, _ => unreachable!(), }; if !self.script_name.ends_with(expected) { bail!("Expected script name to end with `{}`", expected); } Ok(()) } async fn fetch_script_contents(&self, config: &config::Config, fresh: bool) -> Result<String> { let repo = match self.source_type { SourceType::Saved => config .repo .get(&self.repo) .ok_or(anyhow!("Repo `{}` was not found", &self.repo))? .box_clone(), SourceType::Git => git::GitRepo::from_src(&self), }; let rref = self.rref.clone().unwrap_or("HEAD".to_owned()); Ok(repo.fetch_script(&self.script_name, &rref, fresh).await?) 
} } async fn validate_api_repo( uri: &str, username: Option<String>, password: Password, ) -> Result<Box<dyn Repo>> { let mut maybe_parsed: Option<Url> = None; // Check if we've been given a raw gitlab or github url without scheme if uri.starts_with("gitlab.com") || uri.starts_with("github.com") { let with_scheme = format!("https://{}", uri); maybe_parsed = Some(Url::parse(&with_scheme)?); } // Try parsing the url manually otherwise let mut parsed = match maybe_parsed { Some(parsed) => parsed, None => Url::parse(uri)?, }; if parsed.cannot_be_a_base() { bail!("Repo URI was not recognized"); } // Enforce https let _ = parsed.set_scheme("https"); match parsed.host_str() { Some("gitlab.com") => Ok(gitlab::fetch_project(&parsed, password).await?), Some("github.com") => Ok(github::fetch_project(&parsed, username, password).await?), Some(_) => bail!("No provider recognized for passed URI"), None => bail!("No host on passed URI"), } }
true
e3cdd48fefff3e1db95438ade8d3bbf1a602f02e
Rust
adwhit/advent-of-code-2017
/src/bin/10_knot_hash.rs
UTF-8
3,530
3.109375
3
[]
no_license
#![feature(inclusive_range_syntax)] extern crate advent_of_code; #[macro_use] extern crate failure; use advent_of_code::Result; use std::fs; use std::io::prelude::*; fn get_data(path: &str) -> Result<Vec<u8>> { let mut f = fs::File::open(path)?; let mut s = String::new(); f.read_to_string(&mut s)?; s.split(',') .map(|v| { v.trim() .parse::<u8>() .map_err(|_| format_err!("parse fail: {:?}", v)) }) .collect() } fn get_data2(path: &str) -> Result<Vec<u8>> { let mut f = fs::File::open(path)?; let mut s = Vec::new(); f.read_to_end(&mut s)?; s.pop(); // remove newline s.extend(&[17, 31, 73, 47, 23]); Ok(s) } #[derive(Clone, Debug)] struct State { data: Vec<u8>, skip: usize, curpos: usize, } impl State { fn new(size: u8) -> State { State { data: (0..=size).collect(), skip: 0, curpos: 0, } } fn hash(&mut self, len: usize) { let l = self.data.len(); for ix in 0..(len / 2) { self.data .swap((self.curpos + ix) % l, (self.curpos + len - ix - 1) % l); } self.curpos = (self.curpos + len + self.skip) % l; self.skip = (self.skip + 1) % l; } fn dense_hash(&self) -> String { let mut out = String::new(); for round in 0..16 { let mut v = self.data[round * 16]; for roundix in 1..16 { let ix = round * 16 + roundix; v ^= self.data[ix] } if v < 16 { out.push_str("0") } out.push_str(&format!("{:x}", v)) } assert!(out.len() == 32); out } } fn knot_hash(state: &mut State, lens: &[u8]) -> u32 { for len in lens { state.hash(*len as usize) } state.data[0] as u32 * state.data[1] as u32 } fn knot_hash2(state: &mut State, lens: &[u8]) -> String { for _ in 0..64 { for len in lens { state.hash(*len as usize) } } state.dense_hash() } fn run() -> Result<()> { { let data = get_data("data/10.txt")?; let mut state = State::new(255); let outcome = knot_hash(&mut state, &data); println!("v1: {}", outcome); } { let data = get_data2("data/10.txt")?; let mut state = State::new(255); let outcome = knot_hash2(&mut state, &data); println!("v2: {}", outcome); } Ok(()) } fn main() { run().unwrap_or_else(|e| { 
println!("Error: {}", e); for cause in e.causes() { println!("{}", cause) } }) } #[cfg(test)] mod tests { use super::*; #[test] fn cases_v1() { let lens = &[3, 4, 1, 5]; let mut state = State::new(4); let outcome = knot_hash(&mut state, lens); assert_eq!(outcome, 12); } #[test] fn cases_v2() { let mut lens = Vec::new(); lens.extend(&[17, 31, 73, 47, 23]); let mut state = State::new(255); let outcome = knot_hash2(&mut state, &lens); let expect = "a2582a3a0e66e6e86e3812dcb672a272"; assert_eq!(&outcome, expect); let mut lens = b"AoC 2017".to_vec(); lens.extend(&[17, 31, 73, 47, 23]); let mut state = State::new(255); let outcome = knot_hash2(&mut state, &lens); let expect = "33efeb34ea91902bb2f59c9920caa6cd"; assert_eq!(&outcome, expect); } }
true
4d1582c6a87754a02bf2afed574aace39ab5b1c2
Rust
yannbolliger/rust-stainless-thesis-report
/thesis-report/src/code/lib.rs
UTF-8
4,875
2.84375
3
[]
no_license
extern crate stainless; use stainless::*; mod list; use list::*; /// Node IDs // PeerId was replaced by a simple u128 to make hashing easier. pub enum ErrorKind { NoWitnessLeft { context: Option<Box<ErrorKind>> }, } /// A generic container mapping `u128`s to some type `T`, /// which keeps track of the primary peer, witnesses, full nodes, /// and faulty nodes. Provides lifecycle methods to swap the primary, /// mark witnesses as faulty, and maintains an `invariant` for /// correctness. #[derive(Clone)] pub struct PeerList<T> { values: ListMap<u128, T>, primary: u128, witnesses: ListSet<u128>, full_nodes: ListSet<u128>, faulty_nodes: ListSet<u128>, } impl<T> PeerList<T> { /// Invariant maintained by a `PeerList` /// /// ## Implements /// - [LCD-INV-NODES] pub fn invariant(peer_list: &PeerList<T>) -> bool { peer_list.full_nodes.is_disjoint(&peer_list.witnesses) && peer_list.full_nodes.is_disjoint(&peer_list.faulty_nodes) && peer_list.witnesses.is_disjoint(&peer_list.faulty_nodes) && !peer_list.witnesses.contains(&peer_list.primary) && !peer_list.full_nodes.contains(&peer_list.primary) && !peer_list.faulty_nodes.contains(&peer_list.primary) && peer_list.values.contains(&peer_list.primary) && peer_list.values.contains_all(&peer_list.witnesses) && peer_list.values.contains_all(&peer_list.full_nodes) && peer_list.values.contains_all(&peer_list.faulty_nodes) } /// Get a reference to the light client instance for the given peer id. pub fn get(&self, peer_id: &u128) -> Option<&T> { self.values.get(peer_id) } /// Get current primary peer id. pub fn primary_id(&self) -> u128 { self.primary } /// Get a reference to the current primary instance. 
pub fn primary(&self) -> &T { // SAFETY: Enforced by invariant self.values.get(&self.primary).unwrap() } /// Get all the witnesses peer ids pub fn witnesses_ids(&self) -> &ListSet<u128> { &self.witnesses } /// Get all the full nodes peer ids pub fn full_nodes_ids(&self) -> &ListSet<u128> { &self.full_nodes } /// Get all the faulty nodes peer ids pub fn faulty_nodes_ids(&self) -> &ListSet<u128> { &self.faulty_nodes } /// Remove the given peer from the list of witnesses, /// and mark it as faulty. Get a new witness from /// the list of full nodes, if there are any left. /// Returns the new witness, if any. /// /// ## Precondition /// - The given peer id must not be the primary peer id. /// - The given peer must be in the witness list #[pre( Self::invariant(&self) && !(faulty_witness == self.primary) && self.witnesses.contains(&faulty_witness) )] #[post( Self::invariant(&self) && !self.witnesses.contains(&faulty_witness) && self.faulty_nodes.contains(&faulty_witness) )] pub fn replace_faulty_witness( &mut self, faulty_witness: u128 ) -> Option<u128> { let mut result = None; self.witnesses.remove(&faulty_witness); if let Some(new_witness) = self.full_nodes.first() { self.witnesses.insert(new_witness); self.full_nodes.remove(&new_witness); result = Some(new_witness); } self.faulty_nodes.insert(faulty_witness); result } /// Mark the primary as faulty and swap it for the next available /// witness, if any. Returns the new primary on success. /// /// ## Errors /// - If there are no witness left, returns `ErrorKind::NoWitnessLeft`. 
#[pre(Self::invariant(&self))] #[post((matches!(ret, Ok(_))).implies( Self::invariant(&self) && old(&self).primary != self.primary && self.faulty_nodes.contains(&old(&self).primary) && old(&self).witnesses.contains(&self.primary) ))] pub fn replace_faulty_primary( &mut self, primary_error: Option<Box<ErrorKind>>, ) -> Result<u128, Box<ErrorKind>> { self.faulty_nodes.insert(self.primary); if let Some(new_primary) = self.witnesses.first() { self.primary = new_primary; self.witnesses.remove(&new_primary); Ok(new_primary) } else if let Some(err) = primary_error { Err(Box::new(ErrorKind::NoWitnessLeft { context: Some(err) })) } else { Err(Box::new(ErrorKind::NoWitnessLeft { context: None })) } } /// Get a reference to the underlying `HashMap` pub fn values(&self) -> &ListMap<u128, T> { &self.values } /// Consume into the underlying `HashMap` pub fn into_values(self) -> ListMap<u128, T> { self.values } }
true
fd60a4110208381e95aadad2375132e70ace8b80
Rust
dyna-dot/Selenium
/rust/src/manager.rs
UTF-8
4,076
2.734375
3
[ "Apache-2.0" ]
permissive
use std::error::Error; use std::path::PathBuf; use std::process::Command; use crate::downloads::download_driver_to_tmp_folder; use crate::files::{parse_version, uncompress}; use crate::metadata::{create_browser_metadata, get_browser_version_from_metadata, get_metadata, write_metadata}; pub trait BrowserManager { fn get_browser_name(&self) -> &str; fn get_browser_version(&self, os: &str) -> Option<String>; fn get_driver_name(&self) -> &str; fn get_driver_version(&self, browser_version: &str, os: &str) -> Result<String, Box<dyn Error>>; fn get_driver_url(&self, driver_version: &str, os: &str, arch: &str) -> String; fn get_driver_path_in_cache(&self, driver_version: &str, os: &str, arch: &str) -> PathBuf; fn download_driver(&self, driver_version: &str, os: &str, arch: &str) -> Result<(), Box<dyn Error>> { let driver_url = Self::get_driver_url(self, driver_version, os, arch); let (_tmp_folder, driver_zip_file) = download_driver_to_tmp_folder(driver_url)?; let driver_path_in_cache = Self::get_driver_path_in_cache(self, driver_version, os, arch); uncompress(&driver_zip_file, driver_path_in_cache); Ok(()) } } #[allow(dead_code)] #[allow(clippy::upper_case_acronyms)] pub enum OS { WINDOWS, MACOS, LINUX } impl OS { pub fn to_str(&self) -> &str { match self { OS::WINDOWS => "windows", OS::MACOS => "macos", OS::LINUX => "linux", } } pub fn is(&self, os: &str) -> bool { self.to_str().eq_ignore_ascii_case(os) } } #[allow(dead_code)] #[allow(clippy::upper_case_acronyms)] pub enum ARCH { X32, X64, ARM64 } impl ARCH { pub fn to_str(&self) -> &str { match self { ARCH::X32 => "x86", ARCH::X64 => "x86_64", ARCH::ARM64 => "aarch64", } } pub fn is(&self, arch: &str) -> bool { self.to_str().eq_ignore_ascii_case(arch) } } pub fn run_shell_command(command: &str, flag: &str, args: &str) -> Result<String, Box<dyn Error>> { log::debug!("Running {} command: {:?}",command, args); let output = Command::new(command) .args([flag, args]) .output()?; log::debug!("{:?}", output); 
Ok(String::from_utf8_lossy(&output.stdout).to_string()) } pub fn detect_browser_version(browser_name: &str, shell: &str, flag: &str, args: Vec<&str>) -> Option<String> { let mut metadata = get_metadata(); match get_browser_version_from_metadata(&metadata.browsers, browser_name) { Some(version) => { log::trace!("Browser with valid TTL. Getting {} version from metadata", browser_name); Some(version) } _ => { log::debug!("Using shell command to find out {} version", browser_name); let mut browser_version = "".to_string(); for arg in args.iter() { let output = match run_shell_command(shell, flag, *arg) { Ok(out) => out, Err(_e) => continue, }; let full_browser_version = parse_version(output); if full_browser_version.is_empty() { continue; } log::debug!("The version of {} is {}", browser_name, full_browser_version); browser_version = get_major_version(&full_browser_version); break; } if browser_version.is_empty() { None } else { metadata.browsers.push(create_browser_metadata(browser_name, &browser_version)); write_metadata(&metadata); Some(browser_version) } } } } pub fn get_major_version(full_version: &str) -> String { let version_vec: Vec<&str> = full_version.split('.').collect(); version_vec.first().unwrap().to_string() } pub fn get_minor_version(full_version: &str) -> String { let version_vec: Vec<&str> = full_version.split('.').collect(); version_vec.get(1).unwrap().to_string() }
true
047e30d4c7ee90f10711c4dc649d722fd72b9ceb
Rust
wisest30/AlgoStudy
/source/leetcode/2316/hyo.rs
UTF-8
971
2.765625
3
[]
no_license
impl Solution { pub fn dfs(cur: usize, visited: &mut Vec<bool>, e: &Vec<Vec<usize>>) -> usize { if visited[cur] { 0 } else { visited[cur] = true; let mut ret = 1usize; for &nxt in &e[cur] { ret += Solution::dfs(nxt, visited, &e); } ret } } pub fn count_pairs(n: i32, edges: Vec<Vec<i32>>) -> i64 { let n = n as usize; let mut e = vec![vec![0usize; 0]; n]; for edge in edges { e[edge[0] as usize].push(edge[1] as usize); e[edge[1] as usize].push(edge[0] as usize); } let mut visited = vec![false; n]; let mut ret = 0usize; for cur in 0..n { if !visited[cur] { let cnt = Solution::dfs(cur, &mut visited, &e); ret += cnt * (n - cnt); } } ret /= 2; ret as i64 } }
true
d4dc51b1859becf510910b4041d959a6592ea78b
Rust
David-Kunz/proc-macro
/tests/basic.rs
UTF-8
367
3.15625
3
[]
no_license
use derive_entity::Entity; use entity::{Select,Sql}; #[derive(Entity)] pub struct Book { id: u64, title: String, pages: u64, author: String, } fn main() { let mut select = Book::select(); select.set_limit(200).set_unique().set_columns(vec!["id", "title"]); assert_eq!("select distinct id,title from Book limit 200;", select.to_sql()); }
true
695abb2e8315fb12aea5e65d01e5cceef2b659cb
Rust
alexliesenfeld/httpmock
/src/common/data.rs
UTF-8
14,065
2.6875
3
[ "LicenseRef-scancode-philippe-de-muyter", "MIT" ]
permissive
extern crate serde_regex; use std::cmp::Ordering; use std::collections::BTreeMap; use std::fmt; use std::fmt::Debug; use std::sync::atomic::AtomicUsize; use std::sync::atomic::Ordering::Relaxed; use std::sync::{Arc, RwLock}; use std::time::Duration; use regex::Regex; use serde::{Deserialize, Serialize}; use serde_json::Value; /// A general abstraction of an HTTP request of `httpmock`. #[derive(Serialize, Deserialize, Debug, Clone)] pub struct HttpMockRequest { pub path: String, pub method: String, pub headers: Option<Vec<(String, String)>>, pub query_params: Option<Vec<(String, String)>>, pub body: Option<Vec<u8>>, } impl HttpMockRequest { pub fn new(method: String, path: String) -> Self { Self { path, method, headers: None, query_params: None, body: None, } } pub fn with_headers(mut self, arg: Vec<(String, String)>) -> Self { self.headers = Some(arg); self } pub fn with_query_params(mut self, arg: Vec<(String, String)>) -> Self { self.query_params = Some(arg); self } pub fn with_body(mut self, arg: Vec<u8>) -> Self { self.body = Some(arg); self } } /// A general abstraction of an HTTP response for all handlers. #[derive(Serialize, Deserialize, Clone)] pub struct MockServerHttpResponse { pub status: Option<u16>, pub headers: Option<Vec<(String, String)>>, #[serde(default, with = "opt_vector_serde_base64")] pub body: Option<Vec<u8>>, pub delay: Option<Duration>, } impl MockServerHttpResponse { pub fn new() -> Self { Self { status: None, headers: None, body: None, delay: None, } } } impl Default for MockServerHttpResponse { fn default() -> Self { Self::new() } } /// Serializes and deserializes the response body to/from a Base64 string. 
mod opt_vector_serde_base64 { use serde::{Deserialize, Deserializer, Serializer}; // See the following references: // https://github.com/serde-rs/serde/blob/master/serde/src/ser/impls.rs#L99 // https://github.com/serde-rs/serde/issues/661 pub fn serialize<T, S>(bytes: &Option<T>, serializer: S) -> Result<S::Ok, S::Error> where T: AsRef<[u8]>, S: Serializer, { match bytes { Some(ref value) => serializer.serialize_bytes(base64::encode(value).as_bytes()), None => serializer.serialize_none(), } } // See the following references: // https://github.com/serde-rs/serde/issues/1444 pub fn deserialize<'de, D>(deserializer: D) -> Result<Option<Vec<u8>>, D::Error> where D: Deserializer<'de>, { #[derive(Deserialize)] struct Wrapper(#[serde(deserialize_with = "from_base64")] Vec<u8>); let v = Option::deserialize(deserializer)?; Ok(v.map(|Wrapper(a)| a)) } fn from_base64<'de, D>(deserializer: D) -> Result<Vec<u8>, D::Error> where D: Deserializer<'de>, { let vec = Vec::deserialize(deserializer)?; base64::decode(vec).map_err(serde::de::Error::custom) } } /// Prints the response body as UTF8 string impl fmt::Debug for MockServerHttpResponse { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("MockServerHttpResponse") .field("status", &self.status) .field("headers", &self.headers) .field( "body", &self .body .as_ref() .map(|x| String::from_utf8_lossy(x.as_ref()).to_string()), ) .field("delay", &self.delay) .finish() } } /// A general abstraction of an HTTP request for all handlers. 
#[derive(Serialize, Deserialize, Clone, Debug)] pub struct Pattern { #[serde(with = "serde_regex")] pub regex: Regex, } impl Pattern { pub fn from_regex(regex: Regex) -> Pattern { Pattern { regex } } } impl Ord for Pattern { fn cmp(&self, other: &Self) -> Ordering { self.regex.as_str().cmp(other.regex.as_str()) } } impl PartialOrd for Pattern { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) } } impl PartialEq for Pattern { fn eq(&self, other: &Self) -> bool { self.regex.as_str() == other.regex.as_str() } } impl Eq for Pattern {} pub type MockMatcherFunction = fn(&HttpMockRequest) -> bool; /// A general abstraction of an HTTP request for all handlers. #[derive(Serialize, Deserialize, Clone)] pub struct RequestRequirements { pub path: Option<String>, pub path_contains: Option<Vec<String>>, pub path_matches: Option<Vec<Pattern>>, pub method: Option<String>, pub headers: Option<Vec<(String, String)>>, pub header_exists: Option<Vec<String>>, pub cookies: Option<Vec<(String, String)>>, pub cookie_exists: Option<Vec<String>>, pub body: Option<String>, pub json_body: Option<Value>, pub json_body_includes: Option<Vec<Value>>, pub body_contains: Option<Vec<String>>, pub body_matches: Option<Vec<Pattern>>, pub query_param_exists: Option<Vec<String>>, pub query_param: Option<Vec<(String, String)>>, pub x_www_form_urlencoded_key_exists: Option<Vec<String>>, pub x_www_form_urlencoded: Option<Vec<(String, String)>>, #[serde(skip_serializing, skip_deserializing)] pub matchers: Option<Vec<MockMatcherFunction>>, } impl Default for RequestRequirements { fn default() -> Self { Self::new() } } impl RequestRequirements { pub fn new() -> Self { Self { path: None, path_contains: None, path_matches: None, method: None, headers: None, header_exists: None, cookies: None, cookie_exists: None, body: None, json_body: None, json_body_includes: None, body_contains: None, body_matches: None, query_param_exists: None, query_param: None, x_www_form_urlencoded: None, 
x_www_form_urlencoded_key_exists: None, matchers: None, } } pub fn with_path(mut self, arg: String) -> Self { self.path = Some(arg); self } pub fn with_method(mut self, arg: String) -> Self { self.method = Some(arg); self } pub fn with_body(mut self, arg: String) -> Self { self.body = Some(arg); self } pub fn with_json_body(mut self, arg: Value) -> Self { self.json_body = Some(arg); self } pub fn with_path_contains(mut self, arg: Vec<String>) -> Self { self.path_contains = Some(arg); self } pub fn with_path_matches(mut self, arg: Vec<Pattern>) -> Self { self.path_matches = Some(arg); self } pub fn with_headers(mut self, arg: Vec<(String, String)>) -> Self { self.headers = Some(arg); self } pub fn with_header_exists(mut self, arg: Vec<String>) -> Self { self.header_exists = Some(arg); self } pub fn with_cookies(mut self, arg: Vec<(String, String)>) -> Self { self.cookies = Some(arg); self } pub fn with_cookie_exists(mut self, arg: Vec<String>) -> Self { self.cookie_exists = Some(arg); self } pub fn with_json_body_includes(mut self, arg: Vec<Value>) -> Self { self.json_body_includes = Some(arg); self } pub fn with_body_contains(mut self, arg: Vec<String>) -> Self { self.body_contains = Some(arg); self } pub fn with_body_matches(mut self, arg: Vec<Pattern>) -> Self { self.body_matches = Some(arg); self } pub fn with_query_param_exists(mut self, arg: Vec<String>) -> Self { self.query_param_exists = Some(arg); self } pub fn with_query_param(mut self, arg: Vec<(String, String)>) -> Self { self.query_param = Some(arg); self } } /// A Request that is made to set a new mock. 
#[derive(Serialize, Deserialize, Clone)] pub struct MockDefinition { pub request: RequestRequirements, pub response: MockServerHttpResponse, } impl MockDefinition { pub fn new(req: RequestRequirements, mock: MockServerHttpResponse) -> Self { Self { request: req, response: mock, } } } #[derive(Serialize, Deserialize)] pub struct MockRef { pub mock_id: usize, } impl MockRef { pub fn new(mock_id: usize) -> Self { Self { mock_id } } } #[derive(Serialize, Deserialize, Clone)] pub struct ActiveMock { pub id: usize, pub call_counter: usize, pub definition: MockDefinition, pub is_static: bool, } impl ActiveMock { pub fn new(id: usize, mock_definition: MockDefinition, is_static: bool) -> Self { ActiveMock { id, definition: mock_definition, call_counter: 0, is_static, } } } #[derive(Serialize, Deserialize)] pub struct ClosestMatch { pub request: HttpMockRequest, pub request_index: usize, pub mismatches: Vec<Mismatch>, } #[derive(Serialize, Deserialize)] pub struct ErrorResponse { pub message: String, } impl ErrorResponse { pub fn new<T>(message: &T) -> ErrorResponse where T: ToString, { ErrorResponse { message: message.to_string(), } } } // ************************************************************************************************* // Diff and Change correspond to difference::Changeset and Difference structs. They are duplicated // here only for the reason to make them serializable/deserializable using serde. 
// ************************************************************************************************* #[derive(PartialEq, Debug, Serialize, Deserialize)] pub enum Diff { Same(String), Add(String), Rem(String), } #[derive(Debug, Serialize, Deserialize)] pub struct DiffResult { pub differences: Vec<Diff>, pub distance: f32, pub tokenizer: Tokenizer, } #[derive(PartialEq, Debug, Serialize, Deserialize, Clone, Copy)] pub enum Tokenizer { Line, Word, Character, } #[derive(Debug, Serialize, Deserialize)] pub struct Reason { pub expected: String, pub actual: String, pub comparison: String, pub best_match: bool, } #[derive(Debug, Serialize, Deserialize)] pub struct Mismatch { pub title: String, pub reason: Option<Reason>, pub diff: Option<DiffResult>, } #[cfg(test)] mod test { use std::collections::BTreeMap; use regex::Regex; use serde_json::json; use crate::common::data::{Pattern, RequestRequirements}; /// This test makes sure that adding the matching rules to a mock fills the struct as expected. 
#[test] fn fill_mock_requirements() { // Arrange let with_path = "with_path"; let with_path_contains = vec!["with_path_contains".into()]; let with_path_matches = vec![Pattern::from_regex( Regex::new(r#"with_path_matches"#).unwrap(), )]; let mut with_headers = Vec::new(); with_headers.push(("test".into(), "value".into())); let with_method = "GET"; let with_body = "with_body"; let with_body_contains = vec!["body_contains".into()]; let with_body_matches = vec![Pattern::from_regex( Regex::new(r#"with_body_matches"#).unwrap(), )]; let with_json_body = json!(12.5); let with_json_body_includes = vec![json!(12.5)]; let with_query_param_exists = vec!["with_query_param_exists".into()]; let mut with_query_param = Vec::new(); with_query_param.push(("with_query_param".into(), "value".into())); let with_header_exists = vec!["with_header_exists".into()]; // Act let rr = RequestRequirements::new() .with_path(with_path.clone().into()) .with_path_contains(with_path_contains.clone()) .with_path_matches(with_path_matches.clone()) .with_headers(with_headers.clone()) .with_method(with_method.clone().into()) .with_body(with_body.clone().into()) .with_body_contains(with_body_contains.clone()) .with_body_matches(with_body_matches.clone()) .with_json_body(with_json_body.clone()) .with_json_body_includes(with_json_body_includes.clone()) .with_query_param_exists(with_query_param_exists.clone()) .with_query_param(with_query_param.clone()) .with_header_exists(with_header_exists.clone()); // Assert assert_eq!(rr.path.as_ref().unwrap(), with_path.clone()); assert_eq!( rr.path_contains.as_ref().unwrap(), &with_path_contains.clone() ); assert_eq!( rr.path_matches.as_ref().unwrap(), &with_path_matches.clone() ); assert_eq!(rr.headers.as_ref().unwrap(), &with_headers.clone()); assert_eq!(rr.body.as_ref().unwrap(), with_body.clone()); assert_eq!( rr.body_contains.as_ref().unwrap(), &with_body_contains.clone() ); assert_eq!( rr.body_matches.as_ref().unwrap(), &with_body_matches.clone() ); 
assert_eq!(rr.json_body.as_ref().unwrap(), &with_json_body.clone()); assert_eq!( rr.json_body_includes.as_ref().unwrap(), &with_json_body_includes.clone() ); assert_eq!( rr.query_param_exists.as_ref().unwrap(), &with_query_param_exists.clone() ); assert_eq!(rr.query_param.as_ref().unwrap(), &with_query_param.clone()); assert_eq!( rr.header_exists.as_ref().unwrap(), &with_header_exists.clone() ); } }
true
08b5394ae5924dd07cf07d8b891dbec1ebd3cbc5
Rust
gnames/gnparser-rs
/src/output/simple.rs
UTF-8
1,049
2.53125
3
[]
no_license
use crate::peg;
use csv;
use std::error::Error;

/// Returns the CSV header row matching the columns produced by [`row`].
pub fn header() -> String {
    "Id,Verbatim,CanonicalFull,CanonicalSimple,CanonicalStem,Authorship,Year,Quality".to_string()
}

/// Serializes one parsed scientific name into a single CSV row string.
///
/// Missing optional parts (canonical name, authorship, year) are emitted as
/// empty fields. Returns an error if CSV writing or UTF-8 conversion fails.
pub fn row(sn: peg::SciName) -> Result<String, Box<dyn Error>> {
    // Pull the three canonical forms out in one step; a missing canonical
    // name yields three empty columns (same behavior as before).
    let (can_full, can_simple, can_stem) = match sn.canonical_name {
        Some(can) => (can.full, can.simple, can.stem),
        None => (String::new(), String::new(), String::new()),
    };

    let mut wtr = csv::Writer::from_writer(vec![]);
    wtr.write_record(&[
        sn.name_string_id.clone(),
        sn.verbatim.clone(),
        can_full,
        can_simple,
        can_stem,
        // `unwrap_or_default()` replaces the previous
        // `unwrap_or("".to_string()).to_string()`, which allocated twice.
        sn.authorship.unwrap_or_default(),
        sn.year.unwrap_or_default(),
        sn.quality.to_string(),
    ])?;
    wtr.flush()?;
    // The buffer already is a `String`; the old `res.to_string()` cloned it needlessly.
    Ok(String::from_utf8(wtr.into_inner()?)?)
}
true
a3e9054a8a6ccc65331fdbb3310d663a420d71c9
Rust
aticu/size_format
/src/lib.rs
UTF-8
14,090
3.875
4
[ "Apache-2.0", "MIT" ]
permissive
//! # size_format //! //! This crate provides formatting for sizes. //! //! The main goal is to provide easy formatters for data sizes. //! //! It provides both binary and SI unit prefixes per default, though more could be added. //! ``` //! use size_format::{SizeFormatterBinary, SizeFormatterSI}; //! //! assert_eq!( //! format!("{}B", SizeFormatterBinary::new(42 * 1024 * 1024)), //! "42.0MiB".to_string() //! ); //! assert_eq!( //! format!("{}B", SizeFormatterSI::new(42_000_000)), //! "42.0MB".to_string() //! ); //! ``` //! //! The precision can also be specified. Please note that values are always rounded down. //! ``` //! use size_format::SizeFormatterSI; //! //! assert_eq!( //! format!("{:.4}B", SizeFormatterSI::new(1_999_999_999)), //! "1.9999GB".to_string() //! ); //! assert_eq!( //! format!("{:.0}B", SizeFormatterSI::new(1_999_999_999)), //! "1GB".to_string() //! ); //! ``` //! //! The presented precision will also never exceed the available precision. //! ``` //! use size_format::SizeFormatterSI; //! //! assert_eq!( //! format!("{:.10}B", SizeFormatterSI::new(678)), //! "678B".to_string() //! ); //! assert_eq!( //! format!("{:.10}B", SizeFormatterSI::new(1_999)), //! "1.999kB".to_string() //! ); //! ``` //! //! For more flexibility, use the `SizeFormatter` type directly with the correct type parameters. //! For example the following code formats a `u16` using binary prefixes and uses a comma as a separator. //! ``` //! use size_format::{BinaryPrefixes, CommaSeparated, SizeFormatter}; //! //! assert_eq!( //! format!("{:.2}B", SizeFormatter::<u16, BinaryPrefixes, CommaSeparated>::from(65_535u16)), //! "63,99KiB".to_string() //! ); //! ``` //! //! Although this crate was mainly intended for data sizes, it can also be used for other units. //! //! It is also possible to implement the `PrefixType` trait to make your own prefix system. //! ``` //! extern crate generic_array; //! extern crate size_format; //! //! 
use size_format::{PointSeparated, PrefixType, SizeFormatter}; //! use generic_array::{typenum::U3, GenericArray}; //! //! struct Millimeter; //! //! impl PrefixType for Millimeter { //! type N = U3; //! //! const PREFIX_SIZE: u32 = 1000; //! //! fn prefixes() -> GenericArray<&'static str, Self::N> { //! ["m", "", "k"].into() //! } //! } //! //! assert_eq!( //! format!("{}m", SizeFormatter::<u32, Millimeter, PointSeparated>::new(1)), //! "1mm".to_string() //! ); //! assert_eq!( //! format!("{}m", SizeFormatter::<u32, Millimeter, PointSeparated>::new(1_000)), //! "1.0m".to_string() //! ); //! assert_eq!( //! format!("{}m", SizeFormatter::<u32, Millimeter, PointSeparated>::new(1_000_000)), //! "1.0km".to_string() //! ); //! assert_eq!( //! format!("{}m", SizeFormatter::<u64, Millimeter, PointSeparated>::new(10_000_000_000)), //! "10000.0km".to_string() //! ); //! ``` #![no_std] #![warn(missing_docs)] extern crate generic_array; extern crate num; use core::{ cmp, fmt::{self, Display}, marker::PhantomData, }; use num::{integer::Integer, rational::Ratio, traits::cast::FromPrimitive, traits::Pow}; mod config; pub use self::config::{ BinaryPrefixes, CommaSeparated, DecimalSeparator, PointSeparated, PrefixType, SIPrefixes, }; /// The precision to use by default for formatting the numbers. const DEFAULT_PRECISION: usize = 1; /// Implements `Display` to format the contained byte size using SI prefixes. pub type SizeFormatterSI = SizeFormatter<u64, SIPrefixes, PointSeparated>; /// Implements `Display` to format the contained byte size using binary prefixes. pub type SizeFormatterBinary = SizeFormatter<u64, BinaryPrefixes, PointSeparated>; /// Represents a size that can be formatted. /// /// # Panics /// - May panic if the `BaseType` is too small for the prefix specified in `Prefix` /// and the number is being formatted. 
pub struct SizeFormatter<BaseType, Prefix, Separator>
where
    BaseType: Clone + Integer + Display + FromPrimitive + Pow<u32, Output = BaseType>,
    Ratio<BaseType>: FromPrimitive,
    Prefix: PrefixType,
    Separator: DecimalSeparator,
{
    /// The number to be formatted.
    num: BaseType,
    // Carries the prefix system and decimal separator purely as
    // compile-time configuration; no runtime storage is needed.
    _marker: PhantomData<(Prefix, Separator)>,
}

impl<BaseType, Prefix, Separator> SizeFormatter<BaseType, Prefix, Separator>
where
    BaseType: Clone + Integer + Display + FromPrimitive + Pow<u32, Output = BaseType>,
    Ratio<BaseType>: FromPrimitive,
    Prefix: PrefixType,
    Separator: DecimalSeparator,
{
    /// Creates a new size formatter for the given number.
    pub fn new(num: BaseType) -> SizeFormatter<BaseType, Prefix, Separator> {
        SizeFormatter {
            num,
            _marker: PhantomData,
        }
    }

    /// Creates a new size formatter from a compatible number.
    // NOTE: an inherent convenience constructor, not the `From` trait.
    pub fn from<T: Into<BaseType>>(num: T) -> SizeFormatter<BaseType, Prefix, Separator> {
        SizeFormatter {
            num: num.into(),
            _marker: PhantomData,
        }
    }
}

impl<BaseType, Prefix, Separator> Display for SizeFormatter<BaseType, Prefix, Separator>
where
    BaseType: Clone + Integer + Display + FromPrimitive + Pow<u32, Output = BaseType>,
    Ratio<BaseType>: FromPrimitive,
    Prefix: PrefixType,
    Separator: DecimalSeparator,
{
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let max_prefix = Prefix::prefixes().len() - 1;
        let precision = f.precision().unwrap_or(DEFAULT_PRECISION);
        // Panics (as documented above) when `BaseType` cannot represent `PREFIX_SIZE`.
        let prefix_size = BaseType::from_u32(Prefix::PREFIX_SIZE)
            .expect("prefix size is too large for number type");

        // Find the right prefix. Capped at the largest available prefix, so
        // oversized values print with the last prefix (e.g. "1000.0YB").
        let divisions = cmp::min(int_log(self.num.clone(), prefix_size.clone()), max_prefix);

        // Cap the precision to what makes sense: each division by the prefix
        // size contributes at most 3 meaningful decimal digits.
        let precision = cmp::min(precision, divisions * 3);

        // Exact rational value of `num / prefix_size^divisions`, rendered by
        // `FormatRatio` (which truncates, never rounds).
        let ratio = Ratio::<BaseType>::new(self.num.clone(), prefix_size.pow(divisions as u32));
        let format_number = FormatRatio::<BaseType, Separator>::new(ratio);

        write!(
            f,
            "{:.*}{}",
            precision,
            format_number,
            Prefix::prefixes()[divisions]
        )
    }
}

/// Returns the number of times `num` can be divided by `base`.
fn int_log<BaseType>(mut num: BaseType, base: BaseType) -> usize where BaseType: Clone + Integer + Display + FromPrimitive + Pow<u32, Output = BaseType>, Ratio<BaseType>: FromPrimitive, { let mut divisions = 0; while num >= base { num = num / base.clone(); divisions += 1; } divisions } /// This allows formatting a ratio as a decimal number. /// /// This is a temporary solution until support for that is added to the `num` crate. struct FormatRatio<BaseType, Separator> where BaseType: Clone + Integer + Display + FromPrimitive + Pow<u32, Output = BaseType>, Ratio<BaseType>: FromPrimitive, Separator: DecimalSeparator, { num: Ratio<BaseType>, _marker: PhantomData<Separator>, } impl<BaseType, Separator> FormatRatio<BaseType, Separator> where BaseType: Clone + Integer + Display + FromPrimitive + Pow<u32, Output = BaseType>, Ratio<BaseType>: FromPrimitive, Separator: DecimalSeparator, { /// Creates a new format ratio from the number. fn new(num: Ratio<BaseType>) -> FormatRatio<BaseType, Separator> { FormatRatio { num, _marker: PhantomData, } } } impl<BaseType, Separator> Display for FormatRatio<BaseType, Separator> where BaseType: Clone + Integer + Display + FromPrimitive + Pow<u32, Output = BaseType>, Ratio<BaseType>: FromPrimitive, Separator: DecimalSeparator, { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.num.trunc())?; let precision = f.precision().unwrap_or(DEFAULT_PRECISION); if precision > 0 { write!(f, "{}", Separator::SEPARATOR)?; let mut frac = self.num.fract(); for _ in 0..precision { if frac.is_integer() { // If the fractional part is an integer, we're done and just need more zeroes. write!(f, "0")?; } else { // Otherwise print every digit separately. 
frac = frac * Ratio::from_u64(10).unwrap(); write!(f, "{}", frac.trunc())?; frac = frac.fract(); } } } Ok(()) } } #[cfg(test)] #[macro_use] extern crate std; #[cfg(test)] mod tests { use super::*; use std::string::ToString; #[test] fn small_sizes() { assert_eq!(format!("{}B", SizeFormatterSI::new(0)), "0B".to_string()); assert_eq!(format!("{}B", SizeFormatterSI::new(1)), "1B".to_string()); assert_eq!( format!("{}B", SizeFormatterSI::new(999)), "999B".to_string() ); assert_eq!( format!("{}B", SizeFormatterBinary::new(0)), "0B".to_string() ); assert_eq!( format!("{}B", SizeFormatterBinary::new(1)), "1B".to_string() ); assert_eq!( format!("{}B", SizeFormatterBinary::new(999)), "999B".to_string() ); assert_eq!( format!("{}B", SizeFormatterSI::new(1_000)), "1.0kB".to_string() ); assert_eq!( format!("{}B", SizeFormatterSI::new(55_000)), "55.0kB".to_string() ); assert_eq!( format!("{}B", SizeFormatterSI::new(999_999)), "999.9kB".to_string() ); assert_eq!( format!("{}B", SizeFormatterSI::new(1_000_000)), "1.0MB".to_string() ); assert_eq!( format!("{}B", SizeFormatterBinary::new(1 * 1024)), "1.0KiB".to_string() ); assert_eq!( format!("{}B", SizeFormatterBinary::new(55 * 1024)), "55.0KiB".to_string() ); assert_eq!( format!("{}B", SizeFormatterBinary::new(999 * 1024 + 1023)), "999.9KiB".to_string() ); assert_eq!( format!("{}B", SizeFormatterBinary::new(1 * 1024 * 1024)), "1.0MiB".to_string() ); } #[test] fn big_sizes() { assert_eq!( format!("{}B", SizeFormatterSI::new(387_854_348_875)), "387.8GB".to_string() ); assert_eq!( format!("{}B", SizeFormatterSI::new(123_456_789_999_999)), "123.4TB".to_string() ); assert_eq!( format!("{}B", SizeFormatterSI::new(499_999_999_999_999_999)), "499.9PB".to_string() ); assert_eq!( format!("{}B", SizeFormatterSI::new(1_000_000_000_000_000_000)), "1.0EB".to_string() ); assert_eq!( format!( "{}B", SizeFormatter::<u128, SIPrefixes, PointSeparated>::new( 1_000_000_000_000_000_000_000 ) ), "1.0ZB".to_string() ); assert_eq!( format!( "{}B", 
SizeFormatter::<u128, SIPrefixes, PointSeparated>::new( 1_000_000_000_000_000_000_000_000 ) ), "1.0YB".to_string() ); } #[test] fn exceeds_yotta() { assert_eq!( format!( "{}B", SizeFormatter::<u128, SIPrefixes, PointSeparated>::new( 1_000_000_000_000_000_000_000_000_000 ) ), "1000.0YB".to_string() ); assert_eq!( format!( "{}B", SizeFormatter::<u128, SIPrefixes, PointSeparated>::new( 1_000_000_000_000_000_000_000_000_000_000 ) ), "1000000.0YB".to_string() ); } #[test] fn precision() { assert_eq!(format!("{:.9}B", SizeFormatterSI::new(1)), "1B".to_string()); assert_eq!( format!("{:.0}B", SizeFormatterSI::new(1_111)), "1kB".to_string() ); assert_eq!( format!("{:.1}B", SizeFormatterSI::new(1_111)), "1.1kB".to_string() ); assert_eq!( format!("{:.2}B", SizeFormatterSI::new(1_111)), "1.11kB".to_string() ); assert_eq!( format!("{:.3}B", SizeFormatterSI::new(1_111)), "1.111kB".to_string() ); assert_eq!( format!("{:.4}B", SizeFormatterSI::new(1_111)), "1.111kB".to_string() ); assert_eq!( format!("{:.4}B", SizeFormatterSI::new(1_000_100)), "1.0001MB".to_string() ); assert_eq!( format!("{:.4}B", SizeFormatterSI::new(1_500_000)), "1.5000MB".to_string() ); assert_eq!( format!("{:.4}B", SizeFormatterSI::new(1_000_000)), "1.0000MB".to_string() ); } #[test] fn configurations() { assert_eq!( format!( "{}B", SizeFormatter::<u16, SIPrefixes, CommaSeparated>::new(65_535) ), "65,5kB".to_string() ); assert_eq!( format!( "{}B", SizeFormatter::<u16, BinaryPrefixes, PointSeparated>::new(65_535) ), "63.9KiB".to_string() ); } #[test] fn from() { assert_eq!( format!("{}B", SizeFormatterSI::from(546_987u32)), "546.9kB".to_string() ); } #[test] #[should_panic(expected = "prefix size is too large")] fn incompatile_base_type_fails() { assert_eq!( format!( "{}B", SizeFormatter::<u8, SIPrefixes, CommaSeparated>::new(10) ), "65.5kB".to_string() ); } }
true
4e00254ffe86f308eb6b494e97c34b2d27412dd0
Rust
FiloSanza/rust-raytracing
/src/objects/constant_medium.rs
UTF-8
2,129
2.796875
3
[]
no_license
use super::material::{isotropic::Isotropic, material::Material};
use super::hittable::{HitRecord, Hittable};
use super::bounding::BoundingBox;
use super::{min_f64, max_f64};
use super::textures::Texture;
use super::vec3::Vec3;
use super::ray::Ray;

use std::rc::Rc;
use std::f64;

use rand::Rng;

/// A constant-density medium bounded by `object`: a ray traversing the
/// boundary scatters at a random, exponentially distributed distance.
/// Scattering uses an isotropic material built from the given texture.
// NOTE(review): presumably the "constant medium" (smoke/fog) technique from
// "Ray Tracing: The Next Week" — confirm against the rest of the renderer.
pub struct ConstantMedium {
    object: Rc<dyn Hittable>,
    material: Rc<dyn Material>,
    density: f64,
}

impl ConstantMedium {
    /// Wraps `object` as a scattering volume. `density` is stored as
    /// `-1/density`, the factor used below to turn a uniform random sample
    /// into a scattering distance via `-ln(U)/density`.
    pub fn new(object: Rc<dyn Hittable>, texture: Rc<dyn Texture>, density: f64) -> Self {
        Self {
            object,
            density: -1.0/density,
            material: Rc::new(Isotropic::new(texture)),
        }
    }
}

impl Hittable for ConstantMedium {
    fn hit(&self, ray: &Ray, tmin: f64, tmax: f64) -> Option<HitRecord> {
        // Entry point of the ray into the boundary (searched over all t).
        let mut record1 = if let Some(record) = self.object.hit(ray, f64::MIN, f64::MAX) {
            record
        } else{
            return None;
        };

        // Exit point: the next hit strictly after the entry (epsilon avoids
        // re-hitting the same surface).
        let mut record2 = if let Some(record) = self.object.hit(ray, record1.time + 0.0001, f64::MAX) {
            record
        } else{
            return None;
        };

        // Clamp the traversed segment to the caller's [tmin, tmax] window.
        record1.time = max_f64(tmin, record1.time);
        record2.time = min_f64(tmax, record2.time);

        if record1.time >= record2.time {
            return None;
        }

        record1.time = max_f64(0.0, record1.time);

        let mut rng = rand::thread_rng();
        let ray_length = ray.direction.length();
        let distance_inside = (record2.time - record1.time) * ray_length;
        // `density` holds -1/density, so this is -ln(U)/density: an
        // exponentially distributed scattering distance.
        let hit_distance = self.density * (rng.gen_range(0.0, 1.0) as f64).ln();

        // The sampled scattering point lies beyond the segment inside the
        // boundary: the ray passes through without interacting.
        if hit_distance > distance_inside {
            return None;
        }

        let time = record1.time + hit_distance / ray_length;

        Some(HitRecord::new(
            &ray,
            ray.at(time),
            Vec3::new(1.0, 0.0, 0.0), //doesn't matter — scattering is isotropic
            time,
            0.0,
            0.0,
            self.material.clone()
        ))
    }

    // The medium occupies exactly its boundary object's volume.
    fn bounding_box(&self, t0: f64, t1: f64) -> Option<BoundingBox> {
        self.object.bounding_box(t0, t1)
    }
}
true
c7cf994796dadc8a129f7d91e617c68a277a01a7
Rust
kjetilkjeka/uavcan.rs
/dsdl_compiler/src/config.rs
UTF-8
1,362
3.625
4
[ "MIT", "Apache-2.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
//! Configurable options relating to compilation

use std::str::FromStr;

/// Makes certain things in the compilation process configurable. `CompileConfig::default()` is generally safe to use.
pub struct CompileConfig {
    /// Compile data type signatures for types `#[DataTypeSignature = "0x12345678"]`
    pub data_type_signature: bool,

    /// Sets strategy for deriving the `Default` trait
    pub derive_default: DeriveDefault,
}

impl Default for CompileConfig {
    // Defaults: no data type signatures, primitive-types `Default` derivation.
    fn default() -> CompileConfig {
        CompileConfig {
            data_type_signature: false,
            derive_default: DeriveDefault::default(),
        }
    }
}

/// Strategy for deriving the `Default` trait
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum DeriveDefault {
    /// Derive default for structs only when the struct consists of primitive types or arrays of primitive types.
    PrimitiveTypes,
}

/// Error returned when parsing a [`DeriveDefault`] from an unrecognized string.
// `Debug`/`PartialEq` derives added so the error can be unwrapped, asserted on,
// and reported by callers (error types without `Debug` are awkward to propagate).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ParseDeriveDefaultError {
    /// The input string did not name any known `DeriveDefault` variant.
    NotVariant,
}

impl Default for DeriveDefault {
    fn default() -> DeriveDefault {
        DeriveDefault::PrimitiveTypes
    }
}

impl FromStr for DeriveDefault {
    type Err = ParseDeriveDefaultError;

    // Accepted spelling: "primitive-types" (kebab-case, as used on the CLI).
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "primitive-types" => Ok(DeriveDefault::PrimitiveTypes),
            _ => Err(ParseDeriveDefaultError::NotVariant),
        }
    }
}
true
10f3cddacc706173c56c7533c2996039a105d899
Rust
hone/jarvis
/src/marvel_champions.rs
UTF-8
2,199
3.09375
3
[]
no_license
use crate::{CardSearch, DbCard}; use rayon::prelude::{IntoParallelIterator, ParallelIterator}; use serde::Deserialize; const IMAGE_HOST: &str = "https://marvelcdb.com"; const CARDS_API: &str = "https://marvelcdb.com/api/public/cards/"; #[derive(Deserialize)] pub struct Card { pub code: String, pub name: String, pub duplicate_of_code: Option<String>, pub duplicate_of_name: Option<String>, pub real_text: Option<String>, pub imagesrc: Option<String>, } impl DbCard for Card { fn name(&self) -> &str { &self.name } fn image(&self) -> Option<&str> { self.imagesrc.as_ref().map(|i| i.as_str()) } fn image_host(&self) -> &str { IMAGE_HOST } } impl Card { pub fn image_url(&self) -> Option<String> { self.imagesrc .as_ref() .map(|image| format!("{}{}", IMAGE_HOST, image)) } } pub struct API; impl CardSearch<Card> for API { fn cards_api() -> &'static str { CARDS_API } /// remove card duplicates fn process_search(results: Vec<&Card>) -> Vec<&Card> { results .into_par_iter() .filter(|card| card.duplicate_of_code.is_none()) .collect() } } #[cfg(test)] mod tests { use super::*; fn cards_from_fixtures() -> Vec<Card> { serde_json::from_str(include_str!("../fixtures/marvelcdb.json")).unwrap() } #[test] fn it_parses_all_cards() { let result = tokio_test::block_on(API::cards()); assert!(result.is_ok()); } #[test] fn it_searches_removing_dupes() { let cards = cards_from_fixtures(); let results: Vec<&Card> = API::search(&cards, "Enhanced Physique"); assert_eq!(results.len(), 1); } #[test] fn it_searches_doesnt_care_baout_case() { let cards = cards_from_fixtures(); let results: Vec<&Card> = API::search(&cards, "enhanced physique"); assert_eq!(results.len(), 1); } #[test] fn it_searches_for_dashed_names() { let cards = cards_from_fixtures(); let results: Vec<&Card> = API::search(&cards, "spider tracer"); assert_eq!(results.len(), 1); } }
true
530e1087c71b96f30282b371ecc6da8278261aa2
Rust
jduan/cosmos
/rust_sandbox/async_std_example/src/main.rs
UTF-8
2,994
3.4375
3
[ "MIT" ]
permissive
use async_std::task::{sleep, spawn, Context, Poll};
use pin_project_lite::pin_project;
use std::future::Future;
use std::pin::Pin;
use std::time::Duration;

// Tutorial-style demo: several equivalent ways of writing/composing futures.
#[async_std::main]
async fn main() {
    // Here we spawn 3 async functions. This shows that all of them run in
    // the same main thread. You can adjust the number of sleepus we spawn here
    // and confirm that the number of threads won't change.
    let sleepus1 = spawn(sleepus5());
    // let sleepus2 = spawn(sleepus());
    // let sleepus3 = spawn(sleepus());
    interruptus().await;

    sleepus1.await;
    // sleepus2.await;
    // sleepus3.await;
}

/// Prints ten messages, sleeping 500ms between each.
async fn sleepus() {
    for i in 1..=10 {
        println!("Sleepus {}", i);
        sleep(Duration::from_millis(500)).await;
    }
}

// This is identical to the sleepus() function above!
// (`async fn` is sugar for a fn returning `impl Future`.)
#[allow(dead_code)]
fn sleepus2() -> impl std::future::Future<Output = ()> {
    async {
        for i in 1..=10 {
            println!("Sleepus {}", i);
            sleep(Duration::from_millis(500)).await;
        }
    }
}

/// A hand-written future that is immediately ready with `()`.
struct DoNothing;

impl Future for DoNothing {
    type Output = ();

    fn poll(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Self::Output> {
        Poll::Ready(())
    }
}

// This is identical to the sleepus() function above!
#[allow(dead_code)]
fn sleepus3() -> impl std::future::Future<Output = ()> {
    async {
        for i in 1..=10 {
            println!("Sleepus {}", i);
            sleep(Duration::from_millis(500)).await;
        }
        // async_std::future::ready(())
        DoNothing.await
    }
}

// Awaiting an already-ready future completes immediately.
#[allow(dead_code)]
async fn sleepus4() {
    DoNothing.await
}

/// A future that sleeps for a certain amount of time and then prints a message.
struct SleepPrint<Fut> {
    sleep: Fut,
}

impl<Fut: Future<Output = ()>> Future for SleepPrint<Fut> {
    type Output = ();

    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        // SAFETY: manual pin projection of the `sleep` field. This is sound as
        // long as `sleep` is treated as structurally pinned — it is never moved
        // out of `self` here; we only poll it through the projected `Pin`.
        // (presumably safe for all instantiations — a `pin_project!` wrapper,
        // as used for `TwoFutures` below, would make this compiler-checked)
        let sleep: Pin<&mut Fut> = unsafe { self.map_unchecked_mut(|s| &mut s.sleep) };

        match sleep.poll(cx) {
            Poll::Pending => Poll::Pending,
            Poll::Ready(()) => {
                println!("Inside SleepPrint");
                Poll::Ready(())
            }
        }
    }
}

/// This wraps another future "sleep" inside. It finishes when the sleep future finishes.
fn sleepus5() -> impl Future<Output = ()> {
    SleepPrint {
        sleep: sleep(Duration::from_millis(3000)),
    }
}

// `pin_project!` generates safe pin projections for the annotated fields,
// replacing the manual `unsafe` projection used in `SleepPrint`.
pin_project! {
    struct TwoFutures<Fut1, Fut2> {
        first_done: bool,
        #[pin]
        first: Fut1,
        #[pin]
        second: Fut2,
    }
}

// fn sleepus6() -> impl Future<Output = ()> {
//     TwoFutures {
//         first_done: false,
//         first: sleep(Duration::from_millis(3000)),
//         second: async {
//             println!("Hello TwoFutures!");
//         },
//     }
// }

/// Prints five messages, sleeping 1s between each; runs concurrently with the
/// spawned sleepus task in `main`.
async fn interruptus() {
    for i in 1..=5 {
        println!("Interruptus {}", i);
        sleep(Duration::from_millis(1000)).await;
    }
}
true
6e4e2ec78d67aebe58aa3a1a606762f0fcff75d6
Rust
FernandoToigo/tetris-rs
/src/time.rs
UTF-8
1,035
2.96875
3
[]
no_license
use std::time::Instant;

/// A source of time: hands out instants that can later be compared.
pub trait Clock<T: ClockInstant> {
    /// Returns the current instant according to this clock.
    fn now(&self) -> T;
}

/// A point in time produced by a [`Clock`].
pub trait ClockInstant {
    /// Milliseconds elapsed from `self` to the (later) `other_instant`.
    fn difference_millis(&self, other_instant: &Self) -> u128;
}

/// Clock backed by the operating system via `std::time::Instant`.
pub struct StdClock { }

/// Instant captured from the real system clock.
pub struct StdInstant {
    instant: Instant
}

impl Clock<StdInstant> for StdClock {
    fn now(&self) -> StdInstant {
        let instant = Instant::now();
        StdInstant { instant }
    }
}

impl ClockInstant for StdInstant {
    fn difference_millis(&self, other_instant: &Self) -> u128 {
        other_instant.instant.duration_since(self.instant).as_millis()
    }
}

/// Clock whose "current time" is set by hand — handy for deterministic tests.
pub struct ManualClock {
    pub now_milliseconds: u128
}

/// Instant produced by a [`ManualClock`]: just a millisecond count.
pub struct ManualClockInstant {
    milliseconds: u128
}

impl Clock<ManualClockInstant> for ManualClock {
    fn now(&self) -> ManualClockInstant {
        ManualClockInstant { milliseconds: self.now_milliseconds }
    }
}

impl ClockInstant for ManualClockInstant {
    fn difference_millis(&self, other_instant: &Self) -> u128 {
        other_instant.milliseconds - self.milliseconds
    }
}
true
ad8aab0a8219840e78c968893b608eaae23ac030
Rust
YoungForest/leetcode-rust
/src/problems/problem1180.rs
UTF-8
596
3.296875
3
[]
no_license
struct Solution {}

impl Solution {
    /// LeetCode 1180: counts the substrings of `s` that consist of a single
    /// repeated letter.
    ///
    /// Each position extends the current run of identical characters by one,
    /// and a run of current length `k` contributes `k` new substrings ending
    /// there — so the total is the sum of run lengths at every position
    /// (equivalently, k*(k+1)/2 per maximal run).
    ///
    /// Fixes the original's dead `let mut ans = 0;` assignment and handles the
    /// empty string without a special case.
    pub fn count_letters(s: String) -> i32 {
        let mut total = 0;
        let mut run_len = 0;
        let mut prev = None;
        for b in s.bytes() {
            // Extend the run if the byte repeats, otherwise start a new run.
            run_len = if prev == Some(b) { run_len + 1 } else { 1 };
            prev = Some(b);
            total += run_len;
        }
        total
    }
}

pub fn main() {
    println!(
        "{}",
        Solution::count_letters("aaaba".to_string())
    );
}
true
f6a0c8a5f90fe61034414d2e8c6f5f30efe74fb4
Rust
chryslovelace/advent-of-code-2018
/src/bin/day06/main.rs
UTF-8
2,669
3.375
3
[]
no_license
use itertools::iproduct;
use lazy_static::lazy_static;
use scan_fmt::scan_fmt;
use std::{
    cmp::Ordering,
    collections::{HashMap, HashSet},
};

lazy_static! {
    // Puzzle input: one "x, y" coordinate per line, parsed once at startup.
    static ref COORDINATES: Vec<(i32, i32)> = include_str!("input.txt")
        .lines()
        .map(|line| {
            let (x, y) = scan_fmt!(line, "{d}, {d}", i32, i32);
            (x.unwrap(), y.unwrap())
        })
        .collect();
}

/// Manhattan (L1) distance between two grid points.
fn manhattan((x1, y1): (i32, i32), (x2, y2): (i32, i32)) -> i32 {
    (x1 - x2).abs() + (y1 - y2).abs()
}

trait IteratorExt: Iterator {
    /// Returns the only element of the iterator, or `None` if it yields
    /// zero or more than one element.
    fn single(self) -> Option<Self::Item>;
    /// Returns the element with the strictly smallest key, or `None` when the
    /// minimum is tied (or the iterator is empty).
    fn single_min_by_key<B, F>(self, f: F) -> Option<Self::Item>
    where
        B: Ord,
        F: FnMut(&Self::Item) -> B;
}

impl<I: Iterator> IteratorExt for I {
    fn single(mut self) -> Option<Self::Item> {
        self.next().and_then(|elem| {
            if self.next().is_none() {
                Some(elem)
            } else {
                None
            }
        })
    }

    fn single_min_by_key<B, F>(mut self, mut f: F) -> Option<Self::Item>
    where
        B: Ord,
        F: FnMut(&Self::Item) -> B,
    {
        let mut candidate = self.next();
        let mut candidate_key = candidate.as_ref().map(|item| f(item));
        for item in self {
            let key = f(&item);
            if let Some(ref mut candidate_key) = candidate_key {
                match key.cmp(&candidate_key) {
                    Ordering::Less => {
                        // Strictly smaller key: this item becomes the (unique) candidate.
                        *candidate_key = key;
                        candidate = Some(item);
                    }
                    Ordering::Equal => {
                        // Tie for the minimum: no single winner (a later,
                        // strictly smaller key can reinstate one).
                        candidate = None;
                    }
                    Ordering::Greater => {}
                }
            }
        }
        candidate
    }
}

/// Part 1: over a 500x500 grid, assign each cell to its uniquely closest
/// coordinate (ties claim nothing); coordinates owning a boundary cell have
/// unbounded areas and are excluded; print the largest remaining area.
fn part1() {
    // Coordinates whose region touches the grid edge (treated as infinite).
    let mut hull = HashSet::new();
    let mut areas = HashMap::new();
    for (i, j) in iproduct!(0..500, 0..500) {
        if let Some(closest) = COORDINATES
            .iter()
            .single_min_by_key(|&&point| manhattan(point, (i, j)))
        {
            if i == 0 || i == 499 || j == 0 || j == 499 {
                hull.insert(closest);
            }
            *areas.entry(closest).or_insert(0) += 1;
        }
    }
    let (_, area) = areas
        .iter()
        .filter(|(&point, _)| !hull.contains(point))
        .max_by_key(|(_, &area)| area)
        .unwrap();
    println!("{}", area);
}

/// Part 2: count grid cells whose total distance to all coordinates is
/// below 10000.
fn part2() {
    let safe_region_size = iproduct!(0..500, 0..500)
        .filter(|&a| COORDINATES.iter().map(|&b| manhattan(a, b)).sum::<i32>() < 10000)
        .count();
    println!("{}", safe_region_size);
}

fn main() {
    part1();
    part2();
}
true
83ea65ddacee172e0aa75ae9c0529671729b7416
Rust
slionx/rocket-demo
/src/controller/frontend/index.rs
UTF-8
1,079
2.625
3
[ "MIT" ]
permissive
use crate::response::demo::Demo;
use crate::request::api_user::ApiUser;
use rocket_contrib::databases::diesel;
use diesel::prelude::*;
use crate::models::{ApiUserSecret, InsertableApiUserSecret};
use crate::schema::*;
use std::error::Error;
use crate::connection::*;
use serde::Serialize;
use rocket_contrib::json::Json;

// GET /: inserts a hard-coded api_user_secret row and responds with "hello".
// NOTE(review): demo/scaffolding code — `result.unwrap()` panics the request
// on any database error; consider returning an error response instead.
#[get("/")]
pub fn index(connection: MysqlDatabase) -> String {
    let record = InsertableApiUserSecret {
        app_code: "app_code",
        app_secret: "app_secret",
    };
    let result = diesel::insert_into(api_user_secret::table)
        .values(&record)
        .execute(&(connection.0));
    println!("{:?}", result.unwrap());
    String::from("hello")
}

// GET /demo: returns a fixed Demo payload as JSON.
#[get("/demo")]
pub fn demo() -> Json<Demo> {
    Json(Demo {
        id: 1,
        title: String::from("hello"),
    })
}

// GET /counts: placeholder handler with no body yet.
#[get("/counts")]
pub fn counts() {

}

// GET /api: the `ApiUser` request guard must succeed for this route to run;
// the guard value itself is unused here.
#[get("/api")]
pub fn api(api_user: ApiUser) -> String {
    String::from("hello")
}

/*
#[get("/multi?<id>&<user_id>")]
fn get_multi(id: usize, user_id: usize) -> String {
    format!("id: {}, user_id: {}", id, user_id)
}*/
true
cb9db52bd59ac77893f94b1d1296877b44c25131
Rust
jfecher/ante
/src/types/effects.rs
UTF-8
8,009
2.78125
3
[ "MIT" ]
permissive
use crate::cache::{EffectInfoId, ModuleCache}; use crate::error::location::Location; use crate::types::typechecker::TypeBindings; use crate::types::Type; use crate::util::fmap; use super::typechecker::{self, OccursResult, UnificationBindings}; use super::{TypeBinding, TypeVariableId}; #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] pub struct EffectSet { pub effects: Vec<Effect>, pub replacement: TypeVariableId, } pub type Effect = (EffectInfoId, Vec<Type>); impl EffectSet { /// Create a new polymorphic effect set pub fn any(cache: &mut ModuleCache) -> EffectSet { EffectSet { effects: vec![], replacement: typechecker::next_type_variable_id(cache) } } pub fn single(id: EffectInfoId, args: Vec<Type>, cache: &mut ModuleCache) -> EffectSet { let mut set = EffectSet::any(cache); set.effects.push((id, args)); set } pub fn new(effects: Vec<(EffectInfoId, Vec<Type>)>, cache: &mut ModuleCache) -> EffectSet { let mut set = EffectSet::any(cache); set.effects = effects; set } pub fn follow_bindings<'a>(&'a self, cache: &'a ModuleCache) -> &'a Self { match &cache.type_bindings[self.replacement.0] { TypeBinding::Bound(Type::Effects(effects)) => effects.follow_bindings(cache), _ => self, } } pub fn follow_unification_bindings<'a>( &'a self, bindings: &'a UnificationBindings, cache: &'a ModuleCache, ) -> &'a Self { match &cache.type_bindings[self.replacement.0] { TypeBinding::Bound(Type::Effects(effects)) => effects.follow_unification_bindings(bindings, cache), _ => match bindings.bindings.get(&self.replacement) { Some(Type::Effects(effects)) => effects.follow_unification_bindings(bindings, cache), _ => self, }, } } pub fn replace_all_typevars_with_bindings(&self, new_bindings: &mut TypeBindings, cache: &mut ModuleCache) -> Type { if let TypeBinding::Bound(Type::Effects(effects)) = &cache.type_bindings[self.replacement.0] { return effects.clone().replace_all_typevars_with_bindings(new_bindings, cache); } let replacement = match new_bindings.get(&self.replacement) { 
Some(Type::TypeVariable(new_id)) => *new_id, Some(other) => return other.clone(), None => typechecker::next_type_variable_id(cache), }; let effects = fmap(&self.effects, |(id, args)| { (*id, fmap(args, |arg| typechecker::replace_all_typevars_with_bindings(arg, new_bindings, cache))) }); Type::Effects(EffectSet { effects, replacement }) } /// Replace any typevars found with the given type bindings /// /// Compared to `replace_all_typevars_with_bindings`, this function does not instantiate /// unbound type variables that were not in type_bindings. Thus if type_bindings is empty, /// this function will just clone the original EffectSet. pub fn bind_typevars(&self, type_bindings: &TypeBindings, cache: &ModuleCache) -> Type { // type_bindings is checked for bindings before the cache, see the comment // in typechecker::bind_typevar let replacement = match type_bindings.get(&self.replacement) { Some(Type::TypeVariable(new_id)) => *new_id, Some(other) => return other.clone(), None => self.replacement, }; if let TypeBinding::Bound(typ) = &cache.type_bindings[self.replacement.0] { return typechecker::bind_typevars(&typ.clone(), type_bindings, cache); } let effects = fmap(&self.effects, |(id, args)| { (*id, fmap(args, |arg| typechecker::bind_typevars(arg, type_bindings, cache))) }); Type::Effects(EffectSet { effects, replacement }) } pub fn try_unify_with_bindings( &self, other: &EffectSet, bindings: &mut UnificationBindings, cache: &mut ModuleCache, ) { let a = self.follow_unification_bindings(bindings, cache); let b = other.follow_unification_bindings(bindings, cache); let mut new_effects = a.effects.clone(); new_effects.append(&mut b.effects.clone()); new_effects.sort(); new_effects.dedup(); let a_id = a.replacement; let b_id = b.replacement; let new_effect = EffectSet::new(new_effects, cache); bindings.bindings.insert(a_id, Type::Effects(new_effect.clone())); bindings.bindings.insert(b_id, Type::Effects(new_effect)); } pub fn combine(&self, other: &EffectSet, cache: &mut 
ModuleCache) -> EffectSet { let a = self.follow_bindings(cache); let b = other.follow_bindings(cache); let mut new_effects = a.effects.clone(); new_effects.append(&mut b.effects.clone()); new_effects.sort(); new_effects.dedup(); let a_id = a.replacement; let b_id = b.replacement; let new_effect = EffectSet::new(new_effects, cache); cache.bind(a_id, Type::Effects(new_effect.clone())); cache.bind(b_id, Type::Effects(new_effect.clone())); new_effect } pub fn find_all_typevars(&self, polymorphic_only: bool, cache: &ModuleCache) -> Vec<super::TypeVariableId> { let this = self.follow_bindings(cache); let mut vars = typechecker::find_typevars_in_typevar_binding(this.replacement, polymorphic_only, cache); for (_, args) in &this.effects { for arg in args { vars.append(&mut typechecker::find_all_typevars(arg, polymorphic_only, cache)); } } vars } pub fn contains_any_typevars_from_list(&self, list: &[super::TypeVariableId], cache: &ModuleCache) -> bool { let this = self.follow_bindings(cache); list.contains(&this.replacement) || this .effects .iter() .any(|(_, args)| args.iter().any(|arg| typechecker::contains_any_typevars_from_list(arg, list, cache))) } pub(super) fn occurs( &self, id: super::TypeVariableId, level: super::LetBindingLevel, bindings: &mut UnificationBindings, fuel: u32, cache: &mut ModuleCache, ) -> OccursResult { let this = self.follow_bindings(cache).clone(); let mut result = typechecker::typevars_match(id, level, this.replacement, bindings, fuel, cache); for (_, args) in &this.effects { result = result.then_all(args, |arg| typechecker::occurs(id, level, arg, bindings, fuel, cache)); } result } /// Returns the set difference between self and other. 
pub(super) fn handle_effects_from(&self, other: EffectSet, cache: &mut ModuleCache) { let a = self.follow_bindings(cache).clone(); let b = other.follow_bindings(cache).clone(); let mut new_effects = Vec::with_capacity(a.effects.len()); for a_effect in a.effects.iter() { match find_matching_effect(a_effect, &b.effects, cache) { Ok(bindings) => bindings.perform(cache), Err(()) => new_effects.push(a_effect.clone()), } } let a_id = a.replacement; let new_effect = EffectSet::new(new_effects, cache); cache.bind(a_id, Type::Effects(new_effect)); } } fn find_matching_effect(effect: &Effect, set: &[Effect], cache: &mut ModuleCache) -> Result<UnificationBindings, ()> { let (effect_id, effect_args) = effect; for (other_id, other_args) in set { if effect_id == other_id { let bindings = UnificationBindings::empty(); let no_loc = Location::builtin(); let no_error = ""; if let Ok(bindings) = typechecker::try_unify_all_with_bindings(effect_args, other_args, bindings, no_loc, cache, no_error) { return Ok(bindings); } } } Err(()) }
true
b5be14377598a3b5e91cc14dea55006ae1754a16
Rust
solana-labs/rust-bpf-sysroot
/src/std/src/sys/sgx/waitqueue/spin_mutex/tests.rs
UTF-8
469
2.8125
3
[ "Apache-2.0" ]
permissive
#![allow(deprecated)] use super::*; use crate::sync::Arc; use crate::thread; use crate::time::Duration; #[test] fn sleep() { let mutex = Arc::new(SpinMutex::<i32>::default()); let mutex2 = mutex.clone(); let guard = mutex.lock(); let t1 = thread::spawn(move || { *mutex2.lock() = 1; }); thread::sleep(Duration::from_millis(50)); assert_eq!(*guard, 0); drop(guard); t1.join().unwrap(); assert_eq!(*mutex.lock(), 1); }
true
eebc9596b745e9e2ba3a0220924aecea36b659e0
Rust
storiqaamericanteam/Autotests
/functional-tests/src/config.rs
UTF-8
1,964
2.71875
3
[]
no_license
use std::env; use config_crate::{Config as RawConfig, ConfigError, Environment, File}; #[derive(Debug, Deserialize, Clone)] pub struct Config { pub gateway_microservice: GatewayMicroservice, pub users_microservice: Microservice, pub stores_microservice: Microservice, pub saga_microservice: SagaMicroservice, pub orders_microservice: Microservice, pub billing_microservice: Microservice, pub warehouses_microservice: Microservice, pub notifications_microservice: Microservice, pub delivery_microservice: Microservice, pub test_environment: Option<RunMode>, } #[derive(Debug, Deserialize, Clone)] pub struct Microservice { pub database_url: String, pub url: String, } #[derive(Debug, Deserialize, Clone)] pub struct GatewayMicroservice { pub url: String, } #[derive(Debug, Deserialize, Clone)] pub struct SagaMicroservice { pub url: String, } #[derive(Debug, Deserialize, Clone)] #[serde(tag = "type", rename_all = "camelCase")] pub enum RunMode { Local { graphql_url: String, }, Cluster { graphql_url: String, test_tools_url: String, }, } impl Config { /// Creates config from base.toml, which are overwritten by <env>.toml, where env is one of dev, /// k8s, nightly. After that it could be overwritten by env variables like STQ_FUNCTIONAL_TESTS /// (this will override `url` field in config). pub fn new() -> Result<Self, ConfigError> { // Optional file specific for environment let env = env::var("RUN_MODE").unwrap_or_else(|_| "dev".into()); Config::with_env(env) } pub fn with_env(env: impl Into<String>) -> Result<Self, ConfigError> { let mut s = RawConfig::new(); s.merge(File::with_name("config/base"))?; s.merge(File::with_name(&format!("config/{}", env.into())).required(false))?; s.merge(Environment::with_prefix("STQ_FUNCTIONAL_TESTS"))?; s.try_into() } }
true
19ef842f65baaddfb5f3948bba4c8995d69888d7
Rust
0xA537FD/binance-rs-async
/src/lib.rs
UTF-8
2,931
2.546875
3
[ "LicenseRef-scancode-warranty-disclaimer", "MIT", "Apache-2.0" ]
permissive
//! [![github]](https://github.com/Igosuki/binance-rs-async)&ensp;[![crates-io]](https://crates.io/Igosuki/binance-rs-async)&ensp;[![docs-rs]](https://docs.rs/binance-rs-async) //! //! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github //! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust //! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logoColor=white&logo=data:image/svg+xml;base64,PHN2ZyByb2xlPSJpbWciIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDUxMiA1MTIiPjxwYXRoIGZpbGw9IiNmNWY1ZjUiIGQ9Ik00ODguNiAyNTAuMkwzOTIgMjE0VjEwNS41YzAtMTUtOS4zLTI4LjQtMjMuNC0zMy43bC0xMDAtMzcuNWMtOC4xLTMuMS0xNy4xLTMuMS0yNS4zIDBsLTEwMCAzNy41Yy0xNC4xIDUuMy0yMy40IDE4LjctMjMuNCAzMy43VjIxNGwtOTYuNiAzNi4yQzkuMyAyNTUuNSAwIDI2OC45IDAgMjgzLjlWMzk0YzAgMTMuNiA3LjcgMjYuMSAxOS45IDMyLjJsMTAwIDUwYzEwLjEgNS4xIDIyLjEgNS4xIDMyLjIgMGwxMDMuOS01MiAxMDMuOSA1MmMxMC4xIDUuMSAyMi4xIDUuMSAzMi4yIDBsMTAwLTUwYzEyLjItNi4xIDE5LjktMTguNiAxOS45LTMyLjJWMjgzLjljMC0xNS05LjMtMjguNC0yMy40LTMzLjd6TTM1OCAyMTQuOGwtODUgMzEuOXYtNjguMmw4NS0zN3Y3My4zek0xNTQgMTA0LjFsMTAyLTM4LjIgMTAyIDM4LjJ2LjZsLTEwMiA0MS40LTEwMi00MS40di0uNnptODQgMjkxLjFsLTg1IDQyLjV2LTc5LjFsODUtMzguOHY3NS40em0wLTExMmwtMTAyIDQxLjQtMTAyLTQxLjR2LS42bDEwMi0zOC4yIDEwMiAzOC4ydi42em0yNDAgMTEybC04NSA0Mi41di03OS4xbDg1LTM4Ljh2NzUuNHptMC0xMTJsLTEwMiA0MS40LTEwMi00MS40di0uNmwxMDItMzguMiAxMDIgMzguMnYuNnoiPjwvcGF0aD48L3N2Zz4K //! //! <br> //! //! This library provides access to all of Binance's APIs using . //! [`async/std`]: https://docs.rs/async-std/1.9.0/async_std/ //! //! <br> //! //! # Risk Warning //! //! It is a personal project, use at your own risk. I will not be responsible for your investment losses. //! Cryptocurrency investment is subject to high market risk. //! Nonetheless, this crate is aimed at high performance and production use. //! //! # Example //! //! 
This example simply pings the main binance api //! //! ```rust //! # use std::io; //! use binance::general::General; //! use binance::api::Binance; //! use binance::errors::Error as BinanceLibError; //! //! async fn main() -> std::io::Result<()> { //! let general: General = Binance::new(None, None); //! let ping = general.ping().await; //! match ping { //! Ok(answer) => println!("{:?}", answer), //! Err(err) => { //! match err { //! BinanceLibError::BinanceError { response } => match response.code { //! -1000_i16 => println!("An unknown error occured while processing the request"), //! _ => println!("Unknown code {}: {}", response.code, response.msg), //! }, //! _ => println!("Other errors: {}.", err), //! }; //! } //! } //! Ok(()) //! } //! ``` //! //! <br> //! //! # Details //! //! - Credentials are not enforced, you will get authentication errors if you don't provide //! credentials and they are required by an endpoint //! //! - Error codes are handled on a best effort basis as some are inconsistent and not even //! documented on Binance's side //! //! - Errors are implemented using [![thiserror]](https://docs.rs/thiserror/1.0.25/thiserror/) //! #![deny(unstable_features, unused_must_use, unused_mut, unused_imports, unused_import_braces)] extern crate hex; extern crate reqwest; extern crate ring; #[macro_use] extern crate serde; extern crate serde_json; extern crate serde_qs as qs; extern crate thiserror; extern crate tungstenite; extern crate url; mod client; pub mod errors; mod util; pub mod account; pub mod api; pub mod config; pub mod futures; pub mod general; pub mod margin; pub mod market; pub mod rest_model; pub mod savings; pub mod userstream; pub mod websockets; pub mod ws_model;
true
30e0adbb9bb0667736f2993d4a734a012f911123
Rust
gh0stl1m/rust-basics
/4.VariableBindings/4.2.scopeAndShadowing.rs
UTF-8
1,562
3.6875
4
[]
no_license
/** * Variable bindings have a scope, and are constrained to live in a block. * A block is a collection of statements enclosed by braces {}. Also, variable shadowing is allowed. * * It's possible to declare variable bindings first, and initialize them later. However, this form is seldom used, as it may lead to the use of uninitialized variables. */ fn main() { // This binding lives in the main function let long_lived_binding = 1; // This is a block, and has a smaller scope than the main function { // This binding only exists in this block let short_lived_binding = 2; println!("inner short: {}", short_lived_binding); // This binding *shadows* the outer one let long_lived_binding = 5_f32; println!("inner long: {}", long_lived_binding); } // End of the block // Error! `short_lived_binding` doesn't exist in this scope // println!("outer short: {}", short_lived_binding); // FIXME ^ Comment out this line println!("outer long: {}", long_lived_binding); // This binding also *shadows* the previous binding let long_lived_binding = 'a'; println!("outer long: {}", long_lived_binding); // Declare a variable binding let a_binding; { let x = 2; // Initialize the binding a_binding = x * x; } println!("a binding: {}", a_binding); let another_binding; // Error! Use of uninitialized binding // println!("another binding: {}", another_binding); // FIXME ^ Comment out this line another_binding = 1; println!("another binding: {}", another_binding); }
true
d1f482fa45e07b2c29af57f3f2bbc438e7e13855
Rust
magurotuna/leetcode-rust
/src/bin/1383.rs
UTF-8
1,945
3.375
3
[]
no_license
struct Solution; impl Solution { pub fn max_performance(n: i32, speed: Vec<i32>, efficiency: Vec<i32>, k: i32) -> i32 { use std::cmp::{max, Reverse}; use std::collections::BinaryHeap; let mut bh = BinaryHeap::new(); let MOD = 1_000_000_007; let mut se: Vec<(i64, i64)> = speed .into_iter() .zip(efficiency.into_iter()) .map(|(a, b)| (a as i64, b as i64)) .collect(); se.sort_unstable_by(|a, b| b.1.cmp(&a.1)); // sort by efficiency let mut ret = 0i64; let mut sum_speed = 0i64; for i in 0..n as usize { if bh.len() < k as usize { bh.push(Reverse(se[i].0)); // min heap sum_speed += se[i].0; } else { let Reverse(min_in_heap) = bh.peek().unwrap(); // if the minimum value in the heap is less than current worker's speed, then update sum_speed and the heap if min_in_heap < &se[i].0 { let Reverse(p) = bh.pop().unwrap(); bh.push(Reverse(se[i].0)); sum_speed -= p; sum_speed += se[i].0; } } ret = max(ret, se[i].1 * sum_speed); } (ret % MOD) as i32 } } fn main() { Solution::max_performance(6, vec![2, 10, 3, 1, 5, 8], vec![5, 4, 3, 9, 7, 2], 2); () } #[cfg(test)] mod tests { use super::*; #[test] fn test_max_performance() { assert_eq!( Solution::max_performance(6, vec![2, 10, 3, 1, 5, 8], vec![5, 4, 3, 9, 7, 2], 2), 60 ); assert_eq!( Solution::max_performance(6, vec![2, 10, 3, 1, 5, 8], vec![5, 4, 3, 9, 7, 2], 3), 68 ); assert_eq!( Solution::max_performance(6, vec![2, 10, 3, 1, 5, 8], vec![5, 4, 3, 9, 7, 2], 4), 72 ); } }
true
641f1d23d0c2fc0c78e029e5f80bd7e7f3c4cc0c
Rust
ipoemigmail/rust-study
/multi-ssh/src/main.rs
UTF-8
2,593
2.8125
3
[]
no_license
use std::{error::Error, string}; use std::fs::File; use std::io::prelude::*; use std::io::BufReader; use std::env; use futures::{FutureExt, stream::{self, StreamExt}}; use openssh::{KnownHosts, Session}; use serde::{Deserialize, Serialize}; #[derive(Debug)] enum MyError { OpenSshError(openssh::Error), JoinError(tokio::task::JoinError), FromUtf8Error(string::FromUtf8Error), } impl From<openssh::Error> for MyError { fn from(e: openssh::Error) -> Self { MyError::OpenSshError(e) } } impl From<tokio::task::JoinError> for MyError { fn from(e: tokio::task::JoinError) -> Self { MyError::JoinError(e) } } impl From<string::FromUtf8Error> for MyError { fn from(e: string::FromUtf8Error) -> Self { MyError::FromUtf8Error(e) } } #[derive(Debug, PartialEq, Serialize, Deserialize)] struct Hosts { hosts: Vec<String>, } #[tokio::main] async fn main() -> Result<(), Box<dyn Error>> { let args: Vec<String> = env::args().collect(); //let path = "/Users/ben.jeong/Develop/Works/story/story-deploy/projects/story-app-http/hosts/production.yml"; //let path = "/tmp/common.yml"; if args.len() < 2 { eprintln!("Usage: {} {{host-yaml-file-path}} cmd", args[0]); std::process::exit(-1); } let path = args[1].as_str(); let cmd = args[2].as_str(); let hosts = get_hosts(path)?; let fibers = hosts .iter() .map(move |host| run_cmd(host.clone(), cmd.to_string()).map(move |x| x.map(|y| (host.clone(), y)))) .collect::<Vec<_>>(); let results: Vec<_> = stream::iter(fibers) .then(|f| async move { f.await.unwrap() }) .collect() .await; results.iter().for_each(|line| { println!("============================== {} ==============================", line.0); println!("{}", line.1) }); Ok(()) } fn get_hosts(path: &str) -> Result<Vec<String>, Box<dyn Error>> { let file = File::open(path)?; let mut buf_reader = BufReader::new(file); let mut contents = String::new(); buf_reader.read_to_string(&mut contents)?; let hosts: Hosts = serde_yaml::from_str(&contents)?; let result: Vec<_> = hosts.hosts.into_iter().map(|x| 
x).collect(); Ok(result) } async fn run_cmd(cmd: String, host: String) -> Result<String, MyError> { let handler = tokio::spawn(async move { let session = Session::connect(host.clone(), KnownHosts::Accept).await?; let child = session.shell(cmd).output().await?; let r = String::from_utf8(child.stdout)?; Ok(r) }); handler.await? }
true
9be8fc8ea2155817c021319c9ce853d0cb3f3286
Rust
lexs/rust-os
/rost/drivers/vga.rs
UTF-8
2,889
2.859375
3
[]
no_license
use core::prelude::*; use core::intrinsics::volatile_store; use arch::io; bitflags!( #[packed] flags Color: u8 { static BRIGHT = 1 << 3, static BLACK = 0, static RED = 1 << 2, static GREEN = 1 << 1, static BLUE = 1 << 0, static CYAN = BLUE.bits | GREEN.bits, static MAGENTA = BLUE.bits | RED.bits, static BROWN = GREEN.bits | RED.bits, static WHITE = BLUE.bits | GREEN.bits | RED.bits } ) #[packed] struct Character { char: u8, attr: u8 } impl Character { #[inline] fn make(c: char, fg: Color, bg: Color) -> Character { Character { char: c as u8, attr: fg.bits() | bg.bits() << 4 } } } pub static ROWS: uint = 25; pub static COLS: uint = 80; static screen: *mut Character = 0xb8000 as *mut Character; static mut cursor_x: uint = 0; static mut cursor_y: uint = 0; pub fn puts(s: &str) { for c in s.chars() { unsafe { do_putch(c); } } unsafe { update_cursor() } } pub fn putch(c: char) { unsafe { if cursor_y > ROWS { clear_screen(); } do_putch(c); update_cursor(); } } pub fn clear_screen() { for x in range(0, COLS) { for y in range(0, ROWS) { unsafe { write(y, x, Character::make(' ', WHITE, BLACK)); } } } move_cursor(0, 0); } pub fn get_cursor() -> (uint, uint) { unsafe { (cursor_x, cursor_y) } } pub fn move_cursor(x: uint, y: uint) { unsafe { cursor_x = x; cursor_y = y; update_cursor(); } } static mut fg: Color = WHITE; static mut bg: Color = BLACK; pub fn set_color(_fg: Color, _bg: Color) { unsafe { fg = _fg; bg = _bg; } } unsafe fn do_putch(c: char) { match c { '\n' => newline(), '\t' => tab(), '\u0008' => backspace(), _ => { write(cursor_y, cursor_x, Character::make(c, fg, bg)); forward_cursor(1); } } } unsafe fn tab() { forward_cursor(4 - (cursor_x + 4) % 4); } unsafe fn backspace() { if cursor_x != 0 { cursor_x -= 1; } else if cursor_y != 0 { cursor_x = COLS - 1; cursor_y -= 1; } write(cursor_y, cursor_x, Character::make(' ', WHITE, BLACK)); } #[inline] unsafe fn write(y: uint, x: uint, c: Character) { let offset = y * COLS + x; volatile_store(screen.offset(offset as int), 
c); } unsafe fn forward_cursor(steps: uint) { cursor_x += steps; while cursor_x >= COLS { cursor_x -= COLS; cursor_y += 1; } } unsafe fn newline() { cursor_x = 0; cursor_y += 1; } unsafe fn update_cursor() { let position = cursor_y * COLS + cursor_x; io::write_port(0x3D4, 0x0F); io::write_port(0x3D5, position as u8); io::write_port(0x3D4, 0x0E); io::write_port(0x3D5, (position >> 8) as u8); }
true
5ee7ce4e5bae47fe5da72ee23ad9c265e8bb559e
Rust
servo/green-rs
/librustuv/tests/udp.rs
UTF-8
7,210
2.65625
3
[ "Apache-2.0", "MIT" ]
permissive
use std::io::net::ip::{SocketAddr, Ipv4Addr}; use std::io::test::{next_test_ip4, next_test_ip6}; use std::time::Duration; use green::task::spawn; use rustuv::{uvll, Udp}; test!(fn bind_error() { let addr = SocketAddr { ip: Ipv4Addr(0, 0, 0, 0), port: 1 }; match Udp::bind(addr) { Ok(..) => panic!(), Err(e) => assert_eq!(e.code(), uvll::EACCES), } }) test!(fn socket_smoke_test_ip4() { let server_ip = next_test_ip4(); let client_ip = next_test_ip4(); let (tx1, rx1) = channel(); let (tx2, rx2) = channel(); spawn(proc() { match Udp::bind(client_ip) { Ok(ref mut client) => { rx1.recv(); client.send_to(&mut [99], server_ip).unwrap() } Err(..) => panic!() } tx2.send(()); }); match Udp::bind(server_ip) { Ok(ref mut server) => { tx1.send(()); let mut buf = [0]; match server.recv_from(&mut buf) { Ok((nread, src)) => { assert_eq!(nread, 1); assert_eq!(buf[0], 99); assert_eq!(src, client_ip); } Err(..) => panic!() } } Err(..) => panic!() } rx2.recv(); }) test!(fn socket_smoke_test_ip6() { let server_ip = next_test_ip6(); let client_ip = next_test_ip6(); let (tx, rx) = channel::<()>(); spawn(proc() { match Udp::bind(client_ip) { Ok(ref mut client) => { rx.recv(); client.send_to(&mut [99], server_ip).unwrap() } Err(..) => panic!() } }); match Udp::bind(server_ip) { Ok(ref mut server) => { tx.send(()); let mut buf = [0]; match server.recv_from(&mut buf) { Ok((nread, src)) => { assert_eq!(nread, 1); assert_eq!(buf[0], 99); assert_eq!(src, client_ip); } Err(..) => panic!() } } Err(..) => panic!() } }) test!(fn stream_smoke_test_ip4() { let server_ip = next_test_ip4(); let client_ip = next_test_ip4(); let (tx1, rx1) = channel(); let (tx2, rx2) = channel(); spawn(proc() { match Udp::bind(client_ip) { Ok(mut client) => { rx1.recv(); client.send_to(&mut [99], server_ip).unwrap(); } Err(..) 
=> panic!() } tx2.send(()); }); match Udp::bind(server_ip) { Ok(mut server) => { tx1.send(()); let mut buf = [0]; match server.recv_from(&mut buf) { Ok((nread, _)) => { assert_eq!(nread, 1); assert_eq!(buf[0], 99); } Err(..) => panic!() } } Err(..) => panic!() } rx2.recv(); }) pub fn socket_name(addr: SocketAddr) { let server = Udp::bind(addr); assert!(server.is_ok()); let mut server = server.unwrap(); // Make sure socket_name gives // us the socket we binded to. let so_name = server.socket_name(); assert!(so_name.is_ok()); assert_eq!(addr, so_name.unwrap()); } test!(fn socket_name_ip4() { socket_name(next_test_ip4()); }) test!(fn socket_name_ip6() { socket_name(next_test_ip6()); }) test!(fn udp_clone_smoke() { let addr1 = next_test_ip4(); let addr2 = next_test_ip4(); let mut sock1 = Udp::bind(addr1).unwrap(); let sock2 = Udp::bind(addr2).unwrap(); spawn(proc() { let mut sock2 = sock2; let mut buf = [0, 0]; assert_eq!(sock2.recv_from(&mut buf), Ok((1, addr1))); assert_eq!(buf[0], 1); sock2.send_to(&mut [2], addr1).unwrap(); }); let sock3 = sock1.clone(); let (tx1, rx1) = channel(); let (tx2, rx2) = channel(); spawn(proc() { let mut sock3 = sock3; rx1.recv(); sock3.send_to(&mut [1], addr2).unwrap(); tx2.send(()); }); tx1.send(()); let mut buf = [0, 0]; assert_eq!(sock1.recv_from(&mut buf), Ok((1, addr2))); rx2.recv(); }) test!(fn udp_clone_two_read() { let addr1 = next_test_ip4(); let addr2 = next_test_ip4(); let mut sock1 = Udp::bind(addr1).unwrap(); let sock2 = Udp::bind(addr2).unwrap(); let (tx1, rx) = channel(); let tx2 = tx1.clone(); spawn(proc() { let mut sock2 = sock2; sock2.send_to(&mut [1], addr1).unwrap(); rx.recv(); sock2.send_to(&mut [2], addr1).unwrap(); rx.recv(); }); let sock3 = sock1.clone(); let (done, rx) = channel(); spawn(proc() { let mut sock3 = sock3; let mut buf = [0, 0]; sock3.recv_from(&mut buf).unwrap(); tx2.send(()); done.send(()); }); let mut buf = [0, 0]; sock1.recv_from(&mut buf).unwrap(); tx1.send(()); rx.recv(); }) test!(fn 
udp_clone_two_write() { let addr1 = next_test_ip4(); let addr2 = next_test_ip4(); let mut sock1 = Udp::bind(addr1).unwrap(); let sock2 = Udp::bind(addr2).unwrap(); let (tx, rx) = channel(); let (serv_tx, serv_rx) = channel(); spawn(proc() { let mut sock2 = sock2; let mut buf = [0, 1]; rx.recv(); match sock2.recv_from(&mut buf) { Ok(..) => {} Err(e) => panic!("failed receive: {}", e), } serv_tx.send(()); }); let sock3 = sock1.clone(); let (done, rx) = channel(); let tx2 = tx.clone(); spawn(proc() { let mut sock3 = sock3; match sock3.send_to(&mut [1], addr2) { Ok(..) => { let _ = tx2.send_opt(()); } Err(..) => {} } done.send(()); }); match sock1.send_to(&mut [2], addr2) { Ok(..) => { let _ = tx.send_opt(()); } Err(..) => {} } drop(tx); rx.recv(); serv_rx.recv(); }) test!(fn recv_from_timeout() { let addr1 = next_test_ip4(); let addr2 = next_test_ip4(); let mut a = Udp::bind(addr1).unwrap(); let (tx, rx) = channel(); let (tx2, rx2) = channel(); spawn(proc() { let mut a = Udp::bind(addr2).unwrap(); assert_eq!(a.recv_from(&mut [0]), Ok((1, addr1))); assert_eq!(a.send_to(&mut [0], addr1), Ok(())); rx.recv(); assert_eq!(a.send_to(&mut [0], addr1), Ok(())); tx2.send(()); }); // Make sure that reads time out, but writes can continue a.set_read_timeout(Some(Duration::milliseconds(20))); assert_eq!(a.recv_from(&mut [0]).err().unwrap().code(), uvll::ECANCELED); assert_eq!(a.recv_from(&mut [0]).err().unwrap().code(), uvll::ECANCELED); assert_eq!(a.send_to(&mut [0], addr2), Ok(())); // Cloned handles should be able to block let mut a2 = a.clone(); assert_eq!(a2.recv_from(&mut [0]), Ok((1, addr2))); // Clearing the timeout should allow for receiving a.set_read_timeout(None); tx.send(()); assert_eq!(a2.recv_from(&mut [0]), Ok((1, addr2))); // Make sure the child didn't die rx2.recv(); })
true
5689604732369d54af610ca37f17051fdb1575ba
Rust
cnsr/leetcode
/414/lc414.rs
UTF-8
319
2.609375
3
[]
no_license
impl Solution { pub fn third_max(mut nums: Vec<i32>) -> i32 { use std::collections::HashSet; let set: HashSet<_> = nums.drain(..).collect(); // dedup nums.extend(set.into_iter()); nums.sort(); nums.reverse(); return if nums.len() < 3 {nums[0]} else {nums[2]} } }
true
5b263af972eb100ae9fd7523eb82f66acb8abd59
Rust
AndreyMakuha/math_modeling
/helper_fn/src/lib.rs
UTF-8
10,300
2.625
3
[ "MIT" ]
permissive
use pyo3::prelude::*; use pyo3::types::PyDict; use std::f64; use std::fs::File; use std::io::{self, prelude::*}; use rand::Rng; use std::fmt::Write; // Теоретические частоты попадания случайной величины в интервалы fn laplass(mw: f64, sd: f64, x: f64) -> PyResult<f64> { let gil = Python::acquire_gil(); let py = gil.python(); let locals = PyDict::new(py); locals.set_item("stats", py.import("scipy.stats")?)?; let res = py.eval(&format!("stats.norm.cdf({})", (x - mw) / sd), None, Some(&locals))?; let res: f64 = res.to_string().parse::<f64>().unwrap(); Ok(res) } pub fn laplace(mw: f64, sd: f64, x: f64) -> f64 { laplass(mw, sd, x).unwrap() } fn isf(q: f64, dfn: f64, dfd: f64) -> PyResult<f64> { let gil = Python::acquire_gil(); let py = gil.python(); let locals = PyDict::new(py); locals.set_item("stats", py.import("scipy.stats")?)?; let res = py.eval(&format!("stats.f.isf({}, {}, {})", q, dfn, dfd), None, Some(&locals))?; let res: f64 = if res.to_string() == "nan" { f64::NAN } else { res.to_string().parse::<f64>().unwrap() }; Ok(res) } pub fn is_f(q: f64, dfn: f64, dfd: f64) -> f64 { isf(q, dfn, dfd).unwrap() } fn chi2_isf(q: f64, dfn: f64) -> PyResult<f64> { let gil = Python::acquire_gil(); let py = gil.python(); let locals = PyDict::new(py); locals.set_item("stats", py.import("scipy.stats")?)?; let res = py.eval(&format!("stats.chi2.isf({}, {})", q, dfn), None, Some(&locals))?; let res: f64 = if res.to_string() == "nan" { f64::NAN } else { res.to_string().parse::<f64>().unwrap() }; Ok(res) } pub fn chi2_is_f(q: f64, dfn: f64) -> f64 { chi2_isf(q, dfn).unwrap() } pub fn math_waiting(nums: &Vec<f64>) -> f64 { nums.iter().fold(0f64, |acc, a| acc + a) / nums.len() as f64 } pub fn standart_deviation(nums: &Vec<f64>) -> f64 { let mw = math_waiting(nums); let res = nums.iter().fold(0f64, |acc, a| acc + (a - mw).powi(2)); (res / (nums.len() - 1) as f64).sqrt() } pub fn range(nums: &Vec<f64>) -> f64 { let f = |x: &&f64, y: &&f64| x.partial_cmp(y).unwrap(); 
nums.iter().max_by(f).unwrap() - nums.iter().min_by(f).unwrap() } pub fn sturges(nums: &Vec<f64>) -> f64 { range(nums) / (1 as f64 + 3.22 * (nums.len() as f64).log10()) } pub fn coordinates_of_intervals(nums: &Vec<f64>) -> Vec<f64> { let mut coords: Vec<f64> = Vec::new(); let min_val = *nums.iter().min_by(|x, y| x.partial_cmp(y).unwrap()).unwrap(); let sturges_val = sturges(nums); for i in 0..=nums.len() { coords.push(min_val + i as f64 * sturges_val); } coords } pub fn asymmetry_coefficient(nums: &Vec<f64>) -> f64 { let mw = math_waiting(nums); let numerator = nums.iter().fold(0f64, |acc, x| acc + (x - mw).powi(3)); let denominator = (nums.len() - 1) as f64 * standart_deviation(nums).powi(3); numerator / denominator } // Коэффицент асимметрии А служит для характеристики распределения pub fn skewness(nums: &Vec<f64>) -> f64 { let mw = math_waiting(nums); let numerator = (1f64 / nums.len() as f64) * nums.iter().fold(0f64, |acc, x| acc + (x - mw).powi(3)); let denominator = ((1f64 / (nums.len() as f64)) * nums.iter().fold(0f64, |acc, x| acc + (x - mw).powi(2))).powf(3f64 / 2f64); // println!("numerator: {} denominator: {}", numerator, denominator); if denominator == 0.0 { 0.0 } else { numerator / denominator } } pub fn excess_coefficient(nums: &Vec<f64>) -> f64 { let mw = math_waiting(nums); let numerator = nums.iter().fold(0f64, |acc, x| acc + (x - mw).powi(4)); let denominator = (nums.len() - 1) as f64 * standart_deviation(nums).powi(4); println!("numerator: {} denominator: {}", numerator, denominator); if denominator == 0f64 { 0f64 } else { numerator / denominator - 3f64 } } // Коэфицент эксцесса E служит для характеристики крутости, т.е. 
остовершинности распределения pub fn kurtosis(nums: &Vec<f64>) -> f64 { let mw = math_waiting(nums); let numerator = (1f64 / nums.len() as f64) * nums.iter().fold(0f64, |acc, x| acc + (x - mw).powi(4)); let denominator = ((1f64 / (nums.len() as f64)) * nums.iter().fold(0f64, |acc, x| acc + (x - mw).powi(2))).powi(2); if denominator == 0f64 { 0f64 } else { numerator / denominator - 3f64 } } pub fn emperical_law(nums: &Vec<f64>) -> Vec<((f64, f64), f64)> { let mut emperical_arr = Vec::new(); let coords = coordinates_of_intervals(nums); let len = nums.len() as f64; for i in 0..coords.len() - 1 { let mut hits = 0; for j in nums { if *j >= coords[i] && *j < coords[i + 1] { hits += 1; } } emperical_arr.push(((coords[i], coords[i + 1]), (hits as f64 / len))); } emperical_arr } pub fn statical_distribution_function(nums: &Vec<f64>) -> (Vec<f64>, Vec<f64>) { let emp = emperical_law(&nums); let x = coordinates_of_intervals(nums); let mut y = vec![0.0]; for i in 0..x.len() - 1 { y.push(emp[i].1 + y[i]); } (x, y) } pub fn pierson_criteria(nums: &Vec<f64>) -> f64 { let sdf = statical_distribution_function(nums).1; let mut res = 0.0; let len = nums.len() as f64; for i in 0..nums.len() { if sdf[i] == 0.0 { continue; } res += (nums[i] - len * sdf[i]).powi(2) / (len * sdf[i]) } res } pub fn hypotesis_check(nums: &Vec<f64>, signnificance_level: f64) -> String { let pirs = pierson_criteria(nums).powf(0.5); let table_value = chi2_is_f(signnificance_level, (nums.len() - 1) as f64); println!("X = {:e}, X**2k,2 = {}", pirs, table_value); if pirs < table_value { format!("Так как X < X**2k,a, распределение нормальное") } else { format!("Так как X > X**2k,a, распределение не нормальное") } } pub fn hit_chances(nums: &Vec<f64>) -> Vec<f64> { let mw = math_waiting(&nums); let sd = standart_deviation(&nums); let intervals = coordinates_of_intervals(&nums); let mut p = Vec::new(); for i in 0..intervals.len() - 1 { p.push(laplace(mw, sd, intervals[i + 1]) - laplace(mw, sd, intervals[i])) } p } 
pub fn join(a: &Vec<f64>) -> String { let s: String = a.iter().skip(1).fold(String::new(),|mut s,&n| {write!(s,", {}",n).ok(); s}); format!("{}{}", a[0], s) } pub fn scan<T: std::str::FromStr>() -> Result<T, <T as std::str::FromStr>::Err> { let mut line = String::new(); let stdin = io::stdin(); stdin.lock().read_line(&mut line).unwrap(); line.trim().parse::<T>() } pub fn fill_sequence() -> Vec<f64> { println!("1. Автоматическое заполнение"); println!("2. Ручное заполнение"); println!("3. Загрузить последовательность"); let mut nums: Vec<f64> = Vec::new(); let mut line = String::new(); let stdin = io::stdin(); stdin.lock().read_line(&mut line).unwrap(); let mut is_save_needed = false; match line.trim().parse::<i64>() { Ok(1) => { println!("Введите количество значений: "); line.clear(); stdin.lock().read_line(&mut line).unwrap(); // println!("{:?}", line); let len = line.trim().parse::<i64>().unwrap(); println!("Определите интервал появления значений. ОТ: "); line.clear(); stdin.lock().read_line(&mut line).unwrap(); let answer_from = line.trim().parse::<i64>().unwrap(); println!("Определите интервал появления значений. 
ДО: "); line.clear(); stdin.lock().read_line(&mut line).unwrap(); let answer_to = line.trim().parse::<i64>().unwrap(); let mut rng = rand::thread_rng(); for _ in 0..len { nums.push(rng.gen_range(answer_from as f64, answer_to as f64)); } is_save_needed = true; }, Ok(2) => { println!("Введите значания (через запятую и пробел):"); line.clear(); stdin.lock().read_line(&mut line).unwrap(); for i in line.trim().split(", ") { // println!("{:?}", i); nums.push(i.parse::<f64>().unwrap()); } is_save_needed = true }, Ok(3) => { println!("Введите имя файла:"); line.clear(); stdin.lock().read_line(&mut line).unwrap(); println!("{:?}", line.trim()); let mut f = File::open(line.trim()).expect("file not found"); let mut contents = String::new(); f.read_to_string(&mut contents) .expect("something went wrong reading the file"); for i in contents.trim().split(", ") { // println!("{:?}", i); nums.push(i.parse::<f64>().unwrap()); } }, Ok(i) => { println!("Было введено не правилное число {}", i); }, Err(_) => { panic!("error!"); }, } if is_save_needed { println!("Сохранить последовательность? 1 - Да, 2 - Нет: "); line.clear(); stdin.lock().read_line(&mut line).unwrap(); if line.trim().parse::<i32>().unwrap() == 1 { println!("Введите название последовательности: "); line.clear(); stdin.lock().read_line(&mut line).unwrap(); let mut file = match File::create(&line.trim()) { Err(_) => panic!("couldn't create"), Ok(file) => file, }; { use std::io::Write; match file.write_all(join(&nums).as_bytes()) { Err(_) => panic!(), Ok(_) => {}, } } } } nums }
true
9bb872b7a2bb76c599ab5b6d6548c04c35057afa
Rust
stanleydesu/rust-raytracing
/src/bin/blue_spheres.rs
UTF-8
4,631
2.5625
3
[ "MIT" ]
permissive
use raytracing::{ rand_f64, rand_in_range, write_sampled_color, Camera, Color, Dieletric, Hittable, HittableList, Lambertian, Material, Metal, Point3, Ray, Sphere, Vec3, }; use std::rc::Rc; fn random_scene() -> HittableList { let mut world = HittableList::default(); let ground_material = Rc::new(Lambertian::new(Color::new(0.5, 0.5, 0.5))); world.add(Rc::new(Sphere::new( Point3::new(0.0, -1000.0, 0.0), 1000.0, ground_material, ))); let metal_mat = Rc::new(Metal::new(Color::new(1.0, 1.0, 1.0), 0.0)); let metal_p = Point3::new(-4.0, 1.0, 0.5); world.add(Rc::new(Sphere::new(metal_p, 1.0, metal_mat))); let glass_mat = Rc::new(Dieletric::new(1.5)); let glass_p = Point3::new(5.0, 1.0, 0.5); world.add(Rc::new(Sphere::new(glass_p, 1.0, glass_mat.clone()))); let glass_hollow = Point3::new(5.0, 1.0, 0.5); world.add(Rc::new(Sphere::new(glass_hollow, -0.9, glass_mat.clone()))); let lamber_mat = Rc::new(Lambertian::new(Color::new(0.2, 0.2, 0.2))); let lamber_p = Point3::new(-1.0, 1.0, -3.0); world.add(Rc::new(Sphere::new(lamber_p, 1.0, lamber_mat))); let colors = vec![ Color::new(3., 4., 94.) / 255., Color::new(2., 62., 138.) / 255., Color::new(0., 119., 182.) / 255., Color::new(0., 150., 199.) / 255., Color::new(0., 180., 216.) / 255., Color::new(72., 202., 228.) / 255., Color::new(144., 224., 239.) / 255., Color::new(173., 232., 244.) / 255., Color::new(202., 240., 248.) 
/ 255., ]; for a in -11..11 { for b in -11..11 { let choose_mat = rand_f64(); let center = Point3::new( a as f64 + 0.7 * rand_f64(), 0.2, b as f64 + 0.7 * rand_f64(), ); let can_spawn = (center - glass_p).length() > 1.2 && (center - lamber_p).length() > 1.2 && (center - metal_p).length() > 1.2; if can_spawn { let sphere_material: Rc<dyn Material>; if choose_mat < 0.8 { // diffuse let albedo = colors[(rand_in_range(0.0, colors.len() as f64)) as usize]; sphere_material = Rc::new(Lambertian::new(albedo)); } else { // glass sphere_material = glass_mat.clone(); } world.add(Rc::new(Sphere::new(center, 0.2, sphere_material))); } } } world } fn ray_color(r: Ray, world: &dyn Hittable, depth: u32) -> Color { if depth == 0 { return Color::zero(); } if let Some(rec) = world.hit(r, 0.001, f64::INFINITY) { if let Some(reflectance) = rec.mat_ptr.scatter(r, rec.clone()) { return reflectance.attenuation * ray_color(reflectance.scattered_ray, world, depth - 1); } return Color::zero(); } let unit_direction = Vec3::unit(r.direction()); let t = 0.5 * (unit_direction.y() + 1.0); ((1.0 - t) * Color::new(1.0, 1.0, 1.0)) + (t * Color::new(0.3, 0.7, 1.0)) } fn main() { // image let aspect_ratio = 3.0 / 2.0; let image_width = 1200u32; let image_height = (image_width as f64 / aspect_ratio) as u32; let samples_per_pixel = 500u32; let max_depth = 50u32; // camera let look_from = Point3::new(13.0, 2.0, 3.0); let look_at = Point3::new(0.0, 0.0, 0.0); let vup = Vec3::new(0.0, 1.0, 0.0); let dist_to_focus = (look_from - Point3::new(5.0, 1.0, 0.5)).length(); let aperture = 0.1; let vertical_fov = 20.0; let cam = Camera::new( look_from, look_at, vup, vertical_fov, aspect_ratio, aperture, dist_to_focus, ); // PPM image format specifications println!("P3"); // colors are in ascii println!("{} {}", image_width, image_height); println!("{}", 255); // world let world = random_scene(); for y in (0..image_height).rev() { eprintln!("Scanlines remaining: {}", y); for x in 0..image_width { let mut pixel_color 
= Color::zero(); for _s in 0..samples_per_pixel { let x_percent = (x as f64 + rand_f64()) / (image_width as f64); let y_percent = (y as f64 + rand_f64()) / (image_height as f64); let r = cam.get_ray(x_percent, y_percent); pixel_color += ray_color(r, &world, max_depth); } write_sampled_color(pixel_color, samples_per_pixel); } } }
true
f23aa0c5d67fbf3178e771be05c11b74189383fc
Rust
aeyoll/vernam-rs
/src/main.rs
UTF-8
1,372
2.84375
3
[ "MIT" ]
permissive
use std::process; #[macro_use] extern crate clap; use clap::App; use clap::ArgMatches; mod lib; #[macro_use] extern crate failure; use failure::Error; use lib::Vernam; fn build_vernam(submatches: &ArgMatches) -> Result<Vernam, Error> { let message = submatches.value_of("message").unwrap().to_string(); let key = submatches.value_of("key").unwrap().to_string(); if key.len() < message.len() { bail!("Key length must be equal or greater than the message") } let v = Vernam { message, key }; Ok(v) } fn app() -> Result<(), Error> { let yaml = load_yaml!("cli.yml"); let matches = App::from_yaml(yaml).get_matches(); match matches.subcommand() { ("encrypt", Some(submatches)) => { let vernam = build_vernam(submatches)?; let encrypted_message = vernam.encrypt()?; println!("{}", encrypted_message); } ("decrypt", Some(submatches)) => { let vernam = build_vernam(submatches)?; let decrypted_message = vernam.decrypt()?; println!("{}", decrypted_message); } _ => { println!("Command not implemented"); } }; Ok(()) } fn main() { process::exit(match app() { Ok(_) => 0, Err(err) => { println!("{}", err.to_string()); 1 } }); }
true
cd8dd84e0f2b03150c23636b7405b334c285cbc6
Rust
GaloisInc/mir-verifier
/lib/libstd/sys_common/mutex.rs
UTF-8
3,015
3.3125
3
[]
permissive
use crate::sys::crux::mutex as imp; /// An OS-based mutual exclusion lock. /// /// This is the thinnest cross-platform wrapper around OS mutexes. All usage of /// this mutex is unsafe and it is recommended to instead use the safe wrapper /// at the top level of the crate instead of this type. pub struct Mutex(imp::Mutex); unsafe impl Sync for Mutex {} impl Mutex { /// Creates a new mutex for use. /// /// Behavior is undefined if the mutex is moved after it is /// first used with any of the functions below. /// Also, until `init` is called, behavior is undefined if this /// mutex is ever used reentrantly, i.e., `raw_lock` or `try_lock` /// are called by the thread currently holding the lock. pub const fn new() -> Mutex { Mutex(imp::Mutex::new()) } /// Prepare the mutex for use. /// /// This should be called once the mutex is at a stable memory address. /// If called, this must be the very first thing that happens to the mutex. /// Calling it in parallel with or after any operation (including another /// `init()`) is undefined behavior. #[inline] pub unsafe fn init(&mut self) { self.0.init() } /// Locks the mutex blocking the current thread until it is available. /// /// Behavior is undefined if the mutex has been moved between this and any /// previous function call. #[inline] pub unsafe fn raw_lock(&self) { self.0.lock() } /// Calls raw_lock() and then returns an RAII guard to guarantee the mutex /// will be unlocked. #[inline] pub unsafe fn lock(&self) -> MutexGuard<'_> { self.raw_lock(); MutexGuard(&self.0) } /// Attempts to lock the mutex without blocking, returning whether it was /// successfully acquired or not. /// /// Behavior is undefined if the mutex has been moved between this and any /// previous function call. #[inline] pub unsafe fn try_lock(&self) -> bool { self.0.try_lock() } /// Unlocks the mutex. /// /// Behavior is undefined if the current thread does not actually hold the /// mutex. 
/// /// Consider switching from the pair of raw_lock() and raw_unlock() to /// lock() whenever possible. #[inline] pub unsafe fn raw_unlock(&self) { self.0.unlock() } /// Deallocates all resources associated with this mutex. /// /// Behavior is undefined if there are current or will be future users of /// this mutex. #[inline] pub unsafe fn destroy(&self) { self.0.destroy() } } // not meant to be exported to the outside world, just the containing module pub fn raw(mutex: &Mutex) -> &imp::Mutex { &mutex.0 } #[must_use] /// A simple RAII utility for the above Mutex without the poisoning semantics. pub struct MutexGuard<'a>(&'a imp::Mutex); impl Drop for MutexGuard<'_> { #[inline] fn drop(&mut self) { unsafe { self.0.unlock(); } } }
true
c217f29b23841834d8da792d3e7c7bce49203e81
Rust
bazelbuild/rules_rust
/proto/prost/private/tests/types/types_test.rs
UTF-8
3,414
3.015625
3
[ "Apache-2.0" ]
permissive
use types_proto::Enum; use types_proto::{types, Types}; #[test] fn test_types() { Types { a_enum: Enum::C.into(), a_double: 2.0, a_float: 3.0, a_int32: 4, a_int64: 5, a_uint32: 6, a_uint64: 7, a_sint32: 8, a_sint64: 9, a_fixed32: 10, a_fixed64: 11, a_sfixed32: 12, a_sfixed64: 13, a_bool: true, a_string: "a".to_string(), a_bytes: vec![1, 2, 3], repeated_enum: vec![Enum::A.into(), Enum::B.into()], repeated_double: vec![2.0, 3.0], repeated_float: vec![3.0, 4.0], repeated_int32: vec![4, 5], repeated_int64: vec![5, 6], repeated_uint32: vec![6, 7], repeated_uint64: vec![7, 8], repeated_sint32: vec![8, 9], repeated_sint64: vec![9, 10], repeated_fixed32: vec![10, 11], repeated_fixed64: vec![11, 12], repeated_sfixed32: vec![12, 13], repeated_sfixed64: vec![13, 14], repeated_bool: vec![true, false], repeated_string: vec!["a".to_string(), "b".to_string()], repeated_bytes: vec![vec![1, 2, 3], vec![4, 5, 6]], map_string_enum: vec![ ("a".to_string(), Enum::A.into()), ("b".to_string(), Enum::B.into()), ] .into_iter() .collect(), map_string_double: vec![("a".to_string(), 2.0), ("b".to_string(), 3.0)] .into_iter() .collect(), map_string_float: vec![("a".to_string(), 3.0), ("b".to_string(), 4.0)] .into_iter() .collect(), map_string_int32: vec![("a".to_string(), 4), ("b".to_string(), 5)] .into_iter() .collect(), map_string_int64: vec![("a".to_string(), 5), ("b".to_string(), 6)] .into_iter() .collect(), map_string_uint32: vec![("a".to_string(), 6), ("b".to_string(), 7)] .into_iter() .collect(), map_string_uint64: vec![("a".to_string(), 7), ("b".to_string(), 8)] .into_iter() .collect(), map_string_sint32: vec![("a".to_string(), 8), ("b".to_string(), 9)] .into_iter() .collect(), map_string_sint64: vec![("a".to_string(), 9), ("b".to_string(), 10)] .into_iter() .collect(), map_string_fixed32: vec![("a".to_string(), 10), ("b".to_string(), 11)] .into_iter() .collect(), map_string_fixed64: vec![("a".to_string(), 11), ("b".to_string(), 12)] .into_iter() .collect(), map_string_sfixed32: 
vec![("a".to_string(), 12), ("b".to_string(), 13)] .into_iter() .collect(), map_string_sfixed64: vec![("a".to_string(), 13), ("b".to_string(), 14)] .into_iter() .collect(), map_string_bool: vec![("a".to_string(), true), ("b".to_string(), false)] .into_iter() .collect(), map_string_string: vec![ ("a".to_string(), "a".to_string()), ("b".to_string(), "b".to_string()), ] .into_iter() .collect(), map_string_bytes: vec![ ("a".to_string(), vec![1, 2, 3]), ("b".to_string(), vec![4, 5, 6]), ] .into_iter() .collect(), one_of: Some(types::OneOf::OneofFloat(1.0)), }; }
true
83dcdbfb3c3b7aa8d7feefc4b1e905c3103d0de1
Rust
CrystalGamma/irc-zerocopy
/src/lib.rs
UTF-8
3,839
2.71875
3
[ "MIT" ]
permissive
pub mod parse; pub use parse::{IrcMessage, parse_irc_message}; pub mod reader; pub use reader::IrcReader; use std::convert::AsRef; trait FirstCharExt: AsRef<str> { fn first_char(&self) -> char {self.as_ref().chars().next().expect("tried to get the first code point of an empty string")} } impl<S: AsRef<str>> FirstCharExt for S {} #[derive(Clone, Copy, Debug)] pub struct TargetList<'a>(&'a str); impl<'a> TargetList<'a> { pub fn unwrap(self) -> &'a str { let TargetList(inner) = self; return inner; } pub fn from_str(inner: &'a str) -> TargetList<'a> { TargetList(inner) } pub fn iter(&self) -> std::str::Split<'a, char> { let &TargetList(ref inner) = self; inner.split(',') } } #[derive(Debug)] pub enum TypedMessage<'a> { Talk(TargetList<'a>, &'a str), Msg(&'a str, TargetList<'a>, &'a str), Notify(TargetList<'a>, &'a str), Notice(&'a str, TargetList<'a>, &'a str), SetNick(&'a str), NickChanged(&'a str, &'a str), Ping(Vec<&'a str>), Pong(Vec<&'a str>), Joined(&'a str, TargetList<'a>), Topic(&'a str, Option<&'a str>), Join(TargetList<'a>, Option<TargetList<'a>>), Register(&'a str, &'a str), // user, real name Welcome(&'a str), YourHost(&'a str), MotdStart(&'a str), Motd(&'a str), MotdEnd(&'a str), Other(IrcMessage<'a>) } pub use TypedMessage::*; pub fn analyse_message(msg: IrcMessage) -> TypedMessage { match msg.command { "PING" => Ping(msg.args), "001" if msg.args.len() == 2 => Welcome(msg.args[1]), "002" if msg.args.len() == 2 => YourHost(msg.args[1]), "375" if msg.args.len() == 2 => MotdStart(msg.args[1]), "372" if msg.args.len() == 2 => Motd(msg.args[1]), "376" if msg.args.len() == 2 => MotdEnd(msg.args[1]), "JOIN" if msg.args.len() == 1 => match msg.prefix { Some(p) => Joined(p, TargetList::from_str(msg.args[0])), None => Join(TargetList::from_str(msg.args[0]), None) }, "NICK" if msg.args.len() == 1 => match msg.prefix { Some(p) => NickChanged(p, msg.args[0]), None => SetNick(msg.args[0]) }, "PRIVMSG" if msg.args.len() == 2 => match msg.prefix { Some(p) => Msg(p, 
TargetList::from_str(msg.args[0]), msg.args[1]), None => Talk(TargetList::from_str(msg.args[0]), msg.args[1]) }, "NOTICE" if msg.args.len() == 2 => match msg.prefix { Some(p) => Notice(p, TargetList::from_str(msg.args[0]), msg.args[1]), None => Notify(TargetList::from_str(msg.args[0]), msg.args[1]) }, "331" if msg.args.len() == 2 => Topic(msg.args[0], None), "332" if msg.args.len() == 2 => Topic(msg.args[0], Some(msg.args[1])), _ => Other(msg) } } impl<'a> TypedMessage<'a> { pub fn to_dumb(self) -> IrcMessage<'a> { match self { Pong(targets) => IrcMessage { prefix: None, command: "PONG", args: targets }, Join(channels, keys) => IrcMessage { prefix: None, command: "JOIN", args: match keys { Some(list) => vec![channels.unwrap(), list.unwrap()], None => vec![channels.unwrap()] } }, Talk(list, text) => IrcMessage { prefix: None, command: "PRIVMSG", args: vec![list.unwrap(), text] }, Notify(list, text) => IrcMessage { prefix: None, command: "NOTICE", args: vec![list.unwrap(), text] }, SetNick(nick) => IrcMessage { prefix: None, command: "NICK", args: vec![nick] }, Register(user, real_name) => IrcMessage { prefix: None, command: "USER", args: vec![user, "8", "-", real_name] }, _ => unimplemented!() } } pub fn is_motd(&self) -> bool { match self { &Motd(..) | &MotdStart(..) | &MotdEnd(..) => true, _ => false } } } pub fn nick_from_mask(mask: &str) -> &str { mask.find('!').or_else(|| mask.find('@')).map_or(mask, |p| &mask[..p]) } pub fn is_channel_name(s: &str) -> bool { if s.len() > 2 { let c = s.first_char(); c == '#' || c == '!' || c == '&' || c == '+' } else {false} }
true
3fa6dac1ae193953fd8a3d1e206f19bd159f4847
Rust
ethankhall/crom
/src/models/user_config.rs
UTF-8
4,006
2.96875
3
[ "BSD-3-Clause" ]
permissive
use serde::{Deserialize, Serialize}; use std::collections::HashMap; use crate::version::VersionMatcher; #[derive(Serialize, Deserialize, Debug)] #[serde(rename_all = "kebab-case")] pub struct CromConfig { #[serde(flatten)] pub project: ProjectConfig, #[serde(default)] pub artifact: HashMap<String, ProjectArtifacts>, } #[derive(Serialize, Deserialize, Debug, PartialEq, Clone)] #[serde(rename_all = "kebab-case")] pub struct ProjectConfig { pub pattern: String, pub cargo: Option<CargoConfig>, pub property: Option<PropertyFileConfig>, pub maven: Option<MavenConfig>, #[serde(rename = "node")] pub package_json: Option<NodeConfig>, #[serde(rename = "python")] pub version_py: Option<VersionPyConfig>, pub message_template: Option<String>, } impl CromConfig { pub fn create_default(pattern: String, message_template: String) -> Self { let project_config = ProjectConfig { pattern, message_template: Some(message_template), cargo: None, property: None, maven: None, package_json: None, version_py: None, }; CromConfig { project: project_config, artifact: HashMap::new(), } } pub fn create_version_matcher(&self) -> VersionMatcher { VersionMatcher::new(&self.project.pattern) } } #[derive(Serialize, Debug, PartialEq, Clone, Deserialize)] pub struct VersionPyConfig { pub path: String, } #[derive(Serialize, Debug, PartialEq, Clone, Deserialize)] pub struct NodeConfig { #[serde(default = "default_none_path")] #[serde(alias = "path")] pub directory: Option<String>, } #[derive(Serialize, Debug, PartialEq, Clone, Deserialize)] pub struct CargoConfig { #[serde(default = "default_none_path")] #[serde(alias = "path")] pub directory: Option<String>, } #[derive(Serialize, Debug, PartialEq, Clone, Deserialize)] pub struct MavenConfig {} #[derive(Serialize, Debug, PartialEq, Clone, Deserialize)] pub struct PropertyFileConfig { #[serde(default = "default_propery_file_path")] pub path: String, } fn default_none_path() -> Option<String> { None } fn default_propery_file_path() -> String { 
s!(crate::statics::VERSION_PROPERTIES) } #[derive(Serialize, Deserialize, Debug, PartialEq, Clone)] pub enum ProjectArtifactCompressionFormat { #[serde(alias = "zip", alias = "ZIP")] Zip, #[serde(alias = "tgz", alias = "TGZ", alias = "tar.gz")] Tgz, } #[derive(Serialize, Deserialize, Debug, PartialEq, Clone)] pub struct ProjectArtifactWrapper { pub name: String, pub format: ProjectArtifactCompressionFormat, } #[derive(Serialize, Deserialize, Debug, PartialEq, Clone)] pub enum ProjectArtifactTarget { #[serde(alias = "github")] GitHub, } #[derive(Serialize, Deserialize, Debug, PartialEq, Clone)] #[serde(rename_all = "kebab-case")] pub struct ProjectArtifacts { pub paths: HashMap<String, String>, pub compress: Option<ProjectArtifactWrapper>, pub target: ProjectArtifactTarget, } #[test] fn verify_config_parse() { let example_text = " pattern = 'v0.1.%d' message-template = \"Created {version} for release.\" [cargo] [maven] [node] [python] path = \"path/to/version.py\" [property] path = \"path/to/property-file.properties\" "; let config = toml::from_str::<CromConfig>(example_text).unwrap(); println!("config: {:?}", config); assert_eq!(Some(CargoConfig { directory: None }), config.project.cargo); assert_eq!(Some(MavenConfig {}), config.project.maven); assert_eq!( Some(NodeConfig { directory: None }), config.project.package_json ); assert_eq!( Some(VersionPyConfig { path: s!("path/to/version.py"), }), config.project.version_py ); assert_eq!( Some(PropertyFileConfig { path: s!("path/to/property-file.properties"), }), config.project.property ); }
true
10b2dde5fb04d187bbdbb1323064a39b6a6b5719
Rust
s1na/ruster
/src/error.rs
UTF-8
479
3.03125
3
[ "MIT" ]
permissive
use std::error; use std::fmt; #[derive(Debug, Clone)] pub struct Error { msg: String, } impl Error { pub fn new(m: &str) -> Error { Error { msg: m.to_string() } } } impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.msg) } } impl error::Error for Error { fn description(&self) -> &str { &self.msg } fn cause(&self) -> Option<&error::Error> { None } }
true
cc6df4678b15ccd118cd37a5f5df0031bc440262
Rust
birkenfeld/rust-ctrlc
/src/lib.rs
UTF-8
2,562
2.828125
3
[]
no_license
//! A simple easy to use wrapper around Ctrl-C signal. #![cfg_attr(feature="nightly", feature(static_condvar))] #![cfg_attr(feature="nightly", feature(static_mutex))] extern crate libc; #[cfg(feature="stable")] #[macro_use] extern crate lazy_static; #[cfg(feature="nightly")] mod features { use std::sync::{StaticCondvar, CONDVAR_INIT, StaticMutex, MUTEX_INIT}; pub static CVAR: StaticCondvar = CONDVAR_INIT; pub static MUTEX: StaticMutex = MUTEX_INIT; } #[cfg(not(feature="nightly"))] mod features { use std::sync::{Condvar, Mutex}; lazy_static! { pub static ref CVAR: Condvar = Condvar::new(); pub static ref MUTEX: Mutex<bool> = Mutex::new(false); } } use self::features::*; use std::os::raw::c_int; #[cfg(unix)] mod platform { use libc::c_int; use libc::types::os::common::posix01::sighandler_t; use libc::funcs::posix01::signal::signal; #[repr(C)] pub fn handler(_: c_int) { super::features::CVAR.notify_all(); } #[inline] pub unsafe fn set_os_handler(sigs: Vec<c_int>, handler: fn(c_int)) { for sig in sigs { signal(sig, ::std::mem::transmute::<_, sighandler_t>(handler)); } } } #[cfg(windows)] mod platform { use libc::c_int; type PHandlerRoutine = unsafe extern fn(CtrlType: c_int) -> bool; #[link(name = "kernel32")] extern { fn SetConsoleCtrlHandler(HandlerRoutine: PHandlerRoutine, Add: bool) -> bool; } #[repr(C)] pub fn handler(_: c_int) -> bool { super::features::CVAR.notify_all(); true } #[inline] pub unsafe fn set_os_handler(sigs: Vec<c_int>, handler: fn(c_int) -> bool) { SetConsoleCtrlHandler(::std::mem::transmute::<_, PHandlerRoutine>(handler), true); } } use self::platform::*; pub struct CtrlC; impl CtrlC { /// Sets up the signal handler for Ctrl-C /// # Example /// ``` /// use ctrlc::CtrlC; /// CtrlC::set_handler(|| println!("Hello world!")); /// ``` pub fn set_handler<F: Fn() -> () + 'static + Send>(sigs: Vec<c_int>, user_handler: F) -> () { unsafe { set_os_handler(sigs, handler); } ::std::thread::spawn(move || { loop { let _ = CVAR.wait(MUTEX.lock().unwrap()); 
user_handler(); } }); } pub fn get_waiter(sigs: Vec<c_int>) -> Box<Fn() -> ()> { unsafe { set_os_handler(sigs, handler); } return Box::new(|| { let _ = CVAR.wait(MUTEX.lock().unwrap()); }); } }
true
a4f69b4595456b7f71ebffcf1987d3aeeea2a64f
Rust
etorth/sniprun
/src/interpreters/Python3_original.rs
UTF-8
11,803
2.78125
3
[ "MIT" ]
permissive
#[derive(Clone)] #[allow(non_camel_case_types)] pub struct Python3_original { support_level: SupportLevel, data: DataHolder, code: String, imports: String, interpreter: String, main_file_path: String, plugin_root: String, cache_dir: String, venv: Option<String>, } impl Python3_original { fn fetch_imports(&mut self) -> Result<(), SniprunError> { if self.support_level < SupportLevel::Import { return Ok(()); } let mut v = vec![]; let mut errored = true; if let Some(real_nvim_instance) = self.data.nvim_instance.clone() { info!("got real nvim isntance"); let mut rvi = real_nvim_instance.lock().unwrap(); if let Ok(buffer) = rvi.get_current_buf() { info!("got buffer"); if let Ok(buf_lines) = buffer.get_lines(&mut rvi, 0, -1, false) { info!("got lines in buffer"); v = buf_lines; errored = false; } } } if errored { return Err(SniprunError::FetchCodeError); } info!("lines are : {:?}", v); if !self .data .current_bloc .replace(&[' ', '\t', '\n', '\r'][..], "") .is_empty() { self.code = self.data.current_bloc.clone(); } for line in v.iter() { // info!("lines are : {}", line); if (line.trim().starts_with("import ") || line.trim().starts_with("from ")) //basic selection && !line.trim().starts_with('#') && self.module_used(line, &self.code) { // embed in try catch blocs in case uneeded module is unavailable self.imports = self.imports.clone() + "\n" + line; } } info!("import founds : {:?}", self.imports); Ok(()) } fn module_used(&self, line: &str, code: &str) -> bool { info!( "checking for python module usage: line {} in code {}", line, code ); if line.contains('*') { return true; } if line.contains(" as ") { if let Some(name) = line.split(' ').last() { return code.contains(name); } } for name in line .replace(",", " ") .replace("from", " ") .replace("import ", " ") .split(' ') .filter(|&x| !x.is_empty()) { if code.contains(name.trim()) { return true; } } false } fn fetch_config(&mut self) { let default_compiler = String::from("python3"); if let Some(used_compiler) = 
Python3_original::get_interpreter_option(&self.get_data(), "interpreter") { if let Some(compiler_string) = used_compiler.as_str() { info!("Using custom compiler: {}", compiler_string); self.interpreter = compiler_string.to_string(); } } self.interpreter = default_compiler; if let Ok(path) = env::current_dir() { if let Some(venv_array_config) = Python3_original::get_interpreter_option(&self.get_data(), "venv") { if let Some(actual_vec_of_venv) = venv_array_config.as_array() { for possible_venv in actual_vec_of_venv.iter() { if let Some(possible_venv_str) = possible_venv.as_str() { let venv_abs_path = path.to_str().unwrap().to_owned() + "/" + possible_venv_str + "/bin/activate_this.py"; if std::path::Path::new(&venv_abs_path).exists() { self.venv = Some(venv_abs_path); break; } } } } } } } } impl Interpreter for Python3_original { fn new_with_level(data: DataHolder, level: SupportLevel) -> Box<Python3_original> { //create a subfolder in the cache folder let rwd = data.work_dir.clone() + "/python3_original"; let mut builder = DirBuilder::new(); builder.recursive(true); builder .create(&rwd) .expect("Could not create directory for python3-original"); //pre-create string pointing to main file's and binary's path let mfp = rwd.clone() + "/main.py"; let pgr = data.sniprun_root_dir.clone(); Box::new(Python3_original { data, support_level: level, code: String::from(""), imports: String::from(""), main_file_path: mfp, plugin_root: pgr, cache_dir: rwd, interpreter: String::new(), venv: None, }) } fn check_cli_args(&self) -> Result<(), SniprunError> { // All cli arguments are sendable to python // Though they will be ignored in REPL mode Ok(()) } fn get_name() -> String { String::from("Python3_original") } fn behave_repl_like_default() -> bool { false } fn has_repl_capability() -> bool { true } fn default_for_filetype() -> bool { true } fn get_supported_languages() -> Vec<String> { vec![ String::from("Python 3"), String::from("python"), String::from("python3"), 
String::from("py"), ] } fn get_current_level(&self) -> SupportLevel { self.support_level } fn set_current_level(&mut self, level: SupportLevel) { self.support_level = level; } fn get_data(&self) -> DataHolder { self.data.clone() } fn get_max_support_level() -> SupportLevel { SupportLevel::Import } fn fetch_code(&mut self) -> Result<(), SniprunError> { self.fetch_config(); self.fetch_imports()?; if !self .data .current_bloc .replace(&[' ', '\t', '\n', '\r'][..], "") .is_empty() && self.get_current_level() >= SupportLevel::Bloc { self.code = self.data.current_bloc.clone(); } else if !self.data.current_line.replace(" ", "").is_empty() && self.get_current_level() >= SupportLevel::Line { self.code = self.data.current_line.clone(); } else { self.code = String::from(""); } Ok(()) } fn add_boilerplate(&mut self) -> Result<(), SniprunError> { if !self.imports.is_empty() { let mut indented_imports = String::new(); for import in self.imports.lines() { indented_imports = indented_imports + "\t" + import + "\n"; } self.imports = String::from("\ntry:\n") + &indented_imports + "\nexcept:\n\tpass\n"; } let mut source_venv = String::new(); if let Some(venv_path) = &self.venv { info!("loading venv: {}", venv_path); source_venv = source_venv + "\n" + "activate_this_file = \"" + venv_path + "\""; source_venv += "\nexec(compile(open(activate_this_file, \"rb\").read(), activate_this_file, 'exec'), dict(__file__=activate_this_file))\n"; } self.code = source_venv + &self.imports.clone() + &unindent(&format!("{}{}", "\n", self.code.as_str())); Ok(()) } fn build(&mut self) -> Result<(), SniprunError> { // info!("python code:\n {}", self.code); write(&self.main_file_path, &self.code) .expect("Unable to write to file for python3_original"); Ok(()) } fn execute(&mut self) -> Result<String, SniprunError> { let output = Command::new(&self.interpreter) .arg(&self.main_file_path) .args(&self.get_data().cli_args) .output() .expect("Unable to start process"); if output.status.success() { 
Ok(String::from_utf8(output.stdout).unwrap()) } else { return Err(SniprunError::RuntimeError( String::from_utf8(output.stderr.clone()) .unwrap() .lines() .last() .unwrap_or(&String::from_utf8(output.stderr).unwrap()) .to_owned(), )); } } } impl ReplLikeInterpreter for Python3_original { fn fetch_code_repl(&mut self) -> Result<(), SniprunError> { self.fetch_code() } fn build_repl(&mut self) -> Result<(), SniprunError> { self.build() } fn execute_repl(&mut self) -> Result<String, SniprunError> { self.execute() } fn add_boilerplate_repl(&mut self) -> Result<(), SniprunError> { info!("begins add boilerplate repl"); //load save & load functions let mut path_to_python_functions = self.plugin_root.clone(); path_to_python_functions.push_str("/src/interpreters/Python3_original/saveload.py"); let python_functions = std::fs::read_to_string(&path_to_python_functions).unwrap(); let klepto_memo = String::from("'") + &self.cache_dir.clone() + "/" + "memo" + "'"; let mut final_code = self.imports.clone(); final_code.push('\n'); final_code.push_str(&python_functions); final_code.push('\n'); if self.read_previous_code().is_empty() { //first run self.save_code("Not the first run anymore".to_string()); } else { //not the first run, should load old variables { final_code.push_str("sniprun142859_load("); final_code.push_str(&klepto_memo); final_code.push(')'); } final_code.push('\n'); } final_code.push_str(&unindent(&format!("{}{}", "\n", self.code.as_str()))); final_code.push('\n'); { final_code.push_str("sniprun142859_save("); // if the run has not failed, save new variables final_code.push_str(&klepto_memo); final_code.push(')'); } self.code = final_code.clone(); // info!("---{}---", &final_code); Ok(()) } } #[cfg(test)] mod test_python3_original { use super::*; use crate::*; use crate::test_main::*; #[test] fn simple_print() { let mut data = DataHolder::new(); data.current_bloc = String::from("print(\"lol\",1);"); let mut interpreter = Python3_original::new(data); let res = 
interpreter.run_at_level(SupportLevel::Bloc); // should panic if not an Ok() let string_result = res.unwrap(); assert_eq!(string_result, "lol 1\n"); } fn test_repl() { let mut event_handler = fake_event(); event_handler.fill_data(&fake_msgpack()); event_handler.data.filetype = String::from("python"); event_handler.data.current_bloc = String::from("a=1"); event_handler.data.repl_enabled = vec![String::from("Python3_original")]; event_handler.data.sniprun_root_dir = String::from("."); //run the launcher (that selects, init and run an interpreter) let launcher = launcher::Launcher::new(event_handler.data.clone()); let _result = launcher.select_and_run(); event_handler.data.current_bloc = String::from("print(a)"); let launcher = launcher::Launcher::new(event_handler.data.clone()); let result = launcher.select_and_run(); assert!(result.is_ok()); } }
true
0c286a6e617d55954218349f4917efd4f2c495fe
Rust
LeedsCodeDojo/Connect4
/rustExample/src/api.rs
UTF-8
7,305
2.6875
3
[]
no_license
#![allow(non_snake_case)] use rustc_serialize::{Decodable, Decoder}; use rustc_serialize::json; use hyper::{self, Client}; use std::io::Read; const SERVER_URL: &'static str = "http://yorkdojoconnect4.azurewebsites.net/"; static TEAM_NAME: &'static str = "rustPlayer"; static TEAM_PASSWORD: &'static str = "asiduhfasjfhlkasjfhlaksjfhlksajfh"; const COLUMNS: usize = 7; const ROWS: usize = 6; #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum GameState { GameNotStarted = 0, RedWon = 1, YellowWon = 2, RedToPlay = 3, YellowToPlay = 4, Draw = 5, } impl Decodable for GameState { fn decode<D: Decoder>(d: &mut D) -> Result<Self, D::Error> { let n = try!(d.read_usize()); let state = match n { 0 => GameState::GameNotStarted, 1 => GameState::RedWon, 2 => GameState::YellowWon, 3 => GameState::RedToPlay, 4 => GameState::YellowToPlay, 5 => GameState::Draw, _ => panic!("broken gamestate"), }; Ok(state) } } #[derive(Debug, Clone, Copy, PartialEq, Eq, RustcDecodable)] pub enum CellState { Empty = 0, Red = 1, Yellow = 2, } #[derive(Debug)] pub struct Board { cells: [[CellState; ROWS]; COLUMNS], } impl Decodable for Board { fn decode<D: Decoder>(d: &mut D) -> Result<Self, D::Error> { let mut cells = [[CellState::Empty; ROWS]; COLUMNS]; let _cells = d.read_seq(|d, len| { let mut column_list = Vec::new(); for i in 0..len { column_list.push(try!(d.read_seq_elt(i, |d| { let row = d.read_seq(|d, len| { let mut row_list = Vec::new(); for i in 0..len { row_list.push(try!(d.read_seq_elt(i, |d| { let state = match try!(d.read_usize()) { 0 => CellState::Empty, 1 => CellState::Red, 2 => CellState::Yellow, _ => panic!("Invalid cell state"), }; Ok(state) }))); } Ok(row_list) }); assert!(row.is_ok(), "row was invalid"); row }))); } Ok(column_list) }); assert!(_cells.is_ok(), "column was invalid"); let cells_vec = _cells.ok().unwrap(); for (inner_array, inner_vec) in cells.iter_mut().zip(cells_vec.iter()) { for (place, element) in inner_array.iter_mut().zip(inner_vec.iter()) { *place = *element; } 
} Ok(Board { cells: cells }) } } impl Board { pub fn new() -> Board { Board { cells: [[CellState::Empty; ROWS]; COLUMNS] } } } #[derive(Debug, RustcDecodable)] pub struct RemoteGameState { CurrentState: GameState, Cells: Board, YellowPlayerID: String, RedPlayerID: String, ID: String, } pub struct Game { pub state: GameState, pub board: Board, player_id: Option<String>, yellow_player_id: Option<String>, red_player_id: Option<String>, game_id: Option<String>, client: hyper::Client, } impl Game { pub fn new() -> Game { let mut game = Game { state: GameState::GameNotStarted, board: Board::new(), player_id: None, yellow_player_id: None, red_player_id: None, game_id: None, client: Client::new(), }; game.register(); game.new_game(); game } fn register(&mut self) { let mut body = String::new(); let url = format!("{}{}?teamName={}&password={}", SERVER_URL.to_string(), "api/register", TEAM_NAME, TEAM_PASSWORD); let mut res = self.client.post(&url).send().unwrap(); res.read_to_string(&mut body).unwrap(); assert_eq!(res.status, hyper::Ok, "Error returned: {}", body); body.remove(0); body.pop(); self.player_id = Some(body); } fn new_game(&mut self) { let mut body = String::new(); let url = format!("{}{}?playerID={}", SERVER_URL.to_string(), "api/NewGame", self.player_id.clone().unwrap()); let mut res = self.client.post(&url).send().unwrap(); res.read_to_string(&mut body).unwrap(); assert_eq!(res.status, hyper::Ok, "Error returned: {}", body); } pub fn get_game_state(&mut self) { let mut body = String::new(); let url = format!("{}{}?playerID={}", SERVER_URL.to_string(), "api/GameState", self.player_id.clone().unwrap()); let mut res = self.client.get(&url).send().unwrap(); res.read_to_string(&mut body).unwrap(); assert_eq!(res.status, hyper::Ok, "Error returned: {}", body); let remote_game_state: RemoteGameState = json::decode(&body).unwrap(); self.board = remote_game_state.Cells; self.state = remote_game_state.CurrentState; self.yellow_player_id = 
Some(remote_game_state.YellowPlayerID); self.red_player_id = Some(remote_game_state.RedPlayerID); self.game_id = Some(remote_game_state.ID); } pub fn over(&self) -> bool { match self.state { GameState::RedWon | GameState::YellowWon | GameState::Draw => true, _ => false, } } pub fn my_turn(&self) -> bool { match (self.player_id == self.yellow_player_id, self.state) { (true, GameState::YellowToPlay) => true, (false, GameState::RedToPlay) => true, _ => false, } } pub fn winner(&self) { match (self.player_id == self.yellow_player_id, self.state) { (_, GameState::Draw) => println!("The game was a draw"), (true, GameState::YellowWon) => println!("You won, as yellow!"), (false, GameState::RedWon) => println!("You won, as red!"), (false, GameState::YellowWon) => println!("You lost, as red!"), (true, GameState::RedWon) => println!("You lost, as yellow!"), _ => println!("The game isn't over yet!"), } } pub fn play(&self, column: usize) { let mut body = String::new(); let url = format!("{}{}?playerID={}&columnNumber={}&password={}", SERVER_URL.to_string(), "api/MakeMove", self.player_id.clone().unwrap(), column, TEAM_PASSWORD); let mut res = self.client.get(&url).send().unwrap(); res.read_to_string(&mut body).unwrap(); assert_eq!(res.status, hyper::Ok, "Error returned: {}", body); } pub fn valid_move(&self, column_idx: usize) -> bool { let column = self.board.cells[column_idx]; column[ROWS - 1] == CellState::Empty } }
true
425a99a42b58abc8338002eda949bbc08f7599ec
Rust
mcountryman/scraptor
/src/driver/dxgi/frame.rs
UTF-8
6,446
2.875
3
[]
no_license
use crate::{ bindings::Windows::Win32::{ Foundation::RECT, Graphics::Dxgi::{IDXGIOutputDuplication, DXGI_OUTDUPL_MOVE_RECT}, }, driver::dx11::frame::Dx11FrameData, DirtyRect, Frame, FrameFormat, MovedPoint, MovedRect, }; use std::{borrow::Cow, cmp::min}; #[derive(Debug, Clone)] pub struct DxgiFrame<'a> { data: DxgiFrameData<'a>, dirty: Option<Vec<DirtyRect>>, duplication: &'a IDXGIOutputDuplication, } impl<'a> DxgiFrame<'a> { pub fn new<D>(data: D, duplication: &'a IDXGIOutputDuplication) -> Self where D: Into<DxgiFrameData<'a>>, { Self { data: data.into(), dirty: None, duplication, } } /// Get reference to underlying data pub const fn data(&self) -> &DxgiFrameData<'a> { &self.data } /// Get rectangles where pixels have changed since last frame pub fn dirty(&self) -> Vec<DirtyRect> { unsafe { self.get_dirty_rects() } } /// Get rectangles where pixels have moved since last frame pub fn moved(&self) -> Vec<MovedRect> { unsafe { self.get_moved_rects() } } /// Get pixel format of underlying data /// /// # Notes /// Per the Microsoft DesktopDuplication API documentation the format of the desktop /// image is always `DXGI_FORMAT_B8G8R8A8_UNORM` which translates to `B8G8R8A8`. /// /// https://docs.microsoft.com/en-us/windows/win32/direct3ddxgi/desktop-dup-api#updating-the-desktop-image-data pub const fn format(&self) -> FrameFormat { FrameFormat::B8G8R8A8 } /// Get pixel data /// /// # Notes /// When frame data is [`DxgiFrameData::DirectX`] texture is copied to CPU memory and /// returned. No caching occurs so, if you plan on using this multiple times you should /// probably cache the result yourself. 
pub fn as_bytes(&self) -> anyhow::Result<Cow<'a, [u8]>> { match &self.data { DxgiFrameData::Memory(buf) => Ok(Cow::from(*buf)), DxgiFrameData::DirectX(texture) => Ok(Cow::from(texture.get_bytes()?)), } } /// Convert into underlying data pub fn into_data(self) -> DxgiFrameData<'a> { self.data } /// Gets dirty rectangles from [`IDXGIOutputDuplication`] while ignoring errors and doing /// best effort minimizing amount of memory while allowing further growth when needed. /// /// At some point I may consider caching [`RECT`] buffer and translated [`FrameRect`] /// items in [`DxgiFrame`] but, for the time being I'll let the end user decide where and /// how data is stored (with the exception of the initial allocations ofc) unsafe fn get_dirty_rects(&self) -> Vec<DirtyRect> { // Default rectangle buffer size (comes out to 2KB) const RECT_BUF_LEN: usize = 16; // Maximum rectangle buffer size (comes out to ~1MB) const RECT_BUF_MAX_LEN: usize = 7000 - RECT_BUF_LEN; let mut dirty = vec![RECT::default(); RECT_BUF_LEN]; let mut dirty_len = 0; let _ = self.duplication.GetFrameDirtyRects( dirty.len() as _, dirty.as_mut_ptr(), &mut dirty_len, ); let more = (dirty_len as usize).saturating_sub(dirty.len()); let more = min(RECT_BUF_MAX_LEN, more); // `RECT_LEN` rectangles is not enough, try extending dirty if more > 0 { dirty.extend_from_slice(&vec![RECT::default(); more]); let _ = self.duplication.GetFrameDirtyRects( dirty.len() as _, dirty.as_mut_ptr(), &mut dirty_len, ); } // I would _love_ if rust/llvm would optimize this away into a transparent type rather // than looping over a structure and mapping it into a structure that looks exactly the // same. I know Quartz, x11, and Wayland will have different definitions so we need a // generic type that will handle this and I _really_ don't want to add another nested // type definition to the trait tree for [`Frame`]. 
dirty .into_iter() .take(dirty_len as usize) .map(|rect| DirtyRect::new(rect.top, rect.right, rect.bottom, rect.left)) .collect() } unsafe fn get_moved_rects(&self) -> Vec<MovedRect> { // Default rectangle buffer size (comes out to 2KB) const RECT_BUF_LEN: usize = 16; // Maximum rectangle buffer size (comes out to ~1MB) const RECT_BUF_MAX_LEN: usize = 7000 - RECT_BUF_LEN; let mut moved = vec![DXGI_OUTDUPL_MOVE_RECT::default(); RECT_BUF_LEN]; let mut moved_len = 0; let _ = self.duplication.GetFrameMoveRects( moved.len() as _, moved.as_mut_ptr(), &mut moved_len, ); let more = (moved_len as usize).saturating_sub(moved.len()); let more = min(RECT_BUF_MAX_LEN, more); // `RECT_LEN` rectangles is not enough, try extending dirty if more > 0 { moved.extend_from_slice(&vec![DXGI_OUTDUPL_MOVE_RECT::default(); more]); let _ = self.duplication.GetFrameMoveRects( moved.len() as _, moved.as_mut_ptr(), &mut moved_len, ); } // I would _love_ if rust/llvm would optimize this away into a transparent type rather // than looping over a structure and mapping it into a structure that looks exactly the // same. I know Quartz, x11, and Wayland will have different definitions so we need a // generic type that will handle this and I _really_ don't want to add another nested // type definition to the trait tree for [`Frame`]. 
moved .into_iter() .take(moved_len as usize) .map(|moved| { MovedRect::new( DirtyRect::new( moved.DestinationRect.top, moved.DestinationRect.right, moved.DestinationRect.bottom, moved.DestinationRect.left, ), MovedPoint::new(moved.SourcePoint.x, moved.SourcePoint.y), ) }) .collect() } } impl<'frame> Frame<'frame> for DxgiFrame<'frame> { fn dirty(&self) -> Vec<DirtyRect> { self.dirty() } fn moved(&self) -> Vec<MovedRect> { self.moved() } fn format(&self) -> FrameFormat { self.format() } fn as_bytes(&self) -> anyhow::Result<Cow<'frame, [u8]>> { self.as_bytes() } } #[derive(Debug, Clone)] pub enum DxgiFrameData<'frame> { Memory(&'frame [u8]), DirectX(Dx11FrameData<'frame>), } impl<'frame> From<&'frame [u8]> for DxgiFrameData<'frame> { fn from(data: &'frame [u8]) -> Self { Self::Memory(data) } } impl<'frame> From<Dx11FrameData<'frame>> for DxgiFrameData<'frame> { fn from(data: Dx11FrameData<'frame>) -> Self { Self::DirectX(data) } }
true
d98082c69c63d37b5d127b1c1a47027db5c8d3c6
Rust
Anode194/rss_gen
/src/input.rs
UTF-8
4,586
2.796875
3
[]
no_license
extern crate nom; use crate::config::Post; use crate::config::ConfigData; use crate::config::read_config; use std::vec::Vec; use nom::{ bytes::complete::is_not, bytes::complete::tag, bytes::complete::take_until, // branch::Alt, needed for parsing multiple h tags for finding the heading. later sequence::delimited, IResult, }; use std::fs; use std::io; use std::path::Path; //pub fn parse input_file() -> { //} pub fn parse_title(input: &str) -> IResult<&str, &str> { delimited(tag("<h2>"), is_not("<"), tag("</h2>"))(input) } // TODO: need to implement a combinator to search for any h tag not just h2. priority: hi; pub fn strip_till_title(input: &str) -> IResult<&str, &str> { take_until("<h2>")(input) } #[allow(dead_code)] pub fn strip_prefix(input: &str) -> IResult<&str, &str> { take_until("<p>")(input) } pub fn strip_suffix(input: &str) -> IResult<&str, &str> { take_until("HTML;")(input) } #[allow(dead_code)] pub fn parse_body(input: &str) -> IResult<&str, &str> { delimited(tag("<<<HTML"), is_not("1>"), tag("</h"))(input) } pub fn enclose(content: &str, tag_type: &str) -> String { let mut tag = String::from("<"); let end_tag_start = "</"; let tag_end = ">"; tag.push_str(tag_type); tag.push_str(tag_end); tag.push_str(content); tag.push_str(end_tag_start); tag.push_str(tag_type); tag.push_str(tag_end); tag } //for description being on their own lines as well as comments etc. 
pub fn enclose_nl(content: &str, tag_type: &str) -> String { let mut tag = String::from("<"); let end_tag_start = "\n</"; let tag_end = ">"; let tag_end_nl = ">\n"; tag.push_str(tag_type); tag.push_str(tag_end_nl); tag.push_str(content); tag.push_str(end_tag_start); tag.push_str(tag_type); tag.push_str(tag_end); tag } pub fn post_dir(dir: &Path) -> Vec<Post> { //TODO: create test let mut post_vec = Vec::new(); if dir.is_dir() { for entry in fs::read_dir(dir) { for files in entry { let file = files.unwrap(); let path = file.path(); if !path.is_dir() { let mut contents = fs::read_to_string(&path).expect("Something went wrong opening the file."); let header_garbage = match strip_till_title(contents.as_mut_str()) { // should return tuple of .0=header_garbagef.1 rest of the contents of the file. Ok(i) => i, Err(e) => panic!("failed to parse header garbage \n{:?}", e), }; let header = match parse_title(header_garbage.0) { Ok(i) => i, Err(e) => panic!("parse blogpost title failed \n{:?}", e), }; let title = header.1; // saved for when we need to put it into the file at the end. let body = match strip_suffix(header.0) { Ok(i) => i.1, Err(e) => panic!("failed to strip prefix from body. 
\n{:?}", e), }; let body = &body.replace("\n", " ").to_string(); let mut conf = read_config(); let link = path.file_name().unwrap(); let post = Post::new(title, conf.language.as_mut_str(), body, "", link.to_str().unwrap()); post_vec.push(post); } else { continue; } } } } else { panic!("file passed in is not a directory, please provide a directory for the -d flag"); } post_vec } #[cfg(test)] mod tests { use super::*; #[test] fn title_string() { let test_title = enclose("title", "Description"); assert_eq!(test_title, "<Description>title</Description>".to_string()); } #[test] fn body_string() { let body = enclose("some blog", "title"); assert_eq!(body, "<title>some blog</title>"); } #[test] fn title_parse() { let input_str = "<h2>header</h2>"; let parse_title = parse_title(input_str); assert_eq!(parse_title, Ok(("", "header"))); } #[test] fn strip_pre() { let input_str = "asdfasdf<p>f"; let parse_pre = strip_prefix(input_str); assert_eq!(parse_pre, Ok(("<p>f", "asdfasdf"))); } #[test] fn strip_suff() { let input_str = "asdfasdfHTML;f"; let stripped = strip_suffix(input_str); assert_eq!(stripped, Ok(("HTML;f", "asdfasdf"))); } }
true
ea55e54fded8a63210696f0b36e05f56f12f5087
Rust
Botcoins/watchdog
/src/processes.rs
UTF-8
2,513
2.8125
3
[ "MIT" ]
permissive
use config::WatchdogCfg; use std::fs; use std::process::{Child, Command}; use std::sync::mpsc::Receiver; use std::thread; use std::time::Duration; pub struct WatchedChild { cfg: WatchdogCfg, exe_path: String, child: Child, } impl WatchedChild { pub fn spawn(cfg: WatchdogCfg, rebuild: bool) -> Result<WatchedChild, &'static str> { let exe_path = WatchedChild::gen_exe(&cfg, rebuild)?; let child = WatchedChild::spawn_child(&exe_path, &cfg); Ok(WatchedChild { cfg, exe_path, child }) } fn spawn_child(exe_path: &str, cfg: &WatchdogCfg) -> Child { let mut cmd = Command::new(&exe_path); for (k, v) in &cfg.env { cmd.env(k, v); } cmd.current_dir(&cfg.dir); cmd.spawn().expect("failed to spawn child") } pub fn gen_exe(cfg: &WatchdogCfg, rebuild: bool) -> Result<String, &'static str> { if rebuild { let _ = Command::new("git") .arg("pull") .current_dir(&cfg.dir) .spawn() .expect("failed to pull updates with git") .wait(); if cfg.test_on_redeploy { if 0 != Command::new("cargo") .arg("test") .current_dir(&cfg.dir) .spawn() .expect("failed to test with cargo") .wait() .unwrap() .code() .unwrap_or(-1) { return Err("Test for build failed..."); } } let exit_code = Command::new("cargo") .arg("build") .current_dir(&cfg.dir) .spawn() .expect("failed to build with cargo") .wait() .unwrap() .code() .unwrap_or(-1); if exit_code == 0 { return Err("Cannot build executable"); } } WatchedChild::cleanup(&cfg); let origin = format!("{}/target/debug/{}", cfg.dir, cfg.dir); let mut exe_path = String::new(); for discrim in 0..8 { let dest = format!("exe/{}-{}", cfg.dir, discrim); if fs::copy(&origin, &dest).is_ok() { exe_path = dest; break; } } if exe_path.len() == 0 { return Err("Failed to copy process"); } Ok(exe_path) } pub fn autorestart(&mut self, rx: Receiver<bool>) { loop { if let Ok(status) = self.child.try_wait() { break status; } else if rx.try_recv().is_ok() { self.child = WatchedChild::spawn_child(&self.exe_path, &self.cfg); } else { thread::sleep(Duration::from_secs(5)); continue; } }; } 
pub fn cleanup(cfg: &WatchdogCfg) { if let Ok(read) = fs::read_dir("exe/") { for entry in read { if let Ok(entry) = entry { if entry.file_name().to_str().unwrap().starts_with(&cfg.dir) { let _ = fs::remove_file(entry.path()); } } } } } }
true
6fb85b748f7646d9fbf95ddba28942bcfc34e402
Rust
laanwj/k210-pac
/src/sysctl/reset_status.rs
UTF-8
9,158
2.65625
3
[ "ISC" ]
permissive
#[doc = r" Value read from the register"] pub struct R { bits: u32, } #[doc = r" Value to write to the register"] pub struct W { bits: u32, } impl super::RESET_STATUS { #[doc = r" Modifies the contents of the register"] #[inline] pub fn modify<F>(&self, f: F) where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W, { let bits = self.register.get(); let r = R { bits: bits }; let mut w = W { bits: bits }; f(&r, &mut w); self.register.set(w.bits); } #[doc = r" Reads the contents of the register"] #[inline] pub fn read(&self) -> R { R { bits: self.register.get(), } } #[doc = r" Writes to the register"] #[inline] pub fn write<F>(&self, f: F) where F: FnOnce(&mut W) -> &mut W, { let mut w = W::reset_value(); f(&mut w); self.register.set(w.bits); } #[doc = r" Writes the reset value to the register"] #[inline] pub fn reset(&self) { self.write(|w| w) } } #[doc = r" Value of the field"] pub struct RESET_STS_CLRR { bits: bool, } impl RESET_STS_CLRR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct PIN_RESET_STSR { bits: bool, } impl PIN_RESET_STSR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct WDT0_RESET_STSR { bits: bool, } impl WDT0_RESET_STSR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" 
Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct WDT1_RESET_STSR { bits: bool, } impl WDT1_RESET_STSR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct SOFT_RESET_STSR { bits: bool, } impl SOFT_RESET_STSR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Proxy"] pub struct _RESET_STS_CLRW<'a> { w: &'a mut W, } impl<'a> _RESET_STS_CLRW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 0; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _PIN_RESET_STSW<'a> { w: &'a mut W, } impl<'a> _PIN_RESET_STSW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 1; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub 
struct _WDT0_RESET_STSW<'a> { w: &'a mut W, } impl<'a> _WDT0_RESET_STSW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 2; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _WDT1_RESET_STSW<'a> { w: &'a mut W, } impl<'a> _WDT1_RESET_STSW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 3; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _SOFT_RESET_STSW<'a> { w: &'a mut W, } impl<'a> _SOFT_RESET_STSW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 4; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } impl R { #[doc = r" Value of the register as raw bits"] #[inline] pub fn bits(&self) -> u32 { self.bits } #[doc = "Bit 0"] #[inline] pub fn reset_sts_clr(&self) -> RESET_STS_CLRR { let bits = { const MASK: bool = true; const OFFSET: u8 = 0; ((self.bits >> OFFSET) & MASK as u32) != 0 }; RESET_STS_CLRR { bits } } #[doc = "Bit 1"] #[inline] pub fn pin_reset_sts(&self) -> PIN_RESET_STSR { let bits = { const MASK: bool = true; const OFFSET: u8 = 1; 
((self.bits >> OFFSET) & MASK as u32) != 0 }; PIN_RESET_STSR { bits } } #[doc = "Bit 2"] #[inline] pub fn wdt0_reset_sts(&self) -> WDT0_RESET_STSR { let bits = { const MASK: bool = true; const OFFSET: u8 = 2; ((self.bits >> OFFSET) & MASK as u32) != 0 }; WDT0_RESET_STSR { bits } } #[doc = "Bit 3"] #[inline] pub fn wdt1_reset_sts(&self) -> WDT1_RESET_STSR { let bits = { const MASK: bool = true; const OFFSET: u8 = 3; ((self.bits >> OFFSET) & MASK as u32) != 0 }; WDT1_RESET_STSR { bits } } #[doc = "Bit 4"] #[inline] pub fn soft_reset_sts(&self) -> SOFT_RESET_STSR { let bits = { const MASK: bool = true; const OFFSET: u8 = 4; ((self.bits >> OFFSET) & MASK as u32) != 0 }; SOFT_RESET_STSR { bits } } } impl W { #[doc = r" Reset value of the register"] #[inline] pub fn reset_value() -> W { W { bits: 0 } } #[doc = r" Writes raw bits to the register"] #[inline] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } #[doc = "Bit 0"] #[inline] pub fn reset_sts_clr(&mut self) -> _RESET_STS_CLRW { _RESET_STS_CLRW { w: self } } #[doc = "Bit 1"] #[inline] pub fn pin_reset_sts(&mut self) -> _PIN_RESET_STSW { _PIN_RESET_STSW { w: self } } #[doc = "Bit 2"] #[inline] pub fn wdt0_reset_sts(&mut self) -> _WDT0_RESET_STSW { _WDT0_RESET_STSW { w: self } } #[doc = "Bit 3"] #[inline] pub fn wdt1_reset_sts(&mut self) -> _WDT1_RESET_STSW { _WDT1_RESET_STSW { w: self } } #[doc = "Bit 4"] #[inline] pub fn soft_reset_sts(&mut self) -> _SOFT_RESET_STSW { _SOFT_RESET_STSW { w: self } } }
true
584c6f819c3b6a31cb93f4675a172211607eabf8
Rust
Jackson93150/RetroRunner-
/src/tir.rs
UTF-8
1,109
3.46875
3
[]
no_license
use ggez::nalgebra::Point2; use ggez::nalgebra::Vector2; use ggez::{Context, GameResult}; pub struct Tir { pub position: Vector2<f32>, pub state: bool, //point de vie pub movement_right: f32, } impl Tir { pub fn new(_context: &mut Context) -> GameResult<Self> { let position = Vector2::new(250.0, 250.0); // position de la barre de vie let state = false; // point de vie let movement_right = 22.0; Ok(Tir { position, state, movement_right, }) } pub fn location(&self) -> Point2<f32> { Point2::new(self.position.x, self.position.y) } pub fn movement(&mut self) { // si le tir est en etat true il va ller en ligne droite a la vitesse défini if self.state == true { if self.position.x < 1450.0 { self.position.x += self.movement_right; } } } pub fn respawn(&mut self) { // si le tir sort de l'écran son état passe a false if self.position.x > 1450.0 { self.state = false; } } }
true
b3130fe87e9500aeee0a2e63ae0d888c5695aa32
Rust
cloudera/hue
/tools/ace-editor/demo/kitchen-sink/docs/rust.rs
UTF-8
495
3.1875
3
[ "CC-BY-3.0", "LicenseRef-scancode-other-copyleft", "LicenseRef-scancode-unknown-license-reference", "ZPL-2.0", "Unlicense", "LGPL-3.0-only", "CC0-1.0", "LicenseRef-scancode-other-permissive", "CNRI-Python", "LicenseRef-scancode-warranty-disclaimer", "GPL-2.0-or-later", "Python-2.0", "GPL-3.0-only", "CC-BY-4.0", "LicenseRef-scancode-jpython-1.1", "AFL-2.1", "JSON", "WTFPL", "MIT", "LicenseRef-scancode-generic-exception", "LicenseRef-scancode-jython", "GPL-3.0-or-later", "LicenseRef-scancode-python-cwi", "BSD-3-Clause", "LGPL-3.0-or-later", "Zlib", "LicenseRef-scancode-free-unknown", "Classpath-exception-2.0", "LicenseRef-scancode-proprietary-license", "GPL-1.0-or-later", "LGPL-2.0-or-later", "MPL-2.0", "ISC", "GPL-2.0-only", "ZPL-2.1", "BSL-1.0", "Apache-2.0", "LGPL-2.0-only", "LicenseRef-scancode-public-domain", "Xnet", "BSD-2-Clause" ]
permissive
use core::rand::RngUtil; fn main() { for ["Alice", "Bob", "Carol"].each |&name| { do spawn { let v = rand::Rng().shuffle([1, 2, 3]); for v.each |&num| { print(fmt!("%s says: '%d'\n", name, num + 1)) } } } } fn map<T, U>(vector: &[T], function: &fn(v: &T) -> U) -> ~[U] { let mut accumulator = ~[]; for vec::each(vector) |element| { accumulator.push(function(element)); } return accumulator; }
true
1a8dfbdc2fef1f25f96772b51af50ed91ebeb0e3
Rust
stackcats/leetcode
/algorithms/easy/search_insert_position.rs
UTF-8
522
3.203125
3
[ "MIT" ]
permissive
// https://leetcode.com/problems/search-insert-position/ impl Solution { pub fn search_insert(nums: Vec<i32>, target: i32) -> i32 { let mut i = 0; let mut j = nums.len() as i32 - 1; while i <= j { let mid = i + (j - i) / 2; if nums[mid as usize] == target { return mid; } if nums[mid as usize] < target { i = mid + 1; } else { j = mid - 1; } } i } }
true
3b960ea8237af40e650c42a9deea92c4bbf58163
Rust
serenity-rs/serenity
/src/framework/standard/structures/check.rs
UTF-8
2,732
3.3125
3
[ "ISC" ]
permissive
use std::error::Error; use std::fmt; use futures::future::BoxFuture; use crate::client::Context; use crate::framework::standard::{Args, CommandOptions}; use crate::model::channel::Message; /// This type describes why a check has failed. /// /// **Note**: /// The bot-developer is supposed to process this `enum` as the framework is not. /// It solely serves as a way to inform a user about why a check /// has failed and for the developer to log given failure (e.g. bugs or statistics) /// occurring in [`Check`]s. #[derive(Clone, Debug)] #[non_exhaustive] pub enum Reason { /// No information on the failure. Unknown, /// Information dedicated to the user. User(String), /// Information purely for logging purposes. Log(String), /// Information for the user but also for logging purposes. UserAndLog { user: String, log: String }, } impl Error for Reason {} pub type CheckFunction = for<'fut> fn( &'fut Context, &'fut Message, &'fut mut Args, &'fut CommandOptions, ) -> BoxFuture<'fut, Result<(), Reason>>; /// A check can be part of a command or group and will be executed to /// determine whether a user is permitted to use related item. /// /// Additionally, a check may hold additional settings. pub struct Check { /// Name listed in help-system. pub name: &'static str, /// Function that will be executed. pub function: CheckFunction, /// Whether a check should be evaluated in the help-system. /// `false` will ignore check and won't fail execution. pub check_in_help: bool, /// Whether a check shall be listed in the help-system. /// `false` won't affect whether the check will be evaluated help, /// solely [`Self::check_in_help`] sets this. 
pub display_in_help: bool, } impl fmt::Debug for Check { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("Check") .field("name", &self.name) .field("function", &"<fn>") .field("check_in_help", &self.check_in_help) .field("display_in_help", &self.display_in_help) .finish() } } impl fmt::Display for Reason { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Self::Unknown => f.write_str("Unknown"), Self::User(reason) => write!(f, "User {}", reason), Self::Log(reason) => write!(f, "Log {}", reason), Self::UserAndLog { user, log, } => { write!(f, "UserAndLog {{user: {}, log: {}}}", user, log) }, } } } impl PartialEq for Check { fn eq(&self, other: &Self) -> bool { self.name == other.name } }
true
ae15b5cf95c6890c6bf1defcc985407b95b24bc2
Rust
SpectralSequences/sseq
/ext/crates/fp/src/matrix/subspace.rs
UTF-8
6,447
3.265625
3
[ "Apache-2.0", "MIT" ]
permissive
use super::Matrix; use crate::prime::ValidPrime; use crate::vector::{FpVector, Slice, SliceMut}; use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt}; use std::io::{Read, Write}; /// A subspace of a vector space. /// /// In general, a method is defined on the [`Subspace`] if it is a meaningful property of the /// subspace itself. Otherwise, users are expected to access the matrix object directly. When the /// user directly modifies the matrix, they are expected to ensure the matrix is row reduced after /// the operations conclude. /// /// # Fields /// * `matrix` - A matrix in reduced row echelon, whose number of columns is the dimension of the /// ambient space and each row is a basis vector of the subspace. #[derive(Debug, Clone, PartialEq, Eq)] #[repr(transparent)] pub struct Subspace { pub matrix: Matrix, } impl Subspace { pub fn new(p: ValidPrime, rows: usize, columns: usize) -> Self { let mut matrix = Matrix::new(p, rows, columns); matrix.initialize_pivots(); Self { matrix } } pub fn prime(&self) -> ValidPrime { self.matrix.prime() } pub fn pivots(&self) -> &[isize] { self.matrix.pivots() } pub fn from_bytes(p: ValidPrime, data: &mut impl Read) -> std::io::Result<Self> { let rows = data.read_u64::<LittleEndian>()? as usize; let ambient_dimension = data.read_u64::<LittleEndian>()? 
as usize; let mut matrix = Matrix::from_bytes(p, rows, ambient_dimension, data)?; matrix.pivots = Matrix::read_pivot(matrix.columns(), data)?; Ok(Self { matrix }) } pub fn to_bytes(&self, buffer: &mut impl Write) -> std::io::Result<()> { buffer.write_u64::<LittleEndian>(self.matrix.rows() as u64)?; buffer.write_u64::<LittleEndian>(self.ambient_dimension() as u64)?; self.matrix.to_bytes(buffer)?; Matrix::write_pivot(self.pivots(), buffer) } pub fn empty_space(p: ValidPrime, dim: usize) -> Self { Self::new(p, 0, dim) } pub fn entire_space(p: ValidPrime, dim: usize) -> Self { let mut result = Self::new(p, dim, dim); for i in 0..dim { result.matrix.row_mut(i).set_entry(i, 1); result.matrix.pivots_mut()[i] = i as isize; } result } /// This adds a vector to the subspace. This function assumes that the last row of the /// matrix is zero, i.e. the dimension of the current subspace is strictly less than the number /// of rows. This can be achieved by setting the number of rows to be the dimension plus one /// when creating the subspace. /// /// # Returns /// The new dimension of the subspace pub fn add_vector(&mut self, row: Slice) -> usize { let last_row = self.matrix.rows() - 1; self.matrix.row_mut(last_row).assign(row); self.matrix.row_reduce() } /// This adds some rows to the subspace /// /// # Arguments /// - `rows`: A function that writes the row to be added to the given SliceMut. This returns /// `None` if it runs out of rows, `Some(())` otherwise. 
pub fn add_vectors(&mut self, mut rows: impl for<'a> FnMut(SliceMut<'a>) -> Option<()>) { let num_rows = self.matrix.rows(); 'outer: loop { let first_row = self.dimension(); if first_row == num_rows { return; } for i in first_row..num_rows { if rows(self.matrix.row_mut(i)).is_none() { break 'outer; } } self.matrix.row_reduce(); } self.matrix.row_reduce(); } pub fn add_basis_elements(&mut self, mut rows: impl std::iter::Iterator<Item = usize>) { self.add_vectors(|mut row| { row.set_entry(rows.next()?, 1); Some(()) }); } /// Projects a vector to a complement of the subspace. The complement is the set of vectors /// that have a 0 in every column where there is a pivot in `matrix` pub fn reduce(&self, mut vector: SliceMut) { assert_eq!(vector.as_slice().len(), self.ambient_dimension()); if self.matrix.rows() == 0 { return; } let p = self.prime(); let iter = self .pivots() .iter() .enumerate() .filter(|(_, x)| **x >= 0) .map(|(col, _)| col) .zip(self.iter()); for (col, row) in iter { let c = vector.as_slice().entry(col); if c != 0 { vector.add(row, *p - c); } } } pub fn contains(&self, vector: Slice) -> bool { let mut vector: FpVector = vector.to_owned(); self.reduce(vector.as_slice_mut()); vector.is_zero() } pub fn dimension(&self) -> usize { self.pivots() .iter() .rev() .find(|&&i| i >= 0) .map_or(0, |&i| i as usize + 1) } /// Whether the subspace is empty. This assumes the subspace is row reduced. pub fn is_empty(&self) -> bool { self.matrix.rows() == 0 || self.matrix[0].is_zero() } pub fn ambient_dimension(&self) -> usize { self.matrix.columns() } /// Returns a basis of the subspace. pub fn basis(&self) -> &[FpVector] { &self.matrix[..self.dimension()] } /// Sets the subspace to be the zero subspace. pub fn set_to_zero(&mut self) { self.matrix.set_to_zero(); for x in self.matrix.pivots_mut() { *x = -1; } } /// Sets the subspace to be the entire subspace. 
pub fn set_to_entire(&mut self) { self.matrix.set_to_zero(); for i in 0..self.matrix.columns() { self.matrix.row_mut(i).set_entry(i, 1); self.matrix.pivots_mut()[i] = i as isize; } } pub fn iter(&self) -> impl Iterator<Item = Slice> { self.matrix .iter() .map(FpVector::as_slice) .take(self.dimension()) } } impl std::fmt::Display for Subspace { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let dim = self.dimension(); for row in self.matrix.iter().take(dim) { if f.alternate() { writeln!(f, "{row:#}")?; } else { writeln!(f, "{row}")?; } } Ok(()) } }
true
a19ea48eb23b3fb569c16d468f021955875978e6
Rust
typst/typst
/crates/typst-library/src/text/mod.rs
UTF-8
27,757
3.328125
3
[ "Apache-2.0", "Bitstream-Vera", "CC-BY-4.0", "OFL-1.1", "LicenseRef-scancode-gust-font-1.0", "BSD-3-Clause", "LicenseRef-scancode-ubuntu-font-1.0", "0BSD", "LicenseRef-scancode-free-unknown", "LicenseRef-scancode-public-domain", "MIT", "LicenseRef-scancode-public-domain-disclaimer" ]
permissive
//! Text handling. mod deco; mod misc; mod quotes; mod raw; mod shaping; mod shift; pub use self::deco::*; pub use self::misc::*; pub use self::quotes::*; pub use self::raw::*; pub use self::shaping::*; pub use self::shift::*; use rustybuzz::Tag; use ttf_parser::Rect; use typst::font::{Font, FontStretch, FontStyle, FontWeight, VerticalFontMetric}; use crate::layout::ParElem; use crate::prelude::*; /// Hook up all text definitions. pub(super) fn define(global: &mut Scope) { global.define("text", TextElem::func()); global.define("linebreak", LinebreakElem::func()); global.define("smartquote", SmartQuoteElem::func()); global.define("strong", StrongElem::func()); global.define("emph", EmphElem::func()); global.define("lower", lower_func()); global.define("upper", upper_func()); global.define("smallcaps", smallcaps_func()); global.define("sub", SubElem::func()); global.define("super", SuperElem::func()); global.define("underline", UnderlineElem::func()); global.define("strike", StrikeElem::func()); global.define("overline", OverlineElem::func()); global.define("raw", RawElem::func()); global.define("lorem", lorem_func()); } /// Customizes the look and layout of text in a variety of ways. /// /// This function is used frequently, both with set rules and directly. While /// the set rule is often the simpler choice, calling the `text` function /// directly can be useful when passing text as an argument to another function. /// /// ## Example { #example } /// ```example /// #set text(18pt) /// With a set rule. /// /// #emph(text(blue)[ /// With a function call. /// ]) /// ``` /// /// Display: Text /// Category: text #[element(Construct, PlainText)] pub struct TextElem { /// A prioritized sequence of font families. /// /// When processing text, Typst tries all specified font families in order /// until it finds a font that has the necessary glyphs. 
In the example /// below, the font `Inria Serif` is preferred, but since it does not /// contain Arabic glyphs, the arabic text uses `Noto Sans Arabic` instead. /// /// ```example /// #set text(font: ( /// "Inria Serif", /// "Noto Sans Arabic", /// )) /// /// This is Latin. \ /// هذا عربي. /// /// ``` #[default(FontList(vec![FontFamily::new("Linux Libertine")]))] pub font: FontList, /// Whether to allow last resort font fallback when the primary font list /// contains no match. This lets Typst search through all available fonts /// for the most similar one that has the necessary glyphs. /// /// _Note:_ Currently, there are no warnings when fallback is disabled and /// no glyphs are found. Instead, your text shows up in the form of "tofus": /// Small boxes that indicate the lack of an appropriate glyph. In the /// future, you will be able to instruct Typst to issue warnings so you know /// something is up. /// /// ```example /// #set text(font: "Inria Serif") /// هذا عربي /// /// #set text(fallback: false) /// هذا عربي /// ``` #[default(true)] pub fallback: bool, /// The desired font style. /// /// When an italic style is requested and only an oblique one is available, /// it is used. Similarly, the other way around, an italic style can stand /// in for an oblique one. When neither an italic nor an oblique style is /// available, Typst selects the normal style. Since most fonts are only /// available either in an italic or oblique style, the difference between /// italic and oblique style is rarely observable. /// /// If you want to emphasize your text, you should do so using the /// [emph]($func/emph) function instead. This makes it easy to adapt the /// style later if you change your mind about how to signify the emphasis. /// /// ```example /// #text(font: "Linux Libertine", style: "italic")[Italic] /// #text(font: "DejaVu Sans", style: "oblique")[Oblique] /// ``` pub style: FontStyle, /// The desired thickness of the font's glyphs. 
Accepts an integer between /// `{100}` and `{900}` or one of the predefined weight names. When the /// desired weight is not available, Typst selects the font from the family /// that is closest in weight. /// /// If you want to strongly emphasize your text, you should do so using the /// [strong]($func/strong) function instead. This makes it easy to adapt the /// style later if you change your mind about how to signify the strong /// emphasis. /// /// ```example /// #set text(font: "IBM Plex Sans") /// /// #text(weight: "light")[Light] \ /// #text(weight: "regular")[Regular] \ /// #text(weight: "medium")[Medium] \ /// #text(weight: 500)[Medium] \ /// #text(weight: "bold")[Bold] /// ``` pub weight: FontWeight, /// The desired width of the glyphs. Accepts a ratio between `{50%}` and /// `{200%}`. When the desired width is not available, Typst selects the /// font from the family that is closest in stretch. This will only stretch /// the text if a condensed or expanded version of the font is available. /// /// If you want to adjust the amount of space between characters instead of /// stretching the glyphs itself, use the [`tracking`]($func/text.tracking) /// property instead. /// /// ```example /// #text(stretch: 75%)[Condensed] \ /// #text(stretch: 100%)[Normal] /// ``` pub stretch: FontStretch, /// The size of the glyphs. This value forms the basis of the `em` unit: /// `{1em}` is equivalent to the font size. /// /// You can also give the font size itself in `em` units. Then, it is /// relative to the previous font size. /// /// ```example /// #set text(size: 20pt) /// very #text(1.5em)[big] text /// ``` #[parse(args.named_or_find("size")?)] #[fold] #[default(Abs::pt(11.0))] pub size: TextSize, /// The glyph fill color. /// /// ```example /// #set text(fill: red) /// This text is red. /// ``` #[parse(args.named_or_find("fill")?)] #[default(Color::BLACK.into())] pub fill: Paint, /// The amount of space that should be added between characters. 
/// /// ```example /// #set text(tracking: 1.5pt) /// Distant text. /// ``` #[resolve] pub tracking: Length, /// The amount of space between words. /// /// Can be given as an absolute length, but also relative to the width of /// the space character in the font. /// /// If you want to adjust the amount of space between characters rather than /// words, use the [`tracking`]($func/text.tracking) property instead. /// /// ```example /// #set text(spacing: 200%) /// Text with distant words. /// ``` #[resolve] #[default(Rel::one())] pub spacing: Rel<Length>, /// An amount to shift the text baseline by. /// /// ```example /// A #text(baseline: 3pt)[lowered] /// word. /// ``` #[resolve] pub baseline: Length, /// Whether certain glyphs can hang over into the margin in justified text. /// This can make justification visually more pleasing. /// /// ```example /// #set par(justify: true) /// This justified text has a hyphen in /// the paragraph's first line. Hanging /// the hyphen slightly into the margin /// results in a clearer paragraph edge. /// /// #set text(overhang: false) /// This justified text has a hyphen in /// the paragraph's first line. Hanging /// the hyphen slightly into the margin /// results in a clearer paragraph edge. /// ``` #[default(true)] pub overhang: bool, /// The top end of the conceptual frame around the text used for layout and /// positioning. This affects the size of containers that hold text. /// /// ```example /// #set rect(inset: 0pt) /// #set text(size: 20pt) /// /// #set text(top-edge: "ascender") /// #rect(fill: aqua)[Typst] /// /// #set text(top-edge: "cap-height") /// #rect(fill: aqua)[Typst] /// ``` #[default(TopEdge::Metric(TopEdgeMetric::CapHeight))] pub top_edge: TopEdge, /// The bottom end of the conceptual frame around the text used for layout /// and positioning. This affects the size of containers that hold text. 
/// /// ```example /// #set rect(inset: 0pt) /// #set text(size: 20pt) /// /// #set text(bottom-edge: "baseline") /// #rect(fill: aqua)[Typst] /// /// #set text(bottom-edge: "descender") /// #rect(fill: aqua)[Typst] /// ``` #[default(BottomEdge::Metric(BottomEdgeMetric::Baseline))] pub bottom_edge: BottomEdge, /// An [ISO 639-1/2/3 language code.](https://en.wikipedia.org/wiki/ISO_639) /// /// Setting the correct language affects various parts of Typst: /// /// - The text processing pipeline can make more informed choices. /// - Hyphenation will use the correct patterns for the language. /// - [Smart quotes]($func/smartquote) turns into the correct quotes for the /// language. /// - And all other things which are language-aware. /// /// ```example /// #set text(lang: "de") /// #outline() /// /// = Einleitung /// In diesem Dokument, ... /// ``` #[default(Lang::ENGLISH)] pub lang: Lang, /// An [ISO 3166-1 alpha-2 region code.](https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2) /// /// This lets the text processing pipeline make more informed choices. pub region: Option<Region>, /// The OpenType writing script. /// /// The combination of `{lang}` and `{script}` determine how font features, /// such as glyph substitution, are implemented. Frequently the value is a /// modified (all-lowercase) ISO 15924 script identifier, and the `math` /// writing script is used for features appropriate for mathematical /// symbols. /// /// When set to `{auto}`, the default and recommended setting, an /// appropriate script is chosen for each block of characters sharing a /// common Unicode script property. 
/// /// ```example /// #set text( /// font: "Linux Libertine", /// size: 20pt, /// ) /// /// #let scedilla = [Ş] /// #scedilla // S with a cedilla /// /// #set text(lang: "ro", script: "latn") /// #scedilla // S with a subscript comma /// /// #set text(lang: "ro", script: "grek") /// #scedilla // S with a cedilla /// ``` pub script: Smart<WritingScript>, /// The dominant direction for text and inline objects. Possible values are: /// /// - `{auto}`: Automatically infer the direction from the `lang` property. /// - `{ltr}`: Layout text from left to right. /// - `{rtl}`: Layout text from right to left. /// /// When writing in right-to-left scripts like Arabic or Hebrew, you should /// set the [text language]($func/text.lang) or direction. While individual /// runs of text are automatically layouted in the correct direction, /// setting the dominant direction gives the bidirectional reordering /// algorithm the necessary information to correctly place punctuation and /// inline objects. Furthermore, setting the direction affects the alignment /// values `start` and `end`, which are equivalent to `left` and `right` in /// `ltr` text and the other way around in `rtl` text. /// /// If you set this to `rtl` and experience bugs or in some way bad looking /// output, please do get in touch with us through the /// [contact form](https://typst.app/contact) or our /// [Discord server]($community/#discord)! /// /// ```example /// #set text(dir: rtl) /// هذا عربي. /// ``` #[resolve] pub dir: TextDir, /// Whether to hyphenate text to improve line breaking. When `{auto}`, text /// will be hyphenated if and only if justification is enabled. /// /// Setting the [text language]($func/text.lang) ensures that the correct /// hyphenation patterns are used. /// /// ```example /// #set page(width: 200pt) /// /// #set par(justify: true) /// This text illustrates how /// enabling hyphenation can /// improve justification. 
/// /// #set text(hyphenate: false) /// This text illustrates how /// enabling hyphenation can /// improve justification. /// ``` #[resolve] pub hyphenate: Hyphenate, /// Whether to apply kerning. /// /// When enabled, specific letter pairings move closer together or further /// apart for a more visually pleasing result. The example below /// demonstrates how decreasing the gap between the "T" and "o" results in a /// more natural look. Setting this to `{false}` disables kerning by turning /// off the OpenType `kern` font feature. /// /// ```example /// #set text(size: 25pt) /// Totally /// /// #set text(kerning: false) /// Totally /// ``` #[default(true)] pub kerning: bool, /// Whether to apply stylistic alternates. /// /// Sometimes fonts contain alternative glyphs for the same codepoint. /// Setting this to `{true}` switches to these by enabling the OpenType /// `salt` font feature. /// /// ```example /// #set text( /// font: "IBM Plex Sans", /// size: 20pt, /// ) /// /// 0, a, g, ß /// /// #set text(alternates: true) /// 0, a, g, ß /// ``` #[default(false)] pub alternates: bool, /// Which stylistic set to apply. Font designers can categorize alternative /// glyphs forms into stylistic sets. As this value is highly font-specific, /// you need to consult your font to know which sets are available. When set /// to an integer between `{1}` and `{20}`, enables the corresponding /// OpenType font feature from `ss01`, ..., `ss20`. pub stylistic_set: Option<StylisticSet>, /// Whether standard ligatures are active. /// /// Certain letter combinations like "fi" are often displayed as a single /// merged glyph called a _ligature._ Setting this to `{false}` disables /// these ligatures by turning off the OpenType `liga` and `clig` font /// features. /// /// ```example /// #set text(size: 20pt) /// A fine ligature. /// /// #set text(ligatures: false) /// A fine ligature. 
/// ``` #[default(true)] pub ligatures: bool, /// Whether ligatures that should be used sparingly are active. Setting this /// to `{true}` enables the OpenType `dlig` font feature. #[default(false)] pub discretionary_ligatures: bool, /// Whether historical ligatures are active. Setting this to `{true}` /// enables the OpenType `hlig` font feature. #[default(false)] pub historical_ligatures: bool, /// Which kind of numbers / figures to select. When set to `{auto}`, the /// default numbers for the font are used. /// /// ```example /// #set text(font: "Noto Sans", 20pt) /// #set text(number-type: "lining") /// Number 9. /// /// #set text(number-type: "old-style") /// Number 9. /// ``` pub number_type: Smart<NumberType>, /// The width of numbers / figures. When set to `{auto}`, the default /// numbers for the font are used. /// /// ```example /// #set text(font: "Noto Sans", 20pt) /// #set text(number-width: "proportional") /// A 12 B 34. \ /// A 56 B 78. /// /// #set text(number-width: "tabular") /// A 12 B 34. \ /// A 56 B 78. /// ``` pub number_width: Smart<NumberWidth>, /// Whether to have a slash through the zero glyph. Setting this to `{true}` /// enables the OpenType `zero` font feature. /// /// ```example /// 0, #text(slashed-zero: true)[0] /// ``` #[default(false)] pub slashed_zero: bool, /// Whether to turn numbers into fractions. Setting this to `{true}` /// enables the OpenType `frac` font feature. /// /// It is not advisable to enable this property globally as it will mess /// with all appearances of numbers after a slash (e.g., in URLs). Instead, /// enable it locally when you want a fraction. /// /// ```example /// 1/2 \ /// #text(fractions: true)[1/2] /// ``` #[default(false)] pub fractions: bool, /// Raw OpenType features to apply. /// /// - If given an array of strings, sets the features identified by the /// strings to `{1}`. /// - If given a dictionary mapping to numbers, sets the features /// identified by the keys to the values. 
/// /// ```example /// // Enable the `frac` feature manually. /// #set text(features: ("frac",)) /// 1/2 /// ``` #[fold] pub features: FontFeatures, /// Content in which all text is styled according to the other arguments. #[external] #[required] pub body: Content, /// The text. #[internal] #[required] pub text: EcoString, /// A delta to apply on the font weight. #[internal] #[fold] pub delta: Delta, /// Whether the font style should be inverted. #[internal] #[fold] #[default(false)] pub emph: Toggle, /// Decorative lines. #[internal] #[fold] pub deco: Decoration, /// A case transformation that should be applied to the text. #[internal] pub case: Option<Case>, /// Whether small capital glyphs should be used. ("smcp") #[internal] #[default(false)] pub smallcaps: bool, } impl TextElem { /// Create a new packed text element. pub fn packed(text: impl Into<EcoString>) -> Content { Self::new(text.into()).pack() } } impl Construct for TextElem { fn construct(vm: &mut Vm, args: &mut Args) -> SourceResult<Content> { // The text constructor is special: It doesn't create a text element. // Instead, it leaves the passed argument structurally unchanged, but // styles all text in it. let styles = Self::set(vm, args)?; let body = args.expect::<Content>("body")?; Ok(body.styled_with_map(styles)) } } impl PlainText for TextElem { fn plain_text(&self, text: &mut EcoString) { text.push_str(&self.text()); } } /// A lowercased font family like "arial". #[derive(Clone, Eq, PartialEq, Hash)] pub struct FontFamily(EcoString); impl FontFamily { /// Create a named font family variant. pub fn new(string: &str) -> Self { Self(string.to_lowercase().into()) } /// The lowercased family name. pub fn as_str(&self) -> &str { &self.0 } } impl Debug for FontFamily { fn fmt(&self, f: &mut Formatter) -> fmt::Result { self.0.fmt(f) } } cast! { FontFamily, self => self.0.into_value(), string: EcoString => Self::new(&string), } /// Font family fallback list. 
#[derive(Debug, Default, Clone, Eq, PartialEq, Hash)] pub struct FontList(pub Vec<FontFamily>); impl IntoIterator for FontList { type IntoIter = std::vec::IntoIter<FontFamily>; type Item = FontFamily; fn into_iter(self) -> Self::IntoIter { self.0.into_iter() } } cast! { FontList, self => if self.0.len() == 1 { self.0.into_iter().next().unwrap().0.into_value() } else { self.0.into_value() }, family: FontFamily => Self(vec![family]), values: Array => Self(values.into_iter().map(|v| v.cast()).collect::<StrResult<_>>()?), } /// The size of text. #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)] pub struct TextSize(pub Length); impl Fold for TextSize { type Output = Abs; fn fold(self, outer: Self::Output) -> Self::Output { self.0.em.at(outer) + self.0.abs } } cast! { TextSize, self => self.0.into_value(), v: Length => Self(v), } /// Specifies the top edge of text. #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)] pub enum TopEdge { /// An edge specified via font metrics or bounding box. Metric(TopEdgeMetric), /// An edge specified as a length. Length(Length), } impl TopEdge { /// Determine if the edge is specified from bounding box info. pub fn is_bounds(&self) -> bool { matches!(self, Self::Metric(TopEdgeMetric::Bounds)) } /// Resolve the value of the text edge given a font's metrics. pub fn resolve(self, styles: StyleChain, font: &Font, bbox: Option<Rect>) -> Abs { match self { TopEdge::Metric(metric) => { if let Ok(metric) = metric.try_into() { font.metrics().vertical(metric).resolve(styles) } else { bbox.map(|bbox| (font.to_em(bbox.y_max)).resolve(styles)) .unwrap_or_default() } } TopEdge::Length(length) => length.resolve(styles), } } } cast! { TopEdge, self => match self { Self::Metric(metric) => metric.into_value(), Self::Length(length) => length.into_value(), }, v: TopEdgeMetric => Self::Metric(v), v: Length => Self::Length(v), } /// Metrics that describe the top edge of text. 
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, Cast)] pub enum TopEdgeMetric { /// The font's ascender, which typically exceeds the height of all glyphs. Ascender, /// The approximate height of uppercase letters. CapHeight, /// The approximate height of non-ascending lowercase letters. XHeight, /// The baseline on which the letters rest. Baseline, /// The top edge of the glyph's bounding box. Bounds, } impl TryInto<VerticalFontMetric> for TopEdgeMetric { type Error = (); fn try_into(self) -> Result<VerticalFontMetric, Self::Error> { match self { Self::Ascender => Ok(VerticalFontMetric::Ascender), Self::CapHeight => Ok(VerticalFontMetric::CapHeight), Self::XHeight => Ok(VerticalFontMetric::XHeight), Self::Baseline => Ok(VerticalFontMetric::Baseline), _ => Err(()), } } } /// Specifies the top edge of text. #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)] pub enum BottomEdge { /// An edge specified via font metrics or bounding box. Metric(BottomEdgeMetric), /// An edge specified as a length. Length(Length), } impl BottomEdge { /// Determine if the edge is specified from bounding box info. pub fn is_bounds(&self) -> bool { matches!(self, Self::Metric(BottomEdgeMetric::Bounds)) } /// Resolve the value of the text edge given a font's metrics. pub fn resolve(self, styles: StyleChain, font: &Font, bbox: Option<Rect>) -> Abs { match self { BottomEdge::Metric(metric) => { if let Ok(metric) = metric.try_into() { font.metrics().vertical(metric).resolve(styles) } else { bbox.map(|bbox| (font.to_em(bbox.y_min)).resolve(styles)) .unwrap_or_default() } } BottomEdge::Length(length) => length.resolve(styles), } } } cast! { BottomEdge, self => match self { Self::Metric(metric) => metric.into_value(), Self::Length(length) => length.into_value(), }, v: BottomEdgeMetric => Self::Metric(v), v: Length => Self::Length(v), } /// Metrics that describe the bottom edge of text. 
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, Cast)] pub enum BottomEdgeMetric { /// The baseline on which the letters rest. Baseline, /// The font's descender, which typically exceeds the depth of all glyphs. Descender, /// The bottom edge of the glyph's bounding box. Bounds, } impl TryInto<VerticalFontMetric> for BottomEdgeMetric { type Error = (); fn try_into(self) -> Result<VerticalFontMetric, Self::Error> { match self { Self::Baseline => Ok(VerticalFontMetric::Baseline), Self::Descender => Ok(VerticalFontMetric::Descender), _ => Err(()), } } } /// The direction of text and inline objects in their line. #[derive(Debug, Default, Copy, Clone, Eq, PartialEq, Hash)] pub struct TextDir(pub Smart<Dir>); cast! { TextDir, self => self.0.into_value(), v: Smart<Dir> => { if v.map_or(false, |dir| dir.axis() == Axis::Y) { bail!("text direction must be horizontal"); } Self(v) }, } impl Resolve for TextDir { type Output = Dir; fn resolve(self, styles: StyleChain) -> Self::Output { match self.0 { Smart::Auto => TextElem::lang_in(styles).dir(), Smart::Custom(dir) => dir, } } } /// Whether to hyphenate text. #[derive(Debug, Default, Copy, Clone, Eq, PartialEq, Hash)] pub struct Hyphenate(pub Smart<bool>); cast! { Hyphenate, self => self.0.into_value(), v: Smart<bool> => Self(v), } impl Resolve for Hyphenate { type Output = bool; fn resolve(self, styles: StyleChain) -> Self::Output { match self.0 { Smart::Auto => ParElem::justify_in(styles), Smart::Custom(v) => v, } } } /// A stylistic set in a font. #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)] pub struct StylisticSet(u8); impl StylisticSet { /// Create a new set, clamping to 1-20. pub fn new(index: u8) -> Self { Self(index.clamp(1, 20)) } /// Get the value, guaranteed to be 1-20. pub fn get(self) -> u8 { self.0 } } cast! 
{ StylisticSet, self => self.0.into_value(), v: i64 => match v { 1 ..= 20 => Self::new(v as u8), _ => bail!("stylistic set must be between 1 and 20"), }, } /// Which kind of numbers / figures to select. #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, Cast)] pub enum NumberType { /// Numbers that fit well with capital text (the OpenType `lnum` /// font feature). Lining, /// Numbers that fit well into a flow of upper- and lowercase text (the /// OpenType `onum` font feature). OldStyle, } /// The width of numbers / figures. #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, Cast)] pub enum NumberWidth { /// Numbers with glyph-specific widths (the OpenType `pnum` font feature). Proportional, /// Numbers of equal width (the OpenType `tnum` font feature). Tabular, } /// OpenType font features settings. #[derive(Debug, Default, Clone, Eq, PartialEq, Hash)] pub struct FontFeatures(pub Vec<(Tag, u32)>); cast! { FontFeatures, self => self.0 .into_iter() .map(|(tag, num)| { let bytes = tag.to_bytes(); let key = std::str::from_utf8(&bytes).unwrap_or_default(); (key.into(), num.into_value()) }) .collect::<Dict>() .into_value(), values: Array => Self(values .into_iter() .map(|v| { let tag = v.cast::<EcoString>()?; Ok((Tag::from_bytes_lossy(tag.as_bytes()), 1)) }) .collect::<StrResult<_>>()?), values: Dict => Self(values .into_iter() .map(|(k, v)| { let num = v.cast::<u32>()?; let tag = Tag::from_bytes_lossy(k.as_bytes()); Ok((tag, num)) }) .collect::<StrResult<_>>()?), } impl Fold for FontFeatures { type Output = Self; fn fold(mut self, outer: Self::Output) -> Self::Output { self.0.extend(outer.0); self } }
true
4bc4ec048d6a09043277f3e597ef950f3b018ed6
Rust
romac/cask
/src/cask.rs
UTF-8
22,421
2.65625
3
[ "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
use std::collections::hash_map::{Entry as HashMapEntry, Keys}; use std::collections::{BTreeSet, HashMap}; use std::default::Default; use std::path::PathBuf; use std::result::Result::Ok; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::{Arc, Mutex, RwLock}; use std::thread; use std::time::Duration; use std::vec::Vec; use time; use data::{Entry, Hint, SequenceNumber}; use errors::Result; use log::{Log, LogWrite}; use stats::Stats; use util::human_readable_byte_count; #[derive(Debug)] pub struct IndexEntry { pub file_id: u32, entry_pos: u64, pub entry_size: u64, sequence: SequenceNumber, } struct Index { map: HashMap<Vec<u8>, IndexEntry>, stats: Stats, } impl Index { fn new() -> Index { Index { map: HashMap::new(), stats: Stats::new(), } } fn get(&self, key: &[u8]) -> Option<&IndexEntry> { self.map.get(key) } fn insert(&mut self, key: Vec<u8>, index_entry: IndexEntry) -> Option<IndexEntry> { self.stats.add_entry(&index_entry); self.map.insert(key, index_entry).map(|entry| { self.stats.remove_entry(&entry); entry }) } fn remove(&mut self, key: &[u8]) -> Option<IndexEntry> { self.map.remove(key).map(|entry| { self.stats.remove_entry(&entry); entry }) } fn update(&mut self, hint: Hint, file_id: u32) { let index_entry = IndexEntry { file_id: file_id, entry_pos: hint.entry_pos, entry_size: hint.entry_size(), sequence: hint.sequence, }; match self.map.entry(hint.key.to_vec()) { HashMapEntry::Occupied(mut o) => { if o.get().sequence <= hint.sequence { self.stats.remove_entry(o.get()); if hint.deleted { o.remove(); } else { self.stats.add_entry(&index_entry); o.insert(index_entry); } } else { self.stats.add_entry(&index_entry); self.stats.remove_entry(&index_entry); } } HashMapEntry::Vacant(e) => { if !hint.deleted { self.stats.add_entry(&index_entry); e.insert(index_entry); } } } } pub fn keys(&self) -> Keys<Vec<u8>, IndexEntry> { self.map.keys() } } struct CaskInner { current_sequence: SequenceNumber, index: Index, log: Log, } impl CaskInner { fn get(&self, 
key: &[u8]) -> Result<Option<Vec<u8>>> { let value = match self.index.get(key) { Some(index_entry) => { let entry = self .log .read_entry(index_entry.file_id, index_entry.entry_pos)?; if entry.deleted { warn!( "Index pointed to dead entry: Entry {{ key: {:?}, sequence: {} }} at \ file: {}", entry.key, entry.sequence, index_entry.file_id ); None } else { Some(entry.value.into_owned()) } } _ => None, }; Ok(value) } fn put(&mut self, key: Vec<u8>, value: &[u8]) -> Result<()> { let index_entry = { let entry = Entry::new(self.current_sequence, &*key, value)?; let (file_id, file_pos) = self.log.append_entry(&entry)?; self.current_sequence += 1; IndexEntry { file_id: file_id, entry_pos: file_pos, entry_size: entry.size(), sequence: entry.sequence, } }; self.index.insert(key, index_entry); Ok(()) } fn delete(&mut self, key: &[u8]) -> Result<()> { if self.index.remove(key).is_some() { let entry = Entry::deleted(self.current_sequence, key); self.log.append_entry(&entry)?; self.current_sequence += 1; } Ok(()) } pub fn keys(&self) -> Keys<Vec<u8>, IndexEntry> { self.index.keys() } } /// An handle to a `Cask` database. /// /// This handle can be "cheaply" cloned and safely shared between threads. `Cask`s cannot be used /// concurrently by separate processes and this is ensured by using a file lock in the `Cask` dir. #[derive(Clone)] pub struct Cask { path: PathBuf, options: CaskOptions, dropped: Arc<AtomicBool>, inner: Arc<RwLock<CaskInner>>, compaction: Arc<Mutex<()>>, } /// `Cask` configuration. Provides control over the properties and behavior of the `Cask` instance. 
/// /// # Examples /// /// ```rust,no_run /// use cask::{CaskOptions, SyncStrategy}; /// /// let cask = CaskOptions::default() /// .compaction_check_frequency(1200) /// .sync(SyncStrategy::Never) /// .max_file_size(1024 * 1024 * 1024) /// .open("cask.db") /// .unwrap(); /// ``` #[derive(Clone)] pub struct CaskOptions { create: bool, sync: SyncStrategy, max_file_size: usize, file_pool_size: usize, compaction: bool, compaction_check_frequency: u64, compaction_window: (usize, usize), fragmentation_trigger: f64, dead_bytes_trigger: u64, fragmentation_threshold: f64, dead_bytes_threshold: u64, small_file_threshold: u64, } /// Strategy used to synchronize writes to disk. #[derive(Clone, PartialEq)] pub enum SyncStrategy { /// Never explicitly synchronize writes (the OS manages it). Never, /// Always synchronize writes. Always, /// Synchronize writes in the background every `n` milliseconds. Interval(usize), } impl Default for CaskOptions { fn default() -> CaskOptions { CaskOptions { create: true, sync: SyncStrategy::Interval(1000), max_file_size: 2 * 1024 * 1024 * 1024, file_pool_size: 2048, compaction: true, compaction_check_frequency: 3600, compaction_window: (0, 23), fragmentation_trigger: 0.6, dead_bytes_trigger: 512 * 1024 * 1024, fragmentation_threshold: 0.4, dead_bytes_threshold: 128 * 1024 * 1024, small_file_threshold: 10 * 1024 * 1024, } } } #[allow(dead_code)] impl CaskOptions { /// Generates the base configuration for opening a `Cask`, from which configuration methods can /// be chained. pub fn new() -> CaskOptions { CaskOptions::default() } /// Sets the strategy used to synchronize writes to disk. Defaults to /// `SyncStrategy::Interval(1000)`. pub fn sync(&mut self, sync: SyncStrategy) -> &mut CaskOptions { self.sync = sync; self } /// Sets the maximum file size. Defaults to `2GB`. pub fn max_file_size(&mut self, max_file_size: usize) -> &mut CaskOptions { self.max_file_size = max_file_size; self } /// Sets the maximum size of the file descriptor cache. 
Defaults to `2048`. pub fn file_pool_size(&mut self, file_pool_size: usize) -> &mut CaskOptions { self.file_pool_size = file_pool_size; self } /// Enable or disable background compaction. Defaults to `true`. pub fn compaction(&mut self, compaction: bool) -> &mut CaskOptions { self.compaction = compaction; self } /// Create `Cask` if it doesn't exist. Defaults to `true`. pub fn create(&mut self, create: bool) -> &mut CaskOptions { self.create = create; self } /// Sets the frequency of compaction, in seconds. Defaults to `3600`. pub fn compaction_check_frequency( &mut self, compaction_check_frequency: u64, ) -> &mut CaskOptions { self.compaction_check_frequency = compaction_check_frequency; self } /// Sets the time window during which compaction can run. Defaults to `[0, 23]`. pub fn compaction_window(&mut self, start: usize, end: usize) -> &mut CaskOptions { self.compaction_window = (start, end); self } /// Sets the ratio of dead entries to total entries in a file that will trigger compaction. /// Defaults to `0.6`. pub fn fragmentation_trigger(&mut self, fragmentation_trigger: f64) -> &mut CaskOptions { self.fragmentation_trigger = fragmentation_trigger; self } /// Sets the minimum amount of data occupied by dead entries in a single file that will trigger /// compaction. Defaults to `512MB`. pub fn dead_bytes_trigger(&mut self, dead_bytes_trigger: u64) -> &mut CaskOptions { self.dead_bytes_trigger = dead_bytes_trigger; self } /// Sets the ratio of dead entries to total entries in a file that will cause it to be included /// in a compaction. Defaults to `0.4`. pub fn fragmentation_threshold(&mut self, fragmentation_threshold: f64) -> &mut CaskOptions { self.fragmentation_threshold = fragmentation_threshold; self } /// Sets the minimum amount of data occupied by dead entries in a single file that will cause it /// to be included in a compaction. Defaults to `128MB`. 
pub fn dead_bytes_threshold(&mut self, dead_bytes_threshold: u64) -> &mut CaskOptions { self.dead_bytes_threshold = dead_bytes_threshold; self } /// Sets the minimum size a file must have to be excluded from compaction. Defaults to `10MB`. pub fn small_file_threshold(&mut self, small_file_threshold: u64) -> &mut CaskOptions { self.small_file_threshold = small_file_threshold; self } /// Opens/creates a `Cask` at `path`. pub fn open(&self, path: &str) -> Result<Cask> { Cask::open(path, self.clone()) } } impl Cask { /// Opens/creates a new `Cask`. pub fn open(path: &str, options: CaskOptions) -> Result<Cask> { info!("Opening database: {:?}", &path); let mut log = Log::open( path, options.create, options.sync == SyncStrategy::Always, options.max_file_size, options.file_pool_size, )?; let mut index = Index::new(); let mut sequence = 0; for file_id in log.files() { let mut f = |hint: Hint| { if hint.sequence > sequence { sequence = hint.sequence; } index.update(hint, file_id); }; match log.hints(file_id)? { Some(hints) => { for hint in hints { f(hint?); } } None => { for hint in log.recreate_hints(file_id)? 
{ f(hint?); } } }; } info!("Opened database: {:?}", &path); info!("Current sequence number: {:?}", sequence); let cask = Cask { path: log.path.clone(), options: options, dropped: Arc::new(AtomicBool::new(false)), inner: Arc::new(RwLock::new(CaskInner { current_sequence: sequence + 1, log: log, index: index, })), compaction: Arc::new(Mutex::new(())), }; if let SyncStrategy::Interval(millis) = cask.options.sync { let cask = cask.clone(); thread::spawn(move || { let duration = Duration::from_millis(millis as u64); loop { if cask.dropped.load(Ordering::SeqCst) { info!( "Cask has been dropped, background file sync \ thread is exiting" ); break; } debug!("Background file sync"); cask.inner.read().unwrap().log.sync().unwrap(); thread::sleep(duration); } }); }; if cask.options.compaction { let cask = cask.clone(); thread::spawn(move || { let duration = Duration::from_secs(cask.options.compaction_check_frequency); loop { if cask.dropped.load(Ordering::SeqCst) { info!( "Cask has been dropped, background compaction \ thread is exiting" ); break; } info!("Compaction thread wake up"); let current_hour = time::PrimitiveDateTime::now().hour() as usize; let (window_start, window_end) = cask.options.compaction_window; let in_window = if window_start <= window_end { current_hour >= window_start && current_hour <= window_end } else { current_hour >= window_end || current_hour <= window_end }; if !in_window { info!( "Compaction outside defined window {:?}", cask.options.compaction_window ); continue; } else if let Err(err) = cask.compact() { warn!("Error during compaction: {}", err); } thread::sleep(duration); } }); } Ok(cask) } fn compact_files_aux(&self, files: &[u32]) -> Result<(Vec<u32>, Vec<u32>)> { let active_file_id = { self.inner.read().unwrap().log.active_file_id }; let compacted_files_hints = files.iter().flat_map(|&file_id| { if active_file_id.is_some() && active_file_id.unwrap() == file_id { None } else { self.inner .read() .unwrap() .log .hints(file_id) .ok() // FIXME: 
log the error? .and_then(|hints| hints.map(|h| (file_id, h))) } }); let mut compacted_files = Vec::new(); let mut new_files = Vec::new(); let mut deletes = HashMap::new(); let mut log_writer = { // FIXME: turn into error self.inner.read().unwrap().log.writer() }; for (file_id, hints) in compacted_files_hints { let mut inserts = Vec::new(); for hint in hints { let hint = hint?; let inner = self.inner.read().unwrap(); let index_entry = inner.index.get(&*hint.key); if hint.deleted { if index_entry.is_none() { match deletes.entry(hint.key.to_vec()) { HashMapEntry::Occupied(mut o) => { if *o.get() < hint.sequence { o.insert(hint.sequence); } } HashMapEntry::Vacant(e) => { e.insert(hint.sequence); } } } } else if index_entry.is_some() && index_entry.unwrap().sequence == hint.sequence { inserts.push(hint) } } for hint in inserts { // FIXME: turn into error let log = &self.inner.read().unwrap().log; let log_write = log_writer.write(&log.read_entry(file_id, hint.entry_pos)?)?; if let LogWrite::NewFile(file_id) = log_write { new_files.push(file_id); } } compacted_files.push(file_id); } for (key, sequence) in deletes { log_writer.write(&Entry::deleted(sequence, key))?; } Ok((compacted_files, new_files)) } fn compact_files(&self, files: &[u32]) -> Result<()> { info!("Compacting data files: {:?}", files); let (ref compacted_files, ref new_files) = self.compact_files_aux(files)?; for &file_id in new_files { let hints = { self.inner.read().unwrap().log.hints(file_id)? }; if let Some(hints) = hints { for hint in hints { let hint = hint?; self.inner.write().unwrap().index.update(hint, file_id); } }; } self.inner .write() .unwrap() .index .stats .remove_files(compacted_files); self.inner .write() .unwrap() .log .swap_files(compacted_files, new_files)?; // FIXME: print files not compacted info!( "Finished compacting data files: {:?} into: {:?}", compacted_files, new_files ); Ok(()) } /// Trigger `Cask` log compaction. 
pub fn compact(&self) -> Result<()> { let _lock = self.compaction.lock().unwrap(); let active_file_id = { self.inner.read().unwrap().log.active_file_id }; let file_stats = { self.inner.read().unwrap().index.stats.file_stats() }; let mut files = BTreeSet::new(); let mut triggered = false; for (file_id, fragmentation, dead_bytes) in file_stats { if active_file_id.is_some() && file_id == active_file_id.unwrap() { continue; } if !triggered { if fragmentation >= self.options.fragmentation_trigger { info!( "File {} has fragmentation factor of {:.1}%, triggered compaction", file_id, fragmentation * 100.0 ); triggered = true; files.insert(file_id); } else if dead_bytes >= self.options.dead_bytes_trigger && !files.contains(&file_id) { info!( "File {} has {} of dead data, triggered compaction", file_id, human_readable_byte_count(dead_bytes as usize, true) ); triggered = true; files.insert(file_id); } } if fragmentation >= self.options.fragmentation_threshold && !files.contains(&file_id) { info!( "File {} has fragmentation factor of {:.1}%, adding for compaction", file_id, fragmentation * 100.0 ); files.insert(file_id); } else if dead_bytes >= self.options.dead_bytes_threshold && !files.contains(&file_id) { info!( "File {} has {} of dead data, adding for compaction", file_id, human_readable_byte_count(dead_bytes as usize, true) ); files.insert(file_id); } if !files.contains(&file_id) { let file_size = { self.inner.read().unwrap().log.file_size(file_id).ok() }; if let Some(file_size) = file_size { if file_size <= self.options.small_file_threshold { info!( "File {} has total size of {}, adding for compaction", file_id, human_readable_byte_count(file_size as usize, true) ); files.insert(file_id); } }; } } if triggered { let files: Vec<_> = files.into_iter().collect(); self.compact_files(&files)?; } else if !files.is_empty() { info!( "Compaction of files {:?} aborted due to missing trigger", &files ); } else { info!("No files eligible for compaction") } Ok(()) } /// Returns the 
value corresponding to the key, if any. pub fn get<K: AsRef<[u8]>>(&self, key: K) -> Result<Option<Vec<u8>>> { self.inner.read().unwrap().get(key.as_ref()) } /// Inserts a key-value pair into the map. pub fn put<K: Into<Vec<u8>>, V: AsRef<[u8]>>(&self, key: K, value: V) -> Result<()> { self.inner.write().unwrap().put(key.into(), value.as_ref()) } /// Removes a key from the map. pub fn delete<K: AsRef<[u8]>>(&self, key: K) -> Result<()> { self.inner.write().unwrap().delete(key.as_ref()) } /// Returns all keys stored in the map. pub fn keys(&self) -> Vec<Vec<u8>> { self.inner.read().unwrap().keys().cloned().collect() } } impl Drop for Cask { fn drop(&mut self) { self.dropped.store(true, Ordering::SeqCst); let _lock = self.compaction.lock().unwrap(); } } #[cfg(test)] mod tests { use cask::CaskOptions; use std::fs; #[test] fn test_keys() { let cask_result = CaskOptions::default() .compaction_check_frequency(1) .max_file_size(50 * 1024 * 1024) .open("test.db"); assert!(cask_result.is_ok()); let cask = cask_result.unwrap(); let key1: &[u8] = &[0]; let key2: &[u8] = &[1]; let key3: &[u8] = &[2]; let val: &[u8] = &[0]; assert!(cask.put(key1, val).is_ok()); assert!(cask.put(key2, val).is_ok()); assert!(cask.put(key3, val).is_ok()); assert!(cask.delete(key3).is_ok()); let mut keys = cask.keys(); //Keys are not guaranteed to be in order. keys.sort(); assert_eq!(keys.len(), 2); assert_eq!(keys[0], key1); assert_eq!(keys[1], key2); assert!(fs::remove_dir_all("test.db").is_ok()); } }
true
d56adef7240835779c7e8c2d985e49ea902d4bb4
Rust
konamilk/atcoder-abc177
/src/bin/a.rs
UTF-8
257
2.546875
3
[]
no_license
use proconio::input; #[allow(unused_imports)] use proconio::marker::{Chars, Bytes}; fn main() { input!{ d: i32, t: i32, s: i32 } if d <= t * s { println!("Yes"); } else { println!("No"); } }
true
c438e0cf805baab01a26b481c59f0caf2ccf585a
Rust
Visse/Bluntforge
/server/src/main.rs
UTF-8
1,028
2.75
3
[]
no_license
use std::net::SocketAddr; use futures_util::StreamExt; use log::*; use warp::{Filter, ws::WebSocket}; async fn accept_connection(socket: WebSocket) { match handle_connection(socket).await { Ok(()) => (), Err(e) => { error!("Error handling stream: {:?}", e); } } } async fn handle_connection(mut socket: WebSocket) -> anyhow::Result<()> { while let Some(msg) = socket.next().await { let msg = msg?; info!("Message resived: {:?}", msg); } Ok(()) } #[tokio::main] async fn main() -> anyhow::Result<()> { env_logger::init(); let addr = ([0, 0, 0, 0], 2000); let socket_path = warp::path("socket") .and(warp::ws()) .and(warp::addr::remote()) .map(|ws: warp::ws::Ws, peer: Option<SocketAddr>| { info!("Connection from {:?} accepted", peer); ws.on_upgrade(accept_connection) }); warp::serve( socket_path.or(warp::fs::dir("dist/client")) ).run(addr).await; Ok(()) }
true
aeb80f32b91dc576dc66f77fb7ca73f16c126ca7
Rust
lloydmeta/frunk
/benches/monoid.rs
UTF-8
774
2.546875
3
[ "MIT" ]
permissive
#![feature(test)] extern crate frunk; extern crate test; use frunk::monoid::*; use test::Bencher; #[bench] fn combine_all_i32(b: &mut Bencher) { let v = vec![ Some(1), Some(2), Some(3), Some(4), Some(5), Some(6), Some(7), Some(8), Some(9), Some(10), ]; b.iter(|| combine_all(&v)) } #[bench] fn std_add_all_i32(b: &mut Bencher) { let v = vec![ Some(1), Some(2), Some(3), Some(4), Some(5), Some(6), Some(7), Some(8), Some(9), Some(10), ]; b.iter(|| { v.iter().fold(Some(0), |maybe_acc, maybe_n| { maybe_acc.and_then(|acc| maybe_n.map(|n| acc + n)) }) }) }
true
eae3d4a43c4054f44fa0a69f20aee385e6baac1b
Rust
arafat877/git-git
/src/commands/merge.rs
UTF-8
2,926
2.875
3
[]
no_license
use crate::command_chain::*; use crate::commands::*; use crate::git::{branch_exists, current_branch_with_confirm, ConfirmDefault, Git}; use clap::ArgMatches; use std::string::ToString; pub fn run_merge(args: &ArgMatches) { MergeArgs::from_args(&args) .unwrap() .parse_args_and_run_command(&args, merge_command); } fn merge_command(args: &MergeArgs) -> CommandChain { let mut c = CommandChain::new(); c.add(Git::checkout(&args.into)); c.add(Git::pull()); for branch in &args.branches { c.add(Git::checkout(&branch)); c.add(Git::pull()); if args.no_rebase { c.add(Git::checkout(&args.into)); c.add(Git::merge(&branch)); } else { c.add(Git::rebase(&args.into)); c.add(Git::force_push()); c.add(Git::checkout(&args.into)); c.add(Git::fast_forward_merge(&branch)); } } c.add(Git::checkout(&args.into)); c.add(Git::push()); for branch in &args.branches { c.add(Git::delete_branch(branch)); c.add(Git::delete_remote_branch(branch)); c.add(Git::prune_remote()); } if args.into == "master" { for branch in ["staging", "develop"].iter() { if branch_exists(branch) { c.add(Git::checkout(branch)); c.add(Git::pull()); c.add(Git::merge(&args.into)); c.add(Git::push()); } } } c.add(Git::checkout(&args.into)); c } #[derive(Debug)] struct MergeArgs { no_rebase: bool, into: String, branches: Vec<String>, } impl MergeArgs { fn from_args(args: &ArgMatches) -> Option<Self> { let branches = if let Some(branches) = args.values_of("BRANCH") { branches.map(ToString::to_string).collect() } else { vec![current_branch_with_confirm( |current_branch| { format!("Do you want to merge the current branch {}", current_branch) }, ConfirmDefault::No, )] }; let into = if let Some(into) = args.value_of("into") { into.into() } else { "master".into() }; let mut s = MergeArgs { no_rebase: false, into, branches, }; if args.is_present("no-rebase") { s.no_rebase = true; } Some(s) } } impl CommandArgs for MergeArgs { fn rerun_command(&self) -> String { let mut rerun_command = String::new(); rerun_command.push_str("merge"); if 
self.no_rebase { rerun_command.push_str(" --no-rebase"); } rerun_command.push_str(&format!(" --into {}", self.into)); for branch in &self.branches { rerun_command.push_str(&format!(" {}", branch)); } rerun_command } }
true
ec7656a4442355ffbbbd95c660167de555cd0eae
Rust
yati-sagade/autodiff-rust
/src/main.rs
UTF-8
2,890
2.96875
3
[ "MIT" ]
permissive
extern crate autodiff; extern crate num; extern crate gnuplot; use autodiff::AutoDiff; use std::io::{self,Write}; use num::Float; use gnuplot::{Figure, Caption, Color, LegendOption, Coordinate}; use std::f64::consts::PI; // A function `fn plot_fn_with_derivative<T, F>(func: F...) where F: Fn(T) -> T` // does not work, since calls are monomorphized at call site, meaning that // F can not be generic when plot_fn_with_derivative is monomorphized. macro_rules! plot_fn_with_derivative { ($func:ident, ($start:expr, $end:expr, $incr:expr), $caption_fx:expr, $caption_dfx:expr) => {{ let mut xs = Vec::new(); let mut ys = Vec::new(); let mut dys = Vec::new(); let start: f64 = $start as f64; let stop: f64 = $end as f64; let incr: f64 = $incr as f64; let mut x = start; while x <= stop { let ad = AutoDiff::var(x); let sig = $func(ad); xs.push(x); ys.push(sig.val()); dys.push(sig.dval()); x += incr; } let mut fig = Figure::new(); fig.axes2d().lines( &xs, &ys, &[Caption($caption_fx), Color("blue")] ).lines( &xs, &dys, &[Caption($caption_dfx), Color("black")] ).set_legend( Coordinate::Graph(0.95f64), Coordinate::Graph(0.95f64), &[], &[] ); fig.show(); }}; } fn main() { plot_fn_with_derivative!(sigmoid, (-10f64, 10f64, 0.1), "f(x) = 1/(1+exp(-x))", "f'(x) = f(x)(1 - f(x))"); plot_fn_with_derivative!(e_to_pi_x, (0f64, 10f64, 0.1), "f(x) = exp(pi*x)", "f'(x) = pi * exp(pi*x)"); plot_fn_with_derivative!(sin_2x, (0, 2f64 * PI, 0.1), "f(x) = 2sinθcosθ = sin(2θ)", "f'(x) = 2cos(2θ)"); } /// computes exp(pi*x) fn e_to_pi_x<T>(x: T) -> T where T: Float + From<f64> { let pi: T = From::from(std::f64::consts::PI); T::exp(pi * x) } /// computes `1/(1 + exp(-x))` fn sigmoid<T>(x: T) -> T where T: Float { T::one() / (T::one() + (-x).exp()) } /// computes sin(2*x) fn sin_2x<T>(x: T) -> T where T: Float + From<f64> { <T as From<f64>>::from(2f64) * x.sin() * x.cos() } // A_c * cos(w_c*t - A_m * sin(w_m * t)) fn fm<T>(t: T) -> T where T: Float + From<f64> { let A_c: T = From::from(5f64); let A_m: T 
= From::from(2f64); let f_c: T = From::from(4f64); // Hz let f_m: T = From::from(1.5f64); // Hz let pi: T = From::from(std::f64::consts::PI); let two: T = From::from(2f64); A_c * (two * pi * f_c * t - A_m * (two * pi * t).sin()).cos() }
true
98455c8f4e3c8031ea341be2e7df0ec6ce1a04f7
Rust
RussellPaine/AdventCode2020
/day_six/main.rs
UTF-8
1,059
3.390625
3
[]
no_license
use std::fs; fn main() { let data = fs::read_to_string("input.in").expect("Unable to read file"); let lines: Vec<&str> = data.split("\n\n").collect(); let mut c = 0; for line in lines { // let mut y: Vec<char> = Vec::new(); // for elem in line.chars() { // if elem == '\n' { // continue; // } // if !y.contains(&elem){ // y.push(elem); // } // } // c = c + y.len(); let people: Vec<&str> = line.split_whitespace().collect(); let alpha = ["a","b","c","d","e","f","g","h","i","j","k","l","m","n","o","p","q","r","s","t","u","v","w","x","y","z"]; let mut y = 0; 'outer: for a in alpha.iter() { for peep in &people { if !peep.contains(a) { continue 'outer; } } print!("{}",a); y += 1; } c = c + y; print!("{}",c); println!(); } println!("{}", c); }
true
fb616f1cf7454a11782165f149bd7c576854bcfe
Rust
imuxin/kubectl-watch
/difftastic-lib/src/diff/myers_diff.rs
UTF-8
787
3.125
3
[ "MIT", "Apache-2.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
//! A fast diff for linear content, using Myer's diff algorithm. #[derive(Debug)] pub enum DiffResult<T> { Left(T), Both(T, T), Right(T), } pub fn slice<'a, T: PartialEq + Clone>(lhs: &'a [T], rhs: &'a [T]) -> Vec<DiffResult<&'a T>> { wu_diff::diff(lhs, rhs) .into_iter() .map(|result| match result { wu_diff::DiffResult::Removed(r) => DiffResult::Left(&lhs[r.old_index.unwrap()]), wu_diff::DiffResult::Common(c) => { let lhs_id = c.old_index.unwrap(); let rhs_id = c.new_index.unwrap(); DiffResult::Both(&lhs[lhs_id], &rhs[rhs_id]) } wu_diff::DiffResult::Added(a) => DiffResult::Right(&rhs[a.new_index.unwrap()]), }) .collect::<Vec<_>>() }
true
82952b774e4cbe67fed1946b915c2654ac746a6f
Rust
domain-independent-dp/didp-rs
/dypdl-heuristic-search/src/search_algorithm/data_structure/search_node/f_node.rs
UTF-8
49,121
3.046875
3
[ "MIT", "Apache-2.0" ]
permissive
use super::super::hashable_state::HashableSignatureVariables; use super::super::state_registry::{StateInRegistry, StateInformation, StateRegistry}; use super::super::transition_chain::{GetTransitions, RcChain, TransitionChain}; use super::super::util::exceed_bound; use super::BfsNode; use dypdl::variable_type::Numeric; use dypdl::{Model, ReduceFunction, Transition, TransitionInterface}; use std::cell::Cell; use std::cmp::Ordering; use std::fmt::Display; use std::ops::Deref; use std::rc::Rc; /// Node ordered by the f-value, and tie is broken by the h-value. /// /// The f-value is a dual bound on the path cost from the target state to a base state via this node. /// The h-value is a dual bound on the path cost from this node to a base state. /// /// In minimization, a node having a lower f-value is `Greater` in `Ord`. /// In maximization , a node having a higher f-value is `Greater` in `Ord`. #[derive(Debug, Clone)] pub struct FNode<T, V = Transition> where T: Numeric, V: TransitionInterface + Clone, Transition: From<V>, { state: StateInRegistry<Rc<HashableSignatureVariables>>, g: T, h: T, f: T, closed: Cell<bool>, transitions: Option<Rc<RcChain<V>>>, } impl<T, V> FNode<T, V> where T: Numeric, V: TransitionInterface + Clone, Transition: From<V>, { fn new( state: StateInRegistry, cost: T, h: T, f: T, parent: Option<&Self>, transition: Option<Rc<V>>, ) -> Self { let transitions = transition.map(|transition| { Rc::new(RcChain::new( parent.and_then(|parent| parent.transitions.clone()), transition, )) }); FNode { state, g: cost, h, f, closed: Cell::new(false), transitions, } } /// Generates a root search node given a state, its cost, a DyPDL model, h- and f-evaluators, /// and a primal bound on the solution cost. /// /// Returns `None` if the node is a dead-end, or the f-value exceeds the primal bound. /// /// `h_evaluator` is a function that takes a state and returns the dual bound (the h-value). 
/// If `h_evaluator` returns `None`, the state is a dead-end, so the node is not generated. /// `f_evaluator` is a function that takes g- and h-values and the state and returns the f-value. /// /// # Examples /// /// ``` /// use dypdl::prelude::*; /// use dypdl_heuristic_search::search_algorithm::{FNode, StateInRegistry}; /// use dypdl_heuristic_search::search_algorithm::data_structure::{ /// GetTransitions, StateInformation, /// }; /// /// let mut model = Model::default(); /// model.add_integer_variable("variable", 0).unwrap(); /// /// let state = model.target.clone(); /// let cost = 0; /// let h_evaluator = |_: &StateInRegistry| Some(0); /// let f_evaluator = |g, h, _: &StateInRegistry| g + h; /// let node = FNode::<_>::generate_root_node( /// state, cost, &model, &h_evaluator, &f_evaluator, None, /// ); /// assert!(node.is_some()); /// let node = node.unwrap(); /// assert_eq!(node.state(), &StateInRegistry::from(model.target.clone())); /// assert_eq!(node.cost(&model), cost); /// assert_eq!(node.bound(&model), Some(0)); /// assert!(!node.is_closed()); /// assert_eq!(node.transitions(), vec![]); /// ``` pub fn generate_root_node<S, H, F>( state: S, cost: T, model: &Model, h_evaluator: H, f_evaluator: F, primal_bound: Option<T>, ) -> Option<Self> where StateInRegistry: From<S>, H: FnOnce(&StateInRegistry) -> Option<T>, F: FnOnce(T, T, &StateInRegistry) -> T, { let state = StateInRegistry::from(state); let h = h_evaluator(&state)?; let f = f_evaluator(cost, h, &state); if exceed_bound(model, f, primal_bound) { return None; } let (h, f) = if model.reduce_function == ReduceFunction::Max { (h, f) } else { (-h, -f) }; Some(FNode::new(state, cost, h, f, None, None)) } /// Generates a successor node given a transition, a DyPDL model, h- and f-evaluators, /// and a primal bound on the solution cost. /// /// Returns `None` if the successor state is pruned by a state constraint or a dead-end, /// or the f-value exceeds the primal bound. 
/// /// `h_evaluator` is a function that takes a state and returns the dual bound (the h-value). /// If `h_evaluator` returns `None`, the state is a dead-end, so the node is not generated. /// `f_evaluator` is a function that takes g- and h-values and the state and returns the f-value. /// /// # Panics /// /// If an expression used in the transition is invalid. /// /// # Examples /// /// ``` /// use dypdl::prelude::*; /// use dypdl_heuristic_search::search_algorithm::{FNode, StateInRegistry}; /// use dypdl_heuristic_search::search_algorithm::data_structure::{ /// GetTransitions, StateInformation, /// }; /// use std::rc::Rc; /// /// let mut model = Model::default(); /// let variable = model.add_integer_variable("variable", 0).unwrap(); /// /// let state = model.target.clone(); /// let cost = 0; /// let h_evaluator = |_: &StateInRegistry| Some(0); /// let f_evaluator = |g, h, _: &StateInRegistry| g + h; /// let node = FNode::<_>::generate_root_node( /// state, cost, &model, &h_evaluator, &f_evaluator, None, /// ).unwrap(); /// /// let mut transition = Transition::new("transition"); /// transition.set_cost(IntegerExpression::Cost + 1); /// transition.add_effect(variable, variable + 1).unwrap(); /// let expected_state: StateInRegistry = transition.apply( /// &model.target, &model.table_registry, /// ); /// /// let node = node.generate_successor_node( /// Rc::new(transition.clone()), &model, &h_evaluator, &f_evaluator, None, /// ); /// assert!(node.is_some()); /// let node = node.unwrap(); /// assert_eq!(node.state(), &expected_state); /// assert_eq!(node.cost(&model), 1); /// assert_eq!(node.bound(&model), Some(1)); /// assert!(!node.is_closed()); /// assert_eq!(node.transitions(), vec![transition]); /// ``` pub fn generate_successor_node<H, F>( &self, transition: Rc<V>, model: &Model, h_evaluator: H, f_evaluator: F, primal_bound: Option<T>, ) -> Option<Self> where H: FnOnce(&StateInRegistry) -> Option<T>, F: FnOnce(T, T, &StateInRegistry) -> T, { let (state, g) = 
model.generate_successor_state(&self.state, self.g, transition.as_ref(), None)?; let h = h_evaluator(&state)?; let f = f_evaluator(g, h, &state); if exceed_bound(model, f, primal_bound) { return None; } let (h, f) = if model.reduce_function == ReduceFunction::Max { (h, f) } else { (-h, -f) }; Some(FNode::new(state, g, h, f, Some(self), Some(transition))) } /// Generates a successor node given a transition, h- and f- evaluators, and a primal bound on the solution cost, /// and inserts it into a state registry. /// /// Returns the successor node and whether a new entry is generated or not. /// If the successor node dominates an existing non-closed node in the registry, the second return value is `false`. /// /// `h_evaluator` is a function that takes a state and returns the dual bound (the h-value). /// If `h_evaluator` returns `None`, the state is a dead-end, so the node is not generated. /// `f_evaluator` is a function that takes g- and h-values and the state and returns the f-value. /// /// Returns `None` if the successor state is pruned by a state constraint or a dead-end, /// the f-value exceeds the primal bound, or the successor node is dominated. /// /// # Panics /// /// If an expression used in the transition is invalid. 
/// /// # Examples /// /// ``` /// use dypdl::prelude::*; /// use dypdl_heuristic_search::search_algorithm::{FNode, StateInRegistry, StateRegistry}; /// use dypdl_heuristic_search::search_algorithm::data_structure::{ /// GetTransitions, StateInformation, /// }; /// use std::rc::Rc; /// /// let mut model = Model::default(); /// let variable = model.add_integer_variable("variable", 0).unwrap(); /// let mut registry = StateRegistry::<_, FNode<_>>::new(Rc::new(model.clone())); /// /// let state = model.target.clone(); /// let cost = 0; /// let h_evaluator = |_: &StateInRegistry| Some(0); /// let f_evaluator = |g, h, _: &StateInRegistry| g + h; /// let node = FNode::<_>::generate_root_node( /// state, cost, &model, &h_evaluator, &f_evaluator, None, /// ).unwrap(); /// /// let mut transition = Transition::new("transition"); /// transition.set_cost(IntegerExpression::Cost + 1); /// transition.add_effect(variable, variable + 1).unwrap(); /// let expected_state: StateInRegistry = transition.apply( /// &model.target, &model.table_registry, /// ); /// /// let result = node.insert_successor_node( /// Rc::new(transition.clone()), &mut registry, &h_evaluator, &f_evaluator, None, /// ); /// assert!(result.is_some()); /// let (node, generated) = result.unwrap(); /// assert!(generated); /// assert_eq!(node.state(), &expected_state); /// assert_eq!(node.cost(&model), 1); /// assert_eq!(node.bound(&model), Some(1)); /// assert!(!node.is_closed()); /// assert_eq!(node.transitions(), vec![transition]); /// ``` pub fn insert_successor_node<H, F, N, M>( &self, transition: Rc<V>, registry: &mut StateRegistry<T, Self, N, Rc<HashableSignatureVariables>, M>, h_evaluator: H, f_evaluator: F, primal_bound: Option<T>, ) -> Option<(N, bool)> where H: FnOnce(&StateInRegistry) -> Option<T>, F: FnOnce(T, T, &StateInRegistry) -> T, N: Deref<Target = Self> + From<Self> + Clone, M: Deref<Target = Model> + Clone, { let (state, g) = registry.model().generate_successor_state( &self.state, self.g, 
transition.as_ref(), None, )?; let model = registry.model().clone(); let maximize = model.reduce_function == ReduceFunction::Max; let constructor = |state, g, other: Option<&FNode<T, V>>| { let h = if let Some(other) = other { if maximize { other.h } else { -other.h } } else { h_evaluator(&state)? }; let f = f_evaluator(g, h, &state); if exceed_bound(&model, f, primal_bound) { return None; } let (h, f) = if maximize { (h, f) } else { (-h, -f) }; Some(FNode::new(state, g, h, f, Some(self), Some(transition))) }; let (successor, dominated) = registry.insert_with(state, g, constructor)?; let mut generated = true; if let Some(dominated) = dominated { if !dominated.is_closed() { dominated.close(); generated = false; } } Some((successor, generated)) } } impl<T, V> PartialEq for FNode<T, V> where T: Numeric + PartialOrd, V: TransitionInterface + Clone, Transition: From<V>, { /// Nodes are compared by their f- and h-values. /// This does not mean that the nodes are the same. #[inline] fn eq(&self, other: &Self) -> bool { self.f == other.f && self.h == other.h } } impl<T, V> Eq for FNode<T, V> where T: Numeric + Ord, V: TransitionInterface + Clone, Transition: From<V>, { } impl<T, V> Ord for FNode<T, V> where T: Numeric + Ord, V: TransitionInterface + Clone, Transition: From<V>, { #[inline] fn cmp(&self, other: &Self) -> Ordering { match self.f.cmp(&other.f) { Ordering::Equal => self.h.cmp(&other.h), result => result, } } } impl<T, V> PartialOrd for FNode<T, V> where T: Numeric + Ord, V: TransitionInterface + Clone, Transition: From<V>, { #[inline] fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) } } impl<T, V> StateInformation<T, Rc<HashableSignatureVariables>> for FNode<T, V> where T: Numeric, V: TransitionInterface + Clone, Transition: From<V>, { #[inline] fn state(&self) -> &StateInRegistry { &self.state } #[inline] fn state_mut(&mut self) -> &mut StateInRegistry { &mut self.state } #[inline] fn cost(&self, _: &Model) -> T { self.g } 
#[inline] fn bound(&self, model: &Model) -> Option<T> { if model.reduce_function == ReduceFunction::Min { Some(-self.f) } else { Some(self.f) } } #[inline] fn is_closed(&self) -> bool { self.closed.get() } #[inline] fn close(&self) { self.closed.set(true); } } impl<T, V> GetTransitions<V> for FNode<T, V> where T: Numeric + Ord, V: TransitionInterface + Clone, Transition: From<V>, { #[inline] fn transitions(&self) -> Vec<V> { self.transitions .as_ref() .map_or_else(Vec::new, |transitions| transitions.transitions()) } #[inline] fn last(&self) -> Option<&V> { self.transitions .as_ref() .map(|transitions| transitions.last()) } } impl<T, V> BfsNode<T, V> for FNode<T, V> where T: Numeric + Ord + Display, V: TransitionInterface + Clone, Transition: From<V>, { #[inline] fn ordered_by_bound() -> bool { true } } #[cfg(test)] mod tests { use super::*; use dypdl::expression::*; use dypdl::prelude::*; #[test] fn ordered_by_bound() { assert!(FNode::<Integer>::ordered_by_bound()); } #[test] fn generate_root_node_some_min() { let mut model = dypdl::Model::default(); model.set_minimize(); let variable = model.add_integer_variable("variable", 0); assert!(variable.is_ok()); let state = model.target.clone(); let mut expected_state = StateInRegistry::from(state.clone()); let h_evaluator = |_: &StateInRegistry| Some(0); let f_evaluator = |g, h, _: &StateInRegistry| g + h; let node = FNode::<_>::generate_root_node(state, 1, &model, &h_evaluator, &f_evaluator, None); assert!(node.is_some()); let mut node = node.unwrap(); assert_eq!(node.state(), &expected_state); assert_eq!(node.state_mut(), &mut expected_state); assert_eq!(node.cost(&model), 1); assert_eq!(node.bound(&model), Some(1)); assert!(!node.is_closed()); assert_eq!(node.last(), None); assert_eq!(node.transitions(), vec![]); } #[test] fn generate_root_node_some_max() { let mut model = dypdl::Model::default(); model.set_maximize(); let variable = model.add_integer_variable("variable", 0); assert!(variable.is_ok()); let state = 
model.target.clone(); let mut expected_state = StateInRegistry::from(state.clone()); let h_evaluator = |_: &StateInRegistry| Some(0); let f_evaluator = |g, h, _: &StateInRegistry| g + h; let node = FNode::<_>::generate_root_node(state, 1, &model, &h_evaluator, &f_evaluator, None); assert!(node.is_some()); let mut node = node.unwrap(); assert_eq!(node.state(), &expected_state); assert_eq!(node.state_mut(), &mut expected_state); assert_eq!(node.cost(&model), 1); assert_eq!(node.bound(&model), Some(1)); assert!(!node.is_closed()); assert_eq!(node.last(), None); assert_eq!(node.transitions(), vec![]); } #[test] fn generate_root_node_pruned_by_bound_min() { let mut model = dypdl::Model::default(); model.set_minimize(); let variable = model.add_integer_variable("variable", 0); assert!(variable.is_ok()); let state = model.target.clone(); let h_evaluator = |_: &StateInRegistry| Some(1); let f_evaluator = |g, h, _: &StateInRegistry| g + h; let node = FNode::<_>::generate_root_node(state, 0, &model, &h_evaluator, &f_evaluator, Some(0)); assert!(node.is_none()); } #[test] fn generate_root_node_pruned_by_bound_max() { let mut model = dypdl::Model::default(); model.set_maximize(); let variable = model.add_integer_variable("variable", 0); assert!(variable.is_ok()); let state = model.target.clone(); let h_evaluator = |_: &StateInRegistry| Some(1); let f_evaluator = |g, h, _: &StateInRegistry| g + h; let node = FNode::<_>::generate_root_node(state, 0, &model, &h_evaluator, &f_evaluator, Some(2)); assert!(node.is_none()); } #[test] fn generate_root_node_pruned_by_h() { let mut model = dypdl::Model::default(); let variable = model.add_integer_variable("variable", 0); assert!(variable.is_ok()); let state = model.target.clone(); let h_evaluator = |_: &StateInRegistry| None; let f_evaluator = |g, h, _: &StateInRegistry| g + h; let node = FNode::<_>::generate_root_node(state, 0, &model, &h_evaluator, &f_evaluator, None); assert!(node.is_none()); } #[test] fn close() { let model = 
dypdl::Model::default(); let state = model.target.clone(); let h_evaluator = |_: &StateInRegistry| Some(0); let f_evaluator = |g, h, _: &StateInRegistry| g + h; let node = FNode::<_>::generate_root_node(state, 0, &model, &h_evaluator, &f_evaluator, None); assert!(node.is_some()); let node = node.unwrap(); assert!(!node.is_closed()); node.close(); assert!(node.is_closed()); } #[test] fn generate_successor_some_min() { let mut model = Model::default(); model.set_minimize(); let v1 = model.add_integer_resource_variable("v1", true, 0); assert!(v1.is_ok()); let v1 = v1.unwrap(); let v2 = model.add_integer_resource_variable("v2", false, 0); assert!(v2.is_ok()); let v2 = v2.unwrap(); let mut transition = Transition::default(); let result = transition.add_effect(v1, v1 + 1); assert!(result.is_ok()); let result = transition.add_effect(v2, v2 + 1); assert!(result.is_ok()); transition.set_cost(IntegerExpression::Cost + 1); let state = model.target.clone(); let mut expected_state: StateInRegistry = transition.apply(&state, &model.table_registry); let h_evaluator = |_: &StateInRegistry| Some(0); let f_evaluator = |g, h, _: &StateInRegistry| g + h; let node = FNode::generate_root_node(state, 0, &model, &h_evaluator, &f_evaluator, None); assert!(node.is_some()); let node = node.unwrap(); let successor = node.generate_successor_node( Rc::new(transition.clone()), &model, &h_evaluator, &f_evaluator, None, ); assert!(successor.is_some()); let mut successor = successor.unwrap(); assert_eq!(successor.state(), &expected_state); assert_eq!(successor.state_mut(), &mut expected_state); assert_eq!(successor.cost(&model), 1); assert_eq!(successor.bound(&model), Some(1)); assert!(!successor.is_closed()); assert_eq!(successor.last(), Some(&transition)); assert_eq!(successor.transitions(), vec![transition]); } #[test] fn generate_successor_some_max() { let mut model = Model::default(); model.set_maximize(); let v1 = model.add_integer_resource_variable("v1", true, 0); assert!(v1.is_ok()); let v1 
= v1.unwrap(); let v2 = model.add_integer_resource_variable("v2", false, 0); assert!(v2.is_ok()); let v2 = v2.unwrap(); let mut transition = Transition::default(); let result = transition.add_effect(v1, v1 + 1); assert!(result.is_ok()); let result = transition.add_effect(v2, v2 + 1); assert!(result.is_ok()); transition.set_cost(IntegerExpression::Cost + 1); let state = model.target.clone(); let mut expected_state: StateInRegistry = transition.apply(&state, &model.table_registry); let h_evaluator = |_: &StateInRegistry| Some(0); let f_evaluator = |g, h, _: &StateInRegistry| g + h; let node = FNode::generate_root_node(state, 0, &model, &h_evaluator, &f_evaluator, None); assert!(node.is_some()); let node = node.unwrap(); let successor = node.generate_successor_node( Rc::new(transition.clone()), &model, &h_evaluator, &f_evaluator, None, ); assert!(successor.is_some()); let mut successor = successor.unwrap(); assert_eq!(successor.state(), &expected_state); assert_eq!(successor.state_mut(), &mut expected_state); assert_eq!(successor.cost(&model), 1); assert_eq!(successor.bound(&model), Some(1)); assert!(!successor.is_closed()); assert_eq!(successor.last(), Some(&transition)); assert_eq!(successor.transitions(), vec![transition]); } #[test] fn generate_successor_pruned_by_constraint() { let mut model = Model::default(); let v1 = model.add_integer_resource_variable("v1", true, 0); assert!(v1.is_ok()); let v1 = v1.unwrap(); let v2 = model.add_integer_resource_variable("v2", false, 0); assert!(v2.is_ok()); let v2 = v2.unwrap(); let result = model.add_state_constraint(Condition::comparison_i(ComparisonOperator::Le, v1, 0)); assert!(result.is_ok()); let state = model.target.clone(); let h_evaluator = |_: &StateInRegistry| Some(0); let f_evaluator = |g, h, _: &StateInRegistry| g + h; let node = FNode::generate_root_node(state, 0, &model, &h_evaluator, &f_evaluator, None); assert!(node.is_some()); let node = node.unwrap(); let mut transition = Transition::default(); let result = 
transition.add_effect(v1, v1 + 1); assert!(result.is_ok()); let result = transition.add_effect(v2, v2 + 1); assert!(result.is_ok()); transition.set_cost(IntegerExpression::Cost + 1); let result = node.generate_successor_node( Rc::new(transition), &model, &h_evaluator, &f_evaluator, None, ); assert_eq!(result, None); } #[test] fn generate_successor_pruned_by_bound_min() { let mut model = Model::default(); model.set_minimize(); let v1 = model.add_integer_resource_variable("v1", true, 0); assert!(v1.is_ok()); let v1 = v1.unwrap(); let v2 = model.add_integer_resource_variable("v2", false, 0); assert!(v2.is_ok()); let v2 = v2.unwrap(); let state = model.target.clone(); let h_evaluator = |_: &StateInRegistry| Some(0); let f_evaluator = |g, h, _: &StateInRegistry| g + h; let node = FNode::generate_root_node(state, 0, &model, &h_evaluator, &f_evaluator, None); assert!(node.is_some()); let node = node.unwrap(); let mut transition = Transition::default(); let result = transition.add_effect(v1, v1 + 1); assert!(result.is_ok()); let result = transition.add_effect(v2, v2 + 1); assert!(result.is_ok()); transition.set_cost(IntegerExpression::Cost + 1); let result = node.generate_successor_node( Rc::new(transition), &model, &h_evaluator, &f_evaluator, Some(0), ); assert_eq!(result, None); } #[test] fn generate_successor_pruned_by_bound_max() { let mut model = Model::default(); model.set_maximize(); let v1 = model.add_integer_resource_variable("v1", true, 0); assert!(v1.is_ok()); let v1 = v1.unwrap(); let v2 = model.add_integer_resource_variable("v2", false, 0); assert!(v2.is_ok()); let v2 = v2.unwrap(); let state = model.target.clone(); let h_evaluator = |_: &StateInRegistry| Some(0); let f_evaluator = |g, h, _: &StateInRegistry| g + h; let node = FNode::generate_root_node(state, 0, &model, &h_evaluator, &f_evaluator, None); assert!(node.is_some()); let node = node.unwrap(); let mut transition = Transition::default(); let result = transition.add_effect(v1, v1 + 1); 
assert!(result.is_ok()); let result = transition.add_effect(v2, v2 + 1); assert!(result.is_ok()); transition.set_cost(IntegerExpression::Cost + 1); let result = node.generate_successor_node( Rc::new(transition), &model, &h_evaluator, &f_evaluator, Some(2), ); assert_eq!(result, None); } #[test] fn generate_successor_pruned_by_h() { let mut model = Model::default(); let v1 = model.add_integer_resource_variable("v1", true, 0); assert!(v1.is_ok()); let v1 = v1.unwrap(); let v2 = model.add_integer_resource_variable("v2", false, 0); assert!(v2.is_ok()); let v2 = v2.unwrap(); let state = model.target.clone(); let h_evaluator = |_: &StateInRegistry| Some(0); let f_evaluator = |g, h, _: &StateInRegistry| g + h; let node = FNode::generate_root_node(state, 0, &model, &h_evaluator, &f_evaluator, None); assert!(node.is_some()); let node = node.unwrap(); let mut transition = Transition::default(); let result = transition.add_effect(v1, v1 + 1); assert!(result.is_ok()); let result = transition.add_effect(v2, v2 + 1); assert!(result.is_ok()); transition.set_cost(IntegerExpression::Cost + 1); let h_evaluator = |_: &StateInRegistry| None; let result = node.generate_successor_node( Rc::new(transition), &model, &h_evaluator, &f_evaluator, None, ); assert_eq!(result, None); } #[test] fn insert_successor_non_dominance_min() { let mut model = Model::default(); model.set_minimize(); let v1 = model.add_integer_resource_variable("v1", true, 0); assert!(v1.is_ok()); let v1 = v1.unwrap(); let v2 = model.add_integer_resource_variable("v2", false, 0); assert!(v2.is_ok()); let v2 = v2.unwrap(); let model = Rc::new(model); let state = StateInRegistry::from(model.target.clone()); let mut registry = StateRegistry::<_, FNode<_>>::new(model.clone()); let mut transition = Transition::default(); let result = transition.add_effect(v1, v1 + 1); assert!(result.is_ok()); let result = transition.add_effect(v2, v2 + 1); assert!(result.is_ok()); transition.set_cost(IntegerExpression::Cost + 1); let 
expected_state: StateInRegistry = transition.apply(&state, &model.table_registry); let h_evaluator = |_: &StateInRegistry| Some(0); let f_evaluator = |g, h, _: &StateInRegistry| g + h; let node = FNode::generate_root_node(state, 0, &model, &h_evaluator, &f_evaluator, None); assert!(node.is_some()); let node = node.unwrap(); let result = registry.insert(node.clone()); assert!(result.is_some()); let result = node.insert_successor_node( Rc::new(transition.clone()), &mut registry, &h_evaluator, &f_evaluator, None, ); assert!(result.is_some()); let (successor, generated) = result.unwrap(); assert_eq!(successor.state(), &expected_state); assert_eq!(successor.cost(&model), 1); assert_eq!(successor.bound(&model), Some(1)); assert!(!successor.is_closed()); assert_eq!(successor.last(), Some(&transition)); assert_eq!(successor.transitions(), vec![transition],); assert!(generated); assert!(!node.is_closed()); } #[test] fn insert_successor_non_dominance_max() { let mut model = Model::default(); model.set_maximize(); let v1 = model.add_integer_resource_variable("v1", true, 0); assert!(v1.is_ok()); let v1 = v1.unwrap(); let v2 = model.add_integer_resource_variable("v2", false, 0); assert!(v2.is_ok()); let v2 = v2.unwrap(); let model = Rc::new(model); let state = StateInRegistry::from(model.target.clone()); let mut registry = StateRegistry::<_, FNode<_>>::new(model.clone()); let mut transition = Transition::default(); let result = transition.add_effect(v1, v1 + 1); assert!(result.is_ok()); let result = transition.add_effect(v2, v2 + 1); assert!(result.is_ok()); transition.set_cost(IntegerExpression::Cost + 1); let expected_state: StateInRegistry = transition.apply(&state, &model.table_registry); let h_evaluator = |_: &StateInRegistry| Some(0); let f_evaluator = |g, h, _: &StateInRegistry| g + h; let node = FNode::generate_root_node(state, 0, &model, &h_evaluator, &f_evaluator, None); assert!(node.is_some()); let node = node.unwrap(); let result = registry.insert(node.clone()); 
assert!(result.is_some()); let result = node.insert_successor_node( Rc::new(transition.clone()), &mut registry, &h_evaluator, &f_evaluator, None, ); assert!(result.is_some()); let (successor, generated) = result.unwrap(); assert_eq!(successor.state(), &expected_state); assert_eq!(successor.cost(&model), 1); assert_eq!(successor.bound(&model), Some(1)); assert!(!successor.is_closed()); assert_eq!(successor.last(), Some(&transition)); assert_eq!(successor.transitions(), vec![transition],); assert!(generated); assert!(!node.is_closed()); } #[test] fn insert_successor_pruned_by_constraint() { let mut model = Model::default(); let v1 = model.add_integer_resource_variable("v1", true, 0); assert!(v1.is_ok()); let v1 = v1.unwrap(); let v2 = model.add_integer_resource_variable("v2", false, 0); assert!(v2.is_ok()); let v2 = v2.unwrap(); let result = model.add_state_constraint(Condition::comparison_i(ComparisonOperator::Le, v1, 0)); assert!(result.is_ok()); let state = StateInRegistry::from(model.target.clone()); let mut registry = StateRegistry::<_, FNode<_>>::new(Rc::new(model.clone())); let h_evaluator = |_: &StateInRegistry| Some(0); let f_evaluator = |g, h, _: &StateInRegistry| g + h; let node = FNode::generate_root_node(state, 0, &model, &h_evaluator, &f_evaluator, None); assert!(node.is_some()); let node = node.unwrap(); let mut transition = Transition::default(); let result = transition.add_effect(v1, v1 + 1); assert!(result.is_ok()); let result = transition.add_effect(v2, v2 + 1); assert!(result.is_ok()); transition.set_cost(IntegerExpression::Cost + 1); let result = node.insert_successor_node( Rc::new(transition), &mut registry, &h_evaluator, &f_evaluator, None, ); assert_eq!(result, None); assert!(!node.is_closed()); } #[test] fn insert_successor_dominating_min() { let mut model = Model::default(); model.set_minimize(); let v1 = model.add_integer_resource_variable("v1", false, 0); assert!(v1.is_ok()); let v1 = v1.unwrap(); let v2 = 
model.add_integer_resource_variable("v2", false, 0); assert!(v2.is_ok()); let v2 = v2.unwrap(); let mut transition = Transition::default(); let result = transition.add_effect(v1, v1 + 1); assert!(result.is_ok()); let result = transition.add_effect(v2, v2 + 1); assert!(result.is_ok()); let state = StateInRegistry::from(model.target.clone()); let expected_state: StateInRegistry = transition.apply(&state, &model.table_registry); let h_evaluator = |_: &StateInRegistry| Some(0); let f_evaluator = |g, h, _: &StateInRegistry| g + h; let node = FNode::generate_root_node(state, 0, &model, &h_evaluator, &f_evaluator, None); assert!(node.is_some()); let node = node.unwrap(); let mut registry = StateRegistry::<_, FNode<_>>::new(Rc::new(model.clone())); let result = registry.insert(node); assert!(result.is_some()); let (node, dominated) = result.unwrap(); assert!(dominated.is_none()); let result = node.insert_successor_node( Rc::new(transition.clone()), &mut registry, &h_evaluator, &f_evaluator, None, ); assert!(result.is_some()); let (successor, generated) = result.unwrap(); assert_eq!(successor.state(), &expected_state); assert_eq!(successor.cost(&model), 0); assert_eq!(successor.bound(&model), Some(0)); assert!(!successor.is_closed()); assert_eq!(successor.last(), Some(&transition)); assert_eq!(successor.transitions(), vec![transition]); assert!(!generated); assert!(node.is_closed()); } #[test] fn insert_successor_dominating_max() { let mut model = Model::default(); model.set_maximize(); let v1 = model.add_integer_resource_variable("v1", false, 0); assert!(v1.is_ok()); let v1 = v1.unwrap(); let v2 = model.add_integer_resource_variable("v2", false, 0); assert!(v2.is_ok()); let v2 = v2.unwrap(); let mut transition = Transition::default(); let result = transition.add_effect(v1, v1 + 1); assert!(result.is_ok()); let result = transition.add_effect(v2, v2 + 1); assert!(result.is_ok()); transition.set_cost(IntegerExpression::Cost + 1); let state = 
StateInRegistry::from(model.target.clone()); let expected_state: StateInRegistry = transition.apply(&state, &model.table_registry); let h_evaluator = |_: &StateInRegistry| Some(0); let f_evaluator = |g, h, _: &StateInRegistry| g + h; let node = FNode::generate_root_node(state, 0, &model, &h_evaluator, &f_evaluator, None); assert!(node.is_some()); let node = node.unwrap(); let mut registry = StateRegistry::<_, FNode<_>>::new(Rc::new(model.clone())); let result = registry.insert(node); assert!(result.is_some()); let (node, dominated) = result.unwrap(); assert!(dominated.is_none()); let result = node.insert_successor_node( Rc::new(transition.clone()), &mut registry, &h_evaluator, &f_evaluator, None, ); assert!(result.is_some()); let (successor, generated) = result.unwrap(); assert_eq!(successor.state(), &expected_state); assert_eq!(successor.cost(&model), 1); assert_eq!(successor.bound(&model), Some(1)); assert!(!successor.is_closed()); assert_eq!(successor.last(), Some(&transition)); assert_eq!(successor.transitions(), vec![transition]); assert!(!generated); assert!(node.is_closed()); } #[test] fn insert_successor_dominated_min() { let mut model = Model::default(); model.set_minimize(); let v1 = model.add_integer_resource_variable("v1", true, 0); assert!(v1.is_ok()); let v1 = v1.unwrap(); let v2 = model.add_integer_resource_variable("v2", true, 0); assert!(v2.is_ok()); let v2 = v2.unwrap(); let mut transition = Transition::default(); let result = transition.add_effect(v1, v1 + 1); assert!(result.is_ok()); let result = transition.add_effect(v2, v2 + 1); assert!(result.is_ok()); let state = StateInRegistry::from(model.target.clone()); let h_evaluator = |_: &StateInRegistry| Some(0); let f_evaluator = |g, h, _: &StateInRegistry| g + h; let node = FNode::generate_root_node(state, 0, &model, &h_evaluator, &f_evaluator, None); assert!(node.is_some()); let node = node.unwrap(); let mut registry = StateRegistry::<_, FNode<_>>::new(Rc::new(model.clone())); let result = 
registry.insert(node); assert!(result.is_some()); let (node, dominated) = result.unwrap(); assert!(dominated.is_none()); let result = node.insert_successor_node( Rc::new(transition.clone()), &mut registry, &h_evaluator, &f_evaluator, None, ); assert!(result.is_none()); } #[test] fn insert_successor_dominated_max() { let mut model = Model::default(); model.set_maximize(); let v1 = model.add_integer_resource_variable("v1", true, 0); assert!(v1.is_ok()); let v1 = v1.unwrap(); let v2 = model.add_integer_resource_variable("v2", true, 0); assert!(v2.is_ok()); let v2 = v2.unwrap(); let mut transition = Transition::default(); let result = transition.add_effect(v1, v1 + 1); assert!(result.is_ok()); let result = transition.add_effect(v2, v2 + 1); assert!(result.is_ok()); let state = StateInRegistry::from(model.target.clone()); let h_evaluator = |_: &StateInRegistry| Some(0); let f_evaluator = |g, h, _: &StateInRegistry| g + h; let node = FNode::generate_root_node(state, 0, &model, &h_evaluator, &f_evaluator, None); assert!(node.is_some()); let node = node.unwrap(); let mut registry = StateRegistry::<_, FNode<_>>::new(Rc::new(model.clone())); let result = registry.insert(node); assert!(result.is_some()); let (node, dominated) = result.unwrap(); assert!(dominated.is_none()); let result = node.insert_successor_node( Rc::new(transition.clone()), &mut registry, &h_evaluator, &f_evaluator, None, ); assert!(result.is_none()); } #[test] fn insert_successor_pruned_by_bound_min() { let mut model = Model::default(); model.set_minimize(); let v1 = model.add_integer_resource_variable("v1", true, 0); assert!(v1.is_ok()); let v1 = v1.unwrap(); let v2 = model.add_integer_resource_variable("v2", false, 0); assert!(v2.is_ok()); let v2 = v2.unwrap(); let mut transition = Transition::default(); let result = transition.add_effect(v1, v1 + 1); assert!(result.is_ok()); let result = transition.add_effect(v2, v2 + 1); assert!(result.is_ok()); assert!(result.is_ok()); 
transition.set_cost(IntegerExpression::Cost + 1); let state = StateInRegistry::from(model.target.clone()); let h_evaluator = |_: &StateInRegistry| Some(0); let f_evaluator = |g, h, _: &StateInRegistry| g + h; let node = FNode::generate_root_node(state, 0, &model, &h_evaluator, &f_evaluator, None); assert!(node.is_some()); let node = node.unwrap(); let mut registry = StateRegistry::<_, FNode<_>>::new(Rc::new(model.clone())); let result = registry.insert(node); assert!(result.is_some()); let (node, dominated) = result.unwrap(); assert!(dominated.is_none()); let result = node.insert_successor_node( Rc::new(transition.clone()), &mut registry, &h_evaluator, &f_evaluator, Some(0), ); assert!(result.is_none()); } #[test] fn insert_successor_pruned_by_bound_max() { let mut model = Model::default(); model.set_maximize(); let v1 = model.add_integer_resource_variable("v1", true, 0); assert!(v1.is_ok()); let v1 = v1.unwrap(); let v2 = model.add_integer_resource_variable("v2", false, 0); assert!(v2.is_ok()); let v2 = v2.unwrap(); let mut transition = Transition::default(); let result = transition.add_effect(v1, v1 + 1); assert!(result.is_ok()); let result = transition.add_effect(v2, v2 + 1); assert!(result.is_ok()); assert!(result.is_ok()); transition.set_cost(IntegerExpression::Cost + 1); let state = StateInRegistry::from(model.target.clone()); let h_evaluator = |_: &StateInRegistry| Some(0); let f_evaluator = |g, h, _: &StateInRegistry| g + h; let node = FNode::generate_root_node(state, 0, &model, &h_evaluator, &f_evaluator, None); assert!(node.is_some()); let node = node.unwrap(); let mut registry = StateRegistry::<_, FNode<_>>::new(Rc::new(model.clone())); let result = registry.insert(node); assert!(result.is_some()); let (node, dominated) = result.unwrap(); assert!(dominated.is_none()); let result = node.insert_successor_node( Rc::new(transition.clone()), &mut registry, &h_evaluator, &f_evaluator, Some(2), ); assert!(result.is_none()); } #[test] fn 
insert_successor_pruned_by_h() { let mut model = Model::default(); let v1 = model.add_integer_resource_variable("v1", true, 0); assert!(v1.is_ok()); let v1 = v1.unwrap(); let v2 = model.add_integer_resource_variable("v2", false, 0); assert!(v2.is_ok()); let v2 = v2.unwrap(); let state = StateInRegistry::from(model.target.clone()); let mut registry = StateRegistry::<_, FNode<_>>::new(Rc::new(model.clone())); let h_evaluator = |_: &StateInRegistry| Some(0); let f_evaluator = |g, h, _: &StateInRegistry| g + h; let node = FNode::generate_root_node(state, 0, &model, &h_evaluator, &f_evaluator, None); assert!(node.is_some()); let node = node.unwrap(); let mut transition = Transition::default(); let result = transition.add_effect(v1, v1 + 1); assert!(result.is_ok()); let result = transition.add_effect(v2, v2 + 1); assert!(result.is_ok()); transition.set_cost(IntegerExpression::Cost + 1); let h_evaluator = |_: &StateInRegistry| None; let result = node.insert_successor_node( Rc::new(transition), &mut registry, &h_evaluator, &f_evaluator, None, ); assert_eq!(result, None); assert!(!node.is_closed()); } #[test] fn cmp_min() { let mut model = Model::default(); model.set_minimize(); let v1 = model.add_integer_resource_variable("v1", true, 0); assert!(v1.is_ok()); let v1 = v1.unwrap(); let v2 = model.add_integer_resource_variable("v2", false, 0); assert!(v2.is_ok()); let v2 = v2.unwrap(); let model = Rc::new(model); let state = model.target.clone(); let h_evaluator_0 = |_: &StateInRegistry| Some(0); let f_evaluator = |g, h, _: &StateInRegistry| g + h; let node1 = FNode::<_>::generate_root_node( state.clone(), 0, &model, &h_evaluator_0, &f_evaluator, None, ); assert!(node1.is_some()); let node1 = Rc::new(node1.unwrap()); let mut transition = Transition::default(); let result = transition.add_effect(v1, v1 + 1); assert!(result.is_ok()); let result = transition.add_effect(v2, v2 + 1); assert!(result.is_ok()); let node2 = node1.generate_successor_node( Rc::new(transition), &model, 
&h_evaluator_0, &f_evaluator, None, ); assert!(node2.is_some()); let node2 = Rc::new(node2.unwrap()); let mut transition = Transition::default(); transition.set_cost(IntegerExpression::Cost + 1); let mut registry = StateRegistry::<_, FNode<_>>::new(model.clone()); let node3 = node1.insert_successor_node( Rc::new(transition), &mut registry, &h_evaluator_0, &f_evaluator, None, ); assert!(node3.is_some()); let (node3, _) = node3.unwrap(); let h_evaluator_1 = |_: &StateInRegistry| Some(1); let node4 = FNode::<_>::generate_root_node(state, 0, &model, &h_evaluator_1, &f_evaluator, None); assert!(node4.is_some()); let node4 = Rc::new(node4.unwrap()); assert!(node1 == node1); assert!(node1 >= node1); assert!(node1 == node2); assert!(node1 >= node2); assert!(node1 <= node2); assert!(node1 >= node3); assert!(node1 > node3); assert!(node1 != node3); assert!(node1 >= node4); assert!(node1 > node4); assert!(node1 != node4); assert!(node3 >= node4); assert!(node3 > node4); assert!(node3 != node4); } #[test] fn cmp_max() { let mut model = Model::default(); model.set_maximize(); let v1 = model.add_integer_resource_variable("v1", true, 0); assert!(v1.is_ok()); let v1 = v1.unwrap(); let v2 = model.add_integer_resource_variable("v2", false, 0); assert!(v2.is_ok()); let v2 = v2.unwrap(); let model = Rc::new(model); let state = model.target.clone(); let h_evaluator_0 = |_: &StateInRegistry| Some(0); let f_evaluator = |g, h, _: &StateInRegistry| g + h; let node1 = FNode::<_>::generate_root_node( state.clone(), 0, &model, &h_evaluator_0, &f_evaluator, None, ); assert!(node1.is_some()); let node1 = Rc::new(node1.unwrap()); let mut transition = Transition::default(); let result = transition.add_effect(v1, v1 + 1); assert!(result.is_ok()); let result = transition.add_effect(v2, v2 + 1); assert!(result.is_ok()); let node2 = node1.generate_successor_node( Rc::new(transition), &model, &h_evaluator_0, &f_evaluator, None, ); assert!(node2.is_some()); let node2 = Rc::new(node2.unwrap()); let mut 
transition = Transition::default(); transition.set_cost(IntegerExpression::Cost + 1); let mut registry = StateRegistry::<_, FNode<_>>::new(model.clone()); let node3 = node1.insert_successor_node( Rc::new(transition), &mut registry, &h_evaluator_0, &f_evaluator, None, ); assert!(node3.is_some()); let (node3, _) = node3.unwrap(); let h_evaluator_1 = |_: &StateInRegistry| Some(1); let node4 = FNode::<_>::generate_root_node(state, 0, &model, &h_evaluator_1, &f_evaluator, None); assert!(node4.is_some()); let node4 = Rc::new(node4.unwrap()); assert!(node1 == node1); assert!(node1 >= node1); assert!(node1 == node2); assert!(node1 >= node2); assert!(node1 <= node2); assert!(node1 <= node3); assert!(node1 < node3); assert!(node1 != node3); assert!(node1 <= node4); assert!(node1 < node4); assert!(node1 != node4); assert!(node3 <= node4); assert!(node3 < node4); assert!(node3 != node4); } }
true
fbe5dd7f2bba31ac245446a71ac35b0ed2532889
Rust
elipsitz/advent-of-code-2019
/day15/src/main.rs
UTF-8
11,199
3.140625
3
[]
no_license
use std::fs::File; use std::io::{BufRead, BufReader}; use crate::MachineStatus::{BadOpcode, Finished, Blocked}; use std::collections::{HashMap, HashSet}; fn read_lines(filename: &str) -> impl Iterator<Item=String> { let file = File::open(filename).unwrap(); let reader = BufReader::new(file); reader.lines().map(|line| line.unwrap()) } #[derive(Copy, Clone, Debug, PartialEq)] enum MachineStatus { Runnable, Blocked, Finished, BadOpcode(i64), } struct Machine { mem: Vec<i64>, pos: usize, inputs: Vec<i64>, outputs: Vec<i64>, input_pos: usize, output_pos: usize, status: MachineStatus, relative_base: i64, } impl Machine { fn new(mem: &Vec<i64>) -> Machine { let mut new_mem = Vec::new(); new_mem.extend(mem); for _ in 0..1000 { new_mem.push(0); } Machine { mem: new_mem, pos: 0, inputs: Vec::new(), outputs: Vec::new(), input_pos: 0, output_pos: 0, status: MachineStatus::Runnable, relative_base: 0, } } fn arg(&mut self, arg: usize) -> &mut i64 { let addressing: i64 = self.mem[self.pos] / 100; let mode = (addressing / 10_i64.pow(arg as u32)) % 10; match mode { 0 => { let addr = self.mem[self.pos + 1 + arg]; &mut self.mem[addr as usize] }, 1 => &mut self.mem[self.pos + 1 + arg], 2 => { let val = self.mem[self.pos + 1 + arg]; &mut self.mem[(self.relative_base + val) as usize] } _ => { panic!(); } } } fn run(&mut self) { match self.status { BadOpcode(_) => { return; }, Finished => { return; }, _ => {} } loop { let opcode = self.mem[self.pos] % 100; // println!("raw: {}, pos: {}, opcode: {}, addressing: {}", mem[pos], pos, opcode, addressing); match opcode { 1 => { let a = *self.arg(0); let b = *self.arg(1); *self.arg(2) = a + b; self.pos += 4; } 2 => { let a = *self.arg(0); let b = *self.arg(1); *self.arg(2) = a * b; self.pos += 4; } 3 => { if self.input_pos < self.inputs.len() { let val = self.inputs[self.input_pos]; self.input_pos += 1; *self.arg(0) = val; self.pos += 2; } else { self.status = Blocked; return; } } 4 => { let val = *self.arg(0); self.outputs.push(val); self.pos 
+= 2; } 5 => { let cond = *self.arg(0); let target = *self.arg(1); if cond != 0 { self.pos = target as usize; } else { self.pos += 3; } } 6 => { let cond = *self.arg(0); let target = *self.arg(1); if cond == 0 { self.pos = target as usize; } else { self.pos += 3; } } 7 => { let a = *self.arg(0); let b = *self.arg(1); let val = (a < b) as i64; *self.arg(2) = val; self.pos += 4; } 8 => { let a = *self.arg(0); let b = *self.arg(1); let val = (a == b) as i64; *self.arg(2) = val; self.pos += 4; } 9 => { let val = *self.arg(0); self.relative_base += val; self.pos += 2; } 99 => { self.status = Finished; return; } _ => { self.status = BadOpcode(opcode); return; } } } } fn easy_run(&mut self, inputs: &Vec<i64>) -> &Vec<i64> { self.add_inputs(inputs); self.run(); &self.outputs } fn add_input(&mut self, input: i64) { self.inputs.push(input); } fn add_inputs(&mut self, inputs: &Vec<i64>) { self.inputs.extend(inputs); } fn get_output(&mut self) -> Option<i64> { if self.output_pos < self.outputs.len() { let val = self.outputs[self.output_pos]; self.output_pos += 1; Some(val) } else { None } } fn get_status(&self) -> MachineStatus { self.status } } #[derive(Copy, Clone)] enum Direction { None, North, East, South, West } impl Direction { fn number(&self) -> i64 { match self { Direction::None => 0, Direction::North => 1, Direction::South => 2, Direction::West => 3, Direction::East => 4, } } } #[derive(Copy, Clone, Eq, PartialEq, Debug)] enum TileKind { Empty, Wall, Oxygen, Unknown, } impl TileKind { fn passable(&self) -> bool { self == &TileKind::Empty || self == &TileKind::Oxygen } } #[derive(Copy, Clone, Eq, PartialEq, Hash)] struct Position(i64, i64); impl Position { fn neighbor(&self, direction: Direction) -> Position { match direction { Direction::None => Position(self.0, self.1), Direction::North => Position(self.0, self.1 - 1), Direction::East => Position(self.0 + 1, self.1), Direction::South => Position(self.0, self.1 + 1), Direction::West => Position(self.0 - 1, self.1), } 
} fn neighbors(&self) -> Vec<Position> { vec![ self.neighbor(Direction::North), self.neighbor(Direction::East), self.neighbor(Direction::South), self.neighbor(Direction::West), ] } fn direction(&self, other: &Position) -> Direction { if self.0 < other.0 { Direction::East } else if self.0 > other.0 { Direction::West } else if self.1 > other.1 { Direction::North } else if self.1 < other.1 { Direction::South } else { Direction::None } } } struct Tile { kind: TileKind, prev: Position, } struct Robot { machine: Machine, pos: Position, tile: TileKind } impl Robot { fn new(machine: Machine) -> Robot { Robot { machine, pos: Position(0, 0), tile: TileKind::Empty, } } fn step(&mut self, dir: &Direction) -> TileKind { self.machine.add_input(dir.number()); self.machine.run(); let status = self.machine.get_output(); match status { Some(0) => TileKind::Wall, Some(1) => TileKind::Empty, Some(2) => TileKind::Oxygen, _ => TileKind::Unknown, } } } struct World { mem: Vec<i64>, tiles: HashMap<Position, TileKind>, backpointers: HashMap<Position, Position>, } impl World { fn new(mem: &Vec<i64>) -> World { let mut tiles = HashMap::new(); World { mem: mem.to_vec(), tiles, backpointers: HashMap::new(), } } fn get_path(&self, target: &Position) -> Vec<Direction> { let mut backwards = Vec::new(); let mut curr = target; loop { let prev = self.backpointers.get(curr); match prev { None => { break; } Some(prev) => { if prev == curr { break; } backwards.push(prev.direction(curr)); curr = prev; } }; } backwards.reverse(); backwards } fn get_type(&self, pos: &Position) -> TileKind { match self.tiles.get(pos) { None => TileKind::Unknown, Some(tile) => *tile, } } fn explore(&mut self) { let mut fringe: Vec<(Position, Position)> = Vec::new(); fringe.push((Position(0, 0), Position(0, 0))); while fringe.len() > 0 { let (prev, curr) = fringe.remove(0); if self.tiles.contains_key(&curr) { continue; } let mut robot = Robot::new(Machine::new(&self.mem)); let state = self.get_path(&curr).iter().map(|dir| 
robot.step(dir)).last(); let tile = state.unwrap_or(TileKind::Empty); self.tiles.insert(curr, tile); self.backpointers.insert(curr, prev); if tile != TileKind::Wall { for neighbor in curr.neighbors() { if !self.tiles.contains_key(&neighbor) { fringe.push((curr, neighbor)); self.backpointers.insert(neighbor, curr); } } } } } fn get_oxygen_pos(&self) -> Position { let (pos, _) = self.tiles.iter().filter(|(k, v)| **v == TileKind::Oxygen).next().unwrap(); *pos } fn time_to_spread(&self) -> usize { let oxygen_pos = self.get_oxygen_pos(); let mut visited: HashSet<Position> = HashSet::new(); let mut fringe: Vec<(Position, usize)> = Vec::new(); fringe.push((oxygen_pos, 0)); let mut max_time = 0; while fringe.len() > 0 { let (curr, time) = fringe.remove(0); if visited.contains(&curr) { continue; } visited.insert(curr); if time > max_time { max_time = time; } for neighbor in curr.neighbors() { if *self.tiles.get(&neighbor).unwrap_or(&TileKind::Unknown) == TileKind::Empty { fringe.push((neighbor, time + 1)); } } } max_time } } fn main() { let line = read_lines("input.in").nth(0).unwrap(); let mem: Vec<i64> = line.split(",").map(|x| x.parse::<i64>().unwrap()).collect(); // Part 1. let mut world = World::new(&mem); world.explore(); let oxygen_pos = world.get_oxygen_pos(); println!("pos: {}, {}", oxygen_pos.0, oxygen_pos.1); let path = world.get_path(&oxygen_pos); println!("path len: {}", path.len()); let time = world.time_to_spread(); println!("time to spread: {}", time); }
true
7b0917f6f66dfadd6efaa43d189d5a0666715fc5
Rust
0xpr03/LineBuffer
/tests/perf_simple.rs
UTF-8
1,335
3.09375
3
[ "MIT" ]
permissive
use linebuffer::{typenum, LineBuffer}; use std::convert::TryInto; use std::time::*; #[test] #[ignore] fn perf_simple() { const AMOUNT: usize = 8192; let mut buffer: LineBuffer<(), typenum::U2048> = LineBuffer::new(AMOUNT); let start = Instant::now(); let max: u32 = 1_000_000_000; for i in 0..max { buffer.insert(&i.to_ne_bytes(), ()); } let nanos = start.elapsed().as_nanos(); assert_eq!(buffer.capacity_bytes(), AMOUNT); println!("Duration: {} ns for {} entries", nanos, max); // let bytes: u128 = (max * 4) as u128; // let ms = nanos / 1_000_000; // println!("{} Byte in, {} B/ms",bytes, (bytes / ms) ); let expected: u32 = max - 1; assert_eq!( buffer.get((max - 1) as usize), Some((&(expected.to_ne_bytes()[..]), &())) ); } #[test] #[ignore] fn perf_lines() { const AMOUNT: usize = 1024; let mut buffer: LineBuffer<(), typenum::U1024> = LineBuffer::new(AMOUNT); let start = Instant::now(); let max: u32 = 1_000_000_000; let empty = [0; 0]; for _ in 0..max { buffer.insert(&empty, ()); } let nanos = start.elapsed().as_nanos(); assert_eq!(buffer.capacity_bytes(), AMOUNT); println!("Duration: {} ns for {} entries", nanos, max); } #[test] #[ignore] fn perf_from_file() { unimplemented!(); }
true
9d629bebb9fcf5652b27392a9e178250b3a31688
Rust
gngeorgiev/semver_rs
/src/range.rs
UTF-8
15,884
3.21875
3
[ "MIT" ]
permissive
use crate::builder::{Builder, Options, Parseable}; use crate::comparator::{Comparator, ComparatorPair}; use crate::error::Error; use crate::expressions::{ COMPARATOR_LOOSE, COMP_REPLACE_CARETS, RANGE_HYPHEN, RANGE_HYPHEN_LOOSE, RANGE_OR, RANGE_TRIM_CARET, RANGE_TRIM_OPERATORS, RANGE_TRIM_TILDE, SPLIT_SPACES, }; use crate::operator::Operator; use crate::util::{is_any_version, match_at_index}; use crate::version::Version; use std::borrow::Cow; #[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; /// A `version range` is a set of `comparators` which specify versions that satisfy the `range`. /// A comparator is composed of an operator and a version. The set of primitive operators is: /// /// `<` Less than /// /// `<=` Less than or equal to /// /// `>` Greater than /// /// `>=` Greater than or equal to /// /// `=` Equal. If no operator is specified, then equality is assumed, so this operator is optional, but MAY be included. /// /// /// For example, the comparator `>=1.2.7` would match the versions `1.2.7`, `1.2.8`, `2.5.3`, and `1.3.9`, but not the versions `1.2.6` or `1.1.0`. /// /// Comparators can be joined by whitespace to form a comparator set, which is satisfied by the intersection of all of the comparators it includes. /// /// A range is composed of one or more comparator sets, joined by ||. A version matches a range if and only if every comparator in at least one of the ||-separated comparator sets is satisfied by the version. /// /// For example, the range `>=1.2.7 <1.3.0` would match the versions `1.2.7`, `1.2.8`, and `1.2.99`, but not the versions `1.2.6`, `1.3.0`, or `1.1.0`. /// /// The range `1.2.7 || >=1.2.9 <2.0.0` would match the versions `1.2.7`, `1.2.9`, and `1.4.6`, but not the versions `1.2.8` or `2.0.0`. 
#[derive(Debug)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct Range { pub(crate) comparators: Vec<Vec<Comparator>>, opts: Option<Options>, } impl<'p> Parseable<'p> for Range { fn parse(range_input: &'p str, opts: Option<Options>) -> Result<Self, Error> { let loose = opts.unwrap_or_default().loose; if range_input.is_empty() { return Ok(Range { //TODO: Figure out how to make it work with only an empty vec to avoid allocating comparators: vec![vec![]], opts, }); } let comparators = RANGE_OR .split(range_input) .map(move |range: &str| { //1. trim the range let range = range.trim(); //2. replace hyphens `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` let range = if let Some(range) = Range::replace_hyphens(range, loose)? { range.to_string() } else if let Some(range) = Range::replace_carets(range)? { range.to_string() } else { //3. trim the spaces around operators `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` let range = Range::trim_operators(range); //4. trim spaces around the tilde operator `~ 1.2.3` => `~1.2.3` let range = Range::trim_tilde(&range); //5. trim spaces around the caret operator `^ 1.2.3` => `^1.2.3` let range = Range::trim_caret(&range); //6. trim all the spaces that are left `1.2.3 1.2.4` => `1.2.3 1.2.4` let range = Range::trim_spaces(&range); range.to_string() }; let comparators_parsed = range .split(' ') .map(|c| Comparator::normalize(c, loose)) .fold(String::new(), |acc, c| acc + &c + " "); let comparators_parsed = comparators_parsed.trim(); if comparators_parsed.is_empty() { return Ok(Some(vec![])); } // TODO: this split should yield an array with one empty string inside // when used on an empty string, just like in the original npm package. 
// The condition above is a workaround atm let comparators = SPLIT_SPACES .split(comparators_parsed) .filter(|c| { if loose { COMPARATOR_LOOSE.is_match(c) } else { true } }) .map(|r| Comparator::new(r, opts)) .collect::<Result<Vec<Comparator>, Error>>()?; Ok(match comparators.is_empty() { true => None, false => Some(comparators), }) }) .collect::<Result<Vec<Option<Vec<Comparator>>>, Error>>()?; if comparators.is_empty() { Err(Error::InvalidRange(range_input.into())) } else { let comparators = comparators.into_iter().flatten().collect(); Ok(Range { comparators, opts }) } } } impl<'p> Range { /// Construct a new Range, e.g. `>=1.2.4`. pub fn new(range: &'p str) -> Builder<'p, Self> { Builder::new(range) } fn trim_spaces(range: &str) -> Cow<str> { //the other regexes won't allocate if they don't match, however this one will always allocate //so we check whether there's a match if SPLIT_SPACES.is_match(range) { let mut buf = SPLIT_SPACES .split(range) .fold(String::with_capacity(range.len()), |mut acc, s| { acc.push_str(s); acc.push(' '); acc }); buf.pop(); Cow::Owned(buf) } else { Cow::Borrowed(range) } } fn trim_caret(range: &str) -> Cow<str> { RANGE_TRIM_CARET.replace_all(range, "$1^") } fn trim_tilde(range: &str) -> Cow<str> { RANGE_TRIM_TILDE.replace_all(range, "$1~") } fn trim_operators(range: &str) -> Cow<str> { RANGE_TRIM_OPERATORS.replace_all(range, "$1$2$3") } fn replace_hyphens(range: &str, loose: bool) -> Result<Option<ComparatorPair>, Error> { let mut caps = match loose { true => RANGE_HYPHEN_LOOSE.captures_iter(range), false => RANGE_HYPHEN.captures_iter(range), }; let cap = match caps.next() { Some(cap) => cap, None => return Ok(None), }; let from = match_at_index(&cap, 1); let from_major = match_at_index(&cap, 2); let from_minor = match_at_index(&cap, 3); let from_patch = match_at_index(&cap, 4); let comparator_from = if is_any_version(from_major) { Comparator::empty() } else if is_any_version(from_minor) { Comparator::from_parts( Operator::Gte, 
Version::from_parts(from_major.parse()?, 0, 0, None), ) } else if is_any_version(from_patch) { Comparator::from_parts( Operator::Gte, Version::from_parts(from_major.parse()?, from_minor.parse()?, 0, None), ) } else { Comparator::from_parts(Operator::Gte, Version::new(from).parse()?) }; let to = match_at_index(&cap, 7); let to_major = match_at_index(&cap, 8); let to_minor = match_at_index(&cap, 9); let to_patch = match_at_index(&cap, 10); let to_prerelease = match_at_index(&cap, 11); let comparator_to = if is_any_version(to_major) { Comparator::empty() } else if is_any_version(to_minor) { let mut to_major = to_major.parse()?; to_major += 1; Comparator::from_parts(Operator::Lt, Version::from_parts(to_major, 0, 0, None)) } else if is_any_version(to_patch) { let mut to_minor = to_minor.parse()?; to_minor += 1; Comparator::from_parts( Operator::Lt, Version::from_parts(to_major.parse()?, to_minor, 0, None), ) } else if !to_prerelease.is_empty() { Comparator::from_parts( Operator::Lte, Version::from_parts( to_major.parse()?, to_minor.parse()?, to_patch.parse()?, Some(to_prerelease.to_string()), ), ) } else { Comparator::from_parts(Operator::Lte, Version::new(to).parse()?) 
}; Ok(Some(ComparatorPair( Some(comparator_from), Some(comparator_to), ))) } fn replace_carets(range: &str) -> Result<Option<ComparatorPair>, Error> { let mut caps = COMP_REPLACE_CARETS.captures_iter(range); let cap = match caps.next() { Some(cap) => cap, None => return Ok(None), }; let major = match_at_index(&cap, 1); let minor = match_at_index(&cap, 2); let patch = match_at_index(&cap, 3); let prerelease = match_at_index(&cap, 4); let mut cmp = ComparatorPair(None, None); if is_any_version(major) { cmp.0 = Some(Comparator::empty()); } else if is_any_version(minor) { let major = major.parse()?; cmp.0 = Some(Comparator::from_parts( Operator::Gte, Version::from_parts(major, 0, 0, None), )); cmp.1 = Some(Comparator::from_parts( Operator::Lt, Version::from_parts(major + 1, 0, 0, None), )); } else if is_any_version(patch) { let major = major.parse()?; let minor = minor.parse()?; cmp.0 = Some(Comparator::from_parts( Operator::Gte, Version::from_parts(major, minor, 0, None), )); if major == 0 { cmp.1 = Some(Comparator::from_parts( Operator::Lt, Version::from_parts(major, minor + 1, 0, None), )); } else { cmp.1 = Some(Comparator::from_parts( Operator::Lt, Version::from_parts(major + 1, 0, 0, None), )); } } else if !prerelease.is_empty() { //this unwrap will never panic since we already verified that we have at least //one char in the string let prerelease = if prerelease.starts_with('-') { prerelease.to_string() } else { format!("-{}", prerelease) }; let major = major.parse()?; let minor = minor.parse()?; let patch = patch.parse()?; cmp.0 = Some(Comparator::from_parts( Operator::Gte, Version::from_parts(major, minor, patch, Some(prerelease)), )); if major == 0 { if minor == 0 { cmp.1 = Some(Comparator::from_parts( Operator::Lt, Version::from_parts(major, minor, patch + 1, None), )); } else { cmp.1 = Some(Comparator::from_parts( Operator::Lt, Version::from_parts(major, minor + 1, 0, None), )); } } else { cmp.1 = Some(Comparator::from_parts( Operator::Lt, 
Version::from_parts(major + 1, 0, 0, None), )); } } else { let major = major.parse()?; let minor = minor.parse()?; let patch = patch.parse()?; cmp.0 = Some(Comparator::from_parts( Operator::Gte, Version::from_parts(major, minor, patch, None), )); if major == 0 { if minor == 0 { cmp.1 = Some(Comparator::from_parts( Operator::Lt, Version::from_parts(major, minor, patch + 1, None), )); } else { cmp.1 = Some(Comparator::from_parts( Operator::Lt, Version::from_parts(major, minor + 1, 0, None), )); } } else { cmp.1 = Some(Comparator::from_parts( Operator::Lt, Version::from_parts(major + 1, 0, 0, None), )); } } Ok(Some(cmp)) } /// Tests whether a `version` is in this `range`. pub fn test(&self, version: &Version) -> bool { let include_prerelease = match self.opts { Some(ref opts) => opts.include_prerelease, None => false, }; self.comparators.iter().any(move |comparators| { for c in comparators.iter() { if !c.test(version) { return false; } } if version.has_prerelease() && !include_prerelease { // Find the set of versions that are allowed to have prereleases // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 // That should allow `1.2.3-pr.2` to pass. // However, `1.2.4-alpha.notready` should NOT be allowed, // even though it's within the range set by the comparators. 
for c in comparators.iter() { let v = &c.version; if v.is_any() { continue; } if v.has_prerelease() && version.major == v.major && version.minor == v.minor && version.patch == v.patch { return true; } } false } else { true } }) } } #[cfg(test)] mod tests { use super::*; #[test] fn replace_hyphens() { let v = vec![("1.2.3 - 1.2.4", ">=1.2.3 <=1.2.4")]; for v in v { let res = Range::replace_hyphens(v.0, false).unwrap(); let comp = format!("{}", &res.unwrap()); assert!(!comp.contains('-'), "contains hyphen"); assert_eq!(comp, String::from(v.1)); } } #[test] fn trim_operators() { let v = vec![("> 1.2.3 < 1.2.5", ">1.2.3 <1.2.5")]; for v in v { let res = Range::trim_operators(v.0); assert_eq!(res, String::from(v.1)); } } #[test] fn trim_tilde() { let v = vec![("~ 1.2.3", "~1.2.3")]; for v in v { let res = Range::trim_tilde(v.0); assert_eq!(res, String::from(v.1)); } } #[test] fn trim_caret() { let v = vec![("^ 1.2.3", "^1.2.3")]; for v in v { let res = Range::trim_caret(v.0); assert_eq!(res, String::from(v.1)); } } #[test] fn trim_spaces() { let v = vec![("1.2.3 1.2.4", "1.2.3 1.2.4")]; for v in v { let res = Range::trim_spaces(v.0); assert_eq!(res, String::from(v.1)); } } #[test] fn replce_carets() { let v = vec![("^1.2.3", ">=1.2.3 <2.0.0")]; for v in v { let res = Range::replace_carets(v.0).unwrap().unwrap(); assert_eq!(res.to_string(), String::from(v.1)); } } }
true
074555a9bd6413971eb8f9e8c5232e60545afa05
Rust
MartinBG/MiniGit
/src/dir_structs.rs
UTF-8
4,085
2.53125
3
[]
no_license
extern crate walkdir; use self::walkdir::WalkDir; use std::path::PathBuf; use std::ops::Deref; use ::dir_ops::*; use std::fs::{create_dir, remove_file}; #[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] pub struct DirItem { pub path: String, pub hash: String } impl DirItem { pub fn new() -> Self { DirItem { path: String::new(), hash: String::new() } } } #[derive(Debug)] pub struct DirTreeDifferences { removed_items: Vec<DirItem>, added_items: Vec<DirItem> } impl DirTreeDifferences { pub fn new() -> Self { DirTreeDifferences { removed_items: vec!(), added_items: vec!() } } pub fn apply(&self) { for f in self.removed_items.iter() { if f.hash != "" { let path = PathBuf::from(&f.path); remove_file(&path).expect("Could not delete file!"); } } for f in self.added_items.iter() { let path_buf = PathBuf::from(&f.path); let path = path_buf.as_path(); if f.hash == "" && !path.is_dir() { create_dir(&path).expect("Could not create directory!"); } else { let path_str = String::from(path.to_str().unwrap()); let content = get_from_db(&path_str, &f.hash); write_to_file(&path_buf, &content); } } } pub fn print(&self) { println!(); if self.added_items.len() == 0 && self.removed_items.len() == 0 { print!("No changes in directory!"); } else { for x in self.added_items.iter() { if x.hash != "" { println!("++{:?}", x.path); } } println!("\n"); for x in self.removed_items.iter() { if x.hash != "" { println!("--{:?}", x.path); } } } } } #[derive(Serialize, Deserialize, Debug, PartialEq)] pub struct DirTree { pub dir_items: Vec<DirItem> } impl DirTree { pub fn new(root_path: &String) -> Self { let mut dir_items: Vec<DirItem> = vec!(); for entry in WalkDir::new(root_path) { let walk_dir = match entry { Ok(walk_dir) => walk_dir, Err(err) => panic!(err) }; let path = walk_dir.path(); let is_dir = path.is_dir(); let path_str = String::from(path.to_str().unwrap()); if path_str.contains("_init_") { continue; } let mut file_hash = String::new(); if !is_dir { let file_contents = 
get_file_content(&path.to_path_buf()); file_hash = gen_hash(&file_contents); add_to_db(root_path, &file_hash, &file_contents); } let new_item = DirItem { path: path_str, hash: file_hash }; dir_items.push(new_item); } DirTree { dir_items: dir_items } } pub fn differences(&self, other: &DirTree) -> DirTreeDifferences { let mut diff = DirTreeDifferences::new(); let mut it = self.dir_items.iter(); let mut other_it = other.dir_items.iter(); let mut it_stop = false; let mut other_it_stop = false; let dummy = DirItem::new(); let mut it_el: &DirItem = match it.next() { Some(x) => x, None => {it_stop = true; &dummy} }; let mut other_it_el: &DirItem = match other_it.next() { Some(x) => x, None => {other_it_stop = true; &dummy} }; loop { if it_stop && other_it_stop { return diff; } else if it_stop || (!other_it_stop && it_el.path > other_it_el.path) { diff.added_items.push(other_it_el.deref().clone()); other_it_el = match other_it.next() { Some(x) => x, None => {other_it_stop = true; &dummy} }; } else if other_it_stop || (!it_stop && it_el.path < other_it_el.path) { diff.removed_items.push(it_el.clone()); it_el = match it.next() { Some(x) => x, None => {it_stop = true; &dummy} }; } else if it_el.path == other_it_el.path { if it_el.hash != other_it_el.hash { diff.added_items.push(other_it_el.deref().clone()); diff.removed_items.push(it_el.deref().clone()); } it_el = match it.next() { Some(x) => x, None => {it_stop = true; &dummy} }; other_it_el = match other_it.next() { Some(x) => x, None => {other_it_stop = true; &dummy} }; } } } pub fn print(&self) { for x in self.dir_items.iter() { println!("{:?}", x); } } }
true
b01b8316c14d673f79edbe397b5be1ddce6ea7f5
Rust
rust-lang/rust
/tests/ui/borrowck/issue-111554.rs
UTF-8
641
3.484375
3
[ "Apache-2.0", "LLVM-exception", "NCSA", "BSD-2-Clause", "LicenseRef-scancode-unicode", "MIT", "LicenseRef-scancode-other-permissive" ]
permissive
struct Foo {} impl Foo { pub fn foo(&mut self) { || bar(&mut self); //~^ ERROR cannot borrow `self` as mutable, as it is not declared as mutable } pub fn baz(&self) { || bar(&mut self); //~^ ERROR cannot borrow `self` as mutable, as it is not declared as mutable //~| ERROR cannot borrow data in a `&` reference as mutable } pub fn qux(mut self) { || bar(&mut self); // OK } pub fn quux(self) { || bar(&mut self); //~^ ERROR cannot borrow `self` as mutable, as it is not declared as mutable } } fn bar(_: &mut Foo) {} fn main() {}
true
8527b47078cf057d75b08cb3c3ac19d40fcaa87e
Rust
doronz2/curv
/src/cryptographic_primitives/pairing/pairing_bls12_381.rs
UTF-8
3,854
2.515625
3
[ "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
pub use super::traits::Pairing; use crate::elliptic::curves::bls12_381::g1; use crate::elliptic::curves::bls12_381::g2; use bls12_381::Gt; use bls12_381::pairing; use bls12_381::G2Prepared; use bls12_381::multi_miller_loop; pub use crate::elliptic::curves::traits::*; type FE1 = g1::FE; type GE1 = g1::GE; type PK1 = g1::PK; type SK1 = g1::SK; type FE2 = g2::FE; type GE2 = g2::GE; type PK2 = g2::PK; type SK2 = g2::SK; pub struct bls_pairing; impl <'a>Pairing<GE1,GE2,Gt> for bls_pairing{ fn compute_pairing(g1:&GE1,g2:&GE2)->Gt { pairing(&g1.get_element(),&g2.get_element()) } } #[cfg(test)] mod tests{ use super::*; use crate::BigInt; type FE1 = g1::FE; type GE1 = g1::GE; type PK1 = g1::PK; type SK1 = g1::SK; type FE2 = g2::FE; type GE2 = g2::GE; type PK2 = g2::PK; type SK2 = g2::SK; /* crate::elliptic::curves::bls12_381::g2; use bls12_381::Gt; use bls12_381::pairing; pub use crate::elliptic::curves::traits::ECPoint; use crate::cryptographic_primitives::pairing::pairing_bls12_381::GE1; */ fn compute_pairing_for_debug(){ let a:GE1 = ECPoint::generator(); let b:GE2 = ECPoint::generator(); let res = bls_pairing::compute_pairing(&a,&b); println!("pairing result {:?}", res); } #[test] fn basic_pairing(){ let a:GE1 = ECPoint::generator(); let b:GE2 = ECPoint::generator(); let res = bls_pairing::compute_pairing(&a,&b); let prep = G2Prepared::from(b.get_element()); assert_eq!( res, multi_miller_loop(&[(&a.get_element(), &prep)]).final_exponentiation() ); } #[test] fn powers_of_g1_and_g2(){ let a:GE1 = ECPoint::generator(); let b:GE2 = ECPoint::generator(); let scalar_factor:FE1 = ECScalar::new_random(); let res_mul_a:GE1 = a.scalar_mul(&scalar_factor.get_element()); let res_mul_b:GE2 = b.scalar_mul(&scalar_factor.get_element()); let res_a_power = bls_pairing::compute_pairing(&res_mul_a,&b); let res_b_power = bls_pairing::compute_pairing(&a,&res_mul_b); assert_eq!(res_a_power,res_b_power); } #[test] fn powers_of_g1_and_gt_eq(){ let a:GE1 = ECPoint::generator(); let b:GE2 = 
ECPoint::generator(); let scalar_factor:FE1 = ECScalar::new_random(); let res_mul_a:GE1 = a.scalar_mul(&scalar_factor.get_element()); let gt_from_a_power = bls_pairing::compute_pairing(&res_mul_a,&b); let gt_direct_power = bls_pairing::compute_pairing(&a,&b)* scalar_factor.get_element(); assert_eq!(gt_direct_power,gt_from_a_power); } #[test] #[should_panic] fn powers_of_g1_and_gt_not_eq(){ let a:GE1 = ECPoint::generator(); let b:GE2 = ECPoint::generator(); let scalar_factor:FE1 = ECScalar::new_random(); let scalar_factor_2:FE1 = ECScalar::new_random(); let res_mul_a:GE1 = a.scalar_mul(&scalar_factor.get_element()); let gt_from_a_power = bls_pairing::compute_pairing(&res_mul_a,&b); let gt_direct_power = bls_pairing::compute_pairing(&a,&b)* scalar_factor.get_element(); let gt_direct_power = bls_pairing::compute_pairing(&a,&b)* scalar_factor_2.get_element(); assert_eq!(gt_direct_power,gt_from_a_power); } #[test] fn powers_of_g2_and_gt_eq(){ let a:GE1 = ECPoint::generator(); let b:GE2 = ECPoint::generator(); let scalar_factor:FE1 = ECScalar::new_random(); let res_mul_b:GE2 = b.scalar_mul(&scalar_factor.get_element()); let gt_from_a_power = bls_pairing::compute_pairing(&a,&res_mul_b); let gt_direct_power = bls_pairing::compute_pairing(&a,&b)* scalar_factor.get_element(); assert_eq!(gt_direct_power,gt_from_a_power); } }
true
1fed6f990258c51721b5487699352c827ff46695
Rust
sathishvinayk/rust_360
/concurrency/data_sharing_bt_threads.rs
UTF-8
339
3.453125
3
[]
no_license
use std::sync::Arc; fn loop_value(a: Arc<Vec<usize>>) { for value in 0..4 { let y = a.clone(); std::thread::spawn(move || { println!("looping thru each value {}", y[value]); }).join().expect("Error while spawning!"); } } fn main() { let x = Arc::new(vec![1,2,3,4,5]); loop_value(x); }
true
26b96cabf33742866ae8cb8fbffb55c2e11a0891
Rust
algon-320/denwa
/src/main.rs
UTF-8
16,271
2.625
3
[ "MIT" ]
permissive
mod pulse_ext; mod traffic_meter; use std::io::prelude::*; use std::io::{stdin, stdout}; use std::net::{Ipv4Addr, SocketAddr, ToSocketAddrs, UdpSocket}; use std::sync::{Arc, Mutex}; use std::thread::{sleep, spawn}; use std::time::{Duration, Instant}; use log::{debug, error, info}; use p2p_handshake::{ client::get_peer_addr, crypto::{Sealed, SymmetricKey}, error::Error, message::{recv_from, send_to}, }; use pulse::{ sample::{Format, Spec}, stream::Direction, }; use pulse_ext::PulseSimpleExt as _; use traffic_meter::TrafficMeter; #[derive(Debug, Clone, serde::Deserialize, serde::Serialize)] enum Message { Heartbeat, Text(String), Opus(Vec<u8>), } #[derive(Debug)] struct Config { heartbeat_freq: Duration, channels: opus::Channels, sampling_rate: u32, frame_length: u32, } impl Config { fn ch_num(&self) -> u8 { match self.channels { opus::Channels::Mono => 1, opus::Channels::Stereo => 2, } } fn samples_per_frame(&self) -> usize { let ch = self.ch_num() as usize; let samples = (self.sampling_rate * self.frame_length / 1000) as usize; ch * samples } } fn spawn_command_thread( sock: Arc<UdpSocket>, key: Arc<Mutex<SymmetricKey>>, peer_addr: SocketAddr, traffic: Arc<TrafficMeter>, ) { println!("['?' to show available commands]"); spawn(move || { || -> Result<(), Error> { loop { print!("command >>> "); stdout().flush().unwrap(); let mut buffer = String::new(); stdin().read_line(&mut buffer)?; match buffer.as_str().trim() { "" => { continue; } "?" 
=> { println!("?\t\t: show available commands"); println!("stat\t\t: show traffic statistics"); println!("volume+\t\t: mic volume up (10%)"); println!("volume-\t\t: mic volume down (10%)"); println!("mute\t\t: mic mute"); println!("unmute\t\t: mic unmute"); println!("text <message>\t: send a text message"); } "stat" => { println!("{}", traffic); } "volume+" => { error!("not yet implemented"); } "volume-" => { error!("not yet implemented"); } "mute" => { error!("not yet implemented"); } "unmute" => { error!("not yet implemented"); } cmd if cmd.starts_with("text ") => { let text = cmd.strip_prefix("text ").unwrap(); let msg = Message::Text(text.into()); let enc_msg = key.lock().unwrap().encrypt(msg)?; send_to(enc_msg, &sock, peer_addr)?; } cmd => { error!("unknown command: {}", cmd); } } } }() .unwrap_or_else(|e| error!("stdin thread panicked: {}", e)) }); } fn spawn_pulseaudio_input_thread( sock: Arc<UdpSocket>, key: Arc<Mutex<SymmetricKey>>, peer_addr: SocketAddr, config: Arc<Config>, traffic: Arc<TrafficMeter>, ) { let spec = Spec { format: Format::S16NE, channels: config.ch_num(), rate: config.sampling_rate, }; assert!(spec.is_valid()); let pulse_record = simple_pulse::Simple::new( None, // Use the default server "denwa", // Our application’s name Direction::Record, // We want a record stream None, // Use the default device "recording", // Description of our stream &spec, // Our sample format None, // Use default channel map None, // Use default buffering attributes ) .unwrap_or_else(|e| panic!("pulseaudio error: {:?}", e.to_string())); spawn(move || { || -> Result<(), Error> { let mut opus = opus::Encoder::new(spec.rate, config.channels, opus::Application::Voip).unwrap(); let bufsize = config.samples_per_frame(); let mut buf = vec![0i16; bufsize]; let mut encoded = vec![0; bufsize * 2]; loop { pulse_record.read16(&mut buf).unwrap(); let sz = opus.encode(&buf, &mut encoded).unwrap(); let encoded = &encoded[..sz]; let msg = Message::Opus(encoded.to_vec()); let 
enc_msg = key.lock().unwrap().encrypt(msg)?; send_to(enc_msg, &sock, peer_addr)?; traffic.sent_bytes(encoded.len()); } }() .unwrap_or_else(|e| error!("heartbeat thread panicked: {}", e)) }); } fn spawn_heartbeat_thread( sock: Arc<UdpSocket>, key: Arc<Mutex<SymmetricKey>>, peer_addr: SocketAddr, config: Arc<Config>, ) { spawn(move || { || -> Result<(), Error> { loop { let msg = Message::Heartbeat; let enc_msg = key.lock().unwrap().encrypt(msg)?; send_to(enc_msg, &sock, peer_addr)?; sleep(config.heartbeat_freq); } }() .unwrap_or_else(|e| error!("heartbeat thread panicked: {}", e)) }); } fn spawn_watchdog_thread(config: Arc<Config>, last_hb_recved: Arc<Mutex<Option<Instant>>>) { spawn(move || loop { let last_hb_recved: Option<Instant> = *last_hb_recved.lock().unwrap(); if let Some(last) = last_hb_recved { if last.elapsed() >= config.heartbeat_freq * 5 { error!("Peer was dead."); std::process::exit(1); } } sleep(Duration::from_millis(500)); }); } fn voice_chat( sock: UdpSocket, my_addr: SocketAddr, peer_addr: SocketAddr, preshared_key: &[u8], config: Config, ) -> Result<(), Error> { let sock = Arc::new(sock); info!("config = {:?}", config); let config = Arc::new(config); // `key_id` is needed to agree the same "direction" of encryption on both sides. 
assert_ne!(my_addr, peer_addr); let key_id = if my_addr < peer_addr { 0 } else { 1 }; debug!("key_id = {}", key_id); // derive a symmetric key for encryption of messages let key = SymmetricKey::new(preshared_key, key_id)?; let key = Arc::new(Mutex::new(key)); let spec = Spec { format: Format::S16NE, channels: config.ch_num(), rate: config.sampling_rate, }; assert!(spec.is_valid()); let traffic = Arc::new(TrafficMeter::new()); let last_hb_recved = Arc::new(Mutex::new(None)); // spawn threads spawn_command_thread(sock.clone(), key.clone(), peer_addr, traffic.clone()); spawn_pulseaudio_input_thread( sock.clone(), key.clone(), peer_addr, config.clone(), traffic.clone(), ); spawn_heartbeat_thread(sock.clone(), key.clone(), peer_addr, config.clone()); spawn_watchdog_thread(config.clone(), last_hb_recved.clone()); let pulse_output = simple_pulse::Simple::new( None, // Use the default server "denwa", // Our application’s name Direction::Playback, // We want a playback stream None, // Use the default device "output", // Description of our stream &spec, // Our sample format None, // Use default channel map None, // Use default buffering attributes ) .unwrap_or_else(|e| panic!("pulseaudio error: {:?}", e.to_string())); let mut opus = opus::Decoder::new(spec.rate, config.channels).unwrap(); let mut buf = vec![0i16; config.samples_per_frame()]; 'process_message: loop { let (enc_msg, src) = match recv_from::<Sealed<Message>>(&sock) { Ok(ok) => ok, Err(Error::Io(err)) if err.kind() == std::io::ErrorKind::WouldBlock => { debug!("timeout"); continue 'process_message; } Err(err) => { error!("{}", err); sleep(Duration::from_secs(1)); continue 'process_message; } }; if src != peer_addr { error!("message from other than the expected peer. 
ignored."); continue 'process_message; } // decrypt received message let msg = match key.lock().unwrap().decrypt(enc_msg) { Ok(msg) => msg, Err(err) => { error!("invalid message: {}", err); continue 'process_message; } }; match msg { Message::Heartbeat => { debug!("Heatbeat from {}", src); let mut last_hb_recved = last_hb_recved.lock().unwrap(); *last_hb_recved = Some(Instant::now()); } Message::Opus(data) => { traffic.received_bytes(data.len()); let sz = opus.decode(&data, &mut buf, false).unwrap(); pulse_output.write16(&buf[..sz]).unwrap(); } Message::Text(text) => { println!("text message: {}", text); } } } } #[derive(Debug, serde::Serialize, serde::Deserialize)] struct InvitationToken { addr: SocketAddr, psk: Vec<u8>, } fn random_psk(len: usize) -> Vec<u8> { use rand::{thread_rng, Rng}; let mut key = vec![0u8; len]; let mut rng = thread_rng(); rng.fill(key.as_mut_slice()); key } fn start(matches: clap::ArgMatches) -> Result<(), Box<dyn std::error::Error>> { use clap::value_t; let config = { let audio_ch = matches.value_of("audio-ch").expect("default"); let channels = match audio_ch { "mono" => opus::Channels::Mono, "stereo" => opus::Channels::Stereo, _ => unreachable!(), }; Config { heartbeat_freq: Duration::from_secs(1), channels, sampling_rate: value_t!(matches, "audio-rate", u32).expect("default"), frame_length: value_t!(matches, "frame-length", u32).expect("default"), } }; match matches.subcommand() { ("wait", Some(matches)) => { let sock = UdpSocket::bind((Ipv4Addr::UNSPECIFIED, 0))?; info!("socket local address = {:?}", sock.local_addr().unwrap()); let addr = matches.value_of("server-address").expect("required arg"); let port = value_t!(matches, "server-port", u16).expect("required arg"); let server_sockaddr = (addr, port).to_socket_addrs()?.next().unwrap(); let psk = matches .value_of("preshared-key") .map(|psk| psk.as_bytes().to_vec()) .unwrap_or_else(|| random_psk(8)); let token = InvitationToken { addr: server_sockaddr, psk: psk.clone(), }; let 
token_bytes = serde_cbor::to_vec(&token)?; println!("invitation-token: {}", base64::encode(&token_bytes)); let (my_addr, peer_addr) = get_peer_addr(&sock, server_sockaddr, &psk)?; voice_chat(sock, my_addr, peer_addr, &psk, config)?; Ok(()) } ("join", Some(matches)) => { let sock = UdpSocket::bind((Ipv4Addr::UNSPECIFIED, 0))?; info!("socket local address = {:?}", sock.local_addr().unwrap()); let token = matches.value_of("invitation-token").expect("required arg"); let token_bytes = base64::decode(token)?; let token: InvitationToken = serde_cbor::from_slice(&token_bytes)?; let (my_addr, peer_addr) = get_peer_addr(&sock, token.addr, &token.psk)?; voice_chat(sock, my_addr, peer_addr, &token.psk, config)?; Ok(()) } ("lan", Some(matches)) => { let local_port = value_t!(matches, "local-port", u16).unwrap_or(0); let bind_addr = value_t!(matches, "bind-address", Ipv4Addr).unwrap(); let sock = UdpSocket::bind((bind_addr, local_port))?; info!("socket local address = {:?}", sock.local_addr().unwrap()); println!("Specify peer's address and port number (e.g. 
127.0.0.1:10001)"); print!("address:port > "); stdout().flush()?; let psk = matches .value_of("preshared-key") .map(|psk| psk.as_bytes().to_vec()) .unwrap_or_else(|| random_psk(8)); let peer_addr = { let mut peer_addr = String::new(); stdin().read_line(&mut peer_addr)?; peer_addr.trim().to_socket_addrs()?.next().unwrap() }; // FIXME: use of 0.0.0.0 will lead to insecure channel let my_addr = sock.local_addr()?; voice_chat(sock, my_addr, peer_addr, &psk, config)?; Ok(()) } (cmd, _) => Err(format!("unknown subcommand {:?}", cmd).into()), } } fn main() { env_logger::init(); use clap::{App, Arg, SubCommand}; let matches = App::new("denwa") .version(env!("CARGO_PKG_VERSION")) .author("algon-320 <algon.0320@mail.com>") .subcommand( SubCommand::with_name("wait") .arg( Arg::with_name("server-address") .takes_value(true) .required(true), ) .arg( Arg::with_name("server-port") .takes_value(true) .required(true), ) .arg( Arg::with_name("preshared-key") .long("psk") .takes_value(true), ), ) .subcommand( SubCommand::with_name("join").arg( Arg::with_name("invitation-token") .takes_value(true) .required(true), ), ) .subcommand( SubCommand::with_name("lan") .arg( Arg::with_name("bind-address") .long("bind-address") .short("a") .takes_value(true) .default_value("127.0.0.1"), ) .arg( Arg::with_name("local-port") .long("local-port") .short("p") .takes_value(true), ) .arg( Arg::with_name("preshared-key") .long("psk") .takes_value(true), ), ) .arg( Arg::with_name("audio-ch") .long("ch") .takes_value(true) .possible_values(&["mono", "stereo"]) .default_value("mono"), ) .arg( Arg::with_name("audio-rate") .long("rate") .takes_value(true) .help("Sampling rate") .possible_values(&["8000", "12000", "16000", "24000", "48000"]) .default_value("24000"), ) .arg( Arg::with_name("frame-length") .long("frame-length") .takes_value(true) .help("Opus frame length in milliseconds") .possible_values(&["2.5", "5", "10", "20", "40", "60"]) .default_value("20"), ) .get_matches(); match start(matches) { 
Ok(()) => {} Err(err) => { error!("{}", err); } } }
true
ba25d44bcdd0cef9d4c75d0595ea8056739dfdbc
Rust
Ryan-Gribben/github-rs
/tests/gitignore.rs
UTF-8
1,427
2.703125
3
[ "LicenseRef-scancode-unknown-license-reference", "MIT", "Apache-2.0" ]
permissive
extern crate github_rs as gh; extern crate serde_json; use gh::StatusCode; use gh::client::{Executor, Github}; use serde_json::Value; use std::io::BufReader; use std::io::prelude::*; use std::fs::File; fn auth_token() -> Result<String, std::io::Error> { let file = File::open("tests/auth_token")?; let mut reader = BufReader::new(file); let mut buffer = String::new(); let _ = reader.read_line(&mut buffer)?; Ok(buffer) } #[test] fn get_gitignore_templates() { let g = Github::new(&auth_token().unwrap()).unwrap(); let (_headers, status, json) = g.get().gitignore().templates().execute::<Value>().unwrap(); println!("Status: {}\nResponse: {:#?}", status, json); assert_eq!(status, StatusCode::Ok); if let Some(Value::Array(languages)) = json { assert!(languages.contains(&Value::String("Rust".into()))) } } #[test] fn get_gitignore_templates_rust() { let g = Github::new(&auth_token().unwrap()).unwrap(); let (_headers, status, json) = g.get() .gitignore() .templates() .lang("Rust") .execute::<Value>() .unwrap(); println!("Status: {}\nResponse: {:#?}", status, json); assert_eq!(status, StatusCode::Ok); if let Some(json) = json { assert!( json.get("source") .unwrap() .as_str() .unwrap() .contains("Cargo") ) } }
true
43e8f3034190217b2aa67b1878414b9d69806aff
Rust
jgilchrist/advent-of-code
/rust/src/y2015/src/d04.rs
UTF-8
1,244
2.90625
3
[]
no_license
use prelude::*; pub struct Day04; fn all_hashes(input: &str) -> impl Iterator<Item = String> + '_ { let natural_numbers = iterate(0, |&i| i + 1); let natural_numbers_as_strings = natural_numbers.map(|i| i.to_string()); let infinite_challenge_input = std::iter::repeat(input); let inputs_with_numbers = infinite_challenge_input.zip(natural_numbers_as_strings); let inputs = inputs_with_numbers.map(|(a, b)| a.to_owned() + &b); inputs.map(|i| format!("{:x}", md5::compute(i))) } impl AocSolution for Day04 { type Input = String; fn process_input(input: &str) -> Self::Input { input.trim().to_owned() } const PART1_SOLUTION: Solution = solution(346386); fn part1(input: &Self::Input) -> impl Into<Solution> { for (i, hash) in all_hashes(input).enumerate() { if hash.starts_with("00000") { return i; } } unreachable!() } const PART2_SOLUTION: Solution = solution(9958218); fn part2(input: &Self::Input) -> impl Into<Solution> { for (i, hash) in all_hashes(input).enumerate() { if hash.starts_with("000000") { return i; } } unreachable!() } }
true
0521c3d7b912c58ea75476f7186a02e628610459
Rust
astro/rust-lpc43xx
/src/sct/res/mod.rs
UTF-8
66,903
2.828125
3
[ "Apache-2.0" ]
permissive
#[doc = r" Value read from the register"] pub struct R { bits: u32, } #[doc = r" Value to write to the register"] pub struct W { bits: u32, } impl super::RES { #[doc = r" Modifies the contents of the register"] #[inline] pub fn modify<F>(&self, f: F) where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W, { let bits = self.register.get(); let r = R { bits: bits }; let mut w = W { bits: bits }; f(&r, &mut w); self.register.set(w.bits); } #[doc = r" Reads the contents of the register"] #[inline] pub fn read(&self) -> R { R { bits: self.register.get(), } } #[doc = r" Writes to the register"] #[inline] pub fn write<F>(&self, f: F) where F: FnOnce(&mut W) -> &mut W, { let mut w = W::reset_value(); f(&mut w); self.register.set(w.bits); } #[doc = r" Writes the reset value to the register"] #[inline] pub fn reset(&self) { self.write(|w| w) } } #[doc = "Possible values of the field `O0RES`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum O0RESR { #[doc = "No change."] NO_CHANGE, #[doc = "Set output (or clear based on the SETCLR0 field)."] SET_OUTPUT_OR_CLEAR, #[doc = "Clear output (or set based on the SETCLR0 field)."] CLEAR_OUTPUT_OR_SET, #[doc = "Toggle output."] TOGGLE_OUTPUT, } impl O0RESR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { match *self { O0RESR::NO_CHANGE => 0, O0RESR::SET_OUTPUT_OR_CLEAR => 1, O0RESR::CLEAR_OUTPUT_OR_SET => 2, O0RESR::TOGGLE_OUTPUT => 3, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: u8) -> O0RESR { match value { 0 => O0RESR::NO_CHANGE, 1 => O0RESR::SET_OUTPUT_OR_CLEAR, 2 => O0RESR::CLEAR_OUTPUT_OR_SET, 3 => O0RESR::TOGGLE_OUTPUT, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `NO_CHANGE`"] #[inline] pub fn is_no_change(&self) -> bool { *self == O0RESR::NO_CHANGE } #[doc = "Checks if the value of the field is `SET_OUTPUT_OR_CLEAR`"] #[inline] pub fn is_set_output_or_clear(&self) -> bool { *self == O0RESR::SET_OUTPUT_OR_CLEAR } #[doc = "Checks if the value of 
the field is `CLEAR_OUTPUT_OR_SET`"] #[inline] pub fn is_clear_output_or_set(&self) -> bool { *self == O0RESR::CLEAR_OUTPUT_OR_SET } #[doc = "Checks if the value of the field is `TOGGLE_OUTPUT`"] #[inline] pub fn is_toggle_output(&self) -> bool { *self == O0RESR::TOGGLE_OUTPUT } } #[doc = "Possible values of the field `O1RES`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum O1RESR { #[doc = "No change."] NO_CHANGE, #[doc = "Set output (or clear based on the SETCLR1 field)."] SET_OUTPUT_OR_CLEAR, #[doc = "Clear output (or set based on the SETCLR1 field)."] CLEAR_OUTPUT_OR_SET, #[doc = "Toggle output."] TOGGLE_OUTPUT, } impl O1RESR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { match *self { O1RESR::NO_CHANGE => 0, O1RESR::SET_OUTPUT_OR_CLEAR => 1, O1RESR::CLEAR_OUTPUT_OR_SET => 2, O1RESR::TOGGLE_OUTPUT => 3, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: u8) -> O1RESR { match value { 0 => O1RESR::NO_CHANGE, 1 => O1RESR::SET_OUTPUT_OR_CLEAR, 2 => O1RESR::CLEAR_OUTPUT_OR_SET, 3 => O1RESR::TOGGLE_OUTPUT, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `NO_CHANGE`"] #[inline] pub fn is_no_change(&self) -> bool { *self == O1RESR::NO_CHANGE } #[doc = "Checks if the value of the field is `SET_OUTPUT_OR_CLEAR`"] #[inline] pub fn is_set_output_or_clear(&self) -> bool { *self == O1RESR::SET_OUTPUT_OR_CLEAR } #[doc = "Checks if the value of the field is `CLEAR_OUTPUT_OR_SET`"] #[inline] pub fn is_clear_output_or_set(&self) -> bool { *self == O1RESR::CLEAR_OUTPUT_OR_SET } #[doc = "Checks if the value of the field is `TOGGLE_OUTPUT`"] #[inline] pub fn is_toggle_output(&self) -> bool { *self == O1RESR::TOGGLE_OUTPUT } } #[doc = "Possible values of the field `O2RES`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum O2RESR { #[doc = "No change."] NO_CHANGE, #[doc = "Set output (or clear based on the SETCLR2 field)."] SET_OUTPUT_OR_CLEAR, #[doc = "Clear output n (or set based on the SETCLR2 
field)."] CLEAR_OUTPUT_N_OR_S, #[doc = "Toggle output."] TOGGLE_OUTPUT, } impl O2RESR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { match *self { O2RESR::NO_CHANGE => 0, O2RESR::SET_OUTPUT_OR_CLEAR => 1, O2RESR::CLEAR_OUTPUT_N_OR_S => 2, O2RESR::TOGGLE_OUTPUT => 3, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: u8) -> O2RESR { match value { 0 => O2RESR::NO_CHANGE, 1 => O2RESR::SET_OUTPUT_OR_CLEAR, 2 => O2RESR::CLEAR_OUTPUT_N_OR_S, 3 => O2RESR::TOGGLE_OUTPUT, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `NO_CHANGE`"] #[inline] pub fn is_no_change(&self) -> bool { *self == O2RESR::NO_CHANGE } #[doc = "Checks if the value of the field is `SET_OUTPUT_OR_CLEAR`"] #[inline] pub fn is_set_output_or_clear(&self) -> bool { *self == O2RESR::SET_OUTPUT_OR_CLEAR } #[doc = "Checks if the value of the field is `CLEAR_OUTPUT_N_OR_S`"] #[inline] pub fn is_clear_output_n_or_s(&self) -> bool { *self == O2RESR::CLEAR_OUTPUT_N_OR_S } #[doc = "Checks if the value of the field is `TOGGLE_OUTPUT`"] #[inline] pub fn is_toggle_output(&self) -> bool { *self == O2RESR::TOGGLE_OUTPUT } } #[doc = "Possible values of the field `O3RES`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum O3RESR { #[doc = "No change."] NO_CHANGE, #[doc = "Set output (or clear based on the SETCLR3 field)."] SET_OUTPUT_OR_CLEAR, #[doc = "Clear output (or set based on the SETCLR3 field)."] CLEAR_OUTPUT_OR_SET, #[doc = "Toggle output."] TOGGLE_OUTPUT, } impl O3RESR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { match *self { O3RESR::NO_CHANGE => 0, O3RESR::SET_OUTPUT_OR_CLEAR => 1, O3RESR::CLEAR_OUTPUT_OR_SET => 2, O3RESR::TOGGLE_OUTPUT => 3, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: u8) -> O3RESR { match value { 0 => O3RESR::NO_CHANGE, 1 => O3RESR::SET_OUTPUT_OR_CLEAR, 2 => O3RESR::CLEAR_OUTPUT_OR_SET, 3 => O3RESR::TOGGLE_OUTPUT, _ => unreachable!(), } } #[doc = 
"Checks if the value of the field is `NO_CHANGE`"] #[inline] pub fn is_no_change(&self) -> bool { *self == O3RESR::NO_CHANGE } #[doc = "Checks if the value of the field is `SET_OUTPUT_OR_CLEAR`"] #[inline] pub fn is_set_output_or_clear(&self) -> bool { *self == O3RESR::SET_OUTPUT_OR_CLEAR } #[doc = "Checks if the value of the field is `CLEAR_OUTPUT_OR_SET`"] #[inline] pub fn is_clear_output_or_set(&self) -> bool { *self == O3RESR::CLEAR_OUTPUT_OR_SET } #[doc = "Checks if the value of the field is `TOGGLE_OUTPUT`"] #[inline] pub fn is_toggle_output(&self) -> bool { *self == O3RESR::TOGGLE_OUTPUT } } #[doc = "Possible values of the field `O4RES`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum O4RESR { #[doc = "No change."] NO_CHANGE, #[doc = "Set output (or clear based on the SETCLR4 field)."] SET_OUTPUT_OR_CLEAR, #[doc = "Clear output (or set based on the SETCLR4 field)."] CLEAR_OUTPUT_OR_SET, #[doc = "Toggle output."] TOGGLE_OUTPUT, } impl O4RESR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { match *self { O4RESR::NO_CHANGE => 0, O4RESR::SET_OUTPUT_OR_CLEAR => 1, O4RESR::CLEAR_OUTPUT_OR_SET => 2, O4RESR::TOGGLE_OUTPUT => 3, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: u8) -> O4RESR { match value { 0 => O4RESR::NO_CHANGE, 1 => O4RESR::SET_OUTPUT_OR_CLEAR, 2 => O4RESR::CLEAR_OUTPUT_OR_SET, 3 => O4RESR::TOGGLE_OUTPUT, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `NO_CHANGE`"] #[inline] pub fn is_no_change(&self) -> bool { *self == O4RESR::NO_CHANGE } #[doc = "Checks if the value of the field is `SET_OUTPUT_OR_CLEAR`"] #[inline] pub fn is_set_output_or_clear(&self) -> bool { *self == O4RESR::SET_OUTPUT_OR_CLEAR } #[doc = "Checks if the value of the field is `CLEAR_OUTPUT_OR_SET`"] #[inline] pub fn is_clear_output_or_set(&self) -> bool { *self == O4RESR::CLEAR_OUTPUT_OR_SET } #[doc = "Checks if the value of the field is `TOGGLE_OUTPUT`"] #[inline] pub fn is_toggle_output(&self) 
-> bool { *self == O4RESR::TOGGLE_OUTPUT } } #[doc = "Possible values of the field `O5RES`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum O5RESR { #[doc = "No change."] NO_CHANGE, #[doc = "Set output (or clear based on the SETCLR5 field)."] SET_OUTPUT_OR_CLEAR, #[doc = "Clear output (or set based on the SETCLR5 field)."] CLEAR_OUTPUT_OR_SET, #[doc = "Toggle output."] TOGGLE_OUTPUT, } impl O5RESR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { match *self { O5RESR::NO_CHANGE => 0, O5RESR::SET_OUTPUT_OR_CLEAR => 1, O5RESR::CLEAR_OUTPUT_OR_SET => 2, O5RESR::TOGGLE_OUTPUT => 3, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: u8) -> O5RESR { match value { 0 => O5RESR::NO_CHANGE, 1 => O5RESR::SET_OUTPUT_OR_CLEAR, 2 => O5RESR::CLEAR_OUTPUT_OR_SET, 3 => O5RESR::TOGGLE_OUTPUT, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `NO_CHANGE`"] #[inline] pub fn is_no_change(&self) -> bool { *self == O5RESR::NO_CHANGE } #[doc = "Checks if the value of the field is `SET_OUTPUT_OR_CLEAR`"] #[inline] pub fn is_set_output_or_clear(&self) -> bool { *self == O5RESR::SET_OUTPUT_OR_CLEAR } #[doc = "Checks if the value of the field is `CLEAR_OUTPUT_OR_SET`"] #[inline] pub fn is_clear_output_or_set(&self) -> bool { *self == O5RESR::CLEAR_OUTPUT_OR_SET } #[doc = "Checks if the value of the field is `TOGGLE_OUTPUT`"] #[inline] pub fn is_toggle_output(&self) -> bool { *self == O5RESR::TOGGLE_OUTPUT } } #[doc = "Possible values of the field `O6RES`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum O6RESR { #[doc = "No change."] NO_CHANGE, #[doc = "Set output (or clear based on the SETCLR6 field)."] SET_OUTPUT_OR_CLEAR, #[doc = "Clear output (or set based on the SETCLR6 field)."] CLEAR_OUTPUT_OR_SET, #[doc = "Toggle output."] TOGGLE_OUTPUT, } impl O6RESR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { match *self { O6RESR::NO_CHANGE => 0, O6RESR::SET_OUTPUT_OR_CLEAR => 1, 
O6RESR::CLEAR_OUTPUT_OR_SET => 2, O6RESR::TOGGLE_OUTPUT => 3, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: u8) -> O6RESR { match value { 0 => O6RESR::NO_CHANGE, 1 => O6RESR::SET_OUTPUT_OR_CLEAR, 2 => O6RESR::CLEAR_OUTPUT_OR_SET, 3 => O6RESR::TOGGLE_OUTPUT, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `NO_CHANGE`"] #[inline] pub fn is_no_change(&self) -> bool { *self == O6RESR::NO_CHANGE } #[doc = "Checks if the value of the field is `SET_OUTPUT_OR_CLEAR`"] #[inline] pub fn is_set_output_or_clear(&self) -> bool { *self == O6RESR::SET_OUTPUT_OR_CLEAR } #[doc = "Checks if the value of the field is `CLEAR_OUTPUT_OR_SET`"] #[inline] pub fn is_clear_output_or_set(&self) -> bool { *self == O6RESR::CLEAR_OUTPUT_OR_SET } #[doc = "Checks if the value of the field is `TOGGLE_OUTPUT`"] #[inline] pub fn is_toggle_output(&self) -> bool { *self == O6RESR::TOGGLE_OUTPUT } } #[doc = "Possible values of the field `O7RES`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum O7RESR { #[doc = "No change."] NO_CHANGE, #[doc = "Set output (or clear based on the SETCLR7 field)."] SET_OUTPUT_OR_CLEAR, #[doc = "Clear output (or set based on the SETCLR7 field)."] CLEAR_OUTPUT_OR_SET, #[doc = "Toggle output."] TOGGLE_OUTPUT, } impl O7RESR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { match *self { O7RESR::NO_CHANGE => 0, O7RESR::SET_OUTPUT_OR_CLEAR => 1, O7RESR::CLEAR_OUTPUT_OR_SET => 2, O7RESR::TOGGLE_OUTPUT => 3, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: u8) -> O7RESR { match value { 0 => O7RESR::NO_CHANGE, 1 => O7RESR::SET_OUTPUT_OR_CLEAR, 2 => O7RESR::CLEAR_OUTPUT_OR_SET, 3 => O7RESR::TOGGLE_OUTPUT, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `NO_CHANGE`"] #[inline] pub fn is_no_change(&self) -> bool { *self == O7RESR::NO_CHANGE } #[doc = "Checks if the value of the field is `SET_OUTPUT_OR_CLEAR`"] #[inline] pub fn is_set_output_or_clear(&self) -> 
bool { *self == O7RESR::SET_OUTPUT_OR_CLEAR } #[doc = "Checks if the value of the field is `CLEAR_OUTPUT_OR_SET`"] #[inline] pub fn is_clear_output_or_set(&self) -> bool { *self == O7RESR::CLEAR_OUTPUT_OR_SET } #[doc = "Checks if the value of the field is `TOGGLE_OUTPUT`"] #[inline] pub fn is_toggle_output(&self) -> bool { *self == O7RESR::TOGGLE_OUTPUT } } #[doc = "Possible values of the field `O8RES`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum O8RESR { #[doc = "No change."] NO_CHANGE, #[doc = "Set output (or clear based on the SETCLR8 field)."] SET_OUTPUT_OR_CLEAR, #[doc = "Clear output (or set based on the SETCLR8 field)."] CLEAR_OUTPUT_OR_SET, #[doc = "Toggle output."] TOGGLE_OUTPUT, } impl O8RESR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { match *self { O8RESR::NO_CHANGE => 0, O8RESR::SET_OUTPUT_OR_CLEAR => 1, O8RESR::CLEAR_OUTPUT_OR_SET => 2, O8RESR::TOGGLE_OUTPUT => 3, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: u8) -> O8RESR { match value { 0 => O8RESR::NO_CHANGE, 1 => O8RESR::SET_OUTPUT_OR_CLEAR, 2 => O8RESR::CLEAR_OUTPUT_OR_SET, 3 => O8RESR::TOGGLE_OUTPUT, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `NO_CHANGE`"] #[inline] pub fn is_no_change(&self) -> bool { *self == O8RESR::NO_CHANGE } #[doc = "Checks if the value of the field is `SET_OUTPUT_OR_CLEAR`"] #[inline] pub fn is_set_output_or_clear(&self) -> bool { *self == O8RESR::SET_OUTPUT_OR_CLEAR } #[doc = "Checks if the value of the field is `CLEAR_OUTPUT_OR_SET`"] #[inline] pub fn is_clear_output_or_set(&self) -> bool { *self == O8RESR::CLEAR_OUTPUT_OR_SET } #[doc = "Checks if the value of the field is `TOGGLE_OUTPUT`"] #[inline] pub fn is_toggle_output(&self) -> bool { *self == O8RESR::TOGGLE_OUTPUT } } #[doc = "Possible values of the field `O9RES`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum O9RESR { #[doc = "No change."] NO_CHANGE, #[doc = "Set output (or clear based on the SETCLR9 field)."] 
SET_OUTPUT_OR_CLEAR, #[doc = "Clear output (or set based on the SETCLR9 field)."] CLEAR_OUTPUT_OR_SET, #[doc = "Toggle output."] TOGGLE_OUTPUT, } impl O9RESR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { match *self { O9RESR::NO_CHANGE => 0, O9RESR::SET_OUTPUT_OR_CLEAR => 1, O9RESR::CLEAR_OUTPUT_OR_SET => 2, O9RESR::TOGGLE_OUTPUT => 3, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: u8) -> O9RESR { match value { 0 => O9RESR::NO_CHANGE, 1 => O9RESR::SET_OUTPUT_OR_CLEAR, 2 => O9RESR::CLEAR_OUTPUT_OR_SET, 3 => O9RESR::TOGGLE_OUTPUT, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `NO_CHANGE`"] #[inline] pub fn is_no_change(&self) -> bool { *self == O9RESR::NO_CHANGE } #[doc = "Checks if the value of the field is `SET_OUTPUT_OR_CLEAR`"] #[inline] pub fn is_set_output_or_clear(&self) -> bool { *self == O9RESR::SET_OUTPUT_OR_CLEAR } #[doc = "Checks if the value of the field is `CLEAR_OUTPUT_OR_SET`"] #[inline] pub fn is_clear_output_or_set(&self) -> bool { *self == O9RESR::CLEAR_OUTPUT_OR_SET } #[doc = "Checks if the value of the field is `TOGGLE_OUTPUT`"] #[inline] pub fn is_toggle_output(&self) -> bool { *self == O9RESR::TOGGLE_OUTPUT } } #[doc = "Possible values of the field `O10RES`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum O10RESR { #[doc = "No change."] NO_CHANGE, #[doc = "Set output (or clear based on the SETCLR10 field)."] SET_OUTPUT_OR_CLEAR, #[doc = "Clear output (or set based on the SETCLR10 field)."] CLEAR_OUTPUT_OR_SET, #[doc = "Toggle output."] TOGGLE_OUTPUT, } impl O10RESR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { match *self { O10RESR::NO_CHANGE => 0, O10RESR::SET_OUTPUT_OR_CLEAR => 1, O10RESR::CLEAR_OUTPUT_OR_SET => 2, O10RESR::TOGGLE_OUTPUT => 3, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: u8) -> O10RESR { match value { 0 => O10RESR::NO_CHANGE, 1 => O10RESR::SET_OUTPUT_OR_CLEAR, 2 => 
O10RESR::CLEAR_OUTPUT_OR_SET, 3 => O10RESR::TOGGLE_OUTPUT, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `NO_CHANGE`"] #[inline] pub fn is_no_change(&self) -> bool { *self == O10RESR::NO_CHANGE } #[doc = "Checks if the value of the field is `SET_OUTPUT_OR_CLEAR`"] #[inline] pub fn is_set_output_or_clear(&self) -> bool { *self == O10RESR::SET_OUTPUT_OR_CLEAR } #[doc = "Checks if the value of the field is `CLEAR_OUTPUT_OR_SET`"] #[inline] pub fn is_clear_output_or_set(&self) -> bool { *self == O10RESR::CLEAR_OUTPUT_OR_SET } #[doc = "Checks if the value of the field is `TOGGLE_OUTPUT`"] #[inline] pub fn is_toggle_output(&self) -> bool { *self == O10RESR::TOGGLE_OUTPUT } } #[doc = "Possible values of the field `O11RES`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum O11RESR { #[doc = "No change."] NO_CHANGE, #[doc = "Set output (or clear based on the SETCLR11 field)."] SET_OUTPUT_OR_CLEAR, #[doc = "Clear output (or set based on the SETCLR11 field)."] CLEAR_OUTPUT_OR_SET, #[doc = "Toggle output."] TOGGLE_OUTPUT, } impl O11RESR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { match *self { O11RESR::NO_CHANGE => 0, O11RESR::SET_OUTPUT_OR_CLEAR => 1, O11RESR::CLEAR_OUTPUT_OR_SET => 2, O11RESR::TOGGLE_OUTPUT => 3, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: u8) -> O11RESR { match value { 0 => O11RESR::NO_CHANGE, 1 => O11RESR::SET_OUTPUT_OR_CLEAR, 2 => O11RESR::CLEAR_OUTPUT_OR_SET, 3 => O11RESR::TOGGLE_OUTPUT, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `NO_CHANGE`"] #[inline] pub fn is_no_change(&self) -> bool { *self == O11RESR::NO_CHANGE } #[doc = "Checks if the value of the field is `SET_OUTPUT_OR_CLEAR`"] #[inline] pub fn is_set_output_or_clear(&self) -> bool { *self == O11RESR::SET_OUTPUT_OR_CLEAR } #[doc = "Checks if the value of the field is `CLEAR_OUTPUT_OR_SET`"] #[inline] pub fn is_clear_output_or_set(&self) -> bool { *self == 
O11RESR::CLEAR_OUTPUT_OR_SET } #[doc = "Checks if the value of the field is `TOGGLE_OUTPUT`"] #[inline] pub fn is_toggle_output(&self) -> bool { *self == O11RESR::TOGGLE_OUTPUT } } #[doc = "Possible values of the field `O12RES`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum O12RESR { #[doc = "No change."] NO_CHANGE, #[doc = "Set output (or clear based on the SETCLR12 field)."] SET_OUTPUT_OR_CLEAR, #[doc = "Clear output (or set based on the SETCLR12 field)."] CLEAR_OUTPUT_OR_SET, #[doc = "Toggle output."] TOGGLE_OUTPUT, } impl O12RESR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { match *self { O12RESR::NO_CHANGE => 0, O12RESR::SET_OUTPUT_OR_CLEAR => 1, O12RESR::CLEAR_OUTPUT_OR_SET => 2, O12RESR::TOGGLE_OUTPUT => 3, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: u8) -> O12RESR { match value { 0 => O12RESR::NO_CHANGE, 1 => O12RESR::SET_OUTPUT_OR_CLEAR, 2 => O12RESR::CLEAR_OUTPUT_OR_SET, 3 => O12RESR::TOGGLE_OUTPUT, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `NO_CHANGE`"] #[inline] pub fn is_no_change(&self) -> bool { *self == O12RESR::NO_CHANGE } #[doc = "Checks if the value of the field is `SET_OUTPUT_OR_CLEAR`"] #[inline] pub fn is_set_output_or_clear(&self) -> bool { *self == O12RESR::SET_OUTPUT_OR_CLEAR } #[doc = "Checks if the value of the field is `CLEAR_OUTPUT_OR_SET`"] #[inline] pub fn is_clear_output_or_set(&self) -> bool { *self == O12RESR::CLEAR_OUTPUT_OR_SET } #[doc = "Checks if the value of the field is `TOGGLE_OUTPUT`"] #[inline] pub fn is_toggle_output(&self) -> bool { *self == O12RESR::TOGGLE_OUTPUT } } #[doc = "Possible values of the field `O13RES`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum O13RESR { #[doc = "No change."] NO_CHANGE, #[doc = "Set output (or clear based on the SETCLR13 field)."] SET_OUTPUT_OR_CLEAR, #[doc = "Clear output (or set based on the SETCLR13 field)."] CLEAR_OUTPUT_OR_SET, #[doc = "Toggle output."] TOGGLE_OUTPUT, } impl O13RESR 
{ #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { match *self { O13RESR::NO_CHANGE => 0, O13RESR::SET_OUTPUT_OR_CLEAR => 1, O13RESR::CLEAR_OUTPUT_OR_SET => 2, O13RESR::TOGGLE_OUTPUT => 3, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: u8) -> O13RESR { match value { 0 => O13RESR::NO_CHANGE, 1 => O13RESR::SET_OUTPUT_OR_CLEAR, 2 => O13RESR::CLEAR_OUTPUT_OR_SET, 3 => O13RESR::TOGGLE_OUTPUT, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `NO_CHANGE`"] #[inline] pub fn is_no_change(&self) -> bool { *self == O13RESR::NO_CHANGE } #[doc = "Checks if the value of the field is `SET_OUTPUT_OR_CLEAR`"] #[inline] pub fn is_set_output_or_clear(&self) -> bool { *self == O13RESR::SET_OUTPUT_OR_CLEAR } #[doc = "Checks if the value of the field is `CLEAR_OUTPUT_OR_SET`"] #[inline] pub fn is_clear_output_or_set(&self) -> bool { *self == O13RESR::CLEAR_OUTPUT_OR_SET } #[doc = "Checks if the value of the field is `TOGGLE_OUTPUT`"] #[inline] pub fn is_toggle_output(&self) -> bool { *self == O13RESR::TOGGLE_OUTPUT } } #[doc = "Possible values of the field `O14RES`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum O14RESR { #[doc = "No change."] NO_CHANGE, #[doc = "Set output (or clear based on the SETCLR14 field)."] SET_OUTPUT_OR_CLEAR, #[doc = "Clear output (or set based on the SETCLR14 field)."] CLEAR_OUTPUT_OR_SET, #[doc = "Toggle output."] TOGGLE_OUTPUT, } impl O14RESR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { match *self { O14RESR::NO_CHANGE => 0, O14RESR::SET_OUTPUT_OR_CLEAR => 1, O14RESR::CLEAR_OUTPUT_OR_SET => 2, O14RESR::TOGGLE_OUTPUT => 3, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: u8) -> O14RESR { match value { 0 => O14RESR::NO_CHANGE, 1 => O14RESR::SET_OUTPUT_OR_CLEAR, 2 => O14RESR::CLEAR_OUTPUT_OR_SET, 3 => O14RESR::TOGGLE_OUTPUT, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `NO_CHANGE`"] #[inline] 
pub fn is_no_change(&self) -> bool { *self == O14RESR::NO_CHANGE } #[doc = "Checks if the value of the field is `SET_OUTPUT_OR_CLEAR`"] #[inline] pub fn is_set_output_or_clear(&self) -> bool { *self == O14RESR::SET_OUTPUT_OR_CLEAR } #[doc = "Checks if the value of the field is `CLEAR_OUTPUT_OR_SET`"] #[inline] pub fn is_clear_output_or_set(&self) -> bool { *self == O14RESR::CLEAR_OUTPUT_OR_SET } #[doc = "Checks if the value of the field is `TOGGLE_OUTPUT`"] #[inline] pub fn is_toggle_output(&self) -> bool { *self == O14RESR::TOGGLE_OUTPUT } } #[doc = "Possible values of the field `O15RES`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum O15RESR { #[doc = "No change."] NO_CHANGE, #[doc = "Set output (or clear based on the SETCLR15 field)."] SET_OUTPUT_OR_CLEAR, #[doc = "Clear output (or set based on the SETCLR15 field)."] CLEAR_OUTPUT_OR_SET, #[doc = "Toggle output."] TOGGLE_OUTPUT, } impl O15RESR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { match *self { O15RESR::NO_CHANGE => 0, O15RESR::SET_OUTPUT_OR_CLEAR => 1, O15RESR::CLEAR_OUTPUT_OR_SET => 2, O15RESR::TOGGLE_OUTPUT => 3, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: u8) -> O15RESR { match value { 0 => O15RESR::NO_CHANGE, 1 => O15RESR::SET_OUTPUT_OR_CLEAR, 2 => O15RESR::CLEAR_OUTPUT_OR_SET, 3 => O15RESR::TOGGLE_OUTPUT, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `NO_CHANGE`"] #[inline] pub fn is_no_change(&self) -> bool { *self == O15RESR::NO_CHANGE } #[doc = "Checks if the value of the field is `SET_OUTPUT_OR_CLEAR`"] #[inline] pub fn is_set_output_or_clear(&self) -> bool { *self == O15RESR::SET_OUTPUT_OR_CLEAR } #[doc = "Checks if the value of the field is `CLEAR_OUTPUT_OR_SET`"] #[inline] pub fn is_clear_output_or_set(&self) -> bool { *self == O15RESR::CLEAR_OUTPUT_OR_SET } #[doc = "Checks if the value of the field is `TOGGLE_OUTPUT`"] #[inline] pub fn is_toggle_output(&self) -> bool { *self == O15RESR::TOGGLE_OUTPUT 
} } #[doc = "Values that can be written to the field `O0RES`"] pub enum O0RESW { #[doc = "No change."] NO_CHANGE, #[doc = "Set output (or clear based on the SETCLR0 field)."] SET_OUTPUT_OR_CLEAR, #[doc = "Clear output (or set based on the SETCLR0 field)."] CLEAR_OUTPUT_OR_SET, #[doc = "Toggle output."] TOGGLE_OUTPUT, } impl O0RESW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> u8 { match *self { O0RESW::NO_CHANGE => 0, O0RESW::SET_OUTPUT_OR_CLEAR => 1, O0RESW::CLEAR_OUTPUT_OR_SET => 2, O0RESW::TOGGLE_OUTPUT => 3, } } } #[doc = r" Proxy"] pub struct _O0RESW<'a> { w: &'a mut W, } impl<'a> _O0RESW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: O0RESW) -> &'a mut W { { self.bits(variant._bits()) } } #[doc = "No change."] #[inline] pub fn no_change(self) -> &'a mut W { self.variant(O0RESW::NO_CHANGE) } #[doc = "Set output (or clear based on the SETCLR0 field)."] #[inline] pub fn set_output_or_clear(self) -> &'a mut W { self.variant(O0RESW::SET_OUTPUT_OR_CLEAR) } #[doc = "Clear output (or set based on the SETCLR0 field)."] #[inline] pub fn clear_output_or_set(self) -> &'a mut W { self.variant(O0RESW::CLEAR_OUTPUT_OR_SET) } #[doc = "Toggle output."] #[inline] pub fn toggle_output(self) -> &'a mut W { self.variant(O0RESW::TOGGLE_OUTPUT) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 3; const OFFSET: u8 = 0; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `O1RES`"] pub enum O1RESW { #[doc = "No change."] NO_CHANGE, #[doc = "Set output (or clear based on the SETCLR1 field)."] SET_OUTPUT_OR_CLEAR, #[doc = "Clear output (or set based on the SETCLR1 field)."] CLEAR_OUTPUT_OR_SET, #[doc = "Toggle output."] TOGGLE_OUTPUT, } impl O1RESW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> u8 { match *self { O1RESW::NO_CHANGE 
=> 0,
            O1RESW::SET_OUTPUT_OR_CLEAR => 1,
            O1RESW::CLEAR_OUTPUT_OR_SET => 2,
            O1RESW::TOGGLE_OUTPUT => 3,
        }
    }
}
#[doc = r" Proxy"]
pub struct _O1RESW<'a> {
    w: &'a mut W,
}
impl<'a> _O1RESW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: O1RESW) -> &'a mut W {
        {
            self.bits(variant._bits())
        }
    }
    #[doc = "No change."]
    #[inline]
    pub fn no_change(self) -> &'a mut W {
        self.variant(O1RESW::NO_CHANGE)
    }
    #[doc = "Set output (or clear based on the SETCLR1 field)."]
    #[inline]
    pub fn set_output_or_clear(self) -> &'a mut W {
        self.variant(O1RESW::SET_OUTPUT_OR_CLEAR)
    }
    #[doc = "Clear output (or set based on the SETCLR1 field)."]
    #[inline]
    pub fn clear_output_or_set(self) -> &'a mut W {
        self.variant(O1RESW::CLEAR_OUTPUT_OR_SET)
    }
    #[doc = "Toggle output."]
    #[inline]
    pub fn toggle_output(self) -> &'a mut W {
        self.variant(O1RESW::TOGGLE_OUTPUT)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bits(self, value: u8) -> &'a mut W {
        const MASK: u8 = 3;
        const OFFSET: u8 = 2;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = "Values that can be written to the field `O2RES`"]
pub enum O2RESW {
    #[doc = "No change."]
    NO_CHANGE,
    #[doc = "Set output (or clear based on the SETCLR2 field)."]
    SET_OUTPUT_OR_CLEAR,
    #[doc = "Clear output n (or set based on the SETCLR2 field)."]
    CLEAR_OUTPUT_N_OR_S,
    #[doc = "Toggle output."]
    TOGGLE_OUTPUT,
}
impl O2RESW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> u8 {
        match *self {
            O2RESW::NO_CHANGE => 0,
            O2RESW::SET_OUTPUT_OR_CLEAR => 1,
            O2RESW::CLEAR_OUTPUT_N_OR_S => 2,
            O2RESW::TOGGLE_OUTPUT => 3,
        }
    }
}
#[doc = r" Proxy"]
pub struct _O2RESW<'a> {
    w: &'a mut W,
}
impl<'a> _O2RESW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: O2RESW) -> &'a mut W {
        {
            self.bits(variant._bits())
        }
    }
    #[doc = "No change."]
    #[inline]
    pub fn no_change(self) -> &'a mut W {
        self.variant(O2RESW::NO_CHANGE)
    }
    #[doc = "Set output (or clear based on the SETCLR2 field)."]
    #[inline]
    pub fn set_output_or_clear(self) -> &'a mut W {
        self.variant(O2RESW::SET_OUTPUT_OR_CLEAR)
    }
    #[doc = "Clear output n (or set based on the SETCLR2 field)."]
    #[inline]
    pub fn clear_output_n_or_s(self) -> &'a mut W {
        self.variant(O2RESW::CLEAR_OUTPUT_N_OR_S)
    }
    #[doc = "Clear output (or set based on the SETCLR2 field). Backward-compatible alias for `clear_output_n_or_s`, matching the method name exposed by every other output channel. The divergent name stems from the SVD description for this field; prefer fixing the SVD and regenerating."]
    #[inline]
    pub fn clear_output_or_set(self) -> &'a mut W {
        self.variant(O2RESW::CLEAR_OUTPUT_N_OR_S)
    }
    #[doc = "Toggle output."]
    #[inline]
    pub fn toggle_output(self) -> &'a mut W {
        self.variant(O2RESW::TOGGLE_OUTPUT)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bits(self, value: u8) -> &'a mut W {
        const MASK: u8 = 3;
        const OFFSET: u8 = 4;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = "Values that can be written to the field `O3RES`"]
pub enum O3RESW {
    #[doc = "No change."]
    NO_CHANGE,
    #[doc = "Set output (or clear based on the SETCLR3 field)."]
    SET_OUTPUT_OR_CLEAR,
    #[doc = "Clear output (or set based on the SETCLR3 field)."]
    CLEAR_OUTPUT_OR_SET,
    #[doc = "Toggle output."]
    TOGGLE_OUTPUT,
}
impl O3RESW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> u8 {
        match *self {
            O3RESW::NO_CHANGE => 0,
            O3RESW::SET_OUTPUT_OR_CLEAR => 1,
            O3RESW::CLEAR_OUTPUT_OR_SET => 2,
            O3RESW::TOGGLE_OUTPUT => 3,
        }
    }
}
#[doc = r" Proxy"]
pub struct _O3RESW<'a> {
    w: &'a mut W,
}
impl<'a> _O3RESW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: O3RESW) -> &'a mut W {
        {
            self.bits(variant._bits())
        }
    }
    #[doc = "No change."]
    #[inline]
    pub fn no_change(self) -> &'a mut W {
        self.variant(O3RESW::NO_CHANGE)
    }
    #[doc = "Set output (or clear based on the SETCLR3 field)."]
    #[inline]
    pub fn set_output_or_clear(self) -> &'a mut W {
        self.variant(O3RESW::SET_OUTPUT_OR_CLEAR)
    }
    #[doc = "Clear output (or set based on the SETCLR3 field)."]
    #[inline]
    pub fn clear_output_or_set(self) -> &'a mut W {
        self.variant(O3RESW::CLEAR_OUTPUT_OR_SET)
    }
    #[doc = "Toggle output."]
    #[inline]
    pub fn toggle_output(self) -> &'a mut W {
        self.variant(O3RESW::TOGGLE_OUTPUT)
    }
#[doc = r" Writes raw bits to the field"] #[inline] pub fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 3; const OFFSET: u8 = 6; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `O4RES`"] pub enum O4RESW { #[doc = "No change."] NO_CHANGE, #[doc = "Set output (or clear based on the SETCLR4 field)."] SET_OUTPUT_OR_CLEAR, #[doc = "Clear output (or set based on the SETCLR4 field)."] CLEAR_OUTPUT_OR_SET, #[doc = "Toggle output."] TOGGLE_OUTPUT, } impl O4RESW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> u8 { match *self { O4RESW::NO_CHANGE => 0, O4RESW::SET_OUTPUT_OR_CLEAR => 1, O4RESW::CLEAR_OUTPUT_OR_SET => 2, O4RESW::TOGGLE_OUTPUT => 3, } } } #[doc = r" Proxy"] pub struct _O4RESW<'a> { w: &'a mut W, } impl<'a> _O4RESW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: O4RESW) -> &'a mut W { { self.bits(variant._bits()) } } #[doc = "No change."] #[inline] pub fn no_change(self) -> &'a mut W { self.variant(O4RESW::NO_CHANGE) } #[doc = "Set output (or clear based on the SETCLR4 field)."] #[inline] pub fn set_output_or_clear(self) -> &'a mut W { self.variant(O4RESW::SET_OUTPUT_OR_CLEAR) } #[doc = "Clear output (or set based on the SETCLR4 field)."] #[inline] pub fn clear_output_or_set(self) -> &'a mut W { self.variant(O4RESW::CLEAR_OUTPUT_OR_SET) } #[doc = "Toggle output."] #[inline] pub fn toggle_output(self) -> &'a mut W { self.variant(O4RESW::TOGGLE_OUTPUT) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 3; const OFFSET: u8 = 8; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `O5RES`"] pub enum O5RESW { #[doc = "No change."] NO_CHANGE, #[doc = "Set output (or clear based on the SETCLR5 field)."] SET_OUTPUT_OR_CLEAR, #[doc = 
"Clear output (or set based on the SETCLR5 field)."] CLEAR_OUTPUT_OR_SET, #[doc = "Toggle output."] TOGGLE_OUTPUT, } impl O5RESW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> u8 { match *self { O5RESW::NO_CHANGE => 0, O5RESW::SET_OUTPUT_OR_CLEAR => 1, O5RESW::CLEAR_OUTPUT_OR_SET => 2, O5RESW::TOGGLE_OUTPUT => 3, } } } #[doc = r" Proxy"] pub struct _O5RESW<'a> { w: &'a mut W, } impl<'a> _O5RESW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: O5RESW) -> &'a mut W { { self.bits(variant._bits()) } } #[doc = "No change."] #[inline] pub fn no_change(self) -> &'a mut W { self.variant(O5RESW::NO_CHANGE) } #[doc = "Set output (or clear based on the SETCLR5 field)."] #[inline] pub fn set_output_or_clear(self) -> &'a mut W { self.variant(O5RESW::SET_OUTPUT_OR_CLEAR) } #[doc = "Clear output (or set based on the SETCLR5 field)."] #[inline] pub fn clear_output_or_set(self) -> &'a mut W { self.variant(O5RESW::CLEAR_OUTPUT_OR_SET) } #[doc = "Toggle output."] #[inline] pub fn toggle_output(self) -> &'a mut W { self.variant(O5RESW::TOGGLE_OUTPUT) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 3; const OFFSET: u8 = 10; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `O6RES`"] pub enum O6RESW { #[doc = "No change."] NO_CHANGE, #[doc = "Set output (or clear based on the SETCLR6 field)."] SET_OUTPUT_OR_CLEAR, #[doc = "Clear output (or set based on the SETCLR6 field)."] CLEAR_OUTPUT_OR_SET, #[doc = "Toggle output."] TOGGLE_OUTPUT, } impl O6RESW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> u8 { match *self { O6RESW::NO_CHANGE => 0, O6RESW::SET_OUTPUT_OR_CLEAR => 1, O6RESW::CLEAR_OUTPUT_OR_SET => 2, O6RESW::TOGGLE_OUTPUT => 3, } } } #[doc = r" Proxy"] pub struct _O6RESW<'a> { w: &'a mut W, } impl<'a> _O6RESW<'a> { #[doc = r" 
Writes `variant` to the field"] #[inline] pub fn variant(self, variant: O6RESW) -> &'a mut W { { self.bits(variant._bits()) } } #[doc = "No change."] #[inline] pub fn no_change(self) -> &'a mut W { self.variant(O6RESW::NO_CHANGE) } #[doc = "Set output (or clear based on the SETCLR6 field)."] #[inline] pub fn set_output_or_clear(self) -> &'a mut W { self.variant(O6RESW::SET_OUTPUT_OR_CLEAR) } #[doc = "Clear output (or set based on the SETCLR6 field)."] #[inline] pub fn clear_output_or_set(self) -> &'a mut W { self.variant(O6RESW::CLEAR_OUTPUT_OR_SET) } #[doc = "Toggle output."] #[inline] pub fn toggle_output(self) -> &'a mut W { self.variant(O6RESW::TOGGLE_OUTPUT) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 3; const OFFSET: u8 = 12; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `O7RES`"] pub enum O7RESW { #[doc = "No change."] NO_CHANGE, #[doc = "Set output (or clear based on the SETCLR7 field)."] SET_OUTPUT_OR_CLEAR, #[doc = "Clear output (or set based on the SETCLR7 field)."] CLEAR_OUTPUT_OR_SET, #[doc = "Toggle output."] TOGGLE_OUTPUT, } impl O7RESW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> u8 { match *self { O7RESW::NO_CHANGE => 0, O7RESW::SET_OUTPUT_OR_CLEAR => 1, O7RESW::CLEAR_OUTPUT_OR_SET => 2, O7RESW::TOGGLE_OUTPUT => 3, } } } #[doc = r" Proxy"] pub struct _O7RESW<'a> { w: &'a mut W, } impl<'a> _O7RESW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: O7RESW) -> &'a mut W { { self.bits(variant._bits()) } } #[doc = "No change."] #[inline] pub fn no_change(self) -> &'a mut W { self.variant(O7RESW::NO_CHANGE) } #[doc = "Set output (or clear based on the SETCLR7 field)."] #[inline] pub fn set_output_or_clear(self) -> &'a mut W { self.variant(O7RESW::SET_OUTPUT_OR_CLEAR) } #[doc = "Clear output (or set based on the 
SETCLR7 field)."] #[inline] pub fn clear_output_or_set(self) -> &'a mut W { self.variant(O7RESW::CLEAR_OUTPUT_OR_SET) } #[doc = "Toggle output."] #[inline] pub fn toggle_output(self) -> &'a mut W { self.variant(O7RESW::TOGGLE_OUTPUT) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 3; const OFFSET: u8 = 14; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `O8RES`"] pub enum O8RESW { #[doc = "No change."] NO_CHANGE, #[doc = "Set output (or clear based on the SETCLR8 field)."] SET_OUTPUT_OR_CLEAR, #[doc = "Clear output (or set based on the SETCLR8 field)."] CLEAR_OUTPUT_OR_SET, #[doc = "Toggle output."] TOGGLE_OUTPUT, } impl O8RESW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> u8 { match *self { O8RESW::NO_CHANGE => 0, O8RESW::SET_OUTPUT_OR_CLEAR => 1, O8RESW::CLEAR_OUTPUT_OR_SET => 2, O8RESW::TOGGLE_OUTPUT => 3, } } } #[doc = r" Proxy"] pub struct _O8RESW<'a> { w: &'a mut W, } impl<'a> _O8RESW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: O8RESW) -> &'a mut W { { self.bits(variant._bits()) } } #[doc = "No change."] #[inline] pub fn no_change(self) -> &'a mut W { self.variant(O8RESW::NO_CHANGE) } #[doc = "Set output (or clear based on the SETCLR8 field)."] #[inline] pub fn set_output_or_clear(self) -> &'a mut W { self.variant(O8RESW::SET_OUTPUT_OR_CLEAR) } #[doc = "Clear output (or set based on the SETCLR8 field)."] #[inline] pub fn clear_output_or_set(self) -> &'a mut W { self.variant(O8RESW::CLEAR_OUTPUT_OR_SET) } #[doc = "Toggle output."] #[inline] pub fn toggle_output(self) -> &'a mut W { self.variant(O8RESW::TOGGLE_OUTPUT) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 3; const OFFSET: u8 = 16; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value 
& MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `O9RES`"] pub enum O9RESW { #[doc = "No change."] NO_CHANGE, #[doc = "Set output (or clear based on the SETCLR9 field)."] SET_OUTPUT_OR_CLEAR, #[doc = "Clear output (or set based on the SETCLR9 field)."] CLEAR_OUTPUT_OR_SET, #[doc = "Toggle output."] TOGGLE_OUTPUT, } impl O9RESW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> u8 { match *self { O9RESW::NO_CHANGE => 0, O9RESW::SET_OUTPUT_OR_CLEAR => 1, O9RESW::CLEAR_OUTPUT_OR_SET => 2, O9RESW::TOGGLE_OUTPUT => 3, } } } #[doc = r" Proxy"] pub struct _O9RESW<'a> { w: &'a mut W, } impl<'a> _O9RESW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: O9RESW) -> &'a mut W { { self.bits(variant._bits()) } } #[doc = "No change."] #[inline] pub fn no_change(self) -> &'a mut W { self.variant(O9RESW::NO_CHANGE) } #[doc = "Set output (or clear based on the SETCLR9 field)."] #[inline] pub fn set_output_or_clear(self) -> &'a mut W { self.variant(O9RESW::SET_OUTPUT_OR_CLEAR) } #[doc = "Clear output (or set based on the SETCLR9 field)."] #[inline] pub fn clear_output_or_set(self) -> &'a mut W { self.variant(O9RESW::CLEAR_OUTPUT_OR_SET) } #[doc = "Toggle output."] #[inline] pub fn toggle_output(self) -> &'a mut W { self.variant(O9RESW::TOGGLE_OUTPUT) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 3; const OFFSET: u8 = 18; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `O10RES`"] pub enum O10RESW { #[doc = "No change."] NO_CHANGE, #[doc = "Set output (or clear based on the SETCLR10 field)."] SET_OUTPUT_OR_CLEAR, #[doc = "Clear output (or set based on the SETCLR10 field)."] CLEAR_OUTPUT_OR_SET, #[doc = "Toggle output."] TOGGLE_OUTPUT, } impl O10RESW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) 
-> u8 { match *self { O10RESW::NO_CHANGE => 0, O10RESW::SET_OUTPUT_OR_CLEAR => 1, O10RESW::CLEAR_OUTPUT_OR_SET => 2, O10RESW::TOGGLE_OUTPUT => 3, } } } #[doc = r" Proxy"] pub struct _O10RESW<'a> { w: &'a mut W, } impl<'a> _O10RESW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: O10RESW) -> &'a mut W { { self.bits(variant._bits()) } } #[doc = "No change."] #[inline] pub fn no_change(self) -> &'a mut W { self.variant(O10RESW::NO_CHANGE) } #[doc = "Set output (or clear based on the SETCLR10 field)."] #[inline] pub fn set_output_or_clear(self) -> &'a mut W { self.variant(O10RESW::SET_OUTPUT_OR_CLEAR) } #[doc = "Clear output (or set based on the SETCLR10 field)."] #[inline] pub fn clear_output_or_set(self) -> &'a mut W { self.variant(O10RESW::CLEAR_OUTPUT_OR_SET) } #[doc = "Toggle output."] #[inline] pub fn toggle_output(self) -> &'a mut W { self.variant(O10RESW::TOGGLE_OUTPUT) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 3; const OFFSET: u8 = 20; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `O11RES`"] pub enum O11RESW { #[doc = "No change."] NO_CHANGE, #[doc = "Set output (or clear based on the SETCLR11 field)."] SET_OUTPUT_OR_CLEAR, #[doc = "Clear output (or set based on the SETCLR11 field)."] CLEAR_OUTPUT_OR_SET, #[doc = "Toggle output."] TOGGLE_OUTPUT, } impl O11RESW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> u8 { match *self { O11RESW::NO_CHANGE => 0, O11RESW::SET_OUTPUT_OR_CLEAR => 1, O11RESW::CLEAR_OUTPUT_OR_SET => 2, O11RESW::TOGGLE_OUTPUT => 3, } } } #[doc = r" Proxy"] pub struct _O11RESW<'a> { w: &'a mut W, } impl<'a> _O11RESW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: O11RESW) -> &'a mut W { { self.bits(variant._bits()) } } #[doc = "No change."] #[inline] pub fn 
no_change(self) -> &'a mut W { self.variant(O11RESW::NO_CHANGE) } #[doc = "Set output (or clear based on the SETCLR11 field)."] #[inline] pub fn set_output_or_clear(self) -> &'a mut W { self.variant(O11RESW::SET_OUTPUT_OR_CLEAR) } #[doc = "Clear output (or set based on the SETCLR11 field)."] #[inline] pub fn clear_output_or_set(self) -> &'a mut W { self.variant(O11RESW::CLEAR_OUTPUT_OR_SET) } #[doc = "Toggle output."] #[inline] pub fn toggle_output(self) -> &'a mut W { self.variant(O11RESW::TOGGLE_OUTPUT) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 3; const OFFSET: u8 = 22; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `O12RES`"] pub enum O12RESW { #[doc = "No change."] NO_CHANGE, #[doc = "Set output (or clear based on the SETCLR12 field)."] SET_OUTPUT_OR_CLEAR, #[doc = "Clear output (or set based on the SETCLR12 field)."] CLEAR_OUTPUT_OR_SET, #[doc = "Toggle output."] TOGGLE_OUTPUT, } impl O12RESW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> u8 { match *self { O12RESW::NO_CHANGE => 0, O12RESW::SET_OUTPUT_OR_CLEAR => 1, O12RESW::CLEAR_OUTPUT_OR_SET => 2, O12RESW::TOGGLE_OUTPUT => 3, } } } #[doc = r" Proxy"] pub struct _O12RESW<'a> { w: &'a mut W, } impl<'a> _O12RESW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: O12RESW) -> &'a mut W { { self.bits(variant._bits()) } } #[doc = "No change."] #[inline] pub fn no_change(self) -> &'a mut W { self.variant(O12RESW::NO_CHANGE) } #[doc = "Set output (or clear based on the SETCLR12 field)."] #[inline] pub fn set_output_or_clear(self) -> &'a mut W { self.variant(O12RESW::SET_OUTPUT_OR_CLEAR) } #[doc = "Clear output (or set based on the SETCLR12 field)."] #[inline] pub fn clear_output_or_set(self) -> &'a mut W { self.variant(O12RESW::CLEAR_OUTPUT_OR_SET) } #[doc = "Toggle output."] 
#[inline] pub fn toggle_output(self) -> &'a mut W { self.variant(O12RESW::TOGGLE_OUTPUT) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 3; const OFFSET: u8 = 24; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `O13RES`"] pub enum O13RESW { #[doc = "No change."] NO_CHANGE, #[doc = "Set output (or clear based on the SETCLR13 field)."] SET_OUTPUT_OR_CLEAR, #[doc = "Clear output (or set based on the SETCLR13 field)."] CLEAR_OUTPUT_OR_SET, #[doc = "Toggle output."] TOGGLE_OUTPUT, } impl O13RESW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> u8 { match *self { O13RESW::NO_CHANGE => 0, O13RESW::SET_OUTPUT_OR_CLEAR => 1, O13RESW::CLEAR_OUTPUT_OR_SET => 2, O13RESW::TOGGLE_OUTPUT => 3, } } } #[doc = r" Proxy"] pub struct _O13RESW<'a> { w: &'a mut W, } impl<'a> _O13RESW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: O13RESW) -> &'a mut W { { self.bits(variant._bits()) } } #[doc = "No change."] #[inline] pub fn no_change(self) -> &'a mut W { self.variant(O13RESW::NO_CHANGE) } #[doc = "Set output (or clear based on the SETCLR13 field)."] #[inline] pub fn set_output_or_clear(self) -> &'a mut W { self.variant(O13RESW::SET_OUTPUT_OR_CLEAR) } #[doc = "Clear output (or set based on the SETCLR13 field)."] #[inline] pub fn clear_output_or_set(self) -> &'a mut W { self.variant(O13RESW::CLEAR_OUTPUT_OR_SET) } #[doc = "Toggle output."] #[inline] pub fn toggle_output(self) -> &'a mut W { self.variant(O13RESW::TOGGLE_OUTPUT) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 3; const OFFSET: u8 = 26; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `O14RES`"] pub enum O14RESW { #[doc = 
"No change."] NO_CHANGE, #[doc = "Set output (or clear based on the SETCLR14 field)."] SET_OUTPUT_OR_CLEAR, #[doc = "Clear output (or set based on the SETCLR14 field)."] CLEAR_OUTPUT_OR_SET, #[doc = "Toggle output."] TOGGLE_OUTPUT, } impl O14RESW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> u8 { match *self { O14RESW::NO_CHANGE => 0, O14RESW::SET_OUTPUT_OR_CLEAR => 1, O14RESW::CLEAR_OUTPUT_OR_SET => 2, O14RESW::TOGGLE_OUTPUT => 3, } } } #[doc = r" Proxy"] pub struct _O14RESW<'a> { w: &'a mut W, } impl<'a> _O14RESW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: O14RESW) -> &'a mut W { { self.bits(variant._bits()) } } #[doc = "No change."] #[inline] pub fn no_change(self) -> &'a mut W { self.variant(O14RESW::NO_CHANGE) } #[doc = "Set output (or clear based on the SETCLR14 field)."] #[inline] pub fn set_output_or_clear(self) -> &'a mut W { self.variant(O14RESW::SET_OUTPUT_OR_CLEAR) } #[doc = "Clear output (or set based on the SETCLR14 field)."] #[inline] pub fn clear_output_or_set(self) -> &'a mut W { self.variant(O14RESW::CLEAR_OUTPUT_OR_SET) } #[doc = "Toggle output."] #[inline] pub fn toggle_output(self) -> &'a mut W { self.variant(O14RESW::TOGGLE_OUTPUT) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 3; const OFFSET: u8 = 28; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `O15RES`"] pub enum O15RESW { #[doc = "No change."] NO_CHANGE, #[doc = "Set output (or clear based on the SETCLR15 field)."] SET_OUTPUT_OR_CLEAR, #[doc = "Clear output (or set based on the SETCLR15 field)."] CLEAR_OUTPUT_OR_SET, #[doc = "Toggle output."] TOGGLE_OUTPUT, } impl O15RESW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> u8 { match *self { O15RESW::NO_CHANGE => 0, O15RESW::SET_OUTPUT_OR_CLEAR => 1, 
O15RESW::CLEAR_OUTPUT_OR_SET => 2, O15RESW::TOGGLE_OUTPUT => 3, } } } #[doc = r" Proxy"] pub struct _O15RESW<'a> { w: &'a mut W, } impl<'a> _O15RESW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: O15RESW) -> &'a mut W { { self.bits(variant._bits()) } } #[doc = "No change."] #[inline] pub fn no_change(self) -> &'a mut W { self.variant(O15RESW::NO_CHANGE) } #[doc = "Set output (or clear based on the SETCLR15 field)."] #[inline] pub fn set_output_or_clear(self) -> &'a mut W { self.variant(O15RESW::SET_OUTPUT_OR_CLEAR) } #[doc = "Clear output (or set based on the SETCLR15 field)."] #[inline] pub fn clear_output_or_set(self) -> &'a mut W { self.variant(O15RESW::CLEAR_OUTPUT_OR_SET) } #[doc = "Toggle output."] #[inline] pub fn toggle_output(self) -> &'a mut W { self.variant(O15RESW::TOGGLE_OUTPUT) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 3; const OFFSET: u8 = 30; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } impl R { #[doc = r" Value of the register as raw bits"] #[inline] pub fn bits(&self) -> u32 { self.bits } #[doc = "Bits 0:1 - Effect of simultaneous set and clear on output 0."] #[inline] pub fn o0res(&self) -> O0RESR { O0RESR::_from({ const MASK: u8 = 3; const OFFSET: u8 = 0; ((self.bits >> OFFSET) & MASK as u32) as u8 }) } #[doc = "Bits 2:3 - Effect of simultaneous set and clear on output 1."] #[inline] pub fn o1res(&self) -> O1RESR { O1RESR::_from({ const MASK: u8 = 3; const OFFSET: u8 = 2; ((self.bits >> OFFSET) & MASK as u32) as u8 }) } #[doc = "Bits 4:5 - Effect of simultaneous set and clear on output 2."] #[inline] pub fn o2res(&self) -> O2RESR { O2RESR::_from({ const MASK: u8 = 3; const OFFSET: u8 = 4; ((self.bits >> OFFSET) & MASK as u32) as u8 }) } #[doc = "Bits 6:7 - Effect of simultaneous set and clear on output 3."] #[inline] pub fn o3res(&self) -> O3RESR { O3RESR::_from({ const 
MASK: u8 = 3; const OFFSET: u8 = 6; ((self.bits >> OFFSET) & MASK as u32) as u8 }) } #[doc = "Bits 8:9 - Effect of simultaneous set and clear on output 4."] #[inline] pub fn o4res(&self) -> O4RESR { O4RESR::_from({ const MASK: u8 = 3; const OFFSET: u8 = 8; ((self.bits >> OFFSET) & MASK as u32) as u8 }) } #[doc = "Bits 10:11 - Effect of simultaneous set and clear on output 5."] #[inline] pub fn o5res(&self) -> O5RESR { O5RESR::_from({ const MASK: u8 = 3; const OFFSET: u8 = 10; ((self.bits >> OFFSET) & MASK as u32) as u8 }) } #[doc = "Bits 12:13 - Effect of simultaneous set and clear on output 6."] #[inline] pub fn o6res(&self) -> O6RESR { O6RESR::_from({ const MASK: u8 = 3; const OFFSET: u8 = 12; ((self.bits >> OFFSET) & MASK as u32) as u8 }) } #[doc = "Bits 14:15 - Effect of simultaneous set and clear on output 7."] #[inline] pub fn o7res(&self) -> O7RESR { O7RESR::_from({ const MASK: u8 = 3; const OFFSET: u8 = 14; ((self.bits >> OFFSET) & MASK as u32) as u8 }) } #[doc = "Bits 16:17 - Effect of simultaneous set and clear on output 8."] #[inline] pub fn o8res(&self) -> O8RESR { O8RESR::_from({ const MASK: u8 = 3; const OFFSET: u8 = 16; ((self.bits >> OFFSET) & MASK as u32) as u8 }) } #[doc = "Bits 18:19 - Effect of simultaneous set and clear on output 9."] #[inline] pub fn o9res(&self) -> O9RESR { O9RESR::_from({ const MASK: u8 = 3; const OFFSET: u8 = 18; ((self.bits >> OFFSET) & MASK as u32) as u8 }) } #[doc = "Bits 20:21 - Effect of simultaneous set and clear on output 10."] #[inline] pub fn o10res(&self) -> O10RESR { O10RESR::_from({ const MASK: u8 = 3; const OFFSET: u8 = 20; ((self.bits >> OFFSET) & MASK as u32) as u8 }) } #[doc = "Bits 22:23 - Effect of simultaneous set and clear on output 11."] #[inline] pub fn o11res(&self) -> O11RESR { O11RESR::_from({ const MASK: u8 = 3; const OFFSET: u8 = 22; ((self.bits >> OFFSET) & MASK as u32) as u8 }) } #[doc = "Bits 24:25 - Effect of simultaneous set and clear on output 12."] #[inline] pub fn o12res(&self) -> O12RESR 
{ O12RESR::_from({ const MASK: u8 = 3; const OFFSET: u8 = 24; ((self.bits >> OFFSET) & MASK as u32) as u8 }) } #[doc = "Bits 26:27 - Effect of simultaneous set and clear on output 13."] #[inline] pub fn o13res(&self) -> O13RESR { O13RESR::_from({ const MASK: u8 = 3; const OFFSET: u8 = 26; ((self.bits >> OFFSET) & MASK as u32) as u8 }) } #[doc = "Bits 28:29 - Effect of simultaneous set and clear on output 14."] #[inline] pub fn o14res(&self) -> O14RESR { O14RESR::_from({ const MASK: u8 = 3; const OFFSET: u8 = 28; ((self.bits >> OFFSET) & MASK as u32) as u8 }) } #[doc = "Bits 30:31 - Effect of simultaneous set and clear on output 15."] #[inline] pub fn o15res(&self) -> O15RESR { O15RESR::_from({ const MASK: u8 = 3; const OFFSET: u8 = 30; ((self.bits >> OFFSET) & MASK as u32) as u8 }) } } impl W { #[doc = r" Reset value of the register"] #[inline] pub fn reset_value() -> W { W { bits: 0 } } #[doc = r" Writes raw bits to the register"] #[inline] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } #[doc = "Bits 0:1 - Effect of simultaneous set and clear on output 0."] #[inline] pub fn o0res(&mut self) -> _O0RESW { _O0RESW { w: self } } #[doc = "Bits 2:3 - Effect of simultaneous set and clear on output 1."] #[inline] pub fn o1res(&mut self) -> _O1RESW { _O1RESW { w: self } } #[doc = "Bits 4:5 - Effect of simultaneous set and clear on output 2."] #[inline] pub fn o2res(&mut self) -> _O2RESW { _O2RESW { w: self } } #[doc = "Bits 6:7 - Effect of simultaneous set and clear on output 3."] #[inline] pub fn o3res(&mut self) -> _O3RESW { _O3RESW { w: self } } #[doc = "Bits 8:9 - Effect of simultaneous set and clear on output 4."] #[inline] pub fn o4res(&mut self) -> _O4RESW { _O4RESW { w: self } } #[doc = "Bits 10:11 - Effect of simultaneous set and clear on output 5."] #[inline] pub fn o5res(&mut self) -> _O5RESW { _O5RESW { w: self } } #[doc = "Bits 12:13 - Effect of simultaneous set and clear on output 6."] #[inline] pub fn o6res(&mut self) -> 
_O6RESW { _O6RESW { w: self } } #[doc = "Bits 14:15 - Effect of simultaneous set and clear on output 7."] #[inline] pub fn o7res(&mut self) -> _O7RESW { _O7RESW { w: self } } #[doc = "Bits 16:17 - Effect of simultaneous set and clear on output 8."] #[inline] pub fn o8res(&mut self) -> _O8RESW { _O8RESW { w: self } } #[doc = "Bits 18:19 - Effect of simultaneous set and clear on output 9."] #[inline] pub fn o9res(&mut self) -> _O9RESW { _O9RESW { w: self } } #[doc = "Bits 20:21 - Effect of simultaneous set and clear on output 10."] #[inline] pub fn o10res(&mut self) -> _O10RESW { _O10RESW { w: self } } #[doc = "Bits 22:23 - Effect of simultaneous set and clear on output 11."] #[inline] pub fn o11res(&mut self) -> _O11RESW { _O11RESW { w: self } } #[doc = "Bits 24:25 - Effect of simultaneous set and clear on output 12."] #[inline] pub fn o12res(&mut self) -> _O12RESW { _O12RESW { w: self } } #[doc = "Bits 26:27 - Effect of simultaneous set and clear on output 13."] #[inline] pub fn o13res(&mut self) -> _O13RESW { _O13RESW { w: self } } #[doc = "Bits 28:29 - Effect of simultaneous set and clear on output 14."] #[inline] pub fn o14res(&mut self) -> _O14RESW { _O14RESW { w: self } } #[doc = "Bits 30:31 - Effect of simultaneous set and clear on output 15."] #[inline] pub fn o15res(&mut self) -> _O15RESW { _O15RESW { w: self } } }
true
71daad4043129cd3028a174064dd111667bb76a1
Rust
TAKEDA-Takashi/rust-project-euler
/project-euler/src/bin/problem-124.rs
UTF-8
436
2.71875
3
[]
no_license
//! http://odz.sakura.ne.jp/projecteuler/index.php?cmd=read&page=Problem%20124 //! Ordered radicals use euler_lib::Prime; use itertools::Itertools; fn main() { let ubound = 100_000; let prime = Prime::<usize>::new(); println!( "{:?}", (1..=ubound) .map(|n| (prime.factorization(&n).iter().dedup().product::<usize>(), n)) .sorted() .nth(10000 - 1) // 0-origin ); }
true
4a72ea9afb840d10bb79f8fca3c151cd345b9732
Rust
imoegirl/rust-by-example
/src/p041_generic_trait.rs
UTF-8
309
2.890625
3
[]
no_license
struct Empty; struct Null; trait DoubleDrop<T> { fn double_drop(self, _: T); } impl<T, U> DoubleDrop<T> for U { fn double_drop(self, _: T) {} } pub fn run() { println!("p041_generic_trait >>>>>>>>"); let empty = Empty; let null = Null; empty.double_drop(null); println!("\n"); }
true
5564bb4877c9b63fe1bf632b8f6035f5c7d75fe8
Rust
Dynisious/UniHack
/src/neural_net/mod.rs
UTF-8
6,096
2.8125
3
[]
no_license
use std::hash::{Hash, Hasher}; use std::collections::HashMap; mod neuron; use self::neuron::*; pub const LAYER_SIZE: usize = 20; pub const NEURAL_OUTPUT: usize = 3; #[derive(Clone, Serialize, Deserialize)] pub struct NeuralNet { layers: Vec<HashMap<usize, Neuron>>, } impl NeuralNet { pub fn new(layers: Vec<HashMap<usize, Neuron>>) -> Self { Self { layers } } pub fn clean(mut self) -> Self { for layer in self.layers.iter_mut() { for (_, neuron) in layer.iter_mut() { *neuron = neuron.clone().clean(); } } self } pub fn integrety(mut self) -> Self { for layer in 1..(LAYER_SIZE - 1) { for index in 0..LAYER_SIZE { if let Some(ref mut neuron) = self.layers[layer].get_mut(&index) .map(|neuron| neuron.clone()) { neuron.outputs = neuron.outputs.iter() .map(Clone::clone) .filter(|(output, _)| self.layers[layer].iter().any(|(target, _)| target == output)) .collect(); } } } self } pub fn inputs(mut self, inputs: &[usize]) -> Self { for input in inputs.iter() { self.layers[0].get_mut(input).expect("First layer is missing a Neuron!").state = 1.0 } self } pub fn run(&mut self) -> [usize; NEURAL_OUTPUT] { for layer in 0..self.layers.len() { for index in 0..LAYER_SIZE { if let Some((value, outputs)) = self.layers[layer].get_mut(&index) .map(|neuron| (neuron.clone().normalise().state, neuron.outputs.clone())) { let layer = layer + 1; for (output, weight) in outputs.iter() { if let Some(ref mut neuron) = self.layers[layer].get_mut(&output) { neuron.state += value * weight; } } } } } let mut res = [0; NEURAL_OUTPUT]; let mut output = self.layers[LAYER_SIZE - 1].iter() .map(|(index, neuron)| (index, neuron.clone().normalise().state)) .collect::<Vec<_>>(); output.sort_by(|(_, a), (_, b)| a.partial_cmp(b).unwrap_or(::std::cmp::Ordering::Equal)); for (res, output) in res.iter_mut() .zip(output.iter().map(|(&index, _)| index)) { *res = output; } return res; } pub fn reproduce(mut self, other: &Self) -> Self { use std::collections::hash_map::{DefaultHasher, Entry::*}; 
println!("Reproduction"); let mut hasher = DefaultHasher::default(); self.hash(&mut hasher); other.hash(&mut hasher); for layer in 1..(LAYER_SIZE - 1) { for index in 0..LAYER_SIZE { let other = other.layers[layer].get(&index); match self.layers[layer].entry(index) { Occupied(mut occupied) => match other { Some(other_neuron) => { let self_neuron = occupied.get_mut(); other_neuron.hash(&mut hasher); self_neuron.hash(&mut hasher); match hasher.finish() % 3 { 0 => *self_neuron = self_neuron.clone().reproduce(other_neuron), 1 => *self_neuron = other_neuron.clone(), 2 => (), _ => panic!(), } }, None => { occupied.get_mut().hash(&mut hasher); match hasher.finish() % 3 { 0 => { occupied.remove(); }, 1 => { let self_neuron = occupied.get_mut(); *self_neuron = self_neuron.clone().mutate(); }, 2 => (), _ => panic!(), } }, }, Vacant(mut vacant) => match other { Some(other_neuron) => { other_neuron.hash(&mut hasher); match hasher.finish() % 3 { 0 => { vacant.insert(other_neuron.clone()); }, 1 => { vacant.insert(other_neuron.clone().mutate()); }, 2 => (), _ => panic!(), } }, None => { match hasher.finish() % 2 { 0 => { vacant.insert(Neuron::default().mutate()); }, 1 => (), _ => panic!(), } }, }, } } } self.integrety() } } impl Default for NeuralNet { fn default() -> Self { use std::iter::{repeat, FromIterator}; let mut layers = vec![HashMap::with_capacity(LAYER_SIZE); LAYER_SIZE]; layers[0] = HashMap::from_iter(repeat(Neuron::default()).take(LAYER_SIZE).enumerate()); layers[LAYER_SIZE - 1] = layers[0].clone(); NeuralNet::new(layers) } } impl Hash for NeuralNet { fn hash<H: Hasher>(&self, hasher: &mut H) { for layer in self.layers.iter() { for (index, neuron) in layer.iter() { index.hash(hasher); neuron.hash(hasher); } } } }
true
7cb9d1eb3dd827ad4aeb3daad341adc9709a989c
Rust
flyer5200/reflow
/src/conf/prefix_match/util.rs
UTF-8
3,466
2.796875
3
[]
no_license
use bytes::Bytes; use std::collections::HashMap; use std::fs::{self, DirEntry}; use std::io; use std::path; pub fn find_domain_map_files(config: &path::Path) -> io::Result<HashMap<Bytes, Vec<DirEntry>>> { let p = config.join("namezone"); let mut m = if p.exists() { find_map_files(&p)? } else { HashMap::new() }; let m1 = find_confs(config, "region")?; merge_map_vec_value(&mut m, m1); Ok(m) } pub fn find_addr_map_files(config: &path::Path) -> io::Result<HashMap<Bytes, Vec<DirEntry>>> { let p = config.join("addrzone"); let mut m = if p.exists() { find_map_files(&p)? } else { HashMap::new() }; let m1 = find_confs(config, "ipregion")?; merge_map_vec_value(&mut m, m1); Ok(m) } fn find_map_files(path: &path::Path) -> io::Result<HashMap<Bytes, Vec<DirEntry>>> { let mut m = HashMap::new(); for entry in fs::read_dir(path)? { let entry = entry?; let ftype = fs::metadata(entry.path())?.file_type(); let n = entry.file_name(); let name = n.to_str().expect("Bad encoding in filename"); let nb: Bytes = name.into(); if ftype.is_file() { m.insert(nb, vec![entry]); } else if ftype.is_dir() { m.insert(nb, find_dir_entris(entry)?); } } Ok(m) } /// merge two HashMaps whose values are both vectors fn merge_map_vec_value(m0: &mut HashMap<Bytes, Vec<DirEntry>>, m1: HashMap<Bytes, Vec<DirEntry>>) { for (k, v) in m1.into_iter() { if m0.get(&k).is_some() { m0.get_mut(&k).unwrap().extend(v); } else { m0.insert(k, v); } } } /// deprecated fn find_confs(path: &path::Path, kind: &str) -> io::Result<HashMap<Bytes, Vec<DirEntry>>> { let mut region_map = HashMap::new(); for entry in fs::read_dir(path)? 
{ let file = entry?; let m = fs::metadata(file.path())?; let ftype = m.file_type(); let f = file.file_name(); let n = f.to_str().and_then(|x| extract_name(x, kind)); if ftype.is_dir() && n.is_some() { let conf = find_dir_entris(file)?; region_map.insert(n.unwrap(), conf); } } Ok(region_map) } fn extract_name(filename: &str, prefix: &str) -> Option<Bytes> { if !filename.starts_with(prefix) { return None; } let rest = filename.trim_start_matches(prefix); if !rest.starts_with(".") { return None; } let rest = rest.trim_start_matches("."); if rest.len() < 1 { return None; } Some(rest.into()) } fn find_dir_entris(dir: DirEntry) -> io::Result<Vec<DirEntry>> { let readdir = fs::read_dir(dir.path())?; let entries = readdir .filter_map(|entry| { let file = entry.unwrap(); let file_type = file.file_type().unwrap(); if file_type.is_file() || file_type.is_symlink() { Some(file) } else { None } }) .collect(); Ok(entries) } pub fn lines_without_comments(bytes: &[u8]) -> impl Iterator<Item = &[u8]> { bytes .split(|&x| x == b'\r' || x == b'\n') .map(|line: &[u8]| { line.split(|&x| x == b'#') .next() .unwrap_or(b"") .split(|x| x.is_ascii_whitespace()) .next() .unwrap_or(b"") }) .filter(|l| l.len() > 0) }
true
a71a9a29256d95f46eaca6f660a5a100600ff630
Rust
pbzweihander/async-h1
/src/chunked/encoder.rs
UTF-8
3,913
3.140625
3
[ "LicenseRef-scancode-unknown-license-reference", "MIT", "Apache-2.0" ]
permissive
use std::pin::Pin; use async_std::io; use async_std::io::prelude::*; use async_std::task::{Context, Poll}; use futures_core::ready; /// An encoder for chunked encoding. #[derive(Debug)] pub(crate) struct ChunkedEncoder<R> { reader: R, done: bool, } impl<R: Read + Unpin> ChunkedEncoder<R> { /// Create a new instance. pub(crate) fn new(reader: R) -> Self { Self { reader, done: false, } } } impl<R: Read + Unpin> Read for ChunkedEncoder<R> { fn poll_read( mut self: Pin<&mut Self>, cx: &mut Context<'_>, buf: &mut [u8], ) -> Poll<io::Result<usize>> { if self.done { return Poll::Ready(Ok(0)); } let reader = &mut self.reader; let max_bytes_to_read = max_bytes_to_read(buf.len()); let bytes = ready!(Pin::new(reader).poll_read(cx, &mut buf[..max_bytes_to_read]))?; if bytes == 0 { self.done = true; } let start = format!("{:X}\r\n", bytes); let start_length = start.as_bytes().len(); let total = bytes + start_length + 2; buf.copy_within(..bytes, start_length); buf[..start_length].copy_from_slice(start.as_bytes()); buf[total - 2..total].copy_from_slice(b"\r\n"); Poll::Ready(Ok(total)) } } fn max_bytes_to_read(buf_len: usize) -> usize { if buf_len < 6 { // the minimum read size is of 6 represents one byte of // content from the body. the other five bytes are 1\r\n_\r\n // where _ is the actual content in question panic!("buffers of length {} are too small for this implementation. if this is a problem for you, please open an issue", buf_len); } let bytes_remaining_after_two_cr_lns = (buf_len - 4) as f64; // the maximum number of bytes that the hex representation of remaining bytes might take let max_bytes_of_hex_framing = bytes_remaining_after_two_cr_lns.log2() / 4f64; (bytes_remaining_after_two_cr_lns - max_bytes_of_hex_framing.ceil()) as usize } #[cfg(test)] mod test_bytes_to_read { #[test] fn simple_check_of_known_values() { // the marked rows are the most important part of this test, // and a nonobvious but intentional consequence of the // implementation. 
in order to avoid overflowing, we must use // one fewer than the available buffer bytes because // increasing the read size increase the number of framed // bytes by two. This occurs when the hex representation of // the content bytes is near an increase in order of magnitude // (F->10, FF->100, FFF-> 1000, etc) let values = vec![ (6, 1), // 1 (7, 2), // 2 (20, 15), // F (21, 15), // F <- (22, 16), // 10 (23, 17), // 11 (260, 254), // FE (261, 254), // FE <- (262, 255), // FF <- (263, 256), // 100 (4100, 4093), // FFD (4101, 4093), // FFD <- (4102, 4094), // FFE <- (4103, 4095), // FFF <- (4104, 4096), // 1000 ]; for (input, expected) in values { let actual = super::max_bytes_to_read(input); assert_eq!( actual, expected, "\n\nexpected max_bytes_to_read({}) to be {}, but it was {}", input, expected, actual ); // testing the test: let used_bytes = expected + 4 + format!("{:X}", expected).len(); assert!( used_bytes == input || used_bytes == input - 1, "\n\nfor an input of {}, expected used bytes to be {} or {}, but was {}", input, input, input - 1, used_bytes ); } } }
true
79ec22c60d64fb9d4c9a12932a39377b0a67e7ed
Rust
GuillaumeGomez/sysinfo
/src/windows/sid.rs
UTF-8
5,343
2.875
3
[ "MIT" ]
permissive
// Take a look at the license at the top of the repository in the LICENSE file. use std::{fmt::Display, str::FromStr}; use winapi::{ shared::{ sddl::{ConvertSidToStringSidW, ConvertStringSidToSidW}, winerror::ERROR_INSUFFICIENT_BUFFER, }, um::{ errhandlingapi::GetLastError, securitybaseapi::{CopySid, GetLengthSid, IsValidSid}, winbase::{LocalFree, LookupAccountSidW}, winnt::{SidTypeUnknown, LPWSTR, PSID}, }, }; use crate::sys::utils::to_str; #[doc = include_str!("../../md_doc/sid.md")] #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct Sid { sid: Vec<u8>, } impl Sid { /// Creates an `Sid` by making a copy of the given raw SID. pub(crate) unsafe fn from_psid(psid: PSID) -> Option<Self> { if psid.is_null() { return None; } if IsValidSid(psid) == 0 { return None; } let length = GetLengthSid(psid); let mut sid = vec![0; length as usize]; if CopySid(length, sid.as_mut_ptr() as *mut _, psid) == 0 { sysinfo_debug!("CopySid failed: {:?}", GetLastError()); return None; } // We are making assumptions about the SID internal structure, // and these only hold if the revision is 1 // https://learn.microsoft.com/en-us/windows/win32/api/winnt/ns-winnt-sid // Namely: // 1. SIDs can be compared directly (memcmp). // 2. Following from this, to hash a SID we can just hash its bytes. // These are the basis for deriving PartialEq, Eq, and Hash. // And since we also need PartialOrd and Ord, we might as well derive them // too. The default implementation will be consistent with Eq, // and we don't care about the actual order, just that there is one. // So it should all work out. // Why bother with this? Because it makes the implementation that // much simpler :) assert_eq!(sid[0], 1, "Expected SID revision to be 1"); Some(Self { sid }) } /// Retrieves the account name of this SID. 
pub(crate) fn account_name(&self) -> Option<String> { unsafe { let mut name_len = 0; let mut domain_len = 0; let mut name_use = SidTypeUnknown; if LookupAccountSidW( std::ptr::null_mut(), self.sid.as_ptr() as *mut _, std::ptr::null_mut(), &mut name_len, std::ptr::null_mut(), &mut domain_len, &mut name_use, ) == 0 { let error = GetLastError(); if error != ERROR_INSUFFICIENT_BUFFER { sysinfo_debug!("LookupAccountSidW failed: {:?}", error); return None; } } let mut name = vec![0; name_len as usize]; // Reset length to 0 since we're still passing a NULL pointer // for the domain. domain_len = 0; if LookupAccountSidW( std::ptr::null_mut(), self.sid.as_ptr() as *mut _, name.as_mut_ptr(), &mut name_len, std::ptr::null_mut(), &mut domain_len, &mut name_use, ) == 0 { sysinfo_debug!("LookupAccountSidW failed: {:?}", GetLastError()); return None; } Some(to_str(name.as_mut_ptr())) } } } impl Display for Sid { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { unsafe fn convert_sid_to_string_sid(sid: PSID) -> Option<String> { let mut string_sid: LPWSTR = std::ptr::null_mut(); if ConvertSidToStringSidW(sid, &mut string_sid) == 0 { sysinfo_debug!("ConvertSidToStringSidW failed: {:?}", GetLastError()); return None; } let result = to_str(string_sid); LocalFree(string_sid as *mut _); Some(result) } let string_sid = unsafe { convert_sid_to_string_sid(self.sid.as_ptr() as *mut _) }; let string_sid = string_sid.ok_or(std::fmt::Error)?; write!(f, "{string_sid}") } } impl FromStr for Sid { type Err = String; fn from_str(s: &str) -> Result<Self, Self::Err> { unsafe { let mut string_sid: Vec<u16> = s.encode_utf16().collect(); string_sid.push(0); let mut psid: PSID = std::ptr::null_mut(); if ConvertStringSidToSidW(string_sid.as_ptr(), &mut psid) == 0 { return Err(format!( "ConvertStringSidToSidW failed: {:?}", GetLastError() )); } let sid = Self::from_psid(psid); LocalFree(psid as *mut _); // Unwrapping because ConvertStringSidToSidW should've performed // all the 
necessary validations. If it returned an invalid SID, // we better fail fast. Ok(sid.unwrap()) } } }
true
a1c9052151383854142020a2006fb34da4342e82
Rust
bazelbuild/rules_rust
/crate_universe/src/splicing.rs
UTF-8
22,157
2.53125
3
[ "Apache-2.0" ]
permissive
//! This module is responsible for finding a Cargo workspace pub(crate) mod cargo_config; mod crate_index_lookup; mod splicer; use std::collections::{BTreeMap, BTreeSet}; use std::convert::TryFrom; use std::fs; use std::path::{Path, PathBuf}; use std::str::FromStr; use anyhow::{anyhow, bail, Context, Result}; use cargo_toml::Manifest; use serde::{Deserialize, Serialize}; use crate::config::CrateId; use crate::metadata::{Cargo, CargoUpdateRequest, LockGenerator}; use crate::utils; use crate::utils::starlark::{Label, SelectList}; use self::cargo_config::CargoConfig; use self::crate_index_lookup::CrateIndexLookup; pub use self::splicer::*; type DirectPackageManifest = BTreeMap<String, cargo_toml::DependencyDetail>; /// A collection of information used for splicing together a new Cargo manifest. #[derive(Debug, Default, Serialize, Deserialize, Clone)] #[serde(deny_unknown_fields)] pub struct SplicingManifest { /// A set of all packages directly written to the rule pub direct_packages: DirectPackageManifest, /// A mapping of manifest paths to the labels representing them pub manifests: BTreeMap<PathBuf, Label>, /// The path of a Cargo config file pub cargo_config: Option<PathBuf>, /// The Cargo resolver version to use for splicing pub resolver_version: cargo_toml::Resolver, } impl FromStr for SplicingManifest { type Err = serde_json::Error; fn from_str(s: &str) -> Result<Self, Self::Err> { serde_json::from_str(s) } } impl SplicingManifest { pub fn try_from_path<T: AsRef<Path>>(path: T) -> Result<Self> { let content = fs::read_to_string(path.as_ref())?; Self::from_str(&content).context("Failed to load SplicingManifest") } pub fn resolve(self, workspace_dir: &Path, output_base: &Path) -> Self { let Self { manifests, cargo_config, .. 
} = self; let workspace_dir_str = workspace_dir.to_string_lossy(); let output_base_str = output_base.to_string_lossy(); // Ensure manifests all have absolute paths let manifests = manifests .into_iter() .map(|(path, label)| { let resolved_path = path .to_string_lossy() .replace("${build_workspace_directory}", &workspace_dir_str) .replace("${output_base}", &output_base_str); (PathBuf::from(resolved_path), label) }) .collect(); // Ensure the cargo config is located at an absolute path let cargo_config = cargo_config.map(|path| { let resolved_path = path .to_string_lossy() .replace("${build_workspace_directory}", &workspace_dir_str) .replace("${output_base}", &output_base_str); PathBuf::from(resolved_path) }); Self { manifests, cargo_config, ..self } } } /// The result of fully resolving a [SplicingManifest] in preparation for splicing. #[derive(Debug, Serialize, Default)] pub struct SplicingMetadata { /// A set of all packages directly written to the rule pub direct_packages: DirectPackageManifest, /// A mapping of manifest paths to the labels representing them pub manifests: BTreeMap<Label, cargo_toml::Manifest>, /// The path of a Cargo config file pub cargo_config: Option<CargoConfig>, } impl TryFrom<SplicingManifest> for SplicingMetadata { type Error = anyhow::Error; fn try_from(value: SplicingManifest) -> Result<Self, Self::Error> { let direct_packages = value.direct_packages; let manifests = value .manifests .into_iter() .map(|(path, label)| { // We read the content of a manifest file to buffer and use `from_slice` to // parse it. The reason is that the `from_path` version will resolve indirect // path dependencies in the workspace to absolute path, which causes the hash // to be unstable. Not resolving implicit data is okay here because the // workspace manifest is also included in the hash. 
// See https://github.com/bazelbuild/rules_rust/issues/2016 let manifest_content = fs::read(&path) .with_context(|| format!("Failed to load manifest '{}'", path.display()))?; let manifest = cargo_toml::Manifest::from_slice(&manifest_content) .with_context(|| format!("Failed to parse manifest '{}'", path.display()))?; Ok((label, manifest)) }) .collect::<Result<BTreeMap<Label, Manifest>>>()?; let cargo_config = match value.cargo_config { Some(path) => Some( CargoConfig::try_from_path(&path) .with_context(|| format!("Failed to load cargo config '{}'", path.display()))?, ), None => None, }; Ok(Self { direct_packages, manifests, cargo_config, }) } } #[derive(Debug, Default, Serialize, Deserialize, Clone)] pub struct SourceInfo { /// A url where to a `.crate` file. pub url: String, /// The `.crate` file's sha256 checksum. pub sha256: String, } /// Information about the Cargo workspace relative to the Bazel workspace #[derive(Debug, Default, Serialize, Deserialize)] pub struct WorkspaceMetadata { /// A mapping of crates to information about where their source can be downloaded pub sources: BTreeMap<CrateId, SourceInfo>, /// The path from the root of a Bazel workspace to the root of the Cargo workspace pub workspace_prefix: Option<String>, /// Paths from the root of a Bazel workspace to a Cargo package pub package_prefixes: BTreeMap<String, String>, /// Feature set for each target triplet and crate. /// /// We store this here because it's computed during the splicing phase via /// calls to "cargo tree" which need the full spliced workspace. 
pub features: BTreeMap<CrateId, SelectList<String>>, } impl TryFrom<toml::Value> for WorkspaceMetadata { type Error = anyhow::Error; fn try_from(value: toml::Value) -> Result<Self, Self::Error> { match value.get("cargo-bazel") { Some(v) => v .to_owned() .try_into() .context("Failed to deserialize toml value"), None => bail!("cargo-bazel workspace metadata not found"), } } } impl TryFrom<serde_json::Value> for WorkspaceMetadata { type Error = anyhow::Error; fn try_from(value: serde_json::Value) -> Result<Self, Self::Error> { match value.get("cargo-bazel") { Some(value) => { serde_json::from_value(value.to_owned()).context("Faield to deserialize json value") } None => bail!("cargo-bazel workspace metadata not found"), } } } impl WorkspaceMetadata { fn new( splicing_manifest: &SplicingManifest, member_manifests: BTreeMap<&PathBuf, String>, ) -> Result<Self> { let mut package_prefixes: BTreeMap<String, String> = member_manifests .iter() .filter_map(|(original_manifest, cargo_pkg_name)| { let label = match splicing_manifest.manifests.get(*original_manifest) { Some(v) => v, None => return None, }; let package = match &label.package { Some(pkg) => PathBuf::from(pkg), None => return None, }; let prefix = package.to_string_lossy().to_string(); Some((cargo_pkg_name.clone(), prefix)) }) .collect(); // It is invald for toml maps to use empty strings as keys. In the case // the empty key is expected to be the root package. If the root package // has a prefix, then all other packages will as well (even if no other // manifest represents them). 
The value is then saved as a separate value let workspace_prefix = package_prefixes.remove(""); let package_prefixes = package_prefixes .into_iter() .map(|(k, v)| { let prefix_path = PathBuf::from(v); let prefix = prefix_path.parent().unwrap(); (k, prefix.to_string_lossy().to_string()) }) .collect(); Ok(Self { sources: BTreeMap::new(), workspace_prefix, package_prefixes, features: BTreeMap::new(), }) } pub fn write_registry_urls_and_feature_map( cargo: &Cargo, lockfile: &cargo_lock::Lockfile, features: BTreeMap<CrateId, SelectList<String>>, input_manifest_path: &Path, output_manifest_path: &Path, ) -> Result<()> { let mut manifest = read_manifest(input_manifest_path)?; let mut workspace_metaata = WorkspaceMetadata::try_from( manifest .workspace .as_ref() .unwrap() .metadata .as_ref() .unwrap() .clone(), )?; // Locate all packages sourced from a registry let pkg_sources: Vec<&cargo_lock::Package> = lockfile .packages .iter() .filter(|pkg| pkg.source.is_some()) .filter(|pkg| pkg.source.as_ref().unwrap().is_registry()) .collect(); // Collect a unique set of index urls let index_urls: BTreeSet<String> = pkg_sources .iter() .map(|pkg| pkg.source.as_ref().unwrap().url().to_string()) .collect(); // Load the cargo config let cargo_config = { // Note that this path must match the one defined in `splicing::setup_cargo_config` let config_path = input_manifest_path .parent() .unwrap() .join(".cargo") .join("config.toml"); if config_path.exists() { Some(CargoConfig::try_from_path(&config_path)?) } else { None } }; // Load each index for easy access let crate_indexes = index_urls .into_iter() .map(|url| { // Ensure the correct registry is mapped based on the give Cargo config. let index_url = if let Some(config) = &cargo_config { config.resolve_replacement_url(&url)? } else { &url }; let index = if cargo.use_sparse_registries_for_crates_io()? 
&& index_url == utils::CRATES_IO_INDEX_URL { CrateIndexLookup::Http(crates_index::SparseIndex::from_url( "sparse+https://index.crates.io/", )?) } else if index_url.starts_with("sparse+https://") { CrateIndexLookup::Http(crates_index::SparseIndex::from_url(index_url)?) } else { let index = { // Load the index for the current url let index = crates_index::Index::from_url(index_url).with_context(|| { format!("Failed to load index for url: {index_url}") })?; // Ensure each index has a valid index config index.index_config().with_context(|| { format!("`config.json` not found in index: {index_url}") })?; index }; CrateIndexLookup::Git(index) }; Ok((url, index)) }) .collect::<Result<BTreeMap<String, _>>>() .context("Failed to locate crate indexes")?; // Get the download URL of each package based on it's registry url. let additional_sources = pkg_sources .iter() .map(|pkg| { let source_id = pkg.source.as_ref().unwrap(); let source_url = source_id.url().to_string(); let lookup = crate_indexes.get(&source_url).ok_or_else(|| { anyhow!( "Couldn't find crate_index data for SourceID {:?}", source_id ) })?; lookup.get_source_info(pkg).map(|source_info| { ( CrateId::new(pkg.name.as_str().to_owned(), pkg.version.to_string()), source_info, ) }) }) .collect::<Result<Vec<_>>>()?; workspace_metaata .sources .extend( additional_sources .into_iter() .filter_map(|(crate_id, source_info)| { source_info.map(|source_info| (crate_id, source_info)) }), ); workspace_metaata.features = features; workspace_metaata.inject_into(&mut manifest)?; write_root_manifest(output_manifest_path, manifest)?; Ok(()) } fn inject_into(&self, manifest: &mut Manifest) -> Result<()> { let metadata_value = toml::Value::try_from(self)?; let workspace = manifest.workspace.as_mut().unwrap(); match &mut workspace.metadata { Some(data) => match data.as_table_mut() { Some(map) => { map.insert("cargo-bazel".to_owned(), metadata_value); } None => bail!("The metadata field is always expected to be a table"), }, None => { let 
mut table = toml::map::Map::new(); table.insert("cargo-bazel".to_owned(), metadata_value); workspace.metadata = Some(toml::Value::Table(table)) } } Ok(()) } } #[derive(Debug)] pub enum SplicedManifest { Workspace(PathBuf), Package(PathBuf), MultiPackage(PathBuf), } impl SplicedManifest { pub fn as_path_buf(&self) -> &PathBuf { match self { SplicedManifest::Workspace(p) => p, SplicedManifest::Package(p) => p, SplicedManifest::MultiPackage(p) => p, } } } pub fn read_manifest(manifest: &Path) -> Result<Manifest> { let content = fs::read_to_string(manifest)?; cargo_toml::Manifest::from_str(content.as_str()).context("Failed to deserialize manifest") } pub fn generate_lockfile( manifest_path: &SplicedManifest, existing_lock: &Option<PathBuf>, cargo_bin: Cargo, rustc_bin: &Path, update_request: &Option<CargoUpdateRequest>, ) -> Result<cargo_lock::Lockfile> { let manifest_dir = manifest_path .as_path_buf() .parent() .expect("Every manifest should be contained in a parent directory"); let root_lockfile_path = manifest_dir.join("Cargo.lock"); // Remove the file so it's not overwitten if it happens to be a symlink. 
if root_lockfile_path.exists() { fs::remove_file(&root_lockfile_path)?; } // Generate the new lockfile let lockfile = LockGenerator::new(cargo_bin, PathBuf::from(rustc_bin)).generate( manifest_path.as_path_buf(), existing_lock, update_request, )?; // Write the lockfile to disk if !root_lockfile_path.exists() { bail!("Failed to generate Cargo.lock file") } Ok(lockfile) } #[cfg(test)] mod test { use super::*; use std::path::PathBuf; #[test] fn deserialize_splicing_manifest() { let runfiles = runfiles::Runfiles::create().unwrap(); let path = runfiles.rlocation( "rules_rust/crate_universe/test_data/serialized_configs/splicing_manifest.json", ); let content = std::fs::read_to_string(path).unwrap(); let manifest: SplicingManifest = serde_json::from_str(&content).unwrap(); // Check manifests assert_eq!( manifest.manifests, BTreeMap::from([ ( PathBuf::from("${build_workspace_directory}/submod/Cargo.toml"), Label::from_str("//submod:Cargo.toml").unwrap() ), ( PathBuf::from("${output_base}/external_crate/Cargo.toml"), Label::from_str("@external_crate//:Cargo.toml").unwrap() ), ( PathBuf::from("/tmp/abs/path/workspace/Cargo.toml"), Label::from_str("//:Cargo.toml").unwrap() ), ]) ); // Check splicing configs assert_eq!(manifest.resolver_version, cargo_toml::Resolver::V2); // Check packages assert_eq!(manifest.direct_packages.len(), 4); let package = manifest.direct_packages.get("rand").unwrap(); assert_eq!( package, &cargo_toml::DependencyDetail { default_features: false, features: vec!["small_rng".to_owned()], version: Some("0.8.5".to_owned()), ..Default::default() } ); let package = manifest.direct_packages.get("cfg-if").unwrap(); assert_eq!( package, &cargo_toml::DependencyDetail { git: Some("https://github.com/rust-lang/cfg-if.git".to_owned()), rev: Some("b9c2246a".to_owned()), default_features: true, ..Default::default() } ); let package = manifest.direct_packages.get("log").unwrap(); assert_eq!( package, &cargo_toml::DependencyDetail { git: 
Some("https://github.com/rust-lang/log.git".to_owned()), branch: Some("master".to_owned()), default_features: true, ..Default::default() } ); let package = manifest.direct_packages.get("cargo_toml").unwrap(); assert_eq!( package, &cargo_toml::DependencyDetail { git: Some("https://gitlab.com/crates.rs/cargo_toml.git".to_owned()), tag: Some("v0.15.2".to_owned()), default_features: true, ..Default::default() } ); // Check cargo config assert_eq!( manifest.cargo_config, Some(PathBuf::from("/tmp/abs/path/workspace/.cargo/config.toml")) ); } #[test] fn splicing_manifest_resolve() { let runfiles = runfiles::Runfiles::create().unwrap(); let path = runfiles.rlocation( "rules_rust/crate_universe/test_data/serialized_configs/splicing_manifest.json", ); let content = std::fs::read_to_string(path).unwrap(); let mut manifest: SplicingManifest = serde_json::from_str(&content).unwrap(); manifest.cargo_config = Some(PathBuf::from( "${build_workspace_directory}/.cargo/config.toml", )); manifest = manifest.resolve( &PathBuf::from("/tmp/abs/path/workspace"), &PathBuf::from("/tmp/output_base"), ); // Check manifests assert_eq!( manifest.manifests, BTreeMap::from([ ( PathBuf::from("/tmp/abs/path/workspace/submod/Cargo.toml"), Label::from_str("//submod:Cargo.toml").unwrap() ), ( PathBuf::from("/tmp/output_base/external_crate/Cargo.toml"), Label::from_str("@external_crate//:Cargo.toml").unwrap() ), ( PathBuf::from("/tmp/abs/path/workspace/Cargo.toml"), Label::from_str("//:Cargo.toml").unwrap() ), ]) ); // Check cargo config assert_eq!( manifest.cargo_config.unwrap(), PathBuf::from("/tmp/abs/path/workspace/.cargo/config.toml"), ) } #[test] fn splicing_metadata_workspace_path() { let runfiles = runfiles::Runfiles::create().unwrap(); let workspace_manifest_path = runfiles .rlocation("rules_rust/crate_universe/test_data/metadata/workspace_path/Cargo.toml"); let workspace_path = workspace_manifest_path.parent().unwrap().to_path_buf(); let child_a_manifest_path = runfiles.rlocation( 
"rules_rust/crate_universe/test_data/metadata/workspace_path/child_a/Cargo.toml", ); let child_b_manifest_path = runfiles.rlocation( "rules_rust/crate_universe/test_data/metadata/workspace_path/child_b/Cargo.toml", ); let manifest = SplicingManifest { direct_packages: BTreeMap::new(), manifests: BTreeMap::from([ ( workspace_manifest_path, Label::from_str("//:Cargo.toml").unwrap(), ), ( child_a_manifest_path, Label::from_str("//child_a:Cargo.toml").unwrap(), ), ( child_b_manifest_path, Label::from_str("//child_b:Cargo.toml").unwrap(), ), ]), cargo_config: None, resolver_version: cargo_toml::Resolver::V2, }; let metadata = SplicingMetadata::try_from(manifest).unwrap(); let metadata = serde_json::to_string(&metadata).unwrap(); assert!( !metadata.contains(workspace_path.to_str().unwrap()), "serialized metadata should not contain absolute path" ); } }
true
d73e16f80c2e6f21bce07f94cf7cb887e8dd3a07
Rust
n1tram1/lr35902-disass
/src/analyzer/disassembler.rs
UTF-8
493
2.78125
3
[]
no_license
use super::error::AnalyzerError; use super::instruction::Instruction; pub struct Disassembler; impl Disassembler { pub fn disassemble(bytes: &Vec<u8>) -> Result<Vec<Instruction>, AnalyzerError> { let mut instructions: Vec<Instruction> = Vec::new(); let mut i = 0; while i < bytes.len() { let inst = Instruction::from_slice(&bytes[i..])?; i += inst.size(); instructions.push(inst); } Ok(instructions) } }
true
bdcbe8b6d795d4f566407e5893eb94d0f6bb0600
Rust
jaems33/intro-to-rust
/chapter_6/option_enum/src/main.rs
UTF-8
737
3.796875
4
[]
no_license
/* Option Enum encodes the idea of a value being present or absent. enum is Option<T> This is similar to the library implementation enum QuasiOption<T> { Some(T), None, } */ fn divide(num: i32, den: i32) -> Option<i32> { if den == 0 { return Option::None } Option::Some(num / den) } fn main() { /* The type of None must be provided as Rust can't infer the type */ let _number: Option<i32> = None; let test_num = divide(10, 0); match test_num { Some(x) => println!("Total is: {}", x), None => println!("Can't divide by zero.") } let test_num = divide(10, 8); match test_num { Some(x) => println!("Total is: {}", x), None => println!("Can't divide by zero.") } }
true
7f51cf2d59483a70c77dce048b7deac1c5c4ea73
Rust
brl/citadel-tools
/libcitadel/src/log.rs
UTF-8
3,405
2.921875
3
[]
no_license
use std::sync::Mutex; use std::io::{self,Write}; use crate::Result; lazy_static! { static ref LOGGER: Mutex<Logger> = Mutex::new(Logger::new()); } #[macro_export] macro_rules! debug { ($e:expr) => { $crate::Logger::log($crate::LogLevel::Debug, String::from($e)) }; ($fmt:expr, $($arg:tt)+) => { $crate::Logger::log($crate::LogLevel::Debug, format!($fmt, $($arg)+)) }; } #[macro_export] macro_rules! verbose { ($e:expr) => { $crate::Logger::log($crate::LogLevel::Verbose, String::from($e)) }; ($fmt:expr, $($arg:tt)+) => { $crate::Logger::log($crate::LogLevel::Verbose, format!($fmt, $($arg)+)) }; } #[macro_export] macro_rules! info { ($e:expr) => { $crate::Logger::log($crate::LogLevel::Info, String::from($e)) }; ($fmt:expr, $($arg:tt)+) => { $crate::Logger::log($crate::LogLevel::Info, format!($fmt, $($arg)+)) }; } #[macro_export] macro_rules! notify { ($e:expr) => { $crate::Logger::log($crate::LogLevel::Notice, String::from($e)) }; ($fmt:expr, $($arg:tt)+) => { $crate::Logger::log($crate::LogLevel::Notice, format!($fmt, $($arg)+)) }; } #[macro_export] macro_rules! 
warn { ($e:expr) => { $crate::Logger::log($crate::LogLevel::Warn, String::from($e)) }; ($fmt:expr, $($arg:tt)+) => { $crate::Logger::log($crate::LogLevel::Warn, format!($fmt, $($arg)+)) }; } #[derive(PartialOrd,PartialEq,Copy,Clone)] pub enum LogLevel { Warn, Notice, Info, Verbose, Debug, } pub trait LogOutput: Send { fn log_output(&mut self, level: LogLevel, line: &str) -> Result<()>; } pub struct Logger { level: LogLevel, output: Box<dyn LogOutput>, } impl Logger { pub fn set_log_level(level: LogLevel) { let mut logger = LOGGER.lock().unwrap(); logger.level = level; } pub fn set_log_output(output: Box<dyn LogOutput>) { let mut logger = LOGGER.lock().unwrap(); logger.output = output; } pub fn log(level: LogLevel, message: impl AsRef<str>) { let mut logger = LOGGER.lock().unwrap(); logger.log_message(level, message.as_ref()); } fn new() -> Self { Self { level: LogLevel::Notice, output: Box::new(DefaultLogOutput) } } fn log_message(&mut self, level: LogLevel, message: &str) { if self.level >= level { if let Err(err) = self.output.log_output(level, message) { eprintln!("Error writing logline: {}", err); } } } pub fn format_logline(level: LogLevel, line: &str) -> String { let prefix = match level { LogLevel::Debug => "[.]", LogLevel::Verbose => "[-]", LogLevel::Info => "[+]", LogLevel::Notice => "[*]", LogLevel::Warn => "[Warning]", }; format!("{} {}\n", prefix, line) } } #[derive(Clone,Default)] pub struct DefaultLogOutput; impl DefaultLogOutput { pub fn new() -> Self { DefaultLogOutput::default() } } impl LogOutput for DefaultLogOutput { fn log_output(&mut self, level: LogLevel, line: &str) -> Result<()> { let line = Logger::format_logline(level, line); let stdout = io::stdout(); let mut lock = stdout.lock(); lock.write_all(line.as_bytes()) .map_err(context!("error writing log line to stdout"))?; lock.flush() .map_err(context!("error flushing stdout"))?; Ok(()) } }
true
0885529b73a3c7145990d44af738be2f63458add
Rust
PeterW-LWL/mail
/internals/src/bind/quoted_string.rs
UTF-8
3,100
2.734375
3
[ "LicenseRef-scancode-unknown-license-reference", "MIT", "Apache-2.0" ]
permissive
use quoted_string::spec::{ GeneralQSSpec, PartialCodePoint, WithoutQuotingValidator }; use media_type_impl_utils::quoted_string; use ::MailType; /// A Quoted String specification in context of Mail ([rfc5322](https://tools.ietf.org/html/rfc5322#section-2.2.3)) /// /// This implementation of MailQsSpec _does not_ include support for the obsolete parts of the grammar /// as it's meant for generation/encoding and no obsolete parts should be generated at all (Through /// a parser would have to be able to parse them for compatibility reasons). /// #[derive(Copy, Clone, Debug, Default)] pub struct MailQsSpec; impl GeneralQSSpec for MailQsSpec { type Quoting = quoted_string::NormalQuoting; type Parsing = quoted_string::MimeParsing; } /// A Quoted String specification in context of Mail ([rfc5322](https://tools.ietf.org/html/rfc5322#section-2.2.3)) /// /// This implementation of MailQsSpec _does not_ include support for the obsolete parts of the grammar /// as it's meant for generation/encoding and no obsolete parts should be generated at all (Through /// a parser would have to be able to parse them for compatibility reasons). 
#[derive(Copy, Clone, Debug, Default)] pub struct InternationalizedMailQsSpec; impl GeneralQSSpec for InternationalizedMailQsSpec { type Quoting = quoted_string::NormalUtf8Quoting; type Parsing = quoted_string::MimeParsingUtf8; } pub use self::quoted_string::MimeTokenValidator as UnquotedTokenValidator; #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)] pub struct UnquotedATextValidator { mail_type: MailType } impl UnquotedATextValidator { pub fn new(mail_type: MailType) -> Self { UnquotedATextValidator { mail_type } } } impl WithoutQuotingValidator for UnquotedATextValidator { fn next(&mut self, pcp: PartialCodePoint) -> bool { is_atext(pcp, self.mail_type) } } #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)] pub struct UnquotedDotAtomTextValidator { mail_type: MailType, allow_dot: bool } impl UnquotedDotAtomTextValidator { pub fn new(mail_type: MailType) -> Self { UnquotedDotAtomTextValidator { mail_type, allow_dot: false } } } impl WithoutQuotingValidator for UnquotedDotAtomTextValidator { fn next(&mut self, pcp: PartialCodePoint) -> bool { if is_atext(pcp, self.mail_type) { self.allow_dot = true; true } else if self.allow_dot && pcp.as_u8() == b'.' { self.allow_dot = false; true } else { false } } fn end(&self) -> bool { // it can't end in a dot so it's the same as allow_dot // (as empty or starting with a dot is also not allowed) self.allow_dot } } //TODO replace with lookup table (which could be placed in `::grammar`)! fn is_atext(pcp: PartialCodePoint, mail_type: MailType) -> bool { use grammar::is_special; let iu8 = pcp.as_u8(); if iu8 > 0x7f { mail_type == MailType::Internationalized } else { b'!' <= iu8 && iu8 <= b'~' && !is_special(iu8 as char) } }
true
655c6930fcc4de83b456ec8fd82c4f0d041bf11b
Rust
ndtho8205/rust-simple-apps
/snake_game/src/game.rs
UTF-8
4,519
2.8125
3
[]
no_license
use piston_window::{clear, rectangle, Context, G2d, Key}; use rand::{thread_rng, Rng}; use crate::snake::*; const BACKGROUND_COLOR: [f32; 4] = [0.0, 0.0, 0.0, 1.0]; const GAME_OVER_BACKGROUND_COLOR: [f32; 4] = [1.0, 0.0, 0.0, 1.0]; const BORDER_COLOR: [f32; 4] = [1.0, 1.0, 1.0, 1.0]; const APPLE_COLOR: [f32; 4] = [1.0, 0.0, 0.0, 1.0]; const SNAKE_COLOR: [f32; 4] = [0.0, 0.8, 0.0, 1.0]; const WIDTH: i32 = 30; const HEIGHT: i32 = 30; const BLOCK_SIZE: f64 = 10.0; const FPS: f64 = 10.0; pub struct Game { width: i32, height: i32, snake: Snake, apple: (i32, i32), waiting_time: f64, game_over: bool, } impl Game { pub fn new() -> Self { Self { width: WIDTH, height: HEIGHT, snake: Snake::new(3, 1), apple: (10, 10), waiting_time: 0.0, game_over: false, } } pub fn width(&self) -> f64 { self.width as f64 * BLOCK_SIZE } pub fn height(&self) -> f64 { self.height as f64 * BLOCK_SIZE } } impl Game { pub fn handle_key(&mut self, key: Key) { self.snake.change_direction(match key { Key::Right => Direction::Right, Key::Left => Direction::Left, Key::Up => Direction::Up, Key::Down => Direction::Down, _ => return, }); } } impl Game { pub fn render(&mut self, ctx: &Context, g: &mut G2d) { if self.game_over { clear(GAME_OVER_BACKGROUND_COLOR, g); return; } clear(BACKGROUND_COLOR, g); self.render_border(ctx, g); rectangle( APPLE_COLOR, self.to_gui_block(self.apple.0, self.apple.1, 1, 1), ctx.transform, g, ); let draw_rectangle = |x: i32, y: i32| { rectangle(SNAKE_COLOR, self.to_gui_block(x, y, 1, 1), ctx.transform, g) }; self.snake.draw(draw_rectangle); } pub fn render_border(&self, ctx: &Context, g: &mut G2d) { rectangle( BORDER_COLOR, self.to_gui_block(0, 0, 1, HEIGHT), ctx.transform, g, ); rectangle( BORDER_COLOR, self.to_gui_block(WIDTH - 1, 0, 1, HEIGHT - 1), ctx.transform, g, ); rectangle( BORDER_COLOR, self.to_gui_block(0, 0, WIDTH, 1), ctx.transform, g, ); rectangle( BORDER_COLOR, self.to_gui_block(0, HEIGHT - 1, WIDTH, 1), ctx.transform, g, ); } pub fn update(&mut self, delta: 
f64) { self.waiting_time += delta; if self.waiting_time > 1.0 / FPS { if self.game_over { self.restart(); } self.snake.go_forward(); let (head_x, head_y) = self.snake.head_position(); if self.is_game_over() { self.game_over = true; } if head_x == self.apple.0 && head_y == self.apple.1 { self.snake.grow(); self.apple = self.generate_random_apple(); } self.waiting_time = 0.0; } } fn restart(&mut self) { self.snake = Snake::new(3, 1); self.apple = self.generate_random_apple(); self.waiting_time = 0.0; self.game_over = false; } fn is_game_over(&self) -> bool { let (head_x, head_y) = self.snake.head_position(); self.snake.is_collided_with_body() || self.is_collided_with_border(head_x, head_y) || ((self.width - 2) * (self.height - 2)) as usize == self.snake.length() } fn generate_random_apple(&self) -> (i32, i32) { let mut rng = thread_rng(); loop { let apple = (rng.gen_range(1, WIDTH - 1), rng.gen_range(1, HEIGHT - 1)); if !self.snake.is_on_position(apple.0, apple.1) { return apple; } } } fn is_collided_with_border(&self, x: i32, y: i32) -> bool { x <= 0 || x >= HEIGHT - 1 || y <= 0 || y >= WIDTH - 1 } fn to_gui_block(&self, x: i32, y: i32, w: i32, h: i32) -> [f64; 4] { [ self.to_gui_coord(x), self.to_gui_coord(y), self.to_gui_coord(w), self.to_gui_coord(h), ] } fn to_gui_coord(&self, x: i32) -> f64 { x as f64 * BLOCK_SIZE } }
true
d193a3f7944118cadf986557dd75b4b1b0fe1c78
Rust
rust-lang/rust
/src/tools/clippy/tests/ui/significant_drop_tightening.rs
UTF-8
3,466
3.21875
3
[ "Apache-2.0", "MIT", "LLVM-exception", "NCSA", "BSD-2-Clause", "LicenseRef-scancode-unicode", "LicenseRef-scancode-other-permissive" ]
permissive
#![warn(clippy::significant_drop_tightening)] use std::sync::Mutex; pub fn complex_return_triggers_the_lint() -> i32 { fn foo() -> i32 { 1 } let mutex = Mutex::new(1); let lock = mutex.lock().unwrap(); let _ = *lock; let _ = *lock; foo() } pub fn issue_10413() { let mutex = Mutex::new(Some(1)); let opt = Some(1); if opt.is_some() { let lock = mutex.lock().unwrap(); let _ = *lock; if opt.is_some() { let _ = *lock; } } } pub fn issue_11128() { use std::mem::drop as unlock; struct Foo { droppable: Option<Vec<i32>>, mutex: Mutex<Vec<i32>>, } impl Drop for Foo { fn drop(&mut self) { if let Some(droppable) = self.droppable.take() { let lock = self.mutex.lock().unwrap(); let idx_opt = lock.iter().copied().find(|el| Some(el) == droppable.first()); if let Some(idx) = idx_opt { let local_droppable = vec![lock.first().copied().unwrap_or_default()]; unlock(lock); drop(local_droppable); } } } } } pub fn issue_11160() -> bool { let mutex = Mutex::new(1i32); let lock = mutex.lock().unwrap(); let _ = lock.abs(); true } pub fn issue_11189() { struct Number { pub value: u32, } fn do_something() -> Result<(), ()> { let number = Mutex::new(Number { value: 1 }); let number2 = Mutex::new(Number { value: 2 }); let number3 = Mutex::new(Number { value: 3 }); let mut lock = number.lock().unwrap(); let mut lock2 = number2.lock().unwrap(); let mut lock3 = number3.lock().unwrap(); lock.value += 1; lock2.value += 1; lock3.value += 1; drop((lock, lock2, lock3)); Ok(()) } } pub fn path_return_can_be_ignored() -> i32 { let mutex = Mutex::new(1); let lock = mutex.lock().unwrap(); let rslt = *lock; let _ = *lock; rslt } pub fn post_bindings_can_be_ignored() { let mutex = Mutex::new(1); let lock = mutex.lock().unwrap(); let rslt = *lock; let another = rslt; let _ = another; } pub fn unnecessary_contention_with_multiple_owned_results() { { let mutex = Mutex::new(1i32); let lock = mutex.lock().unwrap(); let _ = lock.abs(); let _ = lock.is_positive(); } { let mutex = Mutex::new(1i32); let lock = 
mutex.lock().unwrap(); let rslt0 = lock.abs(); let rslt1 = lock.is_positive(); do_heavy_computation_that_takes_time((rslt0, rslt1)); } } pub fn unnecessary_contention_with_single_owned_results() { { let mutex = Mutex::new(1i32); let lock = mutex.lock().unwrap(); let _ = lock.abs(); } { let mutex = Mutex::new(vec![1i32]); let mut lock = mutex.lock().unwrap(); lock.clear(); } { let mutex = Mutex::new(1i32); let lock = mutex.lock().unwrap(); let rslt0 = lock.abs(); do_heavy_computation_that_takes_time(rslt0); } { let mutex = Mutex::new(vec![1i32]); let mut lock = mutex.lock().unwrap(); lock.clear(); do_heavy_computation_that_takes_time(()); } } // Marker used for illustration purposes. pub fn do_heavy_computation_that_takes_time<T>(_: T) {} fn main() {}
true
46c852fcb19f2b5439d9855178efbc08bad03706
Rust
1kasu/gpeli
/src/fysiikka.rs
UTF-8
11,475
2.8125
3
[]
no_license
use std::cell::RefCell; use std::rc::Rc; use std::time::Duration; use super::maailma::kappale::{Kappale, Muoto, Tagi}; use super::maailma::vektori::Vektori; use crate::maailma::Lisaosa; type RcKappale = Rc<RefCell<Kappale>>; /// Kohde, jolle voidaan laskea fysiikkaan liittyviä laskuja. pub trait Fysiikallinen { /// Antaa kohteen nopeuden fn anna_nopeus(&self) -> Vektori; /// Asettaa kohteen nopeuden /// # Arguments /// * `nopeus` - Kohteen uusi nopeus fn aseta_nopeus(&mut self, nopeus: Vektori); /// Antaa kohteen sijainnin fn anna_sijainti(&self) -> Vektori; /// Asettaa kohteen sijainnin /// # Arguments /// * `sijainti` - Kohteen uusi sijainti fn aseta_sijainti(&mut self, sijainti: Vektori); /// Antaa kohteen muodon fn anna_muoto(&self) -> Muoto; /// Laskee kohteen uuden sijainnin ja palauttaa sen /// # Arguments /// * `paivitysaika` - Aika, jonka verran kappaleen sijaintia päivitetään fn laske_uusi_sijainti(&self, paivitysaika: &Duration) -> Vektori<f32>; } /// Fysiikka lisäosa tavalliselle kappaleelle. 
Kertoo käytännössä, että /// kappale voi törmätä ja sillä on nopeus pub struct Fysiikkakappale { /// Varsinainen kappale kappale: RcKappale, /// Kappaleen nopeus ja suunta nopeus: Vektori, } impl Fysiikkakappale { /// Antaa uuden fysiikkakappaleen, jolla on annettu nopeus ja annettu kappale /// # Arguments /// * `nopeus` - Kappaleen alkunopeus /// * `kappale` - Kappale, jolle lisätään fysiikka pub fn new(nopeus: Vektori, kappale: RcKappale) -> Self { Fysiikkakappale { kappale: kappale, nopeus: nopeus, } } /// Antaa fysiikkakappaleen tagin pub fn anna_tagi(&self) -> Tagi { self.kappale.borrow().tagi } } impl Lisaosa for Fysiikkakappale { /// Antaa fysiikkakappaleen käyttämän kappaleen fn anna_kappale(&self) -> RcKappale { Rc::clone(&self.kappale) } } impl Fysiikallinen for Fysiikkakappale { /// Antaa kohteen nopeuden fn anna_nopeus(&self) -> Vektori { self.nopeus } /// Asettaa kohteen nopeuden /// # Arguments /// * `nopeus` - Kohteen uusi nopeus fn aseta_nopeus(&mut self, nopeus: Vektori) { self.nopeus = nopeus; } /// Antaa kohteen sijainnin fn anna_sijainti(&self) -> Vektori { self.kappale.borrow().kulman_sijainti() } /// Asettaa kohteen sijainnin /// # Arguments /// * `sijainti` - Kohteen uusi sijainti fn aseta_sijainti(&mut self, sijainti: Vektori) { self.kappale.borrow_mut().aseta_kulman_sijainti(sijainti); } /// Antaa kohteen muodon fn anna_muoto(&self) -> Muoto { self.kappale.borrow().muoto } /// Laskee kappaleen uuden sijainnin annetun ajan mukaan /// # Arguments /// * `paivitysaika` - Päivityksessä käytettävä aika fn laske_uusi_sijainti(&self, paivitysaika: &Duration) -> Vektori<f32> { self.anna_sijainti() + self.anna_nopeus() * (paivitysaika.as_micros() as f32 * 0.000_001) } } /// Sisältää listan kaikista tapahtuneista törmäyksistä. Perustuu indekseihin, joten /// ei ole pitkään ajankohtainen. 
#[derive(Default)] pub struct Tormaystiedot { /// Lista törmäyksistä ja niiden tiedoista tormays_tiedot: Vec<Tormaystieto>, } impl Tormaystiedot { /// Antaa uuden törmäystiedot otuksen pub fn new() -> Self { Tormaystiedot { tormays_tiedot: Vec::new(), } } /// Lisää törmäyksen tägin törmäystietoon ja tarvittaessa luo törmäystiedon /// # Arguments /// * `indeksi` - törmänneen kappaleen indeksi /// * `tagi` - Törmätyn kappaleen tagi pub fn lisaa_tormays(&mut self, indeksi: usize, tagi: Tagi) { match self .tormays_tiedot .iter_mut() .find(|x| x.indeksi == indeksi) { Some(a) => a.lisaa_tagi(tagi), None => { let mut uusi_tormaystieto = Tormaystieto::new(indeksi); uusi_tormaystieto.lisaa_tagi(tagi); self.tormays_tiedot.push(uusi_tormaystieto); } } } /// Antaa tiedot kaikista kerätyistä törmäyksistä pub fn anna_tormaykset(&self) -> &[Tormaystieto] { &self.tormays_tiedot } } /// Sisältää yksittäisen kappaleen tiedot, että mihin kaikkiin tageihin /// on törmännyt. pub struct Tormaystieto { /// Törmänneen kappaleen indeksi pub indeksi: usize, /// Lista kohteiden tageista, joihin on törmätty tormatyt_kohteet: Vec<Tagi>, } impl Tormaystieto { /// Luo uuden törmäystiedon annetulla indeksillä /// # Arguments /// * `indeksi` - Törmääjän indeksi pub fn new(indeksi: usize) -> Self { Tormaystieto { indeksi: indeksi, tormatyt_kohteet: Vec::new(), } } /// Lisää annetun tagin, jos kyseinen tagi ei jo ole lisätty /// # Arguments /// * `lisattava_tagi` - Törmätyn kohteen tagi pub fn lisaa_tagi(&mut self, lisattava_tagi: Tagi) { if !self.tormatyt_kohteet.contains(&lisattava_tagi) { self.tormatyt_kohteet.push(lisattava_tagi) } } /// Antaa törmättyjen kohteiden tagit pub fn anna_tagit(&self) -> &[Tagi] { &self.tormatyt_kohteet } } /// Fysiikka otus, joka muistaa jotakin fysiikan päivitysksistä #[derive(Default)] pub struct Fysiikka { /// Viimeisimmän fysiikkapäivityksen aikana tapahtuneet törmäykset pub tormaykset: Tormaystiedot, } impl Fysiikka { /// Luo uuden fysiikan pub fn new() -> Self 
{ Fysiikka { tormaykset: Default::default(), } } } impl Fysiikka { /// Laskee kaikille annetuille fysiikkakappaleille uuden sijainnin ja palauttaa tiedot tapahtuneista törmäyksistä /// # Arguments /// * `kappaleet` - Päivitettävät kappaleet /// * `paivitysaika` - Päivityksessä käytettävä aika pub fn laske_uudet_sijainnit( &mut self, kappaleet: &mut [Fysiikkakappale], paivitysaika: &Duration, ) { let mut vanhat_sijainnit = Vec::new(); self.tormaykset = Tormaystiedot::new(); // Laskee uudet sijainnit for kappale in kappaleet.iter_mut() { vanhat_sijainnit.push(kappale.anna_sijainti()); kappale.aseta_sijainti(kappale.laske_uusi_sijainti(paivitysaika)); } // Tarkistetaan törmäykset uusien sijaintien välillä for i in 0..kappaleet.len() { // Tarkistaa aiheuttaako se törmäyksen for j in 0..kappaleet.len() { if i == j { // Törmäys itsensä kannssa ei ole järkevä luonnollisestikaan continue; } if ovatko_paallekkain( &kappaleet[i].kappale.borrow(), &kappaleet[j].kappale.borrow(), ) { // Törmäys tapahtuu // Merkitään törmäys muistiin self.tormaykset .lisaa_tormays(i, kappaleet[j].kappale.borrow().tagi) } } } // Perutaan kaikkien törmänneiden liike for tormays in self.tormaykset.anna_tormaykset() { let tormaajan_indeksi = tormays.indeksi; kappaleet[tormaajan_indeksi].aseta_sijainti(vanhat_sijainnit[tormaajan_indeksi]); } } } /// Tarkistaa törmäävätkö kaksi annettua kappaletta toisiinsa /// # Arguments /// * `kappale_a` - Kappale, joka ns törmää /// * `kappale_b` - Kappale, joka ns tulee törmätyksi fn ovatko_paallekkain(kappale_a: &Kappale, kappale_b: &Kappale) -> bool { match (kappale_a.muoto, kappale_b.muoto) { (Muoto::Nelio(leveys_a, korkeus_a), Muoto::Nelio(leveys_b, korkeus_b)) => { let sijainti_a = kappale_a.kulman_sijainti(); let sijainti_b = kappale_b.kulman_sijainti(); let vasen_a = sijainti_a.x; let oikea_a = sijainti_a.x + leveys_a; let vasen_b = sijainti_b.x; let oikea_b = sijainti_b.x + leveys_b; let ala_a = sijainti_a.y; let yla_a = sijainti_a.y + korkeus_a; let 
ala_b = sijainti_b.y; let yla_b = sijainti_b.y + korkeus_b; !(oikea_a < vasen_b || oikea_b < vasen_a || yla_a < ala_b || yla_b < ala_a) } // TODO: Epäilen bugia ympyrän törmäyksissä, mutta hankala olla varma, kun ympyrä on oikeasti kuva (Muoto::Ympyra(sade_a), Muoto::Ympyra(sade_b)) => { (kappale_a.keskipisteen_sijainti() - kappale_b.keskipisteen_sijainti()).pituus() < (sade_a + sade_b) } (Muoto::Ympyra(_), Muoto::Nelio(_, _)) => ympyran_ja_nelion_tormays(kappale_b, kappale_a), (Muoto::Nelio(_, _), Muoto::Ympyra(_)) => ympyran_ja_nelion_tormays(kappale_a, kappale_b), } } /// Tarkistaa ovatko annetut neliö ja ympyrä toistensa päällä. /// Antaa false, jos oletut muodot(ensin neliö ja sitten ympyrä) eivät päde. /// # Arguments /// * `nelio` - Pitää olla neliö tai false /// * `ympyra` - Pitää olla ympyrä tai false fn ympyran_ja_nelion_tormays(nelio: &Kappale, ympyra: &Kappale) -> bool { if let Muoto::Nelio(leveys, korkeus) = nelio.muoto { if let Muoto::Ympyra(sade) = ympyra.muoto { let ympyra_sijainti = ympyra.keskipisteen_sijainti(); let nelio_sijainti = nelio.kulman_sijainti(); let vasen = nelio_sijainti.x; let oikea = nelio_sijainti.x + leveys; let ala = nelio_sijainti.y; let yla = nelio_sijainti.y + korkeus; let vasen_yla_kulma = Vektori::new(vasen, yla); let oikea_yla_kulma = Vektori::new(oikea, yla); let vasen_ala_kulma = Vektori::new(vasen, ala); let oikea_ala_kulma = Vektori::new(oikea, ala); return !( // Rajataan neliö, jonka ulkopuolella törmäys ei voi tapahtua ympyra_sijainti.x <= vasen - sade || oikea + sade <= ympyra_sijainti.x || ympyra_sijainti.y <= ala - sade || yla + sade <= ympyra_sijainti.y // Tarkistetaan, onko se, neliön kulman muodostaman ympyrän ulkopuolella || (ympyra_sijainti - vasen_yla_kulma).pituus() >= sade && ympyra_sijainti.x < vasen && ympyra_sijainti.y > yla || (ympyra_sijainti - oikea_yla_kulma).pituus() >= sade && ympyra_sijainti.x > oikea && ympyra_sijainti.y > yla || (ympyra_sijainti - vasen_ala_kulma).pituus() >= sade && 
ympyra_sijainti.x < vasen && ympyra_sijainti.y < ala || (ympyra_sijainti - oikea_ala_kulma).pituus() >= sade && ympyra_sijainti.x > oikea && ympyra_sijainti.y < ala ); } } false }
true
9d17e92a036be8b6884a18da22fcb2424fd7d9fe
Rust
greyarch/dot-http
/src/controller/tests.rs
UTF-8
5,052
2.8125
3
[ "Apache-2.0" ]
permissive
use crate::controller::Controller; use http_test_server::{Request, TestServer}; use std::io::Write; use std::sync::mpsc::Receiver; use tempfile::NamedTempFile; #[test] fn test() { let server = TestServer::new().unwrap(); let _resource = server.create_resource("/defaults"); let _requests = server.requests(); let mut snapshot_file = NamedTempFile::new().unwrap(); writeln!(snapshot_file, "{{}}").unwrap(); let snapshot_file = snapshot_file.into_temp_path(); let mut env_file = NamedTempFile::new().unwrap(); writeln!(env_file, "{{}}").unwrap(); let env_file = env_file.into_temp_path(); let mut script_file = NamedTempFile::new().unwrap(); writeln!( script_file, "POST http://localhost:{port} HTTP/1.1", port = server.port() ) .unwrap(); let script_file = script_file.into_temp_path(); let mut controller = Controller::default(); let offset = 1; let env = String::from("dev"); controller .execute(offset, false, env, &script_file, &snapshot_file, &env_file) .unwrap(); } #[test] fn test_last_line() { let mut requests = multi_line_setup( 17, false, r#" POST http://localhost:{{port}}/first HTTP/1.1 Accept: */* Content-Type: application/json { "id": 1 } > {% console.log('test'); %} ### GET http://localhost:{{port}}/second HTTP/1.1 Accept: */* "#, ) .into_iter(); let _second = requests.next().expect("We should have a first request"); assert_eq!(None, requests.next(), "We should only have 1 request"); } #[test] fn test_all_requests() { let mut requests = multi_line_setup( 0, true, r#" GET http://localhost:{{port}}/first HTTP/1.1 Accept: */* Content-Type: application/json { "id": 1 } ### GET http://localhost:{{port}}/second HTTP/1.1 Accept: */* "#, ) .into_iter(); let _first = requests.next().expect("We should have a first request"); let _second = requests.next().expect("We should have a second request"); assert_eq!(None, requests.next(), "We should only have 2 requests"); } #[test] fn test_all_global_object() { let mut requests = multi_line_setup( 0, true, r#" GET 
http://localhost:{{port}}/first HTTP/1.1 Accept: */* Content-Type: application/json > {% client.global.set('global_state', response.body.response); %} ### GET http://localhost:{{port}}/{{global_state}} HTTP/1.1 Accept: */* "#, ) .into_iter(); let _first = requests.next().expect("We should have a first request"); let second = requests.next().expect("We should have a second request"); assert_eq!(None, requests.next(), "We should only have 2 requests"); assert_eq!( "/some_response", second.url, "We should be able to pass state via the global object" ); } /// This test ensures that we must operate through the global object and that we don't propagate state /// via global variables #[test] fn test_all_global_state() { let mut requests = multi_line_setup( 0, true, r#" GET http://localhost:{{port}}/first HTTP/1.1 > {% var someGlobal = "global"; client.global.set('global_state', response.body.response); %} ### GET http://localhost:{{port}}/second HTTP/1.1 > {% console.log(someGlobal); var found = someGlobal !== undefined; client.global.set('found', found); %} ### GET http://localhost:{{port}}/{{found}} "#, ) .into_iter(); let _first = requests.next().expect("We should have a first request"); let _second = requests.next().expect("We should have a second request"); let third = requests.next().expect("We should have a second request"); assert_eq!(None, requests.next(), "We should only have 2 requests"); assert_eq!( "/false", third.url, "We should not persist global variables across runs" ); } fn multi_line_setup(offset: usize, all: bool, scripts: &str) -> Receiver<Request> { let server = TestServer::new().unwrap(); server.create_resource("/first").body( r#"{ "response": "some_response" }"#, ); server.create_resource("/second"); let requests = server.requests(); let mut snapshot_file = NamedTempFile::new().unwrap(); writeln!(snapshot_file, "{{}}").unwrap(); let snapshot_file = snapshot_file.into_temp_path(); let mut env_file = NamedTempFile::new().unwrap(); writeln!( env_file, 
r#"{{ "dev": {{ "port": {port} }} }}"#, port = server.port() ) .unwrap(); let env_file = env_file.into_temp_path(); let mut script_file = NamedTempFile::new().unwrap(); writeln!(script_file, "{}", scripts,).unwrap(); let script_file = script_file.into_temp_path(); let mut controller = Controller::default(); let env = String::from("dev"); controller .execute(offset, all, env, &script_file, &snapshot_file, &env_file) .unwrap(); requests }
true
ddb2eba727d1e2c3a249151d8d42df63d1cd08e9
Rust
lights0123/sip-codec
/src/parser/header.rs
UTF-8
1,298
2.78125
3
[]
no_license
use http::header::{HeaderName, HeaderValue}; use nom::is_space; use super::is_newline; fn is_colon(ch: u8) -> bool { ch == b':' } fn is_colon_or_space(ch: u8) -> bool { is_colon(ch) || is_space(ch) } named!(pub header<&[u8], (HeaderName, HeaderValue)>, do_parse!( name: map_res!(take_till1!(is_colon_or_space), HeaderName::from_bytes) >> opt!(is_a!(" \t")) >> char!(':') >> opt!(is_a!(" \t")) >> content: map_res!(take_till!(is_newline), HeaderValue::from_bytes) >> (name, content) ) ); #[cfg(test)] mod tests { use super::*; #[test] fn no_space() { let expected = ( &b"\r\n"[..], ( HeaderName::from_static("content-type"), HeaderValue::from_static("application/sdp"), ), ); assert_eq!( expected, header(b"Content-Type:application/sdp\r\n").unwrap() ); } #[test] fn single_space() { let expected = ( &b"\r\n"[..], ( HeaderName::from_static("content-length"), HeaderValue::from_static("57"), ), ); assert_eq!(expected, header(b"Content-length: 57\r\n").unwrap()); } #[test] fn double_space() { let expected = ( &b"\r\n"[..], ( HeaderName::from_static("cseq"), HeaderValue::from_static("314159 INVITE"), ), ); assert_eq!(expected, header(b"CSeq : 314159 INVITE\r\n").unwrap()); } }
true
a3a9d90cc736739f58e87a2c7a230b84386dc100
Rust
edjacob25/ComputationalFundaments
/FirstPart/Homework5/benches/benchmarks.rs
UTF-8
2,218
2.609375
3
[ "MIT" ]
permissive
use criterion::{criterion_group, criterion_main, Criterion}; use homework_5::{heapsort, mergesort, quicksort, read_file}; fn benchmark(c: &mut Criterion) { let mut random: [Vec<u32>; 5] = Default::default(); let mut sorted: [Vec<u32>; 5] = Default::default(); let mut reverse: [Vec<u32>; 5] = Default::default(); for i in 2..7 { random[i - 2] = read_file(format!("random_{}", i).to_string()).unwrap(); sorted[i - 2] = read_file(format!("sorted_{}", i).to_string()).unwrap(); reverse[i - 2] = read_file(format!("reverse_{}", i).to_string()).unwrap(); } let mut group = c.benchmark_group("quicksort"); for i in 2..7 { group.bench_function(format!("random {}", i), |b| { b.iter(|| quicksort(random[i - 2].clone())) }); group.bench_function(format!("sorted {}", i), |b| { b.iter(|| quicksort(sorted[i - 2].clone())) }); group.bench_function(format!("reverse {}", i), |b| { b.iter(|| quicksort(reverse[i - 2].clone())) }); } group.finish(); let mut group = c.benchmark_group("mergesort"); for i in 2..7 { group.bench_function(format!("random {}", i), |b| { b.iter(|| mergesort(&mut random[i - 2].clone())) }); group.bench_function(format!("sorted {}", i), |b| { b.iter(|| mergesort(&mut sorted[i - 2].clone())) }); group.bench_function(format!("reverse {}", i), |b| { b.iter(|| mergesort(&mut reverse[i - 2].clone())) }); } group.finish(); let mut group = c.benchmark_group("heapsort"); for i in 2..7 { group.bench_function(format!("random {}", i), |b| { b.iter(|| heapsort(&mut random[i - 2].clone())) }); group.bench_function(format!("sorted {}", i), |b| { b.iter(|| heapsort(&mut sorted[i - 2].clone())) }); group.bench_function(format!("reverse {}", i), |b| { b.iter(|| heapsort(&mut reverse[i - 2].clone())) }); } group.finish(); } criterion_group! { name=benches; config=Criterion::default().sample_size(10); targets=benchmark } criterion_main!(benches);
true
29ec29877fb75af2fbef51b90b298e1cb461290b
Rust
redwarp/Advent-of-Code-2019
/src/bin/day03.rs
UTF-8
7,161
3.203125
3
[]
no_license
/// Probably the ugliest piece of code I ever wrote. Let's pretend it never existed. use adventofcode::files; #[derive(Debug)] struct Segment { a: (i32, i32), b: (i32, i32), direction: char, } impl Segment { fn does_intersect(&self, other_segment: &Segment) -> bool { if (self.is_vertical() && other_segment.is_vertical()) || (!self.is_vertical() && !other_segment.is_vertical()) { return false; } let horizontal: &Segment; let vertical: &Segment; match self.is_vertical() { true => { vertical = self; horizontal = other_segment; } false => { vertical = other_segment; horizontal = self; } }; if horizontal.a.0 <= vertical.a.0 && horizontal.b.0 >= vertical.a.0 && horizontal.a.1 <= vertical.b.1 && horizontal.a.1 >= vertical.a.1 { true } else { false } } fn intersection(&self, other_segment: &Segment) -> (i32, i32) { if (self.is_vertical() && other_segment.is_vertical()) || (!self.is_vertical() && !other_segment.is_vertical()) { return (0, 0); } let horizontal: &Segment; let vertical: &Segment; match self.is_vertical() { true => { vertical = self; horizontal = &other_segment; } false => { vertical = &other_segment; horizontal = self; } }; if horizontal.a.0 <= vertical.a.0 && horizontal.b.0 >= vertical.a.0 && horizontal.a.1 <= vertical.b.1 && horizontal.a.1 >= vertical.a.1 { (vertical.a.0, horizontal.a.1) } else { (0, 0) } } fn step_adjustement(&self, other_segment: &Segment) -> (i32, i32) { if (self.is_vertical() && other_segment.is_vertical()) || (!self.is_vertical() && !other_segment.is_vertical()) { return (0, 0); } let horizontal: &Segment; let vertical: &Segment; match self.is_vertical() { true => { vertical = self; horizontal = &other_segment; } false => { vertical = &other_segment; horizontal = self; } }; if horizontal.a.0 <= vertical.a.0 && horizontal.b.0 >= vertical.a.0 && horizontal.a.1 <= vertical.b.1 && horizontal.a.1 >= vertical.a.1 { let vertical_step = match vertical.direction { 'U' => horizontal.a.1 - vertical.a.1, _ => vertical.b.1 - horizontal.b.1, }; let 
horizontal_step = match horizontal.direction { 'R' => vertical.a.0 - horizontal.a.0, _ => horizontal.b.0 - vertical.b.0, }; (horizontal_step, vertical_step) } else { (0, 0) } } fn is_vertical(&self) -> bool { self.a.0 == self.b.0 } fn length(&self) -> i32 { self.b.0 - self.a.0 + self.b.1 - self.a.1 } } fn create_segment_from_path(origin: (i32, i32), path: &String) -> (Segment, i32, i32) { let (x, y) = origin; let direction = path.chars().next().unwrap(); let step: i32 = path[1..].to_owned().parse().unwrap(); match direction { 'U' => ( Segment { a: (x, y), b: (x, y + step), direction: direction, }, x, y + step, ), 'D' => ( Segment { a: (x, y - step), b: (x, y), direction: direction, }, x, y - step, ), 'R' => ( Segment { a: (x, y), b: (x + step, y), direction: direction, }, x + step, y, ), 'L' => ( Segment { a: (x - step, y), b: (x, y), direction: direction, }, x - step, y, ), _ => ( Segment { a: (0, 0), b: (0, 0), direction: direction, }, 0, 0, ), } } fn path_to_segments(paths: &Vec<String>) -> Vec<Segment> { let mut coords = (0, 0); paths .iter() .map(|path| { let (segment, x, y) = create_segment_from_path(coords, path); coords = (x, y); segment }) .collect() } fn find_distance_for_intersection( first_segments: Vec<Segment>, second_segments: Vec<Segment>, ) -> i32 { let mut distance = std::i32::MAX; for i in 0..first_segments.len() { for j in 0..second_segments.len() { if first_segments[i].does_intersect(&second_segments[j]) { let intersection = first_segments[i].intersection(&second_segments[j]); let intersection_distance = intersection.0.abs() + intersection.1.abs(); if distance > intersection_distance && intersection_distance != 0 { distance = intersection_distance; } } } } distance } fn find_steps_for_intersection(first_segments: Vec<Segment>, second_segments: Vec<Segment>) -> i32 { let mut steps = std::i32::MAX; for i in 0..first_segments.len() { for j in 0..second_segments.len() { if first_segments[i].does_intersect(&second_segments[j]) { let first_step = 
first_segments .iter() .take(i) .fold(0, |acc, segment| acc + segment.length()); let second_step = second_segments .iter() .take(j) .fold(0, |acc, segment| acc + segment.length()); let step_adjustement = first_segments[i].step_adjustement(&second_segments[j]); let step = first_step + second_step + step_adjustement.0 + step_adjustement.1; if step < steps && step > 0 { steps = step; } } } } steps } fn main() { let lines = files::read_file_line_per_line("inputs/day03.txt"); let first_wire: Vec<String> = lines[0].split(',').map(|path| path.to_string()).collect(); let second_wire: Vec<String> = lines[1].split(',').map(|path| path.to_string()).collect(); let distance = find_distance_for_intersection( path_to_segments(&first_wire), path_to_segments(&second_wire), ); let steps = find_steps_for_intersection( path_to_segments(&first_wire), path_to_segments(&second_wire), ); println!("Distance: {}", distance); println!("Steps: {}", steps); }
true
9cd57cc5fd55df3ad00d165dd34528ddf44ad7a6
Rust
wangzhf/rs-learn
/base/src/test_pattern.rs
UTF-8
2,791
4.21875
4
[]
no_license
pub fn test_pattern() { let x = 1; let c = 'c'; match c { // 此时x会被覆盖成c x => println!("x: {}, c: {}", x, c), } // 此时x会被还原为1 println!("x: {}", x); } // 测试解构struct struct Point { x: i64, y: i64, } pub fn test_pattern2() { let point = Point { x: 0, y: 0 }; match point { Point { x, y } => println!("x: {}, y: {}", x, y), } // 对字段重命名 match point { Point { x: x1, y: y1} => println!("x1: {}, y1: {}", x1, y1), } // 省略字段 match point { Point { x, .. } => println!("x: {}", x), } } pub fn test_pattern3() { let tuple: (u32, String) = (5, String::from("five")); // 解构时,tuple被move了 // let (x, s) = tuple; // String 未实现Copy,所以tuple被整体move了 println!("Tuple is : {:?}", tuple); let s: String = "Hello".to_string(); println!("{}", s); println!("{:?}", s); let t = (5, String::from("five")); // 忽略String类型,而u32实现了Copy,所以tuple不会被move let (x, _) = t; println!("Tuple is {:?}", t); } // 范围 pub fn test_pattern4() { let x = 1; match x { 1...10 => { println!("one to ten"); }, _ => println!("other"), } let c = 'w'; match c { 'a' ... 'z' => println!("little letter"), 'A' ... 'Z' => println!("Upper letter"), _ => println!("other"), } } // 多重匹配 pub fn test_pattern5() { let x = 1; match x { 1 | 2 => println!("one or two"), _ => println!("other"), } } // 使用ref或ref mut从中拿到一个引用,而不是将其remove掉 pub fn test_ref() { let mut tuple: (u32, String) = (3, "hello".to_string()); // 此处使用ref或ref mut取引用,tuple也就不会被move了 let (x, ref mut t) = tuple; println!("{:?}", tuple); let s: String = "hello".to_string(); match s { // 解构时使用ref,这样不影响所有权 ref s => println!("{}", s), } println!("{}", s); } #[derive(Debug)] struct Person { name: Option<String>, } // 变量绑定 pub fn test_param_bind() { let x = 1u32; match x { e@ 1...5 | e @ 6...10 => println!("the value is {}", e), _ => (), } let name = "Steve".to_string(); let x: Option<Person> = Some(Person { name: Some(name) }); match x { Some(Person { name: a @ Some(_), .. 
}) => println!("{:?}", a), _ => (), } } // 后置条件 pub fn test_if() { let x = 4; let y = false; match x { // 后置if表达式,等同于伪代码: if y and (x in list(4, 5)) 4 | 5 if y => println!("yes"), _ => println!("no"), } }
true
9f149371d4388b37ab299a73e015af9b25cfc989
Rust
itang/tests
/test-rusts/test-iron-open-browser/src/main.rs
UTF-8
1,186
3.0625
3
[]
no_license
extern crate iron; use iron::prelude::*; use std::process::Command; use std::time; use std::net::TcpListener; use std::thread; const PORT: u16 = 3000u16; fn hello_world(_: &mut Request) -> IronResult<Response> { Ok(Response::with((iron::status::Ok, "Hello world!"))) } fn tcp_port_available(port: u16) -> bool { match TcpListener::bind(("0.0.0.0", port)) { Err(_) => false, Ok(l) => { drop(l); true } } } fn try_open_browser() { thread::spawn(|| { loop { thread::sleep(time::Duration::from_millis(200)); if !tcp_port_available(PORT) { println!(">>tcp_port_available false"); Command::new("xdg-open") .arg(format!("http://localhost:{}", PORT)) .output() .unwrap_or_else(|e| { panic!("failed to execute process: {}", e) }); break; } println!("try again..."); } }); } fn main() { let chain = Chain::new(hello_world); try_open_browser(); println!("listening on :3000..."); Iron::new(chain).http(("0.0.0.0", PORT)).unwrap(); }
true
282d99b25715ac8cbba27cf7cf02fd2b9f1a3e20
Rust
energister/rusty_engine
/examples/level_creator.rs
UTF-8
7,709
3.40625
3
[ "LicenseRef-scancode-unknown-license-reference", "CC0-1.0", "MIT", "Apache-2.0" ]
permissive
use rusty_engine::prelude::*; const MAX_LAYER: f32 = 900.0; fn main() { println!( " This is an example of how you could write a level creater. This example lets you place actors, and generate the code you can copy-and-paste into a main.rs file to recreate that level. Controls R - Reset actor to default scale & rotation S - Print out status of current actor Z - Print out Rust code of current level Left/Up - Previous actor preset Right/Down - Next actor preset Right / Left Click - Rotate actor by 45 degrees (add Shift to rotate by 1 degree) Mousewheel - Scale actor by 10% (add Shift to scale by 1 percent) Mouse location - Choose translation (location) of actor " ); let mut game = Game::new(); // Use an incrementing index (converted to a string) for the unique label of the actors // Start at 1 since the hard-coded initial actor is 0 game.game_state_mut().u32_vec.push(1); // Get our first actor onto the board let initial_label = "0".to_string(); game.game_state_mut().string_vec.push(initial_label.clone()); let mut curr_actor = game.add_actor(initial_label, ActorPreset::RacingCarRed); //curr_actor.scale = 0.5; curr_actor.layer = MAX_LAYER; // Use a bool to track whether or not the shift key is currently pressed game.game_state_mut().bool_vec.push(false); // Use an f32 to track the current layer (so newer actors will always be on top of older ones) game.game_state_mut().f32_vec.push(0.01); game.run(logic); } fn logic(game_state: &mut GameState) { // Extract values we're tracking let current_label = game_state.string_vec.get_mut(0).unwrap(); let next_actor_index = game_state.u32_vec.get_mut(0).unwrap(); let shift_pressed = game_state.bool_vec.get_mut(0).unwrap(); let next_layer = game_state.f32_vec.get_mut(0).unwrap(); // Gather keyboard input let mut reset = false; let mut print_level = false; let mut print_status = false; let mut place_actor = false; let mut prev_preset = false; let mut next_preset = false; for keyboard_input in &game_state.keyboard_events { if let 
KeyboardInput { scan_code: _, key_code: Some(key_code), state, } = keyboard_input { if *state == ElementState::Pressed { match key_code { KeyCode::Z | KeyCode::Semicolon => { print_level = true; } KeyCode::LShift | KeyCode::RShift => { *shift_pressed = true; } KeyCode::R => { reset = true; } KeyCode::S => { print_status = true; } KeyCode::Space | KeyCode::Delete => { place_actor = true; } KeyCode::Left | KeyCode::Up => { prev_preset = true; } KeyCode::Right | KeyCode::Down => { next_preset = true; } _ => {} } } else { match key_code { KeyCode::LShift | KeyCode::RShift => { *shift_pressed = false; } _ => {} } } } } // Print out the level? if print_level { println!( "---------------\n\nuse rusty_engine::prelude::*;\n\nfn main() {{\n let mut game = Game::new();\n" ); for actor in game_state.actors.values() { if actor.label == *current_label { continue; } println!( " let a = game.game_state_mut().add_actor(\"{}\", ActorPreset::{:?}); a.translation = Vec2::new({:.1}, {:.1}); a.rotation = {:.8}; a.scale = {:.8}; a.layer = {:.8}; a.collision = true;", actor.label, actor.preset.unwrap(), actor.translation.x, actor.translation.y, actor.rotation, actor.scale, actor.layer, ); } println!("\n game.run(logic);\n}}\n\nfn logic(game_state: &mut GameState) {{\n // Game Logic Goes Here\n}}") } // Handle current actor that has not yet been placed if let Some(actor) = game_state.actors.get_mut(current_label) { // Should we print out the status of the actor? if print_status { println!( "Actor Status:\n-----------\n{:?}\nt: ({:.1}, {:.1})\nr: {:.8}\ns: {:.8}", actor.preset.unwrap(), actor.translation.x, actor.translation.y, actor.rotation, actor.scale ); } // Did the user ask for rotation scale to be reset? 
if reset { actor.rotation = 0.0; actor.scale = 1.0; } // Handle translation via mouse location for cursor_moved in &game_state.cursor_moved_events { actor.translation = cursor_moved.position; } // Handle rotation via mouse clicks for mouse_button_input in &game_state.mouse_button_events { if mouse_button_input.state != ElementState::Pressed { break; } let rotate_amount = if *shift_pressed { std::f32::consts::TAU / 360.0 } else { std::f32::consts::FRAC_PI_4 }; match mouse_button_input.button { MouseButton::Left => actor.rotation += rotate_amount, MouseButton::Right => actor.rotation -= rotate_amount, _ => {} } println!("r: {:.8}", actor.rotation); } // Handle scale via mousewheel for mouse_wheel in &game_state.mouse_wheel_events { let scale_amount = if *shift_pressed { 0.01 } else { 0.1 }; if mouse_wheel.y > 0.0 || mouse_wheel.x < 0.0 { actor.scale *= 1.0 + scale_amount; } else { actor.scale *= 1.0 - scale_amount; } actor.scale = actor.scale.clamp(0.1, 5.0); println!("s: {:.8}", actor.scale); } } // Change actor to prev/next preset if prev_preset || next_preset { let old_actor = { game_state.actors.get_mut(current_label).unwrap().clone() }; let new_preset = { if prev_preset { old_actor.preset.unwrap().prev() } else { old_actor.preset.unwrap().next() } }; let new_label = next_actor_index.to_string(); *next_actor_index += 1; let mut new_actor = new_preset.build(new_label.clone()); *current_label = new_label; new_actor.layer = MAX_LAYER; new_actor.translation = old_actor.translation; new_actor.rotation = old_actor.rotation; new_actor.scale = old_actor.scale; game_state.actors.insert(new_actor.label.clone(), new_actor); game_state.actors.remove::<str>(old_actor.label.as_ref()); println!("{:?}", new_preset); } // Place an actor if place_actor { let mut actor = { game_state.actors.get_mut(current_label).unwrap().clone() }; actor.layer = *next_layer; *next_layer += 0.01; actor.label = next_actor_index.to_string(); *next_actor_index += 1; 
game_state.actors.insert(actor.label.clone(), actor); } }
true
835bd6c071e40cd1f38c245ee47f628a5eeaf97c
Rust
risooonho/kludgine
/src/style/font_size.rs
UTF-8
880
3.015625
3
[ "MIT", "Apache-2.0" ]
permissive
use crate::{
    math::{Length, Raw, Scale, Scaled},
    style::{Style, StyleComponent},
};

/// The size of a font, expressed as a typed length in `Unit` space.
#[derive(Debug, Copy, Clone)]
pub struct FontSize<Unit: Default + Copy>(pub Length<f32, Unit>);

impl Default for FontSize<Scaled> {
    /// 14-point text is the framework's default size.
    fn default() -> Self {
        FontSize(Length::new(14.))
    }
}

impl<Unit: Default + Copy> FontSize<Unit> {
    /// Wraps a bare `f32` in a typed font size.
    pub fn new(value: f32) -> Self {
        FontSize(Length::new(value))
    }

    /// Returns the size as a bare `f32`.
    pub fn get(&self) -> f32 {
        self.length().get()
    }

    /// Returns the size as a typed `Length`.
    pub fn length(&self) -> Length<f32, Unit> {
        let FontSize(size) = self;
        *size
    }
}

impl StyleComponent<Scaled> for FontSize<Scaled> {
    /// Converts the scaled size into raw units and records it in `map`.
    fn scale(&self, scale: Scale<f32, Scaled, Raw>, map: &mut Style<Raw>) {
        let raw_size = self.length() * scale;
        map.push(FontSize(raw_size));
    }
}

impl StyleComponent<Raw> for FontSize<Raw> {
    /// Already in raw units, so the value passes through unchanged and
    /// the scale factor is ignored.
    fn scale(&self, _scale: Scale<f32, Raw, Raw>, map: &mut Style<Raw>) {
        map.push(FontSize(self.length()));
    }
}
true
e341178b946ffa01434b3035de95bb297d9559a2
Rust
ngwemo/hackerrank-challenges-rust
/src/challenges/thirty_days_of_code/day_20.rs
UTF-8
952
3.296875
3
[]
no_license
// 20. Sorting // https://www.hackerrank.com/challenges/30-sorting #![allow(unused_variables)] use crate::functions; pub fn main() { let _ = functions::read_line().trim(); let input: String = String::from(functions::read_line().trim()); let mut row = input .split(' ') .into_iter() .map(|s| s.parse::<i32>()) .collect::<Result<Vec<i32>, std::num::ParseIntError>>() .unwrap(); println!( "Array is sorted in {} swaps.", swaps_from_bubble_sort(&mut row) ); println!("First Element: {}", row[0]); println!("Last Element: {}", row[row.len() - 1]); } fn swaps_from_bubble_sort(vec: &mut Vec<i32>) -> i32 { let mut number_of_swaps = 0; for i in 0..vec.len() { for j in 0..vec.len() - 1 { if vec[j] > vec[j + 1] { vec.swap(j, j + 1); number_of_swaps += 1; } } } number_of_swaps }
true
3b6b81e0c6792a7c15180b1469075c47519096b3
Rust
lemmi/AOC-2017
/star05/src/main.rs
UTF-8
1,561
3.734375
4
[ "Unlicense" ]
permissive
use std::io;
use std::io::BufRead;
use std::ops;

// Coordinate system (matrix-style):
//
// (y, x) ----> x
//  |
//  |
//  v
//  y
/// A point on the spiral grid; `y` grows downward, `x` grows rightward.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct Point {
    y: i32,
    x: i32,
}

impl ops::Add for Point {
    type Output = Point;

    /// Component-wise vector addition.
    fn add(self, other: Point) -> Point {
        Point {
            y: self.y + other.y,
            x: self.x + other.x,
        }
    }
}

impl ops::Neg for Point {
    type Output = Point;

    /// Mirrors the point through the origin.
    fn neg(self) -> Point {
        Point {
            y: -self.y,
            x: -self.x,
        }
    }
}

/// Exact integer square root: the largest `r` with `r * r <= n`.
///
/// Starts from the floating-point estimate and corrects it in integer
/// arithmetic, so the result is exact for every non-negative `i32`.
/// A plain `(n as f32).sqrt().floor()` loses precision above 2^24 and
/// can be off by one, which would derail the spiral mapping below.
fn isqrt(n: i32) -> i32 {
    let n = i64::from(n);
    // i64 intermediates: (r + 1)^2 can exceed i32::MAX near the top of
    // the i32 range.
    let mut r = (n as f64).sqrt() as i64;
    while (r + 1) * (r + 1) <= n {
        r += 1;
    }
    while r * r > n {
        r -= 1;
    }
    r as i32
}

/// Maps a 1-based cell number `n` of an Ulam-style number spiral
/// (1 at the origin, winding outward) to its grid coordinates.
///
/// Non-positive `n` has no cell and maps to the origin. (The previous
/// guard only caught `n < 0`, so `n == 0` produced the bogus
/// coordinate `(0, 1)`.)
fn map_to_spiral(n: i32) -> Point {
    if n < 1 {
        return Point { y: 0, x: 0 };
    }

    // Perfect squares sit on alternating corners of the spiral, so the
    // shell is identified by the integer square root.
    let base = isqrt(n);
    let remain = n - base * base;
    let odd = base & 1 > 0;

    // Corner cell of the completed `base x base` square.
    let origin = if odd {
        Point { y: base / 2, x: base / 2 }
    } else {
        Point { y: base / 2, x: base / 2 - 1 }
    };

    // `remain` lies in 0..=2*base: step right into the next column and
    // walk up while `remain <= base`, then walk left along the new row.
    let coord = if remain == 0 {
        origin
    } else if remain <= base {
        origin + Point { y: 1 - remain, x: 1 }
    } else if remain <= base * 2 {
        origin + Point { y: -base, x: 1 } + Point { y: 0, x: 1 - (remain - base) }
    } else {
        unreachable!("remain = n - base^2 is bounded by 2 * base");
    };

    // Even shells grow out of the opposite corner, so mirror them
    // through the origin.
    if odd {
        coord
    } else {
        -coord
    }
}

/// Reads one number per stdin line and prints its spiral location and
/// Manhattan distance from the origin (AoC 2017 day 3).
fn main() {
    let stdin = io::stdin();
    for line in stdin.lock().lines() {
        let n: i32 = line.expect("Oo?").parse().expect("Not a number!");
        let coords = map_to_spiral(n);
        println!("location: {:?}", coords);
        println!("distance: {:?}", coords.y.abs() + coords.x.abs());
    }
}
true
6db8ce9855d4a59d1ce9554d0da57584cd9f9f24
Rust
fahrenkrug/rust_learn_book_chapter_16
/src/message_passing_examples.rs
UTF-8
1,740
3.28125
3
[]
no_license
use std::sync::mpsc;
use std::thread;
use std::time::Duration;

/// Runs the three channel demos (Rust book ch. 16.2) back to back.
pub fn run() {
    first_channel_example();
    multiple_values_example();
    multiple_producer_example();
}

/// A spawned thread sends one String; the caller blocks on `recv`.
fn first_channel_example() {
    let (sender, receiver) = mpsc::channel();

    thread::spawn(move || {
        // `send` moves the String into the channel, transferring
        // ownership out of this thread.
        sender.send(String::from("Hello!")).unwrap();
    });

    // `recv` blocks until the value arrives.
    println!("Got {}", receiver.recv().unwrap());
}

/// The producer emits four words one second apart; the consumer drains
/// the channel by iterating, which ends once the sender hangs up.
fn multiple_values_example() {
    let (sender, receiver) = mpsc::channel();

    thread::spawn(move || {
        for word in ["Hi", "from", "the", "thread"].iter() {
            sender.send(String::from(*word)).unwrap();
            thread::sleep(Duration::from_secs(1));
        }
        // `sender` is dropped here, closing the channel.
    });

    for message in receiver {
        println!("Got: {}", message);
    }
}

/// Two producer threads share one channel via `clone`; the receive loop
/// sees both streams interleaved and stops when every sender is gone.
fn multiple_producer_example() {
    let (sender, receiver) = mpsc::channel();
    let cloned_sender = sender.clone();

    thread::spawn(move || {
        for word in ["Hi", "from", "the", "thread"].iter() {
            sender.send(String::from(*word)).unwrap();
            thread::sleep(Duration::from_secs(1));
        }
    });

    thread::spawn(move || {
        for word in ["More", "messages", "for", "you"].iter() {
            cloned_sender.send(String::from(*word)).unwrap();
            thread::sleep(Duration::from_secs(1));
        }
    });

    for message in receiver {
        println!("Got: {}", message);
    }
}
true