blob_id
stringlengths
40
40
language
stringclasses
1 value
repo_name
stringlengths
5
140
path
stringlengths
5
183
src_encoding
stringclasses
6 values
length_bytes
int64
12
5.32M
score
float64
2.52
4.94
int_score
int64
3
5
detected_licenses
listlengths
0
47
license_type
stringclasses
2 values
text
stringlengths
12
5.32M
download_success
bool
1 class
bd353a6715e727d703335222aabf2fb2d970e266
Rust
Torrencem/dynamics_census
/src/main.rs
UTF-8
19,404
2.859375
3
[]
no_license
#![allow(unused, dead_code)] use std::str::FromStr; use std::num::ParseIntError; extern crate anyhow; use anyhow::Error; use anyhow::Context; mod sigma_invariants; use sigma_invariants::*; extern crate polynomial; extern crate num_field_quad; use polynomial::*; use num_field_quad::*; use num_field_quad::mod_p::*; use num_traits::{Zero, One}; #[derive(Clone, Copy, Debug, PartialEq, Eq)] enum ProjectivePoint { Finite(u16), Infinite, } impl FromStr for ProjectivePoint { type Err = ParseIntError; fn from_str(s: &str) -> Result<Self, Self::Err> { let coords: Vec<&str> = s.trim_matches(|p| p == '(' || p == ')') .split(':') .map(|s| s.trim()) .collect(); let x = coords[0].parse::<u16>()?; let y = coords[1].parse::<u16>()?; match y { 0 => Ok(ProjectivePoint::Infinite), _ => Ok(ProjectivePoint::Finite(x / y)), } } } #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)] struct Morphism { b: u16, c: u16, } impl FromStr for Morphism { type Err = ParseIntError; fn from_str(s: &str) -> Result<Self, Self::Err> { let coords: Vec<&str> = s.trim_matches(|p| p == '(' || p == ')' || p == 'M') .split('&') .map(|s| s.trim()) .collect(); let x = coords[0].parse::<u16>()?; let y = coords[1].parse::<u16>()?; Ok(Morphism { b: x, c: y, }) } } #[derive(Clone, Copy, Debug)] enum CriticalPointEntry { Single(u16), Double(u16, u16), } impl FromStr for CriticalPointEntry { type Err = ParseIntError; fn from_str(s: &str) -> Result<Self, Self::Err> { let coords: Vec<&str> = s.trim_matches(|p| p == '[' || p == ']') .split('&') .map(|s| s.trim()) .collect(); match coords.len() { 1 => Ok(CriticalPointEntry::Single(coords[0].parse::<u16>()?)), _ => Ok(CriticalPointEntry::Double( coords[0].parse::<u16>()?, coords[1].parse::<u16>()?)) } } } #[derive(Clone, Copy, Debug)] struct DBEntry { // The first critical point lambda_1 l1: ProjectivePoint, // l1's entry l1_entry: CriticalPointEntry, // The second critical point lambda_2 l2: ProjectivePoint, // l2's entry l2_entry: CriticalPointEntry, } impl FromStr for 
DBEntry { type Err = Error; fn from_str(s: &str) -> Result<Self, Self::Err> { let entries: Vec<&str> = s.split(",") .map(|s| s.trim()) .collect(); Ok(DBEntry { l1: ProjectivePoint::from_str(entries[1]).context("error parsing l1")?, l1_entry: CriticalPointEntry::from_str(entries[2]).context("error parsing l1_entry")?, l2: ProjectivePoint::from_str(entries[3]).context("error parsing l2")?, l2_entry: CriticalPointEntry::from_str(entries[4]).context("error parsing l2_entry")?, }) } } use std::collections::HashMap; extern crate regex; use regex::Regex; #[derive(Clone, Debug)] struct DB { // A map from primes p to a inner: HashMap<u16, HashMap<Morphism, DBEntry>>, } impl FromStr for DB { type Err = Error; fn from_str(s: &str) -> Result<Self, Self::Err> { let mut result = HashMap::new(); let p_finder = Regex::new(r"p=(\d+)").unwrap(); for prime_unit in s.split("--- ") { if prime_unit.len() == 0 { continue; } // prime_unit is of the form "p={} ---\n..." let prime_match = p_finder.captures(prime_unit).unwrap(); let prime = u16::from_str(&prime_match[1]).context("error parsing prime value")?; let mut entries_for_prime = HashMap::new(); // Populate entries_for_prime for line in prime_unit.split('\n').skip(1) { if line.len() == 0 { continue; } let entry = DBEntry::from_str(line)?; let morphism_string = line.split(',').nth(0).unwrap().trim(); let morphism = Morphism::from_str(morphism_string).context("error parsing morphism")?; entries_for_prime.insert(morphism, entry); } result.insert(prime, entries_for_prime); } Ok(DB { inner: result }) } } // From paper: Subroutine 2A fn check_rational_periods(numer: Polynomial<ZiElement<i64>>, denom: Polynomial<ZiElement<i64>>, res: ZiElement<i64>, primes: &[u16], db: &DB, crit_pt_a: QFElement<i64>, crit_pt_b: QFElement<i64>) -> bool { // NOTE! This function has not been tested yet. 
use std::collections::HashSet; assert!(crit_pt_a.is_rational()); assert!(crit_pt_b.is_rational()); // let crit_pts = critical_points(numer.clone(), denom.clone()); let mut poss_period_1 = HashSet::new(); let mut poss_period_2 = HashSet::new(); assert!(crit_pt_a.field.c == -1 && crit_pt_b.field.c == -1); for p in primes.iter() { // Reduce crit_pt_a and crit_pt_b into F_p let crit_1 = ProjectivePoint::Finite(( ZiElement { inner: QFElement { a: crit_pt_a.a, b: crit_pt_a.b, field: crit_pt_a.field, d: 1 } }.reduce_mod(*p as u32) * ModPElt { val: mod_inverse(crit_pt_a.d, *p as i64), p: *p as u32 } ).val as u16); let crit_2 = ProjectivePoint::Finite(( ZiElement { inner: QFElement { a: crit_pt_b.a, b: crit_pt_b.b, field: crit_pt_b.field, d: 1 } }.reduce_mod(*p as u32) * ModPElt { val: mod_inverse(crit_pt_b.d, *p as i64), p: *p as u32 } ).val as u16); if res.reduce_mod(*p as u32) == Zero::zero() { continue; } // reduce phi mod p let reduced_numer = Polynomial::new( { let mut res = vec![]; for entry in numer.data() { res.push((*entry).reduce_mod(*p as u32)); } res } ); let reduced_denom = Polynomial::new( { let mut res = vec![]; for entry in denom.data() { res.push((*entry).reduce_mod(*p as u32)); } res } ); let map = FiniteQuadraticMap { numer: reduced_numer, denom: reduced_denom, }; let [s1, s2, s3] = map.sigma_invariants(); let morphism: Morphism = Morphism { b: s1.val as u16, c: s2.val as u16, }; // Look up psi in the database let dbentry = db.inner.get(p).unwrap().get(&morphism).unwrap(); // Let i = 1 // Retrieve the set of possible global periods for crit_1 let possible_periods = if dbentry.l1 == crit_1 { dbentry.l1_entry } else { dbentry.l2_entry }; if poss_period_1.is_empty() { // This is the first good prime match possible_periods { CriticalPointEntry::Single(x) => { poss_period_1.insert(x); }, CriticalPointEntry::Double(x, y) => { poss_period_1.insert(x); poss_period_1.insert(y); } } } else { match possible_periods { CriticalPointEntry::Single(x) => { 
poss_period_1.retain(|&val| val == x); }, CriticalPointEntry::Double(x, y) => { poss_period_1.retain(|&val| val == x || val == y); } } } if poss_period_1.is_empty() { return false; } // Let i = 2 // Retrieve the set of possible global periods for crit_1 let possible_periods = if dbentry.l1 == crit_2 { dbentry.l1_entry } else { dbentry.l2_entry }; if poss_period_2.is_empty() { // This is the first good prime match possible_periods { CriticalPointEntry::Single(x) => { poss_period_2.insert(x); }, CriticalPointEntry::Double(x, y) => { poss_period_2.insert(x); poss_period_2.insert(y); } } } else { match possible_periods { CriticalPointEntry::Single(x) => { poss_period_2.retain(|&val| val == x); }, CriticalPointEntry::Double(x, y) => { poss_period_2.retain(|&val| val == x || val == y); } } } if poss_period_2.is_empty() { return false; } } true } // From Paper: Subroutine 2B fn check_irrational_periods(numer: Polynomial<ZiElement<i64>>, denom: Polynomial<ZiElement<i64>>, res: ZiElement<i64>, primes: &[u16], db: &DB) -> bool { use std::collections::HashSet; let mut poss_per = HashSet::new(); for p in primes.iter() { if res.reduce_mod(*p as u32) == Zero::zero() { continue; } // reduce phi mod p let reduced_numer = Polynomial::new( { let mut res = vec![]; for entry in numer.data() { res.push((*entry).reduce_mod(*p as u32)); } res } ); let reduced_denom = Polynomial::new( { let mut res = vec![]; for entry in denom.data() { res.push((*entry).reduce_mod(*p as u32)); } res } ); // Make sure our map is degree 2 before computing any further if reduced_numer.degree() != 2 || reduced_denom.degree() != 2 { continue; } let map = FiniteQuadraticMap { numer: reduced_numer, denom: reduced_denom, }; let [s1, s2, s3] = map.sigma_invariants(); let morphism: Morphism = Morphism { b: s1.val as u16, c: s2.val as u16, }; // Look up psi in the database let dbentry = db.inner .get(p).unwrap() .get(&morphism); if dbentry.is_none() { // Then our map is conjugate to one of lower degree // so skip it 
continue; } let dbentry = dbentry.unwrap(); if poss_per.is_empty() { // This is the first good prime match dbentry.l1_entry { CriticalPointEntry::Single(x) => { poss_per.insert(x); }, CriticalPointEntry::Double(x, y) => { poss_per.insert(x); poss_per.insert(y); } } match dbentry.l2_entry { CriticalPointEntry::Single(x) => { poss_per.retain(|&val| val == x); }, CriticalPointEntry::Double(x, y) => { poss_per.retain(|&val| val == x || val == y); } } } else { // This is not the first good prime match dbentry.l1_entry { CriticalPointEntry::Single(x) => { poss_per.retain(|&val| val == x); }, CriticalPointEntry::Double(x, y) => { poss_per.retain(|&val| val == x || val == y); } } match dbentry.l2_entry { CriticalPointEntry::Single(x) => { poss_per.retain(|&val| val == x); }, CriticalPointEntry::Double(x, y) => { poss_per.retain(|&val| val == x || val == y); } } } if poss_per.is_empty() { return false; } } true } struct FindPCFMaps<'a, Iter> where Iter: Iterator<Item = (QFElement<i64>, QFElement<i64>)> { db: DB, primes: Vec<u16>, iter: &'a mut Iter, } impl<'a, Iter: Iterator<Item = (QFElement<i64>, QFElement<i64>)>> Iterator for FindPCFMaps<'a, Iter> { type Item = (Polynomial<ZiElement<i64>>, Polynomial<ZiElement<i64>>); // From paper: Algorithm 2 fn next(&mut self) -> Option<Self::Item> { while let Some((sig_1, sig_2)) = self.iter.next() { assert!(sig_1.field.c == -1 && sig_2.field.c == -1); // Create the rational map. 
Some intermediary steps to clear denominators let two: QFElement<i64> = QFElement::from_parts(2, 0, 1, sig_1.field); let b_c: QFElement<i64> = two - sig_1; let e: QFElement<i64> = two + sig_1; let f: QFElement<i64> = two - sig_1 - sig_2; let fac = lcm(lcm(b_c.d, e.d), f.d); let a: ZiElement<i64> = ZiElement::from_parts(2 * fac, 0); let b_c: ZiElement<i64> = ZiElement::from_parts(b_c.a * fac / b_c.d, b_c.b * fac / b_c.d); let d: ZiElement<i64> = ZiElement::from_parts(-fac, 0); let e: ZiElement<i64> = ZiElement::from_parts(e.a * fac / e.d, e.b * fac / e.d); let f: ZiElement<i64> = ZiElement::from_parts(f.a * fac / f.d, f.b * fac / f.d); let numer = Polynomial::new(vec![b_c.clone(), b_c, a]); let denom = Polynomial::new(vec![f, e, d]); let res = if numer.degree() < denom.degree() { resultant(denom.clone(), numer.clone()) } else { resultant(numer.clone(), denom.clone()) }; if res == Zero::zero() { continue; } let crit_pts = critical_points(numer.clone(), denom.clone()); let rational_crit_pts: Option<(QFElement<i64>, QFElement<i64>)> = match crit_pts { CriticalPoints::Two(crit_1, crit_2) => { // crit_n is a QFElement<ZiElement<i64>> but we just want a QFElement<i64> // but if crit_n.field.c is_square, we can take a square root and // simplify to a QFElement<i64> where c == -1. // TODO: This has the potential for overflow problems! I don't know how to fix // them... match (crit_1.field.c.square_root(), crit_2.field.c.square_root()) { (Some(c1s), Some(c2s)) => { Some(( (crit_1.a.inner + crit_1.b.inner * c1s.inner) / crit_1.d.inner, (crit_2.a.inner + crit_2.b.inner * c1s.inner) / crit_2.d.inner, )) }, _ => { // At least one of the critical points is irrational None } } }, _ => { None } }; // Should we use 2A or 2B? 
match rational_crit_pts { Some((crit_1, crit_2)) => { // 2A if check_rational_periods(numer.clone(), denom.clone(), res, &self.primes, &self.db, crit_1, crit_2) { return Some((numer.clone(), denom.clone())); } }, None => { // 2B if check_irrational_periods(numer.clone(), denom.clone(), res, &self.primes, &self.db) { return Some((numer.clone(), denom.clone())); } } } } None } } use std::env::args; use std::fs::read_to_string; use anyhow::{Result, bail}; fn main() -> Result<()> { let mut args = args(); if args.len() != 2 { bail!("Expected one command line argument for the database file location. Exiting!"); } let dbase_filename = args.nth(1).unwrap(); let dbase_file_data = read_to_string(dbase_filename)?; // In the outputed data, there are commas between square brackets // We want to change these to &'s. let fix_bracketed_commas_re = Regex::new(r"\[([^\]]+),([^\]]+)\]")?; let fix_bracketed_commas_data = fix_bracketed_commas_re.replace_all(&dbase_file_data, "[$1 & $2]"); // Same thing for M(_, _)'s let fix_morphism_commas_re = Regex::new(r"\(([^\)]+), ([^\)]+)\)")?; let fix_morphism_commas_data = fix_morphism_commas_re.replace_all(&fix_bracketed_commas_data, "($1 & $2)"); // The data also has blank lines, let's get rid of those let dbase_data: String = fix_morphism_commas_data .split('\n') .filter(|line| line.trim().len() != 0) .collect::<Vec<&str>>() .join("\n"); println!("Beginning to parse database..."); // Finally construct the database let mut dbase = DB::from_str(&dbase_data)?; // Filter out primes p such that (-1).sqrt() doesn't exist mod p dbase.inner.retain(|&p, _val| { let p_elt = ModPElt { val: (p - 1) as i64, p: p as u32 }; !p_elt.sqrt().is_none() }); println!("Parsing complete! 
Running algorithm..."); // We want the list of primes that are in the database as well let mut p_list: Vec<u16> = dbase .inner .keys() .cloned() .filter(|val| { // 281 was not finished in the data collection *val != 281 }) .collect(); p_list.sort(); // TODO: Write an iterator for "fake bounded height" to give to FindPCFMaps // and run FindPCFMaps here let mut values = (-100..100) .into_iter() .map(move |a| { (-10..10) .into_iter() .map(move |b| { (-10..10) .into_iter() .map(move |c| { (-10..10) .into_iter() .map(move |d| { let a = QFElement::from_parts(a, 0, b, QuadraticField::from_c(-1)); let b = QFElement::from_parts(c, 0, d, QuadraticField::from_c(-1)); (a, b) }) .collect::<Vec<_>>() }) .flatten() }) .flatten() }) .flatten(); let pcf_maps = FindPCFMaps { db: dbase, primes: p_list, iter: &mut values, }; for pcf_map in pcf_maps { dbg!(pcf_map); } Ok(()) }
true
e315bc7b355a9280d5853931970567ab208e9df5
Rust
japaric/compiler-builtins
/src/float/pow.rs
UTF-8
1,177
2.78125
3
[ "NCSA", "MIT" ]
permissive
// Generates a `__powi*` intrinsic: float base raised to an integer power via
// binary exponentiation, mirroring compiler-rt's `__powisf2`/`__powidf2`.
macro_rules! pow {
    ($intrinsic:ident: $fty:ty, $ity:ident) => {
        /// Returns `a` raised to the power `b`
        #[cfg_attr(not(test), no_mangle)]
        pub extern "C" fn $intrinsic(a: $fty, b: $ity) -> $fty {
            let (mut a, mut b) = (a, b);
            // A negative exponent is handled by computing the positive-power
            // product and taking the reciprocal at the end.
            let recip = b < 0;
            let mut r: $fty = 1.0;
            loop {
                // `b & 1` tests the low bit of the two's-complement value,
                // which is the parity of `b` for negative values as well.
                if (b & 1) != 0 {
                    r *= a;
                }
                // `sdiv!` is the project's signed-division helper; presumably
                // it truncates toward zero so the magnitude of `b` shrinks and
                // the loop terminates for negative exponents too.
                b = sdiv!($ity, b, 2);
                if b == 0 {
                    break;
                }
                // Square the base each round: a, a^2, a^4, ...
                a *= a;
            }
            if recip {
                1.0 / r
            } else {
                r
            }
        }
    }
}

pow!(__powisf2: f32, i32);
pow!(__powidf2: f64, i32);

#[cfg(test)]
mod tests {
    use qc::{I32, F32, F64};

    // Project quickcheck-style harness (`check!` macro from the test support
    // code): feeds random inputs through the extern-"C" entry points.
    check! {
        fn __powisf2(f: extern fn(f32, i32) -> f32, a: F32, b: I32) -> Option<F32> {
            Some(F32(f(a.0, b.0)))
        }

        fn __powidf2(f: extern fn(f64, i32) -> f64, a: F64, b: I32) -> Option<F64> {
            Some(F64(f(a.0, b.0)))
        }
    }
}
true
c76411da34e9e4fdb92d54f82abedc6ad02c3d11
Rust
bagelboy/adventofcode
/2017/day01/src/main.rs
UTF-8
1,756
3.3125
3
[]
no_license
use std::fs::File;
use std::io::Read;

/// Sums every digit that equals the digit `offset` positions ahead of it,
/// treating the slice as circular (Advent of Code 2017, day 1).
///
/// Returns 0 for an empty slice. Panics (like the original `expect`) if a
/// matched character is not a decimal digit.
fn captcha_sum(input: &[char], offset: usize) -> u32 {
    input.iter().enumerate().fold(0, |sum, (i, c)| {
        // The closure never runs for empty input, so the modulo is safe.
        if *c == input[(i + offset) % input.len()] {
            sum + c.to_digit(10).expect("this should be a number")
        } else {
            sum
        }
    })
}

/// Part 1: compare each digit with its immediate (circular) successor.
fn solve_captcha(input: &[char]) -> u32 {
    captcha_sum(input, 1)
}

/// Part 2: compare each digit with the digit halfway around the list.
fn solve_new_captcha(input: &[char]) -> u32 {
    captcha_sum(input, input.len() / 2)
}

/// Splits the puzzle input into individual characters.
fn get_chars(input: &str) -> Vec<char> {
    input.chars().collect()
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn solve_captcha() {
        assert_eq!(3, super::solve_captcha(&get_chars("1122")));
        assert_eq!(4, super::solve_captcha(&get_chars("1111")));
        assert_eq!(0, super::solve_captcha(&get_chars("1234")));
        assert_eq!(9, super::solve_captcha(&get_chars("91212129")));
    }

    #[test]
    fn solve_new_captcha() {
        assert_eq!(6, super::solve_new_captcha(&get_chars("1212")));
        assert_eq!(0, super::solve_new_captcha(&get_chars("1221")));
        assert_eq!(4, super::solve_new_captcha(&get_chars("123425")));
        assert_eq!(12, super::solve_new_captcha(&get_chars("123123")));
        assert_eq!(4, super::solve_new_captcha(&get_chars("12131415")));
    }
}

/// Reads the puzzle input from the file `input` and prints both answers.
fn main() {
    let mut input = String::new();
    if File::open("input")
        .expect("cannot open input")
        .read_to_string(&mut input)
        .is_ok()
    {
        // `trim_end` replaces the deprecated `trim_right`.
        let chars = get_chars(input.trim_end());
        println!("captcha: {}", solve_captcha(&chars));
        println!("new captcha: {}", solve_new_captcha(&chars));
    }
}
true
4257704a3350fb5ea5b76927a2db203dead0fd6b
Rust
jkristell/infrared
/src/protocol/rc5/encoder.rs
UTF-8
1,046
2.71875
3
[ "MIT", "Apache-2.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
use crate::{protocol::Rc5, sender::ProtocolEncoder};

//TODO: Check Overflow
// Converts the sender's timer frequency `f` into the number of timer ticks in
// one RC-5 half-bit (the 889 comes from the protocol's 889 µs half-bit
// period: ticks = f * 889 / 1_000_000).
const fn calc_freq(mut f: u32) -> u32 {
    let mut div = 1_000_000;
    // Pre-scale large frequencies so `889 * f` is less likely to overflow
    // (hence the TODO above — this is not a complete overflow guard).
    if f > 1000 {
        f /= 1000;
        div /= 1000;
    }
    (889 * f) / div
}

impl<const FREQ: u32> ProtocolEncoder<FREQ> for Rc5 {
    // Precomputed per-frequency data: a single half-bit length in ticks.
    type EncoderData = [u32; 1];
    const DATA: Self::EncoderData = [calc_freq(FREQ)];

    // Fills `buf` with level durations for the 13 bits after the implicit
    // leading one, and returns the number of slots written. Bi-phase style:
    // when consecutive bits are equal two half-bit durations are emitted,
    // when they differ the two halves merge into one full-bit duration.
    fn encode(cmd: &Self::Cmd, buf: &mut [u32]) -> usize {
        // Command as bits
        let bits = cmd.pack();
        let rc5len = <Self as ProtocolEncoder<FREQ>>::DATA[0];

        // First bit is always one
        buf[0] = 0;
        let mut prev = true;
        let mut index = 1;

        // Walk the remaining 13 bits from most- to least-significant.
        for b in 0..13 {
            let cur = bits & (1 << (12 - b)) != 0;
            if prev == cur {
                buf[index] = rc5len;
                buf[index + 1] = rc5len;
                index += 2;
            } else {
                buf[index] = rc5len * 2;
                index += 1;
            }
            prev = cur;
        }
        index
    }
}
true
e659fcc05f17f1bc6f59bd3b80708d1faba86b5e
Rust
qingzhu521/paradfs
/src/utils/load_binary.rs
UTF-8
1,537
2.75
3
[]
no_license
use crate::structure::{Graph, AdjacentList};
use crate::common::io::*;
use std::time::Instant;

/// Loads a graph from its binary on-disk layout under directory `dir`:
/// the forward adjacency list is read from `<dir>/adj` and the reverse one
/// from `<dir>/rev_adj`. Progress and total load time go to stdout.
pub fn load_binary_graph(dir: String) -> Graph {
    println!("start to load binary graph");
    let now = Instant::now();
    let adj_path = fs::create_path(&vec![dir.as_str(), "adj"]);
    let rev_adj_path = fs::create_path(&vec![dir.as_str(), "rev_adj"]);
    let adj = read_data(adj_path);
    let rev_adj = read_data(rev_adj_path);
    println!("finish to load binary graph, cost {:?}", now.elapsed());
    Graph::new(adj, rev_adj)
}

/// Streams one adjacency file into an `AdjacentList`.
///
/// Record layout (from the reads below): a 12-byte header of `src_id: i64`
/// plus `count: u32`, followed by `count` neighbor entries of 8 bytes each
/// (presumably i64 node ids — element type is inferred via `AdjacentList`;
/// TODO confirm against the writer).
fn read_data(path: String) -> AdjacentList {
    // 128 MiB staging buffer, refilled from the file channel as needed.
    let mut buf = ByteBuffer::new(128<<20);
    let mut fc = FileChannel::open(path);
    let mut ret = AdjacentList::new();
    fc.read(&mut buf).unwrap();
    // flip() switches the buffer from write (fill) mode to read (drain) mode.
    buf.flip();
    let mut cnt = 0;
    loop {
        // Refill before a record header; a zero-byte read means EOF.
        if buf.remaining() < 12 {
            buf.compact();
            let size = fc.read(&mut buf).unwrap();
            buf.flip();
            if size == 0 {
                break;
            }
        }
        let src_id = buf.get::<i64>().unwrap();
        let count = buf.get::<u32>().unwrap();
        let mut tmp = Vec::with_capacity(count as usize);
        for _ in 0..count {
            // Refill mid-record; NOTE(review): unlike the header refill, a
            // short/zero read here is not detected, so a truncated file would
            // panic on the `unwrap` below rather than stop cleanly — verify.
            if buf.remaining() < 8 {
                buf.compact();
                fc.read(&mut buf).unwrap();
                buf.flip();
            }
            tmp.push(buf.get().unwrap());
        }
        ret.insert(src_id, tmp);
        cnt += 1;
        // Coarse progress indicator for large graphs.
        if cnt % 1000000 == 0 {
            println!("load {} nodes", cnt);
        }
    }
    ret
}
true
413efe8408887b85a3d7064d2e41a7ebb5d063a2
Rust
suryapandian/rust
/examples/05.Ownership/4.copy.rs
UTF-8
211
3.21875
3
[]
no_license
/// Demonstrates that dereferencing a `&mut` to a `Copy` type yields an
/// independent copy: a later write through the reference does not change
/// the copied value. Prints `42` then `13`.
fn main() {
    let mut original = 42;
    let handle = &mut original;
    // Copy the current value out through the reference.
    let snapshot = *handle;
    // Now mutate the owner through the same reference.
    *handle = 13;
    println!("{}", snapshot);
    println!("{}", original);
}
true
56927c9523914ae467fb93286946830f6392fe6d
Rust
trustwallet/wallet-core
/codegen-v2/src/tests/mod.rs
UTF-8
4,484
2.625
3
[ "BSD-3-Clause", "LicenseRef-scancode-protobuf", "LGPL-2.1-only", "Swift-exception", "MIT", "BSL-1.0", "Apache-2.0" ]
permissive
// Copyright © 2017-2023 Trust Wallet. // // This file is part of Trust. The full Trust copyright notice, including // terms governing use, modification, and redistribution, is contained in the // file LICENSE at the root of the source code distribution tree. use crate::codegen::swift::{render_to_strings, RenderIntput}; use crate::manifest::parse_str; /// Convenience function. fn create_intput(yaml: &str) -> RenderIntput { let file_info = parse_str(yaml).unwrap(); RenderIntput { file_info, struct_template: include_str!("../codegen/swift/templates/struct.hbs"), enum_template: include_str!("../codegen/swift/templates/enum.hbs"), extension_template: include_str!("../codegen/swift/templates/extension.hbs"), proto_template: include_str!("../codegen/swift/templates/proto.hbs"), partial_init_template: include_str!("../codegen/swift/templates/partial_init.hbs"), partial_func_tempalte: include_str!("../codegen/swift/templates/partial_func.hbs"), partial_prop_tempalte: include_str!("../codegen/swift/templates/partial_prop.hbs"), } } // Convenience function: runs the codegen on the given `input` and compares it // with the `expected` value. Expects a single, rendered file as output. 
fn render_and_compare_struct(input: &str, expected: &str) { let input = create_intput(input); let rendered = render_to_strings(input).unwrap(); assert_eq!(rendered.structs.len(), 1); assert!(rendered.enums.is_empty()); assert!(rendered.extensions.is_empty()); assert!(rendered.protos.is_empty()); let (_name, output) = &rendered.structs[0]; println!("{output}"); assert_eq!(output, expected); } fn render_and_compare_enum(input: &str, expected: &str) { let input = create_intput(input); let rendered = render_to_strings(input).unwrap(); assert!(rendered.structs.is_empty()); assert_eq!(rendered.enums.len(), 1); assert!(rendered.extensions.is_empty()); assert!(rendered.protos.is_empty()); let (_name, output) = &rendered.enums[0]; assert_eq!(output, expected); } #[test] fn single_struct() { const INPUT: &str = include_str!("samples/struct.input.yaml"); const EXPECTED: &str = include_str!("samples/struct.output.swift"); render_and_compare_struct(INPUT, EXPECTED); } #[test] fn single_class() { const INPUT: &str = include_str!("samples/class.input.yaml"); const EXPECTED: &str = include_str!("samples/class.output.swift"); render_and_compare_struct(INPUT, EXPECTED); } #[test] fn private() { const INPUT: &str = include_str!("samples/private_class.input.yaml"); const EXPECTED: &str = include_str!("samples/private_class.output.swift"); render_and_compare_struct(INPUT, EXPECTED); } #[test] fn optional() { const INPUT: &str = include_str!("samples/optional.input.yaml"); const EXPECTED: &str = include_str!("samples/optional.output.swift"); render_and_compare_struct(INPUT, EXPECTED); } #[test] fn enum_with_description() { const INPUT: &str = include_str!("samples/enum.input.yaml"); const EXPECTED: &str = include_str!("samples/enum.output.swift"); render_and_compare_enum(INPUT, EXPECTED); } #[test] fn privat_enum_with_description() { const INPUT: &str = include_str!("samples/enum_private.input.yaml"); const EXPECTED: &str = include_str!("samples/enum_private.output.swift"); 
render_and_compare_enum(INPUT, EXPECTED); } #[test] fn enum_with_extension() { const INPUT: &str = include_str!("samples/enum_extension.input.yaml"); const EXPECTED_ENUM: &str = include_str!("samples/enum.output.swift"); const EXPECTED_EXTENSION: &str = include_str!("samples/enum_extension.output.swift"); let input = create_intput(INPUT); let rendered = render_to_strings(input).unwrap(); assert!(rendered.structs.is_empty()); assert_eq!(rendered.enums.len(), 1); assert_eq!(rendered.extensions.len(), 1); assert!(rendered.protos.is_empty()); // Check generated enum. let (_name, output) = &rendered.enums[0]; assert_eq!(output, EXPECTED_ENUM); // Check generated extension. let (_name, output) = &rendered.extensions[0]; assert_eq!(output, EXPECTED_EXTENSION); } #[test] fn non_associated() { const INPUT: &str = include_str!("samples/non-associated.input.yaml"); const EXPECTED: &str = include_str!("samples/non-associated.output.swift"); render_and_compare_struct(INPUT, EXPECTED); }
true
0e3f99c0357cafeaf169025f87bdb67a1b60961e
Rust
crides/keebopt
/src/data.rs
UTF-8
3,419
2.859375
3
[ "MIT" ]
permissive
#![allow(dead_code)] use std::collections::BTreeMap; use std::fs::File; use std::io::{BufRead, BufReader}; use std::path::Path; use nom::{ branch::alt, bytes::complete::tag, character::complete::{alpha1, char, digit1, none_of, one_of, u32}, combinator::{eof, map, opt, value}, multi::many0, sequence::{delimited, tuple}, Finish, }; type ParseResult<'a, O> = nom::IResult<&'a str, O>; #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub enum ModName { Shift, Ctrl, Alt, Super, AltGr, } #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct ModEvent { name: ModName, left: bool, press: bool, } #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub enum Key { Char(char), Key(u32), Special(String), } #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub enum RawEvent { Key(Key), Mod(ModEvent), } macro_rules! parsers { ($($name:ident: $ret:ty = $body:expr)*) => { $(fn $name(s: &str) -> ParseResult<$ret> { $body(s) })* }; } parsers! { mod_name: ModName = alt(( value(ModName::Alt, tag("Alt")), value(ModName::Shift, tag("Shft")), value(ModName::Super, tag("Meta")), value(ModName::Ctrl, tag("Ctrl")), )) mod_ev: ModEvent = map(delimited(char('<'), tuple((opt(char('/')), one_of("LR"), mod_name)), char('>')), |(r, lr, name)| { ModEvent { press: r.is_none(), left: lr == 'L', name } }) key: Key = alt(( map(delimited(char('<'), alpha1, char('>')), |s: &str| Key::Special(s.into())), )) repeat: usize = map(delimited(tag("<#+"), u32, char('>')), |i| i as usize) any: Key = map(none_of("\n\t"), |c| Key::Char(c)) keys: Vec<RawEvent> = many0(alt(( map(key, RawEvent::Key), map(mod_ev, RawEvent::Mod), map(any, RawEvent::Key), ))) line: Vec<RawEvent> = delimited(tuple((digit1, char('-'), digit1, char('-'), digit1, char(' '), digit1, char(':'), digit1, char(':'), digit1, one_of("+-"), digit1, tag(" > "))), keys, eof) } #[derive(Clone, Debug)] struct Mod { name: ModName, left: bool, } #[derive(Clone, Debug)] enum SingleEvent { Key(Key), Moded(Vec<Mod>, Key), } #[derive(Clone, Debug)] enum Event { 
Single(SingleEvent), Repeated(SingleEvent, usize), } pub fn parse_logkeys(file: impl AsRef<Path>) -> Vec<RawEvent> { BufReader::new(File::open(file.as_ref()).expect("input file")) .lines() .filter_map(|l| { let l = l.unwrap(); l.starts_with("20").then(|| line(&l).finish().unwrap().1) }) .flatten() .collect() } pub fn gram_chars(evs: impl Iterator<Item = RawEvent>, len: usize) -> BTreeMap<String, usize> { let mut grams = BTreeMap::new(); let keys: Vec<_> = evs .filter_map(|e| { if let RawEvent::Key(Key::Char(c)) = e { Some(c) } else { None } }) .collect(); dbg!(keys.len()); for win in keys.windows(len) { grams .entry(win.iter().copied().collect()) .and_modify(|c| *c += 1) .or_insert(1); } dbg!(grams.len()); grams } #[test] fn simple() { assert_eq!( line("2023-02-27 22:27:37-0600 > e /var/lo<Tab><Tab><Tab><Tab>/<Tab>lo<Tab>") .finish() .unwrap() .1 .len(), 18 ); }
true
2763691d1bbf06993f713aaae13e014f72e82785
Rust
egorgrachev/Exercism
/rust/raindrops/src/lib.rs
UTF-8
391
3.140625
3
[]
no_license
/// Converts `n` to its raindrop sounds: "Pling" when divisible by 3,
/// "Plang" by 5, "Plong" by 7 (concatenated in that order). When `n` has
/// none of those factors, the decimal representation of `n` is returned.
pub fn raindrops(n: u32) -> String {
    // Table of (factor, sound) pairs, checked in order.
    let sounds: String = [(3, "Pling"), (5, "Plang"), (7, "Plong")]
        .iter()
        .filter(|(factor, _)| n % factor == 0)
        .map(|(_, sound)| *sound)
        .collect();

    if sounds.is_empty() {
        n.to_string()
    } else {
        sounds
    }
}
true
a7e97d628819dc9e1469b6ce02c3c7beea43b339
Rust
leocavalcante/list-load
/src/lib.rs
UTF-8
669
2.6875
3
[ "MIT" ]
permissive
use std::error::Error;
use s3::bucket::Bucket;
use s3::credentials::Credentials;
use s3::region::Region;

/// Catch-all result alias: defaults the success type to `()` and boxes any
/// error as a trait object.
pub type YouCanDoIt<T = ()> = Result<T, Box<dyn Error>>;

/// Builds an S3 `Bucket` handle from environment variables:
/// `S3_BUCKET`, `S3_ACCESS_KEY`, `S3_SECRET` and `S3_ENDPOINT`.
///
/// The region label is hard-coded to "us-east-1" but the endpoint is custom,
/// so this works against S3-compatible services too. Fails if any of the
/// variables is unset or not valid Unicode.
pub fn bucket() -> YouCanDoIt<Bucket> {
    let bucket_name = std::env::var("S3_BUCKET")?;
    let s3_access_key = std::env::var("S3_ACCESS_KEY")?;
    let s3_secret = std::env::var("S3_SECRET")?;
    // Static key + secret only; no session token or security token.
    let s3_credentials = Credentials::new(Some(s3_access_key), Some(s3_secret), None, None);
    let s3_endpoint = std::env::var("S3_ENDPOINT")?;
    let region = Region::Custom { region: "us-east-1".to_string(), endpoint: s3_endpoint };
    Ok(Bucket::new(bucket_name.as_ref(), region, s3_credentials)?)
}
true
50cec877e6f3645917510f6b71ea77faae563c61
Rust
chaaz/adventofcode_2018
/day_06/src/part1.rs
UTF-8
2,590
3.140625
3
[]
no_license
use std::ops::RangeInclusive;
use std::cmp::Ordering;

/// Advent of Code 2018 day 6 part 1: finds the largest finite region of
/// points closest (Manhattan distance) to a single input coordinate, and
/// prints its index and area.
pub fn run() {
    let content = include_str!("input.txt").trim().split("\n");
    let points: Vec<_> = content.map(|line| Point::from_line(line)).collect();
    let rect = Rect::min_bounds(&points);
    let mut total = vec![(0u32, false); points.len()]; // (area, infinite)
    find_totals(&points, &rect, &mut total);
    // Pick the maximum area among finite regions: any region flagged
    // infinite orders below every finite one, so it is never selected
    // unless all regions are infinite.
    let best = total.iter().enumerate().max_by(|(_, (a0, i0)), (_, (a1, i1))| {
        match (i0, i1) {
            (true, true) => Ordering::Equal,
            (true, false) => Ordering::Less,
            (false, true) => Ordering::Greater,
            _ => a0.cmp(a1)
        }
    }).unwrap();
    println!("best: ({}) = {}", best.0, (best.1).0);
}

/// For every cell in `rect`, credits the uniquely nearest input point with
/// one unit of area in `totals`; ties credit nobody. A region touching the
/// bounding rectangle's edge is marked infinite (it would extend forever).
fn find_totals(pts: &Vec<Point>, rect: &Rect, totals: &mut Vec<(u32, bool)>) {
    for y in rect.y_range() {
        for x in rect.x_range() {
            // Track the closest point seen so far and whether it is tied.
            let (mut min_dist, mut min_i, mut tie) = (std::u32::MAX, 0, false);
            for i in 0 .. pts.len() {
                let pt = &pts[i];
                let dist = pt.dist(x, y);
                if dist < min_dist {
                    min_dist = dist;
                    min_i = i;
                    tie = false;
                } else if dist == min_dist {
                    tie = true;
                }
            }
            if !tie {
                let infinite = rect.is_boundry(x, y);
                totals[min_i] = (totals[min_i].0 + 1, totals[min_i].1 || infinite);
            }
        }
    }
}

/// An input coordinate.
struct Point { x: u32, y: u32 }

impl Point {
    /// Parses a "x, y" input line; panics on malformed input.
    pub fn from_line(line: &str) -> Point {
        let mut xy = line.split(",");
        let x = xy.next().unwrap().trim().parse().unwrap();
        let y = xy.next().unwrap().trim().parse().unwrap();
        Point { x, y }
    }

    /// Manhattan distance from this point to (x, y), using explicit
    /// branches to avoid unsigned underflow.
    pub fn dist(&self, x: u32, y: u32) -> u32 {
        let dist_x = if x > self.x { x - self.x } else { self.x - x };
        let dist_y = if y > self.y { y - self.y } else { self.y - y };
        dist_x + dist_y
    }
}

/// Inclusive axis-aligned bounding rectangle of the input points.
struct Rect { min_x: u32, min_y: u32, max_x: u32, max_y: u32 }

impl Rect {
    /// Smallest rectangle containing all `points`.
    pub fn min_bounds(points: &Vec<Point>) -> Rect {
        let mut min_x: u32 = std::u32::MAX;
        let mut min_y: u32 = std::u32::MAX;
        let mut max_x: u32 = 0;
        let mut max_y: u32 = 0;
        for pt in points {
            if pt.x < min_x { min_x = pt.x; }
            if pt.y < min_y { min_y = pt.y; }
            if pt.x > max_x { max_x = pt.x; }
            if pt.y > max_y { max_y = pt.y; }
        }
        Rect { min_x, min_y, max_x, max_y }
    }

    /// Inclusive range of x coordinates covered by this rectangle.
    pub fn x_range(&self) -> RangeInclusive<u32> { self.min_x ..= self.max_x }

    /// Inclusive range of y coordinates covered by this rectangle.
    pub fn y_range(&self) -> RangeInclusive<u32> { self.min_y ..= self.max_y }

    /// True when (x, y) lies on the rectangle's edge.
    pub fn is_boundry(&self, x: u32, y: u32) -> bool {
        x == self.min_x || x == self.max_x || y == self.min_y || y == self.max_y
    }
}
true
c875da7f6b1e91afabe15623fa504c9b7a01c1de
Rust
DryDish/Rust
/main/src/main.rs
UTF-8
469
3.125
3
[]
no_license
use logger::{SIZE, init_logger, error, info, warn}; #[derive(Debug)] struct ObjectThing { name: String, age: u8 } fn main() { init_logger(); let thing = ObjectThing { name: "Peter".to_string(), age: 18 }; info!("Hello there"); info!("Hello", "there!"); warn!("Warning you about stuff"); warn!("Warning you about this:", thing); error!("Big problems my mans!"); error!("Here is your problem :", thing); }
true
87643c9c762af960a256df88e92daab697e6b95d
Rust
ticki/kernel
/kernel/scheme/pipe.rs
UTF-8
4,669
2.53125
3
[ "MIT" ]
permissive
use alloc::arc::{Arc, Weak}; use collections::{BTreeMap, VecDeque}; use core::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT, Ordering}; use spin::{Mutex, Once, RwLock, RwLockReadGuard, RwLockWriteGuard}; use syscall::error::{Error, Result, EBADF, EPIPE}; use syscall::scheme::Scheme; /// Pipes list pub static PIPE_SCHEME_ID: AtomicUsize = ATOMIC_USIZE_INIT; static PIPE_NEXT_ID: AtomicUsize = ATOMIC_USIZE_INIT; static PIPES: Once<RwLock<(BTreeMap<usize, PipeRead>, BTreeMap<usize, PipeWrite>)>> = Once::new(); /// Initialize pipes, called if needed fn init_pipes() -> RwLock<(BTreeMap<usize, PipeRead>, BTreeMap<usize, PipeWrite>)> { RwLock::new((BTreeMap::new(), BTreeMap::new())) } /// Get the global pipes list, const fn pipes() -> RwLockReadGuard<'static, (BTreeMap<usize, PipeRead>, BTreeMap<usize, PipeWrite>)> { PIPES.call_once(init_pipes).read() } /// Get the global schemes list, mutable fn pipes_mut() -> RwLockWriteGuard<'static, (BTreeMap<usize, PipeRead>, BTreeMap<usize, PipeWrite>)> { PIPES.call_once(init_pipes).write() } pub fn pipe(_flags: usize) -> (usize, usize) { let mut pipes = pipes_mut(); let read_id = PIPE_NEXT_ID.fetch_add(1, Ordering::SeqCst); let read = PipeRead::new(); let write_id = PIPE_NEXT_ID.fetch_add(1, Ordering::SeqCst); let write = PipeWrite::new(&read); pipes.0.insert(read_id, read); pipes.1.insert(write_id, write); (read_id, write_id) } pub struct PipeScheme; impl Scheme for PipeScheme { fn dup(&self, id: usize) -> Result<usize> { let mut pipes = pipes_mut(); let read_option = pipes.0.get(&id).map(|pipe| pipe.clone()); if let Some(pipe) = read_option { let pipe_id = PIPE_NEXT_ID.fetch_add(1, Ordering::SeqCst); pipes.0.insert(pipe_id, pipe); return Ok(pipe_id); } let write_option = pipes.1.get(&id).map(|pipe| pipe.clone()); if let Some(pipe) = write_option { let pipe_id = PIPE_NEXT_ID.fetch_add(1, Ordering::SeqCst); pipes.1.insert(pipe_id, pipe); return Ok(pipe_id); } Err(Error::new(EBADF)) } fn read(&self, id: usize, buf: &mut [u8]) -> 
Result<usize> { let pipe_option = { let pipes = pipes(); pipes.0.get(&id).map(|pipe| pipe.clone()) }; if let Some(pipe) = pipe_option { pipe.read(buf) } else { Err(Error::new(EBADF)) } } fn write(&self, id: usize, buf: &[u8]) -> Result<usize> { let pipe_option = { let pipes = pipes(); pipes.1.get(&id).map(|pipe| pipe.clone()) }; if let Some(pipe) = pipe_option { pipe.write(buf) } else { Err(Error::new(EBADF)) } } fn fsync(&self, _id: usize) -> Result<usize> { Ok(0) } fn close(&self, id: usize) -> Result<usize> { let mut pipes = pipes_mut(); drop(pipes.0.remove(&id)); drop(pipes.1.remove(&id)); Ok(0) } } /// Read side of a pipe #[derive(Clone)] pub struct PipeRead { vec: Arc<Mutex<VecDeque<u8>>> } impl PipeRead { pub fn new() -> Self { PipeRead { vec: Arc::new(Mutex::new(VecDeque::new())) } } fn read(&self, buf: &mut [u8]) -> Result<usize> { if buf.is_empty() || (Arc::weak_count(&self.vec) == 0 && self.vec.lock().is_empty()) { Ok(0) } else { /*loop { { if let Some(byte) = self.vec.lock().pop_front() { buf[0] = byte; break; } } unsafe { context::switch(); } }*/ let mut i = 0; while i < buf.len() { match self.vec.lock().pop_front() { Some(b) => { buf[i] = b; i += 1; }, None => break } } Ok(i) } } } /// Read side of a pipe #[derive(Clone)] pub struct PipeWrite { vec: Weak<Mutex<VecDeque<u8>>>, } impl PipeWrite { pub fn new(read: &PipeRead) -> Self { PipeWrite { vec: Arc::downgrade(&read.vec), } } fn write(&self, buf: &[u8]) -> Result<usize> { match self.vec.upgrade() { Some(vec) => { for &b in buf.iter() { vec.lock().push_back(b); } Ok(buf.len()) }, None => Err(Error::new(EPIPE)) } } }
true
099dd85e4752906625b19d691bc659db4b43da55
Rust
Earthmark/Korriban
/runtime/src/space.rs
UTF-8
635
3.015625
3
[ "MIT" ]
permissive
use crate::prop::PropSet; pub trait Element { fn update(&self, src: &PropSet, dest: &mut PropSet); } pub struct Space { props: PropSet, elements: Vec<Box<dyn Element>>, } impl Space { pub fn new() -> Self { Self { props: PropSet::new(), elements: Vec::new(), } } pub fn update(&mut self) { let mut dest = self.props.clone(); for elem in &self.elements { elem.update(&self.props, &mut dest); } self.props = dest; } pub fn add_element(&mut self, elem: Box<dyn Element>) { self.elements.push(elem); } }
true
6bdbc948212ac88ebf9071ec289e827d9f2e0e3b
Rust
EFanZh/Introduction-to-Algorithms
/src/chapter_12_binary_search_trees/section_12_1_what_is_a_binary_search_tree/mod.rs
UTF-8
1,463
3.46875
3
[]
no_license
use crate::chapter_10_elementary_data_structures::section_10_4_representing_rooted_trees::SimpleBinaryTreeNode; pub mod exercises; // Inorder-Tree-Walk(x) // // 1 if x ≠ nil // 2 Inorder-Tree-Walk(x.left) // 3 print x.key // 4 Inorder-Tree-Walk(x.right) pub fn inorder_tree_walk<T, F: FnMut(&T)>(root: &Option<Box<SimpleBinaryTreeNode<T>>>, mut f: F) { fn helper<T, F: FnMut(&T)>(root: &Option<Box<SimpleBinaryTreeNode<T>>>, f: &mut F) { if let Some(node) = root { helper(&node.left, f); f(&node.key); helper(&node.right, f); } } helper(root, &mut f); } #[cfg(test)] mod tests { use crate::chapter_10_elementary_data_structures::section_10_4_representing_rooted_trees::SimpleBinaryTreeNode; use crate::make_simple_tree; fn inorder_tree_walk_as_vec(root: &Option<Box<SimpleBinaryTreeNode<i32>>>) -> Vec<i32> { let mut result = Vec::new(); super::inorder_tree_walk(root, |&key| result.push(key)); result } #[test] fn test_inorder_tree_walk() { assert_eq!(inorder_tree_walk_as_vec(&None), vec![]); assert_eq!(inorder_tree_walk_as_vec(&make_simple_tree![1]), vec![1]); assert_eq!(inorder_tree_walk_as_vec(&make_simple_tree![(1, 2, 3)]), vec![2, 1, 3]); assert_eq!( inorder_tree_walk_as_vec(&make_simple_tree![(1, (2, 3, 4), 5)]), vec![3, 2, 4, 1, 5] ); } }
true
0af29edcbd179921ffd5f6a18515d4f4e6055d3b
Rust
hajifkd/nand2tetris
/10/jackc/src/parser.rs
UTF-8
29,983
2.859375
3
[]
no_license
use crate::lexer::{JackTokenizer, KeywordKind, SymbolKind, Token}; use crate::{escape_xml, JackcError}; use std::convert::TryFrom; pub trait Parse where Self: Sized, { fn parse<T: std::io::Read>(tokenizer: &mut JackTokenizer<T>) -> Result<Self, JackcError>; } #[derive(Debug)] pub struct Class { name: String, varss: Vec<Vec<ClassVar>>, // Oops subroutines: Vec<Subroutine>, } impl std::fmt::Display for Class { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { writeln!(f, "<class>")?; writeln!(f, "<keyword> class </keyword>")?; writeln!(f, "<identifier> {} </identifier>", self.name)?; writeln!(f, "<symbol> {{ </symbol>")?; for vars in self.varss.iter() { ClassVar::fmt_vars(vars, f)?; } for subroutine in self.subroutines.iter() { writeln!(f, "{}", subroutine)?; } writeln!(f, "<symbol> }} </symbol>")?; write!(f, "</class>")?; Ok(()) } } #[derive(Debug)] struct ClassVar { kind: ClassVarKind, v_type: Type, name: String, } impl ClassVar { fn fmt_vars(vars: &[ClassVar], f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { if vars.len() == 0 { return Ok(()); } let s = &vars[0]; writeln!(f, "<classVarDec>")?; writeln!(f, "{}", s.kind)?; writeln!(f, "{}", s.v_type)?; writeln!(f, "<identifier> {} </identifier>", s.name)?; for n in (&vars[1..]).iter() { writeln!(f, "<symbol> , </symbol>")?; writeln!(f, "<identifier> {} </identifier>", n.name)?; } writeln!(f, "<symbol> ; </symbol>")?; writeln!(f, "</classVarDec>")?; Ok(()) } } #[derive(Eq, PartialEq, Debug, Copy, Clone)] enum ClassVarKind { Static, Field, } impl std::fmt::Display for ClassVarKind { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { &ClassVarKind::Static => write!(f, "<keyword> static </keyword>"), &ClassVarKind::Field => write!(f, "<keyword> field </keyword>"), } } } #[derive(Eq, PartialEq, Debug, Clone)] enum Type { Int, Char, Boolean, Void, Class(String), } impl std::fmt::Display for Type { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 
match self { &Type::Int => write!(f, "<keyword> int </keyword>"), &Type::Char => write!(f, "<keyword> char </keyword>"), &Type::Boolean => write!(f, "<keyword> boolean </keyword>"), &Type::Void => write!(f, "<keyword> void </keyword>"), &Type::Class(ref s) => write!(f, "<identifier> {} </identifier>", s), } } } impl Parse for Type { fn parse<T: std::io::Read>(tokenizer: &mut JackTokenizer<T>) -> Result<Type, JackcError> { match tokenizer.advance()? { Token::Keyword(KeywordKind::Int) => Ok(Type::Int), Token::Keyword(KeywordKind::Char) => Ok(Type::Char), Token::Keyword(KeywordKind::Boolean) => Ok(Type::Boolean), Token::Keyword(KeywordKind::Void) => Ok(Type::Void), Token::Identifier(s) => Ok(Type::Class(s)), _ => Err(JackcError::InvalidSyntax), } } } impl Parse for Vec<ClassVar> { fn parse<T: std::io::Read>( tokenizer: &mut JackTokenizer<T>, ) -> Result<Vec<ClassVar>, JackcError> { let kind = match tokenizer.advance()? { Token::Keyword(KeywordKind::Static) => ClassVarKind::Static, Token::Keyword(KeywordKind::Field) => ClassVarKind::Field, t => { tokenizer.unread_token(t); return Ok(vec![]); } }; let v_type = Type::parse(tokenizer)?; if v_type == Type::Void { return Err(JackcError::InvalidSyntax); } let mut idents = vec![tokenizer.advance()?.expect_identifier()?]; loop { let symbol = tokenizer.advance()?.expect_symbol()?; match symbol { b';' => break, b',' => idents.push(tokenizer.advance()?.expect_identifier()?), _ => return Err(JackcError::InvalidSyntax), } } Ok(idents .into_iter() .map(|name| ClassVar { kind, v_type: v_type.clone(), name, }) .collect()) } } #[derive(Debug)] struct Subroutine { kind: SubroutineKind, v_type: Type, name: String, parameters: Vec<Parameter>, body: SubroutineBody, } impl std::fmt::Display for Subroutine { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { writeln!(f, "<subroutineDec>")?; writeln!(f, "{}", self.kind)?; writeln!(f, "{}", self.v_type)?; writeln!(f, "<identifier> {} </identifier>", self.name)?; writeln!(f, 
"<symbol> ( </symbol>")?; writeln!(f, "<parameterList>")?; for (n, parameter) in self.parameters.iter().enumerate() { if n != 0 { writeln!(f, "<symbol> , </symbol>")?; } writeln!(f, "{}", parameter)?; } writeln!(f, "</parameterList>")?; writeln!(f, "<symbol> ) </symbol>")?; writeln!(f, "{}", self.body)?; write!(f, "</subroutineDec>")?; Ok(()) } } impl Parse for Option<Subroutine> { fn parse<T: std::io::Read>( tokenizer: &mut JackTokenizer<T>, ) -> Result<Option<Subroutine>, JackcError> { let kind = match tokenizer.advance()? { Token::Keyword(KeywordKind::Constructor) => SubroutineKind::Constructor, Token::Keyword(KeywordKind::Function) => SubroutineKind::Function, Token::Keyword(KeywordKind::Method) => SubroutineKind::Method, t => { tokenizer.unread_token(t); return Ok(None); } }; let v_type = Type::parse(tokenizer)?; let name = tokenizer.advance()?.expect_identifier()?; let parameters = Vec::<Parameter>::parse(tokenizer)?; let body = SubroutineBody::parse(tokenizer)?; Ok(Some(Subroutine { kind, v_type, name, parameters, body, })) } } #[derive(Debug)] enum SubroutineKind { Constructor, Function, Method, } impl std::fmt::Display for SubroutineKind { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { &SubroutineKind::Constructor => write!(f, "<keyword> constructor </keyword>"), &SubroutineKind::Function => write!(f, "<keyword> function </keyword>"), &SubroutineKind::Method => write!(f, "<keyword> method </keyword>"), } } } #[derive(Debug)] struct Parameter { v_type: Type, name: String, } impl std::fmt::Display for Parameter { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { writeln!(f, "{}", self.v_type)?; write!(f, "<identifier> {} </identifier>", self.name)?; Ok(()) } } impl Parse for Vec<Parameter> { fn parse<T: std::io::Read>( tokenizer: &mut JackTokenizer<T>, ) -> Result<Vec<Parameter>, JackcError> { tokenizer.advance()?.expect_spec_symbol(b'(')?; let mut token = tokenizer.advance()?; let mut params = vec![]; 
while token != Token::Symbol(SymbolKind(b')')) { if token != Token::Symbol(SymbolKind(b',')) { tokenizer.unread_token(token); } params.push(Parameter { v_type: Type::parse(tokenizer)?, name: tokenizer.advance()?.expect_identifier()?, }); token = tokenizer.advance()?; } Ok(params) } } #[derive(Debug)] struct SubroutineBody { varss: Vec<Vec<Var>>, // Oops statements: Vec<Statement>, } impl std::fmt::Display for SubroutineBody { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { writeln!(f, "<subroutineBody>")?; writeln!(f, "<symbol> {{ </symbol>")?; for vars in self.varss.iter() { Var::fmt_vars(vars, f)?; } writeln!(f, "<statements>")?; for statement in self.statements.iter() { writeln!(f, "{}", statement)?; } writeln!(f, "</statements>")?; writeln!(f, "<symbol> }} </symbol>")?; write!(f, "</subroutineBody>")?; Ok(()) } } impl Parse for SubroutineBody { fn parse<T: std::io::Read>( tokenizer: &mut JackTokenizer<T>, ) -> Result<SubroutineBody, JackcError> { tokenizer.advance()?.expect_spec_symbol(b'{')?; let mut varss = vec![]; let mut new_vars = Vec::<Var>::parse(tokenizer)?; while new_vars.len() != 0 { varss.push(new_vars); new_vars = Vec::<Var>::parse(tokenizer)?; } let statements = Vec::<Statement>::parse(tokenizer)?; tokenizer.advance()?.expect_spec_symbol(b'}')?; Ok(SubroutineBody { varss, statements }) } } #[derive(Debug)] struct Var { v_type: Type, name: String, } impl Var { fn fmt_vars(vars: &[Var], f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { if vars.len() == 0 { return Ok(()); } let s = &vars[0]; writeln!(f, "<varDec>")?; writeln!(f, "<keyword> var </keyword>")?; writeln!(f, "{}", s.v_type)?; writeln!(f, "<identifier> {} </identifier>", s.name)?; for n in (&vars[1..]).iter() { writeln!(f, "<symbol> , </symbol>")?; writeln!(f, "<identifier> {} </identifier>", n.name)?; } writeln!(f, "<symbol> ; </symbol>")?; writeln!(f, "</varDec>")?; Ok(()) } } impl Parse for Vec<Var> { fn parse<T: std::io::Read>(tokenizer: &mut JackTokenizer<T>) 
-> Result<Vec<Var>, JackcError> { match tokenizer.advance()? { Token::Keyword(KeywordKind::Var) => (), t => { tokenizer.unread_token(t); return Ok(vec![]); } } let v_type = Type::parse(tokenizer)?; let mut names = vec![tokenizer.advance()?.expect_identifier()?]; let mut delim = tokenizer.advance()?.expect_symbol()?; while delim != b';' { if delim != b',' { return Err(JackcError::InvalidSyntax); } names.push(tokenizer.advance()?.expect_identifier()?); delim = tokenizer.advance()?.expect_symbol()?; } Ok(names .into_iter() .map(|name| Var { v_type: v_type.clone(), name, }) .collect()) } } #[derive(Debug)] enum Statement { LetStatement { var_name: String, index: Option<Expression>, rhs: Expression, }, IfStatement { condition: Expression, if_statements: Vec<Statement>, else_statements: Vec<Statement>, }, WhileStatement { condition: Expression, statements: Vec<Statement>, }, DoStatement { subroutine_call: SubroutineCall, }, ReturnStatement { value: Option<Expression>, }, } impl std::fmt::Display for Statement { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { &Statement::LetStatement { ref var_name, ref index, ref rhs, } => { writeln!(f, "<letStatement>")?; writeln!(f, "<keyword> let </keyword>")?; writeln!(f, "<identifier> {} </identifier>", var_name)?; if let &Some(ref expr) = index { writeln!(f, "<symbol> [ </symbol>")?; writeln!(f, "{}", expr)?; writeln!(f, "<symbol> ] </symbol>")?; } writeln!(f, "<symbol> = </symbol>")?; writeln!(f, "{}", rhs)?; writeln!(f, "<symbol> ; </symbol>")?; write!(f, "</letStatement>")?; } &Statement::IfStatement { ref condition, ref if_statements, ref else_statements, } => { writeln!(f, "<ifStatement>")?; writeln!(f, "<keyword> if </keyword>")?; writeln!(f, "<symbol> ( </symbol>")?; writeln!(f, "{}", condition)?; writeln!(f, "<symbol> ) </symbol>")?; writeln!(f, "<symbol> {{ </symbol>")?; writeln!(f, "<statements>")?; for statement in if_statements.iter() { writeln!(f, "{}", statement)?; } writeln!(f, 
"</statements>")?; writeln!(f, "<symbol> }} </symbol>")?; if else_statements.len() != 0 { writeln!(f, "<keyword> else </keyword>")?; writeln!(f, "<symbol> {{ </symbol>")?; writeln!(f, "<statements>")?; for statement in else_statements.iter() { writeln!(f, "{}", statement)?; } writeln!(f, "</statements>")?; writeln!(f, "<symbol> }} </symbol>")?; } write!(f, "</ifStatement>")?; } &Statement::WhileStatement { ref condition, ref statements, } => { writeln!(f, "<whileStatement>")?; writeln!(f, "<keyword> while </keyword>")?; writeln!(f, "<symbol> ( </symbol>")?; writeln!(f, "{}", condition)?; writeln!(f, "<symbol> ) </symbol>")?; writeln!(f, "<symbol> {{ </symbol>")?; writeln!(f, "<statements>")?; for statement in statements.iter() { writeln!(f, "{}", statement)?; } writeln!(f, "</statements>")?; writeln!(f, "<symbol> }} </symbol>")?; write!(f, "</whileStatement>")?; } &Statement::DoStatement { ref subroutine_call, } => { writeln!(f, "<doStatement>")?; writeln!(f, "<keyword> do </keyword>")?; writeln!(f, "{}", subroutine_call)?; writeln!(f, "<symbol> ; </symbol>")?; write!(f, "</doStatement>")?; } &Statement::ReturnStatement { ref value } => { writeln!(f, "<returnStatement>")?; writeln!(f, "<keyword> return </keyword>")?; if let Some(ref value) = value { writeln!(f, "{}", value)?; } writeln!(f, "<symbol> ; </symbol>")?; write!(f, "</returnStatement>")?; } } Ok(()) } } impl Parse for Statement { fn parse<T: std::io::Read>(tokenizer: &mut JackTokenizer<T>) -> Result<Statement, JackcError> { match tokenizer.advance()?.expect_keyword()? 
{ KeywordKind::Let => { let var_name = tokenizer.advance()?.expect_identifier()?; let symbol = tokenizer.advance()?.expect_symbol()?; let index = match symbol { b'[' => { let result = Expression::parse(tokenizer)?; tokenizer.advance()?.expect_spec_symbol(b']')?; tokenizer.advance()?.expect_spec_symbol(b'=')?; Some(result) } b'=' => None, _ => return Err(JackcError::InvalidSyntax), }; let rhs = Expression::parse(tokenizer)?; tokenizer.advance()?.expect_spec_symbol(b';')?; Ok(Statement::LetStatement { var_name, index, rhs, }) } KeywordKind::If => { tokenizer.advance()?.expect_spec_symbol(b'(')?; let condition = Expression::parse(tokenizer)?; tokenizer.advance()?.expect_spec_symbol(b')')?; tokenizer.advance()?.expect_spec_symbol(b'{')?; let if_statements = Vec::<Statement>::parse(tokenizer)?; tokenizer.advance()?.expect_spec_symbol(b'}')?; let token = tokenizer.advance()?; let else_statements = if token == Token::Keyword(KeywordKind::Else) { tokenizer.advance()?.expect_spec_symbol(b'{')?; let result = Vec::<Statement>::parse(tokenizer)?; tokenizer.advance()?.expect_spec_symbol(b'}')?; result } else { tokenizer.unread_token(token); vec![] }; Ok(Statement::IfStatement { condition, if_statements, else_statements, }) } KeywordKind::While => { tokenizer.advance()?.expect_spec_symbol(b'(')?; let condition = Expression::parse(tokenizer)?; tokenizer.advance()?.expect_spec_symbol(b')')?; tokenizer.advance()?.expect_spec_symbol(b'{')?; let statements = Vec::<Statement>::parse(tokenizer)?; tokenizer.advance()?.expect_spec_symbol(b'}')?; Ok(Statement::WhileStatement { condition, statements, }) } KeywordKind::Do => { let s = tokenizer.advance()?.expect_identifier()?; let subroutine_call = SubroutineCall::parse_with_first_ident(s, tokenizer)?; tokenizer.advance()?.expect_spec_symbol(b';')?; Ok(Statement::DoStatement { subroutine_call }) } KeywordKind::Return => { let next = tokenizer.advance()?; if next == Token::Symbol(SymbolKind(b';')) { Ok(Statement::ReturnStatement { value: 
None }) } else { tokenizer.unread_token(next); let expr = Expression::parse(tokenizer)?; tokenizer.advance()?.expect_spec_symbol(b';')?; Ok(Statement::ReturnStatement { value: Some(expr) }) } } _ => Err(JackcError::InvalidSyntax), } } } impl Parse for Vec<Statement> { fn parse<T: std::io::Read>( tokenizer: &mut JackTokenizer<T>, ) -> Result<Vec<Statement>, JackcError> { let mut result = vec![]; let mut next = tokenizer.advance()?; while next != Token::Symbol(SymbolKind(b'}')) { tokenizer.unread_token(next); result.push(Statement::parse(tokenizer)?); next = tokenizer.advance()?; } tokenizer.unread_token(next); Ok(result) } } #[derive(Debug)] struct Expression { term: Term, ops: Vec<(Op, Term)>, } impl std::fmt::Display for Expression { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { writeln!(f, "<expression>")?; writeln!(f, "{}", self.term)?; for (op, term) in self.ops.iter() { writeln!(f, "{}", op)?; writeln!(f, "{}", term)?; } write!(f, "</expression>")?; Ok(()) } } impl Parse for Expression { fn parse<T: std::io::Read>(tokenizer: &mut JackTokenizer<T>) -> Result<Expression, JackcError> { let term = Term::parse(tokenizer)?; let mut next = tokenizer.advance()?; let mut ops = vec![]; while let Ok(op) = Op::try_from(&next) { ops.push((op, Term::parse(tokenizer)?)); next = tokenizer.advance()?; } tokenizer.unread_token(next); Ok(Expression { term, ops }) } } impl Parse for Vec<Expression> { fn parse<T: std::io::Read>( tokenizer: &mut JackTokenizer<T>, ) -> Result<Vec<Expression>, JackcError> { tokenizer.advance()?.expect_spec_symbol(b'(')?; let mut next = tokenizer.advance()?; let mut result = vec![]; while next != Token::Symbol(SymbolKind(b')')) { if next != Token::Symbol(SymbolKind(b',')) { tokenizer.unread_token(next); } result.push(Expression::parse(tokenizer)?); next = tokenizer.advance()?; } Ok(result) } } #[derive(Debug)] enum Term { IntegerConstant(u16), StringConstant(String), KeywordConstant(KeywordConstantKind), Var(String), 
IndexedVar(String, Box<Expression>), SubroutineCall(SubroutineCall), ParenthesizedExpr(Box<Expression>), UnaryOpedTerm(UnaryOp, Box<Term>), } impl std::fmt::Display for Term { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { writeln!(f, "<term>")?; match self { &Term::IntegerConstant(ref n) => { writeln!(f, "<integerConstant> {} </integerConstant>", n)?; } &Term::StringConstant(ref s) => { writeln!(f, "<stringConstant> {} </stringConstant>", escape_xml(s))?; } &Term::KeywordConstant(ref k) => { writeln!(f, "{}", k)?; } &Term::Var(ref n) => { writeln!(f, "<identifier> {} </identifier>", n)?; } &Term::IndexedVar(ref n, ref i) => { writeln!(f, "<identifier> {} </identifier>", n)?; writeln!(f, "<symbol>[</symbol>")?; writeln!(f, "{}", i)?; writeln!(f, "<symbol>]</symbol>")?; } &Term::SubroutineCall(ref s) => { writeln!(f, "{}", s)?; } &Term::ParenthesizedExpr(ref e) => { writeln!(f, "<symbol>(</symbol>")?; writeln!(f, "{}", e)?; writeln!(f, "<symbol>)</symbol>")?; } &Term::UnaryOpedTerm(ref uop, ref t) => { writeln!(f, "{}", uop)?; writeln!(f, "{}", t)?; } } write!(f, "</term>")?; Ok(()) } } impl Parse for Term { fn parse<T: std::io::Read>(tokenizer: &mut JackTokenizer<T>) -> Result<Term, JackcError> { match tokenizer.advance()? { Token::IntegerLiteral(i) => Ok(Term::IntegerConstant(i)), Token::StringLiteral(s) => Ok(Term::StringConstant(s)), Token::Keyword(KeywordKind::True) => { Ok(Term::KeywordConstant(KeywordConstantKind::True)) } Token::Keyword(KeywordKind::False) => { Ok(Term::KeywordConstant(KeywordConstantKind::False)) } Token::Keyword(KeywordKind::Null) => { Ok(Term::KeywordConstant(KeywordConstantKind::Null)) } Token::Keyword(KeywordKind::This) => { Ok(Term::KeywordConstant(KeywordConstantKind::This)) } Token::Identifier(s) => match tokenizer.advance()? 
{ Token::Symbol(SymbolKind(b'[')) => { let index = Box::new(Expression::parse(tokenizer)?); tokenizer.advance()?.expect_spec_symbol(b']')?; Ok(Term::IndexedVar(s, index)) } t @ Token::Symbol(SymbolKind(b'(')) | t @ Token::Symbol(SymbolKind(b'.')) => { tokenizer.unread_token(t); Ok(Term::SubroutineCall( SubroutineCall::parse_with_first_ident(s, tokenizer)?, )) } t => { tokenizer.unread_token(t); Ok(Term::Var(s)) } }, Token::Symbol(SymbolKind(b'(')) => { let expr = Expression::parse(tokenizer)?; tokenizer.advance()?.expect_spec_symbol(b')')?; Ok(Term::ParenthesizedExpr(Box::new(expr))) } t @ Token::Symbol(_) => { let unop = UnaryOp::try_from(&t).map_err(|_| JackcError::InvalidSyntax)?; Ok(Term::UnaryOpedTerm(unop, Box::new(Term::parse(tokenizer)?))) } _ => Err(JackcError::InvalidSyntax), } } } #[derive(Debug)] enum KeywordConstantKind { True, False, Null, This, } impl std::fmt::Display for KeywordConstantKind { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { &KeywordConstantKind::True => write!(f, "<keyword> true </keyword>"), &KeywordConstantKind::False => write!(f, "<keyword> false </keyword>"), &KeywordConstantKind::Null => write!(f, "<keyword> null </keyword>"), &KeywordConstantKind::This => write!(f, "<keyword> this </keyword>"), } } } #[derive(Debug)] struct SubroutineCall { kind: SubroutineCallKind, name: String, args: Vec<Expression>, } impl std::fmt::Display for SubroutineCall { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { //writeln!(f, "<subroutineCall>")?; match self.kind { SubroutineCallKind::AbsoluteCall(ref s) => { writeln!(f, "<identifier> {} </identifier>", s)?; writeln!(f, "<symbol> . 
</symbol>")?; } _ => (), } writeln!(f, "<identifier> {} </identifier>", self.name)?; writeln!(f, "<symbol> ( </symbol>")?; writeln!(f, "<expressionList>")?; for (n, arg) in self.args.iter().enumerate() { if n != 0 { writeln!(f, "<symbol> , </symbol>")?; } writeln!(f, "{}", arg)?; } writeln!(f, "</expressionList>")?; write!(f, "<symbol> ) </symbol>")?; //write!(f, "</subroutineCall>")?; Ok(()) } } impl SubroutineCall { fn parse_with_first_ident( s: String, tokenizer: &mut JackTokenizer<impl std::io::Read>, ) -> Result<SubroutineCall, JackcError> { match tokenizer.advance()? { t @ Token::Symbol(SymbolKind(b'(')) => { tokenizer.unread_token(t); let args = Vec::<Expression>::parse(tokenizer)?; Ok(SubroutineCall { kind: SubroutineCallKind::SameClassCall, name: s, args, }) } Token::Symbol(SymbolKind(b'.')) => { let name = tokenizer.advance()?.expect_identifier()?; let args = Vec::<Expression>::parse(tokenizer)?; Ok(SubroutineCall { kind: SubroutineCallKind::AbsoluteCall(s), name, args, }) } _ => Err(JackcError::InvalidSyntax), } } } #[derive(Debug)] enum SubroutineCallKind { SameClassCall, AbsoluteCall(String), } #[derive(Debug, Eq, PartialEq)] struct UnaryOp(u8); impl std::fmt::Display for UnaryOp { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "<symbol> {} </symbol>", self.0 as char) } } impl TryFrom<&Token> for UnaryOp { type Error = (); fn try_from(token: &Token) -> Result<UnaryOp, ()> { match token { &Token::Symbol(ref kind) => { let b = kind.0; match b { b'-' | b'~' => Ok(UnaryOp(b)), _ => Err(()), } } _ => Err(()), } } } #[derive(Debug, Eq, PartialEq)] struct Op(u8); impl std::fmt::Display for Op { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!( f, "<symbol> {} </symbol>", escape_xml(&((self.0 as char).to_string())) ) } } impl TryFrom<&Token> for Op { type Error = (); fn try_from(token: &Token) -> Result<Op, ()> { match token { &Token::Symbol(ref kind) => { let b = kind.0; match b { b'+' | b'-' | b'*' | 
b'/' | b'&' | b'|' | b'<' | b'>' | b'=' => Ok(Op(b)), _ => Err(()), } } _ => Err(()), } } } impl Parse for Class { fn parse<T: std::io::Read>(tokenizer: &mut JackTokenizer<T>) -> Result<Class, JackcError> { if tokenizer.advance()? != Token::Keyword(KeywordKind::Class) { return Err(JackcError::ExpectedKeywordNotAppear("class")); } let class_name = tokenizer.advance()?.expect_identifier()?; tokenizer.advance()?.expect_spec_symbol(b'{')?; let mut varss = vec![]; let mut new_vars = Vec::<ClassVar>::parse(tokenizer)?; while new_vars.len() != 0 { varss.push(new_vars); new_vars = Vec::<ClassVar>::parse(tokenizer)?; } let mut subroutines = vec![]; while let Some(s) = Option::<Subroutine>::parse(tokenizer)? { subroutines.push(s); } tokenizer.advance()?.expect_spec_symbol(b'}')?; if tokenizer.advance()? != Token::EndOfFile { Err(JackcError::InvalidSyntax) } else { Ok(Class { name: class_name, varss, subroutines, }) } } }
true
06cffa8d1e54571f6ef3a9ab135544b169d36e3a
Rust
dtolnay/faketty
/tests/test.rs
UTF-8
622
2.65625
3
[ "MIT", "Apache-2.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
use std::fs::{self, File}; use std::io; use std::process::Command; #[test] fn test() -> io::Result<()> { let tempdir = scratch::path("faketty"); let stdout = tempdir.join("test-stdout"); let stderr = tempdir.join("test-stderr"); let status = Command::new(env!("CARGO_BIN_EXE_faketty")) .arg("tests/test.sh") .stdout(File::create(&stdout)?) .stderr(File::create(&stderr)?) .status()?; assert_eq!(status.code(), Some(6)); assert_eq!(fs::read(stdout)?, "stdout is tty\r\n".as_bytes()); assert_eq!(fs::read(stderr)?, "stderr is tty\r\n".as_bytes()); Ok(()) }
true
d04c7d910c4cdec9f62d3ccb34462cf376975b17
Rust
debragail/prisma-engines
/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_delete_inside_upsert.rs
UTF-8
13,155
2.734375
3
[ "Apache-2.0" ]
permissive
use query_engine_tests::*; #[test_suite] mod delete_inside_upsert { use query_engine_tests::{assert_error, run_query, run_query_json, DatamodelWithParams}; use query_test_macros::relation_link_test; // "a P1 to C1 relation " should "work through a nested mutation by id" // TODO:(dom): Not working on mongo. Failing from 9-17 // Reason: Misses foreign key cascade emulation for update #[relation_link_test(on_parent = "ToOneOpt", on_child = "ToOneOpt", exclude(MongoDb))] async fn p1_c1_should_work(runner: &Runner, t: &DatamodelWithParams) -> TestResult<()> { let parent = t.parent().parse( run_query_json!( runner, format!( r#"mutation {{ createOneParent(data: {{ p: "p1", p_1: "p", p_2: "1" childOpt: {{ create: {{c: "c1", c_1: "foo", c_2: "bar"}} }} }}){{ {parent_selection} childOpt{{ {child_selection} }} }} }}"#, parent_selection = t.parent().selection(), child_selection = t.child().selection() ) ), &["data", "createOneParent"], )?; insta::assert_snapshot!( run_query!(runner, format!(r#"mutation {{ upsertOneParent( where: {parent} update:{{ p: {{ set: "p2" }} childOpt: {{delete: true}} }} create:{{p: "Should not matter", p_1: "no", p_2: "yes"}} ){{ childOpt {{ c }} }} }}"#, parent = parent)), @r###"{"data":{"upsertOneParent":{"childOpt":null}}}"### ); Ok(()) } // "a P1 to C1 relation" should "error if the nodes are not connected" #[relation_link_test(on_parent = "ToOneOpt", on_child = "ToOneOpt")] async fn p1_c1_error_if_not_connected(runner: &Runner, t: &DatamodelWithParams) -> TestResult<()> { let parent = t.parent().parse( run_query_json!( runner, format!( r#"mutation {{ createOneParent(data: {{p: "p1", p_1: "p", p_2: "1"}}) {{ {selection} }} }}"#, selection = t.parent().selection() ) ), &["data", "createOneParent"], )?; assert_error!( runner, format!(r#"mutation {{ upsertOneParent( where: {parent} update:{{ p: {{ set: "p2" }} childOpt: {{delete: true}} }} create:{{p: "Should not matter", p_1: "nono", p_2: "yesyes"}} ){{ childOpt {{ c }} }} }}"#, parent = parent), 
2025, "An operation failed because it depends on one or more records that were required but not found. No 'Child' record was found for a nested delete on relation 'ChildToParent'." ); Ok(()) } // "a PM to C1! relation " should "work" #[relation_link_test(on_parent = "ToMany", on_child = "ToOneReq")] async fn pm_c1_req_should_req(runner: &Runner, t: &DatamodelWithParams) -> TestResult<()> { let parent = t.parent().parse( run_query_json!( runner, format!( r#"mutation {{ createOneParent(data: {{ p: "p1", p_1: "p", p_2: "1" childrenOpt: {{ create: {{c: "c1", c_1: "asdf", c_2: "qwer"}} }} }}){{ {selection} childrenOpt{{ c }} }} }}"#, selection = t.parent().selection() ) ), &["data", "createOneParent"], )?; insta::assert_snapshot!( run_query!(runner, format!(r#"mutation {{ upsertOneParent( where: {parent} update:{{ childrenOpt: {{delete: {{c: "c1"}}}} }} create:{{p: "Should not matter", p_1: "foo", p_2: "bar"}} ){{ childrenOpt {{ c }} }} }}"#, parent = parent)), @r###"{"data":{"upsertOneParent":{"childrenOpt":[]}}}"### ); Ok(()) } // "a P1 to C1! 
relation " should "work" #[relation_link_test(on_parent = "ToOneOpt", on_child = "ToOneReq")] async fn p1_c1_req_should_work(runner: &Runner, t: &DatamodelWithParams) -> TestResult<()> { let parent = t.parent().parse( run_query_json!( runner, format!( r#"mutation {{ createOneParent(data: {{ p: "p1", p_1: "p", p_2: "1" childOpt: {{ create: {{c: "c1", c_1: "foo", c_2: "bar"}} }} }}){{ {selection} childOpt{{ c }} }} }}"#, selection = t.parent().selection() ) ), &["data", "createOneParent"], )?; insta::assert_snapshot!( run_query!(runner, format!(r#"mutation {{ upsertOneParent( where: {parent} update:{{ childOpt: {{delete: true}} }} create:{{p: "Should not matter", p_1: "no", p_2: "yes"}} ){{ childOpt {{ c }} }} }}"#, parent = parent)), @r###"{"data":{"upsertOneParent":{"childOpt":null}}}"### ); Ok(()) } // "a PM to C1 " should "work" #[relation_link_test(on_parent = "ToMany", on_child = "ToOneOpt")] async fn pm_c1_should_work(runner: &Runner, t: &DatamodelWithParams) -> TestResult<()> { let parent = t.parent().parse( run_query_json!( runner, format!( r#"mutation {{ createOneParent(data: {{ p: "p1", p_1: "p", p_2: "1" childrenOpt: {{ create: [{{c: "c1", c_1: "foo", c_2: "bar"}}, {{c: "c2", c_1: "nono", c_2: "yesyes"}}] }} }}){{ {selection} childrenOpt{{ c }} }} }}"#, selection = t.parent().selection() ) ), &["data", "createOneParent"], )?; insta::assert_snapshot!( run_query!(runner, format!(r#"mutation {{ upsertOneParent( where: {parent} update:{{ childrenOpt: {{delete: [{{c: "c2"}}]}} }} create:{{p: "Should not matter", p_1: "no", p_2: "yes"}} ){{ childrenOpt {{ c }} }} }}"#, parent = parent)), @r###"{"data":{"upsertOneParent":{"childrenOpt":[{"c":"c1"}]}}}"### ); Ok(()) } // "a P1! 
to CM relation" should "error" #[relation_link_test(on_parent = "ToOneReq", on_child = "ToMany")] async fn p1_req_cm_should_error(runner: &Runner, t: &DatamodelWithParams) -> TestResult<()> { let parent = t.parent().parse( run_query_json!( runner, format!( r#"mutation {{ createOneParent(data: {{ p: "p1", p_1: "p", p_2: "1" childReq: {{ create: {{ c: "c1" c_1: "c_1" c_2: "c_2" }} }} }}){{ {selection} childReq{{ c }} }} }}"#, selection = t.parent().selection() ) ), &["data", "createOneParent"], )?; assert_error!( runner, format!(r#"mutation {{ upsertOneParent( where: {parent} update:{{ childReq: {{delete: true}} }} create:{{p: "Should not matter", p_1: "nono", p_2: "noyes", childReq: {{create:{{c: "Should not matter", c_1: "foo", c_2: "bar"}}}}}} ){{ childReq {{ c }} }} }}"#, parent = parent), 2009, "`Mutation.upsertOneParent.update.ParentUpdateInput.childReq.ChildUpdateOneRequiredWithoutParentsOptInput.delete`: Field does not exist on enclosing type." ); Ok(()) } // "a P1 to CM relation " should "work" #[relation_link_test(on_parent = "ToOneOpt", on_child = "ToMany")] async fn p1_cm_should_work(runner: &Runner, t: &DatamodelWithParams) -> TestResult<()> { let parent = t.parent().parse( run_query_json!( runner, format!( r#"mutation {{ createOneParent(data: {{ p: "p1", p_1: "p_1" p_2: "p_2" childOpt: {{ create: {{ c: "c1" c_1: "c_1" c_2: "c_2" }} }} }}){{ {selection} childOpt{{ c }} }} }}"#, selection = t.parent().selection() ) ), &["data", "createOneParent"], )?; insta::assert_snapshot!( run_query!(runner, format!(r#"mutation {{ upsertOneParent( where: {parent} update:{{childOpt: {{delete: true}}}} create:{{p: "Should not matter", p_1: "no", p_2: "yes"}} ){{ childOpt{{ c }} }} }}"#, parent = parent)), @r###"{"data":{"upsertOneParent":{"childOpt":null}}}"### ); insta::assert_snapshot!( run_query!(runner, r#"query{findManyChild{c, parentsOpt{p}}}"#), @r###"{"data":{"findManyChild":[]}}"### ); Ok(()) } // "a PM to CM relation" should "work" 
#[relation_link_test(on_parent = "ToMany", on_child = "ToMany")] async fn pm_cm_should_work(runner: &Runner, t: &DatamodelWithParams) -> TestResult<()> { let parent = t.parent().parse( run_query_json!( runner, format!( r#"mutation {{ createOneParent(data: {{ p: "p1", p_1: "p", p_2: "1" childrenOpt: {{ create: [{{c: "c1", c_1: "foo", c_2: "bar"}},{{c: "c2", c_1: "wtf", c_2: "lol"}}] }} }}){{ {selection} childrenOpt{{ c }} }} }}"#, selection = t.parent().selection() ) ), &["data", "createOneParent"], )?; insta::assert_snapshot!( run_query!(runner, format!(r#"mutation {{ upsertOneParent( where: {parent} update:{{ childrenOpt: {{delete: [{{c: "c1"}}, {{c: "c2"}}]}} }} create:{{p: "Should not matter", p_1: "foo", p_2: "bar"}} ){{ childrenOpt{{ c }} }} }}"#, parent = parent)), @r###"{"data":{"upsertOneParent":{"childrenOpt":[]}}}"### ); insta::assert_snapshot!( run_query!(runner, r#"query{findManyChild{c, parentsOpt{p}}}"#), @r###"{"data":{"findManyChild":[]}}"### ); Ok(()) } }
true
139e06783a21e63693cd63fdb118f982303e1034
Rust
DutchJavaDev/Rust-Rock-Paper-Scissors
/src/main.rs
UTF-8
3,762
3.515625
4
[]
no_license
extern crate rand; use std::io; use rand::Rng; #[derive(Debug)] #[derive(PartialEq)] enum Winner { None, Robot, Draw, You } #[derive(Debug)] #[derive(PartialEq)] enum Choices { None, Rock, Paper, Scissors } macro_rules! writeln { () => { println!(); }; ($txt:expr) => { println!("{}",$txt); }; } fn _get_user_input() -> String { let mut _input = String::new(); match io::stdin().read_line(&mut _input){ Ok(_n) => println!(""), Err(error) => println!("error: {}", error), } _input.to_lowercase().trim().to_owned() } fn _get_user_choice(_string:String) -> Choices { let mut _number : i32 = match _string.parse() { Ok(num) => num, Err(_) => { -1 } }; match _number { -1 | std::i32::MIN..=-2i32 | 0i32 | 4i32..=std::i32::MAX => Choices::None, 1 => Choices::Rock, 2 => Choices::Paper, 3 => Choices::Scissors } } fn _get_robot_choice() -> Choices { let mut _number = rand::thread_rng().gen_range(0,2); match _number { 0 => Choices::Rock, 1 => Choices::Paper, 2 => Choices::Scissors, std::i32::MIN..=-2i32 | 4i32..=std::i32::MAX | -1i32 | 3i32 => Choices::None, } } fn _show_user_choice() { writeln!("Type: rock, paper or scissors"); writeln!("Or type 1 for rock, 2 for paper and 3 for scissors"); writeln!("Type exit or e to exit"); writeln!(); } fn _run_game(_robot_choice:Choices, _user_choice:Choices) -> Winner { let mut _winner : Winner = Winner::None; match _robot_choice { Choices::None => { _winner = Winner::None } Choices::Rock => { _winner = if _user_choice == Choices::Rock { Winner::Draw } else if _user_choice == Choices::Paper { Winner::You } else if _user_choice == Choices::Scissors { Winner::Robot } else { Winner::None } } Choices::Paper => { _winner = if _user_choice == Choices::Rock { Winner::Robot } else if _user_choice == Choices::Paper { Winner::Draw } else if _user_choice == Choices::Scissors { Winner::You } else { Winner::None } } Choices::Scissors => { _winner = if _user_choice == Choices::Rock { Winner::You } else if _user_choice == Choices::Paper { Winner::Robot } else 
if _user_choice == Choices::Scissors { Winner::Draw } else { Winner::None } } } _winner } fn main(){ writeln!("Welcome to Rock Paper and Scissors game"); writeln!(); loop { _show_user_choice(); let _user_input = _get_user_input(); if _user_input == "e" || _user_input == "exit" { break; } let _robot = _get_robot_choice(); let _user = _get_user_choice(_user_input); match _user { Choices::None => { writeln!("Please enter a valid number!") } Choices::Rock | Choices::Paper | Choices::Scissors => { match _run_game(_robot, _user) { Winner::Draw => { writeln!("Its a draw!") } Winner::Robot => { writeln!("You lose!") } Winner::You => { writeln!("You won!") } Winner::None => { writeln!("Hehhe not gonna happen") } }; writeln!() } }; } writeln!("End of game"); }
true
31191ac7075c4f69fb04c88499105a51d9d5f338
Rust
nwtnni/photon
/src/integrator/light.rs
UTF-8
1,361
2.671875
3
[ "MIT" ]
permissive
use crate::prelude::*; use crate::geom; use crate::light::Light as _; use crate::math; use crate::scene; use crate::integrator; #[derive(Copy, Clone, Debug)] pub struct Light; impl<'scene> integrator::Integrator<'scene> for Light { fn shade(&self, scene: &scene::Scene<'scene>, ray: &math::Ray, hit: &geom::Hit<'scene>, depth: usize) -> math::Vec3 { if depth > 5 { return math::Vec3::default() } let p = hit.p; let n = hit.n; let wr = (ray.p - hit.p).normalize(); let mut color = hit.emit.unwrap_or_default(); for light in scene.lights() { let ls = light.sample(&p); if integrator::shadowed(scene, &p, &ls.d, ls.t) { continue } color += light.eval(&math::Ray::new(p, ls.d)) * hit.bxdf.unwrap().eval(&ls.d, &wr, &n) * ls.a * n.dot(&ls.d) / ls.p; } let bs = hit.bxdf.unwrap().sample(&wr, &n); if bs.delta && bs.p > 0.001 { let mut hr = geom::Hit::default(); let mut recurse = math::Ray::new(p, bs.d); if !scene.hit(&mut recurse, &mut hr) { return color } color += self.shade(scene, &recurse, &hr, depth + 1) * bs.v * n.dot(&bs.d).abs() / bs.p } color } }
true
e3061f761cb36078bcfe83683b2dd39bf252c8b0
Rust
tut-cc/ProjectEuler
/Problem018/rust/euler018.rs
UTF-8
896
2.984375
3
[]
no_license
use std::str::FromStr; use std::fmt::Debug; use std::cmp; fn convert_from_str<T: FromStr>(line: &str) -> Vec<T> where T::Err : Debug { line.split_whitespace().map(|x| x.parse::<T>().unwrap()).collect() } fn main() { let s = "75 95 64 17 47 82 18 35 87 10 20 04 82 47 65 19 01 23 75 03 34 88 02 77 73 07 63 67 99 65 04 28 06 16 70 92 41 41 26 56 83 40 80 70 33 41 48 72 33 47 32 37 16 94 29 53 71 44 65 25 43 91 52 97 51 14 70 11 33 28 77 73 17 78 39 68 17 57 91 71 52 38 17 14 91 43 58 50 27 29 48 63 66 04 68 89 53 67 30 73 16 69 87 40 31 04 62 98 27 23 09 70 98 73 93 38 53 60 04 23".to_owned(); let mut veci : Vec<Vec<i32>> = s.lines().map(convert_from_str).collect(); for row in (1..veci.len()).rev() { for col in 0..(veci[row].len() - 1) { veci[row-1][col] += cmp::max(veci[row][col], veci[row][col+1]); } } println!("{}", veci[0][0]); }
true
85505059681272416aa047121994c607bf79aa7e
Rust
briete/yukicoder
/no56/src/main.rs
UTF-8
348
2.78125
3
[]
no_license
fn getline() -> String{ let mut __ret=String::new(); std::io::stdin().read_line(&mut __ret).ok(); return __ret; } fn main() { let l = getline(); let lv: Vec<_> = l.trim().split(' ').collect(); let d: f64 = lv[0].parse().unwrap(); let p: f64 = lv[1].parse().unwrap(); println!("{}", (d + (d * p / 100.0)).floor() as u64); }
true
35b9d615707635caad411b42df1d305b4997a308
Rust
awersching/wedder
/src/weather/weather_condition.rs
UTF-8
808
2.703125
3
[ "MIT" ]
permissive
use std::collections::HashMap; use serde::{Deserialize, Serialize}; use strum_macros::Display; use crate::config::Config; #[derive(Debug, Hash, Eq, PartialEq, Serialize, Deserialize, Display)] #[strum(serialize_all = "snake_case")] pub enum WeatherCondition { ClearSky, FewClouds, Clouds, ManyClouds, Rain, HeavyRain, Thunderstorm, Snow, Mist, } #[derive(Debug, Serialize, Deserialize, Eq, PartialEq)] pub struct Icons(HashMap<String, String>); impl Icons { pub fn get(&self, condition: &str) -> Option<&String> { self.0.get(condition) } } impl Default for Icons { fn default() -> Self { let cfg_str = include_str!("../../examples/wedder.toml"); let config: Config = toml::from_str(cfg_str).unwrap(); config.icons } }
true
38dccff8576af3e4f0ea499d92cee13c74d120d7
Rust
tarikeshaq/y86-lib
/src/executer/print.rs
UTF-8
4,486
2.640625
3
[ "MIT" ]
permissive
use super::instructions::{ICode, Instruction, Register}; use super::State; use lazy_static::lazy_static; use num_traits::FromPrimitive; use std::collections::HashMap; lazy_static! { static ref MAP: HashMap<u8, &'static str> = vec![ ((ICode::IHALT as u8) << 4, "halt"), ((ICode::INOP as u8) << 4, "nop"), ((ICode::IRRMVXX as u8) << 4 | 0, "rrmovq"), ((ICode::IRRMVXX as u8) << 4 | 1, "cmovle"), ((ICode::IRRMVXX as u8) << 4 | 2, "cmovl"), ((ICode::IRRMVXX as u8) << 4 | 3, "cmove"), ((ICode::IRRMVXX as u8) << 4 | 4, "cmovne"), ((ICode::IRRMVXX as u8) << 4 | 5, "cmovge"), ((ICode::IRRMVXX as u8) << 4 | 6, "cmovg"), ((ICode::IRMMOVQ as u8) << 4, "rmmovq"), ((ICode::IMRMOVQ as u8) << 4, "mrmovq"), ((ICode::IIRMOVQ as u8) << 4, "irmovq"), ((ICode::IOPQ as u8) << 4, "addq"), ((ICode::IOPQ as u8) << 4 | 1, "subq"), ((ICode::IOPQ as u8) << 4 | 2, "andq"), ((ICode::IOPQ as u8) << 4 | 3, "xorq"), ((ICode::IOPQ as u8) << 4 | 4, "mulq"), ((ICode::IOPQ as u8) << 4 | 5, "divq"), ((ICode::IOPQ as u8) << 4 | 6, "modq"), ((ICode::IJXX as u8) << 4, "jmp"), ((ICode::IJXX as u8) << 4 | 1, "jle"), ((ICode::IJXX as u8) << 4 | 2, "jl"), ((ICode::IJXX as u8) << 4 | 3, "je"), ((ICode::IJXX as u8) << 4 | 4, "jne"), ((ICode::IJXX as u8) << 4 | 5, "jge"), ((ICode::IJXX as u8) << 4 | 6, "jg"), ((ICode::ICALL as u8) << 4, "call"), ((ICode::IRET as u8) << 4, "ret"), ((ICode::IPUSHQ as u8) << 4, "pushq"), ((ICode::IPOPQ as u8) << 4, "popq") ] .into_iter() .collect(); } pub fn print_register(register: Register) -> &'static str { match register { Register::RRAX => "%rax", Register::RRCX => "%rcx", Register::RRDX => "%rdx", Register::RRBX => "%rbx", Register::RRSP => "%rsp", Register::RRBP => "%rbp", Register::RRSI => "%rsi", Register::RRDI => "%rdi", Register::RR8 => "%r8", Register::RR9 => "%r9", Register::RR10 => "%r10", Register::RR11 => "%r11", Register::RR12 => "%r12", Register::RR13 => "%r13", Register::RR14 => "%r14", Register::RNONE => "WAT", } } pub fn print_instruction(instr: &Instruction) { 
let code = instr.get_icode(); let ifun = instr.get_ifun(); let icode_ifun = (code as u8) << 4 | ifun; let mut curr = std::format!(" {:}", MAP.get(&icode_ifun).unwrap()); // Remove unwrap match code { ICode::IIRMOVQ => { curr.push_str(&std::format!( " $0x{:x}, {:}", instr.get_val_c().unwrap(), print_register(instr.get_r_b().unwrap()) )); } ICode::IPUSHQ | ICode::IPOPQ => curr.push_str(&std::format!( " {:}", print_register(instr.get_r_a().unwrap()) )), ICode::IJXX | ICode::ICALL => { curr.push_str(&std::format!(" 0x{:x}", instr.get_val_c().unwrap())) } ICode::IRMMOVQ => curr.push_str(&std::format!( " {:}, 0x{:x}({:})", print_register(instr.get_r_a().unwrap()), instr.get_val_c().unwrap(), print_register(instr.get_r_b().unwrap()) )), ICode::IMRMOVQ => curr.push_str(&std::format!( " 0x{:x}({:}), {:}", instr.get_val_c().unwrap(), print_register(instr.get_r_b().unwrap()), print_register(instr.get_r_a().unwrap()) )), ICode::IRRMVXX | ICode::IOPQ => curr.push_str(&std::format!( " {:}, {:}", print_register(instr.get_r_a().unwrap()), print_register(instr.get_r_b().unwrap()) )), _ => (), } curr.push_str(&std::format!(" #PC = 0x{:x}", instr.get_location())); println!("{:}", curr); } pub fn print_all_registers(state: &State) { (0..14) .into_iter() .for_each(|id| print_register_val(state, id)); } pub fn print_memory_quad_value(state: &State, address: u64) { println!( " #M_8[0x{:x}] = 0x{:x}", address, state.read_le(address).unwrap() ); } pub fn print_register_val(state: &State, val: u8) { println!( " #R[{:}] = 0x{:x}", print_register(FromPrimitive::from_u8(val).unwrap()), state.get_register(val) ); }
true
5f3ac359342d77e11bb70333f8c82e0534cd04d3
Rust
Reeywhaar/nut
/src/bucket/cursor_tests.rs
UTF-8
5,365
2.65625
3
[ "MIT" ]
permissive
use crate::db::tests::db_mock; #[test] fn seek_none() { let mut db = db_mock().build().unwrap(); let mut tx = db.begin_rw_tx().unwrap(); drop(tx.create_bucket(b"blub").unwrap()); let c = tx.cursor(); let item = c.seek(b"foo"); assert!(item.is_ok()); assert!(item.unwrap().is_none()); } #[test] fn seek_some() { let mut db = db_mock().build().unwrap(); let mut tx = db.begin_rw_tx().unwrap(); drop(tx.create_bucket(b"foo").unwrap()); let c = tx.cursor(); let item = c.seek(b"foo"); assert!(item.is_ok()); assert!(item.unwrap().is_some()); } #[test] fn values_cursor() { let mut db = db_mock().build().unwrap(); let mut tx = db.begin_rw_tx().unwrap(); { let mut bucket = tx.create_bucket(b"bucket").unwrap(); bucket.put(b"petr", b"rachmaninov".to_vec()).unwrap(); bucket.put(b"robert", b"plant".to_vec()).unwrap(); bucket.put(b"ziggy", b"stardust".to_vec()).unwrap(); { let cursor = bucket.cursor().unwrap(); assert_eq!(cursor.first().unwrap().key.unwrap(), b"petr"); } { let cursor = bucket.cursor().unwrap(); assert_eq!(cursor.first().unwrap().key.unwrap(), b"petr"); assert_eq!(cursor.next().unwrap().key.unwrap(), b"robert"); } { let mut key_names = vec![]; let cursor = bucket.cursor().unwrap(); { let item = cursor.first().unwrap(); key_names.push(item.key.unwrap().to_vec()); } loop { let item = cursor.next().unwrap(); if item.is_none() { break; } key_names.push(item.key.unwrap().to_vec()); } assert_eq!(key_names.len(), 3); assert!(key_names.contains(&b"petr".to_vec())); assert!(key_names.contains(&b"robert".to_vec())); assert!(key_names.contains(&b"ziggy".to_vec())); } // backwards { let cursor = bucket.cursor().unwrap(); assert_eq!(cursor.last().unwrap().key.unwrap(), b"ziggy"); } { let cursor = bucket.cursor().unwrap(); assert_eq!(cursor.last().unwrap().key.unwrap(), b"ziggy"); assert_eq!(cursor.prev().unwrap().key.unwrap(), b"robert"); } { let mut key_names = vec![]; let cursor = bucket.cursor().unwrap(); { let item = cursor.last().unwrap(); 
key_names.push(item.key.unwrap().to_vec()); } loop { let item = cursor.prev().unwrap(); if item.is_none() { break; } key_names.push(item.key.unwrap().to_vec()); } assert_eq!(key_names.len(), 3); assert!(key_names.contains(&b"petr".to_vec())); assert!(key_names.contains(&b"robert".to_vec())); assert!(key_names.contains(&b"ziggy".to_vec())); } { let cursor = bucket.cursor().unwrap(); assert_eq!(cursor.last().unwrap().key.unwrap(), b"ziggy"); assert_eq!(cursor.prev().unwrap().key.unwrap(), b"robert"); assert_eq!(cursor.prev().unwrap().key.unwrap(), b"petr"); assert_eq!(cursor.next().unwrap().key.unwrap(), b"robert"); assert_eq!(cursor.first().unwrap().key.unwrap(), b"petr"); assert_eq!(cursor.next().unwrap().key.unwrap(), b"robert"); assert_eq!(cursor.next().unwrap().key.unwrap(), b"ziggy"); assert_eq!(cursor.prev().unwrap().key.unwrap(), b"robert"); } { let cursor = bucket.cursor().unwrap(); assert_eq!(cursor.first().unwrap().key.unwrap(), b"petr"); assert_eq!(cursor.prev().unwrap().key, None); assert_eq!(cursor.prev().unwrap().key, None); } { let cursor = bucket.cursor().unwrap(); assert_eq!(cursor.last().unwrap().key.unwrap(), b"ziggy"); assert_eq!(cursor.next().unwrap().key, None); assert_eq!(cursor.next().unwrap().key, None); } } } #[test] fn bucket_cursor() { let mut db = db_mock().build().unwrap(); let mut tx = db.begin_rw_tx().unwrap(); { let mut bucket = tx.create_bucket(b"bucket").unwrap(); bucket.put(b"key", b"value".to_vec()).unwrap(); bucket.put(b"keys", b"value".to_vec()).unwrap(); } { let mut bucket = tx.create_bucket(b"another bucket").unwrap(); bucket.put(b"key", b"value".to_vec()).unwrap(); bucket.put(b"keys", b"value".to_vec()).unwrap(); } { let mut bucket_names = vec![]; let cursor = tx.cursor(); { let item = cursor.first().unwrap(); bucket_names.push(item.key.unwrap().to_vec()); } loop { let item = cursor.next().unwrap(); if item.is_none() { break; } bucket_names.push(item.key.unwrap().to_vec()); } assert_eq!(bucket_names.len(), 2); 
assert!(bucket_names.contains(&b"bucket".to_vec())); assert!(bucket_names.contains(&b"another bucket".to_vec())); } }
true
852e8273a09a2eb7114c9fc7b67062876f052fec
Rust
williamluke4/prisma-engine
/libs/sql-connection/src/mysql.rs
UTF-8
3,192
2.6875
3
[ "Apache-2.0" ]
permissive
use crate::{pooling::*, traits::{SqlConnection, SyncSqlConnection}}; use quaint::{ ast::*, connector::{self, ResultSet}, error::Error as QueryError, pool::{MysqlManager}, }; use std::convert::{TryInto}; use tokio::runtime::Runtime; use url::Url; /// A connection, or pool of connections, to a MySQL database. It exposes both sync and async /// query interfaces. pub struct Mysql { conn: ConnectionPool<connector::Mysql, MysqlManager>, // TODO: remove this when we delete the sync interface runtime: Runtime, } impl Mysql { /// Create a new single connection. pub fn new_unpooled(url: Url) -> Result<Self, QueryError> { let conn = connector::Mysql::from_params(url.try_into()?)?; let handle = ConnectionPool::Single(conn); Ok(Mysql { conn: handle, runtime: super::default_runtime(), }) } /// Create a new connection pool. pub fn new_pooled(url: Url) -> Result<Self, QueryError> { let pool = quaint::pool::mysql(url)?; let handle = ConnectionPool::Pool(pool); Ok(Mysql { conn: handle, runtime: super::default_runtime(), }) } async fn get_connection<'a>(&'a self) -> Result<ConnectionHandle<'a, connector::Mysql, MysqlManager>, QueryError> { Ok(self.conn.get_connection().await?) 
} } #[async_trait::async_trait] impl SqlConnection for Mysql { async fn execute<'a>(&self, q: Query<'a>) -> Result<Option<Id>, QueryError> { let conn = self.get_connection().await?; conn.as_queryable().execute(q).await } async fn query<'a>(&self, q: Query<'a>) -> Result<ResultSet, QueryError> { let conn = self.get_connection().await?; conn.as_queryable().query(q).await } async fn query_raw<'a>(&self, sql: &str, params: &[ParameterizedValue<'a>]) -> Result<ResultSet, QueryError> { let conn = self.get_connection().await?; conn.as_queryable().query_raw(sql, params).await } async fn execute_raw<'a>(&self, sql: &str, params: &[ParameterizedValue<'a>]) -> Result<u64, QueryError> { let conn = self.get_connection().await?; conn.as_queryable().execute_raw(sql, params).await } } impl SyncSqlConnection for Mysql { fn execute(&self, q: Query<'_>) -> Result<Option<Id>, QueryError> { let conn = self.runtime.block_on(self.get_connection())?; self.runtime.block_on(conn.as_queryable().execute(q)) } fn query(&self, q: Query<'_>) -> Result<ResultSet, QueryError> { let conn = self.runtime.block_on(self.get_connection())?; self.runtime.block_on(conn.as_queryable().query(q)) } fn query_raw(&self, sql: &str, params: &[ParameterizedValue<'_>]) -> Result<ResultSet, QueryError> { let conn = self.runtime.block_on(self.get_connection())?; self.runtime.block_on(conn.as_queryable().query_raw(sql, params)) } fn execute_raw(&self, sql: &str, params: &[ParameterizedValue<'_>]) -> Result<u64, QueryError> { let conn = self.runtime.block_on(self.get_connection())?; self.runtime.block_on(conn.as_queryable().execute_raw(sql, params)) } }
true
eea8ba5d75541f5a56acf3dfb9edff2aabca006c
Rust
Mange/googleprojection-rs
/tests/integration_test.rs
UTF-8
201
2.546875
3
[ "Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
extern crate googleprojection; #[test] fn it_works() { let pixel = googleprojection::from_ll_to_pixel(&(13.2, 55.9), 2).unwrap(); assert_eq!(pixel.0, 550.0); assert_eq!(pixel.1, 319.0); }
true
7d32e936f0291441dc5e2dbe5f56510737c93669
Rust
japaric-archived/linalg.rs
/src/ops/sub_assign/diag.rs
UTF-8
695
2.546875
3
[ "MIT", "LicenseRef-scancode-unknown-license-reference", "Apache-2.0" ]
permissive
use std::ops::Neg; use assign::SubAssign; use blas::Axpy; use onezero::One; use ops; use {DiagMut, Diag}; // Combinations: // // LHS: DiagMut // RHS: &T, T // // -> 2 implementations // Core implementations impl<'a, 'b, T> SubAssign<&'a T> for DiagMut<'b, T> where T: Axpy + Neg<Output=T> + One { fn sub_assign(&mut self, rhs: &T) { let DiagMut(Diag(ref mut y)) = *self; let x = rhs; let ref alpha = T::one().neg(); ops::axpy_strided_scalar(alpha, x, y) } } // "Forwarding" implementations impl<'a, T> SubAssign<T> for DiagMut<'a, T> where T: Axpy + Neg<Output=T> + One { fn sub_assign(&mut self, rhs: T) { self.sub_assign(&rhs) } }
true
bb9927801d1cc0799ba396c831b50084c888eb02
Rust
jawline/klee-rust
/tests/simple.rs
UTF-8
816
3.078125
3
[ "Unlicense" ]
permissive
extern crate klee; #[test] fn basic_test() { let mut a : i32 = 0; klee::symbol(&mut a, "a"); assert_eq!(a, 56); } #[test] fn other_test() { let mut a : i32 = 0; let mut b : i32 = 0; klee::symbol(&mut a, "a"); klee::symbol(&mut b, "b"); if a == 50 && b == 50 { panic!("I should happen!"); } } #[test] fn yant() { let a = klee::some::<bool>("a"); let b = klee::some::<i32>("b"); let c = if a { if b > 50 && b < 100 { true } else { false } } else { true }; assert_eq!(c, false); } #[test] fn another_test() { let a = klee::some::<i32>("a"); if a > 60 && a < 90 { let b = a + 40; assert_eq!(b, 0); if b == 12 { panic!("This path should be unreachable"); } assert_eq!(b, 101); assert_eq!(b, 150000); } }
true
eff061564b16e15bbfe022ef2b571ca6b942ef91
Rust
tbarrella/crypto-pure
/src/ghash.rs
UTF-8
6,715
2.90625
3
[ "Apache-2.0" ]
permissive
use byteorder::{BigEndian, ByteOrder as _}; pub(crate) fn ghash(key: &[u8; 16], data: &[u8], ciphertext: &[u8]) -> [u8; 16] { let mut tag = [0; 16]; let mut mac = GHash::new(key, data); mac.update(ciphertext); mac.write_tag(&mut tag); tag } const R0: u128 = 0xe1 << 120; struct GHash { function: PolyFunction, data_len: u64, ciphertext_len: u64, } impl GHash { fn new(key: &[u8; 16], data: &[u8]) -> Self { let mut ghash = Self { function: PolyFunction::new(key), data_len: data.len() as u64, ciphertext_len: 0, }; ghash.process(data); ghash } fn update(&mut self, input: &[u8]) { self.ciphertext_len += input.len() as u64; self.process(input); } fn write_tag(mut self, output: &mut [u8; 16]) { BigEndian::write_u64(&mut output[..8], 8 * self.data_len); BigEndian::write_u64(&mut output[8..], 8 * self.ciphertext_len); self.function.process(output); self.function.write_value(output); } fn process(&mut self, input: &[u8]) { for chunk in input.chunks(16) { if chunk.len() < 16 { let buffer = &mut [0; 16]; buffer[..chunk.len()].copy_from_slice(chunk); self.function.process(buffer); } else { self.function.process(chunk); } } } } type GFBlock = u128; struct PolyFunction { key_block: GFBlock, state: GFBlock, } impl PolyFunction { fn new(key: &[u8; 16]) -> Self { Self { key_block: u128::from_be_bytes(*key), state: 0, } } fn process(&mut self, input: &[u8]) { self.state ^= BigEndian::read_u128(input); let mut x = self.state; let mut v = self.key_block; self.state = 0; for _ in 0..128 { let mut h = x & (1 << 127); let mut m = (h as i128 >> 127) as u128; self.state ^= v & m; h = v << 127; m = (h as i128 >> 127) as u128; v >>= 1; v ^= R0 & m; x <<= 1; } } fn write_value(self, output: &mut [u8; 16]) { BigEndian::write_u128(output, self.state); } } #[cfg(test)] mod tests { use super::*; use crate::test_helpers::*; fn check(expected: &str, h: &str, a: &str, c: &str) { let h_vec = &h2b(h); let h = &mut [0; 16]; h.copy_from_slice(h_vec); let a = &h2b(a); let c = &h2b(c); let expected = 
h2b(expected); assert_eq!(expected, ghash(h, a, c)); } #[test] fn test_case_1_2() { let h = "66e94bd4ef8a2c3b884cfa59ca342b2e"; let a = ""; let c = ""; let expected = "00000000000000000000000000000000"; check(expected, h, a, c); let c = "0388dace60b6a392f328c2b971b2fe78"; let expected = "f38cbb1ad69223dcc3457ae5b6b0f885"; check(expected, h, a, c); } #[test] fn test_case_3_4_5_6() { let h = "b83b533708bf535d0aa6e52980d53b78"; let a = ""; let c = "42831ec2217774244b7221b784d0d49ce3aa212f2c02a4e035c17e2329aca12e\ 21d514b25466931c7d8f6a5aac84aa051ba30b396a0aac973d58e091473f5985"; let expected = "7f1b32b81b820d02614f8895ac1d4eac"; check(expected, h, a, c); let a = "feedfacedeadbeeffeedfacedeadbeefabaddad2"; let c = &c[..120]; let expected = "698e57f70e6ecc7fd9463b7260a9ae5f"; check(expected, h, a, c); let c = "61353b4c2806934a777ff51fa22a4755699b2a714fcdc6f83766e5f97b6c7423\ 73806900e49f24b22b097544d4896b424989b5e1ebac0f07c23f4598"; let expected = "df586bb4c249b92cb6922877e444d37b"; check(expected, h, a, c); let c = "8ce24998625615b603a033aca13fb894be9112a5c3a211a8ba262a3cca7e2ca7\ 01e4a9a4fba43c90ccdcb281d48c7c6fd62875d2aca417034c34aee5"; let expected = "1c5afe9760d3932f3c9a878aac3dc3de"; check(expected, h, a, c); } #[test] fn test_case_7_8() { let h = "aae06992acbf52a3e8f4a96ec9300bd7"; let a = ""; let c = ""; let expected = "00000000000000000000000000000000"; check(expected, h, a, c); let c = "98e7247c07f0fe411c267e4384b0f600"; let expected = "e2c63f0ac44ad0e02efa05ab6743d4ce"; check(expected, h, a, c); } #[test] fn test_case_9_10_11_12() { let h = "466923ec9ae682214f2c082badb39249"; let a = ""; let c = "3980ca0b3c00e841eb06fac4872a2757859e1ceaa6efd984628593b40ca1e19c\ 7d773d00c144c525ac619d18c84a3f4718e2448b2fe324d9ccda2710acade256"; let expected = "51110d40f6c8fff0eb1ae33445a889f0"; check(expected, h, a, c); let a = "feedfacedeadbeeffeedfacedeadbeefabaddad2"; let c = &c[..120]; let expected = "ed2ce3062e4a8ec06db8b4c490e8a268"; check(expected, h, a, c); let c = 
"0f10f599ae14a154ed24b36e25324db8c566632ef2bbb34f8347280fc4507057\ fddc29df9a471f75c66541d4d4dad1c9e93a19a58e8b473fa0f062f7"; let expected = "1e6a133806607858ee80eaf237064089"; check(expected, h, a, c); let c = "d27e88681ce3243c4830165a8fdcf9ff1de9a1d8e6b447ef6ef7b79828666e45\ 81e79012af34ddd9e2f037589b292db3e67c036745fa22e7e9b7373b"; let expected = "82567fb0b4cc371801eadec005968e94"; check(expected, h, a, c); } #[test] fn test_case_13_14() { let h = "dc95c078a2408989ad48a21492842087"; let a = ""; let c = ""; let expected = "00000000000000000000000000000000"; check(expected, h, a, c); let c = "cea7403d4d606b6e074ec5d3baf39d18"; let expected = "83de425c5edc5d498f382c441041ca92"; check(expected, h, a, c); } #[test] fn test_case_15_16_17_18() { let h = "acbef20579b4b8ebce889bac8732dad7"; let a = ""; let c = "522dc1f099567d07f47f37a32a84427d643a8cdcbfe5c0c97598a2bd2555d1aa\ 8cb08e48590dbb3da7b08b1056828838c5f61e6393ba7a0abcc9f662898015ad"; let expected = "4db870d37cb75fcb46097c36230d1612"; check(expected, h, a, c); let a = "feedfacedeadbeeffeedfacedeadbeefabaddad2"; let c = &c[..120]; let expected = "8bd0c4d8aacd391e67cca447e8c38f65"; check(expected, h, a, c); let c = "c3762df1ca787d32ae47c13bf19844cbaf1ae14d0b976afac52ff7d79bba9de0\ feb582d33934a4f0954cc2363bc73f7862ac430e64abe499f47c9b1f"; let expected = "75a34288b8c68f811c52b2e9a2f97f63"; check(expected, h, a, c); let c = "5a8def2f0c9e53f1f75d7853659e2a20eeb2b22aafde6419a058ab4f6f746bf4\ 0fc0c3b780f244452da3ebf1c5d82cdea2418997200ef82e44ae7e3f"; let expected = "d5ffcf6fc5ac4d69722187421a7f170b"; check(expected, h, a, c); } }
true
ce7f75f0a67fb7ad04eb8e7b7f1b8dcc323905d3
Rust
Nessex/advent-of-code
/2020/aoc-2020d6p2/src/main.rs
UTF-8
1,104
3
3
[]
no_license
use std::io::{self, Read}; use std::collections::HashSet; use std::iter::FromIterator; fn main() -> io::Result<()> { let mut buffer = String::new(); let mut stdin = io::stdin(); stdin.read_to_string(&mut buffer)?; let mut total = 0; for group in buffer.split("\n\n") { let mut set: HashSet<char> = HashSet::new(); let mut first = true; for line in group.split_ascii_whitespace() { let mut inset: HashSet<char> = HashSet::new(); if line.len() == 0 { continue; } for char in line.chars() { match char { 'a'..='z' => { inset.insert(char); } _ => continue, } } if first { set = inset; first = false; } else { set = HashSet::from_iter(set.intersection(&inset).into_iter().map(|c| c.clone())); } } total += set.len(); } println!("{}", total); Ok(()) }
true
b58b9df40011e68c36b4fd4991634e1ef1e8440b
Rust
grodwar/Hello_rust
/src/basics/types_and_variables.rs
UTF-8
5,018
3.296875
3
[]
no_license
#![allow(dead_code)] //turns off the warning from the compiler //#![allow(unused_imports)] use std::mem; // this is how you import const MEANING_OF_LIFE:u8 = 42; // no fixed address (its gonna be replaced inline at compilation time) //have to declare the type by yourself // all caps by standard static Z:i32 = 123; static mut Z_MUT:i32 = 123; // UNSAFE!! fn scope_and_shadowing(){ println!("scope_and_shadowing function"); //scope and shadowing //let a = 123; //COMMENTED TO GET RID OF WARNING AT COMPILATION let a =1234; // overrides previous declaration println!("outside (before), a = {}", a); //unnamed code block { let b = 456; println!("inside, b = {}", b); let a = 777; //shadows the previous declaration println!("inside, a = {}", a); } println!("outside, a = {}", a); //println!("outside, b = {}", b); // cannot find value 'b' in this scope } fn operators(){ println!("operators function"); // ARITHMETIC let mut a = 2+3*4; // follows PEMDAS println!("a = {}", a); a = a+1; // "--" and "++" operators are not supported a-=2; // -= += *= /= and %= are allowed println!("updated a = {}", a); println!("remainder of {} / {} = {}", a, 3, (a%3)); // there is no "power" operator but a built-in function let a_cubed = i32::pow(a,3); println!(" a = {}, a_cubed = {}", a, a_cubed); let b = 2.5; let b_cubed = f64::powi(b,3); //powi for integer exponent let b_to_pi = f64::powf(b,std::f64::consts::PI); //powf for float exponent println!(" {} cubed is {}", b, b_cubed); println!(" {} to the powe of pi is {}", b, b_to_pi); //BITWISE let c = 1 | 2; // | OR & AND ^ XOR ! 
NOR // 01 OR 10 == 11 == 3_10 (3 in base 10) println!("1|2 = {}", c); let two_to_10 = 1 << 10;// shift to the left println!("1 << 10 = {}", two_to_10); //LOGICAL let pi_less_4 = std::f64::consts::PI < 4.0; // less than // true // also > <= and >= println!("pi_less_4 = {}", pi_less_4); let x = 5; let x_is_five = x == 5; // equality // true println!("x_is_five = {}", x_is_five); } fn datatype_discovery(){ println!("datatype_discovery function"); let a:u8 = 123; // "let" creates a binding // u8 is 8 bits (1 byte) //u stands for unsigned ... values from 0 to 255 //let another:i8 = 123; // signed 8bits, values from -127 to 128 //COMMENTED TO GET RID OF WARNING AT COMPILATION println!("a = {}", a); //a = 456; // immutable variable ==> cant do let mut b:i8 = 0; // mutable variable! println!("b = {}", b); b = 42; // mutable variable! println!("b = {}", b); let mut c = 12345678; // will be guessed as signed 32-bits number // but IDEA guess and does it automatically println!("c = {} size = {} bytes", c, mem::size_of_val(&c)); // getting the size of c with size_of_val not "size_val", using "&" c = -1; // checking its signed println!("c = {} after modification", c); // i8 u8 i16 u16 i32 u32 i64 u64 let z:isize = 123; // integral datatype => size equal to the memory address of the system let size_of_z = mem::size_of_val(&z); println!("z = {}, takes up {} bytes, {}-bit OS", z, size_of_z, size_of_z *8); let d = 'x'; // chars use single quotes '' println!("d = {} size = {} bytes", d, mem::size_of_val(&d)); let e = 2.5; // double-precision float, 8 bytes or 64 bits println!("e = {} size = {} bytes", e, mem::size_of_val(&e)); let e_prime:f32 = 2.5; // if we want float 32, 4 bytes or 32 bits println!("e_prime = {} size = {} bytes", e_prime, mem::size_of_val(&e_prime)); // booleans (true or false) let g = false; println!("g = {} size = {} bytes", g, mem::size_of_val(&g)); let f=4>0; // true println!("f is {}", f); } // first thing touched in the lecture fn initial_main() { 
println!("initial_main function"); println!("test") // "println!" is a macro cause of the "!" } pub fn types_and_variables(){ println!("START OF main of mod types_and_variables;"); initial_main(); datatype_discovery(); operators(); scope_and_shadowing(); println!("MEANING_OF_LIFE = {}", MEANING_OF_LIFE); // the string is needed first, no auto-conversion from u8 println!("Z = {}", Z); unsafe{ // you promise you'll be careful :D println!("before change Z_MUT = {}", Z_MUT); Z_MUT = 777; println!("after change Z_MUT = {}", Z_MUT); } println!("END OF main of mod types_and_variables;"); }
true
6c7082213a04c9dfc941a587dc811ef39b1293bb
Rust
shelbyd/subsrch
/src/indices.rs
UTF-8
401
2.984375
3
[ "MIT" ]
permissive
use std::collections::*; pub type Indices = HashSet<usize>; pub trait SelectIndices { fn select_indices(self, indices: &Indices) -> Self; } impl<T> SelectIndices for Vec<T> { fn select_indices(self, indices: &Indices) -> Self { self.into_iter() .enumerate() .filter(|&(i, _)| indices.contains(&i)) .map(|(_, t)| t) .collect() } }
true
d18152b6671f982de45bf22764e4a1582a942bc1
Rust
avlo/rust-reference
/src/string_literal.rs
UTF-8
245
2.890625
3
[]
no_license
fn main() { let mut s = "string"; println!("{}", s); s = "another"; println!("{}", s); //let s1 = String::from("hello"); //let s2 = s1; //println!("{}", s2); let s1 = String::from("hello"); let s2 = s1; println!("{}, world!", s1); }
true
7d928a1ec6830a6071ca497d14d206674ed96943
Rust
Measter/rust_utils
/src/time/timespan.rs
UTF-8
18,903
3.265625
3
[]
no_license
use std::time::Duration; const NANOS_PER_MILLISECOND_F: f64 = 1_000_000.0; const NANOS_PER_SECOND_F: f64 = 1_000_000_000.0; const NANOS_PER_MILLISECOND: u32 = 1_000_000; const SECONDS_PER_MINUTE: u64 = 60; const SECONDS_PER_HOUR: u64 = SECONDS_PER_MINUTE * 60; const SECONDS_PER_DAY: u64 = SECONDS_PER_HOUR * 24; /// Trait is based on .Net's [`TimeSpan`](https://docs.microsoft.com/en-us/dotnet/api/system.timespan?view=netframework-4.7) type. pub trait TimeSpan<T> { /// Returns the days part of the time span. /// /// # Examples /// /// ```rust /// use std::time::Duration; /// use rust_utils::time::TimeSpan; /// /// let span = Duration::from_total_days(5.31545413).unwrap(); /// assert_eq!(span.partial_days(), 5); /// ``` fn partial_days(&self) -> u64; /// Returns the hours part of the time span. /// /// # Examples /// /// ```rust /// use std::time::Duration; /// use rust_utils::time::TimeSpan; /// /// let span = Duration::from_total_days(5.31545413).unwrap(); /// assert_eq!(span.partial_hours(), 7); /// ``` fn partial_hours(&self) -> u8; /// Returns the minutes part of the time span. /// /// # Examples /// /// ```rust /// use std::time::Duration; /// use rust_utils::time::TimeSpan; /// /// let span = Duration::from_total_days(5.31545413).unwrap(); /// assert_eq!(span.partial_minutes(), 34); /// ``` fn partial_minutes(&self) -> u8; /// Returns the seconds part of the time span. /// /// # Examples /// /// ```rust /// use std::time::Duration; /// use rust_utils::time::TimeSpan; /// /// let span = Duration::from_total_days(5.31545413).unwrap(); /// assert_eq!(span.partial_seconds(), 15); /// ``` fn partial_seconds(&self) -> u8; /// Returns the milliseconds part of the time span. 
/// /// # Examples /// /// ```rust /// use std::time::Duration; /// use rust_utils::time::TimeSpan; /// /// let span = Duration::from_total_days(5.31545413).unwrap(); /// assert_eq!(span.partial_milliseconds(), 236); /// ``` fn partial_milliseconds(&self) -> u16; /// Returns the total number of days, whole and fractional, represented by the time span. /// /// # Examples /// /// ```rust /// use std::time::Duration; /// use rust_utils::time::TimeSpan; /// /// let span = Duration::from_total_days(5.31545413).unwrap(); /// assert_eq!(span.total_days(), 5.31545413); /// ``` fn total_days(&self) -> f64; /// Returns the total number of hours, whole and fractional, represented by the time span. /// /// # Examples /// /// ```rust /// use std::time::Duration; /// use rust_utils::time::TimeSpan; /// /// let span = Duration::from_total_days(5.31545413).unwrap(); /// // Round to precision because of the inaccuracies in floating point maths. /// let span = (span.total_hours() * 1_000_000.0).round() / 1_000_000.0; /// assert_eq!(span, 127.570899); /// ``` fn total_hours(&self) -> f64; /// Returns the total number of minutes, whole and fractional, represented by the time span. /// /// # Examples /// /// ```rust /// use std::time::Duration; /// use rust_utils::time::TimeSpan; /// /// let span = Duration::from_total_days(5.31545413).unwrap(); /// // Round to precision because of the inaccuracies in floating point maths. /// let span = (span.total_minutes() * 1_0000.0).round() / 1_0000.0; /// assert_eq!(span, 7654.2539); /// ``` fn total_minutes(&self) -> f64; /// Returns the total number of seconds, whole and fractional, represented by the time span. /// /// # Examples /// /// ```rust /// use std::time::Duration; /// use rust_utils::time::TimeSpan; /// /// let span = Duration::from_total_days(5.31545413).unwrap(); /// // Round to precision because of the inaccuracies in floating point maths. 
/// let span = (span.total_seconds() * 1_000.0).round() / 1_000.0; /// assert_eq!(span, 459255.237); /// ``` fn total_seconds(&self) -> f64; /// Returns the total number of milliseconds, whole and fractional, represented by the time span. /// /// # Examples /// /// ```rust /// use std::time::Duration; /// use rust_utils::time::TimeSpan; /// /// let span = Duration::from_total_days(5.31545413).unwrap(); /// // Round to precision because of the inaccuracies in floating point maths. /// let span = span.total_milliseconds().round(); /// assert_eq!(span, 459255237.0); /// ``` fn total_milliseconds(&self) -> f64; /// Returns a timespan representing the given number of days. /// /// # Examples /// /// ```rust /// use std::time::Duration; /// use rust_utils::time::TimeSpan; /// /// let week = Duration::from_total_days(7.0); /// ``` fn from_total_days(days: f64) -> Result<T, String>; /// Returns a timespan representing the given number of hours. /// /// # Examples /// /// ```rust /// use std::time::Duration; /// use rust_utils::time::TimeSpan; /// /// let hours = Duration::from_total_hours(13.543); /// ``` fn from_total_hours(hours: f64) -> Result<T, String>; /// Returns a timespan representing the given number of minutes. /// /// # Examples /// /// ```rust /// use std::time::Duration; /// use rust_utils::time::TimeSpan; /// /// let minutes = Duration::from_total_minutes(20.0); /// ``` fn from_total_minutes(minutes: f64) -> Result<T, String>; /// Returns a timespan representing the given number of seconds. /// /// # Examples /// /// ```rust /// use std::time::Duration; /// use rust_utils::time::TimeSpan; /// /// let seconds = Duration::from_total_seconds(13.5); /// ``` fn from_total_seconds(seconds: f64) -> Result<T, String>; /// Returns a timespan representing the given number of milliseconds. 
/// /// # Examples /// /// ```rust /// use std::time::Duration; /// use rust_utils::time::TimeSpan; /// /// let milliseconds = Duration::from_total_milliseconds(516.0); /// ``` fn from_total_milliseconds(milliseconds: f64) -> Result<T, String>; /// Returns a timespan representing the given number of days. /// /// # Examples /// /// ```rust /// use std::time::Duration; /// use rust_utils::time::TimeSpan; /// /// let week = Duration::from_days(7); /// ``` fn from_days(days: u64) -> T; /// Returns a timespan representing the given number of hours. /// /// # Examples /// /// ```rust /// use std::time::Duration; /// use rust_utils::time::TimeSpan; /// /// let hours = Duration::from_hours(13); /// ``` fn from_hours(hours: u64) -> T; /// Returns a timespan representing the given number of minutes. /// /// # Examples /// /// ```rust /// use std::time::Duration; /// use rust_utils::time::TimeSpan; /// /// let minutes = Duration::from_minutes(20); /// ``` fn from_minutes(minutes: u64) -> T; /// Returns a timespan representing the given number of seconds. /// /// # Examples /// /// ```rust /// use std::time::Duration; /// use rust_utils::time::TimeSpan; /// /// let seconds = Duration::from_seconds(13); /// ``` fn from_seconds(seconds: u64) -> T; /// Returns a timespan representing the given number of milliseconds. /// /// # Examples /// /// ```rust /// use std::time::Duration; /// use rust_utils::time::TimeSpan; /// /// let milliseconds = Duration::from_milliseconds(516); /// ``` fn from_milliseconds(milliseconds: u64) -> T; } macro_rules! 
input_check { ($val:expr) => ( if $val.is_sign_negative() || $val.is_nan() || $val.is_infinite() { return Err(format!("Invalid timespan: {:?}", $val)); } ) } impl TimeSpan<Duration> for Duration { fn partial_days(&self) -> u64 { (self.as_secs() / SECONDS_PER_DAY) as u64 } fn partial_hours(&self) -> u8 { let secs = self.as_secs() % SECONDS_PER_DAY; (secs / SECONDS_PER_HOUR) as u8 } fn partial_minutes(&self) -> u8 { let secs = (self.as_secs() % SECONDS_PER_DAY) % SECONDS_PER_HOUR; (secs / SECONDS_PER_MINUTE) as u8 } fn partial_seconds(&self) -> u8 { let secs = ((self.as_secs() % SECONDS_PER_DAY) % SECONDS_PER_HOUR) % SECONDS_PER_MINUTE; secs as u8 } fn partial_milliseconds(&self) -> u16{ (self.subsec_nanos() / NANOS_PER_MILLISECOND) as u16 } fn total_days(&self) -> f64 { let total_days = self.as_secs() as f64 / SECONDS_PER_DAY as f64; let total_nanoseconds = self.subsec_nanos() as f64 / NANOS_PER_SECOND_F / SECONDS_PER_DAY as f64; total_days + total_nanoseconds } fn total_hours(&self) -> f64 { let total_hours = self.as_secs() as f64 / SECONDS_PER_HOUR as f64; let total_nanoseconds = self.subsec_nanos() as f64 / NANOS_PER_SECOND_F / SECONDS_PER_HOUR as f64; total_hours + total_nanoseconds } fn total_minutes(&self) -> f64 { let total_minutes = self.as_secs() as f64 / SECONDS_PER_MINUTE as f64; let total_nanoseconds = self.subsec_nanos() as f64 / NANOS_PER_SECOND_F / SECONDS_PER_MINUTE as f64; total_minutes + total_nanoseconds } fn total_seconds(&self) -> f64 { let total_seconds = self.as_secs() as f64; let total_nanoseconds = self.subsec_nanos() as f64 / NANOS_PER_SECOND_F; total_seconds + total_nanoseconds } fn total_milliseconds(&self) -> f64 { let total_milliseconds = (self.as_secs() * 1000) as f64; let total_nanoseconds = self.subsec_nanos() as f64 / NANOS_PER_MILLISECOND_F; total_milliseconds + total_nanoseconds } fn from_total_days(days: f64) -> Result<Duration, String> { input_check!(days); let days_in_sec = days * SECONDS_PER_DAY as f64; let full_days_in_sec = 
days_in_sec.trunc() as u64; let frac_days_in_sec = (days_in_sec.fract() * NANOS_PER_SECOND_F).round() as u32; Ok(Duration::new(full_days_in_sec, frac_days_in_sec)) } fn from_total_hours(hours: f64) -> Result<Duration, String> { input_check!(hours); let hours_in_sec = hours * SECONDS_PER_HOUR as f64; let full_hours_in_sec = hours_in_sec.trunc() as u64; let frac_hours_in_sec = (hours_in_sec.fract() * NANOS_PER_SECOND_F).round() as u32; Ok(Duration::new(full_hours_in_sec, frac_hours_in_sec)) } fn from_total_minutes(minutes: f64) -> Result<Duration, String> { input_check!(minutes); let minutes_in_sec = minutes * SECONDS_PER_MINUTE as f64; let full_minutes_in_sec = minutes_in_sec.trunc() as u64; let frac_minutes_in_sec = (minutes_in_sec.fract() * NANOS_PER_SECOND_F).round() as u32; Ok(Duration::new(full_minutes_in_sec, frac_minutes_in_sec)) } fn from_total_seconds(seconds: f64) -> Result<Duration, String> { input_check!(seconds); let full_seconds_in_sec = seconds.trunc() as u64; let frac_seconds_in_sec = (seconds.fract() * NANOS_PER_SECOND_F).round() as u32; Ok(Duration::new(full_seconds_in_sec, frac_seconds_in_sec)) } fn from_total_milliseconds(milliseconds: f64) -> Result<Duration, String> { input_check!(milliseconds); let milliseconds_in_nano_sec = (milliseconds * NANOS_PER_MILLISECOND_F).round() as u32; Ok(Duration::new(0, milliseconds_in_nano_sec)) } fn from_days(days: u64) -> Duration { Duration::new(days * SECONDS_PER_DAY, 0) } fn from_hours(hours: u64) -> Duration { Duration::new(hours * SECONDS_PER_HOUR, 0) } fn from_minutes(minutes: u64) -> Duration { Duration::new(minutes * SECONDS_PER_MINUTE, 0) } fn from_seconds(seconds: u64) -> Duration { Duration::new(seconds, 0) } fn from_milliseconds(milliseconds: u64) -> Duration { let secs = milliseconds / 1000; let nanos = (milliseconds % 1000) as u32 * NANOS_PER_MILLISECOND; Duration::new(secs, nanos) } } #[cfg(test)] mod tests { use std::time::Duration; use std::f64; use super::TimeSpan; #[test] fn input_negative() 
{ let neg = Duration::from_total_days(-4.0); assert!(neg.is_err()); } #[test] fn input_infinite() { let inf = Duration::from_total_days(f64::INFINITY); assert!(inf.is_err()); } #[test] fn input_nan() { let nan = Duration::from_total_days(f64::NAN); assert!(nan.is_err()); } #[test] fn from_total_days_two_weeks() { let span = Duration::from_total_days(14.0).unwrap(); assert_eq!(span, Duration::new(1209600, 0)); } #[test] fn from_total_days_one_and_half_day() { let span = Duration::from_total_days(1.5).unwrap(); assert_eq!(span, Duration::new(129600,0)); } #[test] fn from_total_days_one_and_third() { let span = Duration::from_total_days(1.3333).unwrap(); assert_eq!(span, Duration::new(115197,120_000_000)); } #[test] fn from_total_hours_two_hours() { let span = Duration::from_total_hours(2.0).unwrap(); assert_eq!(span, Duration::new(7200, 0)); } #[test] fn from_total_hours_one_and_half_hours() { let span = Duration::from_total_hours(1.5).unwrap(); assert_eq!(span, Duration::new(5400,0)); } #[test] fn from_total_hours_one_and_third() { let span = Duration::from_total_hours(1.3333).unwrap(); assert_eq!(span, Duration::new(4799,880_000_000)); } #[test] fn from_total_minutes_two_minutes() { let span = Duration::from_total_minutes(2.0).unwrap(); assert_eq!(span, Duration::new(120, 0)); } #[test] fn from_total_minutes_one_and_half_minutes() { let span = Duration::from_total_minutes(1.5).unwrap(); assert_eq!(span, Duration::new(90,0)); } #[test] fn from_total_minutes_one_and_third() { let span = Duration::from_total_minutes(1.3333).unwrap(); assert_eq!(span, Duration::new(79,998_000_000)); } #[test] fn from_total_seconds_two_seconds() { let span = Duration::from_total_seconds(2.0).unwrap(); assert_eq!(span, Duration::new(2, 0)); } #[test] fn from_total_seconds_one_and_half_seconds() { let span = Duration::from_total_seconds(1.5).unwrap(); assert_eq!(span, Duration::new(1,500_000_000)); } #[test] fn from_total_seconds_one_and_third() { let span = 
Duration::from_total_seconds(1.3333).unwrap(); assert_eq!(span, Duration::new(1,333_300_000)); } #[test] fn from_total_milliseconds_two_milliseconds() { let span = Duration::from_total_milliseconds(2.0).unwrap(); assert_eq!(span, Duration::new(0, 2_000_000)); } #[test] fn from_total_milliseconds_one_and_half_milliseconds() { let span = Duration::from_total_milliseconds(1.5).unwrap(); assert_eq!(span, Duration::new(0, 1_500_000)); } #[test] fn from_total_milliseconds_one_and_third() { let span = Duration::from_total_milliseconds(1.3333).unwrap(); assert_eq!(span, Duration::new(0, 1_333_300)); } #[test] fn from_days_two_weeks() { let span = Duration::from_days(14); assert_eq!(span, Duration::new(1209600, 0)); } #[test] fn from_hours_two_hours() { let span = Duration::from_hours(2); assert_eq!(span, Duration::new(7200, 0)); } #[test] fn from_minutes_two_minutes() { let span = Duration::from_minutes(2); assert_eq!(span, Duration::new(120, 0)); } #[test] fn from_seconds_two_seconds() { let span = Duration::from_seconds(2); assert_eq!(span, Duration::new(2, 0)); } #[test] fn from_milliseconds_two_milliseconds() { let span = Duration::from_milliseconds(2); assert_eq!(span, Duration::new(0, 2_000_000)); } #[test] fn partial_days() { let span = Duration::from_total_days(1.51354973541463).unwrap(); assert_eq!(span.partial_days(), 1); } #[test] fn partial_hours() { let span = Duration::from_total_days(1.51354973541463).unwrap(); assert_eq!(span.partial_hours(), 12); } #[test] fn partial_minutes() { let span = Duration::from_total_days(1.51354973541463).unwrap(); assert_eq!(span.partial_minutes(), 19); } #[test] fn partial_seconds() { let span = Duration::from_total_days(1.51354973541463).unwrap(); assert_eq!(span.partial_seconds(), 30); } #[test] fn partial_milliseconds() { let span = Duration::from_total_days(1.51354973541463).unwrap(); assert_eq!(span.partial_milliseconds(), 697); } #[test] fn total_days() { let span = Duration::from_total_days(1.5135497354).unwrap(); // 
Round to precision because of the inaccuracies in floating point maths. let span = (span.total_days() * 1_000_000_000_0.0).round() / 1_000_000_000_0.0; assert_eq!(span, 1.5135497354); } #[test] fn total_hours() { let span = Duration::from_total_days(1.5135497354).unwrap(); // Round to precision because of the inaccuracies in floating point maths. let span = (span.total_hours() * 1_000_000_00.0).round() / 1_000_000_00.0; assert_eq!(span, 36.32519365); } #[test] fn total_minutes() { let span = Duration::from_total_days(1.5135497354).unwrap(); // Round to precision because of the inaccuracies in floating point maths. let span = (span.total_minutes() * 1_000_000.0).round() / 1_000_000.0; assert_eq!(span, 2179.511619); } #[test] fn total_seconds() { let span = Duration::from_total_days(1.5135497354).unwrap(); // Round to precision because of the inaccuracies in floating point maths. let span = (span.total_seconds() * 1_0000.0).round() / 1_0000.0; assert_eq!(span, 130770.6971); } #[test] fn total_milliseconds() { let span = Duration::from_total_days(1.5135497354).unwrap(); // Round to precision because of the inaccuracies in floating point maths. let span = (span.total_milliseconds() * 1_0.0).round() / 1_0.0; assert_eq!(span, 130770697.1); } }
true
fbbdb7a00b5ae00a915911053a22999960bd939a
Rust
wuggen/resolution
/src/fileparser.rs
UTF-8
5,955
2.796875
3
[ "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
use crate::cnf::*; use crate::resolution_graph::ResolutionGraph; use std::collections::HashMap; use std::fs::File; use std::io::prelude::*; use std::path::Path; #[derive(Debug)] pub enum FileParseError { IOError(std::io::Error), ParseError(String), } impl std::fmt::Display for FileParseError { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { IOError(e) => write!(f, "{}", e), ParseError(s) => write!(f, "{}", s), } } } use FileParseError::*; pub type Result = std::result::Result<ResolutionGraph, FileParseError>; impl std::convert::Into<Result> for FileParseError { fn into(self) -> Result { Err(self) } } pub fn get_clauses(filename: &Path) -> Result { let mut file = match File::open(&filename) { Err(why) => return IOError(why).into(), Ok(file) => file, }; let mut input = String::new(); match file.read_to_string(&mut input) { Err(why) => return IOError(why).into(), Ok(_) => (), } let inputlength = input.len(); let mut slice; let mut vec = Vec::new(); let mut graph = ResolutionGraph::new(); for line in input.lines() { let mut current_index = 0; let mut clause = Clause::new(); let mut clause_label; let mut parent1 = String::new(); let mut parent2 = String::new(); //read up until the first space let mut i = current_index; while i + 1 <= inputlength { slice = &line[i..(i + 1)]; if slice == ":" { break; } i += 1; } //get the label of the current clause clause_label = line[current_index..i].to_string(); current_index = i + 3; i = current_index; //read until the delimiting } while i + 1 <= line.len() { slice = &line[i..(i + 1)]; if slice == "}" { break; } i += 1; } //get each of the clause literals, and add each one to the current clause object let clause_strings = line[current_index..i].to_string(); let mut split = clause_strings.split(","); for st in split { let name = st.trim(); if st.len() > 0 { slice = &name[..1]; if slice == "~" { clause.add(Literal::new_negated(&name[1..])); } else { clause.add(Literal::new(name)); } } } //read the labels 
of the parent clauses (if they exist) current_index = i + 3; i = current_index; while i + 1 <= inputlength { slice = &line[i..(i + 1)]; if slice == ")" { break; } i += 1; } let full_labels = line[current_index..i].to_string(); if full_labels.len() != 0 { let mut parents_vec = Vec::new(); split = full_labels.split(","); for st in split { let lab = st.trim(); parents_vec.push(lab); } if parents_vec.len() != 2 { return ParseError("ERROR: each clause must have exactly two parents!".to_string()) .into(); } parent1 = parents_vec[0].to_string(); parent2 = parents_vec[1].to_string(); } vec.push((clause_label, clause, parent1, parent2)); } //construct the resolution graph let mut index; let mut will_repeat = true; let mut bools = Vec::new(); let mut ids = HashMap::new(); for _item in vec.iter() { bools.push(false); } assert_eq!(vec.len(), bools.len()); while will_repeat { will_repeat = false; index = 0; while index < vec.len() { if bools[index] == false { let label = &vec[index].0; let cl = &vec[index].1; let p1 = &vec[index].2; let p2 = &vec[index].3; if p1.len() == 0 && p2.len() == 0 { let id = graph.add_clause(cl.clone()); ids.insert(label, id); bools[index] = true; will_repeat = true; index += 1; } else { match ids.get(p1) { Some(&cid) => { let p1id = cid; match ids.get(p2) { Some(&cid2) => { let p2id = cid2; let id = graph.add_clause(cl.clone()); ids.insert(label, id); if let Err(s) = graph.add_resolution_ids(p1id, p2id, id) { return ParseError(format!( "ERROR failed to create resolution: {}", s )) .into(); } bools[index] = true; will_repeat = true; index += 1 } _ => index += 1, } } _ => index += 1, } } } else { index += 1; } } } Ok(graph) } /* #[cfg(test)] mod test { use super::*; #[test] fn attempt() { let graph = get_clauses("test.txt"); println!("results: {:?}", graph); println!("is correct? {:?}", graph.verify()); } } */
true
988b90fddd48c4b9b529fff4a93c1439e79b1ebc
Rust
basiliqio/messy_json
/src/schema.rs
UTF-8
9,681
3.375
3
[ "MIT" ]
permissive
use std::ops::Deref; use super::*; /// ## Schema of a JSON Value /// /// This enum describes in broad strokes how a JSON should look like when deserialized. /// /// At deserialization, this enum will ensure that the JSON Value corresponds to this schema. #[derive(Clone, Debug, PartialEq, Eq, Hash)] pub enum MessyJsonInner { Array(MessyJsonArray), Bool(MessyJsonScalar), Number(MessyJsonNumeric), Obj(MessyJsonObject), String(MessyJsonScalar), #[cfg(feature = "uuid")] Uuid(MessyJsonScalar), } /// Wrapper for [MessyJsonInner](MessyJsonInner) /// /// Wrapping it in an [Arc](std::sync::Arc) #[derive(Clone, Debug, PartialEq, Eq, Hash)] pub struct MessyJson(Arc<MessyJsonInner>); impl std::ops::Deref for MessyJson { type Target = MessyJsonInner; fn deref(&self) -> &Self::Target { &self.0 } } impl MessyJson { #[inline] pub fn builder(&self, settings: MessyJsonSettings) -> MessyJsonBuilder { MessyJsonBuilder::new(self, settings) } } /// An expected object, set when encountering a null value. #[derive(Clone, Debug, PartialEq, Eq, Hash)] pub enum MessyJsonExpected { Root(MessyJson), Obj(MessyJsonObject), } impl MessyJsonInner { /// Check if the inner value of this enum is optional pub fn optional(&self) -> bool { match self { MessyJsonInner::Array(x) => x.optional(), MessyJsonInner::Bool(x) => x.optional(), MessyJsonInner::Number(x) => x.optional(), MessyJsonInner::Obj(x) => x.optional(), MessyJsonInner::String(x) => x.optional(), #[cfg(feature = "uuid")] MessyJsonInner::Uuid(x) => x.optional(), } } } impl From<MessyJsonInner> for MessyJson { fn from(x: MessyJsonInner) -> Self { MessyJson(Arc::new(x)) } } impl From<MessyJsonArray> for MessyJsonInner { fn from(x: MessyJsonArray) -> Self { MessyJsonInner::Array(x) } } impl From<MessyJsonNumeric> for MessyJsonInner { fn from(x: MessyJsonNumeric) -> Self { MessyJsonInner::Number(x) } } impl From<MessyJsonObject> for MessyJsonInner { fn from(x: MessyJsonObject) -> Self { MessyJsonInner::Obj(x) } } impl From<&MessyJsonObject> for 
MessyJsonInner { fn from(x: &MessyJsonObject) -> Self { MessyJsonInner::Obj(x.clone()) } } impl From<&MessyJsonNumeric> for MessyJsonInner { fn from(x: &MessyJsonNumeric) -> Self { MessyJsonInner::Number(*x) } } /// Schema deserializer of a JSON Value /// /// This struct takes a reference to a [MessyJson](MessyJson) and expose `serde`'s /// deserialization trait. #[derive(Clone, Debug, PartialEq, Eq)] pub struct MessyJsonBuilder { schema: MessyJson, settings: MessyJsonSettings, } /// Builder for [MessyJsonObject](MessyJsonObject) #[derive(Clone, Debug, PartialEq, Eq)] pub struct MessyJsonObjectBuilder { schema: MessyJsonObject, settings: MessyJsonSettings, } pub trait MessyJsonObjectTrait { type Input; /// Create a new builder from a [MessyJson](MessyJson) fn new(schema: &Self::Input, settings: MessyJsonSettings) -> Self; /// Get the inner [MessyJson](MessyJson) fn inner(&self) -> &Self::Input; /// Return the settings fn settings(&self) -> &MessyJsonSettings; /// Create a new nested schema providing the nested schema and self fn new_nested(&self, schema: &MessyJson, settings: MessyJsonSettings) -> MessyJsonBuilder; /// Compare that a deserialized object have all the required fields are available. 
/// /// Return a missing key if any, None otherwise fn compare_obj( schema: &MessyJsonObject, res: &mut BTreeMap<ArcStr, MessyJsonValue>, ) -> Option<String> { let mut to_be_merged: BTreeMap<ArcStr, MessyJsonValue> = BTreeMap::new(); let el = itertools::merge_join_by(schema.properties(), res.keys(), |(key1, _), key2| { Ord::cmp(key1, key2) }) .find(|merged| match merged { itertools::EitherOrBoth::Both(_, _) => false, itertools::EitherOrBoth::Left((key, val)) => match val.optional() { true => { to_be_merged.insert( (*key).clone(), MessyJsonValue::Null( MessyJsonNullType::Absent, MessyJsonExpected::Root((*val).clone()), ), ); false } false => true, }, itertools::EitherOrBoth::Right(_) => true, }); let missing_key = el.map(|x| { match x { itertools::EitherOrBoth::Both(_, x) => x, itertools::EitherOrBoth::Left((key, _val)) => key.as_str(), itertools::EitherOrBoth::Right(x) => x, } .to_string() }); res.append(&mut to_be_merged); missing_key } /// Compare that a deserialized object have all the required fields either absent or set, but not set to null. 
/// /// Return a missing key if any, None otherwise fn compare_obj_forced_null( schema: &MessyJsonObject, res: &mut BTreeMap<ArcStr, MessyJsonValue>, ) -> Option<String> { let el = itertools::merge_join_by(schema.properties(), res, |(key1, _), (key2, _)| { Ord::cmp(key1, key2) }) .find(|merged| match merged { itertools::EitherOrBoth::Both((_, schema), (_, value)) => { !schema.optional() && matches!(value, MessyJsonValue::Null(null_type, _) if matches!(null_type, MessyJsonNullType::Null)) }, _ => false, }); el.map(|x| { match x { itertools::EitherOrBoth::Both((key, _), _) => key, itertools::EitherOrBoth::Left((key, _val)) => key, itertools::EitherOrBoth::Right((key, _)) => key, } .to_string() }) } } impl<'a> MessyJsonObjectTrait for MessyJsonBuilder { type Input = MessyJson; #[inline] fn new(schema: &Self::Input, settings: MessyJsonSettings) -> Self { MessyJsonBuilder { schema: schema.clone(), settings, } } #[inline] fn inner(&self) -> &Self::Input { &self.schema } #[inline] fn settings(&self) -> &MessyJsonSettings { &self.settings } #[inline] fn new_nested(&self, schema: &MessyJson, settings: MessyJsonSettings) -> MessyJsonBuilder { MessyJsonBuilder { schema: schema.clone(), settings, } } } impl<'a> MessyJsonObjectTrait for MessyJsonObjectBuilder { type Input = MessyJsonObject; #[inline] fn new(schema: &Self::Input, settings: MessyJsonSettings) -> Self { MessyJsonObjectBuilder { schema: schema.clone(), settings, } } #[inline] fn inner(&self) -> &Self::Input { &self.schema } #[inline] fn settings(&self) -> &MessyJsonSettings { &self.settings } #[inline] fn new_nested(&self, schema: &MessyJson, settings: MessyJsonSettings) -> MessyJsonBuilder { MessyJsonBuilder::new(schema, settings) } } impl<'de> DeserializeSeed<'de> for MessyJsonBuilder { type Value = MessyJsonValueContainer<'de>; #[inline] fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, D::Error> where D: Deserializer<'de>, { match self.inner().deref() { MessyJsonInner::Bool(opt) => match 
opt.optional() || self.settings().all_optional() { true => deserializer.deserialize_option(self), false => deserializer.deserialize_bool(self), }, MessyJsonInner::String(opt) => match opt.optional() || self.settings().all_optional() { true => deserializer.deserialize_option(self), false => deserializer.deserialize_str(self), }, MessyJsonInner::Number(opt) => match opt.optional() || self.settings().all_optional() { true => deserializer.deserialize_option(self), false => match opt.type_() { MessyJsonNumberType::U64 => deserializer.deserialize_u64(self), MessyJsonNumberType::U128 => deserializer.deserialize_u128(self), }, }, MessyJsonInner::Obj(opt) => match opt.optional() || self.settings().all_optional() { true => deserializer.deserialize_option(self), false => deserializer.deserialize_map(self), }, MessyJsonInner::Array(opt) => match opt.optional() || self.settings().all_optional() { true => deserializer.deserialize_option(self), false => deserializer.deserialize_seq(self), }, #[cfg(feature = "uuid")] MessyJsonInner::Uuid(opt) => match opt.optional() || self.settings().all_optional() { true => deserializer.deserialize_option(self), false => deserializer.deserialize_str(self), }, } } } impl<'de> DeserializeSeed<'de> for MessyJsonObjectBuilder { type Value = MessyJsonValueContainer<'de>; #[inline] fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, D::Error> where D: Deserializer<'de>, { match self.inner().optional() { true => deserializer.deserialize_option(self), false => deserializer.deserialize_map(self), } } }
true
23f3acd5cc06257b8225db84cfe4805b94973cf2
Rust
hml1006/amqp-proto
/src/method/base.rs
UTF-8
4,136
2.921875
3
[ "BSD-2-Clause" ]
permissive
use crate::class::Class; use crate::error::FrameDecodeErr; use crate::method::tx::TxMethod; use crate::method::connection::ConnectionMethod; use crate::method::channel::ChannelMethod; use crate::method::access::AccessMethod; use crate::method::exchange::ExchangeMethod; use crate::method::queue::QueueMethod; use crate::method::basic::BasicMethod; use crate::method::confirm::ConfirmMethod; pub trait MethodId { fn method_id(&self) -> u16; } #[derive(Clone, Copy)] pub enum Method { Connection(ConnectionMethod), Channel(ChannelMethod), Access(AccessMethod), Exchange(ExchangeMethod), Queue(QueueMethod), Basic(BasicMethod), Confirm(ConfirmMethod), Tx(TxMethod) } impl MethodId for Method { fn method_id(&self) -> u16 { match self { Method::Connection(method) => method.method_id(), Method::Channel(method) => method.method_id(), Method::Access(method) => method.method_id(), Method::Exchange(method) => method.method_id(), Method::Queue(method) => method.method_id(), Method::Basic(method) => method.method_id(), Method::Confirm(method) => method.method_id(), Method::Tx(method) => method.method_id() } } } impl Default for Method { fn default() -> Self { Method::Connection(ConnectionMethod::default()) } } pub(crate) fn get_method_type(class: Class, method_id: u16) -> Result<Method, FrameDecodeErr> { match class { Class::Connection => { let method = ConnectionMethod::from(method_id); if let ConnectionMethod::Unknown = method { return Err(FrameDecodeErr::SyntaxError("unknown method for connection")); } else { return Ok(Method::Connection(method)); } } Class::Channel => { let method = ChannelMethod::from(method_id); if let ChannelMethod::Unknown = method { return Err(FrameDecodeErr::SyntaxError("unknown method for channel")); } else { return Ok(Method::Channel(method)); } } Class::Access => { let method = AccessMethod::from(method_id); if let AccessMethod::Unknown = method { return Err(FrameDecodeErr::SyntaxError("unknown method for access")); } else { return 
Ok(Method::Access(method)); } } Class::Exchange => { let method = ExchangeMethod::from(method_id); if let ExchangeMethod::Unknown = method { return Err(FrameDecodeErr::SyntaxError("unknown method for exchange")); } else { return Ok(Method::Exchange(method)); } } Class::Queue => { let method = QueueMethod::from(method_id); if let QueueMethod::Unknown = method { return Err(FrameDecodeErr::SyntaxError("unknown method for queue")); } else { return Ok(Method::Queue(method)); } } Class::Basic => { let method = BasicMethod::from(method_id); if let BasicMethod::Unknown = method { return Err(FrameDecodeErr::SyntaxError("unknown method for basic")); } else { return Ok(Method::Basic(method)); } } Class::Tx => { let method = TxMethod::from(method_id); if let TxMethod::Unknown = method { return Err(FrameDecodeErr::SyntaxError("unknown method for tx")); } else { return Ok(Method::Tx(method)); } } Class::Confirm => { let method = ConfirmMethod::from(method_id); if let ConfirmMethod::Unknown = method { return Err(FrameDecodeErr::SyntaxError("unknown method for confirm")); } else { return Ok(Method::Confirm(method)); } } Class::Unknown => return Err(FrameDecodeErr::SyntaxError("unknown class")) } }
true
9695d72c0f737e8ff67b267fcbe3621fd9d0aa9f
Rust
AhmedArslan/d4-format
/d4/src/task/histogram.rs
UTF-8
2,021
2.984375
3
[ "MIT" ]
permissive
use super::{Task, TaskPartition}; use std::ops::Range; pub struct Histogram(String, u32, u32); pub struct Partition { range: (u32, u32), base: i32, histogram: Vec<u32>, below: u32, above: u32, } impl TaskPartition for Partition { type PartitionParam = Range<i32>; type ResultType = (u32, Vec<u32>, u32); fn new(left: u32, right: u32, param: Range<i32>) -> Self { let base = param.start; let size = (param.end - param.start).max(0) as usize; Self { base, range: (left, right), histogram: vec![0; size], below: 0, above: 0, } } fn scope(&self) -> (u32, u32) { self.range } #[inline(always)] fn feed(&mut self, _: u32, value: i32) -> bool { let offset = value - self.base; if offset < 0 { self.below += 1; return true; } if offset >= self.histogram.len() as i32 { self.above += 1; return true; } self.histogram[offset as usize] += 1; true } fn into_result(self) -> (u32, Vec<u32>, u32) { (self.below, self.histogram, self.above) } } impl Task for Histogram { type Partition = Partition; type Output = (u32, Vec<u32>, u32); fn new(chr: &str, left: u32, right: u32) -> Self { Histogram(chr.to_string(), left, right) } fn region(&self) -> (&str, u32, u32) { (self.0.as_ref(), self.1, self.2) } fn combine(&self, parts: &[(u32, Vec<u32>, u32)]) -> (u32, Vec<u32>, u32) { if parts.is_empty() { return (0, vec![], 0); } let mut histogram = vec![0; parts[0].1.len()]; let mut below = 0; let mut above = 0; for (b, v, a) in parts { for (idx, value) in v.into_iter().enumerate() { histogram[idx] += value; } below += b; above += a; } (below, histogram, above) } }
true
8a0c82a91ed8074e700245524515013a1056a88d
Rust
hml1006/amqp-proto
/src/method/connection.rs
UTF-8
1,524
3.15625
3
[ "BSD-2-Clause" ]
permissive
use crate::method::base::MethodId; #[derive(Clone, Copy)] pub enum ConnectionMethod { Start, StartOk, Secure, SecureOk, Tune, TuneOk, Open, OpenOk, Close, CloseOk, Unknown, } impl MethodId for ConnectionMethod { fn method_id(&self) -> u16 { match self { ConnectionMethod::Start => 10, ConnectionMethod::StartOk => 11, ConnectionMethod::Secure => 20, ConnectionMethod::SecureOk => 21, ConnectionMethod::Tune => 30, ConnectionMethod::TuneOk => 31, ConnectionMethod::Open => 40, ConnectionMethod::OpenOk => 41, ConnectionMethod::Close => 50, ConnectionMethod::CloseOk => 51, ConnectionMethod::Unknown => 0xffff } } } impl Default for ConnectionMethod { fn default() -> Self { ConnectionMethod::Unknown } } impl From<u16> for ConnectionMethod { fn from(method_id: u16) -> Self { match method_id { 10 => ConnectionMethod::Start, 11 => ConnectionMethod::StartOk, 20 => ConnectionMethod::Secure, 21 => ConnectionMethod::SecureOk, 30 => ConnectionMethod::Tune, 31 => ConnectionMethod::TuneOk, 40 => ConnectionMethod::Open, 41 => ConnectionMethod::OpenOk, 50 => ConnectionMethod::Close, 51 => ConnectionMethod::CloseOk, _ => ConnectionMethod::Unknown } } }
true
e37fef6c1f1eed32ddb95a1448e7fcb28865f280
Rust
indiv0/rnes
/src/cpu.rs
UTF-8
77,268
3.046875
3
[]
no_license
use mapper::{Mapper, NROM}; use memory::{Address, Memory, NESMemory}; use opcode::Opcode; use opcode::Opcode::*; use std::cmp::Ordering::{Equal, Greater, Less}; use util::{bit_get, bit_set, is_negative}; // Initialization values for the CPU registers. const CPU_STATUS_REGISTER_INITIAL_VALUE: u8 = 0x34; // 0x00111000 (IRQ disabled) const CPU_STACK_POINTER_INITIAL_VALUE: u8 = 0xFD; // Indices of the flag bits in the CPU status register. // TODO: perhaps replace this with the bitflags crate? const FLAG_CARRY: u8 = 0; const FLAG_ZERO: u8 = 1; const FLAG_IRQ_DISABLE: u8 = 2; const FLAG_DECIMAL_MODE: u8 = 3; const FLAG_BREAK: u8 = 5; const FLAG_OVERFLOW: u8 = 6; const FLAG_NEGATIVE: u8 = 7; // IRQ/BRK vector memory address. const IRQ_VECTOR_ADDR: Address = 0xFFFE; /// An implementation of the NES CPU. /// /// # Architecture /// /// The NES CPU is an 8-bit CPU with a small number of internal registers, 64 KB /// of memory, and a 16-bit address bus. The processor is little endian and /// expects addresses to be stored in memory least significant byte first. #[derive(Clone)] pub struct CPU<M> where M: Mapper, { // Registers /// Accumulator. a: u8, /// Index register X. /// /// # Special Functionality /// /// The X register has one special function. It can be used to get a copy of /// the stack pointer or change its value. x: u8, /// Index register Y. y: u8, /// Processor status. /// /// The processor status register contains flags which are set or cleared /// after the execution of instructions to record their results. /// There are also several control flags. /// Each flag has a single bit within the register. /// /// Flags (from LSB to MSB): /// /// * Carry flag: set if the last operation caused an overflow from bit 7 of /// the result or an underflow from bit 0. /// * Zero flag: set if the result of the last operation was zero. /// * Interrupt Disable: set if the program has executed a "Set Interrupt /// Disable" (SEI) instruction. 
/// While this flag is set the processor will not respond to interrupts /// from devices until it is cleared by a "Clear Interrupt Disable" (CLI) /// instruction. /// * Decimal Mode: while this flag is set the processor will utilize the /// rules of binary coded decimal (BCD) arithmetic during addition and /// subtraction. /// * Unknown /// * Break Command: set when a BRK instruction has been executed an an /// interrupt has been generated to process it. /// * Overflow Flag: set during arithmetic operations if the result has /// yielded an invalid 2's complement result (e.g. adding two positive /// numbers and ending up with a negative result). /// * Negative Flag: set if the result of the last operation had bit 7 set /// to one. p: u8, /// Program counter. /// /// Points to the address from which the next instruction byte will be /// fetched. /// /// Low and high 8-bit halves of this register are called PCL and PCH, /// respectively. The program counter may be read by pushing its value to /// the stack. This can be done by jumping to a subroutine or by causing an /// interrupt. pc: Address, /// Stack pointer. /// /// Holds the low 8 bits of the next free location on the stack. /// Pushing to the stack causes the stack pointer to be decremented. /// Popping from the stack causes it to be incremented. /// /// The top 8 bits of the stack pointer are hard-coded to `00000001`, so the /// stack pointer really has an 8-bit address space. sp: u8, memory: NESMemory<M>, } impl<M> CPU<M> where M: Mapper, { pub fn new(mapper: M) -> Self { let memory = NESMemory::new(mapper); // TODO: are these resets necessary? // Disable all channels. // FIXME: uncomment these calls when the APU is implemented. //memory.store(0x4015, 0x00); // Enable frame IRQ //memory.store(0x4017, 0x00); Self { a: 0, x: 0, y: 0, p: CPU_STATUS_REGISTER_INITIAL_VALUE, pc: 0, sp: CPU_STACK_POINTER_INITIAL_VALUE, memory, } } /// Execute a single instruction cycle. 
pub fn step(&mut self) { use instruction::AddressingMode::*; // Read the next opcode to be executed. let pc = self.pc; let opcode: Opcode = self.read_u8(pc).into(); self.pc += 1; // Decode the opcode into an executable instruction. let instruction = opcode.decode(); // If the instruction requires an operand, use the specified addressing // mode to determine its address. let operand_addr = match *instruction.addressing_mode() { Immediate => Some(self.addr_imm()), ZeroPage => Some(self.addr_zero_page()), ZeroPageX => Some(self.addr_zero_page_x()), Relative => Some(self.relative()), Absolute => Some(self.addr_abs()), AbsoluteX => Some(self.addr_abs_x()), AbsoluteY => Some(self.addr_abs_y()), IndirectX => Some(self.addr_ind_x()), IndirectY => Some(self.addr_ind_y()), Indirect => Some(self.indirect()), Implicit => None, Accumulator | ZeroPageY => panic!("Unimplemented addressing mode"), }; match *instruction.opcode() { ADC_IMM | ADC_ZPAGE | ADC_ZPAGEX | ADC_ABS | ADC_ABSX | ADC_ABSY | ADC_INDX | ADC_INDY => { let addr = operand_addr.expect("Operand address was unexpectedly missing"); self.adc(addr); return; } AND_IMM | AND_ZPAGE | AND_ZPAGEX | AND_ABS | AND_ABSX | AND_ABSY | AND_INDX | AND_INDY => { let addr = operand_addr.expect("Operand address was unexpectedly missing"); self.and(addr); return; } ASL_ACC => { self.asl(None); } ASL_ZPAGE | ASL_ZPAGEX | ASL_ABS | ASL_ABSX => { let addr = operand_addr.expect("Operand address was unexpectedly missing"); self.asl(Some(addr)); } BCC => { let addr = operand_addr.expect("Operand address was unexpectedly missing"); self.bcc(addr); } BCS => { let addr = operand_addr.expect("Operand address was unexpectedly missing"); self.bcs(addr); } BEQ => { let addr = operand_addr.expect("Operand address was unexpectedly missing"); self.beq(addr); } BIT_ZPAGE | BIT_ABS => { let addr = operand_addr.expect("Operand address was unexpectedly missing"); self.bit(addr); } BMI => { let addr = operand_addr.expect("Operand address was unexpectedly 
missing"); self.bmi(addr); } BNE => { let addr = operand_addr.expect("Operand address was unexpectedly missing"); self.bne(addr); } BPL => { let addr = operand_addr.expect("Operand address was unexpectedly missing"); self.bpl(addr); } BRK => self.brk(), BVC => { let addr = operand_addr.expect("Operand address was unexpectedly missing"); self.bvc(addr); } BVS => { let addr = operand_addr.expect("Operand address was unexpectedly missing"); self.bvs(addr); } CLC => self.clc(), CLD => self.cld(), CLI => self.cli(), CLV => self.clv(), CMP_IMM | CMP_ZPAGE | CMP_ZPAGEX | CMP_ABS | CMP_ABSX | CMP_ABSY | CMP_INDX | CMP_INDY => { let addr = operand_addr.expect("Operand address was unexpectedly missing"); self.cmp(addr); } CPX_IMM | CPX_ZPAGE | CPX_ABS => { let addr = operand_addr.expect("Operand address was unexpectedly missing"); self.cpx(addr); } CPY_IMM | CPY_ZPAGE | CPY_ABS => { let addr = operand_addr.expect("Operand address was unexpectedly missing"); self.cpy(addr); } LDA_IMM | LDA_ZPAGE | LDA_ZPAGEX | LDA_ABS | LDA_ABSX | LDA_ABSY | LDA_INDX | LDA_INDY => { let addr = operand_addr.expect("Operand address was unexpectedly missing"); self.lda(addr); } DEC_ZPAGE | DEC_ZPAGEX | DEC_ABS | DEC_ABSX => { let addr = operand_addr.expect("Operand address was unexpectedly missing"); self.dec(addr); } DEX => self.dex(), DEY => self.dey(), EOR_IMM | EOR_ZPAGE | EOR_ZPAGEX | EOR_ABS | EOR_ABSX | EOR_ABSY | EOR_INDX | EOR_INDY => { let addr = operand_addr.expect("Operand address was unexpectedly missing"); self.eor(addr); } INC_ZPAGE | INC_ZPAGEX | INC_ABS | INC_ABSX => { let addr = operand_addr.expect("Operand address was unexpectedly missing"); self.inc(addr); } INX => self.inx(), INY => self.iny(), JMP_ABS | JMP_IND => { let addr = operand_addr.expect("Operand address was unexpectedly missing"); self.jmp(addr); } JSR => { let addr = operand_addr.expect("Operand address was unexpectedly missing"); self.jsr(addr); } LDX_IMM | LDX_ZPAGE | LDX_ZPAGEY | LDX_ABS | LDX_ABSY => { let 
addr = operand_addr.expect("Operand address was unexpectedly missing"); self.ldx(addr); } LDY_IMM | LDY_ZPAGE | LDY_ZPAGEX | LDY_ABS | LDY_ABSX => { let addr = operand_addr.expect("Operand address was unexpectedly missing"); self.ldy(addr); } LSR_ACC => self.lsr_acc(), LSR_ZPAGE | LSR_ZPAGEX | LSR_ABS | LSR_ABSX => { let addr = operand_addr.expect("Operand address was unexpectedly missing"); self.lsr_mem(addr); } NOP => self.nop(), ORA_IMM | ORA_ZPAGE | ORA_ZPAGEX | ORA_ABS | ORA_ABSX | ORA_ABSY | ORA_INDX | ORA_INDY => { let addr = operand_addr.expect("Operand address was unexpectedly missing"); self.ora(addr); } PHA => self.pha(), PHP => self.php(), PLA => self.pla(), PLP => self.plp(), ROL_ACC => self.rol_acc(), ROL_ZPAGE | ROL_ZPAGEX | ROL_ABS | ROL_ABSX => { let addr = operand_addr.expect("Operand address was unexpectedly missing"); self.rol_mem(addr); } ROR_ACC => self.ror_acc(), ROR_ZPAGE | ROR_ZPAGEX | ROR_ABS | ROR_ABSX => { let addr = operand_addr.expect("Operand address was unexpectedly missing"); self.ror_mem(addr); } RTI => self.rti(), RTS => self.rts(), SBC_IMM | SBC_ZPAGE | SBC_ZPAGEX | SBC_ABS | SBC_ABSX | SBC_ABSY | SBC_INDX | SBC_INDY => { let addr = operand_addr.expect("Operand address was unexpectedly missing"); self.sbc(addr); } SEC => self.sec(), SED => self.sed(), SEI => self.sei(), STA_ZPAGE | STA_ZPAGEX | STA_ABS | STA_ABSX | STA_ABSY | STA_INDX | STA_INDY => { let addr = operand_addr.expect("Operand address was unexpectedly missing"); self.sta(addr); } STX_ZPAGE | STX_ZPAGEY | STX_ABS => { let addr = operand_addr.expect("Operand address was unexpectedly missing"); self.stx(addr); } STY_ZPAGE | STY_ZPAGEX | STY_ABS => { let addr = operand_addr.expect("Operand address was unexpectedly missing"); self.sty(addr); } TAX => self.tax(), TAY => self.tay(), TSX => self.tsx(), TXA => self.txa(), TXS => self.txs(), TYA => self.tya(), } } fn _reset(&mut self) { // A, X, Y are not affected by reset. 
// Decrement S by 3, but do not write anything to the stack. self.sp.wrapping_sub(3); // Set the I (IRQ disable) flag to true). self.p |= 0x04; // Internal memory remains unchanged. // APU mode in $4017 remains unchanged. // Silence the APU. self.memory.store(0x4015, 0x00); } /// Adds the relative displacement to the program counter to branch to a new /// location. fn branch(&mut self, relative_addr: Address) { // Because branch instructions step the program counter by 2, we must // first decrement it back. // TODO: find a way to optimize this? self.pc -= 2; // Add the signed relative value to the program counter. self.pc += (relative_addr ^ 0x80) - 0x80; } /// Compares the specified value to another value in memory, and sets the zaro and carry flags /// as appropriate. fn compare(&mut self, value: u8, other_addr: Address) { // Retrieve the value to be compared. let other = self.read_u8(other_addr); match value.cmp(&other) { Less => { self.set_carry(false); self.set_zero(false); self.set_negative(true); } Equal => { self.set_carry(true); self.set_zero(true); self.set_negative(false); } Greater => { self.set_carry(true); self.set_zero(false); self.set_negative(false); } } } /// Shifts the bits of the specified value to the right, returning the /// result and setting the carry, zero, and negative status flags as /// necessary. fn lsr(&mut self, value: u8) -> u8 { self.set_carry(bit_get(value, 0)); let res = value >> 1; self.set_zero(res == 0); self.set_negative(false); res } /// Rotates the bits of the specified value one place to the left, and /// returns the resulting value. /// Sets the carry, zero, and negative flags as necessary. 
fn rol(&mut self, value: u8) -> u8 { let new_carry = bit_get(value, 7); let mut result = value << 1; result = bit_set(result, 0, self.carry()); self.set_carry(new_carry); self.set_zero(result == 0); self.set_negative(is_negative(result)); result } /// Rotates the bits of the specified value one place to the right, and /// returns the resulting value. /// Sets the carry, zero, and negative flags as necessary. fn ror(&mut self, value: u8) -> u8 { let new_carry = bit_get(value, 0); let mut result = value >> 1; result = bit_set(result, 7, self.carry()); self.set_carry(new_carry); self.set_zero(result == 0); self.set_negative(is_negative(result)); result } /// Performs an add with carry. fn adc_inner(&mut self, arg: u8) { let (sum, overflow1) = self.a.overflowing_add(arg); let (sum, overflow2) = sum.overflowing_add(self.carry() as u8); let carry = overflow1 || overflow2; // Carry flag gets set if overflow in bit 7. self.set_carry(carry); // Set if sign bit is incorrect. let overflow = !(self.a ^ arg) & (self.a ^ sum) & 0x80; self.set_overflow(overflow != 0); self.a = sum; // Set if A = 0 let zero = self.a == 0; self.set_zero(zero); } // Processor status /// Returns the value of the "carry" flag. fn carry(&self) -> bool { bit_get(self.p, FLAG_CARRY) } /// Sets the value of the "carry" flag. fn set_carry(&mut self, carry: bool) { self.p = bit_set(self.p, FLAG_CARRY, carry); } /// Returns the value of the "zero" flag. fn zero(&self) -> bool { bit_get(self.p, FLAG_ZERO) } /// Sets the value of the "zero" flag. fn set_zero(&mut self, zero: bool) { self.p = bit_set(self.p, FLAG_ZERO, zero); } /// Returns the value of the "IRQ disable" flag. #[allow(dead_code)] fn irq_disable(&self) -> bool { bit_get(self.p, FLAG_IRQ_DISABLE) } /// Sets the value of the "IRQ disable" flag. fn set_irq_disable(&mut self, irq_disable: bool) { self.p = bit_set(self.p, FLAG_IRQ_DISABLE, irq_disable); } /// Returns the value of the "decimal mode" flag. 
#[allow(dead_code)] fn decimal_mode(&self) -> bool { bit_get(self.p, FLAG_DECIMAL_MODE) } /// Sets the value of the "decimal mode" flag. fn set_decimal_mode(&mut self, decimal_mode: bool) { self.p = bit_set(self.p, FLAG_DECIMAL_MODE, decimal_mode); } /// Returns the value of the "break" flag. #[allow(dead_code)] fn break_flag(&self) -> bool { bit_get(self.p, FLAG_BREAK) } /// Sets the value of the "break" flag. fn set_break(&mut self, brk: bool) { self.p = bit_set(self.p, FLAG_BREAK, brk); } /// Returns the value of the "overflow" flag. fn overflow(&self) -> bool { bit_get(self.p, FLAG_OVERFLOW) } /// Sets the value of the "overflow" flag. fn set_overflow(&mut self, overflow: bool) { self.p = bit_set(self.p, FLAG_OVERFLOW, overflow); } /// Sets the value of the "negative" flag. fn negative(&self) -> bool { bit_get(self.p, FLAG_NEGATIVE) } /// Sets the value of the "negative" flag. fn set_negative(&mut self, negative: bool) { self.p = bit_set(self.p, FLAG_NEGATIVE, negative); } // Stack operations and variables /// Returns the value of the stack pointer as an absolute address value. fn sp(&self) -> u16 { // Compute the address of the stack pointer; // the top 8 bits are hard-coded to be equal to 0b0000_0001. 0x0100 | u16::from(self.sp) } /// Pushes a value to the stack. /// /// The stack is implemented as a descending stack, so the stack pointer /// is decremented after this operation. fn push(&mut self, value: u8) { // Push the value onto the stack and decrement the stack pointer. let addr = self.sp(); self.write_u8(addr, value); self.sp = self.sp.wrapping_sub(1); } /// Pops a value from the stack. /// /// The stack is implemented as a descending stack, so the stack pointer /// is incremented after this operation. fn pop(&mut self) -> u8 { // Increment the stack pointer and pop the value from the stack. self.sp = self.sp.wrapping_add(1); let addr = self.sp(); self.read_u8(addr) } /// Retrieves a value "index" positions from the top without removing it. 
#[allow(dead_code)] fn peek(&mut self, index: u8) -> u8 { // Calculate the address we wish to peek at. let addr = 0x0100 | u16::from(self.sp.wrapping_add(1).wrapping_add(index)); self.read_u8(addr) } // Memory read /// Reads and returns a single u8 value at the specified memory address. fn read_u8(&mut self, addr: Address) -> u8 { self.memory.fetch(addr) } /// Reads and returns a little endian u16 at the specified memory address. fn read_u16(&mut self, addr: Address) -> u16 { let low = u16::from(self.read_u8(addr)); let high = u16::from(self.read_u8(addr + 1)); high << 8 | low } // Memory write /// Writes a single `u8` value to the specified memory address. fn write_u8(&mut self, addr: Address, value: u8) { self.memory.store(addr, value); } /// Writes a little endian `u16` value to the specified memory address. #[allow(dead_code)] fn write_u16(&mut self, addr: Address, value: u16) { self.write_u8(addr, value as u8); self.write_u8(addr + 1, (value >> 8) as u8); } // Memory addressing /// Returns the address value of the program counter location. /// /// # Note /// /// Increments the program counter by 1 to represent the memory read. fn addr_imm(&mut self) -> Address { let addr = self.pc; self.pc += 1; addr } /// Returns the zero page address value at the program counter location. /// /// # Note /// /// Increments the program counter by 1 to represent the memory read. fn addr_zero_page(&mut self) -> Address { let pc = self.pc; let addr = Address::from(self.read_u8(pc)); self.pc += 1; addr } /// Returns the zero page address value at the program counter location, /// with the current value of the `X` register added to it. /// /// # Note /// /// Increments the program counter by 1 to represent the memory read. 
fn addr_zero_page_x(&mut self) -> Address {
    let pc = self.pc;
    let operand = self.read_u8(pc);
    self.pc += 1;
    // BUG FIX: zero page indexed addressing wraps within the zero page on
    // the 6502 — the effective address is (operand + X) & 0xFF — so the sum
    // must not carry into the high byte as the old `addr + u16::from(x)` did.
    Address::from(operand.wrapping_add(self.x))
}

/// Reads the one-byte relative operand used by the branch instructions.
///
/// # Note
///
/// The raw (zero-extended) operand is returned; the displacement is applied
/// to the program counter by `branch` — presumably including sign handling
/// for backward offsets (TODO: confirm in `branch`).
/// Increments the program counter by 1 to represent the memory read.
fn relative(&mut self) -> Address {
    let pc = self.pc;
    let addr = Address::from(self.read_u8(pc));
    self.pc += 1;
    addr
}

/// Absolute addressing: returns the 16-bit address stored at the program
/// counter location.
///
/// # Note
///
/// Increments the program counter by 2 to represent the memory read.
fn addr_abs(&mut self) -> Address {
    let pc = self.pc;
    let addr = self.read_u16(pc);
    self.pc += 2;
    addr
}

/// Absolute,X addressing: the 16-bit operand plus the value of register `X`.
///
/// # Note
///
/// Increments the program counter by 2 to represent the memory read.
fn addr_abs_x(&mut self) -> Address {
    let pc = self.pc;
    let base_addr = self.read_u16(pc);
    self.pc += 2;
    // wrapping_add: indexing past $FFFF wraps around the 16-bit address
    // space instead of panicking in debug builds.
    base_addr.wrapping_add(u16::from(self.x))
}

/// Absolute,Y addressing: the 16-bit operand plus the value of register `Y`.
///
/// # Note
///
/// Increments the program counter by 2 to represent the memory read.
fn addr_abs_y(&mut self) -> Address {
    let pc = self.pc;
    let base_addr = self.read_u16(pc);
    self.pc += 2;
    // wrapping_add for the same reason as in addr_abs_x.
    base_addr.wrapping_add(u16::from(self.y))
}

/// Indirect addressing: returns the address pointed to by the 16-bit value
/// at the program counter location.
///
/// # Note
///
/// Increments the program counter by 2 to represent the memory read.
/// NOTE(review): this does not reproduce the original 6502 quirk where an
/// indirect vector at $xxFF fetches its high byte from $xx00 — confirm
/// whether hardware-accurate JMP ($xxFF) behaviour is required here.
fn indirect(&mut self) -> Address {
    let pc = self.pc;
    let addr_loc = self.read_u16(pc);
    let addr = self.read_u16(addr_loc);
    self.pc += 2;
    addr
}

/// Adds the value of register `X` to the memory address located at the
/// program counter location, then returns the memory address value pointed
/// to by that value.
///
/// # Note
///
/// Increments the program counter by 1 to represent the memory read.
fn addr_ind_x(&mut self) -> Address {
    let pc = self.pc;
    let operand = self.read_u8(pc);
    self.pc += 1;
    // BUG FIX: on the 6502 both the pointer (operand + X) and the high byte
    // of the 16-bit vector it points at stay inside the zero page, so all
    // of this arithmetic must wrap at 8 bits.
    let ptr = operand.wrapping_add(self.x);
    let lo = u16::from(self.read_u8(Address::from(ptr)));
    let hi = u16::from(self.read_u8(Address::from(ptr.wrapping_add(1))));
    (hi << 8) | lo
}

/// Retrieves the memory address pointed to by the instruction operand, then
/// adds the value of register `Y` to this address.
///
/// # Note
///
/// Increments the program counter by 1 to represent the memory read.
fn addr_ind_y(&mut self) -> Address {
    let pc = self.pc;
    let operand = self.read_u8(pc);
    self.pc += 1;
    // The 16-bit vector is read from the zero page with 8-bit wraparound
    // (a $FF operand takes its high byte from $00), and the final sum wraps
    // around the 16-bit address space.
    let lo = u16::from(self.read_u8(Address::from(operand)));
    let hi = u16::from(self.read_u8(Address::from(operand.wrapping_add(1))));
    ((hi << 8) | lo).wrapping_add(u16::from(self.y))
}

// Instructions

/// Performs an add with carry.
fn adc(&mut self, addr: Address) {
    let arg = self.read_u8(addr);
    self.adc_inner(arg);
}

/// Performs a logical AND on the accumulator and the byte at `addr`.
fn and(&mut self, addr: Address) {
    let arg = self.read_u8(addr);
    self.a &= arg;
    // Set if A = 0
    let zero = self.a == 0;
    self.set_zero(zero);
    // Set if bit 7 set
    let negative = is_negative(self.a);
    self.set_negative(negative);
}

/// Shifts the contents of the accumulator (`addr == None`) or the specified
/// memory address one bit left.
fn asl(&mut self, addr: Option<Address>) {
    let value = match addr {
        Some(addr) => self.read_u8(addr),
        None => self.a,
    };
    // Bit 7 is shifted out into the carry flag.
    let carry = is_negative(value);
    let res = value << 1;
    self.set_carry(carry);
    // BUG FIX: the zero and negative flags must reflect the shift result;
    // the old code derived the zero flag from the stale accumulator value,
    // which was wrong for the memory form and the accumulator form alike.
    self.set_zero(res == 0);
    self.set_negative(bit_get(res, 7));
    match addr {
        Some(addr) => {
            self.memory.store(addr, res);
        }
        None => self.a = res,
    };
}

/// Adds the relative value to the program counter to branch to a new
/// location if the carry flag is clear.
fn bcc(&mut self, addr: Address) {
    if !self.carry() {
        self.branch(addr);
    }
}

/// Adds the relative value to the program counter to branch to a new
/// location if the carry flag is set.
fn bcs(&mut self, addr: Address) {
    if self.carry() {
        self.branch(addr);
    }
}

/// Adds the relative value to the program counter to branch to a new
/// location if the zero flag is set.
fn beq(&mut self, addr: Address) {
    if self.zero() {
        self.branch(addr);
    }
}

/// Tests if one or more bits are set in a target memory location.
fn bit(&mut self, addr: Address) {
    let value = self.read_u8(addr);
    // AND the value of the mask pattern in A with the value in memory to
    // set or clear the zero flag.
    let zero = (self.a & value) == 0;
    self.set_zero(zero);
    // BUG FIX: the old code had the two bit positions swapped (and
    // contradicted its own comments): bit 6 of the operand is copied into
    // the V flag and bit 7 into the N flag.
    self.set_overflow(bit_get(value, 6));
    self.set_negative(bit_get(value, 7));
}

/// Adds the relative displacement to the program counter to branch, if
/// the negative flag is set.
fn bmi(&mut self, addr: Address) {
    if self.negative() {
        self.branch(addr);
    }
}

/// Adds the relative value to the program counter to branch to a new
/// location if the zero flag is not set.
fn bne(&mut self, addr: Address) {
    if !self.zero() {
        self.branch(addr);
    }
}

/// Adds the relative value to the program counter to branch to a new
/// location if the negative flag is clear.
fn bpl(&mut self, addr: Address) {
    if !self.negative() {
        self.branch(addr);
    }
}

/// Generates an interrupt request.
///
/// The program counter and processor status are pushed onto the stack, the
/// IRQ interrupt vector is read into the program counter, and the break
/// flag is set to `true`.
fn brk(&mut self) {
    // Push the program counter onto the stack, high byte first.
    let pc = self.pc;
    self.push((pc >> 8) as u8);
    self.push(pc as u8);
    // BUG FIX: set the break flag *before* pushing the status register so
    // the stacked copy carries B, matching 6502 behaviour; the old code set
    // it only afterwards, so the pushed status never had B set.
    self.set_break(true);
    // Push the processor status onto the stack.
    let p = self.p;
    self.push(p);
    // Load the IRQ interrupt vector into the PC.
    self.pc = self.read_u16(IRQ_VECTOR_ADDR);
}

/// Adds the relative value to the program counter to branch to a new
/// location if the overflow flag is clear.
fn bvc(&mut self, addr: Address) {
    if self.overflow() {
        return;
    }
    self.branch(addr);
}

/// Adds the relative displacement to the program counter to branch, if
/// the overflow flag is set.
fn bvs(&mut self, addr: Address) {
    if !self.overflow() {
        return;
    }
    self.branch(addr);
}

/// Clears the carry flag.
fn clc(&mut self) {
    self.set_carry(false);
}

/// Clears the decimal mode flag.
///
/// # Note
///
/// The state of the decimal flag is undefined when the CPU is powered up
/// and it is not reset when an interrupt is generated.
/// In both cases an explicit CLD should be issued to ensure the flag is
/// cleared before performing addition or subtraction.
fn cld(&mut self) {
    self.set_decimal_mode(false);
}

/// Clears the interrupt disable flag.
fn cli(&mut self) {
    self.set_irq_disable(false);
}

/// Clears the overflow flag.
fn clv(&mut self) {
    self.set_overflow(false);
}

/// Compares the accumulator against a value in memory, updating the zero
/// and carry flags accordingly.
fn cmp(&mut self, addr: Address) {
    let lhs = self.a;
    self.compare(lhs, addr);
}

/// Compares the X register against a value in memory, updating the zero
/// and carry flags accordingly.
fn cpx(&mut self, addr: Address) {
    let lhs = self.x;
    self.compare(lhs, addr);
}

/// Compares the Y register against a value in memory, updating the zero
/// and carry flags accordingly.
fn cpy(&mut self, addr: Address) {
    let lhs = self.y;
    self.compare(lhs, addr);
}

/// Decrements the byte at the specified memory location, updating the zero
/// and negative flags from the result.
fn dec(&mut self, addr: Address) {
    let res = self.read_u8(addr).wrapping_sub(1);
    self.set_zero(res == 0);
    self.set_negative(is_negative(res));
    self.write_u8(addr, res);
}

/// Subtracts one from the X register, setting the zero and negative flags
/// as appropriate.
fn dex(&mut self) {
    let res = self.x.wrapping_sub(1);
    self.set_zero(res == 0);
    self.set_negative(is_negative(res));
    self.x = res;
}

/// Subtracts one from the Y register, setting the zero and negative flags
/// as appropriate.
fn dey(&mut self) {
    let res = self.y.wrapping_sub(1);
    self.set_zero(res == 0);
    self.set_negative(is_negative(res));
    self.y = res;
}

/// Performs an exclusive OR on the accumulator and the byte at the
/// specified memory address, updating the zero and negative flags from
/// the result.
fn eor(&mut self, addr: Address) {
    let operand = self.read_u8(addr);
    let res = self.a ^ operand;
    self.a = res;
    self.set_zero(res == 0);
    self.set_negative(is_negative(res));
}

/// Increments the byte at the specified memory location, updating the zero
/// and negative flags from the result.
fn inc(&mut self, addr: Address) {
    let res = self.read_u8(addr).wrapping_add(1);
    self.set_zero(res == 0);
    self.set_negative(is_negative(res));
    self.write_u8(addr, res);
}

/// Adds one to the X register, setting the zero and negative flags as
/// appropriate.
fn inx(&mut self) {
    let res = self.x.wrapping_add(1);
    self.x = res;
    self.set_zero(res == 0);
    self.set_negative(is_negative(res));
}

/// Adds one to the Y register, setting the zero and negative flags as
/// appropriate.
fn iny(&mut self) {
    let res = self.y.wrapping_add(1);
    self.y = res;
    self.set_zero(res == 0);
    self.set_negative(is_negative(res));
}

/// Sets the program counter to the address specified.
///
/// # Note
///
// From: http://obelisk.me.uk/6502/reference.html#JMP
/// An original 6502 does not correctly fetch the target address if the
/// indirect vector falls on a page boundary (e.g. $xxFF where xx is any
/// value from $00 to $FF).
/// In this case it fetches the LSB from $xxFF as expected but takes the MSB
/// from $xx00.
/// This is fixed in some later chips like the 65SC02 so for compatibility
/// always ensure the indirect vector is not at the end of the page.
fn jmp(&mut self, addr: Address) {
    self.pc = addr;
}

/// Pushes the address (minus one) of the return location to the stack and
/// then sets the program counter to the target memory address.
fn jsr(&mut self, addr: Address) {
    // Push the return address (minus one), high byte first, so RTS can
    // restore it.
    let return_addr = self.pc.wrapping_sub(1);
    self.push((return_addr >> 8) as u8);
    self.push(return_addr as u8);
    // BUG FIX: `addr` is already the resolved jump target (exactly as in
    // `jmp` above); the old code dereferenced it a second time with
    // `read_u16`, jumping to whatever bytes happened to live at the target.
    self.pc = addr;
}

/// Loads a byte of memory into the accumulator, setting the zero and
/// negative flags as appropriate.
fn lda(&mut self, addr: Address) {
    // Written in the same shape as ldx/ldy for consistency.
    let value = self.read_u8(addr);
    self.a = value;
    self.set_zero(value == 0);
    self.set_negative(is_negative(value));
}

/// Loads a byte of memory into the X register, setting the zero and
/// negative flags as appropriate.
fn ldx(&mut self, addr: Address) {
    let value = self.read_u8(addr);
    self.x = value;
    self.set_zero(value == 0);
    self.set_negative(is_negative(value));
}

/// Loads a byte of memory into the Y register, setting the zero and
/// negative flags as appropriate.
fn ldy(&mut self, addr: Address) {
    let value = self.read_u8(addr);
    self.y = value;
    self.set_zero(value == 0);
    self.set_negative(is_negative(value));
}

/// Shift each of the bits in the accumulator one place to the right.
/// Sets the carry, zero, and negative flags as necessary.
fn lsr_acc(&mut self) {
    let a = self.a;
    self.a = self.lsr(a);
}

/// Shift each of the bits of the value at the specified address one place
/// to the right.
/// Sets the carry, zero, and negative flags as necessary.
fn lsr_mem(&mut self, addr: Address) {
    let value = self.read_u8(addr);
    let res = self.lsr(value);
    self.write_u8(addr, res);
}

/// Causes no changes to the processor (except the normal incrementing of
/// the program counter).
fn nop(&self) {}

/// Performs an inclusive OR on the contents of the accumulator and the byte
/// value at the specified memory location.
/// Sets the zero and negative flags as necessary.
fn ora(&mut self, addr: Address) {
    let operand = self.read_u8(addr);
    let res = self.a | operand;
    self.a = res;
    self.set_zero(res == 0);
    self.set_negative(is_negative(res));
}

/// Pushes a copy of the accumulator onto the stack.
fn pha(&mut self) {
    let acc = self.a;
    self.push(acc);
}

/// Pushes a copy of the status flags onto the stack.
fn php(&mut self) {
    let status = self.p;
    self.push(status);
}

/// Pops an 8-bit value from the stack into the accumulator, updating the
/// zero and negative flags from it.
fn pla(&mut self) {
    let value = self.pop();
    self.a = value;
    self.set_zero(value == 0);
    self.set_negative(is_negative(value));
}

/// Pops an 8-bit value from the stack into the processor status register.
fn plp(&mut self) {
    self.p = self.pop();
}

/// Shift each of the bits in the accumulator one place to the left
/// (see `rol`). Sets the carry, zero, and negative flags as necessary.
fn rol_acc(&mut self) {
    let acc = self.a;
    self.a = self.rol(acc);
}

/// Shift each of the bits in the value pointed at by the memory address one
/// place to the left (see `rol`).
/// Sets the carry, zero, and negative flags as necessary.
fn rol_mem(&mut self, addr: Address) {
    let before = self.read_u8(addr);
    let after = self.rol(before);
    self.write_u8(addr, after);
}

/// Shift each of the bits in the accumulator one place to the right
/// (see `ror`). Sets the carry, zero, and negative flags as necessary.
fn ror_acc(&mut self) {
    let acc = self.a;
    self.a = self.ror(acc);
}

/// Shift each of the bits in the value pointed at by the memory address one
/// place to the right (see `ror`).
/// Sets the carry, zero, and negative flags as necessary.
fn ror_mem(&mut self, addr: Address) {
    let before = self.read_u8(addr);
    let after = self.ror(before);
    self.write_u8(addr, after);
}

/// Returns from an interrupt: pops the processor status, then the program
/// counter (low byte first), from the stack.
fn rti(&mut self) {
    self.p = self.pop();
    let lo = u16::from(self.pop());
    let hi = u16::from(self.pop());
    self.pc = (hi << 8) | lo;
}

/// Returns from a subroutine.
///
/// Pulls the program counter from the stack.
fn rts(&mut self) {
    // Pop the return address, low byte first.
    let mut pc = u16::from(self.pop());
    pc |= u16::from(self.pop()) << 8;
    // BUG FIX: `jsr` pushes (return address - 1), so RTS must add one to
    // resume at the instruction following the call. The old code subtracted
    // one, landing two bytes short of the correct resume point.
    self.pc = pc.wrapping_add(1);
}

/// Subtracts the contents of a memory location from the accumulator with
/// the not of the carry bit.
/// If overflow occurs, the carry bit is clear; this enables multiple byte
/// subtraction to be performed.
fn sbc(&mut self, addr: Address) {
    let arg = self.read_u8(addr);
    // SBC(arg) is equivalent to ADC(!arg)
    self.adc_inner(!arg);
}

/// Set the carry flag to one.
fn sec(&mut self) {
    self.set_carry(true);
}

/// Set the decimal mode flag to one.
fn sed(&mut self) {
    self.set_decimal_mode(true);
}

/// Set the interrupt disable flag to one.
fn sei(&mut self) {
    self.set_irq_disable(true);
}

/// Stores the contents of the accumulator into memory.
fn sta(&mut self, addr: Address) {
    let a = self.a;
    self.write_u8(addr, a);
}

/// Stores the contents of the X register into memory.
fn stx(&mut self, addr: Address) {
    let x = self.x;
    self.write_u8(addr, x);
}

/// Stores the contents of the Y register into memory.
fn sty(&mut self, addr: Address) {
    let y = self.y;
    self.write_u8(addr, y);
}

/// Copies the contents of the accumulator into the X register.
/// Sets the zero and negative flags as appropriate.
fn tax(&mut self) {
    self.x = self.a;
    let x = self.x;
    self.set_zero(x == 0);
    self.set_negative(is_negative(x));
}

/// Copies the contents of the accumulator into the Y register.
/// Sets the zero and negative flags as appropriate.
fn tay(&mut self) {
    self.y = self.a;
    let y = self.y;
    self.set_zero(y == 0);
    self.set_negative(is_negative(y));
}

/// Copies the contents of the stack register into the X register.
/// Sets the zero and negative flags as appropriate.
fn tsx(&mut self) {
    self.x = self.sp;
    let x = self.x;
    self.set_zero(x == 0);
    self.set_negative(is_negative(x));
}

/// Copies the contents of the X register into the accumulator.
/// Sets the zero and negative flags as appropriate.
fn txa(&mut self) { self.a = self.x; let a = self.a; self.set_zero(a == 0); self.set_negative(is_negative(a)); } /// Copies the contents of the X register into the stack register. /// Sets the zero and negative flags as appropriate. fn txs(&mut self) { self.sp = self.x; let sp = self.sp; self.set_zero(sp == 0); self.set_negative(is_negative(sp)); } /// Copies the contents of the Y register into the accumulator. /// Sets the zero and negative flags as appropriate. fn tya(&mut self) { self.a = self.y; let a = self.a; self.set_zero(a == 0); self.set_negative(is_negative(a)); } } impl Default for CPU<NROM> { fn default() -> Self { Self::new(NROM::default()) } } #[cfg(test)] mod tests { use super::{bit_get, bit_set, CPU, CPU_STACK_POINTER_INITIAL_VALUE}; use memory::Memory; use opcode::Opcode::*; #[test] fn test_status_register() { let mut cpu = CPU::default(); cpu.set_carry(false); assert!(!cpu.carry()); cpu.set_carry(true); assert!(cpu.carry()); cpu.set_zero(false); assert!(!cpu.zero()); cpu.set_zero(true); assert!(cpu.zero()); cpu.set_break(false); assert!(!cpu.break_flag()); cpu.set_break(true); assert!(cpu.break_flag()); cpu.set_overflow(false); assert!(!cpu.overflow()); cpu.set_overflow(true); assert!(cpu.overflow()); cpu.set_negative(false); assert!(!cpu.negative()); cpu.set_negative(true); assert!(cpu.negative()); } #[test] fn test_stack_push() { let mut cpu = CPU::default(); cpu.push(0xAA); cpu.push(0xBB); assert_eq!(cpu.read_u8(0x01FD), 0xAA); assert_eq!(cpu.read_u8(0x01FC), 0xBB); assert_eq!(cpu.sp, CPU_STACK_POINTER_INITIAL_VALUE.wrapping_sub(2)); } #[test] fn test_stack_pop() { let mut cpu = CPU::default(); cpu.push(0xAA); assert_eq!(cpu.pop(), 0xAA); assert_eq!(cpu.sp, CPU_STACK_POINTER_INITIAL_VALUE); } #[test] fn test_stack_peek() { let mut cpu = CPU::default(); cpu.push(0xAA); cpu.push(0xBB); assert_eq!(cpu.peek(0), 0xBB); assert_eq!(cpu.peek(1), 0xAA); } #[test] fn test_stack_wrapping_behaviour() { let mut cpu = CPU::default(); // Pop enough times to 
reach stack address 0x100. cpu.pop(); cpu.pop(); cpu.pop(); assert_eq!(cpu.sp, 0x00); cpu.push(0xAA); assert_eq!(cpu.sp, 0xFF); assert_eq!(cpu.read_u8(0x0100), 0xAA); cpu.push(0xBB); assert_eq!(cpu.peek(0), 0xBB); assert_eq!(cpu.peek(1), 0xAA); } #[test] fn test_read_u8() { let mut cpu = CPU::default(); cpu.memory.store(0x1000, 0xFF); assert_eq!(cpu.read_u8(0x1000), 0xFF); } #[test] fn test_read_u16() { let mut cpu = CPU::default(); cpu.memory.store(0x1000, 0xCD); cpu.memory.store(0x1001, 0xAB); assert_eq!(cpu.read_u16(0x1000), 0xABCD); } #[test] fn test_write_u8() { let mut cpu = CPU::default(); cpu.write_u8(0x0010, 0xAA); assert_eq!(cpu.memory.fetch(0x0010), 0xAA); } #[test] fn test_addr_imm() { let mut cpu = CPU::default(); cpu.pc = 0x00FF; assert_eq!(cpu.addr_imm(), 0x00FF); } #[test] fn test_addr_zero_page() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, 0x10); assert_eq!(cpu.addr_zero_page(), 0x0010); } #[test] fn test_addr_zero_page_x() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, 0x10); cpu.x = 0x05; assert_eq!(cpu.addr_zero_page_x(), 0x0015); } #[test] fn test_addr_abs() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, 0x00); cpu.memory.store(0x0001, 0x02); assert_eq!(cpu.addr_abs(), 0x0200); } #[test] fn test_addr_abs_x() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, 0x00); cpu.memory.store(0x0001, 0x02); cpu.x = 0x05; assert_eq!(cpu.addr_abs_x(), 0x0205); } #[test] fn test_addr_abs_y() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, 0x00); cpu.memory.store(0x0001, 0x02); cpu.y = 0x05; assert_eq!(cpu.addr_abs_y(), 0x0205); } #[test] fn test_addr_ind_x() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, 0x04); cpu.memory.store(0x0034, 0xCD); cpu.memory.store(0x0035, 0xAB); cpu.x = 0x30; assert_eq!(cpu.addr_ind_x(), 0xABCD); } #[test] fn test_addr_ind_y() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, 0x0A); cpu.memory.store(0x000A, 0xEF); cpu.memory.store(0x000B, 0xCD); cpu.y = 0x01; 
assert_eq!(cpu.addr_ind_y(), 0xCDF0); } #[test] fn test_adc() { let mut cpu = CPU::default(); // 1 + 1 = 2, returns C = 0, V = 0 cpu.set_zero(false); cpu.set_carry(false); cpu.set_overflow(false); cpu.memory.store(0x0000, 0x01); cpu.a = 0x01; cpu.adc(0x0000); assert_eq!(cpu.a, 0x02); assert!(!cpu.zero()); assert!(!cpu.carry()); assert!(!cpu.overflow()); // 1 + -1 = 0, returns C = 1, V = 0 cpu.set_zero(false); cpu.set_carry(false); cpu.set_overflow(false); cpu.memory.store(0x0000, 0xFF); cpu.a = 0x01; cpu.adc(0x0000); assert_eq!(cpu.a, 0x00); assert!(cpu.zero()); assert!(cpu.carry()); assert!(!cpu.overflow()); // 127 + 1 = 128, returns C = 0, V = 1 cpu.set_zero(false); cpu.set_carry(false); cpu.set_overflow(false); cpu.memory.store(0x0000, 0x01); cpu.a = 0x7F; cpu.adc(0x0000); assert_eq!(cpu.a, 0x80); assert!(!cpu.zero()); assert!(!cpu.carry()); assert!(cpu.overflow()); // -128 + -1 = -129, returns C = 1, V = 1 cpu.set_zero(false); cpu.set_carry(false); cpu.set_overflow(false); cpu.memory.store(0x0000, 0xFF); cpu.a = 0x80; cpu.adc(0x0000); assert_eq!(cpu.a, 0x7F); assert!(!cpu.zero()); assert!(cpu.carry()); assert!(cpu.overflow()); } #[test] fn test_and() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, 0xA5); cpu.a = 0xFF; cpu.and(0x0000); assert_eq!(cpu.a, 0xA5); assert!(!cpu.zero()); assert!(cpu.negative()); cpu.a = 0x05; cpu.and(0x0000); assert_eq!(cpu.a, 0x05); assert!(!cpu.zero()); assert!(!cpu.negative()); cpu.a = 0x00; cpu.and(0x0000); assert_eq!(cpu.a, 0x00); assert!(cpu.zero()); assert!(!cpu.negative()); } #[test] fn test_asl() { let mut cpu = CPU::default(); cpu.a = 0xFF; cpu.asl(None); assert_eq!(cpu.a, 0xFE); assert!(cpu.carry()); assert!(!cpu.zero()); assert!(cpu.negative()); cpu.memory.store(0x0000, 0x0E); cpu.asl(Some(0x0000)); assert_eq!(cpu.memory.fetch(0x0000), 0x1C); assert!(!cpu.carry()); assert!(!cpu.zero()); assert!(!cpu.negative()); cpu.memory.store(0x0000, 0x80); cpu.asl(Some(0x0000)); assert_eq!(cpu.memory.fetch(0x0000), 0x00); 
assert!(cpu.carry()); assert!(!cpu.zero()); assert!(!cpu.negative()); } #[test] fn test_bcc() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, BCC as u8); cpu.memory.store(0x0001, 0x04); cpu.set_carry(false); cpu.step(); assert_eq!(cpu.pc, 4); cpu.pc = 0; cpu.set_carry(true); cpu.step(); assert_eq!(cpu.pc, 2); } #[test] fn test_bcs() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, BCS as u8); cpu.memory.store(0x0001, 0x04); cpu.set_carry(true); cpu.step(); assert_eq!(cpu.pc, 4); cpu.pc = 0; cpu.set_carry(false); cpu.step(); assert_eq!(cpu.pc, 2); } #[test] fn test_beq() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, BEQ as u8); cpu.memory.store(0x0001, 0x04); cpu.set_zero(true); cpu.step(); assert_eq!(cpu.pc, 4); cpu.pc = 0; cpu.set_zero(false); cpu.step(); assert_eq!(cpu.pc, 2); } #[test] fn test_bit() { let mut cpu = CPU::default(); cpu.a = 0xEA; cpu.memory.store(0x0000, 0xEA); cpu.bit(0x0000); assert!(!cpu.zero()); assert!(cpu.overflow()); assert!(cpu.negative()); cpu.memory.store(0x0000, 0x28); cpu.bit(0x0000); assert!(!cpu.zero()); assert!(!cpu.overflow()); assert!(!cpu.negative()); cpu.memory.store(0x0000, 0x00); cpu.bit(0x0000); assert!(cpu.zero()); assert!(!cpu.overflow()); assert!(!cpu.negative()); } #[test] fn test_bmi() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, BMI as u8); cpu.memory.store(0x0001, 0x04); cpu.set_negative(true); cpu.step(); assert_eq!(cpu.pc, 4); cpu.pc = 0; cpu.set_negative(false); cpu.step(); assert_eq!(cpu.pc, 2); } #[test] fn test_bne() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, BNE as u8); cpu.memory.store(0x0001, 0x04); cpu.set_zero(false); cpu.step(); assert_eq!(cpu.pc, 4); cpu.pc = 0; cpu.set_zero(true); cpu.step(); assert_eq!(cpu.pc, 2); } #[test] fn test_bpl() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, BPL as u8); cpu.memory.store(0x0001, 0x04); cpu.set_negative(false); cpu.step(); assert_eq!(cpu.pc, 4); cpu.pc = 0; cpu.set_negative(true); cpu.step(); 
assert_eq!(cpu.pc, 2); } // FIXME: uncomment this when reads to IRQ_VECTOR_ADDR are implemented. /* #[test] fn test_brk() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, BRK as u8); cpu.write_u16(IRQ_VECTOR_ADDR, 0x0400); cpu.set_break(false); let pc = cpu.pc; let status = cpu.p; assert_eq!(cpu.break_flag(), false); cpu.step(); assert_eq!(cpu.peek(0), status); assert_eq!(cpu.peek(1), pc as u8 + 1); // PC has since been incremented assert_eq!(cpu.peek(2), (pc >> 8) as u8); assert_eq!(cpu.pc, 0x0400); assert_eq!(cpu.break_flag(), true); } */ #[test] fn test_bvc() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, BVC as u8); cpu.memory.store(0x0001, 0x04); cpu.set_overflow(false); cpu.step(); assert_eq!(cpu.pc, 4); cpu.pc = 0; cpu.set_overflow(true); cpu.step(); assert_eq!(cpu.pc, 2); } #[test] fn test_bvs() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, BVS as u8); cpu.memory.store(0x0001, 0x04); cpu.set_overflow(true); cpu.step(); assert_eq!(cpu.pc, 4); cpu.pc = 0; cpu.set_overflow(false); cpu.step(); assert_eq!(cpu.pc, 2); } #[test] fn test_clc() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, CLC as u8); cpu.set_carry(true); assert!(cpu.carry()); cpu.step(); assert!(!cpu.carry()); } #[test] fn test_cld() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, CLD as u8); cpu.set_decimal_mode(true); assert!(cpu.decimal_mode()); cpu.step(); assert!(!cpu.decimal_mode()); } #[test] fn test_cli() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, CLI as u8); cpu.set_irq_disable(true); assert!(cpu.irq_disable()); cpu.step(); assert!(!cpu.irq_disable()); } #[test] fn test_clv() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, CLV as u8); cpu.set_overflow(true); assert!(cpu.overflow()); cpu.step(); assert!(!cpu.overflow()); } #[test] fn test_cmp() { let mut cpu = CPU::default(); cpu.a = 0x05; // Test that the carry, zero, and negative flags get set correctly when // A < M. 
cpu.memory.store(0x0000, 0x06); cpu.set_carry(true); cpu.set_zero(true); cpu.set_negative(false); cpu.cmp(0x0000); assert!(!cpu.carry()); assert!(!cpu.zero()); assert!(cpu.negative()); // Test that the carry, zero, and negative flags get set correctly when // A = M. cpu.memory.store(0x0000, 0x05); cpu.pc = 0x0000; cpu.set_carry(false); cpu.set_zero(false); cpu.set_negative(true); cpu.cmp(0x0000); assert!(cpu.carry()); assert!(cpu.zero()); assert!(!cpu.negative()); // Test that the carry, zero, and negative flags get set correctly when // A > M. cpu.memory.store(0x0000, 0x04); cpu.pc = 0x0000; cpu.set_carry(false); cpu.set_zero(true); cpu.set_negative(true); cpu.cmp(0x0000); assert!(cpu.carry()); assert!(!cpu.zero()); assert!(!cpu.negative()); } #[test] fn test_cpx() { let mut cpu = CPU::default(); cpu.x = 0x05; // Test that the carry, zero, and negative flags get set correctly when // A < M. cpu.memory.store(0x0000, 0x06); cpu.set_carry(true); cpu.set_zero(true); cpu.set_negative(false); cpu.cpx(0x0000); assert!(!cpu.carry()); assert!(!cpu.zero()); assert!(cpu.negative()); // Test that the carry, zero, and negative flags get set correctly when // A = M. cpu.memory.store(0x0000, 0x05); cpu.pc = 0x0000; cpu.set_carry(false); cpu.set_zero(false); cpu.set_negative(true); cpu.cpx(0x0000); assert!(cpu.carry()); assert!(cpu.zero()); assert!(!cpu.negative()); // Test that the carry, zero, and negative flags get set correctly when // A > M. cpu.memory.store(0x0000, 0x04); cpu.pc = 0x0000; cpu.set_carry(false); cpu.set_zero(true); cpu.set_negative(true); cpu.cpx(0x0000); assert!(cpu.carry()); assert!(!cpu.zero()); assert!(!cpu.negative()); } #[test] fn test_cpy() { let mut cpu = CPU::default(); cpu.y = 0x05; // Test that the carry, zero, and negative flags get set correctly when // A < M. 
cpu.memory.store(0x0000, 0x06); cpu.set_carry(true); cpu.set_zero(true); cpu.set_negative(false); cpu.cpy(0x0000); assert!(!cpu.carry()); assert!(!cpu.zero()); assert!(cpu.negative()); // Test that the carry, zero, and negative flags get set correctly when // A = M. cpu.memory.store(0x0000, 0x05); cpu.pc = 0x0000; cpu.set_carry(false); cpu.set_zero(false); cpu.set_negative(true); cpu.cpy(0x0000); assert!(cpu.carry()); assert!(cpu.zero()); assert!(!cpu.negative()); // Test that the carry, zero, and negative flags get set correctly when // A > M. cpu.memory.store(0x0000, 0x04); cpu.pc = 0x0000; cpu.set_carry(false); cpu.set_zero(true); cpu.set_negative(true); cpu.cpy(0x0000); assert!(cpu.carry()); assert!(!cpu.zero()); assert!(!cpu.negative()); } #[test] fn test_dec() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, 0xFF); cpu.set_zero(true); cpu.set_negative(false); cpu.dec(0x0000); assert_eq!(cpu.memory.fetch(0x0000), 0xFE); assert!(!cpu.zero()); assert!(cpu.negative()); cpu.memory.store(0x0000, 0x01); cpu.set_zero(false); cpu.set_negative(true); cpu.dec(0x0000); assert_eq!(cpu.memory.fetch(0x0000), 0x00); assert!(cpu.zero()); assert!(!cpu.negative()); } #[test] fn test_dex() { let mut cpu = CPU::default(); cpu.x = 0xFF; cpu.set_zero(true); cpu.set_negative(false); cpu.dex(); assert_eq!(cpu.x, 0xFE); assert!(!cpu.zero()); assert!(cpu.negative()); cpu.x = 0x01; cpu.set_zero(false); cpu.set_negative(true); cpu.dex(); assert_eq!(cpu.x, 0x00); assert!(cpu.zero()); assert!(!cpu.negative()); } #[test] fn test_dey() { let mut cpu = CPU::default(); cpu.y = 0xFF; cpu.set_zero(true); cpu.set_negative(false); cpu.dey(); assert_eq!(cpu.y, 0xFE); assert!(!cpu.zero()); assert!(cpu.negative()); cpu.y = 0x01; cpu.set_zero(false); cpu.set_negative(true); cpu.dey(); assert_eq!(cpu.y, 0x00); assert!(cpu.zero()); assert!(!cpu.negative()); } #[test] fn test_eor() { let mut cpu = CPU::default(); cpu.a = 0xFF; cpu.memory.store(0x0000, 0x0F); cpu.set_zero(true); 
cpu.set_negative(false); cpu.eor(0x0000); assert_eq!(cpu.a, 0xF0); assert!(!cpu.zero()); assert!(cpu.negative()); cpu.a = 0xFF; cpu.memory.store(0x0000, 0xFF); cpu.set_zero(false); cpu.set_negative(true); cpu.eor(0x0000); assert_eq!(cpu.a, 0x00); assert!(cpu.zero()); assert!(!cpu.negative()); } #[test] fn test_inc() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, 0xFF); cpu.set_zero(false); cpu.set_negative(true); cpu.inc(0x0000); assert_eq!(cpu.memory.fetch(0x0000), 0x00); assert!(cpu.zero()); assert!(!cpu.negative()); cpu.memory.store(0x0000, 0xFE); cpu.set_zero(true); cpu.set_negative(false); cpu.inc(0x0000); assert_eq!(cpu.memory.fetch(0x0000), 0xFF); assert!(!cpu.zero()); assert!(cpu.negative()); } #[test] fn test_inx() { let mut cpu = CPU::default(); cpu.x = 0xFF; cpu.set_zero(false); cpu.set_negative(true); cpu.inx(); assert_eq!(cpu.x, 0x00); assert!(cpu.zero()); assert!(!cpu.negative()); cpu.x = 0xFE; cpu.set_zero(true); cpu.set_negative(false); cpu.inx(); assert_eq!(cpu.x, 0xFF); assert!(!cpu.zero()); assert!(cpu.negative()); } #[test] fn test_iny() { let mut cpu = CPU::default(); cpu.y = 0xFF; cpu.set_zero(false); cpu.set_negative(true); cpu.iny(); assert_eq!(cpu.y, 0x00); assert!(cpu.zero()); assert!(!cpu.negative()); cpu.y = 0xFE; cpu.set_zero(true); cpu.set_negative(false); cpu.iny(); assert_eq!(cpu.y, 0xFF); assert!(!cpu.zero()); assert!(cpu.negative()); } #[test] fn test_jmp_abs() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, JMP_ABS as u8); cpu.memory.store(0x0001, 0x00); cpu.memory.store(0x0002, 0x01); cpu.step(); assert_eq!(cpu.pc, 0x0100); } #[test] fn test_jmp_ind() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, JMP_IND as u8); cpu.memory.store(0x0001, 0x00); cpu.memory.store(0x0002, 0x01); cpu.memory.store(0x0100, 0x03); cpu.memory.store(0x0101, 0x02); cpu.step(); assert_eq!(cpu.pc, 0x0203); } #[test] fn test_jsr() { let mut cpu = CPU::default(); cpu.pc = 0x0220; cpu.memory.store(0x0220, 0xBB); 
cpu.memory.store(0x0221, 0xAA); assert_eq!(cpu.pc, 0x0220); cpu.jsr(0x0220); assert_eq!(cpu.pc, 0xAABB); assert_eq!(cpu.peek(0), 0x1F); assert_eq!(cpu.peek(1), 0x02); } #[test] fn test_lda() { let mut cpu = CPU::default(); cpu.memory.store(0x0200, 0xFF); assert_eq!(cpu.a, 0x00); cpu.lda(0x0200); assert_eq!(cpu.a, 0xFF); assert!(!cpu.zero()); // Test that the zero flag gets set correctly. cpu.memory.store(0x0000, 0x00); cpu.memory.store(0x0001, 0x01); cpu.lda(0x0000); assert!(cpu.zero()); cpu.lda(0x0001); assert!(!cpu.zero()); // Test that the negative flag gets set correctly. cpu.memory.store(0x0000, 0x00); cpu.memory.store(0x0001, 0x80); cpu.lda(0x0000); assert!(!cpu.negative()); cpu.lda(0x0001); assert!(cpu.negative()); } #[test] fn test_lda_immediate() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, LDA_IMM as u8); cpu.memory.store(0x0001, 0x17); // #$17 cpu.step(); assert_eq!(cpu.a, 0x17); } #[test] fn test_lda_zero_page() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, LDA_ZPAGE as u8); cpu.memory.store(0x0001, 0x02); // $02 cpu.memory.store(0x0002, 0x03); cpu.step(); assert_eq!(cpu.a, 0x03); } #[test] fn test_lda_zero_page_x() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, LDA_ZPAGEX as u8); cpu.memory.store(0x0001, 0x02); // $02 cpu.memory.store(0x0005, 0xAB); cpu.x = 0x03; cpu.step(); assert_eq!(cpu.a, 0xAB); } #[test] fn test_lda_absolute() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, LDA_ABS as u8); cpu.memory.store(0x0001, 0x14); // $0314 cpu.memory.store(0x0002, 0x03); cpu.memory.store(0x0314, 0x31); cpu.step(); assert_eq!(cpu.a, 0x31); } #[test] fn test_lda_absolute_x() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, LDA_ABSX as u8); cpu.memory.store(0x0001, 0x00); // $0200 cpu.memory.store(0x0002, 0x02); cpu.memory.store(0x20A, 0xFF); cpu.x = 0x0A; cpu.step(); assert_eq!(cpu.a, 0xFF); } #[test] fn test_lda_absolute_y() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, LDA_ABSY as u8); 
cpu.memory.store(0x0001, 0x00); // $0200 cpu.memory.store(0x0002, 0x02); cpu.memory.store(0x020A, 0xFF); cpu.y = 0x0A; cpu.step(); assert_eq!(cpu.a, 0xFF); } #[test] fn test_lda_ind_x() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, LDA_INDX as u8); cpu.memory.store(0x0001, 0x80); // $0080 cpu.memory.store(0x008C, 0x3F); cpu.memory.store(0x008D, 0x01); cpu.memory.store(0x013F, 0x45); cpu.x = 0x0C; cpu.step(); assert_eq!(cpu.a, 0x45); } #[test] fn test_lda_ind_y() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, LDA_INDY as u8); cpu.memory.store(0x0001, 0x14); // $0014 cpu.memory.store(0x0014, 0x00); cpu.memory.store(0x0015, 0x01); cpu.memory.store(0x0128, 0x0B); cpu.y = 0x28; cpu.step(); assert_eq!(cpu.a, 0x0B); } #[test] fn test_ldx() { let mut cpu = CPU::default(); cpu.memory.store(0x0200, 0xFF); cpu.set_zero(true); cpu.set_negative(false); assert_eq!(cpu.x, 0x00); cpu.ldx(0x0200); assert_eq!(cpu.x, 0xFF); assert!(!cpu.zero()); assert!(cpu.negative()); cpu.x = 0xFF; cpu.memory.store(0x0200, 0x00); cpu.set_zero(false); cpu.set_negative(true); assert_eq!(cpu.x, 0xFF); cpu.ldx(0x0200); assert_eq!(cpu.x, 0x00); assert!(cpu.zero()); assert!(!cpu.negative()); } #[test] fn test_ldy() { let mut cpu = CPU::default(); cpu.memory.store(0x0200, 0xFF); cpu.set_zero(true); cpu.set_negative(false); assert_eq!(cpu.y, 0x00); cpu.ldy(0x0200); assert_eq!(cpu.y, 0xFF); assert!(!cpu.zero()); assert!(cpu.negative()); cpu.y = 0xFF; cpu.memory.store(0x0200, 0x00); cpu.set_zero(false); cpu.set_negative(true); assert_eq!(cpu.y, 0xFF); cpu.ldy(0x0200); assert_eq!(cpu.y, 0x00); assert!(cpu.zero()); assert!(!cpu.negative()); } #[test] fn test_lsr() { let mut cpu = CPU::default(); cpu.a = 0xFF; cpu.set_carry(false); cpu.set_zero(true); cpu.set_negative(true); assert_eq!(cpu.a, 0xFF); cpu.lsr_acc(); assert_eq!(cpu.a, 0x7F); assert!(cpu.carry()); assert!(!cpu.zero()); assert!(!cpu.negative()); cpu.a = 0x00; cpu.set_carry(true); cpu.set_zero(false); assert_eq!(cpu.a, 0x00); 
cpu.lsr_acc(); assert_eq!(cpu.a, 0x00); assert!(!cpu.carry()); assert!(cpu.zero()); } #[test] fn test_nop() { let mut cpu = CPU::default(); cpu.memory.store(0x0200, NOP as u8); cpu.pc = 0x0200; let mut cpu2 = cpu.clone(); cpu2.step(); assert_eq!(cpu2.pc, 0x0201); cpu2.pc -= 1; assert_eq!(cpu.a, cpu2.a); assert_eq!(cpu.x, cpu2.x); assert_eq!(cpu.y, cpu2.y); assert_eq!(cpu.p, cpu2.p); assert_eq!(cpu.pc, cpu2.pc); assert_eq!(cpu.sp, cpu2.sp); } #[test] fn test_ora() { let mut cpu = CPU::default(); cpu.a = 0xCF; cpu.memory.store(0x0000, 0x3F); cpu.set_zero(true); cpu.set_negative(false); cpu.ora(0x0000); assert_eq!(cpu.a, 0xFF); assert!(!cpu.zero()); assert!(cpu.negative()); cpu.a = 0x00; cpu.memory.store(0x0000, 0x00); cpu.set_zero(false); cpu.set_negative(true); cpu.eor(0x0000); assert_eq!(cpu.a, 0x00); assert!(cpu.zero()); assert!(!cpu.negative()); } #[test] fn test_pha() { let mut cpu = CPU::default(); cpu.a = 0xFF; assert_eq!(cpu.peek(0), 0x00); cpu.pha(); assert_eq!(cpu.peek(0), 0xFF); } #[test] fn test_php() { let mut cpu = CPU::default(); cpu.p = 0xFF; assert_eq!(cpu.peek(0), 0x00); cpu.php(); assert_eq!(cpu.peek(0), 0xFF); } #[test] fn test_pla() { let mut cpu = CPU::default(); cpu.a = 0x00; cpu.push(0xFF); assert_eq!(cpu.a, 0x00); cpu.pla(); assert_eq!(cpu.a, 0xFF); } #[test] fn test_plp() { let mut cpu = CPU::default(); cpu.p = 0x00; cpu.push(0xFF); assert_eq!(cpu.p, 0x00); cpu.plp(); assert_eq!(cpu.p, 0xFF); } #[test] fn test_rol() { let mut cpu = CPU::default(); cpu.a = 0xFF; cpu.set_carry(false); cpu.set_zero(true); cpu.set_negative(false); assert_eq!(cpu.a, 0xFF); cpu.rol_acc(); assert_eq!(cpu.a, 0xFE); assert!(cpu.carry()); assert!(!cpu.zero()); assert!(cpu.negative()); cpu.a = 0x80; cpu.set_carry(true); cpu.set_zero(true); assert_eq!(cpu.a, 0x80); cpu.rol_acc(); assert_eq!(cpu.a, 0x01); assert!(cpu.carry()); assert!(!cpu.zero()); } #[test] fn test_ror() { let mut cpu = CPU::default(); cpu.a = 0xFF; cpu.set_carry(false); cpu.set_zero(true); 
cpu.set_negative(true); assert_eq!(cpu.a, 0xFF); cpu.ror_acc(); assert_eq!(cpu.a, 0x7F); assert!(cpu.carry()); assert!(!cpu.zero()); assert!(!cpu.negative()); cpu.a = 0x00; cpu.set_carry(true); cpu.set_zero(true); assert_eq!(cpu.a, 0x00); cpu.ror_acc(); assert_eq!(cpu.a, 0x80); assert!(!cpu.carry()); assert!(!cpu.zero()); } // FIXME: uncomment this when reads to IRQ_VECTOR_ADDR are implemented. /* #[test] fn test_rti() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, BRK as u8); cpu.memory.store(0x0100, RTI as u8); cpu.write_u16(IRQ_VECTOR_ADDR, 0x0100); cpu.set_break(false); cpu.set_carry(false); assert_eq!(cpu.break_flag(), false); assert!(!cpu.carry()); cpu.step(); assert_eq!(cpu.pc, 0x0100); assert_eq!(cpu.break_flag(), true); cpu.set_carry(true); assert!(cpu.carry()); cpu.step(); assert_eq!(cpu.pc, 0x0001); assert!(!cpu.carry()); } */ #[test] fn test_rts() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, JMP_ABS as u8); cpu.memory.store(0x0001, 0x00); cpu.memory.store(0x0002, 0x01); cpu.memory.store(0x0100, RTS as u8); cpu.step(); assert_eq!(cpu.pc, 0x0100); cpu.step(); assert_eq!(cpu.pc, 0xFFFF); } #[test] fn test_sbc() { let mut cpu = CPU::default(); // 8 - 4 - !1 = 4, returns C = 1, V = 0 cpu.set_zero(true); cpu.set_carry(true); cpu.set_overflow(true); cpu.memory.store(0x0000, 0x04); cpu.a = 0x08; cpu.sbc(0x0000); assert_eq!(cpu.a, 0x04); assert!(!cpu.zero()); assert!(cpu.carry()); assert!(!cpu.overflow()); // 8 - 4 - !0 = 3, returns C = 1, V = 0 cpu.set_zero(true); cpu.set_carry(false); cpu.set_overflow(true); cpu.memory.store(0x0000, 0x04); cpu.a = 0x08; cpu.sbc(0x0000); assert_eq!(cpu.a, 0x03); assert!(!cpu.zero()); assert!(cpu.carry()); assert!(!cpu.overflow()); // 4 - 5 - !1 = 255, returns C = 0, V = 0 cpu.set_zero(true); cpu.set_carry(true); cpu.set_overflow(true); cpu.memory.store(0x0000, 0x05); cpu.a = 0x04; cpu.sbc(0x0000); assert_eq!(cpu.a, 0xFF); assert!(!cpu.zero()); assert!(!cpu.carry()); assert!(!cpu.overflow()); // 4 - 5 - !0 
= 254, returns C = 0, V = 0 cpu.set_zero(true); cpu.set_carry(false); cpu.set_overflow(true); cpu.memory.store(0x0000, 0x05); cpu.a = 0x04; cpu.sbc(0x0000); assert_eq!(cpu.a, 0xFE); assert!(!cpu.zero()); assert!(!cpu.carry()); assert!(!cpu.overflow()); // 127 - (-1) - !1 = +128, returns C = 0, V = 1 cpu.set_zero(true); cpu.set_carry(true); cpu.set_overflow(false); cpu.memory.store(0x0000, 0xFF); cpu.a = 0x7F; cpu.sbc(0x0000); assert_eq!(cpu.a, 0x80); assert!(!cpu.zero()); assert!(!cpu.carry()); assert!(cpu.overflow()); } #[test] fn test_sec() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, SEC as u8); cpu.set_carry(false); assert!(!cpu.carry()); cpu.step(); assert!(cpu.carry()); } #[test] fn test_sed() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, SED as u8); cpu.set_decimal_mode(false); assert!(!cpu.decimal_mode()); cpu.step(); assert!(cpu.decimal_mode()); } #[test] fn test_sei() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, SEI as u8); cpu.set_irq_disable(false); assert!(!cpu.irq_disable()); cpu.step(); assert!(cpu.irq_disable()); } #[test] fn test_sta() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, STA_ABS as u8); cpu.memory.store(0x0001, 0x00); cpu.memory.store(0x0002, 0x02); cpu.a = 0xAB; assert_eq!(cpu.memory.fetch(0x0200), 0x00); cpu.step(); assert_eq!(cpu.memory.fetch(0x0200), 0xAB); } #[test] fn test_stx() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, STX_ABS as u8); cpu.memory.store(0x0001, 0x00); cpu.memory.store(0x0002, 0x02); cpu.x = 0xAB; assert_eq!(cpu.memory.fetch(0x0200), 0x00); cpu.step(); assert_eq!(cpu.memory.fetch(0x0200), 0xAB); } #[test] fn test_sty() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, STY_ABS as u8); cpu.memory.store(0x0001, 0x00); cpu.memory.store(0x0002, 0x02); cpu.y = 0xAB; assert_eq!(cpu.memory.fetch(0x0200), 0x00); cpu.step(); assert_eq!(cpu.memory.fetch(0x0200), 0xAB); } #[test] fn test_tax() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, TAX as u8); 
cpu.memory.store(0x0001, 0x00); cpu.memory.store(0x0002, 0x02); cpu.a = 0xAB; cpu.set_zero(true); cpu.set_negative(false); assert_eq!(cpu.x, 0x00); cpu.step(); assert_eq!(cpu.x, 0xAB); assert!(!cpu.zero()); assert!(cpu.negative()); } #[test] fn test_tay() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, TAY as u8); cpu.memory.store(0x0001, 0x00); cpu.memory.store(0x0002, 0x02); cpu.a = 0xAB; cpu.set_zero(true); cpu.set_negative(false); assert_eq!(cpu.y, 0x00); cpu.step(); assert_eq!(cpu.y, 0xAB); assert!(!cpu.zero()); assert!(cpu.negative()); } #[test] fn test_tsx() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, TSX as u8); cpu.memory.store(0x0001, 0x00); cpu.memory.store(0x0002, 0x02); cpu.sp = 0xAB; cpu.set_zero(true); cpu.set_negative(false); assert_eq!(cpu.x, 0x00); cpu.step(); assert_eq!(cpu.x, 0xAB); assert!(!cpu.zero()); assert!(cpu.negative()); } #[test] fn test_txa() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, TXA as u8); cpu.memory.store(0x0001, 0x00); cpu.memory.store(0x0002, 0x02); cpu.x = 0xAB; cpu.set_zero(true); cpu.set_negative(false); assert_eq!(cpu.a, 0x00); cpu.step(); assert_eq!(cpu.a, 0xAB); assert!(!cpu.zero()); assert!(cpu.negative()); } #[test] fn test_txs() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, TXS as u8); cpu.memory.store(0x0001, 0x00); cpu.memory.store(0x0002, 0x02); cpu.x = 0xAB; cpu.set_zero(true); cpu.set_negative(false); assert_eq!(cpu.sp, CPU_STACK_POINTER_INITIAL_VALUE); cpu.step(); assert_eq!(cpu.sp, 0xAB); assert!(!cpu.zero()); assert!(cpu.negative()); } #[test] fn test_tya() { let mut cpu = CPU::default(); cpu.memory.store(0x0000, TYA as u8); cpu.memory.store(0x0001, 0x00); cpu.memory.store(0x0002, 0x02); cpu.y = 0xAB; cpu.set_zero(true); cpu.set_negative(false); assert_eq!(cpu.a, 0x00); cpu.step(); assert_eq!(cpu.a, 0xAB); assert!(!cpu.zero()); assert!(cpu.negative()); } #[test] fn test_bit_set() { let mut value = 0; value = bit_set(value, 0, true); assert_eq!(value, 
0b0000_0001); value = bit_set(value, 0, true); assert_eq!(value, 0b0000_0001); value = bit_set(value, 7, true); assert_eq!(value, 0b1000_0001); value = bit_set(value, 7, false); assert_eq!(value, 0b0000_0001); } #[test] fn test_bit_get() { assert_eq!(bit_get(0b1000_0000, 7), true); assert_eq!(bit_get(0b1000_0000, 6), false); } }
true
c507d496af5f2d649a009cfe15c735ac48470830
Rust
secondfry/school21-rust-libft
/src/strlen.rs
UTF-8
634
3.796875
4
[ "MIT" ]
permissive
/// # strlen
/// Returns `str.len()` — the length of the string in bytes.
///
/// This function has no reason to exist.
///
/// ## Example
/// ```
/// assert_eq!(ft::strlen("abc"), 3);
/// assert_eq!(ft::strlen("\0bc"), 3);
/// ```
pub fn strlen(s: &str) -> usize {
    s.len()
}

/// # strlen_naive
/// Returns the number of symbols up to (not including) the first
/// null symbol in the string, or the total symbol count if the
/// string contains no null symbol.
///
/// ## Example
/// ```
/// assert_eq!(ft::strlen_naive("abc"), 3);
/// assert_eq!(ft::strlen_naive("a\0c"), 1);
/// ```
pub fn strlen_naive(s: &str) -> usize {
    // Count chars lazily instead of collecting them into a Vec<char>
    // first: no heap allocation, and we stop at the first NUL.
    s.chars().take_while(|&c| c != '\0').count()
}
true
7bb46f2a42b130254f4afacf255b481849fe3932
Rust
0xbadcoffe/ctap-hid-fido2
/src/client_pin_response.rs
UTF-8
1,781
2.875
3
[ "MIT" ]
permissive
use crate::cose; use crate::util; use serde_cbor::Value; pub struct Pin { pub retries: i32, } pub fn parse_cbor_client_pin_get_pin_token(bytes: &[u8]) -> Result<Vec<u8>, String> { let cbor: Value = serde_cbor::from_slice(bytes).unwrap(); if let Value::Map(n) = cbor { // 最初の要素を取得 let (key, val) = n.iter().next().unwrap(); if let Value::Integer(member) = key { if *member == 2 { return Ok(util::cbor_value_to_vec_u8(val).unwrap()); } } } return Err("parse_cbor_client_pin_get_pin_token error".into()); } pub fn parse_cbor_client_pin_get_keyagreement(bytes: &[u8]) -> Result<cose::CoseKey, String> { let cbor: Value = serde_cbor::from_slice(bytes).unwrap(); if let Value::Map(n) = cbor { // 最初の要素を取得 let (key, val) = n.iter().next().unwrap(); if let Value::Integer(member) = key { if *member == 1 { return Ok(cose::CoseKey::decode(val).unwrap()); } } } return Err("parse_cbor_client_pin_get_keyagreement error".into()); } pub fn parse_cbor_client_pin_get_retries(bytes: &[u8]) -> Result<Pin, String> { // deserialize to a serde_cbor::Value let cbor: Value = serde_cbor::from_slice(bytes).unwrap(); let mut pin = Pin { retries: 0 }; if let Value::Map(n) = cbor { for (key, val) in &n { if let Value::Integer(member) = key { match member { 3 => pin.retries = util::cbor_cast_value(val).unwrap(), _ => println!("- anything error"), } } } Ok(pin) } else { Err("parse_cbor_client_pin_get_retries error".into()) } }
true
a5e1fe22083eadc45c8e3b27f5c7cb1d9573a81c
Rust
maheshambule/presto_rs
/src/parsing/parser.rs
UTF-8
132,662
2.90625
3
[]
no_license
use super::{parse_tree, visit_post_order, ParseTree}; use crate::lexing::{ predefined_names, predefined_names::PredefinedName as PN, Lexer, Token, TokenKind as TK, }; use crate::utils::{ position, position::Position, syntax_error, syntax_error::Message, syntax_error::SyntaxError, text_range, text_range::TextRange, }; use parsing::parse_tree::ParseTree::Empty; /// The location and lexing context for a Parser. /// /// tokens contains both consumed tokens, as well as the token lookahead. /// Tokens are lexed from the lexer on demand as they are peeked for. /// /// Consuming a token advances index past the token. Lex errors in the consumed tokens /// become part of the parse results. Tokens must never be unconsumed. /// /// peek() methods inspect upcoming tokens without consuming input. /// Only the advance() method consumes a token. struct ParsePosition<'a> { index: usize, tokens: Vec<Token<'a>>, lexer: Lexer<'a>, } impl<'a> ParsePosition<'a> { pub fn new(value: &'a str) -> ParsePosition<'a> { ParsePosition { index: 0, tokens: vec![ // kinda goofy, but we add the BOF token here so that // the lexer doesn't need to special case the first token Token::new( TK::BeginningOfFile, text_range::NONE, &value[0..0], Vec::new(), Vec::new(), Vec::new(), ), ], lexer: Lexer::new(value), } } fn end_position(&self) -> Position { if let Some(token) = self.tokens.last() { token.full_end() } else { position::START } } /// Gets a token at the index from the start of self.tokens. /// Will cause lexing more tokens if not enough tokens are /// present. fn get_token(&mut self, index: usize) -> &Token<'a> { while index >= self.tokens.len() { let new_token = self.lexer.lex_token(); debug_assert!(self.end_position() <= new_token.full_start()); self.tokens.push(new_token); } &self.tokens[index] } /// Returns the token at offset ahead of the current token in the input. /// Does not consume the token. 
pub fn peek_token_offset(&mut self, offset: usize) -> &Token<'a> { self.get_token(self.index + offset) } /// Returns the next token in the input. /// Does not consume the token. pub fn peek_token(&mut self) -> &Token<'a> { self.peek_token_offset(0) } /// Returns the token kind of the next token in the input. /// Does not consume the token. pub fn peek_offset(&mut self, offset: usize) -> TK { self.peek_token_offset(offset).kind } /// Returns the token kind of the offset token in the input. /// Does not consume the token. pub fn peek_kind_offset(&mut self, kind: TK, offset: usize) -> bool { self.peek_offset(offset) == kind } /// Returns true if the next token's kind matches kind. /// Does not consume the token. pub fn peek_kind(&mut self, kind: TK) -> bool { self.peek_kind_offset(kind, 0) } /// Returns the token kind of the next token in the input. /// Does not consume the token. pub fn peek(&mut self) -> TK { self.peek_offset(0) } fn get_empty_range(&mut self) -> TextRange { TextRange::empty(self.peek_token().full_start()) } /// Consumes a token in the input. fn advance(&mut self) -> Token<'a> { debug_assert!(self.index < self.tokens.len()); let token = self.peek_token().clone(); self.index += 1; token } } /// Parser for the Presto SQL dialect. /// /// peek() methods inspect upcoming tokens without consuming input. /// Only the advance() method consumes a token. /// /// eat*() methods consume a token and convert it into a Token tree. /// /// *_opt() methods either parse a *, if present, or return an Empty tree. /// /// parse_*() methods parse a given language syntax. They are preceded by /// a comment indicating the grammar being parsed. 
struct Parser<'a> { position: ParsePosition<'a>, errors: Vec<SyntaxError>, } type ElementParser<'a> = fn(&mut Parser<'a>) -> ParseTree<'a>; type Peeker<'a> = fn(&mut Parser<'a>) -> bool; type OffsetPeeker<'a> = fn(&mut Parser<'a>, usize) -> bool; // Language independant parser functions impl<'a> Parser<'a> { pub fn new(value: &'a str) -> Parser<'a> { Parser { position: ParsePosition::new(value), errors: Vec::new(), } } fn add_error(&mut self, error: SyntaxError) { self.errors.push(error); } fn add_error_of_tree(&mut self, location: &ParseTree<'a>, message: &str) { self.add_error(SyntaxError::from_message( syntax_error::ERROR_SYNTAX_ERROR, Message::new(location.get_range(), message.to_string()), )) } fn peek_token_offset(&mut self, offset: usize) -> &Token<'a> { self.position.peek_token_offset(offset) } fn peek_token(&mut self) -> &Token<'a> { self.position.peek_token() } fn peek_offset(&mut self, offset: usize) -> TK { self.position.peek_offset(offset) } fn peek_kind_offset(&mut self, kind: TK, offset: usize) -> bool { self.position.peek_kind_offset(kind, offset) } fn peek_kind(&mut self, kind: TK) -> bool { self.position.peek_kind(kind) } fn peek(&mut self) -> TK { self.position.peek() } /// If the token at offset is a predefined name, return it otherwise return None. fn maybe_peek_predefined_name_offset(&mut self, offset: usize) -> Option<PN> { let token = self.peek_token_offset(offset); if token.kind == TK::Identifier { predefined_names::maybe_get_predefined_name(token.value) } else { None } } /// If the next token is a predefined name, return it otherwise return None. fn maybe_peek_predefined_name(&mut self) -> Option<PN> { self.maybe_peek_predefined_name_offset(0) } /// Returns true if the token at offset is a specific predefined name. fn peek_predefined_name_offset(&mut self, name: PN, offset: usize) -> bool { self.maybe_peek_predefined_name_offset(offset) == Some(name) } /// Returns true if the next token is a specific predefined name. 
fn peek_predefined_name(&mut self, name: PN) -> bool { self.peek_predefined_name_offset(name, 0) } fn advance(&mut self) -> Token<'a> { self.position.advance() } /// Create an empty TextRange at the current position in the input. fn get_empty_range(&mut self) -> TextRange { self.position.get_empty_range() } /// Create an empty tree whose position is at the current input /// position. fn eat_empty(&mut self) -> ParseTree<'a> { parse_tree::empty(self.get_empty_range()) } /// Consume the next token and return it wrapped in a Token tree. fn eat_token(&mut self) -> ParseTree<'a> { parse_tree::token(self.advance()) } /// Create an Error tree at the current location. // TODO: Add error code parameter. fn error(&mut self, message: String) -> ParseTree<'a> { let result = parse_tree::error(SyntaxError::from_message( syntax_error::ERROR_SYNTAX_ERROR, Message { range: self.get_empty_range(), message, }, )); // TODO: Remove this once we're debugged panic!( "WTF\n{}\n {}", self.position.lexer.input, format!("{:#?}", result) ); // TODO: restore this once we're debugged // result } /// Create an Error tree at the current location with a given message. fn expected_error(&mut self, expected: &str) -> ParseTree<'a> { let message = format!("Expected {}, found {}.", expected, self.peek()); self.error(message) } /// Creates an Error indicating that a given kind was expected. fn expected_error_kind(&mut self, expected: TK) -> ParseTree<'a> { self.expected_error(expected.to_string().as_str()) } /// Creates an Error indicating that a given name was expected. fn expected_error_name(&mut self, expected: PN) -> ParseTree<'a> { self.expected_error(expected.to_string().as_str()) } /// Consumes and returns the next token if its kind matches; /// otherwise returns an expected error token. 
fn eat(&mut self, kind: TK) -> ParseTree<'a> { if self.peek_kind(kind) { self.eat_token() } else { self.expected_error_kind(kind) } } /// Consumes and returns the next token if it matches the given /// predefined name; otherwise returns an expected error token. fn eat_predefined_name(&mut self, name: PN) -> ParseTree<'a> { if self.peek_predefined_name(name) { self.eat_token() } else { self.expected_error_name(name) } } /// Consumes and returns the next token if it matches the given /// predefined name; otherwise returns an empty tree. fn eat_predefined_name_opt(&mut self, name: PN) -> ParseTree<'a> { if self.peek_predefined_name(name) { self.eat_token() } else { self.eat_empty() } } /// Consumes and returns the next token if it matches the given /// kind; otherwise returns an empty tree. fn eat_opt(&mut self, kind: TK) -> ParseTree<'a> { if self.peek_kind(kind) { self.eat_token() } else { self.eat_empty() } } /// Parses a delimiter token, followed by a tree, followed by /// an end delimiter token. Returns a tuple with the 3 parsed /// trees. fn parse_delimited( &mut self, start_kind: TK, parse_element: ElementParser<'a>, end_kind: TK, ) -> (ParseTree<'a>, ParseTree<'a>, ParseTree<'a>) { let start = self.eat(start_kind); let element = parse_element(self); let end = self.eat(end_kind); (start, element, end) } /// Parses a tree enclosed in parens. Returns a tuple /// containing the 3 trees. fn parse_parenthesized( &mut self, parse_element: ElementParser<'a>, ) -> (ParseTree<'a>, ParseTree<'a>, ParseTree<'a>) { self.parse_delimited(TK::OpenParen, parse_element, TK::CloseParen) } /// Parse non-empty separated list elements. /// The second element in each pair is the separator. /// The separator for the last list element will always /// be an empty tree. 
fn parse_separated_list_elements( &mut self, separator_kind: TK, parse_element: ElementParser<'a>, ) -> Vec<(ParseTree<'a>, ParseTree<'a>)> { let mut elements = Vec::new(); let mut seperators = Vec::new(); elements.push(parse_element(self)); while { let separator = self.eat_opt(separator_kind); let at_end = separator.is_empty(); seperators.push(separator); !at_end } { elements.push(parse_element(self)); } elements.into_iter().zip(seperators.into_iter()).collect() } /// Parses the elements of a non-empty, non-separated list. /// The second element in each pair will always be an Empty tree. fn parse_list_elements( &mut self, peek_element: Peeker<'a>, parse_element: ElementParser<'a>, ) -> Vec<(ParseTree<'a>, ParseTree<'a>)> { let mut elements = Vec::new(); while elements.len() == 0 || peek_element(self) { elements.push((parse_element(self), self.eat_empty())); } elements } /// Parse possibly empty separated list. fn parse_separated_list_elements_opt( &mut self, separator_kind: TK, peek_element: Peeker<'a>, parse_element: ElementParser<'a>, ) -> Vec<(ParseTree<'a>, ParseTree<'a>)> { if peek_element(self) { self.parse_separated_list_elements(separator_kind, parse_element) } else { Vec::new() } } /// Parse non-empty list. fn parse_list( &mut self, peek_element: Peeker<'a>, parse_element: ElementParser<'a>, ) -> ParseTree<'a> { let start_delimiter = self.eat_empty(); let elements_and_separators = self.parse_list_elements(peek_element, parse_element); let end_delimiter = self.eat_empty(); parse_tree::list(start_delimiter, elements_and_separators, end_delimiter) } /// Parse non-empty separated list. /// Terminating separator is not consumed. 
fn parse_separated_list( &mut self, separator_kind: TK, parse_element: ElementParser<'a>, ) -> ParseTree<'a> { let start_delimiter = self.eat_empty(); let elements_and_separators = self.parse_separated_list_elements(separator_kind, parse_element); let end_delimiter = self.eat_empty(); parse_tree::list(start_delimiter, elements_and_separators, end_delimiter) } /// Parse possibly-empty separated list. /// Terminating separator is not consumed. fn parse_separated_list_opt( &mut self, separator_kind: TK, peek_element: Peeker<'a>, parse_element: ElementParser<'a>, ) -> ParseTree<'a> { let start_delimiter = self.eat_empty(); let elements_and_separators = self.parse_separated_list_elements_opt(separator_kind, peek_element, parse_element); let end_delimiter = self.eat_empty(); parse_tree::list(start_delimiter, elements_and_separators, end_delimiter) } /// Parse non-empty comma separated list. /// Terminating commas are not consumed. fn parse_comma_separated_list(&mut self, parse_element: ElementParser<'a>) -> ParseTree<'a> { self.parse_separated_list(TK::Comma, parse_element) } /// Parse possibly-empty comma separated list. /// Terminating commas are not consumed. fn parse_comma_separated_list_opt( &mut self, peek_element: Peeker<'a>, parse_element: ElementParser<'a>, ) -> ParseTree<'a> { self.parse_separated_list_opt(TK::Comma, peek_element, parse_element) } /// Parse delimited non-empty separated list. /// Terminating separator is not permitted. fn parse_delimited_separated_list( &mut self, start_kind: TK, separator_kind: TK, parse_element: ElementParser<'a>, end_kind: TK, ) -> ParseTree<'a> { let start_delimiter = self.eat(start_kind); let elements_and_separators = self.parse_separated_list_elements(separator_kind, parse_element); let end_delimiter = self.eat(end_kind); parse_tree::list(start_delimiter, elements_and_separators, end_delimiter) } /// Parse delimited possibly-empty separated list. /// Terminating separator is not permitted. 
fn parse_delimited_separated_list_opt( &mut self, start_kind: TK, separator_kind: TK, peek_element: Peeker<'a>, parse_element: ElementParser<'a>, end_kind: TK, ) -> ParseTree<'a> { let start_delimiter = self.eat(start_kind); let elements_and_separators = self.parse_separated_list_elements_opt(separator_kind, peek_element, parse_element); let end_delimiter = self.eat(end_kind); parse_tree::list(start_delimiter, elements_and_separators, end_delimiter) } /// Parse parenthesized, non-empty comma separated list. /// Terminating commas are not consumed. fn parse_parenthesized_comma_separated_list( &mut self, parse_element: ElementParser<'a>, ) -> ParseTree<'a> { self.parse_delimited_separated_list(TK::OpenParen, TK::Comma, parse_element, TK::CloseParen) } /// Parse optional parenthesized, non-empty comma separated list. /// Terminating commas are not consumed. fn parse_parenthesized_comma_separated_list_opt( &mut self, parse_element: ElementParser<'a>, ) -> ParseTree<'a> { if self.peek_kind(TK::OpenParen) { self.parse_delimited_separated_list( TK::OpenParen, TK::Comma, parse_element, TK::CloseParen, ) } else { self.eat_empty() } } /// Parse parenthesized, possibly empty comma separated list. /// Terminating commas are not consumed. fn parse_parenthesized_comma_separated_opt_list( &mut self, peek_element: Peeker<'a>, parse_element: ElementParser<'a>, ) -> ParseTree<'a> { self.parse_delimited_separated_list_opt( TK::OpenParen, TK::Comma, peek_element, parse_element, TK::CloseParen, ) } /// Parses a grammar entrypoint; ensures all input is consumed. fn parse_entrypoint(&mut self, parse_element: ElementParser<'a>) -> ParseTree<'a> { let (bof, tree, eof) = self.parse_delimited(TK::BeginningOfFile, parse_element, TK::EndOfFile); parse_tree::entrypoint(bof, tree, eof) } } // Presto Language specific functions // // Methods here are prefixed with a comment indicating the grammar // production being parsed. impl<'a> Parser<'a> { // query // : with_? 
queryNoWith pub fn parse_query(&mut self) -> ParseTree<'a> { let with = self.parse_with_opt(); let query_no_with = self.parse_query_no_with(); parse_tree::query(with, query_no_with) } // with_ // : WITH RECURSIVE? namedQuery (',' namedQuery)* fn parse_with_opt(&mut self) -> ParseTree<'a> { if self.peek_kind(TK::WITH) { let with = self.eat_token(); let recursive = self.eat_opt(TK::RECURSIVE); let named_queries = self.parse_comma_separated_list(|parser| parser.parse_named_query()); parse_tree::with(with, recursive, named_queries) } else { self.eat_empty() } } fn parse_parenthesized_query(&mut self) -> (ParseTree<'a>, ParseTree<'a>, ParseTree<'a>) { self.parse_parenthesized(|parser| parser.parse_query()) } // namedQuery // : name=identifier (columnAliases)? AS '(' query ')' fn parse_named_query(&mut self) -> ParseTree<'a> { let name = self.parse_identifier(); let column_aliases = self.parse_column_aliases_opt(); let as_ = self.eat(TK::AS); let (open_paren, query, close_paren) = self.parse_parenthesized_query(); parse_tree::named_query(name, column_aliases, as_, open_paren, query, close_paren) } // identifier // : IDENTIFIER #unquotedIdentifier // | QUOTED_IDENTIFIER #quotedIdentifier // | nonReserved #unquotedIdentifier // | BACKQUOTED_IDENTIFIER #backQuotedIdentifier // | DIGIT_IDENTIFIER #digitIdentifier // ; fn parse_identifier(&mut self) -> ParseTree<'a> { if self.peek_identifier() { self.eat_token() } else { self.expected_error_kind(TK::Identifier) } } fn peek_identifier_offset(&mut self, offset: usize) -> bool { match self.peek_offset(offset) { TK::Identifier | TK::QuotedIdentifier | TK::BackquotedIdentifier | TK::DigitIdentifier => true, _ => false, } } fn peek_identifier(&mut self) -> bool { self.peek_identifier_offset(0) } fn parse_identifier_opt(&mut self) -> ParseTree<'a> { if self.peek_identifier() { self.eat_token() } else { self.eat_empty() } } // columnAliases // : '(' identifier (',' identifier)* ')' fn parse_column_aliases_opt(&mut self) -> 
ParseTree<'a> { if self.peek_column_aliases() { self.parse_parenthesized_comma_separated_list(|parser| parser.parse_identifier()) } else { self.eat_empty() } } fn peek_column_aliases(&mut self) -> bool { // need to disambiguate with query in an insert into self.peek_kind(TK::OpenParen) && self.peek_identifier_offset(1) } // queryNoWith: // queryTerm // (ORDER BY sortItem (',' sortItem)*)? // (LIMIT limit=(INTEGER_VALUE | ALL))? fn parse_query_no_with(&mut self) -> ParseTree<'a> { let query_term = self.parse_query_term(); self.parse_query_no_with_tail(query_term) } fn peek_query_no_with_tail(&mut self) -> bool { self.peek_kind(TK::ORDER) || self.peek_limit_offset(0) } fn parse_query_no_with_tail(&mut self, query_term: ParseTree<'a>) -> ParseTree<'a> { let order_by_opt = self.parse_order_by_opt(); let limit_opt = self.parse_limit_opt(); parse_tree::query_no_with(query_term, order_by_opt, limit_opt) } // sortItem // : expression ordering=(ASC | DESC)? (NULLS nullOrdering=(FIRST | LAST))? fn parse_sort_item(&mut self) -> ParseTree<'a> { let expression = self.parse_expression(); let ordering_opt = self.parse_ordering_opt(); let nulls = self.eat_predefined_name_opt(PN::NULLS); let null_ordering = if nulls.is_empty() { self.eat_empty() } else { self.parse_null_ordering() }; parse_tree::sort_item(expression, ordering_opt, nulls, null_ordering) } fn parse_ordering_opt(&mut self) -> ParseTree<'a> { let asc = self.eat_predefined_name_opt(PN::ASC); if asc.is_empty() { self.eat_predefined_name_opt(PN::DESC) } else { asc } } fn parse_null_ordering(&mut self) -> ParseTree<'a> { let last = self.eat_predefined_name_opt(PN::LAST); if last.is_empty() { self.eat_predefined_name(PN::FIRST) } else { last } } // (ORDER BY sortItem (',' sortItem)*)? 
fn parse_order_by_opt(&mut self) -> ParseTree<'a> { let order = self.eat_opt(TK::ORDER); if order.is_empty() { order } else { let by = self.eat(TK::BY); let sort_items = self.parse_comma_separated_list(|parser| parser.parse_sort_item()); parse_tree::order_by(order, by, sort_items) } } // (LIMIT limit=(INTEGER_VALUE | ALL))? fn parse_limit_opt(&mut self) -> ParseTree<'a> { let limit = self.eat_predefined_name_opt(PN::LIMIT); if limit.is_empty() { limit } else { let value = self.eat_predefined_name_opt(PN::ALL); let value = if value.is_empty() { self.eat(TK::Integer) } else { value }; parse_tree::limit(limit, value) } } fn peek_limit_offset(&mut self, offset: usize) -> bool { self.peek_predefined_name_offset(PN::LIMIT, offset) && (self.peek_predefined_name_offset(PN::ALL, offset + 1) || self.peek_kind_offset(TK::Integer, offset + 1)) } // queryTerm // : queryPrimary #queryTermDefault // | left=queryTerm operator=INTERSECT setQuantifier? right=queryTerm #setOperation // | left=queryTerm operator=(UNION | EXCEPT) setQuantifier? right=queryTerm #setOperation fn parse_query_term(&mut self) -> ParseTree<'a> { // handle operator precedence here self.parse_union_query_term() } // | left=queryTerm operator=(UNION | EXCEPT) setQuantifier? right=queryTerm #setOperation fn parse_union_query_term(&mut self) -> ParseTree<'a> { let left = self.parse_intersect_query_term(); self.parse_union_query_term_tail(left) } fn parse_union_query_term_tail(&mut self, left: ParseTree<'a>) -> ParseTree<'a> { let mut left = left; while self.peek_union_query_term_tail() { let operator = self.eat_token(); let set_quantifier_opt = self.parse_set_quantifier_opt(|_parser, _offset| true); let right = self.parse_intersect_query_term(); left = parse_tree::query_set_operation(left, operator, set_quantifier_opt, right); } left } fn peek_union_query_term_tail(&mut self) -> bool { let op_kind = self.peek(); op_kind == TK::UNION || op_kind == TK::EXCEPT } // | left=queryTerm operator=INTERSECT setQuantifier? 
right=queryTerm #setOperation fn parse_intersect_query_term(&mut self) -> ParseTree<'a> { let left = self.parse_query_primary(); self.parse_intersect_query_term_tail(left) } fn parse_intersect_query_term_tail(&mut self, left: ParseTree<'a>) -> ParseTree<'a> { let mut left = left; while self.peek_intersect_query_term_tail() { let operator = self.eat_token(); let set_quantifier_opt = self.parse_set_quantifier_opt(|_parser, _offset| true); let right = self.parse_query_primary(); left = parse_tree::query_set_operation(left, operator, set_quantifier_opt, right); } left } fn parse_query_primary_tail(&mut self, query_primary: ParseTree<'a>) -> ParseTree<'a> { let query_intersect = self.parse_intersect_query_term_tail(query_primary); let query_term = self.parse_union_query_term_tail(query_intersect); self.parse_query_no_with_tail(query_term) } fn peek_intersect_query_term_tail(&mut self) -> bool { self.peek_kind(TK::INTERSECT) } // setQuantifier // : DISTINCT // | ALL fn parse_set_quantifier_opt(&mut self, peek_tail: OffsetPeeker<'a>) -> ParseTree<'a> { let distinct = self.eat_opt(TK::DISTINCT); if distinct.is_empty() && self.peek_predefined_name(PN::ALL) && peek_tail(self, 1) { self.eat_token() } else { distinct } } fn peek_query_primary_offset(&mut self, offset: usize) -> bool { match self.peek_offset(offset) { TK::SELECT | TK::TABLE | TK::VALUES => true, TK::OpenParen => self.peek_query_primary_offset(offset + 1), _ => false, } } fn peek_query_offset(&mut self, offset: usize) -> bool { self.peek_kind_offset(TK::WITH, offset) || self.peek_query_primary_offset(offset) } // queryPrimary // : querySpecification #queryPrimaryDefault // | TABLE qualifiedName #table // | VALUES expression (',' expression)* #inlineTable // | '(' queryNoWith ')' #subquery fn parse_query_primary(&mut self) -> ParseTree<'a> { match self.peek() { TK::SELECT => self.parse_query_specification(), TK::TABLE => self.parse_table(), TK::VALUES => self.parse_inline_table(), TK::OpenParen => 
self.parse_subquery(), _ => self.eat(TK::SELECT), } } // | '(' queryNoWith ')' #subquery fn parse_subquery(&mut self) -> ParseTree<'a> { let (open_paren, query_no_with, close_paren) = self.parse_parenthesized(|parser| parser.parse_query_no_with()); parse_tree::subquery(open_paren, query_no_with, close_paren) } // | VALUES expression (',' expression)* #inlineTable fn parse_inline_table(&mut self) -> ParseTree<'a> { let values = self.eat(TK::VALUES); let expressions = self.parse_comma_separated_list(|parser| parser.parse_expression()); parse_tree::inline_table(values, expressions) } // | TABLE qualifiedName #table fn parse_table(&mut self) -> ParseTree<'a> { let table = self.eat(TK::TABLE); let qualified_name = self.parse_qualified_name(); parse_tree::table(table, qualified_name) } // querySpecification // : SELECT setQuantifier? selectItem (',' selectItem)* // (FROM relation (',' relation)*)? // (WHERE where=booleanExpression)? // (GROUP BY groupBy)? // (HAVING having=booleanExpression)? 
fn parse_query_specification(&mut self) -> ParseTree<'a> { let select = self.eat(TK::SELECT); let set_quantifier_opt = self.parse_set_quantifier_opt(|parser, offset| parser.peek_select_item_offset(offset)); let select_items = self.parse_comma_separated_list(|parser| parser.parse_select_item()); let from = self.eat_opt(TK::FROM); let relations = if from.is_empty() { self.eat_empty() } else { self.parse_comma_separated_list(|parser| parser.parse_relation()) }; let where_ = self.eat_opt(TK::WHERE); let where_predicate = if where_.is_empty() { self.eat_empty() } else { self.parse_boolean_expression() }; let group = self.eat_opt(TK::GROUP); let (by, group_by) = if group.is_empty() { (self.eat_empty(), self.eat_empty()) } else { let by = self.eat(TK::BY); let group_by = self.parse_group_by(); (by, group_by) }; let having = self.eat_opt(TK::HAVING); let having_predicate = if having.is_empty() { self.eat_empty() } else { self.parse_boolean_expression() }; parse_tree::query_specification( select, set_quantifier_opt, select_items, from, relations, where_, where_predicate, group, by, group_by, having, having_predicate, ) } // selectItem // : expression (AS? identifier)? #selectSingle // | qualifiedName '.' 
ASTERISK #selectAll // | ASTERISK #selectAll fn parse_select_item(&mut self) -> ParseTree<'a> { let asterisk = self.eat_opt(TK::Asterisk); if asterisk.is_empty() { if self.peek_qualified_select_all() { let qualifier = self.parse_qualified_name(); let period = self.eat(TK::Period); let asterisk = self.eat(TK::Asterisk); parse_tree::qualified_select_all(qualifier, period, asterisk) } else { let expression = self.parse_expression(); let as_ = self.eat_opt(TK::AS); let identifier = if as_.is_empty() { self.parse_identifier_opt() } else { self.parse_identifier() }; parse_tree::select_item(expression, as_, identifier) } } else { parse_tree::select_all(asterisk) } } fn peek_select_item_offset(&mut self, offset: usize) -> bool { self.peek_expression_offset(offset) || self.peek_kind_offset(TK::Asterisk, offset) } fn peek_qualified_select_all(&mut self) -> bool { let mut offset = 0; while self.peek_identifier_offset(offset) { offset += 1; if self.peek_kind_offset(TK::Period, offset) { offset += 1; } else { return false; } } offset > 0 && self.peek_kind_offset(TK::Asterisk, offset) } // relation // : left=relation // ( CROSS JOIN right=sampledRelation // | joinType JOIN rightRelation=relation joinCriteria // | NATURAL joinType JOIN right=sampledRelation // ) #joinRelation // | sampledRelation #relationDefault fn parse_relation(&mut self) -> ParseTree<'a> { let left = self.parse_sampled_relation(); self.parse_join_relation_tail(left) } fn peek_join_relation_tail(&mut self) -> bool { match self.peek() { TK::CROSS | TK::JOIN | TK::INNER | TK::LEFT | TK::RIGHT | TK::FULL | TK::NATURAL => { true } _ => false, } } fn parse_join_relation_tail(&mut self, left: ParseTree<'a>) -> ParseTree<'a> { let mut left = left; while self.peek_join_relation_tail() { left = match self.peek() { TK::CROSS => { let cross = self.eat(TK::CROSS); let join = self.eat(TK::JOIN); let right = self.parse_sampled_relation(); parse_tree::cross_join(left, cross, join, right) } TK::JOIN | TK::INNER | TK::LEFT | 
TK::RIGHT | TK::FULL => { let join_type = self.parse_join_type(); let join = self.eat(TK::JOIN); let right = self.parse_relation(); let join_criteria = self.parse_join_criteria(); parse_tree::join(left, join_type, join, right, join_criteria) } TK::NATURAL => { let natural = self.eat(TK::CROSS); let join_type = self.parse_join_type(); let join = self.eat(TK::JOIN); let right = self.parse_sampled_relation(); parse_tree::natural_join(left, natural, join_type, join, right) } _ => panic!("Unexpected join tail"), } } left } // joinType // : INNER? // | LEFT OUTER? // | RIGHT OUTER? // | FULL OUTER? fn parse_join_type(&mut self) -> ParseTree<'a> { match self.peek() { TK::INNER => self.eat(TK::INNER), TK::LEFT | TK::RIGHT | TK::FULL => { let kind = self.eat_token(); let outer_opt = self.eat_opt(TK::OUTER); parse_tree::outer_join_kind(kind, outer_opt) } _ => self.eat_empty(), } } // joinCriteria // : ON booleanExpression // | USING '(' identifier (',' identifier)* ')' fn parse_join_criteria(&mut self) -> ParseTree<'a> { match self.peek() { TK::ON => { let on = self.eat(TK::ON); let predicate = self.parse_boolean_expression(); parse_tree::on_join_criteria(on, predicate) } TK::USING => { let using = self.eat(TK::USING); let names = self .parse_parenthesized_comma_separated_list(|parser| parser.parse_identifier()); parse_tree::using_join_criteria(using, names) } _ => self.expected_error("join criteria"), } } // sampledRelation // : aliasedRelation ( // TABLESAMPLE sampleType '(' percentage=expression ')' // )? 
fn parse_sampled_relation(&mut self) -> ParseTree<'a> { let relation_primary = self.parse_relation_primary(); self.parse_sampled_relation_tail(relation_primary) } fn parse_sampled_relation_tail(&mut self, relation_primary: ParseTree<'a>) -> ParseTree<'a> { let aliased_relation = self.parse_aliased_relation_tail(relation_primary); let tablesample = self.eat_predefined_name_opt(PN::TABLESAMPLE); if tablesample.is_empty() { aliased_relation } else { let sample_type = self.parse_sample_type(); let (open_paren, expression, close_paren) = self.parse_parenthesized(|parser| parser.parse_expression()); parse_tree::sampled_relation( aliased_relation, tablesample, sample_type, open_paren, expression, close_paren, ) } } fn peek_sampled_relation_tail(&mut self) -> bool { self.peek_aliased_relation_tail_offset(0) || self.peek_tablesample_suffix_offset(0) } fn peek_tablesample_suffix_offset(&mut self, offset: usize) -> bool { self.peek_predefined_name_offset(PN::TABLESAMPLE, offset) && self.peek_sample_type_offset(offset + 1) } fn peek_sample_type_offset(&mut self, offset: usize) -> bool { match self.maybe_peek_predefined_name_offset(offset) { Some(PN::BERNOULLI) | Some(PN::SYSTEM) => true, _ => false, } } // sampleType // : BERNOULLI // | SYSTEM fn parse_sample_type(&mut self) -> ParseTree<'a> { if self.peek_sample_type_offset(0) { self.eat_token() } else { self.expected_error("sample type") } } // aliasedRelation // : relationPrimary (AS? identifier columnAliases?)? fn peek_aliased_relation_tail_offset(&mut self, offset: usize) -> bool { (self.peek_kind_offset(TK::AS, offset) || self.peek_identifier_offset(offset)) && // need to avoid consuming a TABLESAMPLE as an alias // This is due to the ANTLR grammar being recursive // through the relation production. !self.peek_tablesample_suffix_offset(offset) // need to avoid consuming the LIMIT(non-keyword) as an alias. 
&& !self.peek_limit_offset(offset)
    }

    // Wraps `relation_primary` in an aliased_relation node when an alias
    // (optionally preceded by AS) follows; otherwise returns it unchanged.
    fn parse_aliased_relation_tail(&mut self, relation_primary: ParseTree<'a>) -> ParseTree<'a> {
        if self.peek_aliased_relation_tail_offset(0) {
            let as_opt = self.eat_opt(TK::AS);
            let identifier = self.parse_identifier();
            let column_aliases_opt = self.parse_column_aliases_opt();
            parse_tree::aliased_relation(relation_primary, as_opt, identifier, column_aliases_opt)
        } else {
            relation_primary
        }
    }

    // True when the next tokens can only continue a query (set operations or
    // the ORDER BY/LIMIT tail) — used to disambiguate parenthesized
    // query-vs-relation forms below.
    fn peek_query_primary_follow(&mut self) -> bool {
        self.peek_intersect_query_term_tail()
            || self.peek_union_query_term_tail()
            || self.peek_query_no_with_tail()
    }

    // yields one of:
    //   one of several relation trees:
    //     joins, sampled, subquery_relation
    //   query - possibly with or without a with clause
    //     query_no_with
    //   relation_or_query
    fn parse_relation_or_query(&mut self) -> ParseTree<'a> {
        if self.peek_kind(TK::OpenParen) {
            let (open_paren, relation_or_query, close_paren) =
                self.parse_parenthesized(|parser| parser.parse_relation_or_query());
            // A WITH clause can only occur in a subquery relation, never in a
            // parenthesized query primary.
            let must_be_subquery_relation =
                relation_or_query.is_query() && !relation_or_query.as_query().with.is_empty();
            if must_be_subquery_relation {
                let subquery_relation =
                    parse_tree::subquery_relation(open_paren, relation_or_query, close_paren);
                let sampled_relation = self.parse_sampled_relation_tail(subquery_relation);
                self.parse_join_relation_tail(sampled_relation)
            } else if {
                let can_be_query_primary = !must_be_subquery_relation
                    && (relation_or_query.is_query_no_with()
                        || relation_or_query.is_query()
                        || relation_or_query.is_relation_or_query());
                can_be_query_primary
            } {
                // it is possible for both relation_tail and query_primary_tail
                // to be true: when followed by LIMIT x; which can only be a query.
                let relation_tail =
                    self.peek_join_relation_tail() || self.peek_sampled_relation_tail();
                let must_be_query_tail = self.peek_query_primary_follow();
                if relation_tail && !must_be_query_tail {
                    let subquery_relation =
                        parse_tree::subquery_relation(open_paren, relation_or_query, close_paren);
                    let sampled_relation = self.parse_sampled_relation_tail(subquery_relation);
                    self.parse_join_relation_tail(sampled_relation)
                } else if must_be_query_tail {
                    let subquery = parse_tree::subquery(open_paren, relation_or_query, close_paren);
                    // this yields a query_no_with
                    self.parse_query_primary_tail(subquery)
                } else {
                    // we have a query which can be consumed as either
                    // a subquery or a subquery_relation...
                    // make the decision up the tree.
                    parse_tree::relation_or_query(open_paren, relation_or_query, close_paren)
                }
            } else {
                let sampled_relation = self.parse_sampled_relation_tail(
                    parse_tree::parenthesized_relation(open_paren, relation_or_query, close_paren),
                );
                self.parse_join_relation_tail(sampled_relation)
            }
        } else if self.peek_query_offset(0) {
            // yields a query
            self.parse_query()
        } else {
            self.parse_relation()
        }
    }

    // relationPrimary
    // : qualifiedName                                     #tableName
    // | '(' query ')'                                     #subqueryRelation
    // | UNNEST '(' expression (',' expression)* ')' (WITH ORDINALITY)? #unnest
    // | LATERAL '(' query ')'                             #lateral
    // | '(' relation ')'                                  #parenthesizedRelation
    fn parse_relation_primary(&mut self) -> ParseTree<'a> {
        match self.peek() {
            TK::OpenParen => {
                let (open_paren, relation_or_query, close_paren) =
                    self.parse_parenthesized(|parser| parser.parse_relation_or_query());
                if relation_or_query.is_query()
                    || relation_or_query.is_query_no_with()
                    || relation_or_query.is_relation_or_query()
                {
                    parse_tree::subquery_relation(open_paren, relation_or_query, close_paren)
                } else {
                    parse_tree::parenthesized_relation(open_paren, relation_or_query, close_paren)
                }
            }
            TK::UNNEST => self.parse_unnest(),
            _ => {
                // LATERAL is a predefined name, not a reserved keyword, so it is
                // only treated as such when followed by '('.
                if self.peek_predefined_name(PN::LATERAL) && self.peek_kind_offset(TK::OpenParen, 1)
                {
                    self.parse_lateral()
                } else {
                    self.parse_table_name()
                }
            }
        }
    }

    // : qualifiedName                                     #tableName
    fn parse_table_name(&mut self) -> ParseTree<'a> {
        let name = self.parse_qualified_name();
        parse_tree::table_name(name)
    }

    // | LATERAL '(' query ')'                             #lateral
    fn parse_lateral(&mut self) -> ParseTree<'a> {
        let lateral = self.eat_predefined_name(PN::LATERAL);
        let (open_paren, query, close_paren) = self.parse_parenthesized_query();
        parse_tree::lateral(lateral, open_paren, query, close_paren)
    }

    // | UNNEST '(' expression (',' expression)* ')' (WITH ORDINALITY)? #unnest
    fn parse_unnest(&mut self) -> ParseTree<'a> {
        let unnest = self.eat(TK::UNNEST);
        let expressions =
            self.parse_parenthesized_comma_separated_list(|parser| parser.parse_expression());
        let with = self.eat_opt(TK::WITH);
        // ORDINALITY is only required when WITH was present.
        let ordinality = if with.is_empty() {
            self.eat_empty()
        } else {
            self.eat_predefined_name(PN::ORDINALITY)
        };
        parse_tree::unnest(unnest, expressions, with, ordinality)
    }

    // groupBy
    // : setQuantifier? groupingElement (',' groupingElement)*
    fn parse_group_by(&mut self) -> ParseTree<'a> {
        let set_quantifier_opt = self
            .parse_set_quantifier_opt(|parser, offset| parser.peek_grouping_element_offset(offset));
        let grouping_elements =
            self.parse_comma_separated_list(|parser| parser.parse_grouping_element());
        parse_tree::group_by(set_quantifier_opt, grouping_elements)
    }

    // groupingElement
    // : groupingSet                                            #singleGroupingSet
    // | ROLLUP '(' (expression (',' expression)*)? ')'         #rollup
    // | CUBE '(' (expression (',' expression)*)? ')'           #cube
    // | GROUPING SETS '(' groupingSet (',' groupingSet)* ')'   #multipleGroupingSets
    fn parse_grouping_element(&mut self) -> ParseTree<'a> {
        match self.peek() {
            TK::ROLLUP => self.parse_rollup(),
            TK::CUBE => self.parse_cube(),
            TK::GROUPING => self.parse_grouping_sets(),
            _ => self.parse_grouping_set(),
        }
    }

    // True when the token at `offset` can start a grouping element.
    fn peek_grouping_element_offset(&mut self, offset: usize) -> bool {
        match self.peek_offset(offset) {
            TK::ROLLUP | TK::CUBE | TK::GROUPING => true,
            _ => self.peek_expression_offset(offset),
        }
    }

    // | ROLLUP '(' (expression (',' expression)*)? ')'         #rollup
    fn parse_rollup(&mut self) -> ParseTree<'a> {
        let rollup = self.eat(TK::ROLLUP);
        let expressions =
            self.parse_parenthesized_comma_separated_list(|parser| parser.parse_expression());
        parse_tree::rollup(rollup, expressions)
    }

    // | CUBE '(' (expression (',' expression)*)? ')'           #cube
    fn parse_cube(&mut self) -> ParseTree<'a> {
        let cube = self.eat(TK::CUBE);
        let expressions =
            self.parse_parenthesized_comma_separated_list(|parser| parser.parse_expression());
        parse_tree::cube(cube, expressions)
    }

    // | GROUPING SETS '(' groupingSet (',' groupingSet)* ')'   #multipleGroupingSets
    fn parse_grouping_sets(&mut self) -> ParseTree<'a> {
        let grouping = self.eat(TK::GROUPING);
        let sets = self.eat_predefined_name(PN::SETS);
        let grouping_sets =
            self.parse_parenthesized_comma_separated_list(|parser| parser.parse_grouping_set());
        parse_tree::grouping_sets(grouping, sets, grouping_sets)
    }

    // groupingSet
    // : '(' (expression (',' expression)*)? ')'
    // | expression
    fn parse_grouping_set(&mut self) -> ParseTree<'a> {
        // This is a subset of expression, except that it permits '()'
        // parenthesized expressions will show up as
        // either a row constructor or a paren expression.
        let elements = if self.peek_kind(TK::OpenParen) && self.peek_kind_offset(TK::CloseParen, 1)
        {
            parse_tree::empty_grouping_set(self.eat(TK::OpenParen), self.eat(TK::CloseParen))
        } else {
            self.parse_expression()
        };
        parse_tree::grouping_set(elements)
    }

    // expression
    // : booleanExpression
    fn parse_expression(&mut self) -> ParseTree<'a> {
        self.parse_boolean_expression()
    }

    fn peek_expression(&mut self) -> bool {
        self.peek_expression_offset(0)
    }

    // True when the token at `offset` can start an expression. The commented
    // grammar alternatives name the production each token kind can begin.
    fn peek_expression_offset(&mut self, offset: usize) -> bool {
        match self.peek_offset(offset) {
            TK::NOT
            | TK::Plus
            | TK::Minus
            // : NULL #nullLiteral
            | TK::NULL
            // | DOUBLE_PRECISION string #typeConstructor
            | TK::DoublePrecision
            // | booleanValue #booleanLiteral
            | TK::TRUE
            | TK::FALSE
            // | number #numericLiteral
            | TK::Decimal
            | TK::Double
            | TK::Integer
            // | string #stringLiteral
            | TK::String
            | TK::UnicodeString
            // | BINARY_LITERAL #binaryLiteral
            | TK::BinaryLiteral
            // | '?' #parameter
            | TK::Question
            // // This is an extension to ANSI SQL, which considers EXISTS to be a <boolean expression>
            // | EXISTS '(' query ')' #exists
            | TK::EXISTS
            // | CASE valueExpression whenClause+ (ELSE elseExpression=expression)? END #simpleCase
            // | CASE whenClause+ (ELSE elseExpression=expression)? END #searchedCase
            | TK::CASE
            // | CAST '(' expression AS type_ ')' #cast
            | TK::CAST
            // | name=CURRENT_DATE #specialDateTimeFunction
            | TK::CURRENT_DATE
            // | name=CURRENT_TIME ('(' precision=INTEGER_VALUE ')')? #specialDateTimeFunction
            | TK::CURRENT_TIME
            // | name=CURRENT_TIMESTAMP ('(' precision=INTEGER_VALUE ')')? #specialDateTimeFunction
            | TK::CURRENT_TIMESTAMP
            // | name=LOCALTIME ('(' precision=INTEGER_VALUE ')')? #specialDateTimeFunction
            | TK::LOCALTIME
            // | name=LOCALTIMESTAMP ('(' precision=INTEGER_VALUE ')')? #specialDateTimeFunction
            | TK::LOCALTIMESTAMP
            // | name=CURRENT_USER #currentUser
            | TK::CURRENT_USER
            // | name=CURRENT_PATH #currentPath
            | TK::CURRENT_PATH
            // | NORMALIZE '(' valueExpression (',' normalForm)? ')' #normalize
            | TK::NORMALIZE
            // | EXTRACT '(' identifier FROM valueExpression ')' #extract
            | TK::EXTRACT
            // | GROUPING '(' (qualifiedName (',' qualifiedName)*)? ')' #groupingOperation
            | TK::GROUPING
            // | configureExpression #conf
            | TK::CONFIGURE
            // | '(' expression (',' expression)+ ')' #rowConstructor
            // | '(' (identifier (',' identifier)*)? ')' '->' expression #lambda
            // | '(' query ')' #subqueryExpression
            // | '(' expression ')' #parenthesizedExpression
            | TK::OpenParen
            // | interval #intervalLiteral
            // | identifier string #typeConstructor
            // | POSITION '(' valueExpression IN valueExpression ')' #position
            // | ROW '(' expression (',' expression)* ')' #rowConstructor
            // | qualifiedName '(' ASTERISK ')' filter_? over? #functionCall
            // | qualifiedName '(' (setQuantifier? expression (',' expression)*)?
            //     (ORDER BY sortItem (',' sortItem)*)? ')' filter_? over? #functionCall
            // | identifier '->' expression #lambda
            // | TRY_CAST '(' expression AS type_ ')' #cast
            // | ARRAY '[' (expression (',' expression)*)? ']' #arrayConstructor
            // | identifier #columnReference
            // | SUBSTRING '(' valueExpression FROM valueExpression (FOR valueExpression)? ')' #substring
            //
            // TODO: The disambiguation of several of these is incorrect
            // Currently we're preferring the special syntax form
            // when we could have a function call. This applies to:
            //   POSITION
            //   TRY_CAST
            //   SUBSTRING
            // Currently cannot parse functions calls with those names.
            // Need to verify if that's an issue.
            //
            // TODO: This could be tightened up a bit.
            | TK::Identifier
            | TK::QuotedIdentifier
            | TK::BackquotedIdentifier
            | TK::DigitIdentifier => true,
            _ => false,
        }
    }

    // booleanExpression
    // : valueExpression predicate[$valueExpression.ctx]?   #predicated
    // | NOT booleanExpression                              #logicalNot
    // | left=booleanExpression operator=AND right=booleanExpression #logicalBinary
    // | left=booleanExpression operator=OR right=booleanExpression  #logicalBinary
    fn parse_boolean_expression(&mut self) -> ParseTree<'a> {
        self.parse_or_expression()
    }

    // Generic left-associative binary operator parser: parses one operand,
    // then folds in `operator operand` pairs while `peek_operator` matches.
    fn parse_binary_expression(
        &mut self,
        peek_operator: Peeker<'a>,
        parse_operand: ElementParser<'a>,
    ) -> ParseTree<'a> {
        let left = parse_operand(self);
        self.parse_binary_expression_tail(left, peek_operator, parse_operand)
    }

    // As parse_binary_expression, but the first operand (`left`) has already
    // been consumed by the caller.
    fn parse_binary_expression_tail(
        &mut self,
        left: ParseTree<'a>,
        peek_operator: Peeker<'a>,
        parse_operand: ElementParser<'a>,
    ) -> ParseTree<'a> {
        let mut left = left;
        while peek_operator(self) {
            let operator = self.eat_token();
            let right = parse_operand(self);
            left = parse_tree::binary_expression(left, operator, right);
        }
        left
    }

    // | left=booleanExpression operator=OR right=booleanExpression  #logicalBinary
    fn parse_or_expression(&mut self) -> ParseTree<'a> {
        self.parse_binary_expression(
            |parser| parser.peek_kind(TK::OR),
            |parser| parser.parse_and_expression(),
        )
    }

    fn parse_or_expression_tail(&mut self, and_expression:
ParseTree<'a>) -> ParseTree<'a> {
        self.parse_binary_expression_tail(
            and_expression,
            |parser| parser.peek_kind(TK::OR),
            |parser| parser.parse_and_expression(),
        )
    }

    // | left=booleanExpression operator=AND right=booleanExpression #logicalBinary
    fn parse_and_expression(&mut self) -> ParseTree<'a> {
        self.parse_binary_expression(
            |parser| parser.peek_kind(TK::AND),
            |parser| parser.parse_not_expression(),
        )
    }

    fn parse_and_expression_tail(&mut self, not_expression: ParseTree<'a>) -> ParseTree<'a> {
        self.parse_binary_expression_tail(
            not_expression,
            |parser| parser.peek_kind(TK::AND),
            |parser| parser.parse_not_expression(),
        )
    }

    // | NOT booleanExpression                              #logicalNot
    fn parse_not_expression(&mut self) -> ParseTree<'a> {
        let not = self.eat_opt(TK::NOT);
        if !not.is_empty() {
            // NOT is right-associative: recurse for chained NOTs.
            let operand = self.parse_not_expression();
            parse_tree::unary_expression(not, operand)
        } else {
            self.parse_predicated_expression()
        }
    }

    // : comparisonOperator right=valueExpression           #comparison
    // | comparisonOperator comparisonQuantifier '(' query ')' #quantifiedComparison
    fn parse_comparison_operator_suffix(&mut self, value: ParseTree<'a>) -> ParseTree<'a> {
        debug_assert!(self.peek_comparison_operator());
        let operator = self.eat_token();
        // TODO: Need better disambiguation between function_call
        // and comparison_quantifier((query) + 1)
        if self.peek_quantified_comparison() {
            let comparison_quantifier = self.eat_token();
            let (open_paren, query, close_paren) = self.parse_parenthesized_query();
            parse_tree::quantified_comparison(
                value,
                operator,
                comparison_quantifier,
                open_paren,
                query,
                close_paren,
            )
        } else {
            let right = self.parse_value_expression();
            parse_tree::binary_expression(value, operator, right)
        }
    }

    fn peek_comparison_operator(&mut self) -> bool {
        match self.peek() {
            TK::Equal
            | TK::LessGreater
            | TK::BangEqual
            | TK::OpenAngle
            | TK::CloseAngle
            | TK::LessEqual
            | TK::GreaterEqual => true,
            _ => false,
        }
    }

    // | IS NOT? NULL                                       #nullPredicate
    // | IS NOT? DISTINCT FROM right=valueExpression        #distinctFrom
    fn parse_is_suffix(&mut self, value: ParseTree<'a>) -> ParseTree<'a> {
        debug_assert!(self.peek_kind(TK::IS));
        let is = self.eat_token();
        let not_opt = self.eat_opt(TK::NOT);
        match self.peek() {
            TK::NULL => {
                let null = self.eat_token();
                parse_tree::null_predicate(value, is, not_opt, null)
            }
            TK::DISTINCT => {
                let distinct = self.eat_token();
                let from = self.eat(TK::FROM);
                let right = self.parse_value_expression();
                parse_tree::distinct_from(value, distinct, from, right)
            }
            _ => self.expected_error("NULL, DISTINCT"),
        }
    }

    // | NOT? BETWEEN lower=valueExpression AND upper=valueExpression #between
    fn parse_between_suffix(
        &mut self,
        value: ParseTree<'a>,
        not_opt: ParseTree<'a>,
    ) -> ParseTree<'a> {
        let between = self.eat(TK::BETWEEN);
        let lower = self.parse_value_expression();
        let and = self.eat(TK::AND);
        let upper = self.parse_value_expression();
        parse_tree::between(value, not_opt, between, lower, and, upper)
    }

    // | NOT? LIKE pattern=valueExpression (ESCAPE escape=valueExpression)? #like
    fn parse_like_suffix(&mut self, value: ParseTree<'a>, not_opt: ParseTree<'a>) -> ParseTree<'a> {
        let like = self.eat(TK::LIKE);
        let pattern = self.parse_value_expression();
        let escape_opt = self.eat_opt(TK::ESCAPE);
        let escape_value_opt = if escape_opt.is_empty() {
            self.eat_empty()
        } else {
            self.parse_value_expression()
        };
        parse_tree::like(value, not_opt, like, pattern, escape_opt, escape_value_opt)
    }

    // | NOT? IN '(' expression (',' expression)* ')'       #inList
    // | NOT? IN '(' query ')'                              #inSubquery
    fn parse_in_suffix(&mut self, value: ParseTree<'a>, not_opt: ParseTree<'a>) -> ParseTree<'a> {
        let in_ = self.eat(TK::IN);
        let expression_or_query = self.parse_row_constructor_or_subquery();
        if expression_or_query.is_parenthesized_expression() {
            // A single parenthesized expression becomes a one-element IN list.
            let (open_paren, expression, close_paren) =
                expression_or_query.unbox_parenthesized_expression();
            let expressions = parse_tree::list(
                open_paren,
                vec![(
                    expression,
                    parse_tree::empty(TextRange::empty(close_paren.get_full_start())),
                )],
                close_paren,
            );
            parse_tree::in_list(value, not_opt, in_, expressions)
        } else if expression_or_query.is_row_constructor() {
            let (expressions,) = expression_or_query.unbox_row_constructor();
            parse_tree::in_list(value, not_opt, in_, expressions)
        } else {
            debug_assert!(expression_or_query.is_subquery_expression(),);
            let (open_paren, query, close_paren) = expression_or_query.unbox_subquery_expression();
            parse_tree::in_subquery(value, not_opt, in_, open_paren, query, close_paren)
        }
    }

    // : valueExpression predicate[$valueExpression.ctx]?   #predicated
    // predicate[ParserRuleContext value]
    // : comparisonOperator right=valueExpression           #comparison
    // | comparisonOperator comparisonQuantifier '(' query ')' #quantifiedComparison
    // | NOT? BETWEEN lower=valueExpression AND upper=valueExpression #between
    // | NOT? IN '(' expression (',' expression)* ')'       #inList
    // | NOT? IN '(' query ')'                              #inSubquery
    // | NOT? LIKE pattern=valueExpression (ESCAPE escape=valueExpression)? #like
    // | IS NOT? NULL                                       #nullPredicate
    // | IS NOT? DISTINCT FROM right=valueExpression        #distinctFrom
    fn parse_predicated_expression(&mut self) -> ParseTree<'a> {
        let value = self.parse_value_expression();
        self.parse_predicated_expression_tail(value)
    }

    // Parses the optional predicate suffix after an already-parsed value
    // expression; returns the value unchanged when no predicate follows.
    fn parse_predicated_expression_tail(
        &mut self,
        value_expression: ParseTree<'a>,
    ) -> ParseTree<'a> {
        match self.peek() {
            TK::Equal
            | TK::LessGreater
            | TK::BangEqual
            | TK::OpenAngle
            | TK::CloseAngle
            | TK::LessEqual
            | TK::GreaterEqual => self.parse_comparison_operator_suffix(value_expression),
            TK::IS => self.parse_is_suffix(value_expression),
            _ => {
                let not_opt = self.eat_opt(TK::NOT);
                match self.peek() {
                    TK::BETWEEN => self.parse_between_suffix(value_expression, not_opt),
                    TK::IN => self.parse_in_suffix(value_expression, not_opt),
                    TK::LIKE => self.parse_like_suffix(value_expression, not_opt),
                    _ => {
                        // NOT consumed with no following predicate is an error.
                        if not_opt.is_empty() {
                            value_expression
                        } else {
                            self.expected_error("BETWEEN, IN, LIKE")
                        }
                    }
                }
            }
        }
    }

    fn peek_comparison_quantifier(&mut self) -> bool {
        match self.maybe_peek_predefined_name() {
            Some(PN::ALL) | Some(PN::SOME) | Some(PN::ANY) => true,
            _ => false,
        }
    }

    // True for e.g. `= ANY (SELECT ...` — quantifier then a parenthesized query.
    fn peek_quantified_comparison(&mut self) -> bool {
        self.peek_comparison_quantifier()
            && self.peek_kind_offset(TK::OpenParen, 1)
            && self.peek_query_primary_offset(2)
    }

    // valueExpression
    // : primaryExpression                                  #valueExpressionDefault
    // | valueExpression AT timeZoneSpecifier               #atTimeZone
    // | operator=(MINUS | PLUS) valueExpression            #arithmeticUnary
    // | left=valueExpression operator=(ASTERISK | SLASH | PERCENT) right=valueExpression #arithmeticBinary
    // | left=valueExpression operator=(PLUS | MINUS) right=valueExpression #arithmeticBinary
    // | left=valueExpression CONCAT right=valueExpression  #concatenation
    fn parse_value_expression(&mut self) -> ParseTree<'a> {
        self.parse_concat_expression()
    }

    // | left=valueExpression CONCAT right=valueExpression  #concatenation
    fn parse_concat_expression(&mut self) -> ParseTree<'a> {
        self.parse_binary_expression(
            |parser| parser.peek_kind(TK::BarBar),
            |parser|
parser.parse_additive_expression(),
        )
    }

    fn parse_concat_expression_tail(
        &mut self,
        additive_expression: ParseTree<'a>,
    ) -> ParseTree<'a> {
        self.parse_binary_expression_tail(
            additive_expression,
            |parser| parser.peek_kind(TK::BarBar),
            |parser| parser.parse_additive_expression(),
        )
    }

    // | left=valueExpression operator=(PLUS | MINUS) right=valueExpression #arithmeticBinary
    fn parse_additive_expression(&mut self) -> ParseTree<'a> {
        self.parse_binary_expression(
            |parser| parser.peek_additive_operator(),
            |parser| parser.parse_multiplicative_expression(),
        )
    }

    fn parse_additive_expression_tail(
        &mut self,
        multiplicative_expression: ParseTree<'a>,
    ) -> ParseTree<'a> {
        self.parse_binary_expression_tail(
            multiplicative_expression,
            |parser| parser.peek_additive_operator(),
            |parser| parser.parse_multiplicative_expression(),
        )
    }

    fn peek_additive_operator(&mut self) -> bool {
        match self.peek() {
            TK::Plus | TK::Minus => true,
            _ => false,
        }
    }

    // | left=valueExpression operator=(ASTERISK | SLASH | PERCENT) right=valueExpression #arithmeticBinary
    fn parse_multiplicative_expression(&mut self) -> ParseTree<'a> {
        self.parse_binary_expression(
            |parser| parser.peek_multiplicative_operator(),
            |parser| parser.parse_arithmetic_unary_expression(),
        )
    }

    fn parse_multiplicative_expression_tail(
        &mut self,
        unary_expression: ParseTree<'a>,
    ) -> ParseTree<'a> {
        self.parse_binary_expression_tail(
            unary_expression,
            |parser| parser.peek_multiplicative_operator(),
            |parser| parser.parse_arithmetic_unary_expression(),
        )
    }

    fn peek_multiplicative_operator(&mut self) -> bool {
        match self.peek() {
            TK::Asterisk | TK::Slash | TK::Percent => true,
            _ => false,
        }
    }

    // | operator=(MINUS | PLUS) valueExpression            #arithmeticUnary
    fn parse_arithmetic_unary_expression(&mut self) -> ParseTree<'a> {
        if self.peek_additive_operator() {
            let operator = self.eat_token();
            let operand = self.parse_at_time_zone();
            parse_tree::unary_expression(operator, operand)
        } else {
            self.parse_at_time_zone()
        }
    }

    // | valueExpression AT timeZoneSpecifier               #atTimeZone
    // timeZoneSpecifier
    // : TIME ZONE interval                                 #timeZoneInterval
    // | TIME ZONE string                                   #timeZoneString
    fn parse_at_time_zone(&mut self) -> ParseTree<'a> {
        let left = self.parse_primary_expression();
        self.parse_at_time_zone_tail(left)
    }

    fn parse_at_time_zone_tail(&mut self, value_expression: ParseTree<'a>) -> ParseTree<'a> {
        let at = self.eat_predefined_name_opt(PN::AT);
        if at.is_empty() {
            value_expression
        } else {
            let time = self.eat_predefined_name(PN::TIME);
            let zone = self.eat_predefined_name(PN::ZONE);
            let specifier = if self.peek_predefined_name(PN::INTERVAL) {
                self.parse_interval()
            } else {
                self.parse_string()
            };
            parse_tree::at_time_zone(value_expression, at, time, zone, specifier)
        }
    }

    // qualifiedName
    // : identifier ('.' identifier)*
    fn parse_qualified_name(&mut self) -> ParseTree<'a> {
        // don't use parse_separated_list as period
        // is in the follow set of qualified_name
        // NOTE(review): local name `seperators` is a misspelling of
        // "separators"; left as-is here (comment-only change).
        let mut elements = Vec::new();
        let mut seperators = Vec::new();
        let start = self.eat_empty();
        elements.push(self.parse_identifier());
        while self.peek_kind(TK::Period) && self.peek_identifier_offset(1) {
            seperators.push(self.eat_opt(TK::Period));
            elements.push(self.parse_identifier());
        }
        seperators.push(self.eat_empty());
        let end = self.eat_empty();
        parse_tree::qualified_name(parse_tree::list(
            start,
            elements.into_iter().zip(seperators.into_iter()).collect(),
            end,
        ))
    }

    fn peek_qualified_name(&mut self) -> bool {
        self.peek_identifier()
    }

    // Dispatches on the first token of a primary expression; suffixes
    // (dereference, subscript) are handled by parse_primary_expression_tail.
    fn parse_primary_prefix_expression(&mut self) -> ParseTree<'a> {
        match self.peek() {
            // : NULL #nullLiteral
            TK::NULL => self.parse_literal(),
            // | DOUBLE_PRECISION string #typeConstructor
            TK::DoublePrecision => self.parse_type_constructor(),
            // | booleanValue #booleanLiteral
            TK::TRUE | TK::FALSE => self.parse_literal(),
            // | number #numericLiteral
            TK::Decimal | TK::Double | TK::Integer => self.parse_literal(),
            // | string #stringLiteral
            TK::String | TK::UnicodeString => self.parse_literal(),
            // | BINARY_LITERAL #binaryLiteral
            TK::BinaryLiteral => self.parse_literal(),
            // | '?' #parameter
            TK::Question => self.parse_parameter(),
            // // This is an extension to ANSI SQL, which considers EXISTS to be a <boolean expression>
            // | EXISTS '(' query ')' #exists
            TK::EXISTS => self.parse_exists(),
            // | CASE valueExpression whenClause+ (ELSE elseExpression=expression)? END #simpleCase
            // | CASE whenClause+ (ELSE elseExpression=expression)? END #searchedCase
            TK::CASE => self.parse_case(),
            // | CAST '(' expression AS type_ ')' #cast
            TK::CAST => self.parse_cast(),
            // | name=CURRENT_DATE #specialDateTimeFunction
            TK::CURRENT_DATE => self.parse_current_date(),
            // | name=CURRENT_TIME ('(' precision=INTEGER_VALUE ')')? #specialDateTimeFunction
            TK::CURRENT_TIME => self.parse_current_time(),
            // | name=CURRENT_TIMESTAMP ('(' precision=INTEGER_VALUE ')')? #specialDateTimeFunction
            TK::CURRENT_TIMESTAMP => self.parse_current_timestamp(),
            // | name=LOCALTIME ('(' precision=INTEGER_VALUE ')')? #specialDateTimeFunction
            TK::LOCALTIME => self.parse_localtime(),
            // | name=LOCALTIMESTAMP ('(' precision=INTEGER_VALUE ')')? #specialDateTimeFunction
            TK::LOCALTIMESTAMP => self.parse_localtimestamp(),
            // | name=CURRENT_USER #currentUser
            TK::CURRENT_USER => self.parse_current_user(),
            // | name=CURRENT_PATH #currentPath
            TK::CURRENT_PATH => self.parse_current_path(),
            // | NORMALIZE '(' valueExpression (',' normalForm)? ')' #normalize
            TK::NORMALIZE => self.parse_normalize(),
            // | EXTRACT '(' identifier FROM valueExpression ')' #extract
            TK::EXTRACT => self.parse_extract(),
            // | GROUPING '(' (qualifiedName (',' qualifiedName)*)? ')' #groupingOperation
            TK::GROUPING => self.parse_grouping(),
            // | configureExpression #conf
            TK::CONFIGURE => self.parse_configure_expression(),
            // | '(' expression (',' expression)+ ')' #rowConstructor
            // | '(' (identifier (',' identifier)*)? ')' '->' expression #lambda
            // | '(' query ')' #subqueryExpression
            // | '(' expression ')' #parenthesizedExpression
            TK::OpenParen => {
                if self.peek_lambda() {
                    self.parse_lambda()
                } else {
                    self.parse_row_constructor_or_subquery()
                }
            }
            // | interval #intervalLiteral
            // | identifier string #typeConstructor
            // | POSITION '(' valueExpression IN valueExpression ')' #position
            // | ROW '(' expression (',' expression)* ')' #rowConstructor
            // | qualifiedName '(' ASTERISK ')' filter_? over? #functionCall
            // | qualifiedName '(' (setQuantifier? expression (',' expression)*)?
            //     (ORDER BY sortItem (',' sortItem)*)? ')' filter_? over? #functionCall
            // | identifier '->' expression #lambda
            // | TRY_CAST '(' expression AS type_ ')' #cast
            // | ARRAY '[' (expression (',' expression)*)? ']' #arrayConstructor
            // | identifier #columnReference
            // | SUBSTRING '(' valueExpression FROM valueExpression (FOR valueExpression)? ')' #substring
            //
            // TODO: The disambiguation of several of these is incorrect
            // Currently we're preferring the special syntax form
            // when we could have a function call. This applies to:
            //   POSITION
            //   TRY_CAST
            //   SUBSTRING
            // Currently cannot parse functions calls with those names.
            // Need to verify if that's an issue.
            TK::Identifier => {
                // Predefined names are contextual keywords: only treat them
                // specially when the following tokens match their syntax.
                if let Some(name) = self.maybe_peek_predefined_name() {
                    match name {
                        PN::INTERVAL => {
                            if self.peek_interval() {
                                return self.parse_interval();
                            }
                        }
                        PN::POSITION => {
                            if self.peek_position() {
                                return self.parse_position();
                            }
                        }
                        PN::ROW => {
                            if self.peek_row_constructor() {
                                return self.parse_row_constructor();
                            }
                        }
                        PN::TRY_CAST => {
                            if self.peek_try_cast() {
                                return self.parse_try_cast();
                            }
                        }
                        PN::ARRAY => {
                            if self.peek_array_constructor() {
                                return self.parse_array_constructor();
                            }
                        }
                        PN::SUBSTRING => {
                            if self.peek_substring() {
                                return self.parse_substring();
                            }
                        }
                        _ => (),
                    }
                }
                // | identifier string #typeConstructor
                // | qualifiedName '(' ASTERISK ')' filter_? over? #functionCall
                // | qualifiedName '(' (setQuantifier? expression (',' expression)*)?
                //     (ORDER BY sortItem (',' sortItem)*)? ')' filter_? over? #functionCall
                // | identifier '->' expression #lambda
                // | identifier #columnReference
                self.parse_identifier_start_expression()
            }
            // | identifier string #typeConstructor
            // | qualifiedName '(' ASTERISK ')' filter_? over? #functionCall
            // | qualifiedName '(' (setQuantifier? expression (',' expression)*)?
            // | identifier '->' expression #lambda
            // | identifier #columnReference
            TK::QuotedIdentifier | TK::BackquotedIdentifier | TK::DigitIdentifier => {
                self.parse_identifier_start_expression()
            }
            _ => self.expected_error("Expected expression."),
        }
    }

    fn parse_primary_expression(&mut self) -> ParseTree<'a> {
        let operand = self.parse_primary_prefix_expression();
        self.parse_primary_expression_tail(operand)
    }

    // Consumes postfix operators ('.' field access, '[' subscript) after a
    // primary expression until none remain.
    fn parse_primary_expression_tail(
        &mut self,
        primary_expression: ParseTree<'a>,
    ) -> ParseTree<'a> {
        let mut result = primary_expression;
        loop {
            // suffixes
            match self.peek() {
                // | base=primaryExpression '.' fieldName=identifier #dereference
                TK::Period => {
                    let period = self.eat(TK::Period);
                    let field_name = self.parse_identifier();
                    result = parse_tree::dereference(result, period, field_name)
                }
                // | value=primaryExpression '[' index=valueExpression ']' #subscript
                TK::OpenSquare => {
                    let open_square = self.eat(TK::OpenSquare);
                    let index = self.parse_value_expression();
                    let close_square = self.eat(TK::CloseSquare);
                    result = parse_tree::subscript(result, open_square, index, close_square)
                }
                _ => return result,
            }
        }
    }

    // Re-runs the whole expression precedence chain on top of an
    // already-parsed parenthesized expression, tightest-binding first.
    // NOTE(review): local `multitplicative` is a misspelling of
    // "multiplicative"; left as-is here (comment-only change).
    fn parse_paren_expression_tail(&mut self, paren_expression: ParseTree<'a>) -> ParseTree<'a> {
        let primary_expression = self.parse_primary_expression_tail(paren_expression);
        let at_time_zone = self.parse_at_time_zone_tail(primary_expression);
        let multitplicative = self.parse_multiplicative_expression_tail(at_time_zone);
        let additive = self.parse_additive_expression_tail(multitplicative);
        let concat = self.parse_concat_expression_tail(additive);
        let predicate = self.parse_predicated_expression_tail(concat);
        let and = self.parse_and_expression_tail(predicate);
        let or = self.parse_or_expression_tail(and);
        or
    }

    // | '(' (identifier (',' identifier)*)? ')' '->' expression #lambda
    // Lookahead-only scan: true when a parenthesized identifier list followed
    // by '->' starts at the current position.
    fn peek_lambda(&mut self) -> bool {
        if self.peek_kind(TK::OpenParen) {
            let mut offset = 1;
            if self.peek_identifier_offset(offset) {
                offset += 1;
                while self.peek_kind_offset(TK::Comma, offset) {
                    offset += 1;
                    if self.peek_identifier_offset(offset) {
                        offset += 1;
                    } else {
                        return false;
                    }
                }
            }
            self.peek_kind_offset(TK::CloseParen, offset)
                && self.peek_kind_offset(TK::Arrow, offset + 1)
        } else {
            false
        }
    }

    fn parse_lambda(&mut self) -> ParseTree<'a> {
        let parameters = self.parse_delimited_separated_list_opt(
            TK::OpenParen,
            TK::Comma,
            |parser| parser.peek_identifier(),
            |parser| parser.parse_identifier(),
            TK::CloseParen,
        );
        // NOTE(review): local `array` holds the '->' arrow token; the name
        // looks like a typo for "arrow". Left as-is (comment-only change).
        let array = self.eat(TK::Arrow);
        let body = self.parse_expression();
        parse_tree::lambda(parameters, array, body)
    }

    // Converts an ambiguous parenthesized expression-or-query into a concrete
    // expression tree (subquery_expression or parenthesized_expression).
    fn paren_expression_or_query_to_expression(
        &mut self,
        open_paren: ParseTree<'a>,
        expression_or_query: ParseTree<'a>,
        close_paren: ParseTree<'a>,
    ) -> ParseTree<'a> {
        if expression_or_query.is_query() || expression_or_query.is_query_no_with() {
            parse_tree::subquery_expression(open_paren, expression_or_query, close_paren)
        } else if expression_or_query.is_expression_or_query() {
            parse_tree::parenthesized_expression(
                open_paren,
                self.expression_or_query_to_expression(expression_or_query),
                close_paren,
            )
        } else {
            // TODO: debug_assert!(expression_or_query.is_any_expression());
            parse_tree::parenthesized_expression(open_paren, expression_or_query, close_paren)
        }
    }

    // Resolves an ambiguous tree to an expression, reporting an error when it
    // can only be a query.
    fn expression_or_query_to_expression(
        &mut self,
        expression_or_query: ParseTree<'a>,
    ) -> ParseTree<'a> {
        if expression_or_query.is_query() || expression_or_query.is_query_no_with() {
            self.add_error_of_tree(&expression_or_query, "Expected expression, found query.");
            expression_or_query
        } else if expression_or_query.is_expression_or_query() {
            let (open_paren, expression_or_query, close_paren) =
                expression_or_query.unbox_expression_or_query();
            self.paren_expression_or_query_to_expression(
                open_paren,
                expression_or_query,
                close_paren,
            )
        } else {
            // TODO: debug_assert!(expression_or_query.is_any_expression())
            expression_or_query
        }
    }

    // yields one of:
    //   one of several expression trees:
    //   query - possibly with or without a with clause
    //     query_no_with
    //   expression_or_query
    fn parse_expression_or_query(&mut self) -> ParseTree<'a> {
        if self.peek_kind(TK::OpenParen) {
            let open_paren = self.eat(TK::OpenParen);
            let expression_or_query = self.parse_expression_or_query();
            if self.peek_kind(TK::Comma) {
                // '(' expr ',' ... must be a row constructor.
                let row_constructor =
                    self.parse_row_constructor_tail(open_paren, expression_or_query);
                self.parse_paren_expression_tail(row_constructor)
            } else {
                let close_paren = self.eat(TK::CloseParen);
                // A WITH clause can only occur in a subquery expression.
                let must_be_subquery_expression =
                    expression_or_query.is_query() && !expression_or_query.as_query().with.is_empty();
                if must_be_subquery_expression {
                    parse_tree::subquery_expression(open_paren, expression_or_query, close_paren)
                } else if {
                    let can_be_query_primary = !must_be_subquery_expression
                        && (expression_or_query.is_query_no_with()
                            || expression_or_query.is_query()
                            || expression_or_query.is_expression_or_query());
                    can_be_query_primary
                } {
                    let must_be_query_tail = self.peek_query_primary_follow();
                    if must_be_query_tail {
                        let subquery =
                            parse_tree::subquery(open_paren, expression_or_query, close_paren);
                        // this yields a query_no_with
                        self.parse_query_primary_tail(subquery)
                    } else if self.peek_kind(TK::CloseParen) {
                        // we have a query which can be consumed as either
                        // a subquery or a subquery_expression...
                        // make the decision up the tree.
                        parse_tree::expression_or_query(
                            open_paren,
                            expression_or_query,
                            close_paren,
                        )
                    } else {
                        // we have a parenthesized query, with what looks like an expression tail
                        // afterwards
                        let subquery_expression = parse_tree::subquery_expression(
                            open_paren,
                            expression_or_query,
                            close_paren,
                        );
                        self.parse_paren_expression_tail(subquery_expression)
                    }
                } else {
                    // we have an expression
                    self.parse_paren_expression_tail(parse_tree::parenthesized_expression(
                        open_paren,
                        expression_or_query,
                        close_paren,
                    ))
                }
            }
        } else if self.peek_query_offset(0) {
            // yields a query
            self.parse_query()
        } else {
            self.parse_expression()
        }
    }

    // | '(' expression (',' expression)+ ')' #rowConstructor
    // | '(' query ')' #subqueryExpression
    // | '(' expression ')' #parenthesizedExpression
    fn parse_row_constructor_or_subquery(&mut self) -> ParseTree<'a> {
        let open_paren = self.eat(TK::OpenParen);
        let expression_or_query = self.parse_expression_or_query();
        if self.peek_kind(TK::Comma) {
            self.parse_row_constructor_tail(open_paren, expression_or_query)
        } else {
            // either expression or query is permitted
            let close_paren = self.eat(TK::CloseParen);
            self.paren_expression_or_query_to_expression(
                open_paren,
                expression_or_query,
                close_paren,
            )
        }
    }

    /// parse expression_list tail and return a row constructor
    fn parse_row_constructor_tail(
        &mut self,
        open_paren: ParseTree<'a>,
        expression_or_query: ParseTree<'a>,
    ) -> ParseTree<'a> {
        let comma = self.eat(TK::Comma);
        let mut elements_tail =
            self.parse_separated_list_elements(TK::Comma, |parser| parser.parse_expression());
        let close_paren = self.eat(TK::CloseParen);
        let mut elements = Vec::with_capacity(elements_tail.len() + 1);
        // validate that expression_or_query is actually an expression.
        let expression = self.expression_or_query_to_expression(expression_or_query);
        elements.push((expression, comma));
        elements.append(&mut elements_tail);
        parse_tree::row_constructor(parse_tree::list(open_paren, elements, close_paren))
    }

    // | POSITION '(' valueExpression IN valueExpression ')' #position
    fn peek_position(&mut self) -> bool {
        self.peek_predefined_name(PN::POSITION) && self.peek_kind_offset(TK::OpenParen, 1)
    }

    fn parse_position(&mut self) -> ParseTree<'a> {
        let position = self.eat_predefined_name(PN::POSITION);
        let open_paren = self.eat(TK::OpenParen);
        let value = self.parse_value_expression();
        let in_ = self.eat(TK::IN);
        let target = self.parse_value_expression();
        let close_paren = self.eat(TK::CloseParen);
        parse_tree::position(position, open_paren, value, in_, target, close_paren)
    }

    // interval
    // : INTERVAL sign=(PLUS | MINUS)? (string | configureExpression) from_=intervalField (TO to=intervalField)?
    fn peek_interval(&mut self) -> bool {
        // TODO: must peek all the way to the interval_field
        // to ensure disambiguation with additive_binary if sign is present
        // TODO: must peek to interval field if no-sign present,
        // string is present, to disambiguate with type constructor
        self.peek_predefined_name(PN::INTERVAL)
            && match self.peek_offset(1) {
                TK::Plus
                | TK::Minus
                | TK::String
                | TK::UnicodeString
                | TK::Identifier
                | TK::CONFIGURE => true,
                _ => false,
            }
    }

    fn parse_interval(&mut self) -> ParseTree<'a> {
        let interval = self.eat_predefined_name(PN::INTERVAL);
        let sign_opt = self.parse_sign_opt();
        let value = if self.peek_string() {
            self.parse_string()
        } else {
            self.parse_configure_expression()
        };
        let from = self.parse_interval_field();
        let to_kw_opt = self.eat_predefined_name_opt(PN::TO);
        // TO's interval field only follows when TO itself was present.
        let to = if to_kw_opt.is_empty() {
            self.eat_empty()
        } else {
            self.parse_interval_field()
        };
        parse_tree::interval(interval, sign_opt, value, from, to_kw_opt, to)
    }

    fn parse_sign_opt(&mut self) -> ParseTree<'a> {
        match self.peek() {
            TK::Plus | TK::Minus => self.eat_token(),
            _ =>
self.eat_empty(), } } // intervalField // : YEAR | MONTH | DAY | HOUR | MINUTE | SECOND fn peek_interval_field_offset(&mut self, offset: usize) -> bool { match self.maybe_peek_predefined_name_offset(offset) { Some(PN::YEAR) | Some(PN::MONTH) | Some(PN::DAY) | Some(PN::HOUR) | Some(PN::MINUTE) | Some(PN::SECOND) => true, _ => false, } } fn parse_interval_field(&mut self) -> ParseTree<'a> { if self.peek_interval_field_offset(0) { self.eat_token() } else { self.expected_error("interval field") } } // | ROW '(' expression (',' expression)* ')' #rowConstructor fn peek_row_constructor(&mut self) -> bool { self.peek_predefined_name(PN::ROW) && self.peek_kind_offset(TK::OpenParen, 1) } fn parse_row_constructor(&mut self) -> ParseTree<'a> { let row = self.eat_predefined_name(PN::ROW); let elements = self.parse_parenthesized_comma_separated_list(|parser| parser.parse_expression()); parse_tree::row(row, elements) } // | TRY_CAST '(' expression AS type_ ')' #cast fn peek_try_cast(&mut self) -> bool { self.peek_predefined_name(PN::TRY_CAST) && self.peek_kind_offset(TK::OpenParen, 1) } fn parse_try_cast(&mut self) -> ParseTree<'a> { let try_cast = self.eat_predefined_name(PN::TRY_CAST); let open_paren = self.eat(TK::OpenParen); let value = self.parse_expression(); let as_ = self.eat(TK::AS); let type_ = self.parse_type(); let close_paren = self.eat(TK::CloseParen); parse_tree::try_cast(try_cast, open_paren, value, as_, type_, close_paren) } // | ARRAY '[' (expression (',' expression)*)? 
']' #arrayConstructor fn peek_array_constructor(&mut self) -> bool { self.peek_predefined_name(PN::ARRAY) && self.peek_kind_offset(TK::OpenSquare, 1) } fn parse_array_constructor(&mut self) -> ParseTree<'a> { let array = self.eat_predefined_name(PN::ARRAY); let elements = self.parse_delimited_separated_list_opt( TK::OpenSquare, TK::Comma, |parser| parser.peek_expression(), |parser| parser.parse_expression(), TK::CloseSquare, ); parse_tree::array(array, elements) } // configureExpression // : CONFIGURE '(' identifier ',' configure_value_ ')' fn peek_configure_expression(&mut self) -> bool { self.peek_kind(TK::CONFIGURE) } fn parse_configure_expression(&mut self) -> ParseTree<'a> { let configure = self.eat(TK::CONFIGURE); let open_paren = self.eat(TK::OpenParen); let identifier = self.parse_identifier(); let comma = self.eat(TK::Comma); let value = self.parse_configure_value(); let close_paren = self.eat(TK::CloseParen); parse_tree::configure_expression( configure, open_paren, identifier, comma, value, close_paren, ) } // configure_value_ // : string | number | booleanValue fn parse_configure_value(&mut self) -> ParseTree<'a> { match self.peek() { TK::String | TK::UnicodeString | TK::Decimal | TK::Double | TK::Integer | TK::TRUE | TK::FALSE => self.parse_literal(), _ => self.expected_error("configure value"), } } // | SUBSTRING '(' valueExpression FROM valueExpression (FOR valueExpression)? 
')' #substring fn peek_substring(&mut self) -> bool { self.peek_predefined_name(PN::SUBSTRING) && self.peek_kind_offset(TK::OpenParen, 1) } fn parse_substring(&mut self) -> ParseTree<'a> { let substring = self.eat_predefined_name(PN::SUBSTRING); let open_paren = self.eat(TK::OpenParen); let value = self.parse_value_expression(); let from = self.eat(TK::FROM); let from_value = self.parse_value_expression(); let for_opt = self.eat_opt(TK::FOR); let for_value = if for_opt.is_empty() { self.eat_empty() } else { self.parse_value_expression() }; let close_paren = self.eat(TK::CloseParen); parse_tree::substring( substring, open_paren, value, from, from_value, for_opt, for_value, close_paren, ) } // | '(' query ')' #subqueryExpression fn parse_subquery_expression(&mut self) -> ParseTree<'a> { let (open_paren, query, close_paren) = self.parse_parenthesized_query(); parse_tree::subquery_expression(open_paren, query, close_paren) } // | GROUPING '(' (qualifiedName (',' qualifiedName)*)? ')' #groupingOperation fn parse_grouping(&mut self) -> ParseTree<'a> { let grouping = self.eat(TK::GROUPING); let groups = self.parse_delimited_separated_list_opt( TK::OpenParen, TK::Comma, |parser| parser.peek_qualified_name(), |parser| parser.parse_qualified_name(), TK::CloseParen, ); parse_tree::grouping(grouping, groups) } // | EXTRACT '(' identifier FROM valueExpression ')' #extract fn parse_extract(&mut self) -> ParseTree<'a> { let extract = self.eat(TK::EXTRACT); let open_paren = self.eat(TK::OpenParen); let identifier = self.parse_identifier(); let from = self.eat(TK::FROM); let value = self.parse_value_expression(); let close_paren = self.eat(TK::CloseParen); parse_tree::extract(extract, open_paren, identifier, from, value, close_paren) } // | name=CURRENT_PATH #currentPath fn parse_current_path(&mut self) -> ParseTree<'a> { self.parse_literal() } // | name=CURRENT_USER #currentUser fn parse_current_user(&mut self) -> ParseTree<'a> { self.parse_literal() } // | name=CURRENT_DATE 
#specialDateTimeFunction fn parse_current_date(&mut self) -> ParseTree<'a> { self.parse_literal() } // | name=CURRENT_TIME ('(' precision=INTEGER_VALUE ')')? #specialDateTimeFunction fn parse_current_time(&mut self) -> ParseTree<'a> { let current_time = self.eat(TK::CURRENT_TIME); let (open_paren, precision, close_paren) = if self.peek_kind(TK::OpenParen) { self.parse_parenthesized(|parser| parser.eat(TK::Integer)) } else { (self.eat_empty(), self.eat_empty(), self.eat_empty()) }; parse_tree::current_time(current_time, open_paren, precision, close_paren) } // | name=CURRENT_TIMESTAMP ('(' precision=INTEGER_VALUE ')')? #specialDateTimeFunction fn parse_current_timestamp(&mut self) -> ParseTree<'a> { let current_timestamp = self.eat(TK::CURRENT_TIMESTAMP); let (open_paren, precision, close_paren) = if self.peek_kind(TK::OpenParen) { self.parse_parenthesized(|parser| parser.eat(TK::Integer)) } else { (self.eat_empty(), self.eat_empty(), self.eat_empty()) }; parse_tree::current_timestamp(current_timestamp, open_paren, precision, close_paren) } // | NORMALIZE '(' valueExpression (',' normalForm)? ')' #normalize fn parse_normalize(&mut self) -> ParseTree<'a> { let normalize = self.eat(TK::NORMALIZE); let open_paren = self.eat(TK::OpenParen); let value = self.parse_value_expression(); let comma_opt = self.eat_opt(TK::Comma); let normal_form = if comma_opt.is_empty() { self.eat_empty() } else { self.parse_normal_form() }; let close_paren = self.eat(TK::CloseParen); parse_tree::normalize( normalize, open_paren, value, comma_opt, normal_form, close_paren, ) } // normalForm // : NFD | NFC | NFKD | NFKC fn parse_normal_form(&mut self) -> ParseTree<'a> { match self.maybe_peek_predefined_name() { Some(PN::NFD) | Some(PN::NFC) | Some(PN::NFKD) | Some(PN::NFKC) => self.eat_token(), _ => self.expected_error("normal form"), } } // | name=LOCALTIMESTAMP ('(' precision=INTEGER_VALUE ')')? 
#specialDateTimeFunction fn parse_localtimestamp(&mut self) -> ParseTree<'a> { let localtimestamp = self.eat(TK::LOCALTIMESTAMP); let (open_paren, precision, close_paren) = if self.peek_kind(TK::OpenParen) { self.parse_parenthesized(|parser| parser.eat(TK::Integer)) } else { (self.eat_empty(), self.eat_empty(), self.eat_empty()) }; parse_tree::localtimestamp(localtimestamp, open_paren, precision, close_paren) } // | name=LOCALTIME ('(' precision=INTEGER_VALUE ')')? #specialDateTimeFunction fn parse_localtime(&mut self) -> ParseTree<'a> { let localtime = self.eat(TK::LOCALTIME); let (open_paren, precision, close_paren) = if self.peek_kind(TK::OpenParen) { self.parse_parenthesized(|parser| parser.eat(TK::Integer)) } else { (self.eat_empty(), self.eat_empty(), self.eat_empty()) }; parse_tree::localtime(localtime, open_paren, precision, close_paren) } // | CAST '(' expression AS type_ ')' #cast fn parse_cast(&mut self) -> ParseTree<'a> { let cast = self.eat(TK::CAST); let open_paren = self.eat(TK::OpenParen); let value = self.parse_expression(); let as_ = self.eat(TK::AS); let type_ = self.parse_type(); let close_paren = self.eat(TK::CloseParen); parse_tree::cast(cast, open_paren, value, as_, type_, close_paren) } // | CASE valueExpression whenClause+ (ELSE elseExpression=expression)? END #simpleCase // | CASE whenClause+ (ELSE elseExpression=expression)? 
END #searchedCase fn parse_case(&mut self) -> ParseTree<'a> { let case = self.eat(TK::CASE); let value_opt = if self.peek_when_clause() { self.eat_empty() } else { self.parse_expression() }; let when_clauses = self.parse_list( |parser| parser.peek_when_clause(), |parser| parser.parse_when_clause(), ); let else_opt = self.eat_opt(TK::ELSE); let default = if else_opt.is_empty() { self.eat_empty() } else { self.parse_expression() }; let end = self.eat(TK::END); parse_tree::case(case, value_opt, when_clauses, else_opt, default, end) } // whenClause // : WHEN condition=expression THEN result=expression fn parse_when_clause(&mut self) -> ParseTree<'a> { let when = self.eat(TK::WHEN); let condition = self.parse_expression(); let then = self.eat(TK::THEN); let result = self.parse_expression(); parse_tree::when_clause(when, condition, then, result) } fn peek_when_clause(&mut self) -> bool { self.peek_kind(TK::WHEN) } // | EXISTS '(' query ')' #exists fn parse_exists(&mut self) -> ParseTree<'a> { let exists = self.eat(TK::EXISTS); let (open_paren, query, close_paren) = self.parse_parenthesized_query(); parse_tree::exists(exists, open_paren, query, close_paren) } // | '?' #parameter fn parse_parameter(&mut self) -> ParseTree<'a> { self.eat(TK::Question) } fn parse_literal(&mut self) -> ParseTree<'a> { parse_tree::literal(self.eat_token()) } // | identifier string #typeConstructor // | DOUBLE_PRECISION string #typeConstructor fn parse_type_constructor(&mut self) -> ParseTree<'a> { let type_ = if self.peek_kind(TK::DoublePrecision) { self.eat_token() } else { self.parse_identifier() }; let value = self.parse_string(); parse_tree::type_constructor(type_, value) } // | identifier string #typeConstructor // | qualifiedName '(' ASTERISK ')' filter_? over? #functionCall // | qualifiedName '(' (setQuantifier? expression (',' expression)*)? // (ORDER BY sortItem (',' sortItem)*)? ')' filter_? over? 
#functionCall // | identifier '->' expression #lambda // | identifier #columnReference fn parse_identifier_start_expression(&mut self) -> ParseTree<'a> { match self.peek_offset(1) { TK::Arrow => self.parse_parenless_lambda(), TK::String | TK::UnicodeString => self.parse_type_constructor(), _ => { if self.peek_function_call() { self.parse_function_call() } else { parse_tree::identifier(self.eat_token()) } } } } fn parse_parenless_lambda(&mut self) -> ParseTree<'a> { let parameter = self.eat_token(); let arrow = self.eat(TK::Arrow); let body = self.parse_expression(); parse_tree::lambda(parameter, arrow, body) } // | qualifiedName '(' ASTERISK ')' filter_? over? #functionCall // | qualifiedName '(' (setQuantifier? expression (',' expression)*)? // (ORDER BY sortItem (',' sortItem)*)? ')' filter_? over? #functionCall fn peek_function_call(&mut self) -> bool { let mut offset = 1; while self.peek_kind_offset(TK::Period, offset) { offset += 1; if self.peek_identifier_offset(offset) { offset += 1; } else { return false; } } self.peek_kind_offset(TK::OpenParen, offset) } fn parse_function_call(&mut self) -> ParseTree<'a> { let name = self.parse_qualified_name(); let open_paren = self.eat(TK::OpenParen); if self.peek_kind(TK::Asterisk) { let set_quantifier_opt = self.eat_empty(); let arguments = self.eat(TK::Asterisk); let order_by_opt = self.eat_empty(); let close_paren = self.eat(TK::CloseParen); let filter_opt = self.parse_filter_opt(); let null_treatment_opt = self.eat_empty(); let over_opt = self.parse_over_opt(); parse_tree::function_call( name, open_paren, set_quantifier_opt, arguments, order_by_opt, close_paren, filter_opt, null_treatment_opt, over_opt, ) } else { let set_quantifier_opt = self.parse_set_quantifier_opt(|parser, offset| { parser.peek_kind_offset(TK::CloseParen, offset) || parser.peek_expression_offset(offset) || parser.peek_kind_offset(TK::ORDER, offset) }); let arguments = if set_quantifier_opt.is_empty() { self.parse_comma_separated_list_opt( 
|parser| parser.peek_expression(), |parser| parser.parse_expression(), ) } else { self.parse_comma_separated_list(|parser| parser.parse_expression()) }; let order_by_opt = self.parse_order_by_opt(); let close_paren = self.eat(TK::CloseParen); let filter_opt = self.parse_filter_opt(); let null_treatment_opt = self.parse_null_treatment_opt(); let over_opt = if null_treatment_opt.is_empty() { self.parse_over_opt() } else { self.parse_over() }; parse_tree::function_call( name, open_paren, set_quantifier_opt, arguments, order_by_opt, close_paren, filter_opt, null_treatment_opt, over_opt, ) } } // filter // : FILTER '(' WHERE booleanExpression ')' fn parse_filter_opt(&mut self) -> ParseTree<'a> { if self.peek_filter() { let filter = self.eat_predefined_name(PN::FILTER); let open_paren = self.eat(TK::OpenParen); let where_ = self.eat(TK::WHERE); let predicate = self.parse_boolean_expression(); let close_paren = self.eat(TK::CloseParen); parse_tree::filter(filter, open_paren, where_, predicate, close_paren) } else { self.eat_empty() } } fn peek_filter(&mut self) -> bool { self.peek_predefined_name(PN::FILTER) && self.peek_kind_offset(TK::OpenParen, 1) } // over // : OVER '(' // (PARTITION BY partition+=expression (',' partition+=expression)*)? // (ORDER BY sortItem (',' sortItem)*)? // windowFrame? 
    //     ')'
    //
    // Parses an optional OVER clause for a window function call.
    fn parse_over_opt(&mut self) -> ParseTree<'a> {
        if self.peek_over() {
            self.parse_over()
        } else {
            self.eat_empty()
        }
    }

    fn parse_over(&mut self) -> ParseTree<'a> {
        let over = self.eat_predefined_name(PN::OVER);
        let open_paren = self.eat(TK::OpenParen);
        let partition_opt = self.eat_predefined_name_opt(PN::PARTITION);
        // PARTITION is always followed by BY and at least one expression
        let (by, partitions) = if partition_opt.is_empty() {
            (self.eat_empty(), self.eat_empty())
        } else {
            (
                self.eat(TK::BY),
                self.parse_comma_separated_list(|parser| parser.parse_expression()),
            )
        };
        let order_by_opt = self.parse_order_by_opt();
        let window_frame = self.parse_window_frame_opt();
        let close_paren = self.eat(TK::CloseParen);
        parse_tree::over(
            over,
            open_paren,
            partition_opt,
            by,
            partitions,
            order_by_opt,
            window_frame,
            close_paren,
        )
    }

    fn peek_over(&mut self) -> bool {
        self.peek_predefined_name(PN::OVER) && self.peek_kind_offset(TK::OpenParen, 1)
    }

    // nullTreatment
    // : IGNORE NULLS
    // | RESPECT NULLS
    fn parse_null_treatment_opt(&mut self) -> ParseTree<'a> {
        if self.peek_null_treatment() {
            // IGNORE/RESPECT are contextual keywords lexed as identifiers
            let treatment = self.eat(TK::Identifier);
            let nulls = self.eat_predefined_name(PN::NULLS);
            parse_tree::null_treatment(treatment, nulls)
        } else {
            self.eat_empty()
        }
    }

    fn peek_null_treatment(&mut self) -> bool {
        (self.peek_predefined_name(PN::IGNORE) || self.peek_predefined_name(PN::RESPECT))
            && self.peek_predefined_name_offset(PN::NULLS, 1)
            // null treatment must be followed by OVER
            && self.peek_predefined_name_offset(PN::OVER, 2)
    }

    // windowFrame
    // : frameType=RANGE startBound=frameBound
    // | frameType=ROWS startBound=frameBound
    // | frameType=RANGE BETWEEN startBound=frameBound AND end=frameBound
    // | frameType=ROWS BETWEEN startBound=frameBound AND end=frameBound
    fn parse_window_frame_opt(&mut self) -> ParseTree<'a> {
        if self.peek_predefined_name(PN::RANGE) || self.peek_predefined_name(PN::ROWS) {
            let frame_type = self.eat_token();
            let between_opt = self.eat_opt(TK::BETWEEN);
            let start = self.parse_frame_bound();
            // BETWEEN requires the AND <end-bound> tail
            let (and, end) = if between_opt.is_empty() {
                (self.eat_empty(), self.eat_empty())
            } else {
                (self.eat(TK::AND), self.parse_frame_bound())
            };
            parse_tree::window_frame(frame_type, between_opt, start, and, end)
        } else {
            self.eat_empty()
        }
    }

    // frameBound
    // : UNBOUNDED boundType=PRECEDING                 #unboundedFrame
    // | UNBOUNDED boundType=FOLLOWING                 #unboundedFrame
    // | CURRENT ROW                                   #currentRowBound
    // | expression boundType=(PRECEDING | FOLLOWING)  #boundedFrame // expression should be unsignedLiteral
    fn parse_frame_bound(&mut self) -> ParseTree<'a> {
        if self.peek_predefined_name(PN::UNBOUNDED)
            && (self.peek_predefined_name_offset(PN::PRECEDING, 1)
                || self.peek_predefined_name_offset(PN::FOLLOWING, 1))
        {
            parse_tree::unbounded_frame(self.eat_token(), self.eat_token())
        } else if self.peek_predefined_name_offset(PN::CURRENT, 0)
            && self.peek_predefined_name_offset(PN::ROW, 1)
        {
            parse_tree::current_row_bound(self.eat_token(), self.eat_token())
        } else {
            let bound = self.parse_expression();
            let bound_type = self.parse_bound_type();
            parse_tree::bounded_frame(bound, bound_type)
        }
    }

    fn parse_bound_type(&mut self) -> ParseTree<'a> {
        match self.maybe_peek_predefined_name() {
            Some(PN::PRECEDING) | Some(PN::FOLLOWING) => self.eat_token(),
            _ => self.expected_error("PRECEDING, FOLLOWING"),
        }
    }

    // string
    // : STRING                                #basicStringLiteral
    // | UNICODE_STRING (UESCAPE STRING)?      #unicodeStringLiteral
    fn parse_string(&mut self) -> ParseTree<'a> {
        match self.peek() {
            TK::String => self.parse_literal(),
            TK::UnicodeString => {
                let string = self.eat_token();
                let uescape_opt = self.eat_opt(TK::UESCAPE);
                let escape = if uescape_opt.is_empty() {
                    self.eat_empty()
                } else {
                    self.eat(TK::String)
                };
                parse_tree::unicode_string(string, uescape_opt, escape)
            }
            _ => self.expected_error("string"),
        }
    }

    fn peek_string(&mut self) -> bool {
        match self.peek() {
            TK::String | TK::UnicodeString => true,
            _ => false,
        }
    }

    // type_
    // : type_ ARRAY
    //
    // Parses a type, folding any number of trailing ARRAY suffixes onto it.
    fn parse_type(&mut self) -> ParseTree<'a> {
        let mut root_type = self.parse_root_type();
        while self.peek_predefined_name(PN::ARRAY) {
            let array = self.eat_predefined_name(PN::ARRAY);
            root_type = parse_tree::array_type_suffix(root_type, array)
        }
        root_type
    }

    fn peek_type_offset(&mut self, offset: usize) -> bool {
        match self.peek_offset(offset) {
            TK::TimeWithTimeZone
            | TK::TimestampWithTimeZone
            | TK::DoublePrecision
            | TK::Identifier => true,
            _ => false,
        }
    }

    // | ARRAY '<' type_ '>'
    // | MAP '<' type_ ',' type_ '>'
    // | ROW '(' identifier type_ (',' identifier type_)* ')'
    // | baseType ('(' typeParameter (',' typeParameter)* ')')?
    // | INTERVAL from_=intervalField TO to=intervalField
    //
    // Parses a type without array suffixes.  The special forms (ARRAY<>,
    // MAP<>, ROW(...), INTERVAL ... TO ...) are tried via lookahead; any
    // other identifier/keyword falls through to a named type with optional
    // type parameters.
    fn parse_root_type(&mut self) -> ParseTree<'a> {
        match self.peek() {
            TK::TimeWithTimeZone | TK::TimestampWithTimeZone | TK::DoublePrecision => (),
            TK::Identifier => {
                match self.maybe_peek_predefined_name() {
                    Some(PN::ARRAY) => {
                        if self.peek_array_type() {
                            return self.parse_array_type();
                        }
                    }
                    Some(PN::MAP) => {
                        if self.peek_map_type() {
                            return self.parse_map_type();
                        }
                    }
                    Some(PN::ROW) => {
                        if self.peek_row_type() {
                            return self.parse_row_type();
                        }
                    }
                    Some(PN::INTERVAL) => {
                        if self.peek_interval_type() {
                            return self.parse_interval_type();
                        }
                    }
                    _ => (),
                };
                ()
            }
            _ => return self.expected_error("type"),
        }
        // fall-through: a plain named type, e.g. VARCHAR(10)
        let type_name = self.eat_token();
        let type_parameters = self
            .parse_parenthesized_comma_separated_list_opt(|parser| parser.parse_type_parameter());
        parse_tree::named_type(type_name, type_parameters)
    }

    // typeParameter
    // : INTEGER_VALUE | type_
    fn parse_type_parameter(&mut self) -> ParseTree<'a> {
        if self.peek_kind(TK::Integer) {
            self.eat_token()
        } else {
            self.parse_type()
        }
    }

    // | ARRAY '<' type_ '>'
    fn peek_array_type(&mut self) -> bool {
        self.peek_predefined_name(PN::ARRAY) && self.peek_kind_offset(TK::OpenAngle, 1)
    }

    fn parse_array_type(&mut self) -> ParseTree<'a> {
        let array = self.eat_predefined_name(PN::ARRAY);
        let (open_angle, element_type, close_angle) =
            self.parse_delimited(TK::OpenAngle, |parser| parser.parse_type(), TK::CloseAngle);
        parse_tree::array_type(array, open_angle, element_type, close_angle)
    }

    // | MAP '<' type_ ',' type_ '>'
    fn peek_map_type(&mut self) -> bool {
        self.peek_predefined_name(PN::MAP) && self.peek_kind_offset(TK::OpenAngle, 1)
    }

    fn parse_map_type(&mut self) -> ParseTree<'a> {
        let map = self.eat_predefined_name(PN::MAP);
        let open_angle = self.eat(TK::OpenAngle);
        let key_type = self.parse_type();
        let comma = self.eat(TK::Comma);
        let value_type = self.parse_type();
        let close_angle = self.eat(TK::CloseAngle);
        parse_tree::map_type(map, open_angle, key_type, comma, value_type, close_angle)
    }

    // | ROW '(' identifier type_ (',' identifier type_)* ')'
    fn peek_row_type(&mut self) -> bool {
        self.peek_predefined_name(PN::ROW)
            && self.peek_kind_offset(TK::OpenParen, 1)
            && self.peek_row_element_offset(2)
    }

    // identifier type_
    fn peek_row_element_offset(&mut self, offset: usize) -> bool {
        self.peek_identifier_offset(offset) && self.peek_type_offset(offset + 1)
    }

    fn parse_row_type(&mut self) -> ParseTree<'a> {
        let row = self.eat_predefined_name(PN::ROW);
        let element_types =
            self.parse_parenthesized_comma_separated_list(|parser| parser.parse_row_type_element());
        parse_tree::row_type(row, element_types)
    }

    fn parse_row_type_element(&mut self) -> ParseTree<'a> {
        let identifier = self.parse_identifier();
        let type_ = self.parse_type();
        parse_tree::row_type_element(identifier, type_)
    }

    // | INTERVAL from_=intervalField TO to=intervalField
    fn peek_interval_type(&mut self) -> bool {
        self.peek_predefined_name(PN::INTERVAL) && self.peek_interval_field_offset(1)
    }

    fn parse_interval_type(&mut self) -> ParseTree<'a> {
        let interval = self.eat_predefined_name(PN::INTERVAL);
        let from = self.parse_interval_field();
        let to_kw = self.eat_predefined_name(PN::TO);
        let to = self.parse_interval_field();
        parse_tree::interval_type(interval, from, to_kw, to)
    }

    // Top-level statement dispatch on the first token.
    pub fn parse_statement(&mut self) -> ParseTree<'a> {
        match self.peek() {
            TK::SELECT | TK::TABLE | TK::VALUES | TK::OpenParen | TK::WITH => self.parse_query(),
            TK::CREATE => self.parse_create_statement(),
            TK::INSERT => self.parse_insert_into(),
            TK::DELETE => self.parse_delete(),
            _ => panic!("TODO: Remaining statements"),
        }
    }

    // | CREATE SCHEMA (IF NOT EXISTS)? qualifiedName
    //     (WITH properties)?                                                   #createSchema
    // | CREATE TABLE (IF NOT EXISTS)? qualifiedName columnAliases?
    //     (COMMENT string)?
    //     (WITH properties)? AS (query | '('query')')
    //     (WITH (NO)? DATA)?                                                   #createTableAsSelect
    // | CREATE TABLE (IF NOT EXISTS)? qualifiedName
    //     '(' tableElement (',' tableElement)* ')'
    //     (COMMENT string)?
    //     (WITH properties)?                                                   #createTable
    // | CREATE (OR REPLACE)? VIEW qualifiedName AS query                       #createView
    // | CREATE ROLE name=identifier
    //     (WITH ADMIN grantor)?                                                #createRole
    //
    // Dispatches on the token after CREATE; SCHEMA/VIEW/ROLE are contextual
    // keywords lexed as identifiers.
    fn parse_create_statement(&mut self) -> ParseTree<'a> {
        match self.peek_offset(1) {
            TK::TABLE => self.parse_create_table(),
            TK::OR => self.parse_create_view(),
            TK::Identifier => match self.maybe_peek_predefined_name_offset(1) {
                Some(PN::SCHEMA) => self.parse_create_schema(),
                Some(PN::VIEW) => self.parse_create_view(),
                Some(PN::ROLE) => self.parse_create_role(),
                _ => self.expected_error("create statement"),
            },
            _ => self.expected_error("create statement"),
        }
    }

    // Parses both CREATE TABLE forms; '(' followed by a table element means
    // an explicit column list, otherwise CREATE TABLE ... AS SELECT.
    fn parse_create_table(&mut self) -> ParseTree<'a> {
        let create = self.eat(TK::CREATE);
        let table = self.eat(TK::TABLE);
        let if_not_exists_opt = self.parse_if_not_exists_opt();
        let table_name = self.parse_qualified_name();
        if self.peek_kind(TK::OpenParen) && self.peek_table_element_offset(1) {
            // | CREATE TABLE (IF NOT EXISTS)? qualifiedName
            //     '(' tableElement (',' tableElement)* ')'
            //     (COMMENT string)?
            //     (WITH properties)?                                           #createTable
            let table_elements = self
                .parse_parenthesized_comma_separated_list(|parser| parser.parse_table_element());
            let comment_opt = self.parse_comment_opt();
            let with_properties_opt = self.parse_with_properties_opt();
            parse_tree::create_table(
                create,
                table,
                if_not_exists_opt,
                table_name,
                table_elements,
                comment_opt,
                with_properties_opt,
            )
        } else {
            // | CREATE TABLE (IF NOT EXISTS)? qualifiedName columnAliases?
            //     (COMMENT string)?
            //     (WITH properties)? AS (query | '('query')')
            //     (WITH (NO)? DATA)?                                           #createTableAsSelect
            let column_aliases_opt = self.parse_column_aliases_opt();
            let comment_opt = self.parse_comment_opt();
            let with_properties_opt = self.parse_with_properties_opt();
            let as_ = self.eat(TK::AS);
            let (open_paren_opt, query, close_paren_opt) = if self.peek_kind(TK::OpenParen) {
                self.parse_parenthesized_query()
            // TODO: Need to handle (query_no_with) query_primary_tail
            } else {
                (self.eat_empty(), self.parse_query(), self.eat_empty())
            };
            let with_data_opt = self.parse_with_data_opt();
            parse_tree::create_table_as_select(
                create,
                table,
                if_not_exists_opt,
                table_name,
                column_aliases_opt,
                comment_opt,
                with_properties_opt,
                as_,
                open_paren_opt,
                query,
                close_paren_opt,
                with_data_opt,
            )
        }
    }

    // (IF NOT EXISTS)?
    fn parse_if_not_exists_opt(&mut self) -> ParseTree<'a> {
        let if_ = self.eat_predefined_name_opt(PN::IF);
        if if_.is_empty() {
            if_
        } else {
            let not = self.eat(TK::NOT);
            let exists = self.eat(TK::EXISTS);
            parse_tree::if_not_exists(if_, not, exists)
        }
    }

    // (COMMENT string)?
    fn parse_comment_opt(&mut self) -> ParseTree<'a> {
        let comment = self.eat_predefined_name_opt(PN::COMMENT);
        if comment.is_empty() {
            comment
        } else {
            let value = self.parse_string();
            parse_tree::comment(comment, value)
        }
    }

    // principal
    // : USER identifier           #userPrincipal
    // | ROLE identifier           #rolePrincipal
    // | identifier                #unspecifiedPrincipal
    // ;
    //
    // USER/ROLE only act as markers when followed by an identifier;
    // otherwise the token itself is the principal's identifier.
    fn parse_principal(&mut self) -> ParseTree<'a> {
        let name = self.maybe_peek_predefined_name();
        let is_identifier = self.peek_identifier_offset(1);
        match (is_identifier, name) {
            (true, Some(PN::USER)) => parse_tree::user_principal(
                self.eat_predefined_name(PN::USER),
                self.parse_identifier(),
            ),
            (true, Some(PN::ROLE)) => parse_tree::role_principal(
                self.eat_predefined_name(PN::ROLE),
                self.parse_identifier(),
            ),
            (_, _) => parse_tree::unspecified_principal(self.parse_identifier()),
        }
    }

    // grantor
    // : CURRENT_USER          #currentUserGrantor
    // | CURRENT_ROLE          #currentRoleGrantor
    // | principal             #specifiedPrincipal
    // ;
    fn parse_grantor(&mut self) -> ParseTree<'a> {
        if self.peek_kind(TK::CURRENT_USER) {
            self.eat_token()
        } else if self.peek_predefined_name(PN::CURRENT_ROLE) {
            self.eat_predefined_name(PN::CURRENT_ROLE)
        } else {
            self.parse_principal()
        }
    }

    // (WITH ADMIN grantor)?
    fn parse_with_admin_grantor_opt(&mut self) -> ParseTree<'a> {
        let with = self.eat_opt(TK::WITH);
        if with.is_empty() {
            with
        } else {
            let admin = self.eat_predefined_name(PN::ADMIN);
            let grantor = self.parse_grantor();
            parse_tree::with_admin_grantor(with, admin, grantor)
        }
    }

    // (WITH properties)?
    fn parse_with_properties_opt(&mut self) -> ParseTree<'a> {
        let with = self.eat_opt(TK::WITH);
        if with.is_empty() {
            with
        } else {
            let properties =
                self.parse_parenthesized_comma_separated_list(|parser| parser.parse_property());
            parse_tree::with_properties(with, properties)
        }
    }

    // property_
    // : identifier EQ expression
    fn parse_property(&mut self) -> ParseTree<'a> {
        let identifier = self.parse_identifier();
        let eq = self.eat(TK::Equal);
        let value = self.parse_expression();
        parse_tree::property(identifier, eq, value)
    }

    // (WITH (NO)? DATA)?
    fn parse_with_data_opt(&mut self) -> ParseTree<'a> {
        let with = self.eat_opt(TK::WITH);
        if with.is_empty() {
            with
        } else {
            let no_opt = self.eat_predefined_name_opt(PN::NO);
            let data = self.eat_predefined_name(PN::DATA);
            parse_tree::with_data(with, no_opt, data)
        }
    }

    // tableElement
    // : columnDefinition
    // | likeClause
    // ;
    fn peek_table_element_offset(&mut self, offset: usize) -> bool {
        // disambiguating between tableElement and column_aliases
        self.peek_kind_offset(TK::LIKE, offset)
            || (self.peek_kind_offset(TK::Identifier, offset)
                && !self.peek_kind_offset(TK::Comma, offset + 1))
    }

    fn parse_table_element(&mut self) -> ParseTree<'a> {
        if self.peek_kind(TK::LIKE) {
            self.parse_like_clause()
        } else {
            self.parse_column_definition()
        }
    }

    // columnDefinition
    // : identifier type_ (NOT NULL)? (COMMENT string)? (WITH properties)?
fn parse_column_definition(&mut self) -> ParseTree<'a> { let identifier = self.parse_identifier(); let type_ = self.parse_type(); let not_null_opt = self.parse_not_null_opt(); let comment_opt = self.parse_comment_opt(); let with_properties_opt = self.parse_with_properties_opt(); parse_tree::column_definition( identifier, type_, not_null_opt, comment_opt, with_properties_opt, ) } // (NOT NULL)? fn parse_not_null_opt(&mut self) -> ParseTree<'a> { let not = self.eat_opt(TK::NOT); if not.is_empty() { not } else { let null = self.eat(TK::NULL); parse_tree::not_null(not, null) } } // likeClause // : LIKE qualifiedName (optionType=(INCLUDING | EXCLUDING) PROPERTIES)? fn parse_like_clause(&mut self) -> ParseTree<'a> { let like = self.eat(TK::LIKE); let name = self.parse_qualified_name(); let (option_type_opt, properties) = if match self.maybe_peek_predefined_name() { Some(PN::INCLUDING) | Some(PN::EXCLUDING) => true, _ => false, } { (self.eat_token(), self.eat_predefined_name(PN::PROPERTIES)) } else { (self.eat_empty(), self.eat_empty()) }; parse_tree::like_clause(like, name, option_type_opt, properties) } fn parse_create_view(&mut self) -> ParseTree<'a> { let create = self.eat(TK::CREATE); let (or, replace) = match self.peek_kind(TK::OR) { true => (self.eat_token(), self.eat_predefined_name(PN::REPLACE)), false => (self.eat_empty(), self.eat_empty()), }; let view = self.eat_predefined_name(PN::VIEW); let qualified_name = self.parse_qualified_name(); let as_ = self.eat(TK::AS); let query = self.parse_query(); parse_tree::create_view(create, or ,replace, view, qualified_name, as_, query) } fn parse_create_schema(&mut self) -> ParseTree<'a> { panic!("TODO") } fn parse_create_role(&mut self) -> ParseTree<'a> { let create = self.eat(TK::CREATE); let role = self.eat_predefined_name(PN::ROLE); let name = self.parse_identifier(); let with_admin_grantor_opt = self.parse_with_admin_grantor_opt(); parse_tree::create_role(create, role, name, with_admin_grantor_opt) } // | INSERT INTO 
qualifiedName columnAliases? query #insertInto fn parse_insert_into(&mut self) -> ParseTree<'a> { let insert = self.eat(TK::INSERT); let into = self.eat(TK::INTO); let table_name = self.parse_qualified_name(); let column_aliases_opt = self.parse_column_aliases_opt(); let query = self.parse_query(); parse_tree::insert_into(insert, into, table_name, column_aliases_opt, query) } // | DELETE FROM qualifiedName (WHERE booleanExpression)? #delete fn parse_delete(&mut self) -> ParseTree<'a> { let delete = self.eat(TK::DELETE); let from = self.eat(TK::FROM); let table_name = self.parse_qualified_name(); let where_opt = self.eat_opt(TK::WHERE); let predicate = if where_opt.is_empty() { self.eat_empty() } else { self.parse_boolean_expression() }; parse_tree::delete(delete, from, table_name, where_opt, predicate) } } fn errors_of_tree<'a>(tree: &'a ParseTree<'a>) -> Vec<&'a SyntaxError> { let mut errors: Vec<&'a SyntaxError> = Vec::new(); let mut visit = |tree: &'a ParseTree<'a>| match tree { ParseTree::Token(tree) => { for error in &tree.token.errors { errors.push(&error) } } ParseTree::Error(error) => errors.push(&error.error), _ => (), }; visit_post_order(tree, &mut visit); errors } type ParseResult<'a> = (ParseTree<'a>, Vec<SyntaxError>); /// Parses text for the given element. /// Returns the parse tree and all errors. fn parse_entrypoint<'a>(text: &'a str, parse_element: ElementParser<'a>) -> ParseResult<'a> { let mut parser = Parser::new(text); let tree = parser.parse_entrypoint(parse_element); let mut errors = Vec::new(); for error in errors_of_tree(&tree) { errors.push(error.clone()) } errors.append(&mut parser.errors); errors.sort_by_key(|error| error.get_range()); (tree, errors) } /// Parses text containing a statement. /// The errors returned includes all errors contained within the tree. pub fn parse_statement<'a>(text: &'a str) -> ParseResult<'a> { parse_entrypoint(text, |parser| parser.parse_statement()) }
true
ce16aa8230cfc4f75fbb19c1e227a256d3c076a9
Rust
fhars/libtock-rs
/examples/button_subscribe.rs
UTF-8
858
2.640625
3
[ "MIT", "LicenseRef-scancode-unknown-license-reference", "Apache-2.0" ]
permissive
#![no_std] use core::fmt::Write; use libtock::buttons; use libtock::buttons::ButtonState; use libtock::console::Console; use libtock::timer; use libtock::timer::Duration; // FIXME: Hangs up when buttons are pressed rapidly - problem in console? fn main() { let mut console = Console::new(); let mut with_callback = buttons::with_callback(|button_num: usize, state| { writeln!( console, "Button: {} - State: {}", button_num, match state { ButtonState::Pressed => "pressed", ButtonState::Released => "released", } ) .unwrap(); }); let mut buttons = with_callback.init().unwrap(); for mut button in &mut buttons { button.enable().unwrap(); } loop { timer::sleep(Duration::from_ms(500)); } }
true
05bf558ab8029b8f45b4941386b8f3f480dea8cd
Rust
AlbinoGazelle/Learning-Rust
/slices/src/main.rs
UTF-8
1,480
4.21875
4
[ "MIT" ]
permissive
//function to get the first word in a string using slices fn first_word(s: &str) -> &str { let bytes = s.as_bytes(); for (i, &item) in bytes.iter().enumerate() { if item == b' ' { return &s[0..i]; } } &s[..] } fn main() { //Slices are a way to reference a sequence of elements in a collection rather than the whole collection let s = String::from("hello"); //both of these are the same thing let _slice = &s[0..2]; //"he" let _slice = &s[..2]; //"he" let len = s.len(); //these are also the same thing. If your slice includes the last byte of the string you can drop the trailing number let _slice = &s[3..len]; //"lo" let _slice = &s[3..]; //"lo" //you can also drop both values to take the slice of the entire string let _slice = &s[0..len]; //"hello" let _slice = &s[..]; //"hello" let my_string = String::from("hello world"); // function works on slices of Strings let _word = first_word(&my_string); let my_string_literal = "hello world"; //function also works on slices of string literals let _word = first_word(&my_string_literal[..]); //also works on string literals since they ARE slices already (this is essentially the same as the above line of code) let _word = first_word(my_string_literal); //you can also take slices of other data structures like arrays let a = [1, 2, 3, 4, 5]; let _slice = &a[1..3]; //1, 2, 3 }
true
d22cf12220a96444813ef95859b1cb0b8f0238a1
Rust
melentyev/main
/rust_http_server/http/request.rs
UTF-8
5,787
3.015625
3
[]
no_license
use std::io::{TcpStream, BytesReader}; use std::collections::HashMap; use http::method::Method; use std::str::FromStr; use std::str::StrExt; use std::str::from_utf8; use std::str::from_utf8_unchecked; #[derive(Show)] pub struct Request { pub method: Method, pub http_minor : u32, pub http_major: u32, pub path: String, pub protocol: String, pub content_length: u32, pub body: Vec<u8>, pub query_params: HashMap<String, String>, pub headers: HashMap<String, String>, } impl Request { pub fn init() -> Request { Request { method: Method::GET, http_minor: 0, http_major: 0, path: String::new(), protocol: String::new(), content_length: 0, body: Vec::new(), query_params: HashMap::new(), headers: HashMap::new() } } pub fn get_method(self) -> Method { self.method } pub fn get_path(self) -> String { self.path } pub fn get_query_param(&self, key: &str) -> Option<&String> { self.query_params.get(&key.to_string()) } pub fn get_header(&self, key: &str) -> Option<&String> { self.headers.get(&key.to_string()) } pub fn get_body(self) -> Vec<u8> { self.body } } pub struct RequestParser<'a> { stream: &'a mut TcpStream, cur_lex: String, request: Request } impl<'a> RequestParser<'a> { pub fn init(stream: &'a mut TcpStream) -> RequestParser { RequestParser { cur_lex: String::new(), stream: stream, request: Request::init() } } fn next_byte(&mut self) -> u8 { let b = self.stream.bytes().next().unwrap().ok().unwrap(); println!("nb: {}", b); b } fn match_next(&mut self, b: u8) -> bool { self.next_byte() == b } fn skip_spaces(&mut self) { self.stream.bytes() .map(|c| { c.ok().unwrap() }) //println!("skip_c: {}", *c); .skip_while(|c| {*c != (' ' as u8) /*&& *c != ('\r' as u8) && *c != ('\n' as u8) */ } ); /*for c in stream.bytes() { if (c != ' ' as u8) { return c; } }*/ } fn take_until_equal_vec(&mut self, pat: u8, v: &mut Vec<u8>) { v.extend(self.stream.bytes() .map(|c| { c.ok().unwrap() }) .take_while(|c| { /*println!("c: {}", *c);*/ *c != pat })); } fn take_until_equal(&mut self, pat: u8) -> 
Vec<u8> { let mut v = Vec::new(); self.take_until_equal_vec(pat, &mut v); v } fn parse_method(&mut self) -> bool { self.skip_spaces(); let v : Vec<u8> = self.take_until_equal(' ' as u8); println!("v: {:?}", v); Method::from_utf8(v).map_or(false, |m| { self.request.method = m; true}) /* for c in stream.bytes() { if (c != ' ') { cur_lex.push(from_utf32(c as u32)) } else break; }*/ } fn parse_url(&mut self) -> bool { self.skip_spaces(); let v : Vec<u8> = self.take_until_equal(' ' as u8); println!("v: {:?}", v); match String::from_utf8(v) { Ok(s) => { self.request.path = s; true }, Err(_) => false } } fn parse_proto(&mut self) -> bool { self.skip_spaces(); let v : Vec<u8> = self.take_until_equal('\r' as u8); println!("v: {:?}", v); String::from_utf8(v).ok().map_or(false, |p| {self.request.protocol = p; true}) } fn parse_headers(&mut self) -> bool { //self.skip_spaces(); let rn_pattern = ['\r' as u8, '\n' as u8]; loop { if !self.match_next('\n' as u8) { return false } let mut v = Vec::new(); let b = self.next_byte(); if b == ('\r' as u8) { return self.next_byte() == ('\n' as u8) } v.push(b); self.take_until_equal_vec(':' as u8, &mut v); println!("here2"); match from_utf8(v.as_slice()) { Ok(key) => { self.skip_spaces(); let v : Vec<u8> = self.take_until_equal('\r' as u8); match from_utf8(v.as_slice()) { Ok(value) => { let tkey = key.trim(); self.request.headers.insert(tkey.to_string(), value.trim().to_string()); if tkey.as_slice() == "Content-Length" { match value.as_slice().parse() { Some(len) => { self.request.content_length = len; }, None => { return false } } } }, Err(_) => return false } } Err(_) => return false } } } fn parse_body(&mut self) -> bool { let len = self.request.content_length; if len > 0 { let v : Vec<Option<u8> > = self.stream .bytes().take(self.request.content_length as usize) .map(|r| { r.ok() }).collect(); if v.iter().all(|b| { b.is_some() }) { self.request.body = v.iter().map(|b| { b.unwrap() }).collect(); true } else { false } } else { true } } 
pub fn run(mut self) -> Option<Request> { /*let mut full_buf = Vec::new(); let mut last_pos = 0u32; let mut read_buf = [0u8, 1024]; let rn_pattern = ['\r' as u8, 'n' as u8]; let rnrn_pattern = ['\r' as u8, 'n' as u8, '\r' as u8, 'n' as u8]; let mut part_size = 0; let mut slice_from = full_buf.as_slice(); let mut state = State::Method;*/ if !self.parse_method() { return None } println!("Method: {:?}", self.request.method); if !self.parse_url() { return None } println!("Path: {}", self.request.path); if !self.parse_proto() { return None } println!("Protocol: {}", self.request.protocol); if !self.parse_headers() { return None } println!("Headers: {:?}", self.request.headers); if !self.parse_body() { return None } println!("Body: {:?}", self.request.body); //if !self.parse_path() { return None } Some(self.request) /*loop { match stream.read(buf) { Ok(bytes_read) => { for c in buf.slice_to(read_bytes) { match state { Method if c == ' ' as u8 => { Request.set_method(cur_lex); cur_lex = String::new(); State = Proto; } Method => { cur_lex.push(from_utf32(c as u32)) } Proto if c == ' ' as u8 => } } } Err(kind) ={ } } }*/ } /*full_buf.push_all(buf.slice_to(bytes_read)); match (0..full_buf.len()).find(|ind| => { full_buf.slice_from(ind).starts_from(rnrn_pattern) }) { }*/ }
true
5e437206eff26a55c2caa00a04ad989714840f6b
Rust
deuzu/adventofcode
/day1/src/step1.rs
UTF-8
946
3.453125
3
[]
no_license
use std::fs::File; use std::io; use std::io::Read; use std::path::Path; fn main() { let input = get_input().expect("Failed to open input file"); let lines = input.lines().into_iter(); let mut result: Option<u32> = None; for p1 in lines.clone() { for p2 in lines.clone() { let pair: (u32, u32) = (p1.parse().unwrap(), p2.parse().unwrap()); if 2020 == pair.0 + pair.1 { println!("A pair that is equal to 2020 has been found: {:?}", pair); println!("Multiplying together the numbers in the pair results in: {}", pair.0 * pair.1); result = Some(pair.0 * pair.1); } } } assert_eq!(Some(1009899), result); } fn get_input() -> Result<String, io::Error> { let path = Path::new("input.txt"); let mut file = File::open(&path)?; let mut input = String::new(); file.read_to_string(&mut input)?; Ok(input) }
true
d88931ad4f0ad25388f9929a51e0b9c0eb267e9e
Rust
dcchut/serenity
/examples/12_timing_and_events/src/main.rs
UTF-8
9,695
3.046875
3
[ "ISC" ]
permissive
//! This example will showcase one way on how to extend Serenity with a //! time-scheduler and an event-trigger-system. //! We will create a remind-me command that will send a message after a //! a demanded amount of time. Once the message has been sent, the user can //! react to it, triggering an event to send another message. use std::{collections::HashSet, env, hash::{Hash, Hasher}, sync::Arc, }; use serenity::{ prelude::*, framework::standard::{ Args, CommandResult, CommandGroup, DispatchError, HelpOptions, help_commands, StandardFramework, macros::{command, group, help}, }, http::Http, model::prelude::*, }; // We will use this crate as event dispatcher. use hey_listen::sync::{ParallelDispatcher as Dispatcher, ParallelDispatcherRequest as DispatcherRequest}; // And this crate to schedule our tasks. use white_rabbit::{Utc, Scheduler, DateResult, Duration}; // This enum represents possible events a listener might wait for. // In this case, we want to dispatch an event when a reaction is added. // Serenity's event-enum is not suitable for this. // First it offers too many variants we do not need, but most importantly, // it lacks the `Default`-trait which makes sense // as the enum-fields have no clear logical default value. But without it, // constructing mock-variants becomes difficult. // // As a result, we make our own slick event-enum! #[derive(Clone)] enum DispatchEvent { ReactEvent(MessageId, UserId), } // We need to implement equality for our enum. // One could test variants only. In this case, we want to know who reacted // on which message. 
impl PartialEq for DispatchEvent { fn eq(&self, other: &DispatchEvent) -> bool { match (self, other) { (DispatchEvent::ReactEvent(self_message_id, self_user_id), DispatchEvent::ReactEvent(other_message_id, other_user_id)) => { self_message_id == other_message_id && self_user_id == other_user_id } } } } impl Eq for DispatchEvent {} // See following Clippy-lint: // https://rust-lang.github.io/rust-clippy/master/index.html#derive_hash_xor_eq impl Hash for DispatchEvent { fn hash<H: Hasher>(&self, state: &mut H) { match self { DispatchEvent::ReactEvent(msg_id, user_id) => { msg_id.hash(state); user_id.hash(state); } } } } struct DispatcherKey; impl TypeMapKey for DispatcherKey { type Value = Arc<RwLock<Dispatcher<DispatchEvent>>>; } struct SchedulerKey; impl TypeMapKey for SchedulerKey { type Value = Arc<RwLock<Scheduler>>; } struct Handler; impl EventHandler for Handler { // We want to dispatch an event whenever a new reaction has been added. fn reaction_add(&self, context: Context, reaction: Reaction) { let dispatcher = { let mut context = context.data.write(); context.get_mut::<DispatcherKey>().expect("Expected Dispatcher.").clone() }; dispatcher.write().dispatch_event( &DispatchEvent::ReactEvent(reaction.message_id, reaction.user_id)); } } #[group("remind me")] #[prefixes("rm", "reminder")] #[commands(set_reminder)] struct RemindMe; #[help] fn my_help( context: &mut Context, msg: &Message, args: Args, help_options: &'static HelpOptions, groups: &[&'static CommandGroup], owners: HashSet<UserId> ) -> CommandResult { help_commands::with_embeds(context, msg, args, &help_options, groups, owners) } fn main() { // Configure the client with your Discord bot token in the environment. let token = env::var("DISCORD_TOKEN").expect( "Expected a token in the environment", ); let mut client = Client::new(&token, Handler) .expect("Err creating client"); { let mut data = client.data.write(); // We create a new scheduler with 4 internal threads. Why 4? 
It really // is just an arbitrary number, you are often better setting this // based on your CPU. // When a task is due, a thread from the threadpool will be used to // avoid blocking the scheduler thread. let scheduler = Scheduler::new(4); let scheduler = Arc::new(RwLock::new(scheduler)); let mut dispatcher: Dispatcher<DispatchEvent> = Dispatcher::default(); // Once receiving an event to dispatch, the amount of threads // set via `num_threads` will dispatch in parallel. dispatcher.num_threads(4).expect("Could not construct threadpool"); data.insert::<DispatcherKey>(Arc::new(RwLock::new(dispatcher))); data.insert::<SchedulerKey>(scheduler); } // We will fetch your bot's id. let bot_id = match client.cache_and_http.http.get_current_application_info() { Ok(info) => { info.id }, Err(why) => panic!("Could not access application info: {:?}", why), }; client.with_framework( // Configures the client, allowing for options to mutate how the // framework functions. StandardFramework::new() .configure(|c| c .with_whitespace(true) .on_mention(Some(bot_id)) .prefix("~") .delimiters(vec![", ", ","])) .on_dispatch_error(|ctx, msg, error| { if let DispatchError::Ratelimited(seconds) = error { let _ = msg.channel_id.say(&ctx.http, &format!("Try this again in {} seconds.", seconds)); } }) .after(|_ctx, _msg, cmd_name, error| { if let Err(why) = error { println!("Error in {}: {:?}", cmd_name, why); } }) .help(&MY_HELP) .group(&REMINDME_GROUP) ); if let Err(why) = client.start() { println!("Client error: {:?}", why); } } // Just a helper-function for creating the closure we want to use as listener. // It saves us from writing the same trigger twice for repeated and non-repeated // tasks (see remind-me command below). 
fn thanks_for_reacting(http: Arc<Http>, channel: ChannelId) -> Box<dyn Fn(&DispatchEvent) -> Option<DispatcherRequest> + Send + Sync> { Box::new(move |_| { if let Err(why) = channel.say(&http, "Thanks for reacting!") { println!("Could not send message: {:?}", why); } Some(DispatcherRequest::StopListening) }) } #[command] #[aliases("add")] fn set_reminder(context: &mut Context, msg: &Message, mut args: Args) -> CommandResult { // It might be smart to set a moderately high minimum value for `time` // to avoid abuse like tasks that repeat every 100ms, especially since // channels have send-message rate limits. let time: u64 = args.single()?; let repeat: bool = args.single()?; let args = args.rest().to_string(); let scheduler = { let mut context = context.data.write(); context.get_mut::<SchedulerKey>().expect("Expected Scheduler.").clone() }; let dispatcher = { let mut context = context.data.write(); context.get_mut::<DispatcherKey>().expect("Expected Dispatcher.").clone() }; let http = context.http.clone(); let msg = msg.clone(); let mut scheduler = scheduler.write(); // First, we check if the user wants a repeated task or not. if repeat { // Chrono's duration can also be negative // and therefore we cast to `i64`. scheduler.add_task_duration(Duration::milliseconds(time as i64), move |_| { let bot_msg = match msg.channel_id.say(&http, &args) { Ok(msg) => msg, // We could not send the message, thus we will try sending it // again in five seconds. // It might be wise to keep a counter for maximum tries. // If the channel got deleted, trying to send a message will // always fail. Err(why) => { println!("Error sending message: {:?}.", why); return DateResult::Repeat( Utc::now() + Duration::milliseconds(5000)) }, }; let http = http.clone(); // We add a function to dispatch for a certain event. dispatcher.write() .add_fn(DispatchEvent::ReactEvent(bot_msg.id, msg.author.id), // The `thanks_for_reacting`-function creates a function // to schedule. 
thanks_for_reacting(http, bot_msg.channel_id)); // We return that our date shall happen again, therefore we need // to tell when this shall be. DateResult::Repeat(Utc::now() + Duration::milliseconds(time as i64)) }); } else { // Pretty much identical with the `true`-case except for the returned // variant. scheduler.add_task_duration(Duration::milliseconds(time as i64), move |_| { let bot_msg = match msg.channel_id.say(&http, &args) { Ok(msg) => msg, Err(why) => { println!("Error sending message: {:?}.", why); return DateResult::Repeat( Utc::now() + Duration::milliseconds(5000) ) }, }; let http = http.clone(); dispatcher.write() .add_fn(DispatchEvent::ReactEvent(bot_msg.id, msg.author.id), thanks_for_reacting(http, bot_msg.channel_id)); // The task is done and that's it, we don't need to repeat it. DateResult::Done }); }; Ok(()) }
true
a3d7340d7687a176bdf374b64fe27d6667c2dd1c
Rust
maximumstock/Advent-of-Code-2019
/day_01/src/main.rs
UTF-8
1,982
2.859375
3
[]
no_license
fn main() { let input = vec![ 149579, 95962, 97899, 149552, 65085, 111896, 127591, 115128, 64630, 120430, 81173, 136775, 137806, 132042, 65902, 87894, 97174, 126829, 88716, 85284, 61178, 106423, 89821, 51123, 85350, 53905, 74259, 59710, 80358, 111938, 129027, 144036, 68717, 69382, 64163, 65114, 58548, 74559, 142855, 115617, 107847, 133264, 111657, 125402, 129254, 67275, 120955, 110940, 139146, 96810, 147085, 103471, 89560, 111940, 120332, 55717, 73498, 133817, 102095, 57518, 57725, 58673, 84918, 143693, 149361, 74432, 51048, 99136, 128220, 141591, 79477, 116798, 93622, 113316, 143888, 143155, 57861, 112833, 70928, 116310, 126836, 93835, 101281, 116599, 107776, 138215, 107034, 74826, 73372, 127785, 105051, 124720, 147682, 97320, 74957, 113446, 101566, 96278, 144766, 55755, ]; part1(&input); part2(&input); } fn part1(input: &Vec<i32>) { let total_fuel = input .iter() .map(mass_to_fuel) .fold(0, |left, right| left + right); println!("Case 1 - Total fuel: {:?}", total_fuel); } fn mass_to_fuel(mass: &i32) -> i32 { (mass / 3) - 2 } fn part2(input: &Vec<i32>) { let total_fuel: i32 = input .iter() .map(mass_and_fuel_to_fuel) .sum(); println!("Case 2 - Total fuel: {:?}", total_fuel); } fn mass_and_fuel_to_fuel(module_mass: &i32) -> i32 { // Calculate fuel for module mass let module_fule = mass_to_fuel(module_mass); let mut total_fuel = module_fule; let mut current_mass = module_fule; loop { let fuel = mass_to_fuel(&current_mass); if fuel <= 0 { break; } total_fuel += fuel; current_mass = fuel; println!("{:?}", fuel); } total_fuel } #[test] fn test_add() { assert_eq!(mass_and_fuel_to_fuel(&14), 2); assert_eq!(mass_and_fuel_to_fuel(&1969), 966); assert_eq!(mass_and_fuel_to_fuel(&100756), 50346); }
true
a372bd072a3db7c657d91a0c01d0b6efc449184b
Rust
michalwa/cgol-rs
/src/utils.rs
UTF-8
480
3.640625
4
[]
no_license
use std::ops::RangeInclusive; pub trait RangeExt<T> { /// Constrains the value to be contained within the range fn clamp(&self, t: T) -> T where T: Clone + Ord; } impl<T> RangeExt<T> for RangeInclusive<T> { fn clamp(&self, t: T) -> T where T: Clone + Ord, { if &t < self.start() { self.start().clone() } else if &t > self.end() { self.end().clone() } else { t } } }
true
d943db1d1ae0659c7251d85b7993389ff79d0eca
Rust
ckatsak/aoc2020
/day11/part2.rs
UTF-8
10,235
3.390625
3
[]
no_license
use std::convert::TryInto; use std::io::{BufRead, BufReader}; use std::path::Path; use anyhow::{anyhow, bail, Result}; #[derive(Clone, Debug, PartialEq)] enum Seat { Empty(usize, usize), Occupied(usize, usize), Floor, } impl std::convert::TryFrom<((usize, usize), char)> for Seat { type Error = String; fn try_from(((r, c), availability): ((usize, usize), char)) -> Result<Self, Self::Error> { match availability { 'L' => Ok(Seat::Empty(r, c)), '#' => Ok(Seat::Occupied(r, c)), '.' => Ok(Seat::Floor), s => Err(format!("invalid seat: {:?} --> {:#?}", (r, c), s)), } } } impl std::fmt::Display for Seat { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Seat::Empty(_, _) => write!(f, "L"), Seat::Occupied(_, _) => write!(f, "#"), Seat::Floor => write!(f, "."), } } } impl Seat { /// Return 8 Iterators, one per direction, that yield the position (`(usize, usize)`) /// of all other seats in the `Layout<dims>` that are "visible" from position `pos`. #[inline(always)] fn visible_seats_from( pos: (usize, usize), dims: (usize, usize), ) -> Vec<Box<dyn Iterator<Item = (usize, usize)>>> { vec![ Box::new(Seat::north(pos, dims)), Box::new(Seat::south(pos, dims)), Box::new(Seat::east(pos, dims)), Box::new(Seat::west(pos, dims)), Box::new(Seat::north_east(pos, dims)), Box::new(Seat::north_west(pos, dims)), Box::new(Seat::south_east(pos, dims)), Box::new(Seat::south_west(pos, dims)), ] } #[inline(always)] fn north((r, c): (usize, usize), _: (usize, usize)) -> impl Iterator<Item = (usize, usize)> { (0..r).rev().zip((c..=c).cycle()) } #[inline(always)] fn south( (r, c): (usize, usize), (nr, _): (usize, usize), ) -> impl Iterator<Item = (usize, usize)> { (r + 1..nr).zip((c..=c).cycle()) } #[inline(always)] fn east( (r, c): (usize, usize), (_, nc): (usize, usize), ) -> impl Iterator<Item = (usize, usize)> { (r..=r).cycle().zip(c + 1..nc) } #[inline(always)] fn west((r, c): (usize, usize), _: (usize, usize)) -> impl Iterator<Item = (usize, usize)> { 
(r..=r).cycle().zip((0..c).rev()) } #[inline(always)] fn north_east( (r, c): (usize, usize), (_, nc): (usize, usize), ) -> impl Iterator<Item = (usize, usize)> { (0..r).rev().zip(c + 1..nc) } #[inline(always)] fn north_west( (r, c): (usize, usize), _: (usize, usize), ) -> impl Iterator<Item = (usize, usize)> { (0..r).rev().zip((0..c).rev()) } #[inline(always)] fn south_east( (r, c): (usize, usize), (nr, nc): (usize, usize), ) -> impl Iterator<Item = (usize, usize)> { (r + 1..nr).zip(c + 1..nc) } #[inline(always)] fn south_west( (r, c): (usize, usize), (nr, _): (usize, usize), ) -> impl Iterator<Item = (usize, usize)> { (r + 1..nr).zip((0..c).rev()) } } #[derive(Clone, PartialEq)] struct Row { id: usize, seats: Vec<Seat>, } impl std::convert::TryFrom<(usize, &str)> for Row { type Error = String; fn try_from((id, s): (usize, &str)) -> Result<Self, Self::Error> { Ok(Row { id, seats: s .chars() .enumerate() .map(|(col, c)| ((id, col), c).try_into()) .collect::<Result<Vec<Seat>, String>>()?, }) } } impl std::fmt::Display for Row { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{:3}: ", self.id)?; for seat in &self.seats { write!(f, "{}", seat)?; } Ok(()) } } #[derive(Clone, PartialEq)] struct Layout(Vec<Row>); impl std::fmt::Display for Layout { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { for row in &self.0 { writeln!(f, "{}", row)?; } Ok(()) } } impl Layout { #[inline(always)] fn new<II: IntoIterator<Item = String>>(lines: II) -> Result<Self> { Ok(Layout( lines .into_iter() .enumerate() .map(|(i, line)| (i, line.as_ref()).try_into()) .collect::<Result<Vec<Row>, String>>() .map_err(|err| anyhow!("parsing input: {}", err))?, )) } fn step(&self) -> Self { const EMPTY: usize = 0; const CROWDY: usize = 5; let order = |pos| { Seat::visible_seats_from(pos, (self.0.len(), self.0.get(0).unwrap().seats.len())) .into_iter() .filter_map(|iter| { for (vr, vc) in iter.into_iter() { match 
self.0.get(vr).unwrap().seats.get(vc).unwrap() { Seat::Empty(_, _) => { return None; } Seat::Occupied(_, _) => { return Some(()); } Seat::Floor => (), } } None }) .count() }; // TODO: Non-optimal: the order of each Seat will be calculated multiple times. let mut next = self.clone(); next.0.iter_mut().for_each(|row| { row.seats.iter_mut().for_each(|seat| match seat { Seat::Empty(r, c) => { if order((*r, *c)) == EMPTY { *seat = Seat::Occupied(*r, *c) } } Seat::Occupied(r, c) => { if order((*r, *c)) >= CROWDY { *seat = Seat::Empty(*r, *c) } } _floor => (), // skip }) }); next } fn count_occupied(&self) -> usize { self.0 .iter() .map(|row| { row.seats .iter() .filter(|&seat| matches!(seat, Seat::Occupied(_, _))) .count() }) .sum() } } fn solve<P: AsRef<Path>>(path: P) -> Result<usize> { let input: Vec<_> = BufReader::with_capacity(1 << 14, std::fs::File::open(path)?) .lines() .map(|l| l.unwrap()) .collect(); let mut layout = Layout::new(input)?; //eprintln!("Initial Layout:\n{}", layout); loop { let next = layout.step(); //eprintln!("\n\nNext Layout:\n{}", next); if next == layout { return Ok(layout.count_occupied()); } layout = next; } } fn main() -> Result<()> { let argv: Vec<_> = std::env::args().collect(); let filepath = match argv.len() { 1 => "day11/input.txt", 2 => argv[1].as_ref(), _ => { bail!(format!("Usage:\n\t$ {} [<file>]", argv[0])); } }; println!("# occupied = {}", solve(filepath)?); Ok(()) } #[cfg(test)] mod tests { use super::*; #[test] fn north() { assert_eq!(Seat::north((1, 1), (3, 3)).collect::<Vec<_>>(), &[(0, 1)]); assert_eq!( Seat::north((2, 2), (5, 5)).collect::<Vec<_>>(), &[(1, 2), (0, 2)] ); assert_eq!(Seat::north((0, 1), (3, 3)).collect::<Vec<_>>(), &[]); } #[test] fn south() { assert_eq!(Seat::south((1, 1), (3, 3)).collect::<Vec<_>>(), &[(2, 1)]); assert_eq!( Seat::south((2, 2), (5, 5)).collect::<Vec<_>>(), &[(3, 2), (4, 2)] ); assert_eq!(Seat::south((2, 2), (3, 3)).collect::<Vec<_>>(), &[]); } #[test] fn east() { assert_eq!(Seat::east((1, 
1), (3, 3)).collect::<Vec<_>>(), &[(1, 2)]); assert_eq!( Seat::east((2, 2), (5, 5)).collect::<Vec<_>>(), &[(2, 3), (2, 4)] ); assert_eq!(Seat::east((0, 2), (3, 3)).collect::<Vec<_>>(), &[]); } #[test] fn west() { assert_eq!(Seat::west((1, 1), (3, 3)).collect::<Vec<_>>(), &[(1, 0)]); assert_eq!( Seat::west((2, 2), (5, 5)).collect::<Vec<_>>(), &[(2, 1), (2, 0)] ); assert_eq!(Seat::west((2, 0), (3, 3)).collect::<Vec<_>>(), &[]); } #[test] fn north_east() { assert_eq!( Seat::north_east((1, 1), (3, 3)).collect::<Vec<_>>(), &[(0, 2)] ); assert_eq!( Seat::north_east((2, 2), (5, 5)).collect::<Vec<_>>(), &[(1, 3), (0, 4)] ); assert_eq!( Seat::north_east((2, 1), (3, 3)).collect::<Vec<_>>(), &[(1, 2)] ); } #[test] fn north_west() { assert_eq!( Seat::north_west((1, 1), (3, 3)).collect::<Vec<_>>(), &[(0, 0)] ); assert_eq!( Seat::north_west((2, 2), (5, 5)).collect::<Vec<_>>(), &[(1, 1), (0, 0)] ); assert_eq!( Seat::north_west((1, 2), (3, 3)).collect::<Vec<_>>(), &[(0, 1)] ); } #[test] fn south_east() { assert_eq!( Seat::south_east((1, 1), (3, 3)).collect::<Vec<_>>(), &[(2, 2)] ); assert_eq!( Seat::south_east((2, 2), (5, 5)).collect::<Vec<_>>(), &[(3, 3), (4, 4)] ); assert_eq!( Seat::south_east((0, 1), (3, 3)).collect::<Vec<_>>(), &[(1, 2)] ); } #[test] fn south_west() { assert_eq!( Seat::south_west((1, 1), (3, 3)).collect::<Vec<_>>(), &[(2, 0)] ); assert_eq!( Seat::south_west((2, 2), (5, 5)).collect::<Vec<_>>(), &[(3, 1), (4, 0)] ); assert_eq!( Seat::south_west((1, 2), (3, 3)).collect::<Vec<_>>(), &[(2, 1)] ); } }
true
55b9d5c71e743cfdd343c78faf3414bf5d14ed25
Rust
madrury/rusty-rogue
/rogue/src/melee_combat_system.rs
UTF-8
7,979
2.9375
3
[]
no_license
use specs::prelude::*; use super::{ Map, Point, TileType, CombatStats, WantsToMeleeAttack, Name, WantsToTakeDamage, GameLog, Renderable, Position, AnimationRequestBuffer, AnimationRequest, Equipped, GrantsMeleeAttackBonus, StatusIsMeleeAttackBuffed, ElementalDamageKind, SpawnEntityWhenMeleeAttacked, EntitySpawnKind, EntitySpawnRequestBuffer, EntitySpawnRequest }; pub struct MeleeCombatSystem {} //---------------------------------------------------------------------------- // A system for processing melee combat. // // This system scans all entities for a WantsToMeleeAttack component, which // signals that the entity has requested to enter melee combat against some // target. If the combat is successful, we attaach a SufferDamage component to // the target, which is processed by the DamageSystem. //---------------------------------------------------------------------------- // TODO: Move this into a struct instead of a tuple. impl<'a> System<'a> for MeleeCombatSystem { type SystemData = ( Entities<'a>, WriteExpect<'a, Map>, WriteExpect<'a, GameLog>, WriteExpect<'a, AnimationRequestBuffer>, ReadExpect<'a, Point>, ReadStorage<'a, Name>, ReadStorage<'a, CombatStats>, WriteStorage<'a, Position>, ReadStorage<'a, Renderable>, ReadStorage<'a, Equipped>, ReadStorage<'a, GrantsMeleeAttackBonus>, ReadStorage<'a, StatusIsMeleeAttackBuffed>, WriteStorage<'a, WantsToMeleeAttack>, WriteStorage<'a, WantsToTakeDamage>, ReadStorage<'a, SpawnEntityWhenMeleeAttacked>, WriteExpect<'a, EntitySpawnRequestBuffer> ); fn run(&mut self, data: Self::SystemData) { let ( entities, mut map, mut log, mut animation_builder, ppos, names, combat_stats, positions, renderables, equipped, weapon_attack_bonuses, is_melee_buffs, mut melee_attacks, mut damagees, spawn_when_melee, mut entity_spawn_buffer ) = data; let iter = (&entities, &melee_attacks, &names, &combat_stats).join(); for (attacker, melee, name, stats) in iter { let target = melee.target; // As a rule, entities cannot target themselves in 
melee combat. // This happens if, for example, the player passes a turn. if attacker == target {continue;} // TODO: this unwrap is dodgy. Can we really not get here if the // target does not have combat stats? If we have a blocking entity // without combat stats, are we gonna be ok here? let target_stats = combat_stats.get(target).unwrap(); if target_stats.hp > 0 { let target_name = names.get(target).unwrap(); let weapon_attack_bonus: i32 = (&entities, &weapon_attack_bonuses, &equipped) .join() .filter(|(_e, _ab, eq)| eq.owner == attacker) .map(|(_e, ab, _eq)| ab.bonus) .sum(); // Factor is 2 if the attacker is buffed, 1 otherwise. let attack_buff_factor: i32 = is_melee_buffs.get(attacker) .map_or(1, |_b| 2); let damage = i32::max(0, attack_buff_factor * (stats.power + weapon_attack_bonus)); // TODO: This message should be created further down the turn // pipeline. Probably where damage is actually applied. if damage == 0 { log.entries.push( format!("{} is unable to damage {}.", &name.name, &target_name.name) ); continue; } // TODO: This is not right. This message needs to happen AFTER // defense buffs are applied. log.entries.push( format!("{} hits {} for {} hp.", &name.name, &target_name.name, damage) ); // Actually push the damage :D WantsToTakeDamage::new_damage( &mut damagees, melee.target, damage, ElementalDamageKind::Physical ); // Animate the damage with a flash // TODO: Same here. This should be created after damage is actually created. // to avoid triggering animations when all damage is nullified. let pos = positions.get(melee.target); let render = renderables.get(melee.target); if let(Some(pos), Some(render)) = (pos, render) { animation_builder.request( AnimationRequest::MeleeAttack { x: pos.x, y: pos.y, bg: render.bg, glyph: render.glyph, } ); } // If entity splits or spawn on a melee attack, send the signal // to spawn a new entity. We're probably smacking a jelly here. 
let spawns = spawn_when_melee.get(target); if let (Some(spawns), Some(pos)) = (spawns, pos) { let spawn_position = map.random_adjacent_point(pos.x, pos.y); let mut can_spawn: bool = false; if let Some(spawn_position) = spawn_position { let spawn_idx = map.xy_idx(spawn_position.0, spawn_position.1); let in_player_position = (spawn_position.0 == ppos.x) && (spawn_position.1 == ppos.y); let spawn_request_kind = match spawns.kind { EntitySpawnKind::PinkJelly {..} => { can_spawn = !map.blocked[spawn_idx] && !in_player_position; Some(EntitySpawnKind::PinkJelly { max_hp: target_stats.hp / 2, hp: target_stats.hp / 2 }) }, EntitySpawnKind::Fire {spread_chance, dissipate_chance} => { can_spawn = true; Some(EntitySpawnKind::Fire { spread_chance, dissipate_chance }) }, EntitySpawnKind::Chill {spread_chance, dissipate_chance} => { can_spawn = true; Some(EntitySpawnKind::Chill { spread_chance, dissipate_chance }) }, _ => None, }; if let Some(spawn_request_kind) = spawn_request_kind { if can_spawn { entity_spawn_buffer.request(EntitySpawnRequest { x: spawn_position.0, y: spawn_position.1, kind: spawn_request_kind }); } } } } // Create a bloodstain where the damage was inflicted. if let Some(pos) = pos { let idx = map.xy_idx(pos.x, pos.y); if map.tiles[idx] != TileType::DownStairs { map.tiles[idx] = TileType::BloodStain } } } } melee_attacks.clear(); } }
true
3207d986d26532431cbdf6f675917ac797c64920
Rust
JM4ier/oxidized
/src/commands/play/random_ai.rs
UTF-8
419
2.546875
3
[]
no_license
use super::*; use rand::prelude::*; use std::marker::*; #[derive(Default)] pub struct RandomPlayer<G> { _phantom: PhantomData<G>, } impl<G: PvpGame<usize> + Clone> AiPlayer<usize, G> for RandomPlayer<G> { fn make_move(&mut self, game: &G, player_id: usize) -> usize { let mut valid_moves = game.possible_moves(player_id); valid_moves.shuffle(&mut thread_rng()); valid_moves[0] } }
true
a7f59cb003e22559cfebef194e0f0d12676e09f0
Rust
lo48576/datetime-string
/src/common/ymd8_hyphen.rs
UTF-8
50,477
3.078125
3
[ "MIT", "Apache-2.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
//! Date string in `%Y-%m-%d` (`YYYY-MM-DD`) format. //! //! This is also an RFC 3339 [`full-date`] string. //! //! [`full-date`]: https://tools.ietf.org/html/rfc3339#section-5.6 use core::{ convert::TryFrom, fmt, ops::{self, Range}, str, }; use crate::{ datetime::{is_leap_year, validate_ym0d, validate_ym1d}, parse::{parse_digits2, parse_digits4}, str::{write_digit2, write_digit4}, }; #[cfg(feature = "alloc")] use alloc::{string::String, vec::Vec}; use crate::error::{ComponentKind, Error, ErrorKind}; /// Length of RFC 3339 `full-date` string (i.e. length of `YYYY-MM-DD`). const FULL_DATE_LEN: usize = 10; /// Range of the year in the string. const YEAR_RANGE: Range<usize> = 0..4; /// Range of the month in the string. const MONTH_RANGE: Range<usize> = 5..7; /// Range of the day of month in the string. const MDAY_RANGE: Range<usize> = 8..10; /// Validates the given string as an RFC 3339 [`full-date`] string. /// /// [`full-date`]: https://tools.ietf.org/html/rfc3339#section-5.6 fn validate_bytes(s: &[u8]) -> Result<(), Error> { let s: &[u8; FULL_DATE_LEN] = TryFrom::try_from(s).map_err(|_| { if s.len() < FULL_DATE_LEN { ErrorKind::TooShort } else { ErrorKind::TooLong } })?; if (s[4] != b'-') || (s[7] != b'-') { return Err(ErrorKind::InvalidSeparator.into()); } let year_s: [u8; 4] = [s[0], s[1], s[2], s[3]]; let month_s: [u8; 2] = [s[5], s[6]]; let mday_s: [u8; 2] = [s[8], s[9]]; if !year_s.iter().all(u8::is_ascii_digit) { return Err(ErrorKind::InvalidComponentType(ComponentKind::Year).into()); } if !month_s.iter().all(u8::is_ascii_digit) { return Err(ErrorKind::InvalidComponentType(ComponentKind::Month).into()); } if !mday_s.iter().all(u8::is_ascii_digit) { return Err(ErrorKind::InvalidComponentType(ComponentKind::Mday).into()); } let month1 = parse_digits2(month_s); if (month1 < 1) || (month1 > 12) { return Err(ErrorKind::ComponentOutOfRange(ComponentKind::Month).into()); } let mday = parse_digits2(mday_s); if mday < 1 { return 
Err(ErrorKind::ComponentOutOfRange(ComponentKind::Mday).into()); } let year = parse_digits4(year_s); validate_ym1d(year, month1, mday).map_err(Into::into) } /// String slice for a date in `YYYY-MM-DD` format, such as `2001-12-31`. /// /// This is also an RFC 3339 [`full-date`] string. /// /// [`full-date`]: https://tools.ietf.org/html/rfc3339#section-5.6 #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] #[repr(transparent)] // Note that `derive(Serialize)` cannot used here, because it encodes this as // `[u8]` rather than as a string. // // Comparisons implemented for the type are consistent (at least it is intended to be so). // See <https://github.com/rust-lang/rust-clippy/issues/2025>. // Note that `clippy::derive_ord_xor_partial_ord` would be introduced since Rust 1.47.0. #[allow(clippy::derive_hash_xor_eq)] #[allow(clippy::unknown_clippy_lints, clippy::derive_ord_xor_partial_ord)] pub struct Ymd8HyphenStr([u8]); impl Ymd8HyphenStr { /// Creates a `&Ymd8HyphenStr` from the given byte slice. /// /// This performs assertion in debug build, but not in release build. /// /// # Safety /// /// `validate_bytes(s)` should return `Ok(())`. #[inline] #[must_use] pub(crate) unsafe fn from_bytes_maybe_unchecked(s: &[u8]) -> &Self { debug_assert_ok!(validate_bytes(s)); &*(s as *const [u8] as *const Self) } /// Creates a `&mut Ymd8HyphenStr` from the given mutable byte slice. /// /// This performs assertion in debug build, but not in release build. /// /// # Safety /// /// `validate_bytes(s)` should return `Ok(())`. #[inline] #[must_use] pub(crate) unsafe fn from_bytes_maybe_unchecked_mut(s: &mut [u8]) -> &mut Self { debug_assert_ok!(validate_bytes(s)); &mut *(s as *mut [u8] as *mut Self) } /// Creates a `&mut Ymd8HyphenStr` from the given mutable string slice. /// /// This performs assertion in debug build, but not in release build. /// /// # Safety /// /// `validate_bytes(s.as_bytes())` should return `Ok(())`. 
#[inline] #[must_use] unsafe fn from_str_maybe_unchecked_mut(s: &mut str) -> &mut Self { // This is safe because `Hms6ColonStr` ensures that the underlying bytes // are ASCII string after modification. Self::from_bytes_maybe_unchecked_mut(s.as_bytes_mut()) } /// Creates a new `&Ymd8HyphenStr` from a string slice. /// /// # Examples /// /// ``` /// # use datetime_string::common::Ymd8HyphenStr; /// let date = Ymd8HyphenStr::from_str("2001-12-31")?; /// assert_eq!(date.as_str(), "2001-12-31"); /// /// assert!(Ymd8HyphenStr::from_str("0000-01-01").is_ok()); /// assert!(Ymd8HyphenStr::from_str("9999-12-31").is_ok()); /// /// assert!(Ymd8HyphenStr::from_str("2004-02-29").is_ok(), "2004 is a leap year"); /// assert!(Ymd8HyphenStr::from_str("2100-02-29").is_err(), "2100 is NOT a leap year"); /// assert!(Ymd8HyphenStr::from_str("2000-02-29").is_ok(), "2000 is a leap year"); /// # Ok::<_, datetime_string::Error>(()) /// ``` #[inline] // `FromStr` trait cannot be implemented for a slice. #[allow(clippy::should_implement_trait)] pub fn from_str(s: &str) -> Result<&Self, Error> { TryFrom::try_from(s) } /// Creates a new `&mut Ymd8HyphenStr` from a mutable string slice. /// /// # Examples /// /// ``` /// # use datetime_string::common::Ymd8HyphenStr; /// let mut buf = "2001-12-31".to_owned(); /// let date = Ymd8HyphenStr::from_mut_str(&mut buf)?; /// assert_eq!(date.as_str(), "2001-12-31"); /// /// date.set_year(1999)?; /// assert_eq!(date.as_str(), "1999-12-31"); /// /// assert_eq!(buf, "1999-12-31"); /// # Ok::<_, datetime_string::Error>(()) /// ``` #[inline] pub fn from_mut_str(s: &mut str) -> Result<&mut Self, Error> { TryFrom::try_from(s) } /// Creates a new `&Ymd8HyphenStr` from a byte slice. 
/// /// # Examples /// /// ``` /// # use datetime_string::common::Ymd8HyphenStr; /// let date = Ymd8HyphenStr::from_bytes(b"2001-12-31")?; /// assert_eq!(date.as_str(), "2001-12-31"); /// /// assert!(Ymd8HyphenStr::from_bytes(b"0000-01-01").is_ok()); /// assert!(Ymd8HyphenStr::from_bytes(b"9999-12-31").is_ok()); /// /// assert!(Ymd8HyphenStr::from_bytes(b"2004-02-29").is_ok(), "2004 is a leap year"); /// assert!(Ymd8HyphenStr::from_bytes(b"2100-02-29").is_err(), "2100 is NOT a leap year"); /// assert!(Ymd8HyphenStr::from_bytes(b"2000-02-29").is_ok(), "2000 is a leap year"); /// # Ok::<_, datetime_string::Error>(()) /// ``` #[inline] pub fn from_bytes(s: &[u8]) -> Result<&Self, Error> { TryFrom::try_from(s) } /// Creates a new `&mut Ymd8HyphenStr` from a mutable byte slice. /// /// # Examples /// /// ``` /// # use datetime_string::common::Ymd8HyphenStr; /// let mut buf: [u8; 10] = *b"2001-12-31"; /// let date = Ymd8HyphenStr::from_bytes_mut(&mut buf[..])?; /// assert_eq!(date.as_str(), "2001-12-31"); /// /// date.set_year(1999)?; /// assert_eq!(date.as_str(), "1999-12-31"); /// /// assert_eq!(&buf[..], b"1999-12-31"); /// # Ok::<_, datetime_string::Error>(()) /// ``` #[inline] pub fn from_bytes_mut(s: &mut [u8]) -> Result<&mut Self, Error> { TryFrom::try_from(s) } /// Assigns the given value. /// /// # Examples /// /// ``` /// # use datetime_string::common::Ymd8HyphenStr; /// let mut buf: [u8; 10] = *b"1999-12-31"; /// let date = Ymd8HyphenStr::from_bytes_mut(&mut buf[..])?; /// assert_eq!(date.as_str(), "1999-12-31"); /// /// let newdate = Ymd8HyphenStr::from_str("2000-01-01")?; /// /// date.assign(newdate); /// assert_eq!(date.as_str(), "2000-01-01"); /// assert_eq!(buf, *b"2000-01-01"); /// # Ok::<_, datetime_string::Error>(()) /// ``` #[inline] pub fn assign(&mut self, v: &Self) { debug_assert_eq!(self.0.len(), v.0.len()); self.0.copy_from_slice(&v.0); } /// Returns a string slice. 
/// /// # Examples /// /// ``` /// # use datetime_string::common::Ymd8HyphenStr; /// let date = Ymd8HyphenStr::from_str("2001-12-31")?; /// /// assert_eq!(date.as_str(), "2001-12-31"); /// # Ok::<_, datetime_string::Error>(()) /// ``` #[inline] #[must_use] pub fn as_str(&self) -> &str { unsafe { // This is safe because the `Ymd8HyphenStr` ensures that the // underlying bytes are ASCII string. debug_assert_safe_version_ok!(str::from_utf8(&self.0)); str::from_utf8_unchecked(&self.0) } } /// Returns a byte slice. /// /// If you want to use indexed access, prefer [`as_bytes_fixed_len`]. /// /// # Examples /// /// ``` /// # use datetime_string::common::Ymd8HyphenStr; /// let date = Ymd8HyphenStr::from_str("2001-12-31")?; /// /// assert_eq!(date.as_bytes(), b"2001-12-31"); /// # Ok::<_, datetime_string::Error>(()) /// ``` /// /// [`as_bytes_fixed_len`]: #method.as_bytes_fixed_len #[inline] #[must_use] pub fn as_bytes(&self) -> &[u8] { &self.0 } /// Returns a fixed length byte slice. /// /// # Examples /// /// ``` /// # use datetime_string::common::Ymd8HyphenStr; /// let date = Ymd8HyphenStr::from_str("2001-12-31")?; /// /// let fixed_len: &[u8; 10] = date.as_bytes_fixed_len(); /// assert_eq!(fixed_len, b"2001-12-31"); /// # Ok::<_, datetime_string::Error>(()) /// ``` #[inline] #[must_use] pub fn as_bytes_fixed_len(&self) -> &[u8; 10] { debug_assert_eq!( self.len(), FULL_DATE_LEN, "Ymd8HyphenStr must always be 10 bytes" ); debug_assert_safe_version_ok!(<&[u8; FULL_DATE_LEN]>::try_from(&self.0)); let ptr = self.0.as_ptr() as *const [u8; FULL_DATE_LEN]; // This must be always safe because the length is already checked. unsafe { &*ptr } } /// Returns the year as a string slice. 
/// /// # Examples /// /// ``` /// # use datetime_string::common::Ymd8HyphenStr; /// let date = Ymd8HyphenStr::from_str("2001-12-31")?; /// /// assert_eq!(date.year_str(), "2001"); /// # Ok::<_, datetime_string::Error>(()) /// ``` #[inline] #[must_use] pub fn year_str(&self) -> &str { unsafe { // This is safe because the string is ASCII string and `YEAR_RANGE` // is always inside the string. debug_assert_safe_version_ok!(str::from_utf8(&self.0[YEAR_RANGE])); str::from_utf8_unchecked(self.0.get_unchecked(YEAR_RANGE)) } } /// Returns the year as a fixed length byte slice. /// /// # Examples /// /// ``` /// # use datetime_string::common::Ymd8HyphenStr; /// let date = Ymd8HyphenStr::from_str("2001-12-31")?; /// /// let year_fixed_len: &[u8; 4] = date.year_bytes_fixed_len(); /// assert_eq!(year_fixed_len, b"2001"); /// # Ok::<_, datetime_string::Error>(()) /// ``` #[inline] #[must_use] pub fn year_bytes_fixed_len(&self) -> &[u8; 4] { unsafe { // This is safe because `YEAR_RANGE` fits inside the string. debug_assert_safe_version_ok!(<&[u8; 4]>::try_from(&self.0[YEAR_RANGE])); let ptr = self.0.as_ptr().add(YEAR_RANGE.start) as *const [u8; 4]; &*ptr } } /// Returns the year as a fixed length mutable byte slice. /// /// # Safety /// /// The returned slice should have only ASCII digits. /// If non-ASCII digits are stored, it may lead to undefined behavior. #[inline] #[must_use] unsafe fn year_bytes_mut_fixed_len(&mut self) -> &mut [u8; 4] { // This is safe because `YEAR_RANGE` fits inside the string. debug_assert_ok!(<&[u8; 4]>::try_from(&self.0[YEAR_RANGE])); let ptr = self.0.as_mut_ptr().add(YEAR_RANGE.start) as *mut [u8; 4]; &mut *ptr } /// Returns the year as an integer. 
/// /// # Examples /// /// ``` /// # use datetime_string::common::Ymd8HyphenStr; /// let date = Ymd8HyphenStr::from_str("2001-12-31")?; /// /// assert_eq!(date.year(), 2001); /// # Ok::<_, datetime_string::Error>(()) /// ``` #[inline] #[must_use] pub fn year(&self) -> u16 { parse_digits4(*self.year_bytes_fixed_len()) } /// Returns the month as a string slice. /// /// # Examples /// /// ``` /// # use datetime_string::common::Ymd8HyphenStr; /// let date = Ymd8HyphenStr::from_str("2001-12-31")?; /// /// assert_eq!(date.month_str(), "12"); /// # Ok::<_, datetime_string::Error>(()) /// ``` #[inline] #[must_use] pub fn month_str(&self) -> &str { unsafe { // This is safe because the string is ASCII string and `MONTH_RANGE` // is always inside the string. debug_assert_safe_version_ok!(str::from_utf8(&self.0[MONTH_RANGE])); str::from_utf8_unchecked(self.0.get_unchecked(MONTH_RANGE)) } } /// Returns the month as a fixed length byte slice. /// /// # Examples /// /// ``` /// # use datetime_string::common::Ymd8HyphenStr; /// let date = Ymd8HyphenStr::from_str("2001-12-31")?; /// /// let month_fixed_len: &[u8; 2] = date.month_bytes_fixed_len(); /// assert_eq!(month_fixed_len, b"12"); /// # Ok::<_, datetime_string::Error>(()) /// ``` #[inline] #[must_use] pub fn month_bytes_fixed_len(&self) -> &[u8; 2] { unsafe { // This is safe because `MONTH_RANGE` fits inside the string. debug_assert_safe_version_ok!(<&[u8; 2]>::try_from(&self.0[MONTH_RANGE])); let ptr = self.0.as_ptr().add(MONTH_RANGE.start) as *const [u8; 2]; &*ptr } } /// Returns the month as a fixed length mutable byte slice. /// /// # Safety /// /// The returned slice should have only ASCII digits. /// If non-ASCII digits are stored, it may lead to undefined behavior. #[inline] #[must_use] unsafe fn month_bytes_mut_fixed_len(&mut self) -> &mut [u8; 2] { // This is safe because `MONTH_RANGE` fits inside the string. 
debug_assert_ok!(<&[u8; 2]>::try_from(&self.0[MONTH_RANGE])); let ptr = self.0.as_mut_ptr().add(MONTH_RANGE.start) as *mut [u8; 2]; &mut *ptr } /// Returns the 1-based month as an integer. /// /// # Examples /// /// ``` /// # use datetime_string::common::Ymd8HyphenStr; /// let date = Ymd8HyphenStr::from_str("2001-12-31")?; /// /// assert_eq!(date.month1(), 12); /// # Ok::<_, datetime_string::Error>(()) /// ``` #[inline] #[must_use] pub fn month1(&self) -> u8 { parse_digits2(*self.month_bytes_fixed_len()) } /// Returns the 0-based month as an integer. /// /// # Examples /// /// ``` /// # use datetime_string::common::Ymd8HyphenStr; /// let date = Ymd8HyphenStr::from_str("2001-12-31")?; /// /// assert_eq!(date.month0(), 11); /// # Ok::<_, datetime_string::Error>(()) /// ``` #[inline] #[must_use] pub fn month0(&self) -> u8 { parse_digits2(*self.month_bytes_fixed_len()).wrapping_sub(1) } /// Returns the day of month as a string slice. /// /// # Examples /// /// ``` /// # use datetime_string::common::Ymd8HyphenStr; /// let date = Ymd8HyphenStr::from_str("2001-12-31")?; /// /// assert_eq!(date.mday_str(), "31"); /// # Ok::<_, datetime_string::Error>(()) /// ``` #[inline] #[must_use] pub fn mday_str(&self) -> &str { unsafe { // This is safe because the string is ASCII string and `MDAY_RANGE` // is always inside the string. debug_assert_safe_version_ok!(str::from_utf8(&self.0[MDAY_RANGE])); str::from_utf8_unchecked(self.0.get_unchecked(MDAY_RANGE)) } } /// Returns the day of month as a fixed length byte slice. /// /// # Examples /// /// ``` /// # use datetime_string::common::Ymd8HyphenStr; /// let date = Ymd8HyphenStr::from_str("2001-12-31")?; /// /// let mday_fixed_len: &[u8; 2] = date.mday_bytes_fixed_len(); /// assert_eq!(mday_fixed_len, b"31"); /// # Ok::<_, datetime_string::Error>(()) /// ``` #[inline] #[must_use] pub fn mday_bytes_fixed_len(&self) -> &[u8; 2] { unsafe { // This is safe because `MDAY_RANGE` fits inside the string. 
debug_assert_safe_version_ok!(<&[u8; 2]>::try_from(&self.0[MDAY_RANGE])); let ptr = self.0.as_ptr().add(MDAY_RANGE.start) as *const [u8; 2]; &*ptr } } /// Returns the day of month as a fixed length mutable byte slice. /// /// # Safety /// /// The returned slice should have only ASCII digits. /// If non-ASCII digits are stored, it may lead to undefined behavior. #[inline] #[must_use] unsafe fn mday_bytes_mut_fixed_len(&mut self) -> &mut [u8; 2] { // This is safe because `MDAY_RANGE` fits inside the string. debug_assert_ok!(<&[u8; 2]>::try_from(&self.0[MDAY_RANGE])); let ptr = self.0.as_mut_ptr().add(MDAY_RANGE.start) as *mut [u8; 2]; &mut *ptr } /// Returns the day of month as an integer. /// /// # Examples /// /// ``` /// # use datetime_string::common::Ymd8HyphenStr; /// let date = Ymd8HyphenStr::from_str("2001-12-31")?; /// /// assert_eq!(date.mday(), 31); /// # Ok::<_, datetime_string::Error>(()) /// ``` #[inline] #[must_use] pub fn mday(&self) -> u8 { parse_digits2(*self.mday_bytes_fixed_len()) } /// Sets the given year to the string. /// /// # Failures /// /// * Fails if `year` is greater than 9999. /// * Fails if the datetime after modification is invalid. /// /// # Examples /// /// ``` /// # use datetime_string::common::Ymd8HyphenStr; /// let mut buf: [u8; 10] = *b"2000-02-29"; /// let date = Ymd8HyphenStr::from_bytes_mut(&mut buf[..])?; /// assert_eq!(date.as_str(), "2000-02-29"); /// /// date.set_year(2004)?; /// assert_eq!(date.as_str(), "2004-02-29"); /// /// assert!(date.set_year(2001).is_err(), "2001-02-29 is invalid"); /// # Ok::<_, datetime_string::Error>(()) /// ``` pub fn set_year(&mut self, year: u16) -> Result<(), Error> { if year > 9999 { return Err(ErrorKind::ComponentOutOfRange(ComponentKind::Year).into()); } validate_ym1d(year, self.month1(), self.mday())?; unsafe { // This is safe because `write_digit4()` fills the slice with ASCII digits. 
write_digit4(self.year_bytes_mut_fixed_len(), year); } debug_assert_ok!(validate_bytes(&self.0)); debug_assert_ok!( validate_ym1d(self.year(), self.month1(), self.mday()), "Date should be valid after modification" ); Ok(()) } /// Sets the given 0-based month value to the string. /// /// # Failures /// /// * Fails if the datetime after modification is invalid. /// /// # Examples /// /// ``` /// # use datetime_string::common::Ymd8HyphenStr; /// let mut buf: [u8; 10] = *b"2001-12-31"; /// let date = Ymd8HyphenStr::from_bytes_mut(&mut buf[..])?; /// assert_eq!(date.as_str(), "2001-12-31"); /// /// date.set_month0(7)?; /// assert_eq!(date.as_str(), "2001-08-31"); /// /// assert!(date.set_month0(8).is_err(), "2001-09-31 is invalid"); /// # Ok::<_, datetime_string::Error>(()) /// ``` pub fn set_month0(&mut self, month0: u8) -> Result<(), Error> { if month0 >= 12 { return Err(ErrorKind::ComponentOutOfRange(ComponentKind::Month).into()); } validate_ym0d(self.year(), month0, self.mday())?; unsafe { // This is safe because `write_digit2()` fills the slice with ASCII digits. write_digit2(self.month_bytes_mut_fixed_len(), month0.wrapping_add(1)); } debug_assert_ok!(validate_bytes(&self.0)); debug_assert_ok!( validate_ym1d(self.year(), self.month1(), self.mday()), "Date should be valid after modification" ); Ok(()) } /// Sets the given 1-based month value to the string. /// /// # Failures /// /// * Fails if the datetime after modification is invalid. 
/// /// # Examples /// /// ``` /// # use datetime_string::common::Ymd8HyphenStr; /// let mut buf: [u8; 10] = *b"2001-12-31"; /// let date = Ymd8HyphenStr::from_bytes_mut(&mut buf[..])?; /// assert_eq!(date.as_str(), "2001-12-31"); /// /// date.set_month1(8)?; /// assert_eq!(date.as_str(), "2001-08-31"); /// /// assert!(date.set_month1(9).is_err(), "2001-09-31 is invalid"); /// # Ok::<_, datetime_string::Error>(()) /// ``` #[inline] pub fn set_month1(&mut self, month1: u8) -> Result<(), Error> { self.set_month0(month1.wrapping_sub(1)) } /// Sets the given day of month value to the string. /// /// # Failures /// /// * Fails if the datetime after modification is invalid. /// /// # Examples /// /// ``` /// # use datetime_string::common::Ymd8HyphenStr; /// let mut buf: [u8; 10] = *b"2001-02-28"; /// let date = Ymd8HyphenStr::from_bytes_mut(&mut buf[..])?; /// assert_eq!(date.as_str(), "2001-02-28"); /// /// date.set_mday(3)?; /// assert_eq!(date.as_str(), "2001-02-03"); /// /// assert!(date.set_mday(29).is_err(), "2001-02-29 is invalid"); /// # Ok::<_, datetime_string::Error>(()) /// ``` pub fn set_mday(&mut self, mday: u8) -> Result<(), Error> { validate_ym1d(self.year(), self.month1(), mday)?; unsafe { // This is safe because `write_digit2()` fills the slice with ASCII digits. write_digit2(self.mday_bytes_mut_fixed_len(), mday); } debug_assert_ok!(validate_bytes(&self.0)); debug_assert_ok!( validate_ym1d(self.year(), self.month1(), self.mday()), "Date should be valid after modification" ); Ok(()) } /// Sets the given 0-based month and 1-based day of month values to the string. /// /// # Failures /// /// * Fails if the datetime after modification is invalid. 
/// /// # Examples /// /// ``` /// # use datetime_string::common::Ymd8HyphenStr; /// let mut buf: [u8; 10] = *b"2001-02-28"; /// let date = Ymd8HyphenStr::from_bytes_mut(&mut buf[..])?; /// assert_eq!(date.as_str(), "2001-02-28"); /// /// date.set_month0_mday(3, 23)?; /// assert_eq!(date.as_str(), "2001-04-23"); /// /// assert!(date.set_month0_mday(1, 29).is_err(), "2001-02-29 is invalid"); /// # Ok::<_, datetime_string::Error>(()) /// ``` pub fn set_month0_mday(&mut self, month0: u8, mday: u8) -> Result<(), Error> { validate_ym0d(self.year(), month0, mday)?; unsafe { // This is safe because `write_digit2()` fills the slices with ASCII digits. write_digit2(self.month_bytes_mut_fixed_len(), month0.wrapping_add(1)); write_digit2(self.mday_bytes_mut_fixed_len(), mday); } debug_assert_ok!(validate_bytes(&self.0)); debug_assert_ok!( validate_ym1d(self.year(), self.month1(), self.mday()), "Date should be valid after modification" ); Ok(()) } /// Sets the given 1-based month and day of month values to the string. /// /// # Failures /// /// * Fails if the datetime after modification is invalid. /// /// # Examples /// /// ``` /// # use datetime_string::common::Ymd8HyphenStr; /// let mut buf: [u8; 10] = *b"2001-02-28"; /// let date = Ymd8HyphenStr::from_bytes_mut(&mut buf[..])?; /// assert_eq!(date.as_str(), "2001-02-28"); /// /// date.set_month1_mday(4, 23)?; /// assert_eq!(date.as_str(), "2001-04-23"); /// /// assert!(date.set_month1_mday(2, 29).is_err(), "2001-02-29 is invalid"); /// # Ok::<_, datetime_string::Error>(()) /// ``` pub fn set_month1_mday(&mut self, month1: u8, mday: u8) -> Result<(), Error> { validate_ym1d(self.year(), month1, mday)?; unsafe { // This is safe because `write_digit2()` fills the slices with ASCII digits. 
write_digit2(self.month_bytes_mut_fixed_len(), month1); write_digit2(self.mday_bytes_mut_fixed_len(), mday); } debug_assert_ok!(validate_bytes(&self.0)); debug_assert_ok!( validate_ym1d(self.year(), self.month1(), self.mday()), "Date should be valid after modification" ); Ok(()) } /// Sets the given 1-based year, 0-based month, and 1-based day of month values to the string. /// /// # Failures /// /// * Fails if `year` is greater than 9999. /// * Fails if the datetime after modification is invalid. /// /// # Examples /// /// ``` /// # use datetime_string::common::Ymd8HyphenStr; /// let mut buf: [u8; 10] = *b"2001-02-28"; /// let date = Ymd8HyphenStr::from_bytes_mut(&mut buf[..])?; /// assert_eq!(date.as_str(), "2001-02-28"); /// /// date.set_ym0d(1999, 3, 23)?; /// assert_eq!(date.as_str(), "1999-04-23"); /// /// assert!(date.set_ym0d(1999, 1, 29).is_err(), "1999-02-29 is invalid"); /// # Ok::<_, datetime_string::Error>(()) /// ``` pub fn set_ym0d(&mut self, year: u16, month0: u8, mday: u8) -> Result<(), Error> { validate_ym0d(year, month0, mday)?; unsafe { // This is safe because `write_digit2()` and `write_digit4()` fill // the slices with ASCII digits. write_digit4(self.year_bytes_mut_fixed_len(), year); write_digit2(self.month_bytes_mut_fixed_len(), month0.wrapping_add(1)); write_digit2(self.mday_bytes_mut_fixed_len(), mday); } debug_assert_ok!(validate_bytes(&self.0)); debug_assert_ok!( validate_ym1d(self.year(), self.month1(), self.mday()), "Date should be valid after modification" ); Ok(()) } /// Sets the given 1-based year, month, and day of month values to the string. /// /// # Failures /// /// * Fails if `year` is greater than 9999. /// * Fails if the datetime after modification is invalid. 
/// /// # Examples /// /// ``` /// # use datetime_string::common::Ymd8HyphenStr; /// let mut buf: [u8; 10] = *b"2001-02-28"; /// let date = Ymd8HyphenStr::from_bytes_mut(&mut buf[..])?; /// assert_eq!(date.as_str(), "2001-02-28"); /// /// date.set_ym1d(1999, 4, 23)?; /// assert_eq!(date.as_str(), "1999-04-23"); /// /// assert!(date.set_ym1d(1999, 2, 29).is_err(), "1999-02-29 is invalid"); /// # Ok::<_, datetime_string::Error>(()) /// ``` #[inline] pub fn set_ym1d(&mut self, year: u16, month1: u8, mday: u8) -> Result<(), Error> { self.set_ym0d(year, month1.wrapping_sub(1), mday) } /// Returns the 0-based day of the year, i.e. days since January 1 of the year. /// /// Note that this value is 0-based. /// January 1 is 0 days since January 1 of the year. /// /// # Examples /// /// ``` /// # use datetime_string::common::Ymd8HyphenStr; /// let date = Ymd8HyphenStr::from_str("1970-01-01")?; /// assert_eq!(date.yday0(), 0, "0 for the 1st day of the year, because this is 0-based value"); /// /// let date2 = Ymd8HyphenStr::from_str("1970-12-31")?; /// assert_eq!(date2.yday0(), 364); /// /// let leap_last = Ymd8HyphenStr::from_str("2000-12-31")?; /// assert_eq!(leap_last.yday0(), 365, "2000-02-29 exists"); /// # Ok::<_, datetime_string::Error>(()) /// ``` #[inline] pub fn yday0(&self) -> u16 { self.yday1() - 1 } /// Returns the 1-based day of the year. /// /// Note that this value is 1-based. /// January 1 is 1st day of the year. 
/// /// # Examples /// /// ``` /// # use datetime_string::common::Ymd8HyphenStr; /// let date = Ymd8HyphenStr::from_str("1970-01-01")?; /// assert_eq!(date.yday1(), 1, "1 for the 1st day of the year, because this is 1-based value"); /// /// let date2 = Ymd8HyphenStr::from_str("1970-12-31")?; /// assert_eq!(date2.yday1(), 365); /// /// let leap_last = Ymd8HyphenStr::from_str("2000-12-31")?; /// assert_eq!(leap_last.yday1(), 366, "2000-02-29 exists"); /// # Ok::<_, datetime_string::Error>(()) /// ``` pub fn yday1(&self) -> u16 { /// `yday`s of 0th day for each months of non-leap year. const BASE_YDAYS: [u16; 12] = [0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334]; let month0 = self.month0(); let non_leap_yday = BASE_YDAYS[usize::from(month0)] + u16::from(self.mday()); if month0 > 1 && is_leap_year(self.year()) { non_leap_yday + 1 } else { non_leap_yday } } /// Returns the days from the epoch (1970-01-01). /// /// # Examples /// /// ``` /// # use datetime_string::common::Ymd8HyphenStr; /// let date = Ymd8HyphenStr::from_str("1971-01-01")?; /// assert_eq!(date.days_since_epoch(), 365); /// /// let date2 = Ymd8HyphenStr::from_str("1969-01-01")?; /// assert_eq!(date2.days_since_epoch(), -365); /// /// let epoch = Ymd8HyphenStr::from_str("1970-01-01")?; /// assert_eq!(epoch.days_since_epoch(), 0); /// # Ok::<_, datetime_string::Error>(()) /// ``` pub fn days_since_epoch(&self) -> i32 { let tm_year = i32::from(self.year()) - 1900; // See "4.16. Seconds Since the Epoch" section of POSIX // (<https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap04.html#tag_04_16>). // // > If the year is \<1970 or the value is negative, the relationship is // > undefined. 
If the year is >=1970 and the value is non-negative, the // > value is related to a Coordinated Universal Time name according to // > the C-language expression, where `tm_sec`, `tm_min`, `tm_hour`, // > `tm_yday`, and `tm_year` are all integer types: // > // > ``` // > tm_sec + tm_min*60 + tm_hour*3600 + tm_yday*86400 + // > (tm_year-70)*31536000 + ((tm_year-69)/4)*86400 - // > ((tm_year-1)/100)*86400 + ((tm_year+299)/400)*86400 // > ``` (i32::from(self.yday1()) - 1) + (tm_year - 70) * 365 + (tm_year - 69) / 4 - (tm_year - 1) / 100 + (tm_year + 299) / 400 } } #[cfg(feature = "alloc")] impl alloc::borrow::ToOwned for Ymd8HyphenStr { type Owned = Ymd8HyphenString; #[inline] fn to_owned(&self) -> Self::Owned { self.into() } } impl AsRef<[u8]> for Ymd8HyphenStr { #[inline] fn as_ref(&self) -> &[u8] { self.as_bytes() } } impl AsRef<str> for Ymd8HyphenStr { #[inline] fn as_ref(&self) -> &str { self.as_str() } } impl AsRef<Ymd8HyphenStr> for Ymd8HyphenStr { #[inline] fn as_ref(&self) -> &Ymd8HyphenStr { self } } impl AsMut<Ymd8HyphenStr> for Ymd8HyphenStr { #[inline] fn as_mut(&mut self) -> &mut Ymd8HyphenStr { self } } impl<'a> From<&'a Ymd8HyphenStr> for &'a str { #[inline] fn from(v: &'a Ymd8HyphenStr) -> Self { v.as_str() } } #[cfg(feature = "chrono04")] impl From<&Ymd8HyphenStr> for chrono04::NaiveDate { fn from(v: &Ymd8HyphenStr) -> Self { let year = i32::from(v.year()); let month1 = u32::from(v.month1()); let mday = u32::from(v.mday()); Self::from_ymd(year, month1, mday) } } impl<'a> TryFrom<&'a [u8]> for &'a Ymd8HyphenStr { type Error = Error; #[inline] fn try_from(v: &'a [u8]) -> Result<Self, Self::Error> { validate_bytes(v)?; Ok(unsafe { // This is safe because a valid RFC 3339 `full-date` string is also an ASCII string. 
Ymd8HyphenStr::from_bytes_maybe_unchecked(v) }) } } impl<'a> TryFrom<&'a mut [u8]> for &'a mut Ymd8HyphenStr { type Error = Error; #[inline] fn try_from(v: &'a mut [u8]) -> Result<Self, Self::Error> { validate_bytes(v)?; Ok(unsafe { // This is safe because a valid RFC 3339 `full-date` string is also an ASCII string. Ymd8HyphenStr::from_bytes_maybe_unchecked_mut(v) }) } } impl<'a> TryFrom<&'a str> for &'a Ymd8HyphenStr { type Error = Error; #[inline] fn try_from(v: &'a str) -> Result<Self, Self::Error> { TryFrom::try_from(v.as_bytes()) } } impl<'a> TryFrom<&'a mut str> for &'a mut Ymd8HyphenStr { type Error = Error; #[inline] fn try_from(v: &'a mut str) -> Result<Self, Self::Error> { validate_bytes(v.as_bytes())?; Ok(unsafe { // This is safe because the value is successfully validated, and // `Ymd8HyphenStr` ensures the value after modification is an ASCII string. Ymd8HyphenStr::from_str_maybe_unchecked_mut(v) }) } } impl fmt::Display for Ymd8HyphenStr { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.as_str().fmt(f) } } impl ops::Deref for Ymd8HyphenStr { type Target = str; #[inline] fn deref(&self) -> &Self::Target { self.as_str() } } impl_cmp_symmetric!(str, Ymd8HyphenStr, &Ymd8HyphenStr); impl_cmp_symmetric!([u8], Ymd8HyphenStr, [u8]); impl_cmp_symmetric!([u8], Ymd8HyphenStr, &[u8]); impl_cmp_symmetric!([u8], &Ymd8HyphenStr, [u8]); impl_cmp_symmetric!(str, Ymd8HyphenStr, str); impl_cmp_symmetric!(str, Ymd8HyphenStr, &str); impl_cmp_symmetric!(str, &Ymd8HyphenStr, str); #[cfg(feature = "serde")] impl serde::Serialize for Ymd8HyphenStr { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: serde::Serializer, { serializer.serialize_str(self.as_str()) } } /// Owned string for a date in `YYYY-MM-DD` format, such as `2001-12-31`. /// /// This is also an RFC 3339 [`full-date`] string. /// /// This is a fixed length string, and implements [`Copy`] trait. 
/// /// To create a value of this type, use [`str::parse`] method or /// [`std::convert::TryFrom`] trait, or convert from `&Ymd8HyphenStr`. /// /// # Examples /// /// ``` /// # use datetime_string::common::Ymd8HyphenString; /// use datetime_string::common::Ymd8HyphenStr; /// use std::convert::TryFrom; /// /// let try_from = Ymd8HyphenString::try_from("2001-12-31")?; /// /// let parse = "2001-12-31".parse::<Ymd8HyphenString>()?; /// let parse2: Ymd8HyphenString = "2001-12-31".parse()?; /// /// let to_owned = Ymd8HyphenStr::from_str("2001-12-31")?.to_owned(); /// let into: Ymd8HyphenString = Ymd8HyphenStr::from_str("2001-12-31")?.into(); /// # Ok::<_, datetime_string::Error>(()) /// ``` /// /// [`full-date`]: https://tools.ietf.org/html/rfc3339#section-5.6 // Note that `derive(Serialize)` cannot used here, because it encodes this as // `[u8; 10]` rather than as a string. #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[repr(transparent)] // Comparisons implemented for the type are consistent (at least it is intended to be so). // See <https://github.com/rust-lang/rust-clippy/issues/2025>. // Note that `clippy::derive_ord_xor_partial_ord` would be introduced since Rust 1.47.0. #[allow(clippy::derive_hash_xor_eq)] #[allow(clippy::unknown_clippy_lints, clippy::derive_ord_xor_partial_ord)] pub struct Ymd8HyphenString([u8; FULL_DATE_LEN]); impl Ymd8HyphenString { /// Creates a `Ymd8HyphenString` from the given bytes. /// /// # Safety /// /// `validate_bytes(&s)` should return `Ok(())`. #[inline] #[must_use] unsafe fn new_maybe_unchecked(s: [u8; 10]) -> Self { debug_assert_ok!(validate_bytes(&s)); Self(s) } /// Returns the minimum date. #[inline] #[must_use] fn min() -> Self { unsafe { // This is safe because `0000-01-01` is valid. debug_assert_safe_version_ok!(Self::try_from(*b"0000-01-01")); Self::new_maybe_unchecked(*b"0000-01-01") } } /// Creates a new `Ymd8HyphenString` from the given date. /// /// Note that `month0` is 0-based, i.e. 
January is 0, February is 1, and so on. /// /// # Examples /// /// ``` /// # use datetime_string::common::Ymd8HyphenString; /// let date = Ymd8HyphenString::from_ym0d(2001, 11, 31)?; /// assert_eq!(date.as_str(), "2001-12-31"); /// /// assert!(Ymd8HyphenString::from_ym0d(2001, 1, 29).is_err(), "2001-02-29 is invaild date"); /// # Ok::<_, datetime_string::Error>(()) /// ``` pub fn from_ym0d(year: u16, month0: u8, mday: u8) -> Result<Self, Error> { let mut v = Self::min(); v.set_ym0d(year, month0, mday)?; Ok(v) } /// Creates a new `Ymd8HyphenString` from the given date. /// /// Note that `month1` is 1-based, i.e. January is 1, February is 2, and so on. /// /// # Examples /// /// ``` /// # use datetime_string::common::Ymd8HyphenString; /// let date = Ymd8HyphenString::from_ym1d(2001, 12, 31)?; /// assert_eq!(date.as_str(), "2001-12-31"); /// /// assert!(Ymd8HyphenString::from_ym1d(2001, 2, 29).is_err(), "2001-02-29 is invaild date"); /// # Ok::<_, datetime_string::Error>(()) /// ``` pub fn from_ym1d(year: u16, month1: u8, mday: u8) -> Result<Self, Error> { let mut v = Self::min(); v.set_ym1d(year, month1, mday)?; Ok(v) } /// Returns a `&Ymd8HyphenStr` for the string. /// /// # Examples /// /// ``` /// # use datetime_string::common::Ymd8HyphenString; /// use datetime_string::common::Ymd8HyphenStr; /// /// let date = "2001-12-31".parse::<Ymd8HyphenString>()?; /// /// // Usually you don't need to call `as_deref()` explicitly, because /// // `Deref<Target = Ymd8HyphenStr>` trait is implemented. /// let _: &Ymd8HyphenStr = date.as_deref(); /// # Ok::<_, datetime_string::Error>(()) /// ``` #[inline] #[must_use] pub fn as_deref(&self) -> &Ymd8HyphenStr { unsafe { // This is safe because `self.0` is valid RFC 3339 `full-date` string. debug_assert_ok!(Ymd8HyphenStr::from_bytes(&self.0)); Ymd8HyphenStr::from_bytes_maybe_unchecked(&self.0) } } /// Returns a `&mut Ymd8HyphenStr` for the string. 
/// /// # Examples /// /// ``` /// # use datetime_string::common::Ymd8HyphenString; /// use datetime_string::common::Ymd8HyphenStr; /// /// let mut date = "2001-12-31".parse::<Ymd8HyphenString>()?; /// /// // Usually you don't need to call `as_deref_mut()` explicitly, because /// // `DerefMut` trait is implemented. /// let _: &mut Ymd8HyphenStr = date.as_deref_mut(); /// # Ok::<_, datetime_string::Error>(()) /// ``` #[inline] #[must_use] pub fn as_deref_mut(&mut self) -> &mut Ymd8HyphenStr { unsafe { // This is safe because `self.0` is valid RFC 3339 `full-date` string. debug_assert_ok!(Ymd8HyphenStr::from_bytes(&self.0)); Ymd8HyphenStr::from_bytes_maybe_unchecked_mut(&mut self.0) } } } impl core::borrow::Borrow<Ymd8HyphenStr> for Ymd8HyphenString { #[inline] fn borrow(&self) -> &Ymd8HyphenStr { self.as_deref() } } impl core::borrow::BorrowMut<Ymd8HyphenStr> for Ymd8HyphenString { #[inline] fn borrow_mut(&mut self) -> &mut Ymd8HyphenStr { self.as_deref_mut() } } impl AsRef<[u8]> for Ymd8HyphenString { #[inline] fn as_ref(&self) -> &[u8] { self.as_bytes() } } impl AsRef<str> for Ymd8HyphenString { #[inline] fn as_ref(&self) -> &str { self.as_str() } } impl AsRef<Ymd8HyphenStr> for Ymd8HyphenString { #[inline] fn as_ref(&self) -> &Ymd8HyphenStr { self } } impl AsMut<Ymd8HyphenStr> for Ymd8HyphenString { #[inline] fn as_mut(&mut self) -> &mut Ymd8HyphenStr { self } } #[cfg(feature = "alloc")] impl From<Ymd8HyphenString> for Vec<u8> { #[inline] fn from(v: Ymd8HyphenString) -> Vec<u8> { (*v.as_bytes_fixed_len()).into() } } #[cfg(feature = "alloc")] impl From<Ymd8HyphenString> for String { #[inline] fn from(v: Ymd8HyphenString) -> String { let vec: Vec<u8> = (*v.as_bytes_fixed_len()).into(); unsafe { // This is safe because a valid RFC 3339 `full-date` string is also an ASCII string. String::from_utf8_unchecked(vec) } } } impl From<&Ymd8HyphenStr> for Ymd8HyphenString { fn from(v: &Ymd8HyphenStr) -> Self { unsafe { // This is safe because the value is already validated. 
Self::new_maybe_unchecked(*v.as_bytes_fixed_len()) } } } #[cfg(feature = "chrono04")] impl TryFrom<&chrono04::NaiveDate> for Ymd8HyphenString { type Error = Error; /// Converts the given date into `Ymd8HyphenString`. /// /// # Failures /// /// Fails if the year is less than 0 or greater than 9999. fn try_from(v: &chrono04::NaiveDate) -> Result<Self, Self::Error> { use chrono04::Datelike; let year = v.year(); if (year < 0) || (year > 9999) { return Err(ErrorKind::ComponentOutOfRange(ComponentKind::Year).into()); } Ok( Self::from_ym1d(v.year() as u16, v.month() as u8, v.day() as u8) .expect("`chrono04::NaiveTime` must always have a valid date"), ) } } impl TryFrom<&[u8]> for Ymd8HyphenString { type Error = Error; #[inline] fn try_from(v: &[u8]) -> Result<Self, Self::Error> { Ymd8HyphenStr::from_bytes(v).map(Into::into) } } impl TryFrom<&str> for Ymd8HyphenString { type Error = Error; #[inline] fn try_from(v: &str) -> Result<Self, Self::Error> { Ymd8HyphenStr::from_str(v).map(Into::into) } } impl TryFrom<[u8; 10]> for Ymd8HyphenString { type Error = Error; #[inline] fn try_from(v: [u8; 10]) -> Result<Self, Self::Error> { validate_bytes(&v)?; Ok(unsafe { // This is safe because the value is successfully validated. 
Self::new_maybe_unchecked(v) }) } } impl fmt::Display for Ymd8HyphenString { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.as_deref().fmt(f) } } impl ops::Deref for Ymd8HyphenString { type Target = Ymd8HyphenStr; #[inline] fn deref(&self) -> &Self::Target { self.as_deref() } } impl ops::DerefMut for Ymd8HyphenString { #[inline] fn deref_mut(&mut self) -> &mut Self::Target { self.as_deref_mut() } } impl str::FromStr for Ymd8HyphenString { type Err = Error; #[inline] fn from_str(s: &str) -> Result<Self, Self::Err> { Self::try_from(s) } } impl_cmp_symmetric!(Ymd8HyphenStr, Ymd8HyphenString, &Ymd8HyphenString); impl_cmp_symmetric!(Ymd8HyphenStr, Ymd8HyphenString, Ymd8HyphenStr); impl_cmp_symmetric!(Ymd8HyphenStr, Ymd8HyphenString, &Ymd8HyphenStr); impl_cmp_symmetric!(str, Ymd8HyphenString, str); impl_cmp_symmetric!(str, Ymd8HyphenString, &str); impl_cmp_symmetric!(str, &Ymd8HyphenString, str); impl_cmp_symmetric!([u8], Ymd8HyphenString, [u8]); impl_cmp_symmetric!([u8], Ymd8HyphenString, &[u8]); impl_cmp_symmetric!([u8], &Ymd8HyphenString, [u8]); #[cfg(feature = "serde")] impl serde::Serialize for Ymd8HyphenString { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: serde::Serializer, { serializer.serialize_str(self.as_str()) } } /// Items for serde support. #[cfg(feature = "serde")] mod serde_ { use super::*; use serde::de::{Deserialize, Deserializer, Visitor}; /// Visitor for `&Ymd8HyphenStr`. 
struct StrVisitor; impl<'de> Visitor<'de> for StrVisitor { type Value = &'de Ymd8HyphenStr; #[inline] fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str("YYYY-MM-DD date string") } #[inline] fn visit_borrowed_bytes<E>(self, v: &'de [u8]) -> Result<Self::Value, E> where E: serde::de::Error, { Self::Value::try_from(v).map_err(E::custom) } #[inline] fn visit_borrowed_str<E>(self, v: &'de str) -> Result<Self::Value, E> where E: serde::de::Error, { Self::Value::try_from(v).map_err(E::custom) } } impl<'de> Deserialize<'de> for &'de Ymd8HyphenStr { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { deserializer.deserialize_any(StrVisitor) } } /// Visitor for `Ymd8HyphenString`. struct StringVisitor; impl<'de> Visitor<'de> for StringVisitor { type Value = Ymd8HyphenString; #[inline] fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str("YYYY-MM-DD date string") } #[inline] fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E> where E: serde::de::Error, { Self::Value::try_from(v).map_err(E::custom) } #[inline] fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: serde::de::Error, { Self::Value::try_from(v).map_err(E::custom) } } impl<'de> Deserialize<'de> for Ymd8HyphenString { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { deserializer.deserialize_any(StringVisitor) } } } #[cfg(test)] mod tests { #[cfg(feature = "serde")] use super::*; use super::validate_bytes as s_validate; #[cfg(feature = "serde")] use serde_test::{assert_de_tokens, assert_tokens, Token}; #[test] fn validate_bytes() { assert!(s_validate(b"0000-01-01").is_ok()); assert!(s_validate(b"9999-12-31").is_ok()); assert!(s_validate(b"2001-01-01").is_ok()); assert!(s_validate(b"2001-01-31").is_ok()); assert!(s_validate(b"2001-03-31").is_ok()); assert!(s_validate(b"2001-04-30").is_ok()); assert!(s_validate(b"2001-05-31").is_ok()); 
assert!(s_validate(b"2001-06-30").is_ok()); assert!(s_validate(b"2001-07-31").is_ok()); assert!(s_validate(b"2001-08-31").is_ok()); assert!(s_validate(b"2001-09-30").is_ok()); assert!(s_validate(b"2001-10-31").is_ok()); assert!(s_validate(b"2001-11-30").is_ok()); assert!(s_validate(b"2001-12-31").is_ok()); assert!(s_validate(b"2001-00-01").is_err()); assert!(s_validate(b"2001-13-01").is_err()); assert!(s_validate(b"2001-01-00").is_err()); assert!(s_validate(b"2001-01-32").is_err()); assert!(s_validate(b"2001-03-32").is_err()); assert!(s_validate(b"2001-04-31").is_err()); assert!(s_validate(b"2001-05-32").is_err()); assert!(s_validate(b"2001-06-31").is_err()); assert!(s_validate(b"2001-07-32").is_err()); assert!(s_validate(b"2001-08-32").is_err()); assert!(s_validate(b"2001-09-31").is_err()); assert!(s_validate(b"2001-10-32").is_err()); assert!(s_validate(b"2001-11-31").is_err()); assert!(s_validate(b"2001-12-32").is_err()); // 2001 is not a leap year. assert!(s_validate(b"2001-02-28").is_ok()); assert!(s_validate(b"2001-02-29").is_err()); // 2000 is a leap year. assert!(s_validate(b"2000-02-28").is_ok()); assert!(s_validate(b"2000-02-29").is_ok()); assert!(s_validate(b"2000-02-30").is_err()); // 2004 is a leap year. assert!(s_validate(b"2004-02-28").is_ok()); assert!(s_validate(b"2004-02-29").is_ok()); assert!(s_validate(b"2004-02-30").is_err()); // 2100 is not a leap year. 
assert!(s_validate(b"2100-02-28").is_ok()); assert!(s_validate(b"2100-02-29").is_err()); assert!(s_validate(b"2001+01-01").is_err()); assert!(s_validate(b"2001-01+01").is_err()); assert!(s_validate(b"01-01-01").is_err()); assert!(s_validate(b"+001-01-01").is_err()); assert!(s_validate(b"-001-01-01").is_err()); } #[cfg(feature = "serde")] #[test] fn ser_de_str() { let raw: &'static str = "2001-12-31"; assert_tokens( &Ymd8HyphenStr::from_str(raw).unwrap(), &[Token::BorrowedStr(raw)], ); } #[cfg(feature = "serde")] #[test] fn ser_de_string() { let raw: &'static str = "2001-12-31"; assert_tokens( &Ymd8HyphenString::try_from(raw).unwrap(), &[Token::Str(raw)], ); } #[cfg(feature = "serde")] #[test] fn de_bytes_slice() { let raw: &'static [u8; 10] = b"2001-12-31"; assert_de_tokens( &Ymd8HyphenStr::from_bytes(raw).unwrap(), &[Token::BorrowedBytes(raw)], ); } #[cfg(feature = "serde")] #[test] fn de_bytes() { let raw: &'static [u8; 10] = b"2001-12-31"; assert_de_tokens( &Ymd8HyphenString::try_from(&raw[..]).unwrap(), &[Token::Bytes(raw)], ); } }
true
f516fc53beaae3ae886475b213cc9f02447937cf
Rust
s32k-rust/s32k144.rs
/src/mcm/cpcr.rs
UTF-8
23,016
2.734375
3
[ "MIT", "Apache-2.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
#[doc = r" Value read from the register"] pub struct R { bits: u32, } #[doc = r" Value to write to the register"] pub struct W { bits: u32, } impl super::CPCR { #[doc = r" Modifies the contents of the register"] #[inline] pub fn modify<F>(&self, f: F) where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W, { let bits = self.register.get(); let r = R { bits: bits }; let mut w = W { bits: bits }; f(&r, &mut w); self.register.set(w.bits); } #[doc = r" Reads the contents of the register"] #[inline] pub fn read(&self) -> R { R { bits: self.register.get() } } #[doc = r" Writes to the register"] #[inline] pub fn write<F>(&self, f: F) where F: FnOnce(&mut W) -> &mut W, { let mut w = W::reset_value(); f(&mut w); self.register.set(w.bits); } #[doc = r" Writes the reset value to the register"] #[inline] pub fn reset(&self) { self.write(|w| w) } } #[doc = "Possible values of the field `HLT_FSM_ST`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum HLT_FSM_STR { #[doc = "Waiting for request"] _00, #[doc = "Waiting for platform idle"] _01, #[doc = "Platform stalled"] _11, #[doc = "Unused state"] _10, } impl HLT_FSM_STR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { match *self { HLT_FSM_STR::_00 => 0, HLT_FSM_STR::_01 => 1, HLT_FSM_STR::_11 => 3, HLT_FSM_STR::_10 => 2, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: u8) -> HLT_FSM_STR { match value { 0 => HLT_FSM_STR::_00, 1 => HLT_FSM_STR::_01, 3 => HLT_FSM_STR::_11, 2 => HLT_FSM_STR::_10, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `_00`"] #[inline] pub fn is_00(&self) -> bool { *self == HLT_FSM_STR::_00 } #[doc = "Checks if the value of the field is `_01`"] #[inline] pub fn is_01(&self) -> bool { *self == HLT_FSM_STR::_01 } #[doc = "Checks if the value of the field is `_11`"] #[inline] pub fn is_11(&self) -> bool { *self == HLT_FSM_STR::_11 } #[doc = "Checks if the value of the field is `_10`"] #[inline] pub fn is_10(&self) -> bool { *self == 
HLT_FSM_STR::_10 } } #[doc = "Possible values of the field `AXBS_HLT_REQ`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum AXBS_HLT_REQR { #[doc = "AXBS is not receiving halt request"] _0, #[doc = "AXBS is receiving halt request"] _1, } impl AXBS_HLT_REQR { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { AXBS_HLT_REQR::_0 => false, AXBS_HLT_REQR::_1 => true, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> AXBS_HLT_REQR { match value { false => AXBS_HLT_REQR::_0, true => AXBS_HLT_REQR::_1, } } #[doc = "Checks if the value of the field is `_0`"] #[inline] pub fn is_0(&self) -> bool { *self == AXBS_HLT_REQR::_0 } #[doc = "Checks if the value of the field is `_1`"] #[inline] pub fn is_1(&self) -> bool { *self == AXBS_HLT_REQR::_1 } } #[doc = "Possible values of the field `AXBS_HLTD`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum AXBS_HLTDR { #[doc = "AXBS is not currently halted"] _0, #[doc = "AXBS is currently halted"] _1, } impl AXBS_HLTDR { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { AXBS_HLTDR::_0 => false, AXBS_HLTDR::_1 => true, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> AXBS_HLTDR { match value { false => AXBS_HLTDR::_0, true => AXBS_HLTDR::_1, } } #[doc = "Checks if the value of the field is `_0`"] #[inline] pub fn is_0(&self) -> bool { *self == AXBS_HLTDR::_0 } #[doc = "Checks if the value of the field is `_1`"] #[inline] pub fn is_1(&self) -> bool { 
*self == AXBS_HLTDR::_1 } } #[doc = "Possible values of the field `FMC_PF_IDLE`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum FMC_PF_IDLER { #[doc = "FMC program flash is not idle"] _0, #[doc = "FMC program flash is currently idle"] _1, } impl FMC_PF_IDLER { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { FMC_PF_IDLER::_0 => false, FMC_PF_IDLER::_1 => true, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> FMC_PF_IDLER { match value { false => FMC_PF_IDLER::_0, true => FMC_PF_IDLER::_1, } } #[doc = "Checks if the value of the field is `_0`"] #[inline] pub fn is_0(&self) -> bool { *self == FMC_PF_IDLER::_0 } #[doc = "Checks if the value of the field is `_1`"] #[inline] pub fn is_1(&self) -> bool { *self == FMC_PF_IDLER::_1 } } #[doc = "Possible values of the field `PBRIDGE_IDLE`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum PBRIDGE_IDLER { #[doc = "PBRIDGE is not idle"] _0, #[doc = "PBRIDGE is currently idle"] _1, } impl PBRIDGE_IDLER { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { PBRIDGE_IDLER::_0 => false, PBRIDGE_IDLER::_1 => true, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> PBRIDGE_IDLER { match value { false => PBRIDGE_IDLER::_0, true => PBRIDGE_IDLER::_1, } } #[doc = "Checks if the value of the field is `_0`"] #[inline] pub fn is_0(&self) -> bool { *self == PBRIDGE_IDLER::_0 } #[doc = "Checks if the value of the field is `_1`"] #[inline] pub fn 
is_1(&self) -> bool { *self == PBRIDGE_IDLER::_1 } } #[doc = "Possible values of the field `CBRR`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum CBRRR { #[doc = "Fixed-priority arbitration"] _0, #[doc = "Round-robin arbitration"] _1, } impl CBRRR { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { CBRRR::_0 => false, CBRRR::_1 => true, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> CBRRR { match value { false => CBRRR::_0, true => CBRRR::_1, } } #[doc = "Checks if the value of the field is `_0`"] #[inline] pub fn is_0(&self) -> bool { *self == CBRRR::_0 } #[doc = "Checks if the value of the field is `_1`"] #[inline] pub fn is_1(&self) -> bool { *self == CBRRR::_1 } } #[doc = "Possible values of the field `SRAMUAP`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum SRAMUAPR { #[doc = "Round robin"] _00, #[doc = "Special round robin (favors SRAM backdoor accesses over the processor)"] _01, #[doc = "Fixed priority. Processor has highest, backdoor has lowest"] _10, #[doc = "Fixed priority. 
Backdoor has highest, processor has lowest"] _11, } impl SRAMUAPR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { match *self { SRAMUAPR::_00 => 0, SRAMUAPR::_01 => 1, SRAMUAPR::_10 => 2, SRAMUAPR::_11 => 3, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: u8) -> SRAMUAPR { match value { 0 => SRAMUAPR::_00, 1 => SRAMUAPR::_01, 2 => SRAMUAPR::_10, 3 => SRAMUAPR::_11, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `_00`"] #[inline] pub fn is_00(&self) -> bool { *self == SRAMUAPR::_00 } #[doc = "Checks if the value of the field is `_01`"] #[inline] pub fn is_01(&self) -> bool { *self == SRAMUAPR::_01 } #[doc = "Checks if the value of the field is `_10`"] #[inline] pub fn is_10(&self) -> bool { *self == SRAMUAPR::_10 } #[doc = "Checks if the value of the field is `_11`"] #[inline] pub fn is_11(&self) -> bool { *self == SRAMUAPR::_11 } } #[doc = r" Value of the field"] pub struct SRAMUWPR { bits: bool, } impl SRAMUWPR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = "Possible values of the field `SRAMLAP`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum SRAMLAPR { #[doc = "Round robin"] _00, #[doc = "Special round robin (favors SRAM backdoor accesses over the processor)"] _01, #[doc = "Fixed priority. Processor has highest, backdoor has lowest"] _10, #[doc = "Fixed priority. 
Backdoor has highest, processor has lowest"] _11, } impl SRAMLAPR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { match *self { SRAMLAPR::_00 => 0, SRAMLAPR::_01 => 1, SRAMLAPR::_10 => 2, SRAMLAPR::_11 => 3, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: u8) -> SRAMLAPR { match value { 0 => SRAMLAPR::_00, 1 => SRAMLAPR::_01, 2 => SRAMLAPR::_10, 3 => SRAMLAPR::_11, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `_00`"] #[inline] pub fn is_00(&self) -> bool { *self == SRAMLAPR::_00 } #[doc = "Checks if the value of the field is `_01`"] #[inline] pub fn is_01(&self) -> bool { *self == SRAMLAPR::_01 } #[doc = "Checks if the value of the field is `_10`"] #[inline] pub fn is_10(&self) -> bool { *self == SRAMLAPR::_10 } #[doc = "Checks if the value of the field is `_11`"] #[inline] pub fn is_11(&self) -> bool { *self == SRAMLAPR::_11 } } #[doc = r" Value of the field"] pub struct SRAMLWPR { bits: bool, } impl SRAMLWPR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = "Values that can be written to the field `CBRR`"] pub enum CBRRW { #[doc = "Fixed-priority arbitration"] _0, #[doc = "Round-robin arbitration"] _1, } impl CBRRW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self { CBRRW::_0 => false, CBRRW::_1 => true, } } } #[doc = r" Proxy"] pub struct _CBRRW<'a> { w: &'a mut W, } impl<'a> _CBRRW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: CBRRW) -> &'a mut W { { self.bit(variant._bits()) } } #[doc = "Fixed-priority arbitration"] #[inline] pub fn _0(self) -> &'a mut W { self.variant(CBRRW::_0) } #[doc = "Round-robin arbitration"] 
#[inline] pub fn _1(self) -> &'a mut W { self.variant(CBRRW::_1) } #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 9; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `SRAMUAP`"] pub enum SRAMUAPW { #[doc = "Round robin"] _00, #[doc = "Special round robin (favors SRAM backdoor accesses over the processor)"] _01, #[doc = "Fixed priority. Processor has highest, backdoor has lowest"] _10, #[doc = "Fixed priority. Backdoor has highest, processor has lowest"] _11, } impl SRAMUAPW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> u8 { match *self { SRAMUAPW::_00 => 0, SRAMUAPW::_01 => 1, SRAMUAPW::_10 => 2, SRAMUAPW::_11 => 3, } } } #[doc = r" Proxy"] pub struct _SRAMUAPW<'a> { w: &'a mut W, } impl<'a> _SRAMUAPW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: SRAMUAPW) -> &'a mut W { { self.bits(variant._bits()) } } #[doc = "Round robin"] #[inline] pub fn _00(self) -> &'a mut W { self.variant(SRAMUAPW::_00) } #[doc = "Special round robin (favors SRAM backdoor accesses over the processor)"] #[inline] pub fn _01(self) -> &'a mut W { self.variant(SRAMUAPW::_01) } #[doc = "Fixed priority. Processor has highest, backdoor has lowest"] #[inline] pub fn _10(self) -> &'a mut W { self.variant(SRAMUAPW::_10) } #[doc = "Fixed priority. 
Backdoor has highest, processor has lowest"] #[inline] pub fn _11(self) -> &'a mut W { self.variant(SRAMUAPW::_11) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 3; const OFFSET: u8 = 24; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _SRAMUWPW<'a> { w: &'a mut W, } impl<'a> _SRAMUWPW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 26; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `SRAMLAP`"] pub enum SRAMLAPW { #[doc = "Round robin"] _00, #[doc = "Special round robin (favors SRAM backdoor accesses over the processor)"] _01, #[doc = "Fixed priority. Processor has highest, backdoor has lowest"] _10, #[doc = "Fixed priority. Backdoor has highest, processor has lowest"] _11, } impl SRAMLAPW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> u8 { match *self { SRAMLAPW::_00 => 0, SRAMLAPW::_01 => 1, SRAMLAPW::_10 => 2, SRAMLAPW::_11 => 3, } } } #[doc = r" Proxy"] pub struct _SRAMLAPW<'a> { w: &'a mut W, } impl<'a> _SRAMLAPW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: SRAMLAPW) -> &'a mut W { { self.bits(variant._bits()) } } #[doc = "Round robin"] #[inline] pub fn _00(self) -> &'a mut W { self.variant(SRAMLAPW::_00) } #[doc = "Special round robin (favors SRAM backdoor accesses over the processor)"] #[inline] pub fn _01(self) -> &'a mut W { self.variant(SRAMLAPW::_01) } #[doc = "Fixed priority. 
Processor has highest, backdoor has lowest"] #[inline] pub fn _10(self) -> &'a mut W { self.variant(SRAMLAPW::_10) } #[doc = "Fixed priority. Backdoor has highest, processor has lowest"] #[inline] pub fn _11(self) -> &'a mut W { self.variant(SRAMLAPW::_11) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 3; const OFFSET: u8 = 28; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _SRAMLWPW<'a> { w: &'a mut W, } impl<'a> _SRAMLWPW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 30; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } impl R { #[doc = r" Value of the register as raw bits"] #[inline] pub fn bits(&self) -> u32 { self.bits } #[doc = "Bits 0:1 - AXBS Halt State Machine Status"] #[inline] pub fn hlt_fsm_st(&self) -> HLT_FSM_STR { HLT_FSM_STR::_from({ const MASK: u8 = 3; const OFFSET: u8 = 0; ((self.bits >> OFFSET) & MASK as u32) as u8 }) } #[doc = "Bit 2 - AXBS Halt Request"] #[inline] pub fn axbs_hlt_req(&self) -> AXBS_HLT_REQR { AXBS_HLT_REQR::_from({ const MASK: bool = true; const OFFSET: u8 = 2; ((self.bits >> OFFSET) & MASK as u32) != 0 }) } #[doc = "Bit 3 - AXBS Halted"] #[inline] pub fn axbs_hltd(&self) -> AXBS_HLTDR { AXBS_HLTDR::_from({ const MASK: bool = true; const OFFSET: u8 = 3; ((self.bits >> OFFSET) & MASK as u32) != 0 }) } #[doc = "Bit 4 - Flash Memory Controller Program Flash Idle"] #[inline] pub fn fmc_pf_idle(&self) -> FMC_PF_IDLER { FMC_PF_IDLER::_from({ const MASK: bool = true; const OFFSET: u8 = 4; ((self.bits >> OFFSET) & MASK as u32) != 0 }) } #[doc = "Bit 6 - 
Peripheral Bridge Idle"] #[inline] pub fn pbridge_idle(&self) -> PBRIDGE_IDLER { PBRIDGE_IDLER::_from({ const MASK: bool = true; const OFFSET: u8 = 6; ((self.bits >> OFFSET) & MASK as u32) != 0 }) } #[doc = "Bit 9 - Crossbar Round-robin Arbitration Enable"] #[inline] pub fn cbrr(&self) -> CBRRR { CBRRR::_from({ const MASK: bool = true; const OFFSET: u8 = 9; ((self.bits >> OFFSET) & MASK as u32) != 0 }) } #[doc = "Bits 24:25 - SRAM_U Arbitration Priority"] #[inline] pub fn sramuap(&self) -> SRAMUAPR { SRAMUAPR::_from({ const MASK: u8 = 3; const OFFSET: u8 = 24; ((self.bits >> OFFSET) & MASK as u32) as u8 }) } #[doc = "Bit 26 - SRAM_U Write Protect"] #[inline] pub fn sramuwp(&self) -> SRAMUWPR { let bits = { const MASK: bool = true; const OFFSET: u8 = 26; ((self.bits >> OFFSET) & MASK as u32) != 0 }; SRAMUWPR { bits } } #[doc = "Bits 28:29 - SRAM_L Arbitration Priority"] #[inline] pub fn sramlap(&self) -> SRAMLAPR { SRAMLAPR::_from({ const MASK: u8 = 3; const OFFSET: u8 = 28; ((self.bits >> OFFSET) & MASK as u32) as u8 }) } #[doc = "Bit 30 - SRAM_L Write Protect"] #[inline] pub fn sramlwp(&self) -> SRAMLWPR { let bits = { const MASK: bool = true; const OFFSET: u8 = 30; ((self.bits >> OFFSET) & MASK as u32) != 0 }; SRAMLWPR { bits } } } impl W { #[doc = r" Reset value of the register"] #[inline] pub fn reset_value() -> W { W { bits: 0 } } #[doc = r" Writes raw bits to the register"] #[inline] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } #[doc = "Bit 9 - Crossbar Round-robin Arbitration Enable"] #[inline] pub fn cbrr(&mut self) -> _CBRRW { _CBRRW { w: self } } #[doc = "Bits 24:25 - SRAM_U Arbitration Priority"] #[inline] pub fn sramuap(&mut self) -> _SRAMUAPW { _SRAMUAPW { w: self } } #[doc = "Bit 26 - SRAM_U Write Protect"] #[inline] pub fn sramuwp(&mut self) -> _SRAMUWPW { _SRAMUWPW { w: self } } #[doc = "Bits 28:29 - SRAM_L Arbitration Priority"] #[inline] pub fn sramlap(&mut self) -> _SRAMLAPW { _SRAMLAPW { w: self } } #[doc = 
"Bit 30 - SRAM_L Write Protect"] #[inline] pub fn sramlwp(&mut self) -> _SRAMLWPW { _SRAMLWPW { w: self } } }
true
a540f98ec4812de10331df8b56a4642d9a28e50d
Rust
zhangjingqiang/os-ht-generator
/src/main.rs
UTF-8
1,824
2.6875
3
[ "MIT" ]
permissive
extern crate csv; use std::error::Error; use std::io; use std::process; use std::fs::File; const DOC: &str = r#" parameters: key_name: "user" image: "image" flavor: "flavor" security_groups: [ "default" ] volume_size: 100 volume_type: "SSD" vm_name: "vm" domain: "domain" az: "az" network: - network: "network" vnic_type: "direct" fixed_ip: - "IP" "#; fn make_template() -> Result<(), Box<dyn Error>> { let mut rdr = csv::Reader::from_reader(io::stdin()); for result in rdr.records() { let record = result?; let image = &record[1]; let flavor = &record[2]; let security_groups = &record[3]; let volume_size = &record[4]; let vm_name = &record[0]; let domain = &record[5]; let az = &record[6]; let network = &record[7]; let fixed_ip = &record[8]; let mut value: serde_yaml::Value = serde_yaml::from_str(DOC).unwrap(); value["parameters"]["image"] = image.into(); value["parameters"]["flavor"] = flavor.into(); value["parameters"]["security_groups"] = security_groups.into(); value["parameters"]["volume_size"] = volume_size.into(); value["parameters"]["vm_name"] = vm_name.into(); value["parameters"]["domain"] = domain.into(); value["parameters"]["az"] = az.into(); value["parameters"]["network"][0]["network"] = network.into(); value["parameters"]["network"][0]["fixed_ip"][0] = fixed_ip.into(); let file = File::create(["./heat_templates/", "env_", vm_name, ".yaml"].concat()).unwrap(); serde_yaml::to_writer(file, &value).unwrap(); } Ok(()) } fn main() { if let Err(err) = make_template() { println!("error running generator: {}", err); } process::exit(1); }
true
37f0b10e33a45004759e1f784b72bbb3bed9e57e
Rust
astro/rust-lpc43xx
/src/mcpwm/inten_clr/mod.rs
UTF-8
8,852
2.609375
3
[ "Apache-2.0" ]
permissive
#[doc = r" Value to write to the register"] pub struct W { bits: u32, } impl super::INTEN_CLR { #[doc = r" Writes to the register"] #[inline] pub fn write<F>(&self, f: F) where F: FnOnce(&mut W) -> &mut W, { let mut w = W::reset_value(); f(&mut w); self.register.set(w.bits); } } #[doc = r" Proxy"] pub struct _ILIM0_CLRW<'a> { w: &'a mut W, } impl<'a> _ILIM0_CLRW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 0; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _IMAT0_CLRW<'a> { w: &'a mut W, } impl<'a> _IMAT0_CLRW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 1; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _ICAP0_CLRW<'a> { w: &'a mut W, } impl<'a> _ICAP0_CLRW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 2; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _ILIM1_CLRW<'a> { w: &'a mut W, } impl<'a> _ILIM1_CLRW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } 
#[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 4; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _IMAT1_CLRW<'a> { w: &'a mut W, } impl<'a> _IMAT1_CLRW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 5; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _ICAP1_CLRW<'a> { w: &'a mut W, } impl<'a> _ICAP1_CLRW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 6; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _ILIM2_CLRW<'a> { w: &'a mut W, } impl<'a> _ILIM2_CLRW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 8; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _IMAT2_CLRW<'a> { w: &'a mut W, } impl<'a> _IMAT2_CLRW<'a> { #[doc = r" Sets the field 
bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 9; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _ICAP2_CLRW<'a> { w: &'a mut W, } impl<'a> _ICAP2_CLRW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 10; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _ABORT_CLRW<'a> { w: &'a mut W, } impl<'a> _ABORT_CLRW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 15; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } impl W { #[doc = r" Reset value of the register"] #[inline] pub fn reset_value() -> W { W { bits: 0 } } #[doc = r" Writes raw bits to the register"] #[inline] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } #[doc = "Bit 0 - Writing a one clears the corresponding bit in INTEN, thus disabling the interrupt."] #[inline] pub fn ilim0_clr(&mut self) -> _ILIM0_CLRW { _ILIM0_CLRW { w: self } } #[doc = "Bit 1 - Writing a one clears the corresponding bit in INTEN, thus disabling the interrupt."] #[inline] pub fn imat0_clr(&mut self) -> 
_IMAT0_CLRW { _IMAT0_CLRW { w: self } } #[doc = "Bit 2 - Writing a one clears the corresponding bit in INTEN, thus disabling the interrupt."] #[inline] pub fn icap0_clr(&mut self) -> _ICAP0_CLRW { _ICAP0_CLRW { w: self } } #[doc = "Bit 4 - Writing a one clears the corresponding bit in INTEN, thus disabling the interrupt."] #[inline] pub fn ilim1_clr(&mut self) -> _ILIM1_CLRW { _ILIM1_CLRW { w: self } } #[doc = "Bit 5 - Writing a one clears the corresponding bit in INTEN, thus disabling the interrupt."] #[inline] pub fn imat1_clr(&mut self) -> _IMAT1_CLRW { _IMAT1_CLRW { w: self } } #[doc = "Bit 6 - Writing a one clears the corresponding bit in INTEN, thus disabling the interrupt."] #[inline] pub fn icap1_clr(&mut self) -> _ICAP1_CLRW { _ICAP1_CLRW { w: self } } #[doc = "Bit 8 - Writing a one clears the corresponding bit in INTEN, thus disabling the interrupt."] #[inline] pub fn ilim2_clr(&mut self) -> _ILIM2_CLRW { _ILIM2_CLRW { w: self } } #[doc = "Bit 9 - Writing a one clears the corresponding bit in INTEN, thus disabling the interrupt."] #[inline] pub fn imat2_clr(&mut self) -> _IMAT2_CLRW { _IMAT2_CLRW { w: self } } #[doc = "Bit 10 - Writing a one clears the corresponding bit in INTEN, thus disabling the interrupt."] #[inline] pub fn icap2_clr(&mut self) -> _ICAP2_CLRW { _ICAP2_CLRW { w: self } } #[doc = "Bit 15 - Writing a one clears the corresponding bit in INTEN, thus disabling the interrupt."] #[inline] pub fn abort_clr(&mut self) -> _ABORT_CLRW { _ABORT_CLRW { w: self } } }
true
1d11dd0f65dbc05409c1f2256ba68575fce3d0e8
Rust
hacspec/hacspec
/lib/src/math_util/ct_util.rs
UTF-8
2,768
3.453125
3
[ "MIT", "Apache-2.0" ]
permissive
use crate::prelude::*; /// Conditional, constant-time swapping. /// Returns `(x, y)` if `c == 0` and `(y, x)` if `c == 1`. #[inline] #[cfg_attr(feature = "use_attributes", not_hacspec)] pub fn cswap_bit<T: Integer + Copy>(x: T, y: T, c: T) -> (T, T) { cswap(x, y, T::default().wrap_sub(c)) } /// Conditional, constant-time swapping. /// Returns `(x, y)` if `c == 0` and `(y, x)` if `c == T::max`. /// The return value is undefined if `c` has any other value. #[inline] #[cfg_attr(feature = "use_attributes", not_hacspec)] pub fn cswap<T: Integer + Copy>(x: T, y: T, c: T) -> (T, T) { let mask = c & (x ^ y); (x ^ mask, y ^ mask) } /// Set bit at position `i` in `x` to `b` if `c` is all 1 and return the restult. /// Returns `x` if `c` is `0`. #[inline] #[cfg_attr(feature = "use_attributes", not_hacspec)] pub fn cset_bit<T: Integer + Copy>(x: T, b: T, i: usize, c: T) -> T { let set = x.set_bit(b, i); let (out, _) = cswap(x, set, c); out } /// Add two numerics if condition `c` is set (all bits 1). /// Returns `x` if condition `c` is `0`. /// Note: Addition is always wrapping. #[inline] #[cfg_attr(feature = "use_attributes", not_hacspec)] pub fn cadd<T: Integer + Copy>(x: T, y: T, c: T) -> T { let sum = x.wrap_add(y); let (x, _) = cswap(x, sum, c); x } /// Subtract two numerics if condition `c` is set (all bits 1). /// Returns `x` if condition `c` is `0`. /// Note: Addition is always wrapping. #[inline] #[cfg_attr(feature = "use_attributes", not_hacspec)] pub fn csub<T: Integer + Copy>(x: T, y: T, c: T) -> T { let diff = x.wrap_sub(y); let (x, _) = cswap(x, diff, c); x } /// Multiply two numerics if condition `c` is set (all bits 1). /// Returns `x` if condition `c` is `0`. /// Note: Multiplication is always wrapping. #[inline] #[cfg_attr(feature = "use_attributes", not_hacspec)] pub fn cmul<T: Integer + Copy>(x: T, y: T, c: T) -> T { let prod = x.wrap_mul(y); let (x, _) = cswap(x, prod, c); x } /// Constant time division for Numerics. 
/// Note that this function is only constant time if `T` is a secret integer and /// hence provides constant time implementations for the used functions. #[inline] #[cfg_attr(feature = "use_attributes", not_hacspec)] pub fn ct_div<T: Integer + Copy>(a: T, d: T) -> (T, T) { let mut q = T::default(); let mut r = T::default(); for i in (0..T::NUM_BITS).rev() { r = r << 1; r = r.set(0, a, i); // The code below is equivalent to the following. // if r.greater_than_or_equal(d) { // r = r - d; // q = q.set_bit(T::ONE(), i); // } let geq = r.greater_than_or_equal_bm(d); r = csub(r, d, geq); q = cset_bit(q, T::ONE(), i, geq); } (q, r) }
true
7cc8d493470baaa52a47fdbaf20ccb302489412e
Rust
juggernaut0/aoc2019
/src/day20.rs
UTF-8
8,698
2.984375
3
[]
no_license
use std::collections::{HashMap, VecDeque}; pub fn run1(input: Vec<String>) -> u32 { let map = Map::from_input(input); pathfind(&map.chars, &map.outside_portals, &map.inside_portals) } pub fn run2(input: Vec<String>) -> u32 { let map = Map::from_input(input); pathfind2(&map.chars, &map.outside_portals, &map.inside_portals) } struct Map { chars: HashMap<(usize, usize), char>, outside_portals: HashMap<(usize, usize), String>, inside_portals: HashMap<(usize, usize), String>, } impl Map { fn from_input(input: Vec<String>) -> Map { let width = input[0].len() - 4; let height = input.len() - 4; let thicc = input.iter() .skip(2) .enumerate() .find(|(_, row)| { let l = row.len(); row[2..l-2].contains(' ') }) .map(|(i, _)| i) .expect("Expected a donut"); let mut chars = HashMap::new(); for (y, row) in input.iter().enumerate() { for (x, c) in row.chars().enumerate() { chars.insert((x, y), c); } } let mut outside_portals = HashMap::new(); let mut inside_portals = HashMap::new(); // top outside for x in 2..2+width { let y = 2; if chars[&(x, y)] == '.' { let mut s = String::new(); s.push(chars[&(x, y-2)]); s.push(chars[&(x, y-1)]); outside_portals.insert((x, y), s); } } // bottom outside for x in 2..2+width { let y = height+1; if chars[&(x, y)] == '.' { let mut s = String::new(); s.push(chars[&(x, y+1)]); s.push(chars[&(x, y+2)]); outside_portals.insert((x, y), s); } } // top inside for x in 2+thicc..2+width-thicc { let y = thicc+1; if chars[&(x, y)] == '.' { let mut s = String::new(); s.push(chars[&(x, y+1)]); s.push(chars[&(x, y+2)]); inside_portals.insert((x, y), s); } } // bottom inside for x in 2+thicc..2+width-thicc { let y = height-thicc+2; if chars[&(x, y)] == '.' { let mut s = String::new(); s.push(chars[&(x, y-2)]); s.push(chars[&(x, y-1)]); inside_portals.insert((x, y), s); } } // left outside for y in 2..2+height { let x = 2; if chars[&(x, y)] == '.' 
{ let mut s = String::new(); s.push(chars[&(x-2, y)]); s.push(chars[&(x-1, y)]); outside_portals.insert((x, y), s); } } // right outside for y in 2..2+height { let x = width+1; if chars[&(x, y)] == '.' { let mut s = String::new(); s.push(chars[&(x+1, y)]); s.push(chars[&(x+2, y)]); outside_portals.insert((x, y), s); } } // left inside for y in 2+thicc..2+height-thicc { let x = thicc+1; if chars[&(x, y)] == '.' { let mut s = String::new(); s.push(chars[&(x+1, y)]); s.push(chars[&(x+2, y)]); inside_portals.insert((x, y), s); } } // right inside for y in 2+thicc..2+height-thicc { let x = width-thicc+2; if chars[&(x, y)] == '.' { let mut s = String::new(); s.push(chars[&(x-2, y)]); s.push(chars[&(x-1, y)]); inside_portals.insert((x, y), s); } } assert_eq!(outside_portals.len(), inside_portals.len() + 2); Map { chars, outside_portals, inside_portals } } } fn pathfind( chars: &HashMap<(usize, usize), char>, outside_portals: &HashMap<(usize, usize), String>, inside_portals: &HashMap<(usize, usize), String>, ) -> u32 { let start = outside_portals.iter().find(|it| it.1 == "AA").map(|(&pos, _)| pos).unwrap(); let mut queue = VecDeque::new(); queue.push_back(start); let mut costs = HashMap::new(); costs.insert(start, 0); /*let mut outside_d = HashMap::new(); // dist from start to outside portal let mut inside_d = HashMap::new();*/ while let Some(pos) = queue.pop_front() { let cost = costs[&pos]; if let Some(p) = inside_portals.get(&pos) { let (adj, _) = outside_portals.iter() .find(|(_, op)| *op == p) .unwrap(); let new_cost = cost + 1; let old_cost = costs.get(adj).copied().unwrap_or(999999999); if new_cost < old_cost { queue.push_back(*adj); costs.insert(*adj, new_cost); } } else if let Some(p) = outside_portals.get(&pos) { if p != "ZZ" && p != "AA" { let (adj, _) = inside_portals.iter() .find(|(_, op)| *op == p) .expect("Expected to find a matching inside portal"); let new_cost = cost + 1; let old_cost = costs.get(adj).copied().unwrap_or(999999999); if new_cost < old_cost 
{ queue.push_back(*adj); costs.insert(*adj, new_cost); } } } for adj in adj(pos).iter() { let new_cost = cost + 1; let old_cost = costs.get(adj).copied().unwrap_or(999999999); let can_move = chars[adj] == '.'; if can_move && new_cost < old_cost { queue.push_back(*adj); costs.insert(*adj, new_cost); } } } let portal_costs: HashMap<_, _> = outside_portals.iter() .map(|(pos, name)| (name, costs.get(pos))) .collect(); log::info!("{:#?}", portal_costs); outside_portals.iter() .find(|it| it.1 == "ZZ") .map(|(zz_pos, _)| costs[zz_pos]) .expect("Epected to find ZZ") } fn pathfind2( chars: &HashMap<(usize, usize), char>, outside_portals: &HashMap<(usize, usize), String>, inside_portals: &HashMap<(usize, usize), String>, ) -> u32 { let start = outside_portals.iter().find(|it| it.1 == "AA").map(|(&pos, _)| pos).unwrap(); let goal = outside_portals.iter() .find(|it| it.1 == "ZZ") .map(|(&zz_pos, _)| zz_pos) .expect("Epected to find ZZ"); let mut queue = VecDeque::new(); queue.push_back((start, 0)); let mut costs = HashMap::new(); costs.insert((start, 0), 0); while let Some(current) = queue.pop_front() { let cost = costs[&current]; if current == (goal, 0) { return cost; } let (pos, layer) = current; if let Some(p) = inside_portals.get(&pos) { let (&dest, _) = outside_portals.iter() .find(|(_, op)| *op == p) .unwrap(); let adj = (dest, layer + 1); let new_cost = cost + 1; let old_cost = costs.get(&adj).copied().unwrap_or(999999999); if new_cost < old_cost { queue.push_back(adj); costs.insert(adj, new_cost); } } else if let Some(p) = outside_portals.get(&pos) { if layer > 0 && p != "ZZ" && p != "AA" { let (&dest, _) = inside_portals.iter() .find(|(_, op)| *op == p) .expect("Expected to find a matching inside portal"); let adj = (dest, layer - 1); let new_cost = cost + 1; let old_cost = costs.get(&adj).copied().unwrap_or(999999999); if new_cost < old_cost { queue.push_back(adj); costs.insert(adj, new_cost); } } } for &dest in adj(pos).iter() { let adj = (dest, layer); let new_cost 
= cost + 1; let old_cost = costs.get(&adj).copied().unwrap_or(999999999); let can_move = chars[&dest] == '.'; if can_move && new_cost < old_cost { queue.push_back(adj); costs.insert(adj, new_cost); } } } unreachable!("Ran out of places to go") } fn adj((x, y): (usize, usize)) -> [(usize, usize);4] { [ (x + 1, y), (x, y + 1), (x - 1, y), (x, y - 1), ] }
true
ef2eca7a786fc403d4a89b0c9d44e6cd310ea96e
Rust
obeah/sheeit
/sheeit/tests/concurrency_test.rs
UTF-8
7,581
2.703125
3
[ "MIT" ]
permissive
mod util; use core::cmp; use prettytable; use prettytable::{Row, Table}; use rand; use rand::Rng; use sheeit::storage::Storage; use sheeit::storage::{Cell, CoreDocument, Value}; use std::sync::Arc; use std::thread; use std::time::{Duration, Instant}; use util::multi_value::MultiValue; use uuid::Uuid; #[test] fn test_multiple_readers_and_writers() { let storage = Arc::new(Storage::obtain()); let handles: Vec<_> = (0..4) .map(|_i| { let storage_clone = Arc::clone(&storage); thread::spawn(move || { // println!("Generating from index: {}", i); storage_clone.add_ledger() }) }) .collect(); let document_uuids: Arc<Vec<Uuid>> = Arc::new( handles .into_iter() .map(|handle| handle.join().unwrap()) .collect(), ); let write_handles: Vec<_> = (0..4) .map(|_i| { let storage_clone = Arc::clone(&storage); let document_uuids_clone = Arc::clone(&document_uuids); thread::spawn(move || { let start = Instant::now(); loop { let random_index = rand::thread_rng().gen_range(0, document_uuids_clone.len()); let uuid = document_uuids_clone.get(random_index).unwrap(); let (_, ()) = storage_clone .transact_write(*uuid, |document| { if document.sheets().len() == 0 { document.add_sheet(); } let sheet = document.sheet_at(0).unwrap(); let column_index = if sheet.columns().len() == 0 || rand::thread_rng().gen_range(0, 2) == 1 { document.add_column(0).unwrap(); 0 } else { rand::thread_rng().gen_range(0, sheet.columns().len()) }; let rows_to_write = 0..rand::thread_rng().gen_range(0, 10); // println!("Will write {} rows", rows_to_write.len()); for _ in rows_to_write { let mut multi_val = MultiValue::new(); multi_val.add_val(Value::Integer( rand::thread_rng().gen_range(100, 1000), )); multi_val.add_val(Value::Integer( rand::thread_rng().gen_range(100, 1000), )); let value = Value::Custom(Box::new(multi_val)); // println!("Column value before adding:"); // for cell in column.cells() { //// println!("Cell: {}", cell); //// } // println!("Adding value: {}", value); document .add_cells_at_column( 0, 
column_index, vec![Cell::with_value(value, 0)], ) .ok() .unwrap(); // println!("Column value after adding:"); // for cell in column.cells() { // println!("Cell: {}", cell); // } } () }) .unwrap(); // println!("Thread {} is done writing", i); let now = Instant::now(); if now - start > Duration::from_millis(100) { // println!("=== BEGIN Document FROM THREAD: {}, with UUID: {} ===", i, uuid); // print_document(document); // println!("=== END Document FROM TRHEAD: {}, with UUID: {} ===", i, uuid); break; } } }) }) .collect(); let read_handles: Vec<_> = (0..4) .map(|i| { let storage_clone = Arc::clone(&storage); let document_uuids_clone = Arc::clone(&document_uuids); thread::spawn(move || { thread::sleep(Duration::from_millis(100)); let random_index = rand::thread_rng().gen_range(0, document_uuids_clone.len()); let uuid = document_uuids_clone.get(random_index).unwrap(); let document = storage_clone.read(&uuid).unwrap(); let start = Instant::now(); loop { print_document_stat(i, &document, false); print_document_stat(i, &storage_clone.read(&uuid).unwrap(), true); thread::sleep(Duration::from_millis(10)); let now = Instant::now(); if now - start > Duration::from_millis(200) { break; } } }) }) .collect(); for handle in write_handles { handle.join().unwrap(); } for handle in read_handles { handle.join().unwrap(); } // for uuid in document_uuids.iter() { // let document = storage.read(&uuid).unwrap(); // print_document(document); // } } fn print_document_stat(_i: i32, document: &CoreDocument, _is_new: bool) { let sheets = document.sheets(); let mut result = String::new(); let sheet = sheets.get(0).expect("No sheets!"); let columns = sheet.columns(); let first_column = columns.get(0).unwrap(); for cell in first_column.cells() { result = result + &format!("FirstColCellVal: {}\n", cell); } // // print!( // "{}: Thread no.: {}\n{}", // if is_new { "NEW" } else { "STALE " }, // i, // result // ); } #[allow(dead_code)] fn print_document(document: CoreDocument) { for sheet in 
document.sheets() { let mut table = Table::new(); let max_rows = sheet .columns() .iter() .fold(0, |acc, col| cmp::max(acc, col.cells().len())); for i in 0..max_rows { let mut cells = Vec::new(); for column in sheet.columns() { cells.push( column .cells() .get(i) .map(|cell| prettytable::Cell::new(&format!("{}", cell))) .unwrap_or(prettytable::Cell::new("")), ) } table.add_row(Row::new(cells)); } table.printstd(); } }
true
4238a19e0f097d5b0ae522d8bb30324da41ce1dc
Rust
semargal/num-to-words
/src/test_utils.rs
UTF-8
226
2.6875
3
[ "Apache-2.0" ]
permissive
use crate::types::*; pub struct InOut(pub Int, pub StaticStr); pub fn test_set(f: &dyn Fn(Int) -> Result<String>, data: &[InOut]) { for sample in data.iter() { assert_eq!(f(sample.0).unwrap(), sample.1); } }
true
2f7803eef306f205b04f04171bb6f30d39e124c1
Rust
PaigeDavid/CSIS616
/project1/src/main.rs
UTF-8
13,345
3.40625
3
[]
no_license
//! CSIS-616 - Program #3
//!
//! Some parts were originally made by: Ralph W. Crosby PhD.
//! Edited and added to by: Paige Peck
//!
//!
//! Process a yaml format deterministic finite automaton producing
//! - A textual representation of the internal state graph
//! - A Graphviz `.dot` file representing the graph
//!
//! # Usage
//!
//! ```
//! cargo run regex
//! ```
//! where: `regex` is a series of symbols that will generate a DFA and decide if input
//! is accepted or rejected by the regex
//!
//! # Output
//!
//! To `stderr`: Debug display of the internal graph structure
//!
//! To `stdout`: Graphviz definitions of the graph structure

use std::io;
use std::io::prelude::*;
use std::io::Write;

// *********************************************************************
/// # Deterministic Finite Automata Structure
struct DFA {
    /// The set of characters comprising the alphabet
    alphabet: Vec<char>,

    /// State number (1 relative) for the start state
    start: usize,

    /// Set of accept states (1 relative)
    accept: Vec<usize>, //will need to be Vec<usize> when multiple accept states are implemented

    /// Matrix of transitions, rows are states, columns characters in the alphabet
    transitions: Vec<Vec<usize>>,
}

//State based representation of the DFA version of the RegEx
struct StateGraph {
    /// The set of characters comprising the alphabet
    alphabet: Vec<char>,

    /// State number for the start state
    start_state: usize,

    /// Vector of state objects
    states: Vec<Box<State>>
}

//Definition of a single state
struct State {
    //Is this an accept state
    accept_state: bool,

    //Set of transitions (one target state per alphabet symbol, 0-relative)
    transitions: Vec<usize>
}

//Records a character together with the state its transition leads to.
//Used to remember "cycle back" edges so later rows can be patched to them.
struct Transitions {
    chars: char,
    state: usize
}

fn main() {
    //Get and validate the RegEx on the command line
    let regex = get_regex(std::env::args());

    let dfa = DFA::new_from_regex(&regex);

    //Create the dfa structure based on in RegEx entered from the command line
    let state_graph = StateGraph::new_from_dfa(&dfa);

    //eprintln!("{:?}", state_graph);

    state_graph.write_graphviz();

    // Process through the input until end of file (cntl-z) is encountered
    state_graph.process();
}

// *********************************************************************
/// Return the RegEx passed as the first parameter
///
/// Exits the process with status 1 (and a usage message) when the argument
/// count is not exactly one.
fn get_regex(args: std::env::Args) -> String {

    // Get the arguments as a vector
    let args: Vec<String> = args.collect();

    // Make sure only one argument was passed
    if args.len() != 2 {
        writeln!(std::io::stderr(), "Usage: cargo run 'regex'")
            .unwrap();
        std::process::exit(1);
    }

    args[1].to_string()
}

// *********************************************************************
/// Implement the methods of the DFA structure
impl DFA {

    //Create and return a DFA on the heap
    //Generate the DFA from the given regex
    //
    //States are 1-relative: state 1 is the start state and the accept state is
    //`len(regex without operators) + 1`. The transition matrix is built one row
    //per consumed word character; the `|`, `+` and `*` branches below patch
    //previously pushed rows. NOTE(review): the author's own comments flag the
    //multi-operator cases as only partially working — treat this builder as
    //best-effort, not a general regex engine.
    fn new_from_regex(regex: &str) -> Box<DFA> {

        //Setup the regex as the language / alphabet of the dfa
        //Remove any duplicate word characters
        let mut l = regex.replace("|", "");
        l = l.replace("+", "");
        l = l.replace("*", "");

        //Creates a language Vec<char> without the operators in it and pushing the sigma symbol for alphabet purposes
        let mut language: Vec<char> = l.chars().collect();
        language.sort();
        language.dedup();
        language.push('Σ');
        let final_state = l.len()+1;

        //Create a near blank dfa object, with 1 being start state, accept state being the final state
        // which is calculated based on the length of the regex length + 1
        let mut dfa = Box::new(DFA{alphabet: language,
                                    start: 1,
                                    accept: [final_state].to_vec(),
                                    transitions: vec![]
                                });

        //Set current and next state to traverse through the graph as we create the transition matrix.
        let mut current_state = 1;
        let mut next_state = 2;

        //Create the Transitions Struct to save any transitions characters. These are characters that would
        // need to be cycled back to. First character and second state will always start this off.
        let mut transitions: Vec<Transitions> = Vec::new();
        let t = Transitions{chars: regex.chars().next().unwrap(), state: 2};
        transitions.push(t);

        //Create a previous_char character for | and * operators
        let mut previous_char = regex.chars().next().unwrap();

        //Traverse through the regex string, reading characters and deciding what to do depending on the character.
        for c in regex.chars() {
            let mut states: Vec<usize> = Vec::new();

            //Checks if previous char was a | operator.
            //If so, save the current character as a transition or cycle character
            //Also fixes any previous transition state
            if previous_char == '|' {
                for (n, a) in dfa.alphabet.iter().enumerate() {
                    if *a == c {
                        dfa.transitions[0][n] = next_state;
                    }
                }
                let j = Transitions{chars: c, state: next_state};
                transitions.push(j);
            }

            //Same as above, just with the * operator.
            if previous_char == '*' {
                let j = Transitions{chars: c, state: next_state};
                transitions.push(j);
            }

            //Operator '|': Implemented - single and multiple | operators are working
            //Multiple types of symbols are untested and could produce varying results
            //Checks if character is | operator. If so, save the final state as an accept state, reset
            //current state back to 1, and set previous_char as |
            if c == '|' {
                let final_bar_state = dfa.transitions.len()+1;
                let mut final_bar_state_count: Vec<usize> = Vec::new();
                dfa.accept.push(final_bar_state);
                for _a in dfa.alphabet.iter() {
                    final_bar_state_count.push(final_bar_state);
                }
                dfa.transitions.push(final_bar_state_count);
                current_state = 1;
                previous_char = '|';
            }

            //Operator '+': Implemented - single works, multiple is funky, almost working
            //Removes the previous transition matrix to remake it with updated states
            //Fix to the multiple + operators I believe is using a for loop to go through the entire transitions vec
            // but I have ran out of time to get that working.
            else if c == '+' {
                dfa.transitions.remove(dfa.transitions.len()-1);
                next_state -= 1;
                current_state -= 1;
                for a in dfa.alphabet.iter() {
                    if a == &previous_char {
                        //self-loop on the repeated character
                        states.push(next_state);
                    }
                    else {
                        if *a == transitions[0].chars {
                            states.push(transitions[0].state);
                        }
                        else {
                            states.push(1);
                        }
                    }
                }
                dfa.transitions.push(states);
                next_state += 1;
                current_state += 1;
            }

            //Operator '*': Implemented - Single and multiple * operators are working. Something funky happens with the more characters
            // added into the regex, especially after a *. Not time to check it. Very close to getting this part fixed, most of it works
            //Similar to + operator, remove previous transition to replace it with new one.
            // Step back 2 states for next and current to allow for proper transition. Push necessary states.
            // Potential fix is similar to + operator with iterating over transitions instead of just checking index 0.
            //At the end, add 2 to current state to get back, and set previous_char as *
            else if c == '*' {
                dfa.transitions.remove(dfa.transitions.len()-1);
                let mut pushed_forward = false;
                next_state -= 2;
                current_state -= 2;
                for a in dfa.alphabet.iter() {
                    if a == &previous_char {
                        next_state += 1;
                        states.push(next_state);
                    }
                    else if *a == 'Σ' {
                        states.push(1);
                    }
                    else {
                        if *a == transitions[0].chars {
                            states.push(transitions[0].state);
                        }
                        else if !pushed_forward {
                            next_state += 1;
                            states.push(next_state);
                            pushed_forward = true;
                        }
                        else {
                            states.push(1);
                        }
                    }
                }
                dfa.transitions.push(states);
                current_state += 2;
                previous_char = '*';
            }

            //All word character symbols: Implemented
            //Allows for any character that is in the language to be added in, checks if there is a transition/cycle
            //to be made, set the state as that before pushing. If it is not a transition, push to state 1
            //if sigma symbol, push to state 1
            else if c != 'Σ' {
                for a in dfa.alphabet.iter() {
                    let mut was_transition = false;
                    if c == *a {
                        states.push(next_state);
                    }
                    else {
                        for i in 0..transitions.len() {
                            if *a == transitions[i].chars {
                                states.push(transitions[i].state);
                                was_transition = true;
                            }
                        }
                        if was_transition == false {
                            if previous_char == '*' && *a != 'Σ' {
                                states.push(1);
                                previous_char = c;
                            }
                            else {
                                states.push(1);
                            }
                        }
                    }
                }
                if previous_char != '|' {
                    dfa.transitions.push(states);
                }
                next_state += 1;
                current_state += 1;
                previous_char = c;
            }
        }

        //Go back through and fix any transitions that weren't marked properly
        // (i.e. | transitions to state 2 from state 4 if applicable)
        for i in 0..dfa.transitions.len() {
            for n in 0..dfa.transitions[i].len() {
                if n < dfa.transitions[i].len() - 1 && dfa.transitions[i][n] == 1 {
                    for c in 0..transitions.len() {
                        if dfa.alphabet[n] == transitions[c].chars {
                            dfa.transitions[i][n] = transitions[c].state;
                        }
                    }
                }
            }
        }

        //Set final state as a cycle for transition matrix. If 3 states, push [3,3,3]
        let mut final_state_count: Vec<usize> = Vec::new();
        for _alphabet in dfa.alphabet.iter() {
            final_state_count.push(final_state);
        }
        dfa.transitions.push(final_state_count);

        dfa
    }
}

// *********************************************************************
// Implement the methods of the StateGraph structure
// NOTE(review): the empty `<>` on the next line is accepted by rustc but is
// unnecessary; StateGraph takes no generic parameters.
impl StateGraph<> {

    /// Create a state graph from a DFA structure
    ///
    /// Converts the DFA's 1-relative state numbers to 0-relative indices.
    fn new_from_dfa(dfa: &DFA) -> Box<StateGraph> {

        // Create an empty graph object
        let mut graph = Box::new(StateGraph{alphabet: dfa.alphabet.clone(),
                                            start_state: dfa.start - 1,
                                            states: vec!()
                                        });

        // Look through the transition table building state objects
        for row in dfa.transitions.iter() {
            let mut v = Box::new(State{accept_state: false, transitions: vec!()});
            for col in row {
                v.transitions.push(col-1);
            }
            graph.states.push(v);
        }

        // Set the accept states
        for astate in dfa.accept.iter() {
            graph.states[*astate - 1].accept_state = true;
        }

        graph
    }

    /// Execute the graph on a sentence
    /// Return Err if a character not in the alphabet is encountered
    /// Return Ok and a bool indicating accept (true) or reject (false)
    ///
    /// Also prints every δ(state, char) step taken to stdout.
    fn test_sentence(&self, sentence: &str) -> Result<bool, String> {
        let mut state = self.start_state;

        //Full alphabet to test against for sigma character
        let full_alphabet: Vec<char> = "abcdefghijklmnopqrstuvwxyz0123456789 ".chars().collect();
        for ch in sentence.chars() {

            //Check if character is a word character. Accept it if it is and change it to the 'Σ' symbol for matching purposes
            let mut c = ch;
            if !self.alphabet.contains(&c) && full_alphabet.contains(&c) {
                c = 'Σ';
            }

            let state_no = match self.alphabet.iter().position(|v| *v == ch || *v == c) {
                Some(t) => t,
                None => return Err(format!("Character <{}> does not have a transition", ch))
            };

            print!("δ(q{}, {}) → ", state+1, ch);
            state = self.states[state].transitions[state_no];
            println!("(q{})", state+1);
        }

        Ok(self.states[state].accept_state)
    }

    /// Emit the graph as a Graphviz `digraph` on stdout (start marker,
    /// double circles for accept states, one labelled edge per symbol).
    fn write_graphviz(&self) {

        println!("digraph {{");
        println!("\trankdir=LR;");
        println!("\tnode [shape=point]; start;");

        for (n, state) in self.states.iter().enumerate() {
            if state.accept_state {
                println!("\tnode [shape=doublecircle]; q{};", n+1);
            }
        }

        println!("\tnode [shape=circle];");
        println!("\tstart -> q{}", self.start_state+1);

        for (n, state) in self.states.iter().enumerate() {
            for (i, ch) in self.alphabet.iter().enumerate() {
                println!("\tq{} -> q{} [label=\"{}\"]", n+1, state.transitions[i] + 1, ch);
            }
        }

        println!("}}");
    }

    /// Read sentences from stdin, one per line, until EOF and print
    /// Accept/Reject (or an error) for each.
    fn process(&self) {
        let stdin = io::stdin();
        for line in stdin.lock().lines() {

            // Get the line out of the Result, should never error
            let sentence = &line.unwrap();
            println!("Processing sentence <{}>", sentence);

            match self.test_sentence(sentence) {
                Ok(b) => println!("{}", if b {"Accept"} else {"Reject"}),
                Err(s) => println!("Error processing sentence: {}", s)
            }
        }
    }
}

#[cfg(test)]
mod test {
    use super::*;

    //This test is used to make sure that it creates a graphviz file
    #[test]
    fn test1() {
        let dfa = DFA::new_from_regex("a*b");

        //Create the dfa structure based on in RegEx entered from the command line
        let state_graph = StateGraph::new_from_dfa(&dfa);

        state_graph.write_graphviz();
    }
}
true
d580d7d567ad69b5963db406b2af014ce3bb801c
Rust
qeedquan/challenges
/codegolf/rotate-cartesian-coordinates.rs
UTF-8
949
3.78125
4
[ "MIT" ]
permissive
/*

Write a program rotates some Cartesian coordinates through an angle about the origin (0.0,0.0).

The angle and coordinates will be read from a single line of stdin in the following format:

angle x1,y1 x2,y2 x3,y3 ...

eg.

3.14159265358979 1.0,0.0 0.0,1.0 1.0,1.0 0.0,0.0

The results should be printed to stdout in the following format:

x1',y1' x2',y2' x3',y3' ...

eg.

-1.0,-3.23108510433268e-15 -3.23108510433268e-15,-1.0 -1.0,-1.0 -0.0,-0.0

*/

use std::io;

/// A 2-D Cartesian point.
#[derive(Debug, Clone, Copy, PartialEq)]
struct Vec2 {
    x: f64,
    y: f64,
}

/// Rotate a single point counter-clockwise about the origin by `angle` radians.
fn rotate_point(angle: f64, v: Vec2) -> Vec2 {
    let (s, c) = angle.sin_cos();
    Vec2 {
        x: v.x * c - v.y * s,
        y: v.x * s + v.y * c,
    }
}

/// Rotate every point in `vectors` by `angle` radians, returning the results.
fn rotate(angle: f64, vectors: &[Vec2]) -> Vec<Vec2> {
    vectors.iter().map(|&v| rotate_point(angle, v)).collect()
}

/// Parse a line of the form `angle x1,y1 x2,y2 ...`.
///
/// Returns `None` when the angle or any coordinate pair is malformed.
fn parse_line(line: &str) -> Option<(f64, Vec<Vec2>)> {
    let mut parts = line.split_whitespace();
    let angle: f64 = parts.next()?.parse().ok()?;
    let mut points = Vec::new();
    for pair in parts {
        let mut xy = pair.splitn(2, ',');
        let x: f64 = xy.next()?.parse().ok()?;
        let y: f64 = xy.next()?.parse().ok()?;
        points.push(Vec2 { x, y });
    }
    Some((angle, points))
}

/// Format points back into the `x1,y1 x2,y2 ...` output shape required
/// by the problem statement.
fn format_points(points: &[Vec2]) -> String {
    points
        .iter()
        .map(|p| format!("{},{}", p.x, p.y))
        .collect::<Vec<_>>()
        .join(" ")
}

fn main() {
    // The spec above requires reading a single input line from stdin; the
    // previous version hardcoded the example vectors and printed a
    // space-separated format instead.
    let mut line = String::new();
    if io::stdin().read_line(&mut line).is_err() {
        eprintln!("failed to read stdin");
        std::process::exit(1);
    }
    match parse_line(&line) {
        Some((angle, points)) => println!("{}", format_points(&rotate(angle, &points))),
        None => {
            eprintln!("invalid input: expected `angle x1,y1 x2,y2 ...`");
            std::process::exit(1);
        }
    }
}
true
4fbc9ccf587fe4654f8fad05e165e8ab1b551a43
Rust
gleam-lang/gleam
/compiler-core/src/erlang/tests/strings.rs
UTF-8
1,837
3.421875
3
[ "Apache-2.0" ]
permissive
//! Erlang code-generation tests for string concatenation (`<>`) and
//! string-prefix patterns. Each case snapshots the generated Erlang via
//! the project's `assert_erl!` macro, so the Gleam source literals below
//! must stay exactly as written.

use crate::assert_erl;

// Simple two-operand concatenation.
#[test]
fn concat() {
    assert_erl!(
        r#"
pub fn go(x, y) {
  x <> y
}
"#,
    );
}

// Chained concatenation of three operands.
#[test]
fn concat_3_variables() {
    assert_erl!(
        r#"
pub fn go(x, y, z) {
  x <> y <> z
}
"#,
    );
}

// `"literal" <> name` pattern in a case clause binds the rest of the string.
#[test]
fn string_prefix() {
    assert_erl!(
        r#"
pub fn go(x) {
  case x {
    "Hello, " <> name -> name
    _ -> "Unknown"
  }
}
"#,
    );
}

#[test]
fn rest_variable_rewriting() {
    // This test checks that the the variable on the right hand side of <> has
    // it's name written correctly when it shadows an existing variable
    assert_erl!(
        r#"
pub fn go(x) {
  case x {
    "Hello, " <> x -> x
    _ -> "Unknown"
  }
}
"#,
    );
}

#[test]
fn discard_concat_rest_pattern() {
    // We can discard the right hand side, it parses and type checks ok
    assert_erl!(
        r#"
pub fn go(x) {
  case x {
    "Hello, " <> _ -> Nil
    _ -> Nil
  }
}
"#,
    );
}

// Concatenating with a string literal that looks like a number.
#[test]
fn string_of_number_concat() {
    assert_erl!(
        r#"
pub fn go(x) {
  x <> "1"
}
"#,
    );
}

// Both operands are function calls rather than variables.
#[test]
fn concat_function_call() {
    assert_erl!(
        r#"
fn x() {
  ""
}

pub fn go() {
  x() <> x()
}
"#,
    );
}

// Both operands are module constants.
#[test]
fn concat_constant() {
    assert_erl!(
        r#"
const a = "Hello, "
const b = "Joe!"

pub fn go() {
  a <> b
}
"#,
    );
}

// Operands are calls through a constant that aliases a function.
#[test]
fn concat_constant_fn() {
    assert_erl!(
        r#"
const cs = s

fn s() {
  "s"
}

pub fn go() {
  cs() <> cs()
}
"#,
    );
}

// Concatenation where each side is a pipe expression in a block.
#[test]
fn pipe_concat() {
    assert_erl!(
        r#"
fn id(x) {
  x
}

pub fn main() {
  { "" |> id } <> { "" |> id }
}
"#,
    );
}

// `let assert` with a string-prefix pattern binding the rest.
#[test]
fn assert_string_prefix() {
    assert_erl!(
        r#"
pub fn main(x) {
  let assert "m-" <> rest = x
  rest
}
"#,
    );
}

// Same as above but discarding the rest.
// NOTE(review): the function name has a typo ("discar") but is left as-is —
// snapshot tests are keyed by the test name, so renaming would orphan the
// stored snapshot.
#[test]
fn assert_string_prefix_discar() {
    assert_erl!(
        r#"
pub fn main(x) {
  let assert "m-" <> _ = x
}
"#,
    );
}
true
4630212c14e8e13e04c7355d8648ab75eb1e5f31
Rust
galenelias/AdventOfCode_2015
/src/Day24/mod.rs
UTF-8
2,581
3.15625
3
[]
no_license
// Splits the input weights into 3 (part 1) or 4 (part 2) equal-sum groups,
// minimising the (size, product) of the first group, by searching over
// permutations with pruning. NOTE(review): inferred from the sum/3, sum/4
// targets and the lexicographic (usize, u64) comparison — confirm against
// the puzzle statement.

use std::io::{self, BufRead};
use itertools::Itertools;

/// Scan the first `pos` elements of `vals`, greedily cutting a new group
/// every time the running sum hits `target`.
///
/// Returns `(feasible, first_group)`:
/// * `feasible` is `false` as soon as any partial sum overshoots `target`;
/// * `first_group` is `Some((len, product))` for the first completed group
///   (product of its `len` leading values), or `None` if no group has been
///   completed within the prefix yet.
fn check_splits(vals: &Vec<u32>, pos: usize, target: u32) -> (bool, Option<(usize, u64)>) {
    let mut sum : u32 = 0;
    let mut first_index = None;
    for i in 0..pos {
        sum += vals[i];
        if sum == target {
            if first_index.is_none() {
                first_index = Some(i+1);
            }
            sum = 0;
        } else if sum > target {
            return (false, None);
        }
    }

    if let Some(index) = first_index {
        // Product is accumulated in u64 since it can overflow u32.
        let product = vals.iter().take(index).fold(1u64, |acc, &x| acc * (x as u64));
        return (true, Some((index, product)));
    } else {
        return (true, None);
    }
}

/// Prune the search when the current prefix is infeasible, or when the
/// first group it implies is already no better than `best`
/// ((len, product) tuples compare lexicographically).
fn should_backtrack(vals: &mut Vec<u32>, pos: usize, target: u32, best: Option<(usize,u64)>) -> bool {
    let (pass, first_group) = check_splits(vals, pos, target);

    if !pass
        || (best.is_some() && first_group.is_none() && pos > best.unwrap().0)
        || (best.is_some() && first_group.is_some() && first_group.unwrap() >= best.unwrap()) {
        return true;
    }
    return false;
}

/// Score a complete permutation: prints the solution and returns the
/// first group's (len, product) when the whole sequence splits evenly.
fn get_score(vals: &mut Vec<u32>, target: u32) -> Option<(usize,u64)> {
    let result = check_splits(vals, vals.len(), target);
    if result.0 {
        println!("Solution: {:?} = {} ({})", vals, result.1.unwrap().0, result.1.unwrap().1,);
    }
    return result.1
}

/// Depth-first search over permutations of `vals` (swapping positions in
/// place), keeping the best (smallest) first-group score found so far.
fn optimize(vals: &mut Vec<u32>, pos: usize, target: u32, mut best: Option<(usize,u64)>) -> Option<(usize,u64)> {
    if pos == vals.len() {
        println!("Try: {:?} ({})", vals, best.is_some());
        return get_score(vals, target);
    } else if should_backtrack(vals, pos, target, best) {
        if best.is_some() {
            // println!("Backtracking: pos={}, vals={:?}", pos, vals);
        }
        return None;
    }

    for i in pos..vals.len() {
        vals.swap(pos, i);

        let outcome = optimize(vals, pos+1, target, best);
        if best.is_none() || (outcome.is_some() && outcome.unwrap() < best.unwrap()) {
            best = outcome;

            // Now that we have a solution, most solutions in this sub-tree will be just as good, so potentially bail out early
            if should_backtrack(vals, pos, target, best) {
                vals.swap(pos, i);
                return best;
            }
        }

        vals.swap(pos, i);
    }

    return best;
}

/// Entry point: reads one weight per line from stdin, then solves for
/// 3-way (part 1) and 4-way (part 2) equal splits.
pub fn solve() {
    let stdin = io::stdin();
    let lines: Vec<u32> = stdin.lock().lines()
        .filter_map(|line| line.ok())
        .map(|line| line.parse::<u32>().unwrap())
        .collect_vec();

    // Reverse so the search tries the largest weights first (assuming the
    // input file is sorted ascending — NOTE(review): confirm).
    let mut lines = lines.into_iter().rev().collect_vec();
    let sum: u32 = lines.iter().sum();

    let part1 = optimize(&mut lines, 0, sum / 3, None);
    println!("Part 1: {} ({})", part1.unwrap().0, part1.unwrap().1);

    let part2 = optimize(&mut lines, 0, sum / 4, None);
    println!("Part 2: {} ({})", part2.unwrap().0, part2.unwrap().1);
}
true
5fc9fe499cc6efd86b031744fc233573fbc5ee6b
Rust
jannes/fan2jian
/src/bin.rs
UTF-8
1,387
3.078125
3
[]
no_license
use std::{fs, io::Write, path::PathBuf}; use fan2jian::map_text; const HELP: &str = "\ fan2jian USAGE: app [INPUT_PATH] [OUTPUT_PATH] FLAGS: -h, --help Prints help information -r, --reverse Do reverse direction: 简体 to 繁体 ARGS: <INPUT_PATH>: path to input file <OUTPUT_PATH>: path to output file "; #[derive(Debug)] struct AppArgs { input: PathBuf, output: PathBuf, reverse: bool, } fn parse_args() -> Result<AppArgs, pico_args::Error> { let mut pargs = pico_args::Arguments::from_env(); if pargs.contains(["-h", "--help"]) { print!("{}", HELP); std::process::exit(0); } let args = AppArgs { input: pargs.free_from_str()?, output: pargs.free_from_str()?, reverse: pargs.contains(["-r", "--reverse"]), }; Ok(args) } fn main() { let args = match parse_args() { Ok(v) => v, Err(e) => { eprintln!("Error: {}.", e); std::process::exit(1); } }; let input_text = fs::read_to_string(args.input).expect("could not read input file"); let output_text = map_text(&input_text, !args.reverse); let mut output_file = fs::File::create(args.output.as_path()).expect("could not open output file"); output_file .write_all(output_text.as_bytes()) .expect("could not write to output file"); }
true
3071b2cf57668eba4eb936b1f00a67dfbc8f6178
Rust
louiidev/smol-rs
/examples/camera_movement.rs
UTF-8
1,343
2.625
3
[]
no_license
use smol_rs::errors::SmolError;
use smol_rs::math::Vector3;
use smol_rs::{import_file, App, AppSettings, Color, Keycode, Transform};

extern crate smol_rs;

/// Example: pan the camera with the arrow keys and zoom with the scroll wheel.
fn main() -> Result<(), SmolError> {
    // Engine configured for a 144 FPS target; everything else keeps defaults.
    let mut app = App::new(AppSettings {
        target_fps: 144.,
        ..Default::default()
    });

    let texture = app
        .load_texture(import_file!("../assets/test.png"))
        .unwrap();

    while app.is_running() {
        // Accumulate a movement direction from whichever arrow keys are held.
        let mut direction = Vector3::default();
        for key in app.input.get_pressed_keys().iter() {
            match key {
                Keycode::Up => direction.y -= 1.,
                Keycode::Down => direction.y += 1.,
                Keycode::Right => direction.x += 1.,
                Keycode::Left => direction.x -= 1.,
                _ => {}
            }
        }

        // The scroll wheel zooms the camera in half-unit steps.
        let scroll = app.input.mouse_scroll_direction;
        let zoom_step = if scroll > 0 {
            0.5
        } else if scroll < 0 {
            -0.5
        } else {
            0.0
        };
        app.renderer.camera.zoom += zoom_step;

        // Move at a fixed speed, scaled by delta time for frame-rate
        // independence; the small threshold skips normalizing a zero vector.
        if direction.magnitude() > 0.1 {
            app.renderer.camera.position += direction.normalize() * 200. * app.delta;
        }

        app.renderer.clear(Color::BLACK);
        app.renderer.texture(Transform::default(), &texture);
        app.end_scene();
    }

    Ok(())
}
true
bc99790fe5245a45cea43cc1f0b1bbed4051dd58
Rust
AssafAoc/aoc2020
/src/d5.rs
UTF-8
1,549
2.890625
3
[ "Unlicense" ]
permissive
use std::collections::BTreeSet; #[allow(dead_code)] const TEST: &str = r#"FBFBBFFRLR BFFFBBFRRR FFFBBBFRRR BBFFBBFRLL"#; // 357, 567, 119, 820 #[allow(dead_code)] pub fn run() { let input = super::get_input(5, ""); // let input = TEST.lines(); let mut seat_ids = BTreeSet::new(); for boarding_pass in input { let mut min_row = 0; let mut max_row = 127; let mut min_col = 0; let mut max_col = 7; let mut chars = boarding_pass.chars(); for _ in 0..7 { match chars.next() { Some('F') => max_row = (max_row + min_row) / 2, Some('B') => min_row = (max_row + min_row) / 2, None | Some(_) => panic!("error") } } for _ in 0..3 { match chars.next() { Some('L') => max_col = (max_col + min_col) / 2, Some('R') => min_col = (max_col + min_col) / 2, None | Some(_) => panic!("error") } } let seat_id = max_row * 8 + max_col; seat_ids.insert(seat_id); // max_seat_id = max_seat_id.max(seat_id); } println!("a: {}", seat_ids.iter().max().unwrap()); let mut seat_id_iterator = seat_ids.iter().peekable(); let mut previous_id = seat_id_iterator.next().unwrap(); 'find_seat: for current_id in seat_id_iterator { if current_id - previous_id == 2 { println!("b: {}", previous_id + 1); break 'find_seat; } previous_id = current_id; } }
true
b852cda2eb3b7916dd3c49f4a6513538c62332d0
Rust
erezny/rusted-cypher
/src/cypher/mod.rs
UTF-8
11,540
2.96875
3
[ "MIT" ]
permissive
//! Provides structs used to interact with the cypher transaction endpoint
//!
//! The types declared in this module, save for `Statement`, don't need to be instantiated
//! directly, since they can be obtained from the `GraphClient`.
//!
//! # Examples
//!
//! ## Execute a single query
//! ```
//! # use rusted_cypher::GraphClient;
//! # const URL: &'static str = "http://neo4j:neo4j@localhost:7474/db/data";
//! let graph = GraphClient::connect(URL).unwrap();
//!
//! graph.cypher().exec("CREATE (n:CYPHER_QUERY {value: 1})").unwrap();
//! let result = graph.cypher().exec("MATCH (n:CYPHER_QUERY) RETURN n.value AS value").unwrap();
//! # assert_eq!(result.data.len(), 1);
//!
//! // Iterate over the results
//! for row in result.rows() {
//!     let value = row.get::<i32>("value").unwrap(); // or: let value: i32 = row.get("value");
//!     assert_eq!(value, 1);
//! }
//! # graph.cypher().exec("MATCH (n:CYPHER_QUERY) delete n");
//! ```
//!
//! ## Execute multiple queries
//! ```
//! # use rusted_cypher::GraphClient;
//! # const URL: &'static str = "http://neo4j:neo4j@localhost:7474/db/data";
//! # let graph = GraphClient::connect(URL).unwrap();
//! let mut query = graph.cypher().query()
//!     .with_statement("MATCH (n:SOME_CYPHER_QUERY) RETURN n.value as value")
//!     .with_statement("MATCH (n:OTHER_CYPHER_QUERY) RETURN n");
//!
//! let results = query.send().unwrap();
//!
//! for row in results[0].rows() {
//!     let value: i32 = row.get("value").unwrap();
//!     assert_eq!(value, 1);
//! }
//! ```
//!
//! ## Start a transaction
//! ```
//! # use rusted_cypher::GraphClient;
//! # const URL: &'static str = "http://neo4j:neo4j@localhost:7474/db/data";
//! # let graph = GraphClient::connect(URL).unwrap();
//! let (transaction, results) = graph.cypher().transaction()
//!     .with_statement("MATCH (n:TRANSACTION_CYPHER_QUERY) RETURN n")
//!     .begin().unwrap();
//!
//! # assert_eq!(results.len(), 1);
//! ```

pub mod transaction;
pub mod statement;
pub mod result;

pub use self::statement::Statement;
pub use self::transaction::Transaction;
pub use self::result::CypherResult;

use std::convert::Into;
use std::collections::BTreeMap;

use hyper::client::{Client, Response};
use hyper::header::Headers;
use url::Url;

use serde::Deserialize;
use serde_json::{self, Value};
use serde_json::de as json_de;
use serde_json::ser as json_ser;
use serde_json::value as json_value;

use self::result::{QueryResult, ResultTrait};
use ::error::GraphError;

// With the "rustc-serialize" feature, parameters are serialized eagerly and
// any serialization failure is reported here, before talking to the server.
#[cfg(feature = "rustc-serialize")]
fn check_param_errors_for_rustc_serialize(statements: &Vec<Statement>) -> Result<(), GraphError> {
    for stmt in statements.iter() {
        if stmt.has_param_errors() {
            let entry = stmt.param_errors().iter().nth(1).unwrap();
            return Err(GraphError::new(
                &format!("Error at parameter '{}' of query '{}': {}", entry.0, stmt.statement(), entry.1)
            ));
        }
    }
    Ok(())
}

// Without the feature there is nothing to pre-check; always succeeds.
#[cfg(not(feature = "rustc-serialize"))]
fn check_param_errors_for_rustc_serialize(_: &Vec<Statement>) -> Result<(), GraphError> {
    Ok(())
}

/// POSTs the given statements to `endpoint` as a JSON `{"statements": [...]}`
/// body, returning the raw HTTP response.
///
/// NOTE(review): the "Seding" typo below is runtime log text, so it is left
/// unchanged here — fixing log output is a behavior change.
fn send_query(client: &Client, endpoint: &str, headers: &Headers, statements: Vec<Statement>)
    -> Result<Response, GraphError> {

    if cfg!(feature = "rustc-serialize") {
        try!(check_param_errors_for_rustc_serialize(&statements));
    }

    let mut json = BTreeMap::new();
    json.insert("statements", statements);

    let json = match serde_json::to_string(&json) {
        Ok(json) => json,
        Err(e) => {
            error!("Unable to serialize request: {}", e);
            return Err(GraphError::new_error(Box::new(e)));
        }
    };

    let req = client.post(endpoint)
        .headers(headers.clone())
        .body(&json);

    debug!("Seding query:\n{}", json_ser::to_string_pretty(&json).unwrap_or(String::new()));

    let res = try!(req.send());
    Ok(res)
}

/// Deserializes the HTTP response body into `T`, converting any neo4j
/// errors carried in the payload into a `GraphError`.
fn parse_response<T: Deserialize + ResultTrait>(res: &mut Response) -> Result<T, GraphError> {
    let value = json_de::from_reader(res);
    let result = match value.and_then(|v: Value| json_value::from_value::<T>(v.clone())) {
        Ok(result) => result,
        Err(e) => {
            error!("Unable to parse response: {}", e);
            return Err(GraphError::new_error(Box::new(e)));
        }
    };

    if result.errors().len() > 0 {
        return Err(GraphError::new_neo4j_error(result.errors().clone()));
    }

    Ok(result)
}

/// Represents the cypher endpoint of a neo4j server
///
/// The `Cypher` struct holds information about the cypher enpoint. It is used to create the queries
/// that are sent to the server.
pub struct Cypher {
    endpoint: Url,
    client: Client,
    headers: Headers,
}

impl Cypher {
    /// Creates a new Cypher
    ///
    /// Its arguments are the cypher transaction endpoint and the HTTP headers containing HTTP
    /// Basic Authentication, if needed.
    pub fn new(endpoint: Url, headers: Headers) -> Self {
        Cypher {
            endpoint: endpoint,
            client: Client::new(),
            headers: headers,
        }
    }

    // Internal accessor for the endpoint URL.
    fn endpoint(&self) -> &Url {
        &self.endpoint
    }

    // Internal accessor for the shared HTTP client.
    fn client(&self) -> &Client {
        &self.client
    }

    // Internal accessor for the HTTP headers (auth, content type).
    fn headers(&self) -> &Headers {
        &self.headers
    }

    /// Creates a new `CypherQuery`
    pub fn query(&self) -> CypherQuery {
        CypherQuery {
            statements: Vec::new(),
            cypher: &self,
        }
    }

    /// Executes the given `Statement`
    ///
    /// Parameter can be anything that implements `Into<Statement>`, `&str` or `Statement` itself
    pub fn exec<S: Into<Statement>>(&self, statement: S) -> Result<CypherResult, GraphError> {
        let mut query = self.query();
        query.add_statement(statement);

        let mut results = try!(query.send());

        // A single statement should yield exactly one result set.
        match results.pop() {
            Some(result) => Ok(result),
            None => Err(GraphError::new("No results returned from server")),
        }
    }

    /// Creates a new `Transaction`
    pub fn transaction(&self) -> Transaction<self::transaction::Created> {
        Transaction::new(&self.endpoint.to_string(), &self.headers)
    }
}

/// Represents a cypher query
///
/// A cypher query is composed by statements, each one containing the query itself and its parameters.
///
/// The query parameters must implement `Serialize` so they can be serialized into JSON in order to
/// be sent to the server
pub struct CypherQuery<'a> {
    statements: Vec<Statement>,
    cypher: &'a Cypher,
}

impl<'a> CypherQuery<'a> {
    /// Adds statements in builder style
    pub fn with_statement<T: Into<Statement>>(mut self, statement: T) -> Self {
        self.add_statement(statement);
        self
    }

    /// Appends a statement to the query.
    pub fn add_statement<T: Into<Statement>>(&mut self, statement: T) {
        self.statements.push(statement.into());
    }

    /// Borrow the statements accumulated so far.
    pub fn statements(&self) -> &Vec<Statement> {
        &self.statements
    }

    /// Replace all statements at once.
    pub fn set_statements(&mut self, statements: Vec<Statement>) {
        self.statements = statements;
    }

    /// Sends the query to the server
    ///
    /// The statements contained in the query are sent to the server and the results are parsed
    /// into a `Vec<CypherResult>` in order to match the response of the neo4j api.
    pub fn send(self) -> Result<Vec<CypherResult>, GraphError> {
        let client = self.cypher.client();
        // Posting to "<endpoint>/commit" both runs and commits in one request.
        let endpoint = format!("{}/{}", self.cypher.endpoint(), "commit");
        let headers = self.cypher.headers();
        let mut res = try!(send_query(client, &endpoint, headers, self.statements));

        let result: QueryResult = try!(parse_response(&mut res));
        if result.errors().len() > 0 {
            return Err(GraphError::new_neo4j_error(result.errors().clone()))
        }

        Ok(result.results)
    }
}

// NOTE(review): these are integration tests — they require a neo4j server
// listening on localhost:7474 with the default neo4j/neo4j credentials.
#[cfg(test)]
mod tests {
    use super::*;
    use ::cypher::result::Row;

    fn get_cypher() -> Cypher {
        use hyper::Url;
        use hyper::header::{Authorization, Basic, ContentType, Headers};

        let cypher_endpoint = Url::parse("http://localhost:7474/db/data/transaction").unwrap();

        let mut headers = Headers::new();
        headers.set(Authorization(
            Basic {
                username: "neo4j".to_owned(),
                password: Some("neo4j".to_owned()),
            }
        ));
        headers.set(ContentType::json());

        Cypher::new(cypher_endpoint, headers)
    }

    #[test]
    fn query_without_params() {
        let result = get_cypher().exec("MATCH (n:TEST_CYPHER) RETURN n").unwrap();

        assert_eq!(result.columns.len(), 1);
        assert_eq!(result.columns[0], "n");
    }

    #[test]
    fn query_with_string_param() {
        let statement = Statement::new("MATCH (n:TEST_CYPHER {name: {name}}) RETURN n")
            .with_param("name", "Neo");

        let result = get_cypher().exec(statement).unwrap();

        assert_eq!(result.columns.len(), 1);
        assert_eq!(result.columns[0], "n");
    }

    #[test]
    fn query_with_int_param() {
        let statement = Statement::new("MATCH (n:TEST_CYPHER {value: {value}}) RETURN n")
            .with_param("value", 42);

        let result = get_cypher().exec(statement).unwrap();

        assert_eq!(result.columns.len(), 1);
        assert_eq!(result.columns[0], "n");
    }

    #[test]
    fn query_with_complex_param() {
        #[cfg(not(feature = "rustc-serialize"))]
        mod inner {
            #[derive(Serialize, Deserialize)]
            pub struct ComplexType {
                pub name: String,
                pub value: i32,
            }
        }

        #[cfg(feature = "rustc-serialize")]
        mod inner {
            #[derive(RustcEncodable, RustcDecodable)]
            pub struct ComplexType {
                pub name: String,
                pub value: i32,
            }
        }

        let cypher = get_cypher();

        let complex_param = inner::ComplexType {
            name: "Complex".to_owned(),
            value: 42,
        };

        let statement = Statement::new("CREATE (n:TEST_CYPHER_COMPLEX_PARAM {p})")
            .with_param("p", &complex_param);

        let result = cypher.exec(statement);
        assert!(result.is_ok());

        let results = cypher.exec("MATCH (n:TEST_CYPHER_COMPLEX_PARAM) RETURN n").unwrap();
        let rows: Vec<Row> = results.rows().take(1).collect();
        let row = rows.first().unwrap();
        let complex_result: inner::ComplexType = row.get("n").unwrap();
        assert_eq!(complex_result.name, "Complex");
        assert_eq!(complex_result.value, 42);

        cypher.exec("MATCH (n:TEST_CYPHER_COMPLEX_PARAM) DELETE n").unwrap();
    }

    #[test]
    fn query_with_multiple_params() {
        let statement = Statement::new(
            "MATCH (n:TEST_CYPHER {name: {name}}) WHERE n.value = {value} RETURN n")
            .with_param("name", "Neo")
            .with_param("value", 42);

        let result = get_cypher().exec(statement).unwrap();

        assert_eq!(result.columns.len(), 1);
        assert_eq!(result.columns[0], "n");
    }

    #[test]
    fn multiple_queries() {
        let cypher = get_cypher();
        let statement1 = Statement::new("MATCH (n:TEST_CYPHER) RETURN n");
        let statement2 = Statement::new("MATCH (n:TEST_CYPHER) RETURN n");

        let query = cypher.query()
            .with_statement(statement1)
            .with_statement(statement2);

        let results = query.send().unwrap();
        assert_eq!(results.len(), 2);
    }
}
true
6d355db6745ac2b0d7be36a786672edb075249b7
Rust
fbenkstein/advent-of-code
/gabriel/day22/src/main.rs
UTF-8
7,494
3.03125
3
[]
no_license
use priority_queue::PriorityQueue; use revord::RevOrd; use std::collections::{BTreeMap, BTreeSet, BinaryHeap, HashSet, VecDeque}; use std::fmt; struct Cave { depth: usize, width: usize, height: usize, target_x: usize, target_y: usize, regions: Vec<usize>, } enum Region { Rocky, Wet, Narrow, } impl From<usize> for Region { fn from(erosion_level: usize) -> Region { match erosion_level % 3 { 0 => Region::Rocky, 1 => Region::Wet, 2 => Region::Narrow, _ => unreachable!(), } } } #[derive(PartialEq, Eq, Clone, Copy, Hash, PartialOrd, Ord)] enum Tool { ClimbingGear, Torch, Neither, } // impl From<Region> for Tool { // fn from(region: Region) -> Tool { // match region { // } // } // } impl Cave { fn new(depth: usize, (target_x, target_y): (usize, usize)) -> Cave { let width = target_x + 6; let height = target_y + 6; let mut regions: Vec<usize> = vec![0; width * height]; for y in 0..height { for x in 0..width { let geological_index = if y == 0 && x == 0 { 0 } else if y == target_y && x == target_y { 0 } else if y == 0 { x * 16807 } else if x == 0 { y * 48271 } else { regions[(x - 1) + y * width] * regions[x + (y - 1) * width] }; regions[x + y * width] = (geological_index + depth) % 20183; } } Cave { depth, height, width, target_x, target_y, regions: regions, } } fn risk_level(&self) -> usize { let mut risk = 0; for x in 0..=self.target_x { for y in 0..=self.target_y { risk += self[(x, y)] % 3; } } risk } fn pathfind(&self) -> usize { // find all reachable tiles for this unit let mut reachable = BTreeMap::new(); let mut queue = PriorityQueue::new(); let start = ((0, 0), Tool::Torch); reachable.insert(start, 0); queue.push(start, RevOrd(0)); while let Some((((x, y), tool), cost)) = queue.pop() { // special stop conditions if tool == Tool::Torch && (x, y) == (self.target_x, self.target_y) { return cost.0; } if reachable[&((x, y), tool)] < cost.0 { continue; } for (edge, edge_cost) in self.edges(((x, y), tool)) { let new_cost = cost.0 + edge_cost; reachable .entry(edge) 
.and_modify(|e| { queue.change_priority(&edge, RevOrd(new_cost)); *e = new_cost }) .or_insert_with(|| { queue.push(edge, RevOrd(new_cost)); new_cost }); } } 0 } /// return edges with position, tool to switch to and cost fn edges( &self, ((x, y), tool): ((usize, usize), Tool), ) -> Vec<(((usize, usize), Tool), usize)> { let x = x as isize; let y = y as isize; let mut edges: Vec<(((usize, usize), Tool), usize)> = [(x - 1, y), (x + 1, y), (x, y - 1), (x, y + 1)] .into_iter() .filter_map(|&(x, y)| { if x >= 0 && y >= 0 && x < self.width as isize && y < self.height as isize { let x = x as usize; let y = y as usize; match Region::from(self[(x, y)]) { Region::Rocky if tool == Tool::Torch || tool == Tool::ClimbingGear => { Some((((x, y), tool), 1)) } Region::Wet if tool == Tool::ClimbingGear || tool == Tool::Neither => { Some((((x, y), tool), 1)) } Region::Narrow if tool == Tool::Torch || tool == Tool::Neither => { Some((((x, y), tool), 1)) } _ => None, // need to switch tool } } else { None } }) .collect(); // check whether we need to switch tool let x = x as usize; let y = y as usize; match Region::from(self[(x, y)]) { Region::Rocky => { if tool != Tool::Torch { edges.push((((x, y), Tool::Torch), 7)); } if tool != Tool::ClimbingGear { edges.push((((x, y), Tool::ClimbingGear), 7)); } } Region::Wet => { if tool != Tool::ClimbingGear { edges.push((((x, y), Tool::ClimbingGear), 7)); } if tool != Tool::Neither { edges.push((((x, y), Tool::Neither), 7)); } } Region::Narrow => { if tool != Tool::Torch { edges.push((((x, y), Tool::Torch), 7)); } if tool != Tool::ClimbingGear { edges.push((((x, y), Tool::ClimbingGear), 7)); } } } if (x, y) == (self.target_x, self.target_y) { edges.push((((x, y), Tool::Torch), 7)); } edges } } use std::ops::Index; impl Index<(usize, usize)> for Cave { type Output = usize; fn index(&self, (x, y): (usize, usize)) -> &Self::Output { &self.regions[x + y * (self.width)] } } impl fmt::Display for Cave { fn fmt(&self, f: &mut fmt::Formatter) -> 
fmt::Result { for y in 0..self.height { for x in 0..self.width { write!( f, "{}", if x == 0 && y == 0 { 'M' } else if x == self.target_x && y == self.target_y { 'T' } else { match self[(x, y)] % 3 { 0 => '.', 1 => '=', 2 => '|', _ => unreachable!(), } } )?; } writeln!(f)?; } writeln!(f)?; writeln!(f, "Risk level: {}", self.risk_level())?; Ok(()) } } fn main() { // let cave = Cave::new(510, (10, 10)); // println!("{}", cave); let cave = Cave::new(4845, (6, 770)); println!("{}", cave); println!("{}", cave.pathfind()); } #[test] fn example() { let cave = Cave::new(510, (10, 10)); assert_eq!(cave[(0, 0)], 510); assert_eq!(cave[(1, 0)], 17317); assert_eq!(cave[(0, 1)], 8415); assert_eq!(cave[(1, 1)], 1805); assert_eq!(cave[(10, 10)], 510); assert_eq!(cave.risk_level(), 114); } #[test] fn pathfinding() { let cave = Cave::new(510, (10, 10)); assert_eq!(cave.pathfind(), 45); }
true
8ccb6a616ebc6a105fb5c7d6340ad8823142325e
Rust
bto/wasm-gameboy
/wasm/src/cpu/opcode/dec_tests.rs
UTF-8
8,542
2.734375
3
[]
no_license
//! Unit tests for the Game Boy (LR35902) `DEC` instruction family:
//! `DEC r` (8-bit register), `DEC rr` (16-bit register pair) and `DEC (HL)`
//! (memory indirect). Each test checks the result, the program counter
//! advance and the exact flag behaviour (Z/N/H affected, C always preserved
//! for 8-bit DEC; no flags affected for 16-bit DEC).

use super::*;

#[macro_use]
mod tests_macro;

#[test]
fn op_dec_r() {
    let mut cpu = CPU::new();
    // DEC r encoding: 0b00_rrr_101, where rrr selects the register
    // (b=000, c=001, d=010, e=011, h=100, l=101, a=111; 110 is (HL)).
    let opcode_base = 0b00_000_101;
    for i in [0b000, 0b001, 0b010, 0b011, 0b100, 0b101, 0b111].iter() {
        let opcode = opcode_base | (i << 3);

        // half carry: 0x10 - 1 borrows out of bit 4, so H must be set.
        // N is set (subtraction), Z cleared, and C must be left untouched.
        let pc = cpu.registers.pc;
        set_inst!(cpu, pc, opcode);
        match i {
            0b000 => cpu.registers.b = 0b1_0000,
            0b001 => cpu.registers.c = 0b1_0000,
            0b010 => cpu.registers.d = 0b1_0000,
            0b011 => cpu.registers.e = 0b1_0000,
            0b100 => cpu.registers.h = 0b1_0000,
            0b101 => cpu.registers.l = 0b1_0000,
            0b111 => cpu.registers.a = 0b1_0000,
            _ => panic!("never reach"),
        }
        cpu.registers.carry = true;
        cpu.registers.half_carry = false;
        cpu.registers.subtraction = false;
        cpu.registers.zero = true;
        cpu.execute();
        assert_eq!(cpu.registers.pc, pc + 1);
        assert_eq!(cpu.registers.carry, true);
        assert_eq!(cpu.registers.half_carry, true);
        assert_eq!(cpu.registers.subtraction, true);
        assert_eq!(cpu.registers.zero, false);
        match i {
            0b000 => assert_eq!(cpu.registers.b, 0b0_1111),
            0b001 => assert_eq!(cpu.registers.c, 0b0_1111),
            0b010 => assert_eq!(cpu.registers.d, 0b0_1111),
            0b011 => assert_eq!(cpu.registers.e, 0b0_1111),
            0b100 => assert_eq!(cpu.registers.h, 0b0_1111),
            0b101 => assert_eq!(cpu.registers.l, 0b0_1111),
            0b111 => assert_eq!(cpu.registers.a, 0b0_1111),
            _ => panic!("never reach"),
        }

        // zero: 1 - 1 == 0 sets Z, clears H (no borrow from bit 4),
        // sets N, and again leaves C untouched.
        let pc = cpu.registers.pc;
        set_inst!(cpu, pc, opcode);
        match i {
            0b000 => cpu.registers.b = 1,
            0b001 => cpu.registers.c = 1,
            0b010 => cpu.registers.d = 1,
            0b011 => cpu.registers.e = 1,
            0b100 => cpu.registers.h = 1,
            0b101 => cpu.registers.l = 1,
            0b111 => cpu.registers.a = 1,
            _ => panic!("never reach"),
        }
        cpu.registers.carry = true;
        cpu.registers.half_carry = true;
        cpu.registers.subtraction = true;
        cpu.registers.zero = false;
        cpu.execute();
        assert_eq!(cpu.registers.pc, pc + 1);
        assert_eq!(cpu.registers.carry, true);
        assert_eq!(cpu.registers.half_carry, false);
        assert_eq!(cpu.registers.subtraction, true);
        assert_eq!(cpu.registers.zero, true);
        match i {
            0b000 => assert_eq!(cpu.registers.b, 0),
            0b001 => assert_eq!(cpu.registers.c, 0),
            0b010 => assert_eq!(cpu.registers.d, 0),
            0b011 => assert_eq!(cpu.registers.e, 0),
            0b100 => assert_eq!(cpu.registers.h, 0),
            0b101 => assert_eq!(cpu.registers.l, 0),
            0b111 => assert_eq!(cpu.registers.a, 0),
            _ => panic!("never reach"),
        }

        // overflow (wrap-around): 0 - 1 wraps to 0xFF with a borrow from
        // bit 4 (H set), N set, Z cleared; C stays untouched (here: false).
        let pc = cpu.registers.pc;
        set_inst!(cpu, pc, opcode);
        match i {
            0b000 => cpu.registers.b = 0,
            0b001 => cpu.registers.c = 0,
            0b010 => cpu.registers.d = 0,
            0b011 => cpu.registers.e = 0,
            0b100 => cpu.registers.h = 0,
            0b101 => cpu.registers.l = 0,
            0b111 => cpu.registers.a = 0,
            _ => panic!("never reach"),
        }
        cpu.registers.carry = false;
        cpu.registers.half_carry = false;
        cpu.registers.subtraction = false;
        cpu.registers.zero = true;
        cpu.execute();
        assert_eq!(cpu.registers.pc, pc + 1);
        assert_eq!(cpu.registers.carry, false);
        assert_eq!(cpu.registers.half_carry, true);
        assert_eq!(cpu.registers.subtraction, true);
        assert_eq!(cpu.registers.zero, false);
        match i {
            0b000 => assert_eq!(cpu.registers.b, 0xFF),
            0b001 => assert_eq!(cpu.registers.c, 0xFF),
            0b010 => assert_eq!(cpu.registers.d, 0xFF),
            0b011 => assert_eq!(cpu.registers.e, 0xFF),
            0b100 => assert_eq!(cpu.registers.h, 0xFF),
            0b101 => assert_eq!(cpu.registers.l, 0xFF),
            0b111 => assert_eq!(cpu.registers.a, 0xFF),
            _ => panic!("never reach"),
        }
    }
}

#[test]
fn op_dec_rr() {
    let mut cpu = CPU::new();
    // DEC rr encoding: 0b00_rr_1011, where rr selects the pair
    // (BC=00, DE=01, HL=10, SP=11). 16-bit DEC affects no flags.
    let opcode_base = 0b00_00_1011;
    for i in [0b00, 0b01, 0b10, 0b11].iter() {
        let opcode = opcode_base | (i << 4);

        // no wrap-around: each pair is simply decremented by one and
        // every flag keeps its previous value (all set here).
        let pc = cpu.registers.pc;
        set_inst!(cpu, pc, opcode);
        match i {
            0b00 => cpu.registers.bc_set(1),
            0b01 => cpu.registers.de_set(2),
            0b10 => cpu.registers.hl_set(3),
            0b11 => cpu.registers.sp = 4,
            _ => panic!("never reach"),
        }
        cpu.registers.carry = true;
        cpu.registers.half_carry = true;
        cpu.registers.subtraction = true;
        cpu.registers.zero = true;
        cpu.execute();
        assert_eq!(cpu.registers.pc, pc + 1);
        match i {
            0b00 => assert_eq!(cpu.registers.bc_get(), 0),
            0b01 => assert_eq!(cpu.registers.de_get(), 1),
            0b10 => assert_eq!(cpu.registers.hl_get(), 2),
            0b11 => assert_eq!(cpu.registers.sp, 3),
            _ => panic!("never reach"),
        }
        assert_eq!(cpu.registers.carry, true);
        assert_eq!(cpu.registers.half_carry, true);
        assert_eq!(cpu.registers.subtraction, true);
        assert_eq!(cpu.registers.zero, true);

        // wrap-around: 0 - 1 wraps to 0xFFFF; flags still untouched
        // (all cleared here and expected to stay cleared).
        let pc = cpu.registers.pc;
        set_inst!(cpu, pc, opcode);
        match i {
            0b00 => cpu.registers.bc_set(0),
            0b01 => cpu.registers.de_set(0),
            0b10 => cpu.registers.hl_set(0),
            0b11 => cpu.registers.sp = 0,
            _ => panic!("never reach"),
        }
        cpu.registers.carry = false;
        cpu.registers.half_carry = false;
        cpu.registers.subtraction = false;
        cpu.registers.zero = false;
        cpu.execute();
        assert_eq!(cpu.registers.pc, pc + 1);
        match i {
            0b00 => assert_eq!(cpu.registers.bc_get(), 0xFFFF),
            0b01 => assert_eq!(cpu.registers.de_get(), 0xFFFF),
            0b10 => assert_eq!(cpu.registers.hl_get(), 0xFFFF),
            0b11 => assert_eq!(cpu.registers.sp, 0xFFFF),
            _ => panic!("never reach"),
        }
        assert_eq!(cpu.registers.carry, false);
        assert_eq!(cpu.registers.half_carry, false);
        assert_eq!(cpu.registers.subtraction, false);
        assert_eq!(cpu.registers.zero, false);
    }
}

#[test]
fn op_dec_rrn() {
    let mut cpu = CPU::new();
    // DEC (HL): decrements the byte at the address held in HL.
    // Flag behaviour matches the 8-bit register form.
    let opcode = 0b00_110_101;

    // half carry: mem[0x0102] = 0x10, decrement borrows from bit 4.
    let pc = cpu.registers.pc;
    set_inst!(cpu, pc, opcode);
    cpu.registers.h = 1;
    cpu.registers.l = 2;
    cpu.mmu.byte_set(0x102, 0b1_0000);
    cpu.registers.carry = true;
    cpu.registers.half_carry = false;
    cpu.registers.subtraction = false;
    cpu.registers.zero = true;
    cpu.execute();
    assert_eq!(cpu.registers.pc, pc + 1);
    assert_eq!(cpu.registers.carry, true);
    assert_eq!(cpu.registers.half_carry, true);
    assert_eq!(cpu.registers.subtraction, true);
    assert_eq!(cpu.registers.zero, false);
    assert_eq!(cpu.mmu.byte_get(0x102), 0b0_1111);

    // zero: mem[0x0102] = 1 decrements to 0, setting Z and clearing H.
    let pc = cpu.registers.pc;
    set_inst!(cpu, pc, opcode);
    cpu.registers.h = 1;
    cpu.registers.l = 2;
    cpu.mmu.byte_set(0x102, 1);
    cpu.registers.carry = true;
    cpu.registers.half_carry = true;
    cpu.registers.subtraction = true;
    cpu.registers.zero = false;
    cpu.execute();
    assert_eq!(cpu.registers.pc, pc + 1);
    assert_eq!(cpu.registers.carry, true);
    assert_eq!(cpu.registers.half_carry, false);
    assert_eq!(cpu.registers.subtraction, true);
    assert_eq!(cpu.registers.zero, true);
    assert_eq!(cpu.mmu.byte_get(0x102), 0);

    // overflow (wrap-around): mem[0x0203] = 0 wraps to 0xFF; C untouched.
    let pc = cpu.registers.pc;
    set_inst!(cpu, pc, opcode);
    cpu.registers.h = 2;
    cpu.registers.l = 3;
    cpu.mmu.byte_set(0x203, 0);
    cpu.registers.carry = false;
    cpu.registers.half_carry = false;
    cpu.registers.subtraction = false;
    cpu.registers.zero = true;
    cpu.execute();
    assert_eq!(cpu.registers.pc, pc + 1);
    assert_eq!(cpu.registers.carry, false);
    assert_eq!(cpu.registers.half_carry, true);
    assert_eq!(cpu.registers.subtraction, true);
    assert_eq!(cpu.registers.zero, false);
    assert_eq!(cpu.mmu.byte_get(0x203), 0xFF);
}
true
7621a3536407b217140ee6294a2d82f1ccbc37d4
Rust
quezlatch/Dining_Philosophers
/src/dining_philosophers/analysis.rs
UTF-8
5,846
3.28125
3
[]
no_license
use std::collections::HashMap;
use std::sync::{Arc, Mutex};

use crate::dining_philosophers::philosopher::state_machine::State;
use crate::dining_philosophers::philosopher::state_machine::State::Eating;

/// Percentage of `history` entries in which the philosopher was *not* eating
/// (any of the thinking states).
///
/// Note: an empty history yields `NaN` (0/0); current callers always pass
/// non-empty histories.
fn calculate_percentage(history: &[State]) -> f32 {
    let total = history.len() as f32;
    // Count every non-Eating entry directly instead of mapping to 1/0 and summing.
    let not_eating = history.iter().filter(|&state| *state != Eating).count();
    100_f32 * not_eating as f32 / total
}

/// Maps a "time spent not eating" percentage to a score in `[0, 1]`:
/// a perfect 1 at exactly 50 %, falling off linearly to 0 at 0 % and 100 %.
fn score(percentage: f32) -> f32 {
    1_f32 - (percentage / 50_f32 - 1_f32).abs()
}

/// Average per-philosopher score for a single run.
///
/// `results` maps a philosopher id to its recorded state history.
pub fn score_one_run(results: &HashMap<usize, Vec<State>>) -> f32 {
    let total_score: f32 = results
        .values()
        .map(|history| score(calculate_percentage(history)))
        .sum();
    // `results.len()` is the number of philosophers (was `results.keys().len()`).
    total_score / results.len() as f32
}

/// Mean of [`score_one_run`] over several independent runs.
pub fn compute_average_score(results: &[Arc<Mutex<HashMap<usize, Vec<State>>>>]) -> f32 {
    let total_score: f32 = results
        .iter()
        .map(|map| score_one_run(&map.lock().unwrap()))
        .sum();
    total_score / results.len() as f32
}

#[cfg(test)]
mod tests {
    use std::collections::HashMap;
    use std::sync::{Arc, Mutex};

    use crate::dining_philosophers::analysis::{calculate_percentage, compute_average_score, score, score_one_run};
    use crate::dining_philosophers::philosopher::state_machine::State::{Eating, LeftThinking, RightThinking, Thinking};
    use crate::dining_philosophers::philosopher::state_machine::State;

    #[test]
    fn calculate_percentage_thinking() {
        let results = vec![Thinking, LeftThinking, RightThinking];
        let percentage = calculate_percentage(&results);
        assert_eq!(100_f32, percentage);
    }

    #[test]
    fn calculate_percentage_thinking_with_eating() {
        let results = vec![Thinking, LeftThinking, RightThinking, Eating];
        let percentage = calculate_percentage(&results);
        assert_eq!(75_f32, percentage);
    }

    #[test]
    fn calculate_percentage_thinking_with_equal_eating() {
        let results = vec![Thinking, LeftThinking, Eating, Eating];
        let percentage = calculate_percentage(&results);
        assert_eq!(50_f32, percentage);
    }

    #[test]
    fn calculate_score_based_on_percentage() {
        assert_eq!(0.5_f32, score(75_f32));
    }

    #[test]
    fn max_score_of_one_when_fifty_percent() {
        assert_eq!(1_f32, score(50_f32));
    }

    #[test]
    fn min_score_of_zero_when_hundred_percent() {
        assert_eq!(0_f32, score(100_f32));
    }

    #[test]
    fn min_score_of_zero_when_zero_percent() {
        assert_eq!(0_f32, score(0_f32));
    }

    #[test]
    fn min_score_a_run() {
        let mut results: HashMap<usize, Vec<State>> = HashMap::new();
        results.insert(0, vec![Thinking]);
        results.insert(1, vec![Thinking]);
        let score = score_one_run(&results);
        assert_eq!(0_f32, score);
    }

    #[test]
    fn max_score_for_a_run() {
        let mut results: HashMap<usize, Vec<State>> = HashMap::new();
        results.insert(0, vec![Thinking, Eating]);
        results.insert(1, vec![Thinking, Eating]);
        let score = score_one_run(&results);
        assert_eq!(1_f32, score);
    }

    #[test]
    fn intermediate_score_for_a_run() {
        let mut results: HashMap<usize, Vec<State>> = HashMap::new();
        results.insert(0, vec![Thinking, Thinking]);
        results.insert(1, vec![Thinking, Eating]);
        let score = score_one_run(&results);
        assert_eq!(0.5_f32, score);
    }

    #[test]
    fn average_score_over_several_runs() {
        let mut results: Vec<Arc<Mutex<HashMap<usize, Vec<State>>>>> = Vec::new();
        let run_one: Arc<Mutex<HashMap<usize, Vec<State>>>> = Arc::new(Mutex::new(HashMap::new()));
        let run_two: Arc<Mutex<HashMap<usize, Vec<State>>>> = Arc::new(Mutex::new(HashMap::new()));
        run_one.lock().unwrap().insert(0, vec![Thinking, Thinking]);
        run_one.lock().unwrap().insert(1, vec![Thinking, Eating]);
        run_two.lock().unwrap().insert(0, vec![Thinking, Thinking]);
        run_two.lock().unwrap().insert(1, vec![Thinking, Eating]);
        results.push(run_one);
        results.push(run_two);

        assert_eq!(0.5_f32, compute_average_score(&results));
    }

    #[test]
    fn max_average_score_over_several_runs() {
        let mut results: Vec<Arc<Mutex<HashMap<usize, Vec<State>>>>> = Vec::new();
        let run_one: Arc<Mutex<HashMap<usize, Vec<State>>>> = Arc::new(Mutex::new(HashMap::new()));
        let run_two: Arc<Mutex<HashMap<usize, Vec<State>>>> = Arc::new(Mutex::new(HashMap::new()));
        run_one.lock().unwrap().insert(0, vec![Thinking, Eating]);
        run_one.lock().unwrap().insert(1, vec![Thinking, Eating]);
        run_two.lock().unwrap().insert(0, vec![Thinking, Eating]);
        run_two.lock().unwrap().insert(1, vec![Thinking, Eating]);
        results.push(run_one);
        results.push(run_two);

        assert_eq!(1_f32, compute_average_score(&results));
    }

    #[test]
    fn min_average_score_over_several_runs() {
        let mut results: Vec<Arc<Mutex<HashMap<usize, Vec<State>>>>> = Vec::new();
        let run_one: Arc<Mutex<HashMap<usize, Vec<State>>>> = Arc::new(Mutex::new(HashMap::new()))
;
        let run_two: Arc<Mutex<HashMap<usize, Vec<State>>>> = Arc::new(Mutex::new(HashMap::new()));
        run_one.lock().unwrap().insert(0, vec![Thinking, Thinking]);
        run_one.lock().unwrap().insert(1, vec![Eating, Eating]);
        run_two.lock().unwrap().insert(0, vec![Thinking, Thinking]);
        run_two.lock().unwrap().insert(1, vec![Eating, Eating]);
        results.push(run_one);
        results.push(run_two);

        assert_eq!(0_f32, compute_average_score(&results));
    }
}
true
4e4e5aaaa15fc4affce21d0958d3816ef892739d
Rust
freexploit/weebtk
/src/main.rs
UTF-8
1,513
3.140625
3
[]
no_license
//! Minimal qmetaobject-rs example: exposes a Rust struct (`Greeter`) to QML
//! as a registered type with a property, a signal and a callable method,
//! then runs an inline QML scene.

use cstr::cstr;
use qmetaobject::prelude::*;

// The `QObject` custom derive macro allows to expose a class to Qt and QML
#[derive(QObject, Default)]
struct Greeter {
    // Specify the base class with the qt_base_class macro
    base: qt_base_class!(trait QObject),
    // Declare `name` as a property usable from Qt
    name: qt_property!(QString; NOTIFY name_changed),
    // Declare a signal (emitted by Qt when the `name` property changes)
    name_changed: qt_signal!(),
    // And even a slot (callable from QML as `greeter.compute_greetings(...)`)
    compute_greetings: qt_method!(
        fn compute_greetings(&self, verb: String) -> QString {
            format!("{} {}", verb, self.name.to_string()).into()
        }
    ),
}

fn main() {
    // Register the `Greeter` struct to QML
    // (module "Greeter" version 1.0, QML type name "Greeter")
    qml_register_type::<Greeter>(cstr!("Greeter"), 1, 0, cstr!("Greeter"));
    // Create a QML engine from rust
    let mut engine = QmlEngine::new();
    // (Here the QML code is inline, but one can also load from a file)
    engine.load_data(
        r#"
        import QtQuick 2.6
        import QtQuick.Window 2.0
        // Import our Rust classes
        import Greeter 1.0

        Window {
            visible: true
            // Instantiate the rust struct
            Greeter {
                id: greeter;
                // Set a property
                name: "World"
            }
            Text {
                anchors.centerIn: parent
                // Call a method
                text: greeter.compute_greetings("hello")
            }
        }
    "#
        .into(),
    );
    // Enter the Qt event loop; blocks until the window is closed.
    engine.exec();
}
true
ae7b95c7ae4ee95a5f05ff1c2cbc8bfbe05da5d1
Rust
FlyingDutchmanGames/lib_table_top
/src/common/deck/card/rank.rs
UTF-8
6,372
3.609375
4
[]
no_license
use serde_repr::*; /// The pips of a standard deck. Important note that the cards have `repr(u8)` and Ace is /// represented by 1 #[derive( Copy, Clone, Debug, PartialEq, PartialOrd, Eq, Hash, Ord, Serialize_repr, Deserialize_repr, )] #[repr(u8)] pub enum Rank { Ace = 1, Two = 2, Three = 3, Four = 4, Five = 5, Six = 6, Seven = 7, Eight = 8, Nine = 9, Ten = 10, Jack = 11, Queen = 12, King = 13, } use Rank::*; impl Rank { pub const ALL: [Self; 13] = [ Ace, Two, Three, Four, Five, Six, Seven, Eight, Nine, Ten, Jack, Queen, King, ]; /// Returns the next card, with Ace being high /// ``` /// use lib_table_top::common::deck::Rank::*; /// /// assert_eq!(Ace.next_with_ace_high(), None); /// assert_eq!(King.next_with_ace_high(), Some(Ace)); /// ``` pub fn next_with_ace_high(&self) -> Option<Self> { match self { Ace => None, _ => Some(self.next_with_wrapping()), } } /// Returns the next card, with Ace being low /// ``` /// use lib_table_top::common::deck::Rank::*; /// /// assert_eq!(King.next_with_ace_low(), None); /// assert_eq!(Ace.next_with_ace_low(), Some(Two)); /// ``` pub fn next_with_ace_low(&self) -> Option<Self> { match self { King => None, _ => Some(self.next_with_wrapping()), } } /// Returns the previous card, with Ace being high /// ``` /// use lib_table_top::common::deck::Rank::*; /// /// assert_eq!(Two.previous_with_ace_high(), None); /// assert_eq!(Ace.previous_with_ace_high(), Some(King)); /// ``` pub fn previous_with_ace_high(&self) -> Option<Self> { match self { Two => None, _ => Some(self.previous_with_wrapping()), } } /// Returns the previous card, with Ace being high /// ``` /// use lib_table_top::common::deck::Rank::*; /// /// assert_eq!(Two.previous_with_ace_low(), Some(Ace)); /// assert_eq!(Ace.previous_with_ace_low(), None); /// ``` pub fn previous_with_ace_low(&self) -> Option<Self> { match self { Ace => None, _ => Some(self.previous_with_wrapping()), } } /// Provides the next highest card, wraps from King => Ace => Two /// ``` /// use 
lib_table_top::common::deck::Rank::*; /// /// assert_eq!(King.next_with_wrapping(), Ace); /// assert_eq!(Ace.next_with_wrapping(), Two); /// assert_eq!(Two.next_with_wrapping(), Three); /// // etc .. /// ``` pub fn next_with_wrapping(&self) -> Self { match self { Ace => Two, Two => Three, Three => Four, Four => Five, Five => Six, Six => Seven, Seven => Eight, Eight => Nine, Nine => Ten, Ten => Jack, Jack => Queen, Queen => King, King => Ace, } } /// Provides the next lowest card, wraps from Two => Ace => King /// ``` /// use lib_table_top::common::deck::Rank::*; /// /// assert_eq!(Two.previous_with_wrapping(), Ace); /// assert_eq!(Ace.previous_with_wrapping(), King); /// assert_eq!(King.previous_with_wrapping(), Queen); /// // etc .. /// ``` pub fn previous_with_wrapping(&self) -> Self { match self { Ace => King, King => Queen, Queen => Jack, Jack => Ten, Ten => Nine, Nine => Eight, Eight => Seven, Seven => Six, Six => Five, Five => Four, Four => Three, Three => Two, Two => Ace, } } } #[cfg(test)] mod tests { use super::*; #[test] fn test_next_with_ace_high() { let test_cases = [ (Ace, None), (King, Some(Ace)), (Queen, Some(King)), (Jack, Some(Queen)), (Ten, Some(Jack)), (Nine, Some(Ten)), (Eight, Some(Nine)), (Seven, Some(Eight)), (Six, Some(Seven)), (Five, Some(Six)), (Four, Some(Five)), (Three, Some(Four)), (Two, Some(Three)), ]; for (test, expected) in test_cases.iter() { assert_eq!(test.next_with_ace_high(), *expected); } } #[test] fn test_next_with_ace_low() { let test_cases = [ (King, None), (Queen, Some(King)), (Jack, Some(Queen)), (Ten, Some(Jack)), (Nine, Some(Ten)), (Eight, Some(Nine)), (Seven, Some(Eight)), (Six, Some(Seven)), (Five, Some(Six)), (Four, Some(Five)), (Three, Some(Four)), (Two, Some(Three)), (Ace, Some(Two)), ]; for (test, expected) in test_cases.iter() { assert_eq!(test.next_with_ace_low(), *expected); } } #[test] fn test_previous_with_ace_high() { let test_cases = [ (Ace, Some(King)), (King, Some(Queen)), (Queen, Some(Jack)), (Jack, 
Some(Ten)), (Ten, Some(Nine)), (Nine, Some(Eight)), (Eight, Some(Seven)), (Seven, Some(Six)), (Six, Some(Five)), (Five, Some(Four)), (Four, Some(Three)), (Three, Some(Two)), (Two, None), ]; for (test, expected) in test_cases.iter() { assert_eq!(test.previous_with_ace_high(), *expected); } } #[test] fn test_previous_with_ace_low() { let test_cases = [ (King, Some(Queen)), (Queen, Some(Jack)), (Jack, Some(Ten)), (Ten, Some(Nine)), (Nine, Some(Eight)), (Eight, Some(Seven)), (Seven, Some(Six)), (Six, Some(Five)), (Five, Some(Four)), (Four, Some(Three)), (Three, Some(Two)), (Two, Some(Ace)), (Ace, None), ]; for (test, expected) in test_cases.iter() { assert_eq!(test.previous_with_ace_low(), *expected); } } }
true
746e0082dedde11d2c63cd13517c473e54bf9cd3
Rust
sparky8251/maelstrom
/src/db/mod.rs
UTF-8
619
2.9375
3
[ "MIT", "Apache-2.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
//! Storage-driver abstraction and the concrete backends that implement it.

pub mod postgres;
pub use postgres::PostgresStore;

use async_trait::async_trait;
use std::error::Error;

/// A Storage Driver.
///
/// This trait encapsulates a complete storage driver to a
/// specific type of storage mechanism, e.g. Postgres, Kafka, etc.
#[async_trait]
pub trait Store: Clone + Sync + Send + Sized {
    /// Gets the type of this data store, e.g. Postgres
    fn get_type(&self) -> String;

    /// Determines if a username is available for registration.
    ///
    /// NOTE(review): presumably `Ok(true)` means the name is free — confirm
    /// against the `PostgresStore` implementation.
    /// TODO: Create more generic error responses
    async fn is_username_available(&self, username: &str) -> Result<bool, Box<dyn Error>>;
}
true
1a496a75623ac9952bd713bc8a942f0768919194
Rust
nilsso/challenge-solutions
/exercism/rust/rectangles/src/lib.rs
UTF-8
1,830
3.1875
3
[]
no_license
#![feature(bool_to_option)] use std::iter::repeat; fn cartesian_product(m: usize, n: usize) -> impl Iterator<Item = (usize, usize)> { (0..m).flat_map(move |i| repeat(i).zip(0..n)) } fn combinations(n: usize) -> impl Iterator<Item = (usize, usize)> { (0..n.max(1) - 1).flat_map(move |i| repeat(i).zip(i + 1..n)) } pub fn count(lines: &[&str]) -> u32 { let rows = lines.len(); let cols = lines.get(0).map_or(0, |l| l.len()); let chars: Vec<Vec<char>> = lines.iter().map(|line| line.chars().collect()).collect(); let is_vertex = |&(i, j): &(usize, usize)| chars[i][j] == '+'; let vertices: Vec<(usize, usize)> = cartesian_product(rows, cols).filter(is_vertex).collect(); let is_rectangle = |&[t, l, b, r]: &[usize; 4]| { // Helpers to check that a char belongs on a rectangle side let is_hline_char = |(i, j): (usize, usize)| { let c = chars[i][j]; c == '+' || c == '-' }; let is_vline_char = |(i, j): (usize, usize)| { let c = chars[i][j]; c == '+' || c == '|' }; // Helpers to check rectangle sides for gaps let is_hline = |row| repeat(row).zip(l..r).all(is_hline_char); let is_vline = |col| (t..b).zip(repeat(col)).all(is_vline_char); is_hline(t) && is_hline(b) && is_vline(l) && is_vline(r) }; combinations(vertices.len()) .filter_map(|(i, j)| { let (t, l) = vertices[i]; let (b, r) = vertices[j]; // If two selected vertices could be top-left and bottom-right corners of a rectangle, // and if bottom-left and top-right are also vertices. (l < r && t < b && is_vertex(&(t, r)) && is_vertex(&(b, l))).then_some([t, l, b, r]) }) .filter(is_rectangle) .count() as u32 }
true
03ef86f43404ce52bf262a4749e755367620de40
Rust
joaodelgado/advent-of-code-2019
/src/day02.rs
UTF-8
2,886
3.484375
3
[]
no_license
use super::Day;

/// A minimal Intcode computer (AoC 2019, day 2): opcode 1 adds, opcode 2
/// multiplies, opcode 99 halts. Operands are positional: each operand is an
/// address whose cell holds the value.
struct Computer {
    instructions: Vec<usize>,
    pc: usize,
}

impl Computer {
    fn new(instructions: Vec<usize>) -> Computer {
        Computer {
            pc: 0,
            instructions,
        }
    }

    /// Sets the "noun" and "verb" inputs (addresses 1 and 2).
    fn init(&mut self, noun: usize, verb: usize) {
        self.instructions[1] = noun;
        self.instructions[2] = verb;
    }

    /// Executes instructions until the halt opcode (99) is reached.
    fn run(&mut self) {
        loop {
            match self.next() {
                1 => self.binary_op(|a, b| a + b),
                2 => self.binary_op(|a, b| a * b),
                99 => return,
                op => unreachable!("unknown opcode {}", op),
            }
        }
    }

    /// Reads two positional operands and a destination address, then stores
    /// `op(a, b)` at the destination. Shared by the add and multiply opcodes
    /// (previously duplicated as `add` and `mul`).
    fn binary_op(&mut self, op: fn(usize, usize) -> usize) {
        let a = self.next();
        let a = self.instructions[a];
        let b = self.next();
        let b = self.instructions[b];
        let dest = self.next();
        self.instructions[dest] = op(a, b);
    }

    /// Returns the value at the program counter and advances it.
    fn next(&mut self) -> usize {
        let i = self.pc;
        self.pc += 1;
        self.instructions[i]
    }
}

pub struct Day02;

impl Day<usize, usize> for Day02 {
    /// Part 1: restore the "1202 program alarm" state (noun 12, verb 2),
    /// run the program and report the value left at address 0.
    fn run1(input: &str) -> usize {
        let insts = input.split(',').filter_map(|m| m.parse().ok()).collect();
        let mut computer = Computer::new(insts);
        computer.init(12, 2);
        computer.run();
        computer.instructions[0]
    }

    /// Part 2: brute-force every noun/verb pair in 0..100 until the program
    /// outputs 19690720, then encode the answer as `100 * noun + verb`.
    fn run2(input: &str) -> usize {
        let insts = input
            .split(',')
            .filter_map(|m| m.parse().ok())
            .collect::<Vec<_>>();

        for noun in 0..100 {
            for verb in 0..100 {
                let mut computer = Computer::new(insts.clone());
                computer.init(noun, verb);
                computer.run();

                if computer.instructions[0] == 19690720 {
                    return 100 * noun + verb;
                }
            }
        }
        unreachable!()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_run1() {
        let mut computer = Computer::new(vec![1, 0, 0, 0, 99]);
        computer.run();
        assert_eq!(computer.instructions, vec![2, 0, 0, 0, 99]);

        let mut computer = Computer::new(vec![2, 3, 0, 3, 99]);
        computer.run();
        assert_eq!(computer.instructions, vec![2, 3, 0, 6, 99]);

        let mut computer = Computer::new(vec![2, 4, 4, 5, 99, 0]);
        computer.run();
        assert_eq!(computer.instructions, vec![2, 4, 4, 5, 99, 9801]);

        let mut computer = Computer::new(vec![1, 1, 1, 4, 99, 5, 6, 0, 99]);
        computer.run();
        assert_eq!(computer.instructions, vec![30, 1, 1, 4, 2, 5, 6, 0, 99]);

        assert_eq!(Day02::run1(include_str!("../data/day02")), 5534943);
    }
}
true
7ddb4f2445ad5311781fa8f54fdafefc73b491ca
Rust
mrmanne/advent-of-code-2020
/src/days/day23.rs
UTF-8
3,217
3.109375
3
[ "MIT" ]
permissive
use crate::puzzle::{io, File, Puzzle}; pub struct Day23; fn play(cups: &mut Vec<usize>, mut cur: usize, turns: usize) { let max = cups.len() - 1; for _ in 0..turns { let mut removals = vec![]; let mut next = cups[cur]; for _ in 0..3 { removals.push(next); next = cups[next]; } cups[cur] = next; let mut dest = 0; for i in 1..max + 1 { dest = cur as i32 - (i as i32); if dest <= 0 { dest = max as i32 + dest; } if !removals.contains(&(dest as usize)) { break; } } let tmp = cups[dest as usize]; cups[dest as usize] = removals[0]; cups[removals[2]] = tmp; cur = cups[cur]; } } impl Day23 { fn solve_part1(&self, input: &str, turns: usize) -> String { let input: Vec<usize> = input .chars() .map(|x| x.to_digit(10).unwrap() as usize) .collect(); // Build kind of a linked list vector where you can index a cup label to find the next cup. // This gives O(1) time complexity for insertions and removals. let mut cups: Vec<usize> = vec![0; input.len() + 1]; for i in 0..cups.len() - 1 { cups[input[i]] = input[(i + 1) % input.len()]; } play(&mut cups, input[0], turns); let mut result = String::new(); let mut next = 1; for _ in 1..cups.len() - 1 { result += &cups[next].to_string(); next = cups[next]; } result } fn solve_part2(&self, input: &str, turns: usize) -> usize { let input: Vec<usize> = input .chars() .map(|x| x.to_digit(10).unwrap() as usize) .collect(); // Build kind of a linked list vector where you can index a cup label to find the next cup. // This gives O(1) time complexity for insertions and removals. 
let mut cups: Vec<usize> = vec![0; input.len() + 1]; for i in 0..cups.len() - 2 { cups[input[i]] = input[(i + 1)]; } cups[input[input.len() - 1]] = 10; for i in 10..1000000 { cups.push(i + 1); } cups.push(input[0]); play(&mut cups, input[0], turns); let mut result = String::new(); let mut next = 1; for _ in 1..cups.len() - 1 { result += &cups[next].to_string(); next = cups[next]; } cups[1] as usize * cups[cups[1]] as usize } } impl Puzzle for Day23 { fn solve(&self, _: io::Result<io::Lines<io::BufReader<File>>>) -> (String, String) { return ( self.solve_part1("792845136", 100), self.solve_part2("792845136", 10000000).to_string(), ); } } #[cfg(test)] mod tests { use super::*; #[test] fn part1_example1() { assert_eq!(Day23 {}.solve_part1("389125467", 10), "92658374"); } // Disable this unit test since its too slow to run for every build. // #[test] // fn part2_example1() { // assert_eq!(Day23 {}.solve_part2("389125467", 10000000), 149245887792); // } }
true
1a13a85923c74fe6c94f6fc5f9dc7c9a602b4b0e
Rust
dragon1061/MoonZoon
/crates/zoon/src/web_storage.rs
UTF-8
7,244
3.046875
3
[ "MIT" ]
permissive
use crate::*; use once_cell::race::OnceBox; use web_sys::Storage; pub type Result<T> = std::result::Result<T, Error>; // ------ local_storage ------ pub fn local_storage() -> &'static LocalStorage { static LOCAL_STORAGE: OnceBox<SendWrapper<LocalStorage>> = OnceBox::new(); LOCAL_STORAGE.get_or_init(|| { let storage = LocalStorage::try_new().unwrap_throw(); Box::new(SendWrapper::new(storage)) }) } // ------ session_storage ------ pub fn session_storage() -> &'static SessionStorage { static SESSION_STORAGE: OnceBox<SendWrapper<SessionStorage>> = OnceBox::new(); SESSION_STORAGE.get_or_init(|| { let storage = SessionStorage::try_new().unwrap_throw(); Box::new(SendWrapper::new(storage)) }) } // ------ Error ------ #[derive(thiserror::Error, Debug)] pub enum Error { #[error("the platform does not support the required WebStorage")] StorageNotFoundError, #[error("cannot get access to the required WebStorage")] GetStorageError(JsValue), #[error("cannot insert or update the given key-value pair (error: `{0:?}`)")] InsertError(JsValue), #[cfg(feature = "serde-lite")] #[error("(de)serialization failed (error: `{0}`)")] SerdeError(serde_lite::Error), #[error("(de)serialization to JSON failed (error: `{0}`)")] SerdeJsonError(serde_json::Error), } // ------ LocalStorage ------ /// Local Storage maintains a separate storage area for each given origin /// that persists even when the browser is closed and reopened. /// /// [MDN reference](https://developer.mozilla.org/en-US/docs/Web/API/Window/localStorage) pub struct LocalStorage(Storage); impl WebStorage for LocalStorage { fn try_new() -> Result<Self> { let storage = window() .local_storage() .map_err(Error::GetStorageError)? 
.ok_or(Error::StorageNotFoundError);
        Ok(Self(storage?))
    }

    fn storage(&self) -> &Storage {
        &self.0
    }
}

// ------ SessionStorage ------

/// - Session Storage maintains a separate storage area for each given origin
///   that's available for the duration of the page session
///   (as long as the browser is open, including page reloads and restores).
///
/// - Opening multiple tabs/windows with the same URL creates sessionStorage for each tab/window.
///
/// - Data stored in sessionStorage is specific to the protocol of the page.
///   In other words, _`http://example.com`_ will have separate storage than _`https://example.com`_.
///
/// - Storage limit is larger than a cookie (at most 5MB).
///
/// [MDN reference](https://developer.mozilla.org/en-US/docs/Web/API/Window/sessionStorage)
pub struct SessionStorage(Storage);

impl WebStorage for SessionStorage {
    fn try_new() -> Result<Self> {
        // FIX: this previously called `window().local_storage()`, which made
        // `SessionStorage` a second handle to *Local* Storage instead of the
        // per-tab Session Storage area its documentation describes.
        let storage = window()
            .session_storage()
            .map_err(Error::GetStorageError)?
            .ok_or(Error::StorageNotFoundError)?;
        Ok(Self(storage))
    }

    fn storage(&self) -> &Storage {
        &self.0
    }
}

// ------ WebStorage ------

/// Web Storage API.
///
/// `LocalStorage` and `SessionStorage` implement this trait.
///
/// [MDN reference](https://developer.mozilla.org/en-US/docs/Web/API/Web_Storage_API)
pub trait WebStorage: Sized {
    /// Creates a new instance.
    ///
    /// # Errors
    ///
    /// Returns error if we cannot get access to the storage - security errors,
    /// browser does not have given storage, user denied access for the current origin, etc.
    ///
    /// - [MDN ref for Local Storage](https://developer.mozilla.org/en-US/docs/Web/API/Window/localStorage)
    /// - [MDN ref for Session Storage](https://developer.mozilla.org/en-US/docs/Web/API/Window/sessionStorage)
    fn try_new() -> Result<Self>;

    /// Get the inner `web_sys::Storage` instance.
    ///
    /// This method is used internally by other methods.
    fn storage(&self) -> &Storage;

    /// Clear all data in the storage.
    ///
    /// [MDN reference](https://developer.mozilla.org/en-US/docs/Web/API/Storage/clear)
    fn clear(&self) {
        self.storage().clear().unwrap_throw()
    }

    /// Get the number of stored data items.
    ///
    /// [MDN reference](https://developer.mozilla.org/en-US/docs/Web/API/Storage/length)
    fn len(&self) -> u32 {
        self.storage().length().unwrap_throw()
    }

    /// Returns the key in the given position.
    ///
    /// [MDN reference](https://developer.mozilla.org/en-US/docs/Web/API/Storage/key)
    fn key(&self, index: u32) -> Option<String> {
        self.storage().key(index).unwrap_throw()
    }

    /// Removes a key.
    ///
    /// If there is no item associated with the given key, this method will do nothing.
    ///
    /// [MDN reference](https://developer.mozilla.org/en-US/docs/Web/API/Storage/removeItem)
    fn remove(&self, key: impl AsRef<str>) {
        self.storage().remove_item(key.as_ref()).unwrap_throw()
    }

    /// Returns a deserialized value corresponding to the key.
    ///
    /// # Errors
    ///
    /// Returns error when deserialization fails.
    ///
    /// [MDN reference](https://developer.mozilla.org/en-US/docs/Web/API/Storage/getItem)
    #[cfg(feature = "serde")]
    fn get<T: DeserializeOwned>(&self, key: &str) -> Option<Result<T>> {
        let value = self.storage().get_item(key).unwrap_throw()?;
        // Single-step deserialization; the extra `Ok(...?)` wrapper and inner
        // helper fn in the previous version added nothing.
        Some(serde_json::from_str(&value).map_err(Error::SerdeJsonError))
    }

    /// Returns a deserialized value corresponding to the key (serde-lite variant).
    ///
    /// # Errors
    ///
    /// Returns error when deserialization fails.
    #[cfg(feature = "serde-lite")]
    fn get<T: Deserialize>(&self, key: &str) -> Option<Result<T>> {
        let value = self.storage().get_item(key).unwrap_throw()?;
        // serde-lite needs two steps: parse JSON into an intermediate value,
        // then deserialize that value into `T`.
        fn deserialize<T: Deserialize>(value: &str) -> Result<T> {
            let value = serde_json::from_str(value).map_err(Error::SerdeJsonError)?;
            T::deserialize(&value).map_err(Error::SerdeError)
        }
        Some(deserialize(&value))
    }

    /// Insert a key-value pair. The value will be serialized.
    ///
    /// If the key already exists, the value will be updated.
    ///
    /// # Errors
    ///
    /// Returns error if we cannot serialize the value or insert/update the pair.
    ///
    /// The function `web_sys::Storage::set_item` is used under the hood.
    /// A related warning from MDN docs:
    ///
    /// "setItem() may throw an exception if the storage is full.
    /// Particularly, in Mobile Safari (since iOS 5) it always throws when the user enters private mode.
    /// (Safari sets the quota to 0 bytes in private mode, unlike other browsers,
    /// which allow storage in private mode using separate data containers.)
    /// Hence developers should make sure to always catch possible exceptions from setItem()."
    ///
    /// [MDN reference](https://developer.mozilla.org/en-US/docs/Web/API/Storage/setItem)
    fn insert<T: Serialize + ?Sized>(&self, key: &str, value: &T) -> Result<()> {
        // let value = T::serialize(value).map_err(Error::SerdeError)?; -- for serde-lite
        let value = serde_json::to_string(value).map_err(Error::SerdeJsonError)?;
        self.storage()
            .set_item(key, &value)
            .map_err(Error::InsertError)
    }
}
true
586e1e388eb2d1c4ba9e4a629c221bf33a7c1f27
Rust
ArturKovacs/emulsion
/subcrates/gelatin/src/misc.rs
UTF-8
8,477
3.21875
3
[ "MIT" ]
permissive
use cgmath::Vector2; use glium::glutin::dpi; use std::ops::{Add, AddAssign, Div, Mul, Sub}; /// Used to represent logical pixel coordinates and dimensions. /// /// This struct is distinct from `PhysicalVector` which represents /// physical pixel coordinates and dimensions to avoid /// confusion when dealing with scaled dpi scenarios. #[derive(Copy, Clone, Debug)] pub struct LogicalVector { pub vec: Vector2<f32>, } impl LogicalVector { pub fn new(x: f32, y: f32) -> Self { LogicalVector { vec: Vector2::new(x, y) } } } impl Default for LogicalVector { fn default() -> LogicalVector { LogicalVector { vec: Vector2::<f32>::new(0.0, 0.0) } } } impl Add for LogicalVector { type Output = Self; fn add(self, other: LogicalVector) -> Self::Output { (self.vec + other.vec).into() } } impl AddAssign for LogicalVector { fn add_assign(&mut self, other: LogicalVector) { self.vec += other.vec; } } impl Sub for LogicalVector { type Output = Self; fn sub(self, other: LogicalVector) -> Self::Output { (self.vec - other.vec).into() } } impl<T: Into<f32>> Mul<T> for LogicalVector { type Output = Self; fn mul(self, other: T) -> Self::Output { (self.vec * other.into()).into() } } impl Mul<LogicalVector> for f32 { type Output = LogicalVector; fn mul(self, other: LogicalVector) -> Self::Output { (self * other.vec).into() } } impl<T: Into<f32>> Div<T> for LogicalVector { type Output = Self; fn div(self, other: T) -> Self::Output { (self.vec / other.into()).into() } } impl<T: Into<f32>> From<Vector2<T>> for LogicalVector { fn from(other: Vector2<T>) -> LogicalVector { LogicalVector { vec: Vector2::new(other.x.into(), other.y.into()) } } } impl From<dpi::LogicalSize<f32>> for LogicalVector { fn from(other: dpi::LogicalSize<f32>) -> LogicalVector { LogicalVector { vec: Vector2::new(other.width, other.height) } } } impl From<LogicalVector> for dpi::LogicalSize<f32> { fn from(vec: LogicalVector) -> Self { dpi::LogicalSize::<f32> { width: vec.vec.x, height: vec.vec.y } } } impl 
From<dpi::LogicalPosition<f32>> for LogicalVector { fn from(other: dpi::LogicalPosition<f32>) -> LogicalVector { LogicalVector { vec: Vector2::new(other.x, other.y) } } } impl From<LogicalVector> for dpi::LogicalPosition<f32> { fn from(vec: LogicalVector) -> Self { dpi::LogicalPosition::<f32> { x: vec.vec.x, y: vec.vec.y } } } pub trait FromPhysical<T> { fn from_physical(source: T, scale_factor: f32) -> Self; } impl<T: Into<f64>> FromPhysical<dpi::PhysicalSize<T>> for LogicalVector { fn from_physical(source: dpi::PhysicalSize<T>, scale_factor: f32) -> Self { let vec = Vector2::new(source.width.into() as f32, source.height.into() as f32); LogicalVector { vec: vec / scale_factor } } } impl<T: Into<f64>> FromPhysical<dpi::PhysicalPosition<T>> for LogicalVector { fn from_physical(source: dpi::PhysicalPosition<T>, scale_factor: f32) -> Self { let vec = Vector2::new(source.x.into() as f32, source.y.into() as f32); LogicalVector { vec: vec / scale_factor } } } #[derive(Debug, Default, Copy, Clone)] pub struct LogicalRect { /// The position of the top left corner of this rectangle pub pos: LogicalVector, pub size: LogicalVector, } impl LogicalRect { #[inline] pub fn left(&self) -> f32 { self.pos.vec.x } #[inline] pub fn right(&self) -> f32 { self.pos.vec.x + self.size.vec.x } #[inline] pub fn bottom(&self) -> f32 { self.pos.vec.y + self.size.vec.y } #[inline] pub fn top(&self) -> f32 { self.pos.vec.y } #[inline] pub fn center(&self) -> LogicalVector { self.pos + self.size * 0.5 } pub fn contains(&self, point: LogicalVector) -> bool { point.vec.x > self.pos.vec.x && point.vec.x < self.pos.vec.x + self.size.vec.x && point.vec.y > self.pos.vec.y && point.vec.y < self.pos.vec.y + self.size.vec.y } /// Set the position and the size so that they will line up /// with pyhsical display pixels. 
pub fn align_to_pixels(mut self, dpi_scale: f32) -> LogicalRect { // Note that this `phys_pos` is not in OpenGl coordinates // because in `phys_pos` the top left corner is (0, 0) // but in OpenGL the bottom left is. let mut phys_pos = self.pos * dpi_scale; let mut phys_size = self.size * dpi_scale; phys_pos.vec.x = phys_pos.vec.x.round(); phys_pos.vec.y = phys_pos.vec.y.round(); phys_size.vec.x = phys_size.vec.x.round(); phys_size.vec.y = phys_size.vec.y.round(); self.pos = phys_pos / dpi_scale; self.size = phys_size / dpi_scale; self } } #[derive(Debug, Copy, Clone)] pub enum Length { Fixed(f32), Stretch { min: f32, max: f32 }, } impl Default for Length { fn default() -> Length { Length::Fixed(256.0) } } #[derive(Debug, Copy, Clone)] pub enum Alignment { Start, Center, End, } impl Default for Alignment { fn default() -> Alignment { Alignment::Start } } #[derive(Default, Debug, Copy, Clone)] pub struct WidgetPlacement { pub width: Length, pub height: Length, pub horizontal_align: Alignment, pub vertical_align: Alignment, pub ignore_layout: bool, pub margin_left: f32, pub margin_right: f32, pub margin_top: f32, pub margin_bottom: f32, } /// Used to represent physical pixel coordinates and dimensions. 
/// /// See `LogicalVector` #[derive(Copy, Clone, Debug)] pub struct PhysicalVector { pub vec: Vector2<f32>, } // TODO implement stuff for physical vector pub trait PickDimension { fn vec_mut(v: &mut LogicalVector) -> &mut f32; fn vec(v: LogicalVector) -> f32; fn margin_start_mut(placement: &mut WidgetPlacement) -> &mut f32; fn margin_start(placement: &WidgetPlacement) -> f32; fn margin_end_mut(placement: &mut WidgetPlacement) -> &mut f32; fn margin_end(placement: &WidgetPlacement) -> f32; fn alignment_mut(placement: &mut WidgetPlacement) -> &mut Alignment; fn alignment(placement: &WidgetPlacement) -> Alignment; fn extent_mut(placement: &mut WidgetPlacement) -> &mut Length; fn extent(placement: &WidgetPlacement) -> Length; fn rect_pos_mut(rect: &mut LogicalRect) -> &mut f32; fn rect_pos(rect: &LogicalRect) -> f32; fn rect_size_mut(rect: &mut LogicalRect) -> &mut f32; fn rect_size(rect: &LogicalRect) -> f32; } pub struct HorDim {} impl PickDimension for HorDim { fn vec_mut(v: &mut LogicalVector) -> &mut f32 { &mut v.vec.x } fn vec(v: LogicalVector) -> f32 { v.vec.x } fn margin_start_mut(placement: &mut WidgetPlacement) -> &mut f32 { &mut placement.margin_left } fn margin_start(placement: &WidgetPlacement) -> f32 { placement.margin_left } fn margin_end_mut(placement: &mut WidgetPlacement) -> &mut f32 { &mut placement.margin_right } fn margin_end(placement: &WidgetPlacement) -> f32 { placement.margin_right } fn alignment_mut(placement: &mut WidgetPlacement) -> &mut Alignment { &mut placement.horizontal_align } fn alignment(placement: &WidgetPlacement) -> Alignment { placement.horizontal_align } fn extent_mut(placement: &mut WidgetPlacement) -> &mut Length { &mut placement.width } fn extent(placement: &WidgetPlacement) -> Length { placement.width } fn rect_pos_mut(rect: &mut LogicalRect) -> &mut f32 { &mut rect.pos.vec.x } fn rect_pos(rect: &LogicalRect) -> f32 { rect.pos.vec.x } fn rect_size_mut(rect: &mut LogicalRect) -> &mut f32 { &mut rect.size.vec.x } fn 
rect_size(rect: &LogicalRect) -> f32 { rect.size.vec.x } } pub struct VerDim {} impl PickDimension for VerDim { fn vec_mut(v: &mut LogicalVector) -> &mut f32 { &mut v.vec.y } fn vec(v: LogicalVector) -> f32 { v.vec.y } fn margin_start_mut(placement: &mut WidgetPlacement) -> &mut f32 { &mut placement.margin_top } fn margin_start(placement: &WidgetPlacement) -> f32 { placement.margin_top } fn margin_end_mut(placement: &mut WidgetPlacement) -> &mut f32 { &mut placement.margin_bottom } fn margin_end(placement: &WidgetPlacement) -> f32 { placement.margin_bottom } fn alignment_mut(placement: &mut WidgetPlacement) -> &mut Alignment { &mut placement.vertical_align } fn alignment(placement: &WidgetPlacement) -> Alignment { placement.vertical_align } fn extent_mut(placement: &mut WidgetPlacement) -> &mut Length { &mut placement.height } fn extent(placement: &WidgetPlacement) -> Length { placement.height } fn rect_pos_mut(rect: &mut LogicalRect) -> &mut f32 { &mut rect.pos.vec.y } fn rect_pos(rect: &LogicalRect) -> f32 { rect.pos.vec.y } fn rect_size_mut(rect: &mut LogicalRect) -> &mut f32 { &mut rect.size.vec.y } fn rect_size(rect: &LogicalRect) -> f32 { rect.size.vec.y } }
true
837bb10fac90ad3ceb69a750e6044be367943a8b
Rust
EdShaw/rust-sdl2
/src/timer.rs
UTF-8
679
2.53125
3
[ "MIT" ]
permissive
// NOTE(review): this file targets pre-1.0 Rust (`std::libc`, the `externfn!`
// macro, and the `uint` type were all removed before Rust 1.0); it will not
// build with a modern toolchain and is documented here as-is.

// Low-level FFI declarations for SDL2's timer subsystem (SDL_timer.h).
pub mod ll {
    use std::libc::{uint32_t, uint64_t};
    //SDL_timer.h
    externfn!(fn SDL_GetTicks() -> uint32_t)
    externfn!(fn SDL_GetPerformanceCounter() -> uint64_t)
    externfn!(fn SDL_GetPerformanceFrequency() -> uint64_t)
    externfn!(fn SDL_Delay(ms: uint32_t))
    //TODO: Figure out what to do with the timer callback functions
}

// Safe wrapper over `SDL_GetTicks`; widens the 32-bit tick count to `uint`.
pub fn get_ticks() -> uint {
    unsafe { ll::SDL_GetTicks() as uint }
}

// Safe wrapper over `SDL_GetPerformanceCounter` (raw 64-bit counter value).
pub fn get_performance_counter() -> u64 {
    unsafe { ll::SDL_GetPerformanceCounter() }
}

// Safe wrapper over `SDL_GetPerformanceFrequency` (counter ticks per second).
pub fn get_performance_frequency() -> u64 {
    unsafe { ll::SDL_GetPerformanceFrequency() }
}

// Safe wrapper over `SDL_Delay`; `ms` is narrowed to `u32` for the FFI call,
// so values above `u32::MAX` milliseconds would be truncated.
pub fn delay(ms: uint) {
    unsafe { ll::SDL_Delay(ms as u32) }
}
true
811a23a018aea0ed38e382a83c0932b28bc07637
Rust
neelakantankk/rust_book_vec_operations
/src/main.rs
UTF-8
2,316
3.5
4
[]
no_license
extern crate rand;

use rand::Rng;
use std::cmp::Ordering;
use std::collections::HashMap;

/// Fills a vector with 500 random numbers in [LOWER, UPPER), then prints
/// their average, median (after sorting), and mode(s).
fn main() {
    let mut numbers: Vec<u32> = Vec::new();
    println!("Populating Vector...");
    const LOWER: u32 = 0;
    const UPPER: u32 = 50;
    for _i in 0..500 {
        let input: u32 = rand::thread_rng().gen_range(LOWER, UPPER);
        numbers.push(input);
    }
    println!("Numbers = {:?}", numbers);
    println!("The average is {:.3}", average(&numbers));
    user_sort(&mut numbers);
    // Median of the sorted vector (upper median for even lengths).
    println!("The median value is {}", numbers[numbers.len() / 2]);
    let frequencies = get_frequencies(&numbers);
    let mode = get_mode(&frequencies);
    match mode.len().cmp(&1) {
        Ordering::Equal => println!("The mode is {}", &mode[0]),
        Ordering::Greater => println!("The modes are {:?}", mode),
        Ordering::Less => println!("No Modes! Error!"),
    }
}

/// Arithmetic mean of `numbers`.
///
/// FIX: the original accumulated the sum in a `u32`, which can overflow for
/// large or numerous inputs; we now sum in `u64`. Returns NaN for an empty
/// slice (0.0 / 0.0), matching the previous behavior.
fn average(numbers: &[u32]) -> f64 {
    let sum: u64 = numbers.iter().map(|&n| u64::from(n)).sum();
    sum as f64 / numbers.len() as f64
}

/// In-place selection sort (ascending).
///
/// FIX: the original moved the minimum with `remove` + `insert`, each of
/// which shifts the tail of the vector; a single `swap` achieves the same
/// sorted result without the extra O(n) shifting per pass.
fn user_sort(numbers: &mut Vec<u32>) {
    for index in 0..numbers.len() {
        let mut smallest_index = index;
        for candidate in (index + 1)..numbers.len() {
            if numbers[candidate] < numbers[smallest_index] {
                smallest_index = candidate;
            }
        }
        numbers.swap(index, smallest_index);
    }
}

/// Counts how many times each value occurs in `numbers`.
fn get_frequencies(numbers: &[u32]) -> HashMap<u32, u32> {
    let mut frequencies: HashMap<u32, u32> = HashMap::new();
    for item in numbers {
        // Entry API: one hash lookup per element.
        *frequencies.entry(*item).or_insert(0) += 1;
    }
    frequencies
}

/// Returns every value tied for the highest frequency.
/// Order of ties follows `HashMap` iteration order (unspecified), as before.
fn get_mode(frequencies: &HashMap<u32, u32>) -> Vec<u32> {
    let mut mode: Vec<u32> = Vec::new();
    let mut largest = 0;
    for (item, frequency) in frequencies {
        match frequency.cmp(&largest) {
            Ordering::Greater => {
                // New maximum: previous candidates are no longer modes.
                mode.clear();
                mode.push(*item);
                largest = *frequency;
            }
            Ordering::Equal => mode.push(*item),
            Ordering::Less => continue,
        }
    }
    mode
}
true
922e97f2a45cff1d36b3072078c17724591eff2c
Rust
darkedge/advent-of-code-2020
/day08/src/main.rs
UTF-8
6,762
3.671875
4
[]
no_license
use std::fs::File;
use std::io::prelude::*;
use std::io::BufReader;

// Advent of Code 2020, day 8: a tiny VM with three opcodes, one per input line:
//   acc <n>  add n to the accumulator, then advance one instruction
//   jmp <n>  jump n instructions relative to the current one
//   nop <n>  do nothing, advance one instruction
// Part one: the accumulator value just before any instruction runs a second
// time (the program is an infinite loop). Part two: flipping exactly one
// jmp<->nop makes the program terminate (by stepping just past the last
// instruction); report the accumulator at that point.

/// Executes one instruction, mutating `accumulator`, and returns the signed
/// offset from this instruction to the next one (1 for `acc`/`nop`, the
/// argument for `jmp`).
fn execute_instruction(instruction: &str, accumulator: &mut i32) -> i32 {
    let mut tokens = instruction.split(' ');
    let keyword = tokens.next();
    // `i32::from_str` already accepts an optional leading '+' or '-', so the
    // previous `replace('+', "")` allocation was unnecessary.
    let val: i32 = tokens.next().unwrap().parse().unwrap();
    match keyword {
        Some("acc") => {
            *accumulator += val;
            1
        }
        Some("jmp") => val,
        _ => 1,
    }
}

/// Runs the program until an instruction is about to repeat; returns the
/// accumulator at that moment.
fn part_one() -> std::io::Result<i32> {
    let file = File::open("input")?;
    let lines: Vec<_> = BufReader::new(file).lines().collect();
    let mut history = vec![];
    let mut instruction_index = 0;
    let mut accumulator = 0;
    while !history.contains(&instruction_index) {
        history.push(instruction_index);
        if let Ok(instruction) = &lines[instruction_index as usize] {
            instruction_index += execute_instruction(instruction, &mut accumulator);
        }
    }
    Ok(accumulator)
}

/// Tries flipping each jmp<->nop in turn until the program terminates;
/// returns the final accumulator (or Ok(0) if no single flip works).
fn part_two() -> std::io::Result<i32> {
    let file = File::open("input")?;
    let mut lines: Vec<_> = BufReader::new(file).lines().map(Result::unwrap).collect();
    // FIX: this loop was `0..=lines.len()`, which indexes one past the end of
    // `lines` and panics on the final iteration whenever no single flip makes
    // the program terminate.
    for i in 0..lines.len() {
        let original = lines[i].clone();
        // Flip the opcode for this candidate line (acc lines are left alone).
        lines[i] = match original.split(' ').next() {
            Some("jmp") => original.replace("jmp", "nop"),
            Some("nop") => original.replace("nop", "jmp"),
            _ => original.clone(),
        };
        let mut instruction_index = 0;
        let mut accumulator = 0;
        let mut history = vec![];
        while !history.contains(&instruction_index) {
            history.push(instruction_index);
            let instruction = &lines[instruction_index as usize];
            instruction_index += execute_instruction(instruction, &mut accumulator);
            // Stepping exactly one past the last instruction means the
            // program terminated normally.
            if instruction_index as usize == lines.len() {
                println!("Fixed line {}", i);
                return Ok(accumulator);
            }
        }
        // This flip looped forever; restore the line before trying the next.
        lines[i] = original;
    }
    Ok(0)
}

fn main() {
    println!("=== Advent of Code Day 8 ===");
    println!("Part One: {}", part_one().unwrap_or(0));
    println!("Part Two: {}", part_two().unwrap_or(0));
}
c0a82dfec335c489fd95293aa1c77e73934c31da
Rust
fredmorcos/attic
/Snippets/Rust/sdl-windows/src/main.rs
UTF-8
3,532
2.875
3
[ "Unlicense" ]
permissive
use derive_more::From; use log::trace; use sdl2::event::{Event, WindowEvent}; use sdl2::pixels::Color; use sdl2::render::WindowCanvas; use sdl2::video::WindowBuildError; use sdl2::VideoSubsystem; use std::collections::HashMap; use std::fmt::{self, Debug}; use std::process; use std::sync::mpsc; use thiserror::Error; #[derive(From, Error)] enum Error { #[error("Cannot initialize logger: {0}")] LogInit(log::SetLoggerError), #[error("SDL: {0}")] SDL(String), #[error("SDL-Window: {0}")] SDLWindow(WindowBuildError), #[error("SDL-Canvas: {0}")] SDLCanvas(sdl2::IntegerOrSdlError), } impl Debug for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self) } } // A message queue from the perspective of the child window. struct MessageQ { incoming_pair: (mpsc::Sender<Event>, mpsc::Receiver<Event>), outgoing_pair: (mpsc::Sender<Event>, mpsc::Receiver<Event>), } impl MessageQ { fn new() -> Self { Self { incoming_pair: mpsc::channel(), outgoing_pair: mpsc::channel(), } } } fn create_window( windows: &mut HashMap<u32, (WindowCanvas, MessageQ)>, vid: &VideoSubsystem, title: &str, width: u32, height: u32, resizable: bool, ) -> Result<u32, Error> { let mut win_builder = vid.window(title, width, height); win_builder.allow_highdpi().opengl(); if resizable { win_builder.resizable(); } let win = win_builder.build()?; let id = win.id(); let canvas = win.into_canvas().accelerated().present_vsync().build()?; windows.insert(id, (canvas, MessageQ::new())); Ok(id) } fn main() -> Result<(), Error> { env_logger::Builder::new() .filter_level(log::LevelFilter::Trace) .try_init()?; trace!("Process {}", process::id()); let sdl_ctx = sdl2::init()?; let vid = sdl_ctx.video()?; let mut windows = HashMap::new(); let main_win = create_window(&mut windows, &vid, "Rine", 800, 600, true)?; windows.get_mut(&main_win).and_then(|(w, _)| { w.set_draw_color(Color::RGB(255, 255, 255)); w.clear(); w.present(); Some(()) }); let main_win_2 = create_window(&mut windows, &vid, "Rine 2", 
800, 600, false)?; windows.get_mut(&main_win_2).and_then(|(w, _)| { w.set_draw_color(Color::RGB(0, 255, 255)); w.clear(); w.present(); Some(()) }); let mut evq = sdl_ctx.event_pump()?; 'mainloop: loop { for ev in evq.wait_iter() { match ev { Event::Quit { .. } => { trace!("Quit"); break 'mainloop; } Event::Window { window_id, win_event, .. } => match win_event { WindowEvent::Exposed | WindowEvent::Resized(_, _) => { trace!("Window {} expose", window_id); windows.get_mut(&window_id).and_then(|(w, _)| { w.clear(); w.present(); Some(()) }); } WindowEvent::Close => { trace!("Window {} close", window_id); windows.remove(&window_id); } _ => {} }, _ => {} } } } Ok(()) }
true
651be25f76eee3fed7ca3947938d39036d488e0b
Rust
PrismaPhonic/Learn-Rust
/Chapter-13/workout-closures/src/main.rs
UTF-8
2,749
3.71875
4
[ "MIT" ]
permissive
use std::collections::HashMap;
use std::hash::Hash;
use std::thread;
use std::time::Duration;

/// Memoizing wrapper around a single-argument closure: each distinct argument
/// is computed once and the result cached in a `HashMap`.
struct Cacher<T, V, Y>
where
    T: Fn(V) -> Y,
    V: Eq + Copy + Hash,
    Y: Eq + Copy,
{
    // The (possibly expensive) closure being memoized.
    calculation: T,
    // Cache of argument -> previously computed result.
    value: HashMap<V, Y>,
}

impl<T, V, Y> Cacher<T, V, Y>
where
    T: Fn(V) -> Y,
    V: Eq + Copy + Hash,
    Y: Eq + Copy,
{
    /// Wraps `calculation` with an empty cache; nothing runs until `value`.
    fn new(calculation: T) -> Cacher<T, V, Y> {
        Cacher {
            calculation,
            value: HashMap::new(),
        }
    }

    /// Returns the cached result for `arg`, computing and storing it on first use.
    ///
    /// FIX: the original did `contains_key` followed by `get` (hashing the key
    /// twice) plus a redundant `arg.clone()` on a `Copy` type; a single `get`
    /// is sufficient.
    fn value(&mut self, arg: V) -> Y {
        if let Some(&cached) = self.value.get(&arg) {
            return cached;
        }
        let computed = (self.calculation)(arg);
        self.value.insert(arg, computed);
        computed
    }
}

/// Prints a workout plan; the expensive "calculation" (2s sleep) runs at most
/// once per intensity value thanks to the `Cacher`.
fn generate_workout(intensity: u32, random_number: u32) {
    let mut expensive_result = Cacher::new(|num| {
        println!("calculating slowly...");
        thread::sleep(Duration::from_secs(2));
        num * 2
    });
    if intensity < 25 {
        println!("Today, do {} pushups!", expensive_result.value(intensity));
        println!("Next, do {} situps!", expensive_result.value(intensity));
    } else if random_number == 3 {
        println!("Take a break today! Remember to stay hydrated!");
    } else {
        println!("Today, run for {} minutes!", expensive_result.value(intensity));
    }
}

// `main` calls `generate_workout` with simulated values - these would come
// from a phone app in reality.
fn main() {
    let simulated_user_specified_value = 27;
    let simulated_random_number = 5;
    generate_workout(simulated_user_specified_value, simulated_random_number);
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn call_with_different_values() {
        let mut c = Cacher::new(|a| a);
        let _v1 = c.value(1);
        let v2 = c.value(2);
        assert_eq!(v2, 2);
    }

    #[test]
    fn call_with_varying_types() {
        let mut c = Cacher::new(|a: &str| -> usize { a.len() });
        let mut c2 = Cacher::new(|a: char| -> usize { a.len_utf8() });
        let v1 = c.value("yes");
        let v2 = c2.value('A');
        assert_eq!(v1, 3);
        // assert 'A' char is 1 byte in size
        assert_eq!(v2, 1);
    }
}
d77a64c543f47ced8c63db9c130a40e359af0a13
Rust
uzairhasankhan/Rust-Based-Ludo-Game
/for_loop/src/main.rs
UTF-8
1,049
4.03125
4
[]
no_license
/// A compass direction.
#[derive(Debug)]
enum Direction {
    North,
    South,
    East,
    West,
}

impl Direction {
    /// Returns the next direction in the fixed cycle
    /// North -> South -> East -> West -> North.
    fn turn(&self) -> Self {
        match self {
            Direction::North => Direction::South,
            Direction::South => Direction::East,
            Direction::East => Direction::West,
            Direction::West => Direction::North,
        }
    }
}

fn main() {
    // Walk every variant and print what it turns into.
    let all_directions = [
        Direction::North,
        Direction::South,
        Direction::East,
        Direction::West,
    ];
    for direction in all_directions.iter() {
        println!("{:?}", direction.turn());
    }
}
true
c1b0fc3ddb253421f8b1d90c3f10f1f80ccd25f7
Rust
yaspoon/adventOfCode2020
/day2_1/src/main.rs
UTF-8
2,336
3.484375
3
[]
no_license
use std::path::Path;
use std::fs::File;
use std::io::BufReader;
use std::io::prelude::*;

/// One "<min>-<max> <char>: <password>" policy line from the puzzle input.
struct PasswordPolicy {
    min: usize,
    max: usize,
    character: char,
    password: String,
}

impl PasswordPolicy {
    /// Parses a line like "1-3 a: abcde".
    ///
    /// # Panics
    /// Panics if the line does not have exactly three space-separated parts,
    /// if the "min-max" field is not two integers, or if the character field
    /// is empty.
    fn new(line: String) -> PasswordPolicy {
        let parts: Vec<&str> = line.split(' ').collect();
        if parts.len() != 3 {
            panic!("Expected only 3 parts got {}", parts.len());
        }
        let min_max: Vec<usize> = parts[0]
            .split('-')
            .map(|s| s.parse::<usize>().unwrap())
            .collect();
        if min_max.len() != 2 {
            panic!("Expected only two minMaxs got {}", min_max.len());
        }
        PasswordPolicy {
            min: min_max[0],
            max: min_max[1],
            // Drop the trailing ':' and take the policy character.
            character: parts[1].replace(":", "").chars().next().unwrap(),
            password: parts[2].to_string(),
        }
    }
}

/// Reads the file at `path` and returns its lines, dropping blank/one-char
/// lines (e.g. the trailing newline artifact).
///
/// # Panics
/// Panics if the file cannot be opened or read.
fn read_input(path: &Path) -> Vec<String> {
    let file = match File::open(path) {
        Ok(f) => f,
        Err(e) => panic!("Failed to open path:{}", e),
    };
    let mut contents = String::new();
    // FIX: a read failure used to be logged and silently ignored, letting the
    // program continue with empty input; treat it as fatal instead.
    if let Err(e) = BufReader::new(file).read_to_string(&mut contents) {
        panic!("Failed to read in string:{}", e);
    }
    contents
        .split('\n')
        .filter(|s| s.len() > 1)
        .map(|s| s.to_string())
        .collect()
}

/// Parses every input line into a `PasswordPolicy`.
fn generate_password_policies(lines: Vec<String>) -> Vec<PasswordPolicy> {
    lines.into_iter().map(PasswordPolicy::new).collect()
}

fn main() {
    let input = read_input(Path::new("./input"));
    let password_policies = generate_password_policies(input);
    // A password is valid when its policy character occurs between
    // `min` and `max` times (inclusive).
    let valid = password_policies
        .into_iter()
        .filter(|pp| {
            let occurrences = pp.password.chars().filter(|c| *c == pp.character).count();
            pp.min <= occurrences && occurrences <= pp.max
        })
        .count();
    println!("valid:{}", valid);
}
true
fab8e049a34ac91a7cc1561de5e9b097f355d1c5
Rust
bantic/project-euler
/problem9/problem9.rs
UTF-8
445
3.484375
3
[]
no_license
// Project Euler problem 9: search for the Pythagorean triple (a, b, c) with
// a + b + c == 1000 and print a * b * c.
//
// NOTE(review): this is pre-1.0 Rust (`int`, `range(100i, 500)`, the `{:d}`
// format specifier); it will not compile with a modern toolchain and is
// documented here as-is.

// True when `num` is a perfect square.
// NOTE(review): relies on f32 sqrt having an exact fractional part of zero.
// f32's 24-bit mantissa represents the values reachable here
// (a*a + b*b < 500_000) exactly, but this test is not safe for larger inputs.
fn is_perfect(num: int) -> bool {
  let root = (num as f32).sqrt();
  return root.fract() == 0f32;
}

fn main() {
  // Brute-force the two legs; the hypotenuse is derived from a^2 + b^2.
  for a in range(100i, 500) {
    for b in range(100i, 500) {
      let prod = a*a + b*b;
      if is_perfect(prod) {
        let c = (prod as f32).sqrt() as int;
        if a+b+c == 1000 {
          println!("Got it: {:d}*{:d}*{:d}={:d}",a,b,c,(a*b*c));
        }
      }
    }
  }
}
true
9306b169c542fc5e69371ee25550945e6e55db6e
Rust
jeffilluminati/sqtoy
/src/main.rs
UTF-8
7,773
2.515625
3
[ "MIT" ]
permissive
#![allow(dead_code)] #[macro_use] extern crate gfx; extern crate gfx_window_glutin; extern crate glutin; extern crate rand; extern crate image; use gfx::traits::FactoryExt; use gfx::Device; use gfx_window_glutin as gfx_glutin; pub type ColorFormat = gfx::format::Srgba8; pub type DepthFormat = gfx::format::DepthStencil; gfx_defines! { vertex Vertex { pos: [f32; 2] = "a_Pos", uv: [f32; 2] = "a_Uv", color: [f32; 3] = "a_Color", } pipeline pipe { vbuf: gfx::VertexBuffer<Vertex> = (), awesome: gfx::TextureSampler<[f32; 4]> = "t_Awesome", switch: gfx::Global<i32> = "i_Switch", out: gfx::RenderTarget<ColorFormat> = "Target0", } } #[derive(Debug, Clone, Copy)] struct Square { pub pos: (f32, f32), pub size: f32, pub color: [f32; 3] } #[derive(Debug, Clone, Copy)] enum Cursor { Plain((f32, f32), [f32; 3]), Growing((f32, f32), f32, [f32; 3]) } impl Cursor { fn to_square(self) -> Square { match self { Cursor::Plain(xy, color) => Square { pos: xy, size: 0.05, color }, Cursor::Growing(xy, size, color) => Square { pos: xy, size, color }, } } } // A cube is a pile of infinitely (as continuum) many squares // This data stucture is finite, so we call it “pseudo” #[derive(Debug)] struct Pseudocube { cursor: Cursor, squares: Vec<Square>, ratio: f32, } impl Pseudocube { pub fn new() -> Self { Pseudocube { cursor: Cursor::Plain((0.0, 0.0), WHITE), squares: vec![], ratio: 1.0, } } pub fn add_square(&mut self, x: f32, y: f32, size: f32, color: [f32; 3]) { let sq = Square { pos: (x, y), size, color }; self.squares.push(sq); } pub fn get_vertices_indices(&self) -> (Vec<Vertex>, Vec<u16>) { let (mut vs, mut is) = (vec![], vec![]); let cursor = self.cursor.to_square(); for (i, sq) in self.squares.iter().chain(Some(&cursor)).enumerate() { let (pos, half) = (sq.pos, 0.5 * sq.size); let i = i as u16; let (hx, hy); if self.ratio > 1.0 { hx = half / self.ratio; hy = half; } else { hx = half; hy = half * self.ratio; } vs.extend(&[ Vertex { pos: [pos.0 + hx, pos.1 - hy], uv: [1.0, 1.0], color: 
sq.color }, Vertex { pos: [pos.0 - hx, pos.1 - hy], uv: [0.0, 1.0], color: sq.color }, Vertex { pos: [pos.0 - hx, pos.1 + hy], uv: [0.0, 0.0], color: sq.color }, Vertex { pos: [pos.0 + hx, pos.1 + hy], uv: [1.0, 0.0], color: sq.color }, ]); is.extend(&[ 4*i, 4*i + 1, 4*i + 2, 4*i + 2, 4*i + 3, 4*i ]); } (vs, is) } pub fn update_ratio(&mut self, ratio: f32) { self.ratio = ratio } pub fn update_cursor_position(&mut self, x: f32, y: f32) { let x = 2.0*x - 1.0; let y = -2.0*y + 1.0; let cursor = match self.cursor { Cursor::Plain(_, color) => Cursor::Plain((x, y), color), Cursor::Growing(_, size, color) => Cursor::Growing((x, y), size, color), }; self.cursor = cursor; } pub fn start_growing(&mut self) { if let Cursor::Plain(xy, color) = self.cursor { self.cursor = Cursor::Growing(xy, 0.05, color) } } pub fn stop_growing(&mut self) { if let Cursor::Growing(xy, size, color) = self.cursor { self.squares.push (Cursor::Growing(xy, size, color).to_square()); self.cursor = Cursor::Plain(xy, rand::random()) } } pub fn tick(&mut self) { if let Cursor::Growing(xy, size, color) = self.cursor { self.cursor = Cursor::Growing(xy, size + 0.01, color) } } } const BLACK: [f32; 4] = [0.0, 0.0, 0.0, 1.0]; const WHITE: [f32; 3] = [1.0, 1.0, 1.0]; fn load_texture<F, R>(factory: &mut F, path: &str) -> gfx::handle::ShaderResourceView<R, [f32; 4]> where F: gfx::Factory<R>, R: gfx::Resources { let img = image::open(path).unwrap().to_rgba(); let (width, height) = img.dimensions(); let kind = gfx::texture::Kind::D2(width as u16, height as u16, gfx::texture::AaMode::Single); let (_, view) = factory.create_texture_immutable_u8::<ColorFormat>(kind, &[&img]).unwrap(); view } pub fn main() { let mut cube = Pseudocube::new(); //cube.add_square(0.0, 0.0, 1.0, WHITE); let events_loop = glutin::EventsLoop::new(); let builder = glutin::WindowBuilder::new() .with_title("Square Toy".to_string()) .with_dimensions(800, 800) .with_vsync(); let (window, mut device, mut factory, mut main_color, mut main_depth) = 
gfx_glutin::init::<ColorFormat, DepthFormat>(builder, &events_loop); let mut encoder: gfx::Encoder<_, _> = factory.create_command_buffer().into(); let pso = factory.create_pipeline_simple( include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/shaders/rect_150.glslv")), include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/shaders/rect_150.glslf")), pipe::new() ).unwrap(); let (vertices, indices) = cube.get_vertices_indices(); let (vertex_buffer, mut slice) = factory.create_vertex_buffer_with_slice(&vertices, &*indices); let texture = load_texture(&mut factory, "assets/awesome.png"); let sampler = factory.create_sampler_linear(); let mut data = pipe::Data { vbuf: vertex_buffer, awesome: (texture, sampler), switch: 0, out: main_color.clone() }; let mut running = true; let mut needs_update = false; let mut window_size = (800.0, 800.0); while running { if needs_update { let (vs, is) = cube.get_vertices_indices(); let (vbuf, sl) = factory.create_vertex_buffer_with_slice(&vs, &*is); data.vbuf = vbuf; data.out = main_color.clone(); slice = sl; needs_update = false } events_loop.poll_events(|glutin::Event::WindowEvent{window_id: _, event}| { use glutin::WindowEvent::*; use glutin::{MouseButton, ElementState, VirtualKeyCode}; match event { KeyboardInput(_, _, Some(VirtualKeyCode::Escape), _) | Closed => running = false, Resized(w, h) => { gfx_glutin::update_views(&window, &mut main_color, &mut main_depth); cube.update_ratio(w as f32 / h as f32); window_size = (w as f32, h as f32); needs_update = true }, MouseMoved(x, y) => { cube.update_cursor_position(x as f32 / window_size.0, y as f32 / window_size.1); needs_update = true }, MouseInput(ElementState::Pressed, MouseButton::Left) => cube.start_growing(), MouseInput(ElementState::Released, MouseButton::Left) => cube.stop_growing(), KeyboardInput(ElementState::Pressed, _, Some(VirtualKeyCode::Space), _) => if data.switch == 0 { data.switch = 1 } else { data.switch = 0 }, _ => (), } cube.tick(); }); encoder.clear(&main_color, 
BLACK); encoder.draw(&slice, &pso, &data); encoder.flush(&mut device); window.swap_buffers().unwrap(); device.cleanup(); } }
true
a796f436a176cfed5be6a33943b5673a465dae21
Rust
tsoernes/ndarray
/src/doc/various_utils/mod.rs
UTF-8
3,461
4
4
[ "MIT", "Apache-2.0" ]
permissive
//! Various operations on `ndarray`s. //! //! # Sorting //! Sorting `ndarray`s can be achieved by using the //! [`sort` functions from the standard library][https://doc.rust-lang.org/stable/std/primitive.slice.html#method.sort_unstable]. //! As a basic example, here is how to sort an array of integers using `sort_unstable` //! from the standard library: //! ``` //! extern crate ndarray; //! use ndarray::*; //! let mut arr: Array1<i64> = Array::from_vec(vec![3, 2, 5, 1]); //! arr.as_slice_mut().unwrap().sort_unstable(); //! ``` //! //! If we wish to sort floating point numbers, then we must specify how //! to handle sorting `NaN`s or `inf`s. //! ``` //! use ndarray::*; //! let mut arr: Array1<f64> = Array::from_vec(vec![3.0, 2.0, 5.0, 1.0]); //! arr.as_slice_mut() //! .unwrap() //! .sort_unstable_by(|x, y| match x.partial_cmp(y) { //! Some(ord) => ord, //! None => panic!("Attempting to sort NaN's or Inf's"), //! }); //! ``` //! //! We can perform an argsort, that is, retrieving the indices //! that would sort the array. //! ``` //! use ndarray::*; //! fn argsort<D: Dimension, E: Ord>(arr: &Array<E, D>) -> Array1<usize> { //! let mut zipped: Array1<(usize, &E)> = arr.into_iter().enumerate().collect(); //! zipped //! .as_slice_mut() //! .unwrap() //! .sort_unstable_by_key(|&(_, val)| val); //! zipped.map(|(idx, _)| *idx) //! } //! ``` //! //! # Shuffling //! Similarly, shuffling can be performed using the [Rand Crate][https://crates.io/crates/rand] //! ``` //! extern crate rand; //! extern crate ndarray; //! use ndarray::*; //! use rand::{thread_rng, Rng}; //! let mut arr: Array1<f64> = Array::from_vec(vec![3.0, 2.0, 5.0, 1.0]); //! thread_rng().shuffle(arr.as_slice_mut().unwrap()); //! ``` //! //! Two arrays can be shuffled in unison, at the cost of copying both arrays: //! ``` //! extern crate rand; //! extern crate ndarray; //! use ndarray::*; //! use rand::{thread_rng, Rng}; //! let arr1: Array1<f64> = Array::from_vec(vec![3.0, -1.0, 8.0, 2.0]); //! 
let arr2: Array1<f64> = Array::from_vec(vec![3.0, 2.0, 5.0, 1.0]); //! //! let mut indices: Vec<usize> = (0..arr1.len_of(Axis(0))).collect(); //! thread_rng().shuffle(&mut indices); //! let arr1 = arr1.select(Axis(0), &indices); //! let arr2 = arr2.select(Axis(0), &indices); //! ``` //! //! # On mutating non-contiguous vs. contiguous arrays //! Approaches using [`.as_slice_mut()`](https://docs.rs/ndarray/0.12.0/ndarray/struct.ArrayBase.html#method.as_slice_mut) only work if //! the array/view is contiguous. This means that, for example, trying to sort a column in a 2-D row-major array doesn't work with this approach. //!``` //!use ndarray::prelude::*; //! //!fn main() { //!     let mut a = arr2(&[[1, 2], [3, 4]]); //!     // a.column_mut(0).as_slice_mut().unwrap().sort_unstable(); //!} //!``` //!This panics with "called `Option::unwrap()` on a `None` value" since the column is non-contiguous. //! //! There is not yet a good way to sort/shuffle non-contiguous arrays/views. If the array/view is non-contiguous, //! there are basically three options at the moment: //! * Copy the data into a contiguous Vec/array/view, and then sort/shuffle the copy. This doesn't modify the order of the original data. //! * Sort/shuffle a slice of indices with `.sort_by_key()`/`.shuffle()`, and then rearrange the data according to the sorted/shuffled indices. [examples/sort-axis.rs](examples/sort-axis.rs) provides an example of this.
true
2a7d31ebd484eecc2288b9368ed34d78c0ca7a93
Rust
CrabBucket/AdventOfCode2020
/src/Day1.rs
UTF-8
790
3.25
3
[]
no_license
use std::{fs, path::Path, env};

/// For a number `input` known to be half of a pair summing to 2020,
/// returns the product of the pair: `input * (2020 - input)`.
pub fn findpairthenmult(input: u32) -> u32{
    input * (2020u32 - input)
}

/// Day 1: read one number per line from `day1.txt` and print the product
/// of every pair of DISTINCT entries that sums to 2020.
pub fn day1(){
    // let path = env::current_dir().expect("error getting current dir").join("day1.txt");
    // println!("{:?}", path);
    let inputs = fs::read_to_string("day1.txt").expect("couldn't find file");
    // Parse one number per line. `lines()` already strips `\r\n`; skipping
    // blank lines avoids the old panic on a trailing newline at EOF.
    let numbers: Vec<u32> = inputs
        .lines()
        .map(str::trim)
        .filter(|line| !line.is_empty())
        .map(|line| line.parse::<u32>().expect("line is not a number"))
        .collect();
    // Iterate with i < j so each unordered pair is reported exactly once and
    // an entry is never paired with itself. (The original nested loops
    // printed every answer twice and would wrongly match a lone 1010 with
    // itself.)
    for (i, &first) in numbers.iter().enumerate() {
        for &second in &numbers[i + 1..] {
            if first + second == 2020u32 {
                println!("{}", findpairthenmult(first));
            }
        }
    }
}
true
9900c1536870dc1d1725b8aacb8f7bc26ec125f5
Rust
frenicth/tock
/chips/nrf51/src/timer.rs
UTF-8
10,825
2.75
3
[ "Apache-2.0", "MIT" ]
permissive
//! The nRF51822 timer system operates off of the high frequency clock //! (HFCLK) and provides three timers from the clock. Timer0 is tied //! to the radio through some hard-coded peripheral linkages (e.g., there //! are dedicated PPI connections between Timer0's compare events and //! radio tasks, its capture tasks and radio events). //! //! This implementation provides a full-fledged Timer interface to //! timers 0 and 2, and exposes Timer1 as an HIL Alarm, for a Tock //! timer system. It may be that the Tock timer system should be ultimately //! placed on top of the RTC (from the low frequency clock). It's currently //! implemented this way as a demonstration that it can be and because //! the full RTC/clock interface hasn't been finalized yet. //! //! This approach should be rewritten, such that the timer system uses //! the RTC from the low frequency clock (lower power) and the scheduler //! uses the high frequency clock. //! //! Author: Philip Levis <pal@cs.stanford.edu> //! Date: August 18, 2016 use chip; use core::cell::Cell; use core::mem; use kernel::common::VolatileCell; use kernel::hil; use nvic; use peripheral_interrupts::NvicIdx; #[repr(C, packed)] struct Registers { pub task_start: VolatileCell<u32>, pub task_stop: VolatileCell<u32>, pub task_count: VolatileCell<u32>, pub task_clear: VolatileCell<u32>, pub task_shutdown: VolatileCell<u32>, _reserved0: [VolatileCell<u32>; 11], pub task_capture: [VolatileCell<u32>; 4], // 0x40 _reserved1: [VolatileCell<u32>; 60], // 0x140 pub event_compare: [VolatileCell<u32>; 4], _reserved2: [VolatileCell<u32>; 44], // 0x150 pub shorts: VolatileCell<u32>, // 0x200 _reserved3: [VolatileCell<u32>; 64], // 0x204 pub intenset: VolatileCell<u32>, // 0x304 pub intenclr: VolatileCell<u32>, // 0x308 _reserved4: [VolatileCell<u32>; 126], // 0x30C pub mode: VolatileCell<u32>, // 0x504 pub bitmode: VolatileCell<u32>, // 0x508 _reserved5: VolatileCell<u32>, pub prescaler: VolatileCell<u32>, // 0x510 _reserved6: 
[VolatileCell<u32>; 11], // 0x514 pub cc: [VolatileCell<u32>; 4], // 0x540 } const SIZE: usize = 0x1000; const TIMER_BASE: usize = 0x40008000; #[derive(Copy,Clone)] pub enum Location { TIMER0, TIMER1, TIMER2, } pub static mut TIMER0: Timer = Timer { which: Location::TIMER0, nvic: NvicIdx::TIMER0, client: Cell::new(None), }; pub static mut ALARM1: TimerAlarm = TimerAlarm { which: Location::TIMER1, nvic: NvicIdx::TIMER1, client: Cell::new(None), }; pub static mut TIMER2: Timer = Timer { which: Location::TIMER2, nvic: NvicIdx::TIMER2, client: Cell::new(None), }; #[allow(non_snake_case)] fn TIMER(location: Location) -> &'static Registers { let ptr = TIMER_BASE + (location as usize) * SIZE; unsafe { mem::transmute(ptr) } } pub trait CompareClient { /// Passes a bitmask of which of the 4 compares/captures fired (0x0-0xf). fn compare(&self, bitmask: u8); } pub struct Timer { which: Location, nvic: NvicIdx, client: Cell<Option<&'static CompareClient>>, } impl Timer { fn timer(&self) -> &'static Registers { TIMER(self.which) } pub const fn new(location: Location, nvic: NvicIdx) -> Timer { Timer { which: location, nvic: nvic, client: Cell::new(None), } } pub fn set_client(&self, client: &'static CompareClient) { self.client.set(Some(client)); } pub fn start(&self) { self.timer().task_start.set(1); } // Stops the timer and keeps the value pub fn stop(&self) { self.timer().task_stop.set(1); } // Stops the timer and clears the value pub fn shutdown(&self) { self.timer().task_shutdown.set(1); } // Clear the value pub fn clear(&self) { self.timer().task_clear.set(1); } /// Capture the current timer value into the CC register /// specified by which, and return the value. 
pub fn capture(&self, which: u8) -> u32 { match which { 0 => { self.timer().task_capture[0].set(1); self.timer().cc[0].get() } 1 => { self.timer().task_capture[1].set(1); self.timer().cc[1].get() } 2 => { self.timer().task_capture[2].set(1); self.timer().cc[2].get() } _ => { self.timer().task_capture[3].set(1); self.timer().cc[3].get() } } } /// Capture the current value to the CC register specified by /// which and do not return the value. pub fn capture_to(&self, which: u8) { let _ = self.capture(which); } /// Shortcuts can automatically stop or clear the timer on a particular /// compare event; refer to section 18.3 of the nRF reference manual /// for details. Implementation currently provides shortcuts as the /// raw bitmask. pub fn get_shortcuts(&self) -> u32 { self.timer().shorts.get() } pub fn set_shortcuts(&self, shortcut: u32) { self.timer().shorts.set(shortcut); } pub fn get_cc0(&self) -> u32 { self.timer().cc[0].get() } pub fn set_cc0(&self, val: u32) { self.timer().cc[0].set(val); } pub fn get_cc1(&self) -> u32 { self.timer().cc[1].get() } pub fn set_cc1(&self, val: u32) { self.timer().cc[0].set(val); } pub fn get_cc2(&self) -> u32 { self.timer().cc[2].get() } pub fn set_cc2(&self, val: u32) { self.timer().cc[0].set(val); } pub fn get_cc3(&self) -> u32 { self.timer().cc[3].get() } pub fn set_cc3(&self, val: u32) { self.timer().cc[0].set(val); } pub fn enable_interrupts(&self, interrupts: u32) { self.timer().intenset.set(interrupts << 16); } pub fn disable_interrupts(&self, interrupts: u32) { self.timer().intenclr.set(interrupts << 16); } pub fn enable_nvic(&self) { nvic::enable(self.nvic); } pub fn disable_nvic(&self) { nvic::disable(self.nvic); } pub fn set_prescaler(&self, val: u8) { // Only bottom 4 bits are valid, so mask them // nRF51822 reference manual, page 102 self.timer().prescaler.set((val & 0xf) as u32); } pub fn get_prescaler(&self) -> u8 { self.timer().prescaler.get() as u8 } /// When an interrupt occurs, check if any of the 4 compares 
have /// created an event, and if so, add it to the bitmask of triggered /// events that is passed to the client. pub fn handle_interrupt(&self) { nvic::clear_pending(self.nvic); self.client.get().map(|client| { let mut val = 0; // For each of 4 possible compare events, if it's happened, // clear it and store its bit in val to pass in callback. for i in 0..4 { if self.timer().event_compare[i].get() != 0 { val = val | 1 << i; self.timer().event_compare[i].set(0); self.disable_interrupts(1 << (i + 16)); } } client.compare(val as u8); }); } } pub struct TimerAlarm { which: Location, nvic: NvicIdx, client: Cell<Option<&'static hil::time::Client>>, } // CC0 is used for capture // CC1 is used for compare/interrupts const ALARM_CAPTURE: usize = 0; const ALARM_COMPARE: usize = 1; const ALARM_INTERRUPT_BIT: u32 = 1 << (16 + ALARM_COMPARE); impl TimerAlarm { fn timer(&self) -> &'static Registers { TIMER(self.which) } pub const fn new(location: Location, nvic: NvicIdx) -> TimerAlarm { TimerAlarm { which: location, nvic: nvic, client: Cell::new(None), } } pub fn clear(&self) { self.clear_alarm(); self.timer().task_clear.set(1); } pub fn clear_alarm(&self) { self.timer().event_compare[ALARM_COMPARE].set(0); self.disable_interrupts(); nvic::clear_pending(self.nvic); } pub fn set_client(&self, client: &'static hil::time::Client) { self.client.set(Some(client)); } pub fn start(&self) { // Make timer 32 bits wide self.timer().bitmode.set(3); // Clock is 16MHz, so scale down by 2^10 to 16KHz self.timer().prescaler.set(10); self.timer().task_start.set(1); } pub fn stop(&self) { self.timer().task_stop.set(1); } #[inline(never)] pub fn handle_interrupt(&self) { self.clear_alarm(); self.client.get().map(|client| { client.fired(); }); } // Enable and disable interrupts use the bottom 4 bits // for the 4 compare interrupts. These functions shift // those bits to the correct place in the register. 
pub fn enable_interrupts(&self) { self.timer().intenset.set(ALARM_INTERRUPT_BIT); } pub fn disable_interrupts(&self) { self.timer().intenclr.set(ALARM_INTERRUPT_BIT); } pub fn interrupts_enabled(&self) -> bool { self.timer().intenset.get() == (ALARM_INTERRUPT_BIT) } pub fn enable_nvic(&self) { nvic::enable(self.nvic); } pub fn disable_nvic(&self) { nvic::disable(self.nvic); } pub fn value(&self) -> u32 { self.timer().task_capture[ALARM_CAPTURE].set(1); self.timer().cc[ALARM_CAPTURE].get() } } impl hil::time::Time for TimerAlarm { fn disable(&self) { self.disable_interrupts(); } fn is_armed(&self) -> bool { self.interrupts_enabled() } } impl hil::time::Alarm for TimerAlarm { type Frequency = hil::time::Freq16KHz; fn now(&self) -> u32 { self.value() } fn set_alarm(&self, tics: u32) { self.disable_interrupts(); self.timer().cc[ALARM_COMPARE].set(tics); self.clear_alarm(); self.enable_interrupts(); } fn get_alarm(&self) -> u32 { self.timer().cc[ALARM_COMPARE].get() } } #[no_mangle] #[allow(non_snake_case)] pub unsafe extern "C" fn TIMER0_Handler() { use kernel::common::Queue; nvic::disable(NvicIdx::TIMER0); chip::INTERRUPT_QUEUE.as_mut().unwrap().enqueue(NvicIdx::TIMER0); } #[no_mangle] #[allow(non_snake_case)] pub unsafe extern "C" fn TIMER1_Handler() { use kernel::common::Queue; nvic::disable(NvicIdx::TIMER1); chip::INTERRUPT_QUEUE.as_mut().unwrap().enqueue(NvicIdx::TIMER1); } #[no_mangle] #[allow(non_snake_case)] pub unsafe extern "C" fn TIMER2_Handler() { use kernel::common::Queue; nvic::disable(NvicIdx::TIMER2); chip::INTERRUPT_QUEUE.as_mut().unwrap().enqueue(NvicIdx::TIMER2); }
true
3a353403f09e20dcc3ddb30ab314ed2a323d55bf
Rust
louisch/advent-of-code-2019
/rust/src/main.rs
UTF-8
969
3.53125
4
[]
no_license
/// AoC 2019 day 4 (part 2) password check: the decimal digits of `number`
/// must be non-decreasing left-to-right, and at least one digit must appear
/// in a run of EXACTLY two.
fn meets_criteria(number: &u64) -> bool {
    let digits: Vec<u32> = number.to_string().chars().filter_map(|c| c.to_digit(10)).collect();

    // Non-decreasing check. `windows(2)` works for any digit count; the
    // original `&digits[..5]` slice assumed exactly six digits and panicked
    // on shorter input.
    if !digits.windows(2).all(|w| w[0] <= w[1]) {
        return false;
    }

    // Scan runs of equal digits; succeed on the first run of length
    // exactly 2 (a run of 3+ does not count).
    let mut i = 0;
    while i < digits.len() {
        let mut j = i + 1;
        while j < digits.len() && digits[j] == digits[i] {
            j += 1;
        }
        if j - i == 2 {
            return true;
        }
        i = j;
    }
    false
}

fn main() {
    println!("found {}", (372304..847060).filter(|n| meets_criteria(n)).count());
}
true
7ce822adafcf2c09aea826f02aac77f899364b8f
Rust
payload/tot-up
/src/entry_data.rs
UTF-8
1,528
3.46875
3
[ "MIT" ]
permissive
use std::collections::HashMap; use internment::ArcIntern; /// trade higher runtime with lower peak memory usage pub type Term = ArcIntern<String>; /// An entry is a file or a directory. /// Data per entry is the map of used terms and their counts. #[derive(Clone, Debug, Default)] pub struct EntryData { path: String, term_count: HashMap<Term, usize>, } impl EntryData { pub fn new(path: &str) -> Self { Self { path: path.into(), ..Self::default() } } pub fn tot_up(&mut self, other: &Self) { tot_up(&mut self.term_count, &other.term_count); } pub fn inc_term(&mut self, string: &str) { let term = Term::from(string); self.term_count .entry(term) .and_modify(|x| *x += 1) .or_insert(1); } pub fn sorted_term_counts(&self) -> Vec<(&Term, &usize)> { let mut term_counts: Vec<_> = self.term_count.iter().collect(); term_counts.sort_by_key(|entry| std::usize::MAX - entry.1); term_counts } pub fn path(&self) -> &str { &self.path } } fn tot_up(dest: &mut HashMap<Term, usize>, src: &HashMap<Term, usize>) { for (key, value) in src.iter() { *dest.entry(key.clone()).or_default() += *value; } } #[test] fn EntryData_tot_up_term_counts() { let mut foo = EntryData::new("foo"); foo.inc_term("term1"); let mut bar = EntryData::new("bar"); bar.inc_term("term1"); bar.inc_term("term2"); foo.tot_up(&bar); }
true
76d03a7ca9dca54b3bd09f20ce6b379b1664325f
Rust
bouzuya/rust-atcoder
/cargo-atcoder/contests/abc192/src/bin/c.rs
UTF-8
585
2.84375
3
[]
no_license
use proconio::input;

/// One step of the ABC192-C operation: g1(x) - g2(x), where g1 arranges the
/// decimal digits of x in descending order and g2 in ascending order
/// (leading zeros dropped; an all-zero string is treated as 0).
fn f(x: usize) -> usize {
    // Sort the digits once, ascending; g1 is simply the reverse view.
    let mut ascending: Vec<char> = x.to_string().chars().collect();
    ascending.sort_unstable();

    let descending: String = ascending.iter().rev().collect();
    let g1: usize = descending.parse().unwrap();

    let ascending: String = ascending.into_iter().collect();
    let g2: usize = ascending.trim_start_matches('0').parse().unwrap_or(0);

    g1 - g2
}

fn main() {
    input! {
        n: usize,
        k: usize,
    };

    // Apply f to n a total of k times and report the result.
    let mut current = n;
    for _ in 0..k {
        current = f(current);
    }
    println!("{}", current);
}
true
8ad979e271d322fff373bcfc6414afed8aefb474
Rust
ruby-vietnam/hardcore-rule
/algorithms/solutions/week12/unrealhoang/lisp_intepreter.rs
UTF-8
6,412
3.390625
3
[]
no_license
// Tiny Lisp interpreter: parses a parenthesized token stream into a Value
// tree and evaluates it against a chain of lexical context frames.
// NOTE(review): written against pre-2018 Rust (bare trait objects like
// `&'static Callable`, `&mut Iterator<...>`) — keep an older toolchain or
// expect deprecation warnings.
use std::collections::HashMap;
use std::fmt::Debug;

// A Lisp datum: integer literal, symbol, or list of sub-values.
#[derive(Debug, Clone)]
enum Value {
    Integer(i64),
    // String(String),
    Symbol(String),
    List(Vec<Value>),
}

// What a symbol can be bound to in a context frame.
// Function: arguments are evaluated before the call; Form: arguments are
// passed unevaluated (special form, e.g. `let`).
#[derive(Debug)]
enum Bounded {
    Value(Value),
    Function(&'static Callable),
    Form(&'static Callable)
}

// One lexical scope; `parent` links outward to enclosing scopes.
#[derive(Debug)]
struct ContextFrame<'a> {
    bindings: HashMap<String, Bounded>,
    parent: Option<&'a ContextFrame<'a>>,
}

impl<'a> ContextFrame<'a> {
    // Walks the frame chain outward and returns the innermost binding for
    // `key`, or None if no frame defines it.
    fn lookup(&self, key: &String) -> Option<&Bounded> {
        let mut cur_frame = self;
        loop {
            if let Some(b) = cur_frame.bindings.get(key) {
                return Some(b);
            } else {
                if let Some(frame) = cur_frame.parent {
                    cur_frame = &frame;
                } else {
                    return None;
                }
            }
        }
    }

    // Creates an empty frame; pass Some(parent) to nest a scope.
    fn new(parent: Option<&'a ContextFrame>) -> ContextFrame<'a> {
        ContextFrame {
            bindings: HashMap::new(),
            parent: parent
        }
    }

    // Binds `s` to `v` in this frame, shadowing any outer binding.
    fn insert(&mut self, s: String, v: Bounded) {
        self.bindings.insert(s, v);
    }
}

// Anything invocable from Lisp code (builtin function or special form).
trait Callable: Debug {
    fn call(&self, params: Vec<Value>, context: &ContextFrame) -> Value;
}

// Zero-sized marker types implementing the builtins.
#[derive(Debug)]
struct Add;
#[derive(Debug)]
struct Mult;
#[derive(Debug)]
struct Binding;

use Value::*;

// The `let` special form: (let (sym expr ...) body).
impl Callable for Binding {
    fn call(&self, mut params: Vec<Value>, context: &ContextFrame) -> Value {
        if params.len() != 2 { panic!("let form must have 3 elements") };
        if let List(mut bindings) = params.remove(0) {
            if bindings.len() % 2 == 1 {
                panic!("Binding must have even number of elements")
            }
            let mut new_frame = ContextFrame::new(Some(context));
            // Pops from the end, so pairs are processed back-to-front;
            // binding expressions are evaluated in the OUTER context
            // (parallel `let`, not `let*`).
            while let Some(expr) = bindings.pop() {
                if let Value::Symbol(ref s) = bindings.pop().unwrap() {
                    let value = eval(expr, context);
                    new_frame.insert(s.clone(), Bounded::Value(value));
                } else {
                    panic!("binding must be symbol");
                }
            }
            // Evaluate the body in the extended scope.
            let expr = params.remove(0);
            let result = eval(expr, &new_frame);
            return result;
        } else {
            panic!("2nd element of let form must be list");
        }
    }
}

// Variadic integer addition; panics on non-Integer arguments.
impl Callable for Add {
    fn call(&self, params: Vec<Value>, _context: &ContextFrame) -> Value {
        let mut sum = 0i64;
        for param in params {
            match param {
                Integer(i) => {
                    sum += i;
                },
                _ => {
                    panic!("Cannot add non-Integer");
                }
            }
        }
        Value::Integer(sum)
    }
}

// Variadic integer multiplication; panics on non-Integer arguments.
impl Callable for Mult {
    fn call(&self, params: Vec<Value>, _context: &ContextFrame) -> Value {
        let mut product = 1i64;
        for param in params {
            match param {
                Integer(i) => {
                    product *= i;
                },
                _ => {
                    panic!("Cannot mult non-Integer");
                }
            }
        }
        Value::Integer(product)
    }
}

impl Value {
    // True when the token parses as an i64 literal.
    fn parsable(s: &str) -> bool {
        s.parse::<i64>().is_ok()
    }
    // Converts a token already known to be parsable into an Integer.
    fn parse(s: &str) -> Value {
        let i = s.parse::<i64>().unwrap();
        Value::Integer(i)
    }
}

// Pops the last element out of a one-level List wrapper (the parser wraps
// the whole program in an extra List).
fn unwrap_list(ast: Value) -> Value {
    match ast {
        List(mut list) => list.pop().expect("unwrapping empty list"),
        _ => {
            panic!("Unwrap list only works on list")
        }
    }
}

// Recursive-descent parse of a whitespace-separated token stream; "(" opens
// a nested list, ")" (or end of input) closes the current one.
fn parse_list(s: &mut Iterator<Item = &str>) -> Value {
    let mut list = Vec::new();
    while let Some(token) = s.next() {
        if token == ")" { break }
        if token == "(" {
            let sub_list = parse_list(s);
            list.push(sub_list);
        } else {
            list.push(parse_token(token));
        }
    }
    return Value::List(list);
}

// A token is an Integer when numeric, otherwise a Symbol.
fn parse_token(token_str: &str) -> Value {
    if Value::parsable(token_str) {
        return Value::parse(token_str);
    }
    return Value::Symbol(token_str.to_owned());
}

// The evaluator. Symbols resolve through the context chain (callables
// evaluate to themselves-as-symbols); non-empty lists are applications whose
// head must be a symbol bound to a Form (raw args) or Function (args
// evaluated first); everything else is self-evaluating.
fn eval(ast: Value, context: &ContextFrame) -> Value {
    match ast {
        Symbol(symbol) => {
            println!("Evaluating {:?}", symbol);
            let bounded = context.lookup(&symbol).expect("symbol not found");
            if let &Bounded::Value(ref v) = bounded {
                v.clone()
            } else {
                Symbol(symbol)
            }
        },
        List(mut list) => {
            if list.is_empty() {
                panic!("empty list");
            } else {
                let mut drain = list.drain(..);
                if let Value::Symbol(ref symbol) = drain.next().unwrap() {
                    println!("Evaluating {:?}", symbol);
                    let callable = context.lookup(symbol).expect("symbol not found");
                    match *callable {
                        Bounded::Value(_) => {
                            panic!("first element in list not callable")
                        },
                        Bounded::Form(callable) => {
                            // Special form: pass arguments unevaluated.
                            let mut params = Vec::new();
                            for param in drain {
                                params.push(param);
                            }
                            callable.call(params, context)
                        },
                        Bounded::Function(callable) => {
                            // Function: evaluate each argument first.
                            let mut params = Vec::new();
                            for param in drain {
                                params.push(eval(param, context));
                            }
                            callable.call(params, context)
                        }
                    }
                } else {
                    panic!("first element must be symbol");
                }
            }
        },
        // if not list, return as is
        _ => ast,
    }
}

fn main() {
    // Demo program: (let (x 2) (mult x (let (x 3 y 4) (add x y))))
    // NOTE(review): the string appears to be missing one trailing ")", but
    // parse_list also closes lists at end-of-input, so it still parses.
    let s = "( let ( x 2 ) ( mult x ( let ( x 3 y 4 ) ( add x y ) ) )";
    let mut tokens = s.split(" ");
    let raw_ast = parse_list(&mut tokens);
    let ast = unwrap_list(raw_ast);
    println!("parsed: {:?}", ast);
    // Global scope with the three builtins.
    let mut global_frame = ContextFrame::new(None);
    global_frame.insert(String::from("add"), Bounded::Function(&Add));
    global_frame.insert(String::from("mult"), Bounded::Function(&Mult));
    global_frame.insert(String::from("let"), Bounded::Form(&Binding));
    let result = eval(ast, &global_frame);
    println!("evaluated to: {:?}", result);
}
22fb38f99ee044f9f1e445a2b05d59ab2bed6dca
Rust
singaraiona/rik
/src/kobjects.rs
UTF-8
12,785
2.515625
3
[]
no_license
use std::mem::size_of; use std::ptr::{read, copy_nonoverlapping}; use std::vec::Vec; #[derive(Debug)] pub enum KObject { Atom (KAtom), Vector (KVector), Dictionary (KDictionary), Table (KTable), KeyedTable (KKeyedTable), Function (KFunction), Error (KSymbol), Unknown (Vec<u8>), } #[derive(Debug)] pub enum KAtom { Boolean (KBoolean), Guid (KGuid), Byte (KByte), Short (KShort), Int (KInt), Long (KLong), Real (KReal), Float (KFloat), Char (KChar), Symbol (KSymbol), Timestamp (KTimestamp), Month (KMonth), Date (KDate), DateTime (KDateTime), Timespan (KTimespan), Minute (KMinute), Second (KSecond), Time (KTime), } #[derive(Debug)] pub enum KVector { List(KList), Boolean (Vec<KBoolean>), Guid (Vec<KGuid>), Byte (Vec<KByte>), Short (Vec<KShort>), Int (Vec<KInt>), Long (Vec<KLong>), Real (Vec<KReal>), Float (Vec<KFloat>), Char (Vec<KChar>), Symbol (Vec<KSymbol>), Timestamp (Vec<KTimestamp>), Month (Vec<KMonth>), Date (Vec<KDate>), DateTime (Vec<KDateTime>), Timespan (Vec<KTimespan>), Minute (Vec<KMinute>), Second (Vec<KSecond>), Time (Vec<KTime>), } #[derive(Debug)] pub enum KFunction { Lambda(KLambda), PrimVerb(KPrimVerb), Adverb(KAdverb), Projection(KProjection), Composition(KComposition), } impl KVector { pub fn len(&self) -> usize { match *self { KVector::List(ref v) => v.len(), KVector::Boolean(ref v) => v.len(), KVector::Guid(ref v) => v.len(), KVector::Byte(ref v) => v.len(), KVector::Short(ref v) => v.len(), KVector::Int(ref v) => v.len(), KVector::Long(ref v) => v.len(), KVector::Real(ref v) => v.len(), KVector::Float(ref v) => v.len(), KVector::Char(ref v) => v.len(), KVector::Symbol(ref v) => v.len(), KVector::Timestamp(ref v) => v.len(), KVector::Month(ref v) => v.len(), KVector::Date(ref v) => v.len(), KVector::DateTime(ref v) => v.len(), KVector::Timespan(ref v) => v.len(), KVector::Minute(ref v) => v.len(), KVector::Second(ref v) => v.len(), KVector::Time(ref v) => v.len(), } } } pub type KBoolean = u8; pub type KGuid = [u64;16]; pub type KByte = i8; pub 
type KShort = i16; pub type KInt = i32; pub type KLong = i64; pub type KReal = f32; pub type KFloat = f64; pub type KChar = u8; pub type KSymbol = String; pub type KTimestamp = i64; pub type KMonth = i32; pub type KDate = i32; pub type KDateTime = f64; pub type KTimespan = i64; pub type KMinute = i32; pub type KSecond = i32; pub type KTime = i32; pub type KList = Vec<KObject>; pub type KLambda = (KSymbol, String); pub type KPrimVerb = (i8, i8); pub type KAdverb = (i8, Box<KFunction>); pub type KProjection = Vec<KObject>; pub type KComposition = Vec<KObject>; // This is all functions, could be tighened up some #[derive(Debug)] pub struct KDictionary(pub KVector, pub KVector); #[derive(Debug)] pub struct KTable(pub KVector, pub KList); #[derive(Debug)] pub struct KKeyedTable(pub KTable, pub KTable); // #[derive(Debug)] // #[repr(packed)] // struct KAtomHeader { // val_type: i8, // } #[derive(Debug)] #[repr(packed)] struct KVectorHeader { val_type: i8, attrib: i8, len: i32, } // #[derive(Debug)] // #[repr(packed)] // struct KTableHeader { // val_type: i8, // = 98 // attrib: i8, // } macro_rules! cast_add { ($c:expr, $p:expr, $i:expr) => ({ let (val, len) = $p; ($c(val), len + $i) }) } #[macro_export] macro_rules! 
kdict_to_hashmap { ($kt:path, $vt:path, $val:expr) => ({ if let $crate::KObject::Dictionary( $crate::KDictionary( $kt(kk), $vt(vv) ) ) = $val { assert!(kk.len() == vv.len()); let mut hmap = HashMap::with_capacity(kk.len()); for i in 0..kk.len() { hmap.insert(kk[i].clone(), vv[i].clone()); } hmap } else { panic!("failure to deconstruct KObject::Dictionary {:?}", $val); } }) } impl KObject { pub fn parse(msg: &[u8]) -> (KObject, usize) { let val_type = msg[0] as i8; match val_type { -128 => cast_add!(KObject::Error, Self::read_sym_atom(&msg[1..]), 1), -19...-4 | -2...-1 => cast_add!(KObject::Atom, Self::parse_atom(msg), 0), 0...2 | 4...19 => cast_add!(KObject::Vector, Self::parse_vector(msg), 0), 98 => cast_add!(KObject::Table, Self::parse_table(msg), 0), 99 | 127 => Self::parse_dict(msg), 100...111 => cast_add!(KObject::Function, Self::parse_function(msg), 0), _ => panic!("Unknown type code: {} {:?}", val_type, msg), } } pub fn parse_function(msg: &[u8]) -> (KFunction, usize) { let val_type = msg[0] as i8; match val_type { 100 => cast_add!(KFunction::Lambda, Self::parse_lambda(&msg[1..]), 1), 101...103 => (KFunction::PrimVerb((val_type, msg[1] as i8)), 2), 104 => cast_add!(KFunction::Projection, Self::parse_proj(&msg[1..]), 1), 105 => cast_add!(KFunction::Composition, Self::parse_proj(&msg[1..]), 1), 106...111 => cast_add!(KFunction::Adverb, Self::parse_adverb(&msg), 0), _ => unreachable!(), } } fn parse_proj(msg: &[u8]) -> (KProjection, usize) { let len = Self::read_atom::<i32>(&msg).0; let (kobj, klen) = Self::read_list(len, &msg[4..]); (kobj, len as usize + klen) } fn parse_adverb(msg: &[u8]) -> (KAdverb, usize) { let adverb = msg[0] as i8; let (func, len) = Self::parse(&msg[1..]); match func { KObject::Function(f) => ((adverb, Box::new(f)), 1+len), _ => panic!("parsing adverb expected function, found: {:?}", func), } } fn parse_dict(msg: &[u8]) -> (KObject, usize) { let (keys, klen) = Self::parse(&msg[1..]); let (vals, vlen) = Self::parse(&msg[1+klen..]); let 
kobj = match (keys, vals) { (KObject::Vector(kv), KObject::Vector(vv)) => { KObject::Dictionary(KDictionary(kv, vv)) } (KObject::Table(kt), KObject::Table(vt)) => { KObject::KeyedTable(KKeyedTable(kt, vt)) } (ref k, ref v) => { panic!("parsing dict, not correct form:\n\t{:?}\n\t{:?}", k, v); } }; (kobj, 1+klen+vlen) } fn parse_table(msg: &[u8]) -> (KTable, usize) { let (dict, len) = Self::parse_dict(&msg[2..]); match dict { KObject::Dictionary(KDictionary(v, KVector::List(w))) => (KTable(v, w), 2+len), ref d => panic!("parsing table, not correct form: {:?}", d), } } fn parse_lambda(msg: &[u8]) -> (KLambda, usize) { let (sym, slen) = Self::read_sym_atom(&msg); let (text, tlen) = Self::parse_vector(&msg[slen..]); match text { KVector::Char(v) => ((sym, String::from_utf8(v).unwrap()), slen+tlen), ref t => panic!("parsing lambda, not correct form: {:?}", t), } } fn parse_atom(msg: &[u8]) -> (KAtom, usize) { let val_type = msg[0]; // Could cast to KAtomHeader, but why bother? let val = &msg[1..]; match val_type as i8 { -19 => cast_add!(KAtom::Time, Self::read_atom::<KTime>(val), 1), -18 => cast_add!(KAtom::Second, Self::read_atom::<KSecond>(val), 1), -17 => cast_add!(KAtom::Minute, Self::read_atom::<KMinute>(val), 1), -16 => cast_add!(KAtom::Timespan, Self::read_atom::<KTimespan>(val), 1), -15 => cast_add!(KAtom::DateTime, Self::read_atom::<KDateTime>(val), 1), -14 => cast_add!(KAtom::Date, Self::read_atom::<KDate>(val), 1), -13 => cast_add!(KAtom::Month, Self::read_atom::<KMonth>(val), 1), -12 => cast_add!(KAtom::Timestamp, Self::read_atom::<KTimestamp>(val), 1), -11 => cast_add!(KAtom::Symbol, Self::read_sym_atom(val), 1), -10 => cast_add!(KAtom::Char, Self::read_atom::<KChar>(val), 1), -9 => cast_add!(KAtom::Float, Self::read_atom::<KFloat>(val), 1), -8 => cast_add!(KAtom::Real, Self::read_atom::<KReal>(val), 1), -7 => cast_add!(KAtom::Long, Self::read_atom::<KLong>(val), 1), -6 => cast_add!(KAtom::Int, Self::read_atom::<KInt>(val), 1), -5 => cast_add!(KAtom::Short, 
Self::read_atom::<KShort>(val), 1), -4 => cast_add!(KAtom::Byte, Self::read_atom::<KByte>(val), 1), -2 => cast_add!(KAtom::Guid, Self::read_atom::<KGuid>(val), 1), -1 => cast_add!(KAtom::Boolean, Self::read_atom::<KBoolean>(val), 1), _ => unreachable!(), } } fn parse_vector(msg: &[u8]) -> (KVector, usize) { let size = size_of::<KVectorHeader>(); let (vhdr, _) : (KVectorHeader, _) = Self::read_atom(&msg[..size]); let val = &msg[size..]; match vhdr.val_type { 19 => cast_add!(KVector::Time, Self::read_vector::<KTime>(vhdr.len, val), size), 18 => cast_add!(KVector::Second, Self::read_vector::<KSecond>(vhdr.len, val), size), 17 => cast_add!(KVector::Minute, Self::read_vector::<KMinute>(vhdr.len, val), size), 16 => cast_add!(KVector::Timespan, Self::read_vector::<KTimespan>(vhdr.len, val), size), 15 => cast_add!(KVector::DateTime, Self::read_vector::<KDateTime>(vhdr.len, val), size), 14 => cast_add!(KVector::Date, Self::read_vector::<KDate>(vhdr.len, val), size), 13 => cast_add!(KVector::Month, Self::read_vector::<KMonth>(vhdr.len, val), size), 12 => cast_add!(KVector::Timestamp, Self::read_vector::<KTimestamp>(vhdr.len, val), size), 11 => cast_add!(KVector::Symbol, Self::read_sym_vector(vhdr.len, val), size), 10 => cast_add!(KVector::Char, Self::read_vector::<KChar>(vhdr.len, val), size), 9 => cast_add!(KVector::Float, Self::read_vector::<KFloat>(vhdr.len, val), size), 8 => cast_add!(KVector::Real, Self::read_vector::<KReal>(vhdr.len, val), size), 7 => cast_add!(KVector::Long, Self::read_vector::<KLong>(vhdr.len, val), size), 6 => cast_add!(KVector::Int, Self::read_vector::<KInt>(vhdr.len, val), size), 5 => cast_add!(KVector::Short, Self::read_vector::<KShort>(vhdr.len, val), size), 4 => cast_add!(KVector::Byte, Self::read_vector::<KByte>(vhdr.len, val), size), 2 => cast_add!(KVector::Guid, Self::read_vector::<KGuid>(vhdr.len, val), size), 1 => cast_add!(KVector::Boolean, Self::read_vector::<KBoolean>(vhdr.len, val), size), 0 => cast_add!(KVector::List, 
Self::read_list(vhdr.len, val), size), _ => unreachable!(), } } fn read_list(len: i32, msg: &[u8]) -> (KList, usize) { let mut v = Vec::<KObject>::with_capacity(len as usize); let mut s = 0usize; for _ in 0..len { let (obj, len) = Self::parse(&msg[s..]); v.push(obj); s += len; } (v, s) } fn read_atom<T>(data: &[u8]) -> (T, usize) { unsafe { (read(data.as_ptr() as *const T), size_of::<T>()) } } // FIXME: hacky. If not 0-terminated, gives back from length (one too long), // but still works bc the only caller doesn't add term byte either! fn read_sym_atom(data: &[u8]) -> (KSymbol, usize) { let p = data.iter().position(|x| *x == 0u8).unwrap(); let s = String::from_utf8(data[..p].to_vec()).unwrap(); (s, p + 1) } fn read_vector<T>(len: i32, data: &[u8]) -> (Vec<T>, usize) { let mut v = Vec::<T>::with_capacity(len as usize); let size = size_of::<T>() * len as usize; unsafe { copy_nonoverlapping(data.as_ptr(), v.as_mut_ptr() as *mut u8, size); v.set_len(len as usize); } (v, size) } fn read_sym_vector(len: i32, data: &[u8]) -> (Vec<KSymbol>, usize) { let mut v = Vec::<KSymbol>::with_capacity(len as usize); let mut s = 0usize; while v.len() < len as usize { let e = data[s..].iter().position(|x| *x == 0u8).unwrap(); v.push(String::from_utf8(data[s..s+e].to_vec()).unwrap()); s += e + 1; } (v, s) } }
true
c749c9f82c3a4acde35427e9104a52e4aff87e58
Rust
KevDev13/horcrux
/src/main.rs
UTF-8
3,237
3.265625
3
[ "MIT", "LicenseRef-scancode-warranty-disclaimer" ]
permissive
// main.rs // Author: Kevin Garner, kevin@kgar.net // // Horcrux is an application that will split a file into // multiple shares using Shamir's Secret Sharing. This will // allow the separating of files across different locations // (i.e. cloud services, USB drives, etc) while still allowing // the loss of 1 or more shares with the ability to recover // the primary file at the end. // // License information can be found at the repo: // https://github.com/KevDev13/horcrux use std::env; // horcrux-specific crates mod support; mod split; mod recover; use support::*; use split::*; use recover::*; fn main() { // get command line arguments let args: Vec<String> = env::args().collect(); //println!("{:?}", args); if args.len() <= 1 { println!("{}", TOO_FEW_ARGS_STRING); return; } // get the first argument, which should be what the user wants to do let first_arg = &args[1]; // strings to check for what the user wants to do let help_strings: Vec<String> = vec![String::from("-h"), String::from("--help"), String::from("help")]; let split_strings: Vec<String> = vec![String::from("-s"), String::from("--split"), String::from("split")]; let recover_strings: Vec<String> = vec![String::from("-r"), String::from("--recover"), String::from("recover")]; // if user wants to list help if help_strings.contains(first_arg) { print_help(); return; } // else if user wants to split strings else if split_strings.contains(first_arg) { if args.len() < 5 { println!("{}", TOO_FEW_ARGS_STRING); } // input file name let file_name = &args[2]; // parse the shares and error check them before proceeding let (minimum_shares, num_shares) = match get_shares(&args[3], &args[4]) { Some((min, max)) => (min, max), None => { println!("Exiting..."); // TODO: make this error message better return; } }; // split the shares into the appropriate files split_shares(file_name.to_string(), minimum_shares, num_shares); } // else if the user wants to recover a secret else if recover_strings.contains(first_arg) { if 
args.len() < 4 { println!("{}", TOO_FEW_ARGS_STRING); return; } // file name where user wants to output the secret let file_name = &args[2]; // add all recovery shares to a vector to use in a recovery attempt let mut r_shares: Vec<String> = Vec::new(); for share in 3..args.len() { r_shares.push(args[share].to_string()); } // attempt to recover the shares recover_shares(file_name.to_string(), r_shares); } // if get here, the user typed in an unknown command and we don't know what to do. else { println!("Unknown qualifier. Use \"horcrux -h\" or \"horcrux --help \" for help."); } }
true
fb883c8c3df6f7edfd2c791eb244bde30b6ef27a
Rust
billsjchw/tigerc-rs
/src/util.rs
UTF-8
2,173
3.71875
4
[]
no_license
/// Converts a Tiger integer literal into an `i64`.
///
/// Digits are accumulated with wrapping arithmetic, so overly long literals
/// wrap around instead of panicking (e.g. "18446744073709551616" yields 0).
pub fn parse_integer_literal(literal: &str) -> i64 {
    literal.bytes().fold(0i64, |acc, digit| {
        acc.wrapping_mul(10).wrapping_add((digit - b'0') as i64)
    })
}

/// Decodes a quoted Tiger string literal, resolving escape sequences.
///
/// Supported escapes: `\n`, `\t`, `\"`, `\\`, three-digit `\ddd` character
/// codes (computed with wrapping arithmetic), and the `\ ... \` form that
/// swallows everything up to the next backslash. The surrounding quotes are
/// stripped.
pub fn parse_string_literal(literal: &str) -> String {
    let bytes = literal.as_bytes();
    let mut out = String::new();
    // Start past the opening quote; stop before the closing quote.
    let mut pos = 1;
    while pos + 1 < bytes.len() {
        if bytes[pos] != b'\\' {
            // Ordinary character: copy it through unchanged.
            out.push(bytes[pos] as char);
            pos += 1;
            continue;
        }
        match bytes[pos + 1] {
            b'n' => {
                out.push('\n');
                pos += 2;
            }
            b't' => {
                out.push('\t');
                pos += 2;
            }
            b'"' => {
                out.push('"');
                pos += 2;
            }
            b'\\' => {
                out.push('\\');
                pos += 2;
            }
            d @ b'0'..=b'9' => {
                // \ddd: three digits interpreted base-8-ish with wrapping math.
                let code = (d - b'0')
                    .wrapping_mul(64)
                    .wrapping_add((bytes[pos + 2] - b'0') * 8)
                    .wrapping_add(bytes[pos + 3] - b'0');
                out.push(code as char);
                pos += 4;
            }
            _ => {
                // \ ... \ : discard everything through the closing backslash.
                pos += 1;
                while bytes[pos] != b'\\' {
                    pos += 1;
                }
                pos += 1;
            }
        }
    }
    out
}

#[cfg(test)]
mod tests {
    use super::{parse_integer_literal, parse_string_literal};

    #[test]
    fn test_parse_integer_literal() {
        assert_eq!(parse_integer_literal("0"), 0);
        assert_eq!(parse_integer_literal("123"), 123);
        assert_eq!(parse_integer_literal("9223372036854775807"), i64::MAX);
        assert_eq!(parse_integer_literal("18446744073709551616"), 0);
    }

    #[test]
    fn test_parse_string_literal() {
        assert_eq!(
            parse_string_literal("\"123\\n\\t\\\"\\\\xyz\""),
            "123\n\t\"\\xyz"
        );
        assert_eq!(
            parse_string_literal("\"\\110\\145\\154\\154\\157\""),
            "Hello"
        );
        assert_eq!(
            parse_string_literal("\"Hello,\\\n\t \\ world!\""),
            "Hello, world!"
        );
    }
}
true