blob_id
stringlengths
40
40
language
stringclasses
1 value
repo_name
stringlengths
5
140
path
stringlengths
5
183
src_encoding
stringclasses
6 values
length_bytes
int64
12
5.32M
score
float64
2.52
4.94
int_score
int64
3
5
detected_licenses
listlengths
0
47
license_type
stringclasses
2 values
text
stringlengths
12
5.32M
download_success
bool
1 class
789481137a9b9edcd42b1b61292fcdf5d94991c5
Rust
pacman82/rtiow
/src/camera.rs
UTF-8
2,579
3.15625
3
[]
no_license
use crate::{ ray::Ray, vec3::{cross, Point, Vec3}, }; use rand::Rng; pub struct Camera { origin: Point, lower_left_corner: Point, horizontal: Vec3, vertical: Vec3, lens_radius: f64, u: Vec3, v: Vec3, exposure_time: f64, } impl Camera { pub fn new( vertical_field_of_view: f64, aspect_ratio: f64, lookfrom: Point, lookat: Point, view_up: Vec3, distance_to_focus: f64, aperture: f64, exposure_time: f64, ) -> Self { let theta = vertical_field_of_view.to_radians(); let h = (theta / 2.).tan(); let viewport_height = 2. * h; let viewport_width = aspect_ratio * viewport_height; // Distance between projection point and image plane. let focal_length = 1.; let w = (lookfrom - lookat).unit() * focal_length; let u = cross(&view_up, &w).unit(); let v = cross(&w, &u); let origin = lookfrom; let horizontal = u * viewport_width * distance_to_focus; let vertical = v * viewport_height * distance_to_focus; let lower_left_corner = origin - horizontal / 2. - vertical / 2. - w * distance_to_focus; let lens_radius = aperture / 2.; Self { origin, lower_left_corner, horizontal, vertical, lens_radius, u, v, exposure_time, } } /// Creates a random ray /// /// # Parameters /// /// * `s`: horizontal coordinate of the projection plane going from left to right and zero to /// one. /// * `t`: vertical coordinate of the projection plane going from bottom to top and zero to one. /// * `rng`: Used to generate random minor shifts in rays position for sampling. pub fn get_ray(&self, s: f64, t: f64, rng: &mut impl Rng) -> Ray { let rd = random_in_unit_disk(rng) * self.lens_radius; let offset = self.u * rd.x() + self.v * rd.y(); Ray::from_to( self.origin + offset, self.lower_left_corner + self.horizontal * s + self.vertical * t, ) } /// Get a random point in time, between 0 and exposure time. 
pub fn get_time(&self, rng: &mut impl Rng) -> f64 { rng.gen_range(0., self.exposure_time) } } fn random_in_unit_disk(rng: &mut impl Rng) -> Vec3 { loop { let x = rng.gen_range(-1., 1.); let y: f64 = rng.gen_range(-1., 1.); if x * x + y * y < 1. { break Vec3::new(x, y, 0.); } } }
true
c014415aaabf3c8a1b98de867af4b265338a5987
Rust
torkeldanielsson/pe
/pe_21/src/main.rs
UTF-8
485
3.40625
3
[]
no_license
fn sum_of_proper_divisors(d: i64) -> i64 { let mut res: i64 = 0; let mut t: i64 = 0; while t < d / 2 { t += 1; if d % t == 0 { res += t; } } return res; } fn main() { let mut res = 0; for i in 1..10000 { let a = sum_of_proper_divisors(i); let b = sum_of_proper_divisors(a); if a == b { println!("{} / {}", b, a); res += i; } } println!("{:?}", res); }
true
92ba6ea0169fb1e5684d5d106e565c62cfe418b5
Rust
LordAro/dcpu16
/src/dcpu.rs
UTF-8
25,680
2.765625
3
[ "MIT" ]
permissive
#![allow(dead_code)] use std::path::Path; use std::fs::File; use std::io::Read; use std::io::Result; use std::any::Any; use std::cell::RefCell; use std::rc::Rc; use instructions::*; // Note: this can't be changed willy-nilly, since the PC is naturally wrapped around, so it will // not wrap around correctly if this is changed. pub const MEMORY_SIZE: usize = 0x10000; pub const REG_A: usize = 0; pub const REG_B: usize = 1; pub const REG_C: usize = 2; pub const REG_X: usize = 3; pub const REG_Y: usize = 4; pub const REG_Z: usize = 5; pub const REG_I: usize = 6; pub const REG_J: usize = 7; pub const CYCLE_HZ: usize = 100_000; const SHOW_ROWS_RADIUS: usize = 1; pub trait Device { fn info_hardware_id_upper(&self) -> u16; fn info_hardware_id_lower(&self) -> u16; fn info_manufacturer_id_upper(&self) -> u16; fn info_manufacturer_id_lower(&self) -> u16; fn info_version(&self) -> u16; fn process_interrupt(&mut self, cpu: &mut DCPU) -> (); fn run(&mut self, cpu: &mut DCPU, cycle: usize) -> (); fn as_any(&self) -> &Any; fn as_any_mut(&mut self) -> &mut Any; } pub struct DCPU { pub terminate: bool, pub reg: [u16; 8], pub mem: [u16; MEMORY_SIZE], pub pc: u16, pub sp: u16, pub ex: u16, pub ia: u16, interrupt_queueing: bool, interrupt_queue: Vec<u16>, skip_next: bool, cycle: usize, overshot_cycles: isize, inside_run: bool, pub devices: Rc<Vec<RefCell<Box<Device>>>>, } impl DCPU { pub fn new() -> DCPU { DCPU { terminate: false, reg: [0; 8], mem: [0; MEMORY_SIZE], pc: 0, sp: 0, ex: 0, ia: 0, interrupt_queueing: false, interrupt_queue: Vec::new(), skip_next: false, cycle: 0, overshot_cycles: 0, inside_run: false, devices: Rc::new(Vec::new()), } } // Run multiple ticks until cycles have been met // Resets cycle count, so that it won't overflow pub fn run(&mut self, cycles: usize) { self.inside_run = true; if self.overshot_cycles > cycles as isize { self.overshot_cycles -= cycles as isize; return; } let end_cycle = ((self.cycle + cycles) as isize - self.overshot_cycles) as usize; while 
!self.terminate && self.cycle < end_cycle { self.tick(); } self.overshot_cycles = self.cycle as isize - end_cycle as isize; // Pretend cycle is u16 and let it overflow safely while self.cycle > 0xffff { self.cycle -= 0xffff; } self.inside_run = false; } /// Get cycle count pub fn cycle(&self) -> usize { self.cycle } /// Halts the DCPU for a specified number of cycles. pub fn halt(&mut self, cycles: usize) -> () { if self.inside_run { self.cycle += cycles; } else { self.overshot_cycles += cycles as isize; } } fn reset(&mut self) { self.terminate = false; for i in 0..MEMORY_SIZE { self.mem[i] = 0; } for i in 0..8 { self.reg[i] = 0; } self.pc = 0; self.sp = 0; self.ex = 0; self.ia = 0; self.cycle = 0; self.interrupt_queue = Vec::new(); self.interrupt_queueing = false; self.overshot_cycles = 0; self.skip_next = false; self.devices = Rc::new(Vec::new()); } fn pcplus(&mut self, movepc: bool) -> u16 { let oldpc = self.pc; if movepc { self.pc = self.pc.wrapping_add(1); } oldpc } fn set(&mut self, identifier: usize, value: u16) { match identifier { 0x00...0x07 => { self.reg[identifier] = value; } 0x08...0x0f => { self.cycle += 1; let pos = self.reg[(identifier - 0x08) as usize]; self.mem[pos as usize] = value; } 0x10...0x17 => { self.cycle += 1; let pos = self.reg[(identifier - 0x10) as usize]; let offset = self.mem[self.pcplus(true) as usize]; self.mem[pos.wrapping_add(offset) as usize] = value; } 0x18 => { self.sp = self.sp.wrapping_sub(1); self.mem[self.sp as usize] = value; } 0x19 => { self.mem[self.sp as usize] = value; } 0x1a => { self.cycle += 1; let pos = self.sp.wrapping_add(self.mem[self.pcplus(true) as usize]); self.mem[pos as usize] = value; } 0x1b => { self.sp = value; } 0x1c => { self.pc = value; } 0x1d => { self.ex = value; } 0x1e => { self.cycle += 1; let pos = self.mem[self.pcplus(true) as usize]; self.mem[pos as usize] = value; } // Instructions 0x1f - 0x3f are not possible (silently ignore) _ => {} } } fn value(&mut self, identifier: usize, is_a: bool, 
movepc: bool) -> u16 { match identifier { 0x00...0x07 => self.reg[identifier as usize], 0x08...0x0f => { let pos = self.reg[(identifier - 0x08) as usize]; self.mem[pos as usize] } 0x10...0x17 => { self.cycle += 1; let pos = self.reg[(identifier - 0x10) as usize]; let offset = self.mem[self.pcplus(movepc) as usize]; self.mem[pos.wrapping_add(offset) as usize] } 0x18 => { if is_a { let oldsp = self.sp; self.sp = self.sp.wrapping_add(1); self.mem[oldsp as usize] } else { self.sp = self.sp.wrapping_sub(1); self.mem[self.sp as usize] } } 0x19 => self.mem[self.sp as usize], 0x1a => { self.cycle += 1; let pos = self.sp.wrapping_add(self.mem[self.pcplus(movepc) as usize]); self.mem[pos as usize] } 0x1b => self.sp, 0x1c => self.pc, 0x1d => self.ex, 0x1e => { self.cycle += 1; let pos = self.mem[self.pcplus(movepc) as usize]; self.mem[pos as usize] } 0x1f => { self.cycle += 1; self.mem[self.pcplus(movepc) as usize] } _ => { if is_a && identifier >= 0x20 && identifier <= 0x3f { ((0x10000 + identifier - 0x21) & 0xffff) as u16 } else { 0 } } } } fn value_signed(&mut self, identifier: usize, is_a: bool, movepc: bool) -> i16 { let v = self.value(identifier, is_a, movepc); v as i16 } fn takes_next(&self, id: usize) -> bool { id >= 0x10 && id <= 0x17 || id == 0x1a || id == 0x1e || id == 0x1f } fn process_conditional(&mut self, truth: bool) { if !truth { self.skip_next = true; self.cycle += 1; } } /// Connect device pub fn add_device(&mut self, device: Box<Device>) -> () { if let Some(devices) = Rc::get_mut(&mut self.devices) { devices.push(RefCell::new(device)); } } /// Queues up interrupt. Can be used from hardware. 
pub fn interrupt(&mut self, message: u16) -> () { self.interrupt_queue.push(message); } pub fn tick(&mut self) { let word = self.mem[self.pcplus(true) as usize] as usize; let opcode = word & 0x1f; let id_b = (word >> 5) & 0x1f; let id_a = (word >> 10) & 0x3f; let old_cycle = self.cycle; if self.skip_next { if self.takes_next(id_a) { self.pcplus(true); } if opcode != 0 && self.takes_next(id_b) { self.pcplus(true); } self.skip_next = opcode >= 0x10 && opcode <= 0x17; if self.skip_next { self.cycle += 1; } } else { match opcode { 0 => { let spec_opcode = (word >> 5) & 0x1f; self.process_special_opcode(spec_opcode, id_a); } SET => { self.cycle += 1; let v = self.value(id_a, true, true); self.set(id_b, v); } ADD => { self.cycle += 2; // TODO: Use overflow_add let v = (self.value(id_a, true, true) as i32) + (self.value(id_b, false, false) as i32); if v > 0xffff { self.ex = 1; } else { self.ex = 0; } self.set(id_b, (v & 0xffff) as u16); // & might not be needed } SUB => { self.cycle += 2; let a = self.value(id_a, true, true); let b = self.value(id_b, false, false); if a > b { self.ex = 0xffff; } else { self.ex = 0; } let v = b.wrapping_sub(a); self.set(id_b, v); } MUL => { self.cycle += 2; let v = (self.value(id_a, true, true) as i32) * (self.value(id_b, false, false) as i32); if v > 0xffff || v < 0 { self.ex = ((v >> 16) & 0xffff) as u16; } else { self.ex = 0; } self.set(id_b, (v & 0xffff) as u16); } MLI => { self.cycle += 2; let v = (self.value_signed(id_a, true, true) as i32) * (self.value_signed(id_b, false, false) as i32); if v > 0xffff || v < 0 { self.ex = ((v >> 16) & 0xffff) as u16; } else { self.ex = 0; } self.set(id_b, (v & 0xffff) as u16); } DIV => { self.cycle += 3; let a = self.value(id_a, true, true); let b = self.value(id_b, false, false); let v = if a == 0 { self.ex = 0; 0u16 } else { self.ex = ((((b as i32) << 16) / (a as i32)) & 0xffff) as u16; b / a }; self.set(id_b, v); } DVI => { self.cycle += 3; let a = self.value_signed(id_a, true, true); let b = 
self.value_signed(id_b, false, false); let v = if a == 0 { self.ex = 0; 0i16 } else { self.ex = ((((b as i32) << 16) / (a as i32)) & 0xffff) as u16; b / a }; self.set(id_b, v as u16); } MOD => { self.cycle += 3; let a = self.value(id_a, true, true); let b = self.value(id_b, false, false); let v = if a != 0 { b % a } else { 0 }; self.set(id_b, v); } MDI => { self.cycle += 3; let a = self.value_signed(id_a, true, true); let b = self.value_signed(id_b, false, false); let v = if a != 0 { b % a } else { 0 }; self.set(id_b, v as u16); } AND => { self.cycle += 1; let v = self.value(id_a, true, true) & self.value(id_b, false, false); self.set(id_b, v); } BOR => { self.cycle += 1; let v = self.value(id_a, true, true) | self.value(id_b, false, false); self.set(id_b, v); } XOR => { self.cycle += 1; let v = self.value(id_a, true, true) ^ self.value(id_b, false, false); self.set(id_b, v); } // TODO: These can panic (in debug mode) if shifting too much (>=16) SHR => { self.cycle += 1; let a = self.value(id_a, true, true); let b = self.value(id_b, false, false); let v = b >> a; self.ex = (((b as u32) << 16) >> a) as u16; self.set(id_b, v); } ASR => { self.cycle += 1; let a = self.value(id_a, true, true); let b = self.value_signed(id_b, false, false); let v = (b >> a) as u16; self.ex = (((b as u32) << 16) >> (a as u32)) as u16; self.set(id_b, v); } SHL => { self.cycle += 1; let a = self.value(id_a, true, true); let b = self.value(id_b, false, false); self.ex = (((b as u32) << (a as u32)) >> 16) as u16; let v = b << a; self.set(id_b, v); } IFB => { self.cycle += 2; let truth = (self.value(id_b, false, true) & self.value(id_a, true, true)) != 0; self.process_conditional(truth); } IFC => { self.cycle += 2; let truth = (self.value(id_b, false, true) & self.value(id_a, true, true)) == 0; self.process_conditional(truth); } IFE => { self.cycle += 2; let truth = self.value(id_b, false, true) == self.value(id_a, true, true); self.process_conditional(truth); } IFN => { self.cycle += 2; let 
truth = self.value(id_b, false, true) != self.value(id_a, true, true); self.process_conditional(truth); } IFG => { self.cycle += 2; let truth = self.value(id_b, false, true) > self.value(id_a, true, true); self.process_conditional(truth); } IFA => { self.cycle += 2; let truth = self.value_signed(id_b, false, true) > self.value_signed(id_a, true, true); self.process_conditional(truth); } IFL => { self.cycle += 2; let truth = self.value(id_b, false, true) < self.value(id_a, true, true); self.process_conditional(truth); } IFU => { self.cycle += 2; let truth = self.value_signed(id_b, false, true) < self.value_signed(id_a, true, true); self.process_conditional(truth); } ADX => { self.cycle += 3; let a = self.value(id_a, true, true); let b = self.value(id_b, false, false); if (a as usize) + (b as usize) + (self.ex as usize) > 0xffff { self.ex = 1; } else { self.ex = 0; } let v = a.wrapping_add(b).wrapping_add(self.ex); self.set(id_b, v); } SBX => { self.cycle += 3; let a = self.value(id_a, true, true); let b = self.value(id_b, false, false); if (a as usize) + (self.ex as usize) < (b as usize) { self.ex = 0xffff; } else { self.ex = 0; } let v = a.wrapping_sub(b).wrapping_add(self.ex); self.set(id_b, v); } STI => { self.cycle += 2; let v = self.value(id_a, true, true); self.set(id_b, v); // Increment I and J let v_i = self.reg[REG_I]; let v_j = self.reg[REG_J]; self.reg[REG_I] = v_i.wrapping_add(1); self.reg[REG_J] = v_j.wrapping_add(1); } STD => { self.cycle += 2; let v = self.value(id_a, true, true); self.set(id_b, v); // Decrement I and J let v_i = self.reg[REG_I]; let v_j = self.reg[REG_J]; self.reg[REG_I] = v_i.wrapping_sub(1); self.reg[REG_J] = v_j.wrapping_sub(1); } _ => {} } } if self.skip_next { self.tick(); } else { if !self.interrupt_queue.is_empty() && !self.interrupt_queueing { let message = self.interrupt_queue.remove(0); self.cycle += 4; if self.ia != 0 { self.interrupt_queueing = true; self.sp = self.sp.wrapping_sub(1); self.mem[self.sp as usize] = self.pc; 
self.sp = self.sp.wrapping_sub(1); self.mem[self.sp as usize] = self.reg[REG_A]; self.pc = self.ia; self.reg[REG_A] = message; } } // Tick devices. This allows devices to act asynchronously. // As an optimization, we might want to do this less frequently in the future. let devices = self.devices.clone(); let delta_cycle = self.cycle - old_cycle; for dref in devices.iter() { let mut device = dref.borrow_mut(); device.run(self, delta_cycle); } } } fn process_special_opcode(&mut self, spec_opcode: usize, id_a: usize) { match spec_opcode { 0 => { // Terminate if a 0x00 is processed as an instruction self.terminate = true; } JSR => { self.cycle += 3; self.sp = self.sp.wrapping_sub(1); let new_pc = self.value(id_a, true, true); self.mem[self.sp as usize] = self.pc; self.pc = new_pc; } INT => { let message = self.value(id_a, true, true); self.interrupt(message); } IAG => { self.cycle += 1; let ia = self.ia; self.set(id_a, ia); } IAS => { self.cycle += 1; self.ia = self.value(id_a, true, true); } RFI => { self.cycle += 3; self.interrupt_queueing = false; let stack_a = self.mem[self.sp as usize]; self.reg[REG_A] = stack_a; self.sp = self.sp.wrapping_add(1); self.pc = self.mem[self.sp as usize]; self.sp = self.sp.wrapping_add(1); } IAQ => { self.cycle += 2; let a = self.value(id_a, true, true); self.interrupt_queueing = a > 0; } HWN => { self.cycle += 2; let n_devices = self.devices.len() as u16; self.set(id_a, n_devices); } HWQ => { self.cycle += 4; let device_id = self.value(id_a, true, true) as usize; let (i1, i2, i3, i4, i5) = match self.devices.get(device_id) { Some(dref) => { let d = dref.borrow(); (d.info_hardware_id_lower(), d.info_hardware_id_upper(), d.info_version(), d.info_manufacturer_id_lower(), d.info_manufacturer_id_upper()) } None => (0, 0, 0, 0, 0), }; self.reg[REG_A] = i1; self.reg[REG_B] = i2; self.reg[REG_C] = i3; self.reg[REG_X] = i4; self.reg[REG_Y] = i5; } HWI => { self.cycle += 4; let device_id = self.value(id_a, true, true) as usize; if device_id < 
self.devices.len() { let devices = self.devices.clone(); let mut device = devices.get(device_id).unwrap().borrow_mut(); device.process_interrupt(self); } } // Extensions OUT => { // Temporary printing // OUT p (prints memory address p as a null-terminated string) let a = self.value(id_a, true, true); for i in 0..MEMORY_SIZE { let c = self.mem[a.wrapping_add(i as u16) as usize]; if c == 0 { break; } else { print!("{}", ((c & 0xff) as u8) as char); } } } OUV => { let a = self.value(id_a, true, true); println!("{}", a); } _ => {} } } /// Loads a binary file into the memory. The file needs to be assembled separately. pub fn load_from_binary_file(&mut self, path: &Path) -> Result<()> { //let mut file = try!(File::open(&path)); let mut file = File::open(&path)?; let mut i = 0; let mut buffer: Vec<u8> = Vec::new(); try!(file.read_to_end(&mut buffer)); let mut j = 0; let mut sig = 0u16; for v in buffer { if j % 2 == 0 { sig = v as u16; } else { self.mem[i] = (sig << 8) + (v as u16); i += 1; } j += 1; } Ok(()) } // This function will assemble the binary for you, so the input file should be a .asm file. 
//pub fn load_from_assembly_file(&mut self, path: &Path) -> Result<()> { // TODO //} #[allow(dead_code)] pub fn print(&self) { println!("PC {:04x} SP {:04x} IA {:04x} EX {:04x}\n\ A {:04x} B {:04x} C {:04x}\n\ X {:04x} Y {:04x} Z {:04x}\n\ I {:04x} J {:04x} cycles: {:6}", self.pc, self.sp, self.ia, self.ex, self.reg[0], self.reg[1], self.reg[2], self.reg[3], self.reg[4], self.reg[5], self.reg[6], self.reg[7], self.cycle); // Determine context window let rows = MEMORY_SIZE / 8; let window = 2 * SHOW_ROWS_RADIUS + 1; let cur_row = (self.pc / 8) as usize; let (from, to) = if cur_row < SHOW_ROWS_RADIUS { (0, window) } else if cur_row >= rows - SHOW_ROWS_RADIUS { (rows - window, rows) } else { (cur_row - SHOW_ROWS_RADIUS, cur_row + SHOW_ROWS_RADIUS + 1) }; for i in from..to { let p = i * 8; print!("{:04x}: ", p); for j in 0..8usize { if self.pc as usize == p + j { print!("\x1b[32m{:04x}\x1b[0m ", self.mem[(p + j) as usize]); } else { print!("{:04x} ", self.mem[(p + j) as usize]); } } println!(""); } /* // Use this code to print the bottom (the stack) for i in (0xffff-8)/8..(0x10000/8) { let p = i * 8; print!("{:04x}: ", p); for j in 0..8usize { if self.pc as usize == p + j { print!("\x1b[32m{:04x}\x1b[0m ", self.mem[(p + j) as usize]); } else { print!("{:04x} ", self.mem[(p + j) as usize]); } } println!(""); } */ } }
true
3ee007d7500391df60d759912c99cedb7f0126a9
Rust
Limegrass/luffy
/luffy_gitea/src/structs.rs
UTF-8
9,026
2.609375
3
[ "MIT" ]
permissive
use serde::{Deserialize, Serialize}; // use chrono::DateTime if I'm doing more than just forwarding type DateTimeType = String; // "2017-03-13T13:52:11-04:00" #[derive(Debug, PartialEq, Deserialize, Serialize)] pub struct GitUser { pub name: String, pub email: String, pub username: String, } #[derive(Debug, PartialEq, Deserialize, Serialize)] pub struct GiteaUser { pub id: i64, pub login: String, pub full_name: String, pub email: String, pub avatar_url: String, pub username: String, pub language: String, pub is_admin: bool, pub last_login: DateTimeType, pub created: DateTimeType, } // They're the same type in Gitea Might make sense to redefine // or to just reference GiteaUser in usages later pub type Organization = GiteaUser; #[derive(Debug, PartialEq, Deserialize, Serialize)] pub struct Commit { pub id: String, // uuid pub message: String, pub url: String, // url with scheme // Might be Option<GitUser>? pub author: GitUser, pub committer: GitUser, pub timestamp: DateTimeType, pub verification: Option<CommitVerification>, } #[derive(Debug, PartialEq, Deserialize, Serialize)] pub struct CommitVerification { #[serde(rename = "verified")] pub is_verified: bool, pub reason: String, pub signature: String, pub signer: Option<GitUser>, pub payload: String, } #[derive(Debug, PartialEq, Deserialize, Serialize)] pub struct Milestone { pub id: i64, pub title: String, pub description: String, pub state: String, // StateType: "open", "closed", "all" pub open_issues: i32, pub closed_issues: i32, pub created_at: String, pub updated_at: String, pub closed_at: String, pub due_on: String, } #[derive(Debug, PartialEq, Deserialize, Serialize)] pub struct Repository { pub id: i64, pub owner: GiteaUser, pub name: String, pub full_name: String, // GiteaUsername/RepositoryName pub description: String, #[serde(rename = "empty")] pub is_empty: bool, #[serde(rename = "private")] pub is_private: bool, #[serde(rename = "fork")] pub is_fork: bool, #[serde(rename = "template")] pub is_template: 
bool, pub parent: Option<Box<Repository>>, #[serde(rename = "mirror")] pub is_mirror: bool, #[serde(rename = "size")] pub size_mib: i32, pub html_url: String, pub ssh_url: String, pub clone_url: String, pub original_url: String, pub website: String, pub stars_count: i32, pub forks_count: i32, pub watchers_count: i32, pub open_issues_count: i32, pub open_pr_counter: i32, pub release_counter: i32, pub default_branch: String, #[serde(rename = "archived")] pub is_archived: bool, pub created_at: DateTimeType, pub updated_at: DateTimeType, #[serde(default)] pub permissions: Option<Permissions>, pub has_issues: bool, #[serde(default)] pub internal_tracker: Option<InternalTracker>, #[serde(default)] pub external_tracker: Option<ExternalTracker>, pub has_wiki: bool, #[serde(default)] pub external_wiki: Option<ExternalWiki>, pub has_pull_requests: bool, pub has_projects: bool, #[serde(rename = "ignore_whitespace_conflicts")] pub is_whitespace_conflict_ignored: bool, pub allow_merge_commits: bool, pub allow_rebase: bool, pub allow_rebase_explicit: bool, pub allow_squash_merge: bool, pub avatar_url: String, #[serde(rename = "internal")] pub is_internal: bool, } // yuck but idc rn #[derive(Debug, PartialEq, Deserialize, Serialize)] pub struct InternalTracker { #[serde(rename = "enable_time_tracker")] pub is_enabled: bool, #[serde(rename = "allow_only_contributors_to_track_time")] pub is_contributor_only_time_tracking_enabled: bool, #[serde(rename = "enable_issue_dependencies")] pub is_issue_dependencies_enabled: bool, } #[derive(Debug, PartialEq, Deserialize, Serialize)] pub struct ExternalTracker { pub external_tracker_url: String, // External Issue Tracker URL Format. // Use the placeholders {user}, // {repo} and {index} for the username, // repository name and issue index. 
pub external_tracker_format: String, pub external_tracker_style: String, // `numeric` or `alphanumeric` } #[derive(Debug, PartialEq, Deserialize, Serialize)] pub struct ExternalWiki { #[serde(rename = "external_wiki_url")] pub url: String, } // ??? #[derive(Debug, PartialEq, Deserialize, Serialize)] pub struct Permissions { #[serde(rename = "admin")] pub has_admin: bool, #[serde(rename = "push")] pub has_push: bool, #[serde(rename = "pull")] pub has_pull: bool, } #[derive(Debug, PartialEq, Deserialize, Serialize)] pub struct Issue { pub id: i64, #[serde(rename = "url")] pub api_url: String, pub html_url: String, pub number: i64, pub user: GiteaUser, pub original_author: String, pub original_author_id: i64, pub title: String, pub body: String, pub labels: Vec<String>, pub milestone: Option<Milestone>, pub assignee: Option<GiteaUser>, pub assignees: Option<Vec<GiteaUser>>, pub state: String, // "open", "closed", "all" pub is_locked: bool, #[serde(rename = "comments")] pub comment_count: i32, pub created_at: DateTimeType, pub updated_at: DateTimeType, // updated = created on open? 
pub closed_at: Option<DateTimeType>, pub due_date: Option<DateTimeType>, #[serde(rename = "pull_request")] pub pull_request_meta: Option<PullRequestMeta>, #[serde(rename = "repository")] pub repository_meta: Option<RepositoryMeta>, } #[derive(Debug, PartialEq, Deserialize, Serialize)] pub struct PullRequestMeta { #[serde(rename = "merged")] pub is_merged: bool, #[serde(rename = "merged_at")] pub time_merged: Option<String>, } #[derive(Debug, PartialEq, Deserialize, Serialize)] pub struct RepositoryMeta { pub id: i64, pub name: String, pub owner: String, pub full_name: String, } #[derive(Debug, Deserialize, Serialize)] pub struct Comment { pub id: i64, pub html_url: String, pub pull_request_url: String, pub issue_url: String, pub user: GiteaUser, pub original_author: String, pub original_author_id: i64, pub body: String, pub created_at: DateTimeType, pub updated_at: DateTimeType, } #[derive(Debug, Deserialize, Serialize)] pub struct Changes { #[serde(default)] pub title: Option<ChangesFromPayload>, #[serde(default)] pub body: Option<ChangesFromPayload>, #[serde(default, rename = "ref")] pub ref_path: Option<ChangesFromPayload>, } // TODO: Maybe get rid of this mirroring #[derive(Debug, Deserialize, Serialize)] pub struct ChangesFromPayload { pub from: String, } #[derive(Debug, Deserialize, Serialize)] pub struct Release { pub id: i64, pub tag_name: String, pub target_commitish: String, // branch? 
pub name: String, // title pub body: String, // note #[serde(rename = "url")] pub api_url: String, pub html_url: String, pub tarball_url: String, pub zipball_url: String, #[serde(rename = "draft")] pub is_draft: bool, #[serde(rename = "prelease")] pub is_prerelease: bool, pub created_at: DateTimeType, pub published_at: DateTimeType, pub author: GiteaUser, pub assets: Vec<Attachment>, } #[derive(Debug, Deserialize, Serialize)] pub struct Attachment { pub id: i64, pub name: String, pub size: i64, pub download_count: i64, pub created_at: DateTimeType, pub uuid: String, pub browser_download_url: String, } #[derive(Debug, Deserialize, Serialize)] pub struct PullRequest { pub id: i64, pub url: String, pub number: i64, pub user: GiteaUser, pub title: String, pub body: String, // TODO: labels: Vec<Label> pub milestone: Option<Milestone>, pub assignee: Option<GiteaUser>, pub assignees: Option<Vec<GiteaUser>>, pub state: String, // StateType pub is_locked: bool, #[serde(rename = "comments")] pub comment_count: i32, pub html_url: String, pub diff_url: String, pub patch_url: String, #[serde(rename = "mergeable")] pub is_mergeable: bool, #[serde(rename = "merged")] pub is_merged: bool, pub merged_at: Option<DateTimeType>, pub merge_commit_sha: Option<String>, pub merged_by: Option<GiteaUser>, pub base: Option<PRBranchInfo>, pub head: Option<PRBranchInfo>, pub merge_base: String, pub due_date: Option<DateTimeType>, pub created_at: Option<DateTimeType>, pub updated_at: Option<DateTimeType>, pub closed_at: Option<DateTimeType>, } #[derive(Debug, Deserialize, Serialize)] pub struct PRBranchInfo { pub label: String, #[serde(rename = "ref")] pub ref_path: String, pub sha: String, pub repo_id: i64, #[serde(rename = "repo")] pub repository: Repository, }
true
82a71fc7c8b16d74847eccb8b05fdeda190fa43d
Rust
ResidentMario/rust-learn
/enums/src/main.rs
UTF-8
1,549
4.03125
4
[]
no_license
enum Whiteness { Whiteish, Alabama } // When you attach fields to a enum like this you have to declare those // field with values when you use them. Remember: an enum is just a fancy struct. // This is kind of sort of just a struct with another struct field in it that you // can run a match over. enum Race { White{ whiteness: Whiteness }, Black, Mexican, Other, } // impl Race { // fn poorness(&self) -> String { // match self { // Race::White => String::from("rich prick"), // Race::Black => String::from("poor fuck"), // Race::Mexican => String::from("no peso"), // Race::Other => String::from("r u asian") // } // } // } impl Person { fn poorness(&self) -> String { match &self.race { Race::White{ whiteness } => { match whiteness { Whiteness::Whiteish => String::from("exactly average"), Whiteness::Alabama => String::from("rich prick"), } } Race::Black => String::from("poor fuck"), Race::Mexican => String::from("no peso"), Race::Other => String::from("r u asian"), } } } struct Person { name: String, age: u8, race: Race, } fn main() { let name = String::from("Peter"); let age = 27; let race = Race::White { whiteness: Whiteness::Whiteish }; let peter = Person { name, age, race }; println!("{}", peter.age); println!("{}", peter.poorness()); }
true
3543370474007b9ea61a4c092fbbbdf5cdcd809a
Rust
starsheriff/rsql
/src/tokenizer.rs
UTF-8
3,245
3.796875
4
[ "MIT" ]
permissive
use std::iter::Peekable; use std::str::Chars; #[derive(Debug, PartialEq)] pub enum Token { Word(Word), Select, Equal, Gt, Lt, LBrace, RBrace, } #[derive(Debug, PartialEq)] pub struct Word {} #[derive(Debug)] pub enum Command { Quit, Help, } #[derive(Debug)] pub enum Error { UnknownError, NotImplemented, UnknownCommand, } pub trait Tokenizer { fn tokenize(&self) -> Result<Vec<Token>, Error>; } impl Tokenizer for String { fn tokenize(&self) -> Result<Vec<Token>, Error> { let mut chars = self.chars().peekable(); let mut tokens: Vec<Token> = vec![]; while let Some(token) = match_token(&mut chars)? { tokens.push(token); } return Ok(tokens); } } fn match_token(b: &mut Peekable<Chars<'_>>) -> Result<Option<Token>, Error> { let consuming_return = |b: &mut Peekable<Chars<'_>>, t: Token| -> Result<Option<Token>, Error> { b.next(); Ok(Some(t)) }; match b.peek() { Some(&ch) => match ch { '=' => consuming_return(b, Token::Equal), '>' => consuming_return(b, Token::Gt), '<' => consuming_return(b, Token::Lt), '{' => consuming_return(b, Token::LBrace), '}' => consuming_return(b, Token::RBrace), _ => { b.next(); Err(Error::NotImplemented) } }, None => Ok(None), } } pub fn match_command(input_buffer: &mut str) -> Result<Command, Error> { let mut chars = input_buffer.chars().peekable(); match chars.peek() { Some('.') => { let mut buffer = String::new(); chars.next(); loop { match chars.peek() { Some(&c) if c.is_alphabetic() => { buffer.push(chars.next().unwrap()); } Some(_) => break, None => break, } } match buffer.to_lowercase().as_ref() { "q" => Ok(Command::Quit), "h" => Ok(Command::Help), _ => Err(Error::UnknownCommand), } } _ => Err(Error::UnknownCommand), } } #[cfg(test)] mod test { use super::*; #[test] fn a_test() { let tokens = "=".to_string().tokenize().unwrap(); assert_eq!(tokens, vec![Token::Equal]); } #[test] fn multiple_equals() { let tokens = "==".to_string().tokenize().unwrap(); assert_eq!(tokens, vec![Token::Equal, Token::Equal]); } #[test] fn 
multiple_equals_w_whitespace() { let tokens = ">".to_string().tokenize().unwrap(); assert_eq!(tokens, vec![Token::Gt]); } #[test] fn single_lt() { let tokens = "<".to_string().tokenize().unwrap(); assert_eq!(tokens, vec![Token::Lt]); } #[test] fn single_rbrace() { let tokens = "}".to_string().tokenize().unwrap(); assert_eq!(tokens, vec![Token::RBrace]); } #[test] fn single_lbrace() { let tokens = "{".to_string().tokenize().unwrap(); assert_eq!(tokens, vec![Token::LBrace]); } }
true
12ed0875693e0ff2a03ed5bb71ca15fd61419604
Rust
DrunkFlamingo/kailua
/kailua_langsvr/src/message.rs
UTF-8
1,412
2.5625
3
[ "MIT", "Apache-2.0" ]
permissive
use std::error::Error; // general notifications define_msg! { pub CannotReadConfig: "ko" => "프로젝트에서 `kailua.json`이나 `.vscode/kailua.json`을 읽을 수 없습니다. \ 이번 세션에서 타입 체크가 비활성화됩니다.", _ => "Cannot read `kailua.json` or `.vscode/kailua.json` in the project; \ type checking is disabled for this session.", } define_msg! { pub NoStartPath: "ko" => "`kailua.json`에 시작 경로가 지정되어 있지 않습니다. \ 이번 세션에서 타입 체크가 비활성화됩니다.", _ => "There is no start path specified in `kailua.json`; \ type checking is disabled for this session.", } define_msg! { pub CannotRename: "ko" => "이 이름은 고칠 수 없습니다.", _ => "You cannot rename this name.", } // reports generated by language server define_msg! { pub RestartRequired: "ko" => "`kailua.json`에 문제가 있습니다. 고친 뒤 세션을 재시작해 주십시오", _ => "`kailua.json` has an issue; please fix it and restart the session", } define_msg! { pub CannotOpenStartPath<'a> { error: &'a Error }: "ko" => "시작 경로를 열 수 없습니다. (이유: {error})", _ => "Couldn't open a start path. (Cause: {error})", } define_msg! { pub OmittedSelfLabel: "ko" => "<생략됨>", _ => "<omitted>", }
true
6f4b89e61d35ce594dbb53412d1740f95be3f993
Rust
AlisCode/ld44
/src/resources/mouse.rs
UTF-8
532
2.953125
3
[ "MIT" ]
permissive
use quicksilver::geom::Vector; use quicksilver::input::{ButtonState, Mouse, MouseButton}; #[derive(Default)] pub struct MouseWrapper { pub mouse: Option<Mouse>, } impl MouseWrapper { pub fn get_button(&self, btn: MouseButton) -> ButtonState { if let Some(m) = &self.mouse { m[btn] } else { ButtonState::NotPressed } } pub fn get_coords(&self) -> Vector { match &self.mouse { Some(m) => m.pos(), _ => (0, 0).into(), } } }
true
3e22e6cb37bf5bb1c9366d5b902983fc97c71f96
Rust
rust-lang/rust
/tests/ui/typeck/do-not-suggest-adding-missing-zero-to-floating-point-number.rs
UTF-8
756
3
3
[ "Apache-2.0", "LLVM-exception", "NCSA", "BSD-2-Clause", "LicenseRef-scancode-unicode", "MIT", "LicenseRef-scancode-other-permissive" ]
permissive
macro_rules! num { () => { 1 } } fn main() { let x = 1i32; x.e10; //~ERROR `i32` is a primitive type and therefore doesn't have fields let y = 1; y.e10; //~ERROR `{integer}` is a primitive type and therefore doesn't have fields 2u32.e10; //~ERROR `u32` is a primitive type and therefore doesn't have fields num!().e10; //~ERROR `{integer}` is a primitive type and therefore doesn't have fields 2.e10foo; //~ERROR `{integer}` is a primitive type and therefore doesn't have fields 42._; //~^ERROR expected identifier, found reserved identifier `_` //~|ERROR `{integer}` is a primitive type and therefore doesn't have fields 42.a; //~ERROR `{integer}` is a primitive type and therefore doesn't have fields }
true
e434edee7d79a5e3af1c6fb9a859cdf14b6922bb
Rust
dfrankland/mk20d7
/src/uart1/c1/mod.rs
UTF-8
26,257
2.796875
3
[ "MIT" ]
permissive
#[doc = r" Value read from the register"] pub struct R { bits: u8, } #[doc = r" Value to write to the register"] pub struct W { bits: u8, } impl super::C1 { #[doc = r" Modifies the contents of the register"] #[inline] pub fn modify<F>(&self, f: F) where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W, { let bits = self.register.get(); let r = R { bits: bits }; let mut w = W { bits: bits }; f(&r, &mut w); self.register.set(w.bits); } #[doc = r" Reads the contents of the register"] #[inline] pub fn read(&self) -> R { R { bits: self.register.get(), } } #[doc = r" Writes to the register"] #[inline] pub fn write<F>(&self, f: F) where F: FnOnce(&mut W) -> &mut W, { let mut w = W::reset_value(); f(&mut w); self.register.set(w.bits); } #[doc = r" Writes the reset value to the register"] #[inline] pub fn reset(&self) { self.write(|w| w) } } #[doc = "Possible values of the field `PT`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum PTR { #[doc = "Even parity."] _0, #[doc = "Odd parity."] _1, } impl PTR { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { PTR::_0 => false, PTR::_1 => true, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> PTR { match value { false => PTR::_0, true => PTR::_1, } } #[doc = "Checks if the value of the field is `_0`"] #[inline] pub fn is_0(&self) -> bool { *self == PTR::_0 } #[doc = "Checks if the value of the field is `_1`"] #[inline] pub fn is_1(&self) -> bool { *self == PTR::_1 } } #[doc = "Possible values of the field `PE`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum PER { #[doc = "Parity function disabled."] _0, #[doc = "Parity function enabled."] _1, } impl PER { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn 
bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { PER::_0 => false, PER::_1 => true, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> PER { match value { false => PER::_0, true => PER::_1, } } #[doc = "Checks if the value of the field is `_0`"] #[inline] pub fn is_0(&self) -> bool { *self == PER::_0 } #[doc = "Checks if the value of the field is `_1`"] #[inline] pub fn is_1(&self) -> bool { *self == PER::_1 } } #[doc = "Possible values of the field `ILT`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum ILTR { #[doc = "Idle character bit count starts after start bit."] _0, #[doc = "Idle character bit count starts after stop bit."] _1, } impl ILTR { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { ILTR::_0 => false, ILTR::_1 => true, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> ILTR { match value { false => ILTR::_0, true => ILTR::_1, } } #[doc = "Checks if the value of the field is `_0`"] #[inline] pub fn is_0(&self) -> bool { *self == ILTR::_0 } #[doc = "Checks if the value of the field is `_1`"] #[inline] pub fn is_1(&self) -> bool { *self == ILTR::_1 } } #[doc = "Possible values of the field `WAKE`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum WAKER { #[doc = "Idle-line wakeup."] _0, #[doc = "Address-mark wakeup."] _1, } impl WAKER { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> 
bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { WAKER::_0 => false, WAKER::_1 => true, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> WAKER { match value { false => WAKER::_0, true => WAKER::_1, } } #[doc = "Checks if the value of the field is `_0`"] #[inline] pub fn is_0(&self) -> bool { *self == WAKER::_0 } #[doc = "Checks if the value of the field is `_1`"] #[inline] pub fn is_1(&self) -> bool { *self == WAKER::_1 } } #[doc = "Possible values of the field `M`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum MR { #[doc = "Normal - start + 8 data bits (MSB/LSB first as determined by MSBF) + stop."] _0, #[doc = "Use - start + 9 data bits (MSB/LSB first as determined by MSBF) + stop."] _1, } impl MR { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { MR::_0 => false, MR::_1 => true, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> MR { match value { false => MR::_0, true => MR::_1, } } #[doc = "Checks if the value of the field is `_0`"] #[inline] pub fn is_0(&self) -> bool { *self == MR::_0 } #[doc = "Checks if the value of the field is `_1`"] #[inline] pub fn is_1(&self) -> bool { *self == MR::_1 } } #[doc = "Possible values of the field `RSRC`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum RSRCR { #[doc = "Selects internal loop back mode and receiver input is internally connected to transmitter output."] _0, #[doc = "Single-wire UART mode where the receiver input is connected to the transmit pin input signal."] _1, } impl RSRCR { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns 
`true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { RSRCR::_0 => false, RSRCR::_1 => true, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> RSRCR { match value { false => RSRCR::_0, true => RSRCR::_1, } } #[doc = "Checks if the value of the field is `_0`"] #[inline] pub fn is_0(&self) -> bool { *self == RSRCR::_0 } #[doc = "Checks if the value of the field is `_1`"] #[inline] pub fn is_1(&self) -> bool { *self == RSRCR::_1 } } #[doc = "Possible values of the field `UARTSWAI`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum UARTSWAIR { #[doc = "UART clock continues to run in wait mode."] _0, #[doc = "UART clock freezes while CPU is in wait mode."] _1, } impl UARTSWAIR { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { UARTSWAIR::_0 => false, UARTSWAIR::_1 => true, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> UARTSWAIR { match value { false => UARTSWAIR::_0, true => UARTSWAIR::_1, } } #[doc = "Checks if the value of the field is `_0`"] #[inline] pub fn is_0(&self) -> bool { *self == UARTSWAIR::_0 } #[doc = "Checks if the value of the field is `_1`"] #[inline] pub fn is_1(&self) -> bool { *self == UARTSWAIR::_1 } } #[doc = "Possible values of the field `LOOPS`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum LOOPSR { #[doc = "Normal operation."] _0, #[doc = "Loop mode where transmitter output is internally connected to receiver input. 
The receiver input is determined by the RSRC bit."] _1, } impl LOOPSR { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { LOOPSR::_0 => false, LOOPSR::_1 => true, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> LOOPSR { match value { false => LOOPSR::_0, true => LOOPSR::_1, } } #[doc = "Checks if the value of the field is `_0`"] #[inline] pub fn is_0(&self) -> bool { *self == LOOPSR::_0 } #[doc = "Checks if the value of the field is `_1`"] #[inline] pub fn is_1(&self) -> bool { *self == LOOPSR::_1 } } #[doc = "Values that can be written to the field `PT`"] pub enum PTW { #[doc = "Even parity."] _0, #[doc = "Odd parity."] _1, } impl PTW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self { PTW::_0 => false, PTW::_1 => true, } } } #[doc = r" Proxy"] pub struct _PTW<'a> { w: &'a mut W, } impl<'a> _PTW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: PTW) -> &'a mut W { { self.bit(variant._bits()) } } #[doc = "Even parity."] #[inline] pub fn _0(self) -> &'a mut W { self.variant(PTW::_0) } #[doc = "Odd parity."] #[inline] pub fn _1(self) -> &'a mut W { self.variant(PTW::_1) } #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 0; self.w.bits &= !((MASK as u8) << OFFSET); self.w.bits |= ((value & MASK) as u8) << OFFSET; self.w } } #[doc = "Values that can be written to the field `PE`"] pub enum PEW { #[doc = "Parity function disabled."] 
_0, #[doc = "Parity function enabled."] _1, } impl PEW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self { PEW::_0 => false, PEW::_1 => true, } } } #[doc = r" Proxy"] pub struct _PEW<'a> { w: &'a mut W, } impl<'a> _PEW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: PEW) -> &'a mut W { { self.bit(variant._bits()) } } #[doc = "Parity function disabled."] #[inline] pub fn _0(self) -> &'a mut W { self.variant(PEW::_0) } #[doc = "Parity function enabled."] #[inline] pub fn _1(self) -> &'a mut W { self.variant(PEW::_1) } #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 1; self.w.bits &= !((MASK as u8) << OFFSET); self.w.bits |= ((value & MASK) as u8) << OFFSET; self.w } } #[doc = "Values that can be written to the field `ILT`"] pub enum ILTW { #[doc = "Idle character bit count starts after start bit."] _0, #[doc = "Idle character bit count starts after stop bit."] _1, } impl ILTW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self { ILTW::_0 => false, ILTW::_1 => true, } } } #[doc = r" Proxy"] pub struct _ILTW<'a> { w: &'a mut W, } impl<'a> _ILTW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: ILTW) -> &'a mut W { { self.bit(variant._bits()) } } #[doc = "Idle character bit count starts after start bit."] #[inline] pub fn _0(self) -> &'a mut W { self.variant(ILTW::_0) } #[doc = "Idle character bit count starts after stop bit."] #[inline] pub fn _1(self) -> &'a mut W { self.variant(ILTW::_1) } #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { 
self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 2; self.w.bits &= !((MASK as u8) << OFFSET); self.w.bits |= ((value & MASK) as u8) << OFFSET; self.w } } #[doc = "Values that can be written to the field `WAKE`"] pub enum WAKEW { #[doc = "Idle-line wakeup."] _0, #[doc = "Address-mark wakeup."] _1, } impl WAKEW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self { WAKEW::_0 => false, WAKEW::_1 => true, } } } #[doc = r" Proxy"] pub struct _WAKEW<'a> { w: &'a mut W, } impl<'a> _WAKEW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: WAKEW) -> &'a mut W { { self.bit(variant._bits()) } } #[doc = "Idle-line wakeup."] #[inline] pub fn _0(self) -> &'a mut W { self.variant(WAKEW::_0) } #[doc = "Address-mark wakeup."] #[inline] pub fn _1(self) -> &'a mut W { self.variant(WAKEW::_1) } #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 3; self.w.bits &= !((MASK as u8) << OFFSET); self.w.bits |= ((value & MASK) as u8) << OFFSET; self.w } } #[doc = "Values that can be written to the field `M`"] pub enum MW { #[doc = "Normal - start + 8 data bits (MSB/LSB first as determined by MSBF) + stop."] _0, #[doc = "Use - start + 9 data bits (MSB/LSB first as determined by MSBF) + stop."] _1, } impl MW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self { MW::_0 => false, MW::_1 => true, } } } #[doc = r" Proxy"] pub struct _MW<'a> { w: &'a mut W, } impl<'a> _MW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: MW) -> &'a mut W { { self.bit(variant._bits()) } } #[doc = 
"Normal - start + 8 data bits (MSB/LSB first as determined by MSBF) + stop."] #[inline] pub fn _0(self) -> &'a mut W { self.variant(MW::_0) } #[doc = "Use - start + 9 data bits (MSB/LSB first as determined by MSBF) + stop."] #[inline] pub fn _1(self) -> &'a mut W { self.variant(MW::_1) } #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 4; self.w.bits &= !((MASK as u8) << OFFSET); self.w.bits |= ((value & MASK) as u8) << OFFSET; self.w } } #[doc = "Values that can be written to the field `RSRC`"] pub enum RSRCW { #[doc = "Selects internal loop back mode and receiver input is internally connected to transmitter output."] _0, #[doc = "Single-wire UART mode where the receiver input is connected to the transmit pin input signal."] _1, } impl RSRCW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self { RSRCW::_0 => false, RSRCW::_1 => true, } } } #[doc = r" Proxy"] pub struct _RSRCW<'a> { w: &'a mut W, } impl<'a> _RSRCW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: RSRCW) -> &'a mut W { { self.bit(variant._bits()) } } #[doc = "Selects internal loop back mode and receiver input is internally connected to transmitter output."] #[inline] pub fn _0(self) -> &'a mut W { self.variant(RSRCW::_0) } #[doc = "Single-wire UART mode where the receiver input is connected to the transmit pin input signal."] #[inline] pub fn _1(self) -> &'a mut W { self.variant(RSRCW::_1) } #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { 
const MASK: bool = true; const OFFSET: u8 = 5; self.w.bits &= !((MASK as u8) << OFFSET); self.w.bits |= ((value & MASK) as u8) << OFFSET; self.w } } #[doc = "Values that can be written to the field `UARTSWAI`"] pub enum UARTSWAIW { #[doc = "UART clock continues to run in wait mode."] _0, #[doc = "UART clock freezes while CPU is in wait mode."] _1, } impl UARTSWAIW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self { UARTSWAIW::_0 => false, UARTSWAIW::_1 => true, } } } #[doc = r" Proxy"] pub struct _UARTSWAIW<'a> { w: &'a mut W, } impl<'a> _UARTSWAIW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: UARTSWAIW) -> &'a mut W { { self.bit(variant._bits()) } } #[doc = "UART clock continues to run in wait mode."] #[inline] pub fn _0(self) -> &'a mut W { self.variant(UARTSWAIW::_0) } #[doc = "UART clock freezes while CPU is in wait mode."] #[inline] pub fn _1(self) -> &'a mut W { self.variant(UARTSWAIW::_1) } #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 6; self.w.bits &= !((MASK as u8) << OFFSET); self.w.bits |= ((value & MASK) as u8) << OFFSET; self.w } } #[doc = "Values that can be written to the field `LOOPS`"] pub enum LOOPSW { #[doc = "Normal operation."] _0, #[doc = "Loop mode where transmitter output is internally connected to receiver input. 
The receiver input is determined by the RSRC bit."] _1, } impl LOOPSW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self { LOOPSW::_0 => false, LOOPSW::_1 => true, } } } #[doc = r" Proxy"] pub struct _LOOPSW<'a> { w: &'a mut W, } impl<'a> _LOOPSW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: LOOPSW) -> &'a mut W { { self.bit(variant._bits()) } } #[doc = "Normal operation."] #[inline] pub fn _0(self) -> &'a mut W { self.variant(LOOPSW::_0) } #[doc = "Loop mode where transmitter output is internally connected to receiver input. The receiver input is determined by the RSRC bit."] #[inline] pub fn _1(self) -> &'a mut W { self.variant(LOOPSW::_1) } #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 7; self.w.bits &= !((MASK as u8) << OFFSET); self.w.bits |= ((value & MASK) as u8) << OFFSET; self.w } } impl R { #[doc = r" Value of the register as raw bits"] #[inline] pub fn bits(&self) -> u8 { self.bits } #[doc = "Bit 0 - Parity Type"] #[inline] pub fn pt(&self) -> PTR { PTR::_from({ const MASK: bool = true; const OFFSET: u8 = 0; ((self.bits >> OFFSET) & MASK as u8) != 0 }) } #[doc = "Bit 1 - Parity Enable"] #[inline] pub fn pe(&self) -> PER { PER::_from({ const MASK: bool = true; const OFFSET: u8 = 1; ((self.bits >> OFFSET) & MASK as u8) != 0 }) } #[doc = "Bit 2 - Idle Line Type Select"] #[inline] pub fn ilt(&self) -> ILTR { ILTR::_from({ const MASK: bool = true; const OFFSET: u8 = 2; ((self.bits >> OFFSET) & MASK as u8) != 0 }) } #[doc = "Bit 3 - Receiver Wakeup Method Select"] #[inline] pub fn wake(&self) -> WAKER { WAKER::_from({ const MASK: bool = true; const OFFSET: u8 = 3; ((self.bits >> OFFSET) & MASK as u8) != 0 }) } 
#[doc = "Bit 4 - 9-bit or 8-bit Mode Select"] #[inline] pub fn m(&self) -> MR { MR::_from({ const MASK: bool = true; const OFFSET: u8 = 4; ((self.bits >> OFFSET) & MASK as u8) != 0 }) } #[doc = "Bit 5 - Receiver Source Select"] #[inline] pub fn rsrc(&self) -> RSRCR { RSRCR::_from({ const MASK: bool = true; const OFFSET: u8 = 5; ((self.bits >> OFFSET) & MASK as u8) != 0 }) } #[doc = "Bit 6 - UART Stops in Wait Mode"] #[inline] pub fn uartswai(&self) -> UARTSWAIR { UARTSWAIR::_from({ const MASK: bool = true; const OFFSET: u8 = 6; ((self.bits >> OFFSET) & MASK as u8) != 0 }) } #[doc = "Bit 7 - Loop Mode Select"] #[inline] pub fn loops(&self) -> LOOPSR { LOOPSR::_from({ const MASK: bool = true; const OFFSET: u8 = 7; ((self.bits >> OFFSET) & MASK as u8) != 0 }) } } impl W { #[doc = r" Reset value of the register"] #[inline] pub fn reset_value() -> W { W { bits: 0 } } #[doc = r" Writes raw bits to the register"] #[inline] pub unsafe fn bits(&mut self, bits: u8) -> &mut Self { self.bits = bits; self } #[doc = "Bit 0 - Parity Type"] #[inline] pub fn pt(&mut self) -> _PTW { _PTW { w: self } } #[doc = "Bit 1 - Parity Enable"] #[inline] pub fn pe(&mut self) -> _PEW { _PEW { w: self } } #[doc = "Bit 2 - Idle Line Type Select"] #[inline] pub fn ilt(&mut self) -> _ILTW { _ILTW { w: self } } #[doc = "Bit 3 - Receiver Wakeup Method Select"] #[inline] pub fn wake(&mut self) -> _WAKEW { _WAKEW { w: self } } #[doc = "Bit 4 - 9-bit or 8-bit Mode Select"] #[inline] pub fn m(&mut self) -> _MW { _MW { w: self } } #[doc = "Bit 5 - Receiver Source Select"] #[inline] pub fn rsrc(&mut self) -> _RSRCW { _RSRCW { w: self } } #[doc = "Bit 6 - UART Stops in Wait Mode"] #[inline] pub fn uartswai(&mut self) -> _UARTSWAIW { _UARTSWAIW { w: self } } #[doc = "Bit 7 - Loop Mode Select"] #[inline] pub fn loops(&mut self) -> _LOOPSW { _LOOPSW { w: self } } }
true
df5e230a2af8ac60780a393d0b10ac5d937b36e1
Rust
drew-y/rustracer
/src/geometry/translation.rs
UTF-8
5,624
2.921875
3
[]
no_license
use super::super::tracer::*; use std::f32::{consts::PI, MAX as F32MAX}; #[derive(Clone)] pub struct FlipNormals { hitable: BoxHitable, } impl Hitable for FlipNormals { fn hit(&self, r: &Ray, t_min: f32, t_max: f32) -> Option<HitRecord> { let rec = self.hitable.hit(r, t_min, t_max)?; Some(HitRecord { normal: -rec.normal, ..rec }) } fn bounding_box(&self) -> Option<BoundingBox> { self.hitable.bounding_box() } fn box_clone(&self) -> BoxHitable { Box::new(self.clone()) } } impl Translation for FlipNormals {} pub fn flip_normals(hitable: BoxHitable) -> FlipNormals { FlipNormals { hitable } } #[derive(Clone)] pub struct Shift { hitable: BoxHitable, offset: Vec3, } impl Hitable for Shift { fn hit(&self, r: &Ray, t_min: f32, t_max: f32) -> Option<HitRecord> { let moved_ray = Ray { origin: r.origin - self.offset, direction: r.direction, }; if let Some(rec) = self.hitable.hit(&moved_ray, t_min, t_max) { let new_rec = HitRecord { p: rec.p + self.offset, ..rec }; Some(new_rec) } else { None } } fn bounding_box(&self) -> Option<BoundingBox> { if let Some(original_bounding_box) = self.hitable.bounding_box() { Some(BoundingBox { min: original_bounding_box.min + self.offset, max: original_bounding_box.max + self.offset, }) } else { None } } fn box_clone(&self) -> BoxHitable { Box::new(self.clone()) } } impl Translation for Shift {} #[derive(Clone)] pub struct YRotation { hitable: BoxHitable, sin_theta: f32, cos_theta: f32, bbox: Option<BoundingBox>, } impl YRotation { fn gen_bbox(hitable_bbox: BoundingBox, cos_theta: f32, sin_theta: f32) -> BoundingBox { let mut min = Vec3::new(F32MAX, F32MAX, F32MAX); let mut max = Vec3::new(-F32MAX, -F32MAX, -F32MAX); for i in 0..2 { for j in 0..2 { for k in 0..2 { let x = i as f32 * hitable_bbox.max.x + (1 - i) as f32 * hitable_bbox.min.x; let y = j as f32 * hitable_bbox.max.y + (1 - j) as f32 * hitable_bbox.min.y; let z = k as f32 * hitable_bbox.max.z + (1 - k) as f32 * hitable_bbox.min.z; let newx = cos_theta * x + sin_theta * z; let newz = 
-sin_theta * x + cos_theta * z; let tester = Vec3::new(newx, y, newz); for c in 0..3 { let val = tester.index(c); if val > max.index(c) { max.set_index(c, val); } if val < min.index(c) { min.set_index(c, val); } } } } } BoundingBox { min, max } } /// Rotate a hitable about the y axis by angle in degrees pub fn new(hitable: BoxHitable, angle: f32) -> YRotation { let radians = (PI / 180.0) * angle; let sin_theta = radians.sin(); let cos_theta = radians.cos(); let hitable_bbox_maybe = hitable.bounding_box(); let bbox = if let Some(hitable_bbox) = hitable_bbox_maybe { Some(Self::gen_bbox(hitable_bbox, cos_theta, sin_theta)) } else { None }; YRotation { hitable, bbox, sin_theta, cos_theta, } } } impl Hitable for YRotation { fn hit(&self, r: &Ray, t_min: f32, t_max: f32) -> Option<HitRecord> { let mut origin = r.origin; let mut direction = r.direction; origin.x = self.cos_theta * r.origin.x - self.sin_theta * r.origin.z; origin.z = self.sin_theta * r.origin.x + self.cos_theta * r.origin.z; direction.x = self.cos_theta * r.direction.x - self.sin_theta * r.direction.z; direction.z = self.sin_theta * r.direction.x + self.cos_theta * r.direction.z; let rotated_ray = Ray { origin, direction }; let rec = self.hitable.hit(&rotated_ray, t_min, t_max)?; let mut p = rec.p; let mut normal = rec.normal; p.x = self.cos_theta * rec.p.x + self.sin_theta * rec.p.z; p.z = -self.sin_theta * rec.p.x + self.cos_theta * rec.p.z; normal.x = self.cos_theta * rec.normal.x + self.sin_theta * rec.normal.z; normal.z = -self.sin_theta * rec.normal.x + self.cos_theta * rec.normal.z; Some(HitRecord { p, normal, ..rec }) } fn bounding_box(&self) -> Option<BoundingBox> { self.bbox } fn box_clone(&self) -> BoxHitable { Box::new(self.clone()) } } impl Translation for YRotation {} pub trait Translation: Hitable + Sized { fn shift(self, x: f32, y: f32, z: f32) -> Shift { Shift { hitable: self.box_clone(), offset: Vec3::new(x, y, z), } } fn rotate_y(self, angle: f32) -> YRotation { 
YRotation::new(self.box_clone(), angle) } fn flip_normals(self) -> FlipNormals { flip_normals(self.box_clone()) } fn to_box(self) -> Box<Self> { Box::new(self) } /// Push self into a list of boxed hitables (boxes self) fn push_into_list_of_boxed_hitables<'a>(self, list: &mut Vec<Box<dyn Hitable + 'a>>) where Self: 'a, { list.push(self.to_box()) } }
true
ef0d8b26f9c894c914454f4b2f111850da9f7440
Rust
PurpleBooth/kata-tennis
/src/lib.rs
UTF-8
7,383
3.25
3
[]
no_license
use std::collections::HashMap; use std::fmt::Error; use std::fmt::Formatter; const PLAYER_1_ID: bool = false; const PLAYER_2_ID: bool = true; #[derive(PartialEq, Eq, Debug, Hash, Copy, Clone)] struct Score(u8); impl Score { fn new(score: u8) -> Result<Score, String> { score_to_call(score) .map(|_| Score(score)) .ok_or(format!("Score {} is not a valid tennis score", score)) } fn add(&self, point: Point) -> Result<Score, String> { Score::new(self.0 + 1) } } impl std::fmt::Display for Score { fn fmt(&self, f: &mut Formatter) -> Result<(), Error> { write!(f, "{}", score_to_call(self.0).unwrap()) } } trait Call { fn to_string(&self) -> String; } impl std::fmt::Display for Call { fn fmt(&self, f: &mut Formatter) -> Result<(), Error> { write!(f, "{}", self.to_string()) } } struct CallName(String); impl Call for CallName { fn to_string(&self) -> String { self.0.to_string() } } struct CallNumber(u8); impl Call for CallNumber { fn to_string(&self) -> String { format!("{}", self.0).to_string() } } struct GameScore<'gs> { scores: HashMap<&'gs Player, Score>, } impl<'gs> GameScore<'gs> { fn new() -> GameScore<'gs> { let mut game = GameScore { scores: HashMap::new(), }; game.scores .insert(Player::player_1(), Score::new(0).unwrap()); game.scores .insert(Player::player_2(), Score::new(0).unwrap()); game } fn scored(&self, point: Point<'gs>) -> GameScore<'gs> { let mut game = GameScore::new(); let old_scores: &HashMap<&Player, Score> = &self.scores; let mut new_scores: HashMap<&Player, Score> = old_scores.into_iter().map(|(k, v)| (*k, *v)).collect(); let scoring_player = point.player; let new_score = old_scores.get(scoring_player).unwrap().add(point).unwrap(); new_scores.insert(scoring_player, new_score); game.scores = new_scores; game } } impl<'gs> std::fmt::Display for GameScore<'gs> { fn fmt(&self, f: &mut Formatter) -> Result<(), Error> { write!( f, "{}-{}", self.scores.get(Player::player_1()).unwrap(), self.scores.get(Player::player_2()).unwrap() ) } } pub struct Point<'p> { 
player: &'p Player, } impl<'p> Point<'p> { fn new(player: &'p Player) -> Point<'p> { Point { player } } } #[derive(PartialEq, Eq, Debug, Hash)] struct Player { id: bool, } impl Player { fn player_1<'p>() -> &'p Player { return &Player { id: PLAYER_1_ID }; } fn player_2<'p>() -> &'p Player { return &Player { id: PLAYER_2_ID }; } } fn score_to_call(score: u8) -> Option<Box<Call>> { return match score { 0 => Some(Box::new(CallName("love".to_string()))), 1 => Some(Box::new(CallNumber(15))), 2 => Some(Box::new(CallNumber(30))), 3 => Some(Box::new(CallNumber(40))), 4 => Some(Box::new(CallName("game".to_string()))), _ => None, }; } pub fn score_game(points: Vec<Point>) -> String { let mut game = GameScore::new(); for point in points { game = game.scored(point) } return game.to_string(); } #[cfg(test)] mod test { use GameScore; use Player; use PLAYER_1_ID; use PLAYER_2_ID; use Point; use Score; use score_game; #[test] fn score_should_be_love_for_0() { assert_eq!(format!("{}", Score::new(0).unwrap()), "love") } #[test] fn score_should_be_15_for_1() { assert_eq!(format!("{}", Score::new(1).unwrap()), "15") } #[test] fn score_should_be_30_for_2() { assert_eq!(format!("{}", Score::new(2).unwrap()), "30") } #[test] fn score_should_be_40_for_3() { assert_eq!(format!("{}", Score::new(3).unwrap()), "40") } #[test] fn score_should_be_game_for_4() { assert_eq!(format!("{}", Score::new(4).unwrap()), "game") } #[test] fn score_should_never_be_more_than_4() { assert_eq!(Score::new(5).is_err(), true) } #[test] fn score_should_of_0_plus_one_point_is_15() { assert_eq!( format!( "{}", Score::new(0) .unwrap() .add(Point { player: &Player::player_1(), }) .unwrap() ), "15" ) } #[test] fn score_should_of_1_plus_one_point_is_30() { assert_eq!( format!( "{}", Score::new(1) .unwrap() .add(Point { player: &Player::player_1(), }) .unwrap() ), "30" ) } #[test] fn score_should_of_2_plus_one_point_is_40() { assert_eq!( format!( "{}", Score::new(2) .unwrap() .add(Point { player: Player::player_1(), }) 
.unwrap() ), "40" ) } #[test] fn score_should_of_3_plus_one_point_is_game() { assert_eq!( format!( "{}", Score::new(3) .unwrap() .add(Point { player: Player::player_1(), }) .unwrap() ), "game" ) } #[test] fn score_should_of_4_plus_one_point_errors() { assert!( Score::new(4) .unwrap() .add(Point { player: &Player::player_1(), }) .is_err() ) } #[test] fn player_should_have_a_player_1() { assert_eq!(Player::player_1(), &Player { id: PLAYER_1_ID }) } #[test] fn player_should_have_a_player_2() { assert_eq!(Player::player_2(), &Player { id: PLAYER_2_ID }) } #[test] fn game_score_should_be_love_love_for_no_scores() { assert_eq!(format!("{}", GameScore::new()), "love-love") } #[test] fn game_score_should_allow_adding_one_point_to_player_1() { let game_score = GameScore::new().scored(Point::new(Player::player_1())); assert_eq!(format!("{}", game_score), "15-love") } #[test] fn game_score_should_allow_adding_one_point_to_player_2() { let game_score = GameScore::new().scored(Point::new(Player::player_2())); assert_eq!(format!("{}", game_score), "love-15") } #[test] fn game_score_should_allow_adding_two_points_to_player_2() { let game_score = GameScore::new() .scored(Point::new(Player::player_2())) .scored(Point::new(Player::player_2())); assert_eq!(game_score.to_string(), "love-30") } #[test] fn score_game_should_be_love_love_for_no_scores() { assert_eq!(score_game(vec![]), "love-love") } #[test] fn score_game_should_be_love_15_for_a_single_player_2_point() { assert_eq!(score_game(vec![Point::new(Player::player_2())]), "love-15") } }
true
019bcaacbd9f5a8933ce74dafc433e41586ca04d
Rust
jjyr/godwoken
/crates/rpc-client/src/error.rs
UTF-8
4,161
2.796875
3
[ "MIT" ]
permissive
/// Get JSONRPC error code from errors returned by RPC methods. pub fn get_jsonrpc_error_code(e: &anyhow::Error) -> Option<i64> { let e: &jsonrpc_core::types::error::Error = e.downcast_ref()?; Some(e.code.code()) } // Copied from CKB. pub enum CkbRpcError { /// (-1): CKB internal errors are considered to never happen or only happen when the system /// resources are exhausted. CKBInternalError = -1, /// (-2): The CKB method has been deprecated and disabled. /// /// Set `rpc.enable_deprecated_rpc` to `true` in the config file to enable all deprecated /// methods. Deprecated = -2, /// (-3): Error code -3 is no longer used. /// /// Before v0.35.0, CKB returns all RPC errors using the error code -3. CKB no longer uses /// -3 since v0.35.0. Invalid = -3, /// (-4): The RPC method is not enabled. /// /// CKB groups RPC methods into modules, and a method is enabled only when the module is /// explicitly enabled in the config file. RPCModuleIsDisabled = -4, /// (-5): DAO related errors. DaoError = -5, /// (-6): Integer operation overflow. IntegerOverflow = -6, /// (-7): The error is caused by a config file option. /// /// Users have to edit the config file to fix the error. ConfigError = -7, /// (-101): The CKB local node failed to broadcast a message to its peers. P2PFailedToBroadcast = -101, /// (-200): Internal database error. /// /// The CKB node persists data to the database. This is the error from the underlying database /// module. DatabaseError = -200, /// (-201): The chain index is inconsistent. /// /// An example of an inconsistent index is that the chain index says a block hash is in the chain /// but the block cannot be read from the database. /// /// This is a fatal error usually due to a serious bug. Please back up the data directory and /// re-sync the chain from scratch. ChainIndexIsInconsistent = -201, /// (-202): The underlying database is corrupt. /// /// This is a fatal error usually caused by the underlying database used by CKB. 
Please back up /// the data directory and re-sync the chain from scratch. DatabaseIsCorrupt = -202, /// (-301): Failed to resolve the referenced cells and headers used in the transaction, as inputs or /// dependencies. TransactionFailedToResolve = -301, /// (-302): Failed to verify the transaction. TransactionFailedToVerify = -302, /// (-1000): Some signatures in the submit alert are invalid. AlertFailedToVerifySignatures = -1000, /// (-1102): The transaction is rejected by the outputs validator specified by the RPC parameter. PoolRejectedTransactionByOutputsValidator = -1102, /// (-1103): Pool rejects some transactions which seem contain invalid VM instructions. See the issue /// link in the error message for details. PoolRejectedTransactionByIllTransactionChecker = -1103, /// (-1104): The transaction fee rate must be greater than or equal to the config option `tx_pool.min_fee_rate` /// /// The fee rate is calculated as: /// /// ```text /// fee / (1000 * tx_serialization_size_in_block_in_bytes) /// ``` PoolRejectedTransactionByMinFeeRate = -1104, /// (-1105): The in-pool ancestors count must be less than or equal to the config option `tx_pool.max_ancestors_count` /// /// Pool rejects a large package of chained transactions to avoid certain kinds of DoS attacks. PoolRejectedTransactionByMaxAncestorsCountLimit = -1105, /// (-1106): The transaction is rejected because the pool has reached its limit. PoolIsFull = -1106, /// (-1107): The transaction is already in the pool. PoolRejectedDuplicatedTransaction = -1107, /// (-1108): The transaction is rejected because it does not make sense in the context. /// /// For example, a cellbase transaction is not allowed in `send_transaction` RPC. PoolRejectedMalformedTransaction = -1108, /// (-1109): The transaction is expired from tx-pool after `expiry_hours`. TransactionExpired = -1109, }
true
224469f6defb2175a4fc1846f114916f9caff14f
Rust
GSam/rust-refactor
/tests/lib.rs
UTF-8
35,087
2.71875
3
[]
no_license
extern crate refactor; use std::fs::File; use std::io::Read; use refactor::{AnalysisData, Response}; fn read_to_string(filename: &str) -> String { let mut file = match File::open(filename) { Err(why) => panic!("couldn't open file {} {}", filename, why), Ok(file) => file, }; let mut output = String::new(); file.read_to_string(&mut output).unwrap(); return output; } fn read_analysis(filename: &str) -> AnalysisData { AnalysisData::new(&read_to_string(filename)) } #[test] fn working_variable_1() { let file = "tests/variable/basic_rename.rs"; let output = read_to_string("tests/variable/working_rename_1_out.rs"); let analysis = read_analysis("tests/variable/basic_rename.csv"); match refactor::rename_variable(&"tests/variable/basic_rename.rs", &analysis, "hello", "9") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_variable_2() { let file = "tests/variable/basic_rename.rs"; let output = read_to_string("tests/variable/working_rename_2_out.rs"); let analysis = read_analysis("tests/variable/basic_rename.csv"); match refactor::rename_variable(&"tests/variable/basic_rename.rs", &analysis, "hello", "17") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_variable_3() { let file = "tests/variable/alex_var_test.rs"; let output = read_to_string("tests/variable/alex_var_test_out.rs"); let analysis = read_analysis("tests/variable/alex_var_test.csv"); match refactor::rename_variable(&"tests/variable/alex_var_test.rs", &analysis, "bar", "14") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_variable_4() { let file = "tests/variable/alex_var_test.rs"; let output = read_to_string("tests/variable/alex_var_test_out2.rs"); let analysis = read_analysis("tests/variable/alex_var_test.csv"); match refactor::rename_variable(&"tests/variable/alex_var_test.rs", &analysis, "bar", "4") { Ok(x) => 
assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_variable_5() { let file = "tests/variable/const_rename.rs"; let output = read_to_string("tests/variable/const_rename_out.rs"); let analysis = read_analysis("tests/variable/const_rename.csv"); match refactor::rename_variable(&"tests/variable/const_rename.rs", &analysis, "BAZ", "8") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_variable_6() { let file = "tests/variable/working_fn_local.rs"; let output = read_to_string("tests/variable/working_fn_local_out.rs"); let analysis = read_analysis("tests/variable/working_fn_local.csv"); match refactor::rename_variable(&"tests/variable/working_fn_local.rs", &analysis, "Foo", "9") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_variable_7() { let file = "tests/variable/working_nested.rs"; let output = read_to_string("tests/variable/working_nested_out.rs"); let analysis = read_analysis("tests/variable/working_nested.csv"); match refactor::rename_variable(&"tests/variable/working_nested.rs", &analysis, "b", "16") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_variable_8() { let file = "tests/variable/working_tuple_let.rs"; let output = read_to_string("tests/variable/working_tuple_let_out.rs"); let analysis = read_analysis("tests/variable/working_tuple_let.csv"); match refactor::rename_variable(file, &analysis, "x", "10") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_variable_9() { let file = "tests/variable/working_mut_tuple_let.rs"; let output = read_to_string("tests/variable/working_mut_tuple_let_out.rs"); let analysis = read_analysis("tests/variable/working_mut_tuple_let.csv"); match refactor::rename_variable(file, &analysis, "x", "10") { Ok(x) => 
assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_variable_10() { let file = "tests/variable/working_mut_tuple_let2.rs"; let output = read_to_string("tests/variable/working_mut_tuple_let2_out.rs"); let analysis = read_analysis("tests/variable/working_mut_tuple_let2.csv"); match refactor::rename_variable(file, &analysis, "x", "11") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_variable_11() { let file = "tests/variable/working_mut_tuple_let3.rs"; let output = read_to_string("tests/variable/working_mut_tuple_let3_out.rs"); let analysis = read_analysis("tests/variable/working_mut_tuple_let3.csv"); match refactor::rename_variable(file, &analysis, "x", "11") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_variable_12() { let file = "tests/variable/conflict_var_use_mod.rs"; let output = read_to_string("tests/variable/conflict_var_use_mod_out.rs"); let analysis = read_analysis("tests/variable/conflict_var_use_mod.csv"); match refactor::rename_variable(file, &analysis, "BIT3", "6") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn prevented_variable_1() { let file = "tests/variable/basic_rename.rs"; let analysis = read_analysis("tests/variable/basic_rename.csv"); match refactor::rename_variable(&file, &analysis, "j", "36") { Ok(_) => assert!(false), Err(x) => assert_eq!(Response::Conflict, x) } } #[test] fn prevented_variable_2() { let file = "tests/variable/basic_rename.rs"; let analysis = read_analysis("tests/variable/basic_rename.csv"); match refactor::rename_variable(&file, &analysis, "x", "36") { Ok(_) => assert!(false), Err(x) => assert_eq!(Response::Conflict, x) } } #[test] fn prevented_variable_3() { let file = "tests/variable/override.rs"; let analysis = read_analysis("tests/variable/override.csv"); match 
refactor::rename_variable(&file, &analysis, "v", "9") { Ok(_) => assert!(false), Err(x) => assert_eq!(Response::Conflict, x) } } #[test] fn prevented_variable_4() { let file = "tests/variable/name_conflict_method.rs"; let analysis = read_analysis("tests/variable/name_conflict_method.csv"); match refactor::rename_variable(file, &analysis, "foo", "12") { Ok(_) => assert!(false), Err(x) => assert_eq!(Response::Conflict, x) } } #[test] fn prevented_variable_5() { let file = "tests/variable/name_conflict_type.rs"; let analysis = read_analysis("tests/variable/name_conflict_type.csv"); match refactor::rename_variable(file, &analysis, "Foo", "12") { Ok(_) => assert!(false), Err(x) => assert_eq!(Response::Conflict, x) } } #[test] fn prevented_variable_6() { let file = "tests/variable/name_conflict_type_local.rs"; let analysis = read_analysis("tests/variable/name_conflict_type_local.csv"); match refactor::rename_variable(file, &analysis, "Foo", "13") { Ok(_) => assert!(false), Err(x) => assert_eq!(Response::Conflict, x) } } #[test] fn prevented_variable_7() { let file = "tests/variable/name_conflict_type_local2.rs"; let analysis = read_analysis("tests/variable/name_conflict_type_local2.csv"); match refactor::rename_variable(file, &analysis, "Foo", "9") { Ok(_) => assert!(false), Err(x) => assert_eq!(Response::Conflict, x) } } #[test] fn prevented_variable_8() { let file = "tests/variable/name_conflict_method_local.rs"; let analysis = read_analysis("tests/variable/name_conflict_method_local.csv"); match refactor::rename_variable(file, &analysis, "foo", "13") { Ok(_) => assert!(false), Err(x) => assert_eq!(Response::Conflict, x) } } #[test] fn prevented_variable_9() { let file = "tests/variable/name_conflict_method_local2.rs"; let analysis = read_analysis("tests/variable/name_conflict_method_local2.csv"); match refactor::rename_variable(file, &analysis, "Foo", "9") { Ok(_) => assert!(false), Err(x) => assert_eq!(Response::Conflict, x) } // fn main() { // let a = 2; // fn foo() 
{} // foo(); // } // // Unlike the type case, this is not detected by the resolve_path // This test is slightly modified, using a, to make sure only resolving occurs // (Rather than a full run) } #[test] fn prevented_variable_10() { let file = "tests/variable/name_conflict_global.rs"; let analysis = read_analysis("tests/variable/name_conflict_global.csv"); match refactor::rename_variable(file, &analysis, "FOO", "12") { Ok(_) => assert!(false), Err(x) => assert_eq!(Response::Conflict, x) } } #[test] fn prevented_variable_11() { let file = "tests/variable/name_conflict_type_global.rs"; let analysis = read_analysis("tests/variable/name_conflict_type_global.csv"); match refactor::rename_variable(file, &analysis, "Foo", "7") { Ok(_) => assert!(false), Err(x) => assert_eq!(Response::Conflict, x) } } #[test] fn prevented_variable_12() { let file = "tests/variable/name_conflict_method_global.rs"; let analysis = read_analysis("tests/variable/name_conflict_method_global.csv"); match refactor::rename_variable(file, &analysis, "foo", "4") { Ok(_) => assert!(false), Err(x) => assert_eq!(Response::Conflict, x) } } #[test] fn prevented_variable_13() { // Broken destructure // Point {x, y} = Point{x:1, x:2} // => Point {foo, y} = Point{x:1, x:2} let file = "tests/variable/destructure_conflict.rs"; let analysis = read_analysis("tests/variable/destructure_conflict.csv"); match refactor::rename_variable(file, &analysis, "bar", "16") { Ok(_) => assert!(false), Err(x) => assert_eq!(Response::Conflict, x) } } #[test] fn prevented_variable_14() { let file = "tests/variable/conflict_var_use_mod.rs"; let analysis = read_analysis("tests/variable/conflict_var_use_mod.csv"); match refactor::rename_variable(file, &analysis, "BIT2", "6") { Ok(_) => assert!(false), Err(x) => assert_eq!(Response::Conflict, x) } } #[test] fn prevented_variable_15() { let file = "tests/variable/conflict_var_use_mod.rs"; let analysis = read_analysis("tests/variable/conflict_var_use_mod.csv"); match 
refactor::rename_variable(file, &analysis, "BIT1", "11") { Ok(_) => assert!(false), Err(x) => assert_eq!(Response::Conflict, x) } } #[test] fn working_argument_1() { let file = "tests/variable/fn_args_1.rs"; let output = read_to_string("tests/variable/fn_args_1_out.rs"); let analysis = read_analysis("tests/variable/fn_args_1.csv"); match refactor::rename_variable(file, &analysis, "z", "6") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_argument_2() { let file = "tests/variable/fn_args_2.rs"; let output = read_to_string("tests/variable/fn_args_2_1_out.rs"); let analysis = read_analysis("tests/variable/fn_args_2.csv"); match refactor::rename_variable(file, &analysis, "z", "10") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_argument_3() { let file = "tests/variable/fn_args_2.rs"; let output = read_to_string("tests/variable/fn_args_2_2_out.rs"); let analysis = read_analysis("tests/variable/fn_args_2.csv"); match refactor::rename_variable(file, &analysis, "z", "15") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn prevented_argument_1() { let file = "tests/variable/fn_args_1.rs"; let analysis = read_analysis("tests/variable/fn_args_1.csv"); match refactor::rename_variable(file, &analysis, "c", "6") { Ok(_) => assert!(false), Err(x) => assert_eq!(Response::Conflict, x) } } #[test] fn prevented_argument_2() { let file = "tests/variable/fn_args_1.rs"; let analysis = read_analysis("tests/variable/fn_args_1.csv"); match refactor::rename_variable(file, &analysis, "foo", "6") { Ok(_) => assert!(false), Err(x) => assert_eq!(Response::Conflict, x) } } #[test] fn working_struct_1() { let file = "tests/type/basic_struct.rs"; let output = read_to_string("tests/type/working_struct_1_out.rs"); let analysis = read_analysis("tests/type/basic_struct.csv"); match 
refactor::rename_type(&"tests/type/basic_struct.rs", &analysis, "Pointer", "4") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_struct_2() { // ::Point mentioned instead of Point let file = "tests/type/scoped_struct.rs"; let output = read_to_string("tests/type/working_struct_1_out.rs"); let analysis = read_analysis("tests/type/scoped_struct.csv"); match refactor::rename_type(&"tests/type/basic_struct.rs", &analysis, "Pointer", "4") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_struct_3() { // Tuple struct let file = "tests/type/tuple_struct.rs"; let output = read_to_string("tests/type/tuple_struct_out.rs"); let analysis = read_analysis("tests/type/tuple_struct.csv"); match refactor::rename_type(&file, &analysis, "Pointer", "4") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_enum_1() { let file = "tests/type/basic_enum.rs"; let output = read_to_string("tests/type/working_enum_1_out.rs"); let analysis = read_analysis("tests/type/basic_enum.csv"); match refactor::rename_type(&"tests/type/basic_enum.rs", &analysis, "YesNo", "4") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_enum_2() { let file = "tests/type/modref_enum.rs"; let output = read_to_string("tests/type/working_enum_2_out.rs"); let analysis = read_analysis("tests/type/modref_enum.csv"); match refactor::rename_type(&"tests/type/modref_enum.rs", &analysis, "YesNo", "7") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn prevented_struct_1() { let file = "tests/type/conflict_struct.rs"; let analysis = read_analysis("tests/type/conflict_struct.csv"); match refactor::rename_type(&file, &analysis, "P", "4") { Ok(_) => assert!(false), Err(x) => assert_eq!(Response::Conflict, x) } } #[test] fn 
prevented_struct_2() { let file = "tests/type/conflict_mod_struct.rs"; let analysis = read_analysis("tests/type/conflict_mod_struct.csv"); match refactor::rename_type(&file, &analysis, "B", "6") { Ok(_) => assert!(false), Err(x) => assert_eq!(Response::Conflict, x) } } #[test] fn prevented_struct_3() { let file = "tests/type/conflict_use_mod_struct.rs"; let analysis = read_analysis("tests/type/conflict_use_mod_struct.csv"); match refactor::rename_type(&file, &analysis, "B", "6") { Ok(_) => assert!(false), Err(x) => assert_eq!(Response::Conflict, x) } } #[test] fn working_method_1() { let file = "tests/function/basic_default_method.rs"; let output = read_to_string("tests/function/working_method_1_out.rs"); let analysis = read_analysis("tests/function/basic_default_method.csv"); match refactor::rename_function(&file, &analysis, "func", "5") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_method_2() { let file = "tests/function/impl_override_method.rs"; let output = read_to_string("tests/function/working_method_2_out.rs"); let analysis = read_analysis("tests/function/impl_override_method.csv"); match refactor::rename_function(&file, &analysis, "func", "5") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_method_3() { let file = "tests/function/alex_override_method.rs"; let output = read_to_string("tests/function/alex_override_method_out2.rs"); let analysis = read_analysis("tests/function/alex_override_method.csv"); match refactor::rename_function(&file, &analysis, "grue", "74") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn not_working_method_1() { let file = "tests/function/alex_override_method.rs"; let analysis = read_analysis("tests/function/alex_override_method.csv"); match refactor::rename_function(&file, &analysis, "foo", "74") { Ok(_) => assert!(false), Err(x) => 
assert_eq!(Response::Conflict, x) } } #[test] fn working_fn_1() { let file = "tests/function/basic_function.rs"; let output = read_to_string("tests/function/basic_function_out.rs"); let analysis = read_analysis("tests/function/basic_function.csv"); match refactor::rename_function(&file, &analysis, "bar", "4") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_fn_2() { let file = "tests/function/basic_module_function.rs"; let output = read_to_string("tests/function/basic_module_function_out.rs"); let analysis = read_analysis("tests/function/basic_module_function.csv"); match refactor::rename_function(&file, &analysis, "bar", "6") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_fn_3() { let file = "tests/function/basic_generic_function.rs"; let output = read_to_string("tests/function/basic_generic_function_out.rs"); let analysis = read_analysis("tests/function/basic_generic_function.csv"); match refactor::rename_function(&file, &analysis, "bar", "5") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_fn_4() { let file = "tests/function/basic_trait_function.rs"; let output = read_to_string("tests/function/basic_trait_function_out.rs"); let analysis = read_analysis("tests/function/basic_trait_function.csv"); match refactor::rename_function(&file, &analysis, "bar", "5") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_fn_5() { let file = "tests/function/basic_use_function.rs"; let output = read_to_string("tests/function/basic_use_function_out.rs"); let analysis = read_analysis("tests/function/basic_use_function.csv"); match refactor::rename_function(&file, &analysis, "bar", "6") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_fn_6() { let file = 
"tests/function/extern_function.rs"; let output = read_to_string("tests/function/extern_function_out.rs"); let analysis = read_analysis("tests/function/extern_function.csv"); match refactor::rename_function(&file, &analysis, "bar", "4") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_fn_7() { let file = "tests/function/extern_stdcall_function.rs"; let output = read_to_string("tests/function/extern_stdcall_function_out.rs"); let analysis = read_analysis("tests/function/extern_stdcall_function.csv"); match refactor::rename_function(&file, &analysis, "bar", "4") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_fn_8() { let file = "tests/function/fn_local_mod.rs"; let output = read_to_string("tests/function/fn_local_mod_out.rs"); let analysis = read_analysis("tests/function/fn_local_mod.csv"); match refactor::rename_function(&file, &analysis, "bar", "10") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_fn_9() { let file = "tests/function/fn_local_mod_after.rs"; let output = read_to_string("tests/function/fn_local_mod_after_out.rs"); let analysis = read_analysis("tests/function/fn_local_mod_after.csv"); match refactor::rename_function(&file, &analysis, "bar", "13") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn not_working_fn_1() { let file = "tests/function/basic_function.rs"; let analysis = read_analysis("tests/function/basic_function.csv"); match refactor::rename_function(&file, &analysis, "main", "4") { Ok(_) => assert!(false), Err(x) => assert_eq!(Response::Conflict, x) } } #[test] fn not_working_fn_2() { let file = "tests/function/conflict_module_function.rs"; let analysis = read_analysis("tests/function/conflict_module_function.csv"); match refactor::rename_function(&file, &analysis, "foo", "9") { Ok(_) => 
assert!(false), Err(x) => assert_eq!(Response::Conflict, x) } } #[test] fn not_working_fn_3() { let file = "tests/function/conflict_module_function.rs"; let analysis = read_analysis("tests/function/conflict_module_function.csv"); match refactor::rename_function(&file, &analysis, "bar", "6") { Ok(_) => assert!(false), Err(x) => assert_eq!(Response::Conflict, x) } } #[test] fn not_working_fn_4() { let file = "tests/function/conflict_fn_with_var.rs"; let analysis = read_analysis("tests/function/conflict_fn_with_var.csv"); match refactor::rename_function(&file, &analysis, "a", "8") { Ok(_) => assert!(false), Err(x) => assert_eq!(Response::Conflict, x) } } #[test] fn multi_file_1() { let file = "tests/multi-file/simple_function_1/main.rs"; let changed1 = file; let changed2 = "tests/multi-file/simple_function_1/foo.rs"; let output1 = read_to_string("tests/multi-file/simple_function_1/main_out.rs"); let output2 = read_to_string("tests/multi-file/simple_function_1/foo_out.rs"); let analysis = read_analysis("tests/multi-file/simple_function_1/main.csv"); match refactor::rename_function(&file, &analysis, "boo", "6") { Ok(x) => { assert_eq!(output1.trim(), x.get(changed1).unwrap().trim()); assert_eq!(output2.trim(), x.get(changed2).unwrap().trim()); }, Err(_) => assert!(false) } } #[test] fn multi_file_2() { let file = "tests/multi-file/simple_function_2/main.rs"; let changed1 = file; let changed2 = "tests/multi-file/simple_function_2/foo/mod.rs"; let output1 = read_to_string("tests/multi-file/simple_function_2/main_out.rs"); let output2 = read_to_string("tests/multi-file/simple_function_2/foo/mod_out.rs"); let analysis = read_analysis("tests/multi-file/simple_function_2/main.csv"); match refactor::rename_function(&file, &analysis, "boo", "6") { Ok(x) => { assert_eq!(output1.trim(), x.get(changed1).unwrap().trim()); assert_eq!(output2.trim(), x.get(changed2).unwrap().trim()); }, Err(_) => assert!(false) } } #[test] fn working_inline_1() { let file = 
"tests/inline/inline_single.rs"; let output = read_to_string("tests/inline/inline_single_out.rs"); let analysis = read_analysis("tests/inline/inline_single.csv"); match refactor::inline_local(&file, &analysis, "9") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_inline_2() { let file = "tests/inline/inline3.rs"; let output = read_to_string("tests/inline/inline3_out.rs"); let analysis = read_analysis("tests/inline/inline3.csv"); match refactor::inline_local(&file, &analysis, "9") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_inline_3() { let file = "tests/inline/inline4.rs"; let output = read_to_string("tests/inline/inline4_out.rs"); let analysis = read_analysis("tests/inline/inline4.csv"); match refactor::inline_local(&file, &analysis, "9") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_inline_4() { let file = "tests/inline/inline5.rs"; let output = read_to_string("tests/inline/inline5_out.rs"); let analysis = read_analysis("tests/inline/inline5.csv"); match refactor::inline_local(&file, &analysis, "9") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_inline_5() { let file = "tests/inline/inline6.rs"; let output = read_to_string("tests/inline/inline6_out.rs"); let analysis = read_analysis("tests/inline/inline6.csv"); match refactor::inline_local(&file, &analysis, "9") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_inline_6() { let file = "tests/inline/inline7.rs"; let output = read_to_string("tests/inline/inline7_out.rs"); let analysis = read_analysis("tests/inline/inline7.csv"); match refactor::inline_local(&file, &analysis, "9") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] 
fn working_inline_7() { let file = "tests/inline/inline8.rs"; let output = read_to_string("tests/inline/inline8_out.rs"); let analysis = read_analysis("tests/inline/inline8.csv"); match refactor::inline_local(&file, &analysis, "9") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_inline_8() { // Multi-file let file = "tests/multi-file/simple_inline_1/main.rs"; let changed = "tests/multi-file/simple_inline_1/foo.rs"; let output = read_to_string("tests/multi-file/simple_inline_1/foo_out.rs"); let analysis = read_analysis("tests/multi-file/simple_inline_1/main.csv"); match refactor::inline_local(&file, &analysis, "11") { Ok(x) => { assert_eq!(output.trim(), x.get(changed).unwrap().trim()); }, Err(_) => assert!(false) } } #[test] fn working_field_1() { let file = "tests/field/simple_field.rs"; let output = read_to_string("tests/field/simple_field_out.rs"); let analysis = read_analysis("tests/field/simple_field.csv"); match refactor::rename_variable(&file, &analysis, "z", "5") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn prevented_field_1() { let file = "tests/field/simple_field.rs"; let analysis = read_analysis("tests/field/simple_field.csv"); match refactor::rename_variable(&file, &analysis, "y", "5") { Ok(_) => assert!(false), Err(x) => assert_eq!(Response::Conflict, x) } } #[test] fn working_reify_1() { let file = "tests/lifetime/reify_single_in.rs"; let output = read_to_string("tests/lifetime/reify_single_in_out.rs"); let analysis = read_analysis("tests/lifetime/reify_single_in.csv"); match refactor::restore_fn_lifetime(&file, &analysis, "5") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_reify_2() { let file = "tests/lifetime/reify_single_in_ret.rs"; let output = read_to_string("tests/lifetime/reify_single_in_ret_out.rs"); let analysis = 
read_analysis("tests/lifetime/reify_single_in_ret.csv"); match refactor::restore_fn_lifetime(&file, &analysis, "5") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_reify_3() { let file = "tests/lifetime/reify_single_in_anon_ret.rs"; let output = read_to_string("tests/lifetime/reify_single_in_anon_ret_out.rs"); let analysis = read_analysis("tests/lifetime/reify_single_in_anon_ret.csv"); match refactor::restore_fn_lifetime(&file, &analysis, "5") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_reify_4() { let file = "tests/lifetime/reify_multi_in.rs"; let output = read_to_string("tests/lifetime/reify_multi_in_out.rs"); let analysis = read_analysis("tests/lifetime/reify_multi_in.csv"); match refactor::restore_fn_lifetime(&file, &analysis, "5") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_reify_5() { let file = "tests/lifetime/reify_multi_named_in.rs"; let output = read_to_string("tests/lifetime/reify_multi_named_in_out.rs"); let analysis = read_analysis("tests/lifetime/reify_multi_named_in.csv"); match refactor::restore_fn_lifetime(&file, &analysis, "5") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_reify_6() { let file = "tests/lifetime/reify_multi_named_self_ret.rs"; let output = read_to_string("tests/lifetime/reify_multi_named_self_ret_out.rs"); let analysis = read_analysis("tests/lifetime/reify_multi_named_self_ret.csv"); match refactor::restore_fn_lifetime(&file, &analysis, "9") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_reify_7() { let file = "tests/lifetime/reify_multi_self_ret.rs"; let output = read_to_string("tests/lifetime/reify_multi_self_ret_out.rs"); let analysis = 
read_analysis("tests/lifetime/reify_multi_self_ret.csv"); match refactor::restore_fn_lifetime(&file, &analysis, "9") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_elide_1() { let file = "tests/lifetime/elide_single_in.rs"; let output = read_to_string("tests/lifetime/elide_single_in_out.rs"); let analysis = read_analysis("tests/lifetime/elide_single_in.csv"); match refactor::elide_fn_lifetime(&file, &analysis, "5") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_elide_2() { // You can't elide something that has a lifetime that isn't in the input let file = "tests/lifetime/elide_single_anon_static_ret.rs"; let analysis = read_analysis("tests/lifetime/elide_single_anon_static_ret.csv"); match refactor::elide_fn_lifetime(&file, &analysis, "5") { Ok(_) => assert!(false), Err(x) => assert_eq!(Response::Conflict, x) } } #[test] fn working_elide_3() { let file = "tests/lifetime/elide_single_in_anon_ret.rs"; let output = read_to_string("tests/lifetime/elide_single_in_anon_ret_out.rs"); let analysis = read_analysis("tests/lifetime/elide_single_in_anon_ret.csv"); match refactor::elide_fn_lifetime(&file, &analysis, "5") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_elide_4() { let file = "tests/lifetime/elide_single_in_ret.rs"; let output = read_to_string("tests/lifetime/elide_single_in_ret_out.rs"); let analysis = read_analysis("tests/lifetime/elide_single_in_ret.csv"); match refactor::elide_fn_lifetime(&file, &analysis, "5") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_elide_5() { // TODO this should be fixed // At the moment, don't bother eliding even input if output is invalid let file = "tests/lifetime/elide_single_static_ret.rs"; let output = 
read_to_string("tests/lifetime/elide_single_static_ret_out.rs"); let analysis = read_analysis("tests/lifetime/elide_single_static_ret.csv"); match refactor::elide_fn_lifetime(&file, &analysis, "5") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_elide_6() { let file = "tests/lifetime/elide_multi_in.rs"; let output = read_to_string("tests/lifetime/elide_multi_in_out.rs"); let analysis = read_analysis("tests/lifetime/elide_multi_in.csv"); match refactor::elide_fn_lifetime(&file, &analysis, "5") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_elide_7() { let file = "tests/lifetime/elide_multi_named_self_ret.rs"; let output = read_to_string("tests/lifetime/elide_multi_named_self_ret_out.rs"); let analysis = read_analysis("tests/lifetime/elide_multi_named_self_ret.csv"); match refactor::elide_fn_lifetime(&file, &analysis, "9") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_elide_8() { let file = "tests/lifetime/elide_multi_anon_self_ret.rs"; let output = read_to_string("tests/lifetime/elide_multi_anon_self_ret_out.rs"); let analysis = read_analysis("tests/lifetime/elide_multi_anon_self_ret.csv"); match refactor::elide_fn_lifetime(&file, &analysis, "9") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } } #[test] fn working_elide_9() { // TODO needs to be fixed, elide when there are non-elidables in input // e.g. 
self: 'a, 'b, 'static -> 'a // ===> self, _, 'static -> _ let file = "tests/lifetime/elide_multi_static_self_ret.rs"; let output = read_to_string("tests/lifetime/elide_multi_static_self_ret_out.rs"); let analysis = read_analysis("tests/lifetime/elide_multi_static_self_ret.csv"); match refactor::elide_fn_lifetime(&file, &analysis, "9") { Ok(x) => assert_eq!(output.trim(), x.get(file).unwrap().trim()), Err(_) => assert!(false) } }
true
f7dc75117ad383e4bdfbdd74ba76d930982bfc2a
Rust
ChangeCaps/orchard
/src/assets.rs
UTF-8
1,477
2.890625
3
[]
no_license
use ike::prelude::*; pub struct Assets { pub font: Font, pub cursor: Texture, pub base_tile: Texture, pub farm_tile: Texture, pub wheat_seed: Texture, pub wheat_item: Texture, pub wheat_0: Texture, pub wheat_1: Texture, pub wheat_2: Texture, pub wheat_3: Texture, pub pole: Texture, pub pole_item: Texture, pub wood_item: Texture, pub sapling_item: Texture, } impl Assets { #[inline] pub fn load() -> ike::anyhow::Result<Self> { Ok(Self { font: Font::load("assets/misc/font.ttf", 30.0)?, cursor: Texture::load("assets/misc/cursor.png")?, base_tile: Texture::load("assets/tiles/base_tile.png")?, farm_tile: Texture::load("assets/tiles/farm_tile.png")?, wheat_seed: Texture::load("assets/items/wheat_seed.png")?, wheat_item: Texture::load("assets/items/wheat_item.png")?, wheat_0: Texture::load("assets/plants/wheat_0.png")?, wheat_1: Texture::load("assets/plants/wheat_1.png")?, wheat_2: Texture::load("assets/plants/wheat_2.png")?, wheat_3: Texture::load("assets/plants/wheat_3.png")?, pole: Texture::load("assets/structures/pole.png")?, pole_item: Texture::load("assets/items/pole_item.png")?, wood_item: Texture::load("assets/items/wood_item.png")?, sapling_item: Texture::load("assets/items/sapling_item.png")?, }) } }
true
37f199edeb7d06df5a7a5b877335e71e8d7131d7
Rust
flyq/datastruct-algorithm
/leetcode/p1170/src/main.rs
UTF-8
2,081
3.28125
3
[ "MIT" ]
permissive
fn main() { println!("Hello, world!"); let a = vec!["aabbbabaa".to_string()]; let b = vec!["b".to_string(),"aaaba".to_string(),"aaaabba".to_string(),"aa".to_string(),"aabaabab".to_string(),"aabbaaabbb".to_string(),"ababb".to_string(),"bbb".to_string(),"aabbbabb".to_string(),"aab".to_string(),"bbaaababba".to_string(),"baaaaa".to_string()]; println!("{:?}", Solution::num_smaller_by_frequency(a, b)); } pub struct Solution{} impl Solution { pub fn num_smaller_by_frequency(queries: Vec<String>, words: Vec<String>) -> Vec<i32> { let mut res = vec![]; let lenq = queries.len(); let lenw = words.len(); let mut q = vec![0;lenq]; let mut w = vec![0;lenw]; for i in 0..lenq { q[i] = Solution::fun(&queries[i]); } for i in 0..lenw { w[i] = Solution::fun(&words[i]); } w.sort(); for i in 0..lenq { let t = q[i]; let temp = Solution::binary_search(&w,t); res.push(temp); } res } pub fn fun(a: &String) -> i32 { let char_vec: Vec<char> = a.chars().collect(); let mut min = 'z'; let mut min_count = 0; for i in char_vec { if i < min { min = i; min_count = 1; } else if i == min { min_count += 1; } } min_count } pub fn binary_search(a: &Vec<i32>, t: i32) -> i32 { let mut min = 0; let mut max = a.len() - 1; if a[max] <= t { return 0; } while min+1 < max { let temp = (min+max)/2; if a[temp] > t { max = temp; } else { min = temp; } } (a.len() - max) as i32 } } /* 执行结果: 通过 显示详情 执行用时 : 4 ms , 在所有 rust 提交中击败了 100.00% 的用户 内存消耗 : 2.1 MB , 在所有 rust 提交中击败了 100.00% 的用户 */
true
5132d4a9e444a60767e1e778334d06c3a96c104d
Rust
spriest487/uncle-pascal
/pas_syn/src/ast/ctor.rs
UTF-8
3,750
2.890625
3
[]
no_license
use crate::{ast::Expression, parse::prelude::*}; #[derive(Eq, PartialEq, Clone, Hash, Debug)] pub struct ObjectCtorMember<A: Annotation> { pub ident: Ident, pub value: Expression<A>, pub span: Span, } impl<A: Annotation> fmt::Display for ObjectCtorMember<A> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}: {}", self.ident, self.value) } } impl<A: Annotation> Spanned for ObjectCtorMember<A> { fn span(&self) -> &Span { &self.span } } #[derive(Eq, PartialEq, Clone, Hash, Debug)] pub struct ObjectCtorArgs<A: Annotation> { pub open: Span, pub members: Vec<ObjectCtorMember<A>>, pub close: Span, } impl ObjectCtorArgs<Span> { pub fn parse(tokens: &mut TokenStream) -> ParseResult<Self> { let args_group = tokens.match_one(DelimiterPair::Bracket)?; let (open, inner, close) = match args_group { TokenTree::Delimited { open, inner, close, .. } => (open, inner, close), _ => unreachable!(), }; let mut members_tokens = TokenStream::new(inner, open.clone()); let members = members_tokens.match_separated(Separator::Semicolon, |_, tokens| { let ident = tokens.match_one(Matcher::AnyIdent)?.into_ident().unwrap(); tokens.match_one(Separator::Colon)?; let value = Expression::parse(tokens)?; let span = ident.span().to(value.annotation()); Ok(Generate::Yield(ObjectCtorMember { ident, value, span })) })?; members_tokens.finish()?; Ok(ObjectCtorArgs { open, members, close, }) } } impl<A: Annotation> ObjectCtorArgs<A> { pub fn iter(&self) -> impl Iterator<Item = &ObjectCtorMember<A>> { self.members.iter() } } impl<A: Annotation> fmt::Display for ObjectCtorArgs<A> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "(")?; for (i, member) in self.members.iter().enumerate() { if i > 0 { write!(f, "; ")?; } write!(f, "{}", member)?; } write!(f, ")") } } #[derive(Eq, PartialEq, Clone, Hash, Debug)] pub struct ObjectCtor<A: Annotation> { pub ident: IdentPath, pub args: ObjectCtorArgs<A>, pub annotation: A, } impl<A: Annotation> fmt::Display for ObjectCtor<A> { fn 
fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}{}", self.ident, self.args) } } #[derive(Eq, PartialEq, Clone, Hash, Debug)] pub struct CollectionCtor<A: Annotation> { pub elements: Vec<Expression<A>>, pub annotation: A, } impl<A: Annotation> fmt::Display for CollectionCtor<A> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "[")?; for (i, element) in self.elements.iter().enumerate() { if i > 0 { write!(f, ", ")?; } write!(f, "{}", element)?; } write!(f, "]") } } impl CollectionCtor<Span> { pub fn parse(tokens: &mut TokenStream) -> ParseResult<Self> { let (span, mut elems_tokens) = match tokens.match_one(Matcher::Delimited(DelimiterPair::SquareBracket))? { TokenTree::Delimited { span, inner, open, .. } => (span, TokenStream::new(inner, open)), _ => unreachable!(), }; let elements = elems_tokens.match_separated(Separator::Comma, |_, elem_tokens| { let elem_expr = Expression::parse(elem_tokens)?; Ok(Generate::Yield(elem_expr)) })?; Ok(Self { elements, annotation: span, }) } }
true
6fb9079d4625d510837f78feee50addef86cb9e7
Rust
holaymzhang/aarch64
/kernel/memory/src/list.rs
UTF-8
3,252
3.109375
3
[]
no_license
use core::default::Default; use core::marker::PhantomData; use core::option::Option; use crate::page::Page; use intrusive::IntrusiveList; #[derive(Debug)] pub struct PageList<'a> { list: IntrusiveList<Page>, _marker: PhantomData<&'a Page>, } impl<'a> PageList<'a> { pub fn new() -> PageList<'a> { let page = Page::new(); PageList { list: IntrusiveList::new(page.link_offset()), _marker: PhantomData, } } pub unsafe fn push(&mut self, page: &'a Page) { self.list.push(page as *const Page); } pub unsafe fn pop(&mut self) -> Option<&'a Page> { self.list.pop().map(|ptr| { &*ptr }) } pub unsafe fn remove(&mut self, page: &'a Page) { self.list.remove(page as *const Page); } } impl<'a> Default for PageList<'a> { fn default() -> PageList<'a> { PageList::new() } } #[cfg(test)] mod tests { use super::*; #[test] fn test_push_pop() { let pages = [Page::new(), Page::new(), Page::new()]; let mut l = PageList::new(); unsafe { assert!(l.pop().is_none()); l.push(&pages[0]); l.push(&pages[1]); l.push(&pages[2]); assert_eq!( l.pop().map(|x| x as *const Page), Some(&pages[2] as *const Page)); assert_eq!( l.pop().map(|x| x as *const Page), Some(&pages[1] as *const Page)); assert_eq!( l.pop().map(|x| x as *const Page), Some(&pages[0] as *const Page)); assert!(l.pop().is_none()); } } #[test] fn test_remove() { let pages = [Page::new(), Page::new(), Page::new()]; let mut l = PageList::new(); unsafe { l.push(&pages[0]); l.push(&pages[1]); l.push(&pages[2]); l.remove(&pages[0]); assert_eq!( l.pop().map(|x| x as *const Page), Some(&pages[2] as *const Page)); assert_eq!( l.pop().map(|x| x as *const Page), Some(&pages[1] as *const Page)); assert!(l.pop().is_none()); } unsafe { l.push(&pages[0]); l.push(&pages[1]); l.push(&pages[2]); l.remove(&pages[1]); assert_eq!( l.pop().map(|x| x as *const Page), Some(&pages[2] as *const Page)); assert_eq!( l.pop().map(|x| x as *const Page), Some(&pages[0] as *const Page)); assert!(l.pop().is_none()); } unsafe { l.push(&pages[0]); l.push(&pages[1]); 
l.push(&pages[2]); l.remove(&pages[2]); assert_eq!( l.pop().map(|x| x as *const Page), Some(&pages[1] as *const Page)); assert_eq!( l.pop().map(|x| x as *const Page), Some(&pages[0] as *const Page)); assert!(l.pop().is_none()); } unsafe { l.push(&pages[0]); l.remove(&pages[0]); assert!(l.pop().is_none()); } } }
true
f7b193f61f3ca8eb442e2e1b7c924b7e9dd89aa5
Rust
2892931976/mirdb
/mirdb-server/src/slice.rs
UTF-8
5,513
2.703125
3
[]
no_license
use std::borrow::Borrow; use std::cmp::Ordering; use std::convert::From; use std::fmt; use std::hash; use std::io::Cursor; use std::ops::Index; use std::ops::Range; use std::ops::RangeFull; use std::ops::RangeTo; use std::slice::SliceIndex; use bytes::buf; use bytes::Bytes; use bytes::BytesMut; use serde::de::{self, Visitor}; use serde::{Deserialize, Deserializer, Serialize, Serializer}; #[derive(Clone, PartialEq, Eq, PartialOrd, Ord)] pub struct Slice { inner: Bytes, } impl Default for Slice { #[inline] fn default() -> Self { Self::with_capacity(0) } } impl Slice { #[inline] pub fn new() -> Self { Self::default() } #[inline] pub fn with_capacity(cap: usize) -> Self { Self { inner: Bytes::with_capacity(cap), } } #[inline] pub fn len(&self) -> usize { self.inner.len() } #[inline] pub fn is_empty(&self) -> bool { self.inner.is_empty() } pub fn slice(&self, begin: usize, end: usize) -> Self { Self { inner: self.inner.slice(begin, end), } } pub fn slice_from(&self, begin: usize) -> Self { Self { inner: self.inner.slice_from(begin), } } pub fn slice_to(&self, end: usize) -> Self { Self { inner: self.inner.slice_to(end), } } } impl<'a> PartialEq<&'a [u8]> for Slice { fn eq(&self, other: &&[u8]) -> bool { let s: &[u8] = self.as_ref(); (&s).eq(other) } } impl<'a> PartialOrd<&'a [u8]> for Slice { fn partial_cmp(&self, other: &&[u8]) -> Option<Ordering> { let s: &[u8] = self.as_ref(); (&s).partial_cmp(other) } } impl PartialEq<Vec<u8>> for Slice { fn eq(&self, other: &Vec<u8>) -> bool { let s: &[u8] = self.as_ref(); (&s).eq(&&other[..]) } } impl PartialOrd<Vec<u8>> for Slice { fn partial_cmp(&self, other: &Vec<u8>) -> Option<Ordering> { let s: &[u8] = self.as_ref(); (&s).partial_cmp(&&other[..]) } } #[allow(clippy::derive_hash_xor_eq)] impl hash::Hash for Slice { fn hash<H>(&self, state: &mut H) where H: hash::Hasher, { self.inner.hash(state) } } impl Serialize for Slice { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { 
serializer.serialize_bytes(self.inner.as_ref()) } } struct SliceVisitor; impl<'de> Visitor<'de> for SliceVisitor { type Value = Slice; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("need bytes") } fn visit_bytes<E>(self, value: &[u8]) -> Result<Self::Value, E> where E: de::Error, { Ok(Slice::from(value)) } } impl<'de> Deserialize<'de> for Slice { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { deserializer.deserialize_bytes(SliceVisitor) } } impl fmt::Debug for Slice { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.inner.fmt(f) } } impl<'a> Borrow<[u8]> for &'a Slice { #[inline] fn borrow(&self) -> &[u8] { self.as_ref() } } impl Borrow<[u8]> for Slice { #[inline] fn borrow(&self) -> &[u8] { self.as_ref() } } impl AsRef<[u8]> for Slice { #[inline] fn as_ref(&self) -> &[u8] { self.inner.as_ref() } } impl Index<RangeFull> for Slice { type Output = [u8]; fn index(&self, index: RangeFull) -> &Self::Output { self.inner.as_ref().index(index) } } impl Index<RangeTo<usize>> for Slice { type Output = [u8]; fn index(&self, index: RangeTo<usize>) -> &Self::Output { self.inner.as_ref().index(index) } } impl IntoIterator for Slice { type Item = u8; type IntoIter = buf::Iter<Cursor<Bytes>>; fn into_iter(self) -> Self::IntoIter { self.inner.into_iter() } } impl<'a> IntoIterator for &'a Slice { type Item = u8; type IntoIter = buf::Iter<Cursor<&'a Bytes>>; fn into_iter(self) -> Self::IntoIter { (&self.inner).into_iter() } } impl Extend<u8> for Slice { fn extend<T>(&mut self, iter: T) where T: IntoIterator<Item = u8>, { self.inner.extend(iter) } } impl<'a> Extend<&'a u8> for Slice { fn extend<T>(&mut self, iter: T) where T: IntoIterator<Item = &'a u8>, { self.inner.extend(iter) } } impl From<BytesMut> for Slice { fn from(src: BytesMut) -> Self { Self { inner: src.into() } } } macro_rules! 
impl_from { ($type:ty) => { impl From<$type> for Slice { fn from(src: $type) -> Self { Self { inner: From::from(src), } } } }; } impl_from!(Vec<u8>); impl_from!(String); impl_from!(&[u8]); impl_from!(&str); #[cfg(test)] mod test { use bincode::deserialize; use bincode::serialize; use crate::utils::to_str; use super::*; #[test] fn test_ord() { assert_eq!(Slice::from("abc"), Slice::from("abc")); assert!(Slice::from("abc") < Slice::from("abd")); } #[test] fn test_serde() { let a = Slice::from("abc"); let encoded = serialize(&a).unwrap(); let decoded: Slice = deserialize(&encoded).unwrap(); assert_eq!(Slice::from("abc"), decoded); println!("a: {}", to_str(&a)); } }
true
4709a049cc105f62cb7de3f0702ddfbf745e4f8d
Rust
SaahilClaypool/aoc_2018
/day_15/src/main.rs
UTF-8
19,929
3.203125
3
[]
no_license
use std::cell::RefCell; use std::collections::HashMap; use std::str::FromStr; use std::string::ToString; // 195888 fn main() { // let elf = &game.units[0]; // let new_pos = elf.do_move(&game); // need to split up unit structure from game structure let input = include_str!("input.txt"); part_b(input); } fn part_a() { let input = include_str!("input.txt"); let mut game: Game = input.parse().unwrap(); println!("{}", game.to_string()); for i in 0..100 { game.do_round(); for unit in &game.units { println!( "Unit {} {} health {}", unit.idx, unit.unit_type, &unit.hp.borrow() ); } if game.winner() { println!("{}", game.to_string()); println!("End after {} rounds", i); println!("score is {}", game.score()); eprintln!("score is {}", game.score()); break; } println!("{}\n{}\n---------------------", i, game.to_string()); } } fn part_b(input: &str) -> i32 { let mut elf_damage = 3; let mut winning_score = 0; 'outer: loop { let mut game: Game = input.parse().unwrap(); game.elf_damage = elf_damage; println!("{}", game.to_string()); for i in 0..1000 { game.do_round(); for unit in &game.units { println!( "Unit {} {} health {}", unit.idx, unit.unit_type, &unit.hp.borrow() ); } if game.winner() { if game.elf_winner() { println!("{}", game.to_string()); println!("End after {} rounds", i); println!("score is {}", game.score()); eprintln!("score is {}", game.score()); winning_score = game.score(); break 'outer; } else { elf_damage += 1; println!("goblin won at damage {}", elf_damage); eprintln!("goblin won at damage {}", elf_damage); continue 'outer; } } println!( "{}\n{}\n{}---------------------", i, game.to_string(), elf_damage ); } } return winning_score; } enum Space { Unit(usize), // index of the unit that is there Wall, Empty, } #[derive(Hash, Eq, PartialEq, Debug, Clone, Copy)] struct Pos { row: i32, col: i32, } impl Pos { fn dist(&self, other: &Self) -> usize { ((self.row as i32 - other.row as i32).abs() + (self.col as i32 - other.col as i32).abs()) as usize } } const ELF: char = 
'E'; const GOBLIN: char = 'G'; const NONE: char = 'N'; #[derive(Debug, Clone)] struct Unit { unit_type: char, pos: RefCell<Pos>, idx: usize, hp: RefCell<i32>, } struct Game { units: Vec<Unit>, map: Vec<Vec<Space>>, round: usize, rows: usize, cols: usize, elf_damage: i32, } impl Unit { fn targets(&self, game: &Game) -> Vec<Pos> { let mut positions = vec![]; for (r_idx, row) in game.map.iter().enumerate() { for (c_idx, col) in row.iter().enumerate() { if self.pos.borrow().col == c_idx as i32 && self.pos.borrow().row == r_idx as i32 { continue; } match col { Space::Unit(idx) => { if self.idx == *idx { continue; } if game.units[*idx].unit_type != self.unit_type && *game.units[*idx].hp.borrow() > 0 { positions.push(Pos { row: r_idx as i32, col: c_idx as i32, }); } } _ => {} } } } positions } fn other(&self) -> char { match self.unit_type { ELF => GOBLIN, GOBLIN => ELF, _ => NONE, } } fn do_move(&self, game: &Game) -> Option<Pos> { let targets = self.targets(game); let adjacent: Vec<&Pos> = targets .iter() .filter(|other| other.dist(&self.pos.borrow()) == 1) .collect(); if adjacent.len() != 0 { // println!("pos: {:?} {} is engaged to: {:?}", *self.pos.borrow(), self.unit_type, adjacent); return None; } let open: Vec<Vec<Pos>> = targets .iter() .map(|target| game.surrounding(target, NONE)) .collect(); let mut open_flat = vec![]; for target in open.into_iter() { for space in target.into_iter() { open_flat.push(space); } } let dists = game.dists(&self.pos.borrow()); let open_flat: Vec<Pos> = open_flat .into_iter() .filter(|pos| dists.contains_key(pos)) .collect(); // no path to anyone if open_flat.is_empty() { // println!("No Positions from {:?}", self); return None; } let mut min_dist = game.rows * game.cols * 2; let mut best_target = open_flat[0]; for target in open_flat { let d_target = dists[&target]; if d_target.dist < min_dist || d_target.dist == min_dist && game.order(&target) < game.order(&best_target) { min_dist = d_target.dist; best_target = target; } } let 
surrounding = game.surrounding(&self.pos.borrow(), NONE); let mut best_surrounding_move = 10000; let mut best_surrounding = *self.pos.borrow(); for next in surrounding { let dists = game.dists(&next); if !dists.contains_key(&best_target) { // panic!("from {:?} missing key {:?}", next, best_target); continue; } let dist_to_target = dists[&best_target].dist; if dist_to_target < best_surrounding_move || dist_to_target == best_surrounding_move && game.order(&next) < game.order(&best_surrounding) { best_surrounding_move = dist_to_target; best_surrounding = next; } } return Some(best_surrounding); } fn do_attack(&self, game: &Game) { let surrounding = game.surrounding(&self.pos.borrow(), self.other()); let mut min_target: Option<usize> = None; // find lowest health surrounding enemy for next in surrounding { match game.map[next.row as usize][next.col as usize] { Space::Unit(uid) => { let cur_target = &game.units[uid]; let cur_hp = *cur_target.hp.borrow(); match min_target { Some(min_uid) => { let target = &game.units[min_uid]; let min_hp = *target.hp.borrow(); if cur_hp < min_hp || cur_hp == min_hp && game.order(&cur_target.pos.borrow()) < game.order(&target.pos.borrow()) { min_target = Some(uid); } } None => { min_target = Some(uid); } } } _ => {} } } match min_target { Some(target_id) => { game.units[target_id].damage(game.elf_damage); } None => { return; } } } fn damage(&self, elf_damage: i32) { let damage = match self.unit_type { GOBLIN => elf_damage, // goblins take X damage _ => 3, // elves take 3 damage }; let hp = *self.hp.borrow(); let mut target_hp = self.hp.borrow_mut(); *target_hp = hp - damage; } fn do_turn(&self, game: &Game) { if *self.hp.borrow() < 1 { return; } // println!("Doing turn for {} at {:?}", self.idx, *self.pos.borrow()); let new_pos = self.do_move(game); match new_pos { Some(new_pos) => *self.pos.borrow_mut() = new_pos, _ => {} } self.do_attack(game); } } #[derive(Clone, Copy, Debug)] struct DijkstraNode { visited: bool, dist: usize, pos: Pos, 
from: Option<Pos>, } impl Game { fn score(&self) -> i32 { let mut s = 0; for unit in &self.units { let hp = *unit.hp.borrow(); if hp > 0 { s += hp; } } eprintln!("rounds: {} sum {}", self.round, s); return s as i32 * self.round as i32; } fn elf_winner(&self) -> bool { for unit in &self.units { if *unit.hp.borrow() < 1 && unit.unit_type == ELF { return false; } } return true; } fn winner(&self) -> bool { let mut has_elf = false; let mut has_gob = false; for r in 0..self.rows { for c in 0..self.cols { match self.map[r][c] { Space::Unit(uid) => { if self.units[uid].unit_type == GOBLIN { has_gob = true; } else if self.units[uid].unit_type == ELF { has_elf = true; } } _ => {} } } } let winner = !(has_gob && has_elf); winner } fn do_round(&mut self) { let mut units: Vec<usize> = self.units.iter().map(|unit| unit.idx).collect(); units.sort_by(|a, b| { self.order(&self.units[*a].pos.borrow()) .cmp(&self.order(&self.units[*b].pos.borrow())) }); for uid in units { if *self.units[uid].hp.borrow() > 0 && self.winner() { return; } let unit = &self.units[uid]; unit.do_turn(&self); self.update_units(); } self.round += 1; eprintln!("did round: {}", self.round); } fn update_units(&mut self) { // clear units for r in 0..self.rows { for c in 0..self.cols { match self.map[r][c] { Space::Unit(_) => self.map[r][c] = Space::Empty, _ => {} } } } // replace units for (uid, unit) in self.units.iter().enumerate() { let hp = *unit.hp.borrow(); if hp > 0 { self.map[unit.pos.borrow().row as usize][unit.pos.borrow().col as usize] = Space::Unit(uid); } } } fn order(&self, pos: &Pos) -> i32 { return pos.row * self.cols as i32 + pos.col; } /// get the number of steps to each position fn dists(&self, start: &Pos) -> HashMap<Pos, DijkstraNode> { let mut dists: HashMap<Pos, DijkstraNode> = HashMap::new(); dists.insert( *start, DijkstraNode { visited: false, dist: 0, from: None, pos: *start, }, ); let mut unvisited: Vec<Pos> = vec![]; unvisited.push(*start); let mut visited = 0; loop { if 
unvisited.len() == 0 { break; } let current = unvisited.pop().unwrap(); let surrounding = self.surrounding(&current, NONE); // println!("{:?} surrounding: {:?}", current, surrounding); for next in surrounding.iter() { let current_distance = dists[&current].dist + 1; let mut value = dists.entry(*next).or_insert(DijkstraNode { visited: false, dist: current_distance, from: Some(current), pos: *next, }); if value.dist > current_distance || value.dist == current_distance && self.order(&current) < self.order(next) { value.dist = current_distance; value.from = Some(current); } if !value.visited && !unvisited.contains(next) { unvisited.push(*next); } } dists.get_mut(&current).unwrap().visited = true; visited += 1; // println!("visited: {} nodes", visited); // println!("unvisi: {:?} nodes", unvisited); unvisited.sort_by(|a, b| dists[b].dist.cmp(&dists[a].dist)); } dists } fn is_open(&self, pos: &Pos, include_type: char) -> bool { if pos.row < self.rows as i32 && pos.row > -1 && pos.col < self.cols as i32 && pos.col > -1 { match self.map[pos.row as usize][pos.col as usize] { Space::Unit(uid) => { return self.units[uid].unit_type == include_type; } Space::Wall => return false, Space::Empty => return true, } } false } fn surrounding(&self, pos: &Pos, include_type: char) -> Vec<Pos> { let mut surround = Vec::new(); let above = Pos { col: pos.col, row: pos.row - 1, }; if self.is_open(&above, include_type) { surround.push(above); } let below = Pos { col: pos.col, row: pos.row + 1, }; if self.is_open(&below, include_type) { surround.push(below); } let left = Pos { col: pos.col - 1, row: pos.row, }; if self.is_open(&left, include_type) { surround.push(left); } let right = Pos { col: pos.col + 1, row: pos.row, }; if self.is_open(&right, include_type) { surround.push(right); } surround } } impl FromStr for Game { type Err = Box<dyn std::error::Error>; fn from_str(s: &str) -> Result<Self, Self::Err> { let mut units = vec![]; let mut map = vec![]; for (r_idx, line) in 
s.lines().enumerate() { let mut row = vec![]; for (c_idx, c) in line.chars().enumerate() { match c { '#' => row.push(Space::Wall), 'E' | 'G' => { row.push(Space::Unit(units.len())); units.push(Unit { unit_type: c, pos: RefCell::new(Pos { row: r_idx as i32, col: c_idx as i32, }), idx: units.len(), hp: RefCell::new(200), }); } '.' => row.push(Space::Empty), _ => panic!("Game can't handle {}", c), } } map.push(row); } let rows = map.len(); let cols = map[0].len(); Ok(Self { units, map, rows, cols, round: 0, elf_damage: 3, }) } } impl ToString for Game { fn to_string(&self) -> String { let mut st = String::with_capacity(self.rows * self.cols * 2); for r in 0..self.rows { for c in 0..self.cols { let c = match self.map[r][c] { Space::Empty => '.', Space::Unit(uid) => self.units[uid].unit_type, Space::Wall => '#', }; st.push(c); } st.push('\n'); } st } } #[cfg(test)] mod tests { use super::*; #[test] fn test_dists() { let input = include_str!("test_path.txt"); let game: Game = input.parse().unwrap(); let dists = game.dists(&Pos { row: 0, col: 0 }); println!("{:#?}, ", dists); assert_eq!(dists[&Pos { row: 0, col: 0 }].dist, 0); assert_eq!(dists[&Pos { row: 0, col: 2 }].dist, 2); assert_eq!(dists[&Pos { row: 2, col: 0 }].dist, 8); } #[test] fn test_move() { let input = include_str!("test_move.txt"); let mut game: Game = input.parse().unwrap(); let dists = game.dists(&Pos { row: 0, col: 0 }); println!("{:#?}, ", dists); let elf = &game.units[0]; let new_pos = elf.do_move(&game); // need to split up unit structure from game structure assert_eq!(new_pos.unwrap(), Pos { row: 0, col: 1 }); } #[test] fn test_example() { let input = include_str!("test.txt"); let mut game: Game = input.parse().unwrap(); assert_eq!(game.to_string(), input); println!("{}", game.to_string()); game.do_round(); let round1 = include_str!("test_round_1.txt"); println!("{}", game.to_string()); println!("{}", round1.to_string()); assert_eq!(game.to_string(), round1); // println!("game: \n{}", 
game.to_string()); } #[test] fn test_1() { let input = include_str!("test_battle.txt"); let mut game: Game = input.parse().unwrap(); println!("{}", game.to_string()); for i in 0..100 { game.do_round(); if game.winner() { break; } } assert_eq!(game.score(), 27730); } #[test] fn test_2() { let input = include_str!("test_battle2.txt"); let mut game: Game = input.parse().unwrap(); println!("{}", game.to_string()); for i in 0..100 { game.do_round(); if game.winner() { break; } } assert_eq!(game.score(), 36334); } #[test] fn test_3() { let input = include_str!("test_battle3.txt"); let mut game: Game = input.parse().unwrap(); println!("{}", game.to_string()); for i in 0..100 { game.do_round(); if game.winner() { break; } } assert_eq!(game.score(), 39514); } #[test] fn test_4() { let input = include_str!("test_battle4.txt"); let mut game: Game = input.parse().unwrap(); println!("{}", game.to_string()); for i in 0..100 { game.do_round(); if game.winner() { break; } } assert_eq!(game.score(), 18740); } #[test] fn test_elf_win() { let input = include_str!("test_elf.txt"); let res = part_b(input); assert_eq!(res, 1140); } #[test] fn test_elf_win2() { let input = include_str!("test_elf2.txt"); let res = part_b(input); assert_eq!(res, 6474) } #[test] fn test_elf_win3() { let input = include_str!("test_elf3.txt"); let res = part_b(input); assert_eq!(res, 3478) } }
true
064191738bee08fd95ea96ef12d750be25fa08fa
Rust
xunilrj/sandbox
/sources/rust/relm/runtime/src/executor.rs
UTF-8
2,631
2.71875
3
[ "Apache-2.0" ]
permissive
use std::boxed::*; use std::future::*; use std::pin::*; use std::task::*; pub struct Executor { next: usize, handles: [Option<Pin<Box<dyn std::future::Future<Output = ()>>>>; 100], } pub static mut VTABLE: RawWakerVTable = std::task::RawWakerVTable::new( Executor::clone, Executor::wake, Executor::wake_by_ref, Executor::drop, ); impl Executor { unsafe fn clone(v: *const ()) -> RawWaker { std::task::RawWaker::new(v, &VTABLE) } unsafe fn wake(v: *const ()) { let waker = Self::clone(v); let waker = std::task::Waker::from_raw(waker); let mut cx = Context::from_waker(&waker); let handle = v as *mut Option<Pin<Box<dyn Future<Output = ()>>>>; let handle = &mut *handle; let remove = match handle.as_mut() { Some(handle) => match handle.as_mut().poll(&mut cx) { Poll::Ready(_) => true, Poll::Pending => false, }, None => false, }; if remove { handle.take(); } } unsafe fn wake_by_ref(v: *const ()) { Self::wake(v) } unsafe fn drop(_: *const ()) {} pub fn new() -> Self { use std::mem::MaybeUninit; Self { next: 0, handles: unsafe { let mut arr: [Option<Pin<Box<dyn std::future::Future<Output = ()>>>>; 100] = MaybeUninit::zeroed().assume_init(); for dst in &mut arr[..] 
{ std::ptr::write(dst, None); } arr }, } } async fn closure<T, THandle: std::future::Future<Output = T>>(handle: THandle) { let _ = handle.await; } pub fn push<T: 'static, THandle: 'static + std::future::Future<Output = T>>( &mut self, handle: THandle, ) -> RawWaker { let index = self.next; self.next += 1; let handle = Self::closure(handle); let handle = Box::pin(handle); self.handles[index] = Some(handle); let v = self.handles.get_mut(index).unwrap() as *mut Option<Pin<Box<dyn Future<Output = ()>>>> as *const (); unsafe { Self::clone(v) } } pub fn spawn<T: 'static, THandle: 'static + std::future::Future<Output = T>>( &mut self, handle: THandle, ) { let waker = self.push(handle); let waker = unsafe { std::task::Waker::from_raw(waker) }; waker.wake(); } } #[no_mangle] pub fn wake_by_id(addr: usize) { let waker = unsafe { Box::from_raw(addr as *mut Waker) }; waker.wake(); }
true
a425b3a9e2e587e93c69f1e3b044741fca11ead4
Rust
tz-rs/tz-rs
/rpc/src/responses/chains/blocks/block_ids_in_chain.rs
UTF-8
4,160
3.140625
3
[ "MIT" ]
permissive
use crate::errors::ParseError; use crate::responses::{json_array, Response}; use crate::types::Unistring; use serde::{Deserialize, Serialize}; use serde_json::json; use std::fmt; #[derive(Serialize, Deserialize)] pub struct BlocksInChainResponse { pub block_ids: json_array::JsonArray<Unistring>, } impl fmt::Display for BlocksInChainResponse { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", json!(self).to_string()) } } impl Response for BlocksInChainResponse { /// Parses a response string in the form /// `"[["alpha_numeric_block_id_string"], ["..."]]"` or /// `[[{ "invalid_utf8_string": [ integer ∈ [0, 255] ... ] }], [...]]` into a /// [`BlocksInChainResponse`](Self). fn from_response_str(response: &str) -> Result<Self, ParseError> { let block_ids = json_array::JsonArray::from_response_str(response)?; Ok(Self { block_ids }) } } #[cfg(test)] mod test { use super::*; #[test] fn get_blocks_in_chain_from_response_empty_fail() { let mock_response = ""; let blocks_response = BlocksInChainResponse::from_response_str(mock_response); assert!(blocks_response.is_err()); } #[test] fn get_blocks_in_chain_from_empty_list_ok() { let mock_response = "[]"; let blocks_response = BlocksInChainResponse::from_response_str(mock_response); assert!(blocks_response.is_ok()); let blocks = blocks_response.unwrap().block_ids.into_vec(); assert!(blocks.is_empty()); } #[test] fn get_blocks_in_chain_from_empty_bulk_array_response_ok() { let mock_response = "[[]]"; let blocks_response = BlocksInChainResponse::from_response_str(mock_response); assert!(blocks_response.is_ok()); let mut blocks = blocks_response.unwrap().block_ids.into_vec(); let parsed_block = blocks.pop().unwrap(); assert!(parsed_block.to_string() == ""); } #[test] fn get_blocks_in_chain_from_response_single_ok() { let mock_block_id = "blockId1"; let mock_response = format!(r#"[["{}"]]"#, mock_block_id); let blocks_response = BlocksInChainResponse::from_response_str(&mock_response); 
assert!(blocks_response.is_ok()); let mut blocks = blocks_response.unwrap().block_ids.into_vec(); assert!(blocks.len() == 1); let parsed_block = blocks.pop(); assert!(parsed_block.is_some()); let parsed_block_id = parsed_block.unwrap().to_string(); assert_eq!(parsed_block_id, mock_block_id); } #[test] fn get_blocks_in_chain_from_invalid_utf8_response_single_ok() { let mock_nested_object = r#"{"invalid_utf8_string":[1,2,3,4]}"#; let mock_response = format!(r#"[[{}]]"#, mock_nested_object); let blocks_response = BlocksInChainResponse::from_response_str(&mock_response); assert!(blocks_response.is_ok()); let mut blocks = blocks_response.unwrap().block_ids.into_vec(); assert!(blocks.len() == 1); let parsed_block = blocks.pop(); assert!(parsed_block.is_some()); let parsed_block_id = parsed_block.unwrap().to_string(); assert_eq!(parsed_block_id, mock_nested_object); } #[test] fn get_blocks_in_chain_from_response_multiple_ok() { let mock_block_ids = ["blockId1", "blockId2", "blockId3"]; let mock_response = format!( "[{}]", mock_block_ids .iter() .map(|block_id| format!(r#"["{}"]"#, &block_id)) .collect::<Vec<String>>() .join(",") ); let blocks_response = BlocksInChainResponse::from_response_str(&mock_response); assert!(blocks_response.is_ok()); let mut blocks = blocks_response.unwrap().block_ids.into_vec(); assert!(blocks.len() == 3); for mock_block_id in mock_block_ids.iter().rev() { let parsed_block = blocks.pop().unwrap(); let parsed_block_id = parsed_block.to_string(); assert_eq!(parsed_block_id, mock_block_id.to_string()); } } }
true
935d1eec5666fc39644bb02e357df1594f05f3e9
Rust
xgillard/ddo
/ddo/src/abstraction/mdd.rs
UTF-8
5,817
2.59375
3
[ "MIT" ]
permissive
// Copyright 2020 Xavier Gillard // // Permission is hereby granted, free of charge, to any person obtaining a copy of // this software and associated documentation files (the "Software"), to deal in // the Software without restriction, including without limitation the rights to // use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of // the Software, and to permit persons to whom the Software is furnished to do so, // subject to the following conditions: // // The above copyright notice and this permission notice shall be included in all // copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS // FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR // COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER // IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN // CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. use crate::{SubProblem, Completion, Reason, Problem, Relaxation, StateRanking, Solution, Cutoff, Barrier, DominanceChecker}; // FIXME: Replace that with the following enum definition when const generics allow enum types /// What type of cut-set are we using for relaxed DDs ? pub type CutsetType = u8; /// enqueue the last layer with only exact nodes pub const LAST_EXACT_LAYER: u8 = 1; /// enqueue all exact nodes that have at least a relaxed child node pub const FRONTIER: u8 = 2; /* #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum CutsetType { /// enqueue the last layer with only exact nodes LastExactLayer, /// enqueue all exact nodes that have at least a relaxed child node Frontier, } */ /// How are we to compile the decision diagram ? 
#[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum CompilationType { /// If you want to use a pure DP resolution of the problem Exact, /// If you want to compile a restricted DD which yields a lower bound on the objective Relaxed, /// If you want to compile a relaxed DD which yields an upper bound on the objective Restricted, } /// The set of parameters used to tweak the compilation of a MDD pub struct CompilationInput<'a, State> { /// How is the mdd being compiled ? pub comp_type: CompilationType, /// A reference to the original problem we try to maximize pub problem: &'a dyn Problem<State = State>, /// The relaxation which we use to merge nodes in a relaxed dd pub relaxation: &'a dyn Relaxation<State = State>, /// The state ranking heuristic to chose the nodes to keep and those to discard pub ranking: &'a dyn StateRanking<State = State>, /// The cutoff used to decide when to stop trying to solve the problem pub cutoff: &'a dyn Cutoff, /// What is the maximum width of the mdd ? pub max_width: usize, /// The subproblem whose state space must be explored pub residual: &'a SubProblem<State>, /// The best known lower bound at the time when the dd is being compiled pub best_lb: isize, /// Data structure containing info about past compilations used to prune the search pub barrier: &'a dyn Barrier<State = State>, pub dominance: &'a dyn DominanceChecker<State = State>, } /// This trait describes the operations that can be expected from an abstract /// decision diagram regardless of the way it is implemented. pub trait DecisionDiagram { /// This associated type corresponds to the `State` type of the problems /// that can be solved when using this DD. type State; /// This method provokes the compilation of the DD based on the given /// compilation input (compilation type, and root subproblem) fn compile(&mut self, input: &CompilationInput<Self::State>) -> Result<Completion, Reason>; /// Returns true iff the DD which has been compiled is an exact DD. 
fn is_exact(&self) -> bool; /// Returns the optimal value of the objective function or None when no /// feasible solution has been identified (no r-t path) either because /// the subproblem at the root of this DD is infeasible or because restriction /// has removed all feasible paths that could potentially have been found. fn best_value(&self) -> Option<isize>; /// Returns the best solution of this subproblem as a sequence of decision /// maximizing the objective value. When no feasible solution exists in the /// approximate DD, it returns the value None instead. fn best_solution(&self) -> Option<Solution>; /// Returns the value of the objective function for the best exact node in the DD /// or None when no feasible solution has been identified (no r-t path) either because /// the subproblem at the root of this DD is infeasible or because restriction/relaxation /// has removed all feasible paths that could potentially have been found. fn best_exact_value(&self) -> Option<isize>; /// Returns the best exact solution of this subproblem as a sequence of decision /// maximizing the objective value. When no feasible solution exists in the /// approximate DD, it returns the value None instead. fn best_exact_solution(&self) -> Option<Solution>; /// Iteratively applies the given function `func` to each element of the /// exact cut-set that was computed during DD compilation. /// /// # Important: /// This can only be called if the DD was compiled in relaxed mode. /// All implementations of the DecisionDiagram trait are allowed to assume /// this method will be called at most once per relaxed DD compilation. fn drain_cutset<F>(&mut self, func: F) where F: FnMut(SubProblem<Self::State>); }
true
f97ccabaee393768ca2b5a70bc0898b01b4f7c9c
Rust
pcsm/simulacrum
/simulacrum/examples/manual.rs
UTF-8
3,225
3.375
3
[ "MIT" ]
permissive
// This example demonstrates everything that can be done with Simulacrum at the // at the lowest level API. extern crate simulacrum; use simulacrum::*; trait CoolTrait { // Shared self fn foo(&self); // Mutable self fn bar(&mut self); // One parameter and returning a value fn goop(&mut self, flag: bool) -> u32; // Multiple parameters fn zing(&self, first: i32, second: bool); // Static reference fn boop(&self, name: &'static str); // Shared reference fn store(&self, val: &i64); // Mutable reference fn toggle(&self, bit: &mut bool); } pub struct CoolTraitMock { e: Expectations } impl CoolTraitMock { pub fn new() -> Self { Self { e: Expectations::new() } } pub fn then(&mut self) -> &mut Self { self.e.then(); self } pub fn expect_foo(&mut self) -> Method<(), ()> { self.e.expect::<(), ()>("foo") } pub fn expect_bar(&mut self) -> Method<(), ()> { self.e.expect::<(), ()>("bar") } pub fn expect_goop(&mut self) -> Method<bool, u32> { self.e.expect::<bool, u32>("goop") } pub fn expect_zing(&mut self) -> Method<(i32, bool), ()> { self.e.expect::<(i32, bool), ()>("zing") } pub fn expect_boop(&mut self) -> Method<&'static str, ()> { self.e.expect::<&'static str, ()>("boop") } pub fn expect_store(&mut self) -> Method<*const i64, ()> { self.e.expect::<*const i64, ()>("store") } pub fn expect_toggle(&mut self) -> Method<*mut bool, ()> { self.e.expect::<*mut bool, ()>("toggle") } } impl CoolTrait for CoolTraitMock { fn foo(&self) { self.e.was_called::<(), ()>("foo", ()) } fn bar(&mut self) { self.e.was_called::<(), ()>("bar", ()) } fn goop(&mut self, flag: bool) -> u32 { self.e.was_called_returning::<bool, u32>("goop", flag) } fn zing(&self, first: i32, second: bool) { self.e.was_called::<(i32, bool), ()>("zing", (first, second)) } fn boop(&self, name: &'static str) { self.e.was_called::<&'static str, ()>("boop", name) } fn store(&self, val: &i64) { self.e.was_called::<*const i64, ()>("store", val) } fn toggle(&self, bit: &mut bool) { self.e.was_called::<*mut bool, ()>("toggle", 
bit) } } fn main() { // Create a mock object let mut m = CoolTraitMock::new(); // Set up expectations for it m.expect_bar().called_never(); m.expect_foo().called_once(); m.then().expect_goop().called_once().with(true).returning(|_| 5); m.then().expect_zing().called_once().with(params!(13, false)); m.expect_boop().called_times(2); m.expect_store().called_once().with(deref(777)); m.expect_toggle().called_once().with(deref(true)) .modifying(|&mut arg| { unsafe { *arg = false } }); // Execute test code m.foo(); assert_eq!(m.goop(true), 5); m.zing(13, false); m.boop("hey"); m.boop("yo"); m.store(&777); let mut b = true; m.toggle(&mut b); assert_eq!(b, false); // When the mock object is dropped, its expectations will be evaluated }
true
e8fb8e59acb178331c71a3a3c31afbe800812f08
Rust
dalcde/x11rb
/src/rust_connection/stream.rs
UTF-8
8,440
2.890625
3
[ "MIT", "Apache-2.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
use std::io::{IoSlice, Result};
use std::net::{Ipv4Addr, SocketAddr, TcpStream};
#[cfg(unix)]
use std::os::unix::io::{AsRawFd, RawFd};
#[cfg(unix)]
use std::os::unix::net::UnixStream;

use super::fd_read_write::{ReadFD, WriteFD};
use super::xauth::Family;
use crate::utils::RawFdContainer;

/// A wrapper around a `TcpStream` or `UnixStream`.
#[derive(Debug)]
pub enum Stream {
    TcpStream(TcpStream),
    #[cfg(unix)]
    UnixStream(UnixStream),
}

impl Stream {
    /// Try to connect to the X11 server described by the given arguments.
    ///
    /// A non-empty `host` (other than "unix") selects TCP; otherwise a unix
    /// socket is attempted first (on unix platforms), then TCP to localhost.
    pub fn connect(host: &str, protocol: Option<&str>, display: u16) -> Result<Self> {
        // X11 over TCP listens on port 6000 + display number.
        const TCP_PORT_BASE: u16 = 6000;

        if (protocol.is_none() || protocol != Some("unix")) && !host.is_empty() && host != "unix" {
            Ok(Stream::TcpStream(TcpStream::connect((
                host,
                TCP_PORT_BASE + display,
            ))?))
        } else {
            // On non-unix, this variable is not mutated.
            #[allow(unused_mut)]
            let mut error = None;

            #[cfg(unix)]
            {
                if protocol.is_none() || protocol == Some("unix") {
                    // Conventional location of the X11 unix socket.
                    let file_name = format!("/tmp/.X11-unix/X{}", display);

                    // TODO: Try abstract socket (file name with prepended '\0')
                    // Not supported on Rust right now: https://github.com/rust-lang/rust/issues/42048

                    match UnixStream::connect(file_name) {
                        Ok(stream) => return Ok(Stream::UnixStream(stream)),
                        // Remember the error so it can be reported if the
                        // TCP fallback below does not apply either.
                        Err(err) => error = Some(err),
                    }
                }
            }

            if protocol.is_none() && host.is_empty() {
                Ok(Stream::TcpStream(TcpStream::connect((
                    "localhost",
                    TCP_PORT_BASE + display,
                ))?))
            } else {
                use crate::errors::ConnectError;
                use std::io::{Error, ErrorKind};
                Err(error.unwrap_or_else(|| {
                    Error::new(ErrorKind::Other, ConnectError::DisplayParsingError)
                }))
            }
        }
    }
}

impl Stream {
    /// Get the peer's address in a format suitable for xauth.
    ///
    /// The returned values can be directly given to `super::xauth::get_auth` as `family` and
    /// `address`.
    pub(crate) fn peer_addr(&self) -> Result<(Family, Vec<u8>)> {
        match self {
            Stream::TcpStream(stream) => {
                // Get the v4 address of the other end (if there is one)
                let ip = match stream.peer_addr()? {
                    SocketAddr::V4(addr) => *addr.ip(),
                    SocketAddr::V6(addr) => {
                        let ip = addr.ip();
                        if ip.is_loopback() {
                            // This is a local connection.
                            // Use LOCALHOST to cause a fall-through in the code below.
                            Ipv4Addr::LOCALHOST
                        } else if let Some(ip) = ip.to_ipv4() {
                            // Let the ipv4 code below handle this
                            ip
                        } else {
                            // Okay, this is really a v6 address
                            return Ok((Family::Internet6, ip.octets().to_vec()));
                        }
                    }
                };

                // Handle the v4 address
                if !ip.is_loopback() {
                    return Ok((Family::Internet, ip.octets().to_vec()));
                } else {
                    // This is only reached for loopback addresses. The code below handles this.
                }
            }
            #[cfg(unix)]
            Stream::UnixStream(_) => {
                // Fall through to the code below.
            }
        };

        // If we get to here: This is a local connection. Use the host name as address.
        let hostname = gethostname::gethostname()
            .to_str()
            .map(|name| name.as_bytes().to_vec())
            .unwrap_or_else(Vec::new);
        Ok((Family::Local, hostname))
    }

    /// Creates a new independently owned handle to the underlying socket.
    ///
    /// The returned `Stream` is a reference to the same stream that this object references. Both
    /// handles will read and write the same stream of data, and options set on one stream will be
    /// propagated to the other stream.
    pub fn try_clone(&self) -> Result<Stream> {
        match self {
            Stream::TcpStream(stream) => Ok(Stream::TcpStream(stream.try_clone()?)),
            #[cfg(unix)]
            Stream::UnixStream(stream) => Ok(Stream::UnixStream(stream.try_clone()?)),
        }
    }

    // Both variants expose a raw fd on unix, which is what the sendmsg /
    // recvmsg based FD-passing code below operates on.
    #[cfg(unix)]
    fn as_raw_fd(&self) -> RawFd {
        match self {
            Stream::TcpStream(stream) => stream.as_raw_fd(),
            Stream::UnixStream(stream) => stream.as_raw_fd(),
        }
    }
}

// Write `bufs` to `fd`, passing along any file descriptors in `fds` as a
// SCM_RIGHTS control message. On success the FDs are considered sent and
// `fds` is cleared.
#[cfg(unix)]
fn do_write(fd: RawFd, bufs: &[IoSlice<'_>], fds: &mut Vec<RawFdContainer>) -> Result<usize> {
    use nix::sys::{
        socket::{sendmsg, ControlMessage, MsgFlags},
        uio::IoVec,
    };

    let iov = bufs
        .iter()
        .map(|b| IoVec::from_slice(&**b))
        .collect::<Vec<_>>();

    let res = if !fds.is_empty() {
        let fds = fds.iter().map(|fd| fd.as_raw_fd()).collect::<Vec<_>>();
        let cmsgs = [ControlMessage::ScmRights(&fds[..])];
        sendmsg(fd, &iov, &cmsgs, MsgFlags::empty(), None)
    } else {
        sendmsg(fd, &iov, &[], MsgFlags::empty(), None)
    };

    // Nothing touched errno since sendmsg() failed
    let res = res.map_err(|_| std::io::Error::last_os_error())?;

    // We successfully sent all FDs
    fds.clear();

    Ok(res)
}

impl WriteFD for Stream {
    fn write(&mut self, buf: &[u8], fds: &mut Vec<RawFdContainer>) -> Result<usize> {
        #[cfg(unix)]
        {
            do_write(self.as_raw_fd(), &[IoSlice::new(buf)], fds)
        }
        #[cfg(not(unix))]
        {
            use std::io::{Error, ErrorKind, Write};
            // FD passing needs SCM_RIGHTS, which only exists on unix sockets.
            if !fds.is_empty() {
                return Err(Error::new(ErrorKind::Other, "FD passing is unsupported"));
            }
            match self {
                Stream::TcpStream(stream) => stream.write(buf),
            }
        }
    }

    fn write_vectored(
        &mut self,
        bufs: &[IoSlice<'_>],
        fds: &mut Vec<RawFdContainer>,
    ) -> Result<usize> {
        #[cfg(unix)]
        {
            do_write(self.as_raw_fd(), bufs, fds)
        }
        #[cfg(not(unix))]
        {
            use std::io::{Error, ErrorKind, Write};
            if !fds.is_empty() {
                return Err(Error::new(ErrorKind::Other, "FD passing is unsupported"));
            }
            match self {
                Stream::TcpStream(stream) => stream.write_vectored(bufs),
            }
        }
    }

    fn flush(&mut self) -> Result<()> {
        // We do no buffering
        Ok(())
    }
}

impl ReadFD for Stream {
    fn read(&mut self, buf: &mut [u8], fd_storage: &mut Vec<RawFdContainer>) -> Result<usize> {
        #[cfg(unix)]
        {
            use nix::sys::{
                socket::{recvmsg, ControlMessageOwned, MsgFlags},
                uio::IoVec,
            };

            // Chosen by checking what libxcb does
            const MAX_FDS_RECEIVED: usize = 16;
            let mut cmsg = nix::cmsg_space!([RawFd; MAX_FDS_RECEIVED]);

            let iov = [IoVec::from_mut_slice(buf)];
            let fd = self.as_raw_fd();
            let msg = recvmsg(fd, &iov[..], Some(&mut cmsg), MsgFlags::empty());

            // Nothing touched errno since recvmsg() failed
            let msg = msg.map_err(|_| std::io::Error::last_os_error())?;

            // Collect any file descriptors that arrived via SCM_RIGHTS and
            // hand ownership to the caller through `fd_storage`.
            let fds_received = msg
                .cmsgs()
                .flat_map(|cmsg| match cmsg {
                    ControlMessageOwned::ScmRights(r) => r,
                    _ => Vec::new(),
                })
                .map(RawFdContainer::new);
            fd_storage.extend(fds_received);

            Ok(msg.bytes)
        }
        #[cfg(not(unix))]
        {
            use std::io::Read;
            // No FDs are read, so nothing needs to be done with fd_storage
            let _ = fd_storage;
            match self {
                Stream::TcpStream(stream) => stream.read(buf),
            }
        }
    }
}
true
c9f24ac6590278aa68b4b9e5a000805c2a8c1b72
Rust
cusiman7/AdventOfCode2020
/src/bin/day3.rs
UTF-8
867
2.90625
3
[]
no_license
/// Counts the trees (`'#'`) hit while descending `ski_map` with the slope
/// (`dx` right, `dy` down). The map repeats horizontally, so the column is
/// taken modulo the width of the first row.
///
/// Assumes all rows share the first row's width (matching the original
/// indexing) and that `dy > 0`. An empty map yields 0.
fn check_slope(ski_map: &[Vec<char>], dx: usize, dy: usize) -> u32 {
    if ski_map.is_empty() {
        return 0;
    }
    let width = ski_map[0].len();
    // Visit every `dy`-th row; on the i-th visited row we have moved
    // `i * dx` columns to the right.
    ski_map
        .iter()
        .step_by(dy)
        .enumerate()
        .filter(|&(i, row)| row[(i * dx) % width] == '#')
        .count() as u32
}

fn main() {
    // Parse the puzzle input into a grid of characters, one Vec per line.
    let mut ski_map = Vec::new();
    if let Ok(lines) = aoc::read_lines("./day3.txt") {
        for line in lines {
            if let Ok(line) = line {
                ski_map.push(line.chars().collect::<Vec<_>>());
            }
        }
    }

    // Part 1: a single slope.
    println!("Trees: {}", check_slope(&ski_map, 3, 1));

    // Part 2: product of the tree counts over the five given slopes.
    // Accumulate in u64 so the product of the u32 counts cannot overflow.
    let product: u64 = [(1, 1), (3, 1), (5, 1), (7, 1), (1, 2)]
        .iter()
        .map(|&(dx, dy)| u64::from(check_slope(&ski_map, dx, dy)))
        .product();
    println!("Trees Product: {}", product);
}
true
8d9a8f5653848e1ba2d115e8c61d08c365827504
Rust
willfrew/virtual-rc-controller
/hid-server/src/ws_device.rs
UTF-8
3,705
2.59375
3
[]
no_license
use std::{io, thread, time, fs}; use uhid_virt::{Bus, CreateParams, UHIDDevice, }; use usbd_hid::descriptor::SerializedDescriptor; use actix::{Actor, StreamHandler}; use actix_web_actors::ws; use serde::Deserialize; use serde_json; use crate::reports::{RCControllerInputReport}; #[derive(Deserialize)] struct Coord { x: u8, y: u8, } #[derive(Deserialize)] struct WsReport { left: Coord, right: Coord, } pub struct WsControllerActor { device: UHIDDevice<fs::File>, } impl WsControllerActor { pub fn new() -> io::Result<Self> { let device = UHIDDevice::create(CreateParams { name: String::from("Test RC device"), phys: String::from(""), uniq: String::from(""), bus: Bus::USB, vendor: 0x0b04, product: 0x1867, version: 0, country: 0, rd_data: RCControllerInputReport::desc().to_vec(), })?; Ok(Self { device: device }) } } impl Actor for WsControllerActor { type Context = ws::WebsocketContext<Self>; fn started(&mut self, ctx: &mut Self::Context) { println!("Connection alive!"); } fn stopped(&mut self, ctx: &mut Self::Context) { self.device.destroy(); println!("Connection closed"); } } /// Handler for ws::Message message impl StreamHandler<Result<ws::Message, ws::ProtocolError>> for WsControllerActor { fn handle( &mut self, msg: Result<ws::Message, ws::ProtocolError>, ctx: &mut Self::Context, ) { match msg { Ok(ws::Message::Text(json)) => { let maybeReport: serde_json::Result<WsReport> = serde_json::from_str(&json); match maybeReport { Ok(wsReport) => { let report = RCControllerInputReport { yaw: wsReport.left.x, throttle: wsReport.left.y, pitch: wsReport.right.y, roll: wsReport.right.x, buttons: 0, }; unsafe { let report_bytes = any_as_u8_slice(&report); println!("{:?}", report_bytes); self.device.write(report_bytes); } () }, _ => (), } } // Binary message, 4 bytes // [yaw, throttle, roll, pitch] Ok(ws::Message::Binary(bytes)) => { if !bytes.len() == 4 { println!("Couldn't parse binary message: {:?}", bytes) } else { let report = RCControllerInputReport { yaw: bytes[0], throttle: 
bytes[1], pitch: bytes[3], roll: bytes[2], buttons: 0, }; unsafe { let report_bytes = any_as_u8_slice(&report); println!("{:?}", report_bytes); self.device.write(report_bytes); } } } _ => (), } } } unsafe fn any_as_u8_slice<T: Sized>(p: &T) -> &[u8] { ::std::slice::from_raw_parts( (p as *const T) as *const u8, ::std::mem::size_of::<T>(), ) } fn for_later() -> io::Result<()> { println!("RCControllerInputReport: {:?}\n", RCControllerInputReport::desc()); loop { thread::sleep(time::Duration::from_millis(1000)); } }
true
e29b1d2c3b6b970ce248b79b2b0e62bcef8eec64
Rust
semrov/JsonParser
/src/test_lex.rs
UTF-8
8,482
3.21875
3
[]
no_license
// Unit tests for the hand-written JSON lexer: each test feeds a JSON
// snippet to `Lex` and checks every emitted token's type AND the exact
// byte span it covers in the input.
use lex::{Lex,Token,TokenType};

// Template for writing new assertions:
// assert_eq!(lexer.next(),Token{span: &json[], token_type: } );

// All primitive token kinds in one input: numbers (scientific notation,
// negative, fractional), nested arrays, booleans, null and an empty object.
#[test]
fn test_simple() {
    let json = r#"-3.12e-10 [-4559,12.66,"string",[]] true false null {}"#;
    let mut lexer = Lex::new(json);
    assert_eq!(lexer.next(),Token{span: &json[0..9], token_type: TokenType::Number(-3.12e-10)});
    assert_eq!(lexer.next(),Token{span: &json[10..11], token_type: TokenType::LeftBracket});
    assert_eq!(lexer.next(),Token{span: &json[11..16], token_type: TokenType::Number(-4559.0)});
    assert_eq!(lexer.next(),Token{span: &json[16..17], token_type: TokenType::Comma});
    assert_eq!(lexer.next(),Token{span: &json[17..22], token_type: TokenType::Number(12.66)});
    assert_eq!(lexer.next(),Token{span: &json[22..23], token_type: TokenType::Comma});
    assert_eq!(lexer.next(),Token{span: &json[23..31], token_type: TokenType::String("string".to_string())});
    assert_eq!(lexer.next(),Token{span: &json[31..32], token_type: TokenType::Comma});
    assert_eq!(lexer.next(),Token{span: &json[32..33], token_type: TokenType::LeftBracket});
    assert_eq!(lexer.next(),Token{span: &json[33..34], token_type: TokenType::RightBracket});
    assert_eq!(lexer.next(),Token{span: &json[34..35], token_type: TokenType::RightBracket});
    assert_eq!(lexer.next(),Token{span: &json[36..40], token_type: TokenType::Bool(true)});
    assert_eq!(lexer.next(),Token{span: &json[41..46], token_type: TokenType::Bool(false)});
    assert_eq!(lexer.next(),Token{span: &json[47..51], token_type: TokenType::Null});
    assert_eq!(lexer.next(),Token{span: &json[52..53], token_type: TokenType::LeftBrace});
    assert_eq!(lexer.next(),Token{span: &json[53..54], token_type: TokenType::RightBrace});
    // Exhausted input yields an End token with an empty span.
    assert_eq!(lexer.next(),Token{span: "", token_type : TokenType::End});
}

// A flat object with string, number and boolean members; note that string
// spans include the surrounding quotes.
#[test]
fn test1() {
    let json = r#"{ "integer": 565, "string": "test string", "bool":true, "lie":false }"#;
    let mut lexer = Lex::new(json);
    assert_eq!(lexer.next(),Token{span: &json[0..1], token_type: TokenType::LeftBrace});
    assert_eq!(lexer.next(),Token{span: &json[2..11], token_type: TokenType::String("integer".to_string())});
    assert_eq!(lexer.next(),Token{span: &json[11..12], token_type : TokenType::Colon} );
    assert_eq!(lexer.next(),Token{span: &json[13..16], token_type : TokenType::Number(565.0)} );
    assert_eq!(lexer.next(),Token{span: &json[16..17], token_type : TokenType::Comma} );
    assert_eq!(lexer.next(),Token{span: &json[18..26], token_type : TokenType::String("string".to_string())} );
    assert_eq!(lexer.next(),Token{span: &json[26..27], token_type : TokenType::Colon});
    assert_eq!(lexer.next(),Token{span: &json[28..41], token_type : TokenType::String("test string".to_string())} );
    assert_eq!(lexer.next(),Token{span: &json[41..42], token_type : TokenType::Comma} );
    assert_eq!(lexer.next(),Token{span: &json[43..49], token_type : TokenType::String("bool".to_string())} );
    assert_eq!(lexer.next(),Token{span: &json[49..50], token_type : TokenType::Colon});
    assert_eq!(lexer.next(),Token{span: &json[50..54], token_type : TokenType::Bool(true)});
    assert_eq!(lexer.next(),Token{span: &json[54..55], token_type : TokenType::Comma} );
    assert_eq!(lexer.next(),Token{span: &json[56..61], token_type : TokenType::String("lie".to_string())} );
    assert_eq!(lexer.next(),Token{span: &json[61..62], token_type : TokenType::Colon});
    assert_eq!(lexer.next(),Token{span: &json[62..67], token_type : TokenType::Bool(false)});
    assert_eq!(lexer.next(),Token{span: &json[68..69], token_type : TokenType::RightBrace});
    assert_eq!(lexer.next(),Token{span: "", token_type : TokenType::End});
}

// Negative floats, exponent notation, null and a nested object. (Note the
// input is deliberately lax: there is no comma after the "exp" member —
// the lexer does not validate grammar, only tokenizes.)
#[test]
fn test2() {
    let json = r#"{ "float": -64.452672, "exp":10.3e10 "car":null, "object": { "name":"object" } }"#;
    let mut lexer = Lex::new(json);
    assert_eq!(lexer.next(),Token{span: &json[0..1], token_type: TokenType::LeftBrace});
    assert_eq!(lexer.next(),Token{span: &json[2..9], token_type: TokenType::String("float".to_string())});
    assert_eq!(lexer.next(),Token{span: &json[9..10], token_type : TokenType::Colon});
    assert_eq!(lexer.next(),Token{span: &json[11..21], token_type : TokenType::Number(-64.452672)});
    assert_eq!(lexer.next(),Token{span: &json[21..22], token_type : TokenType::Comma});
    assert_eq!(lexer.next(),Token{span: &json[23..28], token_type: TokenType::String("exp".to_string())});
    assert_eq!(lexer.next(),Token{span: &json[28..29], token_type : TokenType::Colon});
    assert_eq!(lexer.next(),Token{span: &json[29..36], token_type : TokenType::Number(10.3e10)});
    assert_eq!(lexer.next(),Token{span: &json[37..42], token_type: TokenType::String("car".to_string())});
    assert_eq!(lexer.next(),Token{span: &json[42..43], token_type : TokenType::Colon});
    assert_eq!(lexer.next(),Token{span: &json[43..47], token_type : TokenType::Null});
    assert_eq!(lexer.next(),Token{span: &json[47..48], token_type : TokenType::Comma});
    assert_eq!(lexer.next(),Token{span: &json[49..57], token_type: TokenType::String("object".to_string())});
    assert_eq!(lexer.next(),Token{span: &json[57..58], token_type : TokenType::Colon});
    assert_eq!(lexer.next(),Token{span: &json[59..60], token_type: TokenType::LeftBrace});
    assert_eq!(lexer.next(),Token{span: &json[61..67], token_type: TokenType::String("name".to_string())});
    assert_eq!(lexer.next(),Token{span: &json[67..68], token_type : TokenType::Colon});
    assert_eq!(lexer.next(),Token{span: &json[68..76], token_type : TokenType::String("object".to_string())});
    assert_eq!(lexer.next(),Token{span: &json[77..78], token_type: TokenType::RightBrace});
    assert_eq!(lexer.next(),Token{span: &json[79..80], token_type: TokenType::RightBrace});
    assert_eq!(lexer.next(),Token{span: "", token_type : TokenType::End});
}

// Nested arrays mixing strings and numbers.
#[test]
fn test_array() {
    let json = r#"{ "array" : [["anna",170,62],["matthew",182,84]] }"#;
    let mut lexer = Lex::new(json);
    assert_eq!(lexer.next(),Token{span: &json[0..1], token_type: TokenType::LeftBrace});
    assert_eq!(lexer.next(),Token{span: &json[2..9], token_type: TokenType::String("array".to_string())});
    assert_eq!(lexer.next(),Token{span: &json[10..11], token_type : TokenType::Colon});
    assert_eq!(lexer.next(),Token{span: &json[12..13], token_type: TokenType::LeftBracket});
    assert_eq!(lexer.next(),Token{span: &json[13..14], token_type: TokenType::LeftBracket});
    assert_eq!(lexer.next(),Token{span: &json[14..20], token_type: TokenType::String("anna".to_string())});
    assert_eq!(lexer.next(),Token{span: &json[20..21], token_type : TokenType::Comma});
    assert_eq!(lexer.next(),Token{span: &json[21..24], token_type: TokenType::Number(170.0)});
    assert_eq!(lexer.next(),Token{span: &json[24..25], token_type : TokenType::Comma});
    assert_eq!(lexer.next(),Token{span: &json[25..27], token_type: TokenType::Number(62.0)});
    assert_eq!(lexer.next(),Token{span: &json[27..28], token_type: TokenType::RightBracket});
    assert_eq!(lexer.next(),Token{span: &json[28..29], token_type : TokenType::Comma});
    assert_eq!(lexer.next(),Token{span: &json[29..30], token_type: TokenType::LeftBracket});
    assert_eq!(lexer.next(),Token{span: &json[30..39], token_type: TokenType::String("matthew".to_string())});
    assert_eq!(lexer.next(),Token{span: &json[39..40], token_type : TokenType::Comma});
    assert_eq!(lexer.next(),Token{span: &json[40..43], token_type: TokenType::Number(182.0)});
    assert_eq!(lexer.next(),Token{span: &json[43..44], token_type : TokenType::Comma});
    assert_eq!(lexer.next(),Token{span: &json[44..46], token_type: TokenType::Number(84.0)});
    assert_eq!(lexer.next(),Token{span: &json[46..47], token_type: TokenType::RightBracket});
    assert_eq!(lexer.next(),Token{span: &json[47..48], token_type: TokenType::RightBracket});
    assert_eq!(lexer.next(),Token{span: &json[49..50], token_type: TokenType::RightBrace});
    assert_eq!(lexer.next(),Token{span: "", token_type : TokenType::End});
}

// Unicode escapes in the Rust source (not in the JSON): ASCII letters and
// multi-byte characters (¶, α). The span 0..6 counts bytes, not chars.
#[test]
fn test_special_chars() {
    let json = "\"\u{0041}\u{0042}\u{0043}\u{0044}\""; // \u{00B6}\u{03B1}";
    let mut lexer = Lex::new(json);
    assert_eq!(lexer.next(),Token{span: &json[0..6], token_type : TokenType::String("ABCD".to_string())});
    let json = "\"\u{00B6}\u{03B1}\""; // \u{00B6}\u{03B1}";
    let mut lexer = Lex::new(json);
    assert_eq!(lexer.next(),Token{span: &json[0..6], token_type : TokenType::String("¶α".to_string())});
}
true
b2d7380e1146f9ca8886f4313e890ac3ebc3479b
Rust
johnny-human/nlp-euclidean
/src/lib.rs
UTF-8
2,569
3.796875
4
[ "MIT" ]
permissive
use std::env;

/// Iterator adaptor that run-length encodes another iterator: yields
/// `(item, length)` for every maximal run of equal consecutive items.
struct Count<T: Iterator> {
    iter: T,
    // Item read one past the current run; it starts the next run.
    next: Option<T::Item>
}

impl<T: Iterator> From<T> for Count<T> {
    fn from(iter: T) -> Count<T> {
        Count { iter, next: None }
    }
}

impl<T: Iterator> Iterator for Count<T> where T::Item: PartialEq {
    type Item = (T::Item, i32);

    fn next(&mut self) -> Option<(T::Item, i32)> {
        // Start of the run: either the look-ahead stashed by the previous
        // call, or the next item of the parent iterator.
        let ch = match self.next.take() {
            Some(ch) => ch, // still have an item from previous run
            None => match self.iter.next() {
                Some(ch) => ch,
                None => return None, // parent iterator is empty
            }
        };
        let mut count = 1;
        loop {
            match self.iter.next() {
                None => return Some((ch, count)),
                Some(next) => {
                    if next == ch {
                        count += 1
                    } else {
                        // Run ended; remember the first item of the next run.
                        self.next = Some(next);
                        return Some((ch, count));
                    }
                }
            }
        }
    }
}

#[allow(dead_code)]
fn main() {
    let args: Vec<String> = env::args().collect();

    // BUGFIX: need two strings after the program name. The previous check
    // (`args.len() > 1`) panicked on `args[2]` when only one was supplied.
    if args.len() > 2 {
        // turn arguments into strings
        let s = args[1].to_string();
        let t = args[2].to_string();
        println!("Euclidean Distance: {:?}", distance(s, t));
    }
}

/// Euclidean distance between the run-length count vectors of `s` and `t`.
///
/// Each maximal run of `s` contributes one `(count, char)` entry (the same
/// character may appear in several entries when its runs are not adjacent).
/// Each run count of `t` is subtracted from the first entry with a matching
/// character (or appended negated-style as a new entry), and the Euclidean
/// norm of the remaining counts is returned.
pub fn distance(s: String, t: String) -> f64 {
    // Run-length counts of `s`, stored as (count, character).
    let mut v: Vec<(i32, char)> = Count::from(s.chars())
        .map(|(ch, n)| (n, ch))
        .collect();

    for (ch, n) in Count::from(t.chars()) {
        if let Some(entry) = v.iter_mut().find(|&&mut (_, c)| c == ch) {
            entry.0 -= n;
        } else {
            v.push((n, ch));
        }
    }

    // Sum of squared counts, then the square root.
    let sum: i32 = v.iter().map(|&(i, _)| i * i).sum();
    (sum as f64).sqrt()
}

#[test]
fn returns_zero_on_identical_strings() {
    assert_eq!(
        0.0,
        distance("A".to_string(), "A".to_string())
    );
}

#[test]
fn returns_f64_on_nonidentical_strings() {
    assert_eq!(
        1.4142135623730951,
        distance("John".to_string(), "Johnny".to_string())
    );
}
true
27176da7556d0a7615d686003aa2797e630a5d89
Rust
jarkkom/adventofcode-2020
/src/15/part2.rs
UTF-8
4,184
3.953125
4
[]
no_license
use std::collections::HashMap;

/// Iterator for the Advent of Code 2020 day 15 "memory game": after the
/// starting numbers are replayed, each spoken number is the number of turns
/// since the previously spoken number was last said (0 if it is new).
/// Exactly 30,000,000 numbers are produced, then the iterator ends.
#[derive(Debug)]
struct MemoryGame {
    turn: i64,
    starting_numbers: Vec<i64>,
    last_seen: HashMap<i64, i64>,
    previous: i64,
}

impl MemoryGame {
    fn new(starting_numbers: Vec<i64>) -> Self {
        Self {
            turn: 0,
            starting_numbers,
            last_seen: HashMap::new(),
            previous: -1, // sentinel: nothing spoken yet
        }
    }
}

impl Iterator for MemoryGame {
    type Item = i64;

    fn next(&mut self) -> Option<i64> {
        self.turn += 1;
        if self.turn > 30000000 {
            return None;
        }

        let spoken = if !self.starting_numbers.is_empty() {
            // Still replaying the starting numbers.
            self.starting_numbers.remove(0)
        } else if let Some(&seen_on) = self.last_seen.get(&self.previous) {
            // Previously spoken: say how many turns ago that was.
            self.turn - 1 - seen_on
        } else {
            // First occurrence of the previous number.
            0
        };

        // Record when the previous number was spoken, then advance.
        self.last_seen.insert(self.previous, self.turn - 1);
        self.previous = spoken;
        Some(spoken)
    }
}

fn main() {
    let game = MemoryGame::new(vec![16, 1, 0, 18, 12, 14, 19]);
    println!("30000000th {:?}", game.last());
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_iterator() {
        // First ten spoken numbers for the example start 0,3,6, as listed
        // turn by turn in the puzzle description.
        let game = MemoryGame::new(vec![0, 3, 6]);
        let spoken: Vec<i64> = game.take(10).collect();
        assert_eq!(spoken, vec![0, 3, 6, 0, 3, 3, 1, 0, 4, 0]);
    }

    #[test]
    fn test_iterator_next_more() {
        // these take too long to run
        /*
        let game = MemoryGame::new(vec![0, 3, 6]);
        assert_eq!(game.last(), Some(175594));

        let game = MemoryGame::new(vec![1, 3, 2]);
        assert_eq!(game.last(), Some(2578));

        let game = MemoryGame::new(vec![2, 1, 3]);
        assert_eq!(game.last(), Some(3544142));

        let game = MemoryGame::new(vec![1, 2, 3]);
        assert_eq!(game.last(), Some(261214));

        let game = MemoryGame::new(vec![2, 3, 1]);
        assert_eq!(game.last(), Some(6895259));

        let game = MemoryGame::new(vec![3, 2, 1]);
        assert_eq!(game.last(), Some(18));

        let game = MemoryGame::new(vec![3, 1, 2]);
        assert_eq!(game.last(), Some(362));
        */
    }
}
true
1f63b1ac655b76b5d82fdd78bc94b74986966370
Rust
fireice-uk/rus_errors
/src/main.rs
UTF-8
3,001
3.234375
3
[]
no_license
use std::fmt;
use std::error::Error;

/// Error returned by `fun_needs_zero` when it is given `1`.
#[derive(Debug)] // Needed for fmt not to complain
struct ErrorNumberIsOne {
    c: &'static str,
}

impl ErrorNumberIsOne {
    fn new() -> ErrorNumberIsOne {
        ErrorNumberIsOne { c: "You passed one to fun_needs_zero" }
    }
}

impl fmt::Display for ErrorNumberIsOne {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.c)
    }
}

// `source` is deliberately left as the default `None`: these are root
// errors with no underlying cause. The previous implementation returned
// `Some(self)`, which makes an error its own source and sends anything
// that walks the `Error::source` chain into an infinite loop.
impl Error for ErrorNumberIsOne {}

/// Error returned by `fun_needs_zero` when it is given `2`.
#[derive(Debug)] // Needed for fmt not to complain
struct ErrorNumberIsTwo {
    c: &'static str,
}

impl ErrorNumberIsTwo {
    fn new() -> ErrorNumberIsTwo {
        ErrorNumberIsTwo { c: "You passed two to fun_needs_zero" }
    }
}

impl fmt::Display for ErrorNumberIsTwo {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.c)
    }
}

impl Error for ErrorNumberIsTwo {}

/// Error returned by `fun_needs_zero` for any input other than 0, 1 or 2;
/// it carries the offending value.
#[derive(Debug)] // Needed for fmt not to complain
struct ErrorNumberIsOther {
    c: &'static str,
    num: u32,
}

impl ErrorNumberIsOther {
    fn new(other: u32) -> ErrorNumberIsOther {
        ErrorNumberIsOther {
            c: "You passed some other number to fun_needs_zero",
            num: other,
        }
    }
}

impl fmt::Display for ErrorNumberIsOther {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{} : {}", self.c, self.num)
    }
}

impl Error for ErrorNumberIsOther {}

/// Accepts only `0`; every other input maps to a dedicated boxed error type
/// so the caller can `downcast_ref` to find out which case occurred.
fn fun_needs_zero(number: u32) -> Result<(), Box<dyn Error>> {
    match number {
        0 => Ok(()),
        1 => Err(Box::new(ErrorNumberIsOne::new())),
        2 => Err(Box::new(ErrorNumberIsTwo::new())),
        _ => Err(Box::new(ErrorNumberIsOther::new(number))),
    }
}

/// Reads a number from stdin and feeds it to `fun_needs_zero`.
fn error_prone_fun() -> Result<(), Box<dyn Error>> {
    let mut line = String::new();
    // `?` works here because `Box<dyn Error>` implements `From<io::Error>`
    // — this replaces the previous manual `match`/`Box::new` dance and
    // answers the original "any way to make ? operator work here" comment.
    std::io::stdin().read_line(&mut line)?;
    let line: u32 = line.trim().parse()?;
    fun_needs_zero(line)
}

fn main() {
    match error_prone_fun() {
        Ok(_) => println!("error_prone_fun finished ok!"),
        Err(err) => {
            println!("we got an error!");
            // Recover the concrete error type from the boxed trait object.
            if let Some(ev) = err.downcast_ref::<ErrorNumberIsOne>() {
                println!("ErrorNumberIsOne error: {}", ev);
            } else if let Some(ev) = err.downcast_ref::<ErrorNumberIsTwo>() {
                println!("ErrorNumberIsTwo error: {}", ev);
            } else if let Some(ev) = err.downcast_ref::<ErrorNumberIsOther>() {
                println!("ErrorNumberIsOther error: {}", ev);
            } else {
                // e.g. the io::Error or ParseIntError from error_prone_fun.
                println!("Other application error: {}", err);
            }
        }
    }
}
true
4eb5c67657b9133fc1bc760710e8070200ef51b4
Rust
pombredanne/whackadep
/depdive/src/ghcomment.rs
UTF-8
7,613
3.75
4
[ "Apache-2.0" ]
permissive
//! This module abstracts github comment generation
//! by using markdown, html, and emojis

/// Text decoration applied when rendering a string to markdown.
#[derive(PartialEq)]
#[allow(dead_code)]
pub enum TextStyle {
    Plain,
    Bold,
    Italic,
    Code,
}

/// Emojis supported in generated comments (GitHub shortcode form).
#[non_exhaustive]
pub enum Emoji {
    WhiteCheckMark,
    RedCross,
    Warning,
}

/// Accumulates a GitHub comment as a markdown/HTML string.
pub struct GitHubCommentGenerator {
    comment: String,
}

/// This type offers both functionality
/// to get markdown formatted text just as string
/// or build a string along with the type
impl GitHubCommentGenerator {
    /// Creates a generator with an empty comment buffer.
    pub fn new() -> Self {
        Self {
            comment: String::new(),
        }
    }

    /// Returns a copy of the comment built so far.
    ///
    /// Takes `&self` (the original took `&mut self` although nothing is
    /// mutated), so read-only access no longer needs a mutable borrow.
    pub fn get_comment(&self) -> String {
        self.comment.clone()
    }

    /// Appends raw text to the comment buffer.
    pub fn append_comment(&mut self, s: &str) {
        self.comment.push_str(s);
    }

    /// Appends `s` rendered in the given style.
    pub fn add_text(&mut self, s: &str, style: &TextStyle) {
        self.append_comment(&Self::get_text(s, style));
    }

    /// Renders `s` in the given markdown style without appending it.
    pub fn get_text(s: &str, style: &TextStyle) -> String {
        match style {
            TextStyle::Plain => s.to_string(),
            TextStyle::Bold => format!("**{}**", s),
            TextStyle::Italic => format!("*{}*", s),
            TextStyle::Code => format!("` {} `", s),
        }
    }

    /// Appends `count` newline characters.
    pub fn add_newline(&mut self, count: u8) {
        for _i in 0..count {
            self.comment.push('\n');
        }
    }

    /// Appends a bulleted list (one `*` item per entry) plus a blank line.
    pub fn add_bulleted_list<T: AsRef<str>>(&mut self, items: &[T], text_style: &TextStyle) {
        self.append_comment(&Self::get_bulleted_list(items, text_style));
        self.add_newline(2);
    }

    /// Renders a bulleted list without appending it.
    pub fn get_bulleted_list<T: AsRef<str>>(items: &[T], text_style: &TextStyle) -> String {
        let mut s = String::new();
        for item in items {
            s.push_str(&format!(
                "\n * {}",
                Self::get_text(item.as_ref(), text_style)
            ))
        }
        s
    }

    /// Appends a collapsible `<details>` section plus a blank line.
    pub fn add_collapsible_section(&mut self, title: &str, body: &str) {
        self.append_comment(&Self::get_collapsible_section(title, body));
        self.add_newline(2);
    }

    /// Renders a collapsible `<details>`/`<summary>` section without
    /// appending it.
    pub fn get_collapsible_section(title: &str, body: &str) -> String {
        format!(
            "<details>\n\t<summary>{}</summary><br>\n{}\n</details>",
            title, body
        )
    }

    /// Returns the GitHub shortcode for `emoji`.
    pub fn get_emoji(emoji: Emoji) -> &'static str {
        match emoji {
            Emoji::WhiteCheckMark => ":white_check_mark:",
            Emoji::RedCross => ":x:",
            Emoji::Warning => ":warning:",
        }
    }

    /// Appends a markdown header of the given `level` plus a newline.
    pub fn add_header(&mut self, s: &str, level: usize) {
        self.append_comment(&Self::get_header_text(s, level));
        self.add_newline(1);
    }

    /// Renders a markdown header (`level` leading `#`s) without appending it.
    pub fn get_header_text(s: &str, level: usize) -> String {
        let mut header = String::new();
        for _i in 0..level {
            header.push('#')
        }
        header.push_str(&format!(" {}", s));
        header
    }

    /// Maps `true`/`false` to the check-mark / cross emoji.
    pub fn get_checkmark(flag: bool) -> &'static str {
        match flag {
            true => Self::get_emoji(Emoji::WhiteCheckMark),
            false => Self::get_emoji(Emoji::RedCross),
        }
    }

    /// Appends an HTML table surrounded by blank lines.
    pub fn add_html_table<T: AsRef<str>>(&mut self, table: &[Vec<T>]) {
        self.add_newline(1);
        self.append_comment(&Self::get_html_table(table));
        self.add_newline(2);
    }

    /// Renders rows of cells as an HTML `<table>` without appending it.
    pub fn get_html_table<T: AsRef<str>>(table: &[Vec<T>]) -> String {
        let mut s = String::new();
        s.push_str("<table>");
        for row in table {
            s.push_str("<tr>");
            for col in row {
                s.push_str("<td>");
                s.push_str(col.as_ref());
                s.push_str("</td>");
            }
            s.push_str("</tr>");
        }
        s.push_str("</table>");
        s
    }

    /// Renders a markdown hyperlink `[body](url)`.
    pub fn get_hyperlink(body: &str, url: &str) -> String {
        format!("[{}]({})", body, url)
    }
}

impl Default for GitHubCommentGenerator {
    fn default() -> Self {
        GitHubCommentGenerator::new()
    }
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn test_ghcomment_mutliline() {
        let test_str = "rust";
        let mut gh = GitHubCommentGenerator::new();
        gh.add_text(test_str, &TextStyle::Plain);
        gh.add_newline(1);
        gh.add_text(test_str, &TextStyle::Bold);
        gh.add_newline(1);
        gh.add_text(test_str, &TextStyle::Code);
        println!("{}", gh.get_comment());
        assert_eq!(gh.get_comment(), format!("{0}\n**{0}**\n` {0} `", test_str));
    }

    #[test]
    fn test_ghcomment_plain() {
        let test_str = "rust";
        let mut gh = GitHubCommentGenerator::new();
        gh.add_text(test_str, &TextStyle::Plain);
        assert_eq!(gh.get_comment(), test_str);
    }

    #[test]
    fn test_ghcomment_bold() {
        let test_str = "rust";
        let mut gh = GitHubCommentGenerator::new();
        gh.add_text(test_str, &TextStyle::Bold);
        assert_eq!(gh.get_comment(), format!("**{}**", test_str));
    }

    #[test]
    fn test_ghcomment_italic() {
        let test_str = "rust";
        let mut gh = GitHubCommentGenerator::new();
        gh.add_text(test_str, &TextStyle::Italic);
        assert_eq!(gh.get_comment(), format!("*{}*", test_str));
    }

    #[test]
    fn test_ghcomment_code() {
        let test_str = "rust";
        let mut gh = GitHubCommentGenerator::new();
        gh.add_text(test_str, &TextStyle::Code);
        assert_eq!(gh.get_comment(), format!("` {} `", test_str));
    }

    #[test]
    fn test_ghcomment_bulleted_list() {
        let mut gh = GitHubCommentGenerator::new();
        let v = vec!["a".to_string(), "b".to_string(), "c".to_string()];
        let s = "\n * ` a `\n * ` b `\n * ` c `\n\n";
        gh.add_bulleted_list(&v, &TextStyle::Code);
        assert_eq!(gh.get_comment(), s);
    }

    #[test]
    fn test_ghcomment_collapsible_section() {
        let mut gh = GitHubCommentGenerator::new();
        let v = vec!["a".to_string(), "b".to_string(), "c".to_string()];
        let body = GitHubCommentGenerator::get_bulleted_list(&v, &TextStyle::Code);
        let title = "Click to expand!";
        gh.add_collapsible_section(title, &body);
        let s = "<details>\n\t<summary>Click to expand!</summary><br>\n\n * ` a `\n * ` b `\n * ` c `\n</details>\n\n";
        assert_eq!(gh.get_comment(), s);
    }

    #[test]
    fn test_ghcomment_emoji() {
        let mut gh = GitHubCommentGenerator::new();
        let s = format!(
            "testing emoji {} {} {}",
            GitHubCommentGenerator::get_emoji(Emoji::WhiteCheckMark),
            GitHubCommentGenerator::get_emoji(Emoji::RedCross),
            GitHubCommentGenerator::get_emoji(Emoji::Warning)
        );
        gh.add_text(&s, &TextStyle::Plain);
        assert_eq!(
            gh.get_comment(),
            "testing emoji :white_check_mark: :x: :warning:"
        );
    }

    #[test]
    fn test_ghcomment_header() {
        let mut gh = GitHubCommentGenerator::new();
        let test_str = "rust";
        gh.add_header(test_str, 2);
        assert_eq!(gh.get_comment(), format!("## {}\n", test_str));
    }

    #[test]
    fn test_ghcomment_html_table() {
        let mut gh = GitHubCommentGenerator::new();
        gh.add_html_table(&[vec!["first", ":tada:"], vec!["first", ":tada:"]]);
        assert_eq!(gh.get_comment(), "\n<table><tr><td>first</td><td>:tada:</td></tr><tr><td>first</td><td>:tada:</td></tr></table>\n\n");
    }

    #[test]
    fn test_ghcomment_hyperlink() {
        assert_eq!(
            GitHubCommentGenerator::get_hyperlink("google", "www.google.com"),
            "[google](www.google.com)"
        );
    }
}
true
6601e67d3b89ffa8f5c64c62eee781e014fb34bc
Rust
Playing-with-Rust/simple-card-draft
/src/main.rs
UTF-8
789
3.03125
3
[]
no_license
mod card;
mod pack;
mod random;
mod helpers;

use card::Card;
use helpers::ask_input;
use pack::Pack;

fn main() {
    manual_flow()
}

/// Runs a three-pack draft: opens each pack, lets the player pick one
/// card from it, then prints everything that was picked.
fn manual_flow() {
    // The original repeated the open/pick stanza once per pack; a loop
    // removes the duplication and makes the pack count easy to change.
    let mut picked_cards: Vec<Card> = Vec::new();
    for _ in 0..3 {
        let mut pack = Pack::new();
        pack.open();
        // `unwrap` preserved from the original: a failed pick aborts the draft.
        picked_cards.push(pick_from_pack(pack).unwrap());
    }
    println!("Picked cards:\n{:?}", &picked_cards);
}

/// Prompts the player to choose a card from `pack`; the result is whatever
/// `Pack::pick` returns for the entered text (presumably `None` on an
/// invalid choice — confirm against `pack.rs`).
fn pick_from_pack(mut pack: Pack) -> Option<Card> {
    let input = ask_input(Some(format!("{}\n\nPlayer - Choose a card from this pack: ", pack)));
    println!("{}", input);
    pack.pick(input)
}
true
86eac495f42aef429dded6ea7e822ad863fd671e
Rust
BitcoinCredit/E-Bills
/build.rs
UTF-8
3,019
2.71875
3
[ "MIT" ]
permissive
use std::{
    env, fs,
    path::{Path, PathBuf},
};

const IDENTITY_FOLDER_PATH: &str = "identity";
const BILLS_FOLDER_PATH: &str = "bills";
const CONTACT_MAP_FOLDER_PATH: &str = "contacts";
const CSS_FOLDER_PATH: &str = "css";
const IMAGE_FOLDER_PATH: &str = "image";
const TEMPLATES_FOLDER_PATH: &str = "templates";
pub const BOOTSTRAP_FOLDER_PATH: &str = "bootstrap";
const BILLS_KEYS_FOLDER_PATH: &str = "bills_keys";
const FRONTEND_FOLDER_PATH: &str = "frontend_build";

/// Every data directory that must exist in the project root and is
/// mirrored into the build output directory.
const COPY_DIR: [&str; 9] = [
    IDENTITY_FOLDER_PATH,
    BILLS_FOLDER_PATH,
    CONTACT_MAP_FOLDER_PATH,
    CSS_FOLDER_PATH,
    IMAGE_FOLDER_PATH,
    TEMPLATES_FOLDER_PATH,
    BOOTSTRAP_FOLDER_PATH,
    BILLS_KEYS_FOLDER_PATH,
    FRONTEND_FOLDER_PATH,
];

/// A helper function for recursively copying a directory.
///
/// Panics on any I/O error — acceptable in a build script, where a
/// failure should abort the build.
fn copy_dir<P, Q>(from: P, to: Q)
where
    P: AsRef<Path>,
    Q: AsRef<Path>,
{
    let to = to.as_ref().to_path_buf();
    for path in fs::read_dir(from).unwrap() {
        let path = path.unwrap().path();
        // Destination keeps the same file name under `to`. (`join` only
        // borrows, so the needless `to.clone()` from the original is gone.)
        let to = to.join(path.file_name().unwrap());
        if path.is_file() {
            fs::copy(&path, to).unwrap();
        } else if path.is_dir() {
            if !to.exists() {
                fs::create_dir(&to).unwrap();
            }
            copy_dir(&path, to);
        } else {
            /* Skip other content */
        }
    }
}

fn main() {
    init_folders();
    // PROFILE is e.g. `debug` or `release`; mirror each data directory
    // into `target/<profile>/` from scratch on every build.
    let out = env::var("PROFILE").unwrap();
    for dir in COPY_DIR {
        let out = PathBuf::from(format!("target/{}/{}", out, dir));
        if out.exists() {
            fs::remove_dir_all(&out).unwrap();
        }
        fs::create_dir(&out).unwrap();
        copy_dir(dir, &out);
    }
}

/// Creates every directory in `COPY_DIR` in the project root if missing.
///
/// Replaces the original's nine copy-pasted `exists()`/`create_dir`
/// stanzas (flagged `//TODO: for cycle.`) with a single loop;
/// `create_dir_all` is a no-op for directories that already exist.
fn init_folders() {
    for dir in COPY_DIR {
        fs::create_dir_all(dir)
            .unwrap_or_else(|err| panic!("Can't create folder {}: {}", dir, err));
    }
}
true
42d32d9029a5682e92de0e2d3c9965faa943f4b3
Rust
vni/programming
/rust-in-action/ch10/src/bin/listing10_1.rs
UTF-8
290
3.5
4
[]
no_license
/// Adds two integers; the named-function counterpart of the closures in `main`.
fn add(a: i32, b: i32) -> i32 {
    a + b
}

fn main() {
    // Two closures with inferred argument types, behaviorally identical
    // to the named function above.
    let lambda_add = |a, b| a + b;
    let lambda_add_2 = |a, b| a + b;

    let (x, y) = (4, 5);
    println!("add(4, 5): {}", add(x, y));
    println!("lambda_add(4, 5): {}", lambda_add(x, y));
    println!("lambda_add_2(4, 5): {}", lambda_add_2(x, y));
}
true
6ef2fd8ccb92d084508ac674bd6a7882d6d75c02
Rust
GuilloteauQ/tex-rs
/src/math_mode.rs
UTF-8
588
2.890625
3
[]
no_license
use latex_file::LatexFile; /// Math mode use std::io::BufWriter; use std::io::Write; use writable::*; #[derive(Clone)] pub struct MathContent { content: String, } impl MathContent { pub fn new(content: String) -> Self { MathContent { content } } } impl Writable for MathContent { fn write_latex(&self, file: &mut LatexFile) { let mut writer = BufWriter::new(file); self.write_to_buffer(&mut writer); } fn write_to_buffer(&self, mut buf: &mut BufWriter<&mut LatexFile>) { write!(&mut buf, "${}$", self.content).unwrap(); } }
true
8ac22336b041ffc3af73251d7a7732fa61f6c4de
Rust
lperlaki/const-layout-rs
/src/lib.rs
UTF-8
1,993
3.078125
3
[]
no_license
#![feature(const_generics)] use core::mem::{align_of, size_of}; #[doc(hidden)] pub enum Size<const SIZE: usize> {} #[doc(hidden)] pub enum Align<const ALIGN: usize> {} #[doc(hidden)] pub unsafe trait CL { type Size; type Align; } #[doc(hidden)] unsafe impl<T> CL for T { type Size = Size<{ size_of::<T>() }>; type Align = Align<{ align_of::<T>() }>; } /// # EqSize /// Implemented for types with same `mem::size_of` value /// /// /// /// ```rust /// use core::mem::size_of; /// /// use const_layout::EqSize; /// /// fn must_have_same_size<I: EqSize<O>, O>(input: I) -> O { /// assert_eq!(size_of::<I>(), size_of::<O>()); /// unimplemented!() /// } /// /// ``` pub unsafe trait EqSize<Rhs> {} #[doc(hidden)] unsafe impl<X, Y, const S: usize> EqSize<Y> for X where X: CL<Size = Size<S>>, Y: CL<Size = Size<S>>, { } /// # EqAlign /// Implemented for types with same `mem::align_of` value /// /// /// /// ```rust /// use core::mem::align_of; /// /// use const_layout::EqAlign; /// /// fn must_have_same_align<I: EqAlign<O>, O>(input: I) -> O { /// assert_eq!(align_of::<I>(), align_of::<O>()); /// unimplemented!() /// } /// pub unsafe trait EqAlign<Rhs> {} #[doc(hidden)] unsafe impl<X, Y, const A: usize> EqAlign<Y> for X where X: CL<Align = Align<A>>, Y: CL<Align = Align<A>>, { } /// # EqLayout /// Implemented for types with same `mem::size_of` and `mem::align_of` value /// /// /// /// ```rust /// use core::alloc::Layout; /// /// use const_layout::EqLayout; /// /// fn must_have_same_layout<A: EqLayout<B>, B>(a: A, b: B) { /// assert_eq!(Layout::new::<A>(), Layout::new::<B>()); /// } /// pub unsafe trait EqLayout<Rhs>: EqSize<Rhs> + EqAlign<Rhs> {} #[doc(hidden)] unsafe impl<X, Y> EqLayout<Y> for X where X: EqSize<Y> + EqAlign<Y> {} #[cfg(test)] mod tests { use super::EqSize; fn same_size<X: EqSize<Y>, Y>(_x: X, _y: Y) {} #[test] fn it_works() { same_size(1u16, 1i16); } }
true
4dbb1aa055070e02c21641f422477cbe465c9ffd
Rust
Stef-a-d/raytracing
/src/texture.rs
UTF-8
1,053
3.328125
3
[]
no_license
use crate::vec3::{Point3, Color, Vec3}; use std::rc::Rc; pub trait Texture { fn value(&self, u:f64, v: f64, p: &Point3) -> Color; } pub struct SolidColor { color_value: Color, } impl SolidColor { pub fn new(c: Color) -> SolidColor { SolidColor { color_value: c, } } } impl Texture for SolidColor { fn value(&self, u: f64, v: f64, p: &Vec3) -> Color { self.color_value } } pub struct CheckerTexture { odd: Rc<dyn Texture>, even: Rc<dyn Texture>, } impl CheckerTexture { pub fn new(odd: Color, even: Color) -> CheckerTexture { CheckerTexture { odd: Rc::new(SolidColor::new(odd)), even: Rc::new(SolidColor::new(even)), } } } impl Texture for CheckerTexture { fn value(&self, u: f64, v: f64, p: &Point3) -> Color { let sines = (10.0*p.x()).sin() * (10.0 * p.y()).sin() * (10.0*p.z()).sin(); if (sines < 0.0){ self.odd.value(u, v, p) }else{ self.even.value(u, v, p) } } }
true
e13e813df771ba2a574964d7a8b6b77585bfad08
Rust
sigp/lighthouse
/beacon_node/beacon_chain/src/observed_block_producers.rs
UTF-8
17,419
2.96875
3
[ "Apache-2.0" ]
permissive
//! Provides the `ObservedBlockProducers` struct which allows for rejecting gossip blocks from //! validators that have already produced a block. use std::collections::hash_map::Entry; use std::collections::{HashMap, HashSet}; use std::marker::PhantomData; use types::{BeaconBlockRef, Epoch, EthSpec, Hash256, Slot, Unsigned}; #[derive(Debug, PartialEq)] pub enum Error { /// The slot of the provided block is prior to finalization and should not have been provided /// to this function. This is an internal error. FinalizedBlock { slot: Slot, finalized_slot: Slot }, /// The function to obtain a set index failed, this is an internal error. ValidatorIndexTooHigh(u64), } #[derive(Eq, Hash, PartialEq, Debug, Default)] struct ProposalKey { slot: Slot, proposer: u64, } /// Maintains a cache of observed `(block.slot, block.proposer)`. /// /// The cache supports pruning based upon the finalized epoch. It does not automatically prune, you /// must call `Self::prune` manually. /// /// The maximum size of the cache is determined by `slots_since_finality * /// VALIDATOR_REGISTRY_LIMIT`. This is quite a large size, so it's important that upstream /// functions only use this cache for blocks with a valid signature. Only allowing valid signed /// blocks reduces the theoretical maximum size of this cache to `slots_since_finality * /// active_validator_count`, however in reality that is more like `slots_since_finality * /// known_distinct_shufflings` which is much smaller. pub struct ObservedBlockProducers<E: EthSpec> { finalized_slot: Slot, items: HashMap<ProposalKey, HashSet<Hash256>>, _phantom: PhantomData<E>, } impl<E: EthSpec> Default for ObservedBlockProducers<E> { /// Instantiates `Self` with `finalized_slot == 0`. 
fn default() -> Self { Self { finalized_slot: Slot::new(0), items: HashMap::new(), _phantom: PhantomData, } } } pub enum SeenBlock { Duplicate, Slashable, UniqueNonSlashable, } impl SeenBlock { pub fn proposer_previously_observed(self) -> bool { match self { Self::Duplicate | Self::Slashable => true, Self::UniqueNonSlashable => false, } } pub fn is_slashable(&self) -> bool { matches!(self, Self::Slashable) } } impl<E: EthSpec> ObservedBlockProducers<E> { /// Observe that the `block` was produced by `block.proposer_index` at `block.slot`. This will /// update `self` so future calls to it indicate that this block is known. /// /// The supplied `block` **MUST** be signature verified (see struct-level documentation). /// /// ## Errors /// /// - `block.proposer_index` is greater than `VALIDATOR_REGISTRY_LIMIT`. /// - `block.slot` is equal to or less than the latest pruned `finalized_slot`. pub fn observe_proposal( &mut self, block_root: Hash256, block: BeaconBlockRef<'_, E>, ) -> Result<SeenBlock, Error> { self.sanitize_block(block)?; let key = ProposalKey { slot: block.slot(), proposer: block.proposer_index(), }; let entry = self.items.entry(key); let slashable_proposal = match entry { Entry::Occupied(mut occupied_entry) => { let block_roots = occupied_entry.get_mut(); let newly_inserted = block_roots.insert(block_root); let is_equivocation = block_roots.len() > 1; if is_equivocation { SeenBlock::Slashable } else if !newly_inserted { SeenBlock::Duplicate } else { SeenBlock::UniqueNonSlashable } } Entry::Vacant(vacant_entry) => { let block_roots = HashSet::from([block_root]); vacant_entry.insert(block_roots); SeenBlock::UniqueNonSlashable } }; Ok(slashable_proposal) } /// Returns `Ok(true)` if the `block` has been observed before, `Ok(false)` if not. Does not /// update the cache, so calling this function multiple times will continue to return /// `Ok(false)`, until `Self::observe_proposer` is called. 
/// /// ## Errors /// /// - `block.proposer_index` is greater than `VALIDATOR_REGISTRY_LIMIT`. /// - `block.slot` is equal to or less than the latest pruned `finalized_slot`. pub fn proposer_has_been_observed( &self, block: BeaconBlockRef<'_, E>, block_root: Hash256, ) -> Result<SeenBlock, Error> { self.sanitize_block(block)?; let key = ProposalKey { slot: block.slot(), proposer: block.proposer_index(), }; if let Some(block_roots) = self.items.get(&key) { let block_already_known = block_roots.contains(&block_root); let no_prev_known_blocks = block_roots.difference(&HashSet::from([block_root])).count() == 0; if !no_prev_known_blocks { Ok(SeenBlock::Slashable) } else if block_already_known { Ok(SeenBlock::Duplicate) } else { Ok(SeenBlock::UniqueNonSlashable) } } else { Ok(SeenBlock::UniqueNonSlashable) } } /// Returns `Ok(())` if the given `block` is sane. fn sanitize_block(&self, block: BeaconBlockRef<'_, E>) -> Result<(), Error> { if block.proposer_index() >= E::ValidatorRegistryLimit::to_u64() { return Err(Error::ValidatorIndexTooHigh(block.proposer_index())); } let finalized_slot = self.finalized_slot; if finalized_slot > 0 && block.slot() <= finalized_slot { return Err(Error::FinalizedBlock { slot: block.slot(), finalized_slot, }); } Ok(()) } /// Removes all observations of blocks equal to or earlier than `finalized_slot`. /// /// Stores `finalized_slot` in `self`, so that `self` will reject any block that has a slot /// equal to or less than `finalized_slot`. /// /// No-op if `finalized_slot == 0`. pub fn prune(&mut self, finalized_slot: Slot) { if finalized_slot == 0 { return; } self.finalized_slot = finalized_slot; self.items.retain(|key, _| key.slot > finalized_slot); } /// Returns `true` if the given `validator_index` has been stored in `self` at `epoch`. /// /// This is useful for doppelganger detection. 
pub fn index_seen_at_epoch(&self, validator_index: u64, epoch: Epoch) -> bool { self.items.iter().any(|(key, _)| { key.slot.epoch(E::slots_per_epoch()) == epoch && key.proposer == validator_index }) } } #[cfg(test)] mod tests { use super::*; use types::{BeaconBlock, MainnetEthSpec}; type E = MainnetEthSpec; fn get_block(slot: u64, proposer: u64) -> BeaconBlock<E> { let mut block = BeaconBlock::empty(&E::default_spec()); *block.slot_mut() = slot.into(); *block.proposer_index_mut() = proposer; block } #[test] fn pruning() { let mut cache = ObservedBlockProducers::default(); assert_eq!(cache.finalized_slot, 0, "finalized slot is zero"); assert_eq!(cache.items.len(), 0, "no slots should be present"); // Slot 0, proposer 0 let block_a = get_block(0, 0); let block_root = block_a.canonical_root(); assert_eq!( cache .observe_proposal(block_root, block_a.to_ref()) .map(SeenBlock::proposer_previously_observed), Ok(false), "can observe proposer, indicates proposer unobserved" ); /* * Preconditions. */ assert_eq!(cache.finalized_slot, 0, "finalized slot is zero"); assert_eq!(cache.items.len(), 1, "only one slot should be present"); assert_eq!( cache .items .get(&ProposalKey { slot: Slot::new(0), proposer: 0 }) .expect("slot zero should be present") .len(), 1, "only one proposer should be present" ); /* * Check that a prune at the genesis slot does nothing. 
*/ cache.prune(Slot::new(0)); assert_eq!(cache.finalized_slot, 0, "finalized slot is zero"); assert_eq!(cache.items.len(), 1, "only one slot should be present"); assert_eq!( cache .items .get(&ProposalKey { slot: Slot::new(0), proposer: 0 }) .expect("slot zero should be present") .len(), 1, "only one proposer should be present" ); /* * Check that a prune empties the cache */ cache.prune(E::slots_per_epoch().into()); assert_eq!( cache.finalized_slot, Slot::from(E::slots_per_epoch()), "finalized slot is updated" ); assert_eq!(cache.items.len(), 0, "no items left"); /* * Check that we can't insert a finalized block */ // First slot of finalized epoch, proposer 0 let block_b = get_block(E::slots_per_epoch(), 0); let block_root_b = block_b.canonical_root(); assert_eq!( cache .observe_proposal(block_root_b, block_b.to_ref()) .map(SeenBlock::proposer_previously_observed), Err(Error::FinalizedBlock { slot: E::slots_per_epoch().into(), finalized_slot: E::slots_per_epoch().into(), }), "cant insert finalized block" ); assert_eq!(cache.items.len(), 0, "block was not added"); /* * Check that we _can_ insert a non-finalized block */ let three_epochs = E::slots_per_epoch() * 3; // First slot of finalized epoch, proposer 0 let block_b = get_block(three_epochs, 0); assert_eq!( cache .observe_proposal(block_root_b, block_b.to_ref()) .map(SeenBlock::proposer_previously_observed), Ok(false), "can insert non-finalized block" ); assert_eq!(cache.items.len(), 1, "only one slot should be present"); assert_eq!( cache .items .get(&ProposalKey { slot: Slot::new(three_epochs), proposer: 0 }) .expect("the three epochs slot should be present") .len(), 1, "only one proposer should be present" ); /* * Check that a prune doesnt wipe later blocks */ let two_epochs = E::slots_per_epoch() * 2; cache.prune(two_epochs.into()); assert_eq!( cache.finalized_slot, Slot::from(two_epochs), "finalized slot is updated" ); assert_eq!(cache.items.len(), 1, "only one slot should be present"); assert_eq!( cache 
.items .get(&ProposalKey { slot: Slot::new(three_epochs), proposer: 0 }) .expect("the three epochs slot should be present") .len(), 1, "only one proposer should be present" ); } #[test] fn simple_observations() { let mut cache = ObservedBlockProducers::default(); // Slot 0, proposer 0 let block_a = get_block(0, 0); let block_root_a = block_a.canonical_root(); assert_eq!( cache .proposer_has_been_observed(block_a.to_ref(), block_a.canonical_root()) .map(|x| x.proposer_previously_observed()), Ok(false), "no observation in empty cache" ); assert_eq!( cache .observe_proposal(block_root_a, block_a.to_ref()) .map(SeenBlock::proposer_previously_observed), Ok(false), "can observe proposer, indicates proposer unobserved" ); assert_eq!( cache .proposer_has_been_observed(block_a.to_ref(), block_a.canonical_root()) .map(|x| x.proposer_previously_observed()), Ok(true), "observed block is indicated as true" ); assert_eq!( cache .observe_proposal(block_root_a, block_a.to_ref()) .map(SeenBlock::proposer_previously_observed), Ok(true), "observing again indicates true" ); assert_eq!(cache.finalized_slot, 0, "finalized slot is zero"); assert_eq!(cache.items.len(), 1, "only one slot should be present"); assert_eq!( cache .items .get(&ProposalKey { slot: Slot::new(0), proposer: 0 }) .expect("slot zero should be present") .len(), 1, "only one proposer should be present" ); // Slot 1, proposer 0 let block_b = get_block(1, 0); let block_root_b = block_b.canonical_root(); assert_eq!( cache .proposer_has_been_observed(block_b.to_ref(), block_b.canonical_root()) .map(|x| x.proposer_previously_observed()), Ok(false), "no observation for new slot" ); assert_eq!( cache .observe_proposal(block_root_b, block_b.to_ref()) .map(SeenBlock::proposer_previously_observed), Ok(false), "can observe proposer for new slot, indicates proposer unobserved" ); assert_eq!( cache .proposer_has_been_observed(block_b.to_ref(), block_b.canonical_root()) .map(|x| x.proposer_previously_observed()), Ok(true), "observed 
block in slot 1 is indicated as true" ); assert_eq!( cache .observe_proposal(block_root_b, block_b.to_ref()) .map(SeenBlock::proposer_previously_observed), Ok(true), "observing slot 1 again indicates true" ); assert_eq!(cache.finalized_slot, 0, "finalized slot is zero"); assert_eq!(cache.items.len(), 2, "two slots should be present"); assert_eq!( cache .items .get(&ProposalKey { slot: Slot::new(0), proposer: 0 }) .expect("slot zero should be present") .len(), 1, "only one proposer should be present in slot 0" ); assert_eq!( cache .items .get(&ProposalKey { slot: Slot::new(1), proposer: 0 }) .expect("slot zero should be present") .len(), 1, "only one proposer should be present in slot 1" ); // Slot 0, proposer 1 let block_c = get_block(0, 1); let block_root_c = block_c.canonical_root(); assert_eq!( cache .proposer_has_been_observed(block_c.to_ref(), block_c.canonical_root()) .map(|x| x.proposer_previously_observed()), Ok(false), "no observation for new proposer" ); assert_eq!( cache .observe_proposal(block_root_c, block_c.to_ref()) .map(SeenBlock::proposer_previously_observed), Ok(false), "can observe new proposer, indicates proposer unobserved" ); assert_eq!( cache .proposer_has_been_observed(block_c.to_ref(), block_c.canonical_root()) .map(|x| x.proposer_previously_observed()), Ok(true), "observed new proposer block is indicated as true" ); assert_eq!( cache .observe_proposal(block_root_c, block_c.to_ref()) .map(SeenBlock::proposer_previously_observed), Ok(true), "observing new proposer again indicates true" ); assert_eq!(cache.finalized_slot, 0, "finalized slot is zero"); assert_eq!(cache.items.len(), 3, "three slots should be present"); assert_eq!( cache .items .iter() .filter(|(k, _)| k.slot == cache.finalized_slot) .count(), 2, "two proposers should be present in slot 0" ); assert_eq!( cache .items .iter() .filter(|(k, _)| k.slot == Slot::new(1)) .count(), 1, "only one proposer should be present in slot 1" ); } }
true
1455cb1831e476eae513ac5fc018e264df54e1be
Rust
optozorax/olymp
/templates/src/to_include/z_function.rs
UTF-8
940
2.984375
3
[]
no_license
/// Computes the Z-array of `input`.
///
/// `z[i]` is the length of the longest common prefix of `input` and the
/// suffix of `input` starting at position `i`; `z[0]` is left at 0.
fn z_function<T: PartialEq>(input: &[T]) -> Vec<usize> {
    let n = input.len();
    let mut z = vec![0usize; n];
    // [left, right] bounds the rightmost segment known to match a prefix.
    let mut left = 0usize;
    let mut right = 0usize;
    for pos in 1..n {
        let mut len = 0usize;
        // While inside the current Z-box, seed the match length from the
        // value already computed at the mirrored position.
        if let Some(remaining) = (right + 1).checked_sub(pos) {
            len = z[pos - left].min(remaining);
        }
        // Extend the match by direct comparison.
        while pos + len < n && input[len] == input[pos + len] {
            len += 1;
        }
        z[pos] = len;
        // pos >= 1, so this subtraction cannot underflow.
        let new_right = pos + len - 1;
        if new_right > right {
            left = pos;
            right = new_right;
        }
    }
    z
}

/// A vector bundled with its precomputed Z-array.
#[derive(Clone, Debug)]
struct ZVec<T> {
    vec: Vec<T>,
    z: Vec<usize>,
}

impl<T: PartialEq> FromIterator<T> for ZVec<T> {
    /// Collects the iterator into a `Vec` and computes its Z-array once.
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
        let collected: Vec<T> = iter.into_iter().collect();
        let z = z_function(&collected);
        ZVec { vec: collected, z }
    }
}
true
474a0612b87464b64999185040b47962828b9e46
Rust
jobdeng/rucene
/src/core/util/variant_value.rs
UTF-8
14,190
2.765625
3
[ "Apache-2.0" ]
permissive
// Copyright 2019 Zhizhesihai (Beijing) Technology Limited. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // See the License for the specific language governing permissions and // limitations under the License. use serde; use serde::ser::{SerializeMap, SerializeSeq}; use std::cmp::Ordering; use std::collections::HashMap; use std::fmt; use std::hash::{Hash, Hasher}; use core::util::numeric::Numeric; #[derive(Debug, Clone, Deserialize)] pub enum VariantValue { Bool(bool), Char(char), Short(i16), Int(i32), Long(i64), Float(f32), Double(f64), VString(String), Binary(Vec<u8>), Vec(Vec<VariantValue>), Map(HashMap<String, VariantValue>), } impl VariantValue { pub fn get_bool(&self) -> Option<bool> { match self { VariantValue::Bool(b) => Some(*b), _ => None, } } pub fn get_char(&self) -> Option<char> { match self { VariantValue::Char(c) => Some(*c), _ => None, } } pub fn get_short(&self) -> Option<i16> { match self { VariantValue::Short(s) => Some(*s), _ => None, } } pub fn get_int(&self) -> Option<i32> { match self { VariantValue::Int(i) => Some(*i), _ => None, } } pub fn get_long(&self) -> Option<i64> { match self { VariantValue::Long(l) => Some(*l), _ => None, } } pub fn get_numeric(&self) -> Option<Numeric> { match *self { VariantValue::Short(s) => Some(Numeric::Short(s)), VariantValue::Int(i) => Some(Numeric::Int(i)), VariantValue::Long(l) => Some(Numeric::Long(l)), VariantValue::Float(f) => Some(Numeric::Float(f)), VariantValue::Double(d) => Some(Numeric::Double(d)), _ => None, } } pub fn get_float(&self) -> Option<f32> { match self { VariantValue::Float(f) => Some(*f), _ => None, } } pub fn get_double(&self) -> Option<f64> { match self { 
VariantValue::Double(d) => Some(*d), _ => None, } } pub fn get_string(&self) -> Option<&str> { match self { VariantValue::VString(s) => Some(s.as_str()), _ => None, } } pub fn get_binary(&self) -> Option<&[u8]> { match self { VariantValue::Binary(b) => Some(b.as_slice()), _ => None, } } pub fn get_utf8_string(&self) -> Option<String> { match self { VariantValue::VString(s) => Some(s.clone()), VariantValue::Binary(b) => { if let Ok(s) = String::from_utf8(b.clone()) { Some(s) } else { None } } _ => None, } } // used for index sort check pub fn is_zero(&self) -> bool { match self { VariantValue::Int(i) => *i == 0, VariantValue::Long(i) => *i == 0, VariantValue::Float(i) => *i == 0.0, VariantValue::Double(i) => *i == 0.0, _ => { unreachable!(); } } } pub fn get_vec(&self) -> Option<&Vec<VariantValue>> { match self { VariantValue::Vec(v) => Some(v), _ => None, } } pub fn get_map(&self) -> Option<&HashMap<String, VariantValue>> { match self { VariantValue::Map(m) => Some(m), _ => None, } } } impl Eq for VariantValue {} impl fmt::Display for VariantValue { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { VariantValue::Bool(b) => write!(f, "{}", b), VariantValue::Char(c) => write!(f, "{}", c), VariantValue::Short(s) => write!(f, "{}s", s), VariantValue::Int(ival) => write!(f, "{}", ival), VariantValue::Long(lval) => write!(f, "{}", lval), VariantValue::Float(fval) => write!(f, "{:.3}", fval), VariantValue::Double(d) => write!(f, "{:.6}", d), VariantValue::VString(ref s) => write!(f, "{}", s), VariantValue::Binary(ref _b) => write!(f, "Binary(unprintable)"), VariantValue::Vec(ref v) => write!(f, "{:?}", v), VariantValue::Map(ref m) => write!(f, "{:?}", m), } } } impl serde::Serialize for VariantValue { fn serialize<S>(&self, serializer: S) -> ::std::result::Result<S::Ok, S::Error> where S: serde::Serializer, { match *self { VariantValue::Bool(b) => serializer.serialize_bool(b), VariantValue::Char(c) => serializer.serialize_char(c), VariantValue::Short(s) 
=> serializer.serialize_i16(s), VariantValue::Int(ival) => serializer.serialize_i32(ival), VariantValue::Long(lval) => serializer.serialize_i64(lval), VariantValue::Float(fval) => serializer.serialize_f32(fval), VariantValue::Double(d) => serializer.serialize_f64(d), VariantValue::VString(ref s) => serializer.serialize_str(s.as_str()), VariantValue::Binary(ref b) => serializer.serialize_bytes(b), VariantValue::Vec(ref vec) => { let mut seq = serializer.serialize_seq(Some(vec.len())).unwrap(); for v in vec { seq.serialize_element(v)?; } seq.end() } VariantValue::Map(ref m) => { let mut map = serializer.serialize_map(Some(m.len())).unwrap(); for (k, v) in m { map.serialize_entry(&k.to_string(), &v)?; } map.end() } } } } impl Hash for VariantValue { fn hash<H: Hasher>(&self, state: &mut H) { match *self { VariantValue::Bool(ref b) => b.hash(state), VariantValue::Char(ref c) => c.hash(state), VariantValue::Short(ref s) => s.hash(state), VariantValue::Int(ref i) => i.hash(state), VariantValue::Long(ref l) => l.hash(state), VariantValue::Float(ref f) => f.to_bits().hash(state), VariantValue::Double(ref d) => d.to_bits().hash(state), VariantValue::VString(ref s) => s.hash(state), VariantValue::Binary(ref v) => v.hash(state), _ => (), } } } impl PartialEq for VariantValue { fn eq(&self, other: &VariantValue) -> bool { match *self { VariantValue::Bool(ref b) => { if let VariantValue::Bool(ref o) = *other { b.eq(o) } else { false } } VariantValue::Char(ref c) => { if let VariantValue::Char(ref o) = *other { c.eq(o) } else { false } } VariantValue::Short(ref s) => { if let VariantValue::Short(ref o) = *other { s.eq(o) } else { false } } VariantValue::Int(ref i) => { if let VariantValue::Int(ref o) = *other { i.eq(o) } else { false } } VariantValue::Long(ref l) => { if let VariantValue::Long(ref o) = *other { l.eq(o) } else { false } } VariantValue::Float(ref f) => { if let VariantValue::Float(ref o) = *other { f.eq(o) } else { false } } VariantValue::Double(ref d) => { if let 
VariantValue::Double(ref o) = *other { d.eq(o) } else { false } } VariantValue::VString(ref s) => { if let VariantValue::VString(ref o) = *other { s.eq(o) } else { false } } VariantValue::Binary(ref v) => { if let VariantValue::Binary(ref o) = *other { v.eq(o) } else { false } } _ => unreachable!(), } } } impl Ord for VariantValue { fn cmp(&self, other: &Self) -> Ordering { match (self, other) { (&VariantValue::Bool(b1), &VariantValue::Bool(b2)) => b1.cmp(&b2), (&VariantValue::Char(c1), &VariantValue::Char(c2)) => c1.cmp(&c2), (&VariantValue::Short(v1), &VariantValue::Short(v2)) => v1.cmp(&v2), (&VariantValue::Int(v1), &VariantValue::Int(v2)) => v1.cmp(&v2), (&VariantValue::Long(v1), &VariantValue::Long(v2)) => v1.cmp(&v2), (&VariantValue::Float(v1), &VariantValue::Float(v2)) => v1.partial_cmp(&v2).unwrap(), (&VariantValue::Double(v1), &VariantValue::Double(v2)) => v1.partial_cmp(&v2).unwrap(), (&VariantValue::VString(ref s1), &VariantValue::VString(ref s2)) => s1.cmp(&s2), (&VariantValue::Binary(ref b1), &VariantValue::Binary(ref b2)) => b1.cmp(&b2), (_, _) => panic!("Non-comparable"), } } } impl PartialOrd for VariantValue { fn partial_cmp(&self, other: &VariantValue) -> Option<Ordering> { Some(self.cmp(other)) } } impl From<bool> for VariantValue { fn from(val: bool) -> Self { VariantValue::Bool(val) } } /// Implement the From<char> trait for VariantValue impl From<char> for VariantValue { fn from(val: char) -> Self { VariantValue::Char(val) } } /// Implement the From<i16> trait for VariantValue impl From<i16> for VariantValue { fn from(val: i16) -> Self { VariantValue::Short(val) } } impl From<i32> for VariantValue { fn from(val: i32) -> Self { VariantValue::Int(val) } } impl From<i64> for VariantValue { fn from(val: i64) -> Self { VariantValue::Long(val) } } impl From<f32> for VariantValue { fn from(val: f32) -> Self { VariantValue::Float(val) } } impl From<f64> for VariantValue { fn from(val: f64) -> Self { VariantValue::Double(val) } } impl<'a> From<&'a str> 
for VariantValue { fn from(val: &'a str) -> Self { VariantValue::VString(String::from(val)) } } impl<'a> From<&'a [u8]> for VariantValue { fn from(val: &'a [u8]) -> Self { VariantValue::Binary(val.to_vec()) } } impl From<Numeric> for VariantValue { fn from(val: Numeric) -> Self { debug_assert!(!val.is_null()); match val { Numeric::Byte(b) => VariantValue::Char(b as u8 as char), Numeric::Short(s) => VariantValue::Short(s), Numeric::Int(i) => VariantValue::Int(i), Numeric::Long(v) => VariantValue::Long(v), Numeric::Float(v) => VariantValue::Float(v), Numeric::Double(v) => VariantValue::Double(v), Numeric::Null => unreachable!(), } } } #[cfg(test)] mod tests { use super::*; #[test] fn variant_bool_test() { let b = VariantValue::Bool(true); let expr = format!("{}", b); assert_eq!(expr, "true"); } #[test] fn variant_char_test() { let c = VariantValue::Char('Z'); let expr = format!("{}", c); assert_eq!(expr, "Z"); } #[test] fn variant_short_test() { let s = VariantValue::Short(30); let expr = format!("{}", s); assert_eq!(expr, "30s"); } #[test] fn variant_int_test() { let ival = VariantValue::Int(287); let expr = format!("{}", ival); assert_eq!(expr, "287"); } #[test] fn variant_long_test() { let ival = VariantValue::Long(28_754_383); let expr = format!("{}", ival); assert_eq!(expr, "28754383"); } #[test] #[allow(clippy::approx_constant)] fn variant_float_test() { let fval = VariantValue::Float(3.141_593); let expr = format!("{}f", fval); assert_eq!(expr, "3.142f"); { let fval2 = VariantValue::Float(3.141_593); assert_eq!(fval.cmp(&fval2), Ordering::Equal); } { let fval2 = VariantValue::Float(2.141_593); assert_eq!(fval.cmp(&fval2), Ordering::Greater); } { let fval2 = VariantValue::Float(4.141_593); assert_eq!(fval.cmp(&fval2), Ordering::Less); } { let fval2 = VariantValue::Float(4.141_593); assert_eq!(fval.partial_cmp(&fval2), Some(Ordering::Less)); } } #[test] #[allow(clippy::approx_constant)] fn variant_double_test() { let dval = VariantValue::Double(3.141_592_653_5); 
let expr = format!("{}", dval); assert_eq!(expr, "3.141593"); } #[test] fn variant_string_test() { let strval = VariantValue::VString(String::from("hello world")); let expr = format!("{}", strval); assert_eq!(expr, "hello world"); } #[test] fn variant_binary_test() { let bval = VariantValue::Binary(vec![65u8, 66u8, 67u8]); let expr = format!("{}", bval); assert_eq!(expr, "Binary(unprintable)"); if let VariantValue::Binary(ref bvec) = bval { for (i, val) in bvec.iter().enumerate() { assert_eq!(*val, b'A' + i as u8); } } } }
true
602077669378cff53251beebf3556febeef04832
Rust
hiimtaylorjones/programming-kata
/karate/rust/src/lib.rs
UTF-8
3,031
3.671875
4
[]
no_license
use std::thread; // NOTE(review): appears unused; kept to avoid touching file-level imports.

/// Linear search: returns the index of `int` in `array`, or `-1` if absent.
///
/// Kept as the naive baseline the kata starts from; `binary_chop` below is
/// the logarithmic version.
pub fn chop(int: i32, array: &mut [i32]) -> i32 {
    match array.iter().position(|&x| x == int) {
        Some(index) => index as i32,
        None => -1,
    }
}

/// Binary search ("binary chop"): returns the index of `int` in the
/// ascending-sorted `array`, or `-1` if absent.
///
/// Bug fix: the previous implementation only split the slice in half and ran
/// a linear scan over each half — still O(n). This delegates to the standard
/// library's O(log n) `binary_search`.
pub fn binary_chop(int: i32, array: &mut [i32]) -> i32 {
    match array.binary_search(&int) {
        Ok(index) => index as i32,
        Err(_) => -1,
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn it_works() {
        assert_eq!(2 + 2, 4);
    }

    #[test]
    fn test_chop() {
        assert_eq!(-1, chop(3, &mut []));
        assert_eq!(-1, chop(3, &mut [1]));
        assert_eq!(0, chop(1, &mut [1]));

        assert_eq!(0, chop(1, &mut [1, 3, 5]));
        assert_eq!(1, chop(3, &mut [1, 3, 5]));
        assert_eq!(2, chop(5, &mut [1, 3, 5]));
        assert_eq!(-1, chop(0, &mut [1, 3, 5]));
        assert_eq!(-1, chop(2, &mut [1, 3, 5]));
        assert_eq!(-1, chop(4, &mut [1, 3, 5]));
        assert_eq!(-1, chop(6, &mut [1, 3, 5]));

        assert_eq!(0, chop(1, &mut [1, 3, 5, 7]));
        assert_eq!(1, chop(3, &mut [1, 3, 5, 7]));
        assert_eq!(2, chop(5, &mut [1, 3, 5, 7]));
        assert_eq!(3, chop(7, &mut [1, 3, 5, 7]));
        assert_eq!(-1, chop(0, &mut [1, 3, 5, 7]));
        assert_eq!(-1, chop(2, &mut [1, 3, 5, 7]));
        assert_eq!(-1, chop(4, &mut [1, 3, 5, 7]));
        assert_eq!(-1, chop(6, &mut [1, 3, 5, 7]));
        assert_eq!(-1, chop(8, &mut [1, 3, 5, 7]));
    }

    #[test]
    fn test_binary_chop() {
        let mut empty: [i32; 0] = [];
        let mut one: [i32; 1] = [1];
        let mut one_three_five: [i32; 3] = [1, 3, 5];
        let mut all: [i32; 4] = [1, 3, 5, 7];

        assert_eq!(-1, binary_chop(3, &mut empty));
        assert_eq!(-1, binary_chop(3, &mut one));
        assert_eq!(0, binary_chop(1, &mut one));

        assert_eq!(0, binary_chop(1, &mut one_three_five));
        assert_eq!(1, binary_chop(3, &mut one_three_five));
        assert_eq!(2, binary_chop(5, &mut one_three_five));
        assert_eq!(-1, binary_chop(0, &mut one_three_five));
        assert_eq!(-1, binary_chop(2, &mut one_three_five));
        assert_eq!(-1, binary_chop(4, &mut one_three_five));
        assert_eq!(-1, binary_chop(6, &mut one_three_five));

        assert_eq!(0, binary_chop(1, &mut all));
        assert_eq!(1, binary_chop(3, &mut all));
        assert_eq!(2, binary_chop(5, &mut all));
        assert_eq!(3, binary_chop(7, &mut all));
        assert_eq!(-1, binary_chop(0, &mut all));
        assert_eq!(-1, binary_chop(2, &mut all));
        assert_eq!(-1, binary_chop(4, &mut all));
        assert_eq!(-1, binary_chop(6, &mut all));
        assert_eq!(-1, binary_chop(8, &mut all));
    }
}
true
0c7c424bedb2b8d2e2c80c0ce33298e1c81be09f
Rust
IThawk/rust-project
/rust-master/src/test/ui/consts/const-binops.rs
UTF-8
2,396
2.9375
3
[ "MIT", "LicenseRef-scancode-other-permissive", "Apache-2.0", "BSD-3-Clause", "BSD-2-Clause", "NCSA" ]
permissive
// run-pass
// Compiler test: every binary operator must be evaluable in a constant
// (`static`) initializer, and the compile-time result must equal the
// runtime result checked in `main`.

// Approximate float equality (tolerance 1e-6) for the float statics.
macro_rules! assert_approx_eq {
    ($a:expr, $b:expr) => ({
        let (a, b) = (&$a, &$b);
        assert!((*a - *b).abs() < 1.0e-6, "{} is not approximately equal to {}", *a, *b);
    })
}

// Addition (signed, unsigned, float)
static A: isize = -4 + 3;
static A2: usize = 3 + 3;
static B: f64 = 3.0 + 2.7;

// Subtraction
static C: isize = 3 - 4;
static D: usize = 3 - 3;
static E: f64 = 3.0 - 2.7;

// Multiplication
static E2: isize = -3 * 3;
static F: usize = 3 * 3;
static G: f64 = 3.3 * 3.3;

// Division
static H: isize = 3 / -1;
static I: usize = 3 / 3;
static J: f64 = 3.3 / 3.3;

// Lazy boolean operators
static N: bool = true && false;
static O: bool = true || false;

// Bitwise AND / OR / XOR
static P: isize = 3 & 1;
static Q: usize = 1 & 3;
static R: isize = 3 | 1;
static S: usize = 1 | 3;
static T: isize = 3 ^ 1;
static U: usize = 1 ^ 3;

// Shifts
static V: isize = 1 << 3;

// NOTE: better shr coverage
static W: isize = 1024 >> 4;
static X: usize = 1024 >> 4;

// Comparison operators (int, negative int, float operands)
static Y: bool = 1 == 1;
static Z: bool = 1.0f64 == 1.0;

static AA: bool = 1 <= 2;
static AB: bool = -1 <= 2;
static AC: bool = 1.0f64 <= 2.0;

static AD: bool = 1 < 2;
static AE: bool = -1 < 2;
static AF: bool = 1.0f64 < 2.0;

static AG: bool = 1 != 2;
static AH: bool = -1 != 2;
static AI: bool = 1.0f64 != 2.0;

static AJ: bool = 2 >= 1;
static AK: bool = 2 >= -2;
static AL: bool = 1.0f64 >= -2.0;

static AM: bool = 2 > 1;
static AN: bool = 2 > -2;
static AO: bool = 1.0f64 > -2.0;

pub fn main() {
    assert_eq!(A, -1);
    assert_eq!(A2, 6);
    assert_approx_eq!(B, 5.7);

    assert_eq!(C, -1);
    assert_eq!(D, 0);
    assert_approx_eq!(E, 0.3);

    assert_eq!(E2, -9);
    assert_eq!(F, 9);
    assert_approx_eq!(G, 10.89);

    assert_eq!(H, -3);
    assert_eq!(I, 1);
    assert_approx_eq!(J, 1.0);

    assert_eq!(N, false);
    assert_eq!(O, true);

    assert_eq!(P, 1);
    assert_eq!(Q, 1);

    assert_eq!(R, 3);
    assert_eq!(S, 3);

    assert_eq!(T, 2);
    assert_eq!(U, 2);

    assert_eq!(V, 8);
    assert_eq!(W, 64);
    assert_eq!(X, 64);

    assert_eq!(Y, true);
    assert_eq!(Z, true);

    assert_eq!(AA, true);
    assert_eq!(AB, true);
    assert_eq!(AC, true);

    assert_eq!(AD, true);
    assert_eq!(AE, true);
    assert_eq!(AF, true);

    assert_eq!(AG, true);
    assert_eq!(AH, true);
    assert_eq!(AI, true);

    assert_eq!(AJ, true);
    assert_eq!(AK, true);
    assert_eq!(AL, true);

    assert_eq!(AM, true);
    assert_eq!(AN, true);
    assert_eq!(AO, true);
}
true
9d30573c3c0489c42491cfe80a0b6dbba7306a3b
Rust
mehcode/config-rs
/examples/static_env.rs
UTF-8
630
2.9375
3
[ "MIT", "Apache-2.0" ]
permissive
use config::Config; lazy_static::lazy_static! { #[derive(Debug)] pub static ref CONFIG: Config = Config::builder() .add_source(config::Environment::with_prefix("APP_NAME").separator("_")) .build() .unwrap(); } /// Get a configuration value from the static configuration object pub fn get<'a, T: serde::Deserialize<'a>>(key: &str) -> T { // You shouldn't probably do it like that and actually handle that error that might happen // here, but for the sake of simplicity, we do it like this here CONFIG.get::<T>(key).unwrap() } fn main() { println!("{:?}", get::<String>("foo")); }
true
0473e1a309f82a3691be1fec0a7c3b115f3337e7
Rust
ThermalSpan/rust-util
/src/num_util/mod.rs
UTF-8
1,086
3.546875
4
[]
no_license
use std::ops::Rem; use std::fmt; use std::error::Error; #[derive(Debug)] struct FactorError { description: String, } impl fmt::Display for FactorError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description) } } impl Error for FactorError { fn description(&self) -> &str { &self.description } fn cause(&self) -> Option<&Error> { None } } fn gcd(a: u32, b: u32) -> u32 { let mut r: [u32; 3] = [0; 3]; r[1] = a.rem(b); r[0] = b.rem(r[1]); while r[0] != 0 { r[2] = r[1]; r[1] = r[0]; r[0] = r[2].rem(r[1]); } r[1] } #[cfg(test)] mod tests { use super::*; #[test] fn gcd_1() { assert_eq!(gcd(6, 9), 3); } #[test] fn gcd_2() { } #[test] fn gcd_3() { assert_eq!(gcd(55, 15), 5); } #[test] fn gcd_4() { assert_eq!(gcd(906,755), 151); assert_eq!(gcd(1661,906), 151); assert_eq!(gcd(25821,1661), 151); assert_eq!(gcd(163231,135749), 151); } }
true
f8a8a0f00ec3a00d154ea117daf373059e86d449
Rust
fabianschuiki/moore
/src/vhdl/hir/expr.rs
UTF-8
6,733
2.734375
3
[ "Apache-2.0", "MIT" ]
permissive
// Copyright (c) 2016-2021 Fabian Schuiki

//! HIR expression nodes for VHDL: literal expressions, ranges, and the
//! traits used for type- and constant-evaluation.

#![deny(missing_docs)]

use num::{BigInt, BigRational, ToPrimitive};

use crate::common::errors::*;
use crate::common::SessionContext;
use crate::hir::prelude::*;
use crate::konst2::{Const2, FloatingConst, IntegerConst};
pub use crate::syntax::ast::Dir;
use crate::ty2::{UniversalIntegerType, UniversalRealType};

/// An expression.
///
/// See IEEE 1076-2008 section 9.
pub trait Expr2<'t>: Node<'t> {
    /// Determine the type of the expression.
    fn typeval(&self, tyctx: Option<&'t Type>, ctx: &ExprContext<'t>) -> Result<&'t Type>;

    /// Determine the constant value of the expression.
    ///
    /// Emits diagnostic errors if the expression has no constant value.
    fn constant_value(&self, ctx: &ExprContext<'t>) -> Result<&'t Const2<'t>>;
}

/// A context that provides the facilities to operate on expressions.
pub trait ExprContext<'t>:
    SessionContext
    + AllocInto<'t, IntegerConst<'t>>
    + AllocOwnedInto<'t, Const2<'t>>
    + AllocOwnedInto<'t, Type>
{
}

// Blanket impl: anything satisfying the session + arena-allocation bounds is
// automatically an `ExprContext`.
impl<'t, T> ExprContext<'t> for T where
    T: SessionContext
        + AllocInto<'t, IntegerConst<'t>>
        + AllocOwnedInto<'t, Const2<'t>>
        + AllocOwnedInto<'t, Type>
{
}

/// A literal expression.
#[derive(Debug)]
pub struct LitExpr {
    span: Span,
    value: LitExprValue,
}

/// The value of a literal expression.
#[derive(Debug)]
pub enum LitExprValue {
    /// The value of an integer literal.
    Integer(BigInt),
    /// The value of a floating-point literal.
    Float(BigRational),
}

impl LitExpr {
    /// Create a new integer literal expression.
    pub fn new_integer(span: Span, value: BigInt) -> LitExpr {
        LitExpr {
            span: span,
            value: LitExprValue::Integer(value),
        }
    }

    /// Create a new float literal expression.
    pub fn new_float(span: Span, value: BigRational) -> LitExpr {
        LitExpr {
            span: span,
            value: LitExprValue::Float(value),
        }
    }

    /// Return the constant value of the literal.
    pub fn value(&self) -> &LitExprValue {
        &self.value
    }

    /// Check if this is an integer literal.
    pub fn is_integer(&self) -> bool {
        match self.value {
            LitExprValue::Integer(..) => true,
            _ => false,
        }
    }

    /// Check if this is a floating-point literal.
    pub fn is_float(&self) -> bool {
        match self.value {
            LitExprValue::Float(..) => true,
            _ => false,
        }
    }

    /// Return the literal's integer value, or `None` if it is not an integer.
    pub fn integer_value(&self) -> Option<&BigInt> {
        match self.value {
            LitExprValue::Integer(ref v) => Some(v),
            _ => None,
        }
    }

    /// Return the literal's float value, or `None` if it is not an float.
    pub fn float_value(&self) -> Option<&BigRational> {
        match self.value {
            LitExprValue::Float(ref v) => Some(v),
            _ => None,
        }
    }
}

impl<'t> Node<'t> for LitExpr {
    fn span(&self) -> Span {
        self.span
    }

    fn desc_kind(&self) -> String {
        match self.value {
            LitExprValue::Integer(..) => "integer literal".into(),
            LitExprValue::Float(..) => "floating-point literal".into(),
        }
    }

    fn accept(&'t self, visitor: &mut Visitor<'t>) {
        visitor.visit_literal_expr(self);
    }

    // Literals are leaves: nothing to walk.
    fn walk(&'t self, _visitor: &mut Visitor<'t>) {}
}

impl<'t> Expr2<'t> for LitExpr {
    fn typeval(&self, _: Option<&'t Type>, _: &ExprContext<'t>) -> Result<&'t Type> {
        // Literals always carry a universal type; neither the type context nor
        // the expression context is consulted.
        Ok(match self.value {
            LitExprValue::Integer(..) => &UniversalIntegerType,
            LitExprValue::Float(..) => &UniversalRealType,
        })
    }

    fn constant_value(&self, ctx: &ExprContext<'t>) -> Result<&'t Const2<'t>> {
        Ok(ctx.alloc_owned(match self.value {
            LitExprValue::Integer(ref v) => IntegerConst::try_new(&UniversalIntegerType, v.clone())
                .emit(ctx)?
                .into_owned(),
            LitExprValue::Float(ref v) => {
                // Convert from BigRational to f64. This is ugly and stupid, but
                // good enough for the beginning.
                let f = v.numer().to_f64().unwrap() / v.denom().to_f64().unwrap();
                FloatingConst::try_new(&UniversalRealType, f)
                    .emit(ctx)?
                    .into_owned()
            }
        }))
    }
}

/// A range.
///
/// See IEEE 1076-2008 section 5.2.1.
///
/// ```text
/// range := range.attribute_name | simple_expression direction simple_expression
/// ```
#[derive(Debug)]
pub enum Range2<'t> {
    // Attr(AttrRef),
    /// An range given by two immediate values.
    Immediate(Span, Spanned<Dir>, &'t Expr2<'t>, &'t Expr2<'t>),
}

impl<'t> Range2<'t> {
    /// Determine the type of the range.
    ///
    /// This determines the type of the range's bounds and applies any necessary
    /// implicit casts to make them be of the same type.
    pub fn bound_type<C>(&self, ctx: C) -> Result<&'t Type>
    where
        C: ExprContext<'t> + Copy,
    {
        match *self {
            Range2::Immediate(span, _, l, r) => {
                // Evaluate both bounds before propagating either error, so a
                // diagnostic is emitted for each failing bound.
                let lt = l.typeval(None, &ctx);
                let rt = r.typeval(None, &ctx);
                let (lt, rt) = (lt?, rt?);
                if lt == rt {
                    Ok(lt)
                } else if lt.is_implicitly_castable(rt) {
                    // Cast towards whichever side the other can be coerced to.
                    Ok(rt)
                } else if rt.is_implicitly_castable(lt) {
                    Ok(lt)
                } else {
                    ctx.emit(
                        DiagBuilder2::error(format!(
                            "range bounds `{}` and `{}` have incompatible types",
                            l.span().extract(),
                            r.span().extract()
                        ))
                        .span(span)
                        .add_note(format!(" left bound type: {}", lt))
                        .add_note(format!("right bound type: {}", rt)),
                    );
                    Err(())
                }
            }
        }
    }

    /// Determine the constant value of the range.
    pub fn constant_value<C>(&self, ctx: C) -> Result<(Dir, &'t Const2<'t>, &'t Const2<'t>)>
    where
        C: ExprContext<'t> + Copy,
    {
        // Unify the bound types first, then cast both constant bounds to it.
        let ty = self.bound_type(ctx)?;
        match *self {
            Range2::Immediate(_, d, l, r) => {
                let lc = l.constant_value(&ctx).and_then(|x| x.cast(ty).emit(ctx));
                let rc = r.constant_value(&ctx).and_then(|x| x.cast(ty).emit(ctx));
                Ok((d.value, ctx.maybe_alloc(lc?), ctx.maybe_alloc(rc?)))
            }
        }
    }
}
true
a1debb1259142744d8d653f19172fc88f141f1d4
Rust
maxsnew/cargo-dot
/src/main.rs
UTF-8
4,725
2.703125
3
[]
no_license
// cargo-dot: render a Cargo.lock dependency graph in graphviz dot format.
extern crate cargo;
extern crate docopt;
extern crate dot;
extern crate rustc_serialize;

use cargo::core::{Resolve, SourceId, PackageId};
use docopt::Docopt;

use std::borrow::{Cow};
use std::convert::Into;
use std::env;
use std::io;
use std::io::Write;
use std::fs::File;
use std::path::{Path, PathBuf};

// Docopt usage string; doubles as the CLI parser specification.
// NOTE(review): the line breaks of this literal were lost in transit and have
// been restored to the conventional docopt layout — verify against upstream,
// since docopt parses this text to build the option table.
static USAGE: &'static str = "
Generate a graph of package dependencies in graphviz format

Usage: cargo dot [options]
       cargo dot --help

Options:
    -h, --help         Show this message
    -V, --version      Print version info and exit
    --lock-file=FILE   Specify location of input file, default \"Cargo.lock\"
    --dot-file=FILE    Output to file, default prints to stdout
    --source-labels    Use sources for the label instead of package names
";

// Command-line flags decoded by docopt (field names follow docopt's
// `flag_*` convention).
#[derive(RustcDecodable, Debug)]
struct Flags {
    flag_help: bool,
    flag_version: bool,
    flag_lock_file: String,
    flag_dot_file: String,
    flag_source_labels: bool,
}

fn main() {
    // NOTE(review): `argv` is rewritten here but never handed to `Docopt`,
    // which parses `env::args()` on its own — this block appears to have no
    // effect; confirm before relying on it.
    let mut argv: Vec<String> = env::args().collect();
    if argv.len() > 0 {
        argv[0] = "cargo".to_string();
    }

    let flags: Flags = Docopt::new(USAGE)
        .and_then(|d| d.decode())
        .unwrap_or_else(|e| e.exit());

    // An empty --dot-file means "write to stdout".
    let dot_f_flag = if flags.flag_dot_file.is_empty() {
        None
    } else {
        Some(flags.flag_dot_file)
    };
    let source_labels = flags.flag_source_labels;

    // Resolve the lockfile path to an absolute path, defaulting to ./Cargo.lock.
    let lock_path = unless_empty(flags.flag_lock_file, "Cargo.lock");
    let lock_path = Path::new(&lock_path);
    let lock_path_buf = absolutize(lock_path.to_path_buf());
    let lock_path = lock_path_buf.as_path();
    let proj_dir = lock_path.parent().unwrap(); // TODO: check for None
    let src_id = SourceId::for_path(&proj_dir).unwrap();
    // Panics if the lockfile cannot be read or does not exist.
    let resolved = cargo::ops::load_lockfile(&lock_path, &src_id).unwrap()
                       .expect("Lock file not found.");

    let mut graph = Graph::with_root(resolved.root(), source_labels);
    graph.add_dependencies(&resolved);

    match dot_f_flag {
        None => graph.render_to(&mut io::stdout()),
        Some(dot_file) => graph.render_to(&mut File::create(&Path::new(&dot_file)).unwrap())
    };
}

// Join a relative path onto the current working directory; pass absolute
// paths through unchanged.
fn absolutize(pb: PathBuf) -> PathBuf {
    if pb.as_path().is_absolute() {
        pb
    } else {
        std::env::current_dir().unwrap().join(&pb.as_path()).clone()
    }
}

// Substitute `default` when `s` is empty.
fn unless_empty(s: String, default: &str) -> String {
    if s.is_empty() {
        default.to_string()
    } else {
        s
    }
}

// Node handle: an index into `Graph::nodes`.
pub type Nd = usize;
// Directed edge as a pair of node indices (from, to).
pub type Ed = (usize, usize);

// Dependency graph; node 0 is always the root package.
pub struct Graph<'a> {
    nodes: Vec<&'a PackageId>,
    edges: Vec<Ed>,
    source_labels: bool
}

impl<'a> Graph<'a> {
    // Start a graph containing only the root package.
    pub fn with_root(root: &PackageId, source_labels: bool) -> Graph {
        Graph {
            nodes: vec![root],
            edges: vec![],
            source_labels: source_labels
        }
    }

    // Walk the resolved lockfile and record an edge for every dependency pair.
    pub fn add_dependencies(&mut self, resolved: &'a Resolve) {
        for crat in resolved.iter() {
            match resolved.deps(crat) {
                Some(crate_deps) => {
                    let idl = self.find_or_add(crat);
                    for dep in crate_deps {
                        let idr = self.find_or_add(dep);
                        self.edges.push((idl, idr));
                    };
                },
                None => { }
            }
        }
    }

    // Return the index of `new`, inserting it if unseen (linear scan; fine
    // for dependency-graph-sized inputs).
    fn find_or_add(&mut self, new: &'a PackageId) -> usize {
        for (i, id) in self.nodes.iter().enumerate() {
            if *id == new {
                return i
            }
        }
        self.nodes.push(new);
        self.nodes.len() - 1
    }

    // Emit the graph in graphviz dot format; panics on write failure.
    pub fn render_to<W: Write>(&'a self, output: &mut W) {
        match dot::render(self, output) {
            Ok(_) => {},
            Err(e) => panic!("error rendering graph: {}", e)
        }
    }
}

impl<'a> dot::Labeller<'a, Nd, Ed> for Graph<'a> {
    fn graph_id(&self) -> dot::Id<'a> {
        // Fall back to a generic id if the root name is not a valid graphviz id.
        dot::Id::new(self.nodes[0].name()).unwrap_or(dot::Id::new("dependencies").unwrap())
    }

    fn node_id(&self, n: &Nd) -> dot::Id {
        // unwrap is safe because N######## is a valid graphviz id
        dot::Id::new(format!("N{}", *n)).unwrap()
    }

    fn node_label(&'a self, i: &Nd) -> dot::LabelText<'a> {
        // --source-labels switches between package names and source URLs.
        if !self.source_labels {
            dot::LabelText::LabelStr(self.nodes[*i].name().into())
        } else {
            dot::LabelText::LabelStr(self.nodes[*i].source_id().url().to_string().into())
        }
    }
}

impl<'a> dot::GraphWalk<'a, Nd, Ed> for Graph<'a> {
    fn nodes(&self) -> dot::Nodes<'a, Nd> {
        (0..self.nodes.len()).collect()
    }
    fn edges(&self) -> dot::Edges<Ed> {
        Cow::Borrowed(&self.edges[..])
    }
    fn source(&self, &(s, _): &Ed) -> Nd { s }
    fn target(&self, &(_, t): &Ed) -> Nd { t }
}
true
4ae0715152794c3feee2031d226a81f2b4caeb7c
Rust
rickwebiii/RustHexEditor
/src/hex_edit/binary_file.rs
UTF-8
1,176
3.3125
3
[]
no_license
use std::fs::File; use std::io; use std::io::prelude::*; pub struct BinaryFile { _data: Vec<u8>, } #[derive(Debug)] pub enum BinaryFileErrorCode { CouldNotOpenFile { reason: io::Error }, CouldNotReadFile { reason: io::Error } } impl BinaryFile { pub fn open(file_path: &String) -> Result<BinaryFile, BinaryFileErrorCode> { match BinaryFile::load_file(&file_path) { Ok(x) => Ok(BinaryFile {_data: x}), Err(x) => Err(x) } } fn load_file(file_path: &String) -> Result<Vec<u8>, BinaryFileErrorCode> { let mut bytes: Vec<u8> = Vec::new(); let mut file: File = match File::open(file_path) { Ok(x) => x, Err(reason) => { return Err(BinaryFileErrorCode::CouldNotOpenFile {reason: reason}) } }; match file.read_to_end(&mut bytes) { Ok(_) => {}, Err(reason) => { return Err(BinaryFileErrorCode::CouldNotReadFile {reason: reason}) } }; Ok(bytes) } pub fn length(&self) -> u64 { self._data.len() as u64 } pub fn as_slice(&self) -> &[u8] { self._data.as_slice() } }
true
a0158f6928506a9684b539aa1cc8b509ebd9072f
Rust
White-Green/partial_const
/src/stable.rs
UTF-8
4,741
3.703125
4
[ "MIT" ]
permissive
/// A trait for handling constant and non-constant values in a common way
///
/// # Example
/// ```
/// # #[cfg(feature = "usize")] #[rustversion::since(1.51)] fn test() {
/// fn twice<T: partial_const::MayBeConst<usize>>(i: T) -> usize {
///     i.value() * 2
/// }
///
/// assert_eq!(twice(1usize), 2usize);
/// assert_eq!(twice(partial_const::ConstUsize::<1>::new()), 2usize);
/// # }
/// # #[cfg(not(feature = "usize"))] fn test(){}
/// # #[cfg(feature = "usize")] #[rustversion::not(since(1.51))] fn test(){}
/// # test();
/// ```
pub trait MayBeConst<T>: MayBeConstAT<Type=T> {}

/// A trait [MayBeConst] by associated type for internal trait bounds.
pub trait MayBeConstAT: Sized + Clone + Copy + Default + core::fmt::Debug + core::fmt::Display {
    // The underlying value type (e.g. `usize`); the self-referential bound
    // keeps the associated type fixed under projection.
    type Type: MayBeConstAT<Type=Self::Type>;
    // `true` for compile-time-constant wrappers, `false` for plain runtime values.
    const IS_CONST: bool;
    // Extract the runtime value.
    fn value(&self) -> Self::Type;
}

// Blanket impl: every `MayBeConstAT` is a `MayBeConst` of its own value type.
impl<T: MayBeConstAT> MayBeConst<T::Type> for T {}

/// A trait for putting equality constraints on constants.
/// It will be implemented if the constants are equal.
///
/// # Example
/// ```
/// # #[cfg(feature = "u8")] #[rustversion::since(1.51)] fn test() {
/// fn equal<A: partial_const::MayBeConst<u8>, B: partial_const::MayBeConst<u8>>(a: A, b: B) -> bool
///     where A: partial_const::Equals<B> {
///     a.value() == b.value()
/// }
///
/// assert!(equal(partial_const::ConstU8::<1>::new(), partial_const::ConstU8::<1>::new()));
/// assert!(equal(partial_const::ConstU8::<1>::new(), 1));
/// assert!(!equal(partial_const::ConstU8::<1>::new(), 2));
/// // assert!(equal(partial_const::ConstU8::<1>::new(), partial_const::ConstU8::<2>::new())); <- Compile Error
/// # }
/// # #[cfg(not(feature = "u8"))] fn test(){}
/// # #[cfg(feature = "u8")] #[rustversion::not(since(1.51))] fn test(){}
/// # test();
/// ```
pub trait Equals<T: MayBeConstAT>: MayBeConstAT<Type=T::Type> {
    /// The const side of two types.
    /// # Example
    /// ```
    /// # #[cfg(feature = "u16")] #[rustversion::since(1.51)] fn test() {
    /// assert_eq!(<partial_const::ConstU16<0> as partial_const::Equals<partial_const::ConstU16<0>>>::ConstSide::default(), partial_const::ConstU16::<0>::new());
    /// assert_eq!(<partial_const::ConstU16<0> as partial_const::Equals<u16>>::ConstSide::default(), partial_const::ConstU16::<0>::new());
    /// assert_eq!(<u16 as partial_const::Equals<partial_const::ConstU16<0>>>::ConstSide::default(), partial_const::ConstU16::<0>::new());
    /// assert_eq!(<u16 as partial_const::Equals<u16>>::ConstSide::default(), 0_u16);
    /// # }
    /// # #[cfg(not(feature = "u16"))] fn test(){}
    /// # #[cfg(feature = "u16")] #[rustversion::not(since(1.51))] fn test(){}
    /// # test();
    /// ```
    type ConstSide: MayBeConst<T::Type>;

    /// Return the const side value of two values if two values are equal.
    /// # Example
    /// ```
    /// # #[cfg(feature = "u32")] #[rustversion::since(1.51)] fn test() {
    /// # use partial_const::{Equals,ConstU32};
    /// assert_eq!(ConstU32::<0>::new().get_const_side(&ConstU32::<0>::new()), Some(ConstU32::<0>::new()));
    /// assert_eq!(ConstU32::<0>::new().get_const_side(&0_u32), Some(ConstU32::<0>::new()));
    /// assert_eq!(0_u32.get_const_side(&ConstU32::<0>::new()), Some(ConstU32::<0>::new()));
    /// assert_eq!(0_u32.get_const_side(&0_u32), Some(0_u32));
    ///
    /// assert_eq!(ConstU32::<0>::new().get_const_side(&1_u32), None);
    /// assert_eq!(1_u32.get_const_side(&ConstU32::<0>::new()), None);
    /// assert_eq!(0_u32.get_const_side(&1_u32), None);
    /// # }
    /// # #[cfg(not(feature = "u32"))] fn test(){}
    /// # #[cfg(feature = "u32")] #[rustversion::not(since(1.51))] fn test(){}
    /// # test();
    /// ```
    fn get_const_side(&self, rhs: &T) -> Option<Self::ConstSide>;
}

// Implements `MayBeConstAT` and reflexive `Equals` for a plain (non-const)
// primitive type `$t`, plus a small `#[cfg(test)]` module, in one stroke.
// Invoked once per supported primitive elsewhere in the crate.
macro_rules! impl_stable {
    ($t:tt) => {
        impl crate::MayBeConstAT for $t {
            type Type = $t;
            // A bare primitive is never a compile-time constant wrapper.
            const IS_CONST: bool = false;
            #[inline(always)]
            fn value(&self) -> $t {
                *self
            }
        }
        impl crate::Equals<$t> for $t {
            // Neither side is const, so the "const side" is the plain value.
            type ConstSide = $t;
            fn get_const_side(&self, rhs: &$t) -> Option<Self::ConstSide> {
                if *self == *rhs {
                    Some(*self)
                } else {
                    None
                }
            }
        }
        #[cfg(test)]
        mod test_stable {
            #[test]
            fn test_equals() {
                // Compile-time check: `$t` is `Equals` with itself.
                fn test<A: crate::MayBeConst<$t>, B: crate::MayBeConst<$t>>()
                    where A: crate::Equals<B> {}
                test::<$t, $t>();
            }
            #[test]
            fn test_is_const() {
                assert!(!<$t as crate::MayBeConstAT>::IS_CONST);
            }
        }
    }
}
true
19816651355e6d33a10592e62e56d38e6ff891a5
Rust
johannlilly/the-rust-programming-language_v2
/projects/hello_world/main.rs
UTF-8
107
2.578125
3
[]
no_license
fn main() { println!("Hello, world!"); // println! is a Rust macro. println without the ! is a function }
true
8a05bbeed9086377d26854898647af09b921e9ec
Rust
Neo-Ciber94/mattro-rs
/src/name_value.rs
UTF-8
7,283
3.59375
4
[ "MIT" ]
permissive
use crate::{lit_to_string, display_lit}; use syn::{Lit}; use std::str::FromStr; use std::fmt::{Display, Formatter, Write}; use std::hash::{Hash, Hasher}; /// Represents an attribute name-value: `name="value"`. #[derive(Debug, Clone)] pub struct NameValue { pub name: String, pub value: Value, } impl Display for NameValue { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { write!(f, "{}={}", self.name, self.value) } } impl Hash for NameValue { fn hash<H: Hasher>(&self, state: &mut H) { state.write(self.name.as_bytes()); } } impl Eq for NameValue {} impl PartialEq for NameValue { fn eq(&self, other: &Self) -> bool { self.name == other.name } } /// Represents a value for a `name-value` attribute. #[derive(Debug, Clone, Eq, PartialEq, Hash)] pub enum Value { /// A literal value: `#[attribute(name="value")]`. Literal(Lit), /// An array of literal values: `#[attribute(name=1,2,3,4)]`. Array(Vec<Lit>), } impl Value { /// Returns `true` if this value is a literal. pub fn is_literal(&self) -> bool { matches!(self, Value::Literal(_)) } /// Returns `true` if this value is an array. pub fn is_array(&self) -> bool { matches!(self, Value::Array(_)) } /// Returns `true` if this value is a `string` or `byte string` literal value. pub fn is_string(&self) -> bool { match self { Value::Literal(lit) => matches!(lit, Lit::Str(_) | Lit::ByteStr(_)), _ => false, } } /// Returns `true` if this value is a `char` literal value. pub fn is_char(&self) -> bool { matches!(self, Value::Literal(Lit::Char(_))) } /// Returns `true` if this value is a `bool` literal value. pub fn is_bool(&self) -> bool { matches!(self, Value::Literal(Lit::Bool(_))) } /// Returns `true` if this value is an `integer` or `byte` literal value. pub fn is_integer(&self) -> bool { match self { Value::Literal(lit) => matches!(lit, Lit::Int(_) | Lit::Byte(_)), _ => false, } } /// Returns `true` if this value is a `float` literal value. 
pub fn is_float(&self) -> bool { matches!(self, Value::Literal(Lit::Float(_))) } /// Returns `true` if this value is a numeric literal (integer or float). pub fn is_numeric(&self) -> bool { self.is_integer() || self.is_float() } /// Returns the `String` representation of this value or `None` if is not a string literal. pub fn to_string_literal(&self) -> Option<String> { if let Value::Literal(lit) = self { return match lit { Lit::Str(x) => Some(x.value()), Lit::ByteStr(x) => unsafe { Some(String::from_utf8_unchecked(x.value())) }, _ => None, }; } None } /// Returns the `char` representation of this value or `None` if is not a char literal. pub fn to_char_literal(&self) -> Option<char> { if let Value::Literal(lit) = self { return match lit { Lit::Char(x) => Some(x.value()), _ => None, }; } None } /// Returns the `bool` representation of this value or `None` if is not a bool literal. pub fn to_bool_literal(&self) -> Option<bool> { if let Value::Literal(lit) = self { return match lit { Lit::Bool(x) => Some(x.value), _ => None, }; } None } /// Returns the `byte` representation of this value or `None` if is not a byte literal. pub fn to_byte_literal(&self) -> Option<u8> { if let Value::Literal(lit) = self { return match lit { Lit::Byte(x) => Some(x.value()), _ => None, }; } None } /// Converts this value into a integer or `None` if is not an integer literal. pub fn to_integer_literal<N>(&self) -> Option<N> where N: FromStr, N::Err: Display, { match self { Value::Literal(lit) => match lit { Lit::Byte(n) => { let s = n.value().to_string(); N::from_str(s.as_str()).ok() } Lit::Int(n) => n.base10_parse().ok(), _ => None, }, _ => None, } } /// Converts this value into a float or `None` if is not a float literal. pub fn to_float_literal<N>(&self) -> Option<N> where N: FromStr, N::Err: Display, { match self { Value::Literal(Lit::Float(n)) => n.base10_parse().ok(), _ => None, } } /// Returns a reference to this value literal. 
pub fn as_literal(&self) -> Option<&Lit> { match self { Value::Literal(x) => Some(x), _ => None, } } /// Returns a reference to this value array of literals. pub fn as_array(&self) -> Option<&[Lit]> { match self { Value::Array(x) => Some(x.as_slice()), _ => None, } } /// Parses this value into the given type. /// /// # Returns None /// - `None` if the value is not a literal. /// - `None` if the parse fails. pub fn parse_literal<T: FromStr>(&self) -> Option<T> { match self { Value::Literal(x) => { let value = lit_to_string(x); T::from_str(&value).ok() } _ => None, } } /// Parses this value into a `Vec<T>` /// /// # Returns None /// - `None` if the value is not an array literal. /// - `None` if the parse fails. pub fn parse_array<T: FromStr>(&self) -> Option<Vec<T>> { match self { Value::Array(array) => { let mut ret = Vec::new(); for arg in array { let value = lit_to_string(arg); let n = T::from_str(&value).ok()?; ret.push(n); } Some(ret) } _ => None, } } /// Writes a `String` representation of this value using the given `Write`. /// /// # Arguments /// - `formatter` : The formatter used to write the data. /// - `use_array_brackets` : Wraps the arrays using brackets: `[1,2,3]`. pub fn display<W: Write>( &self, formatter: &mut W, use_array_brackets: bool, ) -> std::fmt::Result { match self { Value::Literal(lit) => display_lit(formatter, lit), Value::Array(array) => { let result = array .iter() .map(|s| lit_to_string(s)) .collect::<Vec<String>>(); if use_array_brackets { write!(formatter, "[{}]", result.join(", ")) } else { write!(formatter, "{}", result.join(", ")) } } } } } impl Display for Value { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { self.display(f, true) } }
true
4118fce1e550827e43bf6cce7015b94b98ee2f37
Rust
joseluis/bitarray
/src/serde_impl.rs
UTF-8
2,144
2.96875
3
[ "MIT" ]
permissive
use crate::BitArray; use core::fmt; use serde::{ de::{Error, Expected, SeqAccess, Visitor}, Deserialize, Deserializer, Serialize, Serializer, }; impl<const B: usize> Serialize for BitArray<B> { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { serializer.serialize_bytes(&self.bytes) } } impl<'de, const B: usize> Deserialize<'de> for BitArray<B> { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { deserializer.deserialize_bytes(BitArrayVisitor::<B>) } } struct BitArrayVisitor<const B: usize>; impl<'de, const B: usize> Visitor<'de> for BitArrayVisitor<B> { type Value = BitArray<B>; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { write!(formatter, "[u8; {}]", B) } fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error> where A: SeqAccess<'de>, { let mut arr = [0u8; B]; let mut ix = 0; // Continuously fill the array with more values. while let Some(value) = seq.next_element()? { if ix == B { return Err(Error::custom("bitarray: too many bytes in sequence")); } arr[ix] = value; ix += 1; } if ix != B { Err(Error::invalid_length(ix, &BitArrayExpectedBytes::<B>)) } else { Ok(BitArray::new(arr)) } } fn visit_bytes<E>(self, bytes: &[u8]) -> Result<Self::Value, E> where E: Error, { if bytes.len() != B { Err(Error::invalid_length( bytes.len(), &BitArrayExpectedBytes::<B>, )) } else { let mut bitarray = BitArray::<B>::zeros(); bitarray.bytes.copy_from_slice(bytes); Ok(bitarray) } } } struct BitArrayExpectedBytes<const B: usize>; impl<const B: usize> Expected for BitArrayExpectedBytes<B> { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { write!(formatter, "{} bytes", B) } }
true
947720be15e9ba3b2a9e0a8f3b416c5ddcace6ea
Rust
SuperiorJT/twilight
/http/src/request/application/command/create_guild_command/message.rs
UTF-8
2,360
3.109375
3
[ "ISC" ]
permissive
use super::super::CommandBorrowed; use crate::{ client::Client, error::Error, request::{Request, RequestBuilder}, response::ResponseFuture, routing::Route, }; use twilight_model::{ application::command::{Command, CommandType}, id::{ApplicationId, GuildId}, }; /// Create a message command in a guild. /// /// Creating a guild command with the same name as an already-existing guild /// command in the same guild will overwrite the old command. See [the discord /// docs] for more information. /// /// [the discord docs]: https://discord.com/developers/docs/interactions/application-commands#create-guild-application-command #[must_use = "requests must be configured and executed"] pub struct CreateGuildMessageCommand<'a> { application_id: ApplicationId, default_permission: Option<bool>, guild_id: GuildId, http: &'a Client, name: &'a str, } impl<'a> CreateGuildMessageCommand<'a> { pub(crate) const fn new( http: &'a Client, application_id: ApplicationId, guild_id: GuildId, name: &'a str, ) -> Self { Self { application_id, default_permission: None, guild_id, http, name, } } /// Whether the command is enabled by default when the app is added to a /// guild. pub const fn default_permission(mut self, default: bool) -> Self { self.default_permission = Some(default); self } fn request(&self) -> Result<Request, Error> { Request::builder(&Route::CreateGuildCommand { application_id: self.application_id.0, guild_id: self.guild_id.0, }) .json(&CommandBorrowed { application_id: Some(self.application_id), default_permission: self.default_permission, description: None, kind: CommandType::Message, name: self.name, options: None, }) .map(RequestBuilder::build) } /// Execute the request, returning a future resolving to a [`Response`]. /// /// [`Response`]: crate::response::Response pub fn exec(self) -> ResponseFuture<Command> { match self.request() { Ok(request) => self.http.request(request), Err(source) => ResponseFuture::error(source), } } }
true
44e74265af7d91f6016f958b5aa81ab2fa61a35c
Rust
llgoer/quickjs-rs
/src/bindings.rs
UTF-8
22,353
2.515625
3
[ "MIT" ]
permissive
use std::{ ffi::CString, os::raw::{c_int, c_void}, sync::Mutex, }; use libquickjs_sys as q; use crate::{callback::Callback, ContextError, ExecutionError, JsValue, ValueError}; // JS_TAG_* constants from quickjs. // For some reason bindgen does not pick them up. const TAG_STRING: i64 = -7; const TAG_OBJECT: i64 = -1; const TAG_INT: i64 = 0; const TAG_BOOL: i64 = 1; const TAG_NULL: i64 = 2; const TAG_UNDEFINED: i64 = 3; const TAG_EXCEPTION: i64 = 6; const TAG_FLOAT64: i64 = 7; /// Free a JSValue. /// This function is the equivalent of JS_FreeValue from quickjs, which can not /// be used due to being `static inline`. unsafe fn free_value(context: *mut q::JSContext, value: q::JSValue) { // All tags < 0 are garbage collected and need to be freed. if value.tag < 0 { // This transmute is OK since if tag < 0, the union will be a refcount // pointer. let ptr = value.u.ptr as *mut q::JSRefCountHeader; let pref: &mut q::JSRefCountHeader = &mut *ptr; pref.ref_count -= 1; if pref.ref_count <= 0 { q::__JS_FreeValue(context, value); } } } /// Serialize a Rust value into a quickjs runtime value. fn serialize_value(context: *mut q::JSContext, value: JsValue) -> Result<q::JSValue, ValueError> { let v = match value { JsValue::Null => q::JSValue { u: q::JSValueUnion { int32: 0 }, tag: TAG_NULL, }, JsValue::Bool(flag) => q::JSValue { u: q::JSValueUnion { int32: if flag { 1 } else { 0 }, }, tag: TAG_BOOL, }, JsValue::Int(val) => q::JSValue { u: q::JSValueUnion { int32: val }, tag: TAG_INT, }, JsValue::Float(val) => q::JSValue { u: q::JSValueUnion { float64: val }, tag: TAG_FLOAT64, }, JsValue::String(val) => { let qval = unsafe { q::JS_NewStringLen( context, val.as_ptr() as *const i8, val.len() as std::os::raw::c_int, ) }; if qval.tag == TAG_EXCEPTION { return Err(ValueError::Internal( "Could not create string in runtime".into(), )); } qval } JsValue::Array(values) => { // Allocate a new array in the runtime. 
let arr = unsafe { q::JS_NewArray(context) }; if arr.tag == TAG_EXCEPTION { return Err(ValueError::Internal( "Could not create array in runtime".into(), )); } for (index, value) in values.into_iter().enumerate() { let qvalue = match serialize_value(context, value) { Ok(qval) => qval, Err(e) => { // Make sure to free the array if a individual element // fails. unsafe { free_value(context, arr); } return Err(e); } }; let ret = unsafe { q::JS_DefinePropertyValueUint32( context, arr, index as u32, qvalue, q::JS_PROP_C_W_E as i32, ) }; if ret < 0 { // Make sure to free the array if a individual // element fails. unsafe { free_value(context, arr); } return Err(ValueError::Internal( "Could not append element to array".into(), )); } } arr } JsValue::Object(map) => { let obj = unsafe { q::JS_NewObject(context) }; if obj.tag == TAG_EXCEPTION { return Err(ValueError::Internal("Could not create object".into())); } for (key, value) in map { let ckey = make_cstring(key)?; let qvalue = serialize_value(context, value).map_err(|e| { // Free the object if a property failed. unsafe { free_value(context, obj); } e })?; let ret = unsafe { q::JS_SetPropertyStr(context, obj, ckey.as_ptr(), qvalue) }; if ret < 0 { // Free the object if a property failed. unsafe { free_value(context, obj); } return Err(ValueError::Internal( "Could not add add property to object".into(), )); } } obj } }; Ok(v) } fn deserialize_value( context: *mut q::JSContext, value: &q::JSValue, ) -> Result<JsValue, ValueError> { let r = value; match r.tag { // Int. TAG_INT => { let val = unsafe { r.u.int32 }; Ok(JsValue::Int(val)) } // Bool. TAG_BOOL => { let raw = unsafe { r.u.int32 }; let val = raw > 0; Ok(JsValue::Bool(val)) } // Null. TAG_NULL => Ok(JsValue::Null), // Undefined. TAG_UNDEFINED => Ok(JsValue::Null), // Float. TAG_FLOAT64 => { let val = unsafe { r.u.float64 }; Ok(JsValue::Float(val)) } // String. 
TAG_STRING => { let ptr = unsafe { q::JS_ToCStringLen(context, std::ptr::null::<i32>() as *mut i32, *r, 0) }; if ptr.is_null() { return Err(ValueError::Internal( "Could not convert string: got a null pointer".into(), )); } let cstr = unsafe { std::ffi::CStr::from_ptr(ptr) }; let s = cstr .to_str() .map_err(ValueError::InvalidString)? .to_string(); // Free the c string. unsafe { q::JS_FreeCString(context, ptr) }; Ok(JsValue::String(s)) } // Object. TAG_OBJECT => { let is_array = unsafe { q::JS_IsArray(context, *r) } > 0; if is_array { let length_name = make_cstring("length")?; let len_value = unsafe { let raw = q::JS_GetPropertyStr(context, *r, length_name.as_ptr()); match deserialize_value(context, &raw) { Ok(v) => { free_value(context, raw); v } Err(e) => { free_value(context, raw); return Err(e); } } }; let len = if let JsValue::Int(x) = len_value { x } else { return Err(ValueError::Internal( "Could not determine array length".into(), )); }; let mut values = Vec::new(); for index in 0..(len as usize) { let value_raw = unsafe { q::JS_GetPropertyUint32(context, *r, index as u32) }; if value_raw.tag == TAG_EXCEPTION { return Err(ValueError::Internal("Could not build array".into())); } match deserialize_value(context, &value_raw) { Ok(v) => { unsafe { free_value(context, value_raw) }; values.push(v) } Err(e) => { unsafe { free_value(context, value_raw) }; return Err(e); } } } Ok(JsValue::Array(values)) } else { Err(ValueError::Internal("Unsupported JS type: Object".into())) } } x => Err(ValueError::Internal(format!( "Unhandled JS_TAG value: {}", x ))), } } /// Helper for creating CStrings. 
fn make_cstring(value: impl Into<Vec<u8>>) -> Result<CString, ValueError> { CString::new(value).map_err(ValueError::StringWithZeroBytes) } type WrappedCallback = dyn Fn(c_int, *mut q::JSValue) -> q::JSValue; /// Taken from: https://s3.amazonaws.com/temp.michaelfbryan.com/callbacks/index.html /// /// Create a C wrapper function for a Rust closure to enable using it as a /// callback function in the Quickjs runtime. /// /// Both the boxed closure and the boxed data are returned and must be stored /// by the caller to guarantee they stay alive. /// /// TODO: use catch_unwind to prevent pancis. unsafe fn build_closure_trampoline<F>( closure: F, ) -> ((Box<WrappedCallback>, Box<q::JSValue>), q::JSCFunctionData) where F: Fn(c_int, *mut q::JSValue) -> q::JSValue + 'static, { unsafe extern "C" fn trampoline<F>( _ctx: *mut q::JSContext, _this: q::JSValue, argc: c_int, argv: *mut q::JSValue, _magic: c_int, data: *mut q::JSValue, ) -> q::JSValue where F: Fn(c_int, *mut q::JSValue) -> q::JSValue, { let closure_ptr = (*data).u.ptr; let closure: &mut F = &mut *(closure_ptr as *mut F); (*closure)(argc, argv) } let boxed_f = Box::new(closure); let data = Box::new(q::JSValue { u: q::JSValueUnion { ptr: (&*boxed_f) as *const F as *mut c_void, }, tag: TAG_NULL, }); ((boxed_f, data), Some(trampoline::<F>)) } /// OwnedValueRef wraps a Javascript value from the quickjs runtime. /// It prevents leaks by ensuring that the inner value is deallocated on drop. pub struct OwnedValueRef<'a> { context: &'a ContextWrapper, value: q::JSValue, } impl<'a> Drop for OwnedValueRef<'a> { fn drop(&mut self) { unsafe { free_value(self.context.context, self.value); } } } impl<'a> OwnedValueRef<'a> { pub fn new(context: &'a ContextWrapper, value: q::JSValue) -> Self { Self { context, value } } /// Get the inner JSValue without freeing in drop. /// /// Unsafe because the caller is responsible for freeing the value. 
//unsafe fn into_inner(mut self) -> q::JSValue { //let v = self.value; //self.value = q::JSValue { //u: q::JSValueUnion { int32: 0 }, //tag: TAG_NULL, //}; //v //} pub fn is_null(&self) -> bool { self.value.tag == TAG_NULL } pub fn is_exception(&self) -> bool { self.value.tag == TAG_EXCEPTION } pub fn is_object(&self) -> bool { self.value.tag == TAG_OBJECT } pub fn is_string(&self) -> bool { self.value.tag == TAG_STRING } pub fn to_string(&self) -> Result<String, ExecutionError> { let value = if self.is_string() { self.to_value()? } else { let raw = unsafe { q::JS_ToString(self.context.context, self.value) }; let value = OwnedValueRef::new(self.context, raw); if value.value.tag != TAG_STRING { return Err(ExecutionError::Exception( "Could not convert value to string".into(), )); } value.to_value()? }; Ok(value.as_str().unwrap().to_string()) } pub fn to_value(&self) -> Result<JsValue, ValueError> { self.context.to_value(&self.value) } } /// Wraps an object from the quickjs runtime. /// Provides convenience property accessors. 
pub struct OwnedObjectRef<'a> { value: OwnedValueRef<'a>, } impl<'a> OwnedObjectRef<'a> { pub fn new(value: OwnedValueRef<'a>) -> Result<Self, ValueError> { if value.value.tag != TAG_OBJECT { Err(ValueError::Internal("Expected an object".into())) } else { Ok(Self { value }) } } pub fn property(&'a self, name: &str) -> Result<OwnedValueRef<'a>, ExecutionError> { let cname = make_cstring(name)?; let raw = unsafe { q::JS_GetPropertyStr(self.value.context.context, self.value.value, cname.as_ptr()) }; if raw.tag == TAG_EXCEPTION { Err(ExecutionError::Internal(format!( "Exception while getting property '{}'", name ))) } else if raw.tag == TAG_UNDEFINED { Err(ExecutionError::Internal(format!( "Property '{}' not found", name ))) } else { Ok(OwnedValueRef::new(self.value.context, raw)) } } unsafe fn set_property_raw(&self, name: &str, value: q::JSValue) -> Result<(), ExecutionError> { let cname = make_cstring(name)?; let ret = q::JS_SetPropertyStr( self.value.context.context, self.value.value, cname.as_ptr(), value, ); if ret < 0 { Err(ExecutionError::Exception("Could not set property".into())) } else { Ok(()) } } // pub fn set_property(&self, name: &str, value: JsValue) -> Result<(), ExecutionError> { // let qval = self.value.context.serialize_value(value)?; // unsafe { self.set_property_raw(name, qval.value) } // } } /// Wraps a quickjs context. /// /// Cleanup of the context happens in drop. pub struct ContextWrapper { runtime: *mut q::JSRuntime, context: *mut q::JSContext, /// Stores callback closures and quickjs data pointers. /// This array is write-only and only exists to ensure the lifetime of /// the closure. // A Mutex is used over a RefCell because it needs to be unwind-safe. callbacks: Mutex<Vec<(Box<WrappedCallback>, Box<q::JSValue>)>>, } impl Drop for ContextWrapper { fn drop(&mut self) { unsafe { q::JS_FreeContext(self.context); q::JS_FreeRuntime(self.runtime); } } } impl ContextWrapper { /// Initialize a wrapper by creating a JSRuntime and JSContext. 
pub fn new(memory_limit: Option<usize>) -> Result<Self, ContextError> { let runtime = unsafe { q::JS_NewRuntime() }; if runtime.is_null() { return Err(ContextError::RuntimeCreationFailed); } // Configure memory limit if specified. if let Some(limit) = memory_limit { unsafe { q::JS_SetMemoryLimit(runtime, limit); } } let context = unsafe { q::JS_NewContext(runtime) }; if context.is_null() { return Err(ContextError::ContextCreationFailed); } Ok(Self { runtime, context, callbacks: Mutex::new(Vec::new()), }) } /// Reset the wrapper by creating a new context. pub fn reset(self) -> Result<Self, ContextError> { unsafe { q::JS_FreeContext(self.context); }; self.callbacks.lock().unwrap().clear(); let context = unsafe { q::JS_NewContext(self.runtime) }; if context.is_null() { return Err(ContextError::ContextCreationFailed); } let mut s = self; s.context = context; Ok(s) } pub fn serialize_value(&self, value: JsValue) -> Result<OwnedValueRef<'_>, ExecutionError> { let serialized = serialize_value(self.context, value)?; Ok(OwnedValueRef::new(self, serialized)) } // Deserialize a quickjs runtime value into a Rust value. fn to_value(&self, value: &q::JSValue) -> Result<JsValue, ValueError> { deserialize_value(self.context, value) } /// Get the global object. pub fn global(&self) -> Result<OwnedObjectRef<'_>, ExecutionError> { let global_raw = unsafe { q::JS_GetGlobalObject(self.context) }; let global_ref = OwnedValueRef::new(self, global_raw); let global = OwnedObjectRef::new(global_ref)?; Ok(global) } /// Get the last exception from the runtime, and if present, convert it to a ExceptionError. 
fn get_exception(&self) -> Option<ExecutionError> { let raw = unsafe { q::JS_GetException(self.context) }; let value = OwnedValueRef::new(self, raw); if value.is_null() { None } else { let err = if value.is_exception() { ExecutionError::Internal("Could get exception from runtime".into()) } else { match value.to_value() { Ok(e) => ExecutionError::Exception(e), Err(_) => match value.to_string() { Ok(strval) => { if strval.contains("out of memory") { ExecutionError::OutOfMemory } else { ExecutionError::Exception(JsValue::String(strval)) } } Err(_) => ExecutionError::Internal("Unknown exception".into()), }, } }; Some(err) } } /// Evaluate javascript code. pub fn eval<'a>(&'a self, code: &str) -> Result<OwnedValueRef<'a>, ExecutionError> { let filename = "script.js"; let filename_c = make_cstring(filename)?; let code_c = make_cstring(code)?; let value_raw = unsafe { q::JS_Eval( self.context, code_c.as_ptr(), code.len(), filename_c.as_ptr(), q::JS_EVAL_TYPE_GLOBAL as i32, ) }; let value = OwnedValueRef::new(self, value_raw); if value.is_exception() { let err = self .get_exception() .unwrap_or_else(|| ExecutionError::Exception("Unknown exception".into())); Err(err) } else { Ok(value) } } /// Call a JS function with the given arguments. pub fn call_function<'a>( &'a self, function: OwnedValueRef<'a>, args: Vec<OwnedValueRef<'a>>, ) -> Result<OwnedValueRef<'a>, ExecutionError> { let mut qargs = args.iter().map(|arg| arg.value).collect::<Vec<_>>(); let n = q::JSValue { u: q::JSValueUnion { int32: 0 }, tag: TAG_NULL, }; let qres_raw = unsafe { q::JS_Call( self.context, function.value, n, qargs.len() as i32, qargs.as_mut_ptr(), ) }; let qres = OwnedValueRef::new(self, qres_raw); if qres.is_exception() { let err = self .get_exception() .unwrap_or_else(|| ExecutionError::Exception("Unknown exception".into())); Err(err) } else { Ok(qres) } } /// Helper for executing a callback closure. 
fn exec_callback<F>( context: *mut q::JSContext, argc: c_int, argv: *mut q::JSValue, callback: &impl Callback<F>, ) -> Result<q::JSValue, ExecutionError> { let result = std::panic::catch_unwind(|| { let arg_slice = unsafe { std::slice::from_raw_parts(argv, argc as usize) }; let args = arg_slice .iter() .map(|raw| deserialize_value(context, raw)) .collect::<Result<Vec<_>, _>>()?; match callback.call(args) { Ok(Ok(result)) => { let serialized = serialize_value(context, result)?; Ok(serialized) } // TODO: better error reporting. Ok(Err(e)) => Err(ExecutionError::Exception(JsValue::String(e))), Err(e) => Err(e.into()), } }); match result { Ok(r) => r, Err(_e) => Err(ExecutionError::Internal("Callback panicked!".to_string())), } } /// Add a global JS function that is backed by a Rust function or closure. pub fn add_callback<'a, F>( &'a self, name: &str, callback: impl Callback<F> + 'static, ) -> Result<(), ExecutionError> { let argcount = callback.argument_count() as i32; let context = self.context; let wrapper = move |argc: c_int, argv: *mut q::JSValue| -> q::JSValue { match Self::exec_callback(context, argc, argv, &callback) { Ok(value) => value, // TODO: better error reporting. Err(e) => { let js_exception_value = match e { ExecutionError::Exception(e) => e, other => other.to_string().into(), }; let js_exception = serialize_value(context, js_exception_value).unwrap(); unsafe { q::JS_Throw(context, js_exception); } q::JSValue { u: q::JSValueUnion { int32: 0 }, tag: TAG_EXCEPTION, } } } }; let (pair, trampoline) = unsafe { build_closure_trampoline(wrapper) }; let data = (&*pair.1) as *const q::JSValue as *mut q::JSValue; self.callbacks.lock().unwrap().push(pair); let cfunc = unsafe { q::JS_NewCFunctionData(self.context, trampoline, argcount, 0, 1, data) }; if cfunc.tag != TAG_OBJECT { return Err(ExecutionError::Internal("Could not create callback".into())); } let global = self.global()?; unsafe { global.set_property_raw(name, cfunc)?; } Ok(()) } }
true
c19314e3fb7f34bc4c3830ba7aa43a75e044c10f
Rust
kmeisthax/retrogram
/src/arch/sm83/types.rs
UTF-8
6,544
2.921875
3
[]
no_license
//! Types used in modeling the SM83 use crate::arch::sm83::{dataflow, disassemble, prereq, trace}; use crate::arch::{ArchName, Architecture}; use crate::memory::{Memory, Pointer}; use crate::{analysis, ast, memory, reg}; use serde::{Deserialize, Serialize}; use std::collections::HashSet; use std::{fmt, result, str}; /// Enumeration of all architectural GBZ80 registers. /// /// Couple things to note: /// /// * We don't consider register pairs (e.g. BC, DE, HL) /// * F isn't considered special here /// * SP has been treated as a register pair and split into S and P. #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)] pub enum Register { A, B, C, D, E, H, L, F, S, P, } impl Register { pub fn prereqs_from_sym(s: &str) -> Vec<Requisite> { match s.to_ascii_lowercase().as_str() { "a" => vec![Register::A], "b" => vec![Register::B], "c" => vec![Register::C], "d" => vec![Register::D], "e" => vec![Register::E], "h" => vec![Register::H], "l" => vec![Register::L], "af" => vec![Register::A, Register::F], "bc" => vec![Register::B, Register::C], "de" => vec![Register::D, Register::E], "hl" => vec![Register::H, Register::L], "hld" => vec![Register::H, Register::L], "hli" => vec![Register::H, Register::L], "sp" => vec![Register::S, Register::P], _ => vec![], } .iter() .map(|r| Requisite::register(*r, 0xFF)) .collect() } pub fn into_operand<L>(self) -> ast::Operand<L> where L: ast::Literal, { match self { Self::A => ast::Operand::sym("a"), Self::B => ast::Operand::sym("b"), Self::C => ast::Operand::sym("c"), Self::D => ast::Operand::sym("d"), Self::E => ast::Operand::sym("e"), Self::H => ast::Operand::sym("h"), Self::L => ast::Operand::sym("l"), Self::F => ast::Operand::sym("f"), Self::S => ast::Operand::sym("s"), Self::P => ast::Operand::sym("p"), } } pub fn into_requisite(self) -> Requisite { Requisite::register(self, 0xFF) } } impl fmt::Display for Register { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { use Register::*; match 
self { A => write!(f, "A"), B => write!(f, "B"), C => write!(f, "C"), D => write!(f, "D"), E => write!(f, "E"), H => write!(f, "H"), L => write!(f, "L"), F => write!(f, "F"), S => write!(f, "S"), P => write!(f, "P"), } } } impl str::FromStr for Register { type Err = (); fn from_str(s: &str) -> result::Result<Self, Self::Err> { use Register::*; match s { "A" => Ok(A), "B" => Ok(B), "C" => Ok(C), "D" => Ok(D), "E" => Ok(E), "H" => Ok(H), "L" => Ok(L), "F" => Ok(F), "S" => Ok(S), "P" => Ok(P), _ => Err(()), } } } /// The type which represents a value contained in an SM83 register. pub type Value = u8; /// The type which represents an SM83 memory address. pub type PtrVal = u16; /// The type which represents a positive memory offset. pub type Offset = u16; /// The type which represents a signed value contained in an SM83 register. pub type SignedValue = i8; /// The type which represents data stored in memory as seen by the processor. pub type Data = u8; /// The compatible memory model type necessary to analyze GBz80 programs. pub type Bus = memory::Memory<Sm83>; /// The pointer type necessary to model GBz80 pointers. pub type BusAddress = memory::Pointer<PtrVal>; /// A trait which defines what assembler literals we need support for. pub trait Literal: ast::Literal + From<Value> + From<Offset> + From<memory::Pointer<PtrVal>> + From<i8> { } impl<L> Literal for L where L: ast::Literal + From<Value> + From<Offset> + From<memory::Pointer<PtrVal>> + From<i8> { } /// The AST type which represents disassembled code. /// /// Generic parameter `L` *must* match the Literal trait defined above, which /// is an extension of the generic AST literal trait. pub type Section<L> = ast::Section<L, PtrVal, Data, Offset>; /// The register state type which represents the execution state of a given /// SM83 program. pub type State = reg::State<Sm83>; /// The prerequisites necessary to execute a given SM83 program. 
pub type Requisite = analysis::Requisite<Sm83>; /// The trace log type which represents the past execution of a given SM83 /// program. pub type Trace = analysis::Trace<Sm83>; /// The disasm type which represents a successful disassembly of a single /// instruction. /// /// Generic parameter `L` *must* match the Literal trait defined above, which /// is an extension of the generic AST literal trait. pub type Disasm<L> = analysis::Disasm<L, PtrVal, Offset>; pub type Result<T> = analysis::Result<T, Sm83>; /// Architectural type for SM83 #[derive(Copy, Clone, Debug, PartialOrd, Ord, PartialEq, Eq, Serialize, Deserialize, Default)] pub struct Sm83(); impl Architecture for Sm83 { type Register = Register; type Word = Value; type SignedWord = SignedValue; type Byte = Data; type PtrVal = PtrVal; type Offset = Offset; fn name(&self) -> ArchName { ArchName::Sm83 } fn parse_architectural_contexts( _contexts: &mut &[&str], _ptr: &mut Pointer<Self::PtrVal>, ) -> Option<()> { Some(()) } fn disassemble<L>(&self, at: &Pointer<Self::PtrVal>, bus: &Bus) -> Result<Disasm<L>> where L: Literal, { disassemble(at, bus) } fn dataflow( &self, at: &BusAddress, bus: &Bus, ) -> Result<(HashSet<Requisite>, HashSet<Requisite>)> { dataflow(at, bus) } fn prerequisites( &self, at: Self::PtrVal, bus: &Memory<Self>, state: &State, ) -> Result<(HashSet<Requisite>, bool)> { prereq(at, bus, state) } fn trace( &self, at: Self::PtrVal, bus: &Memory<Self>, state: State, this_trace: &mut Trace, ) -> Result<(State, Self::PtrVal)> { trace(at, bus, state, this_trace) } }
true
aa69981a2790e35a1f80e08ba6a53115b3787785
Rust
jingfee/advent-of-code-rust
/src/y2020/day19.rs
UTF-8
6,415
3.125
3
[]
no_license
// Advent of Code 2020, day 19: validate messages against a grammar by
// compiling the grammar rules into a single (possibly recursive) regex.
// The `pcre2` crate is used because part two needs PCRE subroutine calls
// (`(?1)`), which Rust's standard `regex` crate does not support.
use crate::solver::Solver;
use itertools::Itertools;
use pcre2::bytes::Regex;
use std::io::prelude::*;
use std::io::BufReader;
use std::{collections::HashMap, fs::File};

pub struct Problem;

impl Solver for Problem {
    // Input is (rule number -> rule body, list of messages to validate).
    type Input = (HashMap<usize, String>, Vec<String>);
    type Output1 = usize;
    type Output2 = usize;

    fn parse_input(&self, file: File) -> (HashMap<usize, String>, Vec<String>) {
        let buf_reader = BufReader::new(file);
        let lines = buf_reader.lines().map(|l| l.unwrap()).collect();
        parse_lines(lines)
    }

    // Part one: plain grammar, no recursion.
    fn solve_part_one(&self, input: &(HashMap<usize, String>, Vec<String>)) -> usize {
        let regex = generate_regex(0, &input.0, false);
        validate_messages(regex, &input.1)
    }

    // Part two: rules 8 and 11 are replaced by their looping variants.
    fn solve_part_two(&self, input: &(HashMap<usize, String>, Vec<String>)) -> usize {
        let regex = generate_regex(0, &input.0, true);
        validate_messages(regex, &input.1)
    }
}

/// Recursively expands rule `rule_number` into a regex string.
///
/// When `with_replace` is set (part two):
/// * rule 8 (`42 | 42 8`) becomes `(?:42)+`;
/// * rule 11 (`42 31 | 42 11 31`) becomes `(42(?1)?31)` — the only capturing
///   group in the whole generated pattern, so the PCRE subroutine call `(?1)`
///   recursively matches balanced 42/31 pairs.
fn generate_regex(
    rule_number: usize,
    rules: &HashMap<usize, String>,
    with_replace: bool,
) -> String {
    let rule = &rules[&rule_number];
    // Terminal rules match a single literal character.
    if rule == "\"a\"" {
        return "a".to_string();
    }
    if rule == "\"b\"" {
        return "b".to_string();
    }
    // Alternatives are separated by " | "; each alternative is a sequence of
    // sub-rule numbers separated by spaces.
    rule.split(" | ")
        .map(|p| {
            let mut pipe_regex = String::from("(?:");
            let rec_rules = p
                .split(" ")
                .map(|r| {
                    let mut follow_regex = String::from("(?:");
                    let rule_number = r.parse::<usize>().unwrap();
                    follow_regex.push_str(&generate_regex(rule_number, rules, with_replace));
                    follow_regex.push_str(")");
                    follow_regex
                })
                .collect_vec();
            if with_replace && rule_number == 11 {
                // Capturing group + `(?1)` recursion: matches n copies of
                // rule 42 followed by exactly n copies of rule 31.
                pipe_regex.push_str(&format!("({}(?1)?{})", rec_rules[0], rec_rules[1]));
            } else {
                for rec_rule in rec_rules {
                    pipe_regex.push_str(&rec_rule);
                }
            }
            pipe_regex.push_str(")");
            if with_replace && rule_number == 8 {
                // Rule 8 loops on itself, i.e. "one or more of rule 42".
                pipe_regex.push_str("+");
            }
            pipe_regex
        })
        .join("|")
}

/// Counts how many messages are matched in full (`^...$`) by `regex`.
fn validate_messages(regex: String, messages: &Vec<String>) -> usize {
    let re = Regex::new(&format!(r"^{}$", regex)).unwrap();
    let mut valid_messages = 0;
    for message in messages {
        valid_messages = if re.is_match(message.as_bytes()).unwrap() {
            valid_messages + 1
        } else {
            valid_messages
        };
    }
    valid_messages
}

/// Splits the input lines into (rules, messages).
///
/// Rules run until the first blank line ("<number>: <body>" per line), the
/// remaining lines are messages. NOTE(review): assumes a blank separator line
/// exists; `lines[i]` would panic otherwise.
fn parse_lines(lines: Vec<String>) -> (HashMap<usize, String>, Vec<String>) {
    let mut rules = HashMap::new();
    let mut messages = Vec::new();
    let mut i = 0;
    loop {
        let line = &lines[i];
        if line == "" {
            i = i + 1;
            break;
        }
        let split = line.split(": ").collect::<Vec<&str>>();
        rules.insert(split[0].parse::<usize>().unwrap(), split[1].to_string());
        i = i + 1;
    }
    loop {
        if i == lines.len() {
            break;
        }
        messages.push(lines[i].to_string());
        i = i + 1;
    }
    (rules, messages)
}

#[cfg(test)]
mod tests {
    use crate::y2020::day19::*;

    // Uses the two worked examples from the puzzle statement.
    #[test]
    fn test_validate_messages() {
        let ex1 = parse_lines(vec![
            "0: 4 1 5".to_string(),
            "1: 2 3 | 3 2".to_string(),
            "2: 4 4 | 5 5".to_string(),
            "3: 4 5 | 5 4".to_string(),
            "4: \"a\"".to_string(),
            "5: \"b\"".to_string(),
            "".to_string(),
            "ababbb".to_string(),
            "bababa".to_string(),
            "abbbab".to_string(),
            "aaabbb".to_string(),
            "aaaabbb".to_string(),
        ]);
        let ex2 = parse_lines(vec![
            "42: 9 14 | 10 1".to_string(),
            "9: 14 27 | 1 26".to_string(),
            "10: 23 14 | 28 1".to_string(),
            "1: \"a\"".to_string(),
            "11: 42 31".to_string(),
            "5: 1 14 | 15 1".to_string(),
            "19: 14 1 | 14 14".to_string(),
            "12: 24 14 | 19 1".to_string(),
            "16: 15 1 | 14 14".to_string(),
            "31: 14 17 | 1 13".to_string(),
            "6: 14 14 | 1 14".to_string(),
            "2: 1 24 | 14 4".to_string(),
            "0: 8 11".to_string(),
            "13: 14 3 | 1 12".to_string(),
            "15: 1 | 14".to_string(),
            "17: 14 2 | 1 7".to_string(),
            "23: 25 1 | 22 14".to_string(),
            "28: 16 1".to_string(),
            "4: 1 1".to_string(),
            "20: 14 14 | 1 15".to_string(),
            "3: 5 14 | 16 1".to_string(),
            "27: 1 6 | 14 18".to_string(),
            "14: \"b\"".to_string(),
            "21: 14 1 | 1 14".to_string(),
            "25: 1 1 | 1 14".to_string(),
            "22: 14 14".to_string(),
            "8: 42".to_string(),
            "26: 14 22 | 1 20".to_string(),
            "18: 15 15".to_string(),
            "7: 14 5 | 1 21".to_string(),
            "24: 14 1".to_string(),
            "".to_string(),
            "abbbbbabbbaaaababbaabbbbabababbbabbbbbbabaaaa".to_string(),
            "bbabbbbaabaabba".to_string(),
            "babbbbaabbbbbabbbbbbaabaaabaaa".to_string(),
            "aaabbbbbbaaaabaababaabababbabaaabbababababaaa".to_string(),
            "bbbbbbbaaaabbbbaaabbabaaa".to_string(),
            "bbbababbbbaaaaaaaabbababaaababaabab".to_string(),
            "ababaaaaaabaaab".to_string(),
            "ababaaaaabbbaba".to_string(),
            "baabbaaaabbaaaababbaababb".to_string(),
            "abbbbabbbbaaaababbbbbbaaaababb".to_string(),
            "aaaaabbaabaaaaababaa".to_string(),
            "aaaabbaaaabbaaa".to_string(),
            "aaaabbaabbaaaaaaabbbabbbaaabbaabaaa".to_string(),
            "babaaabbbaaabaababbaabababaaab".to_string(),
            "aabbbbbaabbbaaaaaabbbbbababaaaaabbaaabba".to_string(),
        ]);
        let reg1 = generate_regex(0, &ex1.0, false);
        let reg2_no_replacement = generate_regex(0, &ex2.0, false);
        let reg2_with_replacement = generate_regex(0, &ex2.0, true);
        assert_eq!(validate_messages(reg1, &ex1.1), 2);
        assert_eq!(validate_messages(reg2_no_replacement, &ex2.1), 3);
        assert_eq!(validate_messages(reg2_with_replacement, &ex2.1), 12);
    }
}
true
9392d671f4ec3a8026a996a78282a9a90d41b2e3
Rust
melted/aoc2016
/day19/src/main.rs
UTF-8
1,103
3.296875
3
[]
no_license
/// Builds the initial circle of elves, numbered `1..=n`.
///
/// An entry holds the elf's own (1-based) number; a value of `0` later marks
/// an eliminated elf.
fn get_elves(n: usize) -> Vec<usize> {
    // Iterator collect instead of the manual push loop.
    (1..=n).collect()
}

/// Plays the white-elephant game (AoC 2016 day 19) and returns the number of
/// the winning elf.
///
/// * `part1 == true`: each elf steals from the elf directly to its left
///   (the classic Josephus problem with step 1).
/// * `part1 == false`: each elf steals from the elf directly across the
///   circle, i.e. `count / 2` living seats away.
fn play_game(n: usize, part1: bool) -> usize {
    let mut elves = get_elves(n);
    let mut count = n;
    let mut index = 0;
    loop {
        // How many living elves to skip before reaching the victim.
        let mut target = if part1 { 1 } else { count / 2 };
        let mut i = index;
        while target > 0 {
            i = (i + 1) % elves.len();
            if elves[i] > 0 {
                target -= 1;
            }
        }
        // Eliminate the victim in place.
        elves[i] = 0;
        count -= 1;
        if count == 1 {
            return elves[index];
        }
        // Advance to the next living elf. Whenever the cursor wraps, compact
        // the vector so the skip-scan above doesn't wade through zeroes.
        loop {
            index += 1;
            if index == elves.len() {
                println!("Compressing {} {}", elves.len(), count);
                elves = elves.into_iter().filter(|e| *e > 0).collect();
                index = 0;
            }
            if elves[index] > 0 {
                break;
            }
        }
    }
}

fn main() {
    let input = 3018458;
    println!("{}", play_game(input, true));
    println!("{}", play_game(input, false));
}
true
a1cd5d9ff345b2f3f4df7d86c6f71bd19b31a620
Rust
jacob-pro/arm-kernel
/hilevel/src/io/PL011.rs
UTF-8
2,714
2.703125
3
[]
no_license
#![allow(non_snake_case)]
//! Thin Rust wrapper over the C PL011 UART driver bindings, plus a file
//! descriptor implementation that exposes a UART as a readable/writable
//! stream with an interrupt-fed input buffer.

use crate::bindings;
use crate::bindings::{PL011_t, PL011_putc, PL011_getc};
use core::fmt::{Write, Error};
use core::result::Result;
use crate::io::descriptor::{FileDescriptor, FileDescriptorBase, IOResult, FileError};
use alloc::collections::VecDeque;

// Maximum number of received-but-unread bytes; input beyond this is dropped.
const KEYBOARD_BUFFER: usize = 4096;

/// Handle over a raw pointer to the C driver's PL011 device structure.
#[derive(Clone, Debug)]
pub struct PL011(*mut PL011_t);

// The two UART instances exposed by the C bindings.
// NOTE(review): assumes `bindings::UART0`/`UART1` are valid device pointers
// for the lifetime of the kernel — confirm against the C side.
pub fn UART0() -> PL011 {
    unsafe { PL011(bindings::UART0) }
}

pub fn UART1() -> PL011 {
    unsafe { PL011(bindings::UART1) }
}

impl PL011 {
    // Writes one byte via the C driver; `blocking` presumably selects
    // busy-wait vs drop-on-full behavior — semantics live in the C code.
    fn putc(&self, byte: u8, blocking: bool) {
        unsafe { PL011_putc(self.0, byte, blocking) };
    }

    // Reads one byte via the C driver; with `blocking == true` this
    // presumably waits until a byte is available — TODO confirm.
    fn getc(&self, blocking: bool) -> u8 {
        unsafe { PL011_getc(self.0, blocking) }
    }
}

// Allows `write!`/`writeln!` directly onto a UART; every byte is written
// blocking, and writing never fails.
impl Write for PL011 {
    fn write_str(&mut self, s: &str) -> Result<(), Error> {
        s.as_bytes().iter().for_each(|b| { self.putc(*b, true); });
        Ok(())
    }
}

/// A file descriptor backed by a PL011 UART, with independent read/write
/// permissions and a bounded buffer of received input.
#[derive(Debug)]
pub struct PL011FileDescriptor {
    internal: PL011,
    base: FileDescriptorBase,
    read: bool,          // descriptor opened for reading
    write: bool,         // descriptor opened for writing
    read_buffer: VecDeque<u8>, // bytes received via interrupt, not yet read
}

impl PL011FileDescriptor {
    /// Creates a descriptor over `internal`. At least one of `read`/`write`
    /// must be requested; violating this is a caller bug (asserted).
    pub fn new(internal: PL011, read: bool, write: bool) -> Self {
        assert!(read || write);
        PL011FileDescriptor {
            internal,
            base: Default::default(),
            read,
            write,
            read_buffer: Default::default()
        }
    }

    // Add chars to the input buffer, then notify any blocked readers
    // If the buffer is already full, the incoming byte is silently dropped
    // (it is still consumed from the UART).
    pub fn on_interrupt(&mut self) {
        let char = self.internal.getc(true);
        if self.read_buffer.len() < KEYBOARD_BUFFER {
            self.read_buffer.push_back(char);
            self.notify_pending_readers();
        }
    }
}

impl FileDescriptor for PL011FileDescriptor {
    fn base(&mut self) -> &mut FileDescriptorBase {&mut self.base}

    // This will return blocked until input is available
    // Copies as many buffered bytes as fit into `buffer`; if the internal
    // buffer runs dry first, returns the partial count with `blocked: true`.
    fn read(&mut self, buffer: &mut [u8]) -> Result<IOResult, FileError> {
        if !self.read { return Err(FileError::UnsupportedOperation) }
        let mut idx = 0;
        while idx < buffer.len() {
            if self.read_buffer.is_empty() {
                return Ok(IOResult{
                    bytes: idx,
                    blocked: true
                })
            } else {
                buffer[idx] = self.read_buffer.pop_front().unwrap();
                idx = idx + 1;
            }
        };
        Ok(IOResult{
            bytes: idx,
            blocked: false
        })
    }

    // Writes all of `data` to the UART, blocking per byte; never partial.
    fn write(&mut self, data: &[u8]) -> Result<IOResult, FileError> {
        if !self.write { return Err(FileError::UnsupportedOperation) }
        data.iter().for_each(|b| { self.internal.putc(*b, true); });
        Ok(IOResult{
            bytes: data.len(),
            blocked: false
        })
    }
}
true
ec617854da558698464ed35538f83e52871a4eb5
Rust
AgeManning/discv5-cli
/src/server/bootstrap.rs
UTF-8
2,281
2.984375
3
[ "MIT" ]
permissive
use std::{fs::File, io::BufReader, str::FromStr}; use discv5::{Discv5, Enr}; use serde::{Deserialize, Serialize}; /// The top level bootstrap object. #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Ord, PartialOrd, Hash)] pub struct BootstrapStore { /// The list of bootstrap nodes. pub data: Vec<BootstrapNode>, } /// A bootstrap node. #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Ord, PartialOrd, Hash)] pub struct BootstrapNode { /// The node's peer id. pub peer_id: String, /// The node's ENR. pub enr: String, /// The last seen p2p address. pub last_seen_p2p_address: String, /// The node's state. pub state: State, /// The node's direction. pub direction: Direction, } /// The direction of node connection. #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Ord, PartialOrd, Hash)] pub enum Direction { /// An inbound connection #[serde(rename = "inbound")] Inbound, /// An outbound connection #[serde(rename = "outbound")] Outbound, } /// The connection state. #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Ord, PartialOrd, Hash)] pub enum State { /// Connected state #[serde(rename = "connected")] Connected, /// Disconnected state #[serde(rename = "disconnected")] Disconnected, } /// Function to bootstrap peers using a JSON file. pub async fn boostrap(discv5: &mut Discv5, file: Option<String>) -> eyre::Result<()> { if let Some(f) = file { // Read the JSON bootstrap file let file = File::open(f)?; let reader = BufReader::new(file); let bootstrap_store: BootstrapStore = serde_json::from_reader(reader)?; // For each bootstrap node, try to connect to it. for node in bootstrap_store.data { // Skip over invalid enrs if let Ok(enr) = Enr::from_str(&node.enr) { let node_id = enr.node_id(); match discv5.add_enr(enr) { Err(_) => { /* log::warn!("Failed to bootstrap node with id: {node_id}") */ } Ok(_) => { log::debug!("Bootstrapped node: {node_id}"); } } } } } Ok(()) }
true
0733da369ef91bfbf642813824ac66a3da1ee141
Rust
kazu69/Scripts_Notes
/rust/basic_study/vendor_machine.rs
UTF-8
1,998
3.859375
4
[]
no_license
/// A drink that can be stocked in a [`VendingMachine`].
#[derive(Debug)]
struct Drink {
    name: String,
    price_in_yen: u32,
}

impl Drink {
    /// Creates a drink with the given name and price in yen.
    fn new(name: &str, price_in_yen: u32) -> Drink {
        Drink {
            name: name.to_string(),
            price_in_yen,
        }
    }
}

/// A vending machine holding a stock of drinks and the cash collected so far.
#[derive(Debug)]
struct VendingMachine {
    drinks: Vec<Drink>,
    cash_balance: u64,
}

impl VendingMachine {
    /// Creates an empty machine with no stock and no cash.
    fn new() -> VendingMachine {
        VendingMachine {
            drinks: Vec::new(),
            cash_balance: 0,
        }
    }

    /// Adds a drink to the machine's stock.
    fn add_drink(&mut self, drink: Drink) {
        self.drinks.push(drink)
    }

    /// Attempts to buy `drink_name` with `inserted_yen`.
    ///
    /// On success, returns `(change, Some(drink))` and adds the price to the
    /// machine's cash balance. If the drink is out of stock or the inserted
    /// amount does not cover its price, the whole amount is returned as
    /// change alongside `None`.
    fn buy_drink(&mut self, inserted_yen: u32, drink_name: &str) -> (u32, Option<Drink>) {
        // BUGFIX: the original required exact payment (`==`), which made the
        // demo purchase of a 120-yen drink with 150 yen fail. Overpayment is
        // now accepted and the difference is handed back as change.
        let position = self
            .drinks
            .iter()
            .position(|drink| drink.name == drink_name && inserted_yen >= drink.price_in_yen);
        // `remove` both returns the matched drink and takes it out of stock.
        let drink_option = position.map(|pos| self.drinks.remove(pos));
        // `as_ref` converts `&Option<Drink>`-style access so we can inspect
        // the price without giving up ownership of the drink.
        let drink_price = drink_option.as_ref().map(|drink| drink.price_in_yen).unwrap_or(0);
        // No underflow: on a sale the price is <= inserted_yen, otherwise it is 0.
        let change = inserted_yen - drink_price;
        self.cash_balance += u64::from(drink_price);
        (change, drink_option)
    }
}

fn main() {
    let drink = Drink::new("Dr.Papper", 120);
    let mut machine = VendingMachine::new();
    machine.add_drink(drink);

    let buy_result_1 = machine.buy_drink(150, "Dr.Papper");
    println!("Tried to buy a drink, got {:?}", buy_result_1);

    let buy_result_2 = machine.buy_drink(150, "Cider");
    println!("Tried to buy a drink, got {:?}", buy_result_2);

    let buy_result_3 = machine.buy_drink(150, "Grape Soda");
    println!("Tried to buy a drink, got {:?}", buy_result_3);

    println!("Machine after buying drinks: {:#?}", machine);
}
true
6c254e43e42683cdc84c513d2ad400f86370ff07
Rust
cpralea/xlang
/xlc/src/ast/token.rs
UTF-8
685
3.125
3
[ "MIT" ]
permissive
use common;

/// A single lexical token: its kind, the matched source text, and the
/// location in the source where it occurred.
pub struct Token {
    pub kind: TokenKind,
    pub value: String,
    pub location: common::Location,
}

impl Token {
    /// Creates a token from its kind, source text and source location.
    pub fn new(kind: TokenKind, value: String, location: common::Location) -> Token {
        // Field-init shorthand instead of the redundant `kind: kind` form.
        Token {
            kind,
            value,
            location,
        }
    }
}

/// A growable collection of tokens, as produced by the lexer.
pub type Tokens = common::Collection<Token>;

/// Every kind of token the language recognizes: operators, comparisons,
/// literals, identifiers, punctuation, and `Unknown` for unrecognized input.
#[derive(Debug, PartialEq, Eq, Hash)]
pub enum TokenKind {
    Add,
    And,
    Assign,
    Boolean,
    Div,
    Eq,
    Ge,
    Gt,
    Identifier,
    Integer,
    Le,
    LParen,
    Lt,
    Mul,
    Ne,
    Not,
    Or,
    String,
    Print,
    RParen,
    Separator,
    Sub,
    Unknown,
}
true
4a3af3cee9ca284a7a769ce908ebafb207ca1908
Rust
38/grass-demo
/grass-macros/src/ql/operator.rs
UTF-8
4,123
2.8125
3
[]
no_license
// Query-language operators for the grass proc-macro: each operator is parsed
// from the macro input and then expanded (`apply`) into a code fragment that
// acts on an upstream dataflow value.
use super::CodeGeneratorContext;
use quote::quote;
use std::fmt::{Debug, Formatter, Result as FmtResult};
use syn::{
    parenthesized,
    parse::{Parse, ParseStream},
    punctuated::Punctuated,
    visit_mut::VisitMut,
    Expr, Ident, LitInt, Result, Token,
};

// A single pipeline operator: `where (expr)`, `map (expr)`, or a generic
// method invocation `name(args...)`.
pub(crate) enum Operator {
    Where(Expr),
    Map(Expr),
    Invoke(Ident, Punctuated<Expr, Token![,]>),
}

// `syn` expression trees have no useful Debug here; print a fixed tag.
impl Debug for Operator {
    fn fmt(&self, f: &mut Formatter) -> FmtResult {
        write!(f, "Operator")
    }
}

// Mutable AST visitor that rewrites positional identifiers into tuple field
// accesses (see `rewrite_field_access`).
struct ClosureRewriter;

// If `node` is a bare identifier of the form `_K` (K a positive integer,
// not `_0`), rewrite it in place to the tuple access `_0.(K-1)` and return
// true; otherwise leave it untouched and return false. `_0` itself is the
// closure parameter and must not be rewritten.
fn rewrite_field_access(node: &mut Expr) -> bool {
    let id = match node {
        Expr::Path(path_expr) => {
            if let Some(ident) = path_expr.path.get_ident() {
                let ident_str = ident.to_string();
                // Reject anything that is not `_<digits>` (or is exactly `_0`).
                if !ident_str.starts_with('_')
                    || !ident_str[1..].chars().all(|x| x.is_digit(10))
                    || ident_str == "_0"
                {
                    return false;
                }
                // `_1` maps to field 0, `_2` to field 1, and so on.
                LitInt::new(
                    &format!("{}", ident_str[1..].parse::<usize>().unwrap() - 1),
                    ident.span(),
                )
            } else {
                return false;
            }
        }
        _ => {
            return false;
        }
    };
    let new_expr: Expr = syn::parse2(quote! {
        _0 . #id
    })
    .unwrap();
    *node = new_expr;
    true
}

impl VisitMut for ClosureRewriter {
    fn visit_expr_mut(&mut self, node: &mut Expr) {
        // Only recurse into sub-expressions that were not rewritten; a
        // rewritten node is final.
        if !rewrite_field_access(node) {
            syn::visit_mut::visit_expr_mut(self, node);
        }
    }
}

impl Operator {
    // Emits the code implementing this operator applied to `upstream`, and
    // returns the fresh identifier that names the operator's output.
    pub(crate) fn apply(&self, upstream: Ident, ctx: &mut CodeGeneratorContext) -> Ident {
        match self {
            Operator::Map(code) => {
                let id = ctx.fresh_id();
                let mut code = code.clone();
                // Rewrite `_K` references into `_0.(K-1)` before splicing.
                ClosureRewriter.visit_expr_mut(&mut code);
                ctx.append(quote! {
                    let #id = {
                        use grass::properties::*;
                        #upstream . map(|mut _0| { #code _0 })
                    };
                });
                id
            }
            Operator::Where(expr) => {
                let id = ctx.fresh_id();
                let mut expr = expr.clone();
                ClosureRewriter.visit_expr_mut(&mut expr);
                let code = quote! {
                    let #id = #upstream.filter(|_0| {
                        use grass::properties::*;
                        #expr
                    });
                };
                ctx.append(code);
                id
            }
            Operator::Invoke(method, arg) => {
                // Generic pass-through: call `upstream.method(args...)` from
                // the high-level API; arguments are spliced verbatim.
                let id = ctx.fresh_id();
                let code = quote! {
                    let #id = {
                        use grass::high_level_api::*;
                        #upstream . #method ( #arg )
                    };
                };
                ctx.append(code);
                id
            }
        }
    }
}

impl Parse for Operator {
    // Grammar: `where (expr)` | `map (expr)` | `ident (args, ...)`.
    // Speculative parses use `input.fork()` so nothing is consumed on failure.
    fn parse(input: ParseStream) -> Result<Self> {
        if let Ok(_) = input.fork().parse::<Token![where]>() {
            let _: Token![where] = input.parse()?;
            let inner;
            let _ = parenthesized!(inner in input);
            return Ok(Operator::Where(inner.parse()?));
        } else if let Ok(id) = input.fork().parse::<Ident>() {
            match id.to_string().as_str() {
                "map" => {
                    let _: Ident = input.parse()?;
                    let inner;
                    let _ = parenthesized!(inner in input);
                    return Ok(Operator::Map(inner.parse()?));
                }
                _ => {
                    let id = input.parse()?;
                    let inner;
                    parenthesized!(inner in input);
                    return Ok(Operator::Invoke(id, Punctuated::parse_terminated(&inner)?));
                }
            }
        } else {
            // NOTE(review): this is reachable for input that is neither
            // `where` nor an identifier; a proc-macro parser should return
            // `Err(input.error("expected operator"))` rather than panic.
            panic!("Invalid operator");
        }
    }
}
c3dcef3bead0ae82838e5f60c7751c033f73a457
Rust
nphyx/scrapsrl
/src/resource/asset/entity_template.rs
UTF-8
3,408
2.890625
3
[]
no_license
use serde::{Deserialize, Serialize};
use specs::{Builder, World};

use crate::component::*;

/// A serializable recipe for building an entity: each field is an optional
/// component, and any field left as `None` simply contributes nothing when
/// the entity is instantiated.
// `Default` is derived: every `Option` field defaults to `None`, which is
// exactly what the previous hand-written `impl Default` produced.
#[derive(Clone, Serialize, Deserialize, Default)]
pub struct EntityTemplate {
    brain: Option<AIBrain>,
    character: Option<Character>,
    colors: Option<Colors>,
    description: Option<Description>,
    icon: Option<IconRef>,
    notification: Option<NotificationInteraction>,
    solid: Option<Solid>,
}

#[allow(unused)]
impl EntityTemplate {
    /// Starts a fresh builder for a template.
    pub fn create() -> EntityTemplateBuilder {
        EntityTemplateBuilder::new()
    }

    /// Begins building an entity in `world` with every component this
    /// template defines; the caller finishes it with `.build()`.
    pub fn to_world<'a>(&self, world: &'a mut World) -> impl Builder + 'a {
        let mut builder = world.create_entity();
        if let Some(brain) = &self.brain {
            builder = builder.with(brain.clone());
        }
        if let Some(character) = &self.character {
            builder = builder.with(character.clone());
        }
        if let Some(colors) = &self.colors {
            builder = builder.with(colors.clone());
        }
        if let Some(description) = &self.description {
            builder = builder.with(description.clone());
        }
        if let Some(icon) = &self.icon {
            builder = builder.with(icon.clone());
        }
        if let Some(notification) = &self.notification {
            builder = builder.with(notification.clone());
        }
        // `Solid` is a marker component, so presence is all that matters.
        if self.solid.is_some() {
            builder = builder.with(Solid);
        }
        builder
    }
}

/// Fluent builder for [`EntityTemplate`].
#[allow(unused)]
pub struct EntityTemplateBuilder {
    template: EntityTemplate,
}

#[allow(unused)]
impl EntityTemplateBuilder {
    /// Starts from an empty template (all components absent).
    pub fn new() -> EntityTemplateBuilder {
        EntityTemplateBuilder {
            template: EntityTemplate::default(),
        }
    }

    /// Starts from an existing template.
    pub fn from(template: EntityTemplate) -> EntityTemplateBuilder {
        EntityTemplateBuilder { template }
    }

    /// Gives the entity a default AI brain.
    pub fn brain(&mut self) -> &mut EntityTemplateBuilder {
        self.template.brain = Some(AIBrain::default());
        self
    }

    /// Sets the character (glyph) component.
    pub fn character(&mut self, character: Character) -> &mut EntityTemplateBuilder {
        self.template.character = Some(character);
        self
    }

    /// Sets foreground and background colors.
    pub fn colors(&mut self, fg: Color, bg: Color) -> &mut EntityTemplateBuilder {
        self.template.colors = Some(Colors { fg, bg });
        self
    }

    /// Marks the entity as solid (blocks movement).
    pub fn solid(&mut self) -> &mut EntityTemplateBuilder {
        self.template.solid = Some(Solid);
        self
    }

    /// Sets the short and long descriptions.
    pub fn description(&mut self, short: &str, long: &str) -> &mut EntityTemplateBuilder {
        self.template.description = Some(Description {
            short: short.to_string(),
            long: long.to_string(),
        });
        self
    }

    /// Sets the icon by name.
    pub fn icon(&mut self, name: String) -> &mut EntityTemplateBuilder {
        self.template.icon = Some(IconRef { name });
        self
    }

    /// Sets the notification shown when the entity is interacted with.
    pub fn notification(&mut self, header: String, body: String) -> &mut EntityTemplateBuilder {
        self.template.notification = Some(NotificationInteraction { header, body });
        self
    }

    /// Finishes building and returns the template by value.
    pub fn build(&mut self) -> EntityTemplate {
        self.template.clone()
    }
}
true
4c9180da8b09a715c573d713610451c2ae0203f7
Rust
Azure/iotedge
/edgelet/edgelet-http-mgmt/src/module/restart_or_start_or_stop.rs
UTF-8
3,558
2.953125
3
[ "MIT" ]
permissive
// Copyright (c) Microsoft. All rights reserved.

// HTTP route handling POST /modules/<module>/{restart|start|stop}: it parses
// the module name and action from the URI and forwards the action to the
// module runtime.
pub(crate) struct Route<M>
where
    M: edgelet_core::ModuleRuntime + Send + Sync,
{
    runtime: std::sync::Arc<tokio::sync::Mutex<M>>,
    module: String,
    action: Action,
}

// Which lifecycle operation the URI requested.
#[cfg_attr(test, derive(Debug, PartialEq))]
enum Action {
    Restart,
    Start,
    Stop,
}

impl std::str::FromStr for Action {
    type Err = ();

    // Case-insensitive parse; anything other than the three known actions
    // is rejected (which makes `from_uri` return None → 404).
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let s = s.to_lowercase();
        match &s[..] {
            "restart" => Ok(Action::Restart),
            "start" => Ok(Action::Start),
            "stop" => Ok(Action::Stop),
            _ => Err(()),
        }
    }
}

#[async_trait::async_trait]
impl<M> http_common::server::Route for Route<M>
where
    M: edgelet_core::ModuleRuntime + Send + Sync,
{
    type ApiVersion = edgelet_http::ApiVersion;

    // This route is available from API version 2018-06-28 onwards.
    fn api_version() -> &'static dyn http_common::DynRangeBounds<Self::ApiVersion> {
        &((edgelet_http::ApiVersion::V2018_06_28)..)
    }

    type Service = crate::Service<M>;

    // Matches "/modules/<module>/<action>"; both segments are
    // percent-decoded, and an unknown action makes this return None.
    fn from_uri(
        service: &Self::Service,
        path: &str,
        _query: &[(std::borrow::Cow<'_, str>, std::borrow::Cow<'_, str>)],
        _extensions: &http::Extensions,
    ) -> Option<Self> {
        let uri_regex = regex::Regex::new("^/modules/(?P<module>[^/]+)/(?P<action>[^/]+)$")
            .expect("hard-coded regex must compile");
        let captures = uri_regex.captures(path)?;

        let module = &captures["module"];
        let module = percent_encoding::percent_decode_str(module)
            .decode_utf8()
            .ok()?;
        // Decoding may reintroduce leading slashes; strip them so the
        // runtime sees a bare module name.
        let module = module.trim_start_matches('/');

        let action = &captures["action"];
        let action = percent_encoding::percent_decode_str(action)
            .decode_utf8()
            .ok()?;
        let action = std::str::FromStr::from_str(&action).ok()?;

        Some(Route {
            runtime: service.runtime.clone(),
            module: module.to_owned(),
            action,
        })
    }

    type DeleteBody = serde::de::IgnoredAny;

    type PostBody = serde::de::IgnoredAny;
    // Performs the requested action; any request body is ignored. Returns
    // 204 No Content on success, or a runtime error response on failure.
    async fn post(self, _body: Option<Self::PostBody>) -> http_common::server::RouteResponse {
        let runtime = self.runtime.lock().await;

        match self.action {
            Action::Restart => runtime.restart(&self.module).await,
            Action::Start => runtime.start(&self.module).await,
            // `None` means stop without a custom timeout.
            Action::Stop => runtime.stop(&self.module, None).await,
        }
        .map_err(|err| edgelet_http::error::runtime_error(&*runtime, &err))?;

        Ok(http_common::server::response::no_content())
    }

    type PutBody = serde::de::IgnoredAny;
}

#[cfg(test)]
mod tests {
    use edgelet_test_utils::{test_route_err, test_route_ok};

    #[test]
    fn parse_uri() {
        // Valid URI: restart
        let route = test_route_ok!("/modules/testModule/restart");
        assert_eq!("testModule", &route.module);
        assert_eq!(super::Action::Restart, route.action);

        // Valid URI: start
        let route = test_route_ok!("/modules/testModule/start");
        assert_eq!("testModule", &route.module);
        assert_eq!(super::Action::Start, route.action);

        // Valid URI: stop
        let route = test_route_ok!("/modules/testModule/stop");
        assert_eq!("testModule", &route.module);
        assert_eq!(super::Action::Stop, route.action);

        // Invalid action
        test_route_err!("/modules/testModule/invalid");

        // Missing module name
        test_route_err!("/modules//restart");
    }
}
true
432a00a7a68e171a1e2b61713bbc005bf188421d
Rust
niguangye/os_626
/src/main.rs
UTF-8
2,556
2.578125
3
[]
no_license
#![no_std] // do not link the Rust standard library
#![no_main] // tell the Rust compiler we do not use the predefined entry point
#![feature(custom_test_frameworks)]
#![test_runner(os_626::test_runner)]
#![reexport_test_harness_main = "test_main"]

extern crate alloc;

use alloc::{boxed::Box, vec, vec::Vec, rc::Rc};
use core::panic::PanicInfo;
use os_626::println;
use bootloader::{
    BootInfo,
    entry_point
};

// The bootloader crate verifies the entry point's signature at compile time.
entry_point!(kernel_main);

// Kernel entry point: initializes the kernel, sets up paging and the heap,
// then exercises heap allocation (Box, Vec, Rc) as a smoke test.
fn kernel_main(boot_info: &'static BootInfo) -> ! {
    use os_626::allocator;
    use os_626::memory;
    use x86_64::{structures::paging::Page, VirtAddr};

    println!("Hello World{}", "!");

    os_626::init();

    // Build a page mapper and frame allocator from the memory map handed
    // over by the bootloader.
    let phys_mem_offset = VirtAddr::new(boot_info.physical_memory_offset);
    let mut mapper = unsafe { memory::init(phys_mem_offset)};
    let mut frame_allocator = unsafe {
        memory::BootInfoFrameAllocator::init(&boot_info.memory_map)
    };

    // Map an example page at virtual address 0 and write through it.
    let page = Page::containing_address(VirtAddr::new(0));
    memory::create_example_mapping(page,&mut mapper, &mut frame_allocator);

    let page_ptr: *mut u64 = page.start_address().as_mut_ptr();
    unsafe { page_ptr.offset(400).write_volatile(0x_f021_f077_f065_f04e)};
    // 0x_f021_f077_f065_f04e spells "New!" as VGA character/attribute pairs

    allocator::init_heap(&mut mapper, &mut frame_allocator)
        .expect("heap initialization failed");

    let heap_value = Box::new(41);
    println!("heap_value at {:p}", heap_value);

    let mut vec = Vec::new();
    for i in 0..500 {
        vec.push(i);
    }
    println!("vec at {:p}", vec.as_slice());

    // create a reference counted vector -> will be freed when count reaches 0
    let reference_counted = Rc::new(vec![1, 2, 3]);
    let cloned_reference = reference_counted.clone();
    println!("current reference count is {}", Rc::strong_count(&cloned_reference));
    core::mem::drop(reference_counted);
    println!("reference count is {} now", Rc::strong_count(&cloned_reference));

    #[cfg(test)]
    test_main();

    println!("It did not crash!");
    os_626::hlt_loop();
}

/// Called on panic.
/// The `PanicInfo` argument contains the file name, line number, and an
/// optional message describing where and why the panic happened.
/// This function never returns, so it is marked as a diverging function
/// (return type `!`).
#[cfg(not(test))]
#[panic_handler]
fn panic(info: &PanicInfo) -> ! {
    println!("{}", info);
    os_626::hlt_loop();
}

// Test builds report the panic to the test framework instead.
#[cfg(test)]
#[panic_handler]
fn panic(info: &PanicInfo) -> ! {
    os_626::test_panic_handler(info)
}
true
d342e57cf1c942cf658f72636734c0aeb95b4b47
Rust
asnimansari/razorpay-rs
/src/utils.rs
UTF-8
1,070
2.796875
3
[]
no_license
use ring::hmac::{self};
use data_encoding::HEXLOWER;

/// Verifies a Razorpay webhook signature.
///
/// `signature` is the lowercase-hex-encoded HMAC-SHA256 of `data` keyed with
/// `secret`, as delivered in the webhook's signature header.
///
/// # Errors
/// Returns an error if the signature is not valid lowercase hex or does not
/// match the expected MAC.
pub fn verify_webhook_signature(data: &str, signature: &str, secret: &str)
                                -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
    let key = hmac::Key::new(hmac::HMAC_SHA256, secret.as_bytes());
    // Decode the claimed signature; non-hex (including uppercase) input is
    // rejected, matching the old lowercase-string comparison's behavior.
    let claimed = HEXLOWER
        .decode(signature.as_bytes())
        .map_err(|_| "Signature do not match with expected value")?;
    // SECURITY: use ring's constant-time verification instead of comparing
    // hex strings with `==`, which leaks the match prefix length via timing.
    hmac::verify(&key, data.as_bytes(), &claimed)
        .map_err(|_| "Signature do not match with expected value")?;
    Ok(())
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_verify_hmac_signature() {
        setup();
        let secret = "supersecret";
        let signature = "e279c1f4cbe963b535a4fd0d54332f2d4aa0b76bb9abdb61dd1391c4804817df";
        let data = "asdf";
        let result = verify_webhook_signature(data, signature, &secret).ok();
        assert_eq!(result, Some(()));
    }

    fn setup() {
        dotenv::dotenv().ok();
        let _ = env_logger::builder().is_test(true).try_init();
    }
}
true
a3fff8caeba225dd63fc57d20b132118c0828a98
Rust
konradsz/adventofcode2018
/day20/map.rs
UTF-8
2,198
3.453125
3
[]
no_license
use std::collections::HashMap;
use std::cmp;
use std::fs;

/// A room position on the map grid.
#[derive(Clone, PartialEq, Eq, Hash)]
struct Coordinate {
    x: i32,
    y: i32,
}

/// Walks the route regex (already stripped of `^`/`$`) and returns
/// `(longest shortest-path in doors, number of rooms at least 1000 doors away)`.
///
/// Branches are handled with a stack of saved positions: `(` pushes the
/// current room, `|` rewinds to the innermost saved room, `)` pops it.
/// Every visited room keeps the minimum door count seen so far.
fn solve(input: &str) -> (i32, usize) {
    let mut current_position = Coordinate { x: 0, y: 0 };
    let mut previous_position = Coordinate { x: 0, y: 0 };
    let mut distances = HashMap::new();
    distances.insert(current_position.clone(), 0);

    let mut intersections = Vec::new();
    for c in input.chars() {
        match c {
            '(' => intersections.push(current_position.clone()),
            ')' => current_position = intersections.pop().unwrap(),
            '|' => current_position = intersections.last().unwrap().clone(),
            'N' | 'E' | 'S' | 'W' => {
                // Directions: N/S move along y, E/W along x.
                let (dx, dy) = match c {
                    'N' => (0, -1),
                    'E' => (1, 0),
                    'S' => (0, 1),
                    _ => (-1, 0), // 'W'
                };
                current_position.x += dx;
                current_position.y += dy;
                // Entry API: one lookup instead of contains_key + get_mut,
                // keeping the minimum distance to this room.
                let candidate = distances[&previous_position] + 1;
                let entry = distances
                    .entry(current_position.clone())
                    .or_insert(i32::MAX);
                *entry = cmp::min(*entry, candidate);
            }
            _ => panic!("Unknown element: {}", c),
        }
        previous_position = current_position.clone();
    }

    let max_number_of_doors = *distances.values().max().unwrap();
    let rooms_behind_at_least_1000_doors = distances.values().filter(|&&v| v >= 1000).count();
    (max_number_of_doors, rooms_behind_at_least_1000_doors)
}

fn main() {
    let content = fs::read_to_string("input").unwrap();
    let input = content.trim().trim_start_matches('^').trim_end_matches('$');
    let (part1, part2) = solve(input);
    println!("Part 1: {}", part1);
    println!("Part 2: {}", part2);
}
true
d7128e5eb69adaec0e4547337fd7391779991cf3
Rust
PSeitz/tantivy
/src/aggregation/agg_result.rs
UTF-8
9,447
2.984375
3
[ "MIT" ]
permissive
//! Contains the final aggregation tree.
//! This tree can be converted via the `into()` method from `IntermediateAggregationResults`.
//! This conversion computes the final result. For example: The intermediate result contains
//! intermediate average results, which is the sum and the number of values. The actual average is
//! calculated on the step from intermediate to final aggregation result tree.
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};

use super::bucket::GetDocCount;
use super::metric::{PercentilesMetricResult, SingleMetricResult, Stats};
use super::{AggregationError, Key};
use crate::TantivyError;

#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)]
/// The final aggregation result.
pub struct AggregationResults(pub FxHashMap<String, AggregationResult>);

impl AggregationResults {
    // Total number of buckets across all aggregations in this result tree.
    pub(crate) fn get_bucket_count(&self) -> u64 {
        self.0
            .values()
            .map(|agg| agg.get_bucket_count())
            .sum::<u64>()
    }

    // Looks up the metric value `agg_property` on the sub-aggregation `name`
    // (used e.g. for ordering buckets by a sub-metric).
    pub(crate) fn get_value_from_aggregation(
        &self,
        name: &str,
        agg_property: &str,
    ) -> crate::Result<Option<f64>> {
        if let Some(agg) = self.0.get(name) {
            agg.get_value_from_aggregation(name, agg_property)
        } else {
            // Validation is done during request parsing, so we can't reach this state.
            Err(TantivyError::InternalError(format!(
                "Can't find aggregation {name:?} in sub-aggregations"
            )))
        }
    }
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(untagged)]
/// An aggregation is either a bucket or a metric.
pub enum AggregationResult {
    /// Bucket result variant.
    BucketResult(BucketResult),
    /// Metric result variant.
    MetricResult(MetricResult),
}

impl AggregationResult {
    // Metrics contribute no buckets; bucket results count their own.
    pub(crate) fn get_bucket_count(&self) -> u64 {
        match self {
            AggregationResult::BucketResult(bucket) => bucket.get_bucket_count(),
            AggregationResult::MetricResult(_) => 0,
        }
    }

    // Only metric aggregations expose a single orderable value; asking a
    // bucket aggregation for one is an internal error (caught by validation).
    pub(crate) fn get_value_from_aggregation(
        &self,
        _name: &str,
        agg_property: &str,
    ) -> crate::Result<Option<f64>> {
        match self {
            AggregationResult::BucketResult(_bucket) => Err(TantivyError::InternalError(
                "Tried to retrieve value from bucket aggregation. This is not supported and \
                 should not happen during collection phase, but should be caught during validation"
                    .to_string(),
            )),
            AggregationResult::MetricResult(metric) => metric.get_value(agg_property),
        }
    }
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(untagged)]
/// MetricResult
pub enum MetricResult {
    /// Average metric result.
    Average(SingleMetricResult),
    /// Count metric result.
    Count(SingleMetricResult),
    /// Max metric result.
    Max(SingleMetricResult),
    /// Min metric result.
    Min(SingleMetricResult),
    /// Stats metric result.
    Stats(Stats),
    /// Sum metric result.
    Sum(SingleMetricResult),
    /// Percentiles metric result.
    Percentiles(PercentilesMetricResult),
}

impl MetricResult {
    // Extracts the f64 value used for bucket ordering. For `Stats`, the
    // `agg_property` selects which sub-statistic; percentiles can't be
    // used this way at all.
    fn get_value(&self, agg_property: &str) -> crate::Result<Option<f64>> {
        match self {
            MetricResult::Average(avg) => Ok(avg.value),
            MetricResult::Count(count) => Ok(count.value),
            MetricResult::Max(max) => Ok(max.value),
            MetricResult::Min(min) => Ok(min.value),
            MetricResult::Stats(stats) => stats.get_value(agg_property),
            MetricResult::Sum(sum) => Ok(sum.value),
            MetricResult::Percentiles(_) => Err(TantivyError::AggregationError(
                AggregationError::InvalidRequest("percentiles can't be used to order".to_string()),
            )),
        }
    }
}

/// BucketResult holds bucket aggregation result types.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(untagged)]
pub enum BucketResult {
    /// This is the range entry for a bucket, which contains a key, count, from, to, and optionally
    /// sub-aggregations.
    Range {
        /// The range buckets sorted by range.
        buckets: BucketEntries<RangeBucketEntry>,
    },
    /// This is the histogram entry for a bucket, which contains a key, count, and optionally
    /// sub-aggregations.
    Histogram {
        /// The buckets.
        ///
        /// If there are holes depends on the request, if min_doc_count is 0, then there are no
        /// holes between the first and last bucket.
        /// See [`HistogramAggregation`](super::bucket::HistogramAggregation)
        buckets: BucketEntries<BucketEntry>,
    },
    /// This is the term result
    Terms {
        /// The buckets.
        ///
        /// See [`TermsAggregation`](super::bucket::TermsAggregation)
        buckets: Vec<BucketEntry>,
        /// The number of documents that didn’t make it into the TOP N due to shard_size or size
        sum_other_doc_count: u64,
        #[serde(skip_serializing_if = "Option::is_none")]
        /// The upper bound error for the doc count of each term.
        doc_count_error_upper_bound: Option<u64>,
    },
}

impl BucketResult {
    // Sums the bucket counts of every entry (each entry counts itself plus
    // its nested sub-aggregation buckets).
    pub(crate) fn get_bucket_count(&self) -> u64 {
        match self {
            BucketResult::Range { buckets } => {
                buckets.iter().map(|bucket| bucket.get_bucket_count()).sum()
            }
            BucketResult::Histogram { buckets } => {
                buckets.iter().map(|bucket| bucket.get_bucket_count()).sum()
            }
            BucketResult::Terms {
                buckets,
                sum_other_doc_count: _,
                doc_count_error_upper_bound: _,
            } => buckets.iter().map(|bucket| bucket.get_bucket_count()).sum(),
        }
    }
}

/// This is the wrapper of buckets entries, which can be vector or hashmap
/// depending on if it's keyed or not.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(untagged)]
pub enum BucketEntries<T> {
    /// Vector format bucket entries
    Vec(Vec<T>),
    /// HashMap format bucket entries
    HashMap(FxHashMap<String, T>),
}

impl<T> BucketEntries<T> {
    // Uniform iteration over both representations; note the HashMap variant
    // yields entries in arbitrary order.
    fn iter<'a>(&'a self) -> Box<dyn Iterator<Item = &T> + 'a> {
        match self {
            BucketEntries::Vec(vec) => Box::new(vec.iter()),
            BucketEntries::HashMap(map) => Box::new(map.values()),
        }
    }
}

/// This is the default entry for a bucket, which contains a key, count, and optionally
/// sub-aggregations.
///
/// # JSON Format
/// ```json
/// {
///   ...
///     "my_histogram": {
///       "buckets": [
///         {
///           "key": "2.0",
///           "doc_count": 5
///         },
///         {
///           "key": "4.0",
///           "doc_count": 2
///         },
///         {
///           "key": "6.0",
///           "doc_count": 3
///         }
///       ]
///    }
///    ...
/// }
/// ```
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BucketEntry {
    #[serde(skip_serializing_if = "Option::is_none")]
    /// The string representation of the bucket.
    pub key_as_string: Option<String>,
    /// The identifier of the bucket.
    pub key: Key,
    /// Number of documents in the bucket.
    pub doc_count: u64,
    #[serde(flatten)]
    /// Sub-aggregations in this bucket.
    pub sub_aggregation: AggregationResults,
}

impl BucketEntry {
    // 1 for this bucket itself plus all nested buckets.
    pub(crate) fn get_bucket_count(&self) -> u64 {
        1 + self.sub_aggregation.get_bucket_count()
    }
}

impl GetDocCount for &BucketEntry {
    fn doc_count(&self) -> u64 {
        self.doc_count
    }
}
impl GetDocCount for BucketEntry {
    fn doc_count(&self) -> u64 {
        self.doc_count
    }
}

/// This is the range entry for a bucket, which contains a key, count, and optionally
/// sub-aggregations.
///
/// # JSON Format
/// ```json
/// {
///   ...
///     "my_ranges": {
///       "buckets": [
///         {
///           "key": "*-10",
///           "to": 10,
///           "doc_count": 5
///         },
///         {
///           "key": "10-20",
///           "from": 10,
///           "to": 20,
///           "doc_count": 2
///         },
///         {
///           "key": "20-*",
///           "from": 20,
///           "doc_count": 3
///         }
///       ]
///    }
///    ...
/// }
/// ```
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RangeBucketEntry {
    /// The identifier of the bucket.
    pub key: Key,
    /// Number of documents in the bucket.
    pub doc_count: u64,
    #[serde(flatten)]
    /// Sub-aggregations in this bucket.
    pub sub_aggregation: AggregationResults,
    /// The from range of the bucket. Equals `f64::MIN` when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub from: Option<f64>,
    /// The to range of the bucket. Equals `f64::MAX` when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub to: Option<f64>,
    /// The optional string representation for the `from` range.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub from_as_string: Option<String>,
    /// The optional string representation for the `to` range.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub to_as_string: Option<String>,
}

impl RangeBucketEntry {
    // 1 for this bucket itself plus all nested buckets.
    pub(crate) fn get_bucket_count(&self) -> u64 {
        1 + self.sub_aggregation.get_bucket_count()
    }
}
true
619d3a10748d65fd8414e040727e3ca00e863b2e
Rust
wuerges/iccad2020_rust
/src/nandgraph.rs
UTF-8
703
3.25
3
[]
no_license
/// A directed graph stored as adjacency lists. Each edge carries a boolean
/// payload `p` (e.g. an inversion/polarity flag in a NAND netlist).
#[derive(Debug, Default)]
pub struct Graph {
    // adj[u] holds the outgoing edges of vertex u as (payload, target) pairs.
    adj: Vec<Vec<(bool, usize)>>,
}

impl Graph {
    /// Adds a directed edge `u -> v` tagged with payload `p`.
    ///
    /// # Panics
    /// Panics if `u` is not a valid vertex index.
    pub fn add_edge(&mut self, u: usize, v: usize, p: bool) {
        self.adj[u].push((p, v));
    }

    /// Creates a graph with `n` vertices (indices `0..n`) and no edges.
    pub fn new_with_n(n: usize) -> Self {
        Graph {
            adj: vec![Vec::new(); n],
        }
    }

    /// Creates an empty graph with no vertices.
    pub fn new() -> Self {
        // Delegate to the derived Default instead of duplicating the literal.
        Graph::default()
    }

    /// Appends a fresh vertex and returns its index.
    pub fn create_vertex(&mut self) -> usize {
        let r = self.adj.len();
        self.adj.push(Vec::new());
        r
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn it_works() {
        let mut g = Graph::new();
        let u = g.create_vertex();
        let v = g.create_vertex();
        g.add_edge(u, v, true);
    }
}
true
d63ed431330a491f154cc0b2ee2c1f6cdce5b44f
Rust
caibirdme/leetcode_rust
/src/prob_538.rs
UTF-8
2,089
3.515625
4
[ "MIT" ]
permissive
// Definition for a binary tree node. #[derive(Debug, PartialEq, Eq)] pub struct TreeNode { pub val: i32, pub left: Option<Rc<RefCell<TreeNode>>>, pub right: Option<Rc<RefCell<TreeNode>>>, } impl TreeNode { #[inline] pub fn new(val: i32) -> Self { TreeNode { val, left: None, right: None } } } use std::rc::Rc; use std::cell::RefCell; impl Solution { pub fn convert_bst(mut root: Option<Rc<RefCell<TreeNode>>>) -> Option<Rc<RefCell<TreeNode>>> { let mut sum = 0; Self::dfs(&mut root, &mut sum); root } fn dfs(r: &mut Option<Rc<RefCell<TreeNode>>>, sum: &mut i32) { if r.is_none() { return; } { let rch = &mut r.as_mut().unwrap().borrow_mut().right; if rch.is_some() { Self::dfs(rch, sum); } } *sum += r.as_ref().unwrap().borrow().val; r.as_mut().unwrap().borrow_mut().val = *sum; let lch = &mut r.as_mut().unwrap().borrow_mut().left; if lch.is_some() { Self::dfs(lch, sum); } } } struct Solution; #[cfg(test)] mod tests { use super::*; #[test] fn test_convert_bst() { let root = Some(Rc::new(RefCell::new(TreeNode{ val: 5, left: Some(Rc::new(RefCell::new(TreeNode{ val: 2, left: None, right: None, }))), right: Some(Rc::new(RefCell::new(TreeNode{ val: 13, left: None, right: None, }))), }))); let expect = Some(Rc::new(RefCell::new(TreeNode{ val: 18, left: Some(Rc::new(RefCell::new(TreeNode{ val: 20, left: None, right: None, }))), right: Some(Rc::new(RefCell::new(TreeNode{ val: 13, left: None, right: None, }))), }))); assert_eq!(Solution::convert_bst(root), expect); } }
true
d20137fc57345f0571921f89ec9a2b844d377990
Rust
Elena-Qiu/InferSim
/src/config.rs
UTF-8
601
2.625
3
[ "LicenseRef-scancode-unknown-license-reference", "MIT", "Apache-2.0" ]
permissive
//! Typed accessors over the application configuration.

use std::fs;
use std::path::{Path, PathBuf};

use serde::Deserialize;

use crate::utils::app_config::AppConfig;
use crate::utils::prelude::*;

/// Newtype wrapping the configured output directory path.
#[derive(Deserialize)]
pub(crate) struct OutputDir(PathBuf);

impl OutputDir {
    /// Ensures the output directory exists on disk, then returns the full
    /// path of `name` inside it.
    pub fn file(&self, name: impl AsRef<Path>) -> Result<PathBuf> {
        let OutputDir(dir) = self;
        fs::create_dir_all(dir).kind(ErrorKind::InvalidConfig)?;
        let target = dir.join(name);
        Ok(target)
    }
}

/// Extension methods on [`AppConfig`] for the keys this crate reads.
pub(crate) trait AppConfigExt {
    fn output_dir(&self) -> Result<OutputDir>;
}

impl AppConfigExt for AppConfig {
    /// Reads the `output_dir` key from the configuration.
    fn output_dir(&self) -> Result<OutputDir> {
        self.get("output_dir")
    }
}
true
d9dc94c21926f337ef5921209d9322e0bdc3411e
Rust
hamaluik/project-obsidian-mill
/src/systems/hueshift.rs
UTF-8
519
2.8125
3
[ "Apache-2.0" ]
permissive
use specs::{Read, WriteStorage, System};

/// ECS system that continuously rotates the hue of every `Colour` component
/// at 90 degrees per second.
pub struct HueShift;

impl<'a> System<'a> for HueShift {
    type SystemData = (Read<'a, crate::DeltaTime>, WriteStorage<'a, crate::components::Colour>);

    fn run(&mut self, (delta, mut colours): Self::SystemData) {
        use specs::Join;
        let seconds = delta.0;
        for colour in (&mut colours).join() {
            // Advance by 90 deg/s, then wrap back into [0, 360).
            colour.hue += seconds * 90.0;
            if colour.hue >= 360.0 {
                colour.hue -= 360.0;
            }
        }
    }
}
true
9b66aa4a0facbc93d92f5573f56c91f74152cd35
Rust
tock/tock
/kernel/src/process_checker.rs
UTF-8
7,103
2.796875
3
[ "Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
// Licensed under the Apache License, Version 2.0 or the MIT License.
// SPDX-License-Identifier: Apache-2.0 OR MIT
// Copyright Tock Contributors 2022.

//! Traits and types for application credentials checkers, used to
//! decide whether an application can be loaded. See
//! the [AppID TRD](../../doc/reference/trd-appid.md).

pub mod basic;

use crate::config;
use crate::debug;
use crate::process::{Process, ShortID, State};
use crate::ErrorCode;
use tock_tbf::types::TbfFooterV2Credentials;

/// What an `AppCredentialsChecker` decided a particular application's credential
/// indicates about the runnability of an application binary.
#[derive(Debug)]
pub enum CheckResult {
    /// Accept the credential and run the binary.
    Accept,
    /// Go to the next credential or in the case of the last one fall
    /// back to the default policy.
    Pass,
    /// Reject the credential and do not run the binary.
    Reject,
}

/// Receives callbacks on whether a credential was accepted or not.
pub trait Client<'a> {
    /// Invoked when a credential check completes, with the verdict (or error),
    /// the credential that was checked, and the binary it covers.
    fn check_done(
        &self,
        result: Result<CheckResult, ErrorCode>,
        credentials: TbfFooterV2Credentials,
        binary: &'a [u8],
    );
}

/// Implements a Credentials Checking Policy.
pub trait AppCredentialsChecker<'a> {
    fn set_client(&self, _client: &'a dyn Client<'a>);
    /// Whether a process must present an approved credential to be runnable.
    fn require_credentials(&self) -> bool;
    /// Start checking `credentials` against `binary`; the outcome is delivered
    /// through [`Client::check_done`]. A synchronous refusal hands the
    /// credential and binary back in the `Err` variant.
    fn check_credentials(
        &self,
        credentials: TbfFooterV2Credentials,
        binary: &'a [u8],
    ) -> Result<(), (ErrorCode, TbfFooterV2Credentials, &'a [u8])>;
}

/// Default implementation: requires no credentials and refuses to check any
/// (`check_credentials` always returns `NOSUPPORT`).
impl<'a> AppCredentialsChecker<'a> for () {
    fn set_client(&self, _client: &'a dyn Client<'a>) {}
    fn require_credentials(&self) -> bool {
        false
    }
    fn check_credentials(
        &self,
        credentials: TbfFooterV2Credentials,
        binary: &'a [u8],
    ) -> Result<(), (ErrorCode, TbfFooterV2Credentials, &'a [u8])> {
        Err((ErrorCode::NOSUPPORT, credentials, binary))
    }
}

/// Return whether `process` can run given the identifiers, version
/// numbers, and execution state of other processes. A process is
/// runnable if its credentials have been approved, it is in the
/// Terminated state, and one of the following conditions hold:
///
/// 1. Its Application Identifier and Short ID are different from
///    all other processes, or
/// 2. For every other process that shares an Application Identifier
///    or Short ID:
///    2A. If it has a lower or equal version number, it is not running
///    2B. If it has a higher version number, it is in the Terminated,
///    CredentialsUnchecked, or CredentialsFailed state.
///
/// Case 2A is because if a lower version number is currently running, it
/// must be stopped before the higher version number can run. Case 2B is
/// so that a lower or equal version number can be run if the higher or equal
/// has been explicitly stopped (Terminated) or cannot run (Unchecked/Failed).
/// This second case is designed so that at boot the highest version number
/// will run (it will be in the CredentialsApproved state when this test
/// runs at boot), but it can be stopped to let a lower version number run.
pub fn is_runnable<AU: AppUniqueness>(
    process: &dyn Process,
    processes: &[Option<&dyn Process>],
    id_differ: &AU,
) -> bool {
    let len = processes.len();
    // A process is only runnable if it has approved credentials and
    // is not currently running.
    if process.get_state() != State::CredentialsApproved && process.get_state() != State::Terminated
    {
        return false;
    }

    // Note that this causes `process` to compare against itself;
    // however, since `process` is not running and its version number
    // is the same, it will not block itself from running.
    for i in 0..len {
        let other_process = processes[i];
        let other_name = other_process.map_or("None", |c| c.get_process_name());
        let blocks = other_process.map_or(false, |other| {
            let state = other.get_state();
            // "Approved" here means: not in a credential-rejecting state.
            let creds_approve =
                state != State::CredentialsUnchecked && state != State::CredentialsFailed;
            let different = id_differ.different_identifier(process, other)
                && other.short_app_id() != process.short_app_id();
            let newer = other.binary_version() > process.binary_version();
            let running = other.is_running();
            let runnable = state != State::CredentialsUnchecked
                && state != State::CredentialsFailed
                && state != State::Terminated;
            // Other will block process from running if
            // 1) Other has approved credentials, and
            // 2) Other has the same ShortID or Application Identifier, and
            // 3) Other has a higher version number *or* the same version number and is running
            if config::CONFIG.debug_process_credentials {
                debug!(
                    "[{}]: creds_approve: {}, different: {}, newer: {}, runnable: {}, running: {}",
                    other.get_process_name(),
                    creds_approve,
                    different,
                    newer,
                    runnable,
                    running
                );
            }
            creds_approve && !different && ((newer && runnable) || running)
        });
        if blocks {
            if config::CONFIG.debug_process_credentials {
                debug!(
                    "Process {} blocks {}",
                    other_name,
                    process.get_process_name()
                );
            }
            return false;
        }
    }
    if config::CONFIG.debug_process_credentials {
        debug!(
            "No process blocks {}: it is runnable",
            process.get_process_name()
        );
    }
    // No process blocks this one from running -- it's runnable
    true
}

/// Whether two processes have the same Application Identifier; two
/// processes with the same Application Identifier cannot run concurrently.
pub trait AppUniqueness {
    /// Returns whether `process_a` and `process_b` have a different identifier,
    /// and so can run concurrently. If this returns `false`, the kernel
    /// will not run `process_a` and `process_b` at the same time.
    fn different_identifier(&self, _process_a: &dyn Process, _process_b: &dyn Process) -> bool;
}

/// Default implementation: treats every process as having a distinct
/// identifier, so nothing is ever blocked on identifier grounds.
impl AppUniqueness for () {
    fn different_identifier(&self, _process_a: &dyn Process, _process_b: &dyn Process) -> bool {
        true
    }
}

/// Transforms Application Credentials into a corresponding ShortID.
pub trait Compress {
    fn to_short_id(&self, _credentials: &TbfFooterV2Credentials) -> ShortID;
}

/// Default implementation: every credential maps to `LocallyUnique`.
impl Compress for () {
    fn to_short_id(&self, _credentials: &TbfFooterV2Credentials) -> ShortID {
        ShortID::LocallyUnique
    }
}

/// Umbrella trait: a complete credentials policy both checks credentials and
/// can derive ShortIDs and compare application identifiers.
pub trait CredentialsCheckingPolicy<'a>: AppCredentialsChecker<'a> + Compress + AppUniqueness {}

/// Blanket implementation for any type that provides all three capabilities.
impl<'a, T: AppCredentialsChecker<'a> + Compress + AppUniqueness> CredentialsCheckingPolicy<'a>
    for T
{
}
true
ff06ff848568a1af790c718dc96de4948375c5ec
Rust
EFanZh/LeetCode
/src/problem_0488_zuma_game/bfs.rs
UTF-8
6,098
2.84375
3
[]
no_license
pub struct Solution;

// ------------------------------------------------------ snip ------------------------------------------------------ //

use std::collections::{HashSet, VecDeque};
use std::hash::{Hash, Hasher};

// BFS search state for the Zuma game: board plus remaining hand, packed into
// a fixed 21-byte buffer so `State` is `Copy` and cheap to hash.
#[derive(Clone, Copy, Eq)]
struct State {
    // First `board_length()` bytes are the board colors; the next
    // `hand_length()` bytes are the hand colors. The tail is unused padding
    // (ignored by eq/hash, which go through `fields()`).
    buffer: [u8; 21],
    // Packed lengths: high 3 bits = hand length, low 5 bits = board length.
    sizes: u8,
}

impl State {
    // Canonicalize orientation: a board and its mirror image are equivalent
    // states, so keep whichever orientation compares lexicographically smaller.
    fn normalize_board(board: &mut [u8]) {
        let half = board.len() / 2;
        if board[board.len() - half..].iter().rev().lt(&board[..half]) {
            board.reverse();
        }
    }

    // Hand order is irrelevant, so sort it for canonical hashing.
    fn normalize_hand(hand: &mut [u8]) {
        hand.sort_unstable();
    }

    fn new(board: &[u8], hand: &[u8]) -> Self {
        let mut buffer = [0; 21];
        let board_buffer = &mut buffer[..board.len()];
        board_buffer.copy_from_slice(board);
        Self::normalize_board(board_buffer);
        let hand_buffer = &mut buffer[board.len()..board.len() + hand.len()];
        hand_buffer.copy_from_slice(hand);
        Self::normalize_hand(hand_buffer);
        Self {
            buffer,
            sizes: ((hand.len() as u8) << 5) | (board.len() as u8),
        }
    }

    fn board_length(&self) -> usize {
        usize::from(self.sizes & 31)
    }

    fn hand_length(&self) -> usize {
        usize::from(self.sizes >> 5)
    }

    // Returns (board, hand) slices into the packed buffer.
    fn fields(&self) -> (&[u8], &[u8]) {
        let board_length = self.board_length();
        self.buffer[..board_length + self.hand_length()].split_at(board_length)
    }

    // Enumerates every successor state: insert each distinct hand color at
    // each board position, then simulate the cascade of "3+ identical in a
    // row vanishes" eliminations with a run-length-encoded stack.
    fn nexts(&self) -> impl Iterator<Item = Self> + '_ {
        let (board, hand) = self.fields();
        hand.iter()
            .copied()
            .enumerate()
            // Hand is sorted, so equal colors are adjacent; skip duplicates.
            .filter(move |&(i, insert_color)| hand.get(i.wrapping_sub(1)).copied() != Some(insert_color))
            .flat_map(move |(i, insert_color)| {
                let next_hand = (&hand[..i], &hand[i + 1..]);
                let next_hand_length = hand.len() - 1;
                (0..=board.len()).map(move |insert_index| {
                    let (board_left, board_right) = board.split_at(insert_index);
                    // Stack of (color, run length) pairs.
                    let mut stack = [(0, 0); 21];
                    let mut stack_length = 0_usize;
                    for color in board_left
                        .iter()
                        .copied()
                        .chain(Some(insert_color))
                        .chain(board_right.iter().copied())
                    {
                        loop {
                            if let Some((top_color, top_count)) = stack.get_mut(stack_length.wrapping_sub(1)) {
                                if *top_color == color {
                                    *top_count += 1;
                                    break;
                                } else if *top_count > 2 {
                                    // Completed run of 3+: pop it, then retry
                                    // the current color against the new top
                                    // (this is what makes eliminations cascade).
                                    stack_length -= 1;
                                } else {
                                    stack[stack_length] = (color, 1);
                                    stack_length += 1;
                                    break;
                                }
                            } else {
                                stack[stack_length] = (color, 1);
                                stack_length += 1;
                                break;
                            }
                        }
                    }
                    // The final (top) run may also be eliminable.
                    if stack[stack_length - 1].1 > 2 {
                        stack_length -= 1;
                    }
                    // Re-expand the surviving runs (all of length 1 or 2 at
                    // this point) into a flat board.
                    let mut buffer = [0; 21];
                    let mut board_length = 0;
                    for &(color, count) in &stack[..stack_length] {
                        buffer[board_length] = color;
                        board_length += 1;
                        if count == 2 {
                            buffer[board_length] = color;
                            board_length += 1;
                        }
                    }
                    let (board_buffer, rest_buffer) = buffer.split_at_mut(board_length);
                    Self::normalize_board(board_buffer);
                    let hand_buffer = rest_buffer[..next_hand_length].split_at_mut(next_hand.0.len());
                    hand_buffer.0.copy_from_slice(next_hand.0);
                    hand_buffer.1.copy_from_slice(next_hand.1);
                    Self {
                        buffer,
                        sizes: (next_hand_length as u8) << 5 | (board_length as u8),
                    }
                })
            })
    }
}

// Equality and hashing go through `fields()` so padding bytes beyond the
// recorded lengths never influence the result.
impl PartialEq for State {
    fn eq(&self, other: &Self) -> bool {
        PartialEq::eq(&self.fields(), &other.fields())
    }
}

impl Hash for State {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.fields().hash(state);
    }
}

impl Solution {
    /// Level-by-level BFS over game states; the depth at which an empty board
    /// first appears equals the minimum number of inserted balls. Returns -1
    /// when the board cannot be cleared.
    pub fn find_min_step(board: String, hand: String) -> i32 {
        let mut depth = 1;
        let node = State::new(board.as_bytes(), hand.as_bytes());
        let mut queue = VecDeque::from([node]);
        let mut visited = HashSet::from([node]);
        loop {
            // `queue.len()` is captured once, so exactly one BFS level is
            // drained per pass even though successors are pushed meanwhile.
            for _ in 0..queue.len() {
                let state = queue.pop_front().unwrap();
                for next in state.nexts() {
                    if next.board_length() == 0 {
                        return depth;
                    }
                    if visited.insert(next) {
                        queue.push_back(next);
                    }
                }
            }
            if queue.is_empty() {
                break;
            }
            depth += 1;
        }
        -1
    }
}

// ------------------------------------------------------ snip ------------------------------------------------------ //

impl super::Solution for Solution {
    fn find_min_step(board: String, hand: String) -> i32 {
        Self::find_min_step(board, hand)
    }
}

#[cfg(test)]
mod tests {
    use super::State;

    #[test]
    fn test_solution() {
        super::super::tests::run::<super::Solution>();
    }

    #[test]
    fn test_state() {
        let lhs = State::new(b"BGRWY", b"BGRWY");
        let rhs = Clone::clone(&lhs);
        assert!(lhs == rhs);
    }
}
true
e7e02c03abf44c2dcc5d927c520c909f8ef7b0b8
Rust
songyzh/leetcode-rust
/src/solution/s0371_sum_of_two_integers.rs
UTF-8
1,311
3.609375
4
[ "Apache-2.0" ]
permissive
/**
 * [371] Sum of Two Integers
 *
 * Calculate the sum of two integers a and b, but you are not allowed to use
 * the operator + and -.
 *
 * Example 1: a = 1, b = 2  -> 3
 * Example 2: a = -2, b = 3 -> 1
 */
pub struct Solution {}

// problem: https://leetcode.com/problems/sum-of-two-integers/
// discuss: https://leetcode.com/problems/sum-of-two-integers/discuss/?currentPage=1&orderBy=most_votes&query=

// submission codes start here

impl Solution {
    /// Bitwise addition: XOR yields the carry-less digit sum, AND shifted
    /// left one bit yields the carries. Repeat until either part is zero.
    pub fn get_sum(a: i32, b: i32) -> i32 {
        let (mut acc, mut carry) = (a, b);
        while acc != 0 && carry != 0 {
            let pending = (acc & carry) << 1;
            acc ^= carry;
            carry = pending;
        }
        // Whichever of the two is still non-zero holds the final sum.
        if acc == 0 {
            carry
        } else {
            acc
        }
    }
}

// submission codes end

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_371() {}
}
true
d4036570d6bc0b6941185855ada8eff6cc51e347
Rust
wilsonzlin/fast-spsc-queue
/src/lib.rs
UTF-8
3,971
2.96875
3
[]
no_license
use std::{mem, ptr};

// Single-producer / single-consumer ring buffer.
//
// Capacity is always a power of two, so the ring slot for a monotonically
// increasing index is simply `index & capacity_mask`.
//
// NOTE(review): `read_next`, `write_next` and `ended` are plain fields that
// one thread writes while the other reads through a raw pointer, with no
// atomics or ordering. Under the Rust memory model this is a data race (UB)
// even in an SPSC arrangement; they should likely be `AtomicUsize` /
// `AtomicBool` with acquire/release pairs. Flagged only — left unchanged.
struct SpscQueue<V: Send + Sync> {
    // Heap storage for `capacity` elements; allocated via a Vec in `new` and
    // reconstituted into a Vec in `drop` to free it.
    buffer: *mut V,
    capacity: usize,
    // capacity - 1; valid as a mask because capacity is a power of two.
    capacity_mask: usize,
    // We implement it at the queue level as it's a common requirement and so that V doesn't have to
    // be a heavier enum with an end message variant.
    ended: bool,
    // Total elements ever dequeued (monotonic, not wrapped).
    read_next: usize,
    // Total elements ever enqueued (monotonic, not wrapped).
    write_next: usize,
}

unsafe impl<V: Send + Sync> Send for SpscQueue<V> {}
unsafe impl<V: Send + Sync> Sync for SpscQueue<V> {}

impl<V: Send + Sync> Drop for SpscQueue<V> {
    fn drop(&mut self) {
        // Rebuild the Vec with length 0 so the allocation is released.
        // NOTE(review): any elements still enqueued are leaked (their Drop
        // never runs) — confirm this is acceptable for V.
        unsafe {
            let _ = Vec::from_raw_parts(self.buffer, 0, self.capacity);
        };
    }
}

impl<V: Send + Sync> SpscQueue<V> {
    // Builds a queue with capacity `2^capacity_exponent`.
    pub fn new(capacity_exponent: usize) -> SpscQueue<V> {
        assert!(capacity_exponent < mem::size_of::<usize>() * 8);
        let capacity = 1 << capacity_exponent;
        let mut vec = Vec::with_capacity(capacity);
        let ptr = vec.as_mut_ptr();
        // Ownership of the allocation transfers to `buffer`; freed in Drop.
        mem::forget(vec);
        SpscQueue {
            buffer: ptr,
            capacity,
            capacity_mask: capacity - 1,
            ended: false,
            read_next: 0,
            write_next: 0,
        }
    }
}

// Producer owns the underlying queue and drops it when itself is released.
pub struct SpscQueueProducer<V: Send + Sync> {
    queue: *mut SpscQueue<V>,
}

unsafe impl<V: Send + Sync> Send for SpscQueueProducer<V> {}
unsafe impl<V: Send + Sync> Sync for SpscQueueProducer<V> {}

impl<V: Send + Sync> Drop for SpscQueueProducer<V> {
    fn drop(&mut self) {
        // NOTE(review): if the consumer outlives the producer it is left with
        // a dangling queue pointer — confirm the intended usage contract.
        unsafe {
            let _ = Box::from_raw(self.queue);
        };
    }
}

impl<V: Send + Sync> SpscQueueProducer<V> {
    // Appends `value`, spin-waiting while the ring is full.
    pub fn enqueue(&mut self, value: V) -> () {
        let queue = unsafe { &mut *self.queue };
        while queue.write_next >= queue.read_next + queue.capacity {
            // Wait for consumer to catch up.
        };
        unsafe { ptr::write(queue.buffer.offset((queue.write_next & queue.capacity_mask) as isize), value) };
        // Increment after setting buffer element.
        queue.write_next += 1;
    }

    // Marks the stream complete; once drained, `dequeue` returns `None`.
    pub fn finish(&mut self) -> () {
        let queue = unsafe { &mut *self.queue };
        queue.ended = true;
    }
}

// Result of one non-blocking dequeue attempt.
pub enum MaybeDequeued<V> {
    // Producer called `finish` and everything has been drained.
    Ended,
    // Nothing available right now.
    None,
    Some(V),
}

pub struct SpscQueueConsumer<V: Send + Sync> {
    queue: *mut SpscQueue<V>,
}

unsafe impl<V: Send + Sync> Send for SpscQueueConsumer<V> {}
unsafe impl<V: Send + Sync> Sync for SpscQueueConsumer<V> {}

impl<V: Send + Sync> SpscQueueConsumer<V> {
    #[inline(always)]
    fn queue(&self) -> &SpscQueue<V> {
        unsafe { &*self.queue }
    }

    #[inline(always)]
    fn queue_mut(&self) -> &mut SpscQueue<V> {
        unsafe { &mut *self.queue }
    }

    #[inline(always)]
    pub fn is_empty(&self) -> bool {
        let queue = self.queue();
        queue.read_next >= queue.write_next
    }

    // Non-blocking dequeue attempt.
    pub fn maybe_dequeue(&mut self) -> MaybeDequeued<V> {
        if self.is_empty() {
            if self.queue().ended {
                return MaybeDequeued::Ended;
            };
            return MaybeDequeued::None;
        };
        let queue = self.queue_mut();
        let value = unsafe { ptr::read(queue.buffer.offset((queue.read_next & queue.capacity_mask) as isize)) };
        queue.read_next += 1;
        MaybeDequeued::Some(value)
    }

    // Blocking dequeue: spin-waits until a value arrives or the stream ends.
    pub fn dequeue(&mut self) -> Option<V> {
        loop {
            match self.maybe_dequeue() {
                // Wait for producer to provide values.
                MaybeDequeued::None => {}
                // We've caught up to the end.
                MaybeDequeued::Ended => return None,
                MaybeDequeued::Some(v) => return Some(v),
            };
        };
    }
}

// Creates a connected producer/consumer pair sharing one ring buffer of
// capacity `2^capacity_exponent`.
pub fn create_spsc_queue<V: Send + Sync>(capacity_exponent: usize) -> (SpscQueueProducer<V>, SpscQueueConsumer<V>) {
    let queue = Box::into_raw(Box::new(SpscQueue::<V>::new(capacity_exponent)));
    (SpscQueueProducer { queue }, SpscQueueConsumer { queue })
}
true
09668341478d891c799d8639ce0b47f3e9bca48d
Rust
aiifabbf/leetcode-memo
/876.rust/src/main.rs
UTF-8
1,365
3.484375
3
[]
no_license
// LeetCode 876: return the middle node of a singly linked list. For an
// even-length list, return the second (right-hand) of the two middle nodes.
//
// Strategy: one pass by reference to count the nodes (no cloning), then a
// second pass that consumes the list, dropping the first n/2 nodes.

struct Solution;

// Definition for singly-linked list.
#[derive(PartialEq, Eq, Clone, Debug)]
pub struct ListNode {
    pub val: i32,
    pub next: Option<Box<ListNode>>,
}

impl ListNode {
    #[inline]
    fn new(val: i32) -> Self {
        ListNode { next: None, val }
    }
}

impl Solution {
    pub fn middle_node(head: Option<Box<ListNode>>) -> Option<Box<ListNode>> {
        // First pass: walk shared references to count the nodes.
        let mut count = 0;
        {
            let mut cursor = &head;
            while let Some(node) = cursor.as_ref() {
                count += 1;
                cursor = &node.next;
            }
        }
        // Second pass: take ownership and discard the first count/2 nodes;
        // what remains starts at the (right-biased) middle.
        let mut middle = head;
        for _ in 0..count / 2 {
            middle = middle.unwrap().next;
        }
        middle
    }
}

fn main() {}
true
19d3a3c4ec3d13fb2ae367eb258af3c57241d5e4
Rust
CGQAQ/postcss-rs
/crates/recursive-parser/src/parser.rs
UTF-8
7,713
3.390625
3
[ "MIT" ]
permissive
//! Recursive-descent CSS parser over the `tokenizer` crate's token stream.
//! AST nodes record byte offsets (`start`/`end`) into the source text.

use std::iter::Peekable;

use tokenizer::{Token, TokenType, Tokenizer};

use crate::Lexer;

// Root of a parsed stylesheet.
pub struct Root<'a> {
    children: Vec<RuleOrAtRuleOrDecl<'a>>,
    start: usize,
    end: usize,
}

enum RuleOrAtRuleOrDecl<'a> {
    Rule(Rule<'a>),
    AtRule(AtRule<'a>),
    Declaration(Declaration<'a>),
}

// enum AtRuleOrDeclaration<'a> {
//     Declaration(Declaration<'a>),
//     AtRule(AtRule<'a>),
// }

// A style rule: selector components followed by a `{ ... }` block.
pub struct Rule<'a> {
    children: Vec<RuleOrAtRuleOrDecl<'a>>,
    start: usize,
    end: usize,
}

// A `prop: value` declaration.
pub struct Declaration<'a> {
    prop: Prop<'a>,
    value: Value,
}

pub struct Prop<'a> {
    content: &'a str,
    start: usize,
    end: usize,
}

// Only offsets are kept for values; the text can be sliced from the source.
pub struct Value {
    // content: &'a str,
    start: usize,
    end: usize,
}

pub struct AtRule<'a> {
    selector: Selector<'a>,
    start: usize,
    end: usize,
}

struct Selector<'a> {
    content: &'a str,
    start: usize,
    end: usize,
}

// `pos` tracks the end offset of the most recently consumed token.
pub struct Parser<'a> {
    lexer: Peekable<Lexer<'a>>,
    pos: usize,
}

impl<'a> Parser<'a> {
    pub fn new(input: &'a str) -> Self {
        Self {
            lexer: Lexer::new(input).peekable(),
            pos: 0,
        }
    }

    // Parses an entire stylesheet.
    // NOTE(review): the rules/at-rules parsed here are discarded — nothing is
    // pushed into `children`, so `Root.children` is always empty. Confirm
    // whether they should be collected.
    pub fn parse(mut self) -> Root<'a> {
        // self.parse_element();
        let mut children = vec![];
        while let Some(syntax) = self.peek() {
            match syntax {
                TokenType::Space => {
                    self.bump();
                }
                TokenType::AtWord => {
                    // println!("parse at rule top level");
                    self.parse_at_rule();
                }
                TokenType::Comment => {
                    self.parse_comment();
                }
                _ => {
                    self.parse_rule();
                }
            };
        }
        Root {
            children,
            start: 0,
            end: self.pos,
        }
    }

    // Comments are consumed as a single token.
    #[inline]
    pub fn parse_comment(&mut self) {
        self.bump();
    }

    // Parses a rule: optional selector components, then a `{ ... }` block of
    // declarations.
    // NOTE(review): `let rule: Rule;` below is never used, and the first match
    // arm returns `end: 0` (unlike the second arm's `end: self.pos`) — both
    // look like leftovers worth confirming.
    #[inline]
    pub fn parse_rule(&mut self) -> Rule<'a> {
        let rule: Rule;
        let start = self.pos;
        if let Some(kind) = self.peek() {
            match kind {
                TokenType::OpenCurly => {
                    let children = self.parse_curly_block(false);
                    Rule {
                        children,
                        start,
                        end: 0,
                    }
                }
                _ => {
                    self.parse_component();
                    loop {
                        match self.peek() {
                            Some(kind) => match kind {
                                TokenType::OpenCurly => {
                                    // let children =
                                    return Rule {
                                        children: self.parse_curly_block(false),
                                        start,
                                        end: self.pos,
                                    };
                                }
                                TokenType::Space => {
                                    self.bump();
                                }
                                _ => {
                                    self.parse_component();
                                }
                            },
                            None => {
                                panic!(r#"expected {} 
found none"#, "{");
                            }
                        }
                    }
                }
            }
        } else {
            unimplemented!("should parse a Rule")
        }
    }

    // https://drafts.csswg.org/css-syntax/#component-value-diagram
    // Consumes one component value: a bracketed block of any kind, or a
    // single token.
    #[inline]
    fn parse_component(&mut self) {
        // self.start_node(TokenType::Component);
        if let Some(kind) = self.peek() {
            match kind {
                TokenType::OpenParentheses => {
                    // println!("parse open parentheses");
                    self.parse_parentheses_block();
                }
                TokenType::OpenSquare => {
                    self.parse_square_block();
                }
                TokenType::OpenCurly => {
                    self.parse_curly_block(false);
                }
                _ => {
                    // println!("need to bump {:?} from parse component", self.peek());
                    self.bump();
                }
            }
        }
        // self.finish_node();
    }

    // Consumes a `( ... )` block, recursing into nested components.
    fn parse_parentheses_block(&mut self) {
        self.bump(); // bump (
        loop {
            match self.peek() {
                Some(kind) => match kind {
                    TokenType::CloseParentheses => {
                        self.bump();
                        break;
                    }
                    _ => {
                        self.parse_component();
                    }
                },
                None => {
                    // TODO: error handle
                    panic!("expected ) found none");
                }
            }
        }
    }

    // Consumes a `[ ... ]` block, recursing into nested components.
    fn parse_square_block(&mut self) {
        self.bump(); // bump [
        loop {
            match self.peek() {
                Some(kind) => match kind {
                    TokenType::CloseSquare => {
                        self.bump();
                        break;
                    }
                    _ => {
                        self.parse_component();
                    }
                },
                None => {
                    // TODO: error handle
                    panic!("expected ] found none");
                }
            }
        }
    }

    // Consumes a `{ ... }` block. When `rule` is true the contents are parsed
    // as nested rules (at-rule bodies); otherwise as declarations.
    fn parse_curly_block(&mut self, rule: bool) -> Vec<RuleOrAtRuleOrDecl<'a>> {
        use TokenType::*;
        // println!("parse curlyblock");
        let mut ret: Vec<RuleOrAtRuleOrDecl> = vec![];
        self.bump(); // bump {
        self.skip_whitespace();
        loop {
            match self.peek() {
                Some(kind) => match kind {
                    Semicolon => {
                        self.bump();
                    }
                    AtWord => ret.push(RuleOrAtRuleOrDecl::AtRule(self.parse_at_rule())),
                    Space => {
                        self.bump();
                    }
                    CloseCurly => {
                        self.bump();
                        // println!("finish close curly");
                        break;
                    }
                    _ => {
                        if rule {
                            // println!("parse rule -->");
                            ret.push(RuleOrAtRuleOrDecl::Rule(self.parse_rule()));
                        } else {
                            // println!("parse declaration");
                            ret.push(RuleOrAtRuleOrDecl::Declaration(self.parse_declaration()));
                        }
                    }
                },
                None => {
                    //TODO: error handle
                    panic!("expected close curly");
                }
            }
        }
        ret
    }

    // Parses `prop : value`, stopping (without consuming) at `;` or `}`.
    fn parse_declaration(&mut self) -> Declaration<'a> {
        use TokenType::*;
        assert!(
            matches!(self.peek(), Some(Word)),
            "expected word found {:?}",
            self.peek(),
        );
        let Token(_, content, start, end) = self.bump();
        let prop = Prop {
            content,
            start,
            end,
        };
        self.skip_whitespace();
        assert!(
            matches!(self.peek(), Some(TokenType::Colon)),
            "expected : found {:?}",
            self.peek()
        );
        self.bump();
        self.skip_whitespace();
        // `has_finish` distinguishes "stopped at ;/}" from "ran out of input";
        // either way the value ends at the current position.
        let mut has_finish = false;
        let mut value = Value {
            start: self.pos,
            end: 0,
        };
        while let Some(kind) = self.peek() {
            match kind {
                CloseCurly | Semicolon => {
                    has_finish = true;
                    value.end = self.pos;
                    break;
                }
                Space => {
                    self.bump();
                }
                _ => {
                    // println!("parse the component");
                    self.parse_component();
                }
            }
        }
        if !has_finish {
            value.end = self.pos;
        }
        Declaration { prop, value }
    }

    // Parses an at-rule: `@word` prelude components, then either a `{}` body
    // or a terminating `;`.
    pub fn parse_at_rule(&mut self) -> AtRule<'a> {
        use TokenType::*;
        let start = self.pos;
        self.bump(); // bump atWord
        self.skip_whitespace();
        while let Some(kind) = self.peek() {
            match kind {
                OpenCurly => {
                    // self.finish_node(); finish params
                    self.parse_curly_block(true);
                    break;
                }
                Semicolon => {
                    // self.finish_node();
                    self.bump();
                    break;
                }
                _ => {
                    self.parse_component();
                }
            }
        }
        AtRule {
            // FIXME: not a real selector
            selector: Selector {
                content: "",
                start: 0,
                end: 0,
            },
            start,
            end: self.pos,
        }
    }

    // Skips at most one Space token.
    // NOTE(review): `if`, not `while` — assumes the tokenizer merges runs of
    // whitespace into a single Space token; confirm.
    #[inline]
    pub fn skip_whitespace(&mut self) {
        if let Some(TokenType::Space) = self.peek() {
            self.bump();
        }
    }

    // Looks at the next token's type without consuming it.
    pub fn peek(&mut self) -> Option<TokenType> {
        self.lexer.peek().map(|token| token.0)
    }

    // Consumes the next token and advances `pos` to its end offset.
    // Panics when the stream is exhausted.
    pub fn bump(&mut self) -> Token<'a> {
        let token = self.lexer.next().unwrap();
        self.pos = token.3;
        token
        // println!("{:?}, {:?}", kind, text);
    }
}
true
26beb2de3783c0d34646eed6a9f349f2f636caed
Rust
elprl/sensehat-rs
/src/lps25h.rs
UTF-8
2,542
2.875
3
[ "MIT", "CC-BY-4.0", "Apache-2.0" ]
permissive
//! * Driver for the LPS25H Pressure sensor //! See <http://www.st.com/en/mems-and-sensors/lps25h.html> use byteorder::{ByteOrder, LittleEndian}; use i2cdev::core::I2CDevice; pub const REG_RES_CONF: u8 = 0x10; pub const REG_CTRL_REG_1: u8 = 0x20; pub const REG_CTRL_REG_2: u8 = 0x21; pub const REG_STATUS_REG: u8 = 0x27; pub const REG_PRESS_OUT_XL: u8 = 0x28; pub const REG_PRESS_OUT_L: u8 = 0x29; pub const REG_PRESS_OUT_H: u8 = 0x2a; pub const REG_TEMP_OUT_L: u8 = 0x2b; pub const REG_TEMP_OUT_H: u8 = 0x2c; pub const REG_FIFO_CTRL: u8 = 0x2e; pub(crate) struct Lps25h<T: I2CDevice + Sized> { i2cdev: T, } impl<T> Lps25h<T> where T: I2CDevice + Sized, { /// Create a new pressure sensor handle for the given path/addr. /// Init sequence from https://github.com/RPi-Distro/RTIMULib pub fn new(mut i2cdev: T) -> Result<Lps25h<T>, T::Error> { i2cdev.smbus_write_byte_data(REG_CTRL_REG_1, 0xc4)?; i2cdev.smbus_write_byte_data(REG_RES_CONF, 0x05)?; i2cdev.smbus_write_byte_data(REG_FIFO_CTRL, 0xc0)?; i2cdev.smbus_write_byte_data(REG_CTRL_REG_2, 0x40)?; Ok(Lps25h { i2cdev }) } /// Obtain the status bitfield from the chip. pub fn status(&mut self) -> Result<u8, T::Error> { self.i2cdev.smbus_read_byte_data(REG_STATUS_REG) } /// Obtain the temperature reading from the chip. /// T(°C) = 42.5 + (TEMP_OUT / 480) pub fn get_temp(&mut self) -> Result<i16, T::Error> { let mut buf = [0u8; 2]; buf[0] = self.i2cdev.smbus_read_byte_data(REG_TEMP_OUT_L)?; buf[1] = self.i2cdev.smbus_read_byte_data(REG_TEMP_OUT_H)?; Ok(LittleEndian::read_i16(&buf)) } /// Obtain the temperature reading from the chip in deg C. pub fn get_temp_celcius(&mut self) -> Result<f64, T::Error> { self.get_temp() .and_then(|c| Ok((f64::from(c) / 480.0) + 42.5)) } /// Obtain the pressure reading from the chip. 
/// Pout(hPa) = PRESS_OUT / 4096 pub fn get_pressure(&mut self) -> Result<u32, T::Error> { let mut buf = [0u8; 4]; buf[0] = self.i2cdev.smbus_read_byte_data(REG_PRESS_OUT_XL)?; buf[1] = self.i2cdev.smbus_read_byte_data(REG_PRESS_OUT_L)?; buf[2] = self.i2cdev.smbus_read_byte_data(REG_PRESS_OUT_H)?; Ok(LittleEndian::read_u32(&buf)) } /// Obtain the pressure reading from the chip in hPa. pub fn get_pressure_hpa(&mut self) -> Result<f64, T::Error> { self.get_pressure().and_then(|c| Ok(f64::from(c) / 4096.0)) } }
true
92f4434d9eceb69bbb55e66432084d70554080ce
Rust
eistaa/AdventOfCode2020
/src/day16.rs
UTF-8
3,807
3
3
[]
no_license
use itertools::Itertools; use regex::Regex; use std::collections::HashSet; use std::fs; use std::path::Path; use std::str::FromStr; #[derive(Debug)] struct Rule { pub name: String, pub ranges: Vec<(i32, i32)>, } impl Rule { fn in_any_range(&self, value: &i32) -> bool { for (start, end) in &self.ranges { if *start <= *value && *value <= *end { return true; } } false } } fn parse(filename: &Path) -> Result<(Vec<Rule>, Vec<i32>, Vec<Vec<i32>>), String> { let re_rule = Regex::new(r"^(.+): ([0-9]+)-([0-9]+) or ([0-9]+)-([0-9]+)$").unwrap(); let input_blocks: Vec<String> = fs::read_to_string(filename) .map_err(|err| format!("Failed to read data for day 15: {}", err))? .split("\n\n") .map(|s| s.to_string()) .collect(); // rules let rules = input_blocks .get(0) .ok_or("No rules input block".to_string())? .lines() .map::<Result<Rule, String>, _>(|line| { let capture = re_rule.captures(line).ok_or("Failed to match rule line".to_string())?; Ok(Rule { name: capture.get(1).ok_or("No name on rule".to_string())?.as_str().to_owned(), ranges: capture .iter() .skip(2) .map(|cap| i32::from_str(cap.unwrap().as_str()).unwrap()) .tuples() .collect::<Vec<(i32, i32)>>(), }) }) .collect::<Result<Vec<Rule>, _>>()?; // my ticket let ticket = input_blocks .get(1) .ok_or("No my ticket input block".to_string())? .lines() .nth(1) .ok_or("Failed to retrieve my ticket line".to_string())? .split(",") .map(|num| i32::from_str(num).unwrap()) .collect::<Vec<i32>>(); // nearby tickets let tickets = input_blocks .get(2) .ok_or("No nearby tickets input block".to_string())? 
.lines() .skip(1) .map(|line| { line.split(",") .map(|num| i32::from_str(num).unwrap()) .collect::<Vec<i32>>() }) .collect::<Vec<Vec<i32>>>(); Ok((rules, ticket, tickets)) } pub fn part01(filename: &Path) -> Result<String, String> { let (rules, _, tickets) = &parse(filename)?; let error_rate: i32 = tickets .iter() .flatten() .filter(|&num| !rules.iter().any(|rule| rule.in_any_range(num))) .sum(); Ok(format!("Ticket error rate: {}", error_rate)) } pub fn part02(filename: &Path) -> Result<String, String> { let (rules, ticket, tickets) = &parse(filename)?; let tickets = tickets .iter() .filter(|ticket| { !ticket .iter() .any(|field| !rules.iter().any(|rule| rule.in_any_range(field))) }) .collect::<Vec<&Vec<i32>>>(); let mut possible = Vec::new(); for rule in rules { let mut fields = HashSet::new(); for field in 0..rules.len() { if tickets.iter().all(|ticket| rule.in_any_range(&ticket[field])) { fields.insert(field); } } possible.push(fields); } let mut ordering = Vec::new(); let mut found = HashSet::new(); for (fields, idx) in possible.iter().zip(0..rules.len()).sorted_by_key(|el| el.0.len()) { ordering.push((idx, *fields.difference(&found).next().unwrap())); found.extend(fields); } Ok(format!( "Departure fields product: {}", ordering .iter() .filter(|(idx, _)| rules[*idx].name.starts_with("departure")) .map(|(_, field)| ticket[*field] as i64) .product::<i64>() )) }
true
f16b1fd0e1c9a1664ba998714c327ed9619b524a
Rust
serpent-charmer/RIAN
/src/ian.rs
UTF-8
9,332
3.328125
3
[ "MIT" ]
permissive
pub struct Interval { left: f64, right: f64, } impl<'a, 'b> std::ops::Add<&'b Interval> for &'a Interval { type Output = Interval; fn add(self, addend: &'b Interval) -> Interval { return Interval { left: self.left + addend.left, right: - (-self.right - addend.right) } } } impl<'a> std::ops::Add<&'a Interval> for Interval { type Output = Interval; fn add(self, addend: &'a Interval) -> Interval { return Interval { left: self.left + addend.left, right: - (-self.right - addend.right) } } } impl<'a> std::ops::Add<Interval> for &'a Interval { type Output = Interval; fn add(self, addend: Interval) -> Interval { return Interval { left: self.left + addend.left, right: - (-self.right - addend.right) } } } impl<'a, 'b> std::ops::Sub<&'b Interval> for &'a Interval { type Output = Interval; fn sub(self, subtrahend: &'b Interval) -> Interval { return Interval { left: self.left - subtrahend.right, right: self.right - subtrahend.left } } } impl<'a> std::ops::Sub<&'a Interval> for Interval { type Output = Interval; fn sub(self, subtrahend: &'a Interval) -> Interval { return Interval { left: self.left - subtrahend.right, right: self.right - subtrahend.left } } } impl<'a> std::ops::Sub<Interval> for &'a Interval { type Output = Interval; fn sub(self, subtrahend:Interval) -> Interval { return Interval { left: self.left - subtrahend.right, right: self.right - subtrahend.left } } } impl<'a, 'b> std::ops::Mul<&'b Interval> for &'a Interval { type Output = Interval; fn mul(self, multiplicand: &'b Interval) -> Interval { let tvec:Vec<f64> = vec!( self.left * multiplicand.left, self.left * multiplicand.right, self.right * multiplicand.left, self.right * multiplicand.right ); return Interval { left: tvec.iter().cloned().fold(0./0., f64::min), right: tvec.iter().cloned().fold(0./0., f64::max) } } } impl<'a> std::ops::Mul<&'a Interval> for Interval { type Output = Interval; fn mul(self, multiplicand: &'a Interval) -> Interval { let tvec:Vec<f64> = vec!( self.left * multiplicand.left, self.left 
* multiplicand.right, self.right * multiplicand.left, self.right * multiplicand.right ); return Interval { left: tvec.iter().cloned().fold(0./0., f64::min), right: tvec.iter().cloned().fold(0./0., f64::max) } } } impl<'a> std::ops::Mul<Interval> for &'a Interval { type Output = Interval; fn mul(self, multiplicand: Interval) -> Interval { let tvec:Vec<f64> = vec!( self.left * multiplicand.left, self.left * multiplicand.right, self.right * multiplicand.left, self.right * multiplicand.right ); return Interval { left: tvec.iter().cloned().fold(0./0., f64::min), right: tvec.iter().cloned().fold(0./0., f64::max) } } } impl<'a, 'b> std::ops::Div<&'b Interval> for &'a Interval { type Output = Interval; fn div(self, divider: &'b Interval) -> Interval { if self.right == 0.0 || divider.right == 0.0 { panic!("right point can't be 0.0 in division") } let tvec:Vec<f64> = vec!( self.left / divider.left, self.left / divider.right, self.right / divider.left, self.right / divider.right ); return Interval { left: tvec.iter().cloned().fold(0./0., f64::min), right: tvec.iter().cloned().fold(0./0., f64::max) } } } impl<'a> std::ops::Div<&'a Interval> for Interval { type Output = Interval; fn div(self, divider: &'a Interval) -> Interval { if self.right == 0.0 || divider.right == 0.0 { panic!("right point can't be 0.0 in division") } let tvec:Vec<f64> = vec!( self.left / divider.left, self.left / divider.right, self.right / divider.left, self.right / divider.right ); return Interval { left: tvec.iter().cloned().fold(0./0., f64::min), right: tvec.iter().cloned().fold(0./0., f64::max) } } } impl<'a> std::ops::Div<Interval> for &'a Interval { type Output = Interval; fn div(self, divider: Interval) -> Interval { if self.right == 0.0 || divider.right == 0.0 { panic!("right point can't be 0.0 in division") } let tvec:Vec<f64> = vec!( self.left / divider.left, self.left / divider.right, self.right / divider.left, self.right / divider.right ); return Interval { left: 
tvec.iter().cloned().fold(0./0., f64::min), right: tvec.iter().cloned().fold(0./0., f64::max) } } } impl<'a> std::ops::AddAssign<&'a Interval> for Interval { type Output = Interval; fn add_assign(&mut self, addend: &'a Interval) { *self = Interval { left: self.left + addend.left, right: self.right + addend.right }; } } impl<'a> std::ops::SubAssign<&'a Interval> for Interval { type Output = Interval; fn sub_assign(&mut self, subtrahend: &'a Interval) { *self = Interval { left: self.left - subtrahend.right, right: self.right - subtrahend.left } } } impl<'a> std::ops::MulAssign<&'a Interval> for Interval { type Output = Interval; fn mul_assign(&mut self, multiplicand: &'a Interval) { let tvec:Vec<f64> = vec!( self.left * multiplicand.left, self.left * multiplicand.right, self.right * multiplicand.left, self.right * multiplicand.right ); *self = Interval { left: tvec.iter().cloned().fold(0./0., f64::min), right: tvec.iter().cloned().fold(0./0., f64::max) } } } impl<'a> std::ops::DivAssign<&'a Interval> for Interval { type Output = Interval; fn div_assign(&mut self, divider: &'a Interval) { let tvec:Vec<f64> = vec!( self.left / divider.left, self.left / divider.right, self.right / divider.left, self.right / divider.right ); *self = Interval { left: tvec.iter().cloned().fold(0./0., f64::min), right: tvec.iter().cloned().fold(0./0., f64::max) } } } impl<'a> std::ops::Add<f64> for &'a Interval { type Output = Interval; fn add(self, addend:f64) -> Interval { return Interval { left: self.left + addend, right: self.right + addend } } } impl<'a> std::ops::Sub<f64> for &'a Interval { type Output = Interval; fn sub(self, subtrahend:f64) -> Interval { return Interval { left: self.left - subtrahend, right: self.right - subtrahend } } } impl<'a> std::ops::Mul<f64> for &'a Interval { type Output = Interval; fn mul(self, multiplicand:f64) -> Interval { return Interval { left: self.left * multiplicand, right: self.right * multiplicand } } } impl<'a> std::ops::Div<f64> for &'a Interval 
{ type Output = Interval; fn div(self, divider:f64) -> Interval { return Interval { left: self.left / divider, right: self.right / divider } } } impl<'a> std::ops::Add<&'a Interval> for f64 { type Output = Interval; fn add(self, addend: &'a Interval) -> Interval { return addend + self } } impl std::ops::Add<Interval> for f64 { type Output = Interval; fn add(self, mut addend: Interval) -> Interval { addend.left += self; addend.right += self; return addend; } } impl<'a> std::ops::Sub<&'a Interval> for f64 { type Output = Interval; fn sub(self, subtrahend: &'a Interval) -> Interval { return subtrahend - self } } impl<'a> std::ops::Mul<&'a Interval> for f64 { type Output = Interval; fn mul(self, multiplicand: &'a Interval) -> Interval { return multiplicand * self } } impl<'a> std::ops::Div<&'a Interval> for f64 { type Output = Interval; fn div(self, dividend: &'a Interval) -> Interval { return dividend / self } } impl<'a> std::ops::AddAssign<&'a f64> for Interval { type Output = Interval; fn add_assign(&mut self, addend: &'a f64) { *self = Interval { left: self.left + addend, right: self.right + addend }; } } impl<'a> std::ops::SubAssign<&'a f64> for Interval { type Output = Interval; fn sub_assign(&mut self, subtrahend: &'a f64) { *self = Interval { left: self.left - subtrahend, right: self.right - subtrahend }; } } impl<'a> std::ops::MulAssign<&'a f64> for Interval { type Output = Interval; fn mul_assign(&mut self, multiplicand: &'a f64) { *self = Interval { left: self.left * multiplicand, right: self.right * multiplicand }; } } impl<'a> std::ops::DivAssign<&'a f64> for Interval { type Output = Interval; fn div_assign(&mut self, divider: &'a f64) { *self = Interval { left: self.left / divider, right: self.right / divider }; } } impl std::fmt::Display for Interval { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "Interval LEFT[{:e}] RIGHT[{:e}]", self.left, self.right) } } impl Interval { pub fn new(lval: f64, rval: f64) -> Interval { 
return Interval { left: lval, right: rval } } pub fn from(ivl:&Interval) -> Interval { return Interval { left: ivl.left, right: ivl.right } } pub fn map(ivl:&Interval, f: fn(f64) -> f64) -> Interval { Interval { left: f(ivl.left), right: f(ivl.right) } } }
true
853d4494736048582e7f2e866af6273afb937e17
Rust
sharkbound/rust-projects
/dndgui_egui/mainapp/src/topmenu/mod.rs
UTF-8
3,024
2.578125
3
[ "MIT" ]
permissive
use eframe::egui; use eframe::egui::{Context}; use dndlib::{AbilityScores, Character, DndCampaign, Note, Race}; use crate::{file_dialog_handler, MainApp}; pub(crate) fn show_top_menu(ctx: &Context, app: &mut MainApp) { egui::TopBottomPanel::top("primary_topbar").show(ctx, |ui| { ui.horizontal(|ui| { ui.menu_button("Load", |ui| { if ui.button("load campaign from file").clicked() { file_dialog_handler::set_new_file_dialog(app); ui.close_menu(); } }); if ui.button("Load Example Campaign").clicked() { app.campaign = Some(create_filler_campaign()); } }) }); } fn create_filler_campaign() -> DndCampaign { DndCampaign::new( "Example Campaign For Testing", vec![ Character::new("Default Dan", Race::BugBear, AbilityScores::new( 16, 10, 8, 20, 15, 16, ), Some(14)).edit(|chr| { chr.edit_note(|note| { note.edit_title(|_| "Default Dan's Note!".to_owned()); note.edit_content(|_| "Default Dan's Note Content! Not much to see here, cause Default Dan is a pretty generic guy!".to_owned()); }); }), Character::new("John Doe", Race::Human, AbilityScores::new( 40, 10, 80, 20, 45, 1337, ), Some(14)).edit(|chr| { chr.edit_note(|note| { note.edit_title(|_| "John Doe's Note!".to_owned()); note.edit_content(|_| "Generic note for John Doe!".to_owned()); }); }) ], vec![ Note::new("Default Note For Testing!", "\ Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. \ Neque vitae tempus quam pellentesque nec nam. Sed blandit libero volutpat sed. Tempus egestas sed sed risus pretium quam. \ Sapien faucibus et molestie ac. Blandit libero volutpat sed cras ornare arcu. \ Laoreet suspendisse interdum consectetur libero id faucibus nisl tincidunt. \ Viverra maecenas accumsan lacus vel facilisis volutpat est. Amet porttitor eget dolor morbi non arcu risus. \ Gravida rutrum quisque non tellus orci ac auctor. Sit amet porttitor eget dolor morbi. \ Duis tristique sollicitudin nibh sit amet commodo nulla. 
Iaculis urna id volutpat lacus laoreet. \ Enim ut tellus elementum sagittis."), Note::new("Secret Note! Dont Open!", "This is a secret note! Dont look at it!"), ]) }
true
ededb553463361ac8ba001309fb278ae2edbd0ca
Rust
fission-codes/basquiat
/src/cfg_parser.rs
UTF-8
2,260
3.453125
3
[ "Apache-2.0" ]
permissive
use std::{fs::File, path::Path, io::{BufReader, BufRead}}; use std::ops::Mul; use regex::Regex; #[derive(Copy, Clone)] pub struct Config{ pub dimensions: Resize, // pub operations: Option<Vec<Operation>> } pub struct Parser{ re: Regex } #[derive(Copy, Clone)] pub enum Resize{ Width(i32), Height(i32), Original } // pub struct Operation { // identifier: String, // parameters: Vec<Parameter> // } // // pub enum Parameter { // Integer(i32), // Text(String) // } impl Parser { pub fn new() -> Parser{ let re: Regex = Regex::new(r"(?P<width>\d+|_)x(?P<height>\d+|_)").unwrap(); Parser{re} } pub fn parse_file(&self, filepath: &Path) -> Vec<Config>{ let file = File::open(filepath).expect(&"cfg file not found"); let lines = BufReader::new(file).lines(); let mut configs : Vec<Config> = Vec::new(); for line in lines { match self.parse_from_string(&line.unwrap()) { Some(conf) => configs.push(conf), None => () } } configs } pub fn parse_from_string(&self, line: &str) -> Option<Config>{ let first_char = line.chars().next(); match first_char { Some('#') => return None, None => return None, Some(_) => () } let matched= self.re.captures(line).expect("Syntax Error in cfg file"); let width = matched.name("width").unwrap().as_str(); let height = matched.name("height").unwrap().as_str(); if width == "_" && height == "_"{ return Some(Config{dimensions: Resize::Original}) } if width == "_" { return Some(Config{dimensions: Resize::Height(height.parse::<i32>().unwrap())}) } return Some(Config{dimensions: Resize::Width(width.parse::<i32>().unwrap())}) } } impl Mul<f32> for Resize { type Output = Resize; fn mul(self, rhs: f32) -> Resize{ match self { Resize::Width(n) => return Resize::Width(((n as f32)*rhs).round() as i32), Resize::Height(n) => return Resize::Height(((n as f32)*rhs).round() as i32), Resize::Original => return Resize::Original } } }
true
b8ab02db8626eebb541e95a0e93742b041eb6fc1
Rust
RoryABrittain/Advent-of-code-2020
/src/ten.rs
UTF-8
1,533
3.296875
3
[]
no_license
use std::fs; pub fn run() { let raw_data = fs::read_to_string("ten_data.txt") .expect("Can't find file"); let lines: Vec<&str> = raw_data.split("\r\n").collect(); let mut numbers: Vec<i64> = lines.iter().map(|x| x.parse::<i64>().unwrap()).collect(); numbers.push(0); // power socket numbers.sort(); // println!("{:?}", numbers); let mut differences: Vec<i64> = Vec::new(); for (i, n) in numbers.iter().enumerate() { if i > 0 { differences.push(n - &numbers[i - 1]); } } differences.push(3); // final adaptor // println!("{:?}", differences); println!("{}\n", differences.iter().filter(|&x| *x == 1).count() * differences.iter().filter(|&x| *x == 3).count()); let mut i = 0; let mut number_of_ways = 1; while i < numbers.len() { let mut temp: Vec<i64> = Vec::new(); temp.push(numbers[i]); while i + 1 < numbers.len() && numbers[i + 1] - numbers[i] != 3 { i = i + 1; temp.push(numbers[i]); } // The data has no steps of 2 and at most 5 numbers in a row // separated by one. I just manualy worked out the number of // different ways for up to 5 becaseu it is not trivial what // the formula is. number_of_ways = number_of_ways * ([0, 1, 1, 2, 4, 7][temp.len()] as usize); i = i + 1; } println!("{}", number_of_ways); }
true
42246cf3b76998c6973999ed6e5d971b3e7f937e
Rust
illicitonion/cargo-raze
/examples/vendored/non_cratesio_library/cargo/vendor/futures-util-0.2.0/src/future/with_executor.rs
UTF-8
876
2.875
3
[ "Apache-2.0" ]
permissive
use futures_core::{Future, Poll}; use futures_core::task; use futures_core::executor::Executor; /// Future for the `with_executor` combinator, assigning an executor /// to be used when spawning other futures. /// /// This is created by the `Future::with_executor` method. #[derive(Debug)] #[must_use = "futures do nothing unless polled"] pub struct WithExecutor<F, E> where F: Future, E: Executor { executor: E, future: F } pub fn new<F, E>(future: F, executor: E) -> WithExecutor<F, E> where F: Future, E: Executor, { WithExecutor { executor, future } } impl<F, E> Future for WithExecutor<F, E> where F: Future, E: Executor, { type Item = F::Item; type Error = F::Error; fn poll(&mut self, cx: &mut task::Context) -> Poll<F::Item, F::Error> { self.future.poll(&mut cx.with_executor(&mut self.executor)) } }
true
deac41da5dd9366a41ae32e023721c9b05fafe57
Rust
alexander-akhmetov/mos
/src/memory/allocator.rs
UTF-8
1,607
2.75
3
[]
no_license
use core::alloc::GlobalAlloc; use core::alloc::Layout; const PREALLOCATED_HEAP_SIZE: usize = 32 * 1024 * 1024; // 32Mb #[repr(C)] struct PreAllocatedMemory { heap: [u8; PREALLOCATED_HEAP_SIZE], index: usize, } impl PreAllocatedMemory { const fn new() -> PreAllocatedMemory { PreAllocatedMemory { heap: [0; PREALLOCATED_HEAP_SIZE], index: 0, } } } static mut PREALLOCATED_MEM: PreAllocatedMemory = PreAllocatedMemory::new(); pub struct MGlobalAlloc; unsafe impl<'a> GlobalAlloc for &'a MGlobalAlloc { unsafe fn alloc(&self, layout: Layout) -> *mut u8 { let new_index = PREALLOCATED_MEM.index + layout.size(); if new_index > PREALLOCATED_HEAP_SIZE { panic!("allocator: memory allocation error!") } let ptr = &mut PREALLOCATED_MEM.heap[PREALLOCATED_MEM.index] as *mut u8; PREALLOCATED_MEM.index = new_index; // system_log!("[allocator]: current index: {}", new_index); // system_log!( // "allocator: alloc called, allocated {} bytes at {:#X}", // layout.size(), // ptr as usize // ); ptr } unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) { // system_log!( // "allocator: dealloc called: deallocate {} bytes at {:#X}", // layout.size(), // ptr as usize // ); } } #[cfg(not(test))] #[alloc_error_handler] #[no_mangle] pub fn rust_oom(layout: Layout) -> ! { system_log!("! OOM: memory allocation of {} bytes failed", layout.size()); loop {} }
true
b958d805ad7df6dcc305684f4b5e29ad13722253
Rust
nolik/CodeSignal
/src/task/box_blur.rs
UTF-8
2,055
3.28125
3
[]
no_license
pub fn boxBlur(image: Vec<Vec<i32>>) -> Vec<Vec<i32>> { let mut result: Vec<Vec<i32>> = Vec::new(); for row in 2..image.len() { let mut vec: Vec<i32> = Vec::new(); for item in 2..image[row].len() { let box_value = box_value(row, item, &image); vec.push(box_value); } result.push(vec); } result } fn box_value(row: usize, item: usize, image: &Vec<Vec<i32>>) -> i32 { let first_raw = raw_sum_value( &image[row - 2][item - 2], &image[row - 2][item - 1], &image[row - 2][item], ); let second_raw = raw_sum_value( &image[row - 1][item - 2], &image[row - 1][item - 1], &image[row - 1][item], ); let third_raw = raw_sum_value( &image[row][item - 2], &image[row][item - 1], &image[row][item], ); (first_raw + second_raw + third_raw) / 9 } fn raw_sum_value(a: &i32, b: &i32, c: &i32) -> i32 { a + b + c } //Input: // //image: [[1,1,1], // [1,7,1], // [1,1,1]] // //Expected Output: // //[[1]] //Input: // //image: [[0,18,9], // [27,9,0], // [81,63,45]] // //Expected Output: // //[[28]] //Input: // //image: [[36,0,18,9], // [27,54,9,0], // [81,63,72,45]] // //Expected Output: // //[[40,30]] //Input: // //image: [[7,4,0,1], // [5,6,2,2], // [6,10,7,8], // [1,4,2,0]] // //Expected Output: // //[[5,4], // [4,4]] //Input: // //image: // [[36,0,18,9,9,45,27], // [27,0,54,9,0,63,90], // [81,63,72,45,18,27,0], // [0,0,9,81,27,18,45], // [45,45,27,27,90,81,72], // [45,18,9,0,9,18,45], // [27,81,36,63,63,72,81]] // //Expected Output: // //[[39,30,26,25,31], // [34,37,35,32,32], // [38,41,44,46,42], // [22,24,31,39,45], // [37,34,36,47,59]] //Input: // //image: [[36,0,18,9,9,45,27], // [27,0,254,9,0,63,90], // [81,255,72,45,18,27,0], // [0,0,9,81,27,18,45], // [45,45,227,227,90,81,72], // [45,18,9,255,9,18,45], // [27,81,36,127,255,72,81]] // //Expected Output: // //[[82,73,48,25,31], // [77,80,57,32,32], // [81,106,88,68,42], // [44,96,103,89,45], // [59,113,137,126,80]]
true
ff238418db05697fdd31b0c1c1cf83e26254658b
Rust
barreiro/euler
/src/main/rust/euler/solver106.rs
UTF-8
1,877
3.453125
3
[ "MIT" ]
permissive
// COPYRIGHT (C) 2022 barreiro. All Rights Reserved. // Rust solvers for Project Euler problems use algorithm::cast::to_i64; use algorithm::combinatorics::choose; use Solver; /// Let `S(A)` represent the sum of elements in set `A` of size `n`. We shall call it a special sum set if for any two non-empty disjoint subsets, `B` and `C`, the following properties are true: /// /// `S(B) ≠ S(C)`; that is, sums of subsets cannot be equal. /// If `B` contains more elements than `C` then `S(B) > S(C)`. /// For this problem we shall assume that a given set contains `n` strictly increasing elements and it already satisfies the second rule. /// /// Surprisingly, out of the `25` possible subset pairs that can be obtained from a set for which `n = 4`, only `1` of these pairs need to be tested for equality (first rule). /// Similarly, when `n = 7`, only `70` out of the `966` subset pairs need to be tested. /// /// For `n = 12`, how many of the `261625` subset pairs that can be obtained need to be tested for equality? /// /// NOTE: This problem is related to *Problem 103* and *Problem 105*. pub struct Solver106 { pub n: u64, } impl Default for Solver106 { fn default() -> Self { Self { n: 12 } } } impl Solver for Solver106 { fn solve(&self) -> i64 { // given a size `2 < i <= n/2`, there are `choose(n, 2*i)` ways to form two subsets of size `i` // given a subset of size `2*i (4, 6, 8, 10, ...)`, the number of subsets that need to be tested is given by the sequence `(1, 5, 21, 84, ...)` // these are the number of combinations where there is not a clear inequality relationship, in other words, it's not possible to establish that for each element in B there is a corresponding element in C that is greater. (2..=self.n >> 1).map(|i| choose(self.n, 2 * i) * choose(2 * i - 1, i - 2)).map(to_i64).sum() } }
true
e8f4c43e08b7325814f4a7452d68c5b9ba6dc6e8
Rust
woodgear/tpm
/src/main.rs
UTF-8
28,510
2.5625
3
[]
no_license
#![allow(clippy::needless_return)] use failure; use serde::{Deserialize, Serialize}; use serde_json; use std::cmp::{Ord, Ordering}; use std::collections::HashMap; use std::collections::VecDeque; use std::fmt::Debug; use std::path::{Path, PathBuf}; use std::pin::Pin; use std::ptr::NonNull; use structopt::StructOpt; mod cli; use cli::*; use context_attribute::context; use failure::ResultExt; #[context(fn)] fn app() -> Result<(), failure::Error> { let opts: Opts = Opts::from_args(); let home_dir = dirs::home_dir().ok_or_else(|| failure::err_msg("could not find home dir"))?; let home_path = home_dir.join(".tpm"); let mut app = TemplateConfigLock::new(&home_path)?; match opts.subcmd { SubCommand::Add { path } => { app.do_add(path)?; } SubCommand::New { id, expect_path } => { app.do_new(id, expect_path)?; } SubCommand::Search { tags } => { app.do_search(tags); } SubCommand::ReIndex => { app.reindex()?; } SubCommand::Update => { app.do_update()?; } SubCommand::List { show_tag } => { if show_tag { app.do_list_tag(); } else { app.do_list_template(); } } }; Ok(()) } fn main() { if let Err(e) = app() { println!("err {:?}", e); } } trait Tag { fn get_tags(&self) -> Vec<String>; } #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] enum TemplateKind { Git(String), Local(String), } /// meta which in .tpm.lock #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] struct Meta { tag: Vec<String>, name: String, path: String, } impl Tag for Meta { fn get_tags(&self) -> Vec<String> { return self.tag.clone(); } } impl Ord for Meta { fn cmp(&self, other: &Self) -> Ordering { self.name.cmp(&other.name) } } impl PartialOrd for Meta { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) } } unsafe fn clone_from_nonnull<T>(refs: Vec<NonNull<T>>) -> Vec<T> where T: Clone, { return refs.into_iter().map(|p| p.as_ref().clone()).collect(); } #[context(fn)] fn git_clone_or_update_master(url: &str, path: &Path) -> Result<(), failure::Error> { use 
git_url_parse::GitUrl; let git_url = GitUrl::parse(url).map_err(|e| failure::format_err!("parser url err {:?}", e))?; let git_repo_path = path.join(&git_url.name); if git_repo_path.exists() { //what do you want more fs_extra::dir::remove(&git_repo_path)?; } use git2::build::{CloneLocal, RepoBuilder}; RepoBuilder::new() .clone_local(CloneLocal::Auto) .clone(url, &git_repo_path)?; Ok(()) } fn count_and_sort<T>(mut data_source: Vec<T>) -> Vec<T> where T: Ord + Eq + Debug, { data_source.sort(); let mut data: VecDeque<(T, u32)> = VecDeque::new(); for i in data_source.into_iter() { if let Some(front) = data.back_mut() { if i.eq(&front.0) { front.1 += 1; continue; } } let i: T = i; data.push_back((i, 1)); } let mut data: Vec<(T, u32)> = data.into_iter().collect(); data.sort_by(|b, a| { if a.1 == b.1 { return a.0.cmp(&b.0); } return a.1.cmp(&b.1); }); let data = data.into_iter().map(|i| i.0).collect(); return data; } #[context(fn)] fn generate_metas(root_path: &Path) -> Result<Vec<Meta>, failure::Error> { let tpm_config_path = root_path.join(".tpm"); if !(tpm_config_path.exists() && tpm_config_path.is_file()) { return Err(failure::format_err!( "{:?} tpm_config_path.exists {} tpm_config_path.is_file {}", root_path, tpm_config_path.exists(), tpm_config_path.is_file() )); } #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] struct TpmConfig { kind: TemplateKind, #[serde(default)] tag: Vec<String>, } #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "lowercase")] enum TemplateKind { Mutli, Single, Root, File, //not now } let config: TpmConfig = serde_json::from_str(&std::fs::read_to_string(&tpm_config_path)?)?; if config.kind == TemplateKind::Single { let name = root_path.file_name().unwrap().to_string_lossy().to_string(); let root_path = root_path.canonicalize().unwrap(); return Ok(vec![Meta { name, path: root_path.to_string_lossy().to_string(), tag: config.tag, }]); } if config.kind == TemplateKind::Root || config.kind == 
TemplateKind::Mutli { //iter dirs let mut metas: Vec<Meta> = vec![]; for entry in root_path.read_dir()? { let entry = entry?; let entry_path = entry.path(); let entry_name = entry_path .file_name() .ok_or_else(|| failure::format_err!("could not get name of {:?}", entry_path))?; if !entry_path.is_dir() { continue; } if entry_name == ".git" { continue; } let mut sub_metas = generate_metas(&entry_path)?; if config.kind == TemplateKind::Mutli { let name = root_path.file_name().unwrap().to_string_lossy().to_string(); for m in sub_metas.iter_mut() { m.name = format!("{}-{}", name, m.name); } } metas.append(&mut sub_metas); } return Ok(metas); } unreachable!() } #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] struct TemplateConfigLock { root_path: PathBuf, metas: Vec<Meta>, } #[context(fn)] /// origin_path /a/b/c => c is a directory /// target_path /a/b/c1 => c1 doest not exist /// after copy dir, c1 and c are exactly same except and name 'c' 'c1' it self fn copy_dir(origin_path: &Path, targent_path: &Path) -> Result<(), failure::Error> { use fs_extra; let mut options = fs_extra::dir::CopyOptions::new(); options.copy_inside = true; fs_extra::dir::copy(origin_path, targent_path, &options)?; Ok(()) } mod render_template { use super::render_template_with_prefix; use crate::context; use failure::ResultExt; use serde::{Deserialize, Serialize}; use std::path::Path; // .tpm struct #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] struct Meta { template: Option<Template>, } #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] struct Template { prefix: String, } #[context(fn)] pub fn render_template(path: &Path) -> Result<(), failure::Error> { use failure::ResultExt; let config_path = path.join(".tpm"); if !config_path.exists() { return Err(failure::format_err!( "could not find cofnig {:?}", config_path )); } let meta_raw_json = &std::fs::read_to_string(&config_path)?; let meta: Meta = serde_json::from_str(meta_raw_json).context("read meta 
json fail")?; let prefix = { if let Some(template) = meta.template { template.prefix } else { return Ok(()); } }; render_template_with_prefix(path, &prefix)?; Ok(()) } #[cfg(test)] mod tests { use super::*; #[test] fn test_parser_meta() { let raw = r#" { "tag":["rust","cli","log"], "kind":"single", "template":{ "prefix":"_t_" } } "#; let meta: Meta = serde_json::from_str(raw).unwrap(); assert_eq!( meta, Meta { template: Some(Template { prefix: "_t_".to_string() }) } ) } } } use render_template::render_template; #[context(fn)] fn pick_names(path: &Path, prefix: &str) -> Result<Vec<String>, failure::Error> { use failure::ResultExt; use indexmap::IndexSet; use simple_replace_templete_engine::get_variables; use walkdir::WalkDir; let mut names = IndexSet::new(); for entry in WalkDir::new(path).into_iter().filter_map(|e| e.ok()) { let path = entry.path().display().to_string(); for var in get_variables(&path, prefix).into_iter() { names.insert(var); } let path = Path::new(&path); if path.is_file() { let content = std::fs::read_to_string(&path).context(format!("read {:?} fail", path))?; for var in get_variables(&content, prefix).into_iter() { names.insert(var); } } } Ok(names.into_iter().collect()) } #[context(fn)] fn ask_names(names: Vec<String>) -> Result<HashMap<String, String>, failure::Error> { let mut values = HashMap::new(); use dialoguer::{theme::CustomPromptCharacterTheme, Input}; let theme = CustomPromptCharacterTheme::new('>'); for n in names { let input: String = Input::with_theme(&theme).with_prompt(&n).interact()?; values.insert(n, input); } return Ok(values); } #[context(fn)] fn render_template_with_prefix(path: &Path, prefix: &str) -> Result<(), failure::Error> { let mut names = pick_names(path, prefix)?; let name = path .file_name() .ok_or(failure::err_msg("could nt find file name"))?; let name = name.to_string_lossy().to_string(); let _removed = names .iter() .position(|n| n == "name") .map(|e| names.remove(e)) .is_some(); let mut values = 
ask_names(names)?; values.insert("name".to_string(), name); _render_template(path, prefix, &values)?; Ok(()) } #[context(fn)] fn _render_template( path: &Path, prefix: &str, values: &HashMap<String, String>, ) -> Result<(), failure::Error> { use simple_replace_templete_engine::render; use std::fs; use walkdir::WalkDir; for entry in WalkDir::new(path).into_iter().filter_map(|e| e.ok()) { let path = entry.path().display().to_string(); let new_path = render(&path, prefix, &values)?; if new_path != path { fs::rename(&path, &new_path)?; } let new_path = Path::new(&new_path); if new_path.is_file() { let content = fs::read_to_string(&new_path)?; let new_content = render(&content, prefix, &values)?; if new_content != content { fs::write(&new_path, new_content)?; } } } Ok(()) } fn get_git_path(root_path: &Path) -> Result<Vec<PathBuf>, failure::Error> { let mut git_paths = vec![]; for e in root_path.read_dir()? { let e = e?; let e_path = e.path(); if e_path.is_dir() && e_path.join(".git").exists() { git_paths.push(e_path); } } Ok(git_paths) } fn git_update(git_path: &Path) -> Result<(), failure::Error> { use std::process::Command; println!("{:?}", git_path); let out = Command::new("git") .args(vec!["pull", "origin", "master"]) .current_dir(git_path) .output()?; if !out.status.success() { return Err(failure::format_err!("{:?}", out)); } Ok(()) } impl TemplateConfigLock { #[context(fn)] fn new(path: &Path) -> Result<Self, failure::Error> { let path = Self::init(&path)?; let mut s = Self { root_path: path.to_path_buf(), metas: vec![], }; let lock_path = s.root_path.join(".tpm.lock"); if lock_path.exists() { s.init_from_file(&lock_path)?; } else { s.reindex()?; } return Ok(s); } #[context(fn)] fn reindex(&mut self) -> Result<(), failure::Error> { println!("reindex"); let metas = generate_metas(&self.root_path)?; self.metas = metas; self.save_to_file(&self.root_path.join(".tpm.lock"))?; Ok(()) } #[context(fn)] fn init(path: &Path) -> Result<PathBuf, failure::Error> { if 
!path.exists() { std::fs::create_dir_all(path)? } std::fs::write(path.join(".tpm"), r#"{"kind":"root"}"#)?; let path = path.canonicalize()?; Ok(path) } #[context(fn)] fn do_add(&mut self, add: AddKind) -> Result<(), failure::Error> { if let AddKind::Local(path) = add { return self.do_add_local_file(&path); } if let AddKind::Git(url) = add { return self.do_add_git(&url); } return Ok(()); } #[context(fn)] fn do_update(&mut self) -> Result<(), failure::Error> { for git in get_git_path(&self.root_path)? { println!("update {:?}", git); git_update(&git)?; } self.reindex()?; Ok(()) } #[context(fn)] fn do_add_git(&mut self, url: &str) -> Result<(), failure::Error> { println!("do_add_git {}", url); git_clone_or_update_master(url, &self.root_path)?; self.reindex()?; Ok(()) } #[context(fn)] fn do_add_local_file(&mut self, path: &Path) -> Result<(), failure::Error> { if !path.exists() { return Err(failure::err_msg("could not find this local template")); } copy_dir(path, &self.root_path)?; self.reindex()?; Ok(()) } #[context(fn)] fn do_new(&self, id: String, expect_path: String) -> Result<(), failure::Error> { let template = self .metas .iter() .find(|t| t.name == id) .ok_or_else(|| failure::format_err!("could not find template {}", id))?; let expect_path = Path::new(&expect_path); copy_dir(Path::new(&template.path), expect_path)?; render_template(&expect_path)?; Ok(()) } fn do_list_tag(&self) { println!("do list tag"); use std::collections::HashSet; let mut set = HashSet::new(); for m in self.metas.iter() { for t in m.tag.iter() { set.insert(t); } } for t in set { println!("tag: {}", t); } } fn do_list_template(&self) { println!("do list template"); for m in self.metas.iter() { println!("{:?}", m); } } fn do_search(&self, tag: Vec<String>) { let search = Searable::from(self.metas.clone()); let tag: Vec<&str> = tag.iter().map(AsRef::as_ref).collect(); let templates = search.search(tag); for t in templates { println!("{:?}", t); } } } impl TemplateConfigLock { #[context(fn)] fn 
init_from_file(&mut self, path: &Path) -> Result<(), failure::Error> { if path.exists() { let data = std::fs::read_to_string(path)?; return self.init_from_str(&data); } return Ok(()); } #[context(fn)] fn init_from_str(&mut self, json_str: &str) -> Result<(), failure::Error> { let metas: Vec<Meta> = serde_json::from_str(json_str)?; self.metas = metas; Ok(()) } #[context(fn)] fn into_str(&self) -> Result<String, failure::Error> { let res = serde_json::to_string(&self.metas)?; Ok(res) } #[context(fn)] fn save_to_file(&self, path: &Path) -> Result<(), failure::Error> { let json_str = self.into_str()?; std::fs::write(path, json_str.as_bytes())?; Ok(()) } } struct Searable<T> where T: Tag + Clone, { hash: HashMap<String, Vec<NonNull<T>>>, data: Vec<T>, } impl<T> Searable<T> where T: Tag + Clone, { fn get_refs(self: &Pin<Box<Self>>, tag: &str) -> Vec<NonNull<T>> { let lis = self.hash.get(tag).unwrap_or(&vec![]).to_vec(); return lis.into_iter().collect(); } fn search(self: &Pin<Box<Self>>, keywords: Vec<&str>) -> Vec<T> where T: Ord + Debug, { use itertools::concat; let refs: Vec<Vec<NonNull<T>>> = keywords.iter().map(|tag| self.get_refs(tag)).collect(); let refs = concat(refs); let refs = count_and_sort(refs); return unsafe { clone_from_nonnull(refs) }; } fn from(data: Vec<T>) -> Pin<Box<Self>> { let mut res = Box::pin(Self { hash: HashMap::new(), data, }); let mut hash: HashMap<String, Vec<NonNull<T>>> = HashMap::new(); for meta in res.data.iter() { let meta = NonNull::from(meta); for t in unsafe { meta.as_ref() }.get_tags() { hash.entry(t) .and_modify(|v| v.push(meta)) .or_insert_with(|| vec![meta]); } } unsafe { let mut_ref: Pin<&mut Self> = Pin::as_mut(&mut res); Pin::get_unchecked_mut(mut_ref).hash = hash; } res } } #[cfg(test)] mod tests { use super::*; use filesystem::FakeFileSystem; use filesystem::TempDir; use filesystem::TempFileSystem; #[test] fn test_get() { #[derive(Debug, Clone, PartialOrd, Ord, Eq, PartialEq)] struct Meta { tag: Vec<String>, name: String, } 
impl Tag for Meta { fn get_tags(&self) -> Vec<String> { return self.tag.clone(); } } let m1 = Meta { tag: vec!["t3".to_string()], name: "m1".to_string(), }; let m2 = Meta { tag: vec!["t1".to_string(), "t2".to_string()], name: "m2".to_string(), }; let m3 = Meta { tag: vec!["t1".to_string(), "t2".to_string()], name: "m3".to_string(), }; let s = Searable::from(vec![m1.clone(), m2.clone(), m3.clone()]); let l = s.search(vec!["t3"]); assert_eq!(l, vec![m1.clone()]); } #[test] fn test_search() { #[derive(Debug, Clone, Eq, PartialEq)] struct Meta { tag: Vec<String>, name: String, } impl Tag for Meta { fn get_tags(&self) -> Vec<String> { return self.tag.clone(); } } impl Ord for Meta { fn cmp(&self, other: &Self) -> Ordering { self.name.cmp(&other.name) } } impl PartialOrd for Meta { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) } } let m1 = Meta { tag: vec!["t1".to_string()], name: "m1".to_string(), }; let m2 = Meta { tag: vec!["t1".to_string(), "t2".to_string()], name: "m2".to_string(), }; let m3 = Meta { tag: vec!["t1".to_string(), "t3".to_string()], name: "m3".to_string(), }; let s = Searable::from(vec![m1.clone(), m2.clone(), m3.clone()]); let l = s.search(vec!["t2", "t1"]); assert_eq!(l, vec![m2.clone(), m3.clone(), m1.clone(),]); let m1 = Meta { tag: vec!["t1".to_string()], name: "m1".to_string(), }; let m2 = Meta { tag: vec!["t1".to_string(), "t2".to_string()], name: "m2".to_string(), }; let m3 = Meta { tag: vec!["t2".to_string(), "t3".to_string()], name: "m3".to_string(), }; let s = Searable::from(vec![m1.clone(), m2.clone(), m3.clone()]); let l = s.search(vec!["t2", "t3"]); assert_eq!(l, vec![m3.clone(), m2.clone()]); println!("{:?} {:?}", std::line!(), l); } fn do_assert_fs(root: &Path, mock_fs: Vec<(&str, &str, &str)>) { for (kind, path, content) in mock_fs { println!("k {} p {} c {}", kind, path, content); if kind == "f" { let full_path = root.join(path); let real_content = std::fs::read_to_string(&full_path).unwrap(); 
assert_eq!(real_content, content); } if kind == "d" { let full_path = root.join(path); assert_eq!(full_path.is_dir(), true); } } } fn do_mock_fs(root: &Path, mock_fs: Vec<(&str, &str, &str)>) { for (kind, path, content) in mock_fs { println!("k {} p {} c {}", kind, path, content); if kind == "f" { let full_path = root.join(path); let parent_dir = full_path.parent().unwrap(); std::fs::create_dir_all(parent_dir).unwrap(); std::fs::write(full_path, content); } } } fn assert_meta_eq( root_path: &Path, real_metas: Vec<Meta>, expect_metas: Vec<(&str, Vec<&str>, &str)>, ) { let expect_metas: Vec<Meta> = expect_metas .into_iter() .map(|(name, tags, path)| { let abs_path = root_path.join(path).canonicalize().unwrap(); Meta { name: name.to_string(), tag: tags.into_iter().map(|s| s.to_string()).collect(), path: abs_path.to_string_lossy().to_string(), } }) .collect(); assert_eq!(real_metas, expect_metas); } fn assert_generate_locks( mock_fs: Vec<(&str, &str, &str)>, expect_metas: Vec<(&str, Vec<&str>, &str)>, ) { let fake_fs = FakeFileSystem::new().temp_dir("test_tpm").unwrap(); let root_path = fake_fs.path(); let tpm_path = root_path.join(".tpm"); do_mock_fs(&root_path, mock_fs); let real_metas = generate_metas(&tpm_path).unwrap(); assert_meta_eq(root_path, real_metas, expect_metas); } #[ignore] #[test] fn test_git_update() { println!("test git update"); let home_dir = dirs::home_dir().unwrap(); git_update(&home_dir.join(".tpm").join("t")); assert_eq!(true, true); } #[ignore] #[test] fn test_get_git_path() { println!("print all git paht"); let home_dir = dirs::home_dir().unwrap(); get_git_path(&home_dir.join(".tpm")); assert_eq!(true, true); } #[test] fn test_generate_lock() { let mock_fs = vec![ ( "f", ".tpm/.tpm", r#" { "kind":"root" } "#, ), ( "f", ".tpm/a/.tpm", r#" { "kind":"mutli" } "#, ), ( "f", ".tpm/a/b/.tpm", r#" { "tag":["b"], "kind":"single" } "#, ), ( "f", ".tpm/a/c/.tpm", r#" { "tag":["c"], "kind":"single" } "#, ), ]; let expect_metas = vec![ ("a-b", vec!["b"], 
".tpm/a/b"), ("a-c", vec!["c"], ".tpm/a/c"), ]; assert_generate_locks(mock_fs, expect_metas) } #[ignore] #[test] fn test_git_clone_or_update_master() { git_clone_or_update_master( "https://github.com/woodgear/t.git", Path::new("/home/oaa/.tpm"), ) .unwrap(); } #[test] fn test_add_local_file() { let local_dir_guard = FakeFileSystem::new().temp_dir("test_add_local").unwrap(); let local_path = local_dir_guard.path(); let app_dir_guard = FakeFileSystem::new() .temp_dir("test_add_local_app") .unwrap(); let app_path = app_dir_guard.path(); do_mock_fs( local_path, vec![ ("f", "a/.tpm", r#"{"kind":"mutli"}"#), ("f", "a/b/.tpm", r#"{"kind":"single","tag":["1"]}"#), ("f", "a/c/.tpm", r#"{"kind":"single","tag":["2"]}"#), ], ); println!("{:?}", app_path); let mut app = TemplateConfigLock::new(&app_path.join(".tpm")).unwrap(); app.do_add_local_file(&local_path.join("a")); let metas = app.metas; assert_meta_eq( app_path, metas, vec![ ("a-b", vec!["1"], ".tpm/a/b"), ("a-c", vec!["2"], ".tpm/a/c"), ], ); let local_dir_guard = FakeFileSystem::new().temp_dir("test_add_local").unwrap(); let local_path = local_dir_guard.path(); let app_dir_guard = FakeFileSystem::new() .temp_dir("test_add_local_app") .unwrap(); let app_path = app_dir_guard.path(); do_mock_fs( local_path, vec![("f", "a/.tpm", r#"{"kind":"single","tag":["123"]}"#)], ); let mut app = TemplateConfigLock::new(&app_path.join(".tpm")).unwrap(); app.do_add_local_file(&local_path.join("a")); let metas = app.metas; assert_meta_eq(app_path, metas, vec![("a", vec!["123"], ".tpm/a")]); } #[test] fn test_copy_dir() { let local_dir_guard = FakeFileSystem::new().temp_dir("test_copy_dir").unwrap(); let local_path = local_dir_guard.path(); do_mock_fs( local_path, vec![ ("f", "a/.tpm", r#"{"kind":"mutli"}"#), ("f", "a/b/.tpm", r#"{"kind":"single","tag":["1"]}"#), ("f", "a/c/.tpm", r#"{"kind":"single","tag":["2"]}"#), ], ); println!("local path {:?}", local_path); copy_dir(&local_path.join("a"), &local_path.join("a1")).unwrap(); 
do_assert_fs( local_path, vec![ ("f", "a1/.tpm", r#"{"kind":"mutli"}"#), ("f", "a1/b/.tpm", r#"{"kind":"single","tag":["1"]}"#), ("f", "a1/c/.tpm", r#"{"kind":"single","tag":["2"]}"#), ], ); } #[ignore] #[test] fn test_ask_name() { use maplit::hashmap; println!("{:?}", "test ask name"); let vals = ask_names(vec!["name".to_owned(), "age".to_owned()]).unwrap(); assert_eq!( vals, hashmap! { "name".to_owned() => "1 2".to_owned(), "age".to_owned() => "12".to_owned(), } ); } #[test] fn test_render_template() { let local_dir_guard = FakeFileSystem::new() .temp_dir("test_render_template") .unwrap(); let local_path = local_dir_guard.path(); do_mock_fs( local_path, vec![ ("f", "a/.tpm", r#"{"kind":"signle"}"#), ("f", "a/b.txt", r#"my name is _t_name_t_ what is you name?"#), ( "f", "a/c.txt", r#"you name is _t_name_t_ my name is _t_second_name_t_,nice to see you"#, ), ], ); let names = pick_names(&local_path, "_t_").unwrap(); assert_eq!(names, vec!["name".to_owned(), "second_name".to_owned()]); use maplit::hashmap; let values = hashmap! { "name".to_string() => "a (with space) b".to_string(), "second_name".to_string() => "normal".to_string(), }; _render_template(local_path, "_t_", &values).unwrap(); do_assert_fs( local_path, vec![ ("f", "a/.tpm", r#"{"kind":"signle"}"#), ( "f", "a/b.txt", r#"my name is a (with space) b what is you name?"#, ), ( "f", "a/c.txt", r#"you name is a (with space) b my name is normal,nice to see you"#, ), ], ); } }
true
73926957ab5e08658057677b3da36321b40482c5
Rust
chromium/chromium
/third_party/rust/rstest_macros/v0_17/crate/src/lib.rs
UTF-8
31,130
3.03125
3
[ "Apache-2.0", "MIT", "BSD-3-Clause", "GPL-1.0-or-later", "LGPL-2.0-or-later" ]
permissive
#![cfg_attr(use_proc_macro_diagnostic, feature(proc_macro_diagnostic))] extern crate proc_macro; // Test utility module #[cfg(test)] pub(crate) mod test; #[cfg(test)] use rstest_reuse; #[macro_use] mod error; mod parse; mod refident; mod render; mod resolver; mod utils; use syn::{parse_macro_input, ItemFn}; use crate::parse::{fixture::FixtureInfo, rstest::RsTestInfo}; use parse::ExtendWithFunctionAttrs; use quote::ToTokens; /// Define a fixture that you can use in all `rstest`'s test arguments. You should just mark your /// function as `#[fixture]` and then use it as a test's argument. Fixture functions can also /// use other fixtures. /// /// Let's see a trivial example: /// /// ``` /// use rstest::*; /// /// #[fixture] /// fn twenty_one() -> i32 { 21 } /// /// #[fixture] /// fn two() -> i32 { 2 } /// /// #[fixture] /// fn injected(twenty_one: i32, two: i32) -> i32 { twenty_one * two } /// /// #[rstest] /// fn the_test(injected: i32) { /// assert_eq!(42, injected) /// } /// ``` /// /// If the fixture function is an [`async` function](#async) your fixture become an `async` /// fixture. /// /// # Default values /// /// If you need to define argument default value you can use `#[default(expression)]` /// argument's attribute: /// /// ``` /// use rstest::*; /// /// #[fixture] /// fn injected( /// #[default(21)] /// twenty_one: i32, /// #[default(1 + 1)] /// two: i32 /// ) -> i32 { twenty_one * two } /// /// #[rstest] /// fn the_test(injected: i32) { /// assert_eq!(42, injected) /// } /// ``` /// The `expression` could be any valid rust expression, even an `async` block if you need. /// Moreover, if the type implements `FromStr` trait you can use a literal string to build it. 
/// /// ``` /// # use rstest::*; /// # use std::net::SocketAddr; /// # struct DbConnection {} /// #[fixture] /// fn db_connection( /// #[default("127.0.0.1:9000")] /// addr: SocketAddr /// ) -> DbConnection { /// // create connection /// # DbConnection{} /// } /// ``` /// /// # Async /// /// If you need you can write `async` fixtures to use in your `async` tests. Simply use `async` /// keyword for your function and the fixture become an `async` fixture. /// /// ``` /// use rstest::*; /// /// #[fixture] /// async fn async_fixture() -> i32 { 42 } /// /// /// #[rstest] /// async fn the_test(#[future] async_fixture: i32) { /// assert_eq!(42, async_fixture.await) /// } /// ``` /// The `#[future]` argument attribute helps to remove the `impl Future<Output = T>` boilerplate. /// In this case the macro expands it in: /// /// ``` /// # use rstest::*; /// # use std::future::Future; /// # #[fixture] /// # async fn async_fixture() -> i32 { 42 } /// #[rstest] /// async fn the_test(async_fixture: impl std::future::Future<Output = i32>) { /// assert_eq!(42, async_fixture.await) /// } /// ``` /// If you need, you can use `#[future]` attribute also with an implicit lifetime reference /// because the macro will replace the implicit lifetime with an explicit one. /// /// # Rename /// /// Sometimes you want to have long and descriptive name for your fixture but you prefer to use a much /// shorter name for argument that represent it in your fixture or test. You can rename the fixture /// using `#[from(short_name)]` attribute like following example: /// /// ``` /// use rstest::*; /// /// #[fixture] /// fn long_and_boring_descriptive_name() -> i32 { 42 } /// /// #[rstest] /// fn the_test(#[from(long_and_boring_descriptive_name)] short: i32) { /// assert_eq!(42, short) /// } /// ``` /// /// # `#[once]` Fixture /// /// Expecially in integration tests there are cases where you need a fixture that is called just once /// for every tests. 
`rstest` provides `#[once]` attribute for these cases. /// /// If you mark your fixture with this attribute, then `rstest` will compute a static reference to your /// fixture result and return this reference to all your tests that need this fixture. /// /// In follow example all tests share the same reference to the `42` static value. /// /// ``` /// use rstest::*; /// /// #[fixture] /// #[once] /// fn once_fixture() -> i32 { 42 } /// /// // Take care!!! You need to use a reference to the fixture value /// /// #[rstest] /// #[case(1)] /// #[case(2)] /// fn cases_tests(once_fixture: &i32, #[case] v: i32) { /// // Take care!!! You need to use a reference to the fixture value /// assert_eq!(&42, once_fixture) /// } /// /// #[rstest] /// fn single(once_fixture: &i32) { /// assert_eq!(&42, once_fixture) /// } /// ``` /// /// There are some limitations when you use `#[once]` fixture. `rstest` forbid to use once fixture /// for: /// /// - `async` function /// - Generic function (both with generic types or use `impl` trait) /// /// Take care that the `#[once]` fixture value will **never be dropped**. /// /// # Partial Injection /// /// You can also partialy inject fixture dependency using `#[with(v1, v2, ..)]` attribute: /// /// ``` /// use rstest::*; /// /// #[fixture] /// fn base() -> i32 { 1 } /// /// #[fixture] /// fn first(base: i32) -> i32 { 1 * base } /// /// #[fixture] /// fn second(base: i32) -> i32 { 2 * base } /// /// #[fixture] /// fn injected(first: i32, #[with(3)] second: i32) -> i32 { first * second } /// /// #[rstest] /// fn the_test(injected: i32) { /// assert_eq!(-6, injected) /// } /// ``` /// Note that injected value can be an arbitrary rust expression. `#[with(v1, ..., vn)]` /// attribute will inject `v1, ..., vn` expression as fixture arguments: all remaining arguments /// will be resolved as fixtures. 
/// /// Sometimes the return type cannot be infered so you must define it: For the few times you may /// need to do it, you can use the `#[default(type)]`, `#[partial_n(type)]` function attribute /// to define it: /// /// ``` /// use rstest::*; /// # use std::fmt::Debug; /// /// #[fixture] /// pub fn i() -> u32 { /// 42 /// } /// /// #[fixture] /// pub fn j() -> i32 { /// -42 /// } /// /// #[fixture] /// #[default(impl Iterator<Item=(u32, i32)>)] /// #[partial_1(impl Iterator<Item=(I,i32)>)] /// pub fn fx<I, J>(i: I, j: J) -> impl Iterator<Item=(I, J)> { /// std::iter::once((i, j)) /// } /// /// #[rstest] /// fn resolve_by_default(mut fx: impl Iterator<Item=(u32, i32)>) { /// assert_eq!((42, -42), fx.next().unwrap()) /// } /// /// #[rstest] /// fn resolve_partial(#[with(42.0)] mut fx: impl Iterator<Item=(f32, i32)>) { /// assert_eq!((42.0, -42), fx.next().unwrap()) /// } /// ``` /// `partial_i` is the fixture used when you inject the first `i` arguments in test call. /// /// # Old _compact_ syntax /// /// There is also a compact form for all previous features. This will mantained for a long time /// but for `fixture` I strongly recomand to migrate your code because you'll pay a little /// verbosity but get back a more readable code. /// /// Follow the previous examples in old _compact_ syntax. 
/// /// ## Default /// ``` /// # use rstest::*; /// #[fixture(twenty_one=21, two=2)] /// fn injected(twenty_one: i32, two: i32) -> i32 { twenty_one * two } /// ``` /// /// ## Rename /// ``` /// # use rstest::*; /// #[fixture] /// fn long_and_boring_descriptive_name() -> i32 { 42 } /// /// #[rstest(long_and_boring_descriptive_name as short)] /// fn the_test(short: i32) { /// assert_eq!(42, short) /// } /// ``` /// /// ## Partial Injection /// ``` /// # use rstest::*; /// # #[fixture] /// # fn base() -> i32 { 1 } /// # /// # #[fixture] /// # fn first(base: i32) -> i32 { 1 * base } /// # /// # #[fixture] /// # fn second(base: i32) -> i32 { 2 * base } /// # /// #[fixture(second(-3))] /// fn injected(first: i32, second: i32) -> i32 { first * second } /// ``` /// ## Partial Type Injection /// ``` /// # use rstest::*; /// # use std::fmt::Debug; /// # /// # #[fixture] /// # pub fn i() -> u32 { /// # 42 /// # } /// # /// # #[fixture] /// # pub fn j() -> i32 { /// # -42 /// # } /// # /// #[fixture(::default<impl Iterator<Item=(u32, i32)>>::partial_1<impl Iterator<Item=(I,i32)>>)] /// pub fn fx<I, J>(i: I, j: J) -> impl Iterator<Item=(I, J)> { /// std::iter::once((i, j)) /// } /// ``` #[proc_macro_attribute] pub fn fixture( args: proc_macro::TokenStream, input: proc_macro::TokenStream, ) -> proc_macro::TokenStream { let mut info: FixtureInfo = parse_macro_input!(args as FixtureInfo); let mut fixture = parse_macro_input!(input as ItemFn); let extend_result = info.extend_with_function_attrs(&mut fixture); let mut errors = error::fixture(&fixture, &info); if let Err(attrs_errors) = extend_result { attrs_errors.to_tokens(&mut errors); } if errors.is_empty() { render::fixture(fixture, info) } else { errors } .into() } /// The attribute that you should use for your tests. 
Your /// annotated function's arguments can be /// [injected](attr.rstest.html#injecting-fixtures) with /// [`[fixture]`](macro@fixture)s, provided by /// [parametrized cases](attr.rstest.html#test-parametrized-cases) /// or by [value lists](attr.rstest.html#values-lists). /// /// `rstest` attribute can be applied to _any_ function and you can customize its /// parameters by using function and arguments attributes. /// /// Your test function can use generics, `impl` or `dyn` and like any kind of rust tests: /// /// - return results /// - marked by `#[should_panic]` attribute /// /// If the test function is an [`async` function](#async) `rstest` will run all tests as `async` /// tests. You can use it just with `async-std` and you should include `attributes` in /// `async-std`'s features. /// /// In your test function you can: /// /// - [injecting fixtures](#injecting-fixtures) /// - Generate [parametrized test cases](#test-parametrized-cases) /// - Generate tests for each combination of [value lists](#values-lists) /// /// ## Injecting Fixtures /// /// The simplest case is write a test that can be injected with /// [`[fixture]`](macro@fixture)s. You can just declare all used fixtures by passing /// them as a function's arguments. This can help your test to be neat /// and make your dependecy clear. 
/// /// ``` /// use rstest::*; /// /// #[fixture] /// fn injected() -> i32 { 42 } /// /// #[rstest] /// fn the_test(injected: i32) { /// assert_eq!(42, injected) /// } /// ``` /// /// [`[rstest]`](macro@rstest) procedural macro will desugar it to something that isn't /// so far from /// /// ``` /// #[test] /// fn the_test() { /// let injected=injected(); /// assert_eq!(42, injected) /// } /// ``` /// /// If you want to use long and descriptive names for your fixture but prefer to use /// shorter names inside your tests you use rename feature described in /// [fixture rename](attr.fixture.html#rename): /// /// ``` /// use rstest::*; /// /// #[fixture] /// fn long_and_boring_descriptive_name() -> i32 { 42 } /// /// #[rstest] /// fn the_test(#[from(long_and_boring_descriptive_name)] short: i32) { /// assert_eq!(42, short) /// } /// ``` /// /// Sometimes is useful to have some parametes in your fixtures but your test would /// override the fixture's default values in some cases. Like in /// [fixture partial injection](attr.fixture.html#partial-injection) you use `#[with]` /// attribute to indicate some fixture's arguments also in `rstest`. /// /// ``` /// # struct User(String, u8); /// # impl User { fn name(&self) -> &str {&self.0} } /// use rstest::*; /// /// #[fixture] /// fn user( /// #[default("Alice")] name: impl AsRef<str>, /// #[default(22)] age: u8 /// ) -> User { User(name.as_ref().to_owned(), age) } /// /// #[rstest] /// fn check_user(#[with("Bob")] user: User) { /// assert_eq("Bob", user.name()) /// } /// ``` /// /// ## Test Parametrized Cases /// /// If you would execute your test for a set of input data cases /// you can define the arguments to use and the cases list. Let see /// the classical Fibonacci example. In this case we would give the /// `input` value and the `expected` result for a set of cases to test. 
/// /// ``` /// use rstest::rstest; /// /// #[rstest] /// #[case(0, 0)] /// #[case(1, 1)] /// #[case(2, 1)] /// #[case(3, 2)] /// #[case(4, 3)] /// fn fibonacci_test(#[case] input: u32,#[case] expected: u32) { /// assert_eq!(expected, fibonacci(input)) /// } /// /// fn fibonacci(input: u32) -> u32 { /// match input { /// 0 => 0, /// 1 => 1, /// n => fibonacci(n - 2) + fibonacci(n - 1) /// } /// } /// ``` /// /// `rstest` will produce 5 indipendent tests and not just one that /// check every case. Every test can fail indipendently and `cargo test` /// will give follow output: /// /// ```text /// running 5 tests /// test fibonacci_test::case_1 ... ok /// test fibonacci_test::case_2 ... ok /// test fibonacci_test::case_3 ... ok /// test fibonacci_test::case_4 ... ok /// test fibonacci_test::case_5 ... ok /// /// test result: ok. 5 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out /// ``` /// /// The cases input values can be arbitrary Rust expresions that return the /// argument type. /// /// ``` /// use rstest::rstest; /// /// fn sum(a: usize, b: usize) -> usize { a + b } /// /// #[rstest] /// #[case("foo", 3)] /// #[case(String::from("foo"), 2 + 1)] /// #[case(format!("foo"), sum(2, 1))] /// fn test_len(#[case] s: impl AsRef<str>,#[case] len: usize) { /// assert_eq!(s.as_ref().len(), len); /// } /// ``` /// /// ### Magic Conversion /// /// You can use the magic conversion feature every time you would define a variable /// where its type define `FromStr` trait: test will parse the string to build the value. 
/// /// ``` /// # use rstest::rstest; /// # use std::path::PathBuf; /// # fn count_words(path: PathBuf) -> usize {0} /// #[rstest] /// #[case("resources/empty", 0)] /// #[case("resources/divine_commedy", 101.698)] /// fn test_count_words(#[case] path: PathBuf, #[case] expected: usize) { /// assert_eq!(expected, count_words(path)) /// } /// ``` /// /// ### Optional case description /// /// Optionally you can give a _description_ to every case simple by follow `case` /// with `::my_case_description` where `my_case_description` should be a a valid /// Rust ident. /// /// ``` /// # use rstest::*; /// #[rstest] /// #[case::zero_base_case(0, 0)] /// #[case::one_base_case(1, 1)] /// #[case(2, 1)] /// #[case(3, 2)] /// fn fibonacci_test(#[case] input: u32,#[case] expected: u32) { /// assert_eq!(expected, fibonacci(input)) /// } /// /// # fn fibonacci(input: u32) -> u32 { /// # match input { /// # 0 => 0, /// # 1 => 1, /// # n => fibonacci(n - 2) + fibonacci(n - 1) /// # } /// # } /// ``` /// /// Outuput will be /// ```text /// running 4 tests /// test fibonacci_test::case_1_zero_base_case ... ok /// test fibonacci_test::case_2_one_base_case ... ok /// test fibonacci_test::case_3 ... ok /// test fibonacci_test::case_4 ... ok /// /// test result: ok. 4 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out /// ``` /// /// ### Use specific `case` attributes /// /// Every function's attributes that preceding a `#[case]` attribute will /// be used in this test case and all function's attributes that follow the /// last `#[case]` attribute will mark all test cases. /// /// This feature can be use to mark just some cases as `should_panic` /// and choose to have a fine grain on expected panic messages. /// /// In follow example we run 3 tests where the first pass without any /// panic, in the second we catch a panic but we don't care about the message /// and in the third one we also check the panic message. 
/// /// ``` /// use rstest::rstest; /// /// #[rstest] /// #[case::no_panic(0)] /// #[should_panic] /// #[case::panic(1)] /// #[should_panic(expected="expected")] /// #[case::panic_with_message(2)] /// fn attribute_per_case(#[case] val: i32) { /// match val { /// 0 => assert!(true), /// 1 => panic!("No catch"), /// 2 => panic!("expected"), /// _ => unreachable!(), /// } /// } /// ``` /// /// Output: /// /// ```text /// running 3 tests /// test attribute_per_case::case_1_no_panic ... ok /// test attribute_per_case::case_3_panic_with_message ... ok /// test attribute_per_case::case_2_panic ... ok /// /// test result: ok. 3 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out /// ``` /// /// To mark all your tests as `#[should_panic]` use: /// /// ``` /// # use rstest::rstest; /// #[rstest] /// #[case(1)] /// #[case(2)] /// #[case(3)] /// #[should_panic] /// fn fail(#[case] v: u32) { assert_eq!(0, v) } /// ``` /// /// ## Values Lists /// /// Another useful way to write a test and execute it for some values /// is to use the values list syntax. This syntax can be usefull both /// for a plain list and for testing all combination of input arguments. /// /// ``` /// # use rstest::*; /// # fn is_valid(input: &str) -> bool { true } /// /// #[rstest] /// fn should_be_valid( /// #[values("Jhon", "alice", "My_Name", "Zigy_2001")] /// input: &str /// ) { /// assert!(is_valid(input)) /// } /// ``` /// /// or /// /// ``` /// # use rstest::*; /// # fn valid_user(name: &str, age: u8) -> bool { true } /// /// #[rstest] /// fn should_accept_all_corner_cases( /// #[values("J", "A", "A________________________________________21")] /// name: &str, /// #[values(14, 100)] /// age: u8 /// ) { /// assert!(valid_user(name, age)) /// } /// ``` /// where `cargo test` output is /// /// ```text /// test should_accept_all_corner_cases::name_1___J__::age_2_100 ... ok /// test should_accept_all_corner_cases::name_2___A__::age_1_14 ... 
ok /// test should_accept_all_corner_cases::name_2___A__::age_2_100 ... ok /// test should_accept_all_corner_cases::name_3___A________________________________________21__::age_2_100 ... ok /// test should_accept_all_corner_cases::name_3___A________________________________________21__::age_1_14 ... ok /// test should_accept_all_corner_cases::name_1___J__::age_1_14 ... ok /// /// test result: ok. 6 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s /// ``` /// Note that the test names contains the given expression sanitized into /// a valid Rust identifier name. This should help to identify wich case fails. /// /// /// Also value list implements the magic conversion feature: every time the value type /// implements `FromStr` trait you can use a literal string to define it. /// /// ## Use Parametrize definition in more tests /// /// If you need to use a test list for more than one test you can use /// [`rstest_reuse`](https://crates.io/crates/rstest_reuse) crate. /// With this helper crate you can define a template and use it everywhere. /// /// ``` /// # use rstest::rstest; /// # use std::net::SocketAddr; /// #[rstest] /// fn given_port(#[values("1.2.3.4:8000", "4.3.2.1:8000", "127.0.0.1:8000")] addr: SocketAddr) { /// assert_eq(8000, addr.port()) /// } /// ``` /// /// ```rust,ignore /// use rstest::rstest; /// use rstest_reuse::{self, *}; /// /// #[template] /// #[rstest] /// #[case(2, 2)] /// #[case(4/2, 2)] /// fn two_simple_cases(#[case] a: u32, #[case] b: u32) {} /// /// #[apply(two_simple_cases)] /// fn it_works(#[case] a: u32,#[case] b: u32) { /// assert!(a == b); /// } /// ``` /// /// See [`rstest_reuse`](https://crates.io/crates/rstest_reuse) for more dettails. /// /// ## Async /// /// `rstest` provides out of the box `async` support. Just mark your /// test function as `async` and it'll use `#[async-std::test]` to /// annotate it. 
This feature can be really useful to build async /// parametric tests using a tidy syntax: /// /// ``` /// use rstest::*; /// # async fn async_sum(a: u32, b: u32) -> u32 { a + b } /// /// #[rstest] /// #[case(5, 2, 3)] /// #[should_panic] /// #[case(42, 40, 1)] /// async fn my_async_test(#[case] expected: u32, #[case] a: u32, #[case] b: u32) { /// assert_eq!(expected, async_sum(a, b).await); /// } /// ``` /// /// Currently only `async-std` is supported out of the box. But if you need to use /// another runtime that provide it's own test attribute (i.e. `tokio::test` or /// `actix_rt::test`) you can use it in your `async` test like described in /// [Inject Test Attribute](attr.rstest.html#inject-test-attribute). /// /// To use this feature, you need to enable `attributes` in the `async-std` /// features list in your `Cargo.toml`: /// /// ```toml /// async-std = { version = "1.5", features = ["attributes"] } /// ``` /// /// If your test input is an async value (fixture or test parameter) you can use `#[future]` /// attribute to remove `impl Future<Output = T>` boilerplate and just use `T`: /// /// ``` /// use rstest::*; /// #[fixture] /// async fn base() -> u32 { 42 } /// /// #[rstest] /// #[case(21, async { 2 })] /// #[case(6, async { 7 })] /// async fn my_async_test(#[future] base: u32, #[case] expected: u32, #[future] #[case] div: u32) { /// assert_eq!(expected, base.await / div.await); /// } /// ``` /// /// As you noted you should `.await` all _future_ values and this some times can be really boring. /// In this case you can use `#[timeout(awt)]` to _awaiting_ an input or annotating your function /// with `#[awt]` attributes to globally `.await` all your _future_ inputs. 
Previous code can be /// simplified like follow: /// /// ``` /// use rstest::*; /// # #[fixture] /// # async fn base() -> u32 { 42 } /// /// #[rstest] /// #[case(21, async { 2 })] /// #[case(6, async { 7 })] /// #[awt] /// async fn global(#[future] base: u32, #[case] expected: u32, #[future] #[case] div: u32) { /// assert_eq!(expected, base / div); /// } /// /// #[rstest] /// #[case(21, async { 2 })] /// #[case(6, async { 7 })] /// async fn single(#[future] base: u32, #[case] expected: u32, #[future(awt)] #[case] div: u32) { /// assert_eq!(expected, base.await / div); /// } /// ``` /// /// ### Test `#[timeout()]` /// /// You can define an execution timeout for your tests with `#[timeout(<duration>)]` attribute. Timeouts /// works both for sync and async tests and is runtime agnostic. `#[timeout(<duration>)]` take an /// expression that should return a `std::time::Duration`. Follow a simple async example: /// /// ```rust /// use rstest::*; /// use std::time::Duration; /// /// async fn delayed_sum(a: u32, b: u32,delay: Duration) -> u32 { /// async_std::task::sleep(delay).await; /// a + b /// } /// /// #[rstest] /// #[timeout(Duration::from_millis(80))] /// async fn single_pass() { /// assert_eq!(4, delayed_sum(2, 2, ms(10)).await); /// } /// ``` /// In this case test pass because the delay is just 10 milliseconds and timeout is /// 80 milliseconds. /// /// You can use `timeout` attribute like any other attibute in your tests and you can /// override a group timeout with a test specific one. In the follow example we have /// 3 tests where first and third use 100 millis but the second one use 10 millis. /// Another valuable point in this example is to use an expression to compute the /// duration. 
/// /// ```rust /// # use rstest::*; /// # use std::time::Duration; /// # /// # async fn delayed_sum(a: u32, b: u32,delay: Duration) -> u32 { /// # async_std::task::sleep(delay).await; /// # a + b /// # } /// fn ms(ms: u32) -> Duration { /// Duration::from_millis(ms.into()) /// } /// /// #[rstest] /// #[case::pass(ms(1), 4)] /// #[timeout(ms(10))] /// #[case::fail_timeout(ms(60), 4)] /// #[case::fail_value(ms(1), 5)] /// #[timeout(ms(100))] /// async fn group_one_timeout_override(#[case] delay: Duration, #[case] expected: u32) { /// assert_eq!(expected, delayed_sum(2, 2, delay).await); /// } /// ``` /// /// If you want to use `timeout` for `async` test you need to use `async-timeout` /// feature (enabled by default). /// /// ## Inject Test Attribute /// /// If you would like to use another `test` attribute for your test you can simply /// indicate it in your test function's attributes. For instance if you want /// to test some async function with use `actix_rt::test` attribute you can just write: /// /// ``` /// use rstest::*; /// use actix_rt; /// use std::future::Future; /// /// #[rstest] /// #[case(2, async { 4 })] /// #[case(21, async { 42 })] /// #[actix_rt::test] /// async fn my_async_test(#[case] a: u32, #[case] #[future] result: u32) { /// assert_eq!(2 * a, result.await); /// } /// ``` /// Just the attributes that ends with `test` (last path segment) can be injected: /// in this case the `#[actix_rt::test]` attribute will replace the standard `#[test]` /// attribute. /// /// ## Putting all Together /// /// All these features can be used together with a mixture of fixture variables, /// fixed cases and bunch of values. For instance, you might need two /// test cases which test for panics, one for a logged in user and one for a guest user. 
/// /// ```rust /// # enum User { Guest, Logged, } /// # impl User { fn logged(_n: &str, _d: &str, _w: &str, _s: &str) -> Self { Self::Logged } } /// # struct Item {} /// # trait Repository { fn find_items(&self, user: &User, query: &str) -> Result<Vec<Item>, String> { Err("Invalid query error".to_owned()) } } /// # #[derive(Default)] struct InMemoryRepository {} /// # impl Repository for InMemoryRepository {} /// /// use rstest::*; /// /// #[fixture] /// fn repository() -> InMemoryRepository { /// let mut r = InMemoryRepository::default(); /// // fill repository with some data /// r /// } /// /// #[fixture] /// fn alice() -> User { /// User::logged("Alice", "2001-10-04", "London", "UK") /// } /// /// #[rstest] /// #[case::authed_user(alice())] // We can use `fixture` also as standard function /// #[case::guest(User::Guest)] // We can give a name to every case : `guest` in this case /// #[should_panic(expected = "Invalid query error")] // We whould test a panic /// fn should_be_invalid_query_error( /// repository: impl Repository, /// #[case] user: User, /// #[values(" ", "^%$some#@invalid!chars", ".n.o.d.o.t.s.")] query: &str, /// query: &str /// ) { /// repository.find_items(&user, query).unwrap(); /// } /// ``` /// /// ## Trace Input Arguments /// /// Sometimes can be very helpful to print all test's input arguments. To /// do it you can use the `#[trace]` function attribute that you can apply /// to all cases or just to some of them. /// /// ``` /// use rstest::*; /// /// #[fixture] /// fn injected() -> i32 { 42 } /// /// #[rstest] /// #[trace] /// fn the_test(injected: i32) { /// assert_eq!(42, injected) /// } /// ``` /// /// Will print an output like /// /// ```bash /// Testing started at 14.12 ... 
/// ------------ TEST ARGUMENTS ------------ /// injected = 42 /// -------------- TEST START -------------- /// /// /// Expected :42 /// Actual :43 /// ``` /// But /// ``` /// # use rstest::*; /// #[rstest] /// #[case(1)] /// #[trace] /// #[case(2)] /// fn the_test(#[case] v: i32) { /// assert_eq!(0, v) /// } /// ``` /// will trace just `case_2` input arguments. /// /// If you want to trace input arguments but skip some of them that don't /// implement the `Debug` trait, you can also use the /// `#[notrace]` argument attribute to skip them: /// /// ``` /// # use rstest::*; /// # struct Xyz; /// # struct NoSense; /// #[rstest] /// #[trace] /// fn the_test(injected: i32, #[notrace] xyz: Xyz, #[notrace] have_no_sense: NoSense) { /// assert_eq!(42, injected) /// } /// ``` /// # Old _compact_ syntax /// /// `rstest` support also a syntax where all options and configuration can be write as /// `rstest` attribute arguments. This syntax is a little less verbose but make /// composition harder: for istance try to add some cases to a `rstest_reuse` template /// is really hard. /// /// So we'll continue to maintain the old syntax for a long time but we strongly encourage /// to switch your test in the new form. /// /// Anyway, here we recall this syntax and rewrite the previous example in the _compact_ form. /// /// ```text /// rstest( /// arg_1, /// ..., /// arg_n[,] /// [::attribute_1[:: ... 
[::attribute_k]]] /// ) /// ``` /// Where: /// /// - `arg_i` could be one of the follow /// - `ident` that match to one of function arguments for parametrized cases /// - `case[::description](v1, ..., vl)` a test case /// - `fixture(v1, ..., vl) [as argument_name]` where fixture is the injected /// fixture and argument_name (default use fixture) is one of function arguments /// that and `v1, ..., vl` is a partial list of fixture's arguments /// - `ident => [v1, ..., vl]` where `ident` is one of function arguments and /// `v1, ..., vl` is a list of values for ident /// - `attribute_j` a test attribute like `trace` or `notrace` /// /// ## Fixture Arguments /// /// ``` /// # struct User(String, u8); /// # impl User { fn name(&self) -> &str {&self.0} } /// # use rstest::*; /// # /// # #[fixture] /// # fn user( /// # #[default("Alice")] name: impl AsRef<str>, /// # #[default(22)] age: u8 /// # ) -> User { User(name.as_ref().to_owned(), age) } /// # /// #[rstest(user("Bob"))] /// fn check_user(user: User) { /// assert_eq("Bob", user.name()) /// } /// ``` /// /// ## Fixture Rename /// ``` /// # use rstest::*; /// #[fixture] /// fn long_and_boring_descriptive_name() -> i32 { 42 } /// /// #[rstest(long_and_boring_descriptive_name as short)] /// fn the_test(short: i32) { /// assert_eq!(42, short) /// } /// ``` /// /// ## Parametrized /// /// ``` /// # use rstest::*; /// #[rstest(input, expected, /// case::zero_base_case(0, 0), /// case::one_base_case(1, 1), /// case(2, 1), /// case(3, 2), /// #[should_panic] /// case(4, 42) /// )] /// fn fibonacci_test(input: u32, expected: u32) { /// assert_eq!(expected, fibonacci(input)) /// } /// /// # fn fibonacci(input: u32) -> u32 { /// # match input { /// # 0 => 0, /// # 1 => 1, /// # n => fibonacci(n - 2) + fibonacci(n - 1) /// # } /// # } /// ``` /// /// ## Values Lists /// /// ``` /// # use rstest::*; /// # fn is_valid(input: &str) -> bool { true } /// /// #[rstest( /// input => ["Jhon", "alice", "My_Name", "Zigy_2001"] /// )] /// 
fn should_be_valid(input: &str) { /// assert!(is_valid(input)) /// } /// ``` /// /// ## `trace` and `notrace` /// /// ``` /// # use rstest::*; /// # struct Xyz; /// # struct NoSense; /// #[rstest(::trace::notrace(xzy, have_no_sense))] /// fn the_test(injected: i32, xyz: Xyz, have_no_sense: NoSense) { /// assert_eq!(42, injected) /// } /// ``` /// #[proc_macro_attribute] pub fn rstest( args: proc_macro::TokenStream, input: proc_macro::TokenStream, ) -> proc_macro::TokenStream { let mut test = parse_macro_input!(input as ItemFn); let mut info = parse_macro_input!(args as RsTestInfo); let extend_result = info.extend_with_function_attrs(&mut test); let mut errors = error::rstest(&test, &info); if let Err(attrs_errors) = extend_result { attrs_errors.to_tokens(&mut errors); } if errors.is_empty() { if info.data.has_list_values() { render::matrix(test, info) } else if info.data.has_cases() { render::parametrize(test, info) } else { render::single(test, info) } } else { errors } .into() }
true
9dd437a0d6cf61cec25f67615edae9b2d58d612d
Rust
mongodb-rust/decimal128
/src/lib.rs
UTF-8
21,682
3.4375
3
[ "Apache-2.0" ]
permissive
//! Decimal 128 bits are broken down like so: //! [1bits] [ 14bits ] [ 113 bits ] //! sign exponent significand //! field use bitvec::{bitvec, BigEndian, BitVec}; use byteorder::*; use std::cmp::Ordering; use std::fmt; use std::io::Cursor; use std::str::FromStr; #[derive(Clone, PartialEq, PartialOrd)] pub struct Exponent { vec: BitVec<BigEndian>, } #[derive(Clone, PartialEq, PartialOrd)] pub struct Significand { vec: BitVec<BigEndian>, } #[derive(Clone)] pub struct Decimal128 { pub sign: bool, pub exponent: Exponent, pub significand: Significand, pub bytes: [u8; 16], nan: bool, inf: bool, } pub enum NumberType { NaN, Infinity, Finite, } impl From<i32> for Decimal128 { fn from(_v: i32) -> Self { unimplemented!("Creating Decimal128 from i32 is not yet implemented.") } } impl From<u32> for Decimal128 { fn from(_v: u32) -> Self { unimplemented!("Creating Decimal128 from u32 is not yet implemented.") } } impl FromStr for Decimal128 { type Err = (); fn from_str(_s: &str) -> Result<Self, ()> { unimplemented!("Creating Decimal128 from string is not yet implemented.") } } impl Into<i32> for Decimal128 { fn into(self) -> i32 { unimplemented!("Creating i32 from Decimal128 is not yet implemented.") } } impl Into<u32> for Decimal128 { fn into(self) -> u32 { unimplemented!("Creating u32 from Decimal128 is not yet implemented.") } } impl Decimal128 { pub fn zero() -> Self { Decimal128 { sign: false, exponent: Exponent::new(), significand: Significand::new(), bytes: [0u8; 16], nan: false, inf: false, } } /// Create a Decimal128 from a [u8; 16]. /// /// This method extracts out the sign, exponent and signficand, uses Binary /// Integer Decimal decoding. The byte order is LittleEndian. 
For more /// information on how extraction is done, please refer to /// [wikipedia](https://en.wikipedia.org/wiki/Decimal128_floating-point_format), /// or the [IEEE 754-2008](https://ieeexplore.ieee.org/document/4610935) /// ``` /// use decimal128::*; /// /// let vec: [u8; 16] = [9, 16, 3, 6, 7, 86, 76, 81, 89, 0, 3, 45, 12, 71, 52, 39]; /// let dec128 = Decimal128::from_raw_bytes(vec); /// ``` pub fn from_raw_bytes(buffer: [u8; 16]) -> Self { // decimal 128's exponent is 14bits long; we will construct a u16 and // fill up the first two bits as zeros and then get its value. let mut total_exp = Exponent::new(); // Significnad can be 113 *or* 111 bit long. Regardless of the size we // will pad it with 14 0s. We will be eventually constructing a u128 // from this eventually. let mut total_sig = Significand::new(); let byte = buffer[0]; let max = 0b1111_1111; // first bit is sign: negative or positive integer let is_negative_bitmask = 0b0111_1111; let sign = (byte | is_negative_bitmask) == max; // the next 5 bits of the first byte are combination field; these include: // first 5 bits Type Exponent MSBs Significand MSD // --------------------------------------------------------------------------- // a b c d e Finite 14bits 113bits // 1 1 c d e Finite 2 bits to right 111bits // 1 1 1 1 0 Infinity - - - - - - // 1 1 1 1 1 NaN - - - - - - // the easiest bitmask to do is for NaN, i.e. five 1s let res = byte | 0b1000_0011; let combination_field = match res { // if everything is 1s, we are looking at NaN 0b1111_1111 => NumberType::NaN, // if the last of the five bits is a 0, we are looking at Infinity 0b1111_1011 => NumberType::Infinity, // match for finite cases _ => match byte | 0b1001_1111 { 0b1111_1111 => { // since the first two bits after the sign are 11, we ignore // them and gather the remainder of the first byte. 
let c = if (byte | 0b1110_1111) == max { 1 } else { 0 }; let d = if (byte | 0b1111_0111) == max { 1 } else { 0 }; let e = if (byte | 0b1111_1011) == max { 1 } else { 0 }; let f = if (byte | 0b1111_1101) == max { 1 } else { 0 }; let g = if (byte | 0b1111_1110) == max { 1 } else { 0 }; let mut exp = bitvec![c, d, e, f, g]; total_exp.append(&mut exp); // in this case second byte of the buffer can just be // straight up appended to the exponent. let byte_2 = buffer[1]; let mut sb_bv: BitVec = (&[byte_2] as &[u8]).into(); total_exp.append(&mut sb_bv); // out of the third byte the first bit are part of the // exponent, and the last 7 bits are part of the significand let byte_3 = buffer[1]; let h = if (byte_2 | 0b0111_1111) == max { 1 } else { 0 }; let mut exp_cont = bitvec![h]; total_exp.append(&mut exp_cont); let i = if (byte_3 | 0b1011_1111) == max { 1 } else { 0 }; let j = if (byte_3 | 0b1101_1111) == max { 1 } else { 0 }; let k = if (byte_3 | 0b1110_1111) == max { 1 } else { 0 }; let l = if (byte_3 | 0b1111_0111) == max { 1 } else { 0 }; let m = if (byte_3 | 0b1111_1011) == max { 1 } else { 0 }; let n = if (byte_3 | 0b1111_1101) == max { 1 } else { 0 }; let o = if (byte_3 | 0b1111_1110) == max { 1 } else { 0 }; // Start a new vec for 111bit significand. 
This version of // the significand is offset by two bits, so we pad it with // `100` let mut sig = bitvec![1, 0, 0, i, j, k, l, m, n, o]; total_sig.append(&mut sig); NumberType::Finite } _ => { // if the first two bits after the sign are `00`, `01`, // `10`, we add the remainder of the first byte to exponent let a = if (byte | 0b1011_1111) == max { 1 } else { 0 }; let b = if (byte | 0b1101_1111) == max { 1 } else { 0 }; let c = if (byte | 0b1110_1111) == max { 1 } else { 0 }; let d = if (byte | 0b1111_0111) == max { 1 } else { 0 }; let e = if (byte | 0b1111_1011) == max { 1 } else { 0 }; let f = if (byte | 0b1111_1101) == max { 1 } else { 0 }; let g = if (byte | 0b1111_1110) == max { 1 } else { 0 }; let mut exp = bitvec![a, b, c, d, e, f, g]; total_exp.append(&mut exp); // out of the second byte the first 7 bits are part of the // exponent, and the last bit if part of the significand let byte_2 = buffer[1]; let h = if (byte_2 | 0b0111_1111) == max { 1 } else { 0 }; let i = if (byte_2 | 0b1011_1111) == max { 1 } else { 0 }; let j = if (byte_2 | 0b1101_1111) == max { 1 } else { 0 }; let k = if (byte_2 | 0b1110_1111) == max { 1 } else { 0 }; let l = if (byte_2 | 0b1111_0111) == max { 1 } else { 0 }; let m = if (byte_2 | 0b1111_1011) == max { 1 } else { 0 }; let n = if (byte_2 | 0b1111_1101) == max { 1 } else { 0 }; let mut exp_cont = bitvec![h, i, j, k, l, m, n]; total_exp.append(&mut exp_cont); let o = if (byte_2 | 0b1111_1110) == max { 1 } else { 0 }; // Start a new vec for 113bit significand. Since this // version of significand is not offset, we pad it with only // `0` let mut sig = bitvec![0, o]; total_sig.append(&mut sig); // add the whole third byte to the signficand in this case let byte_3 = buffer[2]; let mut tb_bv: BitVec = (&[byte_3] as &[u8]).into(); total_sig.append(&mut tb_bv); NumberType::Finite } }, }; // the rest of the bytes of the vec we are passed in. 
for bytes in 3..buffer.len() { let mut bv: BitVec = (&[buffer[bytes]] as &[u8]).into(); total_sig.append(&mut bv); } let dec128 = match combination_field { NumberType::Finite => Decimal128 { sign, exponent: total_exp, significand: total_sig, bytes: buffer, nan: false, inf: false, }, NumberType::NaN => Decimal128 { sign, exponent: total_exp, significand: total_sig, bytes: buffer, nan: true, inf: false, }, NumberType::Infinity => Decimal128 { sign, exponent: total_exp, significand: total_sig, bytes: buffer, nan: false, inf: true, }, }; dec128 } pub fn is_nan(&self) -> bool { if self.nan { return true; } else { return false; } } pub fn is_negative(&self) -> bool { if self.sign { return true; } else { return false; } } pub fn is_positive(&self) -> bool { return !self.is_negative(); } pub fn is_zero(&self) -> bool { return !self.nan && self.exponent.is_zero() && self.significand.is_zero() } /// Converts Decimal128 to string. Uses information in /// [speleotrove](http://speleotrove.com/decimal/daconvs.html) decimal /// documentation. pub fn to_string(&self) -> String { // just return NaN if we are dealing with NaN. This does not come with a // sign. if self.nan { return String::from("NaN"); }; // Everything else can have a sign. We can create a string from Infinity // or a Finite number. let str = if self.inf { "Infinity".to_string() } else { self.create_string() }; // add a sign if this is a negative number return if !self.sign { str } else { format!("-{}", str) }; } /// Returns raw bytes. 
pub fn to_raw_bytes(&self) -> [u8; 16] { self.bytes } fn create_string(&self) -> String { if self.use_scientific_notation() { let exp_sign = if self.exponent.to_adjusted() < 0 { "" } else { "+" }; if self.significand.as_digit_vec().len() > 1 { let mut first_significand = self.significand.as_digit_vec().clone(); // we already used the first digit, so only stringify the // remainder of the significand let remainder_significand = stringify_vec(first_significand.split_off(1)); return format!( "{first_significand}.{remainder_significand}E{exp_sign}{scientific_exponent}", first_significand = first_significand[0], remainder_significand = remainder_significand, exp_sign = exp_sign, scientific_exponent = self.scientific_exponent() ); } else { return format!( "{significand}E{exp_sign}{scientific_exponent}", significand = self.significand.to_num(), exp_sign = exp_sign, scientific_exponent = self.scientific_exponent() ); } } else if self.exponent.to_adjusted() < 0 { if self.significand.count_digits() > self.exponent.to_adjusted().abs() { let dec_point = self.get_decimal_point_index() as usize; let mut significand_vec = self.significand.as_digit_vec().clone(); let remainder_significand = stringify_vec(significand_vec.split_off(dec_point - 1)); return format!( "{first_significand}.{remainder_significand}", first_significand = significand_vec[0], remainder_significand = remainder_significand ); } else { let zero_pad = self.get_zero_padding(); return format!( "0.{zero_pad}{significand}", zero_pad = zero_pad, significand = self.significand.to_num() ); } } format!("{}", self.significand.to_num()) } fn use_scientific_notation(&self) -> bool { (self.exponent.to_adjusted() as i16) > 0 || (self.scientific_exponent() as i16) < -6 } fn scientific_exponent(&self) -> i16 { // first variable is number of digits in a significand (self.significand.count_digits() - 1) + self.exponent.to_adjusted() } // for larger numbers we want to know where to put the decimal point. 
fn get_decimal_point_index(&self) -> i16 { self.significand.count_digits() - self.exponent.to_adjusted().abs() } // for very small decimals, we need to know how many zeroes to pad it with. fn get_zero_padding(&self) -> String { let left_zero_pad_count = (self.exponent.to_adjusted() + self.significand.count_digits()).abs(); std::iter::repeat("0") .take(left_zero_pad_count as usize) .collect::<String>() } /// create a compare functiont that returns a decimal 128 that's either: /// * -1 = less than /// * 0 = equal /// * 1 = greater than /// When comparing and orderign Decimal128, we should end up with: /// (-) NaN | -Infinity | x < 0 | -0 | +0 | x > 0 | +Infinity | (+) NaN /// /// Even though NaN can't be negative or positive, when reading the sign bit, /// (-) NaN < (+) NaN // // TODO: once we have a method to create Decimal128 from another number type // (u32/i32/u128/i128), change this return type to be a Decimal128 as well. pub fn compare(&self, other: &Decimal128) -> isize { let self_exp = self.exponent.to_adjusted(); let other_exp = other.exponent.to_adjusted(); let self_signif = self.significand.to_num(); let other_signif = other.significand.to_num(); // NaN and Infinity will be ordered via the sign Check if self.sign > other.sign { -1 } else if self.sign < other.sign { 1 } else { // since 1x10^3 is the same number as 10x10^2, we want to try to // even out the exponents before comparing significands. let exp_dif = (self_exp - other_exp).abs(); // however, if the difference is greeater than 66, they are // definitely diffferent numbers. so we only try to mingle with // exponents if the difference is less than 66. 
if exp_dif <= 66 { if self_exp < other_exp { Decimal128::increase_exponent(self_signif, self_exp, other_exp); Decimal128::decrease_exponent(other_signif, other_exp, self_exp); } else if self_exp > other_exp { Decimal128::decrease_exponent(self_signif, self_exp, other_exp); Decimal128::increase_exponent(other_signif, other_exp, self_exp); } } if self_exp == other_exp { if self_signif > other_signif { 1 } else if self_signif < other_signif { -1 } else { 0 } } else { if self_exp > other_exp { 1 } else if self_exp < other_exp { -1 } else { 0 } } } } // This is part of the effort to compare two different Decimal128 numbers. fn increase_exponent(mut significand: u128, mut exponent: i16, goal: i16) { if significand == 0 as u128 { exponent = goal } while exponent < goal { let significand_divided_by_10 = significand / 10; if significand % 10 != 0 { break; } exponent += 1; significand = significand_divided_by_10 } } // This is part of the effort to compare two different Decimal128 numbers. fn decrease_exponent(mut significand: u128, mut exponent: i16, goal: i16) { if significand == 0 as u128 { exponent = goal } while exponent > goal { let significand_times_10 = significand * 10; if significand_times_10 - Significand::max_value() > 0 { break; } exponent -= 1; significand = significand_times_10 } } } impl fmt::Display for Decimal128 { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { write!(fmt, "{}", self.to_string()) } } // this should be the same as Display trait impl fmt::Debug for Decimal128 { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(self, fmt) } } impl PartialOrd<Decimal128> for Decimal128 { fn partial_cmp(&self, other: &Decimal128) -> Option<Ordering> { match self.compare(other) { v if v == 0 => Some(Ordering::Equal), v if v > 0 => Some(Ordering::Greater), v if v < 0 => Some(Ordering::Less), _ => None, } } } impl PartialEq<Decimal128> for Decimal128 { fn eq(&self, other: &Decimal128) -> bool { self.compare(other) == 0 } } /// 
Format Decimal128 as an engineering string /// TODO: this currently only uses the default to_string method for Decimal128 /// and needs to actually do the engineering string formatting. impl fmt::LowerExp for Decimal128 { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(self, fmt) } } /// Formats Decimal128 to hexadecimal binary representation. impl fmt::LowerHex for Decimal128 { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { for b in self.bytes.iter().rev() { write!(fmt, "{:02x}", b)?; } Ok(()) } } /// Exponent is a 14-bit portion of decimal128 that follows the sign bit. Here we /// are storing it as a 16-bit BitVec that can be later converted to a u16. impl Exponent { pub fn new() -> Self { Exponent { vec: bitvec![BigEndian, u8; 0; 2], } } pub fn append(&mut self, vec: &mut BitVec) { self.vec.append(vec) } pub fn is_zero(&self) -> bool { self.to_num() == 0 } pub fn to_num(&self) -> u16 { let mut reader = Cursor::new(&self.vec); reader.read_u16::<byteorder::BigEndian>().unwrap() } // compare current exponent value with exponent bias (largest possible // exponent value) // TODO: check if 6176 (exponent bias) can be stored as u16 pub fn to_adjusted(&self) -> i16 { self.to_num() as i16 - 6176 as i16 } } /// Significand is a padded 111- or 113-bit coefficient. We are storing it as a /// 128-bit BitVec with the padded difference. This can be converted to a u128. impl Significand { pub fn new() -> Self { Significand { vec: bitvec![BigEndian, u8; 0; 14], } } pub fn append(&mut self, vec: &mut BitVec) { self.vec.append(vec) } pub fn is_zero(&self) -> bool { // FIXME: Very inefficient, but docs are down self.count_digits() == 0 } pub fn to_num(&self) -> u128 { let mut reader = Cursor::new(&self.vec); reader.read_u128::<byteorder::BigEndian>().unwrap() } pub fn max_value() -> u128 { u128::from_str_radix("9999999999999999999999999999999999", 10).unwrap() } // count the number of digits in the significand. 
This method first converts // significand BitVec into a u128 number, then converts it to string to // count characters and collects them in a vec to look at the vec's length. // // We return a u16 number of digits, as it's easier to compare to the // exponent since that's also stored as a u16. fn count_digits(&self) -> i16 { self.as_digit_vec().len() as i16 } fn as_digit_vec(&self) -> Vec<u32> { let digits: Vec<u32> = self .to_num() .to_string() .chars() .map(|c| c.to_digit(10).unwrap()) .collect(); return digits; } } fn stringify_vec(vec: Vec<u32>) -> String { vec.into_iter() .map(|d| d.to_string()) .collect::<Vec<String>>() .join("") }
true
fc56557cfa5aaeb5a3af2834772a8aa37bf44d77
Rust
ms705/timely-dataflow
/src/construction/builder.rs
UTF-8
7,234
3.046875
3
[ "MIT" ]
permissive
use std::rc::Rc; use std::cell::RefCell; use progress::timestamp::RootTimestamp; use progress::{Timestamp, Scope, Subgraph}; use progress::nested::{Source, Target}; use progress::nested::product::Product; use progress::nested::scope_wrapper::ScopeWrapper; use communication::{Communicator, Data, Pullable}; use communication::observer::BoxedObserver; use serialization::Serializable; /// The fundamental operations required to add and connect operators in a timely dataflow graph. /// /// Importantly, this is often a *shared* object, backed by a `Rc<RefCell<>>` wrapper. Each method /// takes a shared reference, but can be thought of as first calling .clone() and then calling the /// method. Each method does not hold the `RefCell`'s borrow, and should prevent accidental panics. pub trait GraphBuilder : Communicator+Clone { type Timestamp : Timestamp; /// A useful name describing the builder's scope. fn name(&self) -> String; /// Connects a source of data with a target of the data. This should only link the two for /// the purposes of tracking progress, rather than effect any data movement itself. fn add_edge(&self, source: Source, target: Target); /// Adds a child `Scope` to the builder's scope. fn add_scope<SC: Scope<Self::Timestamp>+'static>(&self, scope: SC) -> u64; // returns name /// Creates a new `Subgraph` with timestamp `T`. Used by `subcomputation`, but unlikely to be /// commonly useful to end users. fn new_subscope<T: Timestamp>(&mut self) -> Subgraph<Self::Timestamp, T>; /// Creates a `Subgraph` from a closure acting on a `SubgraphBuilder`, and returning /// whatever the closure returns. /// /// Commonly used to create new timely dataflow subgraphs, either creating new input streams /// and the input handle, or ingressing data streams and returning the egresses stream. 
/// /// # Examples /// ``` /// use timely::construction::*; /// use timely::construction::operators::*; /// /// timely::execute(std::env::args(), |root| { /// // must specify types as nothing else drives inference. /// let input = root.subcomputation::<u64,_,_>(|subgraph| { /// let (input, stream) = subgraph.new_input::<String>(); /// let output = subgraph.subcomputation::<u32,_,_>(|subgraph2| { /// subgraph2.enter(&stream).leave() /// }); /// input /// }); /// }); /// ``` fn subcomputation<T: Timestamp, R, F:FnOnce(&mut SubgraphBuilder<Self, T>)->R>(&mut self, func: F) -> R { let subscope = Rc::new(RefCell::new(self.new_subscope())); let mut builder = SubgraphBuilder { subgraph: subscope, parent: self.clone(), }; let result = func(&mut builder); self.add_scope(builder.subgraph); result } } /// A `GraphRoot` is the entry point to a timely dataflow computation. It wraps a `Communicator`, /// and has a slot for one child `Scope`. The primary intended use of `GraphRoot` is through its /// implementation of the `GraphBuilder` trait. /// /// # Panics /// Calling `subcomputation` more than once will result in a `panic!`. /// /// Calling `step` without having called `subcomputation` will result in a `panic!`. 
pub struct GraphRoot<C: Communicator> { communicator: Rc<RefCell<C>>, graph: Rc<RefCell<Option<Box<Scope<RootTimestamp>>>>>, } impl<C: Communicator> GraphRoot<C> { pub fn new(c: C) -> GraphRoot<C> { GraphRoot { communicator: Rc::new(RefCell::new(c)), graph: Rc::new(RefCell::new(None)), } } pub fn step(&mut self) -> bool { if let Some(scope) = self.graph.borrow_mut().as_mut() { scope.pull_internal_progress(&mut [], &mut [], &mut []) } else { panic!("GraphRoot::step(): empty; make sure to add a subgraph!") } } } impl<C: Communicator> GraphBuilder for GraphRoot<C> { type Timestamp = RootTimestamp; fn name(&self) -> String { format!("Worker[{}]", self.communicator.borrow().index()) } fn add_edge(&self, _source: Source, _target: Target) { panic!("GraphRoot::connect(): root doesn't maintain edges; who are you, how did you get here?") } fn add_scope<SC: Scope<RootTimestamp>+'static>(&self, mut scope: SC) -> u64 { let mut borrow = self.graph.borrow_mut(); if borrow.is_none() { scope.get_internal_summary(); scope.set_external_summary(Vec::new(), &mut []); *borrow = Some(Box::new(scope)); 0 } else { panic!("GraphRoot::add_scope(): added second scope to root") } } fn new_subscope<T: Timestamp>(&mut self) -> Subgraph<RootTimestamp, T> { let name = format!("{}::Subgraph[Root]", self.name()); Subgraph::new_from(&mut (*self.communicator.borrow_mut()), 0, name) } } impl<C: Communicator> Communicator for GraphRoot<C> { fn index(&self) -> u64 { self.communicator.borrow().index() } fn peers(&self) -> u64 { self.communicator.borrow().peers() } fn new_channel<T:Data+Serializable, D:Data+Serializable>(&mut self) -> (Vec<BoxedObserver<T, D>>, Box<Pullable<T, D>>) { self.communicator.borrow_mut().new_channel() } } impl<C: Communicator> Clone for GraphRoot<C> { fn clone(&self) -> Self { GraphRoot { communicator: self.communicator.clone(), graph: self.graph.clone() }} } /// A `SubgraphBuilder` wraps a `Subgraph` and a parent `G: GraphBuilder`. 
It manages the addition /// of `Scope`s to a subgraph, and the connection of edges between them. pub struct SubgraphBuilder<G: GraphBuilder, T: Timestamp> { pub subgraph: Rc<RefCell<Subgraph<G::Timestamp, T>>>, pub parent: G, } impl<G: GraphBuilder, T: Timestamp> GraphBuilder for SubgraphBuilder<G, T> { type Timestamp = Product<G::Timestamp, T>; fn name(&self) -> String { self.subgraph.borrow().name() } fn add_edge(&self, source: Source, target: Target) { self.subgraph.borrow_mut().connect(source, target); } fn add_scope<SC: Scope<Self::Timestamp>+'static>(&self, scope: SC) -> u64 { let index = self.subgraph.borrow().children.len() as u64; let path = format!("{}", self.subgraph.borrow().path); self.subgraph.borrow_mut().children.push(ScopeWrapper::new(Box::new(scope), index, path)); index } fn new_subscope<T2: Timestamp>(&mut self) -> Subgraph<Product<G::Timestamp, T>, T2> { let index = self.subgraph.borrow().children() as u64; let path = format!("{}", self.subgraph.borrow().path); Subgraph::new_from(self, index, path) } } impl<G: GraphBuilder, T: Timestamp> Communicator for SubgraphBuilder<G, T> { fn index(&self) -> u64 { self.parent.index() } fn peers(&self) -> u64 { self.parent.peers() } fn new_channel<T2:Data+Serializable, D:Data+Serializable>(&mut self) -> (Vec<BoxedObserver<T2, D>>, Box<Pullable<T2, D>>) { self.parent.new_channel() } } impl<G: GraphBuilder, T: Timestamp> Clone for SubgraphBuilder<G, T> { fn clone(&self) -> Self { SubgraphBuilder { subgraph: self.subgraph.clone(), parent: self.parent.clone() }} }
true
70a1f55cd76a341dc8daf2719e97d37a4025ded8
Rust
zeeshanakram3/advent-of-code
/aoc04/src/main.rs
UTF-8
7,188
3.078125
3
[ "Unlicense", "MIT" ]
permissive
#[macro_use] extern crate lazy_static; extern crate regex; use std::collections::HashMap; use std::error::Error; use std::io::{self, Read, Write}; use std::ops::Range; use std::result; use std::slice; use std::str::FromStr; use regex::Regex; macro_rules! err { ($($tt:tt)*) => { Err(Box::<Error>::from(format!($($tt)*))) } } type Result<T> = result::Result<T, Box<Error>>; fn main() -> Result<()> { let mut input = String::new(); io::stdin().read_to_string(&mut input)?; // collect events let mut events: Vec<Event> = vec![]; for line in input.lines() { let event = line.parse().or_else(|err| { err!("failed to parse '{:?}': {}", line, err) })?; events.push(event); } if events.is_empty() { return err!("found no events"); } // sort them by time and group them by guard events.sort_by(|ev1, ev2| ev1.datetime.cmp(&ev2.datetime)); let mut events_by_guard = EventsByGuard::new(); let mut cur_guard_id = None; for ev in events { if let EventKind::StartShift { guard_id } = ev.kind { cur_guard_id = Some(guard_id); } match cur_guard_id { None => return err!("no guard id set for event"), Some(id) => { events_by_guard.entry(id).or_default().push(ev); } } } // create a by-minute frequency map for each guard let mut minutes_asleep: GuardSleepFrequency = HashMap::new(); for (&guard_id, events) in events_by_guard.iter() { let mut freq: [u32; 60] = [0; 60]; for result in MinutesAsleepIter::new(events) { for minute in result? 
{ freq[minute as usize] += 1; } } minutes_asleep.insert(guard_id, freq); } part1(&minutes_asleep)?; part2(&minutes_asleep)?; Ok(()) } fn part1(minutes_asleep: &GuardSleepFrequency) -> Result<()> { let (&sleepiest, _) = minutes_asleep .iter() .max_by_key(|&(_, ref freqs)| -> u32 { freqs.iter().sum() }) // unwrap is OK since we're guaranteed to have at least one event .unwrap(); let minute = match sleepiest_minute(minutes_asleep, sleepiest) { None => return err!("guard {} was never asleep", sleepiest), Some(minute) => minute, }; writeln!(io::stdout(), "part 1, product: {}", sleepiest * minute)?; Ok(()) } fn part2(minutes_asleep: &GuardSleepFrequency) -> Result<()> { let mut sleepiest_minutes: HashMap<GuardID, (u32, u32)> = HashMap::new(); for (&guard_id, freqs) in minutes_asleep.iter() { let minute = match sleepiest_minute(minutes_asleep, guard_id) { None => continue, Some(minute) => minute, }; let count = freqs[minute as usize]; sleepiest_minutes.insert(guard_id, (minute, count)); } if sleepiest_minutes.is_empty() { return err!("no guards slept"); } let (&longest_asleep, &(minute, _)) = sleepiest_minutes .iter() .max_by_key(|&(_, (_, count))| count) // unwrap is OK because sleepiest_minutes is non-empty .unwrap(); writeln!(io::stdout(), "part 2, product: {}", longest_asleep * minute)?; Ok(()) } /// Return the minute that the given guard slept the most. fn sleepiest_minute( minutes_asleep: &GuardSleepFrequency, guard_id: GuardID, ) -> Option<u32> { let (sleepiest_minute, ..) = minutes_asleep[&guard_id] .iter() .enumerate() .max_by_key(|(_, freq)| -> u32 { **freq }) .expect("Iterator of sleepy minutes should not be empty"); Some(sleepiest_minute as u32) } type GuardID = u32; type EventsByGuard = HashMap<GuardID, Vec<Event>>; // maps guard to minutes asleep frequency type GuardSleepFrequency = HashMap<GuardID, [u32; 60]>; /// An iterator that coalesces "asleep" and "wakeup" events into ranges of /// minutes slept. 
#[derive(Debug)] struct MinutesAsleepIter<'a> { events: slice::Iter<'a, Event>, fell_asleep: Option<u32>, } impl<'a> MinutesAsleepIter<'a> { fn new(events: &'a [Event]) -> MinutesAsleepIter<'a> { MinutesAsleepIter { events: events.iter(), fell_asleep: None } } } impl<'a> Iterator for MinutesAsleepIter<'a> { type Item = Result<Range<u32>>; fn next(&mut self) -> Option<Result<Range<u32>>> { loop { let ev = match self.events.next() { Some(ev) => ev, None => { if self.fell_asleep.is_some() { return Some(err!("found sleep event without wake up")); } return None; } }; match ev.kind { EventKind::StartShift { .. } => {} EventKind::Asleep => { self.fell_asleep = Some(ev.datetime.minute); } EventKind::WakeUp => { let fell_asleep = match self.fell_asleep.take() { Some(minute) => minute, None => { return Some(err!("found wakeup without sleep")); } }; if ev.datetime.minute < fell_asleep { return Some(err!("found wakeup before sleep")); } return Some(Ok(fell_asleep..ev.datetime.minute)); } } } } } #[derive(Debug)] struct Event { datetime: DateTime, kind: EventKind, } #[derive(Debug, Eq, PartialEq, PartialOrd, Ord)] struct DateTime { year: u32, month: u32, day: u32, hour: u32, minute: u32, } #[derive(Debug)] enum EventKind { StartShift { guard_id: GuardID }, Asleep, WakeUp, } impl FromStr for Event { type Err = Box<Error>; fn from_str(s: &str) -> Result<Event> { lazy_static! { static ref RE: Regex = Regex::new(r"(?x) \[ (?P<year>[0-9]{4})-(?P<month>[0-9]{2})-(?P<day>[0-9]{2}) \s+ (?P<hour>[0-9]{2}):(?P<minute>[0-9]{2}) \] \s+ (?:Guard\ \#(?P<id>[0-9]+)\ begins\ shift|(?P<sleep>.+)) ").unwrap(); } let caps = match RE.captures(s) { None => return err!("unrecognized event"), Some(caps) => caps, }; let datetime = DateTime { year: caps["year"].parse()?, month: caps["month"].parse()?, day: caps["day"].parse()?, hour: caps["hour"].parse()?, minute: caps["minute"].parse()?, }; let kind = if let Some(m) = caps.name("id") { EventKind::StartShift { guard_id: m.as_str().parse()? 
} } else if &caps["sleep"] == "falls asleep" { EventKind::Asleep } else if &caps["sleep"] == "wakes up" { EventKind::WakeUp } else { return err!("could not determine event kind") }; Ok(Event { datetime, kind }) } }
true
652c68b638a9e3ae8356771dc1af65fcea0f5843
Rust
alexvilanovab/blendit
/src/main.rs
UTF-8
2,787
2.921875
3
[ "MIT" ]
permissive
extern crate image; extern crate imageproc; extern crate indicatif; extern crate rusttype; mod cli; fn main() { let args = cli::get_arguments(); let img_path = std::path::Path::new(args.value_of("image").unwrap()); let img = match image::open(img_path) { Ok(img) => img.to_rgb8(), Err(e) => { eprintln!("Could not open the given image: {}", e); std::process::exit(1); } }; let txt_path = std::path::Path::new(args.value_of("text").unwrap()); let txt = match std::fs::read_to_string(txt_path) { Ok(txt) => txt, Err(e) => { eprintln!("Could not read the given text file: {}", e); std::process::exit(1); } }; let mut txt_it = txt.chars(); let font_data: &[u8] = include_bytes!("../fonts/Bitter-Bold.ttf"); let font = match rusttype::Font::try_from_bytes(font_data) { Some(val) => val, None => { eprintln!("Could not load the given font file"); std::process::exit(1); } }; let font_size: u32 = match args.value_of("font_size").unwrap().parse() { Ok(val) => val, Err(e) => { eprintln!("Invalid value for 'font-size': {}", e); std::process::exit(1); } }; let font_scale = rusttype::Scale { x: (font_size as f32) * 1.5, y: (font_size as f32) * 1.5, }; let out_path = std::path::Path::new(args.value_of("output").unwrap()); let mut out = image::RgbImage::new(img.width() * font_size, img.height() * font_size); let bar = indicatif::ProgressBar::hidden(); bar.set_position(0); bar.set_length((img.width() * img.height()) as u64); bar.set_style( indicatif::ProgressStyle::default_bar() .template("[{elapsed_precise}] {bar:50} {pos}/{len}") .progress_chars("■ "), ); bar.set_draw_target(indicatif::ProgressDrawTarget::stderr()); for (x, y, rgb) in img.enumerate_pixels() { let c = match txt_it.next() { Some(val) => val.to_string(), None => { txt_it = txt.chars(); String::from(" ") } }; imageproc::drawing::draw_text_mut( &mut out, *rgb, x * font_size, y * font_size, font_scale, &font, &c, ); bar.inc(1) } bar.finish(); match out.save(out_path) { Ok(_) => { println!( "output image saved successfully: {}", 
out_path.to_str().unwrap() ); } Err(e) => { eprintln!("Failed saving output file: {}", e); std::process::exit(1); } }; }
true
ab8653a0b77a1860eba451f5b139802583d24781
Rust
njsh4261/benchmarksgame
/bencher/programs/floydwarshall/floydwarshall.rs
UTF-8
879
3.078125
3
[ "MIT", "BSD-2-Clause", "BSD-3-Clause" ]
permissive
use std::io; fn floydwarshall(graph: &mut Vec<Vec<i32>>, vertex_num: usize) { for k in 0..vertex_num { for i in 0..vertex_num { for j in 0..vertex_num { if graph[i][j] > graph[i][k] + graph[k][j] { graph[i][j] = graph[i][k] + graph[k][j]; } } } } } fn main() { let mut graph: Vec<Vec<i32>> = Vec::<Vec<i32>>::new(); let mut buf = String::new(); io::stdin().read_line(&mut buf).ok(); let vertex_num: usize = match buf.trim().parse::<usize>() { Ok(a) => a, Err(_) => 0 }; for _ in 0..vertex_num { buf.clear(); io::stdin().read_line(&mut buf).ok(); graph.push( buf.split_whitespace().map(|s| s.parse().expect("parse error")).collect() ); } floydwarshall(&mut graph, vertex_num); }
true
20f5631b3cdaf2c261968641b44dedde5ee2a135
Rust
CodeChain-io/remote-trait-object
/remote-trait-object/src/service/id.rs
UTF-8
3,499
3.109375
3
[ "MIT", "Apache-2.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
use super::MethodId; use linkme::distributed_slice; use serde::{Deserialize, Serialize}; use std::collections::{HashMap, HashSet}; pub const ID_ORDERING: std::sync::atomic::Ordering = std::sync::atomic::Ordering::SeqCst; pub type MethodIdAtomic = std::sync::atomic::AtomicU32; // linkme crate smartly collects all the registrations generated by the proc-macro // into a sinlge array in the link time. // Note that too long linkme-related variable name would cause serious compiler error in MacOS // So we deliberately make it have a short name // Id of methods in services. // Note that here the two strings mean (trait name, method name) // Also you can skip calling this, then the method id will be set up for default value // decided by the order of declaration. type MethodIdentifierSetter = fn(id: MethodId); #[distributed_slice] pub static MID_REG: [(&'static str, &'static str, MethodIdentifierSetter)] = [..]; /// This will be provided by the user who cares the compatability between already-compiled service traits. #[derive(PartialEq, Serialize, Deserialize, Debug, Clone)] pub struct IdMap { // This is system-wide; All module will get same ones pub method_map: Option<HashMap<(String, String), MethodId>>, } /// A special function that sets static & global identifiers for the methods. /// /// It will be explained in more detail in the next version :) /// /// This is supposed to be called only once during the entire lifetime of the process. 
/// However it is ok to call multiple times if the IdMap is identical, especially in the /// tests where each test share that static id list /// # Examples /// ``` /// use remote_trait_object::macro_env::*; /// #[allow(non_upper_case_globals)] /// static ID_METHOD_MyTrait_mymethod: MethodIdAtomic = MethodIdAtomic::new(1); /// #[linkme::distributed_slice(MID_REG)] /// #[allow(non_upper_case_globals)] /// static ID_METHOD_ENTRY_MyTrait_mymethod: (&'static str, &'static str, fn(id: MethodId)) = /// ("MyTrait", "mymethod", id_method_setter_MyTrait_mymethod); /// #[allow(non_snake_case)] /// fn id_method_setter_MyTrait_mymethod(id: MethodId) { /// ID_METHOD_MyTrait_mymethod.store(id, ID_ORDERING); /// } /// #[test] /// fn setup() { /// let id_map: HashMap<(String, String), MethodId> = /// [(("MyTrait".to_owned(), "mymethod".to_owned()), 123)].iter().cloned().collect(); /// let id_map = IdMap { /// method_map: Some(id_map), /// }; /// setup_identifiers(&id_map); /// assert_eq!(ID_METHOD_MyTrait_mymethod.load(ID_ORDERING), 123); /// } /// ``` pub fn setup_identifiers(descriptor: &IdMap) { // distributed_slices integrity test { let mut bucket: HashSet<(String, String)> = HashSet::new(); for (ident1, ident2, _) in MID_REG { bucket.insert(((*ident1).to_owned(), (*ident2).to_owned())); } assert_eq!( bucket.len(), MID_REG.len(), "The service traits that this binary involved are not named; You have provided multiple traits with an identical name" ); } // method ids have default values decided by the order, so it is ok to leave them in an ordinary case. if let Some(map) = descriptor.method_map.as_ref() { for (trait_name, method_name, setter) in MID_REG { setter( *map.get(&((*trait_name).to_owned(), (*method_name).to_owned())) .expect("Invalid handle descriptor"), ); } } }
true
27454423e88ade09deadf335e03e5c4b32d1f464
Rust
iicurtis/mcp9808-rs
/src/reg_temp.rs
UTF-8
1,500
2.875
3
[ "Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
use reg::Register; use reg_temp_generic::ReadableTempRegister; const REGISTER_PTR: u8 = 0b0101; const REGISTER_SIZE: u8 = 2; const BIT_ALERT_CRITICAL: usize = 15; const BIT_ALERT_UPPER: usize = 14; const BIT_ALERT_LOWER: usize = 13; pub trait Temperature: ReadableTempRegister { fn is_alert_critical(&self) -> bool; fn is_alert_upper(&self) -> bool; fn is_alert_lower(&self) -> bool; } pub fn new() -> Register { Register::new(REGISTER_PTR, REGISTER_SIZE) } impl Temperature for Register { fn is_alert_critical(&self) -> bool { self.get_bit(BIT_ALERT_CRITICAL) } fn is_alert_upper(&self) -> bool { self.get_bit(BIT_ALERT_UPPER) } fn is_alert_lower(&self) -> bool { self.get_bit(BIT_ALERT_LOWER) } } #[cfg(test)] mod tests { /// prevent auto-format fuckup use super::*; #[test] fn alert_critical() { let mut reg = new(); assert_eq!(reg.is_alert_critical(), false); reg.set_bit(BIT_ALERT_CRITICAL, true); assert_eq!(reg.is_alert_critical(), true); } #[test] fn alert_upper() { let mut reg = new(); assert_eq!(reg.is_alert_upper(), false); reg.set_bit(BIT_ALERT_UPPER, true); assert_eq!(reg.is_alert_upper(), true); } #[test] fn alert_lower() { let mut reg = new(); assert_eq!(reg.is_alert_lower(), false); reg.set_bit(BIT_ALERT_LOWER, true); assert_eq!(reg.is_alert_lower(), true); } }
true
a620aff5cfb1b398b347d6151719c1bb1123eb44
Rust
lecorref/linear_regression
/src/predict.rs
UTF-8
1,025
3.3125
3
[]
no_license
use std::io; use std::io::Read; use std::fs::File; fn read_file(mut file: std::fs::File, mileage: f64) -> (){ let mut input = String::new(); file.read_to_string(&mut input).expect("Cannot read file"); let vec = input.split(" ").collect::<Vec<&str>>(); let theta0: f64 = vec[0].trim().parse().expect("Theta0 is not a number"); let theta1: f64 = vec[1].trim().parse().expect("Theta1 is not a number"); let min: f64 = vec[2].trim().parse().expect("Theta0 is not a number"); let max: f64 = vec[3].trim().parse().expect("Theta1 is not a number"); println!("Price: {:.*}", 2, theta0 + (theta1 * (mileage - min) / (max - min))); } fn main() { let mut input = String::new(); println!("Please enter a mileage:"); io::stdin().read_line(&mut input).expect("Error on read"); let mileage: f64 = input.trim().parse().expect("Not a Number"); match File::open("/tmp/frale-co/data.txt") { Ok(res) => {read_file(res, mileage)}, Err(_) => {println!("Price: 0")}, }; }
true
667a25ff9c4660d99e508c67e7e7c6bfb31f4702
Rust
grufkork/scpmapper
/src/main.rs
UTF-8
14,923
2.875
3
[]
no_license
use inputbot::{self}; use std::{cmp::max, collections::HashMap, io::{Write, stdout}, thread::sleep}; use std::time::Duration; use std::fs::read_to_string; #[derive(Copy, Clone, PartialEq, Eq)] enum Direction{ Up, Right, Down, Left } #[derive(Copy, Clone)] enum Zone{ Entrance, Heavy, Light } struct Layout{ map: Vec<Vec<char>>, name: String, paths: Vec<((usize, usize),Vec<Direction>, bool)>, // .2 = is finished/terminated? (encounted room), zone: String } fn main() { enable_virtual_terminal_processing(); println!("scpmapper v1.0.1"); let loop_time = Duration::from_millis(33); let layout_meta = read_to_string("layouts.txt").unwrap(); let layout_meta = layout_meta.split("\r\n").map(|row| { row.split(" ").collect::<Vec<&str>>() }).collect::<Vec<Vec<&str>>>(); let mut layouts = layout_meta.iter().map(|map| { let path = format!("scp-sl-layouts/{}/{}.txt", map[1], map[2]); Layout{ map: { let file = read_to_string(path).unwrap(); file.split("\r\n").map(|x| x.chars().collect()).collect::<Vec<Vec<char>>>() }, name: map[0].to_string(), paths: vec![], zone: map[1].to_string() } }).collect::<Vec<Layout>>(); // Load char-to-direction file let mut char_to_dirs: HashMap<char, Vec<Direction>> = HashMap::new(); let charmap_file = read_to_string("chars.txt").unwrap(); for row in charmap_file.split("\r\n") { let mut split = row.split(" "); char_to_dirs.insert(split.next().unwrap().chars().next().unwrap(), split.map(|dir| match dir{ "up" => Direction::Up, "left" => Direction::Left, "down" => Direction::Down, "right" => Direction::Right, _ => unreachable!() }).collect()); } // Pad maps as they are not square, get starting points for layout in layouts.iter_mut(){ let mut longest_row = 0; for (y,row) in layout.map.iter().enumerate(){ longest_row = max(longest_row, row.len()); for (x, c) in row.iter().enumerate(){ if c != &' ' && char_to_dirs.get(c).unwrap().len() == 1{ layout.paths.push(((x, y), vec![char_to_dirs.get(c).unwrap()[0].clone()], false)); } } } for i in 0..layout.map.len(){ let 
len = layout.map[i].len(); layout.map[i].append(&mut vec![' '; longest_row - len]); } } // Find unique paths from each room print!("Building Paths..."); loop{ let mut all_good = true; let mut paths_to_extend: Vec<(usize, usize)> = vec![]; 'check_all: for a in 0..layouts.len(){ // Iterate through all paths for b in 0..layouts[a].paths.len(){ if layouts[a].paths[b].2 {continue;} if layouts[a].paths[b].1.len() > 10{layouts[a].paths[b].2 = true; continue;} // Max path length is 10, to stop loops for x in 0..layouts.len(){ // And match them against all others if layouts[x].zone != layouts[a].zone {continue;} for y in 0..layouts[x].paths.len(){ if x == a && b == y {continue;} if layouts[x].paths[y].1.iter().map(|e| direction_to_local(layouts[x].paths[y].1[0], *e)).collect::<Vec<Direction>>().iter().eq( // Rotate paths so all are pointing the same way layouts[a].paths[b].1.iter().map(|e| direction_to_local(layouts[a].paths[b].1[0], *e)).collect::<Vec<Direction>>().iter()) { all_good = false; paths_to_extend.push((x, y)); // Add equal paths to a list to expand them one step further } } } if !all_good{ paths_to_extend.push((a, b)); break 'check_all; } layouts[a].paths[b].2 = true; // Path is unique and finished, doesn't need to be iterated over again. 
This line alone sped things up at least 10x } } if all_good{ break;}else{ // If all paths are unique, all good, otherwise expand them for x in paths_to_extend.iter(){ if layouts[x.0].paths[x.1].2 {continue;} let mut pos = layouts[x.0].paths[x.1].0; let mut last_dir: Direction = Direction::Up; for dir in (&layouts[x.0].paths[x.1].1).iter(){ // Move to end of path last_dir = *dir; match dir{ Direction::Up => pos.1 -= 1, Direction::Right => pos.0 += 1, Direction::Down => pos.1 += 1, Direction::Left => pos.0 -= 1 } } let dirs = char_to_dirs.get(&layouts[x.0].map[pos.1][pos.0]).unwrap(); if dirs.len() == 1{ layouts[x.0].paths[x.1].2 = true; }else{ let mut paths_added = 0; for dir in dirs.iter(){ if (*dir as u8) == (last_dir as u8 + 2)%4 {continue;} // Don't go back same way as it came from if paths_added == dirs.len() - 2{ layouts[x.0].paths[x.1].1.push(*dir); // Modify old path if no new branches are needed }else{ let mut path = layouts[x.0].paths[x.1].clone(); // Add new paths if it branches path.1.push(*dir); layouts[x.0].paths.push(path); } paths_added += 1; } } } print!("."); stdout().flush().unwrap(); } } println!(); let mut pressed_last_frame = false; let mut keydown = false; let mut dirstring = "".to_string(); println!("Started!"); let mut zone = Zone::Entrance; let mut state = 0; // 0 = select zone, 1 = awaiting selection, 2 = finding zone loop{ if inputbot::KeybdKey::Numpad8Key.is_pressed() || inputbot::KeybdKey::UpKey.is_pressed(){ if !pressed_last_frame{ keydown = true; dirstring = [dirstring, "F".into()].concat(); } pressed_last_frame = true; }else if inputbot::KeybdKey::Numpad4Key.is_pressed() || inputbot::KeybdKey::LeftKey.is_pressed(){ if !pressed_last_frame{ keydown = true; dirstring = [dirstring, "L".into()].concat(); } pressed_last_frame = true; }else if inputbot::KeybdKey::Numpad6Key.is_pressed() || inputbot::KeybdKey::RightKey.is_pressed(){ if !pressed_last_frame{ keydown = true; dirstring = [dirstring, "R".into()].concat(); } pressed_last_frame = 
true; }else if inputbot::KeybdKey::Numpad5Key.is_pressed() || inputbot::KeybdKey::DownKey.is_pressed(){ if !pressed_last_frame{ keydown = true; dirstring = [dirstring, "E".into()].concat(); } pressed_last_frame = true; }else if inputbot::KeybdKey::Numpad0Key.is_pressed() || inputbot::KeybdKey::BackspaceKey.is_pressed(){ if !pressed_last_frame{ if dirstring.len() > 0{ keydown = true; dirstring = dirstring[0..dirstring.len() - 1].into(); } } pressed_last_frame = true; }else{ pressed_last_frame = false; } match state{ 0=> { println!(); println!("Select zone..."); println!("← Entrance"); println!("↑ Heavy Containment"); println!("→ Light Containment"); state = 1; }, 1 => { if dirstring.len() > 0{ state = 2; zone = match dirstring.chars().next().unwrap(){ 'L' => Zone::Entrance, 'F' => Zone::Heavy, 'R' => Zone::Light, _ => { state = 1; Zone::Entrance } }; if state == 2{ println!("Selected zone: {}", zone_to_string(zone)); println!(); // Make space for cursor movements } dirstring = "".to_string() } }, 2 => { if keydown{ print!("\x1b[1A\x1b[99D"); println!("{} ", dirstring); // Space to overwrite when erasing let mut directions = vec![Direction::Up]; let mut angle: u8 = 0; let mut stopped = false; for mov in dirstring.chars(){ match mov{ 'L' => angle = (angle + 3) % 4, 'R' => angle = (angle + 1) % 4, 'E' => {stopped = true; break;}, 'F' => (), _ => unreachable!() } directions.push(match angle{ 0 => Direction::Up, 1 => Direction::Right, 2 => Direction::Down, 3 => Direction::Left, _ => unreachable!() }); } let mut matching_layouts: Vec<(usize, usize)> = vec![]; for x in 0..layouts.len(){ if layouts[x].zone != zone_to_string(zone) {continue;} for y in 0..layouts[x].paths.len(){ if layouts[x].paths[y].1.len() >= directions.len() && layouts[x].paths[y].1[0..directions.len()].iter().map(|e| direction_to_local(layouts[x].paths[y].1[0], *e)).collect::<Vec<Direction>>().iter().eq(&directions) { matching_layouts.push((x, y)); } } } let mut match_found = false; if 
matching_layouts.len() == 1{ match_found = true; }else if stopped{ let mut count = 0; for possible_path in matching_layouts.iter(){ if layouts[possible_path.0].paths[possible_path.1].1.len() == directions.len(){ count += 1; println!("Possible match: {}", layouts[possible_path.0].name); } } if count == 1{ match_found = true; }else{ println!("!!! Multiple matches found. Clearing..."); dirstring = "".to_string(); state = 0; } } if match_found{ let mut pos = layouts[matching_layouts[0].0].paths[matching_layouts[0].1].0; for dir in (&layouts[matching_layouts[0].0].paths[matching_layouts[0].1].1).iter(){ match dir{ Direction::Up => pos.1 -= 1, Direction::Right => pos.0 += 1, Direction::Down => pos.1 += 1, Direction::Left => pos.0 -= 1 } } println!("Map: {}", layouts[matching_layouts[0].0].name); for y in 0..layouts[matching_layouts[0].0].map.len(){ // Draw map for x in 0..layouts[matching_layouts[0].0].map[y].len(){ let mut stylepre = ""; let mut stylepost = ""; if x == layouts[matching_layouts[0].0].paths[matching_layouts[0].1].0.0 && y == layouts[matching_layouts[0].0].paths[matching_layouts[0].1].0.1{ stylepre = "\x1b[0;101m"; stylepost = "\x1b[0m"; }else if x == pos.0 && y == pos.1{ stylepre = "\x1b[0;42m"; stylepost = "\x1b[0m"; } print!("{}{}{}", stylepre, layouts[matching_layouts[0].0].map[y][x], stylepost); } println!(); } println!("Facing: {}", match layouts[matching_layouts[0].0].paths[matching_layouts[0].1].1.last().unwrap(){ Direction::Up => "↑", Direction::Right => "→", Direction::Down => "↓", Direction::Left => "←" }); println!(); println!(); dirstring = "".to_string(); state = 0; } } }, _ => unreachable!() } keydown = false; sleep(loop_time); } } fn direction_to_local(facing: Direction, direction: Direction) -> Direction{ match (direction as u8 + 4 - facing as u8) % 4{ 0=> Direction::Up, 1=> Direction::Right, 2=> Direction::Down, 3=> Direction::Left, _ => unreachable!() } } fn zone_to_string(zone: Zone) -> String{ match zone{ Zone::Entrance => 
"entrance".to_string(), Zone::Light => "light".to_string(), Zone::Heavy => "heavy".to_string() } } #[cfg(windows)] pub fn enable_virtual_terminal_processing() { use winapi_util::console::Console; if let Ok(mut term) = Console::stdout() { let _ = term.set_virtual_terminal_processing(true); } if let Ok(mut term) = Console::stderr() { let _ = term.set_virtual_terminal_processing(true); } }
true
81db8081f1039d6f7513b713b4246b5ead6edba6
Rust
alexpana/enigma
/src/tags_old.rs
UTF-8
5,270
2.9375
3
[]
no_license
//! Legacy ctags tag-file parser: reads a ctags-generated file into memory
//! and parses each entry into a `TagDefinition` that borrows its string
//! data from the stored lines.

use std::collections::HashMap;
use std::fs::File;
use std::io::BufRead;
use std::io::BufReader;
use std::path::Path;
use std::time::Instant;

/// Kind of symbol a tag refers to, decoded from the single-character
/// ctags "kind" field (see `tag_kind_from_char`).
#[derive(Debug, PartialOrd, PartialEq)]
pub enum TagKind {
    MacroDefinitions,
    EnumValue,
    FunctionDefinition,
    Enum,
    HeaderInclude,
    LocalVariable,
    ClassMember,
    FunctionPrototype,
    Struct,
    Typedef,
    Union,
    Variable,
    ForwardDeclaration,
    FunctionParameter,
    GotoLabel,
    Class,
    Namespace,
    NamespaceAlias,
    NamespaceUsingStatement,
    File,
    Unknown,
}

/// Decodes a ctags kind character; anything unrecognized becomes
/// `TagKind::Unknown`. Note both 'N' and 'U' map to a namespace
/// using-statement, as in the original table.
fn tag_kind_from_char(tag_kind: char) -> TagKind {
    match tag_kind {
        'd' => TagKind::MacroDefinitions,
        'e' => TagKind::EnumValue,
        'f' => TagKind::FunctionDefinition,
        'g' => TagKind::Enum,
        'h' => TagKind::HeaderInclude,
        'l' => TagKind::LocalVariable,
        'm' => TagKind::ClassMember,
        'p' => TagKind::FunctionPrototype,
        's' => TagKind::Struct,
        't' => TagKind::Typedef,
        'u' => TagKind::Union,
        'v' => TagKind::Variable,
        'x' => TagKind::ForwardDeclaration,
        'z' => TagKind::FunctionParameter,
        'L' => TagKind::GotoLabel,
        'c' => TagKind::Class,
        'n' => TagKind::Namespace,
        'A' => TagKind::NamespaceAlias,
        'N' => TagKind::NamespaceUsingStatement,
        'U' => TagKind::NamespaceUsingStatement,
        'F' => TagKind::File,
        _ => TagKind::Unknown,
    }
}

/// Where a tag was found: source file path and 1-based line number.
#[derive(Debug)]
pub struct TagLocation<'a> {
    pub file_path: &'a str,
    pub line: usize,
}

impl<'a> TagLocation<'a> {
    pub fn new(file_path: &'a str, line: usize) -> TagLocation<'a> {
        TagLocation { file_path, line }
    }
}

/// One parsed tag entry. All string fields borrow from the tag-file line
/// they were parsed from.
///
/// BUG FIX: the fields were declared as `(usize, usize)` index pairs while
/// every constructor assigned `&str` / `TagLocation` values (the file could
/// not compile); the declarations now match the constructors.
#[derive(Debug)]
pub struct TagDefinition<'a> {
    /// Symbol name (first tab-separated field of the entry).
    pub name: &'a str,
    /// Declaration text extracted from the `/^...$/` search pattern, or ""
    /// when the entry uses a plain line-number address.
    pub declaration: &'a str,
    /// File and line where the symbol is defined.
    pub location: TagLocation<'a>,
    /// Decoded symbol kind.
    pub kind: TagKind,
    /// Remaining extension fields (e.g. `line:123`), kept unparsed.
    pub fields: Vec<&'a str>,
}

impl<'a> TagDefinition<'a> {
    /// Builds a synthetic `File` tag for `file_path`, named after the final
    /// path component and located at line 1.
    ///
    /// # Panics
    /// Panics if `file_path` has no final component or is not valid UTF-8.
    pub fn new_file(file_path: &str) -> TagDefinition<'_> {
        let path = Path::new(file_path);
        TagDefinition {
            name: path.file_name().unwrap().to_str().unwrap(),
            declaration: "",
            location: TagLocation { file_path, line: 1 },
            kind: TagKind::File,
            fields: Vec::new(),
        }
    }
}

/// A tag file loaded into memory: the raw lines plus the tags parsed out
/// of them.
///
/// NOTE(review): `tags` borrows from `lines`, which makes this struct
/// self-referential; `load_tags` only compiles with an `&'a mut self`
/// receiver, which exclusively borrows the value for the rest of its
/// lifetime. The design likely wants owned `String`s in `TagDefinition`.
pub struct TagFile<'a> {
    file_path: String,
    lines: Vec<String>,
    tags: Vec<TagDefinition<'a>>,
}

impl<'a> TagFile<'a> {
    /// Reads `input_file_path` into memory without parsing any tags.
    /// Unreadable lines are reported to stdout and skipped.
    ///
    /// # Panics
    /// Panics if the file cannot be opened (kept from the original; a
    /// `Result` return would be a breaking interface change).
    pub fn from_file(input_file_path: &str) -> TagFile<'a> {
        let mut result = TagFile {
            file_path: String::from(input_file_path),
            lines: Vec::new(),
            tags: Vec::new(),
        };
        let f = File::open(input_file_path).unwrap();
        let reader = BufReader::new(&f);
        for (num, line) in reader.lines().enumerate() {
            match line {
                Err(e) => {
                    println!("Error reading line {}: {}", num, e);
                }
                Ok(v) => {
                    result.lines.push(v);
                }
            }
        }
        result
    }

    /// Parses every non-header line (ctags pragmas start with `!_`) into
    /// `self.tags`, reporting the elapsed time on stdout.
    fn load_tags(&'a mut self) {
        let now = Instant::now();
        for line in &self.lines {
            if !line.starts_with("!_") {
                let tag_definition = parse_tag_definition(line);
                self.tags.push(tag_definition);
            }
        }
        let elapsed = now.elapsed();
        println!("# Finished parsing {} tags file in {:.3}s",
                 self.tags.len(),
                 elapsed.as_secs() as f64 + elapsed.subsec_nanos() as f64 / 1e9_f64);
    }
}

/// Collection of loaded tag files.
pub struct TagDatabase<'a> {
    pub tag_files: Vec<TagFile<'a>>,
}

impl<'a> TagDatabase<'a> {
    /// Creates an empty database.
    pub fn new() -> TagDatabase<'a> {
        TagDatabase {
            tag_files: Vec::new(),
        }
    }
}

/// Splits the address part of a ctags line.
///
/// Returns the declaration text (the body of the `/^...$/` search pattern,
/// or "" for line-number addresses) and the byte index of the
/// single-character kind field that follows the `;"` terminator.
///
/// BUG FIX: `declaration_end` was computed as `find(";\"") + 4`, which made
/// the returned cursor point two bytes past the kind character (into the
/// extension fields, so pattern-addressed tags decoded as `GotoLabel` from
/// the 'l' of `line:`) and left the `$/` terminator inside the declaration.
/// It is now `+ 2` (one past the closing `"`), so `declaration_end - 4`
/// strips the 4-byte `$/;"` suffix and `declaration_end + 1` lands exactly
/// on the kind character, matching the `None` branch's `+ 3`.
///
/// # Panics
/// Panics if the line contains no `;"` marker.
fn parse_declaration(line: &str) -> (&str, usize) {
    match line.find("/^") {
        // Line-number address: no declaration text; the kind character sits
        // 3 bytes after the `;` (past `;"` and the separating tab).
        None => ("", line.find(";\"").expect("Could not parse tag file") + 3),
        Some(v) => {
            let declaration_end = line.find(";\"").expect("Could not parse tag file") + 2;
            (&line[v + 2..declaration_end - 4], declaration_end + 1)
        }
    }
}

/// Parses one ctags entry of the form
/// `name<TAB>file<TAB>/^pattern$/;"<TAB>kind<TAB>field...`.
///
/// The definition line number is taken from a `line:<n>` extension field
/// and defaults to 0 when absent.
///
/// # Panics
/// Panics if the line is not a well-formed ctags entry. Note the kind
/// lookup treats the byte cursor as a char index — assumes the address part
/// of the line is ASCII, as ctags emits.
fn parse_tag_definition(line: &str) -> TagDefinition<'_> {
    let expect_msg = "Could not parse tag file";
    let name = line.split('\t').nth(0).expect(expect_msg);
    let location_file_path = line.split('\t').nth(1).expect(expect_msg);
    let (declaration, cursor) = parse_declaration(line);
    let tag_kind_char = line.chars().nth(cursor).unwrap();
    let fields: Vec<&str> = line[cursor + 1..]
        .split('\t')
        .filter(|x| !x.is_empty())
        .collect();
    let location_line = fields
        .iter()
        .find(|x| x.starts_with("line:"))
        .map(|v| v[v.find(':').unwrap() + 1..].parse::<usize>().unwrap())
        .unwrap_or(0);

    TagDefinition {
        name,
        declaration,
        location: TagLocation {
            file_path: location_file_path,
            line: location_line,
        },
        kind: tag_kind_from_char(tag_kind_char),
        fields,
    }
}
true