blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 5
140
| path
stringlengths 5
183
| src_encoding
stringclasses 6
values | length_bytes
int64 12
5.32M
| score
float64 2.52
4.94
| int_score
int64 3
5
| detected_licenses
listlengths 0
47
| license_type
stringclasses 2
values | text
stringlengths 12
5.32M
| download_success
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|
fc2788621a5cf661683de2f96fe68d05703b474c
|
Rust
|
danielfrg/grpc-up-and-running
|
/ch3/server-stream/client/gen/ecommerce.rs
|
UTF-8
| 4,388
| 2.53125
| 3
|
[
"Apache-2.0"
] |
permissive
|
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Order {
#[prost(string, tag = "1")]
pub id: ::prost::alloc::string::String,
#[prost(string, repeated, tag = "2")]
pub items: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
#[prost(string, tag = "3")]
pub description: ::prost::alloc::string::String,
#[prost(float, tag = "4")]
pub price: f32,
#[prost(string, tag = "5")]
pub destination: ::prost::alloc::string::String,
}
#[doc = r" Generated client implementations."]
pub mod order_management_client {
#![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)]
use tonic::codegen::*;
#[derive(Debug, Clone)]
pub struct OrderManagementClient<T> {
inner: tonic::client::Grpc<T>,
}
impl OrderManagementClient<tonic::transport::Channel> {
#[doc = r" Attempt to create a new client by connecting to a given endpoint."]
pub async fn connect<D>(dst: D) -> Result<Self, tonic::transport::Error>
where
D: std::convert::TryInto<tonic::transport::Endpoint>,
D::Error: Into<StdError>,
{
let conn = tonic::transport::Endpoint::new(dst)?.connect().await?;
Ok(Self::new(conn))
}
}
impl<T> OrderManagementClient<T>
where
T: tonic::client::GrpcService<tonic::body::BoxBody>,
T::ResponseBody: Body + Send + 'static,
T::Error: Into<StdError>,
<T::ResponseBody as Body>::Error: Into<StdError> + Send,
{
pub fn new(inner: T) -> Self {
let inner = tonic::client::Grpc::new(inner);
Self { inner }
}
pub fn with_interceptor<F>(
inner: T,
interceptor: F,
) -> OrderManagementClient<InterceptedService<T, F>>
where
F: tonic::service::Interceptor,
T: tonic::codegen::Service<
http::Request<tonic::body::BoxBody>,
Response = http::Response<
<T as tonic::client::GrpcService<tonic::body::BoxBody>>::ResponseBody,
>,
>,
<T as tonic::codegen::Service<http::Request<tonic::body::BoxBody>>>::Error:
Into<StdError> + Send + Sync,
{
OrderManagementClient::new(InterceptedService::new(inner, interceptor))
}
#[doc = r" Compress requests with `gzip`."]
#[doc = r""]
#[doc = r" This requires the server to support it otherwise it might respond with an"]
#[doc = r" error."]
pub fn send_gzip(mut self) -> Self {
self.inner = self.inner.send_gzip();
self
}
#[doc = r" Enable decompressing responses with `gzip`."]
pub fn accept_gzip(mut self) -> Self {
self.inner = self.inner.accept_gzip();
self
}
#[doc = " Unary"]
pub async fn get_order(
&mut self,
request: impl tonic::IntoRequest<::prost::alloc::string::String>,
) -> Result<tonic::Response<super::Order>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static("/ecommerce.OrderManagement/getOrder");
self.inner.unary(request.into_request(), path, codec).await
}
#[doc = " Server streaming"]
pub async fn search_orders(
&mut self,
request: impl tonic::IntoRequest<::prost::alloc::string::String>,
) -> Result<tonic::Response<tonic::codec::Streaming<super::Order>>, tonic::Status> {
self.inner.ready().await.map_err(|e| {
tonic::Status::new(
tonic::Code::Unknown,
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path =
http::uri::PathAndQuery::from_static("/ecommerce.OrderManagement/searchOrders");
self.inner
.server_streaming(request.into_request(), path, codec)
.await
}
}
}
| true
|
4eb279eeb0c32dcfa35bfc093124505317d51138
|
Rust
|
sam-wright/Advent-of-Code
|
/2018/day2/src/main.rs
|
UTF-8
| 2,195
| 3.359375
| 3
|
[] |
no_license
|
use std::collections::HashMap;
use std::fs::File;
use std::io::{self, Read};
fn main() -> io::Result<()> {
let mut file = File::open("input.txt")?;
//let mut file = File::open("test_input.txt")?;
//let mut file = File::open("test_input2.txt")?;
let mut contents = String::new();
file.read_to_string(&mut contents)?;
let collection: Vec<&str> = contents[..contents.len() - 1].split('\n').collect();
let letters = vec![
'a', 'b', 'a', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q',
'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',
];
let mut hash = HashMap::new();
for line in &collection {
println!("{} --", &line);
let mut double = false;
let mut triple = false;
for letter in &letters {
let n = line.matches(*letter).count();
//println!("\t {} {}", *letter, n);
match n {
0 => {}
1 => {}
2 => double = true,
3 => triple = true,
_ => println!("need more buckets!"),
};
}
if double {
let counter = hash.entry(2).or_insert(0);
*counter += 1;
}
if triple {
let counter = hash.entry(3).or_insert(0);
*counter += 1;
}
}
println!("\nord\tinst");
let mut total_hash = 1;
for val in 2..4 {
let x = hash.entry(val).or_insert(0);
println!("{}\t{}", val, *x);
total_hash *= *x;
}
println!("total_hash = {}", total_hash);
// Part-2
for line in &collection {
for compline in &collection {
let mut resline = String::from("");
for i in 0..line.len() {
if line.as_bytes()[i] == compline.as_bytes()[i] {
resline.push_str(&line[i..=i]);
}
}
if resline.len() + 1 == line.len() {
println!(
"I think we have a match!!\nline\t--\t{}\ncmpline\t--\t{}\nresline\t--\t{}",
line, compline, resline
);
}
}
}
Ok(())
}
| true
|
cf4b28b88c57c88902771a1ff2144f912a394874
|
Rust
|
presciense/actix-mqtt
|
/src/error.rs
|
UTF-8
| 1,458
| 2.859375
| 3
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
use derive_more::Display;
use std::io;
/// Errors which can occur when attempting to handle mqtt connection.
#[derive(Debug)]
pub enum MqttError<E> {
/// Message handler service error
Service(E),
/// Mqtt parse error
Protocol(mqtt_codec::ParseError),
/// Unexpected packet
Unexpected(mqtt_codec::Packet, &'static str),
/// "SUBSCRIBE, UNSUBSCRIBE, and PUBLISH (in cases where QoS > 0) Control Packets MUST contain a non-zero 16-bit Packet Identifier [MQTT-2.3.1-1]."
PacketIdRequired,
/// Keep alive timeout
KeepAliveTimeout,
/// Peer disconnect
Disconnected,
/// Unexpected io error
Io(io::Error),
}
impl<E> From<mqtt_codec::ParseError> for MqttError<E> {
fn from(err: mqtt_codec::ParseError) -> Self {
MqttError::Protocol(err)
}
}
impl<E> From<io::Error> for MqttError<E> {
fn from(err: io::Error) -> Self {
MqttError::Io(err)
}
}
#[derive(Debug, Display, PartialEq)]
pub enum SendPacketError {
/// Encoder error
Encode(EncodeError),
/// Provided packet id is in use
#[display(fmt = "Provided packet id is in use")]
PacketIdInUse(u16),
/// Peer disconnected
#[display(fmt = "Peer disconnected")]
Disconnected,
#[display(fmt = "Packet id")]
PacketId,
}
#[derive(Copy, Clone, Debug, Display, PartialEq, Eq, Hash)]
pub enum EncodeError {
InvalidLength,
MalformedPacket,
PacketIdRequired,
UnsupportedVersion,
}
| true
|
1c466f5f057f7172a26339adbd65f43a766870c9
|
Rust
|
rampion/advent-of-code
|
/2020/day/07/src/main.rs
|
UTF-8
| 4,401
| 3.25
| 3
|
[] |
no_license
|
use std::collections::HashMap;
use std::error;
type Error = Box<dyn error::Error>;
macro_rules! error {
($fmt:literal $(, $e:expr)*) => { Err(Error::from(format!($fmt $(, $e)*))) };
}
fn main() -> Result<(), Error> {
for filename in std::env::args().skip(1) {
println!("{}", filename);
let rules = flatten(parse_rules(&std::fs::read_to_string(filename)?)?);
println!("\tpart 1: how many different bags could recursively contain a shiny gold bag");
println!("\t{}", part1(&rules));
println!("\tpart 2: how many different bags are recursively contained in a shiny gold bag");
println!("\t{}", part2(&rules));
}
Ok(())
}
fn parse_rules(src: &str) -> Result<Rules, Error> {
src.lines().map(parse_rule).collect()
}
fn parse_rule(src: &str) -> Result<(Bag, Counts), Error> {
let words: Vec<&str> = src.split_whitespace().collect();
let bag = match &words[0..4] {
[adverb, color, "bags", "contain"] => Ok(to_bag(adverb, color)),
terms => error!(
"expected \"<adverb> <color> bags contain\" but found {:?}",
terms.join(" ")
),
}?;
let counts = match words[4..] {
["no", "other", "bags."] => Ok(HashMap::new()),
_ => (4..words.len())
.step_by(4)
.map(|ix| match &words[ix..ix + 4] {
[count, adverb, color, _bag] => {
Ok((to_bag(adverb, color), count.parse().map_err(Error::from)?))
}
terms => error!(
"expected \"<count> <adverb> <color> bags?[.,]\", but found {:?}",
terms.join(" ")
),
})
.collect::<Result<Counts, Error>>(),
}?;
Ok((bag, counts))
}
fn to_bag(adverb: &str, color: &str) -> Bag {
(adverb.into(), color.into())
}
fn part1(rules: &Rules) -> usize {
let shiny_gold_bag = to_bag("shiny", "gold");
rules
.values()
.filter(|counts| counts.get(&shiny_gold_bag).cloned().unwrap_or(0) > 0)
.length()
}
fn part2(rules: &Rules) -> usize {
let shiny_gold_bag = to_bag("shiny", "gold");
rules
.get(&shiny_gold_bag)
.map_or(0, |counts| counts.values().sum())
}
trait Length {
fn length(self) -> usize;
}
impl<T> Length for T
where
T: IntoIterator,
{
fn length(self) -> usize {
let mut length = 0;
for _ in self {
length += 1;
}
length
}
}
type Rules = HashMap<Bag, Counts>;
type Counts = HashMap<Bag, usize>;
type Bag = (String, String);
fn scale_and_add_to(dst: &mut Counts, src: &Counts, multiplier: usize) {
for (bag, &count) in src {
*dst.entry(bag.clone()).or_insert(0) += multiplier * count;
}
}
fn flatten(rules: Rules) -> Rules {
//! calculate the nested counts using DFS
let mut results = HashMap::new();
for (outer_bag, counts) in rules.iter() {
if results.contains_key(outer_bag) {
continue;
}
let mut flattened = HashMap::new();
let mut queue = counts.iter();
let mut stack = Vec::new();
loop {
match queue.next() {
Some((inner_bag, &inner_count)) => {
*flattened.entry(inner_bag.clone()).or_insert(0) += inner_count;
match results.get(inner_bag) {
Some(inner_flattened) => {
scale_and_add_to(&mut flattened, &inner_flattened, inner_count);
}
None => {
stack.push((flattened, queue, inner_bag, inner_count));
flattened = HashMap::new();
queue = rules[&inner_bag].iter();
}
}
}
None => match stack.pop() {
Some((mut outer_flattened, outer_queue, bag, count)) => {
scale_and_add_to(&mut outer_flattened, &flattened, count);
results.insert(bag.clone(), flattened);
flattened = outer_flattened;
queue = outer_queue;
}
None => break,
},
}
}
results.insert(outer_bag.clone(), flattened);
}
results
}
| true
|
e49ee02b6c30fd62608c0aad2e204d43a0366722
|
Rust
|
GoXLR-on-Linux/goxlr-utility
|
/daemon/src/platform/unix.rs
|
UTF-8
| 876
| 2.765625
| 3
|
[
"MIT",
"LicenseRef-scancode-other-permissive"
] |
permissive
|
use crate::events::EventTriggers;
use crate::DaemonState;
use anyhow::Result;
use log::debug;
use tokio::select;
use tokio::signal::unix::{signal, SignalKind};
use tokio::sync::mpsc;
pub async fn spawn_platform_runtime(
state: DaemonState,
tx: mpsc::Sender<EventTriggers>,
) -> Result<()> {
// This one's a little odd, because Windows doesn't directly support SIGTERM, we're going
// to monitor for it here, and trigger a shutdown if one is received.
let mut stream = signal(SignalKind::terminate())?;
let mut shutdown = state.shutdown.clone();
select! {
Some(_) = stream.recv() => {
// Trigger a Shutdown
debug!("TERM Signal Received, Triggering STOP");
let _ = tx.send(EventTriggers::Stop).await;
},
() = shutdown.recv() => {}
}
debug!("Platform Runtime Ended");
Ok(())
}
| true
|
7b34572bf6fbee487df3685703ca6190fb4e9326
|
Rust
|
inrick/raytrace
|
/rs/src/ray.rs
|
UTF-8
| 9,449
| 2.875
| 3
|
[] |
no_license
|
use std::{ffi::OsStr, fs::File, io::Write, path::PathBuf};
use crate::vec::*;
pub struct Args {
pub nsamples: u32,
pub threads: u32,
pub cam: Camera,
pub scene: Scene,
}
pub struct Image {
pub buf: Vec<u8>,
pub nx: u32,
pub ny: u32,
}
fn rand32() -> f32 {
rand::random()
}
static PI: f32 = std::f32::consts::PI;
type Error = Box<dyn std::error::Error>;
type Result<T> = ::std::result::Result<T, Error>;
type ImageWriter = fn(&mut File, &[u8], u32, u32) -> Result<()>;
pub fn save_file(img: &Image, filename: &str) -> Result<()> {
let extension = PathBuf::from(filename)
.extension()
.and_then(OsStr::to_str)
.map(|s| s.to_lowercase())
.ok_or("missing file extension")?;
let image_writer: ImageWriter = match extension.as_ref() {
"ppm" => ppm_write,
"png" => |f, buf, x, y| {
image::write_buffer_with_format(
f,
buf,
x,
y,
image::ColorType::Rgb8,
image::ImageOutputFormat::Png,
)?;
Ok(())
},
"jpg" | "jpeg" => |f, buf, x, y| {
image::write_buffer_with_format(
f,
buf,
x,
y,
image::ColorType::Rgb8,
image::ImageOutputFormat::Jpeg(90),
)?;
Ok(())
},
unknown => {
return Err(
format!(
"unknown image output format for file extension '{}' \
(only know ppm/png/jpg)",
unknown,
)
.into(),
)
}
};
let mut f = File::create(filename)?;
image_writer(&mut f, &img.buf, img.nx, img.ny)
}
pub fn raytrace(args: &Args, nx: u32, ny: u32) -> Image {
let nsamples = args.nsamples;
let threads = args.threads;
if threads == 0 || nsamples == 0 {
panic!("number of samples and threads must be positive");
}
let mut buf = vec![0; (3 * nx * ny) as usize];
std::thread::scope(|s| {
let mut ny_pos: u32 = 0;
let mut bufpos: usize = 0;
for i in 0..threads {
let ny_remaining = ny - ny_pos;
let ny_th = ny_remaining / (threads - i);
let len_th = 3 * nx * ny_th;
let ymax = (ny_remaining) as f32 / ny as f32;
let ymin = (ny_remaining - ny_th) as f32 / ny as f32;
let (cam, sc) = (&args.cam, &args.scene);
// Couldn't find a way to do different sized chunks with buf.chunks_mut
// so did the split manually instead.
let bufchunk = unsafe {
&mut *std::ptr::slice_from_raw_parts_mut(
buf.as_mut_ptr().add(bufpos),
len_th as usize,
)
};
s.spawn(move || {
render(bufchunk, cam, sc, nsamples, nx, ny_th, (ymin, ymax));
});
ny_pos += ny_th;
bufpos += len_th as usize;
}
});
Image { buf, nx, ny }
}
#[allow(clippy::identity_op)]
fn render(
buf: &mut [u8],
cam: &Camera,
sc: &Scene,
nsamples: u32,
nx: u32,
ny: u32,
(ymin, ymax): (f32, f32),
) {
assert_eq!(buf.len(), (3 * nx * ny) as usize);
let yheight = ymax - ymin;
let mut bi = 0;
for j in (0..ny).rev() {
for i in 0..nx {
let mut color = Vec3::default();
for _ in 0..nsamples {
let x = (i as f32 + rand32()) / nx as f32;
let y = ymin + yheight * (j as f32 + rand32()) / ny as f32;
let r = cam.ray_at(x, y);
color = color + sc.color(&r);
}
color = (color / nsamples as f32).sqrt();
buf[bi + 0] = (255. * color.x) as u8;
buf[bi + 1] = (255. * color.y) as u8;
buf[bi + 2] = (255. * color.z) as u8;
bi += 3;
}
}
}
fn ppm_write(f: &mut File, buf: &[u8], x: u32, y: u32) -> Result<()> {
f.write_all(format!("P6\n{} {} 255\n", x, y).as_bytes())?;
f.write_all(buf)?;
Ok(())
}
#[derive(Clone, Copy)]
struct Ray {
origin: Vec3,
dir: Vec3,
}
impl Ray {
fn eval(self, t: f32) -> Vec3 {
self.origin + self.dir * t
}
}
#[derive(Copy, Clone)]
enum Material {
Matte { albedo: Vec3 },
Metal { albedo: Vec3, fuzz: f32 },
Dielectric { ref_idx: f32 },
}
struct HitRecord {
t: f32,
p: Vec3,
normal: Vec3,
mat: Material,
}
impl Default for HitRecord {
fn default() -> Self {
HitRecord {
t: 0.,
p: Vec3::default(),
normal: Vec3::default(),
mat: Material::Matte {
albedo: Vec3::default(),
},
}
}
}
#[derive(Clone)]
struct Sphere {
center: Vec3,
radius: f32,
mat: Material,
}
impl Sphere {
#[allow(non_snake_case)]
fn hit(&self, tmin: f32, tmax: f32, r: &Ray, rec: &mut HitRecord) -> bool {
let oc = r.origin - self.center;
let a = dot(r.dir, r.dir);
let b = dot(oc, r.dir);
let c = dot(oc, oc) - self.radius * self.radius;
let D = b * b - a * c;
if D > 0. {
for t in [(-b - D.sqrt()) / a, (-b + D.sqrt()) / a] {
if tmin < t && t < tmax {
rec.t = t;
rec.p = r.eval(t);
rec.normal = (rec.p - self.center) / self.radius;
rec.mat = self.mat;
return true;
}
}
}
false
}
}
#[derive(Clone)]
pub struct Scene {
spheres: Vec<Sphere>,
}
impl Scene {
fn color(&self, r0: &Ray) -> Vec3 {
let mut rec: HitRecord = HitRecord::default();
let mut r = *r0;
let mut color = ONES;
for _depth in 0..50 {
if !self.hit(0.001, f32::MAX, &mut r, &mut rec) {
let t = 0.5 * (r.dir.normalize().y + 1.);
color = color * (vec(0.75, 0.95, 1.0) * t + ONES * (1. - t));
break;
}
let (attenuation, scattered) = scatter(&r, rec.p, rec.normal, rec.mat);
r = scattered;
color = color * attenuation;
}
color
}
fn hit(
&self,
tmin: f32,
tmax: f32,
r: &mut Ray,
rec: &mut HitRecord,
) -> bool {
let mut hit = false;
let mut closest = tmax;
for sph in &self.spheres {
if sph.hit(tmin, closest, r, rec) {
hit = true;
closest = rec.t;
}
}
hit
}
}
#[allow(dead_code)]
#[derive(Clone)]
pub struct Camera {
lower_left_corner: Vec3,
horiz: Vec3,
vert: Vec3,
origin: Vec3,
u: Vec3,
v: Vec3,
w: Vec3,
lens_radius: f32,
}
impl Camera {
pub fn new(
look_from: Vec3,
look_at: Vec3,
v_up: Vec3,
vfov: f32,
aspect: f32,
aperture: f32,
focus_dist: f32,
) -> Self {
let theta = vfov * PI / 180.;
let half_height = (theta / 2.).tan();
let half_width = aspect * half_height;
let w = (look_from - look_at).normalize();
let u = cross(v_up, w).normalize();
let v = cross(w, u);
let lower_left_corner = look_from
- u * focus_dist * half_width
- v * focus_dist * half_height
- w * focus_dist;
Camera {
lower_left_corner,
horiz: u * 2. * half_width * focus_dist,
vert: v * 2. * half_height * focus_dist,
origin: look_from,
u,
v,
w,
lens_radius: aperture / 2.,
}
}
fn ray_at(&self, x: f32, y: f32) -> Ray {
let rd = random_in_unit_ball() * self.lens_radius;
let offset = self.u * rd.x + self.v * rd.y;
let dir = self.lower_left_corner + (self.horiz * x + self.vert * y)
- self.origin
- offset;
Ray {
origin: self.origin + offset,
dir,
}
}
}
fn schlick(cosine: f32, ref_idx: f32) -> f32 {
let r0 = (1. - ref_idx) / (1. + ref_idx);
let r0 = r0 * r0;
r0 + (1. - r0) * (1. - cosine).powi(5)
}
#[allow(non_snake_case)]
fn refract(
u: Vec3,
normal: Vec3,
ni_over_nt: f32,
refracted: &mut Vec3,
) -> bool {
let un = u.normalize();
let dt = dot(un, normal);
let D = 1. - ni_over_nt * ni_over_nt * (1. - dt * dt);
if D > 0. {
let v = (un - normal * dt) * ni_over_nt - normal * D.sqrt();
*refracted = v;
return true;
}
false
}
fn scatter(r: &Ray, p: Vec3, normal: Vec3, mat: Material) -> (Vec3, Ray) {
use Material::*;
match mat {
Matte { albedo } => {
let target = p + normal + random_in_unit_ball();
let scattered = Ray {
origin: p,
dir: target - p,
};
(albedo, scattered)
}
Metal { albedo, fuzz } => {
let reflected = reflect(r.dir.normalize(), normal);
let dir = reflected + random_in_unit_ball() * fuzz;
let scattered: Ray = if dot(dir, normal) > 0. {
Ray { origin: p, dir }
} else {
*r
};
(albedo, scattered)
}
Dielectric { ref_idx } => {
let d = dot(r.dir, normal);
let outward_normal: Vec3;
let ni_over_nt: f32;
let cosine: f32;
if d > 0. {
outward_normal = -normal;
ni_over_nt = ref_idx;
cosine = ref_idx * d / r.dir.norm();
} else {
outward_normal = normal;
ni_over_nt = 1. / ref_idx;
cosine = -d / r.dir.norm();
}
let mut dir = Vec3::default();
if !refract(r.dir, outward_normal, ni_over_nt, &mut dir)
|| rand32() < schlick(cosine, ref_idx)
{
dir = reflect(r.dir, normal);
}
let scattered = Ray { origin: p, dir };
(ONES, scattered)
}
}
}
#[cfg(not(feature = "gui"))]
pub fn camera_default(nx: u32, ny: u32) -> Camera {
let look_from = vec(10., 2.5, 5.);
let look_at = vec(-4., 0., -2.);
let dist_to_focus = (look_from - look_at).norm();
let aperture = 0.05;
let (nxf, nyf) = (nx as f32, ny as f32);
Camera::new(
look_from,
look_at,
vec(0., 1., 0.),
20.,
nxf / nyf,
aperture,
dist_to_focus,
)
}
pub fn small_scene() -> Scene {
let nspheres = 3 + 360 / 15;
let mut spheres = Vec::with_capacity(nspheres);
spheres.push(Sphere {
center: vec(0., -1000., 0.),
radius: 1000.,
mat: Material::Matte {
albedo: vec(0.88, 0.96, 0.7),
},
});
spheres.push(Sphere {
center: vec(1.5, 1., 0.),
radius: 1.,
mat: Material::Dielectric { ref_idx: 1.5 },
});
spheres.push(Sphere {
center: vec(-1.5, 1., 0.),
radius: 1.,
mat: Material::Metal {
albedo: vec(0.8, 0.9, 0.8),
fuzz: 0.,
},
});
for deg in (0..360).step_by(15) {
let x = ((deg as f32) * PI / 180.).sin();
let z = ((deg as f32) * PI / 180.).cos();
let r0 = 3.;
let r1 = 0.33 + x * z / 9.;
spheres.push(Sphere {
center: vec(r0 * x, r1, r0 * z),
radius: r1,
mat: Material::Matte {
albedo: vec(x, 0.5 + x * z / 2., z),
},
});
}
Scene { spheres }
}
| true
|
3207771cfd48afab2eec392dcaa71c330d23a7af
|
Rust
|
abhikjain360/timaru
|
/timaru/src/setup.rs
|
UTF-8
| 899
| 2.640625
| 3
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
use std::{env, path::PathBuf};
use tokio::fs;
use crate::error::Error;
#[inline]
pub async fn config_dir() -> Result<PathBuf, Error> {
let cfg_dir = if let Ok(dir) = env::var("XDG_CONFIG_HOME") {
PathBuf::from(dir).join("timaru")
} else if let Ok(dir) = env::var("HOME") {
PathBuf::from(dir).join(".config/timaru")
} else {
return Err(Error::EnvVar);
};
check_dir(cfg_dir).await
}
#[inline]
pub async fn check_dir(dir: PathBuf) -> Result<PathBuf, Error> {
if !dir.is_dir() {
match fs::create_dir(dir.clone()).await {
Ok(_) => Ok(dir),
Err(_) => Err(Error::Dir(dir)),
}
} else {
Ok(dir)
}
}
#[inline]
pub async fn check_setup() -> Result<(PathBuf, PathBuf), Error> {
let cfg_dir = config_dir().await?;
let db_dir = check_dir(cfg_dir.join("db")).await?;
Ok((cfg_dir, db_dir))
}
| true
|
364d3a107355f3732188fbf00feb07622545e8e4
|
Rust
|
grapl-security/grapl
|
/src/rust/grapl-graphql-codegen/src/identification_algorithm.rs
|
UTF-8
| 1,591
| 2.9375
| 3
|
[
"Apache-2.0"
] |
permissive
|
use graphql_parser::schema::Directive;
/// Represents which algorithm is used to identify a node
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum IdentificationAlgorithm {
Session,
Static,
}
use std::convert::TryFrom;
use crate::{
constants::{
SESSION_ALGORITHM,
STATIC_ALGORITHM,
},
errors::CodeGenError,
};
impl IdentificationAlgorithm {
pub fn from_directive(directive: &Directive<'static, String>) -> Option<Self> {
if directive.name != "grapl" {
return None;
}
directive
.arguments
.iter()
.find_map(|(arg_name, arg)| match (arg_name.as_str(), arg) {
("identity_algorithm", graphql_parser::schema::Value::String(s))
if s == SESSION_ALGORITHM =>
{
Some(Self::Session)
}
("identity_algorithm", graphql_parser::schema::Value::String(s))
if s == STATIC_ALGORITHM =>
{
Some(Self::Static)
}
(_, _) => None,
})
}
}
impl TryFrom<&[Directive<'static, String>]> for IdentificationAlgorithm {
type Error = CodeGenError;
fn try_from(directives: &[Directive<'static, String>]) -> Result<Self, Self::Error> {
directives
.iter()
.find_map(IdentificationAlgorithm::from_directive)
.ok_or_else(|| CodeGenError::MissingNodeIdentificationAlgorithm {
directives: directives.to_vec(),
})
}
}
| true
|
92e171b27b48df0d77537cf1d48903af0907a958
|
Rust
|
marcelo-a/fp-rust-book
|
/examples/book_04_01_04_reference/annotated_source.rs
|
UTF-8
| 321
| 3.28125
| 3
|
[
"MIT",
"Apache-2.0",
"LicenseRef-scancode-public-domain",
"BSD-3-Clause",
"BSD-2-Clause",
"NCSA",
"LicenseRef-scancode-other-permissive",
"Unlicense"
] |
permissive
|
fn main() {
let mut <tspan data-hash="1">s</tspan> = <tspan class="fn" data-hash="0" hash="2">String::from</tspan>("hello");
<tspan data-hash="1">s</tspan>.push_str(", world!"); // push_str() appends a literal to a String
println!("{}", <tspan data-hash="1">s</tspan>); // This will print `hello, world!`
}
| true
|
7da66a97a31d624b4eac166bd9f3d02725ff8e6f
|
Rust
|
Yatekii/reactor
|
/reactor-derive/src/lib.rs
|
UTF-8
| 9,650
| 2.640625
| 3
|
[] |
no_license
|
#![recursion_limit="128"]
use quote::quote;
extern crate proc_macro;
extern crate proc_macro2;
use syn::parse::{
Parse,
ParseStream
};
#[derive(Debug, PartialEq)]
struct SubState {
ident: syn::Ident,
sub_states: Vec<SubState>,
}
impl Parse for SubState {
fn parse(input: ParseStream) -> syn::Result<Self> {
let ident;
if input.peek(syn::Token![enum]) {
input.parse::<syn::Token![enum]>()?;
ident = input.parse()?;
let content;
syn::braced!(content in input);
Ok(SubState {
ident,
sub_states: content.parse_terminated::<SubState, syn::Token![,]>(SubState::parse)?.into_iter().collect(),
})
} else if input.peek(syn::Ident) {
ident = input.parse()?;
Ok(SubState {
ident,
sub_states: vec![],
})
} else {
panic!("Expected a state identifier or a nested state enum");
}
}
}
fn generate_enum_variants(state: &SubState) -> proc_macro2::TokenStream {
let definitions = state.sub_states.iter().map(|sub_state| {
let variant = sub_state.ident.clone();
if sub_state.sub_states.is_empty() {
quote! {
#variant(#variant)
}
} else {
quote! {
#variant(#variant)
}
}
})
.collect::<Vec<_>>();
quote! {
#(#definitions),*
}
}
fn assemble_from_sub_state(root: &SubState, sub_state: &SubState) -> (proc_macro2::TokenStream, usize) {
let sub_state_name = sub_state.ident.clone();
let t = sub_state.sub_states.iter().map(|sub_state| assemble_from_sub_state(root, sub_state)).collect::<(Vec<_>)>();
let num_levels = t.iter().map(|v| v.1).fold(0, usize::max) + 1;
let sub_state_definitions = t.into_iter().map(|v| v.0).collect::<Vec<_>>();
let sub_state_variants = generate_enum_variants(sub_state);
let super_trait_impl = impl_state(root, sub_state);
(
if sub_state_variants.is_empty() {
quote! {
#[derive(Copy, Clone, Debug)]
struct #sub_state_name();
// TODO: Enable this once it's stable
// impl std::ops::Fn() -> Self for #sub_state_name {
// extern "rust-call" fn call(&self) -> Self {
// Self {}
// }
// }
#super_trait_impl
#(#sub_state_definitions)*
}
} else {
quote! {
#[derive(Copy, Clone, Debug)]
enum #sub_state_name {
#(#sub_state_variants)*
}
#super_trait_impl
#(#sub_state_definitions)*
}
},
num_levels
)
}
#[proc_macro]
pub fn state_machine(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
// Parse the string representation
let input = proc_macro2::TokenStream::from(input);
// println!("{:#?}", input);
let root: SubState = syn::parse2(input.clone()).unwrap();
println!("{:#?}", root);
let (enum_definitions, num_levels) = assemble_from_sub_state(&root, &root);
let ident = root.ident;
let res = quote! {
#enum_definitions
#[derive(Debug)]
pub struct Reactor {
state: #ident,
}
const REACTOR_MAX_LEVELS: usize = #num_levels;
impl React<Event> for Reactor {
fn new() -> Self {
let reactor = Self {
state: #ident::INITIAL_STATE,
};
reactor.state.super_enter(0);
reactor
}
fn react(&mut self, event: Event) {
match self.state.super_handle(event) {
EventResult::Transition(new_state) => {
// The initial value with the TypeId of the bool was chosen arbitrarily as there is no `::new()` or `::default()`.
// bool is no valid type in the enum tree, so no issue there.
let levels_new = &mut [core::any::TypeId::of::<bool>(); REACTOR_MAX_LEVELS];
let levels_old = &mut [core::any::TypeId::of::<bool>(); REACTOR_MAX_LEVELS];
new_state.get_levels(levels_new, 0);
self.state.get_levels(levels_old, 0);
let mut i = 0;
while i < REACTOR_MAX_LEVELS {
if levels_new[i] != levels_old[i] {
break;
}
i += 1;
}
println!("Moving {:?} -> {:?}", self, new_state);
self.state.super_exit(i as i32);
self.state = new_state;
self.state.super_enter(i as i32);
}
_ => {},
}
}
}
};
println!("{}", res.clone().to_string());
res.into()
}
fn impl_state(root: &SubState, sub_state: &SubState) -> proc_macro2::TokenStream {
let enter_match_branches = &sub_state.sub_states.iter().map(|v| {
let ident = sub_state.ident.clone();
let variant_ident = v.ident.clone();
quote! {
#ident::#variant_ident(b) => {
b.super_enter(level - 1)
}
}
}).collect::<Vec<_>>();
let handle_match_branches = sub_state.sub_states.iter().map(|v| {
let ident = sub_state.ident.clone();
let variant_ident = v.ident.clone();
quote!{
#ident::#variant_ident(b) => {
match b.super_handle(event.clone()) {
EventResult::Handled => EventResult::Handled,
EventResult::Transition(t) => EventResult::Transition(t),
EventResult::NotHandled => self.handle(event),
}
}
}
}).collect::<Vec<_>>();
let exit_match_branches = &sub_state.sub_states.iter().map(|v| {
let ident = sub_state.ident.clone();
let variant_ident = v.ident.clone();
quote! {
#ident::#variant_ident(b) => {
b.super_exit(level - 1)
}
}
}).collect::<Vec<_>>();
let level_match_branches = sub_state.sub_states.iter().map(|v| {
let ident = sub_state.ident.clone();
let variant_ident = v.ident.clone();
quote! {
#ident::#variant_ident(b) => {
levels[ptr] = core::any::TypeId::of::<#ident>();
b.get_levels(levels, ptr + 1);
}
}
}).collect::<Vec<_>>();
let name = sub_state.ident.clone();
let (
match_level_statement,
match_enter_statement,
match_handle_statement,
match_exit_statement,
initial_state_definition
) = if sub_state.sub_states.is_empty() {
let ident = sub_state.ident.clone();
(
quote! {
levels[ptr] = core::any::TypeId::of::<#ident>();
},
quote! {
self.enter()
},
quote! {
self.handle(event)
},
quote! {
self.exit()
},
quote! {
const INITIAL_STATE: Self = #name {};
}
)
} else {
let initial_variant = sub_state.sub_states.last().unwrap().ident.clone();
(
quote! {
match self {
#(#level_match_branches,)*
}
},
quote! {
if level > 0 {
match self {
#(#enter_match_branches,)*
}
} else {
self.enter();
match self {
#(#enter_match_branches,)*
}
}
},
quote! {
match self {
#(#handle_match_branches,)*
}
},
quote! {
if level > 0 {
match self {
#(#exit_match_branches,)*
}
} else {
match self {
#(#exit_match_branches,)*
}
self.exit();
}
},
quote! {
const INITIAL_STATE: Self = #name::#initial_variant(#initial_variant {});
}
)
};
let name = sub_state.ident.clone();
let root_name = root.ident.clone();
let res = quote! {
impl State<Event> for #name where #name: reactor::base::Actor<Event> {
type State = #root_name;
#initial_state_definition
fn get_levels(&self, levels: &mut [core::any::TypeId], ptr: usize) {
#match_level_statement
}
fn super_enter(&self, level: i32) {
#match_enter_statement
}
fn super_handle(&self, event: Event) -> EventResult<<Self as State<Event>>::State> {
#match_handle_statement
}
fn super_exit(&self, level: i32) {
#match_exit_statement
}
}
};
// Uncomment to debug
// println!("{}", res.to_string());
res
}
| true
|
49f239cb871bf02b9f57b419e68501a38067db3e
|
Rust
|
vangroan/vuur
|
/crates/vuur_parse/tests/test_pprint.rs
|
UTF-8
| 1,084
| 2.953125
| 3
|
[
"MIT"
] |
permissive
|
use vuur_parse::expr::Expr;
use vuur_parse::pprint::PrettyExpr;
use vuur_parse::stmt::{DefStmt, SimpleStmt};
const FG_GREEN: &str = "\x1b[32m";
const FG_RESET: &str = "\x1b[0m";
fn parse_expr(source_code: &str) -> Option<Expr> {
let mut module = vuur_parse::parse_str(source_code).expect("parsing test module");
if let Some(DefStmt::Simple(stmt)) = module.stmts.pop() {
if let SimpleStmt::Expr(expr) = stmt {
return Some(expr);
}
}
None
}
const CASES: &[&str] = &[
"1 + 2 * 3",
"(1 + 2) * 3",
"1 * 2 * 3",
"1 + 2 - 3 * 4",
"1 + (2 - 3) * 4 / 5",
"-1",
"1 + (-2)",
"x = 1 + 2",
"z = x + y * 32",
"one.two.three",
"one.two.three = x * y",
"one(2 + 3, 4 * 5, 6, seven)",
"position = Vector(3, 4)",
"x = sqrt(((1 + 2) * 3) - ((4 + 5) * 6))",
];
#[test]
fn test_expr_pretty_print() {
for case in CASES {
let expr = parse_expr(case).unwrap();
let pprint = PrettyExpr::new(&expr);
println!("{FG_GREEN}{case}{FG_RESET}");
println!("{pprint}");
}
}
| true
|
2cb309a8510185ab46564a4a633ca002d3e7cf3e
|
Rust
|
sflomenb/rust-book
|
/hello-world/src/main.rs
|
UTF-8
| 323
| 3.359375
| 3
|
[] |
no_license
|
fn main() {
println!("Hello, world!");
println!("{}", add2(2, 3))
}
fn add2(x: i32, y: i32) -> i32 {
x + y
}
#[cfg(test)]
mod tests {
// Note this useful idiom: importing names from outer (for mod tests) scope.
use super::*;
#[test]
fn test_add() {
assert_eq!(add2(1, 2), 3);
}
}
| true
|
e8543b365e2f436956168a4401f21985109fca2f
|
Rust
|
mooman219/beryllium
|
/src/palette.rs
|
UTF-8
| 7,057
| 3.515625
| 4
|
[
"Zlib"
] |
permissive
|
use super::*;
/// An abstract RGBA color value.
///
/// * Each channel ranges from 0 (none) to 255 (maximum).
/// * Alpha channel is "opacity", so 255 is opaque.
///
/// A color value's exact representation within an image depends on the
/// [PixelFormat] of the image. You can use the "get" and "map" methods of a
/// `PixelFormat` to convert between raw pixel data and a `Color` value. Note
/// that any `PixelFormat` that's less than 32 bits per pixel will lose
/// information when you go from `Color` to raw pixel value, so the conversion
/// isn't always reversible.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
#[repr(C)]
pub struct Color {
/// Red
pub r: u8,
/// Green
pub g: u8,
/// Blue
pub b: u8,
/// Alpha / opacity
pub a: u8,
}
impl From<SDL_Color> for Color {
fn from(other: SDL_Color) -> Self {
Self {
r: other.r,
g: other.g,
b: other.b,
a: other.a,
}
}
}
impl From<Color> for SDL_Color {
fn from(other: Color) -> Self {
Self {
r: other.r,
g: other.g,
b: other.b,
a: other.a,
}
}
}
/// A palette of [Color] values.
///
/// The way that the `Palette` type works is slightly different from Rust's
/// normal ownership model, so please pay attention as I explain.
///
/// A `Palette` value holds a pointer to a heap allocated [SDL_Palette]
/// ([wiki](https://wiki.libsdl.org/SDL_Palette)). That `SDL_Palette` has a
/// pointer to the heap allocated `Color` values, along with a length, reference
/// count, and version number.
///
/// When you set a `Palette` on a [Surface] or [PixelFormat] it moves some
/// pointers and adjusts the reference count of the `Palette`. Now you have the
/// `Palette`, and _also_ that thing has the same `Palette`. An edit to the
/// `Palette` data in either location will affect everyone's data. Having a
/// `&mut Palette` does _not_ mean that you have an exclusive path of access to
/// the `Palette` contents.
///
/// As a result, I cannot allow you to _ever_ construct a shared reference or
/// unique reference to the `Color` data held inside the `Palette`. This means
/// no [Deref](core::ops::Deref), [Index](core::ops::Index), or
/// [IndexMut](core::ops::IndexMut), no Iterators of any kind, none of that.
/// This definitely makes the API of the `Palette` type not quite as fun as you
/// might like.
///
/// You can allocate a `Palette` by calling [SDLToken::new_palette] and
/// specifying how many `Color` values the `Palette` should hold. However, you
/// generally do not need to do this yourself, because if a `Surface` or
/// `PixelFormat` needs palette data it will automatically allocate a palette of
/// the correct size when it is created.
///
/// All slots in a new `Palette` are initialized to opaque white (`0xFF` in all
/// four color channels).
#[derive(Debug)] // TODO: We probably want a custom Debug impl
#[repr(transparent)]
pub struct Palette<'sdl> {
pub(crate) nn: NonNull<SDL_Palette>,
pub(crate) _marker: PhantomData<&'sdl SDLToken>,
}
impl SDLToken {
/// Allocates a new [Palette] with the number of color slots given.
///
/// The initial value of the palette color values is `0xFF` in all four
/// channels (opaque white).
pub fn new_palette(&self, color_count: usize) -> Result<Palette<'_>, String> {
let max = core::i32::MAX as usize;
if color_count > max {
return Err("beryllium error: color_count > i32::MAX".to_string());
}
if color_count < 2 {
return Err("beryllium error: color_count of a palette must be at least 2".to_string());
}
match NonNull::new(unsafe { SDL_AllocPalette(color_count as i32) }) {
Some(nn) => Ok(Palette {
nn,
_marker: PhantomData,
}),
None => Err(get_error()),
}
}
}
impl Drop for Palette<'_> {
fn drop(&mut self) {
unsafe { SDL_FreePalette(self.nn.as_ptr()) }
}
}
#[allow(clippy::len_without_is_empty)]
impl Palette<'_> {
/// Gets the number of colors in the Palette
pub fn len(&self) -> usize {
unsafe { (*self.nn.as_ptr()).ncolors as usize }
}
/// Assigns a slice of colors into the `Palette`, starting at the position
/// specified.
///
/// Colors that don't "fit" because they would trail off the end are not copied.
///
/// ## Failure
///
/// * `start` values >= the length will give an error.
pub fn set_colors(&self, start: usize, new_colors: &[Color]) -> Result<(), String> {
if start >= self.len() {
return Err("beryllium error: start index out of bounds".to_string());
}
// Note(Lokathor): We'll manually clip the input length instead of relying
// on SDL2's dubious clipping process.
let clipped_length = (self.len() - start).min(new_colors.len());
debug_assert!(start + clipped_length <= self.len());
let out = unsafe {
SDL_SetPaletteColors(
self.nn.as_ptr(),
new_colors.as_ptr() as *const SDL_Color,
start as i32,
clipped_length as i32,
)
};
if out == 0 {
Ok(())
} else {
// Given our previous checks, this path should never happen.
Err(get_error())
}
}
/// Gets the [Color] at the index specified.
///
/// ## Failure
///
/// * `None` if the index is out of bounds.
pub fn get_color(&self, index: usize) -> Option<Color> {
if index >= self.len() {
None
} else {
Some(unsafe { (*(*self.nn.as_ptr()).colors.add(index)).into() })
}
}
/// Creates a new [Vec] with the same colors as this `Palette`.
pub fn to_vec(&self) -> Vec<Color> {
// Note(Lokathor): This is safe only as long as this slice never leaves
// this function call.
let self_slice = unsafe {
core::slice::from_raw_parts(
(*self.nn.as_ptr()).colors as *mut Color,
(*self.nn.as_ptr()).ncolors as usize,
)
};
self_slice.to_vec()
}
}
impl Clone for Palette<'_> {
/// Clones the colors into an entirely distinct `Palette` of the same length.
///
/// First a new palette of the same length is allocated, then all colors are
/// copied over.
///
/// ## Panics
///
/// * If the `SDL_Palette` cannot be allocated this will panic. That
/// essentially only happens if you're out of memory.
/// * If the colors cannot be copied over this will panic. It should be
/// impossible for that to fail, but hey.
fn clone(&self) -> Self {
match NonNull::new(unsafe { SDL_AllocPalette(self.len() as i32) }) {
Some(nn) => {
let out = Self {
nn,
_marker: PhantomData,
};
// Note(Lokathor): This is safe only as long as this slice never leaves
// this function call.
let self_slice = unsafe {
core::slice::from_raw_parts(
(*self.nn.as_ptr()).colors as *mut Color,
(*self.nn.as_ptr()).ncolors as usize,
)
};
out
.set_colors(0, self_slice)
.expect("Failed to copy the color data!");
out
}
None => panic!("OOM: couldn't allocate an SDL_Palette!"),
}
}
}
| true
|
be06eb79df6f1da7d70250b13c5d1f360cb0b3f5
|
Rust
|
dejankos/AoC2020
|
/src/day_13.rs
|
UTF-8
| 2,058
| 3.234375
| 3
|
[] |
no_license
|
fn solve(input: (usize, Vec<usize>)) -> usize {
let ts = input.0;
let min = input
.1
.into_iter()
.map(|id| (find_first(id, ts), id))
.min_by_key(|p| p.0)
.unwrap();
(min.0 - ts) * min.1
}
fn solve_part_2(buses: Vec<(usize, usize)>) -> usize {
let (mut curr, mut step) = (0, 1);
for (id, idx) in buses {
'inn: for ts in (curr..usize::MAX).step_by(step) {
if (ts + idx) % id == 0 {
curr = ts;
step *= id;
break 'inn;
}
}
}
curr
}
fn find_first(id: usize, ts: usize) -> usize {
let mut i = ts;
while i % id != 0 {
i += 1;
}
i
}
fn prepare_data(data: Vec<String>) -> (usize, Vec<usize>) {
let ts = data[0].parse::<usize>().unwrap();
let ids = data[1]
.split(',')
.into_iter()
.filter(|s| *s != "x")
.map(|s| s.parse::<usize>().unwrap())
.collect();
(ts, ids)
}
fn prepare_data_part_2(data: Vec<String>) -> Vec<(usize, usize)> {
data[1]
.split(',')
.into_iter()
.enumerate()
.filter(|(_, s)| *s != "x")
.map(|(i, s)| (s.parse::<usize>().unwrap(), i))
.collect()
}
#[cfg(test)]
mod tests {
use crate::data_parser::parse_lines;
use super::*;
#[test]
fn should_solve() {
let data = vec!["939".into(), "7,13,x,x,59,x,31,19".into()];
assert_eq!(295, solve(prepare_data(data)));
}
#[test]
fn should_solve_part_1() {
assert_eq!(
2845,
solve(prepare_data(parse_lines("input/day_13_data.txt")))
);
}
#[test]
fn should_solve_2() {
let data = vec!["939".into(), "7,13,x,x,59,x,31,19".into()];
assert_eq!(1068781, solve_part_2(prepare_data_part_2(data)));
}
#[test]
fn should_solve_part_2() {
assert_eq!(
487905974205117,
solve_part_2(prepare_data_part_2(parse_lines("input/day_13_data.txt")))
);
}
}
| true
|
500021d5a72a5ff7f04a96c7600aa2ab81fdf339
|
Rust
|
nvzqz/bidiff
|
/crates/bidiff/src/enc.rs
|
UTF-8
| 904
| 2.734375
| 3
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
use super::Control;
use byteorder::{LittleEndian, WriteBytesExt};
use integer_encoding::VarIntWriter;
use std::io::{self, Write};
pub const MAGIC: u32 = 0xB1DF;
pub const VERSION: u32 = 0x1000;
pub struct Writer<W>
where
W: Write,
{
w: W,
}
impl<W> Writer<W>
where
W: Write,
{
pub fn new(mut w: W) -> Result<Self, io::Error> {
w.write_u32::<LittleEndian>(MAGIC)?;
w.write_u32::<LittleEndian>(VERSION)?;
Ok(Self { w })
}
pub fn write(&mut self, c: &Control) -> Result<(), io::Error> {
let w = &mut self.w;
w.write_varint(c.add.len())?;
w.write_all(c.add)?;
w.write_varint(c.copy.len())?;
w.write_all(c.copy)?;
w.write_varint(c.seek)?;
Ok(())
}
pub fn flush(&mut self) -> Result<(), io::Error> {
self.w.flush()
}
pub fn into_inner(self) -> W {
self.w
}
}
| true
|
0161ec0d3b26f69834342673a22381ee52b2b061
|
Rust
|
isgasho/shine
|
/crates/shine-math/src/triangulation/traverse/edgecirculator.rs
|
UTF-8
| 2,428
| 2.765625
| 3
|
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use crate::geometry2::Position;
use crate::triangulation::graph::{Face, Triangulation, Vertex};
use crate::triangulation::query::{TopologyQuery, VertexClue};
use crate::triangulation::types::{FaceEdge, FaceIndex, Rot3, VertexIndex};
pub struct EdgeCirculator<'a, P, V, F, C>
where
P: Position,
V: Vertex<Position = P>,
F: Face,
{
tri: &'a Triangulation<P, V, F, C>,
vertex: VertexIndex,
current: FaceEdge,
}
impl<'a, P, V, F, C> EdgeCirculator<'a, P, V, F, C>
where
P: Position,
V: Vertex<Position = P>,
F: Face,
{
pub fn new<'b>(tri: &'b Triangulation<P, V, F, C>, start: VertexIndex) -> EdgeCirculator<'b, P, V, F, C> {
assert_eq!(tri.dimension(), 2);
let face = tri[start].face();
let edge = tri[face].get_vertex_index(start).unwrap().decrement();
EdgeCirculator {
tri,
vertex: start,
current: FaceEdge { face, edge },
}
}
pub fn current(&self) -> &FaceEdge {
&self.current
}
pub fn start_vertex(&self) -> VertexIndex {
self.vertex
}
pub fn end_vertex(&self) -> VertexIndex {
self.tri.vi(VertexClue::end_of(self.current))
}
pub fn face(&self) -> FaceIndex {
self.current.face
}
pub fn edge(&self) -> Rot3 {
self.current.edge
}
pub fn advance_ccw(&mut self) {
assert_eq!(self.tri.dimension(), 2);
assert!(self.current.face.is_valid());
assert!(self.tri.vi(VertexClue::start_of(self.current)) == self.vertex);
self.current.face = self.tri[self.current.face].neighbor(self.current.edge.decrement());
self.current.edge = self.tri[self.current.face].get_vertex_index(self.vertex).unwrap().decrement();
}
pub fn next_ccw(&mut self) -> FaceEdge {
let edge = *self.current();
self.advance_ccw();
edge
}
pub fn advance_cw(&mut self) {
assert_eq!(self.tri.dimension(), 2);
assert!(self.current.face.is_valid());
assert!(self.tri.vi(VertexClue::start_of(self.current)) == self.vertex);
self.current.face = self.tri[self.current.face].neighbor(self.current.edge);
self.current.edge = self.tri[self.current.face].get_vertex_index(self.vertex).unwrap().decrement();
}
pub fn next_cw(&mut self) -> FaceEdge {
let edge = *self.current();
self.advance_cw();
edge
}
}
| true
|
3df4b371a1de079ff460293a507a666fad07ff84
|
Rust
|
PatrykStronski/KnapsackProblem
|
/src/main.rs
|
UTF-8
| 5,905
| 2.78125
| 3
|
[] |
no_license
|
mod task1;
mod task2;
mod random_population;
mod selection;
mod knap;
mod crossover;
mod mutation;
mod plot_results;
mod ant_colony;
fn evaluate_winner(knp: &knap::Knapsack, crossover_rate: f32, mutation_rate: f32, tournament_size: u32, population_size: u32, n: u32) -> u32{
let mut population = random_population::init_population(n,population_size);
let iterations = 50;
for _i in 0..iterations {
population = selection::tournament(population.to_vec(), tournament_size, knp);
population = crossover::crossover_all(population.to_vec(), crossover_rate);
mutation::mutate_all(&mut population, mutation_rate);
if population.len() == 1 || population.len() == 2 {
break;
}
}
println!("crossover_rate: {}\nmutation_rate {}\npopulation_size {}\ntournament_size{}\n",crossover_rate,mutation_rate,population_size,tournament_size);
let mut winning_eval = 0;
for pot_winner in population {
let value = selection::evaluate(&pot_winner, knp);
if value > winning_eval {
winning_eval = value;
}
}
return winning_eval;
}
fn main() -> Result<(),()> {
let out = "out.csv";
let n = 1001;
let knp = task2::read(out.to_string())?;
let nmb_increments = 10;
let max_weight = 19000;
let max_size = 19000;
//task1::generate(1001,max_weight,max_size,out.to_string());
let mut mutation_rate = 0.01;
let mut tournament_size = 10;
let mut population_size = 600;
let mut crossover_rate: f32 = 0.01;
let mut increment = 0.0;
println!("Starting eval for crossover rate\n");
increment = 0.05;
let mut caption = format!("crossover_rate {}, mutation_rate {}, tournament {}, popul_size {}","MEASURED",mutation_rate,tournament_size,population_size);
let mut measured_value = Vec::<f32>::with_capacity(nmb_increments);
let mut winners = Vec::<u32>::with_capacity(nmb_increments);
for _i in 0..nmb_increments {
measured_value.push(crossover_rate);
winners.push(evaluate_winner(&knp,crossover_rate,mutation_rate,tournament_size, population_size, n));
crossover_rate+=increment;
}
for _its in 0..4 {
crossover_rate = 0.01;
for i in 0..nmb_increments {
winners[i]+=evaluate_winner(&knp,crossover_rate,mutation_rate,tournament_size, population_size, n);
crossover_rate+=increment;
}
}
plot_results::plot_results(winners.to_vec(),measured_value.to_vec(),"crossover_rate".to_string(), caption.to_string());
crossover_rate = 0.25;
println!("Starting eval for mutation rate\n");
mutation_rate = 0.001;
increment = 0.005;
caption = format!("crossover_rate {}, mutation_rate {}, tournament {}, popul_size {}",crossover_rate,"MEASURED",tournament_size,population_size);
measured_value = Vec::<f32>::with_capacity(nmb_increments);
winners = Vec::<u32>::with_capacity(nmb_increments);
for _i in 0..nmb_increments {
measured_value.push(mutation_rate);
winners.push(evaluate_winner(&knp,crossover_rate,mutation_rate,tournament_size, population_size, n));
mutation_rate+=increment;
}
for _its in 0..4 {
mutation_rate = 0.001;
for i in 0..nmb_increments {
winners[i]+=evaluate_winner(&knp,crossover_rate,mutation_rate,tournament_size, population_size, n);
mutation_rate+=increment;
}
}
plot_results::plot_results(winners.to_vec(),measured_value.to_vec(),"mutation_rate".to_string(), caption.to_string());
mutation_rate = 0.02;
println!("Starting eval for tournament size\n");
let mut increment_i: u32 = 1;
tournament_size = 2;
caption = format!("crossover_rate {}, mutation_rate {}, tournament {}, popul_size {}",crossover_rate,mutation_rate,"MEASURED",population_size);
measured_value = Vec::<f32>::with_capacity(nmb_increments);
winners = Vec::<u32>::with_capacity(nmb_increments);
for _i in 0..nmb_increments {
measured_value.push(tournament_size as f32);
winners.push(evaluate_winner(&knp,crossover_rate,mutation_rate,tournament_size, population_size, n));
tournament_size+=increment_i;
}
for _its in 0..4 {
tournament_size = 2;
for i in 0..nmb_increments {
winners[i]+=evaluate_winner(&knp,crossover_rate,mutation_rate,tournament_size, population_size, n);
tournament_size+=increment_i;
}
}
plot_results::plot_results(winners.to_vec(),measured_value.to_vec(),"tournament_size".to_string(), caption.to_string());
tournament_size = 10;
println!("Starting eval for population size\n");
increment_i = 100;
population_size = 100;
caption = format!("crossover_rate {}, mutation_rate {}, tournament {}, popul_size {}",crossover_rate,mutation_rate,tournament_size,"MEASURED");
measured_value = Vec::<f32>::with_capacity(nmb_increments);
winners = Vec::<u32>::with_capacity(nmb_increments);
for _i in 0..nmb_increments {
measured_value.push(population_size as f32);
winners.push(evaluate_winner(&knp,crossover_rate,mutation_rate,tournament_size, population_size, n));
population_size+=increment_i;
}
for _its in 0..4 {
population_size = 100;
for i in 0..nmb_increments {
winners[i]+=evaluate_winner(&knp,crossover_rate,mutation_rate,tournament_size, population_size, n);
population_size+=increment_i;
}
}
plot_results::plot_results(winners.to_vec(),measured_value.to_vec(),"population_size_size".to_string(), caption.to_string());
population_size = 800;
println!("For best values the eval is equal {}",evaluate_winner(&knp,0.001,0.006,10, 1000, n));
println!("FROM ant colony: {}", ant_colony::ant_evaluate(7, random_population::init_population(n,500).to_vec(), &knp, 7));
Ok(())
}
| true
|
e169f6cf2257500c6a4ef6c2bbcbc4bca2c996b1
|
Rust
|
sgrif/derive_deref
|
/src/lib.rs
|
UTF-8
| 2,913
| 2.515625
| 3
|
[
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
extern crate proc_macro;
extern crate proc_macro2;
#[macro_use]
extern crate quote;
extern crate syn;
use proc_macro::TokenStream;
use syn::{Path, Type, TypePath};
#[proc_macro_derive(Deref)]
pub fn derive_deref(input: TokenStream) -> TokenStream {
let item = syn::parse(input).unwrap();
let (field_ty, field_access) = parse_fields(&item, false);
let name = &item.ident;
let (impl_generics, ty_generics, where_clause) = item.generics.split_for_impl();
quote!(
impl #impl_generics core::ops::Deref for #name #ty_generics
#where_clause
{
type Target = #field_ty;
fn deref(&self) -> &Self::Target {
#field_access
}
}
).into()
}
#[proc_macro_derive(DerefMut)]
pub fn derive_deref_mut(input: TokenStream) -> TokenStream {
let item = syn::parse(input).unwrap();
let (_, field_access) = parse_fields(&item, true);
let name = &item.ident;
let (impl_generics, ty_generics, where_clause) = item.generics.split_for_impl();
quote!(
impl #impl_generics core::ops::DerefMut for #name #ty_generics
#where_clause
{
fn deref_mut(&mut self) -> &mut Self::Target {
#field_access
}
}
).into()
}
fn parse_fields(item: &syn::DeriveInput, mutable: bool) -> (syn::Type, proc_macro2::TokenStream) {
let trait_name = if mutable { "DerefMut" } else { "Deref" };
let fields = match item.data {
syn::Data::Struct(ref body) => body.fields.iter().collect::<Vec<&syn::Field>>(),
_ => panic!("#[derive({})] can only be used on structs", trait_name),
};
let field_ty = match fields.len() {
1 => Some(fields[0].ty.clone()),
2 => {
if let Type::Path(TypePath { path: Path { segments, .. }, .. }) = &fields[1].ty {
if segments
.last()
.expect("Expected path to have at least one segment")
.ident == "PhantomData"
{
Some(fields[0].ty.clone())
} else {
None
}
} else {
None
}
},
_ => None,
};
let field_ty = field_ty
.unwrap_or_else(|| {
panic!(
"#[derive({})] can only be used on structs with one field, \
and optionally a second `PhantomData` field.",
trait_name,
)
});
let field_name = match fields[0].ident {
Some(ref ident) => quote!(#ident),
None => quote!(0),
};
match (field_ty, mutable) {
(syn::Type::Reference(syn::TypeReference { elem, .. }), _) => (*elem.clone(), quote!(self.#field_name)),
(x, true) => (x, quote!(&mut self.#field_name)),
(x, false) => (x, quote!(&self.#field_name)),
}
}
| true
|
1a9d3bd81e02b0d032e4aba5171964a13c35cd8b
|
Rust
|
leejw51crypto/chain
|
/client-rpc/server/src/program.rs
|
UTF-8
| 4,606
| 2.609375
| 3
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
use structopt::StructOpt;
use crate::server::Server;
use std::env;
#[derive(StructOpt, Debug)]
#[structopt(
name = "client-rpc",
about = r#"JSON-RPC server for wallet management and blockchain query
ENVIRONMENT VARIABLES:
CRYPTO_GENESIS_FINGERPRINT Set the genesis fingerprint(Optional)
"#
)]
pub struct Options {
#[structopt(
name = "host",
short,
long,
default_value = "0.0.0.0",
help = "JSON-RPC server hostname"
)]
pub host: String,
#[structopt(
name = "port",
short,
long,
default_value = "9981",
help = "JSON-RPC server port"
)]
pub port: u16,
#[structopt(name = "chain-id", short, long, help = "Full chain ID")]
pub chain_id: String,
#[structopt(
name = "storage-dir",
short,
long,
default_value = ".storage",
help = "Local data storage directory"
)]
pub storage_dir: String,
#[structopt(
name = "websocket-url",
short,
long,
default_value = "ws://localhost:26657/websocket",
help = "Url for connecting with tendermint websocket RPC"
)]
pub websocket_url: String,
#[structopt(
name = "enable-fast-forward",
long,
help = "Enable fast forward when syncing wallet, which is not secure when connecting to outside nodes"
)]
pub enable_fast_forward: bool,
#[structopt(
name = "disable-light-client",
long,
help = "Disable light client, which is not secure when connecting to outside nodes"
)]
pub disable_light_client: bool,
#[structopt(
name = "light client peer",
short = "l",
long = "light-client-peers",
help = "Light client peers"
)]
pub light_client_peers: Option<String>,
#[structopt(
name = "light client trusting period in seconds",
long = "light-client-trusting-period",
default_value = "36000000",
help = "light client trusting period in seconds"
)]
pub light_client_trusting_period_seconds: u64,
#[structopt(
name = "light client trusting height",
long = "light-client-trusting-height",
default_value = "1",
help = "light client trusting height"
)]
pub light_client_trusting_height: u64,
#[structopt(
name = "light client trusting blockhash",
long = "light-client-trusting-blockhash",
default_value = "",
help = "light client trusting blockhash"
)]
pub light_client_trusting_blockhash: String,
#[structopt(
name = "disable-address-recovery",
long,
help = "Disable address recovery when syncing wallet, which is not necessary, when addresses already exist"
)]
pub disable_address_recovery: bool,
#[structopt(
name = "batch-size",
short,
long,
default_value = "20",
help = "Number of requests per batch when syncing wallet"
)]
pub batch_size: usize,
#[structopt(
name = "block-height-ensure",
long,
default_value = "50",
help = "Number of block height to rollback the utxos in the pending transactions"
)]
pub block_height_ensure: u64,
}
#[allow(dead_code)]
pub fn run_cli() {
env_logger::init();
let options = Options::from_args();
Server::new(options).unwrap().start().unwrap();
}
#[allow(dead_code)]
pub fn find_string(args: &[String], target: &str) -> Option<usize> {
for i in 0..args.len() {
if args[i] == target && i < args.len() - 1 {
return Some(i);
}
}
None
}
#[allow(dead_code)]
pub fn run_electron() {
env_logger::init();
// "~/Electron", ".", "--chain-id", "ab"]
let args: Vec<String> = env::args().collect();
log::info!("args={:?}", args);
let mut options = Options::from_iter(vec![""].iter());
if let Some(a) = find_string(&args, "--chain-id") {
options.chain_id = args[a + 1].clone()
}
if let Some(a) = find_string(&args, "--storage-dir") {
options.storage_dir = args[a + 1].clone()
}
if let Some(a) = find_string(&args, "--websocket-url") {
options.websocket_url = args[a + 1].clone()
}
let mut storage = dirs::data_dir().expect("get storage dir");
storage.push(".cro_storage");
options.storage_dir = storage.to_str().expect("get storage dir to_str").into();
log::info!("Options={:?}", options);
log::info!("Storage={}", options.storage_dir);
Server::new(options).unwrap().start().unwrap();
}
| true
|
ec4fb0bc1b8cb4724ea54157bb0e50e61a54257e
|
Rust
|
moonmile/rust-sample
|
/src/ch06/sample-06-02/src/main.rs
|
UTF-8
| 1,331
| 3.828125
| 4
|
[] |
no_license
|
fn main() {
let v = vec![10,20,30,40,50] ;
// 全ての要素を繰り返す
print!("v is ");
for i in &v {
print!("{} ", i );
}
println!("");
// イテレータを使う
print!("v is ");
for i in v.iter() {
print!("{} ", i );
}
println!("");
// インデックス付きで繰り返す
print!("v is ");
for (i, x) in v.iter().enumerate() {
print!("{}:{} ", i, x );
}
println!("");
// 繰り返し数を指定する
print!("FOR is ");
for i in 0..10 {
print!("{} ", i );
}
println!("");
// 途中で繰り返しを止める
print!("FOR is ");
for i in 0..10 {
if i == 5 {
break;
}
print!("{} ", i );
}
println!("");
// 途中で先頭に戻る
print!("FOR is ");
for i in 0..10 {
if i % 2 == 0 {
continue ;
}
print!("{} ", i );
}
println!("");
// while を使う
print!("WHILE is ");
let mut i = 0;
while i < 10 {
print!("{} ", i );
i += 2 ;
}
println!("");
// loop を使う
print!("LOOP is ");
let mut i = 0;
loop {
if i >= 10 {
break ;
}
print!("{} ", i );
i += 1 ;
}
println!("");
}
| true
|
04fc8f2e31dc6eb459a821ade9059dc4c27c9fff
|
Rust
|
slightknack/machine-110
|
/src/cyclic.rs
|
UTF-8
| 1,121
| 3.40625
| 3
|
[] |
no_license
|
use std::fmt::Debug;
pub struct Cyclic {
pub state: Vec<bool>,
pub rules: Vec<Vec<bool>>,
}
impl Cyclic {
pub fn new(state: Vec<bool>, rules: Vec<Vec<bool>>) -> Cyclic {
Cyclic { state, rules }
}
pub fn step(&mut self) {
let rule = self.rules.remove(0);
if !self.state.is_empty() && self.state.remove(0) {
self.state.append(&mut rule.clone())
}
self.rules.push(rule);
}
}
impl Debug for Cyclic {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
for rule in self.rules.iter() {
write!(f, "|")?;
for bit in rule {
match bit {
false => write!(f, " ")?,
true => write!(f, "█")?,
}
}
}
write!(f, "| -> |")?;
for bit in self.state.iter() {
match bit {
false => write!(f, " ")?,
true => write!(f, "█")?,
}
}
write!(f, "|")?;
Ok(())
}
}
pub trait Compile {
fn compile(i: Cyclic) -> Self;
}
| true
|
2b10adcdf15548d15d09e1885ddbdcd734422612
|
Rust
|
EdBuilds/holobone
|
/cli_runner/src/asteroids_game/collider.rs
|
UTF-8
| 585
| 2.6875
| 3
|
[] |
no_license
|
use specs_derive::Component;
use specs::prelude::*;
use lyon_path::Path;
#[derive(Clone, Copy)]
pub enum ColliderType{
Projectile,
Enemy,
Player
}
pub enum CollisionHandlerError{
MissingComponent,
NotImplemented,
}
//TODO: return the entities that needs to be deleted!
pub type CollisionHandler = fn(current_entity: Entity, other_entity: Entity, world:&mut World) -> Result<(), CollisionHandlerError>;
#[derive(Component)]
pub struct Collider {
pub collider_type: ColliderType,
pub collider_outline: Path,
pub collision_handler: CollisionHandler
}
| true
|
8347f9a7db95c7413308f2ea7dcffefad6f3d864
|
Rust
|
royswale/leetcode-cn
|
/maximum-subarray/rust/src/main.rs
|
UTF-8
| 1,504
| 3.765625
| 4
|
[] |
no_license
|
struct Solution {
}
impl Solution {
pub fn max_sub_array(nums: Vec<i32>) -> i32 {
let mut result: i32 = nums[0];
let mut sum: i32 = 0;
for num in nums {
if sum <= 0 {
sum = num;
} else {
sum += num;
}
if sum > result {
result = sum;
}
}
return result;
}
}
fn main() {
let nums = vec![-2,1,-3,4,-1,2,1,-5,4];
println!("{}", Solution::max_sub_array(nums));
println!("{}", Solution::max_sub_array(vec![1]));
println!("{}", Solution::max_sub_array(vec![5,4,-1,7,8]));
}
// Given an integer array nums, find the contiguous subarray (containing at least one number) which has the largest sum and return its sum.
//
// A subarray is a contiguous part of an array.
//
//
// Example 1:
//
// Input: nums = [-2,1,-3,4,-1,2,1,-5,4]
// Output: 6
// Explanation: [4,-1,2,1] has the largest sum = 6.
// Example 2:
//
// Input: nums = [1]
// Output: 1
// Example 3:
//
// Input: nums = [5,4,-1,7,8]
// Output: 23
//
// Constraints:
//
// 1 <= nums.length <= 3 * 104
// -105 <= nums[i] <= 105
//
// Follow up: If you have figured out the O(n) solution, try coding another solution using the divide and conquer approach, which is more subtle.
//
// 来源:力扣(LeetCode)
// 链接:https://leetcode-cn.com/problems/maximum-subarray
// 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
| true
|
e4c70cede93d493084213ab699c9207bb5392fbe
|
Rust
|
BartMassey/advent-of-code-2016
|
/day08/soln.rs
|
UTF-8
| 5,567
| 2.84375
| 3
|
[
"MIT"
] |
permissive
|
// Copyright © 2016 Bart Massey
// This program is licensed under the "MIT License".
// Please see the file COPYING in this distribution
// for license terms.
//! Advent of Code Day 8.
/// Turn on for display tracing.
const TRACING: bool = false;
extern crate aoc;
#[macro_use]
extern crate lazy_static;
extern crate regex;
extern crate captures_at;
use captures_at::CapturesAtExt;
// Textual patterns for instructions.
lazy_static! {
static ref RECT_PATTERN: regex::Regex =
regex::Regex::new(r"^rect (\d+)x(\d+)$").expect("could not compile rect pattern");
static ref ROTATE_ROW_PATTERN: regex::Regex =
regex::Regex::new(r"^rotate row y=(\d+) by (\d+)$").expect("could not compile row pattern");
static ref ROTATE_COLUMN_PATTERN: regex::Regex =
regex::Regex::new(r"^rotate column x=(\d+) by (\d+)$")
.expect("could not compile column pattern");
}
/// Try for instruction to draw a rectangle. Returns true
/// iff successful.
fn insn_rect(insn: &str, m: &mut [Vec<char>]) -> bool {
match (*self::RECT_PATTERN).captures(insn) {
None => return false,
Some(parts) => {
// Parse arguments.
let x: usize = parts
.at(1)
.expect("insn_rect: could not find x")
.parse()
.expect("insn_rect: could not parse x");
let y: usize = parts
.at(2)
.expect("insn_rect: could not find y")
.parse()
.expect("insn_rect: could not parse y");
// Fill rectangle.
#[allow(clippy::needless_range_loop)]
for i in 0..x {
for j in 0..y {
m[i][j] = '#';
}
}
}
};
true
}
/// Try for instruction to rotate a row right. Returns true
/// iff successful.
fn insn_rotate_row(insn: &str, m: &mut [Vec<char>]) -> bool {
match (*self::ROTATE_ROW_PATTERN).captures(insn) {
None => return false,
Some(parts) => {
// Parse arguments.
let y: usize = parts
.at(1)
.expect("insn_rotate_row: could not find y")
.parse()
.expect("insn_rotate_row: could not parse y");
let n: usize = parts
.at(2)
.expect("insn_rotate_row: could not find n")
.parse()
.expect("insn_rotate_row: could not parse n");
// Rotate row.
let d = m.len();
for _ in 0..n {
let tmp = m[d - 1][y];
for x in (1..d).rev() {
m[x][y] = m[x - 1][y];
}
m[0][y] = tmp;
}
}
};
true
}
/// Try for instruction to rotate a column down. Returns
/// true iff successful.
fn insn_rotate_column(insn: &str, m: &mut [Vec<char>]) -> bool {
match (*self::ROTATE_COLUMN_PATTERN).captures(insn) {
None => return false,
Some(parts) => {
// Parse arguments.
let x: usize = parts
.at(1)
.expect("insn_rotate_column: could not find x")
.parse()
.expect("insn_rotate_column: could not parse x");
let n: usize = parts
.at(2)
.expect("insn_rotate_column: could not find n")
.parse()
.expect("insn_rotate_column: could not parse n");
// Rotate column.
let d = m[0].len();
for _ in 0..n {
let tmp = m[x][d - 1];
for y in (1..d).rev() {
m[x][y] = m[x][y - 1];
}
m[x][0] = tmp;
}
}
};
true
}
/// Display the given screen.
fn display(m: &[Vec<char>]) {
for r in m.iter() {
for c in r.iter() {
print!("{}", c);
}
println!();
}
}
/// Run the instructions and print the number of on pixels or
/// the pixels themselves at the end.
fn main() {
let (part, dims) = aoc::get_part_args();
assert!(dims.len() == 2);
// Parse arguments.
let x_size: usize = dims[0].parse().expect("main: could not parse x_size");
let y_size: usize = dims[1].parse().expect("main: could not parse y_size");
// Set up state.
let mut m = Vec::new();
for _ in 0..x_size {
let mut v = Vec::with_capacity(y_size);
v.resize(y_size, '.');
m.push(v);
}
let insns = [insn_rect, insn_rotate_column, insn_rotate_row];
// Read strings from the input file and process them.
for l in aoc::input_lines() {
// Search through the instructions until finding one
// that works.
let mut processed = false;
for f in &insns {
if f(&l, &mut m) {
processed = true;
if TRACING {
println!();
display(&m);
};
break;
}
}
if !processed {
panic!("undentified instruction");
}
}
// Count up and report the on pixels.
let mut count = 0;
#[allow(clippy::needless_range_loop)]
for x in 0..x_size {
for y in 0..y_size {
if m[x][y] == '#' {
count += 1;
}
}
}
if TRACING {
println!();
};
// Show final answer.
if part == 1 {
println!("{}", count);
} else {
display(&m);
};
}
| true
|
8a7b716d30460bc8eb3ccdfdf8e4d86f1fe781a4
|
Rust
|
dinfuehr/dora
|
/dora-frontend/src/implck.rs
|
UTF-8
| 6,112
| 2.578125
| 3
|
[
"MIT"
] |
permissive
|
use std::collections::{HashMap, HashSet};
use crate::error::msg::ErrorMessage;
use crate::sema::Sema;
pub fn check(sa: &mut Sema) {
for impl_ in sa.impls.iter() {
let impl_for = {
let impl_ = impl_.read();
let trait_ = sa.traits[impl_.trait_id()].read();
let all: HashSet<_> = trait_.methods.iter().cloned().collect();
let mut defined = HashSet::new();
let mut impl_for = HashMap::new();
for &method_id in &impl_.methods {
let method = sa.fcts.idx(method_id);
let method = method.read();
if let Some(fid) = trait_.find_method_with_replace(
sa,
method.is_static,
method.name,
Some(impl_.extended_ty.clone()),
method.params_without_self(),
) {
defined.insert(fid);
impl_for.insert(fid, method_id);
let trait_method = sa.fcts.idx(fid);
let trait_method = trait_method.read();
let return_type_valid = method.return_type
== if trait_method.return_type.is_self() {
impl_.extended_ty.clone()
} else {
trait_method.return_type.clone()
};
if !return_type_valid {
let impl_return_type = method.return_type.name_fct(sa, &*method);
let trait_return_type =
trait_method.return_type.name_fct(sa, &*trait_method);
let msg =
ErrorMessage::ReturnTypeMismatch(impl_return_type, trait_return_type);
sa.report(impl_.file_id, method.span, msg);
}
} else {
let args = method
.params_without_self()
.iter()
.map(|a| a.name_fct(sa, &*method))
.collect::<Vec<String>>();
let mtd_name = sa.interner.str(method.name).to_string();
let trait_name = sa.interner.str(trait_.name).to_string();
let msg = if method.is_static {
ErrorMessage::StaticMethodNotInTrait(trait_name, mtd_name, args)
} else {
ErrorMessage::MethodNotInTrait(trait_name, mtd_name, args)
};
sa.report(impl_.file_id, method.span, msg)
}
}
for &method_id in all.difference(&defined) {
let method = sa.fcts.idx(method_id);
let method = method.read();
if method.has_body() {
// method has a default implementation, use that one
impl_for.insert(method_id, method_id);
continue;
}
let args = method
.params_without_self()
.iter()
.map(|a| a.name_fct(sa, &*method))
.collect::<Vec<String>>();
let mtd_name = sa.interner.str(method.name).to_string();
let trait_name = sa.interner.str(trait_.name).to_string();
let msg = if method.is_static {
ErrorMessage::StaticMethodMissingFromTrait(trait_name, mtd_name, args)
} else {
ErrorMessage::MethodMissingFromTrait(trait_name, mtd_name, args)
};
sa.report(impl_.file_id, impl_.span, msg)
}
impl_for
};
impl_.write().impl_for = impl_for;
}
}
#[cfg(test)]
mod tests {
use crate::error::msg::ErrorMessage;
use crate::tests::*;
#[test]
fn method_not_in_trait() {
err(
"
trait Foo {}
class A
impl Foo for A {
fn bar() {}
}",
(5, 17),
ErrorMessage::MethodNotInTrait("Foo".into(), "bar".into(), vec![]),
);
}
#[test]
fn method_missing_in_impl() {
err(
"
trait Foo {
fn bar();
}
class A
impl Foo for A {}",
(6, 13),
ErrorMessage::MethodMissingFromTrait("Foo".into(), "bar".into(), vec![]),
);
}
#[test]
fn method_returning_self() {
ok("trait Foo {
fn foo(): Self;
}
class A
impl Foo for A {
fn foo(): A { return A(); }
}");
}
#[test]
fn static_method_not_in_trait() {
err(
"
trait Foo {}
class A
impl Foo for A {
static fn bar() {}
}",
(5, 24),
ErrorMessage::StaticMethodNotInTrait("Foo".into(), "bar".into(), vec![]),
);
}
#[test]
fn static_method_missing_in_impl() {
err(
"
trait Foo {
static fn bar();
}
class A
impl Foo for A {}",
(6, 13),
ErrorMessage::StaticMethodMissingFromTrait("Foo".into(), "bar".into(), vec![]),
);
}
#[test]
fn method_return_type_check() {
err(
"trait X {
fn m(): Bool;
fn n(): Bool;
}
class CX
impl X for CX {
fn m(): Int32 { 0 }
fn n(): Bool { true }
}",
(9, 17),
ErrorMessage::ReturnTypeMismatch("Int32".into(), "Bool".into()),
);
}
#[test]
fn impl_method_with_default_body() {
ok("
trait Foo {
fn foo(): Int32 { 1 }
}
class Bar {}
impl Foo for Bar {}");
}
}
| true
|
9c4e9c00d851de42b68beb0a108fc2fc683f8eeb
|
Rust
|
wnz27/vim-clap
|
/crates/types/src/query.rs
|
UTF-8
| 1,083
| 2.96875
| 3
|
[
"MIT"
] |
permissive
|
use crate::search_term::{ExactTerm, FuzzyTerm, InverseTerm, SearchTerm, TermType};
#[derive(Debug, Clone)]
pub struct Query {
pub fuzzy_terms: Vec<FuzzyTerm>,
pub exact_terms: Vec<ExactTerm>,
pub inverse_terms: Vec<InverseTerm>,
}
impl From<&str> for Query {
fn from(query: &str) -> Self {
let mut fuzzy_terms = Vec::new();
let mut exact_terms = Vec::new();
let mut inverse_terms = Vec::new();
for token in query.split_whitespace() {
let SearchTerm { ty, word } = token.into();
match ty {
TermType::Fuzzy(term_ty) => fuzzy_terms.push(FuzzyTerm::new(term_ty, word)),
TermType::Exact(term_ty) => exact_terms.push(ExactTerm::new(term_ty, word)),
TermType::Inverse(term_ty) => inverse_terms.push(InverseTerm::new(term_ty, word)),
}
}
Self {
fuzzy_terms,
exact_terms,
inverse_terms,
}
}
}
impl From<String> for Query {
fn from(s: String) -> Self {
s.as_str().into()
}
}
| true
|
e75fa6c230155df1a896925b075b562610d03ea6
|
Rust
|
KaiserY/trpl-zh-cn
|
/listings/ch14-more-about-cargo/listing-14-05/src/lib.rs
|
UTF-8
| 862
| 3.328125
| 3
|
[
"MIT"
] |
permissive
|
// ANCHOR: here
//! # Art
//!
//! A library for modeling artistic concepts.
pub use self::kinds::PrimaryColor;
pub use self::kinds::SecondaryColor;
pub use self::utils::mix;
pub mod kinds {
// --snip--
// ANCHOR_END: here
/// The primary colors according to the RYB color model.
pub enum PrimaryColor {
Red,
Yellow,
Blue,
}
/// The secondary colors according to the RYB color model.
pub enum SecondaryColor {
Orange,
Green,
Purple,
}
// ANCHOR: here
}
pub mod utils {
// --snip--
// ANCHOR_END: here
use crate::kinds::*;
/// Combines two primary colors in equal amounts to create
/// a secondary color.
pub fn mix(c1: PrimaryColor, c2: PrimaryColor) -> SecondaryColor {
SecondaryColor::Orange
}
// ANCHOR: here
}
// ANCHOR_END: here
| true
|
bc9efe0336f1d8edbecf501f91ac8d6f259ea5ff
|
Rust
|
ryo97321/rust-workspace
|
/yukicoder/level1/no79.rs
|
UTF-8
| 1,277
| 3.265625
| 3
|
[] |
no_license
|
use std::collections::HashMap;
fn getline() -> String {
let mut __ret = String::new();
std::io::stdin().read_line(&mut __ret).ok();
return __ret;
}
fn main() {
let n: usize = getline().trim().parse().unwrap();
let line = getline();
let params: Vec<_> = line.trim().split(' ').collect();
let mut level_map: HashMap<String, i32> = HashMap::new();
for i in 0..n {
let count = level_map.entry(params[i].to_string()).or_insert(0);
*count += 1;
}
let mut level_map_vec: Vec<(&String, &i32)> = level_map.iter().collect();
level_map_vec.sort_by(|a, b| b.1.cmp(a.1));
let mut tuple_vec: Vec<(String, i32)> = Vec::new();
tuple_vec.push((level_map_vec[0].0.to_string(), *level_map_vec[0].1));
for i in 1..level_map_vec.len() {
if tuple_vec[0].1 == *level_map_vec[i].1 {
tuple_vec.push((level_map_vec[i].0.to_string(), *level_map_vec[i].1));
}
}
let mut most_number_vec: Vec<i32> = Vec::new();
for tuple in &tuple_vec {
let number: i32 = tuple.0.parse().unwrap();
most_number_vec.push(number);
}
most_number_vec.sort();
let the_most_common_answer = most_number_vec[most_number_vec.len() - 1];
println!("{}", the_most_common_answer);
}
| true
|
46b26999598d6e50147803f306da8e667b0c2f65
|
Rust
|
PsypherPunk/advent-of-code
|
/2017/21/src/main.rs
|
UTF-8
| 634
| 2.828125
| 3
|
[
"BSD-3-Clause"
] |
permissive
|
use std::fs;
use ::day21::*;
use std::str::FromStr;
fn main() {
let input = fs::read_to_string("input.txt").expect("Error reading input.txt");
let rules = Rules::from_str(&input).unwrap();
let mut image = Image::new();
for _ in 0..5 {
image = image.get_iteration(&rules);
}
println!(
"How many pixels stay on after 5 iterations? {}",
image.get_on_count(),
);
let mut image = Image::new();
for _ in 0..18 {
image = image.get_iteration(&rules);
}
println!(
"How many pixels stay on after 18 iterations? {}",
image.get_on_count(),
);
}
| true
|
67da26377ddcceb6a950825cc2caf765262c335b
|
Rust
|
diogobaeder/eulerust
|
/p019/src/main.rs
|
UTF-8
| 4,358
| 4.03125
| 4
|
[
"BSD-2-Clause"
] |
permissive
|
#[derive(Debug)]
struct Date {
year: u32,
month: u8,
day: u8,
week_day: u8,
}
impl Date {
fn new() -> Date {
Date {
year: 1900,
month: 1,
day: 1,
week_day: 2,
}
}
fn add(&mut self) {
self.day += 1;
self.week_day += 1;
if self.week_day > 7 {
self.week_day = 1;
}
if self.day > end_of_month(self.year, self.month) {
self.month += 1;
self.day = 1;
}
if self.month > 12 {
self.year += 1;
self.month = 1;
}
}
}
fn end_of_month(year: u32, month: u8) -> u8 {
match month {
1 | 3 | 5 | 7 | 8 | 10 | 12 => 31,
4 | 6 | 9 | 11 => 30,
_ => {
if year % 400 == 0 || (year % 4 == 0 && year % 100 != 0) {
29
} else {
28
}
}
}
}
impl PartialEq for Date {
fn eq(&self, other: &Date) -> bool {
if self.year != other.year || self.month != other.month || self.day != other.day ||
self.week_day != other.week_day
{
return false;
}
true
}
}
fn main() {
let mut count = 0;
let mut date = Date {
year: 1901,
month: 1,
day: 1,
week_day: 3,
};
let end_date = Date {
year: 2000,
month: 12,
day: 31,
week_day: 1,
};
while date != end_date {
if date.day == 1 && date.week_day == 1 {
count += 1;
}
date.add();
}
println!("{}", count);
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn gets_new_date() {
let date = Date::new();
assert_eq!(date.year, 1900);
assert_eq!(date.month, 1);
assert_eq!(date.day, 1);
assert_eq!(date.week_day, 2);
}
#[test]
fn adds_one_day() {
let mut date = Date::new();
date.add();
assert_eq!(date.year, 1900);
assert_eq!(date.month, 1);
assert_eq!(date.day, 2);
assert_eq!(date.week_day, 3);
}
#[test]
fn equals_to_date() {
let mut date1 = Date::new();
let mut date2 = Date::new();
date1.add();
date2.add();
assert_eq!(date1, date2);
}
#[test]
fn unequals_to_date() {
let mut date1 = Date::new();
let date2 = Date::new();
date1.add();
assert_ne!(date1, date2);
}
#[test]
fn adds_7_days() {
let mut date = Date::new();
for _ in 1..8 {
date.add();
}
assert_eq!(
date,
Date {
year: 1900,
month: 1,
day: 8,
week_day: 2,
}
);
}
#[test]
fn adds_31_days() {
let mut date = Date::new();
for _ in 1..32 {
date.add();
}
assert_eq!(
date,
Date {
year: 1900,
month: 2,
day: 1,
week_day: 5,
}
);
}
#[test]
fn gets_end_of_months() {
assert_eq!(end_of_month(1900, 1), 31);
assert_eq!(end_of_month(1900, 2), 28);
assert_eq!(end_of_month(1900, 3), 31);
assert_eq!(end_of_month(1900, 4), 30);
assert_eq!(end_of_month(1900, 5), 31);
assert_eq!(end_of_month(1900, 6), 30);
assert_eq!(end_of_month(1900, 7), 31);
assert_eq!(end_of_month(1900, 8), 31);
assert_eq!(end_of_month(1900, 9), 30);
assert_eq!(end_of_month(1900, 10), 31);
assert_eq!(end_of_month(1900, 11), 30);
assert_eq!(end_of_month(1900, 12), 31);
}
#[test]
fn gets_leap_years() {
assert_eq!(end_of_month(1904, 2), 29);
assert_eq!(end_of_month(1908, 2), 29);
assert_eq!(end_of_month(2000, 2), 29);
assert_eq!(end_of_month(1800, 2), 28);
}
#[test]
fn adds_365_days() {
let mut date = Date::new();
for _ in 1..366 {
date.add();
}
assert_eq!(
date,
Date {
year: 1901,
month: 1,
day: 1,
week_day: 3,
}
);
}
}
| true
|
d9cf925cf97bff4979eb0082d8d82c98f93ca965
|
Rust
|
BenBergman/advent-of-code-2019
|
/src/bin/aoc02.rs
|
UTF-8
| 4,544
| 2.875
| 3
|
[] |
no_license
|
use itertools::repeat_n;
use itertools::Itertools;
fn main() {
let original_intcode = vec![
1, 0, 0, 3, 1, 1, 2, 3, 1, 3, 4, 3, 1, 5, 0, 3, 2, 1, 6, 19, 1, 5, 19, 23, 1, 23, 6, 27, 1,
5, 27, 31, 1, 31, 6, 35, 1, 9, 35, 39, 2, 10, 39, 43, 1, 43, 6, 47, 2, 6, 47, 51, 1, 5, 51,
55, 1, 55, 13, 59, 1, 59, 10, 63, 2, 10, 63, 67, 1, 9, 67, 71, 2, 6, 71, 75, 1, 5, 75, 79,
2, 79, 13, 83, 1, 83, 5, 87, 1, 87, 9, 91, 1, 5, 91, 95, 1, 5, 95, 99, 1, 99, 13, 103, 1,
10, 103, 107, 1, 107, 9, 111, 1, 6, 111, 115, 2, 115, 13, 119, 1, 10, 119, 123, 2, 123, 6,
127, 1, 5, 127, 131, 1, 5, 131, 135, 1, 135, 6, 139, 2, 139, 10, 143, 2, 143, 9, 147, 1,
147, 6, 151, 1, 151, 13, 155, 2, 155, 9, 159, 1, 6, 159, 163, 1, 5, 163, 167, 1, 5, 167,
171, 1, 10, 171, 175, 1, 13, 175, 179, 1, 179, 2, 183, 1, 9, 183, 0, 99, 2, 14, 0, 0,
];
let mut intcode = original_intcode.clone();
intcode[1] = 12;
intcode[2] = 2;
let result = get_intcode_result(intcode);
println!(
"{}-1: {:?}",
std::env::current_exe()
.unwrap()
.file_name()
.unwrap()
.to_str()
.unwrap(),
result
);
for x in repeat_n(0..original_intcode.len(), 2).multi_cartesian_product() {
let mut intcode = original_intcode.clone();
intcode[1] = x[0];
intcode[2] = x[1];
let result = get_intcode_result(intcode);
if result == 19_690_720 {
println!(
"{}-1: {:?}",
std::env::current_exe()
.unwrap()
.file_name()
.unwrap()
.to_str()
.unwrap(),
100 * x[0] + x[1]
);
}
}
}
fn get_intcode_result(intcode: Vec<usize>) -> usize {
let mut local_intcode = intcode;
execute_intcode(&mut local_intcode);
local_intcode[0]
}
fn execute_intcode(intcode: &mut Vec<usize>) {
for i in (0..intcode.len()).step_by(4) {
match intcode[i] {
1 => {
if let [a, b, c] = intcode[(i + 1)..=(i + 3)] {
intcode[c] = intcode[a] + intcode[b];
}
}
2 => {
if let [a, b, c] = intcode[(i + 1)..=(i + 3)] {
intcode[c] = intcode[a] * intcode[b];
}
}
99 => break,
opcode => panic!("{:?} not allowed as opcode", opcode),
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_example() {
let mut program = vec![1, 9, 10, 3, 2, 3, 11, 0, 99, 30, 40, 50];
let expected = vec![3500, 9, 10, 70, 2, 3, 11, 0, 99, 30, 40, 50];
execute_intcode(&mut program);
assert_eq!(program, expected);
}
#[test]
fn test_example_2() {
let mut program = vec![1, 0, 0, 0, 99];
let expected = vec![2, 0, 0, 0, 99];
execute_intcode(&mut program);
assert_eq!(program, expected);
}
#[test]
fn test_example_3() {
let mut program = vec![2, 3, 0, 3, 99];
let expected = vec![2, 3, 0, 6, 99];
execute_intcode(&mut program);
assert_eq!(program, expected);
}
#[test]
fn test_example_4() {
let mut program = vec![2, 4, 4, 5, 99, 0];
let expected = vec![2, 4, 4, 5, 99, 9801];
execute_intcode(&mut program);
assert_eq!(program, expected);
}
#[test]
fn test_example_5() {
let mut program = vec![1, 1, 1, 4, 99, 5, 6, 0, 99];
let expected = vec![30, 1, 1, 4, 2, 5, 6, 0, 99];
execute_intcode(&mut program);
assert_eq!(program, expected);
}
#[test]
fn test_example_result() {
let program = vec![1, 9, 10, 3, 2, 3, 11, 0, 99, 30, 40, 50];
assert_eq!(get_intcode_result(program), 3500);
}
#[test]
fn test_example_2_result() {
let program = vec![1, 0, 0, 0, 99];
assert_eq!(get_intcode_result(program), 2);
}
#[test]
fn test_example_3_result() {
let program = vec![2, 3, 0, 3, 99];
assert_eq!(get_intcode_result(program), 2);
}
#[test]
fn test_example_4_result() {
let program = vec![2, 4, 4, 5, 99, 0];
assert_eq!(get_intcode_result(program), 2);
}
#[test]
fn test_example_5_result() {
let program = vec![1, 1, 1, 4, 99, 5, 6, 0, 99];
assert_eq!(get_intcode_result(program), 30);
}
}
| true
|
521004509ef58b36d3ab5d3fd5672bbf8aaa47fa
|
Rust
|
reproto/reproto
|
/lib/path-lexer/path_lexer.rs
|
UTF-8
| 5,709
| 3.34375
| 3
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
//! Lexer for paths.
use crate::errors::{Error, Result};
use crate::path_token::PathToken;
use std::str::CharIndices;
pub struct PathLexer<'input> {
source: CharIndices<'input>,
source_len: usize,
source_str: &'input str,
n0: Option<(usize, char)>,
n1: Option<(usize, char)>,
buffer: String,
segment_start: Option<usize>,
variable_mode: bool,
}
impl<'input> PathLexer<'input> {
/// Advance the source iterator.
#[inline]
fn step(&mut self) {
self.n0 = self.n1;
self.n1 = self.source.next();
}
#[inline]
fn step_n(&mut self, n: usize) -> Option<usize> {
for _ in 0..n {
self.step();
}
self.n0.map(|v| v.0)
}
#[inline]
fn one(&mut self) -> Option<(usize, char)> {
self.n0
}
#[inline]
fn two(&mut self) -> Option<(usize, char, char)> {
if let (Some((pos, a)), Some((_, b))) = (self.n0, self.n1) {
Some((pos, a, b))
} else {
None
}
}
#[inline]
fn pos(&self) -> usize {
self.n0.map(|n| n.0).unwrap_or(self.source_len)
}
fn token(
&mut self,
start: usize,
token: PathToken<'input>,
) -> Result<(usize, PathToken<'input>, usize)> {
let end = self.step_n(1).unwrap_or(self.source_len);
Ok((start, token, end))
}
fn identifier(&mut self, start: usize) -> Result<(usize, PathToken<'input>, usize)> {
let (end, content) = take!(self, start, 'a'..='z' | '0'..='9' | '_');
Ok((start, PathToken::Identifier(content.into()), end))
}
fn unexpected(&self) -> Error {
Error::Unexpected { pos: self.pos() }
}
fn normal_mode_next(&mut self) -> Option<Result<(usize, PathToken<'input>, usize)>> {
// one character keywords
while let Some((pos, c)) = self.one() {
let segment_start = match c {
'{' | '}' | '/' => false,
'\\' => {
match self.two().map(|v| v.2) {
Some(c @ '\\') | Some(c @ '/') | Some(c @ '{') | Some(c @ '}') => {
self.buffer.push(c)
}
_ => return Some(Err(self.unexpected())),
}
self.step_n(2);
true
}
'a'..='z' if self.variable_mode => {
return Some(self.identifier(pos));
}
c if !self.variable_mode => {
self.buffer.push(c);
self.step_n(1);
true
}
_ => return Some(Err(self.unexpected())),
};
if segment_start {
if self.segment_start.is_none() {
self.segment_start = Some(pos);
}
continue;
}
// return buffered segment.
if let Some(start) = self.segment_start.take() {
if !self.buffer.is_empty() {
let buffer = self.buffer.clone();
self.buffer.clear();
return Some(Ok((start, PathToken::Segment(buffer), pos)));
}
}
let out = match c {
'{' if !self.variable_mode => {
self.variable_mode = true;
self.token(pos, PathToken::LeftCurly)
}
'}' if self.variable_mode => {
self.variable_mode = false;
self.token(pos, PathToken::RightCurly)
}
'/' => self.token(pos, PathToken::Slash),
_ => return Some(Err(self.unexpected())),
};
return Some(out);
}
if let Some(start) = self.segment_start.take() {
if !self.buffer.is_empty() {
let buffer = self.buffer.clone();
return Some(Ok((start, PathToken::Segment(buffer), self.source_len)));
}
}
None
}
}
impl<'input> Iterator for PathLexer<'input> {
type Item = Result<(usize, PathToken<'input>, usize)>;
fn next(&mut self) -> Option<Self::Item> {
self.normal_mode_next()
}
}
pub fn path_lex(input: &str) -> PathLexer {
let mut source = input.char_indices();
let n0 = source.next();
let n1 = source.next();
PathLexer {
source: source,
source_len: input.len(),
source_str: input,
n0: n0,
n1: n1,
buffer: String::new(),
segment_start: None,
variable_mode: false,
}
}
#[cfg(test)]
pub mod tests {
use super::PathToken::*;
use super::*;
fn tokenize(input: &str) -> Result<Vec<(usize, PathToken, usize)>> {
path_lex(input).collect()
}
#[test]
pub fn test_path_lexer() {
let input = "foo/\\{bar/\\/baz/{id}\\/\\\\\\{\\}";
let expected = vec![
(0, Segment("foo".to_string()), 3),
(3, Slash, 4),
(4, Segment("{bar".to_string()), 9),
(9, Slash, 10),
(10, Segment("/baz".to_string()), 15),
(15, Slash, 16),
(16, LeftCurly, 17),
(17, Identifier("id".into()), 19),
(19, RightCurly, 20),
(20, Segment("/\\{}".to_string()), 28),
];
assert_eq!("{id}", &input[16..20]);
assert_eq!(expected, tokenize(input).unwrap());
}
#[test]
pub fn test_path_err() {
let input = " \\id";
let expected = Err(Error::Unexpected { pos: 1 });
assert_eq!(expected, tokenize(input));
}
}
| true
|
3a98cb4b33fe73a0fe207d0b4bcb2a17357171bf
|
Rust
|
zrma/1d1rust
|
/src/boj/p10k/p10157.rs
|
UTF-8
| 3,954
| 3.21875
| 3
|
[] |
no_license
|
#[allow(dead_code)]
fn solve10157(col: i32, row: i32, k: i32) -> (i32, i32, bool) {
if row * col < k {
return (0, 0, false);
}
let mut x = 1;
let mut y = 1;
let mut k0 = k - 1;
let mut col0 = col - 1;
let mut row0 = row;
let mut dir = Direction::Up;
loop {
match dir {
Direction::Up => {
if k0 < row0 {
return (x, y + k0, true);
}
x += 1;
y += row0 - 1;
k0 -= row0;
row0 -= 1;
dir = turn(dir);
}
Direction::Right => {
if k0 < col0 {
return (x + k0, y, true);
}
x += col0 - 1;
y -= 1;
k0 -= col0;
col0 -= 1;
dir = turn(dir);
}
Direction::Down => {
if k0 < row0 {
return (x, y - k0, true);
}
x -= 1;
y -= row0 - 1;
k0 -= row0;
row0 -= 1;
dir = turn(dir);
}
Direction::Left => {
if k0 < col0 {
return (x - k0, y, true);
}
x -= col0 - 1;
y += 1;
k0 -= col0;
col0 -= 1;
dir = turn(dir);
}
}
}
}
#[derive(Clone, Copy)]
enum Direction {
Up = 0,
Right,
Down,
Left,
}
fn turn(d: Direction) -> Direction {
match d {
Direction::Up => Direction::Right,
Direction::Right => Direction::Down,
Direction::Down => Direction::Left,
Direction::Left => Direction::Up,
}
}
// https://www.acmicpc.net/problem/10157
// 자리배정
#[test]
fn test_solve10157() {
struct TestData {
col: i32,
row: i32,
k: i32,
want_ok: bool,
want_x: i32,
want_y: i32,
}
for data in vec![
TestData {
col: 7,
row: 6,
k: 1,
want_ok: true,
want_x: 1,
want_y: 1,
},
TestData {
col: 7,
row: 6,
k: 6,
want_ok: true,
want_x: 1,
want_y: 6,
},
TestData {
col: 7,
row: 6,
k: 7,
want_ok: true,
want_x: 2,
want_y: 6,
},
TestData {
col: 7,
row: 6,
k: 11,
want_ok: true,
want_x: 6,
want_y: 6,
},
TestData {
col: 7,
row: 6,
k: 12,
want_ok: true,
want_x: 7,
want_y: 6,
},
TestData {
col: 7,
row: 6,
k: 13,
want_ok: true,
want_x: 7,
want_y: 5,
},
TestData {
col: 7,
row: 6,
k: 17,
want_ok: true,
want_x: 7,
want_y: 1,
},
TestData {
col: 7,
row: 6,
k: 87,
want_ok: false,
want_x: 0,
want_y: 0,
},
TestData {
col: 100,
row: 100,
k: 3000,
want_ok: true,
want_x: 9,
want_y: 64,
},
TestData {
col: 10000,
row: 10000,
k: 99999999,
want_ok: true,
want_x: 5001,
want_y: 5001,
},
] {
let (got_x, got_y, got_ok) = solve10157(data.col, data.row, data.k);
assert_eq!(got_ok, data.want_ok);
if got_ok {
assert_eq!(got_x, data.want_x);
assert_eq!(got_y, data.want_y);
}
}
}
| true
|
8d8c77cbdf49dda75de5a9cfe8fa96e6fe11a996
|
Rust
|
zarik5/bridgevr-dev
|
/bridgevr/vulkan_test/src/main.rs
|
UTF-8
| 1,813
| 2.78125
| 3
|
[] |
no_license
|
mod graphics;
use bridgevr_common::*;
use graphics::*;
use safe_transmute::*;
use std::{mem::size_of, sync::Arc};
const TRACE_CONTEXT: &str = "Main";
const IMAGE_SIZE: (u32, u32) = (3200, 2400);
#[derive(Debug)]
struct Hello {}
fn run() -> StrResult {
println!("Starting...");
let context = Arc::new(trace_err!(graphics::GraphicsContext::new(None, &[], &[]))?);
let buffer = Arc::new(Buffer::new(
context.clone(),
IMAGE_SIZE.0 as u64 * IMAGE_SIZE.1 as u64 * 4 * size_of::<f32>() as u64,
BufferType::Storage,
)?);
let shader_bytecode = include_bytes!(concat!(env!("OUT_DIR"), "/shader.spv"));
let operation_buffer = OperationBuffer::new(
context,
Operation::Render {
layout: vec![vec![Binding::Buffer(buffer.clone())]],
shader: shader_bytecode.to_vec(),
resolution: IMAGE_SIZE,
},
)?;
operation_buffer.execute()?;
buffer.download(|data| {
println!("Converting data...");
let data = transmute_many::<f32, PermissiveGuard>(data).unwrap();
let data = data.iter().map(|f| (f * 255.0) as u8).collect::<Vec<_>>();
let file = std::fs::File::create("./mandelbrot.png").unwrap();
let mut file_buf = std::io::BufWriter::new(file);
let mut encoder = png::Encoder::new(&mut file_buf, IMAGE_SIZE.0, IMAGE_SIZE.1);
encoder.set_color(png::ColorType::RGBA);
encoder.set_depth(png::BitDepth::Eight);
let mut writer = encoder.write_header().unwrap();
println!("Saving data...");
trace_err!(writer.write_image_data(&data))?;
Ok(())
})?;
println!("Closing...");
Ok(())
}
fn main() {
match run() {
Ok(()) => println!("All right!"),
Err(e) => println!("{}", e),
}
}
| true
|
2527bfa7a5c5068b70ace6bb6cf5db7291d06903
|
Rust
|
Cheshulko/Algorithms
|
/Leetcode/LongestCycleinaGraph.rs
|
UTF-8
| 1,317
| 3.15625
| 3
|
[] |
no_license
|
// https://leetcode.com/problems/longest-cycle-in-a-graph
struct Graph {
edges: Vec<i32>,
data: Vec<(i32, i32, bool)>,
ans: i32,
}
impl Graph {
fn new(edges: Vec<i32>) -> Self {
let cnt = edges.len();
Graph {
edges,
data: vec![(0, 0, false); cnt],
ans: -1,
}
}
fn dfs(&mut self, cur: usize, cur_iter: &i32, depth: i32) {
self.data[cur].2 = true;
self.data[cur].1 = depth;
self.data[cur].0 = *cur_iter;
let to = self.edges[cur];
if to != -1 {
if self.data[to as usize].2 {
if *cur_iter == self.data[to as usize].0 {
self.ans = std::cmp::max(self.ans, depth - self.data[to as usize].1 + 1);
}
} else {
self.dfs(to as usize, cur_iter, depth + 1)
}
}
}
fn find(&mut self) -> i32 {
let mut cur_iter = 1;
for cur in 0..self.edges.len() {
if !self.data[cur].2 {
self.dfs(cur, &cur_iter, 0);
cur_iter += 1;
}
}
self.ans
}
}
struct Solution {}
impl Solution {
pub fn longest_cycle(edges: Vec<i32>) -> i32 {
let mut graph = Graph::new(edges);
graph.find()
}
}
| true
|
154c0a5aebd53d3ab309b5c53e86768102ffa26f
|
Rust
|
huonw/slow_primes
|
/src/fast_sieve.rs
|
UTF-8
| 4,117
| 3.171875
| 3
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
#![allow(dead_code)]
use bit::{BitVec};
use std::{cmp};
use Primes;
/// A segmented sieve that yields only a small run of primes at a
/// time.
///
/// This is heavily inspired by this [segmented
/// sieve](http://primesieve.org/segmented_sieve.html) code.
#[derive(Debug)]
pub struct StreamingSieve {
small: Primes,
// stores which numbers *aren't* prime, i.e. true == composite.
sieve: BitVec,
primes: Vec<(usize, usize)>,
low: usize,
current: usize,
limit: usize,
}
const CACHE: usize = 32 << 10;
// 8 for the bit vector, 2 for storing odd numbers only
const SEG_ELEMS: usize = 8 * CACHE;
const SEG_LEN: usize = 2 * SEG_ELEMS;
impl StreamingSieve {
/// Create a new instance of the streaming sieve that will
/// correctly progressively filter primes up to `limit`.
pub fn new(limit: usize) -> StreamingSieve {
let small = Primes::sieve((limit as f64).sqrt() as usize + 1);
let current = 3;
let low = 0;
let elems = cmp::min(limit, SEG_ELEMS);
StreamingSieve {
small: small,
sieve: BitVec::from_elem(elems, false),
primes: vec![],
low: low,
current: current,
limit: limit
}
}
/// Extract the next chunk of filtered primes, the return value is
/// `Some((low, v))` or `None` if the sieve has reached the limit.
///
/// The vector stores bits for each odd number starting at `low`.
/// Bit `n` of `v` is set if and only if `low + 2 * n + 1` is
/// prime.
///
/// NB. the prime 2 is not included in any of these sieves and so
/// needs special handling.
pub fn next(&mut self) -> Option<(usize, &BitVec)> {
if self.low >= self.limit {
return None
}
let low = self.low;
self.low += SEG_LEN;
let high = cmp::min(low + SEG_LEN - 1, self.limit);
self.sieve.clear();
let mut s = self.current;
while s * s <= high {
if self.small.is_prime(s) {
self.primes.push((s, s * s - low));
}
s += 1
}
self.current = s;
let top = cmp::min(SEG_LEN, self.limit) / 2;
for &mut (k, ref mut next) in self.primes.iter_mut() {
let mut j = *next / 2;
while j < top {
unsafe {
self.sieve.set_unchecked(j, true);
}
j += k;
}
// if this wraps, we've hit the limit, and so won't be
// continuing, so whatever, it can be junk.
*next = (2 * j + 1).wrapping_sub(SEG_LEN);
}
if low == 0 {
// 1 is not prime.
self.sieve.set(0, true);
}
Some((low, &self.sieve))
}
}
#[cfg(test)]
mod tests {
use super::StreamingSieve;
#[test]
fn test() {
const LIMIT: usize = 2_000_000;
let mut sieve = StreamingSieve::new(LIMIT);
let primes = ::Primes::sieve(LIMIT);
while let Some((low, next)) = sieve.next() {
let mut i = low + 1;
while i < low + next.len() {
if i > LIMIT { break }
assert!(primes.is_prime(i) == !next[(i - low) / 2],
"failed for {} (is prime = {})", i, primes.is_prime(i));
i += 2;
}
}
}
}
#[cfg(all(test, feature = "unstable"))]
mod benches {
use test::Bencher;
use bit::BitVec;
use super::StreamingSieve;
fn run(b: &mut Bencher, n: usize) {
b.iter(|| {
let mut sieve = StreamingSieve::new(n);
while sieve.next().is_some() {}
})
}
#[bench]
fn sieve_small(b: &mut Bencher) {
run(b, 100)
}
#[bench]
fn sieve_medium(b: &mut Bencher) {
run(b, 10_000)
}
#[bench]
fn sieve_large(b: &mut Bencher) {
run(b, 100_000)
}
#[bench]
fn sieve_larger(b: &mut Bencher) {
run(b, 1_000_000)
}
#[bench]
fn sieve_huge(b: &mut Bencher) {
run(b, 10_000_000)
}
}
| true
|
2764a27682113506086426ae4939e90e4dbe3073
|
Rust
|
dfinke/MultiLanguagePSCrescendo
|
/RustAndCrescendo/hello.rs
|
UTF-8
| 233
| 3.046875
| 3
|
[] |
no_license
|
use std::env;
fn main() {
let args: Vec<String> = env::args().collect();
let name = &args[1];
let age = &args[2];
println!("name = {}", name);
println!("age = {}", age);
println!("language = Rust");
}
| true
|
eed08dae7878fbfa5369daaeac4430821a310a81
|
Rust
|
Goliworks/Roma
|
/src/utils.rs
|
UTF-8
| 392
| 2.96875
| 3
|
[] |
no_license
|
use dirs::home_dir;
pub fn host_to_domain(host: String) -> String {
let domain: Vec<&str> = host.split(":").collect();
domain[0].to_string()
}
pub fn resolve_path(path: String) -> String {
let spl: Vec<&str> = path.split("~/").collect();
if spl[0].is_empty() && spl.len() > 1 {
return format!("{}/{}", home_dir().unwrap().to_str().unwrap(), spl[1])
}
path
}
| true
|
150afa889edda70ce636ef13d19cab351d32080b
|
Rust
|
RWTH-OS/rust-x86
|
/src/bits32/irq.rs
|
UTF-8
| 1,923
| 2.984375
| 3
|
[
"MIT"
] |
permissive
|
//! Interrupt description and set-up code.
use shared::descriptor::*;
use shared::paging::VAddr;
use shared::PrivilegeLevel;
/// An interrupt gate or trap gate descriptor.
///
/// See Intel manual 3a for details, specifically section 6.11.
#[derive(Debug, Copy, Clone)]
#[repr(C, packed)]
pub struct IdtEntry {
/// Lower 16 bits of ISR.
pub offset_lo: u16,
/// Segment selector.
pub selector: u16,
/// This must always be zero.
pub reserved: u8,
/// flags.
pub flags: Flags,
/// The upper 16 bits of ISR.
pub offset_hi: u16
}
impl IdtEntry {
pub const MISSING: IdtEntry = IdtEntry {
offset_lo: 0,
selector: 0,
reserved: 0,
flags: Flags::BLANK,
offset_hi: 0
};
/// Create a new IdtEntry pointing at `handler`, which must be a function
/// with interrupt calling conventions. (This must be currently defined in
/// assembly language.) The `gdt_code_selector` value must be the offset of
/// code segment entry in the GDT.
///
/// The "Present" flag set, which is the most common case. If you need
/// something else, you can construct it manually.
pub const fn new(handler: VAddr, gdt_code_selector: u16,
dpl: PrivilegeLevel, block: bool) -> IdtEntry {
IdtEntry {
offset_lo: ((handler.as_usize() as u32) & 0xFFFF) as u16,
offset_hi: ((handler.as_usize() as u32 & 0xFFFF0000) >> 16) as u16,
selector: gdt_code_selector,
reserved: 0,
// Nice bitflags operations don't work in const fn, hence these
// ad-hoc methods.
flags: Flags::from_priv(dpl)
.const_or(FLAGS_TYPE_SYS_NATIVE_INTERRUPT_GATE
.const_mux(FLAGS_TYPE_SYS_NATIVE_TRAP_GATE,
block))
.const_or(FLAGS_PRESENT),
}
}
}
| true
|
1b3c694ee91874fb216e7301b5c10b76949c7ffb
|
Rust
|
glium/glium
|
/src/program/compute.rs
|
UTF-8
| 8,074
| 2.53125
| 3
|
[
"Apache-2.0"
] |
permissive
|
use crate::gl;
use crate::context::CommandContext;
use crate::backend::Facade;
use std::fmt;
use std::collections::hash_map::{self, HashMap};
use std::os::raw;
use std::hash::BuildHasherDefault;
use fnv::FnvHasher;
use crate::CapabilitiesSource;
use crate::GlObject;
use crate::ProgramExt;
use crate::Handle;
use crate::RawUniformValue;
use crate::program::{COMPILER_GLOBAL_LOCK, ProgramCreationError, Binary, GetBinaryError, SpirvEntryPoint};
use crate::program::reflection::{Uniform, UniformBlock};
use crate::program::reflection::{ShaderStage, SubroutineData};
use crate::program::shader::{build_shader, build_spirv_shader, check_shader_type_compatibility};
use crate::program::raw::RawProgram;
use crate::buffer::BufferSlice;
use crate::uniforms::Uniforms;
/// A combination of compute shaders linked together.
pub struct ComputeShader {
raw: RawProgram,
}
impl ComputeShader {
/// Returns true if the backend supports compute shaders.
#[inline]
pub fn is_supported<C: ?Sized>(ctxt: &C) -> bool where C: CapabilitiesSource {
check_shader_type_compatibility(ctxt, gl::COMPUTE_SHADER)
}
/// Builds a new compute shader from some source code.
#[inline]
pub fn from_source<F: ?Sized>(facade: &F, src: &str) -> Result<ComputeShader, ProgramCreationError>
where F: Facade
{
let _lock = COMPILER_GLOBAL_LOCK.lock();
let shader = build_shader(facade, gl::COMPUTE_SHADER, src)?;
Ok(ComputeShader {
raw: RawProgram::from_shaders(facade, &[shader], false, false, false, None)?
})
}
/// Builds a new compute shader from SPIR-V module.
#[inline]
pub fn from_spirv<F: ?Sized>(facade: &F, spirv: &SpirvEntryPoint) -> Result<ComputeShader, ProgramCreationError>
where F: Facade
{
let _lock = COMPILER_GLOBAL_LOCK.lock();
let shader = build_spirv_shader(facade, gl::COMPUTE_SHADER, spirv)?;
Ok(ComputeShader {
raw: RawProgram::from_shaders(facade, &[shader], false, false, false, None)?
})
}
/// Builds a new compute shader from some binary.
#[inline]
pub fn from_binary<F: ?Sized>(facade: &F, data: Binary) -> Result<ComputeShader, ProgramCreationError>
where F: Facade
{
let _lock = COMPILER_GLOBAL_LOCK.lock();
Ok(ComputeShader {
raw: RawProgram::from_binary(facade, data)?
})
}
/// Executes the compute shader.
///
/// `x * y * z` work groups will be started. The current work group can be retrieved with
/// `gl_WorkGroupID`. Inside each work group, additional local work groups can be started
/// depending on the attributes of the compute shader itself.
#[inline]
pub fn execute<U>(&self, uniforms: U, x: u32, y: u32, z: u32) where U: Uniforms {
unsafe { self.raw.dispatch_compute(uniforms, x, y, z) }.unwrap(); // FIXME: return error
}
/// Executes the compute shader.
///
/// This is similar to `execute`, except that the parameters are stored in a buffer.
#[inline]
pub fn execute_indirect<U>(&self, uniforms: U, buffer: BufferSlice<'_, ComputeCommand>)
where U: Uniforms
{
unsafe { self.raw.dispatch_compute_indirect(uniforms, buffer) }.unwrap(); // FIXME: return error
}
/// Returns the program's compiled binary.
///
/// You can store the result in a file, then reload it later. This avoids having to compile
/// the source code every time.
#[inline]
pub fn get_binary(&self) -> Result<Binary, GetBinaryError> {
self.raw.get_binary()
}
/// Returns informations about a uniform variable, if it exists.
#[inline]
pub fn get_uniform(&self, name: &str) -> Option<&Uniform> {
self.raw.get_uniform(name)
}
/// Returns an iterator to the list of uniforms.
///
/// ## Example
///
/// ```no_run
/// # fn example(program: glium::Program) {
/// for (name, uniform) in program.uniforms() {
/// println!("Name: {} - Type: {:?}", name, uniform.ty);
/// }
/// # }
/// ```
#[inline]
pub fn uniforms(&self) -> hash_map::Iter<'_, String, Uniform> {
self.raw.uniforms()
}
/// Returns a list of uniform blocks.
///
/// ## Example
///
/// ```no_run
/// # fn example(program: glium::Program) {
/// for (name, uniform) in program.get_uniform_blocks() {
/// println!("Name: {}", name);
/// }
/// # }
/// ```
#[inline]
pub fn get_uniform_blocks(&self)
-> &HashMap<String, UniformBlock, BuildHasherDefault<FnvHasher>> {
self.raw.get_uniform_blocks()
}
/// Returns the list of shader storage blocks.
///
/// ## Example
///
/// ```no_run
/// fn example(program: glium::Program) {
/// for (name, uniform) in program.get_shader_storage_blocks() {
/// println!("Name: {}", name);
/// }
/// # }
/// ```
#[inline]
pub fn get_shader_storage_blocks(&self)
-> &HashMap<String, UniformBlock, BuildHasherDefault<FnvHasher>> {
self.raw.get_shader_storage_blocks()
}
}
impl fmt::Debug for ComputeShader {
#[inline]
fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
write!(formatter, "{:?}", self.raw)
}
}
impl GlObject for ComputeShader {
type Id = Handle;
#[inline]
fn get_id(&self) -> Handle {
self.raw.get_id()
}
}
impl ProgramExt for ComputeShader {
#[inline]
fn use_program(&self, ctxt: &mut CommandContext<'_>) {
self.raw.use_program(ctxt)
}
#[inline]
fn set_uniform(&self, ctxt: &mut CommandContext<'_>, uniform_location: gl::types::GLint,
value: &RawUniformValue)
{
self.raw.set_uniform(ctxt, uniform_location, value)
}
#[inline]
fn set_uniform_block_binding(&self, ctxt: &mut CommandContext<'_>, block_location: gl::types::GLuint,
value: gl::types::GLuint)
{
self.raw.set_uniform_block_binding(ctxt, block_location, value)
}
#[inline]
fn set_shader_storage_block_binding(&self, ctxt: &mut CommandContext<'_>,
block_location: gl::types::GLuint,
value: gl::types::GLuint)
{
self.raw.set_shader_storage_block_binding(ctxt, block_location, value)
}
#[inline]
fn set_subroutine_uniforms_for_stage(&self, ctxt: &mut CommandContext<'_>,
stage: ShaderStage,
indices: &[gl::types::GLuint])
{
self.raw.set_subroutine_uniforms_for_stage(ctxt, stage, indices);
}
#[inline]
fn get_uniform(&self, name: &str) -> Option<&Uniform> {
self.raw.get_uniform(name)
}
#[inline]
fn get_uniform_blocks(&self) -> &HashMap<String, UniformBlock, BuildHasherDefault<FnvHasher>> {
self.raw.get_uniform_blocks()
}
#[inline]
fn get_shader_storage_blocks(&self)
-> &HashMap<String, UniformBlock, BuildHasherDefault<FnvHasher>> {
self.raw.get_shader_storage_blocks()
}
#[inline]
fn get_atomic_counters(&self)
-> &HashMap<String, UniformBlock, BuildHasherDefault<FnvHasher>> {
self.raw.get_atomic_counters()
}
#[inline]
fn get_subroutine_data(&self) -> &SubroutineData {
self.raw.get_subroutine_data()
}
}
/// Represents a compute shader command waiting to be dispatched.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ComputeCommand {
/// Number of X groups.
pub num_groups_x: raw::c_uint,
/// Number of Y groups.
pub num_groups_y: raw::c_uint,
/// Number of Z groups.
pub num_groups_z: raw::c_uint,
}
implement_uniform_block!(ComputeCommand, num_groups_x, num_groups_y, num_groups_z);
| true
|
29595e4893a4998c6483603bcec79812837bfa4e
|
Rust
|
jhspetersson/fselect
|
/src/fileinfo.rs
|
UTF-8
| 405
| 2.578125
| 3
|
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-free-unknown"
] |
permissive
|
use zip;
use zip::DateTime;
pub struct FileInfo {
pub name: String,
pub size: u64,
pub mode: Option<u32>,
pub modified: DateTime,
}
pub fn to_file_info(zipped_file: &zip::read::ZipFile) -> FileInfo {
FileInfo {
name: zipped_file.name().to_string(),
size: zipped_file.size(),
mode: zipped_file.unix_mode(),
modified: zipped_file.last_modified()
}
}
| true
|
d6c40fa9848c2e2418579752d14f5ef856d7b5f2
|
Rust
|
burrbull/stm32f1-gates
|
/src/stm32f107/dma1/isr.rs
|
UTF-8
| 14,781
| 2.921875
| 3
|
[] |
no_license
|
#[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
impl super::ISR {
#[doc = r" Reads the contents of the register"]
#[inline]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
}
#[doc = "Possible values of the field `GIF1`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum GIF1R {
#[doc = "No transfer error, half event, complete event"]
NOEVENT,
#[doc = "A transfer error, half event or complete event has occured"]
EVENT,
}
impl GIF1R {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
GIF1R::NOEVENT => false,
GIF1R::EVENT => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> GIF1R {
match value {
false => GIF1R::NOEVENT,
true => GIF1R::EVENT,
}
}
#[doc = "Checks if the value of the field is `NOEVENT`"]
#[inline]
pub fn is_no_event(&self) -> bool {
*self == GIF1R::NOEVENT
}
#[doc = "Checks if the value of the field is `EVENT`"]
#[inline]
pub fn is_event(&self) -> bool {
*self == GIF1R::EVENT
}
}
#[doc = "Possible values of the field `TCIF1`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum TCIF1R {
#[doc = "No transfer complete event"]
NOTCOMPLETE,
#[doc = "A transfer complete event has occured"]
COMPLETE,
}
impl TCIF1R {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
TCIF1R::NOTCOMPLETE => false,
TCIF1R::COMPLETE => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> TCIF1R {
match value {
false => TCIF1R::NOTCOMPLETE,
true => TCIF1R::COMPLETE,
}
}
#[doc = "Checks if the value of the field is `NOTCOMPLETE`"]
#[inline]
pub fn is_not_complete(&self) -> bool {
*self == TCIF1R::NOTCOMPLETE
}
#[doc = "Checks if the value of the field is `COMPLETE`"]
#[inline]
pub fn is_complete(&self) -> bool {
*self == TCIF1R::COMPLETE
}
}
#[doc = "Possible values of the field `HTIF1`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum HTIF1R {
#[doc = "No half transfer event"]
NOTHALT,
#[doc = "A half transfer event has occured"]
HALF,
}
impl HTIF1R {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
HTIF1R::NOTHALT => false,
HTIF1R::HALF => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> HTIF1R {
match value {
false => HTIF1R::NOTHALT,
true => HTIF1R::HALF,
}
}
#[doc = "Checks if the value of the field is `NOTHALT`"]
#[inline]
pub fn is_not_halt(&self) -> bool {
*self == HTIF1R::NOTHALT
}
#[doc = "Checks if the value of the field is `HALF`"]
#[inline]
pub fn is_half(&self) -> bool {
*self == HTIF1R::HALF
}
}
#[doc = "Possible values of the field `TEIF1`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum TEIF1R {
#[doc = "No transfer error"]
NOERROR,
#[doc = "A transfer error has occured"]
ERROR,
}
impl TEIF1R {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
TEIF1R::NOERROR => false,
TEIF1R::ERROR => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> TEIF1R {
match value {
false => TEIF1R::NOERROR,
true => TEIF1R::ERROR,
}
}
#[doc = "Checks if the value of the field is `NOERROR`"]
#[inline]
pub fn is_no_error(&self) -> bool {
*self == TEIF1R::NOERROR
}
#[doc = "Checks if the value of the field is `ERROR`"]
#[inline]
pub fn is_error(&self) -> bool {
*self == TEIF1R::ERROR
}
}
#[doc = "Possible values of the field `GIF2`"]
pub type GIF2R = GIF1R;
#[doc = "Possible values of the field `TCIF2`"]
pub type TCIF2R = TCIF1R;
#[doc = "Possible values of the field `HTIF2`"]
pub type HTIF2R = HTIF1R;
#[doc = "Possible values of the field `TEIF2`"]
pub type TEIF2R = TEIF1R;
#[doc = "Possible values of the field `GIF3`"]
pub type GIF3R = GIF1R;
#[doc = "Possible values of the field `TCIF3`"]
pub type TCIF3R = TCIF1R;
#[doc = "Possible values of the field `HTIF3`"]
pub type HTIF3R = HTIF1R;
#[doc = "Possible values of the field `TEIF3`"]
pub type TEIF3R = TEIF1R;
#[doc = "Possible values of the field `GIF4`"]
pub type GIF4R = GIF1R;
#[doc = "Possible values of the field `TCIF4`"]
pub type TCIF4R = TCIF1R;
#[doc = "Possible values of the field `HTIF4`"]
pub type HTIF4R = HTIF1R;
#[doc = "Possible values of the field `TEIF4`"]
pub type TEIF4R = TEIF1R;
#[doc = "Possible values of the field `GIF5`"]
pub type GIF5R = GIF1R;
#[doc = "Possible values of the field `TCIF5`"]
pub type TCIF5R = TCIF1R;
#[doc = "Possible values of the field `HTIF5`"]
pub type HTIF5R = HTIF1R;
#[doc = "Possible values of the field `TEIF5`"]
pub type TEIF5R = TEIF1R;
#[doc = "Possible values of the field `GIF6`"]
pub type GIF6R = GIF1R;
#[doc = "Possible values of the field `TCIF6`"]
pub type TCIF6R = TCIF1R;
#[doc = "Possible values of the field `HTIF6`"]
pub type HTIF6R = HTIF1R;
#[doc = "Possible values of the field `TEIF6`"]
pub type TEIF6R = TEIF1R;
#[doc = "Possible values of the field `GIF7`"]
pub type GIF7R = GIF1R;
#[doc = "Possible values of the field `TCIF7`"]
pub type TCIF7R = TCIF1R;
#[doc = "Possible values of the field `HTIF7`"]
pub type HTIF7R = HTIF1R;
#[doc = "Possible values of the field `TEIF7`"]
pub type TEIF7R = TEIF1R;
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bit 0 - Channel 1 Global interrupt flag"]
#[inline]
pub fn gif1(&self) -> GIF1R {
GIF1R::_from({
const MASK: bool = true;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 1 - Channel 1 Transfer Complete flag"]
#[inline]
pub fn tcif1(&self) -> TCIF1R {
TCIF1R::_from({
const MASK: bool = true;
const OFFSET: u8 = 1;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 2 - Channel 1 Half Transfer Complete flag"]
#[inline]
pub fn htif1(&self) -> HTIF1R {
HTIF1R::_from({
const MASK: bool = true;
const OFFSET: u8 = 2;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 3 - Channel 1 Transfer Error flag"]
#[inline]
pub fn teif1(&self) -> TEIF1R {
TEIF1R::_from({
const MASK: bool = true;
const OFFSET: u8 = 3;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 4 - Channel 2 Global interrupt flag"]
#[inline]
pub fn gif2(&self) -> GIF2R {
GIF2R::_from({
const MASK: bool = true;
const OFFSET: u8 = 4;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 5 - Channel 2 Transfer Complete flag"]
#[inline]
pub fn tcif2(&self) -> TCIF2R {
TCIF2R::_from({
const MASK: bool = true;
const OFFSET: u8 = 5;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 6 - Channel 2 Half Transfer Complete flag"]
#[inline]
pub fn htif2(&self) -> HTIF2R {
HTIF2R::_from({
const MASK: bool = true;
const OFFSET: u8 = 6;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 7 - Channel 2 Transfer Error flag"]
#[inline]
pub fn teif2(&self) -> TEIF2R {
TEIF2R::_from({
const MASK: bool = true;
const OFFSET: u8 = 7;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 8 - Channel 3 Global interrupt flag"]
#[inline]
pub fn gif3(&self) -> GIF3R {
GIF3R::_from({
const MASK: bool = true;
const OFFSET: u8 = 8;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 9 - Channel 3 Transfer Complete flag"]
#[inline]
pub fn tcif3(&self) -> TCIF3R {
TCIF3R::_from({
const MASK: bool = true;
const OFFSET: u8 = 9;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 10 - Channel 3 Half Transfer Complete flag"]
#[inline]
pub fn htif3(&self) -> HTIF3R {
HTIF3R::_from({
const MASK: bool = true;
const OFFSET: u8 = 10;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 11 - Channel 3 Transfer Error flag"]
#[inline]
pub fn teif3(&self) -> TEIF3R {
TEIF3R::_from({
const MASK: bool = true;
const OFFSET: u8 = 11;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 12 - Channel 4 Global interrupt flag"]
#[inline]
pub fn gif4(&self) -> GIF4R {
GIF4R::_from({
const MASK: bool = true;
const OFFSET: u8 = 12;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 13 - Channel 4 Transfer Complete flag"]
#[inline]
pub fn tcif4(&self) -> TCIF4R {
TCIF4R::_from({
const MASK: bool = true;
const OFFSET: u8 = 13;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 14 - Channel 4 Half Transfer Complete flag"]
#[inline]
pub fn htif4(&self) -> HTIF4R {
HTIF4R::_from({
const MASK: bool = true;
const OFFSET: u8 = 14;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 15 - Channel 4 Transfer Error flag"]
#[inline]
pub fn teif4(&self) -> TEIF4R {
TEIF4R::_from({
const MASK: bool = true;
const OFFSET: u8 = 15;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 16 - Channel 5 Global interrupt flag"]
#[inline]
pub fn gif5(&self) -> GIF5R {
GIF5R::_from({
const MASK: bool = true;
const OFFSET: u8 = 16;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 17 - Channel 5 Transfer Complete flag"]
#[inline]
pub fn tcif5(&self) -> TCIF5R {
TCIF5R::_from({
const MASK: bool = true;
const OFFSET: u8 = 17;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 18 - Channel 5 Half Transfer Complete flag"]
#[inline]
pub fn htif5(&self) -> HTIF5R {
HTIF5R::_from({
const MASK: bool = true;
const OFFSET: u8 = 18;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 19 - Channel 5 Transfer Error flag"]
#[inline]
pub fn teif5(&self) -> TEIF5R {
TEIF5R::_from({
const MASK: bool = true;
const OFFSET: u8 = 19;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 20 - Channel 6 Global interrupt flag"]
#[inline]
pub fn gif6(&self) -> GIF6R {
GIF6R::_from({
const MASK: bool = true;
const OFFSET: u8 = 20;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 21 - Channel 6 Transfer Complete flag"]
#[inline]
pub fn tcif6(&self) -> TCIF6R {
TCIF6R::_from({
const MASK: bool = true;
const OFFSET: u8 = 21;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 22 - Channel 6 Half Transfer Complete flag"]
#[inline]
pub fn htif6(&self) -> HTIF6R {
HTIF6R::_from({
const MASK: bool = true;
const OFFSET: u8 = 22;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 23 - Channel 6 Transfer Error flag"]
#[inline]
pub fn teif6(&self) -> TEIF6R {
TEIF6R::_from({
const MASK: bool = true;
const OFFSET: u8 = 23;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 24 - Channel 7 Global interrupt flag"]
#[inline]
pub fn gif7(&self) -> GIF7R {
GIF7R::_from({
const MASK: bool = true;
const OFFSET: u8 = 24;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 25 - Channel 7 Transfer Complete flag"]
#[inline]
pub fn tcif7(&self) -> TCIF7R {
TCIF7R::_from({
const MASK: bool = true;
const OFFSET: u8 = 25;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 26 - Channel 7 Half Transfer Complete flag"]
#[inline]
pub fn htif7(&self) -> HTIF7R {
HTIF7R::_from({
const MASK: bool = true;
const OFFSET: u8 = 26;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 27 - Channel 7 Transfer Error flag"]
#[inline]
pub fn teif7(&self) -> TEIF7R {
TEIF7R::_from({
const MASK: bool = true;
const OFFSET: u8 = 27;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
}
| true
|
c3194b3fc108afde73042f25c19221cf1d9c6ba1
|
Rust
|
dennisss/dacha
|
/pkg/image/src/format/jpeg/stuffed.rs
|
UTF-8
| 1,652
| 3.203125
| 3
|
[
"Apache-2.0"
] |
permissive
|
use std::io::{Read, Write};
pub struct StuffedReader<'a, T: Read> {
inner: &'a mut T,
}
impl<'a, T: Read> StuffedReader<'a, T> {
pub fn new(inner: &'a mut T) -> Self {
Self { inner }
}
}
impl<'a, T: Read> Read for StuffedReader<'a, T> {
fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
if buf.len() != 1 {
return Err(std::io::Error::new(
std::io::ErrorKind::InvalidInput,
"Only reading one byte at a time is currently supported",
));
}
{
let n = self.inner.read(buf)?;
if n == 0 {
return Ok(0);
}
}
if buf[0] == 0xff {
let mut temp = [0u8; 1];
let n = self.inner.read(&mut temp)?;
if n != 1 || temp[0] != 0x00 {
return Err(std::io::Error::new(
std::io::ErrorKind::InvalidData,
"Expected 0xFF to be stuffed by 0x00",
));
}
}
Ok((1))
}
}
pub struct StuffedWriter<'a, T: Write> {
inner: &'a mut T,
}
impl<'a, T: Write> StuffedWriter<'a, T> {
pub fn new(inner: &'a mut T) -> Self {
Self { inner }
}
}
impl<'a, T: Write> Write for StuffedWriter<'a, T> {
fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
for v in buf.iter().cloned() {
self.inner.write(&[v])?;
if v == 0xff {
self.inner.write(&[0])?;
}
}
Ok(buf.len())
}
fn flush(&mut self) -> std::io::Result<()> {
self.inner.flush()
}
}
| true
|
4ee1291ee4d308cf71c2bdfdeb7f0bf8d6801ddc
|
Rust
|
vorner/spirit
|
/src/app.rs
|
UTF-8
| 5,881
| 3.21875
| 3
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
//! The running application part.
//!
//! The convenient way to manage the application runtime is through
//! [`Builder::run`][crate::SpiritBuilder::run]. If more flexibility is needed, the
//! [`Builder::build`][crate::SpiritBuilder::build] can be used instead. That method returns the
//! [`App`][crate::app::App] object, representing the application runner. The application can then
//! be run at any later time, as convenient.
use std::process;
use std::sync::Arc;
use std::thread;
use log::debug;
use serde::de::DeserializeOwned;
use structopt::StructOpt;
use crate::bodies::{InnerBody, WrapBody};
use crate::error;
use crate::spirit::Spirit;
use crate::terminate_guard::TerminateGuard;
use crate::utils::FlushGuard;
use crate::AnyError;
/// The running application part.
///
/// This is returned by [`Builder::build`][crate::SpiritBuilder::build] and represents the rest of
/// the application runtime except the actual application body. It can be used to run at any later
/// time, after the spirit has been created.
///
/// This carries all the [around-bodies][crate::Extensible::run_around] and
/// [before-bodies][crate::Extensible::run_before]. If you run the application body directly, not
/// through this, some of the pipelines or extensions might not work as expected.
///
/// The [`Builder::run`][crate::SpiritBuilder::run] is just a convenient wrapper around this. Note
/// that that one handles and logs errors from the application startup as well as from its runtime.
/// Here it is up to the caller to handle the startup errors.
///
/// # Examples
///
/// ```rust
/// use spirit::{AnyError, Empty, Spirit};
/// use spirit::prelude::*;
///
/// # fn main() -> Result<(), AnyError> {
/// Spirit::<Empty, Empty>::new()
/// .build(true)?
/// .run_term(|| {
/// println!("Hello world");
/// Ok(())
/// });
/// # Ok(())
/// # }
/// ```
pub struct App<O, C> {
spirit: Arc<Spirit<O, C>>,
inner: InnerBody,
wrapper: WrapBody,
}
impl<O, C> App<O, C>
where
O: StructOpt + Send + Sync + 'static,
C: DeserializeOwned + Send + Sync + 'static,
{
pub(crate) fn new(spirit: Arc<Spirit<O, C>>, inner: InnerBody, wrapper: WrapBody) -> Self {
Self {
spirit,
inner,
wrapper,
}
}
/// Access to the built spirit object.
///
/// The object can be used to manipulate the runtime of the application, access the current
/// configuration and register further callbacks (and extensions and pipelines).
///
/// Depending on your needs, you may pass it to the closure started with [`run`][App::run] or
/// even placed into some kind of global storage.
pub fn spirit(&self) -> &Arc<Spirit<O, C>> {
&self.spirit
}
/// Run the application with provided body.
///
/// This will run the provided body. However, it'll wrap it in all the
/// [around-bodies][crate::Extensible::run_around] and precede it with all the
/// [before-bodies][crate::Extensible::run_before]. If any of these fail, or if the `body`
/// fails, the error is propagated (and further bodies are not started).
///
/// Furthermore, depending on the [`autojoin_bg_thread`][crate::Extensible::autojoin_bg_thread]
/// configuration, termination and joining of the background thread may be performed. If the
/// body errors, termination is done unconditionally (which may be needed in some corner cases
/// to not deadlock on error).
///
/// In other words, unless you have very special needs, this is how you actually invoke the
/// application itself.
///
/// Any errors are simply returned and it is up to the caller to handle them somehow.
pub fn run<B>(self, body: B) -> Result<(), AnyError>
where
B: FnOnce() -> Result<(), AnyError> + Send + 'static,
{
debug!("Running bodies");
let _flush = FlushGuard;
struct ScopeGuard<F: FnOnce()>(Option<F>);
impl<F: FnOnce()> Drop for ScopeGuard<F> {
fn drop(&mut self) {
self.0.take().expect("Drop called twice")();
}
}
let spirit = &self.spirit;
let _thread = ScopeGuard(Some(|| {
if thread::panicking() {
spirit.terminate();
}
spirit.maybe_autojoin_bg_thread();
}));
let inner = self.inner;
let inner = move || inner().and_then(|()| body());
let result = (self.wrapper)(Box::new(inner));
if result.is_err() {
self.spirit.terminate();
}
result
}
/// Similar to [`run`][App::run], but with error handling.
///
/// This calls the [`run`][App::run]. However, if there are any errors, they are logged and the
/// application terminates with non-zero exit code.
pub fn run_term<B>(self, body: B)
where
B: FnOnce() -> Result<(), AnyError> + Send + 'static,
{
let flush = FlushGuard;
if error::log_errors("top-level", || self.run(body)).is_err() {
drop(flush);
process::exit(1);
}
}
/// Run the application in a background thread for testing purposes.
///
/// This'll run the application and return an RAII guard. That guard can be used to access the
/// [Spirit] and manipulate it. It also terminates the application and background thread when
/// dropped.
///
/// This is for testing purposes (it panics if there are errors). See the [testing guide].
///
/// testing guide: crate::guide::testing
pub fn run_test<B>(self, body: B) -> TerminateGuard<O, C>
where
B: FnOnce() -> Result<(), AnyError> + Send + 'static,
{
let spirit = Arc::clone(self.spirit());
let bg_thread = thread::spawn(move || self.run(body));
TerminateGuard::new(spirit, bg_thread)
}
}
| true
|
2b7587c1e7bb7bd55ddc38f1c69b12239962b039
|
Rust
|
Stanislav-Lapata/oxide-auth
|
/src/frontends/rouille.rs
|
UTF-8
| 3,869
| 2.734375
| 3
|
[
"MIT"
] |
permissive
|
//! Offers bindings for the code_grant module with rouille servers.
//!
//! Following the simplistic and minimal style of rouille, this module defines only the
//! implementations for `WebRequest` and `WebResponse` and re-exports the available flows.
extern crate rouille;
extern crate serde_urlencoded;
use std::borrow::Cow;
use endpoint::{QueryParameter, WebRequest, WebResponse};
use self::rouille::{Request, Response, ResponseBody};
use url::Url;
// In the spirit of simplicity, this module does not implement any wrapper structures. In order to
// allow efficient and intuitive usage, we simply re-export common structures.
pub use frontends::simple::endpoint::{FnSolicitor, Generic as GenericEndpoint, Vacant};
/// Something went wrong with the rouille http request or response.
#[derive(Debug)]
pub enum WebError {
/// A parameter was encoded incorrectly.
///
/// This may happen for example due to a query parameter that is not valid utf8 when the query
/// parameters are necessary for OAuth processing.
Encoding,
}
impl<'a> WebRequest for &'a Request {
type Error = WebError;
type Response = Response;
fn query(&mut self) -> Result<Cow<dyn QueryParameter + 'static>, Self::Error> {
let query = self.raw_query_string();
let data = serde_urlencoded::from_str(query)
.map_err(|_| WebError::Encoding)?;
Ok(Cow::Owned(data))
}
fn urlbody(&mut self) -> Result<Cow<dyn QueryParameter + 'static>, Self::Error> {
match self.header("Content-Type") {
None | Some("application/x-www-form-urlencoded") => (),
_ => return Err(WebError::Encoding),
}
let body = self.data().ok_or(WebError::Encoding)?;
let data = serde_urlencoded::from_reader(body)
.map_err(|_| WebError::Encoding)?;
Ok(Cow::Owned(data))
}
fn authheader(&mut self) -> Result<Option<Cow<str>>, Self::Error> {
Ok(self.header("Authorization").map(|st| st.into()))
}
}
impl WebResponse for Response {
type Error = WebError;
fn ok(&mut self) -> Result<(), Self::Error> {
self.status_code = 200;
Ok(())
}
fn redirect(&mut self, url: Url) -> Result<(), Self::Error> {
self.status_code = 302;
self.headers.retain(|header| !header.0.eq_ignore_ascii_case("Location"));
self.headers.push(("Location".into(), url.into_string().into()));
Ok(())
}
fn client_error(&mut self) -> Result<(), Self::Error> {
self.status_code = 400;
Ok(())
}
fn unauthorized(&mut self, kind: &str) -> Result<(), Self::Error> {
self.status_code = 401;
self.headers.retain(|header| !header.0.eq_ignore_ascii_case("www-authenticate"));
self.headers.push(("WWW-Authenticate".into(), kind.to_string().into()));
Ok(())
}
fn body_text(&mut self, text: &str) -> Result<(), Self::Error> {
self.headers.retain(|header| !header.0.eq_ignore_ascii_case("Content-Type"));
self.headers.push(("Content-Type".into(), "text/plain".into()));
self.data = ResponseBody::from_string(text);
Ok(())
}
fn body_json(&mut self, data: &str) -> Result<(), Self::Error> {
self.headers.retain(|header| !header.0.eq_ignore_ascii_case("Content-Type"));
self.headers.push(("Content-Type".into(), "application/json".into()));
self.data = ResponseBody::from_string(data);
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn multi_query() {
let mut request = &Request::fake_http("GET", "/authorize?fine=val¶m=a¶m=b", vec![], vec![]);
let query = WebRequest::query(&mut request).unwrap();
assert_eq!(Some(Cow::Borrowed("val")), query.unique_value("fine"));
assert_eq!(None, query.unique_value("param"));
}
}
| true
|
ea7da728d5d91ba8b1c6d1e10950d7368b8d3992
|
Rust
|
d3v3l0/futuretea-whyrust
|
/jsonserde/src/main.rs
|
UTF-8
| 912
| 2.515625
| 3
|
[] |
no_license
|
use serde::*;
use serde_json;
#[derive(Serialize,Deserialize,Debug,Clone)]
struct Student(String);
#[derive(Serialize,Deserialize,Debug,Clone)]
struct Professor(String);
#[derive(Serialize,Deserialize,Debug,Clone)]
struct FutureData {
phd_students: Vec<Student>,
professors: Vec<Professor>,
}
fn main() {
let fds = FutureData {
phd_students: vec![
"cody", "deeptir", "egan1",
"fabuzaid", "kaisheng", "kexin",
"kraftp", "pratiksha", "sahaana",
"shoumik", "ddkang", "deepakn94",
"jamesjoethomas", "keshav",
"ankit", "saachi", "justin",
"swetha", "animesh"
].into_iter().map(|e| Student(e.to_string())).collect(),
professors: vec!["matei", "pbailis"].into_iter()
.map(|e| Professor(e.to_string())).collect(),
};
println!("{}", serde_json::to_string(&fds).unwrap());
}
| true
|
956068f01f7420079c84753bb095ed034c8dfc08
|
Rust
|
uiamn/tar_archiver
|
/src/main.rs
|
UTF-8
| 3,988
| 2.75
| 3
|
[] |
no_license
|
use std::io::Write;
use std::io::Read;
use std::os::unix::fs::MetadataExt;
struct Header {
name: String,
mode: String,
uid: u32,
gid: u32,
mtime: u64,
typeflag: [char; 1],
size: u64
}
fn octal(n: u64, pad: usize) -> String {
format!("{:0>pad$}", format!("{:o}", n), pad=pad)
}
// TODO: typeflag
fn generate_header(path: &std::path::Path, typeflag: [char; 1]) -> Header {
let meta = std::fs::metadata(path).unwrap();
Header {
name: path.to_str().unwrap().to_string(),
mode: format!("{:0>7}\0", format!("{:o}", meta.mode())),
uid: meta.uid(),
gid: meta.uid(),
mtime: meta.mtime().unsigned_abs(),
typeflag: typeflag,
size: meta.len()
}
}
fn create_tar_headers(org_path: &str) -> Vec<Header> {
let path = std::path::Path::new(org_path);
if path.is_dir() {
let mut v = vec![generate_header(path, ['5'])];
for p in std::fs::read_dir(path).unwrap() {
v.append(&mut create_tar_headers(p.unwrap().path().to_str().unwrap()));
}
v
} else {
vec![generate_header(path, ['0'])]
}
}
fn write_padding(f: &mut std::io::BufWriter<std::fs::File>, len: usize) {
f.write_all((std::iter::repeat('\0').take(len).collect::<String>()).as_bytes()).unwrap();
}
fn write_header(f: &mut std::io::BufWriter<std::fs::File>, h: &Header) {
let mut header_bytes: std::vec::Vec<u8> = std::vec::Vec::new();
// name: \0で終はる
header_bytes.append(&mut format!("{:\0<100}", String::from(&h.name)).into_bytes());
// mode: \0で終はる
header_bytes.append(&mut String::from(&h.mode).into_bytes());
// uid, gid: \0で終はる サイズ8
header_bytes.append(&mut format!("{}\0", octal(h.uid.into(), 8 - 1)).into_bytes());
header_bytes.append(&mut format!("{}\0", octal(h.gid.into(), 8 - 1)).into_bytes());
// size: \0で終はる サイズ12
header_bytes.append(&mut format!("{}\0", octal(h.size, 12 - 1)).into_bytes());
// mtime: \0で終はる サイズ12
header_bytes.append(&mut format!("{}\0", octal(h.mtime.into(), 12 - 1)).into_bytes());
// checksumは空白で埋まってゐると仮定して計算する
// サイズ8 null+空白で終はる
header_bytes.append(&mut vec![' ' as u8; 8]);
// typeflag
header_bytes.append(&mut vec![h.typeflag[0] as u8]);
// linkname
header_bytes.append(&mut vec![0; 100]);
// magic, version
header_bytes.append(&mut vec!['u' as u8, 's' as u8, 't' as u8, 'a' as u8, 'r' as u8, ' ' as u8]);
header_bytes.append(&mut vec![' ' as u8, '\0' as u8]);
// uname, gname
header_bytes.append(&mut vec![0; 32]);
header_bytes.append(&mut vec![0; 32]);
// devmajor, devminor
header_bytes.append(&mut vec![0; 8]);
header_bytes.append(&mut vec![0; 8]);
// prefix
header_bytes.append(&mut vec![0; 155]);
// padding
header_bytes.append(&mut vec![0; 12]);
// calc checksum
let mut checksum: u32 = 0;
for &b in &header_bytes {
checksum += b as u32;
}
let checksum = octal(checksum.into(), 6);
for i in 0..6 {
header_bytes[148+i] = checksum.chars().nth(i).unwrap() as u8;
}
f.write_all(&header_bytes).unwrap();
}
fn generate_tar_file(dist: &str, headers: Vec<Header>) {
let mut f = std::io::BufWriter::new(std::fs::File::create(dist).unwrap());
for header in headers {
write_header(&mut f, &header);
if header.typeflag[0] == '0' {
let mut rf = std::fs::File::open(std::path::Path::new(&header.name)).unwrap();
let mut buf = Vec::new();
let _ = rf.read_to_end(&mut buf);
f.write_all(&buf).unwrap();
write_padding(&mut f, 512usize - (&header.size % 512) as usize);
}
}
// end of archive
write_padding(&mut f, 1024);
}
fn main() {
let a = create_tar_headers("test_dir");
generate_tar_file("hoge.tar", a);
}
| true
|
d2b2ddc124305897129599296834f9e7c4c65ea3
|
Rust
|
aylax/mathze
|
/recursion/backtracking/n_queens/solution.rs
|
UTF-8
| 815
| 3.15625
| 3
|
[
"MIT"
] |
permissive
|
impl Solution {
pub fn total_n_queens(n: i32) -> i32 {
queen(n).len() as i32
}
}
type Queen = Vec<Vec<i32>>;
fn queen(n: i32) -> Queen {
de(n, n)
}
fn de(n: i32, u: i32) -> Queen {
if u == 0 {
return vec![vec![]];
}
de(n, u - 1)
.iter()
.map(|xs| {
(1..=n)
.filter(|&x| safe(x, &xs))
.map(|x| {
let mut ret = vec![x];
ret.extend_from_slice(&xs);
ret
})
.collect::<Queen>()
})
.flatten()
.collect::<Queen>()
}
fn safe(n: i32, xs: &[i32]) -> bool {
!{ (1..).zip(xs).any(|(col, &row)| has(n, col, row)) }
}
fn has(n: i32, col: i32, row: i32) -> bool {
n == row || (row - n).abs() == col
}
| true
|
525c1df73f46695ada35169523655e78522911ef
|
Rust
|
mobilipia/milestone-core
|
/cli/src/lib.rs
|
UTF-8
| 11,813
| 2.6875
| 3
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
// Copyright 2020 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Helper crate for secure and convenient configuration of the Exonum nodes.
//!
//! `exonum-cli` supports multi-stage configuration process made with safety in mind. It involves
//! 4 steps (or stages) and allows to configure and run multiple blockchain nodes without
//! need in exchanging private keys between administrators.
//!
//! # How to Run the Network
//!
//! 1. Generate common (template) part of the nodes configuration using `generate-template` command.
//! Generated `.toml` file must be spread among all the nodes and must be used in the following
//! configuration step.
//! 2. Generate public and secret (private) parts of the node configuration using `generate-config`
//! command. At this step, Exonum will generate master key from which consensus and service
//! validator keys are derived. Master key is stored in the encrypted file. Consensus secret key
//! is used for communications between the nodes, while service secret key is used
//! mainly to sign transactions generated by the node. Both secret keys may be encrypted with a
//! password. The public part of the node configuration must be spread among all nodes, while the
//! secret part must be only accessible by the node administrator only.
//! 3. Generate final node configuration using `finalize` command. Exonum combines secret part of
//! the node configuration with public configurations of every other node, producing a single
//! configuration file with all the necessary node and network settings.
//! 4. Use `run` command and provide it with final node configuration file produced at the previous
//! step. If the secret keys are protected with passwords, the user need to enter the password.
//! Running node will automatically connect to other nodes in the network using IP addresses from
//! public parts of the node configurations.
//!
//! ## Additional Commands
//!
//! `exonum-cli` also supports additional CLI commands for performing maintenance actions by node
//! administrators and easier debugging.
//!
//! * `run-dev` command automatically generates network configuration with a single node and runs
//! it. This command can be useful for fast testing of the services during development process.
//! * `maintenance` command allows to clear node's consensus messages with `clear-cache`, and
//! restart node's service migration script with `restart-migration`.
//!
//! ## How to Extend Parameters
//!
//! `exonum-cli` allows to extend the list of the parameters for any command and even add new CLI
//! commands with arbitrary behavior. To do so, you need to implement a structure with a list of
//! additional parameters and use `flatten` macro attribute of [`serde`][serde] and
//! [`structopt`][structopt] libraries.
//!
//! ```ignore
//! #[derive(Serialize, Deserialize, StructOpt)]
//! struct MyRunCommand {
//! #[serde(flatten)]
//! #[structopt(flatten)]
//! default: Run
//! /// My awesome parameter
//! secret_number: i32
//! }
//! ```
//!
//! You can also create own list of commands by implementing an enum with a similar principle:
//!
//! ```ignore
//! #[derive(StructOpt)]
//! enum MyCommands {
//! #[structopt(name = "run")
//! DefaultRun(Run),
//! #[structopt(name = "my-run")
//! MyAwesomeRun(MyRunCommand),
//! }
//! ```
//!
//! While implementing custom behavior for your commands, you may use
//! [`StandardResult`](./command/enum.StandardResult.html) enum for
//! accessing node configuration files created and filled by the standard Exonum commands.
//!
//! [serde]: https://crates.io/crates/serde
//! [structopt]: https://crates.io/crates/structopt
#![deny(missing_docs)]
pub use crate::config_manager::DefaultConfigManager;
pub use structopt;
use exonum::{
blockchain::config::{GenesisConfig, GenesisConfigBuilder, InstanceInitParams},
merkledb::RocksDB,
runtime::{RuntimeInstance, WellKnownRuntime},
};
use exonum_explorer_service::ExplorerFactory;
use exonum_node::{Node, NodeBuilder as CoreNodeBuilder};
use exonum_rust_runtime::{DefaultInstance, RustRuntimeBuilder, ServiceFactory};
use exonum_supervisor::{Supervisor, SupervisorConfig};
use exonum_system_api::SystemApiPlugin;
use structopt::StructOpt;
use tempfile::TempDir;
use std::{env, ffi::OsString, iter, path::PathBuf};
use crate::command::{run::NodeRunConfig, Command, ExonumCommand, StandardResult};
pub mod command;
pub mod config;
pub mod io;
pub mod password;
mod config_manager;
/// Rust-specific node builder used for constructing a node with a list
/// of provided services.
#[derive(Debug)]
pub struct NodeBuilder {
rust_runtime: RustRuntimeBuilder,
external_runtimes: Vec<RuntimeInstance>,
builtin_instances: Vec<InstanceInitParams>,
args: Option<Vec<OsString>>,
temp_dir: Option<TempDir>,
}
impl Default for NodeBuilder {
fn default() -> Self {
Self::new()
}
}
impl NodeBuilder {
/// Creates a new builder.
pub fn new() -> Self {
Self {
rust_runtime: RustRuntimeBuilder::new()
.with_factory(Supervisor)
.with_factory(ExplorerFactory),
external_runtimes: vec![],
builtin_instances: vec![],
args: None,
temp_dir: None,
}
}
/// Creates a new builder with the provided command-line arguments. The path
/// to the current executable **does not** need to be specified as the first argument.
#[doc(hidden)] // unstable
pub fn with_args<I>(args: I) -> Self
where
I: IntoIterator,
I::Item: Into<OsString>,
{
let mut this = Self::new();
let executable = env::current_exe()
.map(PathBuf::into_os_string)
.unwrap_or_else(|_| "node".into());
let all_args = iter::once(executable)
.chain(args.into_iter().map(Into::into))
.collect();
this.args = Some(all_args);
this
}
/// Creates a single-node development network with default settings. The node stores
/// its data in a temporary directory, which is automatically removed when the node is stopped.
///
/// # Return value
///
/// Returns an error if the temporary directory cannot be created.
pub fn development_node() -> Result<Self, failure::Error> {
let temp_dir = TempDir::new()?;
let mut this = Self::with_args(vec![
OsString::from("run-dev"),
OsString::from("--artifacts-dir"),
temp_dir.path().into(),
]);
this.temp_dir = Some(temp_dir);
Ok(this)
}
/// Adds new Rust service to the list of available services.
pub fn with_rust_service(mut self, service: impl ServiceFactory) -> Self {
self.rust_runtime = self.rust_runtime.with_factory(service);
self
}
/// Adds a new `Runtime` to the list of available runtimes.
///
/// Note that you don't have to add the Rust runtime, since it is included by default.
pub fn with_external_runtime(mut self, runtime: impl WellKnownRuntime) -> Self {
self.external_runtimes.push(runtime.into());
self
}
/// Adds a service instance that will be available immediately after creating a genesis block.
///
/// For Rust services, the service factory needs to be separately supplied
/// via [`with_rust_service`](#method.with_rust_service).
pub fn with_instance(mut self, instance: impl Into<InstanceInitParams>) -> Self {
self.builtin_instances.push(instance.into());
self
}
/// Adds a default Rust service instance that will be available immediately after creating a
/// genesis block.
pub fn with_default_rust_service(self, service: impl DefaultInstance) -> Self {
self.with_instance(service.default_instance())
.with_rust_service(service)
}
/// Executes a command received from the command line.
///
/// # Return value
///
/// Returns:
///
/// - `Ok(Some(_))` if the command lead to the node creation
/// - `Ok(None)` if the command executed successfully and did not lead to node creation
/// - `Err(_)` if an error occurred during command execution
#[doc(hidden)] // unstable
pub fn execute_command(self) -> Result<Option<Node>, failure::Error> {
let command = if let Some(args) = self.args {
Command::from_iter(args)
} else {
Command::from_args()
};
if let StandardResult::Run(run_config) = command.execute()? {
let genesis_config = Self::genesis_config(&run_config, self.builtin_instances);
let db_options = &run_config.node_config.private_config.database;
let database = RocksDB::open(run_config.db_path, db_options)?;
let node_config_path = run_config.node_config_path.to_string_lossy();
let config_manager = DefaultConfigManager::new(node_config_path.into_owned());
let rust_runtime = self.rust_runtime;
let node_config = run_config.node_config.into();
let node_keys = run_config.node_keys;
let mut node_builder = CoreNodeBuilder::new(database, node_config, node_keys)
.with_genesis_config(genesis_config)
.with_config_manager(config_manager)
.with_plugin(SystemApiPlugin)
.with_runtime_fn(|channel| rust_runtime.build(channel.endpoints_sender()));
for runtime in self.external_runtimes {
node_builder = node_builder.with_runtime(runtime);
}
Ok(Some(node_builder.build()))
} else {
Ok(None)
}
}
/// Configures the node using parameters provided by user from stdin and then runs it.
pub fn run(mut self) -> Result<(), failure::Error> {
// Store temporary directory until the node is done.
let _temp_dir = self.temp_dir.take();
if let Some(node) = self.execute_command()? {
node.run()
} else {
Ok(())
}
}
fn genesis_config(
run_config: &NodeRunConfig,
default_instances: Vec<InstanceInitParams>,
) -> GenesisConfig {
let mut builder = GenesisConfigBuilder::with_consensus_config(
run_config.node_config.public_config.consensus.clone(),
);
// Add builtin services to genesis config.
builder = builder
.with_artifact(Supervisor.artifact_id())
.with_instance(Self::supervisor_service(&run_config))
.with_artifact(ExplorerFactory.artifact_id())
.with_instance(ExplorerFactory.default_instance());
// Add default instances.
for instance in default_instances {
builder = builder
.with_artifact(instance.instance_spec.artifact.clone())
.with_instance(instance)
}
builder.build()
}
fn supervisor_service(run_config: &NodeRunConfig) -> InstanceInitParams {
let mode = run_config
.node_config
.public_config
.general
.supervisor_mode
.clone();
Supervisor::builtin_instance(SupervisorConfig { mode })
}
}
| true
|
6381f931368c582d327147fbdfd4b06343c205bd
|
Rust
|
aQaTL/advent_of_code_2019
|
/day_18/src/main.rs
|
UTF-8
| 2,248
| 3.1875
| 3
|
[] |
no_license
|
use bit_set::BitSet;
use std::collections::{HashMap, HashSet, VecDeque};
fn main() {
let input = std::fs::read_to_string("day_18/input.txt").unwrap();
let mut grid = HashMap::<(i64, i64), char>::new();
let mut keys = Vec::<char>::new();
let mut start = (0, 0);
{
let (mut x, mut y) = (0, 0);
for cell in input.chars() {
if cell == '\n' {
y += 1;
x = 0;
continue;
} else {
grid.insert((x, y), cell);
}
if cell == '@' {
start = (x, y);
} else if cell.is_ascii_lowercase() {
keys.push(cell);
}
x += 1;
}
}
let mut robots = Vec::with_capacity(4);
{
for dir in &[(-1, -1), (-1, 1), (1, -1), (-1, 1)] {
let pos = (start.0 + dir.0, start.1 + dir.1);
grid.insert(pos, '@');
robots.push(pos);
}
for pos in (0..4).map(|i| move_point(start, i)) {
grid.insert(pos, '#');
}
grid.insert(start, '#');
}
let mut queue = VecDeque::<((i64, i64), BitSet)>::new();
let mut visited = HashMap::<((i64, i64), BitSet), i64>::new();
visited.insert((start, BitSet::with_capacity(keys.len())), 0);
queue.push_back((start, BitSet::with_capacity(keys.len())));
let mut moves_min = None;
while let Some(state) = queue.pop_front() {
let steps = match visited.get(&state) {
Some(s) => *s,
None => break,
};
if state.1.len() == keys.len() {
moves_min = Some(steps);
break;
}
for i in 0..4 {
let pos = move_point(state.0, i);
let c = match grid.get(&pos) {
Some(c) => *c,
None => continue,
};
if c == '#' {
continue;
}
if c.is_ascii_uppercase() && !state.1.contains((c as u8 - b'A') as usize) {
continue;
}
let mut new_state = (pos, state.1.clone());
if visited.contains_key(&new_state) {
continue;
}
if c.is_ascii_lowercase() && !new_state.1.contains((c as u8 - b'a') as usize) {
new_state.1.insert((c as u8 - b'a') as usize);
}
visited.insert(new_state.clone(), steps + 1);
queue.push_back(new_state);
}
}
println!("Part 2: {}", moves_min.unwrap());
}
fn move_point(pos: (i64, i64), dir: i64) -> (i64, i64) {
match dir {
0 => (pos.0, pos.1 - 1),
1 => (pos.0 + 1, pos.1),
2 => (pos.0, pos.1 + 1),
3 => (pos.0 - 1, pos.1),
_ => panic!(format!("Invalid dir: {}", dir)),
}
}
| true
|
d6d213aaadb7f5497715d5c43b82213e2a3874eb
|
Rust
|
GreenPix/lycan
|
/src/actor/mob.rs
|
UTF-8
| 1,979
| 2.625
| 3
|
[] |
no_license
|
use std::collections::HashSet;
use std::fmt::{self,Debug,Formatter};
use behaviour_tree::tree::BehaviourTreeNode;
use id::Id;
use actor::ActorId;
use entity::{Entity,EntityStore};
use messages::{self,Command,Notification,EntityOrder};
use scripts::{BehaviourTree};
use ai::{BehaviourTreeData,Context};
pub struct AiActor {
id: ActorId,
entity: Option<Id<Entity>>,
entities: HashSet<Id<Entity>>, // XXX: Do we really need this?
tree: BehaviourTree,
tree_data: BehaviourTreeData,
}
impl Debug for AiActor {
fn fmt(&self, f: &mut Formatter) -> Result<(),fmt::Error> {
let tree = "[behaviour tree]";
f.debug_struct("AiActor")
.field("id", &self.id)
.field("entities", &self.entities)
.field("tree", &tree)
.finish()
}
}
impl AiActor {
pub fn get_id(&self) -> ActorId {
self.id
}
pub fn get_commands(&mut self, _commands: &mut Vec<Command>) {
}
pub fn execute_orders(&mut self,
entities: &mut EntityStore,
_notifications: &mut Vec<Notification>,
_previous: &[Notification]) {
// Context should give access to storage / current game state
let me = match self.entity {
None => {
warn!("Trying to execute behaviour tree on AI without main entity {}", self.id);
return;
}
Some(me) => me,
};
let mut context = Context::new(me, entities, &mut self.tree_data);
self.tree.visit(&mut context);
}
pub fn register_entity(&mut self, entity: Id<Entity>) {
self.entity = Some(entity);
self.entities.insert(entity);
}
pub fn fake(tree: BehaviourTree) -> AiActor {
AiActor {
id: Id::new(),
entity: None,
entities: Default::default(),
tree: tree,
tree_data: BehaviourTreeData::new(),
}
}
}
| true
|
ecc2bc0036f219bac1dd3b57dbee836d109e7aa3
|
Rust
|
dprint/dprint
|
/crates/dprint/src/plugins/cache_manifest.rs
|
UTF-8
| 11,262
| 2.609375
| 3
|
[
"MIT"
] |
permissive
|
use serde::Deserialize;
use serde::Serialize;
use std::collections::HashMap;
use std::path::PathBuf;
use anyhow::Result;
use dprint_core::plugins::PluginInfo;
use super::implementations::WASMER_COMPILER_VERSION;
use crate::environment::Environment;
const PLUGIN_CACHE_SCHEMA_VERSION: usize = 8;
#[derive(Clone, Serialize, Deserialize, Debug, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct PluginCacheManifest {
schema_version: usize,
wasm_cache_version: String,
plugins: HashMap<String, PluginCacheManifestItem>,
}
impl PluginCacheManifest {
pub(super) fn new() -> PluginCacheManifest {
PluginCacheManifest {
schema_version: PLUGIN_CACHE_SCHEMA_VERSION,
wasm_cache_version: WASMER_COMPILER_VERSION.to_string(),
plugins: HashMap::new(),
}
}
pub fn add_item(&mut self, key: String, item: PluginCacheManifestItem) {
self.plugins.insert(key, item);
}
pub fn get_item(&self, key: &str) -> Option<&PluginCacheManifestItem> {
self.plugins.get(key)
}
pub fn remove_item(&mut self, key: &str) -> Option<PluginCacheManifestItem> {
self.plugins.remove(key)
}
fn is_different_schema(&self) -> bool {
self.schema_version != PLUGIN_CACHE_SCHEMA_VERSION
}
fn is_new_wasm_cache(&self) -> bool {
// bust when upgrading, but not downgrading
version_gt(&self.wasm_cache_version, WASMER_COMPILER_VERSION)
}
}
fn version_gt(file: &str, current: &str) -> bool {
use std::cmp::Ordering;
for (file, current) in file.split('.').zip(current.split('.')) {
if let Ok(file) = file.parse::<usize>() {
if let Ok(current) = current.parse::<usize>() {
match current.cmp(&file) {
Ordering::Greater => return true,
Ordering::Less => return false,
Ordering::Equal => {
// keep searching
}
}
}
}
}
false // equal
}
#[derive(Clone, Serialize, Deserialize, Debug, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct PluginCacheManifestItem {
/// Created time in *seconds* since epoch.
pub created_time: u64,
#[serde(skip_serializing_if = "Option::is_none")]
pub file_hash: Option<u64>,
pub info: PluginInfo,
}
pub fn read_manifest(environment: &impl Environment) -> PluginCacheManifest {
return match try_deserialize(environment) {
Ok(manifest) => {
if manifest.is_different_schema() || manifest.is_new_wasm_cache() {
if manifest.is_different_schema() {
log_verbose!(environment, "Busting plugins cache due to different schema.");
}
if manifest.is_new_wasm_cache() {
log_verbose!(environment, "Busting plugins cache due to new wasm cache version.");
}
let _ = environment.remove_dir_all(environment.get_cache_dir().join("plugins"));
PluginCacheManifest::new()
} else {
manifest
}
}
Err(err) => {
log_verbose!(environment, "Busting plugins cache due to deserialization error: {:#}", err);
let _ = environment.remove_dir_all(environment.get_cache_dir().join("plugins"));
PluginCacheManifest::new()
}
};
fn try_deserialize(environment: &impl Environment) -> Result<PluginCacheManifest> {
let file_path = get_manifest_file_path(environment);
match environment.read_file(file_path) {
Ok(text) => Ok(serde_json::from_str::<PluginCacheManifest>(&text)?),
Err(_) => Ok(PluginCacheManifest::new()),
}
}
}
pub fn write_manifest(manifest: &PluginCacheManifest, environment: &impl Environment) -> Result<()> {
let file_path = get_manifest_file_path(environment);
let serialized_manifest = serde_json::to_string(&manifest)?;
environment.atomic_write_file_bytes(file_path, serialized_manifest.as_bytes())
}
fn get_manifest_file_path(environment: &impl Environment) -> PathBuf {
let cache_dir = environment.get_cache_dir();
cache_dir.join("plugin-cache-manifest.json")
}
#[cfg(test)]
mod test {
use super::*;
use crate::environment::TestEnvironment;
use pretty_assertions::assert_eq;
#[test]
fn should_read_ok_manifest() {
let environment = TestEnvironment::new();
environment
.write_file(
&environment.get_cache_dir().join("plugin-cache-manifest.json"),
r#"{
"schemaVersion": 8,
"wasmCacheVersion": "99.9.9",
"plugins": {
"a": {
"createdTime": 123,
"info": {
"name": "dprint-plugin-typescript",
"version": "0.1.0",
"configKey": "typescript",
"helpUrl": "help url",
"configSchemaUrl": "schema url"
}
},
"c": {
"createdTime": 456,
"fileHash": 10,
"info": {
"name": "dprint-plugin-json",
"version": "0.2.0",
"configKey": "json",
"helpUrl": "help url 2",
"configSchemaUrl": "schema url 2"
}
},
"cargo": {
"createdTime": 210530,
"fileHash": 1226,
"info": {
"name": "dprint-plugin-cargo",
"version": "0.2.1",
"configKey": "cargo",
"helpUrl": "cargo help url",
"configSchemaUrl": "cargo schema url",
"updateUrl": "cargo update url"
}
}
}
}"#,
)
.unwrap();
let mut expected_manifest = PluginCacheManifest::new();
// should not change a newer version
expected_manifest.wasm_cache_version = "99.9.9".to_string();
expected_manifest.add_item(
String::from("a"),
PluginCacheManifestItem {
created_time: 123,
file_hash: None,
info: PluginInfo {
name: "dprint-plugin-typescript".to_string(),
version: "0.1.0".to_string(),
config_key: "typescript".to_string(),
help_url: "help url".to_string(),
config_schema_url: "schema url".to_string(),
update_url: None,
},
},
);
expected_manifest.add_item(
String::from("c"),
PluginCacheManifestItem {
created_time: 456,
file_hash: Some(10),
info: PluginInfo {
name: "dprint-plugin-json".to_string(),
version: "0.2.0".to_string(),
config_key: "json".to_string(),
help_url: "help url 2".to_string(),
config_schema_url: "schema url 2".to_string(),
update_url: None,
},
},
);
expected_manifest.add_item(
String::from("cargo"),
PluginCacheManifestItem {
created_time: 210530,
file_hash: Some(1226),
info: PluginInfo {
name: "dprint-plugin-cargo".to_string(),
version: "0.2.1".to_string(),
config_key: "cargo".to_string(),
help_url: "cargo help url".to_string(),
config_schema_url: "cargo schema url".to_string(),
update_url: Some("cargo update url".to_string()),
},
},
);
assert_eq!(read_manifest(&environment), expected_manifest);
}
#[test]
fn should_cache_bust_for_old_wasm_cache_version() {
let environment = TestEnvironment::new();
environment
.write_file(
&environment.get_cache_dir().join("plugin-cache-manifest.json"),
r#"{
"schemaVersion": 8,
"wasmCacheVersion": "0.1.0",
"plugins": {
"a": {
"createdTime": 123,
"info": {
"name": "dprint-plugin-typescript",
"version": "0.1.0",
"configKey": "typescript",
"helpUrl": "help url",
"configSchemaUrl": "schema url"
}
}
}
}"#,
)
.unwrap();
// cache busts since the wasm cache version is old
let expected_manifest = PluginCacheManifest::new();
assert_eq!(read_manifest(&environment), expected_manifest);
}
#[test]
fn should_not_error_for_old_manifest() {
let environment = TestEnvironment::new();
environment
.write_file(
&environment.get_cache_dir().join("plugin-cache-manifest.json"),
r#"{
"schemaVersion": 1,
"plugins": {
"a": {
"createdTime": 123,
"info": {
"name": "dprint-plugin-typescript",
"version": "0.1.0",
"fileExtensions": [".ts"],
"helpUrl": "help url",
"configSchemaUrl": "schema url"
}
}
}
}"#,
)
.unwrap();
let expected_manifest = PluginCacheManifest::new();
assert_eq!(read_manifest(&environment), expected_manifest);
}
#[test]
fn should_have_empty_manifest_for_deserialization_error() {
let environment = TestEnvironment::new();
environment
.write_file(
&environment.get_cache_dir().join("plugin-cache-manifest.json"),
r#"{ "plugins": { "a": { file_name: "b", } } }"#,
)
.unwrap();
assert_eq!(read_manifest(&environment), PluginCacheManifest::new());
}
#[test]
fn should_deal_with_non_existent_manifest() {
let environment = TestEnvironment::new();
assert_eq!(read_manifest(&environment), PluginCacheManifest::new());
assert_eq!(environment.take_stderr_messages().len(), 0);
}
#[test]
fn it_save_manifest() {
let environment = TestEnvironment::new();
let mut manifest = PluginCacheManifest::new();
manifest.add_item(
String::from("a"),
PluginCacheManifestItem {
created_time: 456,
file_hash: Some(256),
info: PluginInfo {
name: "dprint-plugin-typescript".to_string(),
version: "0.1.0".to_string(),
config_key: "typescript".to_string(),
help_url: "help url".to_string(),
config_schema_url: "schema url".to_string(),
update_url: Some("update url".to_string()),
},
},
);
manifest.add_item(
String::from("b"),
PluginCacheManifestItem {
created_time: 456,
file_hash: None,
info: PluginInfo {
name: "dprint-plugin-json".to_string(),
version: "0.2.0".to_string(),
config_key: "json".to_string(),
help_url: "help url 2".to_string(),
config_schema_url: "schema url 2".to_string(),
update_url: None,
},
},
);
write_manifest(&manifest, &environment).unwrap();
// Just read and compare again because the hash map will serialize properties
// in a non-deterministic order.
assert_eq!(read_manifest(&environment), manifest);
}
#[test]
fn test_version_gt() {
assert!(!version_gt("1.1.0", "1.1.0"));
assert!(version_gt("1.1.0", "1.1.1"));
assert!(version_gt("1.1.0", "1.1.2"));
assert!(version_gt("1.1.0", "1.2.0"));
assert!(!version_gt("1.1.0", "1.0.0"));
assert!(version_gt("1.1.0", "2.1.0"));
assert!(!version_gt("1.1.0", "0.1.0"));
}
#[test]
fn is_new_wasm_cache() {
let mut manifest = PluginCacheManifest::new();
assert!(!manifest.is_new_wasm_cache());
manifest.wasm_cache_version = "0.0.1".to_string();
assert!(manifest.is_new_wasm_cache());
manifest.wasm_cache_version = "100.0.1".to_string();
assert!(!manifest.is_new_wasm_cache());
}
}
| true
|
1e67f710bc791c76bce0ade83f0d7c5090d255e8
|
Rust
|
burdges/lake
|
/Xolotl/src/keys/implicit.rs
|
UTF-8
| 751
| 2.671875
| 3
|
[] |
no_license
|
//! TODO: An [implicit certificate](https://en.wikipedia.org/wiki/Implicit_certificate)
//! scheme could shave 32 bytes off the `ROUTING_KEY_CERT_LENGTH`.
//! We must know that someone who compramises the node's long term
//! certificate for issuing routing keys, and some routing keys,
//! cannot compute later routing keys, but the security proof in [0]
//! should show that the certificate issuer cannot compramise alpha,
//! whence our desired security property follows.
//!
//! [0] Brown, Daniel R. L.; Gallant, Robert P.; Vanstone, Scott A.
//! "Provably Secure Implicit Certificate Schemes".
//! Financial Cryptography 2001. Lecture Notes in Computer Science.
//! Springer Berlin Heidelberg. 2339 (1): 156–165.
//! doi:10.1007/3-540-46088-8_15.
| true
|
d60c44a55dabc8a17636f0153391fc9172d57f67
|
Rust
|
anotak/riftwizardstats
|
/src/main.rs
|
UTF-8
| 7,647
| 2.609375
| 3
|
[
"MIT"
] |
permissive
|
mod parse;
mod types;
mod hashmap;
use crate::parse::*;
use crate::types::{Run,Realm, MergedRuns, Outcome};
use crate::hashmap::*;
use crate::hashmap::HashMapExtensions;
use clap::{crate_authors, crate_version};
use std::fs;
use std::path::*;
use std::io;
use std::io::BufRead;
use std::collections::HashMap;
use std::collections::HashSet;
use std::cmp;
use clap::{Arg, App};
fn main() {
let matches = App::new("Rift Wizard Stats")
.version(crate_version!())
.author(crate_authors!())
.about("Turns Rift Wizard logs into stats")
.arg(
Arg::with_name("SAVELOCATION")
.help("Location of Rift Wizard's saves folder")
// TODO - check registry for folder and so on?
.default_value(r#"C:\Program Files (x86)\Steam\steamapps\common\Rift Wizard\RiftWizard\saves\"#)
.index(1)
.multiple(true),
)
.get_matches();
let save_location_list : Vec<_> = matches.values_of("SAVELOCATION").unwrap().collect();
let all_runs =
{
let mut all_runs = None;
for save_location in save_location_list {
all_runs = {
let rift_wiz_save_folder = PathBuf::from(save_location);
let result = read_all_saves(rift_wiz_save_folder.into_boxed_path(), all_runs);
match result {
// TODO - add proper error handling
Err(err) => {println!("\nerror: {:?}", err); panic!("TODO add proper error handling"); },
Ok(r) => Some(r)
}
};
}
all_runs.unwrap()
};
print_merged_runs_info(all_runs);
}
#[derive(Debug)]
enum ReadSaveError
{
Io(io::Error),
Gather(GatherError),
IllegalFilename,
InvalidDirectory(String)
}
fn read_all_saves(save_folder : Box<Path>, merged_runs : Option<MergedRuns>) -> Result<MergedRuns, ReadSaveError>
{
if !save_folder.is_dir() {
return Err(ReadSaveError::InvalidDirectory(
save_folder.to_str().unwrap_or("failed to stringify Save Folder").to_string()
));
}
let mut merged_runs = match merged_runs {
Some(r) => r,
None => MergedRuns { ..Default::default() },
};
for entry in fs::read_dir(save_folder).map_err(ReadSaveError::Io)? {
let path = entry.map_err(ReadSaveError::Io)?.path();
if path.is_dir() {
let result = read_save(path.into_boxed_path());
match result {
Err(err) => println!("\nrealm read error: {:?}", err),
Ok(r) => { merged_runs = merged_runs.merge_run(generate_run_report(r)); }
}
}
}
Ok(merged_runs)
}
fn read_save(save_folder : Box<Path>) -> Result<Vec<Realm>, ReadSaveError>
{
if !save_folder.is_dir() {
return Err(ReadSaveError::InvalidDirectory(
save_folder.to_str().unwrap_or("failed to stringify Save Folder").to_string()
));
}
let mut realms = Vec::with_capacity(1);
for entry in fs::read_dir(save_folder).map_err(ReadSaveError::Io)? {
let path = entry.map_err(ReadSaveError::Io)?.path();
if path.is_file() {
match path.file_name() {
Some(filename) => {
if filename.to_str().ok_or(ReadSaveError::IllegalFilename)?.starts_with("stats")
&& filename.to_str().ok_or(ReadSaveError::IllegalFilename)?.ends_with(".txt")
{
let result = gather_stats_from_file(path.into_boxed_path()).map_err(ReadSaveError::Gather)?;
println!("{:?}", result);
realms.push(result);
/*match result {
Err(err) => println!("{:?}", err),
Ok(realm) => println!("{:?}", realm?)
}*/
} else {
println!("skipping {}", filename.to_str().unwrap());
}
},
None => println!("{} is not a file?", path.display())
}
//let filename = rift_wiz_save_folder.join("21/stats.level_25.txt").into_boxed_path();
}
}
Ok(realms)
}
fn print_merged_runs_info(merged_runs : MergedRuns)
{
println!("");
println!("====================");
println!("MERGED RUNS");
println!("{:?}", merged_runs);
let total_runs = merged_runs.num_victory + merged_runs.num_defeat + merged_runs.num_abandoned + merged_runs.num_unknown;
println!("runs: {} won, {} lost, {} abandoned, {} unknown / {} total", merged_runs.num_victory, merged_runs.num_defeat, merged_runs.num_abandoned, merged_runs.num_unknown, total_runs);
{
let dmg = lazy_init(merged_runs.damage_to_enemies);
println!("====================================");
println!("DAMAGE TO ENEMIES (TOP 10 SUM OF ALL RUNS)");
print_top_ten(&dmg);
println!("");
println!("mean damage per turn: {}", get_mean_per_turn(&dmg, merged_runs.turns_taken));
println!("");
}
{
let dmg = lazy_init(merged_runs.damage_to_wizard);
println!("===================================");
println!("DAMAGE TO WIZARD (TOP 10 SUM OF ALL RUNS)");
print_top_ten(&dmg);
println!("");
println!("mean damage per turn: {}", get_mean_per_turn(&dmg, merged_runs.turns_taken));
println!("");
}
}
fn generate_run_report(mut realms : Vec<Realm>) -> Run
{
realms.sort_by(|a, b| { a.realm_number.cmp(&b.realm_number) });
let mut run = Run {
realms : realms.len() as i64,
outcome : Outcome::Victory,
..Default::default() };
for realm in realms {
if run.outcome == Outcome::Victory {
match realm.outcome {
Outcome::Unknown => run.outcome = Outcome::Unknown,
Outcome::Defeat => run.outcome = Outcome::Defeat,
_ => (),
}
}
run.turns_taken_run = cmp::max(run.turns_taken_run, realm.turns_taken_run);
run.spell_casts = run.spell_casts.merge_add(realm.spell_casts);
run.damage_to_enemies = run.damage_to_enemies.merge_add(realm.damage_to_enemies);
run.damage_to_wizard = run.damage_to_wizard.merge_add(realm.damage_to_wizard);
run.items_used = run.items_used.merge_add(realm.items_used);
}
if run.outcome == Outcome::Victory && run.realms < 25
{
run.outcome = Outcome::Abandoned;
}
run
}
fn get_mean_per_turn(map : &HashMap<String, usize>, turns : i64) -> f64
{
let mut running_sum = 0;
for (_key, value) in map.iter() {
running_sum += value;
}
running_sum as f64 / (turns as f64)
}
fn print_top_ten(map : &HashMap<String, usize>)
{
let mut v = Vec::with_capacity(map.len());
for (key, _) in map.iter() {
v.push(key.clone())
}
v.sort_by(|a, b| { map.get(b).unwrap().cmp(map.get(a).unwrap()) });
for key in v.iter().take(10) {
println!("{} - {}", key, map.get(key).unwrap());
}
}
| true
|
6cb441452784f20a491656c3909996505a94b70b
|
Rust
|
mcarton/strfmt
|
/src/fmtf64.rs
|
UTF-8
| 2,124
| 2.984375
| 3
|
[
"MIT"
] |
permissive
|
use std::fmt::Write;
use std::string::String;
use types::*;
use formatter::Formatter;
/// implement formatting of f64
impl<'a, 'b> Formatter<'a, 'b> {
/// format the given string onto the buffer
pub fn f64(&mut self, f: f64) -> Result<()> {
let ty = match self.ty() {
None => 'f',
Some(c) => c,
};
if !self.is_float_type() {
let mut msg = String::new();
write!(msg, "Unknown format code {:?} for object of type f64", ty).unwrap();
return Err(FmtError::TypeError(msg));
}
if self.alternate() {
return Err(FmtError::TypeError("Alternate form (#) not allowed in f64 format \
specifier"
.to_string()));
} else if self.thousands() {
return Err(FmtError::Invalid("thousands specifier not yet supported".to_string()));
} else if self.fill() == '0' && self.align() == Alignment::Right {
return Err(FmtError::Invalid("sign aware 0 padding not yet supported".to_string()));
}
let mut s = String::new();
// handle the sign
if f >= 0_f64 && self.sign_plus() {
self.write_str("+").unwrap();
}
match self.precision() {
None => {
match ty {
'f' => write!(s, "{}", f).unwrap(),
'e' => write!(s, "{:e}", f).unwrap(),
'E' => write!(s, "{:E}", f).unwrap(),
_ => unreachable!(),
}
}
Some(p) => {
match ty {
'f' => write!(s, "{:.*}", p, f).unwrap(),
'e' => write!(s, "{:.*e}", p, f).unwrap(),
'E' => write!(s, "{:.*E}", p, f).unwrap(),
_ => unreachable!(),
}
}
}
let prev_prec = self.precision();
self.set_precision(None);
let out = self.str_unchecked(s.as_str());
self.set_precision(prev_prec);
out
}
}
| true
|
0a5647783327b07775b8fd0a3540d65dc203227f
|
Rust
|
isidornygren/ogaboga
|
/src/sequencer/mod.rs
|
UTF-8
| 2,568
| 2.765625
| 3
|
[
"MIT"
] |
permissive
|
use std::{thread, time::Instant};
pub mod generator;
use crate::rhythm::RhythmController;
// mod pool;
// mod sequencer_thread;
#[derive(Clone)]
pub enum SequenceStep {
None,
Beat,
Freq(f32),
Amp(f32),
FreqAmp(f32, f32),
}
pub type Sequence = Vec<SequenceStep>;
pub struct Sequencer {
sequences: Vec<Sequence>,
rhythm_controller: Box<RhythmController>,
}
impl Sequencer {
#[inline]
pub fn run<F>(&mut self, func: F)
where
F: Fn(usize, Option<&SequenceStep>),
{
let mut seq_index = 0;
let mut start_time = Instant::now();
loop {
for (index, sequence) in self.sequences.iter().enumerate() {
let mod_index = seq_index % sequence.len();
func(index, sequence.get(mod_index));
}
seq_index += 1;
let sleep_time = self.rhythm_controller.step();
thread::sleep(sleep_time - start_time.elapsed());
start_time = Instant::now();
}
}
pub fn set_sequence(&mut self, sequences: Vec<Sequence>) {
self.sequences = sequences;
}
#[inline]
pub fn run_then<F, G>(&mut self, func: F, sequence_mutator: G)
where
F: Fn(usize, Option<&SequenceStep>),
G: Fn(usize, &Sequence) -> Sequence,
{
// let sleep_time = Duration::from_secs_f64(self.bpm_s);
let mut seq_index = 0;
let mut start_time = Instant::now();
loop {
for (index, sequence) in self.sequences.iter_mut().enumerate() {
let mod_index = seq_index % sequence.len();
if mod_index == 0 {
*sequence = sequence_mutator(index, sequence);
}
func(index, sequence.get(mod_index));
}
seq_index += 1;
let sleep_time = self.rhythm_controller.step();
thread::sleep(sleep_time - start_time.elapsed());
start_time = Instant::now();
}
}
}
pub struct SequencerBuilder {
sequences: Vec<Sequence>,
rhythm_controller: Box<dyn RhythmController>,
}
impl SequencerBuilder {
#[must_use]
#[inline]
pub fn new(rhythm_controller: Box<dyn RhythmController>) -> Self {
return Self {
rhythm_controller,
sequences: vec![],
};
}
#[must_use]
#[inline]
pub fn add_sequence(mut self, sequence: Sequence) -> Self {
self.sequences.push(sequence);
return self;
}
#[must_use]
#[inline]
pub fn build(self) -> Sequencer {
return Sequencer {
rhythm_controller: self.rhythm_controller,
sequences: self.sequences,
};
}
}
| true
|
fa08e65121a6743c128294d43464ce7346badffe
|
Rust
|
ViniciosLugli/vcargo
|
/src/cargo/mod.rs
|
UTF-8
| 903
| 2.9375
| 3
|
[
"MIT"
] |
permissive
|
use ansi_term::Colour;
use std::process::Command;
pub struct CargoCommand {
pub default_command: &'static str,
}
pub trait Cargo {
fn run(&self, args: Vec<String>) -> bool;
}
impl Cargo for CargoCommand {
fn run(&self, args: Vec<String>) -> bool {
let mut init_cmd: Command = Command::new(self.default_command);
for arg in args {
init_cmd.arg(arg);
}
println!("{}", Colour::Purple.paint("Calling cargo process..."));
let mut child = init_cmd.spawn().unwrap();
match child.try_wait() {
Ok(Some(_)) => {
return true;
}
Ok(None) => {
child
.wait()
.expect("Unexpected error on command! Check the args for cargo package...");
return true;
}
Err(_) => return false,
};
}
}
| true
|
3c5c311e75c058019e17e70a2a96cc5fc12b51b0
|
Rust
|
dxfrontiers/sudoku_verifier_rust
|
/src/main.rs
|
UTF-8
| 3,908
| 3.390625
| 3
|
[] |
no_license
|
use std::env;
use std::fs::File;
use std::io::{BufReader, BufRead, Error, ErrorKind, Read};
use std::convert::TryInto;
use rayon::prelude::*;
/**
A struct holding a 9x9 filed sudoku board
*/
struct Sudoku{
fields: [u8;81]
}
/**
Provide implementations for different views into the data structure
Since the values (u8) are smaller than pointers to them,
the functions allocate a new slice and copy over the values.
*/
impl Sudoku{
/**
Returns the field values as an array of arrays, each one representing the rows
*/
fn as_rows(&self)-> [[u8;9];9] {
let mut res = [[0u8;9];9];
self.fields.iter().enumerate().for_each(
|(e,v)| res[e/9][e%9]=*v
);
res
}
/**
Returns the field values as an array of arrays, each one representing the columns
*/
fn as_columns(&self)-> [[u8;9];9]{
let mut res = [[0u8;9];9];
self.fields.iter().enumerate().for_each(
|(e,v)| res[e%9][e/9]=*v
);
res
}
/**
Returns the field values as an array of arrays, each one representing one of the 3 3x3 boxes
x = (e/3)%3 box for each row
+ (e/27)*3 (integer division cut-off!) offset for each 3 rows
y = (e%3) column in box
+ (e/9)%3*3 row in box
*/
fn as_boxes(&self)-> [[u8;9];9]{
let mut res = [[0u8;9];9];
self.fields.iter().enumerate().for_each(
|(e,v)|
res [(e/3)%3 + (e/27)*3] [(e%3) + (e/9)%3*3] = *v
);
res
}
}
/**
Determines if all the slice are unique
*/
fn is_valid_view(sl: [[u8;9];9]) -> bool {
sl.iter().all(|e| is_slice_unique(e) )
}
/**
Determines if all 9 numbers in the slice are unique
*/
fn is_slice_unique(sl: &[u8;9]) -> bool{
// 9 unique numbers shifting '1' their value to the left will yield 9 ones
sl.iter().fold(0u16,|a,e|
a | 1u16<<(*e-1) as u16
) == (1u16<<9) -1
}
/**
Evaluate a given sudoku
For a sudoku, filled with valid values, to be valid, all of the following needs to contain no duplicates:
- rows
- columns
- boxes
*/
fn eval_sudoku(sudoku: Sudoku) -> bool{
is_valid_view(sudoku.as_columns())
&& is_valid_view(sudoku.as_rows())
&& is_valid_view(sudoku.as_boxes())
}
/**
Create a sudoku from a string representation, if it describes a sudoku filled with valid values (1-9)
*/
fn parse_raw_line_to_sudoku(input: &str) -> Option<Sudoku>{
let mut fields = [0;81];
let count = input.split(',')
.filter_map(|split| split.trim().parse::<u8>().ok())
.take(81)
.filter(|e| *e<10 && *e>0)
.enumerate()
.map(|(idx,num)| fields[idx] = num)
.count();
match count {
81 => {
Some(Sudoku{ fields })
},
_ => None
}
}
/**
Run all the things
We read the file line by
*/
fn main() -> std::io::Result<()> {
let input_file = env::args()
.nth(1)
.ok_or(Error::from(ErrorKind::InvalidInput))?;
let file = File::open(input_file)?;
let mut valid_count = 0;
for i in 0 .. 100 {
let mut reader = BufReader::new(&file);
// The sequential version
valid_count += reader.lines()
.filter_map(|line|line.ok())
.filter_map(|line| parse_raw_line_to_sudoku(&line) )
.map(|sudoku| eval_sudoku(sudoku) )
.filter(|b|*b)
.count();
// The parallel version
/*
let mut lines = String::new();
reader.read_to_string(&mut lines).unwrap();
valid_count += lines.par_lines()
.filter_map(|line| parse_raw_line_to_sudoku(line) )
.map(|sudoku| eval_sudoku(sudoku) )
.filter(|b|*b)
.count();
*/
}
println!("Valid sudokus: {}",valid_count);
Ok(())
}
| true
|
41a899337b67b6d19f5f4406dffc6ec4079e90f3
|
Rust
|
overthink/pt
|
/src/vector.rs
|
UTF-8
| 1,958
| 3.53125
| 4
|
[] |
no_license
|
use std::ops::{Add, Mul, Neg, Sub};
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub struct Vector3 {
pub x: f64,
pub y: f64,
pub z: f64,
}
impl Vector3 {
pub fn zero() -> Vector3 {
Vector3 {
x: 0.0,
y: 0.0,
z: 0.0,
}
}
// aka manitude, Eudclidian norm, l2-norm
pub fn length(&self) -> f64 {
(self.x * self.x + self.y * self.y + self.z * self.z).sqrt()
}
// divide each vector component by its length
pub fn normalize(self) -> Vector3 {
self * self.length().recip()
}
pub fn dot(&self, other: &Vector3) -> f64 {
self.x * other.x + self.y * other.y + self.z * other.z
}
pub fn cross(&self, other: &Vector3) -> Vector3 {
Vector3 {
x: self.y * other.z - self.z * other.y,
y: self.z * other.x - self.x * other.z,
z: self.x * other.y - self.y * other.x,
}
}
}
impl Mul<f64> for Vector3 {
type Output = Vector3;
fn mul(self, rhs: f64) -> Self::Output {
Vector3 {
x: self.x * rhs,
y: self.y * rhs,
z: self.z * rhs,
}
}
}
impl Mul<Vector3> for f64 {
type Output = Vector3;
fn mul(self, rhs: Vector3) -> Self::Output {
rhs * self
}
}
impl Add for Vector3 {
type Output = Vector3;
fn add(self, rhs: Vector3) -> Self::Output {
Vector3 {
x: self.x + rhs.x,
y: self.y + rhs.y,
z: self.z + rhs.z,
}
}
}
impl Sub for Vector3 {
type Output = Vector3;
fn sub(self, rhs: Vector3) -> Self::Output {
Vector3 {
x: self.x - rhs.x,
y: self.y - rhs.y,
z: self.z - rhs.z,
}
}
}
impl Neg for Vector3 {
type Output = Vector3;
fn neg(self) -> Self::Output {
Vector3 {
x: -self.x,
y: -self.y,
z: -self.z,
}
}
}
| true
|
2cc5f015f1c01041f3d395dc28dc96c58f8c18cb
|
Rust
|
flier/rust-atomic-traits
|
/src/lib.rs
|
UTF-8
| 12,346
| 3.53125
| 4
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
//! The traits for generic atomic operations
//!
//! # Compatibility
//!
//! The crate is tested for rustc 1.8 and greater.
//!
//! # Example
//!
//! ```
//! # extern crate num_traits;
//! # extern crate atomic_traits;
//! use std::sync::atomic::{AtomicUsize, Ordering};
//!
//! use num_traits::One;
//! use atomic_traits::{Atomic, NumOps, fetch};
//! # use atomic_traits::fetch::{Add, Sub};
//!
//! #[derive(Debug, Default)]
//! pub struct RefCnt<T>(T);
//!
//! impl<T> RefCnt<T>
//! where
//! T: Atomic + NumOps + Default,
//! <T as Atomic>::Type: One
//! {
//! pub fn inc(&self) -> <T as Atomic>::Type {
//! self.0.fetch_add(<T as Atomic>::Type::one(), Ordering::Acquire)
//! }
//!
//! pub fn dec(&self) -> <T as Atomic>::Type {
//! self.0.fetch_sub(<T as Atomic>::Type::one(), Ordering::Release)
//! }
//!
//! pub fn val(&self) -> <T as Atomic>::Type {
//! self.0.load(Ordering::SeqCst)
//! }
//! }
//!
//! # fn main() {
//! let refcnt = RefCnt::<AtomicUsize>::default();
//!
//! assert_eq!(refcnt.inc(), 0);
//! assert_eq!(refcnt.dec(), 1);
//! assert_eq!(refcnt.val(), 0);
//! # }
//! ```
#![no_std]
#![deny(missing_docs)]
#[macro_use]
extern crate cfg_if;
use core::sync::atomic::*;
pub mod fetch;
/// Generic atomic types
pub trait Atomic {
/// The underlying type
type Type;
/// Creates a new atomic type.
fn new(v: Self::Type) -> Self;
/// Returns a mutable reference to the underlying type.
#[cfg(any(feature = "atomic_access", feature = "since_1_15_0"))]
fn get_mut(&mut self) -> &mut Self::Type;
/// Consumes the atomic and returns the contained value.
#[cfg(any(feature = "atomic_access", feature = "since_1_15_0"))]
fn into_inner(self) -> Self::Type;
/// Loads a value from the atomic type.
fn load(&self, order: Ordering) -> Self::Type;
/// Stores a value into the atomic type.
fn store(&self, val: Self::Type, order: Ordering);
/// Stores a value into the atomic type, returning the previous value.
fn swap(&self, val: Self::Type, order: Ordering) -> Self::Type;
/// Stores a value into the atomic type if the current value is the same as the `current` value.
///
/// The return value is always the previous value. If it is equal to `current`, then the value was updated.
#[cfg_attr(
feature = "since_1_50_0",
deprecated = "Use `compare_exchange` or `compare_exchange_weak` instead"
)]
fn compare_and_swap(&self, current: Self::Type, new: Self::Type, order: Ordering)
-> Self::Type;
/// Stores a value into the atomic type if the current value is the same as the `current` value.
///
/// The return value is a result indicating whether the new value was written and containing the previous value.
/// On success this value is guaranteed to be equal to `current`.
#[cfg(any(feature = "extended_compare_and_swap", feature = "since_1_10_0"))]
fn compare_exchange(
&self,
current: Self::Type,
new: Self::Type,
success: Ordering,
failure: Ordering,
) -> Result<Self::Type, Self::Type>;
/// Stores a value into the atomic type if the current value is the same as the current value.
///
/// Unlike `compare_exchange`, this function is allowed to spuriously fail even when the comparison succeeds,
/// which can result in more efficient code on some platforms.
/// The return value is a result indicating whether the new value was written and containing the previous value.
#[cfg(any(feature = "extended_compare_and_swap", feature = "since_1_10_0"))]
fn compare_exchange_weak(
&self,
current: Self::Type,
new: Self::Type,
success: Ordering,
failure: Ordering,
) -> Result<Self::Type, Self::Type>;
/// Returns a mutable pointer to the underlying integer.
#[cfg(feature = "atomic_mut_ptr")]
fn as_mut_ptr(&self) -> *mut usize;
}
cfg_if! {
if #[cfg(any(feature = "atomic_nand", feature = "since_1_27_0"))] {
/// The trait for types implementing atomic bitwise operations
pub trait Bitwise:
Atomic
+ fetch::And<Type = <Self as Atomic>::Type>
+ fetch::Nand<Type = <Self as Atomic>::Type>
+ fetch::Or<Type = <Self as Atomic>::Type>
+ fetch::Xor<Type = <Self as Atomic>::Type>
{
}
} else {
/// The trait for types implementing atomic bitwise operations
pub trait Bitwise:
Atomic
+ fetch::And<Type = <Self as Atomic>::Type>
+ fetch::Or<Type = <Self as Atomic>::Type>
+ fetch::Xor<Type = <Self as Atomic>::Type>
{
}
}
}
cfg_if! {
if #[cfg(any(feature = "since_1_45_0"))] {
/// The trait for types implementing atomic numeric operations
pub trait NumOps:
Atomic
+ fetch::Add<Type = <Self as Atomic>::Type>
+ fetch::Sub<Type = <Self as Atomic>::Type>
+ fetch::Update<Type = <Self as Atomic>::Type>
+ fetch::Max<Type = <Self as Atomic>::Type>
+ fetch::Min<Type = <Self as Atomic>::Type>
{
}
} else {
/// The trait for types implementing atomic numeric operations
pub trait NumOps:
Atomic
+ fetch::Add<Type = <Self as Atomic>::Type>
+ fetch::Sub<Type = <Self as Atomic>::Type>
{
}
}
}
macro_rules! impl_atomic {
($atomic:ident : $primitive:ty ; $( $traits:tt ),*) => {
impl_atomic!(__impl atomic $atomic : $primitive);
$(
impl_atomic!(__impl $traits $atomic : $primitive);
)*
};
($atomic:ident < $param:ident >) => {
impl<$param> Atomic for $atomic <$param> {
type Type = *mut $param;
impl_atomic!(__impl atomic_methods $atomic);
}
};
(__impl atomic $atomic:ident : $primitive:ty) => {
impl Atomic for $atomic {
type Type = $primitive;
impl_atomic!(__impl atomic_methods $atomic);
}
};
(__impl atomic_methods $atomic:ident) => {
#[inline(always)]
fn new(v: Self::Type) -> Self {
Self::new(v)
}
#[cfg(any(feature = "atomic_access", feature = "since_1_15_0"))]
#[inline(always)]
fn get_mut(&mut self) -> &mut Self::Type {
Self::get_mut(self)
}
#[cfg(any(feature = "atomic_access", feature = "since_1_15_0"))]
#[inline(always)]
fn into_inner(self) -> Self::Type {
Self::into_inner(self)
}
#[inline(always)]
fn load(&self, order: Ordering) -> Self::Type {
Self::load(self, order)
}
#[inline(always)]
fn store(&self, val: Self::Type, order: Ordering) {
Self::store(self, val, order)
}
#[inline(always)]
fn swap(&self, val: Self::Type, order: Ordering) -> Self::Type {
Self::swap(self, val, order)
}
#[inline(always)]
fn compare_and_swap(
&self,
current: Self::Type,
new: Self::Type,
order: Ordering,
) -> Self::Type {
#[cfg_attr(feature = "since_1_50_0", allow(deprecated))]
Self::compare_and_swap(self, current, new, order)
}
#[cfg(any(feature = "extended_compare_and_swap", feature = "since_1_10_0"))]
#[inline(always)]
fn compare_exchange(
&self,
current: Self::Type,
new: Self::Type,
success: Ordering,
failure: Ordering,
) -> Result<Self::Type, Self::Type> {
Self::compare_exchange(self, current, new, success, failure)
}
#[cfg(any(feature = "extended_compare_and_swap", feature = "since_1_10_0"))]
#[inline(always)]
fn compare_exchange_weak(
&self,
current: Self::Type,
new: Self::Type,
success: Ordering,
failure: Ordering,
) -> Result<Self::Type, Self::Type> {
Self::compare_exchange_weak(self, current, new, success, failure)
}
#[cfg(feature = "atomic_mut_ptr")]
#[inline(always)]
fn as_mut_ptr(&self) -> *mut usize {
Self::as_mut_ptr(self)
}
};
(__impl bitwise $atomic:ident : $primitive:ty) => {
impl Bitwise for $atomic {}
impl $crate::fetch::And for $atomic {
type Type = $primitive;
#[inline(always)]
fn fetch_and(&self, val: Self::Type, order: Ordering) -> Self::Type {
Self::fetch_and(self, val, order)
}
}
#[cfg(any(feature = "atomic_nand", feature = "since_1_27_0"))]
impl $crate::fetch::Nand for $atomic {
type Type = $primitive;
#[inline(always)]
fn fetch_nand(&self, val: Self::Type, order: Ordering) -> Self::Type {
Self::fetch_nand(self, val, order)
}
}
impl $crate::fetch::Or for $atomic {
type Type = $primitive;
#[inline(always)]
fn fetch_or(&self, val: Self::Type, order: Ordering) -> Self::Type {
Self::fetch_or(self, val, order)
}
}
impl $crate::fetch::Xor for $atomic {
type Type = $primitive;
#[inline(always)]
fn fetch_xor(&self, val: Self::Type, order: Ordering) -> Self::Type {
Self::fetch_xor(self, val, order)
}
}
};
(__impl numops $atomic:ident : $primitive:ty) => {
impl NumOps for $atomic {}
impl $crate::fetch::Add for $atomic {
type Type = $primitive;
#[inline(always)]
fn fetch_add(&self, val: Self::Type, order: Ordering) -> Self::Type {
Self::fetch_add(self, val, order)
}
}
impl $crate::fetch::Sub for $atomic {
type Type = $primitive;
#[inline(always)]
fn fetch_sub(&self, val: Self::Type, order: Ordering) -> Self::Type {
Self::fetch_sub(self, val, order)
}
}
cfg_if! {
if #[cfg(any(feature = "since_1_45_0"))] {
impl $crate::fetch::Update for $atomic {
type Type = $primitive;
#[inline(always)]
fn fetch_update<F>(
&self,
fetch_order: Ordering,
set_order: Ordering,
f: F,
) -> Result<Self::Type, Self::Type>
where
F: FnMut(Self::Type) -> Option<Self::Type> {
Self::fetch_update(self, fetch_order, set_order, f)
}
}
impl $crate::fetch::Max for $atomic {
type Type = $primitive;
#[inline(always)]
fn fetch_max(&self, val: Self::Type, order: Ordering) -> Self::Type {
Self::fetch_max(self, val, order)
}
}
impl $crate::fetch::Min for $atomic {
type Type = $primitive;
#[inline(always)]
fn fetch_min(&self, val: Self::Type, order: Ordering) -> Self::Type {
Self::fetch_min(self, val, order)
}
}
}
}
};
}
impl_atomic!(AtomicBool: bool; bitwise);
impl_atomic!(AtomicIsize: isize; bitwise, numops);
impl_atomic!(AtomicUsize: usize; bitwise, numops);
impl_atomic!(AtomicPtr<T>);
#[cfg(any(feature = "integer_atomics", feature = "since_1_34_0"))]
mod integer_atomics {
use super::*;
impl_atomic!(AtomicI8: i8; bitwise, numops);
impl_atomic!(AtomicI16: i16; bitwise, numops);
impl_atomic!(AtomicI32: i32; bitwise, numops);
impl_atomic!(AtomicI64: i64; bitwise, numops);
impl_atomic!(AtomicU8: u8; bitwise, numops);
impl_atomic!(AtomicU16: u16; bitwise, numops);
impl_atomic!(AtomicU32: u32; bitwise, numops);
impl_atomic!(AtomicU64: u64; bitwise, numops);
}
| true
|
54eb4ecc94e9dc7dca044fc13237a33b6eb17c59
|
Rust
|
portier/portier-broker
|
/src/config/i18n.rs
|
UTF-8
| 882
| 2.765625
| 3
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
use gettext::Catalog;
use std::fs::File;
use std::path::PathBuf;
// Contains all gettext catalogs we use in compiled form.
pub struct I18n {
pub catalogs: Vec<(&'static str, Catalog)>,
}
const SUPPORTED_LANGUAGES: &[&str] = &["en", "de", "nl"];
impl I18n {
pub fn new(data_dir: &str) -> I18n {
let data_dir: PathBuf = data_dir.into();
let catalogs = SUPPORTED_LANGUAGES
.iter()
.map(|lang| {
let mut path = data_dir.clone();
path.push("lang");
path.push(lang);
path.set_extension("mo");
let file = File::open(path).expect("could not open catalog file");
let catalog = Catalog::parse(file).expect("could not parse catalog file");
(*lang, catalog)
})
.collect();
I18n { catalogs }
}
}
| true
|
18d96e54b01efdd447943ff908321c97e44caa51
|
Rust
|
Caluka/CHIP8
|
/src/chip8.rs
|
UTF-8
| 1,525
| 3.03125
| 3
|
[] |
no_license
|
use super::bus;
use super::cpu;
use std::time::Instant;
pub struct Chip8 {
pub cpu: cpu::Cpu,
pub bus: bus::DataBus,
pub delay_timer: std::time::Instant,
}
impl Chip8 {
pub fn new() -> Self {
Self {
cpu: cpu::Cpu::new(),
bus: bus::DataBus::new(),
delay_timer: std::time::Instant::now(),
}
}
pub fn exec_cycle(&mut self) {
if self.cpu.instr_timer.elapsed().as_micros() > self.cpu.instr_time {
let instruction = self.fetch_instruction();
self.cpu.exec_instruction(&mut self.bus, instruction);
self.cpu.instr_timer = Instant::now();
}
self.dec_dt_st();
}
fn dec_dt_st(&mut self) {
// DEC dt and st at 60hz
if self.delay_timer.elapsed().as_nanos() > 16666667 {
if self.cpu.dt > 0 {
self.cpu.dt -= 1;
}
if self.cpu.st > 0 {
self.cpu.st -= 1;
}
self.delay_timer = Instant::now();
}
}
fn fetch_instruction(&self) -> u16 {
let byte1: u8 = self.bus.ram.read_byte(self.cpu.pc as usize);
let byte2: u8 = self.bus.ram.read_byte(self.cpu.pc as usize + 1);
(byte1 as u16) << 8 | byte2 as u16
}
pub fn load_mem(&mut self, rom: &Vec<u8>, offset: usize) {
for (i, value) in rom.iter().enumerate() {
self.bus.ram.write_byte(offset + i, *value)
}
}
}
| true
|
862870a94465e1c5cefc9b3d6767e9f50fe69409
|
Rust
|
glpeon/rpfm
|
/rpfm_ui/src/shortcuts_ui/slots.rs
|
UTF-8
| 1,826
| 2.578125
| 3
|
[
"MIT"
] |
permissive
|
//---------------------------------------------------------------------------//
// Copyright (c) 2017-2020 Ismael Gutiérrez González. All rights reserved.
//
// This file is part of the Rusted PackFile Manager (RPFM) project,
// which can be found here: https://github.com/Frodo45127/rpfm.
//
// This file is licensed under the MIT license, which can be found here:
// https://github.com/Frodo45127/rpfm/blob/master/LICENSE.
//---------------------------------------------------------------------------//
/*!
Module with all the code related to `ShortcutsUISlots`.
!*/
use qt_core::Slot;
use crate::shortcuts_ui::ShortcutsUI;
use crate::ui_state::shortcuts::Shortcuts;
//-------------------------------------------------------------------------------//
// Enums & Structs
//-------------------------------------------------------------------------------//
/// This struct contains all the slots we need to respond to signals of EVERY widget/action in the `ShortcutsUI` struct.
///
/// This means everything you can do with the stuff you have in the `ShortcutsUI` goes here.
pub struct ShortcutsUISlots {
pub restore_default: Slot<'static>,
}
//-------------------------------------------------------------------------------//
// Implementations
//-------------------------------------------------------------------------------//
/// Implementation of `ShortcutsUISlots`.
impl ShortcutsUISlots {
/// This function creates a new `ShortcutsUISlots`.
pub unsafe fn new(ui: &ShortcutsUI) -> Self {
// What happens when we hit the "Restore Default" action.
let mut ui = ui.clone();
let restore_default = Slot::new(move || {
ui.load(&Shortcuts::new())
});
ShortcutsUISlots {
restore_default
}
}
}
| true
|
c6e21e0d019042ace77543af06f938e583ad06a7
|
Rust
|
DanglingPointer/rust-ttt
|
/src/grid.rs
|
UTF-8
| 7,538
| 3.296875
| 3
|
[] |
no_license
|
use std::fmt;
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum Mark {
Cross,
Nought,
}
#[derive(Default, Debug)]
pub struct Grid {
side_length: usize,
data: Vec<Option<Mark>>,
}
impl Grid {
pub fn new(side_length: usize) -> Grid {
Grid {
side_length,
data: vec![None; side_length * side_length],
}
}
pub fn get_size(&self) -> usize {
self.data.len()
}
pub fn get_side_length(&self) -> usize {
self.side_length
}
pub fn get_at_ind(&self, ind: usize) -> Option<Mark> {
self.data[ind]
}
pub fn get_at_pos(&self, x: usize, y: usize) -> Option<Mark> {
self.get_at_ind(y * self.side_length + x)
}
pub fn set_at_ind(&mut self, ind: usize, what: Mark) -> Result<(), Mark> {
let sqr = &mut self.data[ind];
match *sqr {
Some(side) => Err(side),
None => {
*sqr = Some(what);
Ok(())
}
}
}
pub fn set_at_pos(&mut self, x: usize, y: usize, what: Mark) -> Result<(), Mark> {
self.set_at_ind(y * self.side_length + x, what)
}
pub fn unset_at_ind(&mut self, ind: usize) {
self.data[ind] = None;
}
pub fn is_full(&self) -> bool {
self.data.iter().all(|e| e.is_some())
}
}
impl fmt::Display for Grid {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(formatter, "\n ")?;
for col in 0..self.side_length {
write!(formatter, "{:^3} ", col)?;
}
for row in 0..self.side_length {
write!(formatter, "\n{:^3}|", row)?;
for col in 0..self.side_length {
let symbol = match self.get_at_pos(col, row) {
Some(Mark::Cross) => 'X',
Some(Mark::Nought) => 'O',
None => ' ',
};
write!(formatter, "{:^3}|", symbol)?;
}
}
write!(formatter, "\n")?;
Ok(())
}
}
pub fn get_winner(g: &Grid) -> Option<Mark> {
fn get_col_winner(g: &Grid, col_ind: usize) -> Option<Mark> {
let first = g.data[col_ind];
let start_ind = col_ind + g.side_length;
let slice: &[Option<Mark>] = &g.data[start_ind..];
if slice.iter().step_by(g.side_length).all(|e| *e == first) {
first
} else {
None
}
}
for col_ind in 0..g.get_side_length() {
if let Some(w) = get_col_winner(g, col_ind) {
return Some(w);
}
}
fn get_row_winner(g: &Grid, row_ind: usize) -> Option<Mark> {
let start_ind = row_ind * g.side_length;
let end_ind = start_ind + g.side_length;
let first = g.data[start_ind];
let slice = &g.data[start_ind + 1..end_ind];
if slice.iter().all(|e| *e == first) {
first
} else {
None
}
}
for row_ind in 0..g.get_side_length() {
if let Some(w) = get_row_winner(g, row_ind) {
return Some(w);
}
}
fn get_inc_diag_winner(g: &Grid) -> Option<Mark> {
let first = g.data[0];
let interval = g.side_length + 1;
let slice: &[Option<Mark>] = &g.data[interval..];
if slice.iter().step_by(interval).all(|e| *e == first) {
first
} else {
None
}
}
if let Some(w) = get_inc_diag_winner(g) {
return Some(w);
}
fn get_mix_diag_winner(g: &Grid) -> Option<Mark> {
let interval = g.side_length - 1;
let start_ind = interval + interval;
let end_ind = g.data.len() - 1;
let first = g.data[interval];
let slice: &[Option<Mark>] = &g.data[start_ind..end_ind];
if slice.iter().step_by(interval).all(|e| *e == first) {
first
} else {
None
}
}
if let Some(w) = get_mix_diag_winner(g) {
return Some(w);
}
None
}
#[cfg(test)]
mod tests {
use super::{Mark::*, *};
#[test]
fn test_constructed_grid_is_empty() {
let g = Grid::new(5);
assert_eq!(5, g.get_side_length());
for i in 0..g.get_side_length() {
for j in 0..g.get_side_length() {
assert_eq!(None, g.get_at_pos(i, j));
}
}
}
#[test]
fn test_set_cross() {
let mut g = Grid::new(3);
g.set_at_pos(1, 1, Cross).unwrap();
assert_eq!(Cross, g.set_at_pos(1, 1, Cross).unwrap_err());
assert_eq!(Cross, g.set_at_pos(1, 1, Nought).unwrap_err());
for i in 0..g.get_side_length() {
for j in 0..g.get_side_length() {
match (i, j) {
(1, 1) => assert_eq!(Some(Cross), g.get_at_pos(1, 1)),
_ => assert_eq!(None, g.get_at_pos(i, j)),
}
}
}
}
#[test]
fn test_set_nought() {
let mut g = Grid::new(3);
g.set_at_pos(0, 2, Nought).unwrap();
assert_eq!(Nought, g.set_at_pos(0, 2, Nought).unwrap_err());
assert_eq!(Nought, g.set_at_pos(0, 2, Cross).unwrap_err());
for i in 0..g.get_side_length() {
for j in 0..g.get_side_length() {
match (i, j) {
(0, 2) => assert_eq!(Some(Nought), g.get_at_pos(0, 2)),
_ => assert_eq!(None, g.get_at_pos(i, j)),
}
}
}
}
#[test]
fn test_no_winner() {
let mut g = Grid::new(3);
g.set_at_pos(1, 1, Cross).unwrap();
g.set_at_pos(0, 2, Nought).unwrap();
assert_eq!(None, get_winner(&g));
}
#[test]
fn test_col_winner() {
let mut g = Grid::new(3);
g.set_at_pos(0, 0, Nought).unwrap();
g.set_at_pos(0, 1, Nought).unwrap();
g.set_at_pos(0, 2, Nought).unwrap();
assert_eq!(Some(Nought), get_winner(&g));
g = Grid::new(3);
g.set_at_pos(2, 0, Cross).unwrap();
g.set_at_pos(2, 1, Cross).unwrap();
g.set_at_pos(2, 2, Cross).unwrap();
assert_eq!(Some(Cross), get_winner(&g));
}
#[test]
fn test_row_winner() {
let mut g = Grid::new(3);
g.set_at_pos(2, 2, Nought).unwrap();
g.set_at_pos(0, 2, Nought).unwrap();
g.set_at_pos(1, 2, Nought).unwrap();
assert_eq!(Some(Nought), get_winner(&g));
g = Grid::new(3);
g.set_at_pos(0, 0, Cross).unwrap();
g.set_at_pos(1, 0, Cross).unwrap();
g.set_at_pos(2, 0, Cross).unwrap();
assert_eq!(Some(Cross), get_winner(&g));
}
#[test]
fn test_diag_winner() {
let mut g = Grid::new(3);
g.set_at_pos(0, 0, Cross).unwrap();
g.set_at_pos(1, 1, Cross).unwrap();
g.set_at_pos(2, 2, Cross).unwrap();
assert_eq!(Some(Cross), get_winner(&g));
g = Grid::new(3);
g.set_at_pos(0, 2, Nought).unwrap();
g.set_at_pos(1, 1, Nought).unwrap();
g.set_at_pos(2, 0, Nought).unwrap();
assert_eq!(Some(Nought), get_winner(&g));
}
#[test]
fn test_is_full() {
let mut g = Grid::new(3);
assert!(!g.is_full());
g.set_at_pos(0, 0, Cross).unwrap();
assert!(!g.is_full());
for i in 0..g.get_side_length() {
for j in 0..g.get_side_length() {
let _ = g.set_at_pos(i, j, if i + j % 2 == 0 { Cross } else { Nought });
}
}
assert!(g.is_full());
}
}
| true
|
4955462dbed7c406a7173fdf672cf2db351bd2d8
|
Rust
|
Pedrotojal/automaat
|
/src/web-client/src/component/task_result.rs
|
UTF-8
| 3,106
| 3.171875
| 3
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
//! A single task result shown in the UI when searching for tasks.
use crate::model::task::Task;
use crate::router::Route;
use dodrio::bumpalo::collections::string::String;
use dodrio::bumpalo::format;
use dodrio::{Node, Render, RenderContext};
use std::marker::PhantomData;
/// The `TaskResult` component.
pub(crate) struct TaskResult<'a, C> {
/// A reference to the task for which the (sparse) details are shown in the
/// list of tasks.
task: &'a Task,
/// Reference to application controller.
_controller: PhantomData<C>,
}
impl<'a, C> TaskResult<'a, C> {
/// Create a new `TaskResult` component with the provided task reference.
pub(crate) const fn new(task: &'a Task) -> Self {
Self {
task,
_controller: PhantomData,
}
}
}
/// The trait implemented by this component to render all its views.
trait Views<'b> {
/// The header part of the result, showing the name of the task.
fn header(&self, cx: &mut RenderContext<'b>) -> Node<'b>;
/// The description of the task.
fn description(&self, cx: &mut RenderContext<'b>) -> Node<'b>;
/// The button to open the details view of the task.
fn open_button(&self, cx: &mut RenderContext<'b>) -> Node<'b>;
}
impl<'a, 'b, C> Views<'b> for TaskResult<'a, C> {
fn header(&self, cx: &mut RenderContext<'b>) -> Node<'b> {
use dodrio::builder::*;
let name = String::from_str_in(self.task.name(), cx.bump).into_bump_str();
div(&cx)
.attr("class", "header")
.child(div(&cx).child(h1(&cx).child(text(name)).finish()).finish())
.finish()
}
fn description(&self, cx: &mut RenderContext<'b>) -> Node<'b> {
use dodrio::builder::*;
let description = String::from_str_in(self.task.description(), cx.bump).into_bump_str();
div(&cx)
.attr("class", "description")
.child(
div(&cx)
.child(p(&cx).child(text(description)).finish())
.finish(),
)
.finish()
}
fn open_button(&self, cx: &mut RenderContext<'b>) -> Node<'b> {
use dodrio::builder::*;
let route = Route::Task(self.task.id());
let url = format!(in cx.bump, "{}", route).into_bump_str();
let label = format!(in cx.bump, "Open task: {}", self.task.name()).into_bump_str();
a(&cx)
.attr("href", url)
.attr("tabindex", "0")
.attr("aria-label", label)
.child(div(&cx).child(i(&cx).finish()).finish())
.finish()
}
}
impl<'a, C> Render for TaskResult<'a, C> {
fn render<'b>(&self, cx: &mut RenderContext<'b>) -> Node<'b> {
use dodrio::builder::*;
let details = div(&cx)
.children([self.header(cx), self.description(cx)])
.finish();
let content = div(&cx).children([details, self.open_button(cx)]).finish();
div(&cx)
.attr("class", "task-result")
.child(div(&cx).child(content).finish())
.finish()
}
}
| true
|
2e760c79608c5c5192e89fb7ea665ade3a057cd9
|
Rust
|
ngtkana/aribook
|
/crates/chap3_4/number_of_length_k_paths/src/main.rs
|
UTF-8
| 2,806
| 2.859375
| 3
|
[] |
no_license
|
// dbg {{{
#[allow(dead_code)]
mod dbg {
use std::fmt::{Debug, Formatter};
#[derive(Clone)]
pub struct Tabular<'a, T: Debug>(pub &'a [T]);
impl<'a, T: Debug> Debug for Tabular<'a, T> {
fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
for i in 0..self.0.len() {
writeln!(f, "{:2} | {:?}", i, &self.0[i])?;
}
Ok(())
}
}
#[derive(Clone)]
pub struct BooleanTable<'a>(pub &'a [Vec<bool>]);
impl<'a> Debug for BooleanTable<'a> {
fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
for i in 0..self.0.len() {
writeln!(f, "{:2} | {:?}", i, BooleanSlice(&self.0[i]))?;
}
Ok(())
}
}
#[derive(Clone)]
pub struct BooleanSlice<'a>(pub &'a [bool]);
impl<'a> Debug for BooleanSlice<'a> {
fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
write!(
f,
"{}",
self.0
.iter()
.map(|&b| if b { "1 " } else { "0 " })
.collect::<String>()
)?;
Ok(())
}
}
}
// }}}
use proconio::{input, marker::Usize1};
const MODULUS: u32 = 10_007;
#[allow(clippy::many_single_char_names)]
fn main() {
fn pow(mut a: Vec<Vec<u32>>, mut b: u64) -> Vec<Vec<u32>> {
let n = a.len();
let mut ans = vec![vec![0; n]; n];
(0..n).for_each(|i| ans[i][i] = 1);
while 0 != b {
if b % 2 == 1 {
ans = mul(&ans, &a);
}
a = mul(&a, &a);
b /= 2;
}
ans
}
fn mul(a: &[Vec<u32>], b: &[Vec<u32>]) -> Vec<Vec<u32>> {
let n = a.len();
let mut c = vec![vec![0; n]; n];
for (i, ai) in a.iter().enumerate() {
for j in 0..n {
for (k, bk) in b.iter().enumerate() {
c[i][j] += ai[k] * bk[j];
}
}
}
c.iter_mut()
.map(|v| v.iter_mut())
.flatten()
.for_each(|x| *x %= MODULUS);
c
}
input!(n: usize, m: usize, k: u64, uv: [(Usize1, Usize1); m]);
let mut a = vec![vec![0; n]; n];
for (u, v) in uv {
a[u][v] = 1;
}
let b = pow(a, k);
println!(
"{}",
b.iter().map(|v| v.iter()).flatten().sum::<u32>() % MODULUS
);
}
#[cfg(test)]
mod chap3_4_number_of_length_k_paths_tests {
const BIN: &str = "chap3_4_number_of_length_k_paths";
fn test_sample(input: &str, output: &str) {
proconcli::test_sample(input, output, BIN);
}
#[test]
fn sample1() {
test_sample(
r#"4 5 2
1 2
1 3
2 3
3 4
4 1
"#,
"6\n",
);
}
}
| true
|
2159c7d59ed1927e1e2d5fe59dd0239a90d43a2e
|
Rust
|
stillmatic/advent
|
/2021/d1/src/main.rs
|
UTF-8
| 1,079
| 3.234375
| 3
|
[
"MIT"
] |
permissive
|
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::path::Path;
fn main() {
// let lines = numbers_from_file("../input.txt");
// let file_name = "../example.txt";
let file_name = "../input.txt";
let input_str = std::fs::read_to_string(file_name).unwrap();
let lines: Vec<i32> = input_str
.split("\n")
.filter(|l| l.len() > 0)
.map(|i| Ok::<i32,std::num::ParseIntError>(i.parse().unwrap()).expect("Could not parse line") )
.collect();
let p1: i32 = lines.windows(2)
.map(|x| if x[0] < x[1] {1} else {0})
.sum();
println!("Part 1: {}", p1);
let p2: i32 = lines.windows(4)
.map(|x| if x[0] < x[3] {1} else {0})
.sum();
println!("Part 2: {}", p2);
}
// fn numbers_from_file(filename: impl AsRef<Path>) -> Vec<i32> {
// let file = File::open(filename).expect("no such file");
// let buf = BufReader::new(file);
// buf.lines()
// .map(|l| l.expect("Could not parse line"))
// .map(|i| i.parse::<i32>().unwrap())
// .collect()
// }
| true
|
e213ea95d72d66371b9cf895faf18729193dd31b
|
Rust
|
TrendingTechnology/ruzzt
|
/ruzzt_engine/src/sounds.rs
|
UTF-8
| 4,383
| 2.984375
| 3
|
[
"MIT"
] |
permissive
|
use lazy_static::lazy_static;
lazy_static! {
/// When playing a sound effect, it takes one of these arrays depending on the selected sound effect
/// index, then it toggles the speaker for (1/sample) seconds. Eg. for index 1, the speaker might be
/// up for 1/1100 seconds, then down for 1/1200 seconds, then up for 1/1300 seconds, and so forth.
pub static ref SOUND_EFFECT_WAVES: Vec<Vec<u16>> = vec![
vec![3200],
vec![1100, 1200, 1300, 1400, 1500, 1600, 1700, 1800, 1900, 2000, 2100, 2200, 2300, 2400],
vec![4800, 4800, 8000, 1600, 4800, 4800, 8000, 1600, 4800, 4800, 8000, 1600, 4800, 4800],
vec![0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
vec![500, 2556, 1929, 3776, 3386, 4517, 1385, 1103, 4895, 3396, 874, 1616, 5124, 606],
vec![1600, 1514, 1600, 821, 1600, 1715, 1600, 911, 1600, 1968, 1600, 1490, 1600, 1722],
vec![2200, 1760, 1760, 1320, 2640, 880, 2200, 1760, 1760, 1320, 2640, 880, 2200, 1760],
vec![688, 676, 664, 652, 640, 628, 616, 604, 592, 580, 568, 556, 544, 532],
vec![1207, 1224, 1163, 1127, 1159, 1236, 1269, 1314, 1127, 1224, 1320, 1332, 1257, 1327],
vec![378, 331, 316, 230, 224, 384, 480, 320, 358, 412, 376, 621, 554, 426],
];
}
/// The priority of a sound that will be added to the sound player. Music is appended to whatever is
/// currently playing. Sounds with higher levels will replace currently playing sounds, and lower
/// levels will be ignored.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum SoundPriority {
Music,
Level(usize),
}
impl SoundPriority {
/// Returns true if this priority is higher than the `other`, or is music (and will get appended
/// to whatever is playing currently).
pub fn is_higher_priority_than(&self, other: &SoundPriority) -> bool {
match (self, other) {
(SoundPriority::Level(self_level), SoundPriority::Level(other_level)) => {
self_level >= other_level
}
(SoundPriority::Music, _) => true,
_ => false,
}
}
}
/// A single note or sound effect that can be stringed together to make game sounds.
#[derive(Debug, Clone, PartialEq)]
pub struct SoundEntry {
/// The code of the sound to play. 0-239 are notes, and 240-255 are sound effects from the
/// `SOUND_EFFECT_WAVES` list.
pub sound_code: u8,
// 1 means 32nd note, 2 means 16th note...
pub length_multiplier: u8,
}
/// Get a notes string as written in ZZT OOP, and convert it to a list of `SoundEntry` (which is
/// what the sound player actually accepts).
pub fn process_notes_string(notes_string: &[u8]) -> Vec<SoundEntry> {
let mut current_note_index = 0;
let mut octave_offset = 3;
let mut length_multiplier = 1;
let mut result = vec![];
while current_note_index < notes_string.len() {
match notes_string[current_note_index].to_ascii_lowercase() {
b't' => {
length_multiplier = 1;
}
b's' => {
length_multiplier = 2;
}
b'i' => {
length_multiplier = 4;
}
b'q' => {
length_multiplier = 8;
}
b'h' => {
length_multiplier = 16;
}
b'w' => {
length_multiplier = 32;
}
b'3' => {
length_multiplier /= 3;
}
b'.' => {
length_multiplier = length_multiplier + (length_multiplier / 2);
}
b'+' => {
if octave_offset < 6 {
octave_offset += 1
}
}
b'-' => {
if octave_offset > 1 {
octave_offset -= 1
}
}
b'x' => {
result.push(SoundEntry{
sound_code: 0,
length_multiplier,
});
}
note_name @ b'a' ..= b'g' => {
let scale_indices: [u8; 7] = [9, 11, 0, 2, 4, 5, 7];
let mut scale_index: u8 = scale_indices[(note_name - b'a') as usize];
if let Some(sharp_flat) = notes_string.get(current_note_index + 1) {
match sharp_flat {
b'#' => {
scale_index = scale_index.wrapping_add(1);
current_note_index += 1;
}
b'!' => {
scale_index = scale_index.wrapping_sub(1);
current_note_index += 1;
}
_ => {}
}
}
let sound_code = octave_offset * 16 + scale_index;
result.push(SoundEntry{
sound_code,
length_multiplier,
});
}
// This doesn't include b'3', which is matched above.
sound_effect_char @ b'0'..= b'9' => {
let sound_effect_index = sound_effect_char - b'0';
let sound_code = sound_effect_index + 240;
result.push(SoundEntry{
sound_code,
length_multiplier,
});
}
_ => {}
}
current_note_index += 1;
}
result
}
| true
|
d49284b61584e4bf25778ef80f4b451d8bda1467
|
Rust
|
NiklasBorson/AdventDayOfCode2020
|
/day15/src/main.rs
|
UTF-8
| 1,469
| 3.71875
| 4
|
[] |
no_license
|
use std::collections::HashMap;
fn main() {
let input = [ 0, 14, 1, 3, 7, 9 ];
let mut game = GameState::new(&input);
// Part 1
game.advance_to(2020);
println!("{}", game.last_number);
// Part 2
game.advance_to(30000000);
println!("{}", game.last_number);
}
struct GameState {
next_index : usize,
last_number : usize,
turn_map : HashMap<usize, usize>
}
impl GameState {
fn new(input : &[usize]) -> GameState {
let mut turn_map = HashMap::new();
let next_index = input.len();
let last_index = next_index - 1;
let last_number = input[last_index as usize];
for i in 0..last_index {
let n = input[i];
//println!("{}. {}", i + 1, n);
turn_map.insert(n, i + 1);
}
//println!("{}. {}", next_index, last_number);
GameState {
next_index,
last_number,
turn_map
}
}
fn next(&mut self) {
let n = match self.turn_map.get(&self.last_number) {
Some(index) => self.next_index - *index,
None => 0
};
self.turn_map.insert(self.last_number, self.next_index);
self.last_number = n;
self.next_index += 1;
//println!("{}. {}", self.next_index, self.last_number);
}
fn advance_to(&mut self, turn_index : usize) {
while self.next_index < turn_index {
self.next();
}
}
}
| true
|
f8c0f1ed927dafc263b98cc3449dd9d17060040b
|
Rust
|
davidB/kubectl-view-allocations
|
/src/qty.rs
|
UTF-8
| 11,377
| 2.96875
| 3
|
[
"CC0-1.0"
] |
permissive
|
// see [Definitions of the SI units: The binary prefixes](https://physics.nist.gov/cuu/Units/binary.html)
// see [Managing Compute Resources for Containers - Kubernetes](https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/)
//TODO rewrite to support exponent, ... see [apimachinery/quantity.go at master · kubernetes/apimachinery](https://github.com/kubernetes/apimachinery/blob/master/pkg/api/resource/quantity.go)
use std::cmp::Ordering;
use std::str::FromStr;
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("Failed to parse scale in '{0}'")]
ScaleParseError(String),
#[error("Failed to read Qty (num) from '{input}'")]
QtyNumberParseError {
input: String,
#[source] // optional if field name is `source`
source: std::num::ParseFloatError,
},
}
#[derive(Debug, Clone, Eq, PartialEq, Default)]
pub struct Scale {
label: &'static str,
base: u32,
pow: i32,
}
// should be sorted in DESC
#[rustfmt::skip]
static SCALES: [Scale;15] = [
Scale{ label:"Pi", base: 2, pow: 50},
Scale{ label:"Ti", base: 2, pow: 40},
Scale{ label:"Gi", base: 2, pow: 30},
Scale{ label:"Mi", base: 2, pow: 20},
Scale{ label:"Ki", base: 2, pow: 10},
Scale{ label:"P", base: 10, pow: 15},
Scale{ label:"T", base: 10, pow: 12},
Scale{ label:"G", base: 10, pow: 9},
Scale{ label:"M", base: 10, pow: 6},
Scale{ label:"k", base: 10, pow: 3},
Scale{ label:"", base: 10, pow: 0},
Scale{ label:"m", base: 10, pow: -3},
Scale{ label:"u", base: 10, pow: -6},
Scale{ label:"μ", base: 10, pow: -6},
Scale{ label:"n", base: 10, pow: -9},
];
impl FromStr for Scale {
type Err = Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
SCALES
.iter()
.find(|v| v.label == s)
.cloned()
.ok_or_else(|| Error::ScaleParseError(s.to_owned()))
}
}
impl From<&Scale> for f64 {
fn from(v: &Scale) -> f64 {
if v.pow == 0 || v.base == 0 {
1.0
} else {
f64::from(v.base).powf(f64::from(v.pow))
}
}
}
impl PartialOrd for Scale {
//TODO optimize accuracy with big number
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
let v1 = f64::from(self);
let v2 = f64::from(other);
if v1 > v2 {
Some(Ordering::Greater)
} else if v1 < v2 {
Some(Ordering::Less)
} else if (v1 - v2).abs() < std::f64::EPSILON {
Some(Ordering::Equal)
} else {
None
}
}
}
impl Scale {
pub fn min(&self, other: &Scale) -> Scale {
if self < other {
self.clone()
} else {
other.clone()
}
}
}
#[derive(Debug, Clone, Eq, PartialEq, Default)]
pub struct Qty {
pub value: i64,
pub scale: Scale,
}
impl From<&Qty> for f64 {
fn from(v: &Qty) -> f64 {
(v.value as f64) * 0.001
}
}
impl Qty {
pub fn lowest_positive() -> Qty {
Qty {
value: 1,
scale: Scale::from_str("m").unwrap(),
}
}
pub fn is_zero(&self) -> bool {
self.value == 0
}
pub fn calc_percentage(&self, base100: &Self) -> f64 {
if base100.value != 0 {
f64::from(self) * 100f64 / f64::from(base100)
} else {
core::f64::NAN
}
}
pub fn adjust_scale(&self) -> Qty {
let valuef64 = f64::from(self);
let scale = SCALES
.iter()
.filter(|s| s.base == self.scale.base || self.scale.base == 0)
.find(|s| f64::from(*s) <= valuef64);
match scale {
Some(scale) => Qty {
value: self.value,
scale: scale.clone(),
},
None => self.clone(),
}
}
}
impl FromStr for Qty {
type Err = Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let (num_str, scale_str): (&str, &str) = match s.find(|c: char| {
!c.is_ascii_digit() && c != 'E' && c != 'e' && c != '+' && c != '-' && c != '.'
}) {
Some(pos) => (&s[..pos], &s[pos..]),
None => (s, ""),
};
let scale = Scale::from_str(scale_str.trim())?;
let num = f64::from_str(num_str).map_err(|source| Error::QtyNumberParseError {
input: num_str.to_owned(),
source,
})?;
let value = (num * f64::from(&scale) * 1000f64) as i64;
Ok(Qty { value, scale })
}
}
impl std::fmt::Display for Qty {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"{:.1}{}",
(self.value as f64 / (f64::from(&self.scale) * 1000f64)),
self.scale.label
)
}
}
impl PartialOrd for Qty {
//TODO optimize accuracy with big number
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
let v1 = self.value; // f64::from(self);
let v2 = other.value; // f64::from(other);
v1.partial_cmp(&v2)
}
}
impl Ord for Qty {
//TODO optimize accuracy with big number
fn cmp(&self, other: &Self) -> Ordering {
let v1 = self.value; // f64::from(self);
let v2 = other.value; // f64::from(other);
v1.partial_cmp(&v2).unwrap() // i64 should always be comparable (no NaNs or anything crazy like that)
}
}
pub fn select_scale_for_add(v1: &Qty, v2: &Qty) -> Scale {
if v2.value == 0 {
v1.scale.clone()
} else if v1.value == 0 {
v2.scale.clone()
} else {
v1.scale.min(&v2.scale)
}
}
impl std::ops::Add for Qty {
type Output = Qty;
fn add(self, other: Self) -> Qty {
&self + &other
}
}
impl std::ops::Add for &Qty {
type Output = Qty;
fn add(self, other: Self) -> Qty {
Qty {
value: self.value + other.value,
scale: select_scale_for_add(self, other),
}
}
}
impl<'b> std::ops::AddAssign<&'b Qty> for Qty {
fn add_assign(&mut self, other: &'b Self) {
*self = Qty {
value: self.value + other.value,
scale: select_scale_for_add(self, other),
}
}
}
impl std::ops::Sub for Qty {
type Output = Qty;
fn sub(self, other: Self) -> Qty {
&self - &other
}
}
impl std::ops::Sub for &Qty {
type Output = Qty;
fn sub(self, other: Self) -> Qty {
Qty {
value: self.value - other.value,
scale: select_scale_for_add(self, other),
}
}
}
impl<'b> std::ops::SubAssign<&'b Qty> for Qty {
fn sub_assign(&mut self, other: &'b Self) {
*self = Qty {
value: self.value - other.value,
scale: select_scale_for_add(self, other),
};
}
}
#[cfg(test)]
mod tests {
use super::*;
use spectral::prelude::*;
#[test]
fn test_to_base() -> Result<(), Box<dyn std::error::Error>> {
assert_that!(f64::from(&Qty::from_str("1k")?))
.is_close_to(f64::from(&Qty::from_str("1000000m")?), 0.01);
assert_that!(Qty::from_str("1Ki")?).is_equal_to(Qty {
value: 1024000,
scale: Scale {
label: "Ki",
base: 2,
pow: 10,
},
});
Ok(())
}
#[test]
fn expectation_ok_for_adjust_scale() -> Result<(), Box<dyn std::error::Error>> {
let cases = vec![
("1k", "1.0k"),
("10k", "10.0k"),
("100k", "100.0k"),
("999k", "999.0k"),
("1000k", "1.0M"),
("1999k", "2.0M"), //TODO 1.9M should be better ?
("1Ki", "1.0Ki"),
("10Ki", "10.0Ki"),
("100Ki", "100.0Ki"),
("1000Ki", "1000.0Ki"),
("1024Ki", "1.0Mi"),
("25641877504", "25.6G"),
("1770653738944", "1.8T"),
("1000m", "1.0"),
("100m", "100.0m"),
("1m", "1.0m"),
];
for (input, expected) in cases {
assert_that!(format!("{}", &Qty::from_str(input)?.adjust_scale()))
.is_equal_to(expected.to_string());
}
Ok(())
}
#[test]
fn test_display() -> Result<(), Box<dyn std::error::Error>> {
let cases = vec![
("1k", "1.0k"),
("10k", "10.0k"),
("100k", "100.0k"),
("999k", "999.0k"),
("1000k", "1000.0k"),
("1999k", "1999.0k"),
("1Ki", "1.0Ki"),
("10Ki", "10.0Ki"),
("100Ki", "100.0Ki"),
("1000Ki", "1000.0Ki"),
("1024Ki", "1024.0Ki"),
("25641877504", "25641877504.0"),
("1000m", "1000.0m"),
("100m", "100.0m"),
("1m", "1.0m"),
("1000000n", "1000000.0n"),
// lowest precision is m, under 1m value is trunked
("1u", "0.0u"),
("1μ", "0.0μ"),
("1n", "0.0n"),
("999999n", "0.0n"),
];
for input in cases {
assert_that!(format!("{}", &Qty::from_str(input.0)?)).is_equal_to(input.1.to_string());
assert_that!(format!("{}", &Qty::from_str(input.1)?)).is_equal_to(input.1.to_string());
}
Ok(())
}
#[test]
fn test_f64_from_scale() -> Result<(), Box<dyn std::error::Error>> {
assert_that!(f64::from(&Scale::from_str("m")?)).is_close_to(0.001, 0.00001);
Ok(())
}
#[test]
fn test_f64_from_qty() -> Result<(), Box<dyn std::error::Error>> {
assert_that!(f64::from(&Qty::from_str("20m")?)).is_close_to(0.020, 0.00001);
assert_that!(f64::from(&Qty::from_str("300m")?)).is_close_to(0.300, 0.00001);
assert_that!(f64::from(&Qty::from_str("1000m")?)).is_close_to(1.000, 0.00001);
assert_that!(f64::from(&Qty::from_str("+1000m")?)).is_close_to(1.000, 0.00001);
assert_that!(f64::from(&Qty::from_str("-1000m")?)).is_close_to(-1.000, 0.00001);
assert_that!(f64::from(&Qty::from_str("3145728e3")?)).is_close_to(3145728000.000, 0.00001);
Ok(())
}
#[test]
fn test_add() -> Result<(), Box<dyn std::error::Error>> {
assert_that!(
&(Qty::from_str("1")?
+ Qty::from_str("300m")?
+ Qty::from_str("300m")?
+ Qty::from_str("300m")?
+ Qty::from_str("300m")?)
)
.is_equal_to(&Qty::from_str("2200m")?);
assert_that!(&(Qty::default() + Qty::from_str("300m")?))
.is_equal_to(Qty::from_str("300m")?);
assert_that!(&(Qty::default() + Qty::from_str("16Gi")?))
.is_equal_to(Qty::from_str("16Gi")?);
assert_that!(&(Qty::from_str("20m")? + Qty::from_str("300m")?))
.is_equal_to(Qty::from_str("320m")?);
assert_that!(&(Qty::from_str("1k")? + Qty::from_str("300m")?))
.is_equal_to(&Qty::from_str("1000300m")?);
assert_that!(&(Qty::from_str("1Ki")? + Qty::from_str("1Ki")?))
.is_equal_to(&Qty::from_str("2Ki")?);
assert_that!(&(Qty::from_str("1Ki")? + Qty::from_str("1k")?)).is_equal_to(&Qty {
value: 2024000,
scale: Scale {
label: "k",
base: 10,
pow: 3,
},
});
Ok(())
}
}
| true
|
9e5a77a6d8679c749b261f7437800d434bcd30e6
|
Rust
|
ggez/ggez
|
/examples/blend_modes.rs
|
UTF-8
| 6,671
| 3.0625
| 3
|
[
"MIT"
] |
permissive
|
//! An example drawing semi-transparent venn diagrams
//! using different blend modes.
//!
//! It also shows why you'd usually want to draw canvases
//! using the `Premultiplied` blend mode
//! (for more explanations on this see https://github.com/ggez/ggez/issues/694#issuecomment-853724926)
use ggez::context::HasMut;
use ggez::event::{self, EventHandler};
use ggez::glam::Vec2;
use ggez::graphics::{self, BlendMode, Color, DrawParam, GraphicsContext};
use ggez::input::keyboard::KeyInput;
use ggez::{Context, GameResult};
use std::env;
use std::path;
struct MainState {
layer: graphics::ScreenImage,
layer_blend: BlendMode,
circle: graphics::Mesh,
}
impl MainState {
fn new(ctx: &mut Context) -> GameResult<MainState> {
let layer = graphics::ScreenImage::new(ctx, None, 1., 1., 1);
let circle = graphics::Mesh::new_circle(
ctx,
graphics::DrawMode::fill(),
Vec2::new(0.0, 0.0),
45.0,
0.5,
Color::WHITE,
)?;
let s = Self {
layer,
layer_blend: BlendMode::PREMULTIPLIED,
circle,
};
Ok(s)
}
fn draw_venn(
&self,
_gfx: &mut impl HasMut<GraphicsContext>,
canvas: &mut graphics::Canvas,
pos: Vec2,
name: &str,
) -> GameResult {
const TRI_COLORS: [Color; 3] = [
Color::new(0.8, 0., 0., 0.5),
Color::new(0., 0.8, 0., 0.5),
Color::new(0., 0., 0.8, 0.5),
];
const OFFSET: f32 = 24.;
const REL_POSITIONS: [[f32; 2]; 3] = [
[-OFFSET, -OFFSET / 2.],
[OFFSET, -OFFSET / 2.],
[0., OFFSET],
];
// draw the diagram
for i in 0..3 {
canvas.draw(
&self.circle,
graphics::DrawParam::new()
.dest(pos + Vec2::from(REL_POSITIONS[i]))
.color(TRI_COLORS[i]),
);
}
// draw text naming the blend mode
canvas.set_blend_mode(BlendMode::ALPHA);
let mut text = graphics::Text::new(name);
text.set_scale(20.);
let text_offset = Vec2::new(0., -100.);
canvas.draw(
&text,
graphics::DrawParam::from(pos + text_offset)
.offset([0.5, 0.0])
.color(Color::WHITE),
);
Ok(())
}
fn draw_venn_diagrams(
&mut self,
ctx: &mut Context,
(w, h): (f32, f32),
canvas: &mut graphics::Canvas,
) -> GameResult {
let y = h / 4.;
const MODE_COUNT: usize = 8;
let x_step = w / (MODE_COUNT + 1) as f32;
// draw with Alpha
canvas.set_blend_mode(BlendMode::ALPHA);
self.draw_venn(ctx, canvas, [x_step, y].into(), "Alpha")?;
// draw with Add
canvas.set_blend_mode(BlendMode::ADD);
self.draw_venn(ctx, canvas, [x_step * 2., y].into(), "Add")?;
// draw with Sub
canvas.set_blend_mode(BlendMode::SUBTRACT);
self.draw_venn(ctx, canvas, [x_step * 3., y].into(), "Subtract")?;
// draw with Multiply
canvas.set_blend_mode(BlendMode::MULTIPLY);
self.draw_venn(ctx, canvas, [x_step * 4., y].into(), "Multiply")?;
// draw with Invert
canvas.set_blend_mode(BlendMode::INVERT);
self.draw_venn(ctx, canvas, [x_step * 5., y].into(), "Invert")?;
// draw with Replace
canvas.set_blend_mode(BlendMode::REPLACE);
self.draw_venn(ctx, canvas, [x_step * 6., y].into(), "Replace")?;
// draw with Darken
canvas.set_blend_mode(BlendMode::DARKEN);
self.draw_venn(ctx, canvas, [x_step * 7., y].into(), "Darken")?;
// draw with Lighten
canvas.set_blend_mode(BlendMode::LIGHTEN);
self.draw_venn(ctx, canvas, [x_step * 8., y].into(), "Lighten")?;
Ok(())
}
}
impl EventHandler for MainState {
fn update(&mut self, _: &mut Context) -> GameResult {
Ok(())
}
fn draw(&mut self, ctx: &mut Context) -> GameResult {
let (w, h) = ctx.gfx.drawable_size();
// draw everything onto self.layer
let layer = self.layer.image(ctx);
let mut canvas =
graphics::Canvas::from_image(ctx, layer.clone(), Color::new(0., 0., 0., 0.));
self.draw_venn_diagrams(ctx, (w, h), &mut canvas)?;
canvas.finish(ctx)?;
// now start drawing to the screen
let mut canvas = graphics::Canvas::from_frame(ctx, Color::new(0.3, 0.3, 0.3, 1.0));
// draw everything directly onto the screen once
self.draw_venn_diagrams(ctx, (w, h), &mut canvas)?;
// draw layer onto the screen
canvas.set_blend_mode(self.layer_blend);
canvas.draw(
&layer,
DrawParam::default().dest(mint::Point2 { x: 0., y: h / 2. }),
);
// draw text pointing out which is which
let y = h / 2.;
canvas.draw(
graphics::Text::new("drawn directly:").set_scale(20.),
graphics::DrawParam::from([8., 4.]).color(Color::WHITE),
);
canvas.draw(
graphics::Text::new("drawn onto a (transparent black) canvas:").set_scale(20.),
graphics::DrawParam::from([8., 4. + y]).color(Color::WHITE),
);
canvas.finish(ctx)?;
Ok(())
}
fn key_down_event(&mut self, _ctx: &mut Context, _input: KeyInput, repeat: bool) -> GameResult {
if !repeat {
if self.layer_blend == BlendMode::ALPHA {
self.layer_blend = BlendMode::PREMULTIPLIED;
println!("Drawing canvas with premultiplied alpha mode");
} else {
self.layer_blend = BlendMode::ALPHA;
println!("Drawing canvas with default alpha mode");
}
}
Ok(())
}
}
pub fn main() -> GameResult {
let resource_dir = if let Ok(manifest_dir) = env::var("CARGO_MANIFEST_DIR") {
let mut path = path::PathBuf::from(manifest_dir);
path.push("resources");
path
} else {
path::PathBuf::from("./resources")
};
let cb = ggez::ContextBuilder::new("blend_modes", "ggez")
.window_mode(ggez::conf::WindowMode::default().dimensions(1400., 600.))
.window_setup(
ggez::conf::WindowSetup::default()
.title("blend modes -- Press a button to change the canvas blend mode!"),
)
.add_resource_path(resource_dir);
let (mut ctx, event_loop) = cb.build()?;
let state = MainState::new(&mut ctx)?;
event::run(ctx, event_loop, state)
}
| true
|
d67a945ef10739e44adc67a92bba19c068e4de98
|
Rust
|
AntonGepting/tmux-interface-rs
|
/src/commands/status_line/display_message.rs
|
UTF-8
| 5,888
| 2.75
| 3
|
[
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
use crate::commands::constants::*;
use crate::TmuxCommand;
use std::borrow::Cow;
/// Structure for displaying a message
///
/// # Manual
///
/// tmux ^3.2:
/// ```text
/// display-message [-aINpv] [-c target-client] [-d delay] [-t target-pane] [message]
/// (alias: display)
/// ```
///
/// tmux ^3.0:
/// ```text
/// display-message [-aIpv] [-c target-client] [-t target-pane] [message]
/// (alias: display)
/// ```
///
/// tmux ^2.9a:
/// ```text
/// display-message [-apv] [-c target-client] [-t target-pane] [message]
/// (alias: display)
/// ```
///
/// tmux ^1.5:
/// ```text
/// display-message [-p] [-c target-client] [-t target-pane] [message]
/// (alias: display)
/// ```
///
/// tmux ^1.2:
/// ```text
/// display-message [-p] [-t target-client] [message]
/// (alias: display)
/// ```
///
/// tmux ^1.0:
/// ```text
/// display-message [-t target-client] [message]
/// (alias: display)
/// ```
#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Default)]
pub struct DisplayMessage<'a> {
/// `[-a]` - list the format variables and their values
#[cfg(feature = "tmux_2_9a")]
pub list_format_vars: bool,
/// `[-I]` - forward any input read from stdin to the empty pane given by target-pane
#[cfg(feature = "tmux_3_0")]
pub forward_stdin: bool,
/// `[-N]` - ignores key presses and closes only after the delay expires
#[cfg(feature = "tmux_3_2")]
pub ignore_keys: bool,
/// `[-p]` - the output is printed to stdout
#[cfg(feature = "tmux_2_9a")]
pub print: bool,
/// `[-v]` - print verbose logging as the format is parsed
#[cfg(feature = "tmux_2_9a")]
pub verbose: bool,
/// `[-c target-client]` - target-client
#[cfg(feature = "tmux_1_0")]
pub target_client: Option<Cow<'a, str>>,
/// `[-d delay]` - delay
#[cfg(feature = "tmux_3_2")]
pub delay: Option<usize>,
/// `[-t target-pane]` - target-pane
#[cfg(feature = "tmux_1_5")]
pub target_pane: Option<Cow<'a, str>>,
/// `[message]` - message
#[cfg(feature = "tmux_1_0")]
pub message: Option<Cow<'a, str>>,
}
impl<'a> DisplayMessage<'a> {
pub fn new() -> Self {
Default::default()
}
/// `[-a]` - list the format variables and their values
#[cfg(feature = "tmux_2_9a")]
pub fn list_format_vars(mut self) -> Self {
self.list_format_vars = true;
self
}
/// `[-I]` - forward any input read from stdin to the empty pane given by target-pane
#[cfg(feature = "tmux_3_0")]
pub fn forward_stdin(mut self) -> Self {
self.forward_stdin = true;
self
}
/// `[-N]` - ignores key presses and closes only after the delay expires
#[cfg(feature = "tmux_3_2")]
pub fn ignore_keys(mut self) -> Self {
self.ignore_keys = true;
self
}
/// `[-p]` - the output is printed to stdout
#[cfg(feature = "tmux_2_9a")]
pub fn print(mut self) -> Self {
self.print = true;
self
}
/// `[-v]` - print verbose logging as the format is parsed
#[cfg(feature = "tmux_2_9a")]
pub fn verbose(mut self) -> Self {
self.verbose = true;
self
}
/// `[-c target-client]` - target-client
#[cfg(feature = "tmux_1_0")]
pub fn target_client<S: Into<Cow<'a, str>>>(mut self, target_client: S) -> Self {
self.target_client = Some(target_client.into());
self
}
/// `[-d delay]` - delay
#[cfg(feature = "tmux_3_2")]
pub fn delay(mut self, delay: usize) -> Self {
self.delay = Some(delay);
self
}
/// `[-t target-pane]` - target-pane
#[cfg(feature = "tmux_1_5")]
pub fn target_pane<S: Into<Cow<'a, str>>>(mut self, target_pane: S) -> Self {
self.target_pane = Some(target_pane.into());
self
}
/// `[message]` - message
#[cfg(feature = "tmux_1_0")]
pub fn message<S: Into<Cow<'a, str>>>(mut self, message: S) -> Self {
self.message = Some(message.into());
self
}
pub fn build(self) -> TmuxCommand<'a> {
let mut cmd = TmuxCommand::new();
cmd.name(DISPLAY_MESSAGE);
// `[-a]` - list the format variables and their values
#[cfg(feature = "tmux_2_9a")]
if self.list_format_vars {
cmd.push_flag(A_LOWERCASE_KEY);
}
// `[-I]` - forward any input read from stdin to the empty pane given by target-pane
#[cfg(feature = "tmux_3_0")]
if self.forward_stdin {
cmd.push_flag(I_UPPERCASE_KEY);
}
// `[-N]` - ignores key presses and closes only after the delay expires
#[cfg(feature = "tmux_3_2")]
if self.ignore_keys {
cmd.push_flag(N_UPPERCASE_KEY);
}
// `[-p]` - the output is printed to stdout
#[cfg(feature = "tmux_2_9a")]
if self.print {
cmd.push_flag(P_LOWERCASE_KEY);
}
// `[-v]` - print verbose logging as the format is parsed
#[cfg(feature = "tmux_2_9a")]
if self.verbose {
cmd.push_flag(V_LOWERCASE_KEY);
}
// `[-c target-client]` - target-client
#[cfg(feature = "tmux_1_0")]
if let Some(target_client) = self.target_client {
cmd.push_option(C_LOWERCASE_KEY, target_client);
}
// `[-d delay]` - delay
#[cfg(feature = "tmux_3_2")]
if let Some(delay) = self.delay {
cmd.push_option(D_LOWERCASE_KEY, delay.to_string());
}
// `[-t target-pane]` - target-pane
#[cfg(feature = "tmux_1_5")]
if let Some(target_pane) = self.target_pane {
cmd.push_option(T_LOWERCASE_KEY, target_pane);
}
// `[message]` - message
#[cfg(feature = "tmux_1_0")]
if let Some(message) = self.message {
cmd.push_param(message);
}
cmd
}
}
| true
|
cb6fc63d42c28ee501db31e6bd928dfbecebcc13
|
Rust
|
Mr-Llama-s-Wonderful-Soundboard/mlws_lib
|
/src/sound/source.rs
|
UTF-8
| 2,432
| 2.875
| 3
|
[] |
no_license
|
// Initial version from Rodio APACHE LICENSE 2.0
//! Sources of sound and various filters.
use std::time::Duration;
use super::sample::Sample;
///
pub trait Source: Iterator
where
Self::Item: Sample,
{
/// Returns the number of samples before the current frame ends. `None` means "infinite" or
/// "until the sound ends".
/// Should never return 0 unless there's no more data.
///
/// After the engine has finished reading the specified number of samples, it will check
/// whether the value of `channels()` and/or `sample_rate()` have changed.
fn current_frame_len(&self) -> Option<usize>;
/// Returns the number of channels. Channels are always interleaved.
fn channels(&self) -> u16;
/// Returns the rate at which the source should be played. In number of samples per second.
fn sample_rate(&self) -> u32;
/// Returns the total duration of this source, if known.
///
/// `None` indicates at the same time "infinite" or "unknown".
fn total_duration(&self) -> Option<Duration>;
}
impl<S> Source for Box<dyn Source<Item = S>>
where
S: Sample,
{
#[inline]
fn current_frame_len(&self) -> Option<usize> {
(**self).current_frame_len()
}
#[inline]
fn channels(&self) -> u16 {
(**self).channels()
}
#[inline]
fn sample_rate(&self) -> u32 {
(**self).sample_rate()
}
#[inline]
fn total_duration(&self) -> Option<Duration> {
(**self).total_duration()
}
}
impl<S> Source for Box<dyn Source<Item = S> + Send>
where
S: Sample,
{
#[inline]
fn current_frame_len(&self) -> Option<usize> {
(**self).current_frame_len()
}
#[inline]
fn channels(&self) -> u16 {
(**self).channels()
}
#[inline]
fn sample_rate(&self) -> u32 {
(**self).sample_rate()
}
#[inline]
fn total_duration(&self) -> Option<Duration> {
(**self).total_duration()
}
}
impl<S> Source for Box<dyn Source<Item = S> + Send + Sync>
where
S: Sample,
{
#[inline]
fn current_frame_len(&self) -> Option<usize> {
(**self).current_frame_len()
}
#[inline]
fn channels(&self) -> u16 {
(**self).channels()
}
#[inline]
fn sample_rate(&self) -> u32 {
(**self).sample_rate()
}
#[inline]
fn total_duration(&self) -> Option<Duration> {
(**self).total_duration()
}
}
| true
|
b7fdfaa6aadbfd93dd701ad2cd9b7c3be2abcf58
|
Rust
|
jkatajamki/rust-stuff
|
/chapter-three/fib.rs
|
UTF-8
| 449
| 3.578125
| 4
|
[] |
no_license
|
const NUM_OF_ELEMENTS: usize = 20;
fn main() {
// calculate fib sequence for NUM_OF_ELEMENTS elements
let mut fib_seq = vec![1, 1];
for x in 2..NUM_OF_ELEMENTS {
let prev_i = fib_seq[x - 1];
let prev_of_prev_i = fib_seq[x - 2];
let next_num = prev_i + prev_of_prev_i;
fib_seq.push(next_num);
}
println!("Fib sequence for {} elements calculated", NUM_OF_ELEMENTS);
for n in fib_seq.iter() {
print!("{} ", n);
}
}
| true
|
7efb98add9e09ff228483fb9b90240a7e2dd751d
|
Rust
|
etalab/transport-validator
|
/src/validators/fare_attributes.rs
|
UTF-8
| 3,416
| 2.96875
| 3
|
[
"MIT"
] |
permissive
|
use crate::issues::*;
pub fn validate(gtfs: >fs_structures::Gtfs) -> Vec<Issue> {
let missing_price = gtfs
.fare_attributes
.values()
.filter(empty_price)
.map(|fare_attributes| make_issue(fare_attributes, IssueType::MissingPrice));
let invalid_currency = gtfs
.fare_attributes
.values()
.filter(invalid_currency)
.map(|fare_attributes| make_issue(fare_attributes, IssueType::InvalidCurrency));
let invalid_transfers = gtfs
.fare_attributes
.values()
.filter(|fare_attributes| !valid_transfers(*fare_attributes))
.map(|fare_attributes| make_issue(fare_attributes, IssueType::InvalidTransfers));
let invalid_duration = gtfs
.fare_attributes
.values()
.filter(|fare_attributes| !valid_duration(*fare_attributes))
.map(|fare_attributes| make_issue(fare_attributes, IssueType::InvalidTransferDuration));
missing_price
.chain(invalid_currency)
.chain(invalid_transfers)
.chain(invalid_duration)
.collect()
}
fn make_issue<T: gtfs_structures::Id>(o: &T, issue_type: IssueType) -> Issue {
Issue::new(Severity::Error, issue_type, o.id()).object_type(gtfs_structures::ObjectType::Fare)
}
fn empty_price(fare_attributes: &>fs_structures::FareAttribute) -> bool {
fare_attributes.price.is_empty()
}
fn invalid_currency(fare_attributes: &>fs_structures::FareAttribute) -> bool {
iso4217::alpha3(&fare_attributes.currency).is_none()
}
fn valid_transfers(fare_attributes: >fs_structures::FareAttribute) -> bool {
!matches!(
fare_attributes.transfers,
gtfs_structures::Transfers::Other(_)
)
}
fn valid_duration(fare_attributes: >fs_structures::FareAttribute) -> bool {
fare_attributes.transfer_duration.is_none() || fare_attributes.transfer_duration >= Some(0)
}
#[test]
fn test_missing_price() {
let gtfs = gtfs_structures::Gtfs::new("test_data/fare_attributes").unwrap();
let issues = validate(>fs);
let missing_price_issue: Vec<_> = issues
.iter()
.filter(|issue| issue.issue_type == IssueType::MissingPrice)
.collect();
assert_eq!(1, missing_price_issue.len());
assert_eq!("50", missing_price_issue[0].object_id);
assert_eq!(IssueType::MissingPrice, missing_price_issue[0].issue_type);
}
#[test]
fn test_valid_currency() {
let gtfs = gtfs_structures::Gtfs::new("test_data/fare_attributes").unwrap();
let issues = validate(>fs);
let invalid_currency_issue: Vec<_> = issues
.iter()
.filter(|issue| issue.issue_type == IssueType::InvalidCurrency)
.collect();
assert_eq!(1, invalid_currency_issue.len());
assert_eq!("61", invalid_currency_issue[0].object_id);
assert_eq!(
IssueType::InvalidCurrency,
invalid_currency_issue[0].issue_type
);
}
#[test]
fn test_valid_transfers() {
let gtfs = gtfs_structures::Gtfs::new("test_data/fare_attributes").unwrap();
let issues = validate(>fs);
let invalid_transfers_issue: Vec<_> = issues
.iter()
.filter(|issue| issue.issue_type == IssueType::InvalidTransfers)
.collect();
assert_eq!(1, invalid_transfers_issue.len());
assert_eq!("61", invalid_transfers_issue[0].object_id);
assert_eq!(
IssueType::InvalidTransfers,
invalid_transfers_issue[0].issue_type
);
}
| true
|
9ef7ed4e89e50707094772fb35e5b912635bb2e9
|
Rust
|
study-core/my-rust-coding-studing
|
/src/typeTest.rs
|
UTF-8
| 281
| 2.828125
| 3
|
[] |
no_license
|
fn main() {
let a : usize = 12;
println!(a);
let u = User{
username: "ss",
email: "sd",
sign_in_count: 45,
active: false,
};
}
struct User {
username: String,
email: String,
sign_in_count: u64,
active: bool,
}
| true
|
3a09af30291ad81f13bc65e61a86878fc1501cb1
|
Rust
|
OzieGamma/RustWorkShopEPFL12Nov2014
|
/hello_world/src/main.rs
|
UTF-8
| 52
| 2.796875
| 3
|
[] |
no_license
|
fn main() {
let x = 5i;
let y = 6i;
x + y;
}
| true
|
3f8cf02e7f3216e3561e48fa82f25c86b8bc4b74
|
Rust
|
doytsujin/yew
|
/packages/yew-macro/src/function_component.rs
|
UTF-8
| 14,403
| 2.6875
| 3
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
use proc_macro2::{Span, TokenStream};
use quote::{quote, ToTokens};
use syn::parse::{Parse, ParseStream};
use syn::punctuated::Punctuated;
use syn::token::{Comma, Fn};
use syn::{
parse_quote, parse_quote_spanned, visit_mut, Attribute, Block, FnArg, Generics, Ident, Item,
ItemFn, LitStr, ReturnType, Type, Visibility,
};
use crate::hook::BodyRewriter;
#[derive(Clone)]
pub struct FunctionComponent {
block: Box<Block>,
props_type: Box<Type>,
arg: FnArg,
generics: Generics,
vis: Visibility,
attrs: Vec<Attribute>,
name: Ident,
return_type: Box<Type>,
fn_token: Fn,
component_name: Option<Ident>,
}
impl Parse for FunctionComponent {
fn parse(input: ParseStream) -> syn::Result<Self> {
let parsed: Item = input.parse()?;
let func = match parsed {
Item::Fn(m) => m,
item => {
return Err(syn::Error::new_spanned(
item,
"`function_component` attribute can only be applied to functions",
))
}
};
let ItemFn {
attrs,
vis,
sig,
block,
} = func;
if sig.generics.lifetimes().next().is_some() {
return Err(syn::Error::new_spanned(
sig.generics,
"function components can't have generic lifetime parameters",
));
}
if sig.asyncness.is_some() {
return Err(syn::Error::new_spanned(
sig.asyncness,
"function components can't be async",
));
}
if sig.constness.is_some() {
return Err(syn::Error::new_spanned(
sig.constness,
"const functions can't be function components",
));
}
if sig.abi.is_some() {
return Err(syn::Error::new_spanned(
sig.abi,
"extern functions can't be function components",
));
}
let return_type = match sig.output {
ReturnType::Default => {
return Err(syn::Error::new_spanned(
sig,
"function components must return `yew::Html` or `yew::HtmlResult`",
))
}
ReturnType::Type(_, ty) => ty,
};
let mut inputs = sig.inputs.into_iter();
let arg = inputs
.next()
.unwrap_or_else(|| syn::parse_quote! { _: &() });
let ty = match &arg {
FnArg::Typed(arg) => match &*arg.ty {
Type::Reference(ty) => {
if ty.lifetime.is_some() {
return Err(syn::Error::new_spanned(
&ty.lifetime,
"reference must not have a lifetime",
));
}
if ty.mutability.is_some() {
return Err(syn::Error::new_spanned(
ty.mutability,
"reference must not be mutable",
));
}
ty.elem.clone()
}
ty => {
let msg = format!(
"expected a reference to a `Properties` type (try: `&{}`)",
ty.to_token_stream()
);
return Err(syn::Error::new_spanned(ty, msg));
}
},
FnArg::Receiver(_) => {
return Err(syn::Error::new_spanned(
arg,
"function components can't accept a receiver",
));
}
};
// Checking after param parsing may make it a little inefficient
// but that's a requirement for better error messages in case of receivers
// `>0` because first one is already consumed.
if inputs.len() > 0 {
let params: TokenStream = inputs.map(|it| it.to_token_stream()).collect();
return Err(syn::Error::new_spanned(
params,
"function components can accept at most one parameter for the props",
));
}
Ok(Self {
props_type: ty,
block,
arg,
generics: sig.generics,
vis,
attrs,
name: sig.ident,
return_type,
fn_token: sig.fn_token,
component_name: None,
})
}
}
impl FunctionComponent {
/// Filters attributes that should be copied to component definition.
fn filter_attrs_for_component_struct(&self) -> Vec<Attribute> {
self.attrs
.iter()
.filter_map(|m| {
m.path()
.get_ident()
.and_then(|ident| match ident.to_string().as_str() {
"doc" | "allow" => Some(m.clone()),
_ => None,
})
})
.collect()
}
/// Filters attributes that should be copied to the component impl block.
fn filter_attrs_for_component_impl(&self) -> Vec<Attribute> {
self.attrs
.iter()
.filter_map(|m| {
m.path()
.get_ident()
.and_then(|ident| match ident.to_string().as_str() {
"allow" => Some(m.clone()),
_ => None,
})
})
.collect()
}
fn phantom_generics(&self) -> Punctuated<Ident, Comma> {
self.generics
.type_params()
.map(|ty_param| ty_param.ident.clone()) // create a new Punctuated sequence without any type bounds
.collect::<Punctuated<_, Comma>>()
}
fn merge_component_name(&mut self, name: FunctionComponentName) -> syn::Result<()> {
if let Some(ref m) = name.component_name {
if m == &self.name {
return Err(syn::Error::new_spanned(
m,
"the component must not have the same name as the function",
));
}
}
self.component_name = name.component_name;
Ok(())
}
fn inner_fn_ident(&self) -> Ident {
if self.component_name.is_some() {
self.name.clone()
} else {
Ident::new("inner", Span::mixed_site())
}
}
fn component_name(&self) -> Ident {
self.component_name
.clone()
.unwrap_or_else(|| self.name.clone())
}
// We need to cast 'static on all generics for base component.
fn create_static_component_generics(&self) -> Generics {
let mut generics = self.generics.clone();
let where_clause = generics.make_where_clause();
for ty_generic in self.generics.type_params() {
let ident = &ty_generic.ident;
let bound = parse_quote_spanned! { ident.span() =>
#ident: 'static
};
where_clause.predicates.push(bound);
}
where_clause.predicates.push(parse_quote! { Self: 'static });
generics
}
/// Prints the impl fn.
fn print_inner_fn(&self) -> TokenStream {
let name = self.inner_fn_ident();
let FunctionComponent {
ref fn_token,
ref attrs,
ref block,
ref return_type,
ref generics,
ref arg,
..
} = self;
let mut block = *block.clone();
let (impl_generics, _ty_generics, where_clause) = generics.split_for_impl();
// We use _ctx here so if the component does not use any hooks, the usused_vars lint will
// not be triggered.
let ctx_ident = Ident::new("_ctx", Span::mixed_site());
let mut body_rewriter = BodyRewriter::new(ctx_ident.clone());
visit_mut::visit_block_mut(&mut body_rewriter, &mut block);
quote! {
#(#attrs)*
#fn_token #name #impl_generics (#ctx_ident: &mut ::yew::functional::HookContext, #arg) -> #return_type
#where_clause
{
#block
}
}
}
fn print_base_component_impl(&self) -> TokenStream {
let component_name = self.component_name();
let props_type = &self.props_type;
let static_comp_generics = self.create_static_component_generics();
let (impl_generics, ty_generics, where_clause) = static_comp_generics.split_for_impl();
// TODO: replace with blanket implementation when specialisation becomes stable.
quote! {
#[automatically_derived]
impl #impl_generics ::yew::html::BaseComponent for #component_name #ty_generics #where_clause {
type Message = ();
type Properties = #props_type;
#[inline]
fn create(ctx: &::yew::html::Context<Self>) -> Self {
Self {
_marker: ::std::marker::PhantomData,
function_component: ::yew::functional::FunctionComponent::<Self>::new(ctx),
}
}
#[inline]
fn update(&mut self, _ctx: &::yew::html::Context<Self>, _msg: Self::Message) -> ::std::primitive::bool {
true
}
#[inline]
fn changed(&mut self, _ctx: &::yew::html::Context<Self>, _old_props: &Self::Properties) -> ::std::primitive::bool {
true
}
#[inline]
fn view(&self, ctx: &::yew::html::Context<Self>) -> ::yew::html::HtmlResult {
::yew::functional::FunctionComponent::<Self>::render(
&self.function_component,
::yew::html::Context::<Self>::props(ctx)
)
}
#[inline]
fn rendered(&mut self, _ctx: &::yew::html::Context<Self>, _first_render: ::std::primitive::bool) {
::yew::functional::FunctionComponent::<Self>::rendered(&self.function_component)
}
#[inline]
fn destroy(&mut self, _ctx: &::yew::html::Context<Self>) {
::yew::functional::FunctionComponent::<Self>::destroy(&self.function_component)
}
#[inline]
fn prepare_state(&self) -> ::std::option::Option<::std::string::String> {
::yew::functional::FunctionComponent::<Self>::prepare_state(&self.function_component)
}
}
}
}
fn print_debug_impl(&self) -> TokenStream {
let component_name = self.component_name();
let (impl_generics, ty_generics, where_clause) = self.generics.split_for_impl();
let component_name_lit = LitStr::new(&format!("{component_name}<_>"), Span::mixed_site());
quote! {
#[automatically_derived]
impl #impl_generics ::std::fmt::Debug for #component_name #ty_generics #where_clause {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::std::write!(f, #component_name_lit)
}
}
}
}
fn print_fn_provider_impl(&self) -> TokenStream {
let func = self.print_inner_fn();
let component_impl_attrs = self.filter_attrs_for_component_impl();
let component_name = self.component_name();
let fn_name = self.inner_fn_ident();
let (impl_generics, ty_generics, where_clause) = self.generics.split_for_impl();
let props_type = &self.props_type;
let fn_generics = ty_generics.as_turbofish();
let component_props = Ident::new("props", Span::mixed_site());
let ctx_ident = Ident::new("ctx", Span::mixed_site());
quote! {
// we cannot disable any lints here because it will be applied to the function body
// as well.
#(#component_impl_attrs)*
impl #impl_generics ::yew::functional::FunctionProvider for #component_name #ty_generics #where_clause {
type Properties = #props_type;
fn run(#ctx_ident: &mut ::yew::functional::HookContext, #component_props: &Self::Properties) -> ::yew::html::HtmlResult {
#func
::yew::html::IntoHtmlResult::into_html_result(#fn_name #fn_generics (#ctx_ident, #component_props))
}
}
}
}
fn print_struct_def(&self) -> TokenStream {
let component_attrs = self.filter_attrs_for_component_struct();
let component_name = self.component_name();
let generics = &self.generics;
let (_impl_generics, _ty_generics, where_clause) = self.generics.split_for_impl();
let phantom_generics = self.phantom_generics();
let vis = &self.vis;
quote! {
#(#component_attrs)*
#[allow(unused_parens)]
#vis struct #component_name #generics #where_clause {
_marker: ::std::marker::PhantomData<(#phantom_generics)>,
function_component: ::yew::functional::FunctionComponent<Self>,
}
}
}
}
pub struct FunctionComponentName {
component_name: Option<Ident>,
}
impl Parse for FunctionComponentName {
fn parse(input: ParseStream) -> syn::Result<Self> {
if input.is_empty() {
return Ok(Self {
component_name: None,
});
}
let component_name = input.parse()?;
Ok(Self {
component_name: Some(component_name),
})
}
}
pub fn function_component_impl(
name: FunctionComponentName,
mut component: FunctionComponent,
) -> syn::Result<TokenStream> {
component.merge_component_name(name)?;
let base_comp_impl = component.print_base_component_impl();
let debug_impl = component.print_debug_impl();
let provider_fn_impl = component.print_fn_provider_impl();
let struct_def = component.print_struct_def();
let quoted = quote! {
#struct_def
#provider_fn_impl
#debug_impl
#base_comp_impl
};
Ok(quoted)
}
| true
|
bf68e16f98d72b61842e620fad8aa23fee743487
|
Rust
|
lelandbatey/rust_buddhabrot
|
/trajectory-gen/src/main.rs
|
UTF-8
| 6,544
| 3.171875
| 3
|
[] |
no_license
|
extern crate argparse;
extern crate rand;
extern crate serde;
extern crate serde_json;
extern crate time;
extern crate buddhabrot;
use std::collections::HashMap;
use std::sync::mpsc::{channel, Sender};
use std::thread;
use std::time::Duration;
use argparse::{ArgumentParser, Store};
use buddhabrot::buddha::{Complex, Trajectory};
use rand::Rng;
fn main() {
let mut thread_count = 3;
let mut trajectory_count = 1000;
let mut max_iterations: i64 = 1024;
let mut min_iterations: i64 = 64;
{
let mut argparse = ArgumentParser::new();
argparse.refer(&mut thread_count).add_option(
&["-t", "--threads"],
Store,
"Number of threads to use (default 3)",
);
argparse.refer(&mut trajectory_count).add_option(
&["--trajectory-count"],
Store,
"Absolute number of trajectories to find",
);
argparse.refer(&mut max_iterations).add_option(
&["--max-iters"],
Store,
"Maximum number of allowed iterations.",
);
argparse.refer(&mut min_iterations).add_option(
&["--min-iters"],
Store,
"Minimum required number of iterations.",
);
argparse.parse_args_or_exit();
}
coordinate_search(
thread_count,
trajectory_count,
max_iterations,
min_iterations,
)
}
// Function to coordinate other functions
// Function to search for candidates and write them to a channel
// Function to recieve from the channel and write them to the output of choice
fn coordinate_search(
thread_count: usize,
trajectory_count: usize,
max_iterations: i64,
min_iterations: i64,
) {
// Choose an output file based on the current time. This file name is a good candidate for a
// user-providable CLI parameter in the future.
//let filename = time::strftime("trajectory_candidates_%Y-%m-%d__%H-%M-%S.json", &time::now()).unwrap();
// Set up the variables necessary for candidate searching
//
// Calculate the number of trajectories each thread should search for. We do integer division
// to find the number of trajectories each thread should search for, so the total count may be
// less than the count specified on the CLI
let per_thread_traj_count: usize = trajectory_count / thread_count;
let total_trajectory_count: usize = per_thread_traj_count * thread_count;
let thread_trajectories: Vec<usize> =
(0..thread_count).map(|_| per_thread_traj_count).collect();
// Start the threads that do the searching
let (sender, reciever) = channel();
for trajs in thread_trajectories.iter() {
let child_sender = sender.clone();
let t = trajs.clone();
let _ = thread::spawn(move || {
search_and_transmit(t, max_iterations, min_iterations, child_sender);
});
}
// Start the function for recieving and writing to the file
// If the program is failing to find *anything* for long enough, we want it to time out and
// just print what we've got. The timeout is thus based on a bare minimum, 1/4 of a second,
// plus a number of milliseconds equal to 100 * the minimum number of iterations. It's a pretty
// usable heuristic.
let timeout = Duration::from_millis(250 + (100 * min_iterations) as u64);
// Recieve all the trajectories and print them (for now)
for _ in 0..total_trajectory_count {
match reciever.recv_timeout(timeout) {
Ok(trajectory) => {
println!("{}", serde_json::to_string(&trajectory).unwrap());
}
Err(_) => {
println!("\n\nTimed Out!\n\n");
break;
}
}
}
}
// tells us if a point in the complex plane will loop forever by telling us if it's within the main
// cardiod or within the second-order bulb. This returning false doesn't guarantee that there's a
// finite number of loops, as this is just a quick special case to speed things up.
fn will_loop_forever(z: Complex) -> bool {
let x = z.re;
let y = z.im;
let p: f64 = ((x - 0.25).powi(2) + y.powi(2)).sqrt();
if x < (p - (2.0 * p.powi(2)) + 0.25) {
return true;
}
if ((x + 1.0).powi(2) + y.powi(2)) < 0.0625 {
return true;
}
return false;
}
fn search_and_transmit(
trajectory_count: usize,
max_iterations: i64,
min_iterations: i64,
sender: Sender<Trajectory>,
) {
let mut rng = rand::thread_rng();
let mut valid_trajectory_count = 0;
// centerx : hard coded at -0.75
// centery : hard coded at 0
// x span: [-2.5, 1.0]
// y span: [-1.0, 1.0]
//
let (startx, stopx): (f64, f64) = (-2.5, 1.0);
let (starty, stopy): (f64, f64) = (-1.0, 1.0);
let xspan = stopx - startx;
let yspan = stopy - starty;
while valid_trajectory_count < trajectory_count {
let mut escaped = false;
let mut z = Complex::new(0.0, 0.0);
let cn = Complex::new(
startx + rng.gen::<f64>() * xspan,
starty + rng.gen::<f64>() * yspan,
);
let mut trajectory: Trajectory = Trajectory {
init_c: cn,
waypoints: Vec::new(),
length: 0,
};
if will_loop_forever(cn) {
continue;
}
let mut periods = HashMap::new();
for itercount in 0..max_iterations {
trajectory.length = itercount as i64;
if escaped {
break;
}
z = z * z + cn;
if z.norm() > 2.0 {
escaped = true;
}
// Check if we've encountered this point before (useful for avoiding cyclical
// but never ending z's). This bit of math is a fancy way of checking if
// itercount is a power of 2. This algorithm is called "Brent's Algorithm" and
// I originally found it here: https://softologyblog.wordpress.com/2011/06/26/buddhabrot-fractals/
if itercount & (itercount - 1) == 0 {
let k = format!("{:?}", z);
if periods.contains_key(&k) {
break;
}
periods.insert(k, itercount);
}
}
if escaped && !(trajectory.length < min_iterations) {
match sender.send(trajectory) {
Ok(_) => (),
Err(_) => break,
}
valid_trajectory_count += 1;
}
}
}
| true
|
9d964503ad87bc8c5de75bae691163f792cddf4a
|
Rust
|
moisutsu/rsa
|
/src/main.rs
|
UTF-8
| 1,701
| 2.625
| 3
|
[] |
no_license
|
use clap::Clap;
use rsa::Opt;
use std::ops::*;
#[allow(clippy::clippy::many_single_char_names)]
fn main() {
let opt = Opt::parse();
let p = opt.p;
let q = opt.q;
let n = p * q;
let l = (p - 1) * (q - 1);
let m = opt.plaintext;
println!("Plaintext = {}", m);
println!("p = {}, q = {}", p, q);
let e = generate_e(l);
let d = generate_d(l, e);
println!("EncryptionKey = (e: {}, n: {})", e, n);
println!("DecryptionKey = (d: {}, n: {})", d, n);
let c = encode(m, e, n);
println!("Cryptogram = {}", c);
let m = decode(c, d, n);
println!("DecryptedText = {}", m);
}
macro_rules! mod_pow {
($ a : expr , $ n : expr , $ mod : expr ) => {{
let mut ret = 1;
let mut a = $a;
let mut n = $n;
while n > 0 {
if n & 1 == 1 {
ret = ret * a % $mod;
}
a = a * a % $mod;
n >>= 1;
}
ret
}};
}
fn encode(m: u128, e: u128, n: u128) -> u128 {
mod_pow!(m, e, n)
}
fn decode(c: u128, d: u128, n: u128) -> u128 {
mod_pow!(c, d, n)
}
fn generate_d(l: u128, e: u128) -> u128 {
for i in 1.. {
if (i * l + 1) % e == 0 {
return (i * l + 1) / e;
}
}
unreachable!()
}
fn generate_e(l: u128) -> u128 {
for i in 2..l - 1 {
if gcd(i, l) == 1 {
return i;
}
}
unreachable!()
}
fn gcd<T>(a: T, b: T) -> T
where
T: Copy + Default + Ord + Sub<Output = T> + Rem<Output = T>,
{
let (mut a, mut b) = if a < b { (b, a) } else { (a, b) };
let zero = T::default();
while b > zero {
let r = a % b;
a = b;
b = r;
}
a
}
| true
|
d8c749ff262d03c528691ac8895d870b290c58e4
|
Rust
|
xyzith/rust_practice
|
/datatype/src/main.rs
|
UTF-8
| 420
| 3.59375
| 4
|
[] |
no_license
|
fn main() {
// Note that char literals are specified with single quotes, as opposed to string literals, which use double quotes.
let emoji = '😎';
println!("emoji char test {}", emoji);
//tuple
let tup: (i32, f64, u8) = (500, 6.4, 1);
let (_x, y, _z) = tup;
println!("y = {}", y);
println!("y = {}", tup.1);
let a: [u8; 3] = [1,2,3];
let b = [3; 5];
println!("a = {}", a[1]);
println!("a = {}", b[1]);
}
| true
|
4a134a92983fef0cf7dd16d521016a7d73c40c49
|
Rust
|
sgrowe/advent-of-code-2020
|
/src/day_eighteen.rs
|
UTF-8
| 4,416
| 3.4375
| 3
|
[] |
no_license
|
use super::utils::start_day;
use std::iter::Peekable;
use std::str::CharIndices;
pub fn main() {
let input = start_day("eighteen");
println!("Part one: {}", part_one(&input));
println!("Part two: {}", part_two(&input));
println!();
}
fn part_one(input: &str) -> u64 {
input
.lines()
.map(|l| {
let mut tokens = Tokeniser::of(l);
eval_expr(&mut tokens)
})
.sum()
}
fn part_two(input: &str) -> u64 {
input
.lines()
.map(|l| {
let mut tokens = Tokeniser::of(l);
eval_expr_v2(&mut tokens)
})
.sum()
}
fn eval_expr(mut tokens: &mut Tokeniser) -> u64 {
let mut value = get_number(&mut tokens);
while let Some(token) = tokens.next() {
match token {
Token::Add => {
value += get_number(&mut tokens);
}
Token::Multiply => {
value *= get_number(&mut tokens);
}
Token::CloseParen => return value,
_ => panic!(),
}
}
value
}
fn get_number(mut tokens: &mut Tokeniser) -> u64 {
match tokens.next().unwrap() {
Token::Number(a) => a,
Token::OpenParen => eval_expr(&mut tokens),
_ => panic!(),
}
}
fn eval_expr_v2(mut tokens: &mut Tokeniser) -> u64 {
let mut value: u64 = get_number_v2(&mut tokens);
let mut multiply_by = 1;
while let Some(token) = tokens.next() {
match token {
Token::Add => {
value += get_number_v2(&mut tokens);
}
Token::Multiply => {
multiply_by *= value;
value = get_number_v2(&mut tokens);
}
Token::CloseParen => {
return value * multiply_by;
}
_ => panic!(),
}
}
value * multiply_by
}
fn get_number_v2(mut tokens: &mut Tokeniser) -> u64 {
match tokens.next().unwrap() {
Token::Number(a) => a,
Token::OpenParen => eval_expr_v2(&mut tokens),
_ => panic!(),
}
}
#[derive(Debug, Copy, Clone)]
enum Token {
OpenParen,
CloseParen,
Number(u64),
Add,
Multiply,
}
struct Tokeniser<'a> {
text: &'a str,
chars: Peekable<CharIndices<'a>>,
}
impl<'a> Tokeniser<'a> {
pub fn of(line: &'a str) -> Self {
Tokeniser {
text: line,
chars: line.char_indices().peekable(),
}
}
}
impl<'a> Iterator for Tokeniser<'a> {
type Item = Token;
fn next(&mut self) -> Option<Self::Item> {
let (i, char) = self.chars.next()?;
let next = match char {
' ' => self.next()?,
'(' => Token::OpenParen,
')' => Token::CloseParen,
'+' => Token::Add,
'*' => Token::Multiply,
c if c.is_ascii_digit() => {
let start = i;
let mut end = i + 1;
while let Some(&(i, c)) = self.chars.peek() {
if c.is_ascii_digit() {
self.chars.next()?;
end = i;
} else {
break;
}
}
let num = (&self.text[start..end]).parse().unwrap();
Token::Number(num)
}
c => panic!("Unexpected char {}", c),
};
Some(next)
}
}
#[cfg(test)]
mod tests {
use super::*;
use test_case::test_case;
#[test_case("1 + 2 * 3 + 4 * 5 + 6", 71)]
#[test_case("1 + (2 * 3) + (4 * (5 + 6))", 51)]
#[test_case("2 * 3 + (4 * 5)", 26)]
#[test_case("5 + (8 * 3 + 9 + 3 * 4 * 3)", 437)]
#[test_case("5 * 9 * (7 * 3 * 3 + 9 * 3 + (8 + 6 * 4))", 12240)]
#[test_case("((2 + 4 * 9) * (6 + 9 * 8 + 6) + 6) + 2 + 4 * 2", 13632)]
fn sample_input_part_one(expr: &str, expected: u64) {
assert_eq!(part_one(&expr), expected);
}
#[test_case("1 + 2 * 3 + 4 * 5 + 6", 231)]
#[test_case("1 + (2 * 3) + (4 * (5 + 6))", 51)]
#[test_case("2 * 3 + (4 * 5)", 46)]
#[test_case("5 + (8 * 3 + 9 + 3 * 4 * 3)", 1445)]
#[test_case("5 * 9 * (7 * 3 * 3 + 9 * 3 + (8 + 6 * 4))", 669060)]
#[test_case("((2 + 4 * 9) * (6 + 9 * 8 + 6) + 6) + 2 + 4 * 2", 23340)]
fn sample_input_part_two(expr: &str, expected: u64) {
assert_eq!(part_two(&expr), expected);
}
}
| true
|
7df5d58e46ad97344638262d63357c5f87d00e4b
|
Rust
|
chastell/talks
|
/balkan-ruby-2018/code/popcount_rust/src/lib.rs
|
UTF-8
| 1,213
| 3
| 3
|
[
"MIT"
] |
permissive
|
#[macro_use]
extern crate helix;
ruby! {
class Popcount {
def count(int: u64) -> u32 {
int.count_ones()
}
}
class HTML {
def map_escape(input: String) -> String {
input.chars().map(|chr| {
match chr {
'<' => String::from("<"),
'>' => String::from(">"),
'&' => String::from("&"),
'\'' => String::from("'"),
'"' => String::from("""),
_ => chr.to_string(),
}
}).collect()
}
def push_escape(input: String) -> String {
let mut result = String::with_capacity(2 * input.len());
for chr in input.chars() {
match chr {
'<' => result.push_str("<"),
'>' => result.push_str(">"),
'&' => result.push_str("&"),
'\'' => result.push_str("'"),
'"' => result.push_str("""),
_ => result.push(chr),
}
}
result
}
}
}
| true
|
3d520a5426f2f66edd0d918410608d3f403abba2
|
Rust
|
iMplode-nZ/ul-test
|
/src/fs.rs
|
UTF-8
| 1,318
| 2.59375
| 3
|
[] |
no_license
|
use crate::conversions::from_ul_string;
use std::os::raw::c_char;
use std::os::raw::c_longlong;
use ul_sys::*;
#[allow(unused_variables)]
unsafe extern "C" fn file_exists(path: ULString) -> bool {
println!("Finding: {:?}", from_ul_string(path));
false
}
#[allow(unused_variables)]
unsafe extern "C" fn get_file_size(handle: ULFileHandle, result: *mut c_longlong) -> bool {
false
}
#[allow(unused_variables)]
unsafe extern "C" fn get_file_mime_type(path: ULString, result: ULString) -> bool {
println!("Finding Mime Type: {:?}", from_ul_string(path));
false
}
#[allow(unused_variables)]
unsafe extern "C" fn open_file(path: ULString, open_for_writing: bool) -> ULFileHandle {
println!("Opening: {}", from_ul_string(path));
0
}
#[allow(unused_variables)]
unsafe extern "C" fn close_file(handle: ULFileHandle) {}
#[allow(unused_variables)]
unsafe extern "C" fn read_from_file(
handle: ULFileHandle,
data: *mut c_char,
length: c_longlong,
) -> c_longlong {
0
}
pub fn fs() -> ULFileSystem {
ULFileSystem {
file_exists: Some(file_exists),
get_file_size: Some(get_file_size),
get_file_mime_type: Some(get_file_mime_type),
open_file: Some(open_file),
close_file: Some(close_file),
read_from_file: Some(read_from_file),
}
}
| true
|
b972c32cec851b1f233bf72e6e1f3e873d2d3e7a
|
Rust
|
isgasho/cargo-sort-ck
|
/src/main.rs
|
UTF-8
| 5,818
| 2.921875
| 3
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
use std::env;
use std::fs::{read_to_string, OpenOptions};
use std::io::Write;
use std::path::PathBuf;
use clap::{App, Arg};
use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};
use toml_parse::{parse_it, sort_toml_items, Formatter, Matcher, SyntaxNodeExtTrait};
const HEADERS: [&str; 3] = [
"[dependencies]",
"[dev-dependencies]",
"[build-dependencies]",
];
const HEADER_SEG: [&str; 3] = ["dependencies.", "dev-dependencies.", "build-dependencies."];
const MATCHER: Matcher<'_> = Matcher {
heading: &HEADERS,
segmented: &HEADER_SEG,
heading_key: &[("[workspace]", "members"), ("[workspace]", "exclude")],
};
fn write_err(msg: &str) -> std::io::Result<()> {
let mut stderr = StandardStream::stderr(ColorChoice::Auto);
stderr.set_color(ColorSpec::new().set_fg(Some(Color::Red)))?;
write!(stderr, "Failure: ")?;
stderr.reset()?;
writeln!(stderr, "{}", msg)
}
fn write_succ(msg: &str) -> std::io::Result<()> {
let mut stdout = StandardStream::stdout(ColorChoice::Auto);
stdout.set_color(ColorSpec::new().set_fg(Some(Color::Green)))?;
write!(stdout, "Success: ")?;
stdout.reset()?;
writeln!(stdout, "{}", msg)
}
//Takes a file path and reads its contents in as plain text
fn load_file_contents(path: &str) -> String {
read_to_string(path).unwrap_or_else(|_| {
let msg = format!("No file found at: {}", path);
write_err(&msg).unwrap();
std::process::exit(1);
})
}
fn load_toml_file(path: &PathBuf) -> String {
//Check if a valid .toml filepath
let path = path.to_str().unwrap_or_else(|| {
write_err("path could not be represented as str").unwrap();
std::process::exit(1)
});
if !path.contains(".toml") {
let msg = format!("invalid path to .toml file: {}", path);
write_err(&msg).unwrap();
std::process::exit(1)
}
load_file_contents(path)
}
// TODO:
// it would be nice to be able to check if the file had been saved recently
// or check if uncommited changes were present
fn write_file(path: &PathBuf, toml: &str) -> std::io::Result<()> {
let mut fd = OpenOptions::new()
.write(true)
.create(true)
.truncate(true)
.open(path)?;
write!(fd, "{}", toml)
}
fn check_toml(path: &str, matches: &clap::ArgMatches) -> bool {
let mut path = PathBuf::from(path);
if path.extension().is_none() {
path.push("Cargo.toml");
}
let toml_raw = load_toml_file(&path);
// parses the toml file for sort checking
let tkn_tree = parse_it(&toml_raw)
.unwrap_or_else(|e| {
let msg = format!("toml parse error: {}", e);
write_err(&msg).unwrap();
std::process::exit(1);
})
.syntax();
// check if appropriate tables in file are sorted
let sorted = sort_toml_items(&tkn_tree, &MATCHER);
let was_sorted = !sorted.deep_eq(&tkn_tree);
let fmted = Formatter::new(&sorted).format().to_string();
if matches.is_present("print") {
print!("{}", fmted);
if !matches.is_present("write") {
return true;
}
}
if matches.is_present("write") {
write_file(&path, &fmted).unwrap_or_else(|e| {
let msg = format!("failed to rewrite file: {:?}", e);
write_err(&msg).unwrap();
});
let msg = format!("dependencies are now sorted for {:?}", path);
write_succ(&msg).unwrap();
return true;
}
if was_sorted {
let msg = format!("dependencies are not sorted for {:?}", path);
write_err(&msg).unwrap();
false
} else {
let msg = format!("dependencies are sorted for {:?}", path);
write_succ(&msg).unwrap();
true
}
}
fn main() {
let matches = App::new("Cargo Sort Check")
.author("Devin R <devin.ragotzy@gmail.com>")
.about("Ensure Cargo.toml dependency tables are sorted.")
.usage("cargo-sort-ck [FLAGS] [CWD]")
.arg(
Arg::with_name("cwd")
.value_name("CWD")
.multiple(true)
.help("Sets cwd, must contain Cargo.toml"),
)
.arg(
Arg::with_name("write")
.short("w")
.long("write")
.help("rewrites Cargo.toml file so it is lexically sorted"),
)
.arg(
Arg::with_name("print")
.short("p")
.long("print")
.help("prints Cargo.toml, lexically sorted, to the screen"),
)
.arg(
Arg::with_name("CRLF")
.long("crlf")
.help("output uses windows style line endings (\\r\\n)"),
)
.get_matches();
let cwd = env::current_dir().unwrap_or_else(|e| {
let msg = format!("no current directory found: {}", e);
write_err(&msg).unwrap();
std::process::exit(1);
});
let dir = cwd.to_str().unwrap_or_else(|| {
let msg = "could not represent path as string";
write_err(msg).unwrap();
std::process::exit(1);
});
// remove "sort-ck" when invoked `cargo sort-ck` sort-ck is the first arg
// https://github.com/rust-lang/cargo/issues/7653
let filtered_matches = matches.values_of("cwd").map_or(vec![dir], |s| {
let args = s.filter(|it| *it != "sort-ck").collect::<Vec<&str>>();
if args.is_empty() {
vec![dir]
} else {
args
}
});
let mut flag = true;
filtered_matches
.iter()
.map(|path| check_toml(path, &matches))
.for_each(|sorted| {
if !sorted {
flag = false;
}
});
if flag {
std::process::exit(0)
} else {
std::process::exit(1)
}
}
| true
|
529625d388058a2cbeeddaef6327100fce8c96b0
|
Rust
|
puzza007/ErlangRT
|
/src/emulator/disasm.rs
|
UTF-8
| 1,419
| 2.5625
| 3
|
[
"MIT"
] |
permissive
|
use beam::gen_op;
use emulator::code::{CodePtr, opcode, Labels, RefCode};
use emulator::code_srv::CodeServer;
use rt_defs::Word;
use term::lterm::*;
/// Print to screen disassembly of the current function.
#[allow(dead_code)]
pub unsafe fn disasm(code: RefCode, _labels: Option<&Labels>,
code_server: &CodeServer) {
let mut ip = &code[0] as *const Word;
let iend = ip.offset(code.len() as isize);
while ip < iend {
ip = disasm_op(ip, code_server);
}
}
/// Given an IP code pointer which points to the opcode - print the opcode and
/// args. Returns updated IP which points at the next opcode.
pub unsafe fn disasm_op(ip0: *const Word,
code_server: &CodeServer) -> *const Word {
let mut ip = ip0;
let op = opcode::from_memory_ptr(ip);
assert!(op < gen_op::OPCODE_MAX);
if let Some(mfa) = code_server.code_reverse_lookup(CodePtr::new(ip)) {
print!("{} ", mfa)
}
print!("{:p}: {} ", ip, gen_op::opcode_name(op as u8));
ip = ip.offset(1);
let n_args = gen_op::opcode_arity(op as u8) as Word;
disasm_op_args(ip, n_args);
println!();
ip.offset(n_args as isize)
}
unsafe fn disasm_op_args(ip: *const Word, n_args: Word) {
for arg_index in 0..n_args {
let arg_raw = *ip.offset(arg_index as isize);
let arg = LTerm::from_raw(arg_raw);
print!("{}", arg);
if arg_index < n_args - 1 {
print!(", ")
}
}
}
| true
|
f1410cf45339972d2c130cbdb44664136e197d9c
|
Rust
|
pantsbuild/pants
|
/src/rust/engine/options/src/env.rs
|
UTF-8
| 1,883
| 2.640625
| 3
|
[
"Apache-2.0"
] |
permissive
|
// Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
use std::collections::HashMap;
use std::env;
use super::id::{NameTransform, OptionId, Scope};
use super::OptionsSource;
use crate::parse::{parse_bool, parse_string_list};
use crate::ListEdit;
#[derive(Debug)]
pub(crate) struct Env {
pub(crate) env: HashMap<String, String>,
}
impl Env {
pub(crate) fn capture() -> Env {
Env {
env: env::vars().collect::<HashMap<_, _>>(),
}
}
fn env_var_names(id: &OptionId) -> Vec<String> {
let name = id.name("_", NameTransform::ToUpper);
let mut names = vec![format!(
"PANTS_{}_{}",
id.0.name().replace('-', "_").to_ascii_uppercase(),
name
)];
if id.0 == Scope::Global {
names.push(format!("PANTS_{name}"));
}
if name.starts_with("PANTS_") {
names.push(name);
}
names
}
}
impl OptionsSource for Env {
fn display(&self, id: &OptionId) -> String {
Self::env_var_names(id).pop().unwrap()
}
fn get_string(&self, id: &OptionId) -> Result<Option<String>, String> {
let env_var_names = Self::env_var_names(id);
for env_var_name in &env_var_names {
if let Some(value) = self.env.get(env_var_name) {
return Ok(Some(value.to_owned()));
}
}
Ok(None)
}
fn get_bool(&self, id: &OptionId) -> Result<Option<bool>, String> {
if let Some(value) = self.get_string(id)? {
parse_bool(&value)
.map(Some)
.map_err(|e| e.render(self.display(id)))
} else {
Ok(None)
}
}
fn get_string_list(&self, id: &OptionId) -> Result<Option<Vec<ListEdit<String>>>, String> {
if let Some(value) = self.get_string(id)? {
parse_string_list(&value)
.map(Some)
.map_err(|e| e.render(self.display(id)))
} else {
Ok(None)
}
}
}
| true
|
f7a06fddf983fa734f90e97ac211fcba8f6089ba
|
Rust
|
nicolasjhampton/rust-server-framework
|
/http/src/response_mod/status.rs
|
UTF-8
| 4,193
| 3.09375
| 3
|
[] |
no_license
|
use std::fmt;
#[derive(Copy, Clone, PartialEq, Eq)]
pub enum Status {
CODE1(u8),
CODE2(u8),
CODE3(u8),
CODE4(u8),
CODE5(u8),
INFO,
SUCCESS,
REDIRECT,
BAD,
ERROR
}
impl fmt::Display for Status {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let (code, message) = match self {
Status::CODE1(code) => {
match code {
01 => (101, "Switching Protocol"),
02 => (102, "Processing"),
03 => (103, "Early Hints"),
_ => (100, "Continue")
}
},
Status::CODE2(code) => {
match code {
01 => (201, "Created"),
02 => (202, "Accepted"),
03 => (203, "Non-Authoritative Information"),
04 => (204, "No Content"),
05 => (205, "Reset Content"),
06 => (206, "Partial Content"),
_ => (200, "OK")
}
},
Status::CODE3(code) => {
match code {
01 => (301, "Moved Permanently"),
02 => (302, "Found"),
03 => (303, "See Other"),
04 => (304, "Not Modified"),
07 => (307, "Temporary Redirect"),
08 => (308, "Permanent Redirect"),
_ => (300, "Multiple Choice")
}
},
Status::CODE4(code) => {
match code {
01 => (401, "Unauthorized"),
02 => (402, "Payment Required"),
03 => (403, "Forbidden"),
04 => (404, "Not Found"),
05 => (405, "Method Not Allowed"),
06 => (406, "Not Acceptable"),
07 => (407, "Proxy Authentication Required"),
08 => (408, "Request Timeout"),
09 => (409, "Conflict"),
10 => (410, "Gone"),
11 => (411, "Length Required"),
12 => (412, "Precondition Failed"),
13 => (413, "Payload Too Large"),
14 => (414, "URI Too Large"),
15 => (415, "Unsupported Media Type"),
16 => (416, "Range Not Satisfiable"),
17 => (417, "Expectation Failed"),
18 => (418, "I'm a teapot"),
21 => (421, "Misdirected Request"),
22 => (422, "Unprocessable Entity"),
23 => (423, "Locked"),
24 => (424, "Failed Dependency"),
25 => (425, "Too Early"),
26 => (426, "Upgrade Required"),
28 => (428, "Precondition Required"),
29 => (429, "Too Many Requests"),
31 => (431, "Request Header Fields Too Large"),
51 => (451, "Lawyers who needs um am I right"),
_ => (400, "Bad Request")
}
},
Status::CODE5(code) => {
match code {
01 => (501, "Not Implemented"),
02 => (502, "Bad Gateway"),
03 => (503, "Service Unavailable"),
04 => (504, "Gateway Timeout"),
05 => (505, "HTTP Version Not Supported"),
06 => (506, "Variant Also Negotiates"),
07 => (507, "Insufficient Storage"),
08 => (508, "Loop Detected"),
10 => (510, "Not Extended"),
11 => (511, "Network Authentication Required"),
_ => (500, "Internal Server Error")
}
},
Status::INFO => (100, "Continue"),
Status::SUCCESS => (200, "OK"),
Status::REDIRECT => (300, "Multiple Choice"),
Status::BAD => (400, "Bad Request"),
Status::ERROR => (500, "Internal Server Error"),
};
write!(f, "{} {}", code, message)
}
}
| true
|
8e94f4d5c8073df69e610f6b0d080bb05cd73a71
|
Rust
|
deniauma/Neatro
|
/simple-alloc/src/alloc.rs
|
UTF-8
| 1,440
| 2.65625
| 3
|
[] |
no_license
|
use core::mem::{size_of, align_of};
use core::ffi::c_void;
use win32::{HANDLE, GetProcessHeap, HeapAlloc, HeapFree};
pub struct Layout
{
pub size: usize,
pub align: usize,
}
impl Layout
{
pub fn new(size: usize) -> Self
{
Self
{
size,
align: 4,
}
}
pub fn from_type<T>() -> Self
{
Self
{
size: size_of::<T>(),
align: align_of::<T>(),
}
}
pub fn from_array_type<T>(length: usize) -> Self
{
Self
{
size: size_of::<T>() * length,
align: align_of::<T>(),
}
}
}
pub trait Allocator
{
unsafe fn alloc(&mut self, layout: Layout) -> Option<*mut c_void>;
unsafe fn dealloc(&mut self, ptr: *mut c_void);
}
pub struct Win32HeapAllocator
{
heap: HANDLE,
}
impl Default for Win32HeapAllocator {
fn default() -> Self
{
Self
{
heap: unsafe { GetProcessHeap() },
}
}
}
impl Allocator for Win32HeapAllocator {
unsafe fn alloc(&mut self, layout: Layout) -> Option<*mut c_void>
{
let ptr = HeapAlloc(self.heap, 0, layout.size);
if ptr.is_null()
{
None
}
else
{
Some(ptr as *mut c_void)
}
}
unsafe fn dealloc(&mut self, ptr: *mut c_void)
{
HeapFree(self.heap, 0, ptr as win32::LPVOID);
}
}
| true
|
dd8d2a3bb832f97972a111fdda11a0e6b6279c27
|
Rust
|
csssuf/rust-gpg-error
|
/src/lib.rs
|
UTF-8
| 7,948
| 2.96875
| 3
|
[] |
no_license
|
extern crate libgpg_error_sys as ffi;
use std::borrow::Cow;
use std::error;
use std::ffi::{CStr, NulError};
use std::fmt::{self, Write};
use std::io::{self, ErrorKind};
use std::os::raw::c_int;
use std::result;
use std::str;
pub type ErrorSource = ffi::gpg_err_source_t;
pub type ErrorCode = ffi::gpg_err_code_t;
/// A type wrapping errors produced by GPG libraries.
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct Error(ffi::gpg_error_t);
impl Error {
/// Creates a new error from a raw error value.
#[inline]
pub fn new(err: ffi::gpg_error_t) -> Error {
Error(err)
}
/// Returns the raw error value that this error wraps.
#[inline]
pub fn raw(&self) -> ffi::gpg_error_t {
self.0
}
/// Creates a new error from an error source and an error code.
#[inline]
pub fn from_source(source: ErrorSource, code: ErrorCode) -> Error {
Error::new(ffi::gpg_err_make(source, code))
}
/// Creates a new error from an error code using the default
/// error source `SOURCE_UNKNOWN`.
#[inline]
pub fn from_code(code: ErrorCode) -> Error {
Error::from_source(Self::SOURCE_UNKNOWN, code)
}
/// Returns an error representing the last OS error that occurred.
#[inline]
pub fn last_os_error() -> Error {
unsafe { Error::new(ffi::gpg_error_from_syserror()) }
}
/// Creates a new error from an OS error code.
#[inline]
pub fn from_errno(code: i32) -> Error {
unsafe { Error::new(ffi::gpg_error_from_errno(code as c_int)) }
}
/// Returns the OS error that this error represents.
#[inline]
pub fn to_errno(&self) -> i32 {
unsafe { ffi::gpg_err_code_to_errno(self.code()) }
}
/// Returns the error code.
#[inline]
pub fn code(&self) -> ErrorCode {
ffi::gpg_err_code(self.0)
}
/// Returns a description of the source of the error as a UTF-8 string.
#[inline]
pub fn source(&self) -> Option<&'static str> {
self.raw_source().and_then(|s| str::from_utf8(s).ok())
}
/// Returns an `Error` with the same code from the provided source.
#[inline]
pub fn with_source(&self, src: ErrorSource) -> Self {
Error::from_source(src, self.code())
}
/// Returns a description of the source of the error as a slice of bytes.
#[inline]
pub fn raw_source(&self) -> Option<&'static [u8]> {
unsafe {
let source = ffi::gpg_strsource(self.0);
if !source.is_null() {
Some(CStr::from_ptr(source).to_bytes())
} else {
None
}
}
}
/// Returns a printable description of the error.
#[inline]
pub fn description(&self) -> Cow<'static, str> {
let mut buf = [0; 1024];
match self.write_description(&mut buf) {
Ok(b) => Cow::Owned(String::from_utf8_lossy(b).into_owned()),
Err(_) => Cow::Borrowed("Unknown error"),
}
}
/// Returns a description of the error as a slice of bytes.
#[inline]
pub fn raw_description(&self) -> Cow<'static, [u8]> {
let mut buf = [0; 1024];
match self.write_description(&mut buf) {
Ok(b) => Cow::Owned(b.to_owned()),
Err(_) => Cow::Borrowed(b"Unknown error"),
}
}
/// Writes a description of the error to the provided buffer
/// and returns a slice of the buffer containing the description.
///
/// # Errors
///
/// Returns an error if the provided buffer is not long enough or
/// if the error is not recognized.
#[inline]
pub fn write_description<'r>(&self, buf: &'r mut [u8]) -> result::Result<&'r mut [u8], ()> {
let p = buf.as_mut_ptr();
unsafe {
if ffi::gpg_strerror_r(self.0, p as *mut _, buf.len()) == 0 {
match buf.iter().position(|&b| b == b'\0') {
Some(x) => Ok(&mut buf[..x]),
None => Ok(buf),
}
} else {
Err(())
}
}
}
}
impl error::Error for Error {
#[inline]
fn description(&self) -> &str {
"gpg error"
}
}
impl fmt::Debug for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
struct Escaped<'a>(&'a [u8]);
impl<'a> fmt::Debug for Escaped<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use std::ascii;
f.write_char('"')?;
for b in self.0.iter().flat_map(|&b| ascii::escape_default(b)) {
f.write_char(b as char)?;
}
f.write_char('"')
}
}
let mut buf = [0; 1024];
let desc = self.write_description(&mut buf)
.map(|x| &*x)
.unwrap_or(b"Unknown error");
f.debug_struct("Error")
.field("source", &self.source())
.field("code", &self.code())
.field("description", &Escaped(desc))
.finish()
}
}
impl fmt::Display for Error {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
// TODO: Use write_description and char::decode_utf8
write!(fmt, "{} (gpg error {})", self.description(), self.code())
}
}
impl From<NulError> for Error {
#[inline]
fn from(_: NulError) -> Error {
Error::from_code(ffi::GPG_ERR_INV_VALUE)
}
}
impl From<io::Error> for Error {
fn from(err: io::Error) -> Error {
let kind = err.kind();
if let Some(Ok(err)) = err.into_inner().map(|e| e.downcast::<Error>()) {
*err
} else {
match kind {
ErrorKind::NotFound => Self::ENOENT,
ErrorKind::PermissionDenied => Self::EACCES,
ErrorKind::ConnectionRefused => Self::ECONNREFUSED,
ErrorKind::ConnectionReset => Self::ECONNRESET,
ErrorKind::ConnectionAborted => Self::ECONNABORTED,
ErrorKind::NotConnected => Self::ENOTCONN,
ErrorKind::AddrInUse => Self::EADDRINUSE,
ErrorKind::AddrNotAvailable => Self::EADDRNOTAVAIL,
ErrorKind::BrokenPipe => Self::EPIPE,
ErrorKind::AlreadyExists => Self::EEXIST,
ErrorKind::WouldBlock => Self::EWOULDBLOCK,
ErrorKind::InvalidInput => Self::EINVAL,
ErrorKind::TimedOut => Self::ETIMEDOUT,
ErrorKind::Interrupted => Self::EINTR,
_ => Error::EIO,
}
}
}
}
impl From<Error> for io::Error {
fn from(err: Error) -> io::Error {
let kind = match err.with_source(Error::SOURCE_UNKNOWN) {
Error::ECONNREFUSED => ErrorKind::ConnectionRefused,
Error::ECONNRESET => ErrorKind::ConnectionReset,
Error::EPERM | Error::EACCES => ErrorKind::PermissionDenied,
Error::EPIPE => ErrorKind::BrokenPipe,
Error::ENOTCONN => ErrorKind::NotConnected,
Error::ECONNABORTED => ErrorKind::ConnectionAborted,
Error::EADDRNOTAVAIL => ErrorKind::AddrNotAvailable,
Error::EADDRINUSE => ErrorKind::AddrInUse,
Error::ENOENT => ErrorKind::NotFound,
Error::EINTR => ErrorKind::Interrupted,
Error::EINVAL => ErrorKind::InvalidInput,
Error::ETIMEDOUT => ErrorKind::TimedOut,
Error::EEXIST => ErrorKind::AlreadyExists,
x if x == Error::EAGAIN || x == Error::EWOULDBLOCK => ErrorKind::WouldBlock,
_ => ErrorKind::Other,
};
io::Error::new(kind, err)
}
}
include!(concat!(env!("OUT_DIR"), "/constants.rs"));
pub type Result<T> = result::Result<T, Error>;
#[macro_export]
macro_rules! return_err {
($e:expr) => (match $crate::Error::new($e) {
$crate::Error::NO_ERROR => (),
err => return Err(From::from(err)),
});
}
| true
|
02c1efa7b54997e4b49b4d4687a3505da280b604
|
Rust
|
prz23/zinc
|
/zinc-vm/src/gadgets/auto_const.rs
|
UTF-8
| 1,737
| 2.828125
| 3
|
[
"Apache-2.0"
] |
permissive
|
//!
//! The constant optimizing macro.
//!
pub mod prelude {
pub use crate::constraint_systems::constant::Constant as ConstantCS;
pub use crate::gadgets::scalar::variant::Variant as ScalarVariant;
pub use crate::gadgets::scalar::Scalar;
use crate::error::Error;
use crate::IEngine;
pub trait ToConstant: Sized {
fn to_constant(&self) -> Result<Self, Error>;
}
impl<E: IEngine> ToConstant for Scalar<E> {
fn to_constant(&self) -> Result<Self, Error> {
self.to_constant_unchecked()
}
}
impl<E: IEngine> ToConstant for (Scalar<E>, Scalar<E>) {
fn to_constant(&self) -> Result<Self, Error> {
Ok((
self.0.to_constant_unchecked()?,
self.1.to_constant_unchecked()?,
))
}
}
}
#[macro_export]
macro_rules! auto_const {
// Unary operators
($op:path, $cs:expr, $a:expr) => {{
let a = $a;
match a.get_variant() {
ScalarVariant::Constant { .. } => {
let const_cs = ConstantCS::default();
let result = $op(const_cs, a);
result.and_then(|result| result.to_constant())
}
_ => $op($cs, a),
}
}};
// Binary operators
($op:path, $cs:expr, $a:expr, $b:expr) => {{
let a = $a;
let b = $b;
match (a.get_variant(), b.get_variant()) {
(ScalarVariant::Constant { .. }, ScalarVariant::Constant { .. }) => {
let const_cs = ConstantCS::default();
let result = $op(const_cs, a, b);
result.and_then(|result| result.to_constant())
}
_ => $op($cs, a, b),
}
}};
}
| true
|
00478f0e3a9329393a776beb8745bf96aa883f78
|
Rust
|
ksdme/launchk
|
/xpc-sys/src/objects/xpc_shmem.rs
|
UTF-8
| 2,446
| 2.671875
| 3
|
[
"MIT"
] |
permissive
|
use crate::objects::xpc_error::XPCError;
use crate::objects::xpc_object::XPCObject;
use crate::{
mach_port_t, mach_task_self_, rs_strerror, vm_address_t, vm_allocate, vm_deallocate, vm_size_t,
xpc_shmem_create,
};
use std::ffi::c_void;
use std::os::raw::c_int;
use std::ptr::null_mut;
use std::sync::Arc;
/// Wrapper around vm_allocate() vm_deallocate() with an XPCObject
/// member of XPC type _xpc_type_shmem
#[derive(Debug, Clone)]
pub struct XPCShmem {
pub task: mach_port_t,
pub size: vm_size_t,
pub region: *mut c_void,
pub xpc_object: Arc<XPCObject>,
}
unsafe impl Send for XPCShmem {}
impl XPCShmem {
/// Allocate a region of memory of vm_size_t & flags, then wrap in a XPC Object
#[must_use]
pub fn new(task: mach_port_t, size: vm_size_t, flags: c_int) -> Result<XPCShmem, XPCError> {
let mut region: *mut c_void = null_mut();
let err = unsafe {
vm_allocate(
task,
&mut region as *const _ as *mut vm_address_t,
size,
flags,
)
};
if err > 0 {
Err(XPCError::IOError(rs_strerror(err)))
} else {
let xpc_object: XPCObject =
unsafe { xpc_shmem_create(region as *mut c_void, size as u64).into() };
log::info!(
"XPCShmem new (region: {:p}, xpc_object_t {:p})",
region,
xpc_object.as_ptr()
);
Ok(XPCShmem {
task,
size,
region,
xpc_object: xpc_object.into(),
})
}
}
/// new() with _mach_task_self
/// https://web.mit.edu/darwin/src/modules/xnu/osfmk/man/mach_task_self.html
#[must_use]
pub fn new_task_self(size: vm_size_t, flags: c_int) -> Result<XPCShmem, XPCError> {
unsafe { Self::new(mach_task_self_, size, flags) }
}
}
impl Drop for XPCShmem {
fn drop(&mut self) {
let XPCShmem {
size,
task,
region,
xpc_object,
} = self;
log::info!(
"XPCShmem drop (region: {:p}, xpc_object_t {:p})",
region,
xpc_object.as_ptr()
);
let ok = unsafe { vm_deallocate(*task, *region as vm_address_t, *size) };
if ok != 0 {
panic!("shmem won't drop (vm_deallocate errno {})", ok);
}
}
}
| true
|
0366fd4b515c22b9d1dcd5a694a87534d1ddcfa3
|
Rust
|
saks/hb_api
|
/octo-budget-api/src/apps/frontend_app.rs
|
UTF-8
| 236
| 2.609375
| 3
|
[
"MIT"
] |
permissive
|
use actix_web::{get, http::header, HttpResponse, Result};
#[get("/")]
pub async fn index() -> Result<HttpResponse> {
Ok(HttpResponse::PermanentRedirect()
.header(header::LOCATION, "/public/index.html")
.finish())
}
| true
|
2812711c026f3fcde787004d0ecb492e91891dde
|
Rust
|
nphyx/scrapsrl
|
/src/system/input/player.rs
|
UTF-8
| 2,814
| 2.625
| 3
|
[] |
no_license
|
use crate::component::{Cursor, Direction, MovePlan, Orientation, Player, Pos, Region};
use crate::resource::{GameState, UserInput};
use specs::{Entities, Join, Read, ReadStorage, System, Write, WriteStorage};
use tcod::input::Key;
use tcod::input::KeyCode::*;
use super::movement_util::get_movement;
/// handle input that controls the player's character
pub struct PlayerInput;
impl<'a> System<'a> for PlayerInput {
type SystemData = (
WriteStorage<'a, Cursor>,
WriteStorage<'a, Orientation>,
WriteStorage<'a, Pos>,
WriteStorage<'a, MovePlan>,
ReadStorage<'a, Player>,
WriteStorage<'a, Region>,
Read<'a, GameState>,
Write<'a, UserInput>,
Entities<'a>,
);
fn run(
&mut self,
(
mut cursors,
mut orientations,
mut positions,
mut plans,
players,
mut regions,
state,
mut input,
entities,
): Self::SystemData,
) {
let mut player_pos: Pos = Pos::default();
let mut player_region: Region = Region::default();
if state.paused {
return;
} // no moving while paused
for (pos, to, region, ..) in (&positions, &mut plans, &mut regions, &players).join() {
player_pos = *pos;
player_region = *region;
if let Some(plan) = get_movement(&input) {
to.x = plan.x;
to.y = plan.y;
input.consume();
return;
}
}
match input.get() {
Some(Key { code: Enter, .. })
| Some(Key {
code: NumPadEnter, ..
})
| Some(Key { code: NumPad5, .. }) => {
entities
.build_entity()
.with(Cursor, &mut cursors)
.with(
Region {
x: player_region.x,
y: player_region.y,
},
&mut regions,
)
.with(MovePlan::default(), &mut plans)
.with(
Pos {
x: player_pos.x,
y: player_pos.y,
},
&mut positions,
)
.with(
Orientation {
dir: Direction::North,
},
&mut orientations,
)
.build();
input.consume();
return;
}
_ => {
return;
}
}
}
}
| true
|
333b612da9fba6a510a8ea032b56bc87acc92efa
|
Rust
|
avhz/QuantMath
|
/src/data/bumptime.rs
|
UTF-8
| 880
| 3.28125
| 3
|
[
"MIT"
] |
permissive
|
use dates::Date;
/// Bump that defines all the supported bumps to the spot date
pub struct BumpTime {
spot_date: Date,
spot_dynamics: SpotDynamics,
}
impl BumpTime {
pub fn new(spot_date: Date, spot_dynamics: SpotDynamics) -> BumpTime {
BumpTime {
spot_date: spot_date,
spot_dynamics: spot_dynamics,
}
}
pub fn spot_date(&self) -> Date {
self.spot_date
}
pub fn spot_dynamics(&self) -> SpotDynamics {
self.spot_dynamics
}
}
/// Enum that defines how spot moves when time is bumped.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Hash)]
pub enum SpotDynamics {
/// Spot stays the same, except that any dividends going ex are subtracted
StickySpot,
/// Forwards after the spot date stay the same. In other words, spot moves
/// up the forward.
StickyForward,
}
| true
|
983da4e09a7ce8decbe8b0380601718a3c480690
|
Rust
|
speederking07/LZW_with_universal_coder
|
/src/dictionary.rs
|
UTF-8
| 3,132
| 3.59375
| 4
|
[] |
no_license
|
use std::ops::{Index, IndexMut};
#[derive(Debug)]
pub struct Dictionary {
words: Vec<Vec<u8>>,
tree: WordsTree
}
impl Index<usize> for Dictionary {
type Output = Vec<u8>;
fn index(&self, index: usize) -> &Self::Output {
return &self.words[index];
}
}
impl IndexMut<usize> for Dictionary {
fn index_mut(&mut self, index: usize) -> &mut Self::Output {
return &mut self.words[index];
}
}
impl Dictionary {
const MAX_SIZE:usize = 4194304;
pub fn new() -> Self {
let mut res = Self {
words: Vec::new(),
tree: WordsTree::new()
};
for i in 0..=255 {
res.add(vec![i]);
}
res
}
pub fn vec_eq<A>(a: &Vec<A>, b: &Vec<A>) -> bool
where A: PartialEq
{
if a.len() != b.len() {
return false;
}
for i in 0..a.len() {
if a[i] != b[i] {
return false;
}
}
true
}
pub fn word_position(&self, seq: &Vec<u8>) -> Option<usize> {
self.tree.get(seq.as_slice())
/*for i in 0..self.words.len() {
if Self::vec_eq(&seq, &self.words[i]) {
return Some(i);
}
}
None*/
}
pub fn add(&mut self, seq: Vec<u8>) -> Option<usize> {
match self.word_position(&seq) {
None => {
return if self.words.len() < Self::MAX_SIZE {
self.tree.add(seq.as_slice(), self.words.len());
//eprintln!("{} {:?}", self.words.len(), seq);
self.words.push(seq);
Some(self.words.len())
} else {
None
};
}
Some(v) => Some(v)
}
}
pub fn len(&self) -> usize {
self.words.len()
}
}
#[derive(Debug, Clone)]
pub enum WordsTree{
Node(usize, Box<Vec<WordsTree>>),
Leaf
}
impl WordsTree{
fn new() -> Self{
WordsTree::Node(0, Box::new(vec![WordsTree::Leaf; 256]))
}
fn add(&mut self, path: &[u8], index: usize){
match self {
WordsTree::Node(_, rest) => {
let x = path[0];
rest[x as usize].add(&path[1..], index);
}
WordsTree::Leaf => {
*self = WordsTree::Node(index, Box::new(vec![WordsTree::Leaf; 256]));
}
}
}
fn get(&self, path: &[u8]) -> Option<usize>{
return match self {
WordsTree::Node(index, rest) => {
if path.is_empty() {
return Some(*index);
}
let x = path[0];
return rest[x as usize].get(&path[1..])
}
WordsTree::Leaf => {
None
}
}
}
}
#[cfg(test)]
mod dict_test {
use crate::dictionary::WordsTree;
#[test]
fn tree_test() {
let mut tree = WordsTree::new();
tree.add(&[0], 0);
tree.add(&[1], 1);
tree.add(&[0, 1], 2);
assert_eq!(tree.get(&[0,1]), Some(2))
}
}
| true
|
b1b3d2945529dfedfb46c289061273b43dae5572
|
Rust
|
cassiemeharry/pf2e-csheet
|
/pf2e-csheet-shared/src/calc.rs
|
UTF-8
| 18,942
| 2.890625
| 3
|
[] |
no_license
|
#[cfg(test)]
use proptest::prelude::*;
#[cfg(test)]
use proptest_derive::Arbitrary;
use serde::{Deserialize, Serialize};
use smallvec::SmallVec;
use smartstring::alias::String;
use std::{fmt, str::FromStr};
use thiserror::Error;
use crate::{
bonuses::{Bonus, Modifier, Penalty},
character::Character,
choices::Choice,
common::ResourceRef,
storage::ResourceStorage,
};
#[derive(Copy, Clone)]
// #[cfg_attr(test, derive(Arbitrary))]
pub struct CalcContext<'a> {
pub character: &'a Character,
pub rref: &'a ResourceRef,
pub target: Option<&'a ResourceRef>,
pub resources: &'a dyn ResourceStorage,
}
impl<'a> fmt::Debug for CalcContext<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("CalcContext")
.field("character", self.character)
.field("rref", self.rref)
.field("target", &self.target)
.field("resources", &format_args!("…"))
.finish()
}
}
impl<'a> CalcContext<'a> {
pub fn new(
character: &'a Character,
rref: &'a ResourceRef,
resources: &'a dyn ResourceStorage,
) -> Self {
Self {
character,
rref,
target: None,
resources,
}
}
pub fn with_target(mut self, target: &'a ResourceRef) -> Self {
self.target = Some(target);
self
}
}
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
#[cfg_attr(test, derive(Arbitrary))]
pub enum Op {
Add,
// Subtract,
// Multiply,
// Divide,
}
impl fmt::Display for Op {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Self::Add => write!(f, "+"),
// Self::Subtract => write!(f, "-"),
// Self::Multiply => write!(f, "*"),
// Self::Divide => write!(f, "/"),
}
}
}
impl Op {
// fn is_associative(self) -> bool {
// match self {
// Self::Add => true,
// // Self::Subtract => false,
// // Self::Multiply => true,
// // Self::Divide => false,
// }
// }
fn apply(self, x: i16, y: i16) -> i16 {
match self {
Self::Add => x + y,
// Self::Subtract => x - y,
// Self::Multiply => x * y,
// Self::Divide => x / y,
}
}
}
#[derive(Clone, Debug, Eq, PartialEq, Deserialize)]
#[serde(try_from = "smartstring::alias::String")]
pub enum Calculation {
Named(String),
Choice(Choice),
Modifier(Modifier),
Op(Op, Vec<Calculation>),
}
#[cfg(test)]
impl Arbitrary for Calculation {
type Parameters = ();
type Strategy = BoxedStrategy<Self>;
fn arbitrary_with(_args: ()) -> Self::Strategy {
let leaf = prop_oneof![
prop::string::string_regex("[a-zA-Z][a-zA-Z_]+")
.unwrap()
.prop_map_into()
.prop_map(Calculation::Named),
any::<Choice>().prop_map(Calculation::Choice),
any::<Modifier>().prop_map(Calculation::Modifier),
];
leaf.prop_recursive(
3, // levels deep
20, // maximum node count
5, // items per collection
|inner| {
(any::<Op>(), prop::collection::vec(inner.clone(), 2..10))
.prop_map(|(op, calcs)| Self::Op(op, calcs).normalized())
},
)
.boxed()
}
}
impl Default for Calculation {
fn default() -> Self {
Self::Modifier(Modifier::new())
}
}
impl Calculation {
fn normalize(&mut self) {
if let Self::Op(outer_op, outer_terms) = self {
trace!("Normalizing Op({:?}, {:?})", outer_op, outer_terms);
if outer_terms.iter().all(|t| matches!(t, Self::Modifier(_))) {
{
trace!(
"Normalizing Op({:?}, {:?}) by combining modifiers",
outer_op,
outer_terms
);
}
let mut total = Modifier::new();
match outer_op {
Op::Add => {
for term in outer_terms.drain(..) {
match term {
Self::Modifier(m) => total += m,
_ => unreachable!(),
}
}
}
}
*self = Self::Modifier(total);
return;
}
// Flatten tree with matching ops
let mut terms = vec![];
std::mem::swap(&mut terms, outer_terms);
let mut flattened = true;
let mut iterations = 0;
while flattened {
trace!("Flattening op terms {:?}", terms);
flattened = false;
terms.sort_by_key(|term| match term {
Self::Op(_, _) => 0,
Self::Modifier(_) => 1,
Self::Named(_) => 2,
Self::Choice(_) => 3,
});
let mut new_terms = Vec::with_capacity(terms.len());
for mut term in terms.drain(..) {
term.normalize();
match term {
Self::Op(inner_op, inner_terms) if *outer_op == inner_op => {
flattened = true;
new_terms.extend(inner_terms);
}
Self::Modifier(m1) => match (*outer_op, new_terms.pop()) {
(Op::Add, Some(Self::Modifier(m2))) => {
new_terms.push(Self::Modifier(m1 + m2));
flattened = true;
}
(Op::Add, Some(other)) => {
new_terms.push(other);
new_terms.push(Self::Modifier(m1));
}
(Op::Add, None) => {
new_terms.push(Self::Modifier(m1));
}
},
other_term => new_terms.push(other_term),
}
}
terms = new_terms;
iterations += 1;
assert!(iterations < 20);
}
trace!("After flatten, terms = {:?}", terms);
*self = Self::Op(*outer_op, terms);
}
}
pub fn normalized(mut self) -> Self {
self.normalize();
self
}
// pub fn build_op(left: Self, right: Self, op: Op) -> Self {
// let (op, items) = match (left, right) {
// (Self::Op(l_op, mut l_terms), Self::Op(r_op, r_terms)) if l_op == op && r_op == op => {
// l_terms.extend(r_terms);
// (op, l_terms)
// }
// (Self::Op(l_op, mut terms), other) if l_op == op => {
// terms.push(other);
// (op, terms)
// }
// (s, Self::Op(r_op, mut terms)) if r_op == op => {
// terms.insert(0, s);
// (op, terms)
// }
// (left, right) => (op, vec![left, right]),
// };
// // HACK: modifiers with multiple parts deserialize into adding up the
// // individual components, so we need to undo that for the roundtrip
// // tests to pass.
// if items.iter().all(|item| matches!(item, Self::Modifier(_))) {
// trace!("In Calculation modifier sum hack");
// let mut total = Modifier::new();
// match op {
// Op::Add => {
// for item in items {
// match item {
// Self::Modifier(m) => total += m,
// _ => unreachable!(),
// }
// }
// }
// }
// Self::Modifier(total)
// } else {
// Self::Op(op, items)
// }
// }
pub fn from_number(n: i16) -> Self {
let m = if n >= 0 {
Bonus::untyped(n).into()
} else {
Penalty::untyped(-n).into()
};
Calculation::Modifier(m)
}
pub fn evaluate(&self, ctx: CalcContext<'_>) -> i16 {
match self {
Self::Named(name) => ctx
.character
.get_modifier(name, ctx.target, ctx.resources)
.total(),
Self::Choice(choice) => {
let resource = match ctx.resources.lookup_immediate(ctx.rref) {
None => {
debug!("When attempting to evaluate choice, failed to look up resouce for reference {}", ctx.rref);
return 0;
}
Some(r) => r,
};
match resource.common().get_choice(choice, ctx) {
None => {
debug!("When attempting to evaluate a choice, failed to find a numeric value set for resource {}", resource);
0
}
Some(v) => v,
}
}
Self::Modifier(m) => m.total(),
Self::Op(op, terms) => {
let mut iter = terms.iter();
let mut value = match iter.next() {
None => return 0,
Some(t) => t.evaluate(ctx),
};
for term in iter {
let next = term.evaluate(ctx);
value = op.apply(value, next);
}
value
}
}
}
}
try_from_str!(Calculation);
#[derive(Debug, Error)]
#[error("Failed to parse calculation")]
pub struct CalculationFromStrError;
impl FromStr for Calculation {
type Err = CalculationFromStrError;
fn from_str(s: &str) -> Result<Calculation, Self::Err> {
trace!("Parsing calculation from {:?}", s);
match crate::parsers::calculation(s) {
Ok(c) => {
trace!("Got calculation {:?}", c);
Ok(c)
}
Err(e) => {
error!("Failed to parse calculation from {:?}:\n{}", s, e);
Err(CalculationFromStrError)
}
}
}
}
mod parens {
use super::Calculation;
use std::fmt;
pub struct Parens<'a> {
pub calc: &'a Calculation,
}
impl fmt::Display for Parens<'_> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "({})", self.calc)
}
}
}
impl fmt::Display for Calculation {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
trace!("Displaying calculation {:?}", self);
match self {
Self::Named(name) => write!(f, "{}", name),
Self::Choice(choice) => write!(f, "${}", choice),
Self::Modifier(m) => write!(f, "{}", m),
Self::Op(op, terms) => {
for (i, t) in terms.iter().enumerate() {
let pt;
let t: &dyn fmt::Display = match t {
Self::Op(_, _) => {
pt = parens::Parens { calc: &t };
&pt
}
_ => &t,
};
if i == 0 {
write!(f, "{}", t)?;
} else {
write!(f, " {} {}", op, t)?;
}
}
Ok(())
}
}
}
}
impl Serialize for Calculation {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::ser::Serializer,
{
let calc = format!("{}", self);
serializer.serialize_str(calc.as_str())
}
}
#[derive(Clone, Debug, Eq, PartialEq)]
#[cfg_attr(test, derive(Arbitrary))]
pub enum CalculatedStringPart {
Calc(Calculation),
Literal(
#[cfg_attr(
test,
proptest(
strategy = "\"..+\".prop_filter_map(\"invalid calculated string literal\", proptest_bad_calc_str_part_literal)"
)
)]
String,
),
}
#[cfg(test)]
fn proptest_bad_calc_str_part_literal(s: std::string::String) -> Option<String> {
if s.contains("[") {
None
} else if s.contains("]") {
None
} else {
Some(s.into())
}
}
impl CalculatedStringPart {
fn evaluate(&self, s: &mut String, ctx: CalcContext<'_>) -> fmt::Result {
use std::fmt::Write;
match self {
Self::Calc(c) => write!(s, "{}", c.evaluate(ctx)),
Self::Literal(l) => write!(s, "{}", l),
}
}
}
#[derive(Clone, Debug, Eq, PartialEq, Deserialize)]
#[serde(try_from = "smartstring::alias::String")]
pub struct CalculatedString {
parts: SmallVec<[CalculatedStringPart; 3]>,
}
#[cfg(test)]
impl Arbitrary for CalculatedString {
type Parameters = ();
type Strategy = BoxedStrategy<Self>;
fn arbitrary_with(_args: ()) -> Self::Strategy {
proptest::collection::vec(any::<CalculatedStringPart>(), 1..=5)
.prop_map(|parts_vec| {
let mut parts = SmallVec::new();
for part in parts_vec {
match (parts.pop(), part) {
(
Some(CalculatedStringPart::Literal(mut l1)),
CalculatedStringPart::Literal(l2),
) => {
l1.push_str(&l2);
parts.push(CalculatedStringPart::Literal(l1));
}
(
Some(l @ CalculatedStringPart::Literal(_)),
c @ CalculatedStringPart::Calc(_),
) => {
parts.push(l);
parts.push(c);
}
(Some(CalculatedStringPart::Calc(c1)), CalculatedStringPart::Calc(c2)) => {
let calc = Calculation::Op(Op::Add, vec![c1, c2]).normalized();
parts.push(CalculatedStringPart::Calc(calc));
}
(
Some(c @ CalculatedStringPart::Calc(_)),
l @ CalculatedStringPart::Literal(_),
) => {
parts.push(c);
parts.push(l);
}
(None, first) => {
parts.push(first);
}
}
}
Self { parts }
})
.boxed()
}
}
impl CalculatedString {
pub fn concat(mut self, other: Self) -> Self {
use CalculatedStringPart::*;
for part in other.parts {
match (self.parts.pop(), part) {
(None, part) => self.parts.push(part),
(Some(Calc(c1)), Calc(c2)) => {
let mut c = Calculation::Op(Op::Add, vec![c1, c2]);
c.normalize();
self.parts.push(Calc(c));
}
(Some(Literal(mut l1)), Literal(l2)) => {
l1.push_str(&l2);
self.parts.push(Literal(l1));
}
(Some(left @ Calc(_)), right @ Literal(_))
| (Some(left @ Literal(_)), right @ Calc(_)) => {
self.parts.push(left);
self.parts.push(right);
}
}
}
self
}
pub fn join_with_literal(mut self, other: Self, part: &str) -> Self {
self.parts.push(CalculatedStringPart::Literal(part.into()));
self.parts.extend(other.parts);
self
}
pub fn join_with_calc(mut self, other: Self, part: Calculation) -> Self {
self.parts.push(CalculatedStringPart::Calc(part.into()));
self.parts.extend(other.parts);
self
}
pub fn evaluate(
&self,
character: &Character,
rref: &ResourceRef,
resources: &dyn ResourceStorage,
) -> String {
let mut s = String::new();
for part in self.parts.iter() {
let len_before = s.len();
let context = CalcContext::new(character, rref, resources);
if let Err(_) = part.evaluate(&mut s, context) {
s.truncate(len_before);
s.push_str("<<error>>");
}
}
s
}
}
impl Default for CalculatedString {
fn default() -> Self {
CalculatedString {
parts: SmallVec::default(),
}
}
}
impl fmt::Display for CalculatedString {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
for part in self.parts.iter() {
match part {
CalculatedStringPart::Calc(c) => write!(f, "[[ {} ]]", c)?,
CalculatedStringPart::Literal(s) => write!(f, "{}", s)?,
}
}
Ok(())
}
}
try_from_str!(CalculatedString);
#[derive(Debug, Error)]
#[error("Failed to parse calculated string")]
pub struct CalculatedStringFromStrError;
impl FromStr for CalculatedString {
type Err = CalculatedStringFromStrError;
fn from_str(mut s: &str) -> Result<Self, Self::Err> {
let mut parts = SmallVec::new();
let mut in_literal = true;
while !s.is_empty() {
if in_literal {
match s.find("[[") {
Some(0) => {
s = &s[2..];
in_literal = false;
}
Some(loc) => {
parts.push(CalculatedStringPart::Literal(String::from(&s[..loc])));
s = &s[loc + 2..];
in_literal = false;
}
None => {
parts.push(CalculatedStringPart::Literal(String::from(s)));
s = "";
}
}
} else {
match s.find("]]") {
Some(loc) => {
let calc = s[..loc].parse().map_err(|_| CalculatedStringFromStrError)?;
s = &s[loc + 2..];
parts.push(CalculatedStringPart::Calc(calc));
in_literal = true;
}
None => return Err(CalculatedStringFromStrError),
}
}
}
Ok(CalculatedString { parts })
}
}
serialize_display!(CalculatedString);
| true
|
6bc87acb6bd315bf80c39fa1be64999e89894729
|
Rust
|
wasmerio/cranelift
|
/cranelift-codegen/src/dce.rs
|
UTF-8
| 2,407
| 2.859375
| 3
|
[
"LLVM-exception",
"Apache-2.0"
] |
permissive
|
//! A Dead-Code Elimination (DCE) pass.
//!
//! Dead code here means instructions that have no side effects and have no
//! result values used by other instructions.
use crate::cursor::{Cursor, FuncCursor};
use crate::dominator_tree::DominatorTree;
use crate::entity::EntityRef;
use crate::ir::instructions::InstructionData;
use crate::ir::{DataFlowGraph, Function, Inst, Opcode};
use crate::timing;
/// Test whether the given opcode is unsafe to even consider for DCE.
fn trivially_unsafe_for_dce(opcode: Opcode) -> bool {
opcode.is_call()
|| opcode.is_branch()
|| opcode.is_terminator()
|| opcode.is_return()
|| opcode.can_trap()
|| opcode.other_side_effects()
|| opcode.can_store()
}
/// Preserve instructions with used result values.
fn any_inst_results_used(inst: Inst, live: &[bool], dfg: &DataFlowGraph) -> bool {
dfg.inst_results(inst).iter().any(|v| live[v.index()])
}
/// Load instructions without the `notrap` flag are defined to trap when
/// operating on inaccessible memory, so we can't DCE them even if the
/// loaded value is unused.
fn is_load_with_defined_trapping(opcode: Opcode, data: &InstructionData) -> bool {
if !opcode.can_load() {
return false;
}
match *data {
InstructionData::StackLoad { .. } => false,
InstructionData::Load { flags, .. } => !flags.notrap(),
_ => true,
}
}
/// Perform DCE on `func`.
pub fn do_dce(func: &mut Function, domtree: &mut DominatorTree) {
let _tt = timing::dce();
debug_assert!(domtree.is_valid());
let mut live = vec![false; func.dfg.num_values()];
for &ebb in domtree.cfg_postorder() {
let mut pos = FuncCursor::new(func).at_bottom(ebb);
while let Some(inst) = pos.prev_inst() {
{
let data = &pos.func.dfg[inst];
let opcode = data.opcode();
if trivially_unsafe_for_dce(opcode)
|| is_load_with_defined_trapping(opcode, &data)
|| any_inst_results_used(inst, &live, &pos.func.dfg)
{
for arg in pos.func.dfg.inst_args(inst) {
let v = pos.func.dfg.resolve_aliases(*arg);
live[v.index()] = true;
}
continue;
}
}
pos.remove_inst();
}
}
}
| true
|
6b0655e515543ab777f79fd73c939411c9caa736
|
Rust
|
cryptopossum/gp-v2-services
|
/solver/src/settlement_submission/retry.rs
|
UTF-8
| 3,534
| 2.78125
| 3
|
[] |
no_license
|
use super::EncodedSettlement;
use anyhow::Result;
use contracts::GPv2Settlement;
use ethcontract::{
dyns::DynMethodBuilder,
errors::{ExecutionError, MethodError},
jsonrpc::types::Error as RpcError,
transaction::{confirm::ConfirmParams, ResolveCondition},
web3::error::Error as Web3Error,
GasPrice,
};
use primitive_types::U256;
use transaction_retry::{TransactionResult, TransactionSending};
fn is_transaction_error(error: &ExecutionError) -> bool {
// This is the error as we've seen it on openethereum nodes. The code and error messages can
// be found in openethereum's source code in `rpc/src/v1/helpers/errors.rs`.
// TODO: check how this looks on geth and infura. Not recognizing the error is not a serious
// problem but it will make us sometimes log an error when there actually was no problem.
matches!(error, ExecutionError::Web3(Web3Error::Rpc(RpcError { code, .. })) if code.code() == -32010)
}
pub struct SettleResult(pub Result<(), MethodError>);
impl TransactionResult for SettleResult {
fn was_mined(&self) -> bool {
if let Err(err) = &self.0 {
!is_transaction_error(&err.inner)
} else {
true
}
}
}
pub struct SettlementSender<'a> {
pub contract: &'a GPv2Settlement,
pub nonce: U256,
pub gas_limit: f64,
pub settlement: EncodedSettlement,
}
#[async_trait::async_trait]
impl<'a> TransactionSending for SettlementSender<'a> {
type Output = SettleResult;
async fn send(&self, gas_price: f64) -> Self::Output {
tracing::info!("submitting solution transaction at gas price {}", gas_price);
let mut method = settle_method_builder(self.contract, self.settlement.clone())
.nonce(self.nonce)
.gas_price(GasPrice::Value(U256::from_f64_lossy(gas_price)))
.gas(U256::from_f64_lossy(self.gas_limit));
method.tx.resolve = Some(ResolveCondition::Confirmed(ConfirmParams::mined()));
let result = method.send().await.map(|_| ());
SettleResult(result)
}
}
pub fn settle_method_builder(
contract: &GPv2Settlement,
settlement: EncodedSettlement,
) -> DynMethodBuilder<()> {
contract.settle(
settlement.tokens,
settlement.clearing_prices,
settlement.encoded_trades,
settlement.encoded_interactions,
)
}
// We never send cancellations but we still need to have types that implement the traits.
pub struct CancelResult;
impl TransactionResult for CancelResult {
fn was_mined(&self) -> bool {
unreachable!()
}
}
pub struct CancelSender;
#[async_trait::async_trait]
impl TransactionSending for CancelSender {
type Output = CancelResult;
async fn send(&self, _gas_price: f64) -> Self::Output {
unreachable!()
}
}
#[cfg(test)]
mod tests {
use super::*;
use jsonrpc_core::ErrorCode;
#[test]
fn test_submission_result_was_mined() {
let transaction_error = ExecutionError::Web3(Web3Error::Rpc(RpcError {
code: ErrorCode::from(-32010),
message: "".into(),
data: None,
}));
let result = SettleResult(Ok(()));
assert!(result.was_mined());
let result = SettleResult(Err(MethodError::from_parts(
"".into(),
ExecutionError::StreamEndedUnexpectedly,
)));
assert!(result.was_mined());
let result = SettleResult(Err(MethodError::from_parts("".into(), transaction_error)));
assert!(!result.was_mined());
}
}
| true
|
362dcd6e537e4ae742a72c5967d558f21cd7881f
|
Rust
|
michaelfairley/adventofcode2016
|
/src/bin/13.rs
|
UTF-8
| 1,147
| 2.984375
| 3
|
[] |
no_license
|
use std::collections::{HashSet,VecDeque};
// const INPUT: u32 = 10;
// const GOAL: (u32, u32) = (7, 4);
const INPUT: u32 = 1364;
const GOAL: (u32, u32) = (31, 39);
fn open(coords: (u32, u32)) -> bool {
let (x, y) = coords;
(x*x + 3*x + 2*x*y + y + y*y + INPUT).count_ones() % 2 == 0
}
fn main() {
let mut pending = VecDeque::new();
let mut known = HashSet::new();
{
let initial = (1, 1);
pending.push_back((initial, 0));
known.insert(initial);
}
while let Some((candidate, moves)) = pending.pop_front() {
if candidate == GOAL {
println!("{}", moves);
return;
}
let mut neighbors = vec![];
neighbors.push((candidate.0 + 1, candidate.1 ));
neighbors.push((candidate.0 , candidate.1 + 1));
if candidate.0 > 0 { neighbors.push((candidate.0 - 1, candidate.1 )); }
if candidate.1 > 0 { neighbors.push((candidate.0 , candidate.1 - 1)); }
for neighbor in neighbors {
if open(neighbor) && !known.contains(&neighbor) {
known.insert(neighbor);
pending.push_back((neighbor, moves+1));
}
}
}
}
| true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.