blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 5
140
| path
stringlengths 5
183
| src_encoding
stringclasses 6
values | length_bytes
int64 12
5.32M
| score
float64 2.52
4.94
| int_score
int64 3
5
| detected_licenses
listlengths 0
47
| license_type
stringclasses 2
values | text
stringlengths 12
5.32M
| download_success
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|
5ab79eec293e3378992447d0991a70f60cef0382
|
Rust
|
yuulive/eu
|
/src/lib.rs
|
UTF-8
| 17,722
| 2.53125
| 3
|
[] |
no_license
|
#![feature(proc_macro_diagnostic)]
extern crate proc_macro;
use proc_macro::TokenStream;
use proc_macro2;
use quote::quote;
use syn;
use syn::spanned::Spanned;
/// Turns function into partially applicable functions.
///
/// Attribute-macro entry point: parses the annotated item, validates the
/// attribute options, and emits the marker unit structs, the main struct,
/// the generator function, the per-argument setters and the final `call`.
#[proc_macro_attribute]
pub fn part_app(attr: TokenStream, item: TokenStream) -> TokenStream {
    let func_item: syn::Item = syn::parse(item).expect("failed to parse input");
    let attr_options = MacroOptions::from(attr);
    attr_options.check(&func_item);
    match func_item {
        syn::Item::Fn(ref func) => {
            let fn_info = FunctionInformation::from(func);
            fn_info.check();
            // disallow where clauses
            if let Some(w) = &func.sig.generics.where_clause {
                w.span()
                    .unstable()
                    .error("part_app does not allow where clauses")
                    .emit();
            }
            // Generate each component of the expanded output.
            let func_struct = main_struct(&fn_info, &attr_options);
            let generator_func = generator_func(&fn_info, &attr_options);
            let final_call = final_call(&fn_info, &attr_options);
            let argument_calls = argument_calls(&fn_info, &attr_options);
            // Unit marker types recording whether an argument was supplied.
            let unit_structs = {
                let added_unit = fn_info.unit.added;
                let empty_unit = fn_info.unit.empty;
                let vis = fn_info.public;
                quote! {
                    #[allow(non_camel_case_types,non_snake_case)]
                    #vis struct #added_unit;
                    #[allow(non_camel_case_types,non_snake_case)]
                    #vis struct #empty_unit;
                }
            };
            // assemble output
            let mut out = proc_macro2::TokenStream::new();
            out.extend(unit_structs);
            out.extend(func_struct);
            out.extend(generator_func);
            out.extend(argument_calls);
            out.extend(final_call);
            // println!("{}", out);
            TokenStream::from(out)
        }
        _ => {
            // Non-function items are rejected with a diagnostic; the item is
            // re-emitted unchanged so later errors still point at real code.
            func_item
                .span()
                .unstable()
                .error(
                    "Only functions can be partially applied, for structs use the builder pattern",
                )
                .emit();
            proc_macro::TokenStream::from(quote! { #func_item })
        }
    }
}
/// The portion of the signature necessary for each impl block
///
/// Emits the shared generic parameter list: the function's own generics,
/// one `Fn() -> T` parameter per argument in the default (lazy) mode, and
/// the `BODYFN` bound for the body closure.
fn impl_signature<'a>(
    args: &Vec<&syn::PatType>,
    ret_type: &'a syn::ReturnType,
    generics: &Vec<&syn::GenericParam>,
    opts: &MacroOptions,
) -> proc_macro2::TokenStream {
    let arg_types = arg_types(&args);
    // `poly` and `value` modes store arguments directly, so the per-argument
    // closure type parameters (and hence the argument names) are only needed
    // in the default branch — compute them there instead of unconditionally.
    let augmented_names = if !(opts.impl_poly || opts.by_value) {
        augmented_argument_names(&arg_names(&args))
    } else {
        Vec::new()
    };
    quote! {
        #(#generics,)*
        #(#augmented_names: Fn() -> #arg_types,)*
        BODYFN: Fn(#(#arg_types,)*) #ret_type
    }
}
/// Generates the methods used to add argument values to a partially applied function. One method is generate for each
/// argument and each method is contained in it's own impl block.
///
/// Each setter consumes `self`, stores the supplied value, and transmutes the
/// struct so this argument's marker type parameter flips from "empty" to
/// "added"; all other type parameters stay free.
fn argument_calls<'a>(
    fn_info: &FunctionInformation,
    opts: &MacroOptions,
) -> proc_macro2::TokenStream {
    let impl_sig = impl_signature(
        &fn_info.argument_vec,
        fn_info.ret_type,
        &fn_info.generics,
        opts,
    );
    let arg_name_vec = arg_names(&fn_info.argument_vec);
    let aug_arg_names = augmented_argument_names(&arg_name_vec);
    let arg_types = arg_types(&fn_info.argument_vec);
    arg_names(&fn_info.argument_vec)
        .into_iter()
        .zip(&aug_arg_names)
        .zip(arg_types)
        .map(|((n, n_fn), n_type)| {
            // All variable names except the name of this function
            let free_vars: Vec<_> = arg_name_vec.iter().filter(|&x| x != &n).collect();
            // Marker type list *after* the setter: this argument becomes
            // "added", every other argument keeps its type parameter.
            let associated_vals_out: Vec<_> = arg_name_vec
                .iter()
                .map(|x| {
                    if &n == x {
                        fn_info.unit.added.clone()
                    } else {
                        x.clone()
                    }
                })
                .collect();
            let val_list_out = if opts.impl_poly || opts.by_value {
                quote! {#(#associated_vals_out,)*}
            } else {
                quote! {#(#associated_vals_out, #aug_arg_names,)*}
            };
            // Marker type list *before* the setter: identical, except this
            // argument is still "empty".
            let associated_vals_in: Vec<_> = associated_vals_out
                .iter()
                .map(|x| {
                    if x == &fn_info.unit.added {
                        &fn_info.unit.empty
                    } else {
                        x
                    }
                })
                .collect();
            let val_list_in = if opts.impl_poly || opts.by_value {
                quote! {#(#associated_vals_in,)*}
            } else {
                quote! {#(#associated_vals_in, #aug_arg_names,)*}
            };
            // `poly`/`Clone` consume `self` with `transmute`; plain `value`
            // mode uses `transmute_copy` on `&self`.
            let (transmute, self_type) = if opts.impl_poly || opts.impl_clone {
                (quote!(transmute), quote!(self))
            } else {
                // if by_value
                (quote!(transmute_copy), quote!(&self))
            };
            let some = if opts.impl_poly {
                quote! {Some(::std::sync::Arc::from(#n))}
            } else {
                // || by_value
                quote! {Some(#n)}
            };
            // What the caller passes in: a boxed closure (`poly`), the raw
            // value (`value`), or the user's own closure type (default).
            let in_type = if opts.impl_poly {
                quote! { Box<dyn Fn() -> #n_type> }
            } else if opts.by_value {
                quote! {#n_type}
            } else {
                quote! { #n_fn }
            };
            let struct_name = &fn_info.struct_name;
            let generics = &fn_info.generics;
            let vis = fn_info.public;
            quote! {
                #[allow(non_camel_case_types,non_snake_case)]
                impl< #impl_sig, #(#free_vars,)* > // The impl signature
                #struct_name<#(#generics,)* #val_list_in BODYFN> // The struct signature
                {
                    #vis fn #n (mut self, #n: #in_type) ->
                    #struct_name<#(#generics,)* #val_list_out BODYFN>{
                        self.#n = #some;
                        unsafe {
                            ::std::mem::#transmute::<
                                #struct_name<#(#generics,)* #val_list_in BODYFN>,
                                #struct_name<#(#generics,)* #val_list_out BODYFN>,
                            >(#self_type)
                        }
                    }
                }
            }
        })
        .collect()
}
/// Generates the call function, which executes a fully filled out partially applicable struct.
///
/// The impl is only generated for the struct whose every marker type is
/// "added", so `call` cannot be invoked before all arguments are supplied.
fn final_call<'a>(fn_info: &FunctionInformation, opts: &MacroOptions) -> proc_macro2::TokenStream {
    let ret_type = fn_info.ret_type;
    let generics = &fn_info.generics;
    let unit_added = &fn_info.unit.added;
    let struct_name = &fn_info.struct_name;
    let impl_sig = impl_signature(&fn_info.argument_vec, ret_type, generics, opts);
    let arg_names = arg_names(&fn_info.argument_vec);
    let aug_args = augmented_argument_names(&arg_names);
    let vis = fn_info.public;
    let arg_list: proc_macro2::TokenStream = if opts.impl_poly || opts.by_value {
        aug_args.iter().map(|_| quote! {#unit_added,}).collect()
    } else {
        aug_args.iter().map(|a| quote! {#unit_added, #a,}).collect()
    };
    // Lazily stored arguments are closures and need a `()` invocation;
    // `value` mode stores plain values.
    let call = if !opts.by_value {
        quote! {()}
    } else {
        quote! {}
    };
    quote! {
        #[allow(non_camel_case_types,non_snake_case)]
        impl <#impl_sig> // impl signature
        // struct signature
        #struct_name<#(#generics,)* #arg_list BODYFN>
        {
            #vis fn call(self) #ret_type { // final call
                (self.body)(#(self.#arg_names.unwrap()#call,)*)
            }
        }
    }
}
/// The function called by the user to create an instance of a partially applicable function. This function always has
/// the name of the original function the macro is called on.
fn generator_func<'a>(
    fn_info: &FunctionInformation,
    opts: &MacroOptions,
) -> proc_macro2::TokenStream {
    // because the quote! macro cannot expand fields
    let arg_names = arg_names(&fn_info.argument_vec);
    let arg_types = arg_types(&fn_info.argument_vec);
    let marker_names = marker_names(&arg_names);
    let generics = &fn_info.generics;
    let empty_unit = &fn_info.unit.empty;
    let body = fn_info.block;
    let name = fn_info.fn_name;
    let struct_name = &fn_info.struct_name;
    let ret_type = fn_info.ret_type;
    let vis = fn_info.public;
    // `poly`/`value` modes store arguments directly, so the generator takes
    // no per-argument type parameters.
    let gen_types = if opts.impl_poly || opts.by_value {
        quote! {#(#generics,)*}
    } else {
        quote! {#(#generics,)* #(#arg_names,)*}
    };
    // Every argument starts in the "empty" marker state.
    let struct_types = if opts.impl_poly || opts.by_value {
        arg_names.iter().map(|_| quote! {#empty_unit,}).collect()
    } else {
        quote! {#(#empty_unit,#arg_names,)*}
    };
    // `poly`/`Clone` wrap the body closure in an `Arc` (see `main_struct`).
    let body_fn = if opts.impl_poly || opts.impl_clone {
        quote! {::std::sync::Arc::new(|#(#arg_names,)*| #body),}
    } else {
        quote! {|#(#arg_names,)*| #body,}
    };
    let where_clause = if opts.impl_poly || opts.by_value {
        quote!()
    } else {
        quote! {
            where
            #(#arg_names: Fn() -> #arg_types,)*
        }
    };
    quote! {
        #[allow(non_camel_case_types,non_snake_case)]
        #vis fn #name<#gen_types>() -> #struct_name<#(#generics,)* #struct_types
        impl Fn(#(#arg_types,)*) #ret_type>
        #where_clause
        {
            #struct_name {
                #(#arg_names: None,)*
                #(#marker_names: ::std::marker::PhantomData,)*
                body: #body_fn
            }
        }
    }
}
/// A vector of all argument names. Simple parsing.
///
/// Renders each argument pattern back to text and interns it as an `Ident`
/// carrying the original span, so diagnostics still point at user code.
fn arg_names(args: &Vec<&syn::PatType>) -> Vec<syn::Ident> {
    args.iter()
        .map(|f| {
            let f_pat = &f.pat;
            // `.to_string()` replaces the redundant `format!("{}", ...)`
            // round-trip; the unused `'a` lifetime parameter is dropped.
            syn::Ident::new(&quote!(#f_pat).to_string(), f.span())
        })
        .collect()
}
/// The vector of names used to hold PhantomData.
///
/// Each argument `x` gets a companion marker field `x___m`.
fn marker_names(names: &Vec<syn::Ident>) -> Vec<syn::Ident> {
    let mut markers = Vec::with_capacity(names.len());
    for ident in names {
        markers.push(concat_ident(ident, "m"));
    }
    markers
}
/// Concatenates an identifier with a string, returning a new identifier with
/// the same span.
fn concat_ident<'a>(ident: &'a syn::Ident, end: &str) -> syn::Ident {
    // `Ident` implements `Display`, so formatting it directly avoids the
    // needless `quote!` token-stream round-trip of the original.
    let name = format!("{}___{}", ident, end);
    syn::Ident::new(&name, ident.span())
}
/// Filter an argument list to a pattern type
///
/// Panics on a `self` receiver; callers are expected to have rejected
/// methods already (see `FunctionInformation::check`).
fn argument_vector<'a>(
    args: &'a syn::punctuated::Punctuated<syn::FnArg, syn::token::Comma>,
) -> Vec<&syn::PatType> {
    args.iter()
        .map(|fn_arg| match fn_arg {
            syn::FnArg::Receiver(_) => panic!("should filter out reciever arguments"),
            syn::FnArg::Typed(t) => {
                // Reference arguments must name their lifetime explicitly —
                // presumably because the generated struct has to spell it
                // out; confirm against the macro's expansion.
                if let syn::Type::Reference(r) = t.ty.as_ref() {
                    if r.lifetime.is_none() {
                        t.span()
                            .unstable()
                            .error("part_app does not support lifetime elision")
                            .emit();
                    }
                }
                t
            }
        })
        .collect()
}
/// Retrieves the type of every argument in the list.
fn arg_types<'a>(args: &Vec<&'a syn::PatType>) -> Vec<&'a syn::Type> {
    let mut types = Vec::with_capacity(args.len());
    for arg in args {
        types.push(&*arg.ty);
    }
    types
}
/// Names to hold function types
///
/// Each argument `x` gets a companion type-parameter name `x___FN`.
fn augmented_argument_names<'a>(arg_names: &Vec<syn::Ident>) -> Vec<syn::Ident> {
    let mut augmented = Vec::with_capacity(arg_names.len());
    for name in arg_names {
        augmented.push(concat_ident(name, "FN"));
    }
    augmented
}
/// Generates the main struct for the partially applicable function.
/// All other functions are methods on this struct.
fn main_struct<'a>(fn_info: &FunctionInformation, opts: &MacroOptions) -> proc_macro2::TokenStream {
    let arg_types = arg_types(&fn_info.argument_vec);
    let arg_names = arg_names(&fn_info.argument_vec);
    let arg_augmented = augmented_argument_names(&arg_names);
    let ret_type = fn_info.ret_type;
    // Default (lazy) mode interleaves a closure type parameter after each
    // argument's marker type parameter; `poly`/`value` use markers only.
    let arg_list: Vec<_> = if !(opts.impl_poly || opts.by_value) {
        arg_names
            .iter()
            .zip(arg_augmented.iter())
            .flat_map(|(n, a)| vec![n, a])
            .collect()
    } else {
        arg_names.iter().collect()
    };
    // `poly`/`Clone` wrap the body in an `Arc` so the struct can be cloned.
    let bodyfn = if opts.impl_poly || opts.impl_clone {
        quote! {::std::sync::Arc<BODYFN>}
    } else {
        quote! { BODYFN }
    };
    let where_clause = if opts.impl_poly || opts.by_value {
        quote!(BODYFN: Fn(#(#arg_types,)*) #ret_type,)
    } else {
        quote! {
            #(#arg_augmented: Fn() -> #arg_types,)*
            BODYFN: Fn(#(#arg_types,)*) #ret_type,
        }
    };
    let names_with_m = marker_names(&arg_names);
    // Argument storage: `poly` keeps Arc'd trait objects, `value` keeps the
    // values themselves, the default keeps the user's closure type.
    let option_list = if opts.impl_poly {
        quote! {#(#arg_names: Option<::std::sync::Arc<dyn Fn() -> #arg_types>>,)*}
    } else if opts.by_value {
        quote! {#(#arg_names: Option<#arg_types>,)*}
    } else {
        quote! {#(#arg_names: Option<#arg_augmented>,)*}
    };
    let name = &fn_info.struct_name;
    // Optional hand-rolled `Clone` impl (markers are re-created, stored
    // arguments and the body are cloned).
    let clone = if opts.impl_clone {
        let sig = impl_signature(
            &fn_info.argument_vec,
            fn_info.ret_type,
            &fn_info.generics,
            opts,
        );
        quote! {
            #[allow(non_camel_case_types,non_snake_case)]
            impl<#sig, #(#arg_list,)*> ::std::clone::Clone for #name <#(#arg_list,)* BODYFN>
            where #where_clause
            {
                fn clone(&self) -> Self {
                    Self {
                        #(#names_with_m: ::std::marker::PhantomData,)*
                        #(#arg_names: self.#arg_names.clone(),)*
                        body: self.body.clone(),
                    }
                }
            }
        }
    } else {
        quote! {}
    };
    let generics = &fn_info.generics;
    let vis = fn_info.public;
    quote! {
        #[allow(non_camel_case_types,non_snake_case)]
        #vis struct #name <#(#generics,)* #(#arg_list,)*BODYFN>
        where #where_clause
        {
            // These hold the (phantom) types which represent if a field has
            // been filled
            #(#names_with_m: ::std::marker::PhantomData<#arg_names>,)*
            // These hold the closures representing each argument
            #option_list
            // This holds the executable function
            body: #bodyfn,
        }
        #clone
    }
}
/// Contains options used to customize output
struct MacroOptions {
    /// Raw attribute tokens, kept so `check` can distinguish "no attribute
    /// given" from "unrecognized attribute".
    attr: proc_macro::TokenStream,
    /// `value`: store arguments by value instead of as closures.
    by_value: bool,
    /// `Clone`: generate a `Clone` impl for the produced struct.
    impl_clone: bool,
    /// `poly`: store arguments as Arc'd `dyn Fn` trait objects.
    impl_poly: bool,
}
impl MacroOptions {
    /// Creates the default option set (all flags off), retaining the raw
    /// attribute tokens for the emptiness check in `check`.
    fn new(attr: proc_macro::TokenStream) -> Self {
        Self {
            attr,
            by_value: false,
            impl_clone: false,
            impl_poly: false,
        }
    }
    /// Parses the comma-separated attribute list into option flags.
    fn from(attr: proc_macro::TokenStream) -> Self {
        let attributes: Vec<String> = attr
            .to_string()
            .split(",")
            .map(|s| s.trim().to_string())
            .collect();
        let mut attr_options = MacroOptions::new(attr);
        attr_options.impl_poly = attributes.contains(&"poly".to_string());
        attr_options.by_value = attributes.contains(&"value".to_string());
        attr_options.impl_clone = attributes.contains(&"Clone".to_string());
        attr_options
    }
    /// Emits compiler diagnostics for unsupported option combinations.
    ///
    /// NOTE(review): an unknown word mixed with a recognized one (e.g.
    /// `poly, foo`) is silently ignored — the last branch only fires when
    /// *no* known option was recognized at all.
    fn check(&self, span: &syn::Item) {
        if self.impl_poly && self.by_value {
            span.span()
                .unstable()
                .error(r#"Cannot implement "poly" and "value" at the same time"#)
                .emit()
        }
        if self.impl_clone && !(self.impl_poly || self.by_value) {
            span.span()
                .unstable()
                .error(r#"Cannot implement "Clone" without "poly" or "value""#)
                .emit()
        }
        if !self.attr.is_empty() && !self.impl_poly && !self.by_value && !self.impl_clone {
            span.span()
                .unstable()
                .error(
                    r#"Unknown attribute. Acceptable attributes are "poly", "Clone" and "value""#,
                )
                .emit()
        }
    }
}
/// Preprocessed information about the function the macro was applied to.
struct FunctionInformation<'a> {
    /// Typed (non-receiver) arguments of the function.
    argument_vec: Vec<&'a syn::PatType>,
    /// Return type as written, including the `->`.
    ret_type: &'a syn::ReturnType,
    /// The function's own generic parameters.
    generics: Vec<&'a syn::GenericParam>,
    /// Name of the original function (reused for the generator fn).
    fn_name: &'a syn::Ident,
    /// Name of the generated struct.
    struct_name: syn::Ident,
    /// Type-state marker idents ("added"/"empty").
    unit: Units,
    /// The original function body.
    block: &'a syn::Block,
    /// Visibility, propagated to all generated items.
    public: &'a syn::Visibility,
    /// The whole original item (field name retains its original typo).
    orignal_fn: &'a syn::ItemFn,
}
/// Contains Idents for the unit structs
///
/// These unit types act as type-state markers on the generated struct.
struct Units {
    /// Marker meaning "this argument has been supplied".
    added: syn::Ident,
    /// Marker meaning "this argument is still missing".
    empty: syn::Ident,
}
impl<'a> FunctionInformation<'a> {
    /// Extracts everything the code generators need from the parsed function.
    fn from(func: &'a syn::ItemFn) -> Self {
        let func_name = &func.sig.ident;
        Self {
            argument_vec: argument_vector(&func.sig.inputs),
            ret_type: &func.sig.output,
            generics: func.sig.generics.params.iter().map(|f| f).collect(),
            fn_name: func_name,
            // Struct name is derived from the function name and prefixed to
            // make accidental collisions with user code unlikely.
            struct_name: syn::Ident::new(
                &format!("__PartialApplication__{}_", func_name),
                func_name.span(),
            ),
            unit: Units {
                added: concat_ident(func_name, "Added"),
                empty: concat_ident(func_name, "Empty"),
            },
            block: &func.block,
            public: &func.vis,
            orignal_fn: func,
        }
    }
    /// Rejects methods: a `self` receiver cannot be partially applied.
    fn check(&self) {
        if let Some(r) = self.orignal_fn.sig.receiver() {
            r.span()
                .unstable()
                .error("Cannot make methods partially applicable yet")
                .emit();
        }
    }
}
| true
|
9d506be6d534eb1aac814f60e52cb1f6c6f6cca3
|
Rust
|
dairyisscary/for-your-reference-part-2
|
/src/main.rs
|
UTF-8
| 3,132
| 3.765625
| 4
|
[] |
no_license
|
/// Everything that can go wrong while reading or parsing the expression.
#[derive(Debug)]
enum Error {
    /// Input (or a required token) ended before anything could be read.
    UnexpectedEndOfInput,
    /// A character that is not valid inside a number literal.
    NotANumberChar(char),
    /// A character that is not `+` or `-` where an operator was expected.
    NotASymbolChar(char),
    /// The collected digits failed `i64` parsing (e.g. overflow, lone `-`).
    NumberParseError(std::num::ParseIntError),
}
/// Operands of a subtraction, kept as a named pair.
struct MinusExpression {
    left: i64,
    right: i64,
}
/// The binary operator sitting between the two numbers.
enum Symbol {
    Plus,
    Minus,
}
/// Result of a successful parse; one variant per supported operator.
/// Note the two variants deliberately use different payload styles
/// (tuple vs. named struct).
enum ParseTree {
    PlusExpr(i64, i64),
    MinusExpr(MinusExpression),
}
/// Skips leading space characters (only `' '`, not tabs or other
/// whitespace) and returns the remaining slice.
fn read_any_amount_of_whitespace(input: &str) -> &str {
    // Iterative, allocation-free replacement for the original one-char-per-
    // frame recursion, which had no tail-call guarantee and could overflow
    // the stack on a long run of spaces.
    input.trim_start_matches(' ')
}
fn read_number(input: &str) -> Result<(i64, &str), Error> {
let mut chars = input.chars();
let mut bytes_read = 0;
loop {
match chars.next() {
// Allow a first char of -
Some('-') if bytes_read == 0 => {
bytes_read += 1;
}
Some(c) if c.is_digit(10) => {
bytes_read += 1;
}
Some(' ') => break,
Some(c) => return Err(Error::NotANumberChar(c)),
None if bytes_read == 0 => return Err(Error::UnexpectedEndOfInput),
None => break,
}
}
let number = input[0..bytes_read]
.parse()
.map_err(Error::NumberParseError)?;
Ok((number, &input[bytes_read..]))
}
fn read_symbol(input: &str) -> Result<(Symbol, &str), Error> {
let symbol = match input.chars().nth(0) {
Some('+') => Symbol::Plus,
Some('-') => Symbol::Minus,
Some(c) => return Err(Error::NotASymbolChar(c)),
None => return Err(Error::UnexpectedEndOfInput),
};
Ok((symbol, &input[1..]))
}
/// Parses `<number> <op> <number>` with arbitrary runs of spaces between
/// the tokens, returning the corresponding tree or the first error hit.
fn parse_input(input: &str) -> Result<ParseTree, Error> {
    let left_trimmed = read_any_amount_of_whitespace(input);
    // `?` replaces the original hand-written match-and-return, matching the
    // propagation style already used for the other two reads below.
    let (left_number, symbol_untrimmed) = read_number(left_trimmed)?;
    let symbol_trimmed = read_any_amount_of_whitespace(symbol_untrimmed);
    let (symbol, right_untrimmed) = read_symbol(symbol_trimmed)?;
    let right_trimmed = read_any_amount_of_whitespace(right_untrimmed);
    let (right_number, _) = read_number(right_trimmed)?;
    let tree = match symbol {
        Symbol::Plus => ParseTree::PlusExpr(left_number, right_number),
        Symbol::Minus => ParseTree::MinusExpr(MinusExpression {
            left: left_number,
            right: right_number,
        }),
    };
    Ok(tree)
}
fn printed_result(parse_tree: ParseTree) -> String {
match parse_tree {
ParseTree::PlusExpr(left, right) => format!("{} + {} = {}", left, right, left + right),
ParseTree::MinusExpr(info) => format!(
"{} - {} = {}",
info.left,
info.right,
info.left - info.right
),
}
}
fn main() {
let parsed = match std::env::args().nth(1) {
Some(string) => parse_input(&string),
None => Err(Error::UnexpectedEndOfInput),
};
match parsed {
Ok(parsed) => println!("{}", printed_result(parsed)),
Err(e) => eprintln!("Something went wrong: {:?}", e),
}
}
| true
|
d28846fecf27e52c8e2454e72b5f8dfc49f91825
|
Rust
|
wako057/rust-tutorial
|
/chapter-05/struct_definition/src/main.rs
|
UTF-8
| 2,283
| 3.546875
| 4
|
[] |
no_license
|
/// Account record used by the struct-definition examples below.
struct User {
    username: String,
    email: String,
    sign_in_count: u64,
    active: bool,
}
// Tuple structs: identical shape (three `i32`s) but distinct types.
struct Color(i32, i32, i32);
struct Point(i32, i32, i32);
fn main() {
    // Struct literal with every field written out explicitly.
    let user1 = User {
        email: String::from("someone@example.com"),
        username: String::from("someusername123"),
        active: false,
        sign_in_count: 1,
    };
    print_user(&user1);
    // Mutable binding: individual fields can be reassigned afterwards.
    let mut user2 = User {
        email: String::from("someone@example.com"),
        username: String::from("someusername123"),
        active: true,
        sign_in_count: 1,
    };
    user2.email = String::from("anotheremail@example.com");
    print_user(&user2);
    // Construction through helper functions.
    let user3 = build_user(
        String::from("wako057@gmail.com"),
        String::from("Wako awax")
    );
    print_user(&user3);
    let user4 = build_user_shorthand(
        String::from("wako057@hotmail.com"),
        String::from("Wako mini")
    );
    print_user(&user4);
    // Shadows the previous `user4`, copying two fields from `user1`.
    let user4 = User {
        email: String::from("another@example.com"),
        username: String::from("anotherusername567"),
        active: user1.active,
        sign_in_count: user1.sign_in_count,
    };
    print_user(&user4);
    // Struct-update syntax: the remaining fields come from `user1`.
    let user5 = User {
        email: String::from("another@example.com"),
        username: String::from("anotherusername567"),
        ..user1
    };
    print_user(&user5);
    // drop(user1);
    // print_user(&user5);
    let black = Color(0, 0, 0);
    let origin = Point(0, 0, 0);
    // These calls take ownership of their tuple-struct arguments.
    print_color(black);
    print_origin(origin);
}
fn print_origin(origin: Point)
{
println!("Point Values X: [{}] Y: [{}] Z: [{}]", origin.0, origin.1, origin.2);
}
fn print_color(color: Color)
{
println!("Colors Values R: [{}] G: [{}] B: [{}]", color.0, color.1, color.2);
}
fn print_user(user: &User)
{
println!("user.email: [{}]\nuser.username: [{}]\nuser.active: [{}]\nuser.sign_incount: [{}]", user.email, user.username, user.active, user.sign_in_count);
println!("############################################");
}
/// Creates a `User` with the given credentials, active and with one sign-in.
fn build_user(email: String, username: String) -> User {
    User {
        // Field-init shorthand replaces the redundant `email: email` form.
        email,
        username,
        active: true,
        sign_in_count: 1,
    }
}
/// Same as `build_user`, but actually demonstrating field-init shorthand —
/// the original body repeated `email: email` despite the function's name.
fn build_user_shorthand(email: String, username: String) -> User {
    User {
        email,
        username,
        active: true,
        sign_in_count: 1,
    }
}
| true
|
bb984b11ddc58526fed05862fe18cd3d3de6e228
|
Rust
|
MDoerner/AdventOfCode2020
|
/Rust/adventOfCode2020/src/input.rs
|
UTF-8
| 886
| 3.046875
| 3
|
[] |
no_license
|
use std::{path::Path, path::PathBuf};
use std::fs;
/// Identifies a single Advent of Code puzzle.
pub struct PuzzleConfiguration {
    /// Calendar day; selects the input file (`Day<day>.txt`).
    pub day: i32,
    /// Puzzle part; not used for input-file resolution here.
    pub part: i32
}
/// Reads the puzzle input file for `config`, or `None` if it cannot be read.
pub fn puzzle_input(config: &PuzzleConfiguration) -> Option<String>{
    let path: PathBuf = puzzle_file_path(config);
    // `Result::ok` replaces the hand-written Ok/Err match; the needless
    // extra borrow (`&config` on an already-borrowed value) is dropped too.
    fs::read_to_string(path).ok()
}
/// Resolves the input file under `Input/`, two directories above the crate
/// root (panics if the manifest dir has fewer than two ancestors).
fn puzzle_file_path(config: &PuzzleConfiguration) -> PathBuf{
    let filename = puzzle_file_name(config);
    let repo_root = Path::new(env!("CARGO_MANIFEST_DIR"))
        .parent().unwrap()
        .parent().unwrap();
    // `join` builds the same path the original produced with two `push`es.
    repo_root.join("Input").join(filename)
}
/// Builds the input file name, e.g. `Day7.txt` for day 7.
fn puzzle_file_name(config: &PuzzleConfiguration) -> String{
    // A single `format!` replaces the intermediate `&str` binding plus
    // slice `join("")` of the original.
    format!("Day{}.txt", config.day)
}
| true
|
d4752100f7cf7ed6c070381b9a1cfabfd47a118a
|
Rust
|
boa-dev/boa
|
/boa_ast/src/function/mod.rs
|
UTF-8
| 6,004
| 3.296875
| 3
|
[
"MIT",
"Unlicense"
] |
permissive
|
//! Functions and classes nodes, as defined by the [spec].
//!
//! [Functions][func] are mainly subprograms that can be called by external code to execute a sequence of
//! statements (the *body* of the function). Javascript functions fall in several categories:
//!
//! - [`Function`]s.
//! - [`ArrowFunction`]s.
//! - [`AsyncArrowFunction`]s.
//! - [`Generator`]s.
//! - [`AsyncFunction`]s.
//! - [`AsyncGenerator`]s.
//!
//! All of them can be declared in either [declaration][decl] form or [expression][expr] form,
//! except from `ArrowFunction`s and `AsyncArrowFunction`s, which can only be declared in expression form.
//!
//! This module also contains [`Class`]es, which are templates for creating objects. Classes
//! can also be declared in either declaration or expression form.
//!
//! [spec]: https://tc39.es/ecma262/#sec-ecmascript-language-functions-and-classes
//! [func]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Functions
//! [decl]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/function
//! [expr]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/function
mod arrow_function;
mod async_arrow_function;
mod async_function;
mod async_generator;
mod class;
mod generator;
mod parameters;
pub use arrow_function::ArrowFunction;
pub use async_arrow_function::AsyncArrowFunction;
pub use async_function::AsyncFunction;
pub use async_generator::AsyncGenerator;
pub use class::{Class, ClassElement, PrivateName};
use core::ops::ControlFlow;
pub use generator::Generator;
pub use parameters::{FormalParameter, FormalParameterList, FormalParameterListFlags};
use crate::visitor::{VisitWith, Visitor, VisitorMut};
use crate::{block_to_string, join_nodes};
use crate::{try_break, Script};
use boa_interner::{Interner, ToIndentedString};
use super::expression::{Expression, Identifier};
use super::Declaration;
/// A function definition, as defined by the [spec].
///
/// By default, functions return `undefined`. To return any other value, the function must have
/// a return statement that specifies the value to return.
///
/// More information:
/// - [MDN documentation][mdn]
///
/// [spec]: https://tc39.es/ecma262/#sec-function-definitions
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Functions
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
#[derive(Clone, Debug, PartialEq)]
pub struct Function {
    /// Binding name, if any (`None` for anonymous function expressions).
    name: Option<Identifier>,
    /// Formal parameter list of the function.
    parameters: FormalParameterList,
    /// Statements making up the function body.
    body: FunctionBody,
    /// Whether the expression carries its own binding identifier.
    has_binding_identifier: bool,
}
impl Function {
    /// Creates a new function expression.
    ///
    /// `has_binding_identifier` is initialized to `false`.
    #[inline]
    #[must_use]
    pub const fn new(
        name: Option<Identifier>,
        parameters: FormalParameterList,
        body: FunctionBody,
    ) -> Self {
        Self {
            name,
            parameters,
            body,
            has_binding_identifier: false,
        }
    }
    /// Creates a new function expression with an expression binding identifier.
    #[inline]
    #[must_use]
    pub const fn new_with_binding_identifier(
        name: Option<Identifier>,
        parameters: FormalParameterList,
        body: FunctionBody,
        has_binding_identifier: bool,
    ) -> Self {
        Self {
            name,
            parameters,
            body,
            has_binding_identifier,
        }
    }
    /// Gets the name of the function declaration.
    ///
    /// Returns `None` for anonymous function expressions.
    #[inline]
    #[must_use]
    pub const fn name(&self) -> Option<Identifier> {
        self.name
    }
    /// Gets the list of parameters of the function declaration.
    #[inline]
    #[must_use]
    pub const fn parameters(&self) -> &FormalParameterList {
        &self.parameters
    }
    /// Gets the body of the function declaration.
    #[inline]
    #[must_use]
    pub const fn body(&self) -> &FunctionBody {
        &self.body
    }
    /// Returns whether the function expression has a binding identifier.
    #[inline]
    #[must_use]
    pub const fn has_binding_identifier(&self) -> bool {
        self.has_binding_identifier
    }
}
impl ToIndentedString for Function {
    /// Renders `function [name](params) { body }` at the given indentation.
    fn to_indented_string(&self, interner: &Interner, indentation: usize) -> String {
        // Optional leading " name" segment, empty when anonymous.
        let name = match self.name {
            Some(ident) => format!(" {}", interner.resolve_expect(ident.sym())),
            None => String::new(),
        };
        format!(
            "function{}({}) {}",
            name,
            join_nodes(interner, self.parameters.as_ref()),
            block_to_string(self.body.statements(), interner, indentation)
        )
    }
}
impl From<Function> for Expression {
    /// Wraps the function as an expression node.
    #[inline]
    fn from(expr: Function) -> Self {
        Self::Function(expr)
    }
}
impl From<Function> for Declaration {
    /// Wraps the function as a declaration node.
    #[inline]
    fn from(f: Function) -> Self {
        Self::Function(f)
    }
}
impl VisitWith for Function {
    /// Visits the name (if any), the parameters, then the body, in order.
    fn visit_with<'a, V>(&'a self, visitor: &mut V) -> ControlFlow<V::BreakTy>
    where
        V: Visitor<'a>,
    {
        if let Some(ident) = &self.name {
            try_break!(visitor.visit_identifier(ident));
        }
        try_break!(visitor.visit_formal_parameter_list(&self.parameters));
        // The body is a `Script` alias, hence the script visitor.
        visitor.visit_script(&self.body)
    }
    /// Mutable counterpart of [`VisitWith::visit_with`]; same order.
    fn visit_with_mut<'a, V>(&'a mut self, visitor: &mut V) -> ControlFlow<V::BreakTy>
    where
        V: VisitorMut<'a>,
    {
        if let Some(ident) = &mut self.name {
            try_break!(visitor.visit_identifier_mut(ident));
        }
        try_break!(visitor.visit_formal_parameter_list_mut(&mut self.parameters));
        visitor.visit_script_mut(&mut self.body)
    }
}
/// A Function body.
///
/// Since [`Script`] and `FunctionBody` have the same semantics, this is currently
/// only an alias of the former.
///
/// More information:
/// - [ECMAScript reference][spec]
///
/// [spec]: https://tc39.es/ecma262/#prod-FunctionBody
pub type FunctionBody = Script;
| true
|
4224061d877433c6408b8b052eb2b7250c589cb2
|
Rust
|
gypsydave5/rust-book-exercises
|
/collections/src/main.rs
|
UTF-8
| 854
| 2.96875
| 3
|
[] |
no_license
|
extern crate rand;
mod vectors;
mod strings;
mod hashmaps;
mod exercises;
/// Runs every collections demo module, then the chapter exercises on a
/// small batch of random numbers.
fn main() {
    // Brings the `Rng` trait into scope for `gen_range` below.
    use rand::prelude::*;
    vectors::accessing_elements();
    vectors::iteration();
    vectors::mutable_iteration();
    strings::new_string_from_data();
    strings::utf8_strings();
    strings::updating_strings();
    strings::length();
    strings::iterating();
    hashmaps::blue_vs_yellow();
    hashmaps::upsert();
    let mut rng = rand::thread_rng();
    // A range is already an iterator, so the original `.into_iter()` was
    // redundant; draws 19 samples in [1, 10).
    let numbers: Vec<u32> = (1..20).map(|_| rng.gen_range(1, 10)).collect();
    println!("{:?}", numbers);
    let average = exercises::average(&numbers);
    println!("Mode: {}", average.mode);
    println!("Median: {}", average.median);
    println!("Mean: {}", average.mean);
    let latin = exercises::pig_latin(String::from("hello, world!"));
    println!("Latin: {}", latin);
}
| true
|
14a3369b4ec0345539ebc03e046ba79fe74797e9
|
Rust
|
remysucre/fgh
|
/eqsat/src/analysis.rs
|
UTF-8
| 3,594
| 2.859375
| 3
|
[] |
no_license
|
use egg::*;
use std::collections::HashSet;
use std::cmp::Ordering;
use crate::lang::*;
use crate::EGraph;
/// E-class analysis tracking free variables and constant-folded values.
#[derive(Default, Clone)]
pub struct SemiringAnalysis;
// Metadata for each class
#[derive(Debug, PartialEq, Eq)]
pub struct Data {
    // Set of free variables by their class ID
    pub free: HashSet<Symbol>,
    // Constant-folded value of the class, when one is known (see `eval`).
    pub constant: Option<Semiring>,
}
impl Analysis<Semiring> for SemiringAnalysis {
    type Data = Data;
    /// Merges the analysis data of two classes being unioned: free-variable
    /// sets are intersected (constant folding can shrink them) and a known
    /// constant is propagated to `to` when it had none.
    ///
    /// NOTE(review): the `Option<Ordering>` return follows an older egg
    /// merge contract — `Some(Ordering::Greater)` claims `to` absorbed
    /// `from` unchanged. Confirm the exact semantics against the egg
    /// version pinned by this crate.
    fn merge(&self, to: &mut Data, from: Data) -> Option<Ordering> {
        let before_len = to.free.len();
        to.free.retain(|i| from.free.contains(i));
        let did_change = before_len != to.free.len();
        if to.constant.is_none() && from.constant.is_some() {
            to.constant = from.constant;
            None
        } else if did_change {
            None
        } else {
            Some(Ordering::Greater)
        }
    }
    /// Computes the data for a freshly added e-node: its free variables and
    /// (via `eval`) an optional constant value.
    fn make(egraph: &EGraph, enode: &Semiring) -> Data {
        // Free variables of a child class, by ID.
        let fvs = |i: &Id| egraph[*i].data.free.iter().copied();
        let mut free = HashSet::default();
        match enode {
            Semiring::Symbol(v) => {
                free.insert(*v);
            }
            // `let v = a in b`: `v` is bound in `b`, then `a`'s frees added.
            Semiring::Let([v, a, b]) => {
                free.extend(fvs(b));
                // NOTE only do this if v free in b?
                if let Some(v) = fvs(v).next() {
                    free.remove(&v);
                }
                free.extend(fvs(a));
            }
            // `sum v. a`: `v` is bound inside the summation body.
            Semiring::Sum([v, a]) => {
                free.extend(fvs(a));
                if let Some(v) = fvs(v).next() {
                    free.remove(&v);
                }
            }
            // Relations: skip the first child (presumably the relation
            // name — confirm against lang.rs), collect the rest.
            Semiring::Rel(xs) =>
                for x in xs[1..].iter() {
                    free.extend(fvs(x));
                }
            Semiring::Other(_, xs) => {
                for x in xs {
                    free.extend(fvs(x));
                }
            }
            // Default: union the free sets of all children.
            _ => enode.for_each(|c| free.extend(&egraph[c].data.free)),
        }
        let constant = eval(egraph, enode);
        Data { free, constant }
    }
    /// Unions a class with its constant value, if one was computed.
    fn modify(egraph: &mut EGraph, id: Id) {
        if let Some(c) = egraph[id].data.constant.clone() {
            let const_id = egraph.add(c);
            egraph.union(id, const_id);
        }
    }
    // No pre-union bookkeeping is needed.
    fn pre_union(_egraph: &egg::EGraph<Semiring, Self>, _id1: Id, _id2: Id) {}
}
/// Constant-folds an e-node whose children all have known constants;
/// returns `None` for non-arithmetic nodes or missing child constants.
///
/// NOTE(review): `Semiring::Min` is folded with `-` (subtraction). If
/// `Min` denotes a minimum in `lang.rs` this is a bug — confirm.
fn eval(egraph: &EGraph, enode: &Semiring) -> Option<Semiring> {
    // Constant of a child class, if known.
    let x = |i: &Id| egraph[*i].data.constant.clone();
    match enode {
        Semiring::Num(n) => Some(Semiring::Num(*n)),
        Semiring::Add([a, b]) => Some(Semiring::Num(x(a)?.num()? + x(b)?.num()?)),
        Semiring::Min([a, b]) => Some(Semiring::Num(x(a)?.num()? - x(b)?.num()?)),
        Semiring::Mul([a, b]) => Some(Semiring::Num(x(a)?.num()? * x(b)?.num()?)),
        _ => None,
    }
}
| true
|
89b09ff53ce90f894127e5f50f338f6f503cd0c9
|
Rust
|
P3GLEG/shellcode_loader
|
/src/main.rs
|
UTF-8
| 3,603
| 2.515625
| 3
|
[] |
no_license
|
#![no_std]
#![no_main]
/// This is a shellcode loader Rust while also minimizing the size of the binary. In my tests the C
/// version binary turned out to be 4k whilst on Mac with rust I got to 4.2k after running strip
/// This can be optimized further. Conditional compiling for the static varibles would get you some
/// more room.
/// The main benefit is compliation with the toolchains Rust offers. It's a lot easier to compile a
/// windows binary
//This is required fails without it
// NOTE(review): bare `macos` is not a cfg predicate rustc ever sets (that
// would be `target_os = "macos"`); this works because `unix` already
// covers macOS targets — confirm intent.
#[cfg(any(unix, macos))]
#[link(name = "c")]
extern "C" {}
/// "Hello World" write+exit shellcode for x86-64 macOS.
// FIX: `target_os = "mac"` is not a valid target-OS value, so this static
// was never compiled and the `#[cfg(target_os = "macos")] main` below
// failed to resolve `MAC_PAYLOAD` on macOS builds; the gate must be
// `"macos"`.
#[cfg(target_os = "macos")]
static MAC_PAYLOAD: [u8; 51] = [
    0xeb, 0x1e, 0x5e, 0xb8, 0x04, 0x00, 0x00, 0x02, 0xbf, 0x01, 0x00, 0x00, 0x00, 0xba, 0x0e, 0x00,
    0x00, 0x00, 0x0f, 0x05, 0xb8, 0x01, 0x00, 0x00, 0x02, 0xbf, 0x00, 0x00, 0x00, 0x00, 0x0f, 0x05,
    0xe8, 0xdd, 0xff, 0xff, 0xff, 0x48, 0x65, 0x6c, 0x6c, 0x6f, 0x20, 0x57, 0x6f, 0x72, 0x6c, 0x64,
    0x21, 0x0d, 0x0a,
]; //Hello world x64
/// MessageBox shellcode for x86 Windows.
#[cfg(target_os = "windows")]
#[no_mangle]
#[link_section = ".text"] //Without this the payload ends up in non executable memory
static WINDOWS_PAYLOAD: [u8; 199] = [
    0x33, 0xc9, 0x64, 0x8b, 0x49, 0x30, 0x8b, 0x49, 0x0c, 0x8b, 0x49, 0x1c, 0x8b, 0x59, 0x08, 0x8b,
    0x41, 0x20, 0x8b, 0x09, 0x80, 0x78, 0x0c, 0x33, 0x75, 0xf2, 0x8b, 0xeb, 0x03, 0x6d, 0x3c, 0x8b,
    0x6d, 0x78, 0x03, 0xeb, 0x8b, 0x45, 0x20, 0x03, 0xc3, 0x33, 0xd2, 0x8b, 0x34, 0x90, 0x03, 0xf3,
    0x42, 0x81, 0x3e, 0x47, 0x65, 0x74, 0x50, 0x75, 0xf2, 0x81, 0x7e, 0x04, 0x72, 0x6f, 0x63, 0x41,
    0x75, 0xe9, 0x8b, 0x75, 0x24, 0x03, 0xf3, 0x66, 0x8b, 0x14, 0x56, 0x8b, 0x75, 0x1c, 0x03, 0xf3,
    0x8b, 0x74, 0x96, 0xfc, 0x03, 0xf3, 0x33, 0xff, 0x57, 0x68, 0x61, 0x72, 0x79, 0x41, 0x68, 0x4c,
    0x69, 0x62, 0x72, 0x68, 0x4c, 0x6f, 0x61, 0x64, 0x54, 0x53, 0xff, 0xd6, 0x33, 0xc9, 0x57, 0x66,
    0xb9, 0x33, 0x32, 0x51, 0x68, 0x75, 0x73, 0x65, 0x72, 0x54, 0xff, 0xd0, 0x57, 0x68, 0x6f, 0x78,
    0x41, 0x01, 0xfe, 0x4c, 0x24, 0x03, 0x68, 0x61, 0x67, 0x65, 0x42, 0x68, 0x4d, 0x65, 0x73, 0x73,
    0x54, 0x50, 0xff, 0xd6, 0x57, 0x68, 0x72, 0x6c, 0x64, 0x21, 0x68, 0x6f, 0x20, 0x57, 0x6f, 0x68,
    0x48, 0x65, 0x6c, 0x6c, 0x8b, 0xcc, 0x57, 0x57, 0x51, 0x57, 0xff, 0xd0, 0x57, 0x68, 0x65, 0x73,
    0x73, 0x01, 0xfe, 0x4c, 0x24, 0x03, 0x68, 0x50, 0x72, 0x6f, 0x63, 0x68, 0x45, 0x78, 0x69, 0x74,
    0x54, 0x53, 0xff, 0xd6, 0x57, 0xff, 0xd,
]; //Messagebox x86 payload
/// "Hello World" write+exit shellcode for x86-64 Linux.
#[cfg(target_os = "linux")]
#[link_section = ".text"]
static LINUX_PAYLOAD: [u8; 50] = [
    0xeb, 0x1e, 0xb8, 0x01, 0x00, 0x00, 0x00, 0xbf, 0x01, 0x00, 0x00, 0x00, 0x5e, 0xba, 0x0c, 0x00,
    0x00, 0x00, 0x0f, 0x05, 0xb8, 0x3c, 0x00, 0x00, 0x00, 0xbf, 0x00, 0x00, 0x00, 0x00, 0x0f, 0x05,
    0xe8, 0xdd, 0xff, 0xff, 0xff, 0x48, 0x65, 0x6c, 0x6c, 0x6f, 0x20, 0x57, 0x6f, 0x72, 0x6c, 0x64,
    0x0, 0xa0,
]; //Hello world x86
/// Windows entry point: jumps into the embedded shellcode.
#[cfg(target_os = "windows")]
#[no_mangle] // FIX: the other platform entry points carry `#[no_mangle]`; windows was missing it.
pub extern "C" fn main() {
    // FIX: `transmute` is an unsafe operation; the original body had no
    // `unsafe` block and would not compile for the windows target.
    unsafe {
        let exec_data: extern "C" fn() -> ! =
            core::mem::transmute(&WINDOWS_PAYLOAD);
        exec_data();
    }
}
/// Linux entry point: jumps into the embedded shellcode.
#[no_mangle]
#[cfg(target_os = "linux")]
pub extern "C" fn main() {
    // SAFETY: reinterprets the payload bytes as code; LINUX_PAYLOAD is
    // placed in `.text` via its `link_section` attribute, and the function
    // type `fn() -> !` reflects that the shellcode never returns.
    unsafe {
        let exec_data: extern "C" fn() -> ! =
            core::mem::transmute(&LINUX_PAYLOAD);
        exec_data();
    }
}
#[cfg(target_os = "macos")]
#[no_mangle]
// macOS entry point, mirroring the linux variant.
// NOTE(review): MAC_PAYLOAD is not defined in this part of the file — confirm
// it exists elsewhere, otherwise the macos build fails to resolve it.
pub extern "C" fn main() {
    unsafe {
        let exec_data: extern "C" fn() -> ! =
            core::mem::transmute(&MAC_PAYLOAD);
        exec_data();
    }
}
// Required in a #![no_std] binary: on panic, spin forever (the payload has no
// runtime to unwind into).
#[panic_handler]
fn panic(_info: &core::panic::PanicInfo) -> ! {
    loop {}
}
// Empty exception-handling frame hooks: satisfy linker references without
// pulling in any unwinding machinery.
#[no_mangle]
pub extern "C" fn rust_eh_register_frames() {}
#[no_mangle]
pub extern "C" fn rust_eh_unregister_frames() {}
| true
|
f2edaeca9c9b074334181ec84f76c99d5d623329
|
Rust
|
jungaretti/advent-of-code-2020
|
/src/day_04.rs
|
UTF-8
| 4,718
| 3.21875
| 3
|
[
"MIT"
] |
permissive
|
use regex::Regex;
use std::fs::File;
use std::io::BufRead;
use std::io::BufReader;
const DATA_FILE_PATH: &str = "./data/day04.txt";
// One AoC 2020 day-4 passport record. Every field is optional because input
// records may omit keys; presence/validity is checked separately.
struct Passport {
    birth_year: Option<String>,      // byr
    issue_year: Option<String>,      // iyr
    expiration_year: Option<String>, // eyr
    height: Option<String>,          // hgt
    hair_color: Option<String>,      // hcl
    eye_color: Option<String>,       // ecl
    passport_id: Option<String>,     // pid
    country_id: Option<String>,      // cid (present in input, never validated)
}
impl Passport {
    /// True when every required field is present (`cid` is optional per the puzzle).
    fn is_full(&self) -> bool {
        return self.birth_year.is_some()
            && self.issue_year.is_some()
            && self.expiration_year.is_some()
            && self.height.is_some()
            && self.hair_color.is_some()
            && self.eye_color.is_some()
            && self.passport_id.is_some();
    }

    /// True when every required field is present AND within the puzzle ranges:
    /// byr 1920-2002, iyr 2010-2020, eyr 2020-2030, hgt 150-193cm or 59-76in,
    /// hcl `#rrggbb`, ecl one of seven colors, pid exactly nine digits.
    fn is_valid(&self) -> bool {
        // BUGFIX: the inch alternative previously read `[6-7][0-9]`, accepting
        // 59-79in; the puzzle range is 59-76in, so 77-79 must be rejected.
        let hgt_regex = Regex::new(r"^(((1[5-8]\d)|(19[0-3]))cm|(59|6[0-9]|7[0-6])in)$").unwrap();
        let hcl_regex = Regex::new(r"^#[A-Fa-f0-9]{6}$").unwrap();
        // BUGFIX: the alternation must be grouped. `^(amb)|(blu)|...|(oth)$`
        // anchored only the first and last alternatives, so strings like
        // "xxblu" incorrectly matched.
        let ecl_regex = Regex::new(r"^(amb|blu|brn|gry|grn|hzl|oth)$").unwrap();
        let pid_regex = Regex::new(r"^\d{9}$").unwrap();

        let mut byr_valid = false;
        if let Some(year) = &self.birth_year {
            if let Ok(year) = year.parse::<i32>() {
                byr_valid = year >= 1920 && year <= 2002;
            }
        }

        let mut iyr_valid = false;
        if let Some(year) = &self.issue_year {
            if let Ok(year) = year.parse::<i32>() {
                iyr_valid = year >= 2010 && year <= 2020;
            }
        }

        let mut eyr_valid = false;
        if let Some(year) = &self.expiration_year {
            if let Ok(year) = year.parse::<i32>() {
                eyr_valid = year >= 2020 && year <= 2030;
            }
        }

        let mut hgt_valid = false;
        if let Some(height) = &self.height {
            hgt_valid = hgt_regex.is_match(height);
        }

        let mut hcl_valid = false;
        if let Some(hair_color) = &self.hair_color {
            hcl_valid = hcl_regex.is_match(hair_color);
        }

        let mut ecl_valid = false;
        if let Some(eye_color) = &self.eye_color {
            ecl_valid = ecl_regex.is_match(eye_color);
        }

        let mut pid_valid = false;
        if let Some(passport_id) = &self.passport_id {
            pid_valid = pid_regex.is_match(passport_id);
        }

        return byr_valid
            && iyr_valid
            && eyr_valid
            && hgt_valid
            && hcl_valid
            && ecl_valid
            && pid_valid;
    }
}
/// Parses a whitespace-separated `key:value` record into a `Passport`.
///
/// Unknown keys are ignored. Accepts `&str` instead of `&String` (callers
/// passing `&String` still work through deref coercion), and a malformed
/// field without a `:` separator is now skipped instead of panicking.
fn passport_from_string(passport_string: &str) -> Passport {
    let mut passport = Passport {
        birth_year: None,
        issue_year: None,
        expiration_year: None,
        height: None,
        hair_color: None,
        eye_color: None,
        passport_id: None,
        country_id: None,
    };

    for field in passport_string.split_whitespace() {
        let mut pair = field.split(':');
        // `split` always yields at least one item, so this cannot panic.
        let key = pair.next().unwrap();
        // BUGFIX: fields without a ':' previously panicked on unwrap().
        let value = match pair.next() {
            Some(value) => value,
            None => continue,
        };
        match key {
            "byr" => passport.birth_year = Some(String::from(value)),
            "iyr" => passport.issue_year = Some(String::from(value)),
            "eyr" => passport.expiration_year = Some(String::from(value)),
            "hgt" => passport.height = Some(String::from(value)),
            "hcl" => passport.hair_color = Some(String::from(value)),
            "ecl" => passport.eye_color = Some(String::from(value)),
            "pid" => passport.passport_id = Some(String::from(value)),
            "cid" => passport.country_id = Some(String::from(value)),
            _ => (),
        }
    }

    return passport;
}
/// Reads the input file and splits it into passports; records are separated
/// by blank lines.
fn load_passports_from_file(file_path: &str) -> Vec<Passport> {
    let file = File::open(file_path).unwrap();
    let lines = BufReader::new(file).lines().map(|line| line.unwrap());

    let mut passports: Vec<Passport> = Vec::new();
    let mut passport_raw = String::new();
    for line in lines {
        if line.is_empty() {
            // Guard: consecutive blank lines previously produced empty records.
            if !passport_raw.is_empty() {
                passports.push(passport_from_string(&passport_raw));
                passport_raw.clear();
            }
        } else {
            passport_raw.push(' ');
            passport_raw.push_str(&line);
        }
    }
    // BUGFIX: the final record was silently dropped when the file did not end
    // with a blank line; flush whatever is left in the buffer.
    if !passport_raw.is_empty() {
        passports.push(passport_from_string(&passport_raw));
    }

    return passports;
}
/// Challenge 1: count passports that contain every required field.
pub fn day_04_challenge_1() -> usize {
    load_passports_from_file(DATA_FILE_PATH)
        .iter()
        .filter(|passport| passport.is_full())
        .count()
}
/// Challenge 2: count passports whose fields also pass validation.
pub fn day_04_challenge_2() -> usize {
    load_passports_from_file(DATA_FILE_PATH)
        .iter()
        .filter(|passport| passport.is_valid())
        .count()
}
| true
|
7299a6fe675d2747927d62c89e4f2ff98708f5e2
|
Rust
|
ken0x0a/tonic-build-with-code-gen
|
/src/generate_code.rs
|
UTF-8
| 5,612
| 2.53125
| 3
|
[
"MIT"
] |
permissive
|
use super::{Method, Service};
use crate::{generate_doc_comments, naive_snake_case, util};
use proc_macro2::TokenStream;
use quote::{format_ident, quote};
/// Generate boilerplate code for gRPC server
///
/// Takes a `Service` description and writes the generated server module plus
/// one handler file per RPC method into the crate's `src/` directory.
/// (Despite the older wording, this emits *server* code, not a client.)
pub fn generate<T: Service>(service: &T, proto_path: &str) {
    generate_service_file(service, proto_path);
    generate_file_for_each_method(service, proto_path);

    // format generated files (only when the `rustfmt` feature is enabled)
    #[cfg(feature = "rustfmt")]
    {
        let source_dir = format!("{}/src", std::env::current_dir().unwrap().to_str().unwrap());
        super::fmt(&source_dir);
    }
}
/// Renders the service boilerplate and writes it to `src/service.rs`.
fn generate_service_file<T: Service>(service: &T, proto_path: &str) {
    let rendered = generate_service(service, proto_path);
    util::write_stream_to_file_with_header(rendered, "src/service.rs", None)
        .expect("failed to write result to file");
}
/// Builds the token stream for `src/service.rs`: the proto module include,
/// the service struct and its `#[tonic::async_trait]` implementation whose
/// methods delegate to per-method `action` handlers.
fn generate_service<T: Service>(service: &T, proto_path: &str) -> TokenStream {
    let service_name_str = naive_snake_case(service.name());
    let service_name = quote::format_ident!("{}", service.name());
    let service_server_name = quote::format_ident!("{}_server", naive_snake_case(service.name()));
    let proto_mod_name = quote::format_ident!("{}_proto", naive_snake_case(service.name()));
    let service_ident = quote::format_ident!("{}Service", service.name());
    let service_doc = generate_doc_comments(service.comment());
    let service_methods = generate_methods(service, proto_path);
    // NOTE(review): the generated `use` imports the trait as `Auth` literally,
    // while the impl below is for `#service_name` — this looks specific to an
    // auth service; confirm before reusing for other services.
    let stream = quote! {
        // use tonic::{Request, Response, Status};
        use #proto_mod_name::#service_server_name::Auth;
        use #proto_mod_name::*;
        use super::action;
        pub mod #proto_mod_name {
            tonic::include_proto!(#service_name_str);
        }
        // Generated client implementations.
        //
        #service_doc
        #[derive(Default)]
        pub struct #service_ident {}
        #[tonic::async_trait]
        impl #service_name for #service_ident {
            #service_methods
        }
    };
    stream
}
/// Renders one trait-method implementation per service method, each preceded
/// by its proto doc comments.
fn generate_methods<T: Service>(service: &T, proto_path: &str) -> TokenStream {
    let mut stream = TokenStream::new();
    for method in service.methods() {
        stream.extend(generate_doc_comments(method.comment()));
        stream.extend(generate_each_method_for_service(method, proto_path));
    }
    stream
}
/// Renders one async trait method that simply forwards the request to the
/// matching `action::<method>::handler`.
fn generate_each_method_for_service<T: Method>(method: &T, proto_path: &str) -> TokenStream {
    // let codec_name = syn::parse_str::<syn::Path>(T::CODEC_PATH).unwrap();
    let ident = format_ident!("{}", method.name());
    let (request, response) = get_req_res_type(method, proto_path);
    quote! {
        async fn #ident(
            &self,
            request: tonic::Request<#request>,
        ) -> ::std::result::Result<tonic::Response<#response>, tonic::Status> {
            action::#ident::handler(request).await
        }
    }
}
fn get_req_res_type<T: Method>(
method: &T,
proto_path: &str,
) -> (quote::__private::Ident, quote::__private::Ident) {
let (request, response) = method.request_response_name(proto_path);
let v = format!("{}", request);
// assert_eq!("RegisterPayload", v);
let v: Vec<&str> = v.split(" :: ").collect();
let request_type = format_ident!("{}", v[1]);
let v = format!("{}", response);
let v: Vec<&str> = v.split(" :: ").collect();
let response_type = format_ident!("{}", v[1]);
(request_type, response_type)
}
// ###########################################
// ############ for each method ############
// ###########################################
/// Writes one handler file per RPC method under `src/action/`, plus the
/// `src/action.rs` module file declaring them.
///
/// Handler files are only written when absent (so user edits survive regen);
/// the module file is always rewritten.
fn generate_file_for_each_method<T: Service>(service: &T, proto_path: &str) {
    let mut stream_for_mod = TokenStream::new();
    let header = "// This file is generated by \"proto-gen-code\"
// You can refresh boilerplate by removing this file\n\n";
    for method in service.methods() {
        let method_name = format_ident!("{}", method.name());
        // Accumulate `pub mod <method>;` declarations for src/action.rs.
        stream_for_mod.extend(quote! {
            pub mod #method_name;
        });

        let mut stream = TokenStream::new();
        stream.extend(generate_doc_comments(method.comment()));
        let method_stream = generate_method_handler(service, method, proto_path);
        stream.extend(method_stream);

        util::write_stream_to_file_with_header_if_not_exist(
            stream,
            &format!("src/action/{}.rs", method.name()),
            Some(&header),
        )
        .expect("failed to write result to file");
    }
    util::write_stream_to_file_with_header(stream_for_mod, "src/action.rs", None)
        .expect("failed to write result to file");
}
/// Renders the default body of one handler file: a `handler` function that
/// logs the request and answers with a stubbed response.
///
/// NOTE(review): the stub reply hard-codes the fields `message`, `field_name`
/// and `status` plus the `my_proto::result::Status` path — it only compiles
/// for response messages with exactly that shape; confirm against the protos.
fn generate_method_handler<S: Service, T: Method>(
    service: &S,
    method: &T,
    proto_path: &str,
) -> TokenStream {
    // let service_name = quote::format_ident!("{}", service.name());
    let proto_mod_name = quote::format_ident!("{}_proto", naive_snake_case(service.name()));
    // let codec_name = syn::parse_str::<syn::Path>(T::CODEC_PATH).unwrap();
    // let ident = format_ident!("{}", method.name());
    let (request, response) = get_req_res_type(method, proto_path);
    quote! {
        use tonic::{Request, Response, Status};
        use crate::#proto_mod_name::{#request, #response};
        pub async fn handler(
            request: Request<#request>,
        ) -> ::std::result::Result<Response<#response>, Status> {
            println!("Request from {:?}", request.remote_addr());
            println!("Metadata => {:?}", request.metadata());
            let message = request.into_inner();
            let reply = #response {
                message: format!("Hello {}!", message.name),
                field_name: String::from(""),
                status: my_proto::result::Status::Success as i32,
                // ..
            };
            Ok(Response::new(reply))
        }
    }
}
| true
|
851b4c0412ebca15e4717fe54d05a9c6b6b1d831
|
Rust
|
mhk032/sequencer_lighting_linux
|
/src/animation/animation_custom_colors.rs
|
UTF-8
| 1,397
| 3.3125
| 3
|
[] |
no_license
|
use crate::color::Color;
#[derive(Debug)]
// Animation configured with user-supplied colors; the device protocol supports
// between two and four of them.
pub struct AnimationCustomColors {
    pub colors: Vec<Color>,
}
impl AnimationCustomColors {
    /// Writes up to four custom colors into the device packet `buf`.
    ///
    /// Packet layout: `buf[6]` holds `(color count - 1)`; the colors occupy
    /// `buf[10..22]` as consecutive r,g,b triples.
    ///
    /// # Panics
    /// Panics when fewer than two colors are configured (the protocol needs at
    /// least two) or when `buf` is shorter than 22 bytes.
    pub fn set_colors_in_buffer(&self, buf: &mut Vec<u8>) {
        if self.colors.is_empty() {
            panic!("Colors are empty, this function shouldnt be called");
        }
        let len = self.colors.len();
        if len < 2 {
            panic!("Atleast two custom colors are required!");
        }
        if len > 4 {
            eprintln!("More than 4 custom colors provided, only the first 4 will be considered");
        }
        // BUGFIX: the count byte previously reported `len - 1` even when more
        // than four colors were supplied, although only four were ever written
        // below — the header now matches the payload.
        let used = len.min(4);
        buf[6] = (used - 1) as u8;
        // Color i occupies the three bytes starting at offset 10 + 3*i
        // (identical to the former explicit match over indices 0..=3).
        for (index, color) in self.colors.iter().take(used).enumerate() {
            let base = 10 + index * 3;
            buf[base] = color.r;
            buf[base + 1] = color.g;
            buf[base + 2] = color.b;
        }
    }
}
| true
|
2d75539d8b6db72f163aad6f906e5f679e7a0a12
|
Rust
|
nesium/xcode-color-assets
|
/crates/swift-gen/src/renderers/renderer.rs
|
UTF-8
| 500
| 2.8125
| 3
|
[] |
no_license
|
use super::super::AccessLevel;
use super::data::RuleSet;
// Rendering options shared by all swift-gen renderers.
pub struct RendererConfig {
    tab: String,                  // string emitted per indentation level
    pub access_level: AccessLevel, // access modifier for generated declarations
}
impl RendererConfig {
pub fn new(tab: &str, access_level: AccessLevel) -> Self {
RendererConfig {
tab: tab.to_owned(),
access_level,
}
}
pub fn indent(&self, depth: usize) -> String {
self.tab.repeat(depth)
}
}
// Common interface for output renderers: append the rendered form of
// `ruleset` to `destination` using the shared config.
pub trait Renderer {
    fn render_into(&self, ruleset: &RuleSet, destination: &mut String, config: &RendererConfig);
}
| true
|
680a62061bf8e031562a02c5dc44202d29ea7aba
|
Rust
|
miam-miam100/Calculator
|
/src/app.rs
|
UTF-8
| 2,256
| 2.671875
| 3
|
[
"MIT"
] |
permissive
|
use calculator::expression::{eval, Expression, Parser, Rule};
use calculator::types::{MathError, Token};
use eframe::egui::epaint::{color, Shadow};
use eframe::egui::{Align2, Frame, Window};
use eframe::{egui, epi};
// State of the egui calculator application.
pub struct CalcApp {
    input: String,      // current contents of the input box
    prev_input: String, // last evaluated text, used to detect edits
    result: Result<Token, MathError>, // outcome of the latest evaluation
}
impl Default for CalcApp {
fn default() -> Self {
Self {
input: "".to_string(),
prev_input: "".to_string(),
result: Err(MathError::None),
}
}
}
impl epi::App for CalcApp {
    /// Per-frame UI: draws the input box, re-evaluates the expression whenever
    /// the text changed, and shows the result (or error) underneath.
    fn update(&mut self, ctx: &egui::CtxRef, frame: &mut epi::Frame<'_>) {
        Window::new("Calculator")
            .anchor(Align2::CENTER_CENTER, egui::Vec2::default())
            .frame(Frame::window(&ctx.style()))
            .show(ctx, |ui| {
                ui.text_edit_singleline(&mut self.input);
                // Only re-evaluate when the text actually changed this frame.
                if self.input != self.prev_input {
                    self.prev_input = self.input.clone();
                    if self.input == "!" {
                        // Typing a lone "!" quits the application.
                        frame.quit();
                    } else if self.input.is_empty() {
                        self.result = Err(MathError::None);
                    } else {
                        self.result = match Expression::parse(Rule::calculation, &self.input) {
                            Ok(calc) => eval(calc),
                            Err(_) => Err(MathError::SyntaxError),
                        }
                    }
                }
                ui.label(match &self.result {
                    // MathError::None doubles as the "nothing entered yet" state.
                    Err(MathError::None) => "Awaiting input...".to_string(),
                    Err(e) => format!("Got Error: {}", e),
                    Ok(t) => format!("Got Result: {:?}", t),
                });
            });
        // Shrink the native window to fit the rendered content.
        frame.set_window_size(ctx.used_size());
    }

    /// One-time setup: remove the window shadow so the calculator renders as a
    /// flat panel.
    fn setup(
        &mut self,
        ctx: &egui::CtxRef,
        _frame: &mut epi::Frame<'_>,
        _storage: Option<&dyn epi::Storage>,
    ) {
        let mut style: egui::Style = (*ctx.style()).clone();
        style.visuals.window_shadow = Shadow {
            extrusion: 0.0,
            color: color::Color32::TRANSPARENT,
        };
        ctx.set_style(style);
    }

    /// Native window title.
    fn name(&self) -> &str {
        "Calculator"
    }
}
| true
|
77b4d98a6f22e09934f646584e462cde264c2b6f
|
Rust
|
iBelieve/oxide
|
/kernel/src/arch/x86_64/io.rs
|
UTF-8
| 1,817
| 3.1875
| 3
|
[] |
no_license
|
use core::marker::PhantomData;
/***** TRAITS *****/
// Types readable from / writable to an x86 I/O port, implemented per access
// width (only u8 is enabled below; u16/u32 impls are commented out).
pub trait InOut {
    // Read one value from `port`. Unsafe: raw hardware port I/O.
    unsafe fn port_in(port: u16) -> Self;
    // Write `value` to `port`. Unsafe: raw hardware port I/O.
    unsafe fn port_out(port: u16, value: Self);
}
// Byte-wide port access: delegate to the raw inb/outb asm wrappers below.
impl InOut for u8 {
    unsafe fn port_in(port: u16) -> u8 { inb(port) }
    unsafe fn port_out(port: u16, value: u8) { outb(port, value); }
}
// impl InOut for u16 {
// unsafe fn port_in(port: u16) -> u16 { inw(port) }
// unsafe fn port_out(port: u16, value: u16) { outw(port, value); }
// }
//
// impl InOut for u32 {
// unsafe fn port_in(port: u16) -> u32 { inl(port) }
// unsafe fn port_out(port: u16, value: u32) { outl(port, value); }
// }
/***** STRUCTS & ENUMS *****/
// A typed x86 I/O port at a fixed address; `T` fixes the access width.
pub struct Port<T: InOut> {
    port: u16,              // I/O port number
    phantom: PhantomData<T> // carries the width type without storing anything
}
impl<T: InOut> Port<T> {
    /// Wraps the given I/O address. Unsafe because arbitrary port I/O can
    /// disturb hardware; the caller must own the port.
    pub const unsafe fn new(port: u16) -> Port<T> {
        Port { port, phantom: PhantomData }
    }

    /// Reads one value from the port.
    pub fn read(&mut self) -> T {
        unsafe { T::port_in(self.port) }
    }

    /// Writes one value to the port.
    pub fn write(&mut self, value: T) {
        unsafe { T::port_out(self.port, value) }
    }
}
// A control/data port pair, the common pattern for index-addressed devices:
// write a register index to `control`, then transfer via `data`.
pub struct PortPair<T: InOut> {
    control: Port<T>,
    data: Port<T>
}
impl<T: InOut> PortPair<T> {
    /// Wraps a control/data port pair. Unsafe for the same reason as
    /// `Port::new`: the caller must own both ports.
    pub const unsafe fn new(control: u16, data: u16) -> PortPair<T> {
        PortPair {
            control: Port::new(control),
            data: Port::new(data),
        }
    }

    /// Selects a register via the control port, then writes `value` to it.
    pub fn write(&mut self, control: T, value: T) {
        self.control.write(control);
        self.data.write(value);
    }

    /// Selects a register via the control port, then reads its value.
    pub fn read(&mut self, control: T) -> T {
        self.control.write(control);
        self.data.read()
    }
}
/***** FUNCTIONS *****/
// Raw `in`/`out` instruction wrappers.
// NOTE(review): this is the pre-2018 LLVM-style `asm!` syntax, which modern
// rustc no longer accepts (it was renamed `llvm_asm!` and later removed);
// this file only builds on the old nightly toolchain it was written for —
// confirm the pinned compiler before modernizing.
unsafe fn inb(port: u16) -> u8 {
    let ret: u8;
    asm!("inb $1, $0" : "={ax}"(ret) : "N{dx}"(port) : : "volatile");
    ret
}

unsafe fn outb(port: u16, value: u8) {
    asm!("outb $0, $1" : : "{ax}"(value), "N{dx}"(port) : : "volatile");
}
| true
|
4ce2418544363926d1352e01e07f51fe398e5387
|
Rust
|
tigleym/dwarf_seeks_fortune
|
/dsf_editor/src/systems/place_tiles.rs
|
UTF-8
| 2,668
| 2.59375
| 3
|
[
"BlueOak-1.0.0"
] |
permissive
|
use crate::resources::{EditorData, TileEdit};
use amethyst::core::ecs::shrev::EventChannel;
use amethyst::core::ecs::{Read, System, Write};
use amethyst::input::{InputEvent, StringBindings, VirtualKeyCode};
use dsf_core::components::Pos;
use dsf_core::levels::{TileDefinition, TileDefinitions};
use dsf_core::resources::EventReaders;
// Editor system that stamps (Return) or erases (Delete) tiles in the current
// selection.
pub struct PlaceTilesSystem;

/// TODO: Delay in channel is unacceptable here. Replace channel with direct input check.
impl<'s> System<'s> for PlaceTilesSystem {
    #[allow(clippy::type_complexity)]
    type SystemData = (
        Write<'s, EventReaders>,
        Read<'s, EventChannel<InputEvent<StringBindings>>>,
        Read<'s, TileDefinitions>,
        Write<'s, EditorData>,
    );

    // Drains pending input events: Return places the active brush over the
    // selection, Delete clears it. All other events are ignored.
    fn run(&mut self, (mut readers, event_channel, tile_defs, mut editor_data): Self::SystemData) {
        let reader_id = readers
            .get_reader_id("place_tiles_system")
            .expect("ReaderId was not registered for system PlaceTilesSystem.");
        for event in event_channel.read(reader_id) {
            match event {
                InputEvent::KeyReleased {
                    key_code: VirtualKeyCode::Return,
                    scancode: _,
                } => {
                    let (key, tile_def) = get_brush(&editor_data, &tile_defs);
                    set_tiles(&mut editor_data, key, tile_def);
                }
                InputEvent::KeyReleased {
                    key_code: VirtualKeyCode::Delete,
                    scancode: _,
                } => {
                    // A `None` key erases instead of placing.
                    set_tiles(&mut editor_data, None, None);
                }
                _ => (),
            }
        }
    }
}
/// Fills the current selection with the given tile, stepping by the brush
/// footprint so multi-tile brushes tile without overlapping. A `None` key
/// erases tiles instead of placing them.
fn set_tiles(editor_data: &mut EditorData, key: Option<String>, tile_def: Option<TileDefinition>) {
    // Brushes without a definition (i.e. the eraser) act as a 1x1 stamp.
    let brush_dimens = match tile_def.as_ref() {
        Some(def) => def.dimens,
        None => Pos::new(1, 1),
    };
    let lower = editor_data.selection.lower_bounds();
    let dimens = editor_data.selection.dimens();
    for x in (lower.x..(lower.x + dimens.x)).step_by(brush_dimens.x as usize) {
        for y in (lower.y..(lower.y + dimens.y)).step_by(brush_dimens.y as usize) {
            editor_data
                .level
                .put_tile(Pos::new(x, y), key.clone().map(TileEdit::new));
        }
    }
}
/// Looks up the active brush: its tile key plus the matching definition.
/// Both are `None` when no brush is selected.
fn get_brush(
    editor_data: &EditorData,
    tile_defs: &TileDefinitions,
) -> (Option<String>, Option<TileDefinition>) {
    let key = editor_data.brush.get_key().clone();
    let def = match key.as_ref() {
        Some(k) => Some(tile_defs.get(k).clone()),
        None => None,
    };
    (key, def)
}
| true
|
eb8cfa8fe980157c5efc0b4301171a423902c7cd
|
Rust
|
mcoffin/ioreg-proc
|
/zinc/build.rs
|
UTF-8
| 1,866
| 2.625
| 3
|
[] |
no_license
|
use std::env;
use std::fs;
use std::io;
use std::path::Path;
/// Finds the selected MCU from Cargo feature environment variables
/// (`CARGO_FEATURE_MCU_*`) and returns its lowercased name, or `None` when no
/// MCU feature is enabled. When several are set, the last one in environment
/// iteration order wins (unchanged from the original behavior).
fn get_platform() -> Option<String> {
    env::vars()
        .filter(|(key, _)| key.starts_with("CARGO_FEATURE_MCU"))
        .last()
        .map(|(feature_var, _)| {
            feature_var
                // BUGFIX: `trim_left_matches` is deprecated; `trim_start_matches`
                // is the identical current API. The intermediate `to_string()`
                // was also redundant — `str::to_ascii_lowercase` already
                // returns an owned String.
                .trim_start_matches("CARGO_FEATURE_MCU_")
                .to_ascii_lowercase()
        })
}
/// Reports whether `file` exists.
///
/// Only a definite ENOENT ("no such file or directory", raw os error 2)
/// counts as missing; any other metadata error is treated as "exists",
/// matching the original conservative behavior.
fn file_exists<P: AsRef<Path>>(file: P) -> bool {
    match fs::metadata(file.as_ref()) {
        Err(ref e) if e.raw_os_error() == Some(2) => false,
        _ => true,
    }
}
// Copies the common and platform-specific linker scripts into `out_path`.
// `iomem.ld` is optional per platform; `layout_common.ld` and `layout.ld`
// are required. The path prefix compensates for crates living under
// `examples/`, which sit two directories deeper than the main crate.
fn copy_linker_scripts<P: AsRef<Path>, Q: AsRef<Path>>(target: P, out_path: Q) -> io::Result<()> {
    let path_prefix = if env::var("CARGO_MANIFEST_DIR").unwrap().find("/examples/").is_none() {
        Path::new(".")
    } else {
        Path::new("./../..")
    };
    // Try copying the linker scripts
    let target_dir = Path::new("src/hal").join(target);
    let out_dir = out_path.as_ref();
    fs::copy(path_prefix.join("src/hal/layout_common.ld"), out_dir.join("layout_common.ld"))?;
    let iomem_ld = path_prefix.join(target_dir.join("iomem.ld"));
    // iomem.ld only exists for some platforms; copy it when present.
    if file_exists(iomem_ld.as_path()) {
        fs::copy(iomem_ld, out_dir.join("iomem.ld"))?;
    }
    fs::copy(path_prefix.join(target_dir.join("layout.ld")), out_dir.join("layout.ld"))?;
    Ok(())
}
/// Build script entry point: locate the selected MCU platform, copy its
/// linker scripts into OUT_DIR, and tell Cargo to add that directory to the
/// native linker search path. Does nothing when no MCU feature is active.
fn main() {
    let platform = match get_platform() {
        Some(platform) => platform,
        None => return,
    };

    // Cargo's per-crate output directory for this build.
    let out_dir = env::var("OUT_DIR").unwrap();

    copy_linker_scripts(&platform, &out_dir).expect("Failed to copy linker scripts");

    // Make sure that the output dir is passed to the linker.
    println!("cargo:rustc-link-search=native={}", out_dir);
}
| true
|
49be8e25c9b7dc6d88a5582946db34772fa61954
|
Rust
|
sonos/tract
|
/examples/jupyter-keras-tract-tf2/src/main.rs
|
UTF-8
| 746
| 2.59375
| 3
|
[
"LicenseRef-scancode-unknown-license-reference",
"MIT",
"Apache-2.0"
] |
permissive
|
use rand::*;
use tract_onnx::prelude::*;
// Loads `./example.onnx`, feeds it a random (10, 100) f32 tensor and prints
// the model's output array.
fn main() -> TractResult<()> {
    let model = tract_onnx::onnx()
        // load the model
        .model_for_path("./example.onnx")?
        // optimize graph
        .into_optimized()?
        // make the model runnable and fix its inputs and outputs
        .into_runnable()?;

    // Generate some input data for the model: 1000 uniform f32 samples,
    // reshaped into a (10, 100) batch to match the model's input.
    let mut rng = thread_rng();
    let vals: Vec<_> = (0..1000).map(|_| rng.gen::<f32>()).collect();
    let input = tract_ndarray::arr1(&vals).into_shape((10, 100)).unwrap().into_tensor();

    // Input the generated data into the model
    let result = model.run(tvec![input.into()]).unwrap();
    let to_show = result[0].to_array_view::<f32>()?;
    println!("result: {to_show:?}");
    Ok(())
}
| true
|
b9edba8560927bd52e12f3fd9adb5ccbaf512577
|
Rust
|
jTitor/leek2
|
/src/open-source/engine/modules/src/math/linear_algebra/mat4x4.rs
|
UTF-8
| 940
| 3.4375
| 3
|
[] |
no_license
|
/*!
Represents a 4x4 matrix
and common matrix operations.
*/
/// Converts an index representation (e.g. a (row, col) pair or a scalar)
/// into a flat 0..16 array index for matrix element access.
pub trait ToIndex {
    fn to_index(self) -> usize;
}

/**
Trait used to convert different matrix
representations to a 1D float array for
MatOps::from_floats().

#Expected Nontrivial Implementations
##Array of Arrays/Vectors ([[f32; 4]; 4], [Vec4; 4])
In these cases, the first index should be
by row, the second/vector component by column.
*/
pub trait ToMatrixArray {
    fn to_matrix_array(&self) -> [f32; 16];
}

/// Common operations on 4x4 matrices. `T` is the concrete matrix type
/// produced by the constructors (defaults to the implementing type).
pub trait MatOps<T=Self> {
    /// Returns the element at the given index by value.
    fn elem_at<I: ToIndex>(&self, i: I) -> f32;
    /// Returns a mutable reference to the element at the given index.
    fn mut_elem_at<I: ToIndex>(&mut self, i: I) -> &mut f32;
    ///Gets the maximum element in this vector.
    fn max_elem(&self) -> f32;
    ///Gets the minimum element in this vector.
    fn min_elem(&self) -> f32;
    /// Constructs a new matrix.
    /// NOTE(review): whether this is zeroed or identity is not visible here —
    /// check the implementations before relying on either.
    fn new() -> T;
    /**
    Converts a 1D array of 16 floats to a matrix array.
    Every 4 floats counts as one row of the matrix.
    */
    fn from_floats<M>(floats: M) -> T where M: ToMatrixArray;
}
| true
|
8d8cc1c69a88b9a46c299b045fb7a7b82c4067c8
|
Rust
|
mcdonaldm1993/fibonacci-heap
|
/src/fibonacci_heap.rs
|
UTF-8
| 7,772
| 3.359375
| 3
|
[] |
no_license
|
use std::collections::HashMap;
use std::hash::Hash;
use std::num::Float;
use super::fibonacci_node::FibonacciNodeType;
use super::fibonacci_node::FibNode;
/// Struct that represents the [Fibonacci Heap](http://en.wikipedia.org/wiki/Fibonacci_heap) data structure.
///
/// Algorithms for this are as seen in the [Introduction to Algorithms](http://en.wikipedia.org/wiki/Introduction_to_Algorithms) by Thomas H. Cormen, Charles E. Leiserson, Ronald L. Rivest, and Clifford Stein.
///
/// The key, K, is the priority used to order the heap. The value, V, is the data associated with the key.
pub struct FibonacciHeap<K, V> {
    // Hashmap for O(1) retrieval of nodes
    hash_map: HashMap<V, FibonacciNodeType<K, V>>,
    // Roots is a HashMap instead of a list for O(1) removal and insertion of root nodes
    // (wrapped in Option so consolidate() can temporarily take ownership).
    roots: Option<HashMap<V, FibonacciNodeType<K, V>>>,
    // Current minimum root; None iff the heap is empty.
    min: Option<FibonacciNodeType<K, V>>,
    // Number of elements currently stored.
    size: i32
}
// NOTE(review): this file imports `std::num::Float` (see the `use` lines),
// which only existed in pre-1.0 Rust — this code targets an ancient
// toolchain; confirm before modernizing.
impl<K, V> FibonacciHeap<K, V>
    where K: Clone + Eq + Ord,
          V: Clone + Eq + Hash
{
    /// Creates a new empty `FibonacciHeap`.
    pub fn new() -> FibonacciHeap<K, V> {
        FibonacciHeap{
            hash_map: HashMap::new(),
            roots: Some(HashMap::new()),
            min: None,
            size: 0
        }
    }

    /// Inserts the value into the heap with priority key.
    pub fn insert(&mut self, key: K, value: V) -> () {
        let node: FibonacciNodeType<K, V> = FibNode::new(key, value.clone());
        self.hash_map.insert(value, node.clone());
        let min = self.min.clone();
        match min {
            Some(ref m) => {
                // New node becomes a root; update min if it has a smaller key.
                self.roots.as_mut().unwrap().insert(node.get_value(), node.clone());
                if node.get_key() < m.get_key() {
                    self.min = Some(node.clone());
                }
            },
            None => {
                // First element: it is both the only root and the minimum.
                self.roots = Some(HashMap::new());
                self.roots.as_mut().unwrap().insert(node.get_value(), node.clone());
                self.min = Some(node.clone());
            }
        }
        self.size = self.size + 1;
    }

    /// Peeks at the minimum of the heap.
    ///
    /// Returns `None` if the heap is empty.
    pub fn minimum(&self) -> Option<(K, V)> {
        match self.min {
            Some(ref m) => Some((m.get_key().clone(), m.get_value().clone(),)),
            None => None
        }
    }

    // pub fn union(&mut self, other: FibonacciHeap<K, V>) -> () {
    //
    // }

    /// Exctracts the minimum of the heap.
    ///
    /// Returns `None` if the heap is empty.
    pub fn extract_min(&mut self) -> Option<(K, V)> {
        let z = self.min.clone();
        let mut result = None;
        match z {
            Some(z) => {
                // Promote all children of the old minimum to roots.
                let mut children = z.get_children();
                for child in &mut children {
                    child.set_parent(None);
                    self.roots.as_mut().unwrap().insert(child.get_value(), child.clone());
                }
                self.roots.as_mut().unwrap().remove(&z.get_value());
                {
                    if self.roots.as_mut().unwrap().is_empty() {
                        self.min = None;
                    } else {
                        // Pick an arbitrary root as a provisional minimum;
                        // consolidate() finds the true minimum afterwards.
                        let mut new_min = None;
                        for value in self.roots.as_mut().unwrap().values() {
                            new_min = Some(value.clone());
                        }
                        self.min = new_min;
                        self.consolidate();
                    }
                }
                self.hash_map.remove(&z.get_value());
                self.size = self.size -1;
                result = Some(( z.get_key(), z.get_value() ));
            },
            None => { }
        }
        result
    }

    /// Decreases the priority of the value to the key.
    ///
    /// Returns `Err` if the value is not in the heap or if the key is greater than the current priority of the value.
    pub fn decrease_key(&mut self, value: V, key: K) -> Result<(), ()> {
        let x;
        {
            let hash_node = self.hash_map.get(&value);
            if hash_node.is_none() {
                return Err(());
            } else {
                x = hash_node.unwrap().clone();
            }
        }
        if key > x.get_key() {
            return Err(());
        }
        x.set_key(key);
        let y = x.get_parent();
        match y {
            Some(y_some) => {
                // Heap order violated: cut x out and cascade up from its parent.
                if x.get_key() < y_some.get_key() {
                    self.cut(x.clone(), y_some.clone());
                    self.cascading_cut(y_some.clone());
                }
            },
            None => { }
        }
        if x.get_key() < self.min.clone().unwrap().get_key() {
            self.min = Some(x);
        }
        Ok(())
    }

    // pub fn delete(&mut self, value: V) -> () {
    //
    // }

    // Merges roots of equal rank until ranks are unique, then rebuilds the
    // root map and recomputes the minimum. Array size is bounded by
    // log_phi(n) (phi = golden ratio), the maximum possible rank.
    fn consolidate(&mut self) -> () {
        let base: f64 = (1.0 + 5.0.sqrt())/2.0;
        let log_n = (self.size as f64).log(base) as usize + 1;
        let mut array: Vec<Option<FibonacciNodeType<K, V>>> = (0..log_n).map(|_| None).collect();
        let roots = self.roots.take().unwrap();
        for (_, root) in roots {
            let mut x = root.clone();
            let mut d = x.rank();
            // Keep linking trees of equal rank; each link bumps the rank by 1.
            loop {
                if array[d].clone().is_none() {
                    break;
                }
                let mut y = array[d].clone().unwrap();
                // Ensure x is the smaller-keyed node, which becomes the parent.
                if x.get_key() > y.get_key() {
                    let n = x.clone();
                    x = y.clone();
                    y = n;
                }
                self.heap_link(y.clone(), x.clone());
                array[d] = None;
                d = d + 1;
            }
            array[d] = Some(x.clone());
        }
        // Rebuild the root map from the rank array and track the new minimum.
        self.min = None;
        self.roots = Some(HashMap::new());
        for i in 0..log_n {
            let min = self.min.clone();
            let i_root = array[i].clone();
            if i_root.is_none() {
                continue;
            }
            if min.is_none() {
                self.roots.as_mut().unwrap().insert(i_root.clone().unwrap().get_value(), i_root.clone().unwrap());
                self.min = i_root;
            } else {
                self.roots.as_mut().unwrap().insert(i_root.clone().unwrap().get_value(), i_root.clone().unwrap());
                if i_root.clone().unwrap().get_key() < min.unwrap().get_key() {
                    self.min = i_root;
                }
            }
        }
    }

    // Makes y a child of x (both must be roots of equal rank; x's key <= y's).
    fn heap_link(&mut self, y: FibonacciNodeType<K, V>, x: FibonacciNodeType<K, V>) -> () {
        // No need to remove from roots as self.roots has been consumed and will be replaced anyway
        x.add_child(y.clone());
        y.set_parent(Some(x.clone()));
        y.set_marked(false);
    }

    // Detaches x from its parent y and promotes x to an unmarked root.
    fn cut(&mut self, x: FibonacciNodeType<K, V>, y: FibonacciNodeType<K, V>) -> () {
        y.remove_child(x.clone());
        self.roots.as_mut().unwrap().insert(x.get_value(), x.clone());
        x.set_parent(None);
        x.set_marked(false);
    }

    // Walks up from y: a node losing its first child is marked; losing a
    // second child cuts it too, keeping trees shallow (amortized bounds).
    fn cascading_cut(&mut self, y: FibonacciNodeType<K, V>) -> () {
        let z = y.get_parent();
        match z {
            Some(z_some) => {
                if !y.is_marked() {
                    y.set_marked(true);
                } else {
                    self.cut(y, z_some.clone());
                    self.cascading_cut(z_some.clone())
                }
            },
            None => { }
        }
    }
}
| true
|
d4e204c6522191132621ccd8bca2519bd10aa023
|
Rust
|
redox-os/ion
|
/src/lib/expansion/braces.rs
|
UTF-8
| 5,124
| 3.1875
| 3
|
[
"MIT"
] |
permissive
|
use auto_enums::auto_enum;
use permutate::Permutator;
use smallvec::SmallVec;
#[derive(Debug)]
/// A token primitive for the `expand_braces` function.
pub enum BraceToken {
    /// Literal text copied into every expansion.
    Normal(small::String),
    /// Placeholder substituted with one element from the matching expander set.
    Expander,
}
/// Expands a tokenized brace pattern against the given expander element sets:
/// an empty iterator for zero sets, a single-set substitution for one, and
/// the full cartesian product otherwise. `#[auto_enum(Iterator)]` lets each
/// match arm return a different concrete iterator type behind one
/// `impl Iterator`.
#[auto_enum]
pub fn expand<'a>(
    tokens: &'a [BraceToken],
    expanders: &'a [&'a [&'a str]],
) -> impl Iterator<Item = small::String> + 'a {
    #[auto_enum(Iterator)]
    match expanders.len() {
        0 => ::std::iter::empty(),
        1 => SingleBraceExpand { elements: expanders[0].iter().copied(), tokens, loop_count: 0 },
        _ => MultipleBraceExpand::new(tokens, expanders),
    }
}
/// Appends `input`'s bytes to `output`, removing the backslashes that escape
/// the brace-expansion metacharacters `{`, `}` and `,` while leaving every
/// other backslash escape intact (a trailing lone backslash is dropped).
fn escape_string(output: &mut SmallVec<[u8; 64]>, input: &str) {
    output.reserve(input.len());
    let mut pending_backslash = false;
    for byte in input.bytes() {
        match (pending_backslash, byte) {
            // A backslash defers the decision to the byte that follows it.
            (false, b'\\') => pending_backslash = true,
            // Escaped metacharacter: emit it without its backslash.
            (true, b'{') | (true, b'}') | (true, b',') => {
                output.push(byte);
                pending_backslash = false;
            }
            // Any other escaped byte keeps its backslash.
            (true, _) => {
                output.push(b'\\');
                output.push(byte);
                pending_backslash = false;
            }
            (false, _) => output.push(byte),
        }
    }
}
// Iterator over the cartesian product of several expander sets applied to a
// token pattern.
pub struct MultipleBraceExpand<'a> {
    permutator: Permutator<'a, str>, // yields one combination per step
    tokens: &'a [BraceToken],        // pattern to substitute into
    buffer: Vec<&'a str>,            // scratch slot per expander, reused each step
}
impl<'a> MultipleBraceExpand<'a> {
    /// Creates an expansion iterator over the cartesian product of
    /// `expanders`, allocating one scratch slot per expander for
    /// `Permutator::next_with_buffer`.
    pub fn new(
        tokens: &'a [BraceToken],
        expanders: &'a [&'a [&'a str]],
    ) -> MultipleBraceExpand<'a> {
        let buffer = vec![""; expanders.len()];
        MultipleBraceExpand {
            permutator: Permutator::new(expanders),
            tokens,
            buffer,
        }
    }
}
impl<'a> Iterator for MultipleBraceExpand<'a> {
    type Item = small::String;

    // For each permutation: copy Normal tokens through escape_string and
    // substitute each Expander with the matching buffer element, in order.
    fn next(&mut self) -> Option<Self::Item> {
        if self.permutator.next_with_buffer(&mut self.buffer) {
            let mut strings = self.buffer.iter();
            let small_vec =
                self.tokens.iter().fold(SmallVec::with_capacity(64), |mut small_vec, token| {
                    escape_string(
                        &mut small_vec,
                        match *token {
                            BraceToken::Normal(ref text) => text,
                            // The token list has exactly one Expander per
                            // buffer slot, so next() cannot return None here.
                            BraceToken::Expander => strings.next().unwrap(),
                        },
                    );
                    small_vec
                });
            // escape_string only copies bytes out of valid &str values, so the
            // accumulated buffer is valid UTF-8.
            Some(unsafe { small::String::from_utf8_unchecked(small_vec.to_vec()) })
        } else {
            None
        }
    }
}
// Iterator for the single-expander case: one output string per element.
pub struct SingleBraceExpand<'a, 'b, I>
where
    I: Iterator<Item = &'a str>,
{
    elements: I,              // remaining elements of the single expander set
    tokens: &'b [BraceToken], // pattern to substitute into
    loop_count: usize,        // set to 1 after the first yield (see impl note)
}
impl<'a, 'b, I> Iterator for SingleBraceExpand<'a, 'b, I>
where
    I: Iterator<Item = &'a str>,
{
    type Item = small::String;

    // Substitute the next element into every Expander slot of the pattern.
    fn next(&mut self) -> Option<Self::Item> {
        let element = self.elements.next()?;
        let small_vec =
            self.tokens.iter().fold(SmallVec::with_capacity(64), |mut small_vec, token| {
                escape_string(
                    &mut small_vec,
                    match *token {
                        BraceToken::Normal(ref text) => text,
                        BraceToken::Expander => element,
                    },
                );
                small_vec
            });
        // NOTE(review): loop_count is written here but never read anywhere in
        // this module — it looks vestigial; confirm before removing the field.
        if self.loop_count == 0 {
            self.loop_count = 1;
        }
        // escape_string only copies bytes out of valid &str values, so the
        // buffer is valid UTF-8.
        Some(unsafe { small::String::from_utf8_unchecked(small_vec.to_vec()) })
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Three 2-element expanders interleaved with literal text must yield all
    // 2^3 = 8 combinations in lexicographic permutator order.
    #[test]
    fn test_multiple_brace_expand() {
        let expanders: &[&[&str]] = &[&["1", "2"][..], &["3", "4"][..], &["5", "6"][..]];
        let tokens: &[BraceToken] = &[
            BraceToken::Normal("AB".into()),
            BraceToken::Expander,
            BraceToken::Normal("CD".into()),
            BraceToken::Expander,
            BraceToken::Normal("EF".into()),
            BraceToken::Expander,
            BraceToken::Normal("GH".into()),
        ];
        assert_eq!(
            MultipleBraceExpand::new(tokens, expanders).collect::<Vec<small::String>>(),
            vec![
                small::String::from("AB1CD3EF5GH"),
                small::String::from("AB1CD3EF6GH"),
                small::String::from("AB1CD4EF5GH"),
                small::String::from("AB1CD4EF6GH"),
                small::String::from("AB2CD3EF5GH"),
                small::String::from("AB2CD3EF6GH"),
                small::String::from("AB2CD4EF5GH"),
                small::String::from("AB2CD4EF6GH"),
            ]
        );
    }

    // One expander with three elements: one output string per element.
    #[test]
    fn test_single_brace_expand() {
        let elements = &["one", "two", "three"];
        let tokens: &[BraceToken] = &[BraceToken::Normal("A=".into()), BraceToken::Expander];
        assert_eq!(
            SingleBraceExpand {
                elements: elements.iter().map(|element| *element),
                tokens,
                loop_count: 0
            }
            .collect::<Vec<small::String>>(),
            vec![
                small::String::from("A=one"),
                small::String::from("A=two"),
                small::String::from("A=three"),
            ]
        );
    }
}
| true
|
a55b035726c3e07575176818f43dde4031f79276
|
Rust
|
xosmig/xo_os
|
/libs/basics/src/sync/not_owning/spinlock.rs
|
UTF-8
| 656
| 2.578125
| 3
|
[
"MIT"
] |
permissive
|
prelude!();
use ::core::sync::atomic::*;
use super::Lock;
// Ticket spinlock: each waiter takes an increasing ticket from `next` and
// spins until `current` reaches its ticket, which grants FIFO fairness.
pub struct SpinLock {
    current: AtomicUsize, // ticket currently allowed to hold the lock
    next: AtomicUsize,    // next ticket number to hand out
}
impl SpinLock {
    // const fn so locks can be created in statics at compile time.
    pub const fn new() -> Self {
        SpinLock {
            current: AtomicUsize::new(0),
            next: AtomicUsize::new(0),
        }
    }
}
// Default delegates to the const constructor: an unlocked lock.
impl Default for SpinLock {
    fn default() -> Self {
        Self::new()
    }
}
impl Lock for SpinLock {
    /// Takes a ticket and spins until it is served. FIFO-fair: waiters are
    /// granted the lock in ticket order.
    fn acquire(&self) {
        // Relaxed is sufficient for handing out tickets; only the Acquire
        // load below orders the critical section against prior releases.
        let ticket = self.next.fetch_add(1, Ordering::Relaxed);
        while self.current.load(Ordering::Acquire) != ticket {
            // IMPROVEMENT: emit the architecture's spin-wait hint (x86 PAUSE
            // etc.) inside the busy loop — reduces power draw and cache-line
            // contention without changing lock semantics.
            ::core::hint::spin_loop();
        }
    }

    /// Serves the next ticket; Release publishes the critical section's
    /// writes to the next acquirer.
    fn release(&self) {
        self.current.fetch_add(1, Ordering::Release);
    }
}
| true
|
a9b01f878501f94b2a4255da9c7c0cd9a0ca3787
|
Rust
|
rustyforks/moonlander
|
/src/gui/mod.rs
|
UTF-8
| 8,315
| 2.5625
| 3
|
[
"MIT"
] |
permissive
|
mod header;
use gtk::prelude::*;
use gtk::Inhibit;
use gtk::WidgetExt;
use relm::{connect, init, Component, Relm, Widget};
use relm_derive::{widget, Msg};
use header::{Header, Msg as HeaderMsg};
use relm_moonrender::{Moonrender, Msg as MoonrenderMsg};
#[derive(Msg)]
// Top-level relm messages for the browser window.
pub enum Msg {
    Quit,
    Goto(String),               // user-initiated navigation to a URL
    GotoDone,                   // renderer finished loading the page
    Redirect(String),           // page-initiated navigation (link/redirect)
    UnsupportedRedirect(String), // redirect to a scheme the renderer can't handle
    Back,
    Forward,
    Refresh,
    ShowTooltip(String),        // show link target in the status bar
    HideTooltip,
}
// Window state shared across relm updates.
pub struct Model {
    relm: Relm<Win>,              // handle for emitting messages to ourselves
    header: Component<Header>,    // toolbar (URL entry, back/forward/refresh)
    status_ctx_goto: u32,         // statusbar context id for navigation messages
    status_ctx_tooltip: u32,      // statusbar context id for link tooltips
    history: Vec<String>,         // URLs for Back navigation
    forward_history: Vec<String>, // URLs for Forward navigation
}
#[widget]
impl Widget for Win {
    // Builds the initial model. The header component is created eagerly so
    // that its message stream can be wired up in init_view.
    fn model(relm: &Relm<Self>, _: ()) -> Model {
        let header = init::<Header>(()).expect("Header cannot be initialized");
        Model {
            header,
            relm: relm.clone(),
            status_ctx_goto: 0,
            status_ctx_tooltip: 0,
            history: vec![],
            forward_history: vec![],
        }
    }
    // Wires header/renderer signals to window messages and loads the homepage.
    fn init_view(&mut self) {
        let header = &self.model.header;
        let content = &self.content;
        // Separate statusbar contexts so navigation and tooltip messages can
        // be pushed and cleared independently of each other.
        self.model.status_ctx_goto = self.status.get_context_id("Navigation");
        self.model.status_ctx_tooltip = self.status.get_context_id("Tooltip");
        self.status.hide();
        // Header bar events -> window messages.
        connect!(header@HeaderMsg::Goto(ref url), self.model.relm, Msg::Goto(url.to_owned()));
        connect!(header@HeaderMsg::Back, self.model.relm, Msg::Back);
        connect!(header@HeaderMsg::Forward, self.model.relm, Msg::Forward);
        connect!(header@HeaderMsg::Refresh, self.model.relm, Msg::Refresh);
        // Renderer events -> window messages.
        connect!(content@MoonrenderMsg::Back, self.model.relm, Msg::Back);
        connect!(content@MoonrenderMsg::Forward, self.model.relm, Msg::Forward);
        connect!(content@MoonrenderMsg::ShowTooltip(ref tip), self.model.relm, Msg::ShowTooltip(tip.to_owned()));
        connect!(content@MoonrenderMsg::HideTooltip, self.model.relm, Msg::HideTooltip);
        connect!(content@MoonrenderMsg::Done, self.model.relm, Msg::GotoDone);
        connect!(content@MoonrenderMsg::Goto(ref url), self.model.relm, Msg::Redirect(url.to_owned()));
        connect!(content@MoonrenderMsg::UnsupportedRedirect(ref url), self.model.relm, Msg::UnsupportedRedirect(url.clone()));
        // Load the configured homepage on startup.
        let url = crate::CONFIG.homepage.clone();
        self.model.relm.stream().emit(Msg::Goto(url));
    }
    fn update(&mut self, event: Msg) {
        match event {
            Msg::Quit => gtk::main_quit(),
            // User-initiated navigation: forward to the renderer and show a
            // loading message in the status bar.
            Msg::Goto(url) => {
                self.content.emit(MoonrenderMsg::Goto(url.clone()));
                self.status.show();
                self.status.remove_all(self.model.status_ctx_goto);
                self.status
                    .push(self.model.status_ctx_goto, &format!("Loading {}...", url));
            }
            // URL scheme we can't render: offer to open in the system browser
            // and roll the history entry back.
            Msg::UnsupportedRedirect(url) => {
                let d = gtk::MessageDialog::new(
                    Some(&self.window),
                    gtk::DialogFlags::all(),
                    gtk::MessageType::Question,
                    gtk::ButtonsType::YesNo,
                    &format!("Moonlander does not support the following URL:\n\n{}\n\nDo you want to open it in your default browser?", url),
                );
                d.set_title("Unsupported URL");
                d.connect_response(move |d, resp| {
                    if let gtk::ResponseType::Yes = resp {
                        webbrowser::open(&url).unwrap();
                    }
                    d.destroy();
                });
                // Undo the history push done for this navigation.
                self.model.history.pop();
                if self.model.history.len() < 2 {
                    self.model.header.emit(HeaderMsg::EnableBtnBack(false));
                }
                self.model.relm.stream().emit(Msg::GotoDone);
                d.show();
            }
            // Renderer-driven navigation: record it in history and update the
            // header's URL entry and button sensitivity.
            Msg::Redirect(url) => {
                self.model.header.emit(HeaderMsg::Redirect(url.clone()));
                self.status.show();
                self.model.history.push(url.clone());
                self.model.header.emit(HeaderMsg::EnableBtnRefresh(true));
                log::debug!("r: {:?}", self.model.history);
                if self.model.history.len() >= 2 {
                    self.model.header.emit(HeaderMsg::EnableBtnBack(true));
                }
                self.status.remove_all(self.model.status_ctx_goto);
                self.status
                    .push(self.model.status_ctx_goto, &format!("Loading {}...", url));
            }
            Msg::GotoDone => {
                self.status.remove_all(self.model.status_ctx_goto);
                // Keep the statusbar visible if another context (e.g. a
                // tooltip) still has a message showing.
                // NOTE(review): the original author marked this probe
                // "useless" -- confirm whether get_message_area ever yields a
                // non-empty label here.
                if let Some(area) = self.status.get_message_area() {
                    if let Some(widget) = area.get_children().iter().cloned().next() {
                        if let Ok(label) = widget.downcast::<gtk::Label>() {
                            if let Some(text) = label.get_text() {
                                if !text.is_empty() {
                                    return;
                                }
                            }
                        };
                    }
                }
                self.status.hide();
            }
            Msg::ShowTooltip(tip) => {
                self.status.show();
                self.status.push(self.model.status_ctx_tooltip, &tip);
            }
            Msg::HideTooltip => {
                self.status.remove_all(self.model.status_ctx_tooltip);
                // Same visibility probe as in GotoDone (see note there).
                if let Some(area) = self.status.get_message_area() {
                    if let Some(widget) = area.get_children().iter().cloned().next() {
                        if let Ok(label) = widget.downcast::<gtk::Label>() {
                            if let Some(text) = label.get_text() {
                                if !text.is_empty() {
                                    return;
                                }
                            }
                        };
                    }
                }
                self.status.hide();
            }
            // Pop the current page onto the forward stack, then re-navigate
            // to the new top of the history.
            Msg::Back => {
                if let Some(prev) = self.model.history.pop() {
                    self.model.forward_history.push(prev)
                }
                if let Some(url) = self.model.history.last() {
                    let url = url.to_owned();
                    self.model.history.pop(); // the signals push again
                    self.content.emit(MoonrenderMsg::Goto(url.clone()));
                    self.model.header.emit(HeaderMsg::Redirect(url));
                }
                if self.model.history.len() < 2 {
                    self.model.header.emit(HeaderMsg::EnableBtnBack(false));
                }
                if !self.model.forward_history.is_empty() {
                    self.model.header.emit(HeaderMsg::EnableBtnForward(true));
                }
            }
            // Replay the most recently "Back"-ed page, if any.
            Msg::Forward => {
                if let Some(url) = self.model.forward_history.pop() {
                    self.content.emit(MoonrenderMsg::Goto(url.clone()));
                    self.model.header.emit(HeaderMsg::Redirect(url));
                }
                if self.model.forward_history.is_empty() {
                    self.model.header.emit(HeaderMsg::EnableBtnForward(false));
                }
            }
            // Reload: pop the current page and navigate to it again (the
            // Redirect signal will push it back onto the history).
            Msg::Refresh => {
                if let Some(url) = self.model.history.pop() {
                    self.content.emit(MoonrenderMsg::Goto(url.clone()));
                    self.model.header.emit(HeaderMsg::Redirect(url));
                }
            }
        }
    }
    // relm view DSL: header bar as titlebar, renderer above a statusbar.
    view! {
        #[name="window"]
        gtk::ApplicationWindow {
            titlebar: Some(self.model.header.widget()),
            gtk::Box {
                orientation: gtk::Orientation::Vertical,
                #[name="content"]
                Moonrender(crate::CONFIG.theme.clone()) {
                    child: {
                        expand: true
                    },
                },
                #[name="status"]
                gtk::Statusbar {},
            },
            delete_event(_, _) => (Msg::Quit, Inhibit(false)),
        }
    }
}
| true
|
8328f8a7156e293b791e2e862975700d54b11600
|
Rust
|
a-r-n/wasm-rust
|
/src/wasm.rs
|
UTF-8
| 12,600
| 3.140625
| 3
|
[] |
no_license
|
use std::collections::HashMap;
use std::convert::TryFrom;
use crate::error::Error;
/// The allowable types for any real value in wasm (u8 and others are packed)
#[derive(Copy, Clone, PartialEq)]
pub enum PrimitiveType {
    I32, // 32-bit integer (sign-agnostic, as in the wasm spec)
    I64, // 64-bit integer (sign-agnostic)
    F32, // 32-bit IEEE-754 float
    F64, // 64-bit IEEE-754 float
}
// Map Rust primitives to their wasm type tag. The value itself is discarded;
// only the static type of the argument matters.
impl From<i32> for PrimitiveType {
    fn from(_: i32) -> PrimitiveType {
        PrimitiveType::I32
    }
}
impl From<i64> for PrimitiveType {
    fn from(_: i64) -> PrimitiveType {
        PrimitiveType::I64
    }
}
impl From<f32> for PrimitiveType {
    fn from(_: f32) -> PrimitiveType {
        PrimitiveType::F32
    }
}
impl From<f64> for PrimitiveType {
    fn from(_: f64) -> PrimitiveType {
        PrimitiveType::F64
    }
}
/// Storage type for all wasm values
///
/// An untagged union: the active field is tracked externally by the
/// `PrimitiveType` stored alongside it in `Value`. Reading the wrong field
/// reinterprets the raw bits.
#[derive(Copy, Clone)]
pub union InternalValue {
    i32: i32,
    i64: i64,
    f32: f32,
    f64: f64,
}
// Wrap a Rust primitive in the union, activating the matching field.
impl From<i32> for InternalValue {
    fn from(x: i32) -> InternalValue {
        InternalValue { i32: x }
    }
}
impl From<i64> for InternalValue {
    fn from(x: i64) -> InternalValue {
        InternalValue { i64: x }
    }
}
impl From<f32> for InternalValue {
    fn from(x: f32) -> InternalValue {
        InternalValue { f32: x }
    }
}
impl From<f64> for InternalValue {
    fn from(x: f64) -> InternalValue {
        InternalValue { f64: x }
    }
}
/// Representation of all wasm values
///
/// A tagged union: `t` records which field of the untagged `InternalValue`
/// is active.
#[derive(Copy, Clone)]
pub struct Value {
    t: PrimitiveType, // tag: which union field of `v` is valid
    v: InternalValue, // raw storage
}
impl Value {
    /// Builds a tagged value from any Rust primitive that maps to a wasm type.
    pub fn new<T: Into<InternalValue> + Into<PrimitiveType> + Copy>(x: T) -> Self {
        Self {
            t: x.into(),
            v: x.into(),
        }
    }
    /// Reinterprets the low bits of `v` as a value of type `t`
    /// (used e.g. when loading raw bytes from linear memory).
    pub fn from_explicit_type(t: PrimitiveType, v: u64) -> Value {
        Self {
            t,
            v: InternalValue { i64: v as i64 },
        }
    }
    // The *_unchecked accessors do not consult the tag `t`: reading a field
    // other than the active one reinterprets the stored bits. Callers must
    // have already matched on `t`.
    #[inline]
    pub fn as_i32_unchecked(&self) -> i32 {
        unsafe { self.v.i32 }
    }
    #[inline]
    pub fn as_i64_unchecked(&self) -> i64 {
        unsafe { self.v.i64 }
    }
    #[inline]
    pub fn as_f32_unchecked(&self) -> f32 {
        unsafe { self.v.f32 }
    }
    #[inline]
    pub fn as_f64_unchecked(&self) -> f64 {
        unsafe { self.v.f64 }
    }
}
// Construct a correctly tagged Value from each Rust primitive.
impl From<i32> for Value {
    fn from(v: i32) -> Self {
        Self {
            t: PrimitiveType::from(v),
            v: InternalValue::from(v),
        }
    }
}
impl From<i64> for Value {
    fn from(v: i64) -> Self {
        Self {
            t: PrimitiveType::from(v),
            v: InternalValue::from(v),
        }
    }
}
impl From<f32> for Value {
    fn from(v: f32) -> Self {
        Self {
            t: PrimitiveType::from(v),
            v: InternalValue::from(v),
        }
    }
}
impl From<f64> for Value {
    fn from(v: f64) -> Self {
        Self {
            t: PrimitiveType::from(v),
            v: InternalValue::from(v),
        }
    }
}
// Checked extraction: only an I32-tagged value may be read as u32
// (the bits are reinterpreted, not range-checked).
impl TryFrom<Value> for u32 {
    type Error = Error;
    fn try_from(x: Value) -> Result<u32, Error> {
        match x.t {
            PrimitiveType::I32 => Ok(unsafe { x.v.i32 as u32 }),
            _ => Err(Error::Misc("Cannot extract as u32 from incorrect type")),
        }
    }
}
// The zero value of each wasm type; used to initialize declared locals.
impl From<&PrimitiveType> for Value {
    fn from(x: &PrimitiveType) -> Value {
        match x {
            PrimitiveType::I32 => Value::new(0_i32),
            PrimitiveType::I64 => Value::new(0_i64),
            PrimitiveType::F32 => Value::new(0_f32),
            PrimitiveType::F64 => Value::new(0_f64),
        }
    }
}
// Debug-friendly rendering as "(type:value)". The tag `t` selects which
// union field is read, so each access is sound.
impl std::fmt::Display for Value {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
        unsafe {
            match self.t {
                PrimitiveType::I32 => {
                    write!(f, "(i32:{})", self.v.i32)
                }
                PrimitiveType::I64 => {
                    write!(f, "(i64:{})", self.v.i64)
                }
                PrimitiveType::F32 => {
                    write!(f, "(f32:{})", self.v.f32)
                }
                PrimitiveType::F64 => {
                    write!(f, "(f64:{})", self.v.f64)
                }
            }
        }
    }
}
/// Represents expected runtime errors, i.e. problems with the program, not the interpreter
pub enum Trap {
    MemoryOutOfBounds, // load/store outside linear memory bounds
    UndefinedDivision, // division by zero or overflowing INT_MIN / -1
}
/// Control-flow outcome of executing a single instruction.
pub enum ControlInfo {
    Branch(u32), // branch to the given relative label depth
    Return,      // return from the current function
    Trap(Trap),  // runtime trap raised by the instruction
    None,        // fall through to the next instruction
}
/// Representation of a wasm stack.
/// All functions use a new stack when called.
#[derive(Default)]
pub struct Stack {
    values: Vec<Value>, // operand stack; top of stack is the last element
}
impl Stack {
fn new() -> Self {
Self::default()
}
fn push_value(&mut self, v: Value) {
log::debug!("Pushing {}", v);
self.values.push(v);
}
pub fn pop_value(&mut self) -> Result<Value, Error> {
log::debug!("Current stack len {}", self.values.len());
if self.values.is_empty() {
Err(Error::StackViolation)
} else {
unsafe { Ok(self.values.pop().unwrap_unchecked()) }
}
}
/// Return the 0-indexed offset'th value from the stack (such that 0 is the most recently pushed value)
pub fn fetch_value(&self, offset: usize) -> Result<&Value, Error> {
let stack_size = self.values.len();
let offset_to_fetch = stack_size - 1 - offset;
match self.values.get(offset_to_fetch) {
Some(n) => Ok(n),
None => {
log::debug!("Try to read {} stack size {}", offset_to_fetch, stack_size);
Err(Error::StackViolation)
}
}
}
pub fn assert_empty(&self) -> Result<(), Error> {
if self.values.is_empty() {
Ok(())
} else {
Err(Error::StackViolation)
}
}
}
// Multi-line dump of the whole stack, bottom-to-top, for debug logging.
impl std::fmt::Display for Stack {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
        write!(f, "Current stack:\n[")?;
        for v in self.values.iter() {
            writeln!(f, " {}", v)?;
        }
        write!(f, "]\n\n")?;
        Ok(())
    }
}
pub trait Instruction {
    /// A wasm instruction may modify any state of the program
    ///
    /// Receives the operand stack, linear memory, the current function's
    /// locals, and the module's function table (for calls), and reports how
    /// control flow should proceed.
    // NOTE(review): `&Vec<Function>` would more idiomatically be
    // `&[Function]`, but changing the trait signature would break every
    // implementor in the `inst` module.
    fn execute(
        &self,
        stack: &mut Stack,
        memory: &mut Memory,
        locals: &mut Vec<Value>,
        functions: &Vec<Function>,
    ) -> Result<ControlInfo, Error>;
}
pub mod inst;
// Wasm table section: indices into the module's function vector.
// NOTE(review): currently unused by the visible code -- confirm against the
// parser before extending.
#[derive(Default)]
struct Table {
    functions: Vec<usize>,
}
/// A wasm function: its signature, declared locals, and decoded body.
pub struct Function {
    r#type: FunctionType,                   // parameter/return signature
    local_types: Vec<PrimitiveType>,        // declared (non-parameter) locals
    instructions: Vec<Box<dyn Instruction>>, // decoded body, executed in order
}
impl Function {
    /// Creates an empty function with the given signature.
    pub fn new(r#type: FunctionType) -> Self {
        Self {
            r#type,
            local_types: Vec::new(),
            instructions: Vec::new(),
        }
    }
    /// Appends one decoded instruction to the body.
    pub fn push_inst(&mut self, i: Box<dyn Instruction>) {
        self.instructions.push(i);
    }
    /// Number of parameters declared by the function's type.
    pub fn num_params(&self) -> usize {
        self.r#type.num_params()
    }
    /// Number of declared (non-parameter) locals.
    pub fn num_locals(&self) -> usize {
        self.local_types.len()
    }
    /// Declares `count` additional locals of type `t`.
    pub fn new_locals(&mut self, count: usize, t: PrimitiveType) {
        // `extend` reserves and fills in one step (replaces reserve + push loop).
        self.local_types.extend(std::iter::repeat(t).take(count));
    }
    /// Pops the (single) return value and verifies the stack is then empty.
    fn do_return(mut stack: Stack) -> Result<Value, Error> {
        let ret = stack.pop_value();
        stack.assert_empty()?;
        ret
    }
    /// Executes the function body against a fresh stack.
    ///
    /// `args` become the first locals, followed by zero-initialized declared
    /// locals, matching wasm local indexing.
    pub fn call(
        &self,
        functions: &Vec<Function>,
        memory: &mut Memory,
        args: Vec<Value>,
    ) -> Result<Value, Error> {
        let mut stack = Stack::new();
        let mut locals = Vec::with_capacity(self.num_params() + self.num_locals());
        // Parameters first, then the zero value of each declared local type.
        locals.extend(args);
        locals.extend(self.local_types.iter().map(Value::from));
        for instruction in &self.instructions {
            match instruction.execute(&mut stack, memory, &mut locals, functions)? {
                ControlInfo::Return => {
                    return Self::do_return(stack);
                }
                ControlInfo::Trap(Trap::MemoryOutOfBounds) => panic!(), //TODO: don't panic, handle traps gracefully
                ControlInfo::Trap(Trap::UndefinedDivision) => panic!(),
                _ => (),
            };
        }
        // Falling off the end of the body is an implicit return.
        Self::do_return(stack)
    }
}
/// Wasm linear memory with lazily grown backing storage.
#[derive(Default)]
pub struct Memory {
    bytes: Vec<u8>,          // backing store; grown on demand by `write`
    virtual_size_pages: u32, // current memory size visible to the program
    upper_limit_pages: u32,  // maximum size the memory may grow to
}
// Wasm page size: 64 KiB.
const PAGE_SIZE: u64 = 0x10000;
impl Memory {
pub fn new(min: u32, max: u32) -> Self {
let mut s = Self {
bytes: Vec::with_capacity((PAGE_SIZE * min as u64) as usize),
virtual_size_pages: min,
upper_limit_pages: max,
};
s.write(PAGE_SIZE * min as u64, 32, 4); // It looks like
s
}
pub fn write(&mut self, mut value: u64, bitwidth: u8, address: u64) -> Option<()> {
log::debug!(
"Write to address 0x{:x} with bitwidth {} and value 0x{:x}",
address,
bitwidth,
value
);
if bitwidth % 8 != 0 {
// Probably don't even need to implement this
panic!();
}
let bytes_to_write = bitwidth / 8;
let last_write_address = address + bytes_to_write as u64;
// Check for out of bounds access
if last_write_address > PAGE_SIZE * self.virtual_size_pages as u64 {
return None;
}
// Resize internal vector if needed
if self.bytes.is_empty() || last_write_address > (self.bytes.len() - 1) as u64 {
self.bytes.resize((last_write_address + 1) as usize, 0);
}
for i in (address..(address + bytes_to_write as u64)).rev() {
self.bytes[i as usize] = (value & 0xFF) as u8;
value >>= 8;
}
Some(())
}
pub fn read(
&mut self,
result_type: PrimitiveType,
bitwidth: u8,
address: u64,
) -> Option<Value> {
let bytes_to_read = (bitwidth / 8) as u64;
let mut result = 0_u64;
for i in address..(address + bytes_to_read) {
result <<= 8;
result += self.bytes[i as usize] as u64;
}
log::debug!(
"Read from address 0x{:x} with bitwidth {} and value 0x{:x}",
address,
bitwidth,
result
);
Some(Value::from_explicit_type(result_type, result))
}
}
/// A wasm function signature: parameter and return types, in order.
#[derive(Default, Clone)]
pub struct FunctionType {
    pub params: Vec<PrimitiveType>,
    pub returns: Vec<PrimitiveType>,
}
impl FunctionType {
    /// Builds a signature from its parameter and return type lists.
    pub fn new(params: Vec<PrimitiveType>, returns: Vec<PrimitiveType>) -> Self {
        Self { params, returns }
    }
    /// Number of parameters in the signature.
    pub fn num_params(&self) -> usize {
        self.params.len()
    }
    /// Iterator over the parameter types, in declaration order.
    pub fn params_iter(&self) -> std::slice::Iter<PrimitiveType> {
        self.params.iter()
    }
}
/// An entry of the wasm export section: the exported kind plus its index
/// into the corresponding module-level vector.
pub enum Export {
    Function(usize),
    Table(usize),
    Memory(usize),
    Global(usize),
}
/// A decoded wasm module: all sections needed for execution.
#[derive(Default)]
pub struct Module {
    function_types: Vec<FunctionType>, // type section
    functions: Vec<Function>,          // function + code sections
    exports: HashMap<String, Export>,  // export section, keyed by export name
    table: Table,                      // table section (indirect calls)
    memory: Memory,                    // linear memory
    globals: Vec<Value>,               // global section
}
impl Module {
pub fn new() -> Self {
Self::default()
}
pub fn call(&mut self, function_name: &str, args: Vec<Value>) -> Result<Value, Error> {
let function_index = match self.exports.get(function_name) {
Some(Export::Function(n)) => *n,
_ => return Err(Error::Misc("On module call, given name is not a function")),
};
let function = match self.functions.get(function_index) {
Some(n) => n,
None => {
return Err(Error::Misc(
"Function index given by export section is not valid",
))
}
};
function.call(&self.functions, &mut self.memory, args)
}
pub fn add_function_type(&mut self, ft: FunctionType) {
self.function_types.push(ft);
}
pub fn get_function_type(&self, i: usize) -> FunctionType {
self.function_types[i].clone()
}
pub fn add_function(&mut self, f: Function) {
self.functions.push(f);
}
pub fn add_memory(&mut self, m: Memory) {
self.memory = m;
}
pub fn add_export(&mut self, name: String, export: Export) -> Result<(), Error> {
if self.exports.contains_key(&name) {
return Err(Error::UnexpectedData("Expected a unique export name"));
}
self.exports.insert(name, export);
Ok(())
}
pub fn get_mut_function(&mut self, i: usize) -> &mut Function {
&mut self.functions[i]
}
}
| true
|
7179074ebae9647ac2cdde2bad83f09cee59c077
|
Rust
|
benferse/batgrep
|
/src/main.rs
|
UTF-8
| 4,285
| 3.796875
| 4
|
[
"MIT"
] |
permissive
|
//! A small utility to parse search results from Ag and feed them
//! into bat for pretty printing
use std::env::args;
use std::error::Error;
use std::io::{self, Write};
use std::process::Command;
/// Invokes bat, which is assumed to be in the runtime path
///
/// Shows a ~40-line window around `center` (the matched line) with that line
/// highlighted, forwarding bat's stdout/stderr to our own streams.
fn run_bat(filepath: String, center: usize) -> Result<(), Box<dyn Error>> {
    // The default size of the preview window
    let lines: usize = 40;
    // Start roughly one third of the window above the match; clamp to 1 when
    // the match is near the top of the file.
    // NOTE(review): a center of exactly lines/3 still yields first == 0 --
    // confirm that bat accepts 0 as a --line-range start.
    let first: usize = match center.checked_sub(lines / 3) {
        Some(u) => u,
        None => 1,
    };
    // Saturate at usize::MAX rather than overflow when computing the window end.
    let last: usize = match first.checked_add(lines - 1) {
        Some(u) => u,
        None => usize::MAX,
    };
    let range_arg = format!("--line-range={}:{}", first, last);
    let highlight_arg = format!("--highlight-line={}", center);
    // --pager=never and --color=always keep the output suitable for embedding
    // (e.g. inside an fzf preview window).
    let output = Command::new("bat")
        .arg("--style=numbers")
        .arg("--color=always")
        .arg("--pager=never")
        .arg(range_arg)
        .arg(highlight_arg)
        .arg(filepath)
        .output()?;
    io::stdout().write_all(&output.stdout)?;
    io::stderr().write_all(&output.stderr)?;
    Ok(())
}
/// Takes an argument assumed to be a string in grep line result
/// format and breaks it into a file path and the line offset
///
/// # Examples
///
/// ```
/// let (f,o) = process_arg("foo:10:20: something"));
/// assert_eq!(f, "foo");
/// assert_eq!(o, 10);
/// ```
fn process_arg(arg: String) -> Option<(String, usize)> {
    // Expected shape: filename:line_no:column_no: line_contents
    // A Windows drive designation ("c:\...") adds one extra leading chunk.
    let mut chunks: Vec<&str> = arg.split(':').collect();
    // A well-formed grep result always has at least four chunks.
    if chunks.len() < 4 {
        return None;
    }
    // Sketchy-but-sufficient drive-letter heuristic: a one-character first
    // chunk whose successor begins with a Windows path separator.
    let has_drive = chunks[0].len() == 1 && chunks[1].chars().next().unwrap_or(' ') == '\\';
    // Keep filename (+ optional drive), line number, and column number;
    // everything after that is line contents and gets discarded.
    let keep = if has_drive { 4 } else { 3 };
    chunks.truncate(keep);
    // Drop the column number -- it is not interesting here.
    let _column_number = chunks.pop();
    // The line number must parse as usize, otherwise reject the whole line.
    let line_number = chunks.pop()?.parse::<usize>().ok()?;
    // Whatever remains (possibly "drive:path") is the file path.
    Some((chunks.join(":"), line_number))
}
/// Entry point: every CLI argument is parsed as a grep-style result line
/// and, when valid, previewed through `bat`.
fn main() -> Result<(), Box<dyn Error>>{
    // Skip the exe name argument and process the rest as file name
    // and line offsets in grep format
    for arg in args().skip(1) {
        // `if let` replaces the previous `match` with an empty None arm;
        // malformed arguments are silently skipped by design.
        if let Some((file, line)) = process_arg(arg) {
            run_bat(file, line)?;
        }
    }
    Ok(())
}
#[cfg(test)]
mod tests {
    use crate::process_arg;
    // Plain "file:line:col: contents" line parses into (file, line).
    #[test]
    fn parses_result_no_drive() {
        let (f, o) = process_arg("foo:10:20: something".to_string()).unwrap();
        assert_eq!(f, "foo");
        assert_eq!(o, 10);
    }
    // The drive designation stays part of the returned path.
    #[test]
    fn parses_result_with_drive_letter() {
        let (f, o) = process_arg("c:\\foo:10:20: something".to_string()).unwrap();
        assert_eq!(f, "c:\\foo");
        assert_eq!(o, 10);
    }
    // Colons at the end of the contents must not confuse the parser.
    #[test]
    fn ignores_trailing_colon() {
        let (f, o) = process_arg("foo:10:20: something:".to_string()).unwrap();
        assert_eq!(f, "foo");
        assert_eq!(o, 10);
    }
    // Colons inside the contents must not confuse the parser either.
    #[test]
    fn ignores_embedded_colons() {
        let (f, o) = process_arg("foo:10:20: some:thing".to_string()).unwrap();
        assert_eq!(f, "foo");
        assert_eq!(o, 10);
    }
    // Fewer than four chunks is not a valid grep result line.
    #[test]
    fn returns_none_if_missing_field() {
        assert!(process_arg("foo:10:".to_string()).is_none());
    }
    // A non-numeric line number rejects the whole line.
    #[test]
    fn returns_none_if_line_number_nonnumeric() {
        assert!(process_arg("foo:1x:20: something".to_string()).is_none());
    }
}
| true
|
752e24dde97756eba2a3f15f8ff19ae90c35c585
|
Rust
|
pgimalac/rustomaton
|
/src/parser.rs
|
UTF-8
| 3,983
| 3.21875
| 3
|
[
"MIT"
] |
permissive
|
use crate::{parser::Token::*, regex::Operations};
use logos::Logos;
use std::collections::{BTreeSet, VecDeque};
/// The token used by [`logos`](/logos/index.html`]).
#[derive(Logos, Debug, PartialEq, Clone)]
pub enum Token {
    // End of input (emitted by the lexer, never stored in the token queue).
    #[end]
    End,
    // Lexing error.
    #[error]
    Error,
    // '|' -- alternation.
    #[token = "|"]
    Union,
    // '(' -- group start.
    #[token = "("]
    Lpar,
    // ')' -- group end.
    #[token = ")"]
    Rpar,
    // '.' -- any single character.
    #[token = "."]
    Dot,
    // '*' -- zero or more repetitions.
    #[token = "*"]
    Kleene,
    // '?' -- zero or one repetition.
    #[token = "?"]
    Question,
    // '+' -- one or more repetitions.
    #[token = "+"]
    Plus,
    // '𝜀' -- the empty word.
    #[token = "𝜀"]
    Epsilon,
    // Any character that is not one of the metacharacters above.
    #[regex = "[^|+().*?𝜀]"]
    Letter,
}
/*
(REG) > REG* = REG+ = REG? > REGREG > REG|REG
REG ::= .
𝜀
CHAR
(REG)
REG*
REG+
REG?
REGREG
REG|REG
*/
/// Lexes the whole input into a queue of (token, matched slice) pairs.
/// The `End` token itself is not stored; it only terminates the loop.
pub(crate) fn tokens(s: &str) -> VecDeque<(Token, &str)> {
    let mut lexer = Token::lexer(s);
    let mut tokens = VecDeque::new();
    while lexer.token != Token::End {
        // Keep the source slice so `Letter` tokens can recover their char.
        tokens.push_back((lexer.token.clone(), lexer.slice()));
        lexer.advance();
    }
    tokens
}
/// Returns a clone of the next pending token without consuming it
/// (`None` when the queue is exhausted).
pub(crate) fn peak(tokens: &mut VecDeque<(Token, &str)>) -> Option<Token> {
    tokens.front().map(|(token, _slice)| token.clone())
}
/// Parses a `|`-separated sequence of concatenations.
///
/// Alternatives are collected into a `BTreeSet` (which also deduplicates
/// them); a singleton union collapses to its only element.
pub(crate) fn read_union(tokens: &mut VecDeque<(Token, &str)>) -> Result<Operations<char>, String> {
    let mut u = BTreeSet::new();
    loop {
        u.insert(read_concat(tokens)?);
        if peak(tokens) == Some(Union) {
            // Consume the '|' and parse the next alternative.
            tokens.pop_front();
        } else {
            break;
        }
    }
    if u.len() == 1 {
        // Unwrap the singleton: clone the key, then take ownership of the
        // stored element (sets offer no direct "pop the only element").
        let e = u.iter().next().unwrap().clone();
        Ok(u.take(&e).unwrap())
    } else {
        Ok(Operations::Union(u))
    }
}
/// Parses a parenthesized group `( union )` followed by any postfix
/// quantifiers applying to the whole group.
pub(crate) fn read_paren(tokens: &mut VecDeque<(Token, &str)>) -> Result<Operations<char>, String> {
    if peak(tokens) != Some(Lpar) {
        return Err("Expected left parenthesis.".to_string());
    }
    tokens.pop_front();
    let o = read_union(tokens)?;
    if peak(tokens) != Some(Rpar) {
        return Err("Expected right parenthesis.".to_string());
    }
    tokens.pop_front();
    // Quantifiers like `(ab)*` bind to the whole group.
    Ok(read_quantif(tokens, o))
}
/// Folds any run of postfix quantifiers (`+`, `*`, `?`) following an
/// operand into nested `Repeat` nodes; stops at the first non-quantifier.
pub(crate) fn read_quantif(
    tokens: &mut VecDeque<(Token, &str)>,
    mut o: Operations<char>,
) -> Operations<char> {
    while let Some(token) = peak(tokens) {
        o = match token {
            // one or more
            Plus => Operations::Repeat(Box::new(o), 1, None),
            // zero or more (Kleene star)
            Kleene => Operations::Repeat(Box::new(o), 0, None),
            // zero or one
            Question => Operations::Repeat(Box::new(o), 0, Some(1)),
            // anything else ends the quantifier run without consuming it
            _ => break,
        };
        tokens.pop_front();
    }
    o
}
/// Parses a single atom (`.`, `𝜀`, or a literal character) and any postfix
/// quantifiers attached to it.
pub(crate) fn read_letter(
    tokens: &mut VecDeque<(Token, &str)>,
) -> Result<Operations<char>, String> {
    let op = match peak(tokens) {
        Some(Dot) => Operations::Dot,
        Some(Epsilon) => Operations::Epsilon,
        // Recover the literal character from the lexer's source slice.
        Some(Letter) => Operations::Letter(tokens[0].1.chars().next().unwrap()),
        // Covers both a wrong token and an exhausted queue.
        _ => return Err("Expected letter".to_string()),
    };
    tokens.pop_front();
    Ok(read_quantif(tokens, op))
}
/// Parses a maximal run of juxtaposed operands (atoms and groups) into a
/// concatenation; a singleton collapses to its only element.
pub(crate) fn read_concat(
    tokens: &mut VecDeque<(Token, &str)>,
) -> Result<Operations<char>, String> {
    let mut c = VecDeque::new();
    while let Some(x) = peak(tokens) {
        if x == Dot || x == Epsilon || x == Letter {
            c.push_back(read_letter(tokens)?);
        } else if x == Lpar {
            c.push_back(read_paren(tokens)?);
        } else if x == Kleene || x == Plus || x == Question {
            // A quantifier with no preceding operand is a syntax error.
            return Err(format!(
                "Unexpected {}",
                tokens[0].1.chars().next().unwrap()
            ));
        } else if x == Rpar || x == Union || x == End {
            // Tokens that terminate a concatenation; left for the caller.
            break;
        } else {
            // All Token variants are covered above (Error never reaches here).
            unreachable!()
        }
    }
    if c.len() == 1 {
        Ok(c.pop_front().unwrap())
    } else {
        Ok(Operations::Concat(c))
    }
}
| true
|
9086befef163a71a3230ebab3e3ef6a84bf2c7b0
|
Rust
|
MadPsyentist/AdventOfCode2020
|
/src/day2.rs
|
UTF-8
| 2,463
| 3.375
| 3
|
[] |
no_license
|
/// Runs both parts of the day-2 puzzle on the raw input text.
pub fn day(input: &str) {
    part1(input);
    part2(input);
}
/// Part 1: count lines whose password contains the policy character
/// between min and max times (inclusive).
pub fn part1(input: &str) {
    println!("Part 1: {} passwords meet their policy",
             input.lines().filter(|i| PasswordMeetsPolicy(PasswordAndPolicyFromLine(i))).count());
}
/// Part 2: count lines where exactly one of the two 1-based positions
/// holds the policy character.
pub fn part2(input: &str) {
    println!("part 2: {} passwords meet their policy",
             input.lines().map(|l| PasswordAndPolicyFromLine(l))
                 .filter(|pap| PasswordMeetsPolicy_Part2(pap)).count());
}
// One parsed input line, e.g. "1-3 a: abcde".
// (Part 2 reinterprets min/max as 1-based character positions.)
struct PasswordAndPolicy {
    Min_Character_Count: u32, // number before the '-'
    Max_Character_Count: u32, // number after the '-'
    Character: char,          // policy character (before the ':')
    Password: String,         // password text after the ": "
}
/// Parses one "min-max char: password" input line into its struct form.
fn PasswordAndPolicyFromLine(line: &str) -> PasswordAndPolicy {
    PasswordAndPolicy {
        Min_Character_Count: GetMinCharacterCountFromLine(line),
        Max_Character_Count: GetMaxCharacterCountFromLine(line),
        Character: GetPolicyCharacterFromLine(line),
        Password: GetPasswordFromLine(line),
    }
}
/// Extracts the number before the '-' ("1-3 a: abcde" -> 1).
fn GetMinCharacterCountFromLine(line: &str) -> u32 {
    let min_text = line.split('-').next().unwrap();
    min_text.parse().unwrap()
}
/// Extracts the number between the '-' and the space ("1-3 a: abcde" -> 3).
fn GetMaxCharacterCountFromLine(line: &str) -> u32 {
    let after_dash = line.split('-').nth(1).unwrap();
    let max_text = after_dash.split(' ').next().unwrap();
    max_text.parse().unwrap()
}
/// Extracts the policy character ("1-3 a: abcde" -> 'a').
fn GetPolicyCharacterFromLine(line: &str) -> char {
    let policy_token = line.split(' ').nth(1).unwrap();
    policy_token.chars().next().unwrap()
}
/// Extracts the password text ("1-3 a: abcde" -> "abcde").
fn GetPasswordFromLine(line: &str) -> String {
    let mut parts = line.split(' ');
    let password = parts.nth(2).unwrap();
    password.to_string()
}
/// Part-1 policy: the policy character must occur between min and max
/// times (inclusive) in the password.
fn PasswordMeetsPolicy(passAndpPol: PasswordAndPolicy) -> bool {
    // Count occurrences directly. The previous implementation collected the
    // matched substrings into a String and took its byte length, which
    // over-counts multi-byte (non-ASCII) policy characters.
    let count = passAndpPol.Password.matches(passAndpPol.Character).count() as u32;
    count >= passAndpPol.Min_Character_Count && count <= passAndpPol.Max_Character_Count
}
/// Returns true iff the 1-based `position` of `password` holds `character`.
/// Positions past the end of the password never match.
fn CheckCharacterInPosition(character: char, position: u32, password: &str) -> bool {
    // Guard the 1-based -> 0-based conversion: the previous `position - 1`
    // underflowed for position 0 (panic in debug builds). Comparing against
    // Some(character) also removes the '$' sentinel, which falsely matched
    // a '$' policy character at out-of-range positions.
    match position.checked_sub(1) {
        Some(index) => password.chars().nth(index as usize) == Some(character),
        None => false,
    }
}
/// Part-2 policy: exactly one of the two 1-based positions (min and max
/// reinterpreted as positions) must contain the policy character.
fn PasswordMeetsPolicy_Part2(PassPol: &PasswordAndPolicy) -> bool {
    // (a || b) && !(a && b) is exclusive-or; `^` states the intent directly
    // and evaluates each position check exactly once.
    CheckCharacterInPosition(PassPol.Character, PassPol.Min_Character_Count, &PassPol.Password)
        ^ CheckCharacterInPosition(PassPol.Character, PassPol.Max_Character_Count, &PassPol.Password)
}
| true
|
faf0b9635e1725ca2bfe554243ec9634140d1b50
|
Rust
|
Stannislav/Advent-of-Code
|
/2020/src/bin/16.rs
|
UTF-8
| 6,966
| 3.5
| 4
|
[
"MIT"
] |
permissive
|
//! Solutions for day 16.
extern crate regex;
use regex::Regex;
use std::collections::HashMap;
use std::fs;
/// A rule specifies two ranges: `(i, j, k, l)` => `i..=j` and `k..=l`.
type Rule = (u32, u32, u32, u32);
#[doc(hidden)]
fn main() {
    // Parse the 3 blocks of the input: rules for ticket fields, my ticket, nearby tickets
    let data: Vec<String> = fs::read_to_string("input/16.txt")
        .expect("Can't read input")
        .trim()
        .split("\n\n")
        .map(|s| s.to_string())
        .collect();
    // Parse block 1: rules for ticket fields
    // Each line looks like "departure location: 31-538 or 546-960".
    let re = Regex::new(r"(?P<name>[a-z\s]+): (?P<s1>\d+)-(?P<e1>\d+) or (?P<s2>\d+)-(?P<e2>\d+)")
        .unwrap();
    let rules: HashMap<String, Rule> = data[0]
        .lines()
        .map(|line| {
            let caps = re.captures(line).unwrap();
            let name = caps["name"].to_string();
            let s1: u32 = caps["s1"].parse().unwrap();
            let e1: u32 = caps["e1"].parse().unwrap();
            let s2: u32 = caps["s2"].parse().unwrap();
            let e2: u32 = caps["e2"].parse().unwrap();
            (name, (s1, e1, s2, e2))
        })
        .collect();
    // Parse block 2: my ticket
    // (skip the "your ticket:" header line)
    let my_ticket: Vec<u32> = data[1]
        .lines()
        .nth(1)
        .unwrap()
        .split(',')
        .map(|x| x.parse().unwrap())
        .collect();
    // Parse block 3: nearby tickets
    let other_tickets: Vec<Vec<u32>> = data[2]
        .lines()
        .skip(1)
        .map(|line| line.split(',').map(|x| x.parse().unwrap()).collect())
        .collect();
    // Part 1
    // Sum of all values that match no rule at all (the "error rate").
    let part_1: u32 = other_tickets
        .iter()
        .flat_map(|ticket| ticket.iter().filter(|&&value| !match_any(&value, &rules)))
        .sum();
    println!("Part 1: {}", part_1);
    // Discard invalid tickets and add my ticket
    // (my own ticket participates in the field-position deduction below)
    let mut other_tickets: Vec<_> = other_tickets
        .into_iter()
        .filter(|ticket| ticket.iter().all(|value| match_any(&value, &rules)))
        .collect();
    other_tickets.push(my_ticket.clone());
    // Part 2
    // Product of my ticket's values at all "departure*" field positions.
    let departure_ids = get_departure_ids(&rules, &other_tickets);
    let part_2 = departure_ids
        .iter()
        .fold(1, |acc, &idx| acc * my_ticket[idx] as u64);
    println!("Part 2: {}", part_2);
}
/// Check if a value complies with a rule.
///
/// The check is done by verifying if the value is contained
/// in either of the two intervals specified by the rule.
///
/// # Arguments
/// * `value` -- The value to verify.
/// * `(s1, s1, s2, e2)` -- The unpacked bounds of the rule.
///
/// # Return
/// * `bool` -- The result of the check.
fn check_rule(value: &u32, &(s1, e1, s2, e2): &Rule) -> bool {
    // Destructure the bounds by value (u32 is Copy) so the ranges hold plain
    // u32s; the previous version built RangeInclusive<&u32> and passed a
    // double reference. `||` also short-circuits, unlike the old `|`.
    (s1..=e1).contains(value) || (s2..=e2).contains(value)
}
/// Check if a value complies with any of the rules.
///
/// # Arguments
/// * `value` -- The value to verify.
/// * `rules` -- A map with rules as values.
///
/// # Return
/// * `bool` -- The result of the check.
fn match_any(value: &u32, rules: &HashMap<String, Rule>) -> bool {
    // Rule names are irrelevant here; only the bounds matter.
    rules.iter().any(|(_name, rule)| check_rule(value, rule))
}
/// Determine indices of departure fields.
///
/// This solves part 2 by matching all tickets against
/// the rules that are provided.
///
/// The strategy is to build a cross-table of ticket fields vs. their
/// possible positions. We start by assuming that any field can
/// correspond to any position, so the cross-table is initialized with
/// values `true`. Then we iterate through all tickets and all positions
/// and fill the cross-table. A field can only be in a given position if
/// the values of all tickets at the given position comply with the rule for
/// the given field.
///
/// Once the cross-table is filled not all fields will be assigned to a
/// unique column. We pick those fields for which the assignment is
/// unique - the corresponding positions cannot be taken by other fields.
/// So we update the other fields to include this restriction. This will
/// uniquely determine the position of more fields, and so forth. We
/// iterate this procedure until all positions are determined.
///
/// Once all positions are determined, the problem statement asks to pick
/// only the positions corresponding to fields with names starting with
/// "departure".
///
/// # Arguments
/// * `rules` -- All rules that apply to the ticket fields.
/// * `tickets` -- A sequence of tickets that will be matched against the rules.
///
/// # Return
/// * `Vec<usize>` -- The positions of all fields that start with "departure".
fn get_departure_ids(rules: &HashMap<String, Rule>, tickets: &[Vec<u32>]) -> Vec<usize> {
    let n_fields = rules.len();
    // Initialise cross-table for all ticket fields vs. their possible positions
    let mut cross_table: HashMap<String, Vec<bool>> = HashMap::new();
    for key in rules.keys() {
        cross_table.insert(key.clone(), vec![true; n_fields]);
    }
    // Build ticket field columns = transpose of the vector of tickets
    // This will help filling the cross-table, as we want to fix a position
    // and then iterate through all ticket values at that position.
    let columns: Vec<Vec<u32>> = (0..n_fields)
        .map(|i| tickets.iter().map(|ticket| ticket[i]).collect())
        .collect();
    // Fill cross-table
    // (field `key` can sit at position `i` iff every ticket's i-th value
    // satisfies `key`'s rule)
    for (i, column) in columns.iter().enumerate() {
        for (key, rule) in rules.iter() {
            cross_table.get_mut(key).unwrap()[i] =
                column.iter().all(|value| check_rule(value, rule));
        }
    }
    // Iteratively assign positions that are uniquely determined, i.e. only have one true entry.
    // The map `field_positions` saves the fields for which the position has been determined.
    // NOTE(review): this assumes the input admits a unique assignment (true
    // for AoC inputs); an ambiguous input would loop forever.
    let mut field_positions: HashMap<String, usize> = HashMap::new();
    let mut finished = false;
    while !finished {
        finished = true;
        for field in rules.keys() {
            // Does this field have only one single position that it could correspond to?
            if cross_table[field].iter().filter(|&&x| x).count() == 1 {
                // Get the index of the single `true` entry
                let idx = cross_table[field].iter().position(|&x| x).unwrap();
                // Save the found position for this field
                field_positions.insert(field.clone(), idx);
                // All other fields cannot be in this position any more
                for other_field in rules.keys().filter(|&key| key != field) {
                    cross_table.get_mut(other_field).unwrap()[idx] = false;
                }
            } else {
                // We found at least one field that was not uniquely determined,
                // so we're not finished yet.
                finished = false;
            }
        }
    }
    // Extract the positions of all fields that start with "departure"
    field_positions
        .iter()
        .filter(|&(key, _idx)| key.starts_with("departure"))
        .map(|(_key, &idx)| idx)
        .collect()
}
| true
|
e4cf19142fe85d850feab9c70fdb0198831fb66c
|
Rust
|
DevHyperCoder/alpc
|
/tests/test_numeric.rs
|
UTF-8
| 122
| 2.53125
| 3
|
[] |
no_license
|
use alpc::parse;
// Smoke test: each digit expands to its word form with a trailing space.
// NOTE(review): "TREE" (not "THREE") is what the library currently produces;
// confirm whether that spelling is intentional.
#[test]
fn number_to_str() {
    assert_eq!(vec!("ONE ", "TWO ", "TREE "), parse("123".to_string()));
}
| true
|
1814eb801c2d6f68c6c36a225db0d3861b203edd
|
Rust
|
m-kann0/atcoder-rust
|
/examples/agc036_a.rs
|
UTF-8
| 1,980
| 3.671875
| 4
|
[] |
no_license
|
use std::io::Read;
// Reads the whole of stdin, solves the case, and prints the answer.
fn main() {
    let mut buf = String::new();
    std::io::stdin().read_to_string(&mut buf).unwrap();
    let answer = solve(&buf);
    println!("{}", answer);
}
/// Builds a lattice triangle whose doubled area equals `s` (AGC036 A),
/// returning the six coordinates as "x1 y1 x2 y2 x3 y3".
///
/// Strategy: take the smallest `w` with `w * w >= s` (integer ceil-sqrt via
/// binary search) and start from the triangle (w, 0), (0, 0), (w, h), whose
/// doubled area is `w * h`. When that overshoots `s`, shift one vertex left
/// and raise another to shave off exactly the difference.
fn solve(input: &str) -> String {
    let mut iterator = input.split_whitespace();
    let s: usize = iterator.next().unwrap().parse().unwrap();
    // Binary search invariant: ok*ok >= s, ng*ng < s.
    let mut ok: isize = 1_000_000_000;
    let mut ng: isize = 0;
    while (ok - ng).abs() > 1 {
        let mid = (ok + ng) / 2;
        if mid * mid >= s as isize {
            ok = mid;
        } else {
            ng = mid;
        }
    }
    let w = ok as usize;
    // Shrink the height by one row when the smaller rectangle still covers s.
    let h = if w * (w - 1) >= s { w - 1 } else { w };
    let mut x1 = w;
    let y1 = 0usize;
    let x2 = 0usize;
    let mut y2 = 0usize;
    let (x3, y3) = (w, h);
    // Doubled area of ((x1,y1), (x2,y2), (x3,y3)) as currently placed.
    let mut current = w * h;
    if current != s {
        // Moving the first vertex left one column drops the doubled area by
        // h; raising the second vertex adds back exactly s - current.
        x1 -= 1;
        current -= h;
        y2 = s - current;
    }
    format!("{} {} {} {} {} {}", x1, y1, x2, y2, x3, y3)
}
#[test]
fn test() {
let cases: Vec<(&str, &str)> = vec![
(
r"3",
"1 0 2 2 0 1"
),
(
r"100",
"0 0 10 0 0 10"
),
(
r"311114770564041497",
"314159265 358979323 846264338 327950288 419716939 937510582"
),
(
r"1000000000000000000",
"1000000000 0 0 0 1000000000 1000000000"
),
];
let mut all_ok = true;
for (i, case) in cases.iter().enumerate() {
print!("case {} : ", i);
let expected = case.1;
let actual = solve(case.0);
if expected == actual {
println!("OK");
} else {
println!("NG");
println!(" Expected: {}", expected);
println!(" Actual : {}", actual);
all_ok = false;
}
}
assert_eq!(all_ok, true);
}
| true
|
3bce8bef7a1ac96076202b8585199253379d4261
|
Rust
|
IThawk/rust-project
|
/rust-master/src/test/run-pass/deriving/deriving-cmp-generic-struct.rs
|
UTF-8
| 872
| 3.640625
| 4
|
[
"MIT",
"LicenseRef-scancode-other-permissive",
"Apache-2.0",
"BSD-3-Clause",
"BSD-2-Clause",
"NCSA"
] |
permissive
|
// run-pass
// A minimal generic struct used to exercise the derived comparison traits;
// derived ordering is lexicographic over fields in declaration order (x, y).
#[derive(PartialEq, Eq, PartialOrd, Ord)]
struct S<T> {
    x: T,
    y: T
}
pub fn main() {
    // Two ordered values: a < b because the y fields compare 1 < 2.
    let a = S {x: 1, y: 1};
    let b = S {x: 1, y: 2};
    // Index order matches value order, so expected comparison results can be
    // derived from the indices alone (exercises both PartialOrd and Ord).
    let values = [a, b];
    for (i, left) in values.iter().enumerate() {
        for (j, right) in values.iter().enumerate() {
            let expected_ord = i.cmp(&j);
            // PartialEq
            assert_eq!(*left == *right, i == j);
            assert_eq!(*left != *right, i != j);
            // PartialOrd
            assert_eq!(*left < *right, i < j);
            assert_eq!(*left > *right, i > j);
            assert_eq!(*left <= *right, i <= j);
            assert_eq!(*left >= *right, i >= j);
            // Ord
            assert_eq!(left.cmp(right), expected_ord);
        }
    }
}
| true
|
09156dc43594d225b11ff2077888e4d93c8963cf
|
Rust
|
flip1995/comprakt
|
/compiler-lib/src/ast.rs
|
UTF-8
| 6,236
| 3.203125
| 3
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
use crate::{asciifile::Spanned, strtab::Symbol};
use strum_macros::EnumDiscriminants;
/// The top-level parse result: either an empty input or a whole program.
#[strum_discriminants(derive(Display))]
#[derive(EnumDiscriminants, Debug, PartialEq, Eq)]
pub enum AST<'t> {
    Empty,
    Program(Spanned<'t, Program<'t>>),
}
/// This is the top-level AST node. It stores all class declarations of the
/// MiniJava program.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Program<'t> {
    pub classes: Vec<Spanned<'t, ClassDeclaration<'t>>>,
}
/// This AST node stores the Class declaration, which consists of a name and
/// the members of the class.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct ClassDeclaration<'t> {
    pub name: Symbol,
    pub members: Vec<Spanned<'t, ClassMember<'t>>>,
}
/// This AST node describes a class member. Variants of class members are
/// defined in `ClassMemberKind`. Every class member has a name.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct ClassMember<'t> {
    pub kind: ClassMemberKind<'t>,
    pub name: Symbol,
}
/// The formal parameters of a method declaration.
pub type ParameterList<'t> = Vec<Spanned<'t, Parameter<'t>>>;
/// A class member is either one of
/// * `Field(type)`: a declaration of a field of a class
/// * `Method(type, params, body)`: a method of a class
/// * `MainMethod(param, body)`: a main method, which is a special method that
/// is only allowed once in a MiniJava Program. The `param` is the name of a
/// symbol that must not be used in the body.
#[strum_discriminants(derive(Display))]
#[derive(EnumDiscriminants, Debug, PartialEq, Eq, Clone)]
pub enum ClassMemberKind<'t> {
    Field(Spanned<'t, Type>),
    Method(
        Spanned<'t, Type>,
        Spanned<'t, ParameterList<'t>>,
        Spanned<'t, Block<'t>>,
    ),
    MainMethod(Symbol, Spanned<'t, Block<'t>>),
}
/// This AST node represents a method parameter. A parameter consists of a
/// `Type` and a name.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Parameter<'t> {
    pub ty: Spanned<'t, Type>,
    pub name: Symbol,
}
/// A `Type` is basically a `BasicType`. Optionally it can be an
/// (n-dimensional) array type.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Type {
    pub basic: BasicType,
    /// Depth of the array type (number of `[]`) i.e. this means `self.ty
    /// []^(self.array)`
    pub array_depth: u64,
}
/// A `BasicType` is either one of
/// * `Int`: a 32-bit integer
/// * `Boolean`: a boolean
/// * `Void`: a void type
/// * `Custom`: a custom defined type
#[strum_discriminants(derive(Display))]
#[derive(EnumDiscriminants, Debug, PartialEq, Eq, Clone)]
pub enum BasicType {
    Int,
    Boolean,
    Void,
    Custom(Symbol),
}
/// A `Block` in the AST is basically just a vector of statements.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Block<'t> {
    pub statements: Vec<Spanned<'t, Stmt<'t>>>,
}
/// A statement can have one of the kinds:
/// * `Block`: A block defined in `Block`
/// * `Empty`: An empty statement: `;`
/// * `If`: a if expression consisting of the condition, its body and
/// optionally an else statement
/// * `Expression`: an expression defined in `Expr`
/// * `While`: a while loop consisting of the condition and its body
/// * `Return`: a return which can optionally return an expression
/// * `LocalVariableDeclaration`: a declaration and optional initialization of
/// a local variable
#[strum_discriminants(derive(Display))]
#[derive(EnumDiscriminants, Debug, PartialEq, Eq, Clone)]
pub enum Stmt<'t> {
    Block(Spanned<'t, Block<'t>>),
    Empty,
    If(
        Box<Spanned<'t, Expr<'t>>>,
        Box<Spanned<'t, Stmt<'t>>>,
        Option<Box<Spanned<'t, Stmt<'t>>>>,
    ),
    While(Box<Spanned<'t, Expr<'t>>>, Box<Spanned<'t, Stmt<'t>>>),
    Expression(Box<Spanned<'t, Expr<'t>>>),
    Return(Option<Box<Spanned<'t, Expr<'t>>>>),
    LocalVariableDeclaration(
        Spanned<'t, Type>,
        Symbol,
        Option<Box<Spanned<'t, Expr<'t>>>>,
    ),
}
/// An expression is either one of
/// * `Assignment`: an assignment expression
/// * `Binary`: one of the binary operations defined in `BinaryOp`
/// * `Unary`: one of the unary operations defined in `UnaryOp`
/// * `MethodInvocation`: a method invocation on a primary expression:
/// `foo.method()`
/// * `FieldAccess`: a field access on a primary expression:
/// `foo.bar`
/// * `ArrayAccess`: an array access on a primary expression:
/// `foo[42]`
/// The primary expression from the original grammar are also part of this,
/// since the distinction is only required for correct postfix-op parsing. These
/// are:
/// * `Null`: the `null` keyword
/// * `Boolean`: a boolean literal
/// * `Int`: an integer literal
/// * `Var`: use of a variable
/// * `MethodInvocation`: a method invocation
/// * `This`: the `this` keyword
/// * `NewObject`: generating a new object, e.g. `new Foo()`
/// * `NewArray`: generating a new array, e.g. `new int[]`
#[strum_discriminants(derive(Display))]
#[derive(EnumDiscriminants, Debug, PartialEq, Eq, Clone)]
pub enum Expr<'t> {
    Binary(
        BinaryOp,
        Box<Spanned<'t, Expr<'t>>>,
        Box<Spanned<'t, Expr<'t>>>,
    ),
    Unary(UnaryOp, Box<Spanned<'t, Expr<'t>>>),
    // Postfix ops
    MethodInvocation(
        Box<Spanned<'t, Expr<'t>>>,
        Symbol,
        Spanned<'t, ArgumentList<'t>>,
    ),
    FieldAccess(Box<Spanned<'t, Expr<'t>>>, Symbol),
    ArrayAccess(Box<Spanned<'t, Expr<'t>>>, Box<Spanned<'t, Expr<'t>>>),
    // The old primary expressions
    Null,
    Boolean(bool),
    Int(Symbol), // TODO Should be String?
    Var(Symbol),
    ThisMethodInvocation(Symbol, Spanned<'t, ArgumentList<'t>>),
    This,
    NewObject(Symbol),
    NewArray(BasicType, Box<Spanned<'t, Expr<'t>>>, u64),
}
/// Binary operations like comparisons (`==`, `!=`, `<=`, ...), logical
/// operations (`||`, `&&`) or algebraic operation (`+`, `-`, `*`, `/`, `%`).
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum BinaryOp {
    Assign,
    Equals,
    NotEquals,
    LessThan,
    GreaterThan,
    LessEquals,
    GreaterEquals,
    LogicalOr,
    LogicalAnd,
    Add,
    Sub,
    Mul,
    Div,
    Mod,
}
/// One of the unary operations `!` and `-`
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum UnaryOp {
    Not,
    Neg,
}
/// The actual arguments of a method invocation.
pub type ArgumentList<'t> = Vec<Spanned<'t, Expr<'t>>>;
| true
|
5d0512f459f5fb7c8c24c6882fa9490dc86eaefe
|
Rust
|
iasoon/KeyboardOptimizer-LayoutGen-3000
|
/src/eval/ngram_eval/utils.rs
|
UTF-8
| 2,521
| 3.171875
| 3
|
[] |
no_license
|
use cat::*;
use eval::ngram_eval::types::*;
/// A cursor over fixed-length subsequences of a given sequence.
pub struct SubSeqs<'t, T: 't> {
    // The underlying sequence being enumerated.
    seq: &'t [T],
    // Strictly increasing indices into `seq`; its length is the subsequence
    // length requested in `new`.
    idxs: Vec<usize>,
}
impl<'t, T: 't> SubSeqs<'t, T> {
    // NOTE(review): the initial all-zero state is not a valid index
    // combination for len > 1; callers appear expected to call `next()`
    // before the first `seq()` -- confirm against call sites.
    pub fn new(seq: &'t [T], len: usize) -> Self {
        SubSeqs {
            seq: seq,
            idxs: vec![0; len],
        }
    }
    /// Returns the current sequence.
    pub fn seq<'a>(&'a self) -> impl Iterator<Item = &'a T> + 'a {
        self.idxs.iter().map(move |&idx| &self.seq[idx])
    }
    /// Move to next sequence
    /// Returns whether the subsequences are depleted.
    /// (`true` = a valid combination is available, `false` = exhausted.)
    pub fn next(&mut self) -> bool {
        self.increment();
        return self.pos_valid(0);
    }
    // Odometer-style advance: walking right to left, bump the first index
    // that can still grow, then reset every index to its right so the run
    // stays strictly increasing.
    fn increment(&mut self) {
        let mut pos = self.idxs.len();
        loop {
            pos -= 1;
            self.idxs[pos] += 1;
            if pos == 0 || self.pos_valid(pos) {
                // Positions right of `pos` become idxs[pos]+1, idxs[pos]+2, ...
                for i in 1..(self.idxs.len() - pos) {
                    self.idxs[pos + i] = self.idxs[pos] + i;
                }
                return;
            }
        }
    }
    // Smallest index position `pos` may hold (indices strictly increase).
    fn min_value(&self, pos: usize) -> usize {
        pos
    }
    // Largest index position `pos` may hold while leaving room for the
    // remaining positions after it.
    fn max_value(&self, pos: usize) -> usize {
        self.seq.len() - self.idxs.len() + pos
    }
    fn pos_valid(&self, pos: usize) -> bool {
        let val = self.idxs[pos];
        return val <= self.max_value(pos) && val >= self.min_value(pos);
    }
}
/// Builds a subset of n-gram / freq pairs
pub struct NGramsSubsetBuilder<T> {
    // Selected n-gram ids, kept sorted and duplicate-free by `push`.
    nums: Vec<Num<NGram<T>>>,
    // Lowest id `push` will still accept (one past the last pushed id).
    next_allowed: usize,
}
impl<T> NGramsSubsetBuilder<T> {
    /// Creates an empty builder that accepts any first id.
    pub fn new() -> Self {
        NGramsSubsetBuilder {
            nums: Vec::new(),
            next_allowed: 0,
        }
    }
    /// Records an n-gram id; ids must arrive in non-decreasing order and
    /// duplicates are silently dropped.
    pub fn push(&mut self, num: Num<NGram<T>>) {
        // Only strictly newer ids than the last accepted one get stored.
        if num.as_usize() >= self.next_allowed {
            self.next_allowed = num.as_usize() + 1;
            self.nums.push(num);
        }
    }
    /// Materializes the selected subset of `ngrams` into a new `NGrams`.
    pub fn build(&self, ngrams: &NGrams<T>) -> NGrams<T> {
        let seq_len = ngrams.elements.seq_len();
        let mut elems = Vec::with_capacity(self.nums.len() * seq_len);
        let mut freqs = Vec::with_capacity(self.nums.len());
        for &num in &self.nums {
            elems.extend(ngrams.elements[num].iter().cloned());
            freqs.push(ngrams.freqs[num]);
        }
        NGrams {
            elements: SeqTable::from_elem_vec(elems, seq_len),
            freqs: Table::from_vec(freqs),
        }
    }
}
| true
|
fd33a0edf59bb39a39504896255b088bbd0d63b2
|
Rust
|
mkpankov/parse-rust
|
/src/main.rs
|
UTF-8
| 1,441
| 2.703125
| 3
|
[] |
no_license
|
extern crate regex;
use std::collections::BTreeMap;
use std::io::BufRead;
fn main() {
    // Matches `"GET [scheme://host/]path.mp3 HTTP/1.x" NNN SIZE` in access-log
    // lines, capturing the mp3 path and the response size in bytes.
    let re = regex::Regex::new(concat!(
        r#""GET (?:https?://.+?/)?(.+?\.mp3)"#,
        r#" HTTP/1\.[01]" \d{3} (\d+)"#)).unwrap();
    let stdin = std::io::stdin();
    let locked_stdin = stdin.lock();
    // filename -> (total bytes served, largest single response).
    let mut stat: BTreeMap<String, (isize, isize)> = BTreeMap::new();
    for maybe_line in locked_stdin.lines() {
        let line = maybe_line.unwrap();
        let captures: Vec<_> = re.captures_iter(&line).collect();
        // Only count lines that match the pattern exactly once.
        if captures.len() == 1 {
            let fname = captures[0].at(1).unwrap();
            let size: isize =
                std::str::FromStr::from_str(
                    captures[0].at(2).unwrap())
                .unwrap();
            // Single lookup via the entry API instead of the previous
            // get-then-insert double lookup; (0, 0) accumulates to
            // (size, size) on first sight, matching the old behavior.
            let entry = stat.entry(fname.to_owned()).or_insert((0, 0));
            entry.0 += size;
            entry.1 = std::cmp::max(entry.1, size);
        }
    }
    for (key, (sum, max)) in stat {
        // Avoid division by zero when every matching response was 0 bytes.
        let max = if max == 0 { 1 } else { max };
        // Estimate download count as total bytes over the largest response.
        let downloads = sum as f64 / max as f64;
        println!("Key: {} downloads: {} (max size: {})",
                 key, downloads, max);
    }
}
| true
|
f8d4b2a2429d82e03850d10d77723104dcf4cf21
|
Rust
|
davisvansant/otel_spec
|
/sdk/src/metrics/accumulator/aggregation.rs
|
UTF-8
| 1,042
| 2.75
| 3
|
[] |
no_license
|
use super::Aggregator;
/// The result produced by an aggregator for a single collection cycle.
pub struct Aggregation {
    /// The finished aggregator state (`Aggregator` is defined in the parent module).
    pub result: Aggregator,
}
/// The aggregation strategies an accumulator may apply to measurements.
#[derive(Debug, PartialEq)]
pub enum AggregationKind {
    // SomeAggregationKind,
    Sum,
    LastValue,
    Histogram,
    MinMaxSumCount,
    Exact,
    Sketch,
}
#[cfg(test)]
mod tests {
    use super::*;
    // Sanity-check that every AggregationKind variant compares equal to
    // itself (exercises the derived PartialEq implementation).
    #[test]
    fn aggregation_kind() {
        assert_eq!(AggregationKind::Sum, AggregationKind::Sum);
        assert_eq!(AggregationKind::LastValue, AggregationKind::LastValue);
        assert_eq!(AggregationKind::Histogram, AggregationKind::Histogram);
        assert_eq!(AggregationKind::MinMaxSumCount, AggregationKind::MinMaxSumCount);
        assert_eq!(AggregationKind::Exact, AggregationKind::Exact);
        assert_eq!(AggregationKind::Sketch, AggregationKind::Sketch);
    }
}
| true
|
fb1764a75ef3f2cdfdf55973ae5adc934f58fd89
|
Rust
|
bovee/RosalindRust
|
/src/bin/rosalind_RNA.rs
|
UTF-8
| 330
| 2.90625
| 3
|
[] |
no_license
|
#![feature(io)]
use std::io::ReadExt;
use std::ascii::AsciiExt;
// Rosalind RNA: transcribe a DNA string from stdin to RNA on stdout by
// replacing every T with U (input is uppercased first).
// NOTE(review): this uses pre-1.0 nightly APIs (#![feature(io)],
// stdin().chars(), AsciiExt) and will not build on modern stable Rust.
fn main() {
    let stdin = std::io::stdin();
    for chr in stdin.chars() {
        let chr = chr.ok().unwrap().to_ascii_uppercase();
        // T -> U; every other character passes through unchanged.
        let rna_chr = match chr {
            'T' => 'U',
            _ => chr
        };
        print!("{}", rna_chr);
    };
}
| true
|
01bc26ac3d5a0b40d600fe7c13b4c9f16795cd5c
|
Rust
|
maxjoehnk/g13-rs
|
/src/device.rs
|
UTF-8
| 3,696
| 2.671875
| 3
|
[
"MIT"
] |
permissive
|
use std::time::Duration;
use rusb::{Context, Device, DeviceHandle, Direction, Recipient, request_type, RequestType};
use rusb::constants::*;
use crate::consts::*;
use crate::error::G13Error;
use crate::flags::{Keys, ModeLeds};
/// A handle to an opened Logitech G13 device.
pub struct G13 {
    /// Claimed USB handle for interface 0.
    handle: DeviceHandle<Context>,
}
impl G13 {
    // Opens the device, detaching any active kernel driver and claiming
    // interface 0 before sending an initial control transfer.
    // NOTE(review): the control transfer (request 9, value 1, empty payload)
    // looks like an HID initialisation handshake -- confirm against the
    // device protocol documentation.
    pub(crate) fn new(device: Device<Context>) -> Result<G13, G13Error> {
        let mut handle = device.open()?;
        if handle.kernel_driver_active(0)? {
            handle.detach_kernel_driver(0)?;
        }
        handle.claim_interface(0)?;
        handle.write_control(0, 9, 1, 0, &[], Duration::from_millis(1000))?;
        let device = G13 {
            handle
        };
        Ok(device)
    }
    /// Clear the LCD.
    pub fn clear_lcd(&mut self) -> Result<(), G13Error> {
        let buf = [0; G13_LCD_BUFFER_SIZE];
        self.write_lcd(&buf)
    }
    /// Write the given buffer to the lcd.
    ///
    /// The buffer should be sized exactly 960 bytes.
    pub fn write_lcd(&mut self, buffer: &[u8]) -> Result<(), G13Error> {
        if buffer.len() != G13_LCD_BUFFER_SIZE {
            return Err(G13Error::InvalidLcdBufferSize(buffer.len(), G13_LCD_BUFFER_SIZE));
        }
        // The device expects a 32-byte zeroed header whose first byte is 0x03
        // before the pixel data.
        let mut buf = vec![0; 32];
        buf.extend_from_slice(buffer);
        buf[0] = 0x03;
        self.handle.write_interrupt(LIBUSB_ENDPOINT_OUT | G13_LCD_ENDPOINT, &buf, Duration::from_millis(1000))?;
        Ok(())
    }
    /// Set the keyboard color to the given red, green and blue bytes.
    pub fn set_key_color(&mut self, (red, green, blue): (u8, u8, u8)) -> Result<(), G13Error> {
        let data = vec![5, red, green, blue, 0];
        let result = self.handle.write_control(request_type(Direction::Out, RequestType::Class, Recipient::Interface), 9, G13_SET_KEY_COLOR, G13_INDEX, &data, Duration::from_millis(1000))?;
        // A short write means the device did not accept the full 5-byte payload.
        if result != 5 {
            return Err(G13Error::ProblemSendingData(result));
        }
        Ok(())
    }
    /// Activate/Deactivate the M1, M2, M3 and MR leds corresponding to the `ModeLeds` flags.
    pub fn set_mode_leds(&mut self, leds: ModeLeds) -> Result<(), G13Error> {
        let data = vec![5, leds.bits(), 0, 0, 0];
        let result = self.handle.write_control(request_type(Direction::Out, RequestType::Class, Recipient::Interface), 9, G13_SET_MODE_LEDS, G13_INDEX, &data, Duration::from_millis(1000))?;
        if result != 5 {
            return Err(G13Error::ProblemSendingData(result));
        }
        Ok(())
    }
    /// Read input from the device.
    ///
    /// This will block until an input is received or until `timeout` is reached.
    pub fn read(&self, timeout: Duration) -> Result<Response, G13Error> {
        let mut data = [0; G13_REPORT_SIZE];
        self.handle.read_interrupt(LIBUSB_ENDPOINT_IN | G13_KEY_ENDPOINT, &mut data, timeout)?;
        // Assemble report bytes 3..=7 into a 40-bit little-endian key bitmask
        // (byte 3 is least significant).
        let mut value: u64 = data[7] as u64;
        value <<= 8;
        value += data[6] as u64;
        value <<= 8;
        value += data[5] as u64;
        value <<= 8;
        value += data[4] as u64;
        value <<= 8;
        value += data[3] as u64;
        log::trace!("{value:#010b}");
        let keys = Keys::from_bits_truncate(value);
        // Normalise the raw 0..=255 joystick axes to 0.0..=1.0.
        let x = data[1] as f32 / u8::MAX as f32;
        let y = data[2] as f32 / u8::MAX as f32;
        Ok(Response {
            keys,
            joystick: (x, y),
        })
    }
}
/// A single input report read from the device via [`G13::read`].
#[derive(Debug, Clone, Copy)]
pub struct Response {
    /// The pressed keys.
    pub keys: Keys,
    /// The (x, y) positions of the joystick.
    pub joystick: (f32, f32),
}
impl std::fmt::Debug for G13 {
    // The underlying USB handle is not Debug, so render a fixed placeholder.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("G13 {}")
    }
}
| true
|
bb9ef72b50d73e8336f6b4903435417190bfb3ef
|
Rust
|
samscott89/serde_qs
|
/tests/test_serialize.rs
|
UTF-8
| 6,569
| 3.265625
| 3
|
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
#[macro_use]
extern crate serde_derive;
extern crate serde_qs as qs;
// NOTE: field names double as query-string keys in the tests below, so
// renaming any field changes the serialized output.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
struct Address {
    city: String,
    street: String,
    postcode: String,
}
/// Top-level fixture covering scalars, a nested struct and a sequence.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
struct QueryParams {
    id: u8,
    name: String,
    phone: u32,
    address: Address,
    user_ids: Vec<u8>,
}
#[test]
fn serialize_struct() {
    // Nested structs serialize with bracketed keys (address[city]) and
    // sequences with numeric indices (user_ids[0]); spaces become `+`.
    let params = QueryParams {
        id: 42,
        name: "Acme".to_string(),
        phone: 12345,
        address: Address {
            city: "Carrot City".to_string(),
            street: "Special-Street* No. 11".to_string(),
            postcode: "12345".to_string(),
        },
        user_ids: vec![1, 2, 3, 4],
    };
    // Fixed mis-encoded `¶ms` (mojibake of "&para" + "ms") back to `&params`.
    assert_eq!(
        qs::to_string(&params).unwrap(),
        "\
         id=42&name=Acme&phone=12345&address[city]=Carrot+City&\
         address[street]=Special-Street*+No.+11&\
         address[postcode]=12345&user_ids[0]=1&user_ids[1]=2&\
         user_ids[2]=3&user_ids[3]=4"
    );
}
#[test]
fn serialize_option() {
    #[derive(Debug, Serialize, Deserialize, PartialEq)]
    struct Query {
        vec: Option<Vec<u8>>,
    }
    // A `None` field is omitted from the query string entirely.
    let params = "";
    let query = Query { vec: None };
    let rec_params = qs::to_string(&query).unwrap();
    assert_eq!(rec_params, params);
    let params = "vec[0]=1&vec[1]=2";
    let query = Query {
        vec: Some(vec![1, 2]),
    };
    let rec_params = qs::to_string(&query).unwrap();
    assert_eq!(rec_params, params);
}
#[test]
fn serialize_enum() {
    // Covers all four enum shapes: unit, newtype, struct and tuple variants.
    #[derive(Debug, Serialize, Deserialize, PartialEq)]
    #[serde(rename_all = "lowercase")]
    enum TestEnum {
        A,
        B(bool),
        C { x: u8, y: u8 },
        D(u8, u8),
    }
    #[derive(Debug, Serialize, Deserialize, PartialEq)]
    struct Query {
        e: TestEnum,
    }
    // Unit variant: serialized as a bare (lowercased) value.
    let params = "e=a";
    let query = Query { e: TestEnum::A };
    let rec_params = qs::to_string(&query).unwrap();
    assert_eq!(rec_params, params);
    // Newtype variant: nested under the variant name.
    let params = "e[b]=true";
    let query = Query {
        e: TestEnum::B(true),
    };
    let rec_params = qs::to_string(&query).unwrap();
    assert_eq!(rec_params, params);
    // Struct variant: one bracketed key per field.
    let params = "e[c][x]=2&e[c][y]=3";
    let query = Query {
        e: TestEnum::C { x: 2, y: 3 },
    };
    let rec_params = qs::to_string(&query).unwrap();
    assert_eq!(rec_params, params);
    // Tuple variant: fields indexed numerically.
    let params = "e[d][0]=128&e[d][1]=1";
    let query = Query {
        e: TestEnum::D(128, 1),
    };
    let rec_params = qs::to_string(&query).unwrap();
    assert_eq!(rec_params, params);
}
#[test]
fn serialize_flatten() {
    #[derive(Deserialize, Serialize, Debug, PartialEq)]
    struct Query {
        a: u8,
        // Flattened fields serialize at the top level, not under `common[...]`.
        #[serde(flatten)]
        common: CommonParams,
    }
    #[derive(Deserialize, Serialize, Debug, PartialEq)]
    struct CommonParams {
        limit: u64,
        offset: u64,
    }
    let params = "a=1&limit=100&offset=50";
    let query = Query {
        a: 1,
        common: CommonParams {
            limit: 100,
            offset: 50,
        },
    };
    let rec_params = qs::to_string(&query).unwrap();
    assert_eq!(rec_params, params);
}
#[test]
fn serialize_map_with_unit_enum_keys() {
    use std::collections::HashMap;
    #[derive(Serialize, Eq, PartialEq, Hash)]
    enum Operator {
        Lt,
        Gt,
    }
    #[derive(Serialize)]
    struct Filter {
        point: HashMap<Operator, u64>,
    }
    let mut map = HashMap::new();
    map.insert(Operator::Gt, 123);
    map.insert(Operator::Lt, 321);
    let test = Filter { point: map };
    let query = qs::to_string(&test).unwrap();
    // HashMap iteration order is unspecified, so accept either key order.
    assert!(query == "point[Lt]=321&point[Gt]=123" || query == "point[Gt]=123&point[Lt]=321");
}
#[test]
fn serialize_bytes() {
    struct Bytes(&'static [u8]);
    #[derive(Serialize)]
    struct Query {
        bytes: Bytes,
    }
    impl serde::Serialize for Bytes {
        fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
        where
            S: serde::Serializer,
        {
            serializer.serialize_bytes(self.0)
        }
    }
    // Raw bytes come out as a percent/plus-encoded string value.
    let bytes = Bytes(b"hello, world!");
    let s = qs::to_string(&Query { bytes }).unwrap();
    assert_eq!(s, "bytes=hello%2C+world%21");
}
#[test]
fn serialize_hashmap_keys() {
    // Issue: https://github.com/samscott89/serde_qs/issues/45
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    struct HashParams {
        attrs: std::collections::HashMap<String, String>,
    }
    let data = HashParams {
        attrs: vec![
            ("key 1!".to_owned(), "val 1".to_owned()),
            ("key 2!".to_owned(), "val 2".to_owned()),
        ]
        .into_iter()
        .collect(),
    };
    let s = qs::to_string(&data).unwrap();
    // Map keys themselves must be percent-encoded; iteration order is
    // unspecified, so accept both orders.
    assert!(
        s == "attrs[key+1%21]=val+1&attrs[key+2%21]=val+2"
            || s == "attrs[key+2%21]=val+2&attrs[key+1%21]=val+1"
    );
}
#[test]
fn test_serializer() {
    use serde::Serialize;
    #[derive(Serialize, Debug, Clone)]
    struct Query {
        a: Vec<u8>,
        b: &'static str,
    }
    // Drive the low-level Serializer API directly against a byte buffer.
    let mut writer = Vec::new();
    {
        let serializer = &mut qs::Serializer::new(&mut writer);
        let q = Query {
            a: vec![0, 1],
            b: "b",
        };
        q.serialize(serializer).unwrap();
    }
    assert_eq!(writer, b"a[0]=0&a[1]=1&b=b");
    // The same buffer can be reused for a second serializer after clearing.
    writer.clear();
    {
        let serializer = &mut qs::Serializer::new(&mut writer);
        let q = Query {
            a: vec![3, 2],
            b: "a",
        };
        q.serialize(serializer).unwrap();
    }
    assert_eq!(writer, b"a[0]=3&a[1]=2&b=a");
}
#[test]
fn test_serializer_unit() {
    use serde::Serialize;
    #[derive(Serialize)]
    struct A;
    #[derive(Serialize)]
    struct B {
        t: (),
    }
    // Unit values and unit structs serialize to nothing at all, while a
    // unit-typed *field* serializes to an empty value ("t=").
    let mut writer = Vec::new();
    {
        let serializer = &mut qs::Serializer::new(&mut writer);
        // allow this clippy lints cause I like how explicit the test is
        #[allow(clippy::let_unit_value)]
        let q = ();
        q.serialize(serializer).unwrap();
    }
    assert_eq!(writer, b"", "we are testing ()");
    writer.clear();
    {
        let serializer = &mut qs::Serializer::new(&mut writer);
        let q = A;
        q.serialize(serializer).unwrap();
    }
    assert_eq!(writer, b"", "we are testing A");
    writer.clear();
    {
        let serializer = &mut qs::Serializer::new(&mut writer);
        let q = B { t: () };
        q.serialize(serializer).unwrap();
    }
    assert_eq!(writer, b"t=", "we are testing B{{t: ()}}");
}
| true
|
364d315653557e90c0b2c814e3b15c642f7afe48
|
Rust
|
0nkery/rust-monkey
|
/src/parser.rs
|
UTF-8
| 33,777
| 3.625
| 4
|
[
"MIT"
] |
permissive
|
use super::lexer::Lexer;
use super::token::Token;
use super::token::TokenType;
use super::ast::Program;
use super::ast::Statement;
use super::ast::Expression;
/// Operator binding strengths for Pratt parsing, ordered weakest to
/// strongest (the derived PartialOrd follows declaration order).
#[derive(PartialEq, PartialOrd)]
enum Precedence {
    Lowest,
    Equals,
    LessGreater,
    Sum,
    Product,
    Prefix,
    Call,
    Index,
}
/// Recursive-descent / Pratt parser with a two-token lookahead window.
pub struct Parser<'a> {
    lexer: &'a mut Lexer,
    /// The token currently being parsed.
    cur_token: Token,
    /// One-token lookahead.
    peek_token: Token,
    /// Human-readable errors collected while parsing.
    errors: Vec<String>,
}
impl<'a> Parser<'a> {
pub fn new(lexer: &'a mut Lexer) -> Self {
let cur_token = lexer.next_token();
let peek_token = lexer.next_token();
Parser {
lexer: lexer,
cur_token: cur_token,
peek_token: peek_token,
errors: Vec::new(),
}
}
fn next_token(&mut self) {
self.cur_token = self.peek_token.clone();
self.peek_token = self.lexer.next_token();
}
fn parse_stmt(&mut self) -> Option<Statement> {
match self.cur_token.token_type {
TokenType::Let => self.parse_let_stmt(),
TokenType::Return => self.parse_return_stmt(),
_ => self.parse_expr_stmt(),
}
}
fn parse_let_stmt(&mut self) -> Option<Statement> {
let let_token = self.cur_token.clone();
if !self.expect_peek(TokenType::Ident) {
return None;
}
let name = Expression::Identifier {
token: self.cur_token.clone(),
value: self.cur_token.literal.clone(),
};
if !self.expect_peek(TokenType::Assign) {
return None;
}
self.next_token();
let value = self.parse_expr(Precedence::Lowest);
if value.is_none() {
return None;
}
if self.peek_token.token_type == TokenType::Semicolon {
self.next_token();
}
Some(Statement::Let {
token: let_token,
name: name,
value: value.unwrap(),
})
}
fn parse_return_stmt(&mut self) -> Option<Statement> {
let return_token = self.cur_token.clone();
self.next_token();
let return_value = self.parse_expr(Precedence::Lowest);
if self.peek_token.token_type == TokenType::Semicolon {
self.next_token();
}
Some(Statement::Return {
token: return_token,
value: return_value,
})
}
fn parse_expr_stmt(&mut self) -> Option<Statement> {
let expr_token = self.cur_token.clone();
let expr = self.parse_expr(Precedence::Lowest)
.expect("Unable to parse expression");
if self.peek_token.token_type == TokenType::Semicolon {
self.next_token();
}
Some(Statement::Expression {
token: expr_token,
expression: expr,
})
}
fn parse_block_stmt(&mut self) -> Option<Statement> {
let token = self.cur_token.clone();
let mut statements = Vec::new();
self.next_token();
while self.cur_token.token_type != TokenType::RightBrace {
let stmt = self.parse_stmt();
if stmt.is_some() {
statements.push(stmt.unwrap());
}
self.next_token();
}
Some(Statement::Block {
token: token,
statements: statements,
})
}
fn parse_expr(&mut self, precedence: Precedence) -> Option<Expression> {
let mut left = self.parse_prefix();
while self.peek_token.token_type != TokenType::Semicolon &&
precedence < self.peek_precedence() {
self.next_token();
let expr = self.parse_infix_expr(left.clone().unwrap());
if expr.is_none() {
return left;
}
left = expr;
}
left
}
fn parse_prefix(&mut self) -> Option<Expression> {
match self.cur_token.token_type {
TokenType::Ident => self.parse_identifier(),
TokenType::Int => self.parse_integer_literal(),
TokenType::String => self.parse_string(),
TokenType::Bang | TokenType::Minus => self.parse_prefix_expr(),
TokenType::True | TokenType::False => self.parse_boolean(),
TokenType::LeftParen => self.parse_grouped_expr(),
TokenType::If => self.parse_if_expr(),
TokenType::Function => self.parse_function_literal(),
TokenType::LeftBracket => self.parse_array(),
TokenType::LeftBrace => self.parse_hash_literal(),
ref tt @ _ => {
let err_msg = format!("No prefix parse fn for {:?} found.", tt);
self.errors.push(err_msg);
None
}
}
}
fn parse_identifier(&self) -> Option<Expression> {
Some(Expression::Identifier {
token: self.cur_token.clone(),
value: self.cur_token.literal.clone(),
})
}
fn parse_integer_literal(&mut self) -> Option<Expression> {
let literal_token = self.cur_token.clone();
let parse_result = self.cur_token.literal.parse();
match parse_result {
Ok(value) => {
Some(Expression::IntegerLiteral {
token: literal_token,
value: value,
})
}
Err(_) => {
let msg = format!("Could not parse {} as integer", literal_token.literal);
self.errors.push(msg);
None
}
}
}
fn parse_string(&self) -> Option<Expression> {
Some(Expression::String {
token: self.cur_token.clone(),
value: self.cur_token.literal.clone(),
})
}
fn parse_array(&mut self) -> Option<Expression> {
let token = self.cur_token.clone();
let elements = self.parse_expression_list(TokenType::RightBracket);
if elements.is_some() {
Some(Expression::Array {
token: token,
elements: elements.unwrap(),
})
} else {
None
}
}
fn parse_hash_literal(&mut self) -> Option<Expression> {
let token = self.cur_token.clone();
let mut pairs = Vec::new();
while self.peek_token.token_type != TokenType::RightBrace &&
self.peek_token.token_type != TokenType::EOF {
self.next_token();
let key = self.parse_expr(Precedence::Lowest);
if !self.expect_peek(TokenType::Colon) || key.is_none() {
return None;
}
self.next_token();
let value = self.parse_expr(Precedence::Lowest);
if value.is_none() {
return None;
}
pairs.push((key.unwrap(), value.unwrap()));
if self.peek_token.token_type != TokenType::RightBrace &&
!self.expect_peek(TokenType::Comma) {
return None;
}
}
if !self.expect_peek(TokenType::RightBrace) {
return None;
}
Some(Expression::Hash {
token: token,
pairs: pairs,
})
}
fn parse_boolean(&self) -> Option<Expression> {
Some(Expression::Boolean {
token: self.cur_token.clone(),
value: self.cur_token.token_type == TokenType::True,
})
}
fn parse_grouped_expr(&mut self) -> Option<Expression> {
self.next_token();
let expr = self.parse_expr(Precedence::Lowest);
if self.expect_peek(TokenType::RightParen) {
expr
} else {
None
}
}
fn parse_if_expr(&mut self) -> Option<Expression> {
let token = self.cur_token.clone();
if !self.expect_peek(TokenType::LeftParen) {
return None;
}
self.next_token();
let condition = self.parse_expr(Precedence::Lowest);
if condition.is_none() {
return None;
}
if !self.expect_peek(TokenType::RightParen) {
return None;
}
if !self.expect_peek(TokenType::LeftBrace) {
return None;
}
let consequence = self.parse_block_stmt();
if consequence.is_none() {
return None;
}
let mut alternative = None;
if self.peek_token.token_type == TokenType::Else {
self.next_token();
if !self.expect_peek(TokenType::LeftBrace) {
return None;
}
let alternative_maybe = self.parse_block_stmt();
if alternative_maybe.is_some() {
alternative = Some(Box::new(alternative_maybe.unwrap()));
}
}
Some(Expression::If {
token: token,
condition: Box::new(condition.unwrap()),
consequence: Box::new(consequence.unwrap()),
alternative: alternative,
})
}
fn parse_function_literal(&mut self) -> Option<Expression> {
let token = self.cur_token.clone();
if !self.expect_peek(TokenType::LeftParen) {
return None;
}
let parameters = self.parse_function_parameters();
if parameters.is_none() {
return None;
}
if !self.expect_peek(TokenType::LeftBrace) {
return None;
}
let body = self.parse_block_stmt();
if body.is_none() {
return None;
}
Some(Expression::FunctionLiteral {
token: token,
parameters: parameters.unwrap(),
body: Box::new(body.unwrap()),
})
}
fn parse_function_parameters(&mut self) -> Option<Vec<Expression>> {
let mut identifiers = Vec::new();
if self.peek_token.token_type == TokenType::RightParen {
self.next_token();
return Some(identifiers);
}
self.next_token();
let ident = Expression::Identifier {
token: self.cur_token.clone(),
value: self.cur_token.literal.clone(),
};
identifiers.push(ident);
while self.peek_token.token_type == TokenType::Comma {
self.next_token();
self.next_token();
let ident = Expression::Identifier {
token: self.cur_token.clone(),
value: self.cur_token.literal.clone(),
};
identifiers.push(ident);
}
if self.expect_peek(TokenType::RightParen) {
Some(identifiers)
} else {
None
}
}
fn parse_prefix_expr(&mut self) -> Option<Expression> {
let token = self.cur_token.clone();
let operator = self.cur_token.literal.clone();
self.next_token();
let right = self.parse_expr(Precedence::Prefix);
if let Some(r) = right {
Some(Expression::Prefix {
token: token,
operator: operator,
right: Box::new(r),
})
} else {
None
}
}
fn precedence(&self, tt: &TokenType) -> Precedence {
match *tt {
TokenType::Eq => Precedence::Equals,
TokenType::NotEq => Precedence::Equals,
TokenType::LessThan => Precedence::LessGreater,
TokenType::GreaterThan => Precedence::LessGreater,
TokenType::Plus => Precedence::Sum,
TokenType::Minus => Precedence::Sum,
TokenType::Slash => Precedence::Product,
TokenType::Asterisk => Precedence::Product,
TokenType::LeftParen => Precedence::Call,
TokenType::LeftBracket => Precedence::Index,
_ => Precedence::Lowest,
}
}
fn peek_precedence(&self) -> Precedence {
self.precedence(&self.peek_token.token_type)
}
fn cur_precedence(&self) -> Precedence {
self.precedence(&self.cur_token.token_type)
}
fn parse_infix_expr(&mut self, left: Expression) -> Option<Expression> {
match self.cur_token.token_type {
TokenType::Minus | TokenType::Plus | TokenType::Asterisk | TokenType::Slash |
TokenType::LessThan | TokenType::GreaterThan | TokenType::Eq | TokenType::NotEq => {
let token = self.cur_token.clone();
let op = self.cur_token.literal.clone();
let precedence = self.cur_precedence();
self.next_token();
let right = self.parse_expr(precedence);
if let Some(r) = right {
Some(Expression::Infix {
token: token,
operator: op,
left: Box::new(left),
right: Box::new(r),
})
} else {
None
}
}
TokenType::LeftParen => self.parse_call_expr(left),
TokenType::LeftBracket => self.parse_index_expr(left),
ref tt @ _ => {
let msg = format!("Infix parse func for {:?} not found.", tt);
self.errors.push(msg);
None
}
}
}
fn parse_call_expr(&mut self, func: Expression) -> Option<Expression> {
let token = self.cur_token.clone();
let args = self.parse_expression_list(TokenType::RightParen);
if args.is_none() {
return None;
}
Some(Expression::Call {
token: token,
arguments: args.unwrap(),
function: Box::new(func),
})
}
fn parse_index_expr(&mut self, left: Expression) -> Option<Expression> {
let token = self.cur_token.clone();
self.next_token();
let index = self.parse_expr(Precedence::Lowest);
if index.is_none() {
return None;
}
if self.expect_peek(TokenType::RightBracket) {
Some(Expression::Index {
token: token,
left: Box::new(left),
index: Box::new(index.unwrap()),
})
} else {
None
}
}
/// Parses a comma-separated expression list terminated by `end`
/// (used for call arguments and array literals).
///
/// Returns `None` if any element fails to parse or the terminator is
/// missing; `?` replaces the original `is_none()`/`unwrap()` pairs.
fn parse_expression_list(&mut self, end: TokenType) -> Option<Vec<Expression>> {
    let mut list = Vec::new();
    // Empty list: `()` / `[]`.
    if self.peek_token.token_type == end {
        self.next_token();
        return Some(list);
    }
    self.next_token();
    list.push(self.parse_expr(Precedence::Lowest)?);
    while self.peek_token.token_type == TokenType::Comma {
        // Skip past the comma onto the next element's first token.
        self.next_token();
        self.next_token();
        list.push(self.parse_expr(Precedence::Lowest)?);
    }
    if self.expect_peek(end) {
        Some(list)
    } else {
        None
    }
}
/// Advances past the next token when it matches `tt`; otherwise records a
/// peek error. Returns whether the expected token was found.
fn expect_peek(&mut self, tt: TokenType) -> bool {
    let found = self.peek_token.token_type == tt;
    if found {
        self.next_token();
    } else {
        self.peek_error(tt);
    }
    found
}
/// Records a diagnostic for an unexpected upcoming token; parsing continues.
fn peek_error(&mut self, tt: TokenType) {
    self.errors.push(format!("Expected next token to be {:?}, got {:?} instead",
                             tt,
                             self.peek_token.token_type));
}
/// Parses tokens into a `Program` until end of input. Statements that fail
/// to parse are skipped; their diagnostics live in `self.errors`.
pub fn parse_program(&mut self) -> Program {
    let mut program = Program::new();
    while self.cur_token.token_type != TokenType::EOF {
        if let Some(stmt) = self.parse_stmt() {
            program.statements.push(stmt);
        }
        self.next_token();
    }
    program
}
/// Read-only view of the parse errors accumulated so far.
pub fn errors(&self) -> &[String] {
&self.errors
}
}
#[cfg(test)]
/// Test helper: prints every parser error and panics if there are any.
/// Uses `is_empty()` instead of `len() == 0` and drops the needless
/// explicit lifetime (elision covers it).
fn check_parser_errors(parser: &Parser) {
    if parser.errors.is_empty() {
        return;
    }
    for err in &parser.errors {
        println!("{}", err);
    }
    panic!("There are parser errors!");
}
#[cfg(test)]
use super::ast::Node;
#[cfg(test)]
/// Test helper: asserts that `il` is an integer literal with value and
/// token literal equal to `test_value`.
fn check_integer_literal(il: &Expression, test_value: i64) {
    if let Expression::IntegerLiteral { ref value, .. } = *il {
        assert!(*value == test_value, "il.value is not {}. Got {}", test_value, value);
        assert!(il.token_literal() == test_value.to_string(),
                "il.token_literal() is not {}. Got {}",
                test_value,
                il.token_literal());
    } else {
        panic!("il is not Expression::IntegerLiteral. Got {:?}", il);
    }
}
#[cfg(test)]
/// Test helper: asserts that `ie` is an infix expression whose rendered
/// left operand, operator and right operand match the expectations.
fn check_infix_expression(ie: &Expression, exp_left: &str, exp_op: &str, exp_right: &str) {
    if let Expression::Infix { ref left, ref operator, ref right, .. } = *ie {
        assert!(left.string() == exp_left,
                "Infix: left is not {}. Got {}",
                exp_left,
                left.string());
        // Bug fix: the failure message previously printed `exp_left` where the
        // expected operator belongs.
        assert!(operator == exp_op, "Infix: op is not {}. Got {}", exp_op, operator);
        assert!(right.string() == exp_right,
                "Infix: right is not {}. Got {}",
                exp_right,
                right.string());
    } else {
        panic!("Expression is not Expression::Infix. Got {:?}", ie);
    }
}
#[cfg(test)]
/// Test helper: asserts that `ident` is an identifier whose value and
/// token literal both equal `exp_value`.
fn check_identifier(ident: &Expression, exp_value: &str) {
    match *ident {
        Expression::Identifier { ref value, .. } => {
            assert!(value == exp_value, "ident.value is not {}. Got {}", exp_value, value);
            assert!(ident.token_literal() == exp_value,
                    "ident.token_literal() is not {}. Got {}",
                    exp_value,
                    ident.token_literal());
        }
        _ => panic!("ident is not Expression::Identifier. Got {:?}", ident),
    }
}
// Three `let` statements must parse into Let nodes binding x, y and foobar.
#[test]
fn test_let_statements() {
let input = String::from("
let x = 5;
let y = 10;
let foobar = 838383;
");
let mut l = Lexer::new(input);
let mut p = Parser::new(&mut l);
let program = p.parse_program();
check_parser_errors(&p);
assert!(program.statements.len() == 3,
"program.statements does not contain 3 statements. Got {}",
program.statements.len());
let tests = vec!["x", "y", "foobar"];
for ((i, expected), stmt) in tests.iter().enumerate().zip(program.statements) {
assert!(stmt.token_literal() == "let",
"[{}] stmt.token_literal not 'let'. Got {}",
i,
stmt.token_literal());
if let Statement::Let { name, .. } = stmt {
if let Expression::Identifier { ref value, .. } = name {
assert!(value == *expected, "stmt.name.value is not {}. Got {}", expected, value);
assert!(name.token_literal() == *expected,
"stmt.name is not {}. Got {:?}",
expected,
name);
} else {
panic!("name is not Expression::Identifier. Got {:?}", name);
}
} else {
panic!("stmt is not Statement::Let. Got {:?}", stmt);
}
}
}
// Each `return` statement must parse into a Return node.
#[test]
fn test_return_statements() {
let input = String::from("
return 5;
return 10;
return 993322;
");
let mut l = Lexer::new(input);
let mut p = Parser::new(&mut l);
let program = p.parse_program();
check_parser_errors(&p);
assert!(program.statements.len() == 3,
"program.statements does not contain 3 statements. Got {}",
program.statements.len());
for stmt in program.statements {
if let Statement::Return { .. } = stmt {
assert!(stmt.token_literal() == "return",
"stmt.token_literal() isn't 'return'. Got {}",
stmt.token_literal());
} else {
panic!("stmt is not Statement::Return. Got {:?}", stmt);
}
}
}
// A bare identifier parses as a single expression statement.
#[test]
fn test_identifier_expression() {
let input = "foobar;";
let mut l = Lexer::new(input.to_string());
let mut p = Parser::new(&mut l);
let program = p.parse_program();
check_parser_errors(&p);
assert!(program.statements.len() == 1,
"program has not enough statements. Got {}",
program.statements.len());
if let Statement::Expression { ref expression, .. } = program.statements[0] {
if let Expression::Identifier { ref value, .. } = *expression {
assert!(value == "foobar", "value not 'foobar'. Got '{}'", value);
assert!(expression.token_literal() == "foobar",
"token_literal is not 'foobar'. Got {}",
expression.token_literal());
} else {
panic!("expression is not Expression::Identifier. Got {:?}", expression);
}
} else {
panic!("program.statements[0] is not Statement::Expression. Got {:?}",
program.statements[0]);
}
}
// An integer literal parses into an IntegerLiteral expression.
#[test]
fn test_integer_literal_expression() {
let input = "5;";
let mut l = Lexer::new(input.to_string());
let mut p = Parser::new(&mut l);
let program = p.parse_program();
check_parser_errors(&p);
assert!(program.statements.len() == 1,
"program has not enough statements. Got {}",
program.statements.len());
if let Statement::Expression { ref expression, .. } = program.statements[0] {
if let Expression::IntegerLiteral { ref value, .. } = *expression {
assert!(*value == 5, "value is not 5. Got {}", value);
assert!(expression.token_literal() == "5",
"token_literal() is not '5'. Got {}",
expression.token_literal());
} else {
panic!("expression is not Expression::IntegerLiteral. Got {:?}", expression);
}
} else {
panic!("program.statements[0] is not Statement::Expression. Got {:?}",
program.statements[0]);
}
}
// Prefix `!` and `-` operators parse into Prefix nodes.
#[test]
fn test_parsing_prefix_expressions() {
let tests = vec![("!5", "!", 5), ("-15", "-", 15)];
for (input, op, value) in tests {
let mut l = Lexer::new(input.to_string());
let mut p = Parser::new(&mut l);
let program = p.parse_program();
check_parser_errors(&p);
assert!(program.statements.len() == 1,
"program.statements does not contain 1 statement. Got {}",
program.statements.len());
if let Statement::Expression { ref expression, .. } = program.statements[0] {
if let Expression::Prefix { ref operator, ref right, .. } = *expression {
assert!(operator == op, "operator is not {}. Got {}", op, operator);
check_integer_literal(right, value);
} else {
panic!("expression is not Expression::Prefix. Got {:?}", expression);
}
} else {
panic!("program.statements[0] is not Statement::Expression. Got {:?}",
program.statements[0]);
}
}
}
// Every binary operator parses into an Infix node with the right operands.
#[test]
fn test_parsing_infix_expressions() {
let tests = vec![("5 + 5;", 5, "+", 5),
("5 - 5;", 5, "-", 5),
("5 * 5;", 5, "*", 5),
("5 / 5;", 5, "/", 5),
("5 > 5;", 5, ">", 5),
("5 < 5;", 5, "<", 5),
("5 == 5;", 5, "==", 5),
("5 != 5;", 5, "!=", 5)];
for (input, left_val, op, right_val) in tests {
let mut l = Lexer::new(input.to_string());
let mut p = Parser::new(&mut l);
let program = p.parse_program();
check_parser_errors(&p);
assert!(program.statements.len() == 1,
"program.statements does not contain 1 statement. Got {}",
program.statements.len());
if let Statement::Expression { ref expression, .. } = program.statements[0] {
if let Expression::Infix { ref left, ref operator, ref right, .. } = *expression {
check_integer_literal(left, left_val);
assert!(operator == op, "operator is not {}. Got {}", op, operator);
check_integer_literal(right, right_val);
} else {
panic!("expression is not Expression::Infix. Got {:?}", expression);
}
} else {
panic!("program.statements[0] is not Statement::Expression. Got {:?}",
program.statements[0]);
}
}
}
// Operator precedence is verified via the fully-parenthesized string form.
#[test]
fn test_operator_precedence_parsing() {
let tests = vec![("-a * b", "((-a) * b)"),
("!-a", "(!(-a))"),
("a + b + c", "((a + b) + c)"),
("a + b - c", "((a + b) - c)"),
("a * b * c", "((a * b) * c)"),
("a * b / c", "((a * b) / c)"),
("a + b / c", "(a + (b / c))"),
("a + b * c + d / e - f", "(((a + (b * c)) + (d / e)) - f)"),
("3 + 4; -5 * 5", "(3 + 4)((-5) * 5)"),
("5 > 4 == 3 < 4", "((5 > 4) == (3 < 4))"),
("5 < 4 != 3 > 4", "((5 < 4) != (3 > 4))"),
("3 + 4 * 5 == 3 * 1 + 4 * 5", "((3 + (4 * 5)) == ((3 * 1) + (4 * 5)))"),
("true", "true"),
("false", "false"),
("3 > 5 == false", "((3 > 5) == false)"),
("3 < 5 == true", "((3 < 5) == true)"),
("1 + (2 + 3) + 4", "((1 + (2 + 3)) + 4)"),
("(5 + 5) * 2", "((5 + 5) * 2)"),
("2 / (5 + 5)", "(2 / (5 + 5))"),
("-(5 + 5)", "(-(5 + 5))"),
("!(true == true)", "(!(true == true))"),
("a * [1, 2, 3, 4][b * c] * d", "((a * ([1, 2, 3, 4][(b * c)])) * d)"),
("add(a * b[2], b[1], 2 * [1, 2][1])",
"add((a * (b[2])), (b[1]), (2 * ([1, 2][1])))")];
for (input, expected) in tests {
let mut l = Lexer::new(input.to_string());
let mut p = Parser::new(&mut l);
let program = p.parse_program();
check_parser_errors(&p);
let actual = program.string();
assert!(actual == expected, "Expected {}. Got {}", expected, actual);
}
}
// `true`/`false` parse into Boolean expressions.
#[test]
fn test_boolean_expression() {
let tests = vec![("true;", true, "true"), ("false;", false, "false")];
for (input, expected_value, expected_literal) in tests {
let mut l = Lexer::new(input.to_string());
let mut p = Parser::new(&mut l);
let program = p.parse_program();
check_parser_errors(&p);
assert!(program.statements.len() == 1,
"program.statements does not contain 1 statement. Got {}",
program.statements.len());
if let Statement::Expression { ref expression, .. } = program.statements[0] {
if let Expression::Boolean { value, .. } = *expression {
assert!(value == expected_value, "value is not {}. Got {}", expected_value, value);
assert!(expression.token_literal() == expected_literal,
"token_literal is not {}. Got {}",
expected_literal,
expression.token_literal());
} else {
panic!("expression is not Expression::Boolean. Got {:?}", expression);
}
} else {
panic!("stmt is not Statement::Expression. Got {:?}", program.statements[0]);
}
}
}
// `if` without `else`: condition, one-statement consequence, no alternative.
#[test]
fn test_if_expression() {
let input = "if (x < y) { x }";
let mut l = Lexer::new(input.to_string());
let mut p = Parser::new(&mut l);
let program = p.parse_program();
check_parser_errors(&p);
assert!(program.statements.len() == 1,
"program.statements does not contain 1 statement. Got {}",
program.statements.len());
if let Statement::Expression { ref expression, .. } = program.statements[0] {
if let Expression::If { ref condition, ref consequence, ref alternative, .. } =
*expression {
check_infix_expression(condition, "x", "<", "y");
if let Statement::Block { ref statements, .. } = *consequence.as_ref() {
assert!(statements.len() == 1,
"Consequence does not contain 1 statement. Got {}",
statements.len());
if let Statement::Expression { ref expression, .. } = statements[0] {
check_identifier(expression, "x");
} else {
panic!("consequence is not Statement::Expression. Got {:?}", consequence);
}
} else {
panic!("consequence is not Statement::Block. Got {:?}", consequence);
}
assert!(alternative.is_none(),
"expression.alternative was not None. Got {:?}",
alternative);
} else {
panic!("expression is not Expression::If. Got {:?}", expression);
}
} else {
panic!("program.statements[0] is not Statement::Expression. Got {:?}",
program.statements[0]);
}
}
// A function literal has two parameters and a one-statement body.
#[test]
fn test_function_literal_parsing() {
let input = "fn(x, y) { x + y; }";
let mut l = Lexer::new(input.to_string());
let mut p = Parser::new(&mut l);
let program = p.parse_program();
check_parser_errors(&p);
assert!(program.statements.len() == 1,
"program.statements does not contain 1 statement. Got {}",
program.statements.len());
if let Statement::Expression { ref expression, .. } = program.statements[0] {
if let Expression::FunctionLiteral { ref parameters, ref body, .. } = *expression {
assert!(parameters.len() == 2, "parameters wrong. Want 2, Got {}", parameters.len());
if let Statement::Block { ref statements, .. } = *body.as_ref() {
assert!(statements.len() == 1,
"statements should have 1 statement. Got {}",
statements.len());
}
} else {
panic!("expression is not Expression::FunctionLiteral. Got {:?}", expression);
}
} else {
panic!("program.statements[0] is not Statement::Expression. Got {:?}",
program.statements[0]);
}
}
// A call expression captures the callee identifier and each argument.
#[test]
fn test_call_expression_parsing() {
let input = "add(1, 2 * 3, 4 + 5);";
let mut l = Lexer::new(input.to_string());
let mut p = Parser::new(&mut l);
let program = p.parse_program();
check_parser_errors(&p);
assert!(program.statements.len() == 1,
"program.statements does not contain 1 statement. Got {}",
program.statements.len());
if let Statement::Expression { ref expression, .. } = program.statements[0] {
if let Expression::Call { ref arguments, ref function, .. } = *expression {
check_identifier(function, "add");
assert!(arguments.len() == 3, "parameters wrong. Want 3, Got {}", arguments.len());
check_infix_expression(&arguments[1], "2", "*", "3");
check_infix_expression(&arguments[2], "4", "+", "5");
} else {
panic!("expression is not Expression::Call. Got {:?}", expression);
}
} else {
panic!("program.statements[0] is not Statement::Expression. Got {:?}",
program.statements[0]);
}
}
// A quoted string parses into a String expression with the unquoted value.
#[test]
fn test_string_literal_expression() {
let input = "\"hello world\";";
let mut l = Lexer::new(input.to_string());
let mut p = Parser::new(&mut l);
let program = p.parse_program();
check_parser_errors(&p);
if let Statement::Expression { ref expression, .. } = program.statements[0] {
if let Expression::String { ref value, .. } = *expression {
assert!(value == "hello world", "value is not 'hello world'. Got {}", value);
} else {
panic!("Expression is not String. Got {:?}", expression);
}
} else {
panic!("stmt is not Statement::Expression. Got {:?}", program.statements[0]);
}
}
// Array literals keep element order and parse nested expressions.
#[test]
fn test_parsing_array_literals() {
let input = "[1, 2 * 2, 3 + 3]";
let mut l = Lexer::new(input.to_string());
let mut p = Parser::new(&mut l);
let program = p.parse_program();
check_parser_errors(&p);
if let Statement::Expression { ref expression, .. } = program.statements[0] {
if let Expression::Array { ref elements, .. } = *expression {
assert!(elements.len() == 3,
"elements count is not equal to 3. Got {}",
elements.len());
check_integer_literal(&elements[0], 1);
check_infix_expression(&elements[1], "2", "*", "2");
check_infix_expression(&elements[2], "3", "+", "3");
} else {
panic!("expression is not Array. Got {:?}", expression);
}
} else {
panic!("program.statements[0] is not Statement::Expression");
}
}
// Index expressions split into the collection and the index expression.
#[test]
fn test_parsing_index_expressions() {
let input = "myArray[1 + 1]";
let mut l = Lexer::new(input.to_string());
let mut p = Parser::new(&mut l);
let program = p.parse_program();
check_parser_errors(&p);
if let Statement::Expression { ref expression, .. } = program.statements[0] {
if let Expression::Index { ref left, ref index, .. } = *expression {
check_identifier(left, "myArray");
check_infix_expression(index, "1", "+", "1");
} else {
panic!("expression is not Expression::Index. Got {:?}", expression);
}
}
}
| true
|
117a48048ed8c5ea183a6a898cd3d274889d14ca
|
Rust
|
mqudsi/sqlite-readers-writers
|
/src/main.rs
|
UTF-8
| 11,044
| 2.671875
| 3
|
[] |
no_license
|
use random_fast_rng::{FastRng, Random};
use rusqlite::{params, Connection, DropBehavior};
use std::fs;
use std::path::Path;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::{mpsc, Arc, RwLock};
use std::thread;
use std::time::{Duration, Instant};
// Duration of each benchmark measurement window, in seconds.
const ITER_SECS: u64 = 5;
// When true, wrap every DB access in a process-local RwLock as well,
// to compare against SQLite's own locking.
const USE_RWLOCK: bool = false;
// Number of key/value rows inserted before each benchmark run.
const SEED_COUNT: usize = 20;
// Size in bytes of the blob inserted by each writer.
const NEW_ITEM_SIZE: usize = 40 * 1024;
// When true, print each key/value touched (useful for debugging, skews timings).
const PRINT_VALUES: bool = false;
/// SQLite's approach to concurrency requires waiting/backing off in case of
/// readers/writers conflict. This sets a max duration before failing.
const DB_TIMEOUT: Duration = Duration::from_secs(6);
// Thin wrapper owning a single SQLite connection.
struct Database {
conn: rusqlite::Connection,
}
// Benchmark knobs: WAL journal mode and shared-cache mode.
#[derive(Copy, Clone, Debug)]
struct DbOptions {
wal: bool,
shared_cache: bool,
}
impl DbOptions {
// Translates the options into SQLite open flags (create + read/write,
// plus shared cache when requested).
fn db_flags(&self) -> rusqlite::OpenFlags {
use rusqlite::OpenFlags;
let mut flags = OpenFlags::empty();
flags.set(OpenFlags::SQLITE_OPEN_CREATE, true);
flags.set(OpenFlags::SQLITE_OPEN_READ_WRITE, true);
flags.set(OpenFlags::SQLITE_OPEN_SHARED_CACHE, self.shared_cache);
flags
}
}
impl Database {
    /// Creates a fresh database at `path` (deleting any pre-existing file)
    /// and sets up the schema.
    pub fn create<P: AsRef<Path>>(path: P, options: &DbOptions) -> Self {
        let path: &Path = path.as_ref();
        if path.exists() {
            fs::remove_file(path).expect("Could not delete existing database file");
        }
        let mut db = Self::open(path, options);
        db.create_tables(options);
        db
    }

    /// Opens a connection with flags derived from `options` and applies the
    /// busy timeout so concurrent readers/writers back off instead of failing
    /// immediately.
    pub fn open<P: AsRef<Path>>(path: P, options: &DbOptions) -> Self {
        let conn = Connection::open_with_flags(path, options.db_flags())
            .expect("Could not create SQLite connection");
        conn.busy_timeout(DB_TIMEOUT)
            .expect("Error setting the database timeout");
        Database { conn }
    }

    /// Creates the key/value table, optionally switching the journal to WAL
    /// mode first.
    fn create_tables(&mut self, options: &DbOptions) {
        if options.wal {
            self.conn
                .pragma_update(None, "journal_mode", &"WAL".to_owned())
                .expect("Error applying WAL journal_mode");
        }
        self.conn
            .execute(
                r#"
CREATE TABLE "kv" (
"key" INTEGER NOT NULL,
"value" BLOB NOT NULL,
PRIMARY KEY("key")
) WITHOUT ROWID;
"#,
                [],
            )
            .expect("Error creating tables");
    }

    /// Inserts `SEED_COUNT` random key/value pairs in one committed
    /// transaction and returns the inserted keys.
    pub fn seed(&mut self) -> std::io::Result<Vec<u16>> {
        let mut transaction = self
            .conn
            .transaction()
            .expect("Could not open DB transaction");
        // Commit (rather than roll back) when the transaction is dropped.
        transaction.set_drop_behavior(DropBehavior::Commit);
        let mut query = transaction
            .prepare(
                r#"
INSERT INTO "kv" VALUES (?1, ?2);
"#,
            )
            .expect("Failed to prepare insert query");
        // Bug fix: the original ran `for k in &mut keys { *k = rng.get_u16(); }`
        // over a freshly-created empty Vec — a loop that never executed. The
        // dead loop is removed; keys are generated in the insert loop below.
        let mut keys = Vec::with_capacity(SEED_COUNT);
        let mut rng = FastRng::new();
        for _ in 0..SEED_COUNT {
            let (key, value) = (rng.get_u16(), rng.get_u16());
            keys.push(key);
            query
                .execute(params![key, value])
                .expect("Insertion failure seeding database!");
        }
        Ok(keys)
    }
}
/// Repeatedly reads random seeded keys until `stop` is set.
///
/// Returns the number of reads performed and the per-read latencies in
/// nanoseconds. When `USE_RWLOCK` is enabled, each read additionally holds
/// the process-local read lock for the duration of the query.
fn read_loop(
    db: Database,
    keys: &[u16],
    stop: Arc<AtomicBool>,
    rwlock: Arc<RwLock<()>>,
) -> (i32, Vec<i64>) {
    let mut times = Vec::new();
    let mut query = db
        .conn
        .prepare(
            r#"
SELECT "value" FROM "kv"
WHERE "key" = ?1
LIMIT 1;"#,
        )
        .expect("Failed to prepare query statement");
    let mut reads = 0;
    let mut rng = FastRng::new();
    while !stop.load(Ordering::Relaxed) {
        // `get_usize() % keys.len()` is already a usize — the original's
        // extra `as usize` cast was redundant and has been dropped.
        let key = &keys[rng.get_usize() % keys.len()];
        let timer = Instant::now();
        // Guard declared outside the `if` so it lives until end of iteration.
        let _guard;
        if USE_RWLOCK {
            _guard = rwlock.read().expect("Cannot unlock for read!");
        }
        // Read errors are intentionally ignored (best-effort benchmark read);
        // the Ok value is only shown when PRINT_VALUES is on.
        let value: Result<String, _> = query.query_row(&[key], |result| result.get(0));
        reads += 1;
        let elapsed = timer.elapsed();
        if PRINT_VALUES {
            if let Ok(value) = value {
                println!("{}: {}", key, value);
            }
        }
        times.push(elapsed.as_nanos() as i64);
    }
    (reads, times)
}
/// Repeatedly inserts random keys with one fixed random blob until `stop`
/// is set. Returns the per-write latencies in nanoseconds.
fn write_loop(db: Database, stop: Arc<AtomicBool>, rwlock: Arc<RwLock<()>>) -> Vec<i64> {
    let mut times = Vec::new();
    let mut query = db
        .conn
        .prepare(
            r#"
INSERT OR IGNORE INTO "kv" ("key", "value")
VALUES (?1, ?2)
"#,
        )
        .expect("Failed to prepare update statement");
    let mut rng = FastRng::new();
    // vec! allocates and zero-fills in one step — idiomatic replacement for
    // the original `Vec::new()` + `resize`. The buffer is randomized once
    // and reused for every insert.
    let mut value = vec![0u8; NEW_ITEM_SIZE];
    rng.fill_bytes(&mut value);
    while !stop.load(Ordering::Relaxed) {
        let key = rng.get_u16();
        let timer = Instant::now();
        let _guard;
        if USE_RWLOCK {
            _guard = rwlock.write().expect("Cannot unlock for read!");
        }
        // INSERT OR IGNORE: colliding keys count as 0 rows updated.
        let rows_updated = query
            .execute(params![key, value])
            .expect("Failed to issue update query!");
        let elapsed = timer.elapsed();
        if PRINT_VALUES && rows_updated > 0 {
            println!("{} set", key);
        }
        times.push(elapsed.as_nanos() as i64);
    }
    times
}
/// Arithmetic mean of `nums`. The sum is widened to i128 so it cannot
/// overflow; an empty slice yields NaN (0.0 / 0.0), matching the original.
fn average(nums: &[i64]) -> f64 {
    let total: i128 = nums.iter().copied().map(i128::from).sum();
    total as f64 / nums.len() as f64
}
// One benchmark result row for the summary table printed at the end of main().
struct PerfRecord {
// Human-readable flag combination, e.g. "wal, !shared_cache".
config: String,
readers: i32,
writers: i32,
reads_per_sec: f64,
writes_per_sec: f64,
// Read latency percentiles, in milliseconds.
read_p95: f64,
read_p99: f64,
read_p999: f64,
// Write percentiles are None when no writer threads ran.
write_p95: Option<f64>,
write_p99: Option<f64>,
write_p999: Option<f64>,
}
/// Benchmark driver: for each SQLite configuration (WAL x shared-cache) and
/// writer count 0..=3, runs one reader (on the main thread) plus N writer
/// threads for ITER_SECS seconds, then prints latency/throughput statistics
/// and a final markdown summary table.
fn main() {
let mut perf_vec = Vec::new();
for options in [
DbOptions { shared_cache: false, wal: false },
DbOptions { shared_cache: false, wal: true },
// Shared cache w/out wal requires unlock_notify to work
DbOptions { shared_cache: true, wal: false },
DbOptions { shared_cache: true, wal: true },
] {
println!("## {:?}", options);
println!("");
// Recreate and seed the database fresh for each configuration.
let keys = {
let mut db = Database::create("test.db", &options);
db.seed().expect("Error seeding database!")
};
for writers in 0..4 {
let done = Arc::new(AtomicBool::new(false));
let rwlock = Arc::new(RwLock::new(()));
let options = Arc::new(options);
// Timer thread flips `done` once the measurement window elapses.
{
let done = done.clone();
thread::spawn(move || {
thread::sleep(Duration::from_secs(ITER_SECS));
done.store(true, Ordering::Release);
});
}
let db = Database::open("test.db", &options);
let (write_counts_send, write_counts_recv) = mpsc::channel();
// Each writer thread opens its own connection and sends back its
// latency samples over the channel when it stops.
for _ in 0..writers {
let done = done.clone();
let sender = write_counts_send.clone();
let rwlock = rwlock.clone();
let options = options.clone();
thread::spawn(move || {
let write_db = Database::open("test.db", &options);
let write_times = write_loop(write_db, done, rwlock);
sender
.send(write_times)
.expect("Could not send write count!");
});
}
// Drop the original sender so only writer clones keep the channel open.
drop(write_counts_send);
// The single reader runs on the main thread until `done` is set.
let (total_reads, mut read_times) = read_loop(db, &keys, done.clone(), rwlock.clone());
read_times.sort();
let mut total_writes = 0;
let mut write_times = Vec::new();
for _ in 0..writers {
let mut writes = write_counts_recv
.recv()
.expect("Failed to receive write counts!");
total_writes += writes.len();
write_times.append(&mut writes);
}
write_times.sort();
println!("{} writers:", writers);
println!("- Read {} values from the database.", read_times.len());
println!("- Wrote {} values to the database.", total_writes);
println!(
"- Mean read time: {:.5} ms",
average(&read_times) / 1000_000f64
);
// Percentiles are read straight from the sorted latency vectors (nanoseconds).
let p95_nanos = read_times[(0.95 * (read_times.len() as f64)) as usize];
let p95_millis = p95_nanos as f64 / 1000_000f64;
println!("- P95: {} ms", p95_millis);
let p99_nanos = read_times[(0.99 * (read_times.len() as f64)) as usize];
let p99_millis = p99_nanos as f64 / 1000_000f64;
println!("- P99: {} ms", p99_millis);
let p99_9_nanos = read_times[(0.999 * (read_times.len() as f64)) as usize];
let p99_9_millis = p99_9_nanos as f64 / 1000_000f64;
println!("- P99.9: {} ms", p99_9_millis);
println!("");
// Renders a boolean flag as "" (set) or "!" (unset) for the config column.
fn not_str(v: bool) -> &'static str {
if v { "" } else { "!" }
}
perf_vec.push(PerfRecord {
config: format!("{}wal, {}shared_cache", not_str(options.wal), not_str(options.shared_cache)),
readers: 1,
writers,
reads_per_sec: total_reads as f64 / ITER_SECS as f64,
writes_per_sec: total_writes as f64 / ITER_SECS as f64,
read_p95: p95_millis,
read_p99: p99_millis,
read_p999: p99_9_millis,
write_p95: if write_times.len() > 0 { Some(write_times[(0.95 * (write_times.len() as f64)) as usize] as f64 / 1000_000f64) } else { None },
write_p99: if write_times.len() > 0 { Some(write_times[(0.99 * (write_times.len() as f64)) as usize] as f64 / 1000_000f64) } else { None },
write_p999: if write_times.len() > 0 { Some(write_times[(0.999 * (write_times.len() as f64)) as usize] as f64 / 1000_000f64) } else { None },
});
}
}
// Formats an Option as its value or the fallback string `o`.
fn print_or<T: std::fmt::Display>(v: Option<T>, o: &str) -> String {
v.map(|v| v.to_string())
.unwrap_or(o.to_owned())
}
// Pad the config column to the widest config string.
let title_width = perf_vec.iter().map(|r| r.config.len()).max().unwrap();
println!("---------------------------------");
println!("");
println!("| configuration | readers | writers | reads/sec | writes/sec | read p95 (ms) | read p99 | read p99.9 | write p95 | write p99 | write p99.9 |");
println!("| ------------- | ------- | ------- | --------- | ---------- | ------------- | -------- | ---------- | --------- | --------- | ----------- |");
for row in perf_vec {
println!("| {:w0$} | {:2} | {:2} | {} | {} | {} | {} | {} | {} | {} | {} |",
row.config, row.readers, row.writers, row.reads_per_sec, row.writes_per_sec,
row.read_p95, row.read_p99, row.read_p999,
print_or(row.write_p95, "N/A"), print_or(row.write_p99, "N/A"), print_or(row.write_p999, "N/A"),
w0 = title_width,
);
}
}
| true
|
5c8487c3ffdc4c64bcb041db7182056cb8444a1d
|
Rust
|
thomvil/zetelverdeling-rs
|
/src/gelaagde_zetel_verdeler.rs
|
UTF-8
| 4,153
| 2.890625
| 3
|
[] |
no_license
|
use super::*;
#[derive(Debug)]
// "Layered seat allocator": one seat allocator (ZetelVerdeler) per
// electoral district ("kieskring"), keyed by district id `T`;
// `U` identifies the parties.
pub struct GelaagdeZetelVerdeler<T, U> {
pub(crate) kieskringen: HashMap<T, ZetelVerdeler<U>>,
}
impl<T, U> GelaagdeZetelVerdeler<T, U> {
/// Total number of votes cast across all districts.
pub fn totaal_stemmen(&self) -> f32 {
self.kieskringen
.values()
.map(|stemmen| stemmen.totaal_stemmen())
.sum()
}
}
impl<T: Clone + Eq + Hash, U> GelaagdeZetelVerdeler<T, U> {
/// Vote totals per district, keyed by a clone of the district id.
pub fn totaal_stemmen_per_kieskring(&self) -> HashMap<T, f32> {
self.kieskringen
.iter()
.map(|(kieskring_id, zv)| (kieskring_id.clone(), zv.totaal_stemmen()))
.collect()
}
}
/////////////
// Builder //
/////////////
#[derive(Debug, Default)]
// Builder collecting per-district allocators before producing a
// GelaagdeZetelVerdeler.
pub struct GelaagdeZetelVerdelerBuilder<T, U> {
kieskringen: HashMap<T, ZetelVerdeler<U>>,
}
impl<T: Eq + Hash, U> GelaagdeZetelVerdelerBuilder<T, U> {
/// Starts an empty builder.
pub fn new() -> Self {
Self {
kieskringen: HashMap::new(),
}
}
/// Registers (or replaces) the allocator for one district; consumes and
/// returns the builder so calls can be chained.
pub fn add(mut self, kieskring_naam: T, zv: ZetelVerdeler<U>) -> Self {
self.kieskringen.insert(kieskring_naam, zv);
self
}
/// Finalizes the builder into the layered allocator.
pub fn finish(self) -> GelaagdeZetelVerdeler<T, U> {
GelaagdeZetelVerdeler {
kieskringen: self.kieskringen,
}
}
}
impl<T: Clone + Eq + Hash, U: Clone + Eq + Hash> GelaagdeZetelVerdelerBuilder<T, U> {
/// Builds a layered allocator from raw tuples of
/// (district, [(party, votes)], seats, electoral threshold).
/// The fold over `Result` short-circuits on the first builder error.
#[allow(clippy::type_complexity)]
pub fn from_data(
data: &[(T, &[(U, u32)], u32, f32)],
) -> Result<GelaagdeZetelVerdeler<T, U>, String> {
data.iter()
.fold(
Ok(Self::new()),
|acc, (kieskring, stem_aantallen, zetels, kiesdrempel)| {
ZetelVerdelerBuilder::from_data((*stem_aantallen, *zetels, *kiesdrempel))
.finish()
.and_then(|zv| acc.map(|gzv| gzv.add(kieskring.clone(), zv)))
},
)
.map(|gzv| gzv.finish())
}
/// Like `from_data`, but applies one shared electoral threshold
/// ("kiesdrempel") to every district.
#[allow(clippy::type_complexity)]
pub fn constante_drempel(
kiesdrempel: f32,
data: &[(T, u32, &[(U, u32)])],
) -> Result<GelaagdeZetelVerdeler<T, U>, String> {
data.iter()
.fold(
Ok(Self::new()),
|acc, (kieskring, zetels, stem_aantallen)| {
ZetelVerdelerBuilder::from_data((*stem_aantallen, *zetels, kiesdrempel))
.finish()
.and_then(|zv| acc.map(|gzv| gzv.add(kieskring.clone(), zv)))
},
)
.map(|gzv| gzv.finish())
}
}
#[cfg(test)]
mod tests {
use super::*;
// D'Hondt allocation across two districts with identical votes: a 20%
// threshold eliminates Partij C and redistributes its seats.
#[test]
fn from_data() {
let gzv = GelaagdeZetelVerdelerBuilder::from_data(&[
(
"zonder drempel",
&[("Partij A", 6500), ("Partij B", 3800), ("Partij C", 2300)],
25,
0.,
),
(
"met drempel",
&[("Partij A", 6500), ("Partij B", 3800), ("Partij C", 2300)],
25,
20.,
),
]);
assert!(gzv.is_ok());
let zetels = gzv.unwrap().dhondt();
assert_eq!(13, zetels.get("zonder drempel", "Partij A").unwrap());
assert_eq!(8, zetels.get("zonder drempel", "Partij B").unwrap());
assert_eq!(4, zetels.get("zonder drempel", "Partij C").unwrap());
assert_eq!(16, zetels.get("met drempel", "Partij A").unwrap());
assert_eq!(9, zetels.get("met drempel", "Partij B").unwrap());
assert_eq!(0, zetels.get("met drempel", "Partij C").unwrap());
}
// The shared-threshold constructor succeeds on well-formed input.
#[test]
fn constante_drempel() {
let gzv = GelaagdeZetelVerdelerBuilder::constante_drempel(
20.,
&[
(
"zonder drempel",
25,
&[("Partij A", 6500), ("Partij B", 3800), ("Partij C", 2300)],
),
(
"met drempel",
25,
&[("Partij A", 6500), ("Partij B", 3800), ("Partij C", 2300)],
),
],
);
assert!(gzv.is_ok());
}
}
| true
|
6bc124dd5da851a83df80ae6c1f751fcee0b20cb
|
Rust
|
jcholder-thoughtworks/advent-of-code-2019
|
/02/intcode/src/lib.rs
|
UTF-8
| 2,381
| 3.765625
| 4
|
[] |
no_license
|
/// An Intcode program: a flat memory of i32 cells.
pub type IntcodeProgram = Vec<i32>;
/// An index into program memory.
pub type Pointer = usize;

/// Conventional positions in an Intcode program (Advent of Code 2019, day 2).
pub const OUTPUT: Pointer = 0;
pub const NOUN: Pointer = 1;
pub const VERB: Pointer = 2;

/// Parses comma-separated Intcode source (e.g. "1,0,0,3,99") into a program.
///
/// Panics if any element fails to parse as an i32.
pub fn parse_code(code: String) -> IntcodeProgram {
    let program = code.trim().split(',');
    program.map(|c| c.parse().unwrap()).collect()
}

/// Runs `program` from position 0 until opcode 99 and returns final memory.
pub fn execute_intcode(program: IntcodeProgram) -> IntcodeProgram {
    execute_intcode_at_pointer(program, 0)
}

/// Executes the program starting at `pointer`.
///
/// Rewritten from recursion to an iterative loop: Rust does not guarantee
/// tail-call elimination, so the recursive form (one stack frame per opcode)
/// could overflow the stack on long programs. Behavior is unchanged.
fn execute_intcode_at_pointer(mut program: IntcodeProgram, mut pointer: Pointer) -> IntcodeProgram {
    loop {
        let command = program[pointer];
        if command == 99 {
            return program;
        }
        // Opcodes 1 (add) and 2 (multiply) take two source addresses and a
        // destination address as their three operands.
        let left_index = program[pointer + 1] as usize;
        let right_index = program[pointer + 2] as usize;
        let destination = program[pointer + 3] as usize;
        let left_value = program[left_index];
        let right_value = program[right_index];
        program[destination] = match command {
            1 => left_value + right_value,
            2 => left_value * right_value,
            _ => panic!("Unrecognized command: {:?}", command),
        };
        pointer += 4;
    }
}
#[cfg(test)]
pub mod tests {
use super::*;
// Smallest terminating program: one add, then halt.
#[test]
fn minimal_program() {
let program = vec![1, 0, 0, 0, 99];
let expected = vec![2, 0, 0, 0, 99];
assert_eq!(expected, execute_intcode(program));
}
// Worked example from the puzzle description (two instructions).
#[test]
fn example_program() {
let program = vec![1, 9, 10, 3, 2, 3, 11, 0, 99, 30, 40, 50];
let expected = vec![3500, 9, 10, 70, 2, 3, 11, 0, 99, 30, 40, 50];
assert_eq!(expected, execute_intcode(program));
}
// The four small verification programs from the puzzle text.
#[test]
fn small_program_1() {
let program = vec![1,0,0,0,99];
let expected = vec![2,0,0,0,99];
assert_eq!(expected, execute_intcode(program));
}
#[test]
fn small_program_2() {
let program = vec![2,3,0,3,99];
let expected = vec![2,3,0,6,99];
assert_eq!(expected, execute_intcode(program));
}
#[test]
fn small_program_3() {
let program = vec![2,4,4,5,99,0];
let expected = vec![2,4,4,5,99,9801];
assert_eq!(expected, execute_intcode(program));
}
#[test]
fn small_program_4() {
let program = vec![1,1,1,4,99,5,6,0,99];
let expected = vec![30,1,1,4,2,5,6,0,99];
assert_eq!(expected, execute_intcode(program));
}
}
| true
|
84a090879390cf24093f7f779cf0a0501efbc8c3
|
Rust
|
matklad/rust-analyzer
|
/crates/ide_completion/src/completions/record.rs
|
UTF-8
| 7,189
| 3.015625
| 3
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
//! Complete fields in record literals and patterns.
use ide_db::{helpers::FamousDefs, SymbolKind};
use syntax::ast::Expr;
use crate::{
item::CompletionKind, patterns::ImmediateLocation, CompletionContext, CompletionItem,
Completions,
};
/// Completes missing fields in record literals and record patterns.
/// For a record literal whose type implements `Default` and that still has
/// missing fields, additionally offers a `..Default::default()` snippet.
pub(crate) fn complete_record(acc: &mut Completions, ctx: &CompletionContext) -> Option<()> {
let missing_fields = match &ctx.completion_location {
Some(ImmediateLocation::RecordExpr(record_expr)) => {
let ty = ctx.sema.type_of_expr(&Expr::RecordExpr(record_expr.clone()));
// `..Default::default()` applies only when the literal's type
// implements core's Default trait.
let default_trait = FamousDefs(&ctx.sema, ctx.krate).core_default_Default();
let impl_default_trait = default_trait
.zip(ty)
.map_or(false, |(default_trait, ty)| ty.impls_trait(ctx.db, default_trait, &[]));
let missing_fields = ctx.sema.record_literal_missing_fields(record_expr);
if impl_default_trait && !missing_fields.is_empty() {
let completion_text = "..Default::default()";
let mut item = CompletionItem::new(
CompletionKind::Snippet,
ctx.source_range(),
completion_text,
);
// Strip whatever the user has already typed (e.g. a leading `.`)
// so the inserted text completes rather than duplicates it.
let completion_text =
completion_text.strip_prefix(ctx.token.text()).unwrap_or(completion_text);
item.insert_text(completion_text).kind(SymbolKind::Field);
item.add_to(acc);
}
missing_fields
}
Some(ImmediateLocation::RecordPat(record_pat)) => {
ctx.sema.record_pattern_missing_fields(record_pat)
}
// Cursor is not inside a record literal/pattern: nothing to do.
_ => return None,
};
// One field completion per missing field, typed with the field's type.
for (field, ty) in missing_fields {
acc.add_field(ctx, None, field, &ty);
}
Some(())
}
#[cfg(test)]
mod tests {
use expect_test::{expect, Expect};
use crate::{
tests::{check_edit, filtered_completion_list},
CompletionKind,
};
// Renders reference-kind completions for the fixture and compares to `expect`.
fn check(ra_fixture: &str, expect: Expect) {
let actual = filtered_completion_list(ra_fixture, CompletionKind::Reference);
expect.assert_eq(&actual);
}
// Same, but for snippet-kind completions (e.g. `..Default::default()`).
fn check_snippet(ra_fixture: &str, expect: Expect) {
let actual = filtered_completion_list(ra_fixture, CompletionKind::Snippet);
expect.assert_eq(&actual);
}
// With Default implemented, both the missing field and the
// `..Default::default()` snippet are offered.
#[test]
fn test_record_literal_field_default() {
let test_code = r#"
//- minicore: default
struct S { foo: u32, bar: usize }
impl Default for S {
fn default() -> Self {
S {
foo: 0,
bar: 0,
}
}
}
fn process(f: S) {
let other = S {
foo: 5,
.$0
};
}
"#;
check(
test_code,
expect![[r#"
fd bar usize
"#]],
);
check_snippet(
test_code,
expect![[r#"
fd ..Default::default()
"#]],
);
}
// Accepting the snippet replaces the typed `.` with `..Default::default()`.
#[test]
fn test_record_literal_field_default_completion() {
check_edit(
"..Default::default()",
r#"
//- minicore: default
struct S { foo: u32, bar: usize }
impl Default for S {
fn default() -> Self {
S {
foo: 0,
bar: 0,
}
}
}
fn process(f: S) {
let other = S {
foo: 5,
.$0
};
}
"#,
r#"
struct S { foo: u32, bar: usize }
impl Default for S {
fn default() -> Self {
S {
foo: 0,
bar: 0,
}
}
}
fn process(f: S) {
let other = S {
foo: 5,
..Default::default()
};
}
"#,
);
}
// Without a Default impl, only the field completion is offered — no snippet.
#[test]
fn test_record_literal_field_without_default() {
let test_code = r#"
struct S { foo: u32, bar: usize }
fn process(f: S) {
let other = S {
foo: 5,
.$0
};
}
"#;
check(
test_code,
expect![[r#"
fd bar usize
"#]],
);
check_snippet(test_code, expect![[r#""#]]);
}
#[test]
fn test_record_pattern_field() {
check(
r#"
struct S { foo: u32 }
fn process(f: S) {
match f {
S { f$0: 92 } => (),
}
}
"#,
expect![[r#"
fd foo u32
"#]],
);
}
#[test]
fn test_record_pattern_enum_variant() {
check(
r#"
enum E { S { foo: u32, bar: () } }
fn process(e: E) {
match e {
E::S { $0 } => (),
}
}
"#,
expect![[r#"
fd foo u32
fd bar ()
"#]],
);
}
#[test]
fn test_record_pattern_field_in_simple_macro() {
check(
r"
macro_rules! m { ($e:expr) => { $e } }
struct S { foo: u32 }
fn process(f: S) {
m!(match f {
S { f$0: 92 } => (),
})
}
",
expect![[r#"
fd foo u32
"#]],
);
}
#[test]
fn only_missing_fields_are_completed_in_destruct_pats() {
check(
r#"
struct S {
foo1: u32, foo2: u32,
bar: u32, baz: u32,
}
fn main() {
let s = S {
foo1: 1, foo2: 2,
bar: 3, baz: 4,
};
if let S { foo1, foo2: a, $0 } = s {}
}
"#,
expect![[r#"
fd bar u32
fd baz u32
"#]],
);
}
#[test]
fn test_record_literal_field() {
check(
r#"
struct A { the_field: u32 }
fn foo() {
A { the$0 }
}
"#,
expect![[r#"
fd the_field u32
"#]],
);
}
#[test]
fn test_record_literal_enum_variant() {
check(
r#"
enum E { A { a: u32 } }
fn foo() {
let _ = E::A { $0 }
}
"#,
expect![[r#"
fd a u32
"#]],
);
}
#[test]
fn test_record_literal_two_structs() {
check(
r#"
struct A { a: u32 }
struct B { b: u32 }
fn foo() {
let _: A = B { $0 }
}
"#,
expect![[r#"
fd b u32
"#]],
);
}
#[test]
fn test_record_literal_generic_struct() {
check(
r#"
struct A<T> { a: T }
fn foo() {
let _: A<u32> = A { $0 }
}
"#,
expect![[r#"
fd a u32
"#]],
);
}
#[test]
fn test_record_literal_field_in_simple_macro() {
check(
r#"
macro_rules! m { ($e:expr) => { $e } }
struct A { the_field: u32 }
fn foo() {
m!(A { the$0 })
}
"#,
expect![[r#"
fd the_field u32
"#]],
);
}
#[test]
fn only_missing_fields_are_completed() {
check(
r#"
struct S {
foo1: u32, foo2: u32,
bar: u32, baz: u32,
}
fn main() {
let foo1 = 1;
let s = S { foo1, foo2: 5, $0 }
}
"#,
expect![[r#"
fd bar u32
fd baz u32
"#]],
);
}
#[test]
fn completes_functional_update() {
check(
r#"
struct S { foo1: u32, foo2: u32 }
fn main() {
let foo1 = 1;
let s = S { foo1, $0 .. loop {} }
}
"#,
expect![[r#"
fd foo2 u32
"#]],
);
}
}
| true
|
a00aef86c842e0ed359c0df9b10c6d48eb128300
|
Rust
|
Farzy/rust-training
|
/src/myrand.rs
|
UTF-8
| 701
| 2.734375
| 3
|
[] |
no_license
|
extern crate rand;
extern crate rand_chacha;
use rand::{Rng, SeedableRng};
use rand::seq::{SliceRandom};
/// Demonstrates seeded pseudo-random generation with the ChaCha8 generator:
/// scalar draws, ranged draws, buffer filling, shuffling and sampling.
pub fn main() {
    // Fixed seed so every run prints the same sequence.
    let mut generator = rand_chacha::ChaCha8Rng::seed_from_u64(10);
    // 19 scalar draws; the i16 is drawn before the f32 each iteration.
    for _ in 0..19 {
        let int_sample: i16 = generator.gen();
        let float_sample: f32 = generator.gen();
        println!("Random: i16: {} f32: {}", int_sample, float_sample);
    }
    // 19 draws restricted to the half-open range [-1000, 4000).
    for _ in 0..19 {
        println!("Range: i16: {}", generator.gen_range(-1000..4000));
    }
    // Fill a fixed-size buffer with random bytes in one call.
    let mut byte_buffer = [0i8; 20];
    generator.fill(&mut byte_buffer);
    println!("Ints i8 = {:?}", byte_buffer);
    // Shuffle 1..20 in place, then sample from it twice.
    let mut deck: Vec<i8> = (1..20).collect();
    deck.shuffle(&mut generator);
    println!("Shuffle 1..20: {:?}", deck);
    println!("Choose 1..20: {:?}", deck.choose(&mut generator));
    println!("Choose 1..20: {:?}", deck.choose(&mut generator));
}
| true
|
c50c88efad8c4409781ca6c80fb1ceded85a1949
|
Rust
|
andrew-d/rscrape
|
/src/fetchers.rs
|
UTF-8
| 1,805
| 3.3125
| 3
|
[] |
no_license
|
use std::error;
use std::io::Read;
use hyper::method::Method;
use hyper::client::{Client, IntoUrl};
/// Fetcher is the interface for things that can fetch a remote URL.
pub trait Fetcher {
    /// Called once at the beginning of the scrape.
    ///
    /// The default implementation is a no-op that always succeeds.
    fn prepare(&mut self) -> Result<(), Box<error::Error>> {
        Ok(())
    }
    /// Called to retrieve each document from the remote server.
    ///
    /// Returns a boxed reader over the response body, or a boxed error if
    /// the request could not be issued.
    fn fetch<U: IntoUrl>(&mut self, method: Method, url: U) -> Result<Box<Read>, Box<error::Error>>;
    /// Called once when the scrape is finished. Can be used to clean up
    /// allocated resources or perform other cleanup actions.
    ///
    /// The default implementation does nothing.
    fn close(&mut self) {
    }
}
/// A [`Fetcher`] backed by a hyper HTTP `Client`.
pub struct HttpClientFetcher {
    // TODO: prepare client function?
    // TODO: prepare request function?
    // TODO: process response function?
    // Underlying HTTP client used for every request.
    c: Client,
}
impl HttpClientFetcher {
    /// Build a fetcher backed by a freshly constructed hyper `Client`.
    pub fn new() -> Self {
        Self::with_client(Client::new())
    }
    /// Build a fetcher that reuses an existing, possibly pre-configured,
    /// hyper `Client`.
    pub fn with_client(c: Client) -> Self {
        HttpClientFetcher { c: c }
    }
}
impl Fetcher for HttpClientFetcher {
    /// Issue an HTTP request with the given method and return the response
    /// body as a boxed reader.
    ///
    /// Errors from URL parsing or the underlying transport are converted
    /// into `Box<error::Error>` and propagated to the caller.
    fn fetch<U: IntoUrl>(&mut self, method: Method, url: U) -> Result<Box<Read>, Box<error::Error>> {
        // TODO: filters?
        // `?` replaces the deprecated `try!` macro; both perform the same
        // `From`-based error conversion on the `Err` path.
        let resp = self.c.request(method, url).send()?;
        // TODO: post filters?
        Ok(Box::new(resp) as Box<Read>)
    }
}
#[cfg(test)]
mod tests {
    use hyper::method::Method;
    use super::{Fetcher, HttpClientFetcher};
    // NOTE(review): this test performs a real HTTPS round-trip to
    // google.com, so it requires network access and fails offline;
    // consider `#[ignore]` or a local test server.
    #[test]
    fn test_will_fetch() {
        let mut cf = HttpClientFetcher::new();
        let mut r = cf.fetch(Method::Get, "https://www.google.com").unwrap();
        let mut data = String::new();
        r.read_to_string(&mut data).unwrap();
        // Simple sanity check.
        assert!(data.len() > 10);
    }
}
| true
|
a01aff5eb14629a8e7c4a1591ec69566d386f3f3
|
Rust
|
dveeden/sysinfo
|
/src/unknown/disk.rs
|
UTF-8
| 642
| 2.578125
| 3
|
[
"MIT"
] |
permissive
|
//
// Sysinfo
//
// Copyright (c) 2017 Guillaume Gomez
//
use DiskExt;
use DiskType;
use std::ffi::OsStr;
use std::path::Path;
/// Struct containing a disk information.
///
/// Stub used on platforms where sysinfo has no disk support; instances are
/// presumably never constructed here, so the accessors are never called.
pub struct Disk {}
impl DiskExt for Disk {
    // NOTE(review): the stubs are inconsistent — `get_type`/`get_name`
    // panic via `unreachable!()` while the rest return harmless defaults.
    // Confirm no code path can construct a `Disk` on this platform before
    // relying on the panicking variants.
    fn get_type(&self) -> DiskType {
        unreachable!()
    }
    fn get_name(&self) -> &OsStr {
        unreachable!()
    }
    fn get_file_system(&self) -> &[u8] {
        &[]
    }
    fn get_mount_point(&self) -> &Path {
        Path::new("")
    }
    fn get_total_space(&self) -> u64 {
        0
    }
    fn get_available_space(&self) -> u64 {
        0
    }
    // Refresh is a no-op that always reports success.
    fn refresh(&mut self) -> bool {
        true
    }
}
| true
|
039f848993b3e7f226a9b7608c8a17ebbf0ebc91
|
Rust
|
exists-forall/nickel
|
/src/test_utils/expr.rs
|
UTF-8
| 5,408
| 2.796875
| 3
|
[] |
no_license
|
use std::rc::Rc;
use std::iter::repeat;
use super::rc_str::rc_str;
use expr::*;
use types::*;
pub fn unit(free_vars: usize, free_types: usize) -> Expr<Rc<String>> {
Expr::from_content(ExprContent::Unit {
free_vars,
free_types,
})
}
pub fn var(usage: VarUsage, free_vars: usize, free_types: usize, index: usize) -> Expr<Rc<String>> {
Expr::from_content(ExprContent::Var {
usage,
free_vars,
free_types,
index,
})
}
/// Wrap `body` in a `ForAll` with `param_count` anonymous (empty-named)
/// type parameters.
pub fn forall(param_count: usize, body: Expr<Rc<String>>) -> Expr<Rc<String>> {
    let type_params: Vec<_> = (0..param_count)
        .map(|_| TypeParam { name: rc_str("") })
        .collect();
    Expr::from_content(ExprContent::ForAll {
        type_params: Rc::new(type_params),
        body,
    })
}
pub fn forall_named(params: &[&str], body: Expr<Rc<String>>) -> Expr<Rc<String>> {
Expr::from_content(ExprContent::ForAll {
type_params: Rc::new(
params
.iter()
.cloned()
.map(|name| TypeParam { name: rc_str(name) })
.collect(),
),
body,
})
}
pub fn func(arg_type: Type<Rc<String>>, body: Expr<Rc<String>>) -> Expr<Rc<String>> {
Expr::from_content(ExprContent::Func {
arg_name: rc_str(""),
arg_type,
arg_phase: Phase::Dynamic,
body,
})
}
pub fn func_forall(
param_count: usize,
arg_type: Type<Rc<String>>,
body: Expr<Rc<String>>,
) -> Expr<Rc<String>> {
forall(param_count, func(arg_type, body))
}
pub fn func_named(
arg_name: &str,
arg_type: Type<Rc<String>>,
body: Expr<Rc<String>>,
) -> Expr<Rc<String>> {
Expr::from_content(ExprContent::Func {
arg_name: rc_str(arg_name),
arg_type,
arg_phase: Phase::Dynamic,
body,
})
}
pub fn func_forall_named(
params: &[&str],
arg_name: &str,
arg_type: Type<Rc<String>>,
body: Expr<Rc<String>>,
) -> Expr<Rc<String>> {
forall_named(params, func_named(arg_name, arg_type, body))
}
pub fn inst(receiver: Expr<Rc<String>>, type_params: &[Type<Rc<String>>]) -> Expr<Rc<String>> {
Expr::from_content(ExprContent::Inst {
receiver,
type_params: Rc::new(type_params.to_owned()),
})
}
pub fn app(callee: Expr<Rc<String>>, arg: Expr<Rc<String>>) -> Expr<Rc<String>> {
Expr::from_content(ExprContent::App { callee, arg })
}
pub fn app_forall(
callee: Expr<Rc<String>>,
type_params: &[Type<Rc<String>>],
arg: Expr<Rc<String>>,
) -> Expr<Rc<String>> {
app(inst(callee, type_params), arg)
}
pub fn pair(left: Expr<Rc<String>>, right: Expr<Rc<String>>) -> Expr<Rc<String>> {
Expr::from_content(ExprContent::Pair { left, right })
}
/// Build a `Let` binding `count` variables, all with empty placeholder
/// names.
pub fn let_vars(count: usize, val: Expr<Rc<String>>, body: Expr<Rc<String>>) -> Expr<Rc<String>> {
    let names: Vec<_> = (0..count).map(|_| rc_str("")).collect();
    Expr::from_content(ExprContent::Let {
        names: Rc::new(names),
        val,
        body,
    })
}
pub fn let_vars_named(
names: &[&str],
val: Expr<Rc<String>>,
body: Expr<Rc<String>>,
) -> Expr<Rc<String>> {
Expr::from_content(ExprContent::Let {
names: Rc::new(names.iter().map(|&name| rc_str(name)).collect()),
val,
body,
})
}
/// Build a `LetExists` with `type_count` anonymous type names and an
/// anonymous value name.
pub fn let_exists(
    type_count: usize,
    val: Expr<Rc<String>>,
    body: Expr<Rc<String>>,
) -> Expr<Rc<String>> {
    let type_names: Vec<_> = (0..type_count).map(|_| rc_str("")).collect();
    Expr::from_content(ExprContent::LetExists {
        type_names: Rc::new(type_names),
        val_name: rc_str(""),
        val,
        body,
    })
}
pub fn let_exists_named(
type_names: &[&str],
val_name: &str,
val: Expr<Rc<String>>,
body: Expr<Rc<String>>,
) -> Expr<Rc<String>> {
Expr::from_content(ExprContent::LetExists {
type_names: Rc::new(type_names.iter().map(|&name| rc_str(name)).collect()),
val_name: rc_str(val_name),
val,
body,
})
}
pub fn make_exists(
params: &[Type<Rc<String>>],
type_body: Type<Rc<String>>,
body: Expr<Rc<String>>,
) -> Expr<Rc<String>> {
Expr::from_content(ExprContent::MakeExists {
params: Rc::new(params.iter().cloned().map(|ty| (rc_str(""), ty)).collect()),
type_body,
body,
})
}
pub fn make_exists_named(
params: &[(&str, Type<Rc<String>>)],
type_body: Type<Rc<String>>,
body: Expr<Rc<String>>,
) -> Expr<Rc<String>> {
Expr::from_content(ExprContent::MakeExists {
params: Rc::new(
params
.iter()
.cloned()
.map(|(name, ty)| (rc_str(name), ty))
.collect(),
),
type_body,
body,
})
}
pub fn cast(
type_body: Type<Rc<String>>,
equivalence: Expr<Rc<String>>,
body: Expr<Rc<String>>,
) -> Expr<Rc<String>> {
cast_named("", type_body, equivalence, body)
}
pub fn cast_named(
param_name: &str,
type_body: Type<Rc<String>>,
equivalence: Expr<Rc<String>>,
body: Expr<Rc<String>>,
) -> Expr<Rc<String>> {
Expr::from_content(ExprContent::Cast {
param: TypeParam {
name: rc_str(param_name),
},
type_body,
equivalence,
body,
})
}
pub fn intrinsic(intrinsic: Intrinsic, free_vars: usize, free_types: usize) -> Expr<Rc<String>> {
Expr::from_content(ExprContent::Intrinsic {
intrinsic,
free_vars,
free_types,
})
}
| true
|
2b4faa703a1e96d34c39e9ce5e1ce843d6489160
|
Rust
|
geom3trik/tooro-editor
|
/src/ui/style.rs
|
UTF-8
| 11,308
| 2.546875
| 3
|
[
"MIT"
] |
permissive
|
//! Style definitions for the different elements
use iced::{button, checkbox, container, pick_list, slider, Background, Color, Vector};
/// Default window width
pub const WINDOW_WIDTH: u32 = 1024;
/// Default window height
pub const WINDOW_HEIGHT: u32 = 655;
/// Common element padding
pub const SECTION_PADDING: u16 = 7;
/// Common element spacing
pub const SECTION_SPACING: u16 = 1;
/// Text size for section labels
pub const SECTION_LABEL_TEXT_SIZE: u16 = 16;
/// Text size for parameter labels
pub const PARAM_LABEL_TEXT_SIZE: u16 = 14;
/// Width of parameter labels
pub const PARAM_LABEL_WIDTH: u16 = 65;
/// Width of parameter values
pub const PARAM_VALUE_WIDTH: u16 = 25;
/// Text size of dropdown menu items
pub const LIST_ITEM_TEXT_SIZE: u16 = 13;
/// Button text size
pub const BUTTON_TEXT_SIZE: u16 = 14;
/// Text size of status bar items
pub const STATUS_TEXT_SIZE: u16 = 14;
/// Text color for all section elements
const SECTION_TEXT_COLOR: Color = Color::from_rgb(0_f32, 0_f32, 0_f32);
/// Color for active elements
const ACTIVE: Color = Color::from_rgb(
0x20 as f32 / 255.0,
0x20 as f32 / 255.0,
0x20 as f32 / 255.0,
);
/// Color for hovered elements
const HOVERED: Color = Color::from_rgb(
0x67 as f32 / 255.0,
0x7B as f32 / 255.0,
0xC4 as f32 / 255.0,
);
/// Surface color for checkboxes
const SURFACE: Color = Color::from_rgb(
0x20 as f32 / 255.0,
0x20 as f32 / 255.0,
0x20 as f32 / 255.0,
);
/// Styles for the oscillator sections
pub struct OscSection;
impl container::StyleSheet for OscSection {
fn style(&self) -> container::Style {
container::Style {
text_color: Some(SECTION_TEXT_COLOR),
background: Some(Background::Color(Color::from_rgb8(0xab, 0xa3, 0x39))),
border_width: 0.0,
border_color: Color::from_rgb(0.7, 0.7, 0.7),
border_radius: 5.0,
}
}
}
/// Styles for the extra section
pub struct ExtraSection;
impl container::StyleSheet for ExtraSection {
fn style(&self) -> container::Style {
container::Style {
text_color: Some(SECTION_TEXT_COLOR),
background: Some(Background::Color(Color::from_rgb8(0xf9, 0xb0, 0x8b))),
border_width: 0.0,
border_color: Color::from_rgb(0.7, 0.7, 0.7),
border_radius: 5.0,
}
}
}
/// Styles for the shaper section
pub struct ShaperSection;
impl container::StyleSheet for ShaperSection {
fn style(&self) -> container::Style {
container::Style {
text_color: Some(SECTION_TEXT_COLOR),
background: Some(Background::Color(Color::from_rgb8(0xd8, 0x00, 0x00))),
border_width: 0.0,
border_color: Color::from_rgb(0.7, 0.7, 0.7),
border_radius: 5.0,
}
}
}
/// Styles for the filter section
pub struct FilterSection;
impl container::StyleSheet for FilterSection {
fn style(&self) -> container::Style {
container::Style {
text_color: Some(SECTION_TEXT_COLOR),
background: Some(Background::Color(Color::from_rgb8(0xd8, 0x00, 0x00))),
border_width: 0.0,
border_color: Color::from_rgb(0.7, 0.7, 0.7),
border_radius: 5.0,
}
}
}
/// Styles for the amplifier section
pub struct AmpSection;
impl container::StyleSheet for AmpSection {
fn style(&self) -> container::Style {
container::Style {
text_color: Some(SECTION_TEXT_COLOR),
background: Some(Background::Color(Color::from_rgb8(0x65, 0xa4, 0x7e))),
border_width: 0.0,
border_color: Color::from_rgb(0.7, 0.7, 0.7),
border_radius: 5.0,
}
}
}
/// Styles for the LFO sections
pub struct LFOSection;
impl container::StyleSheet for LFOSection {
fn style(&self) -> container::Style {
container::Style {
text_color: Some(SECTION_TEXT_COLOR),
background: Some(Background::Color(Color::from_rgb8(0xd2, 0x6a, 0x25))),
border_width: 0.0,
border_color: Color::from_rgb(0.7, 0.7, 0.7),
border_radius: 5.0,
}
}
}
/// Styles for the envelope sections
pub struct EnvSection;
impl container::StyleSheet for EnvSection {
fn style(&self) -> container::Style {
container::Style {
text_color: Some(SECTION_TEXT_COLOR),
background: Some(Background::Color(Color::from_rgb8(0xff, 0xbd, 0x00))),
border_width: 0.0,
border_color: Color::from_rgb(0.7, 0.7, 0.7),
border_radius: 5.0,
}
}
}
/// Styles for the arpeggiator section
pub struct ArpSection;
impl container::StyleSheet for ArpSection {
fn style(&self) -> container::Style {
container::Style {
text_color: Some(SECTION_TEXT_COLOR),
background: Some(Background::Color(Color::from_rgb8(0xf9, 0xb0, 0x8b))),
border_width: 0.0,
border_color: Color::from_rgb(0.7, 0.7, 0.7),
border_radius: 5.0,
}
}
}
/// Styles for the misc section
pub struct MiscSection;
impl container::StyleSheet for MiscSection {
fn style(&self) -> container::Style {
container::Style {
text_color: Some(SECTION_TEXT_COLOR),
background: Some(Background::Color(Color::from_rgb8(0xC0, 0xC0, 0xC0))),
border_width: 0.0,
border_color: Color::from_rgb(0.7, 0.7, 0.7),
border_radius: 5.0,
}
}
}
/// Styles for the modulation section
pub struct ModSection;
impl container::StyleSheet for ModSection {
fn style(&self) -> container::Style {
container::Style {
text_color: Some(SECTION_TEXT_COLOR),
background: Some(Background::Color(Color::from_rgb8(0xb4, 0xcb, 0xd9))),
border_width: 0.0,
border_color: Color::from_rgb(0.7, 0.7, 0.7),
border_radius: 5.0,
}
}
}
/// Styles for the FX section
pub struct FXSection;
impl container::StyleSheet for FXSection {
fn style(&self) -> container::Style {
container::Style {
text_color: Some(SECTION_TEXT_COLOR),
background: Some(Background::Color(Color::from_rgb8(0x65, 0xa4, 0x7e))),
border_width: 0.0,
border_color: Color::from_rgb(0.7, 0.7, 0.7),
border_radius: 5.0,
}
}
}
/// Styles for the mixer section
pub struct MixerSection;
impl container::StyleSheet for MixerSection {
fn style(&self) -> container::Style {
container::Style {
text_color: Some(SECTION_TEXT_COLOR),
background: Some(Background::Color(Color::from_rgb8(0xC0, 0xC0, 0xC0))),
border_width: 0.0,
border_color: Color::from_rgb(0.7, 0.7, 0.7),
border_radius: 5.0,
}
}
}
/// Styles for the MIDI section
pub struct MidiSection;
impl container::StyleSheet for MidiSection {
fn style(&self) -> container::Style {
container::Style {
text_color: Some(SECTION_TEXT_COLOR),
background: Some(Background::Color(Color::from_rgb8(0xC0, 0xC0, 0xC0))),
border_width: 0.0,
border_color: Color::from_rgb(0.7, 0.7, 0.7),
border_radius: 5.0,
}
}
}
/// Styles for the main window
pub struct MainWindow;
impl container::StyleSheet for MainWindow {
fn style(&self) -> container::Style {
container::Style {
text_color: Some(Color::from_rgb8(0xFF, 0xFF, 0xFF)),
background: Some(Background::Color(Color::from_rgb8(0x20, 0x20, 0x20))),
..Default::default()
}
}
}
/// Styles for all sliders
pub struct Slider;
impl slider::StyleSheet for Slider {
    /// Idle look: dark circular handle on a rail whose second color is the
    /// same hue faded to 10% alpha.
    fn active(&self) -> slider::Style {
        slider::Style {
            rail_colors: (ACTIVE, Color { a: 0.1, ..ACTIVE }),
            handle: slider::Handle {
                shape: slider::HandleShape::Circle { radius: 6.0 },
                color: ACTIVE,
                border_width: 0.0,
                border_color: Color::TRANSPARENT,
            },
        }
    }
    /// Hover look: identical to active except the handle takes the
    /// highlight color.
    fn hovered(&self) -> slider::Style {
        let active = self.active();
        slider::Style {
            handle: slider::Handle {
                color: HOVERED,
                ..active.handle
            },
            ..active
        }
    }
    /// Drag look: identical to active except the handle lightens to a
    /// mid-grey while being dragged.
    fn dragging(&self) -> slider::Style {
        let active = self.active();
        slider::Style {
            handle: slider::Handle {
                color: Color::from_rgb8(0x50, 0x50, 0x50),
                ..active.handle
            },
            ..active
        }
    }
}
/// Styles for all checkboxes
pub struct Checkbox;
impl checkbox::StyleSheet for Checkbox {
fn active(&self, is_checked: bool) -> checkbox::Style {
checkbox::Style {
background: if is_checked { ACTIVE } else { SURFACE }.into(),
checkmark_color: Color::WHITE,
border_radius: 2.0,
border_width: 1.0,
border_color: ACTIVE,
}
}
fn hovered(&self, is_checked: bool) -> checkbox::Style {
checkbox::Style {
background: Color {
a: 0.8,
..if is_checked { ACTIVE } else { HOVERED }
}
.into(),
..self.active(is_checked)
}
}
}
/// Styles for all dropdown menus
pub struct PickList;
impl pick_list::StyleSheet for PickList {
fn active(&self) -> pick_list::Style {
pick_list::Style {
text_color: Color::from_rgb8(0xFF, 0xFF, 0xFF),
background: Background::Color(Color::from_rgb8(0x20, 0x20, 0x20)),
border_radius: 5.0,
border_width: 1.0,
border_color: Color::from_rgb8(0x80, 0x80, 0x80),
icon_size: 0.5,
}
}
fn menu(&self) -> pick_list::Menu {
pick_list::Menu {
text_color: Color::from_rgb8(0xFF, 0xFF, 0xFF),
background: Background::Color(Color::from_rgb8(0x20, 0x20, 0x20)),
border_width: 1.0,
border_color: Color::from_rgb8(0x80, 0x80, 0x80),
selected_text_color: Color::from_rgb8(0xFF, 0xFF, 0xFF),
selected_background: Background::Color(Color::from_rgb8(0x80, 0x80, 0x80)),
}
}
fn hovered(&self) -> pick_list::Style {
pick_list::Style {
background: Background::Color(HOVERED),
..self.active()
}
}
}
/// Different button variations
#[allow(dead_code)]
pub enum Button {
Primary,
Secondary,
}
impl button::StyleSheet for Button {
fn active(&self) -> button::Style {
button::Style {
background: Some(Background::Color(match self {
Button::Primary => Color::from_rgb(0.11, 0.42, 0.87),
Button::Secondary => Color::from_rgb(0.5, 0.5, 0.5),
})),
border_radius: 5.0,
shadow_offset: Vector::new(1.0, 1.0),
text_color: Color::from_rgb8(0xEE, 0xEE, 0xEE),
..button::Style::default()
}
}
fn hovered(&self) -> button::Style {
button::Style {
text_color: Color::WHITE,
shadow_offset: Vector::new(1.0, 2.0),
..self.active()
}
}
}
| true
|
368de44c7a44fe1133686177587e891eebfe3c0a
|
Rust
|
augustocdias/bail-out
|
/src/lib.rs
|
UTF-8
| 4,818
| 4.1875
| 4
|
[
"MIT"
] |
permissive
|
//! This library is inspired in the `ensure` and `bail` macros from `anyhow`. The difference is that it is not tied to anyhow's types.
//! Many libraries have their own error types and using the anyhow's `ensure` macro doesn't work because it returns an anyhow error. This library intends to work with any type.
//! This library also provides the `assure` macro, that does the same but evaluates to a `Result` instead of returning. This is useful for using inside `try` blocks.
/// Checks that the condition holds, evaluating to `Ok(())` when it does and
/// to `Err(error)` otherwise. Unlike `ensure!`, this does not return early,
/// so the resulting `Result<(), ERROR>` can be combined further or used
/// inside `try` blocks.
/// Example:
/// ```
/// use bail_out::*;
/// fn test_err() -> Result<(), &'static str> {
///     assure!(false, "error")
/// }
/// assert_eq!(test_err(), Err("error"));
///
/// fn test_ok() -> Result<(), &'static str> {
///     assure!(true, "error")
/// }
/// assert!(test_ok().is_ok());
/// ```
#[macro_export]
macro_rules! assure {
    ($cond:expr, $error:expr) => {
        if $cond {
            Ok(())
        } else {
            Err($error)
        }
    };
}
/// Checks that the condition holds, evaluating to `Ok(value)` when it does
/// and to `Err(error)` otherwise — a non-returning counterpart of a guard.
/// Only the selected branch's expression is evaluated.
/// Example:
/// ```
/// use bail_out::*;
/// fn test_err() -> Result<&'static str, &'static str> {
///     assure_or!(false, "ok", "error")
/// }
/// assert_eq!(test_err(), Err("error"));
///
/// fn test_ok() -> Result<&'static str, &'static str> {
///     assure_or!(true, "ok", "error")
/// }
/// assert_eq!(test_ok(), Ok("ok"));
/// ```
#[macro_export]
macro_rules! assure_or {
    ($cond:expr, $ok: expr, $error:expr) => {
        if $cond {
            Ok($ok)
        } else {
            Err($error)
        }
    };
}
/// Ensures the condition is not met. This evaluates to a `Result<(), ERROR>`
/// This macro is equivalent to `if cond { Err(error) } else { Ok(()) }`.
/// Example:
/// ```
/// use bail_out::*;
/// fn test_err() -> Result<(), &'static str> {
/// assure_not!(true, "error")
/// }
/// assert_eq!(test_err(), Err("error"));
///
/// fn test_ok() -> Result<(), &'static str> {
/// assure_not!(false, "error")
/// }
/// assert!(test_ok().is_ok());
/// ```
#[macro_export]
macro_rules! assure_not {
($cond:expr, $error:expr) => {
if $cond {
Err($error)
} else {
Ok(())
}
};
}
/// Ensures the condition is not met or returns the value. This evaluates to a `Result<VALUE, ERROR>`
/// This macro is equivalent to `if cond { Err(error) } else { Ok(value) }`.
/// Example:
/// ```
/// use bail_out::*;
/// fn test_err() -> Result<&'static str, &'static str> {
/// assure_not_or!(true, "ok", "error")
/// }
/// assert_eq!(test_err(), Err("error"));
///
/// fn test_ok() -> Result<&'static str, &'static str> {
/// assure_not_or!(false, "ok", "error")
/// }
/// assert_eq!(test_ok(), Ok("ok"));
/// ```
#[macro_export]
macro_rules! assure_not_or {
($cond:expr, $ok:expr, $error:expr) => {
if $cond {
Err($error)
} else {
Ok($ok)
}
};
}
/// Return early with an error if the condition is false. This ensures the condition is met.
/// This macro is equivalent to `if !cond { return Err(error) }`.
/// Example:
/// ```
/// use bail_out::*;
/// fn test_err() -> Result<(), &'static str> {
/// ensure!(false, "error");
/// Ok(())
/// }
/// assert_eq!(test_err(), Err("error"));
///
/// fn test_ok() -> Result<(), &'static str> {
/// ensure!(true, "ok");
/// Ok(())
/// }
/// assert_eq!(test_ok(), Ok(()));
/// ```
#[macro_export]
macro_rules! ensure {
($cond:expr, $error:expr) => {
if !$cond {
$crate::bail!($error);
}
};
}
/// Return early with an error if the condition is true. This ensures the condition is not met.
/// This macro is equivalent to `if cond { return Err(error) }`.
/// Example:
/// ```
/// use bail_out::*;
/// fn test_err() -> Result<(), &'static str> {
/// ensure_not!(true, "error");
/// Ok(())
/// }
/// assert_eq!(test_err(), Err("error"));
///
/// fn test_ok() -> Result<(), &'static str> {
/// ensure_not!(false, "ok");
/// Ok(())
/// }
/// assert_eq!(test_ok(), Ok(()));
/// ```
#[macro_export]
macro_rules! ensure_not {
($cond:expr, $error:expr) => {
if $cond {
$crate::bail!($error);
}
};
}
/// Return early with an error.
/// This macro is equivalent to `return Err(error)`.
/// Example:
/// ```
/// use bail_out::*;
/// fn bail_test() -> Result<(), &'static str> {
/// bail!("error");
/// Ok(())
/// }
/// assert_eq!(bail_test(), Err("error"));
/// ```
#[macro_export]
macro_rules! bail {
($error:expr) => {
return Err($error);
};
}
#[cfg(test)]
mod tests;
| true
|
59f592767e1f219ff9bf25bbfcff062427a50588
|
Rust
|
esqu1/too-many-ray-tracers
|
/src/main.rs
|
UTF-8
| 3,938
| 2.5625
| 3
|
[] |
no_license
|
#![feature(mutex_unlock)]
#![feature(trait_alias)]
mod camera;
mod color;
mod object;
mod ppm;
mod rasterizer;
mod vector;
use camera::Camera;
use color::Color;
use object::*;
use ppm::PPM;
use rasterizer::Rasterizer;
use std::sync::{Arc, Mutex};
use vector::{Vec3f, ORIGIN};
fn rasterize() {
let aspect_ratio = 16.0 / 9.0;
let img_length = 450;
let mut ppm = PPM::new(img_length, (img_length as f64 * aspect_ratio) as usize);
let mut rasterizer = Rasterizer::new();
// rasterizer.line(200, 200, 100, 200);
rasterizer.triangle(
Vec3f::new(100.0, 100.0, 2.0),
Vec3f::new(200.0, 150.0, 2.0),
Vec3f::new(100.0, 200.0, 2.0),
Color::new(0, 0, 125),
);
rasterizer.triangle(
Vec3f::new(120.0, 100.0, 4.0),
Vec3f::new(220.0, 150.0, 4.0),
Vec3f::new(120.0, 200.0, 4.0),
Color::new(0, 255, 0),
);
rasterizer.write_to_ppm(&mut ppm);
ppm.write_to_file(String::from("rasterized.ppm"));
}
/// Render the random-spheres scene to `test.ppm`.
///
/// Fix: the big-sphere exclusion test contained the HTML-escaped residue
/// `¢er` where `&center` belongs, which does not compile; restored
/// the intended borrow of `center`.
fn raytrace() {
    let aspect_ratio = 16.0 / 9.0;
    let img_length = 450;
    // Camera placed off-axis, looking at the origin.
    let origin = Vec3f::new(13.0, 2.0, 3.0);
    let lookat = ORIGIN;
    let focus_dist = 10.0;
    let mut camera = Camera::new(
        Arc::new(Mutex::new(PPM::new(
            img_length,
            (img_length as f64 * aspect_ratio) as usize,
        ))),
        origin,
        lookat,
        Vec3f::new(0.0, -1.0, 0.0),
        40.0,
        focus_dist,
        2.0,
    );
    let mut objects: Vec<Object> = vec![];
    // Huge grey sphere acting as the ground plane.
    objects.push(Object {
        shape: Arc::new(Sphere {
            center: Vec3f::new(0.0, -1000.0, -0.0),
            radius: 1000.0,
        }),
        material: Arc::new(DiffuseMaterial {
            color: Color::new(125, 125, 125),
        }),
    });
    let small_sphere_radius = 0.2;
    // Scatter small spheres on a jittered grid, skipping positions too
    // close to the large metal sphere at (4, 1, 0).
    for i in -11..11 {
        for j in -11..11 {
            let material_seed = rand::random::<f64>();
            let center = Vec3f::new(
                i as f64 + 0.9 * rand::random::<f64>(),
                small_sphere_radius,
                j as f64 + 0.9 * rand::random::<f64>(),
            );
            if (&center - &Vec3f::new(4.0, 0.2, 0.0)).norm() > 0.9 {
                let sphere = Arc::new(Sphere {
                    center,
                    radius: small_sphere_radius,
                });
                if material_seed < 0.8 {
                    // 80%: diffuse with a random color.
                    let random_color = Color::random();
                    objects.push(Object {
                        shape: sphere,
                        material: Arc::new(DiffuseMaterial {
                            color: random_color,
                        }),
                    })
                } else if material_seed < 0.95 {
                    // 15%: fuzzy metal. NOTE(review): seeds >= 0.95 place
                    // no sphere at all — presumably a dielectric material
                    // was planned here; confirm this gap is intentional.
                    let fuzz = rand::random::<f64>();
                    objects.push(Object {
                        shape: sphere,
                        material: Arc::new(MetalMaterial {
                            attenuation: Color::random(),
                            fuzz,
                        }),
                    })
                }
            }
        }
    }
    // Two fixed feature spheres: one diffuse, one perfect mirror.
    objects.push(Object {
        shape: Arc::new(Sphere {
            center: Vec3f::new(-4.0, 1.0, 0.0),
            radius: 1.0,
        }),
        material: Arc::new(DiffuseMaterial {
            color: Color::new(100, 50, 25),
        }),
    });
    objects.push(Object {
        shape: Arc::new(Sphere {
            center: Vec3f::new(4.0, 1.0, 0.0),
            radius: 1.0,
        }),
        material: Arc::new(MetalMaterial {
            attenuation: Color::new(120, 120, 120),
            fuzz: 0.0,
        }),
    });
    let world = Arc::new(World { objects });
    camera.write_ppm(world);
    // let gradient_ppm = draw_gradient(1080, 1920);
    camera
        .img
        .lock()
        .unwrap()
        .write_to_file(String::from("test.ppm"))
        .expect("I/O error during write");
}
fn main() {
rasterize();
}
| true
|
bf71e13c776b8b91c9a2d2d07a42f8f7388adcbf
|
Rust
|
remysucre/differential-datalog
|
/lib/log.rs
|
UTF-8
| 2,623
| 2.609375
| 3
|
[
"MIT"
] |
permissive
|
use std::sync;
use std::collections;
use std::ffi;
type log_callback_t = Box<dyn Fn(log_log_level_t, &str) + Send + Sync>;
lazy_static! {
/* Logger configuration for each module consists of the maximal enabled
* log level (messages above this level are ignored) and callback.
*/
static ref LOG_CONFIG: sync::RwLock<collections::HashMap<log_module_t, (log_callback_t, log_log_level_t)>> = {
sync::RwLock::new(collections::HashMap::new())
};
}
/*
* Logging API exposed to the DDlog program.
* (see detailed documentation in `log.dl`)
*/
/// Deliver `msg` at `level` for `module` to the module's registered
/// callback, if any. Always returns `true` so the DDlog function it backs
/// is total.
pub fn log_log(module: &log_module_t, level: &log_log_level_t, msg: &String) -> bool
{
    // Absent entry means logging is disabled for the module; per the
    // LOG_CONFIG comment, messages above the configured maximum level
    // are ignored.
    // NOTE(review): `msg: &String` would idiomatically be `&str`, but the
    // signature is presumably fixed by the DDlog code generator — confirm
    // before changing.
    if let Some((cb, current_level)) = LOG_CONFIG.read().unwrap().get(&module) {
        if *level <= *current_level {
            cb(*level, msg.as_str());
        }
    };
    true
}
/*
* Configuration API
* (detailed documentation in `ddlog_log.h`)
*
* `cb = None` - disables logging for the given module.
*
* NOTE: we set callback and log level simultaneously. A more flexible API
* would allow changing log level without changing the callback.
*/
pub fn log_set_callback(module: log_module_t, cb: Option<log_callback_t>, max_level: log_log_level_t)
{
match cb {
Some(cb) => {
LOG_CONFIG.write().unwrap().insert(module, (cb, max_level));
},
None => {
LOG_CONFIG.write().unwrap().remove(&module);
}
}
}
/*
* C bindings for the config API
*/
/// C entry point that installs (when `cb` is non-null) or clears (when it
/// is null) the logging callback for `module`. `cb_arg` is an opaque value
/// handed back to the callback on every invocation.
#[no_mangle]
pub unsafe extern "C" fn ddlog_log_set_callback(module: raw::c_int,
                                                cb: Option<extern "C" fn(arg: libc::uintptr_t,
                                                                         level: raw::c_int,
                                                                         msg: *const raw::c_char)>,
                                                cb_arg: libc::uintptr_t,
                                                max_level: raw::c_int)
{
    match cb {
        Some(cb) => {
            log_set_callback(module as log_module_t,
                             Some(Box::new(move |level, msg| {
                                 // The CString is a temporary that lives to
                                 // the end of this statement, so the pointer
                                 // is valid only for the duration of the
                                 // `cb` call — the C callback must copy the
                                 // string, never retain the pointer.
                                 // `unwrap_or_default()`: a message with an
                                 // interior NUL byte is delivered as the
                                 // empty string rather than erroring.
                                 cb(cb_arg,
                                    level as raw::c_int,
                                    ffi::CString::new(msg).unwrap_or_default().as_ptr())
                             })),
                             max_level as log_log_level_t)
        },
        None => {
            log_set_callback(module as log_module_t,
                             None,
                             max_level as log_log_level_t)
        }
    }
}
| true
|
20b6fdc75d1d5c4ca224bc0968521460b10b068d
|
Rust
|
kildevaeld/keyval-rs
|
/keyval/src/codec.rs
|
UTF-8
| 1,870
| 2.890625
| 3
|
[] |
no_license
|
#[allow(unused_imports)]
use crate::{Error, Raw, Value};
/// Base trait for values that can be encoded using serde
pub trait Codec<T: serde::Serialize + serde::de::DeserializeOwned>:
Value + AsRef<T> + AsMut<T>
{
/// Convert back into inner value
fn into_inner(self) -> T;
}
#[macro_export]
/// Define a codec type and implement the Codec trait
macro_rules! codec {
($x:ident) => {
/// Codec implementation
#[derive(Debug)]
pub struct $x<T: serde::Serialize + serde::de::DeserializeOwned>(pub T);
impl<T: serde::Serialize + serde::de::DeserializeOwned> AsRef<T> for $x<T> {
fn as_ref(&self) -> &T {
&self.0
}
}
impl<T: serde::Serialize + serde::de::DeserializeOwned> AsMut<T> for $x<T> {
fn as_mut(&mut self) -> &mut T {
&mut self.0
}
}
impl<T: serde::Serialize + serde::de::DeserializeOwned> Codec<T> for $x<T> {
fn into_inner(self) -> T {
self.0
}
}
impl<T: Clone + serde::Serialize + serde::de::DeserializeOwned> Clone for $x<T> {
fn clone(&self) -> Self {
$x(self.0.clone())
}
}
};
($x:ident, {$ser:expr, $de:expr}) => {
codec!($x);
impl<T: serde::Serialize + serde::de::DeserializeOwned> Value for $x<T> {
fn to_raw(self) -> Result<Raw, Error> {
let x = $ser(&self.0)?;
Ok(x.into())
}
fn from_raw(r: Raw) -> Result<Self, Error> {
let x = $de(&r)?;
Ok($x(x))
}
}
};
}
#[cfg(feature = "cbor")]
mod cbor_value {
use super::*;
codec!(Cbor, { serde_cbor::to_vec, serde_cbor::from_slice});
}
#[cfg(feature = "cbor")]
pub use cbor_value::*;
| true
|
3de64094497322ef0ea43bddaa0ac633a5031ce0
|
Rust
|
0rvar/advent-of-code-2017
|
/day13/src/main.rs
|
UTF-8
| 2,195
| 3.25
| 3
|
[] |
no_license
|
extern crate regex;
use regex::Regex;
use std::collections::HashMap;
/// AoC 2017 day 13: parse the firewall layout, then print the part-1
/// severity and the part-2 minimum safe delay.
fn main() {
    let input = include_str!("input.txt").trim();
    // let input = include_str!("alt_input.txt");

    // Compile the "depth: range" pattern once; previously the Regex was
    // rebuilt inside the loop for every input line.
    let re = Regex::new(r"(\d+): (\d+)").unwrap();
    let mut range_by_depth: HashMap<usize, usize> = HashMap::new();
    let mut max_depth = 0usize;
    for line in input.lines() {
        let capture = re.captures_iter(line).last().expect("Regex should match");
        let depth: usize = capture.get(1).unwrap().as_str().parse().unwrap();
        let range: usize = capture.get(2).unwrap().as_str().parse().unwrap();
        range_by_depth.insert(depth, range);
        if depth > max_depth {
            max_depth = depth;
        }
    }
    // Part 1: severity of starting with no delay.
    println!("score #1: {:?}", calculate_score(0, max_depth, &range_by_depth));
    // Part 2: smallest delay scoring 0, i.e. never caught
    // (calculate_score reports at least 1 whenever any catch occurs).
    for initial_time in 0.. {
        let score = calculate_score(initial_time, max_depth, &range_by_depth);
        if score == 0 {
            println!("Initial delay: {}", initial_time);
            break;
        }
    }
}
/// Total severity of sending the packet after waiting `initial_time`
/// picoseconds.
///
/// The packet reaches layer `depth` at time `depth + initial_time` and is
/// caught when that layer's scanner sits at position 0. A catch contributes
/// `depth * range`, floored at 1 so a catch at depth 0 is distinguishable
/// from "never caught" (part 2 relies on 0 meaning "safe").
fn calculate_score(
    initial_time: usize,
    max_depth: usize,
    range_by_depth: &HashMap<usize, usize>
) -> usize {
    (0 .. max_depth + 1).map(|depth| {
        // Layers without a scanner can never catch the packet.
        let layer_range: usize = match range_by_depth.get(&depth) {
            Some(range) => *range,
            None => return 0
        };
        if calculate_position(depth + initial_time, layer_range) == 0 {
            // Caught: severity is depth * range, but at least 1.
            (depth * layer_range).max(1)
        } else {
            0
        }
    }).sum()
}
// Regression test using the example-style layer set: every start delay in
// 0..10 gets caught somewhere, while a delay of 10 slips through cleanly.
#[test]
fn test_calculate_score() {
    let max_depth = 6;
    let layers: Vec<(usize, usize)> = vec![(0, 3), (1, 2), (4, 4), (6, 4)];
    let range_by_depth: HashMap<usize, usize> = layers.into_iter().collect();
    for i in 0..10 {
        assert!(calculate_score(i, max_depth, &range_by_depth) > 0, "i = {}", i);
    }
    assert_eq!(calculate_score(10, max_depth, &range_by_depth), 0);
}
/// Position-phase of a scanner with the given `range` at time `time`.
///
/// A scanner bounces over `range` cells, so its motion is periodic with
/// period `2 * (range - 1)`; it is at position 0 exactly at multiples of
/// that period (callers only ever compare against 0). A range of 0 or 1
/// means the scanner has nowhere to move, so it is treated as parked at
/// position 0 — this also avoids the modulo-by-zero panic the unguarded
/// expression would hit for `range <= 1`.
fn calculate_position(time: usize, range: usize) -> usize {
    if range < 2 {
        return 0;
    }
    time % (2 * (range - 1))
}
| true
|
934f9d60289645ae8ec7ebee6ca94fcb852da695
|
Rust
|
quietlychris/rust_c_arrays_ffi
|
/rust_arrays/src/test.rs
|
UTF-8
| 123
| 2.71875
| 3
|
[] |
no_license
|
// Smoke test: verifies the test harness itself is wired up.
#[test]
fn it_works() {
    let sum = 2 + 2;
    assert_eq!(sum, 4);
}
// The crate's helper should report the constant four.
#[test]
fn test_raf() {
    let expected = 4;
    assert_eq!(expected, crate::return_a_four());
}
| true
|
71f0ce0dc07a6bf8032fe6749cd0753cc0155195
|
Rust
|
tensorbase/tensorbase
|
/crates/arrow/src/util/integration_util.rs
|
UTF-8
| 39,688
| 2.5625
| 3
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-free-unknown"
] |
permissive
|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//! Utils for JSON integration testing
//!
//! These utilities define structs that read the integration JSON format for integration testing purposes.
use serde_derive::{Deserialize, Serialize};
use serde_json::{Map as SJMap, Number as VNumber, Value};
use crate::array::*;
use crate::datatypes::*;
use crate::error::Result;
use crate::record_batch::{RecordBatch, RecordBatchReader};
/// A struct that represents an Arrow file with a schema and record batches
#[derive(Deserialize, Serialize, Debug)]
pub struct ArrowJson {
    /// Schema shared by all batches in the file.
    pub schema: ArrowJsonSchema,
    /// The record batches, in file order.
    pub batches: Vec<ArrowJsonBatch>,
    /// Dictionary batches, if any; omitted from the JSON when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub dictionaries: Option<Vec<ArrowJsonDictionaryBatch>>,
}
/// A struct that partially reads the Arrow JSON schema.
///
/// Fields are left as JSON `Value` as they vary by `DataType`
#[derive(Deserialize, Serialize, Debug)]
pub struct ArrowJsonSchema {
    /// Top-level fields of the schema, in declaration order.
    pub fields: Vec<ArrowJsonField>,
}
/// Fields are left as JSON `Value` as they vary by `DataType`
#[derive(Deserialize, Serialize, Debug)]
pub struct ArrowJsonField {
    /// Field name.
    pub name: String,
    /// Type descriptor; kept as raw JSON because its shape varies by type.
    #[serde(rename = "type")]
    pub field_type: Value,
    /// Whether the field permits nulls.
    pub nullable: bool,
    /// Child fields for nested types (list, struct, map, ...).
    pub children: Vec<ArrowJsonField>,
    /// Dictionary encoding description, when the field is dictionary-encoded.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub dictionary: Option<ArrowJsonFieldDictionary>,
    /// Key/value metadata, serialized as an array of single-entry objects.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub metadata: Option<Value>,
}
impl From<&Field> for ArrowJsonField {
fn from(field: &Field) -> Self {
let metadata_value = match field.metadata() {
Some(kv_list) => {
let mut array = Vec::new();
for (k, v) in kv_list {
let mut kv_map = SJMap::new();
kv_map.insert(k.clone(), Value::String(v.clone()));
array.push(Value::Object(kv_map));
}
if !array.is_empty() {
Some(Value::Array(array))
} else {
None
}
}
_ => None,
};
Self {
name: field.name().to_string(),
field_type: field.data_type().to_json(),
nullable: field.is_nullable(),
children: vec![],
dictionary: None, // TODO: not enough info
metadata: metadata_value,
}
}
}
/// Dictionary-encoding description attached to a dictionary-encoded field.
#[derive(Deserialize, Serialize, Debug)]
pub struct ArrowJsonFieldDictionary {
    /// Identifier linking the field to its dictionary batch.
    pub id: i64,
    /// Integer type used for the dictionary indices.
    #[serde(rename = "indexType")]
    pub index_type: DictionaryIndexType,
    /// Whether dictionary values are ordered.
    #[serde(rename = "isOrdered")]
    pub is_ordered: bool,
}
/// Description of the integer type used for dictionary indices.
#[derive(Deserialize, Serialize, Debug)]
pub struct DictionaryIndexType {
    /// Type name (e.g. "int").
    pub name: String,
    /// Whether the index integer type is signed.
    #[serde(rename = "isSigned")]
    pub is_signed: bool,
    /// Width of the index integer type in bits.
    #[serde(rename = "bitWidth")]
    pub bit_width: i64,
}
/// A struct that partially reads the Arrow JSON record batch
#[derive(Deserialize, Serialize, Debug)]
pub struct ArrowJsonBatch {
    // Number of rows in the batch.
    count: usize,
    /// One entry per column, in schema order.
    pub columns: Vec<ArrowJsonColumn>,
}
/// A struct that partially reads the Arrow JSON dictionary batch
#[derive(Deserialize, Serialize, Debug)]
#[allow(non_snake_case)]
pub struct ArrowJsonDictionaryBatch {
    /// Dictionary id this batch provides values for.
    pub id: i64,
    /// The dictionary's values, encoded as an ordinary record batch.
    pub data: ArrowJsonBatch,
}
/// A struct that partially reads the Arrow JSON column/array
#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct ArrowJsonColumn {
    // Column name; must match the schema field name.
    name: String,
    /// Number of rows in the column.
    pub count: usize,
    /// Per-row validity flags (1 = valid, 0 = null) for primitive data.
    #[serde(rename = "VALIDITY")]
    pub validity: Option<Vec<u8>>,
    /// Raw per-row values; interpretation depends on the data type.
    #[serde(rename = "DATA")]
    pub data: Option<Vec<Value>>,
    #[serde(rename = "OFFSET")]
    pub offset: Option<Vec<Value>>, // leaving as Value as 64-bit offsets are strings
    /// Child columns for nested types.
    pub children: Option<Vec<ArrowJsonColumn>>,
}
impl ArrowJson {
    /// Compare the Arrow JSON with a record batch reader.
    ///
    /// The schemas must match, and every JSON batch must equal the next
    /// batch produced by the reader, in order. Any read error or premature
    /// end of the reader counts as a mismatch.
    pub fn equals_reader(&self, reader: &mut dyn RecordBatchReader) -> bool {
        if !self.schema.equals_schema(&reader.schema()) {
            return false;
        }

        self.batches.iter().all(|col| match reader.next() {
            Some(Ok(batch)) => col.equals_batch(&batch),
            _ => false,
        })
    }
}
impl ArrowJsonSchema {
    /// Compare the Arrow JSON schema with the Arrow `Schema`.
    ///
    /// Two schemas match when they have the same number of fields and each
    /// JSON field compares equal to the Arrow field at the same position.
    /// (Rewritten from an index loop to an iterator zip: same behavior,
    /// no manual indexing.)
    fn equals_schema(&self, schema: &Schema) -> bool {
        self.fields.len() == schema.fields().len()
            && self
                .fields
                .iter()
                .zip(schema.fields())
                .all(|(json_field, field)| json_field.equals_field(field))
    }
}
impl ArrowJsonField {
    /// Compare the Arrow JSON field with the Arrow `Field`
    ///
    /// NOTE(review): when conversion succeeds this uses `assert_eq!`, so a
    /// field mismatch panics (showing the diff) rather than returning
    /// `false`; `false` is only returned when the JSON field cannot be
    /// converted to an Arrow `Field` at all.
    fn equals_field(&self, field: &Field) -> bool {
        // convert to a field
        match self.to_arrow_field() {
            Ok(self_field) => {
                assert_eq!(&self_field, field, "Arrow fields not the same");
                true
            }
            Err(e) => {
                eprintln!(
                    "Encountered error while converting JSON field to Arrow field: {:?}",
                    e
                );
                false
            }
        }
    }

    /// Convert to an Arrow Field
    /// TODO: convert to use an Into
    fn to_arrow_field(&self) -> Result<Field> {
        // a bit regressive, but we have to convert the field to JSON in order to convert it
        let field = serde_json::to_value(self)?;
        Field::from(&field)
    }
}
impl ArrowJsonBatch {
    /// Compare the Arrow JSON record batch with a `RecordBatch`
    ///
    /// Row and column counts must match, then each column is decoded to a
    /// `Vec<Value>` via `json_from_col` and compared against the Arrow
    /// array with the type-specific `equals_json`. Unsupported types panic.
    fn equals_batch(&self, batch: &RecordBatch) -> bool {
        if self.count != batch.num_rows() {
            return false;
        }
        let num_columns = self.columns.len();
        if num_columns != batch.num_columns() {
            return false;
        }
        let schema = batch.schema();
        self.columns
            .iter()
            .zip(batch.columns())
            .zip(schema.fields())
            .all(|((col, arr), field)| {
                // compare each column based on its type
                if &col.name != field.name() {
                    return false;
                }
                let json_array: Vec<Value> = json_from_col(col, field.data_type());
                match field.data_type() {
                    DataType::Null => {
                        let arr: &NullArray =
                            arr.as_any().downcast_ref::<NullArray>().unwrap();
                        // NullArrays should have the same length, json_array is empty
                        arr.len() == col.count
                    }
                    DataType::Boolean => {
                        let arr = arr.as_any().downcast_ref::<BooleanArray>().unwrap();
                        arr.equals_json(&json_array.iter().collect::<Vec<&Value>>()[..])
                    }
                    DataType::Int8 => {
                        let arr = arr.as_any().downcast_ref::<Int8Array>().unwrap();
                        arr.equals_json(&json_array.iter().collect::<Vec<&Value>>()[..])
                    }
                    DataType::Int16 => {
                        let arr = arr.as_any().downcast_ref::<Int16Array>().unwrap();
                        arr.equals_json(&json_array.iter().collect::<Vec<&Value>>()[..])
                    }
                    // All 32-bit-backed temporal types reuse the Int32 comparison.
                    DataType::Int32 | DataType::Date32 | DataType::Time32(_) | DataType::Timestamp32(_) => {
                        let arr = Int32Array::from(arr.data().clone());
                        arr.equals_json(&json_array.iter().collect::<Vec<&Value>>()[..])
                    }
                    // All 64-bit-backed temporal types reuse the Int64 comparison.
                    DataType::Int64
                    | DataType::Date64
                    | DataType::Time64(_)
                    | DataType::Timestamp(_, _)
                    | DataType::Duration(_) => {
                        let arr = Int64Array::from(arr.data().clone());
                        arr.equals_json(&json_array.iter().collect::<Vec<&Value>>()[..])
                    }
                    DataType::Interval(IntervalUnit::YearMonth) => {
                        let arr = IntervalYearMonthArray::from(arr.data().clone());
                        arr.equals_json(&json_array.iter().collect::<Vec<&Value>>()[..])
                    }
                    DataType::Interval(IntervalUnit::DayTime) => {
                        let arr = IntervalDayTimeArray::from(arr.data().clone());
                        let x = json_array
                            .iter()
                            .map(|v| {
                                match v {
                                    Value::Null => Value::Null,
                                    Value::Object(v) => {
                                        // interval has days and milliseconds
                                        let days: i32 =
                                            v.get("days").unwrap().as_i64().unwrap()
                                                as i32;
                                        let milliseconds: i32 = v
                                            .get("milliseconds")
                                            .unwrap()
                                            .as_i64()
                                            .unwrap()
                                            as i32;
                                        // Bit-pack the two i32s into one i64 to
                                        // match the array's element encoding.
                                        let value: i64 = unsafe {
                                            std::mem::transmute::<[i32; 2], i64>([
                                                days,
                                                milliseconds,
                                            ])
                                        };
                                        Value::Number(VNumber::from(value))
                                    }
                                    // return null if Value is not an object
                                    _ => Value::Null,
                                }
                            })
                            .collect::<Vec<Value>>();
                        arr.equals_json(&x.iter().collect::<Vec<&Value>>()[..])
                    }
                    DataType::UInt8 => {
                        let arr = arr.as_any().downcast_ref::<UInt8Array>().unwrap();
                        arr.equals_json(&json_array.iter().collect::<Vec<&Value>>()[..])
                    }
                    DataType::UInt16 => {
                        let arr = arr.as_any().downcast_ref::<UInt16Array>().unwrap();
                        arr.equals_json(&json_array.iter().collect::<Vec<&Value>>()[..])
                    }
                    DataType::UInt32 => {
                        let arr = arr.as_any().downcast_ref::<UInt32Array>().unwrap();
                        arr.equals_json(&json_array.iter().collect::<Vec<&Value>>()[..])
                    }
                    DataType::UInt64 => {
                        let arr = arr.as_any().downcast_ref::<UInt64Array>().unwrap();
                        arr.equals_json(&json_array.iter().collect::<Vec<&Value>>()[..])
                    }
                    DataType::Float32 => {
                        let arr = arr.as_any().downcast_ref::<Float32Array>().unwrap();
                        arr.equals_json(&json_array.iter().collect::<Vec<&Value>>()[..])
                    }
                    DataType::Float64 => {
                        let arr = arr.as_any().downcast_ref::<Float64Array>().unwrap();
                        arr.equals_json(&json_array.iter().collect::<Vec<&Value>>()[..])
                    }
                    DataType::Binary => {
                        let arr = arr.as_any().downcast_ref::<BinaryArray>().unwrap();
                        arr.equals_json(&json_array.iter().collect::<Vec<&Value>>()[..])
                    }
                    DataType::LargeBinary => {
                        let arr =
                            arr.as_any().downcast_ref::<LargeBinaryArray>().unwrap();
                        arr.equals_json(&json_array.iter().collect::<Vec<&Value>>()[..])
                    }
                    DataType::FixedSizeBinary(_) => {
                        let arr =
                            arr.as_any().downcast_ref::<FixedSizeBinaryArray>().unwrap();
                        arr.equals_json(&json_array.iter().collect::<Vec<&Value>>()[..])
                    }
                    DataType::Utf8 => {
                        let arr = arr.as_any().downcast_ref::<StringArray>().unwrap();
                        arr.equals_json(&json_array.iter().collect::<Vec<&Value>>()[..])
                    }
                    DataType::LargeUtf8 => {
                        let arr =
                            arr.as_any().downcast_ref::<LargeStringArray>().unwrap();
                        arr.equals_json(&json_array.iter().collect::<Vec<&Value>>()[..])
                    }
                    DataType::List(_) => {
                        let arr = arr.as_any().downcast_ref::<ListArray>().unwrap();
                        arr.equals_json(&json_array.iter().collect::<Vec<&Value>>()[..])
                    }
                    DataType::LargeList(_) => {
                        let arr = arr.as_any().downcast_ref::<LargeListArray>().unwrap();
                        arr.equals_json(&json_array.iter().collect::<Vec<&Value>>()[..])
                    }
                    DataType::FixedSizeList(_, _) => {
                        let arr =
                            arr.as_any().downcast_ref::<FixedSizeListArray>().unwrap();
                        arr.equals_json(&json_array.iter().collect::<Vec<&Value>>()[..])
                    }
                    DataType::Struct(_) => {
                        let arr = arr.as_any().downcast_ref::<StructArray>().unwrap();
                        arr.equals_json(&json_array.iter().collect::<Vec<&Value>>()[..])
                    }
                    DataType::Map(_, _) => {
                        let arr = arr.as_any().downcast_ref::<MapArray>().unwrap();
                        arr.equals_json(&json_array.iter().collect::<Vec<&Value>>()[..])
                    }
                    DataType::Decimal(_, _) => {
                        let arr = arr.as_any().downcast_ref::<DecimalArray>().unwrap();
                        arr.equals_json(&json_array.iter().collect::<Vec<&Value>>()[..])
                    }
                    // Dictionary arrays dispatch again on the index type.
                    DataType::Dictionary(ref key_type, _) => match key_type.as_ref() {
                        DataType::Int8 => {
                            let arr = arr
                                .as_any()
                                .downcast_ref::<Int8DictionaryArray>()
                                .unwrap();
                            arr.equals_json(
                                &json_array.iter().collect::<Vec<&Value>>()[..],
                            )
                        }
                        DataType::Int16 => {
                            let arr = arr
                                .as_any()
                                .downcast_ref::<Int16DictionaryArray>()
                                .unwrap();
                            arr.equals_json(
                                &json_array.iter().collect::<Vec<&Value>>()[..],
                            )
                        }
                        DataType::Int32 => {
                            let arr = arr
                                .as_any()
                                .downcast_ref::<Int32DictionaryArray>()
                                .unwrap();
                            arr.equals_json(
                                &json_array.iter().collect::<Vec<&Value>>()[..],
                            )
                        }
                        DataType::Int64 => {
                            let arr = arr
                                .as_any()
                                .downcast_ref::<Int64DictionaryArray>()
                                .unwrap();
                            arr.equals_json(
                                &json_array.iter().collect::<Vec<&Value>>()[..],
                            )
                        }
                        DataType::UInt8 => {
                            let arr = arr
                                .as_any()
                                .downcast_ref::<UInt8DictionaryArray>()
                                .unwrap();
                            arr.equals_json(
                                &json_array.iter().collect::<Vec<&Value>>()[..],
                            )
                        }
                        DataType::UInt16 => {
                            let arr = arr
                                .as_any()
                                .downcast_ref::<UInt16DictionaryArray>()
                                .unwrap();
                            arr.equals_json(
                                &json_array.iter().collect::<Vec<&Value>>()[..],
                            )
                        }
                        DataType::UInt32 => {
                            let arr = arr
                                .as_any()
                                .downcast_ref::<UInt32DictionaryArray>()
                                .unwrap();
                            arr.equals_json(
                                &json_array.iter().collect::<Vec<&Value>>()[..],
                            )
                        }
                        DataType::UInt64 => {
                            let arr = arr
                                .as_any()
                                .downcast_ref::<UInt64DictionaryArray>()
                                .unwrap();
                            arr.equals_json(
                                &json_array.iter().collect::<Vec<&Value>>()[..],
                            )
                        }
                        t => panic!("Unsupported dictionary comparison for {:?}", t),
                    },
                    t => panic!("Unsupported comparison for {:?}", t),
                }
            })
    }

    /// Build an `ArrowJsonBatch` from a `RecordBatch`.
    ///
    /// Only `Int8` columns carry VALIDITY/DATA; every other type produces a
    /// column with just name and count.
    /// NOTE(review): the validity flags here look inverted relative to
    /// `merge_json_array` (a null row pushes 1, a valid row pushes 0) —
    /// verify against the integration format before relying on this output.
    pub fn from_batch(batch: &RecordBatch) -> ArrowJsonBatch {
        let mut json_batch = ArrowJsonBatch {
            count: batch.num_rows(),
            columns: Vec::with_capacity(batch.num_columns()),
        };

        for (col, field) in batch.columns().iter().zip(batch.schema().fields.iter()) {
            let json_col = match field.data_type() {
                DataType::Int8 => {
                    let col = col.as_any().downcast_ref::<Int8Array>().unwrap();

                    let mut validity: Vec<u8> = Vec::with_capacity(col.len());
                    let mut data: Vec<Value> = Vec::with_capacity(col.len());

                    for i in 0..col.len() {
                        if col.is_null(i) {
                            validity.push(1);
                            data.push(0i8.into());
                        } else {
                            validity.push(0);
                            data.push(col.value(i).into());
                        }
                    }

                    ArrowJsonColumn {
                        name: field.name().clone(),
                        count: col.len(),
                        validity: Some(validity),
                        data: Some(data),
                        offset: None,
                        children: None,
                    }
                }
                _ => ArrowJsonColumn {
                    name: field.name().clone(),
                    count: col.len(),
                    validity: None,
                    data: None,
                    offset: None,
                    children: None,
                },
            };

            json_batch.columns.push(json_col);
        }

        json_batch
    }
}
/// Convert an Arrow JSON column/array into a vector of `Value`
///
/// Dispatches on `data_type`: nested types recurse into their children,
/// 64-bit integer-like types normalize string-encoded numbers first, and
/// all other primitives are merged directly with their validity vector.
fn json_from_col(col: &ArrowJsonColumn, data_type: &DataType) -> Vec<Value> {
    match data_type {
        DataType::List(field) => json_from_list_col(col, field.data_type()),
        DataType::FixedSizeList(field, list_size) => {
            json_from_fixed_size_list_col(col, field.data_type(), *list_size as usize)
        }
        DataType::Struct(fields) => json_from_struct_col(col, fields),
        DataType::Map(field, keys_sorted) => json_from_map_col(col, field, *keys_sorted),
        DataType::Int64
        | DataType::UInt64
        | DataType::Date64
        | DataType::Time64(_)
        | DataType::Timestamp(_, _)
        | DataType::Duration(_) => {
            // convert int64 data from strings to numbers
            // (the integration JSON format may encode 64-bit values as
            // strings, so both representations are accepted here)
            let converted_col: Vec<Value> = col
                .data
                .clone()
                .unwrap()
                .iter()
                .map(|v| {
                    Value::Number(match v {
                        Value::Number(number) => number.clone(),
                        Value::String(string) => VNumber::from(
                            string
                                .parse::<i64>()
                                .expect("Unable to parse string as i64"),
                        ),
                        t => panic!("Cannot convert {} to number", t),
                    })
                })
                .collect();
            merge_json_array(
                col.validity.as_ref().unwrap().as_slice(),
                converted_col.as_slice(),
            )
        }
        // Null columns carry no DATA; handled by length checks upstream.
        DataType::Null => vec![],
        _ => merge_json_array(
            col.validity.as_ref().unwrap().as_slice(),
            &col.data.clone().unwrap(),
        ),
    }
}
/// Merge VALIDITY and DATA vectors from a primitive data type into a `Value` vector with nulls
///
/// A validity flag of 0 yields `Value::Null`, 1 keeps the datum, and any
/// other flag is rejected. Extra entries in the longer slice are ignored
/// (iteration stops at the shorter of the two).
fn merge_json_array(validity: &[u8], data: &[Value]) -> Vec<Value> {
    let mut merged = Vec::with_capacity(data.len());
    for (flag, datum) in validity.iter().zip(data) {
        let value = match flag {
            0 => Value::Null,
            1 => datum.clone(),
            _ => panic!("Validity data should be 0 or 1"),
        };
        merged.push(value);
    }
    merged
}
/// Convert an Arrow JSON column/array of a `DataType::Struct` into a vector of `Value`
///
/// Each row becomes a JSON object mapping every struct field name to that
/// row's value in the corresponding child column.
fn json_from_struct_col(col: &ArrowJsonColumn, fields: &[Field]) -> Vec<Value> {
    // Decode every child column first; children[c][r] is row r of child c.
    let children: Vec<Vec<Value>> = col
        .children
        .clone()
        .unwrap()
        .iter()
        .zip(fields)
        .map(|(child, field)| json_from_col(child, field.data_type()))
        .collect();

    // Assemble one JSON object per row from the decoded children.
    (0..col.count)
        .map(|row| {
            let mut object = serde_json::map::Map::new();
            for (field, column) in fields.iter().zip(&children) {
                object.insert(field.name().to_string(), column[row].clone());
            }
            Value::Object(object)
        })
        .collect()
}
/// Convert an Arrow JSON column/array of a `DataType::List` into a vector of `Value`
///
/// `data_type` is the type of the list's items. Row `i` is the slice of the
/// decoded child values between `offsets[i]` and `offsets[i + 1]`.
fn json_from_list_col(col: &ArrowJsonColumn, data_type: &DataType) -> Vec<Value> {
    let mut values = Vec::with_capacity(col.count);

    // get the inner array
    let child = &col.children.clone().expect("list type must have children")[0];
    // Offsets may be JSON strings (64-bit) or plain numbers.
    let offsets: Vec<usize> = col
        .offset
        .clone()
        .unwrap()
        .iter()
        .map(|o| match o {
            Value::String(s) => s.parse::<usize>().unwrap(),
            Value::Number(n) => n.as_u64().unwrap() as usize,
            _ => panic!(
                "Offsets should be numbers or strings that are convertible to numbers"
            ),
        })
        .collect();
    let inner = match data_type {
        DataType::List(ref field) => json_from_col(child, field.data_type()),
        // NOTE(review): this arm passes the outer `col`, not `child`, unlike
        // the Struct arm in `json_from_map_col` — confirm list-of-struct
        // columns decode correctly.
        DataType::Struct(fields) => json_from_struct_col(col, fields),
        _ => merge_json_array(
            child.validity.as_ref().unwrap().as_slice(),
            &child.data.clone().unwrap(),
        ),
    };

    for i in 0..col.count {
        match &col.validity {
            Some(validity) => match &validity[i] {
                0 => values.push(Value::Null),
                1 => {
                    values.push(Value::Array(inner[offsets[i]..offsets[i + 1]].to_vec()))
                }
                _ => panic!("Validity data should be 0 or 1"),
            },
            None => {
                // Null type does not have a validity vector
            }
        }
    }

    values
}
/// Convert an Arrow JSON column/array of a `DataType::FixedSizeList` into a vector of `Value`
///
/// Row `i` is the fixed-width slice of decoded child values
/// `[list_size * i, list_size * (i + 1))`.
fn json_from_fixed_size_list_col(
    col: &ArrowJsonColumn,
    data_type: &DataType,
    list_size: usize,
) -> Vec<Value> {
    let mut values = Vec::with_capacity(col.count);

    // get the inner array
    let child = &col.children.clone().expect("list type must have children")[0];
    let inner = match data_type {
        DataType::List(ref field) => json_from_col(child, field.data_type()),
        DataType::FixedSizeList(ref field, _) => json_from_col(child, field.data_type()),
        // NOTE(review): this arm passes the outer `col`, not `child` —
        // confirm fixed-size-list-of-struct columns decode correctly.
        DataType::Struct(fields) => json_from_struct_col(col, fields),
        _ => merge_json_array(
            child.validity.as_ref().unwrap().as_slice(),
            &child.data.clone().unwrap(),
        ),
    };

    for i in 0..col.count {
        match &col.validity {
            Some(validity) => match &validity[i] {
                0 => values.push(Value::Null),
                1 => values.push(Value::Array(
                    inner[(list_size * i)..(list_size * (i + 1))].to_vec(),
                )),
                _ => panic!("Validity data should be 0 or 1"),
            },
            None => {}
        }
    }

    values
}
/// Convert an Arrow JSON column/array of a `DataType::Map` into a vector of `Value`
///
/// Maps are encoded like variable-length lists of key/value structs; row `i`
/// is the slice of decoded entries between `offsets[i]` and `offsets[i + 1]`.
/// `_keys_sorted` is currently unused.
fn json_from_map_col(
    col: &ArrowJsonColumn,
    field: &Field,
    _keys_sorted: bool,
) -> Vec<Value> {
    let mut values = Vec::with_capacity(col.count);

    // get the inner array
    let child = &col.children.clone().expect("list type must have children")[0];
    // Offsets may be JSON strings (64-bit) or plain numbers.
    let offsets: Vec<usize> = col
        .offset
        .clone()
        .unwrap()
        .iter()
        .map(|o| match o {
            Value::String(s) => s.parse::<usize>().unwrap(),
            Value::Number(n) => n.as_u64().unwrap() as usize,
            _ => panic!(
                "Offsets should be numbers or strings that are convertible to numbers"
            ),
        })
        .collect();

    let inner = match field.data_type() {
        DataType::Struct(fields) => json_from_struct_col(child, fields),
        _ => panic!("Map child must be Struct"),
    };

    for i in 0..col.count {
        match &col.validity {
            Some(validity) => match &validity[i] {
                0 => values.push(Value::Null),
                1 => {
                    values.push(Value::Array(inner[offsets[i]..offsets[i + 1]].to_vec()))
                }
                _ => panic!("Validity data should be 0 or 1"),
            },
            None => {
                // Null type does not have a validity vector
            }
        }
    }

    values
}
#[cfg(test)]
mod tests {
    use super::*;

    use std::fs::File;
    use std::io::Read;
    use std::sync::Arc;

    use crate::buffer::Buffer;

    // Validates ArrowJsonSchema::equals_schema against a hand-built schema,
    // including a nested list field with a custom item name.
    #[test]
    fn test_schema_equality() {
        let json = r#"
        {
            "fields": [
                {
                    "name": "c1",
                    "type": {"name": "int", "isSigned": true, "bitWidth": 32},
                    "nullable": true,
                    "children": []
                },
                {
                    "name": "c2",
                    "type": {"name": "floatingpoint", "precision": "DOUBLE"},
                    "nullable": true,
                    "children": []
                },
                {
                    "name": "c3",
                    "type": {"name": "utf8"},
                    "nullable": true,
                    "children": []
                },
                {
                    "name": "c4",
                    "type": {
                        "name": "list"
                    },
                    "nullable": true,
                    "children": [
                        {
                            "name": "custom_item",
                            "type": {
                                "name": "int",
                                "isSigned": true,
                                "bitWidth": 32
                            },
                            "nullable": false,
                            "children": []
                        }
                    ]
                }
            ]
        }"#;
        let json_schema: ArrowJsonSchema = serde_json::from_str(json).unwrap();
        let schema = Schema::new(vec![
            Field::new("c1", DataType::Int32, true),
            Field::new("c2", DataType::Float64, true),
            Field::new("c3", DataType::Utf8, true),
            Field::new(
                "c4",
                DataType::List(Box::new(Field::new(
                    "custom_item",
                    DataType::Int32,
                    false,
                ))),
                true,
            ),
        ]);
        assert!(json_schema.equals_schema(&schema));
    }

    // End-to-end check: builds a RecordBatch covering most supported types
    // and compares it against the checked-in integration.json fixture.
    #[test]
    #[cfg_attr(miri, ignore)] // running forever
    fn test_arrow_data_equality() {
        let secs_tz = Some("Europe/Budapest".to_string());
        let millis_tz = Some("America/New_York".to_string());
        let micros_tz = Some("UTC".to_string());
        let nanos_tz = Some("Africa/Johannesburg".to_string());

        let schema = Schema::new(vec![
            {
                let mut f =
                    Field::new("bools-with-metadata-map", DataType::Boolean, true);
                f.set_metadata(Some(
                    [("k".to_string(), "v".to_string())]
                        .iter()
                        .cloned()
                        .collect(),
                ));
                f
            },
            {
                let mut f =
                    Field::new("bools-with-metadata-vec", DataType::Boolean, true);
                f.set_metadata(Some(
                    [("k2".to_string(), "v2".to_string())]
                        .iter()
                        .cloned()
                        .collect(),
                ));
                f
            },
            Field::new("bools", DataType::Boolean, true),
            Field::new("int8s", DataType::Int8, true),
            Field::new("int16s", DataType::Int16, true),
            Field::new("int32s", DataType::Int32, true),
            Field::new("int64s", DataType::Int64, true),
            Field::new("uint8s", DataType::UInt8, true),
            Field::new("uint16s", DataType::UInt16, true),
            Field::new("uint32s", DataType::UInt32, true),
            Field::new("uint64s", DataType::UInt64, true),
            Field::new("float32s", DataType::Float32, true),
            Field::new("float64s", DataType::Float64, true),
            Field::new("date_days", DataType::Date32, true),
            Field::new("date_millis", DataType::Date64, true),
            Field::new("time_secs", DataType::Time32(TimeUnit::Second), true),
            Field::new("time_millis", DataType::Time32(TimeUnit::Millisecond), true),
            Field::new("time_micros", DataType::Time64(TimeUnit::Microsecond), true),
            Field::new("time_nanos", DataType::Time64(TimeUnit::Nanosecond), true),
            Field::new("ts_secs", DataType::Timestamp(TimeUnit::Second, None), true),
            Field::new(
                "ts_millis",
                DataType::Timestamp(TimeUnit::Millisecond, None),
                true,
            ),
            Field::new(
                "ts_micros",
                DataType::Timestamp(TimeUnit::Microsecond, None),
                true,
            ),
            Field::new(
                "ts_nanos",
                DataType::Timestamp(TimeUnit::Nanosecond, None),
                true,
            ),
            Field::new(
                "ts_secs_tz",
                DataType::Timestamp(TimeUnit::Second, secs_tz.clone()),
                true,
            ),
            Field::new(
                "ts_millis_tz",
                DataType::Timestamp(TimeUnit::Millisecond, millis_tz.clone()),
                true,
            ),
            Field::new(
                "ts_micros_tz",
                DataType::Timestamp(TimeUnit::Microsecond, micros_tz.clone()),
                true,
            ),
            Field::new(
                "ts_nanos_tz",
                DataType::Timestamp(TimeUnit::Nanosecond, nanos_tz.clone()),
                true,
            ),
            Field::new("utf8s", DataType::Utf8, true),
            Field::new(
                "lists",
                DataType::List(Box::new(Field::new("item", DataType::Int32, true))),
                true,
            ),
            Field::new(
                "structs",
                DataType::Struct(vec![
                    Field::new("int32s", DataType::Int32, true),
                    Field::new("utf8s", DataType::Utf8, true),
                ]),
                true,
            ),
        ]);

        let bools_with_metadata_map =
            BooleanArray::from(vec![Some(true), None, Some(false)]);
        let bools_with_metadata_vec =
            BooleanArray::from(vec![Some(true), None, Some(false)]);
        let bools = BooleanArray::from(vec![Some(true), None, Some(false)]);
        let int8s = Int8Array::from(vec![Some(1), None, Some(3)]);
        let int16s = Int16Array::from(vec![Some(1), None, Some(3)]);
        let int32s = Int32Array::from(vec![Some(1), None, Some(3)]);
        let int64s = Int64Array::from(vec![Some(1), None, Some(3)]);
        let uint8s = UInt8Array::from(vec![Some(1), None, Some(3)]);
        let uint16s = UInt16Array::from(vec![Some(1), None, Some(3)]);
        let uint32s = UInt32Array::from(vec![Some(1), None, Some(3)]);
        let uint64s = UInt64Array::from(vec![Some(1), None, Some(3)]);
        let float32s = Float32Array::from(vec![Some(1.0), None, Some(3.0)]);
        let float64s = Float64Array::from(vec![Some(1.0), None, Some(3.0)]);
        let date_days = Date32Array::from(vec![Some(1196848), None, None]);
        let date_millis = Date64Array::from(vec![
            Some(167903550396207),
            Some(29923997007884),
            Some(30612271819236),
        ]);
        let time_secs =
            Time32SecondArray::from(vec![Some(27974), Some(78592), Some(43207)]);
        let time_millis = Time32MillisecondArray::from(vec![
            Some(6613125),
            Some(74667230),
            Some(52260079),
        ]);
        let time_micros =
            Time64MicrosecondArray::from(vec![Some(62522958593), None, None]);
        let time_nanos = Time64NanosecondArray::from(vec![
            Some(73380123595985),
            None,
            Some(16584393546415),
        ]);
        let ts_secs = TimestampSecondArray::from_opt_vec(
            vec![None, Some(193438817552), None],
            None,
        );
        let ts_millis = TimestampMillisecondArray::from_opt_vec(
            vec![None, Some(38606916383008), Some(58113709376587)],
            None,
        );
        let ts_micros =
            TimestampMicrosecondArray::from_opt_vec(vec![None, None, None], None);
        let ts_nanos = TimestampNanosecondArray::from_opt_vec(
            vec![None, None, Some(-6473623571954960143)],
            None,
        );
        let ts_secs_tz = TimestampSecondArray::from_opt_vec(
            vec![None, Some(193438817552), None],
            secs_tz,
        );
        let ts_millis_tz = TimestampMillisecondArray::from_opt_vec(
            vec![None, Some(38606916383008), Some(58113709376587)],
            millis_tz,
        );
        let ts_micros_tz =
            TimestampMicrosecondArray::from_opt_vec(vec![None, None, None], micros_tz);
        let ts_nanos_tz = TimestampNanosecondArray::from_opt_vec(
            vec![None, None, Some(-6473623571954960143)],
            nanos_tz,
        );
        let utf8s = StringArray::from(vec![Some("aa"), None, Some("bbb")]);

        // List column: values [null, 2, null, null] sliced by offsets
        // [0, 3, 4, 4] into three rows.
        let value_data = Int32Array::from(vec![None, Some(2), None, None]);
        let value_offsets = Buffer::from_slice_ref(&[0, 3, 4, 4]);
        let list_data_type =
            DataType::List(Box::new(Field::new("item", DataType::Int32, true)));
        let list_data = ArrayData::builder(list_data_type)
            .len(3)
            .add_buffer(value_offsets)
            .add_child_data(value_data.data().clone())
            .build()
            .unwrap();
        let lists = ListArray::from(list_data);

        let structs_int32s = Int32Array::from(vec![None, Some(-2), None]);
        let structs_utf8s = StringArray::from(vec![None, None, Some("aaaaaa")]);
        let structs = StructArray::from(vec![
            (
                Field::new("int32s", DataType::Int32, true),
                Arc::new(structs_int32s) as ArrayRef,
            ),
            (
                Field::new("utf8s", DataType::Utf8, true),
                Arc::new(structs_utf8s) as ArrayRef,
            ),
        ]);

        let record_batch = RecordBatch::try_new(
            Arc::new(schema.clone()),
            vec![
                Arc::new(bools_with_metadata_map),
                Arc::new(bools_with_metadata_vec),
                Arc::new(bools),
                Arc::new(int8s),
                Arc::new(int16s),
                Arc::new(int32s),
                Arc::new(int64s),
                Arc::new(uint8s),
                Arc::new(uint16s),
                Arc::new(uint32s),
                Arc::new(uint64s),
                Arc::new(float32s),
                Arc::new(float64s),
                Arc::new(date_days),
                Arc::new(date_millis),
                Arc::new(time_secs),
                Arc::new(time_millis),
                Arc::new(time_micros),
                Arc::new(time_nanos),
                Arc::new(ts_secs),
                Arc::new(ts_millis),
                Arc::new(ts_micros),
                Arc::new(ts_nanos),
                Arc::new(ts_secs_tz),
                Arc::new(ts_millis_tz),
                Arc::new(ts_micros_tz),
                Arc::new(ts_nanos_tz),
                Arc::new(utf8s),
                Arc::new(lists),
                Arc::new(structs),
            ],
        )
        .unwrap();
        let mut file = File::open("test/data/integration.json").unwrap();
        let mut json = String::new();
        file.read_to_string(&mut json).unwrap();
        let arrow_json: ArrowJson = serde_json::from_str(&json).unwrap();
        // test schemas
        assert!(arrow_json.schema.equals_schema(&schema));
        // test record batch
        assert!(arrow_json.batches[0].equals_batch(&record_batch));
    }
}
| true
|
4f1a4b5f5157f645ab60dde824e3d60c3fa3a999
|
Rust
|
CreateSomethingNew/WhyRust
|
/a6.rs
|
UTF-8
| 2,447
| 3.28125
| 3
|
[] |
no_license
|
// Core expression AST (the "ExprC" of the PLAI-style interpreter).
enum ExprC {
    NumC { n : u32 },       // numeric literal
    BoolC { b : bool },     // boolean literal
    StringC { s : String }, // string literal
    IfC { i : Box<ExprC>, t : Box<ExprC>, e : Box<ExprC> }, // if cond / then / else
    IdC { i : String },     // identifier reference (not yet interpreted)
    LamC { params : Vec<String>, body : Box<ExprC> },       // lambda (not yet interpreted)
    AppC { fun_def : Box<ExprC>, params: Vec<Box<ExprC>> }  // application (not yet interpreted)
}
// Runtime values produced by `interp`.
enum ValueV {
    NumV { n : u32 },       // number
    BoolV { b : bool },     // boolean
    StringV { s : String }, // string
    PrimV { p : String },   // primitive operator, by name
    CloV { params : Vec<String>, body : Box<ExprC> } // closure
}
// Evaluate an expression down to a runtime value.
// NOTE(review): identifiers, lambdas and applications are not implemented
// yet and currently evaluate to the placeholder number 4.
fn interp(e: ExprC) -> ValueV {
    match e {
        ExprC::NumC { n } => ValueV::NumV { n },
        ExprC::BoolC { b } => ValueV::BoolV { b },
        ExprC::StringC { s } => ValueV::StringV { s },
        ExprC::IfC { i, t, e } => if_helper(i, t, e),
        _ => ValueV::NumV { n: 4 },
    }
}
// Evaluate an `if`: the condition must interpret to a boolean, then exactly
// one branch is evaluated.
fn if_helper(i: Box<ExprC>, t: Box<ExprC>, e: Box<ExprC>) -> ValueV {
    let condition = match interp(*i) {
        ValueV::BoolV { b } => b,
        _ => panic!("invalid in if"),
    };
    if condition { interp(*t) } else { interp(*e) }
}
// Render a runtime value as the string a user would see.
// The match is exhaustive over ValueV, so the old `_ => panic!` catch-all
// was unreachable (a compiler warning) and has been removed; the unused
// bindings in the CloV/PrimV arms are replaced with `..` to silence
// unused-variable warnings.
fn serialize(v: ValueV) -> String {
    match v {
        ValueV::NumV { n } => n.to_string(),
        ValueV::BoolV { b } => b.to_string(),
        ValueV::StringV { s } => s,
        // Closures and primops have no printable payload.
        ValueV::CloV { .. } => String::from("#<procedure>"),
        ValueV::PrimV { .. } => String::from("#<primop>"),
    }
}
// Smoke tests for the interpreter: literals plus both branches of `if`.
fn main() {
    // Literals of each primitive kind.
    let bool_true = ExprC::BoolC { b: true };
    let bool_false = ExprC::BoolC { b: false };
    let num_45 = ExprC::NumC { n: 45 };
    let string_test = ExprC::StringC { s: String::from("just a test") };

    // An `if` that takes the then-branch...
    let test_if = ExprC::IfC {
        i: Box::new(ExprC::BoolC { b: true }),
        t: Box::new(ExprC::NumC { n: 5 }),
        e: Box::new(ExprC::NumC { n: 3 }),
    };
    // ...and one that takes the else-branch.
    let test_if_2 = ExprC::IfC {
        i: Box::new(ExprC::BoolC { b: false }),
        t: Box::new(ExprC::NumC { n: 5 }),
        e: Box::new(ExprC::NumC { n: 3 }),
    };

    // Interpreting then serializing should round-trip to the expected text.
    assert_eq!("45", serialize(interp(num_45)));
    assert_eq!("false", serialize(interp(bool_false)));
    assert_eq!("true", serialize(interp(bool_true)));
    assert_eq!("just a test", serialize(interp(string_test)));
    assert_eq!("5", serialize(interp(test_if)));
    assert_eq!("3", serialize(interp(test_if_2)));
}
| true
|
f8ae1abab539d879f1289a710967827ed6a6197d
|
Rust
|
bluss/bluss.github.io
|
/petgraph/src/visit.rs
|
UTF-8
| 22,239
| 3.015625
| 3
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
//! Graph visitor algorithms.
//!
use fixedbitset::FixedBitSet;
use std::collections::{
HashSet,
VecDeque,
};
use std::hash::Hash;
use super::{
graphmap,
graph,
EdgeType,
EdgeDirection,
Graph,
GraphMap,
Incoming,
Outgoing,
};
use graph::{
IndexType,
};
#[cfg(feature = "stable_graph")]
use graph::stable::StableGraph;
/// Base trait for graphs that defines the node identifier.
pub trait Graphlike {
    /// Handle used to address a node in the graph.
    type NodeId: Clone;
}
/// `NeighborIter` gives access to the neighbors iterator.
pub trait NeighborIter<'a> : Graphlike {
    /// Iterator over the identifiers of adjacent nodes.
    type Iter: Iterator<Item=Self::NodeId>;

    /// Return an iterator that visits all neighbors of the node **n**.
    fn neighbors(&'a self, n: Self::NodeId) -> Self::Iter;
}
// Delegates to the inherent `Graph::neighbors`.
impl<'a, N, E: 'a, Ty, Ix> NeighborIter<'a> for Graph<N, E, Ty, Ix> where
    Ty: EdgeType,
    Ix: IndexType,
{
    type Iter = graph::Neighbors<'a, E, Ix>;
    fn neighbors(&'a self, n: graph::NodeIndex<Ix>) -> graph::Neighbors<'a, E, Ix>
    {
        Graph::neighbors(self, n)
    }
}
// Same delegation for `StableGraph`; compiled only with the
// `stable_graph` feature.
#[cfg(feature = "stable_graph")]
impl<'a, N, E: 'a, Ty, Ix> NeighborIter<'a> for StableGraph<N, E, Ty, Ix> where
    Ty: EdgeType,
    Ix: IndexType,
{
    type Iter = graph::stable::Neighbors<'a, E, Ix>;
    fn neighbors(&'a self, n: graph::NodeIndex<Ix>)
        -> graph::stable::Neighbors<'a, E, Ix>
    {
        StableGraph::neighbors(self, n)
    }
}
// Delegates to the inherent `GraphMap::neighbors`; `GraphMap` keys nodes by
// the node weight itself, hence the `Copy + Ord + Hash` bound.
impl<'a, N: 'a, E> NeighborIter<'a> for GraphMap<N, E>
    where N: Copy + Ord + Hash
{
    type Iter = graphmap::Neighbors<'a, N>;
    fn neighbors(&'a self, n: N) -> graphmap::Neighbors<'a, N>
    {
        GraphMap::neighbors(self, n)
    }
}
/// Wrapper type for walking the graph as if it is undirected
pub struct AsUndirected<G>(pub G);

/// Wrapper type for walking the graph as if all edges are reversed.
pub struct Reversed<G>(pub G);
// Neighbors ignoring edge direction, via `Graph::neighbors_undirected`.
impl<'a, 'b, N, E: 'a, Ty, Ix> NeighborIter<'a> for AsUndirected<&'b Graph<N, E, Ty, Ix>> where
    Ty: EdgeType,
    Ix: IndexType,
{
    type Iter = graph::Neighbors<'a, E, Ix>;
    fn neighbors(&'a self, n: graph::NodeIndex<Ix>) -> graph::Neighbors<'a, E, Ix>
    {
        Graph::neighbors_undirected(self.0, n)
    }
}
// In the reversed view, a node's neighbors are the sources of its incoming
// edges in the underlying graph.
impl<'a, 'b, N, E: 'a, Ty, Ix> NeighborIter<'a> for Reversed<&'b Graph<N, E, Ty, Ix>> where
    Ty: EdgeType,
    Ix: IndexType,
{
    type Iter = graph::Neighbors<'a, E, Ix>;
    fn neighbors(&'a self, n: graph::NodeIndex<Ix>) -> graph::Neighbors<'a, E, Ix>
    {
        Graph::neighbors_directed(self.0, n, EdgeDirection::Incoming)
    }
}
/// `NeighborsDirected` gives access to neighbors of both `Incoming` and
/// `Outgoing` edges of a node.
pub trait NeighborsDirected<'a> : Graphlike {
    /// Iterator over neighbor node identifiers in one edge direction.
    type NeighborsDirected: Iterator<Item=Self::NodeId>;
    /// Return an iterator that visits all neighbors of the node **n**.
    fn neighbors_directed(&'a self, n: Self::NodeId,
                          d: EdgeDirection) -> Self::NeighborsDirected;
}
impl<'a, N, E: 'a, Ty, Ix> NeighborsDirected<'a> for Graph<N, E, Ty, Ix>
    where Ty: EdgeType,
          Ix: IndexType,
{
    type NeighborsDirected = graph::Neighbors<'a, E, Ix>;
    fn neighbors_directed(&'a self, n: graph::NodeIndex<Ix>,
                          d: EdgeDirection) -> graph::Neighbors<'a, E, Ix>
    {
        Graph::neighbors_directed(self, n, d)
    }
}
#[cfg(feature = "stable_graph")]
impl<'a, N, E: 'a, Ty, Ix> NeighborsDirected<'a> for StableGraph<N, E, Ty, Ix>
    where Ty: EdgeType,
          Ix: IndexType,
{
    type NeighborsDirected = graph::stable::Neighbors<'a, E, Ix>;
    fn neighbors_directed(&'a self, n: graph::NodeIndex<Ix>, d: EdgeDirection)
        -> graph::stable::Neighbors<'a, E, Ix>
    {
        StableGraph::neighbors_directed(self, n, d)
    }
}
// Delegates to the wrapped graph with the direction flipped — this is what
// makes `Reversed` behave as if every edge were reversed.
impl<'a, 'b, G> NeighborsDirected<'a> for Reversed<&'b G>
    where G: NeighborsDirected<'a>,
{
    type NeighborsDirected = <G as NeighborsDirected<'a>>::NeighborsDirected;
    fn neighbors_directed(&'a self, n: G::NodeId,
                          d: EdgeDirection) -> Self::NeighborsDirected
    {
        self.0.neighbors_directed(n, d.opposite())
    }
}
/// Externals returns an iterator of all nodes that either have either no
/// incoming or no outgoing edges.
pub trait Externals<'a> : Graphlike {
    /// Iterator over the identifiers of "external" nodes.
    type Externals: Iterator<Item=Self::NodeId>;
    /// Return an iterator of all nodes with no edges in the given direction
    fn externals(&'a self, d: EdgeDirection) -> Self::Externals;
}
impl<'a, N: 'a, E, Ty, Ix> Externals<'a> for Graph<N, E, Ty, Ix>
    where Ty: EdgeType,
          Ix: IndexType,
{
    type Externals = graph::Externals<'a, N, Ty, Ix>;
    fn externals(&'a self, d: EdgeDirection) -> graph::Externals<'a, N, Ty, Ix> {
        Graph::externals(self, d)
    }
}
// In a reversed graph, sources and sinks swap roles, hence `d.opposite()`.
impl<'a, 'b, G> Externals<'a> for Reversed<&'b G>
    where G: Externals<'a>,
{
    type Externals = <G as Externals<'a>>::Externals;
    fn externals(&'a self, d: EdgeDirection) -> Self::Externals {
        self.0.externals(d.opposite())
    }
}
/// A mapping for storing the visited status for `NodeId` `N`.
pub trait VisitMap<N> {
    /// Mark the value as visited.
    /// Return **true** if the value is not already present.
    fn visit(&mut self, N) -> bool;
    /// Return whether the value has been visited before.
    fn is_visited(&self, &N) -> bool;
}
// Node indices map densely onto bit positions, so a bitset is the natural
// visit map for index-based graphs.
impl<Ix> VisitMap<graph::NodeIndex<Ix>> for FixedBitSet where
    Ix: IndexType,
{
    fn visit(&mut self, x: graph::NodeIndex<Ix>) -> bool {
        // Query membership before inserting so we can report first-visit.
        let present = self.contains(x.index());
        self.insert(x.index());
        !present
    }
    fn is_visited(&self, x: &graph::NodeIndex<Ix>) -> bool {
        self.contains(x.index())
    }
}
impl<Ix> VisitMap<graph::EdgeIndex<Ix>> for FixedBitSet where
    Ix: IndexType,
{
    fn visit(&mut self, x: graph::EdgeIndex<Ix>) -> bool {
        let present = self.contains(x.index());
        self.insert(x.index());
        !present
    }
    fn is_visited(&self, x: &graph::EdgeIndex<Ix>) -> bool {
        self.contains(x.index())
    }
}
// `HashSet::insert` already reports first-insertion, matching the `visit`
// contract directly.
impl<N: Eq + Hash> VisitMap<N> for HashSet<N> {
    fn visit(&mut self, x: N) -> bool {
        self.insert(x)
    }
    fn is_visited(&self, x: &N) -> bool {
        self.contains(x)
    }
}
/// A graph that can create a visitor map.
pub trait Visitable : Graphlike {
    /// The visit-map type this graph hands out (`FixedBitSet` for
    /// index-based graphs, `HashSet` for `GraphMap`).
    type Map: VisitMap<Self::NodeId>;
    /// Create a fresh, empty visit map sized for this graph.
    fn visit_map(&self) -> Self::Map;
}
/// A graph that can reset and resize its visitor map.
pub trait Revisitable : Visitable {
    /// Clear the map and resize it for the graph's current node count.
    fn reset_map(&self, &mut Self::Map);
}
impl<N, E, Ty, Ix> Graphlike for Graph<N, E, Ty, Ix> where
    Ix: IndexType,
{
    type NodeId = graph::NodeIndex<Ix>;
}
impl<N, E, Ty, Ix> Visitable for Graph<N, E, Ty, Ix> where
    Ty: EdgeType,
    Ix: IndexType,
{
    type Map = FixedBitSet;
    fn visit_map(&self) -> FixedBitSet { FixedBitSet::with_capacity(self.node_count()) }
}
impl<N, E, Ty, Ix> Revisitable for Graph<N, E, Ty, Ix>
    where Ty: EdgeType,
          Ix: IndexType,
{
    fn reset_map(&self, map: &mut Self::Map) {
        map.clear();
        // Grow in case nodes were added since the map was created.
        map.grow(self.node_count());
    }
}
#[cfg(feature = "stable_graph")]
impl<N, E, Ty, Ix> Graphlike for StableGraph<N, E, Ty, Ix> where
    Ix: IndexType,
{
    type NodeId = graph::NodeIndex<Ix>;
}
#[cfg(feature = "stable_graph")]
impl<N, E, Ty, Ix> Visitable for StableGraph<N, E, Ty, Ix> where
    Ty: EdgeType,
    Ix: IndexType,
{
    type Map = FixedBitSet;
    fn visit_map(&self) -> FixedBitSet { FixedBitSet::with_capacity(self.node_count()) }
}
#[cfg(feature = "stable_graph")]
impl<N, E, Ty, Ix> Revisitable for StableGraph<N, E, Ty, Ix>
    where Ty: EdgeType,
          Ix: IndexType,
{
    fn reset_map(&self, map: &mut Self::Map) {
        map.clear();
        map.grow(self.node_count());
    }
}
impl<'a, G> Revisitable for Reversed<&'a G>
    where G: Revisitable
{
    fn reset_map(&self, map: &mut Self::Map) {
        self.0.reset_map(map);
    }
}
impl<N: Clone, E> Graphlike for GraphMap<N, E>
{
    type NodeId = N;
}
impl<N, E> Visitable for GraphMap<N, E>
    where N: Copy + Ord + Hash
{
    type Map = HashSet<N>;
    fn visit_map(&self) -> HashSet<N> { HashSet::with_capacity(self.node_count()) }
}
impl<N, E> Revisitable for GraphMap<N, E>
    where N: Copy + Ord + Hash
{
    fn reset_map(&self, map: &mut Self::Map) {
        // A hash set needs no resizing, only clearing.
        map.clear();
    }
}
// The wrappers walk the same node set as the underlying graph, so they
// reuse its node identifiers and visit maps unchanged.
impl<'a, G: Graphlike> Graphlike for AsUndirected<&'a G>
{
    type NodeId = G::NodeId;
}
impl<'a, G: Graphlike> Graphlike for Reversed<&'a G>
{
    type NodeId = G::NodeId;
}
impl<'a, G: Visitable> Visitable for AsUndirected<&'a G>
{
    type Map = G::Map;
    fn visit_map(&self) -> G::Map {
        self.0.visit_map()
    }
}
impl<'a, G: Visitable> Visitable for Reversed<&'a G>
{
    type Map = G::Map;
    fn visit_map(&self) -> G::Map {
        self.0.visit_map()
    }
}
/// Create or access the adjacency matrix of a graph
pub trait GetAdjacencyMatrix : Graphlike {
    /// Precomputed adjacency data; `()` when the graph can answer
    /// adjacency queries directly.
    type AdjMatrix;
    /// Build (or fetch) the adjacency matrix for this graph.
    fn adjacency_matrix(&self) -> Self::AdjMatrix;
    /// Return whether there is an edge from `a` to `b`, using `matrix`.
    fn is_adjacent(&self, matrix: &Self::AdjMatrix, a: Self::NodeId, b: Self::NodeId) -> bool;
}
/// The `GraphMap` keeps an adjacency matrix internally.
impl<N, E> GetAdjacencyMatrix for GraphMap<N, E>
    where N: Copy + Ord + Hash
{
    type AdjMatrix = ();
    #[inline]
    fn adjacency_matrix(&self) { }
    #[inline]
    fn is_adjacent(&self, _: &(), a: N, b: N) -> bool {
        self.contains_edge(a, b)
    }
}
/// A depth first search (DFS) of a graph.
///
/// Using a **Dfs** you can run a traversal over a graph while still retaining
/// mutable access to it, if you use it like the following example:
///
/// ```
/// use petgraph::{Graph, Dfs};
///
/// let mut graph = Graph::<_,()>::new();
/// let a = graph.add_node(0);
///
/// let mut dfs = Dfs::new(&graph, a);
/// while let Some(nx) = dfs.next(&graph) {
///     // we can access `graph` mutably here still
///     graph[nx] += 1;
/// }
///
/// assert_eq!(graph[a], 1);
/// ```
///
/// **Note:** The algorithm may not behave correctly if nodes are removed
/// during iteration. It may not necessarily visit added nodes or edges.
#[derive(Clone, Debug)]
pub struct Dfs<N, VM> {
    /// The stack of nodes to visit
    pub stack: Vec<N>,
    /// The map of discovered nodes
    pub discovered: VM,
}
impl<N, VM> Dfs<N, VM>
    where N: Clone,
          VM: VisitMap<N>,
{
    /// Create a new **Dfs**, using the graph's visitor map, and put **start**
    /// in the stack of nodes to visit.
    pub fn new<G>(graph: &G, start: N) -> Self
        where G: Visitable<NodeId=N, Map=VM>
    {
        let mut dfs = Dfs::empty(graph);
        dfs.move_to(start);
        dfs
    }
    /// Create a new **Dfs** using the graph's visitor map, and no stack.
    pub fn empty<G>(graph: &G) -> Self
        where G: Visitable<NodeId=N, Map=VM>
    {
        Dfs {
            stack: Vec::new(),
            discovered: graph.visit_map(),
        }
    }
    /// Keep the discovered map, but clear the visit stack and restart
    /// the dfs from a particular node.
    pub fn move_to(&mut self, start: N)
    {
        // Mark the start as discovered immediately so it is never re-pushed.
        self.discovered.visit(start.clone());
        self.stack.clear();
        self.stack.push(start);
    }
    /// Return the next node in the dfs, or **None** if the traversal is done.
    pub fn next<'a, G>(&mut self, graph: &'a G) -> Option<N> where
        G: Graphlike<NodeId=N>,
        G: NeighborIter<'a>,
    {
        // Pop one node, push its undiscovered successors, then emit it.
        // The loop body always returns, so at most one iteration runs.
        while let Some(node) = self.stack.pop() {
            for succ in graph.neighbors(node.clone()) {
                if self.discovered.visit(succ.clone()) {
                    self.stack.push(succ);
                }
            }
            return Some(node);
        }
        None
    }
}
/// An iterator for a depth first traversal of a graph.
pub struct DfsIter<'a, G: 'a + Visitable>
{
    graph: &'a G,
    dfs: Dfs<G::NodeId, G::Map>,
}
impl<'a, G: Visitable> DfsIter<'a, G>
{
    /// Create an iterator over a depth first traversal of `graph`,
    /// starting at `start`.
    pub fn new(graph: &'a G, start: G::NodeId) -> Self
    {
        // Inline the code from Dfs::new to
        // work around rust bug #22841
        let mut dfs = Dfs::empty(graph);
        dfs.move_to(start);
        DfsIter {
            graph: graph,
            dfs: dfs,
        }
    }
    /// Keep the discovered map, but clear the visit stack and restart
    /// the DFS traversal from a particular node.
    pub fn move_to(&mut self, start: G::NodeId)
    {
        self.dfs.move_to(start)
    }
}
impl<'a, G: 'a + Visitable> Iterator for DfsIter<'a, G> where
    G: NeighborIter<'a>,
{
    type Item = G::NodeId;
    #[inline]
    fn next(&mut self) -> Option<G::NodeId>
    {
        self.dfs.next(self.graph)
    }
    fn size_hint(&self) -> (usize, Option<usize>)
    {
        // Very vague info about size of traversal: at least the nodes
        // currently on the stack will still be yielded.
        (self.dfs.stack.len(), None)
    }
}
// Cloning duplicates the traversal state; the graph itself is only borrowed.
impl<'a, G: Visitable> Clone for DfsIter<'a, G> where Dfs<G::NodeId, G::Map>: Clone
{
    fn clone(&self) -> Self {
        DfsIter {
            graph: self.graph,
            dfs: self.dfs.clone(),
        }
    }
}
/// A breadth first search (BFS) of a graph.
///
/// Using a **Bfs** you can run a traversal over a graph while still retaining
/// mutable access to it, if you use it like the following example:
///
/// ```
/// use petgraph::{Graph, Bfs};
///
/// let mut graph = Graph::<_,()>::new();
/// let a = graph.add_node(0);
///
/// let mut bfs = Bfs::new(&graph, a);
/// while let Some(nx) = bfs.next(&graph) {
///     // we can access `graph` mutably here still
///     graph[nx] += 1;
/// }
///
/// assert_eq!(graph[a], 1);
/// ```
///
/// **Note:** The algorithm may not behave correctly if nodes are removed
/// during iteration. It may not necessarily visit added nodes or edges.
#[derive(Clone)]
pub struct Bfs<N, VM> {
    /// The queue of nodes to visit (named `stack` but used as a FIFO
    /// queue via `pop_front`/`push_back`)
    pub stack: VecDeque<N>,
    /// The map of discovered nodes
    pub discovered: VM,
}
impl<N, VM> Bfs<N, VM>
    where N: Clone,
          VM: VisitMap<N>,
{
    /// Create a new **Bfs**, using the graph's visitor map, and put **start**
    /// in the stack of nodes to visit.
    pub fn new<G>(graph: &G, start: N) -> Self
        where G: Visitable<NodeId=N, Map=VM>
    {
        let mut discovered = graph.visit_map();
        discovered.visit(start.clone());
        let mut stack = VecDeque::new();
        stack.push_front(start.clone());
        Bfs {
            stack: stack,
            discovered: discovered,
        }
    }
    /// Return the next node in the bfs, or **None** if the traversal is done.
    pub fn next<'a, G>(&mut self, graph: &'a G) -> Option<N> where
        G: Graphlike<NodeId=N>,
        G: NeighborIter<'a>,
    {
        // Dequeue one node, enqueue its undiscovered successors, emit it.
        // The loop body always returns, so at most one iteration runs.
        while let Some(node) = self.stack.pop_front() {
            for succ in graph.neighbors(node.clone()) {
                if self.discovered.visit(succ.clone()) {
                    self.stack.push_back(succ);
                }
            }
            return Some(node);
        }
        None
    }
}
/// An iterator for a breadth first traversal of a graph.
pub struct BfsIter<'a, G: 'a + Visitable> {
    graph: &'a G,
    bfs: Bfs<G::NodeId, G::Map>,
}
impl<'a, G: Visitable> BfsIter<'a, G> where
    G::NodeId: Clone,
{
    /// Create an iterator over a breadth first traversal of `graph`,
    /// starting at `start`.
    pub fn new(graph: &'a G, start: G::NodeId) -> Self
    {
        // Inline the code from Bfs::new to
        // work around rust bug #22841
        let mut discovered = graph.visit_map();
        discovered.visit(start.clone());
        let mut stack = VecDeque::new();
        stack.push_front(start.clone());
        let bfs = Bfs {
            stack: stack,
            discovered: discovered,
        };
        BfsIter {
            graph: graph,
            bfs: bfs,
        }
    }
}
impl<'a, G: 'a + Visitable> Iterator for BfsIter<'a, G> where
    G::NodeId: Clone,
    G: NeighborIter<'a>,
{
    type Item = G::NodeId;
    fn next(&mut self) -> Option<G::NodeId>
    {
        self.bfs.next(self.graph)
    }
    fn size_hint(&self) -> (usize, Option<usize>)
    {
        // At least the currently queued nodes will be yielded; no upper
        // bound is known.
        (self.bfs.stack.len(), None)
    }
}
// Cloning duplicates the traversal state; the graph itself is only borrowed.
impl<'a, G: Visitable> Clone for BfsIter<'a, G> where Bfs<G::NodeId, G::Map>: Clone
{
    fn clone(&self) -> Self {
        BfsIter {
            graph: self.graph,
            bfs: self.bfs.clone(),
        }
    }
}
/// A topological order traversal for a graph.
#[derive(Clone)]
pub struct Topo<N, VM> {
    // Nodes ready to be emitted (all predecessors already ordered).
    tovisit: Vec<N>,
    // Visit map of nodes already emitted in the order.
    ordered: VM,
}
impl<N, VM> Topo<N, VM>
    where N: Clone,
          VM: VisitMap<N>,
{
    /// Create a new `Topo`, using the graph's visitor map, and put all
    /// initial nodes in the to-visit list.
    pub fn new<'a, G>(graph: &'a G) -> Self
        where G: Externals<'a> + Visitable<NodeId=N, Map=VM>,
    {
        let mut topo = Self::empty(graph);
        // Nodes with no incoming edges are the valid starting points.
        topo.tovisit.extend(graph.externals(Incoming));
        topo
    }
    /* Private until it has a use */
    /// Create a new `Topo`, using the graph's visitor map with *no* starting
    /// index specified.
    fn empty<G>(graph: &G) -> Self
        where G: Visitable<NodeId=N, Map=VM>
    {
        Topo {
            ordered: graph.visit_map(),
            tovisit: Vec::new(),
        }
    }
    /// Clear visited state, and put all initial nodes into the visit list.
    pub fn reset<'a, G>(&mut self, graph: &'a G)
        where G: Externals<'a> + Revisitable<NodeId=N, Map=VM>,
    {
        graph.reset_map(&mut self.ordered);
        self.tovisit.clear();
        self.tovisit.extend(graph.externals(Incoming));
    }
    /// Return the next node in the current topological order traversal, or
    /// `None` if the traversal is at the end.
    ///
    /// *Note:* The graph may not have a complete topological order, and the only
    /// way to know is to run the whole traversal and make sure it visits every node.
    pub fn next<'a, G>(&mut self, g: &'a G) -> Option<N>
        where G: NeighborsDirected<'a> + Visitable<NodeId=N, Map=VM>,
    {
        // Take an unvisited element and find which of its neighbors are next
        while let Some(nix) = self.tovisit.pop() {
            // A node can be pushed more than once; skip already-emitted ones.
            if self.ordered.is_visited(&nix) {
                continue;
            }
            self.ordered.visit(nix.clone());
            for neigh in g.neighbors_directed(nix.clone(), Outgoing) {
                // Look at each neighbor, and those that only have incoming edges
                // from the already ordered list, they are the next to visit.
                if g.neighbors_directed(neigh.clone(), Incoming).all(|b| self.ordered.is_visited(&b)) {
                    self.tovisit.push(neigh);
                }
            }
            return Some(nix);
        }
        None
    }
}
/// A topological order traversal for a subgraph.
///
/// `SubTopo` starts at a node, and does a topological order traversal of
/// all nodes reachable from the starting point.
#[derive(Clone)]
pub struct SubTopo<N, VM> {
    // Nodes whose (discovered) predecessors are all already ordered.
    tovisit: Vec<N>,
    // Successors seen but not yet ready; retried when `tovisit` runs dry,
    // which is what lets cycle members be emitted.
    notvisited: VecDeque<N>,
    // Nodes already emitted in the order.
    ordered: VM,
    // Nodes known to be reachable from the start (the induced subgraph).
    discovered: VM,
}
impl<N, VM> SubTopo<N, VM>
    where N: Clone,
          VM: VisitMap<N>,
{
    /// Create a new `SubTopo`, using the graph's visitor map, and put single
    /// node in the to-visit list.
    pub fn from_node<'a, G>(graph: &'a G, node: N) -> Self
        where G: Externals<'a> + Visitable<NodeId=N, Map=VM>,
    {
        let mut topo = Self::empty(graph);
        topo.tovisit.push(node);
        topo
    }
    /* Private until it has a use */
    /// Create a new `SubTopo`, using the graph's visitor map with *no* starting
    /// index specified.
    fn empty<G>(graph: &G) -> Self
        where G: Visitable<NodeId=N, Map=VM>
    {
        SubTopo {
            ordered: graph.visit_map(),
            discovered: graph.visit_map(),
            tovisit: Vec::new(),
            notvisited: VecDeque::new(),
        }
    }
    /// Clear visited state, and put a single node into the visit list.
    pub fn reset_with_node<G>(&mut self, graph: &G, node: N)
        where G: Revisitable<NodeId=N, Map=VM>,
    {
        graph.reset_map(&mut self.ordered);
        graph.reset_map(&mut self.discovered);
        self.tovisit.clear();
        self.tovisit.push(node);
    }
    // discover all nodes reachable from `n`
    // NOTE(review): the recursion depth equals the longest unvisited
    // outgoing path, so a very deep subgraph could overflow the stack —
    // confirm this is acceptable for expected graph sizes.
    fn discover<'a, G>(&mut self, g: &'a G, n: N)
        where G: NeighborsDirected<'a> + Visitable<NodeId=N, Map=VM>,
    {
        if self.discovered.is_visited(&n) {
            return;
        }
        self.discovered.visit(n.clone());
        for neigh in g.neighbors_directed(n, Outgoing) {
            self.discover(g, neigh);
        }
    }
    /// Return the next node in the current topological order traversal, or
    /// `None` if the traversal is at the end.
    ///
    /// *Note:* The subgraph may not have a complete topological order.
    /// If there is a cycle in the subgraph, then nodes of that cycle *are* included in this traversal.
    pub fn next<'a, G>(&mut self, g: &'a G) -> Option<N>
        where G: NeighborsDirected<'a> + Visitable<NodeId=N, Map=VM>,
    {
        // Take an unvisited element and find which of its neighbors are next
        //
        // discovered: All nodes that have been reachable through outgoing paths
        //             from the start
        // ordered: All nodes that have already been emitted until this point
        loop {
            while let Some(nix) = self.tovisit.pop() {
                if self.ordered.is_visited(&nix) {
                    continue;
                }
                self.ordered.visit(nix.clone());
                self.discover(g, nix.clone());
                for neigh in g.neighbors_directed(nix.clone(), Outgoing) {
                    // Look at each neighbor, and those that only have incoming edges
                    // from the already ordered list, they are the next to visit.
                    // `discovered` is used to limit this to the induced subgraph
                    if g.neighbors_directed(neigh.clone(), Incoming)
                        .filter(|b| self.discovered.is_visited(b))
                        .all(|b| self.ordered.is_visited(&b))
                    {
                        self.tovisit.push(neigh);
                    } else {
                        self.notvisited.push_back(neigh);
                    }
                }
                return Some(nix);
            }
            // No ready node left: force one deferred node through so that
            // members of cycles are eventually emitted.
            if let Some(nix) = self.notvisited.pop_front() {
                self.tovisit.push(nix);
            } else {
                return None;
            }
        }
    }
}
| true
|
5e4438d1a4b5e6a38463fc8d6c70dbb690d13c9c
|
Rust
|
tyrchen/rust-training
|
/old-training-code/enum_test/src/address.rs
|
UTF-8
| 410
| 3.3125
| 3
|
[] |
no_license
|
// An IPv4 address stored as its textual representation.
#[derive(Debug)]
struct Ipv4Addr {
    value: String,
}
// An IP address: IPv4 carries a struct payload, IPv6 a plain string.
enum IpAddress {
    Ipv4(Ipv4Addr),
    Ipv6(String),
}
use IpAddress::{Ipv4, Ipv6};
/// Demonstrates matching on an enum with struct and tuple variants.
pub fn run() {
    let ip = Ipv4(Ipv4Addr {
        value: String::from("127.0.0.1"),
    });
    // The match covers both `IpAddress` variants exhaustively; the previous
    // `_` catch-all arm was unreachable and triggered a compiler warning,
    // so it has been removed.
    match ip {
        Ipv4(address) => println!("this is an ipv4: {:?}", address),
        Ipv6(address) => println!("this is an ipv6: {}", address),
    }
}
| true
|
f30d518298ef5057e184f0426eaa1897166d44c7
|
Rust
|
superdump/building-blocks
|
/crates/building_blocks_mesh/src/quad.rs
|
UTF-8
| 5,903
| 3.125
| 3
|
[
"MIT"
] |
permissive
|
use super::{PosNormMesh, PosNormTexMesh};
use building_blocks_core::{
axis::{Axis3Permutation, SignedAxis3},
prelude::*,
};
/// A set of `Quad`s that share an orientation.
pub struct QuadGroup<M> {
    /// The quads themselves. We rely on the group's metadata to interpret them.
    pub quads: Vec<(Quad, M)>,
    /// The shared orientation (normal sign + axis permutation) of all quads.
    pub face: OrientedCubeFace,
}
impl<M> QuadGroup<M> {
    /// Create an empty group for quads with the given face orientation.
    pub fn new(face: OrientedCubeFace) -> Self {
        Self {
            quads: Vec::new(),
            face,
        }
    }
}
/// Metadata that's used to aid in the geometric calculations for one of the 6 possible cube faces.
pub struct OrientedCubeFace {
    // Determines the orientation of the plane.
    pub n_sign: i32,
    // Determines the {N, U, V} <--> {X, Y, Z} relation.
    pub permutation: Axis3Permutation,
    // These vectors are always some permutation of +X, +Y, and +Z.
    pub n: Point3i,
    pub u: Point3i,
    pub v: Point3i,
    // The signed normal (`n * n_sign`) converted to floats for mesh output.
    pub mesh_normal: Point3f,
}
impl OrientedCubeFace {
    /// Build a face from a normal sign (+1 / -1) and an axis permutation
    /// whose first axis is the normal axis.
    pub fn new(n_sign: i32, permutation: Axis3Permutation) -> Self {
        let [n_axis, u_axis, v_axis] = permutation.axes();
        let n = n_axis.get_unit_vector();
        let mesh_normal: Point3f = (n * n_sign).into();
        Self {
            n_sign,
            permutation,
            n,
            u: u_axis.get_unit_vector(),
            v: v_axis.get_unit_vector(),
            mesh_normal,
        }
    }
    /// The canonical face for a signed axis, using an even axis permutation.
    pub fn canonical(normal: SignedAxis3) -> Self {
        Self::new(
            normal.sign,
            Axis3Permutation::even_with_normal_axis(normal.axis),
        )
    }
    /// Returns the 4 corners of the quad in this order:
    ///
    /// ```text
    ///  +v
    ///   ^   2 ----> 3
    ///   |     ^
    ///   |      \
    ///   |   0 ----> 1
    ///   +---------> +u
    /// ```
    pub fn quad_corners(&self, quad: &Quad) -> [Point3f; 4] {
        let w_vec = self.u * quad.width;
        let h_vec = self.v * quad.height;
        // For a positive face, shift to the far side of the voxel along
        // the normal axis.
        let minu_minv = if self.n_sign > 0 {
            quad.minimum + self.n
        } else {
            quad.minimum
        };
        let maxu_minv = minu_minv + w_vec;
        let minu_maxv = minu_minv + h_vec;
        let maxu_maxv = minu_minv + w_vec + h_vec;
        [
            minu_minv.into(),
            maxu_minv.into(),
            minu_maxv.into(),
            maxu_maxv.into(),
        ]
    }
    /// The quad corners as bare `[f32; 3]` positions for mesh output.
    pub fn quad_mesh_positions(&self, quad: &Quad) -> [[f32; 3]; 4] {
        let [c0, c1, c2, c3] = self.quad_corners(quad);
        [c0.0, c1.0, c2.0, c3.0]
    }
    /// One copy of the face normal per quad vertex.
    pub fn quad_mesh_normals(&self) -> [[f32; 3]; 4] {
        [self.mesh_normal.0; 4]
    }
    /// Returns the 6 vertex indices for the quad in order to make two triangles in a mesh. Winding
    /// order depends on both the sign of the surface normal and the permutation of the UVs.
    pub fn quad_mesh_indices(&self, start: u32) -> [u32; 6] {
        quad_indices(start, self.n_sign * self.permutation.sign() > 0)
    }
    /// Extends `mesh` with the given `quad` that belongs to this face.
    pub fn add_quad_to_pos_norm_mesh(&self, quad: &Quad, mesh: &mut PosNormMesh) {
        // Indices are relative to the positions already in the mesh.
        let start_index = mesh.positions.len() as u32;
        mesh.positions
            .extend_from_slice(&self.quad_mesh_positions(quad));
        mesh.normals.extend_from_slice(&self.quad_mesh_normals());
        mesh.indices
            .extend_from_slice(&self.quad_mesh_indices(start_index));
    }
    /// Extends `mesh` with the given `quad` that belongs to this group.
    ///
    /// The texture coordinates come from `Quad::simple_tex_coords`.
    pub fn add_quad_to_pos_norm_tex_mesh(&self, quad: &Quad, mesh: &mut PosNormTexMesh) {
        let start_index = mesh.positions.len() as u32;
        mesh.positions
            .extend_from_slice(&self.quad_mesh_positions(quad));
        mesh.normals.extend_from_slice(&self.quad_mesh_normals());
        mesh.tex_coords.extend_from_slice(&quad.simple_tex_coords());
        mesh.indices
            .extend_from_slice(&self.quad_mesh_indices(start_index));
    }
}
/// Returns the vertex indices for a single quad (two triangles). The triangles may have either
/// clockwise or counter-clockwise winding. `start` is the first index.
pub fn quad_indices(start: u32, counter_clockwise: bool) -> [u32; 6] {
    // Triangle-corner offsets relative to `start`, chosen per winding.
    let offsets: [u32; 6] = match counter_clockwise {
        true => [0, 1, 2, 1, 3, 2],
        false => [0, 2, 1, 1, 2, 3],
    };
    let mut indices = [0u32; 6];
    for (slot, off) in indices.iter_mut().zip(offsets.iter()) {
        *slot = start + off;
    }
    indices
}
/// A single quad of connected cubic voxel faces. Must belong to a `QuadGroup` to be useful.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub struct Quad {
    /// Minimum corner of the quad in voxel coordinates.
    pub minimum: Point3i,
    /// Extent along the face's U axis, in voxels.
    pub width: i32,
    /// Extent along the face's V axis, in voxels.
    pub height: i32,
}
impl Quad {
    /// A unit quad covering a single voxel face. For positive faces the
    /// minimum is shifted one voxel along the face's axis.
    pub fn for_voxel_face(voxel_point: Point3i, face: SignedAxis3) -> Self {
        let mut minimum = voxel_point;
        if face.sign > 0 {
            minimum += face.axis.get_unit_vector();
        }
        Self {
            minimum,
            width: 1,
            height: 1,
        }
    }
    /// Returns the UV coordinates of the 4 corners of the quad. Returns in the same order as
    /// `OrientedCubeFace::quad_corners`.
    ///
    /// This is just one way of assigning UVs to voxel quads. It assumes that each material has a
    /// single tile texture with wrapping coordinates, and each voxel face should show the entire
    /// texture. It also assumes a particular orientation for the texture. This should be sufficient
    /// for minecraft-style meshing.
    ///
    /// If you need to use a texture atlas, you must calculate your own coordinates from the `Quad`.
    pub fn simple_tex_coords(&self) -> [[f32; 2]; 4] {
        [
            [0.0, 0.0],
            [self.width as f32, 0.0],
            [0.0, self.height as f32],
            [self.width as f32, self.height as f32],
        ]
    }
}
| true
|
dc8eee5de63dbcefaf9734012b67accb447e11fc
|
Rust
|
Wandalen/OxiSynth
|
/core/src/synth/soundfont/instrument.rs
|
UTF-8
| 3,546
| 2.75
| 3
|
[
"LGPL-2.1-only"
] |
permissive
|
use std::rc::Rc;
use super::generator::{self, Generator};
use super::modulator::Mod;
use super::Sample;
// Flag value marking a generator as explicitly set (vs. left at default).
const GEN_SET : u32 = 1;
/// A SoundFont instrument: a named collection of zones, optionally with a
/// global zone that supplies defaults for the others.
#[derive(Clone)]
pub struct Instrument
{
    name : String,
    global_zone : Option<InstrumentZone>,
    zones : Vec<InstrumentZone>,
}
impl Instrument
{
    /// Convert a parsed SoundFont instrument into the runtime representation,
    /// resolving each zone against the already-imported `samples`.
    pub fn import(sf2 : &soundfont::SoundFont2, inst : &soundfont::Instrument, samples : &[Rc<Sample>]) -> Result<Self, ()>
    {
        // Fall back to a placeholder when the file carries no name.
        let name = if !inst.header.name.is_empty()
        {
            inst.header.name.clone()
        }
        else
        {
            "<untitled>".into()
        };
        let mut global_zone = None;
        let mut zones = Vec::new();
        for (id, zone) in inst.zones.iter().enumerate()
        {
            let name = format!("{}/{}", inst.header.name, id);
            let zone = InstrumentZone::import(name, sf2, zone, samples)?;
            // A first zone without a sample is treated as the global zone.
            if id == 0 && zone.sample.is_none()
            {
                global_zone = Some(zone);
            }
            else
            {
                zones.push(zone);
            }
        }
        Ok(Self {
            name,
            global_zone,
            zones,
        })
    }
    /// The optional global zone holding shared defaults.
    pub fn global_zone(&self) -> Option<&InstrumentZone> { self.global_zone.as_ref() }
    /// The instrument's non-global zones.
    pub fn zones(&self) -> &[InstrumentZone] { &self.zones }
}
/// One zone of an instrument: a key/velocity range mapped to a sample,
/// together with its generator values and modulators.
#[derive(Clone)]
// NOTE(review): `#[repr(C)]` on a struct containing `String`/`Vec` does not
// give a stable FFI layout — confirm whether it is still needed.
#[repr(C)]
pub struct InstrumentZone
{
    pub name : String,
    // `None` for a global zone.
    pub sample : Option<Rc<Sample>>,
    // MIDI key range this zone responds to.
    pub key_low : u8,
    pub key_high : u8,
    // Velocity range this zone responds to.
    pub vel_low : u8,
    pub vel_high : u8,
    pub gen : [Generator; 60],
    pub mods : Vec<Mod>,
}
impl InstrumentZone
{
    /// Build a zone from parsed SoundFont data, resolving its generators,
    /// key/velocity ranges, sample reference and modulators.
    fn import(
        name : String,
        sf2 : &soundfont::SoundFont2,
        zone : &soundfont::Zone,
        samples : &[Rc<Sample>],
    ) -> Result<InstrumentZone, ()>
    {
        let mut key_low = 0;
        let mut key_high = 128;
        let mut vel_low = 0;
        let mut vel_high = 128;
        let mut gen = generator::get_default_values();
        // SampleID generators are handled separately via `zone.sample()` below.
        for new_gen in zone
            .gen_list
            .iter()
            .filter(|g| g.ty != soundfont::data::GeneratorType::SampleID)
        {
            match new_gen.ty
            {
                soundfont::data::GeneratorType::KeyRange =>
                {
                    let amount = new_gen.amount.as_range().unwrap();
                    key_low = amount.low;
                    key_high = amount.high;
                }
                soundfont::data::GeneratorType::VelRange =>
                {
                    let amount = new_gen.amount.as_range().unwrap();
                    vel_low = amount.low;
                    vel_high = amount.high;
                }
                _ =>
                {
                    // FIXME: some generators have an unsigned word amount value but i don't know which ones
                    gen[new_gen.ty as usize].val = *new_gen.amount.as_i16().unwrap() as f64;
                    gen[new_gen.ty as usize].flags = GEN_SET as u8;
                }
            }
        }
        let sample = if let Some(sample_id) = zone.sample()
        {
            let sample = sf2.sample_headers.get(*sample_id as usize).unwrap();
            let name = &sample.name;
            // Find Sample by name:
            let sample = samples.iter().find(|sample| &sample.name == name).cloned();
            if sample.is_none()
            {
                // NOTE(review): the message omits the sample name that was
                // not found, which would make this much easier to debug.
                log::error!("Couldn't find sample name",);
                return Err(());
            }
            sample
        }
        else
        {
            // Global zones legitimately have no sample.
            None
        };
        let mods = zone
            .mod_list
            .iter()
            .map(|new_mod| {
                /* Store the new modulator in the zone
                 * The order of modulators will make a difference, at least in an instrument context:
                 * The second modulator overwrites the first one, if they only differ in amount. */
                Mod::from(new_mod)
            })
            .collect();
        Ok(Self {
            name,
            sample,
            key_low,
            key_high,
            vel_low,
            vel_high,
            gen,
            mods,
        })
    }
}
| true
|
e8104e6f1ffbfe52d59b8fb7c6ffb6c20ef605ef
|
Rust
|
Dentosal/rust_os
|
/src/multitasking/waitfor.rs
|
UTF-8
| 4,049
| 3
| 3
|
[
"MIT"
] |
permissive
|
use alloc::vec::Vec;
use core::sync::atomic::{AtomicU64, Ordering};
use hashbrown::HashSet;
use crate::multitasking::ProcessId;
use crate::time::BSPInstant;
use super::queues::Queues;
/// Instructions for scheduling a process
#[derive(Debug, Clone, PartialEq)]
pub enum WaitFor {
    /// Run again on the next free slot
    None,
    /// Run after specified moment
    Time(BSPInstant),
    /// Process completed
    Process(ProcessId),
    /// An explicitly-triggered event
    Event(ExplicitEventId),
    /// First of multiple wait conditions.
    /// Should never contain `None`.
    FirstOf(Vec<WaitFor>),
}
impl WaitFor {
    /// Resolve the condition immediately, if possible.
    ///
    /// Returns `Ok(pid)` when the condition waits (directly or inside a
    /// `FirstOf`) on a process that has already completed; otherwise hands
    /// the condition back unchanged as `Err`.
    pub fn try_resolve_immediate(self, qs: &Queues, current: ProcessId) -> Result<ProcessId, Self> {
        use WaitFor::*;
        // A process is "done" when it is not the current one and no longer
        // exists in the scheduler queues.
        let process_done = |p| current != p && !qs.process_exists(p);
        match &self {
            Process(p) if process_done(*p) => {
                return Ok(*p);
            },
            FirstOf(subevents) => {
                for e in subevents.iter() {
                    if let Process(p) = e {
                        if process_done(*p) {
                            return Ok(*p);
                        }
                    }
                }
            },
            _ => {},
        }
        Err(self)
    }
    /// Minimize based on current conditions.
    /// Used by scheduler queues to make sure that
    /// completed processes are not waited for.
    pub fn reduce_queues(self, qs: &Queues, current: ProcessId) -> Self {
        use WaitFor::*;
        let process_done = |p| current != p && !qs.process_exists(p);
        match &self {
            // Waiting on a completed process degenerates to "run anytime".
            Process(p) if process_done(*p) => {
                return None;
            },
            FirstOf(subevents) => {
                for e in subevents.iter() {
                    if let Process(p) = e {
                        if process_done(*p) {
                            return None;
                        }
                    }
                }
            },
            _ => {},
        }
        self.reduce()
    }
    /// Minimize based on conditions.
    /// Used to make sure that process that is already
    /// completed is not waited for.
    ///
    /// Collapses a `FirstOf` by merging all `Time` conditions into the
    /// single earliest deadline and unwrapping trivial cases.
    ///
    /// # Panics
    /// Panics if a `FirstOf` contains `None` or a nested `FirstOf`.
    pub fn reduce(self) -> Self {
        use WaitFor::*;
        match self {
            // NOTE: the previous `FirstOf(mut subevents)` binding was never
            // mutated (`into_iter` consumes it) and triggered `unused_mut`.
            FirstOf(subevents) => {
                let mut new_se = Vec::new();
                // `Option::None` must stay fully qualified here because the
                // glob import shadows `None` with `WaitFor::None`.
                let mut earliest: Option<BSPInstant> = Option::None;
                for e in subevents.into_iter() {
                    match e {
                        None => {
                            panic!("None in FirstOf");
                        },
                        Time(instant) => {
                            // Keep only the earliest deadline.
                            if let Some(e) = earliest {
                                if instant < e {
                                    earliest = Some(instant);
                                }
                            } else {
                                earliest = Some(instant);
                            }
                        },
                        FirstOf(_) => {
                            panic!("NESTED FirstOf in reduce");
                        },
                        other => {
                            new_se.push(other);
                        },
                    }
                }
                if let Some(e) = earliest {
                    new_se.push(Time(e));
                }
                // Unwrap trivial results: empty -> run anytime,
                // single condition -> drop the FirstOf wrapper.
                if new_se.is_empty() {
                    None
                } else if new_se.len() == 1 {
                    new_se.pop().unwrap()
                } else {
                    FirstOf(new_se)
                }
            },
            other => other,
        }
    }
    /// Allocate a fresh, process-unique event id.
    pub fn new_event_id() -> ExplicitEventId {
        ExplicitEventId(NEXT_EVENT.fetch_add(1, Ordering::SeqCst))
    }
}
/// Manually triggerable event
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ExplicitEventId(u64);
// Monotonic counter backing `WaitFor::new_event_id`.
static NEXT_EVENT: AtomicU64 = AtomicU64::new(0);
| true
|
9f5cb600d325fdbe8e3c1554189f8001887a98ac
|
Rust
|
fizyk20/epidemic-sim
|
/src/simulation/params.rs
|
UTF-8
| 1,864
| 2.765625
| 3
|
[] |
no_license
|
use serde_derive::{Deserialize, Serialize};
/// Simulation parameters deserialized from a config file; every field has a
/// serde default, so a partial (or empty) config is accepted.
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
pub struct Params {
    // Population size and world geometry.
    #[serde(default = "default_num_people")]
    pub num_people: usize,
    #[serde(default = "default_size")]
    pub size_x: f64,
    #[serde(default = "default_size")]
    pub size_y: f64,
    #[serde(default = "default_speed_stdev")]
    pub speed_stdev: f64,
    // Initial population state.
    #[serde(default = "default_infected")]
    pub init_infected: usize,
    #[serde(default = "default_vaccinated")]
    pub init_vaccinated: usize,
    // Infection probabilities, named `<source group>_to_<target group>`.
    #[serde(default = "default_inf_to_gen")]
    pub infection_prob_infected_to_general: f64,
    #[serde(default = "default_inf_to_healed")]
    pub infection_prob_infected_to_healed: f64,
    #[serde(default = "default_inf_to_vacc")]
    pub infection_prob_infected_to_vaccinated: f64,
    #[serde(default = "default_vacc_to_gen")]
    pub infection_prob_vaccinated_to_general: f64,
    #[serde(default = "default_vacc_to_healed")]
    pub infection_prob_vaccinated_to_healed: f64,
    #[serde(default = "default_vacc_to_vacc")]
    pub infection_prob_vaccinated_to_vaccinated: f64,
    // Disease progression.
    #[serde(default = "default_duration")]
    pub infection_avg_duration: f64,
    #[serde(default = "default_death_rate")]
    pub death_rate: f64,
}
// Serde default providers for `Params`. Each function returns the fallback
// value used when the corresponding field is absent from the input.
fn default_num_people() -> usize { 1000 }
fn default_size() -> f64 { 100.0 }
fn default_speed_stdev() -> f64 { 5.0 }
fn default_infected() -> usize { 1 }
fn default_vaccinated() -> usize { 0 }
fn default_inf_to_gen() -> f64 { 0.02 }
fn default_inf_to_healed() -> f64 { 0.004 }
fn default_inf_to_vacc() -> f64 { 0.002 }
fn default_vacc_to_gen() -> f64 { 0.012 }
fn default_vacc_to_healed() -> f64 { 0.0024 }
fn default_vacc_to_vacc() -> f64 { 0.0012 }
fn default_duration() -> f64 { 30.0 }
fn default_death_rate() -> f64 { 0.02 }
| true
|
a0554c56eb96faba778b4c61a0c74a1f572ccd2d
|
Rust
|
nathantypanski/ncurses-rs
|
/build.rs
|
UTF-8
| 1,685
| 2.53125
| 3
|
[
"MIT"
] |
permissive
|
extern crate gcc;
extern crate pkg_config;
use std::env;
use std::fs::File;
use std::io::Write;
use std::path::Path;
use std::process::Command;
/// Build-script entry point: probes the C `chtype` size and emits cargo
/// configuration accordingly.
fn main() {
    check_chtype_size();
}
/// Compile and run a tiny C probe to detect the bit width of ncurses'
/// `chtype`: the probe prints `cargo:rustc-cfg=feature="wide_chtype"` for
/// 64-bit, aborts for anything other than 32/64. Also probes ncurses via
/// pkg-config so cargo links against it.
fn check_chtype_size() {
    // Scratch paths in cargo's OUT_DIR for the generated C source and binary.
    let out_dir = env::var("OUT_DIR").expect("cannot get OUT_DIR");
    let src = format!("{}", Path::new(&out_dir).join("chtype_size.c").display());
    let bin = format!("{}", Path::new(&out_dir).join("chtype_size").display());
    // NOTE(review): `.expect(&format!(..))` formats eagerly even on success;
    // `unwrap_or_else(|_| panic!(..))` would avoid that (clippy: expect_fun_call).
    let mut fp = File::create(&src).expect(&format!("cannot create {}", src));
    fp.write_all(b"
#include <assert.h>
#include <limits.h>
#include <stdio.h>
#include <ncurses.h>
int main(void)
{
if (sizeof(chtype)*CHAR_BIT == 64) {
puts(\"cargo:rustc-cfg=feature=\\\"wide_chtype\\\"\");
} else {
/* We only support 32-bit and 64-bit chtype. */
assert(sizeof(chtype)*CHAR_BIT == 32 && \"unsupported size for chtype\");
}
return 0;
}
").expect(&format!("cannot write into {}", src));
    // Compile the probe with the same C compiler the gcc crate would use.
    let cfg = gcc::Config::new();
    let compiler = cfg.get_compiler();
    Command::new(compiler.path()).arg(&src).arg("-o").arg(&bin)
        .status().expect("compilation failed");
    // Forward the probe's stdout (cargo directives) verbatim.
    let features = Command::new(&bin).output()
        .expect(&format!("{} failed", bin));
    print!("{}", String::from_utf8_lossy(&features.stdout));
    // Link against the first ncurses flavor pkg-config knows about.
    let ncurses_names = ["ncurses5", "ncurses"];
    for ncurses_name in &ncurses_names {
        if pkg_config::probe_library(ncurses_name).is_ok() {
            break;
        }
    }
    // Clean up the scratch files.
    std::fs::remove_file(&src).expect(&format!("cannot delete {}", src));
    std::fs::remove_file(&bin).expect(&format!("cannot delete {}", bin));
}
| true
|
686ba22ba330d9191b7932e2770189a97ffe7e9a
|
Rust
|
belltoy/mio
|
/src/sys/fuchsia/handles.rs
|
UTF-8
| 2,488
| 3.03125
| 3
|
[
"MIT"
] |
permissive
|
use {io, poll, Evented, Ready, Poll, PollOpt, Token};
use magenta::HandleBase;
use std::sync::Mutex;
/// Wrapper for registering a `HandleBase` type with mio.
#[derive(Debug)]
pub struct EventedHandle<T> where T: HandleBase {
    /// The handle to be registered.
    handle: T,
    /// The current `Token` with which the handle is registered with mio.
    /// `None` while unregistered; mutex-guarded because registration goes
    /// through a shared reference.
    token: Mutex<Option<Token>>,
}
impl<T> EventedHandle<T> where T: HandleBase {
    /// Wraps `handle` so it can be registered with mio for event
    /// notifications. Starts out unregistered (no token).
    pub fn new(handle: T) -> Self {
        EventedHandle {
            handle,
            token: Mutex::new(None),
        }
    }

    /// Borrows the wrapped `HandleBase`.
    pub fn get_ref(&self) -> &T {
        &self.handle
    }

    /// Mutably borrows the wrapped `HandleBase`.
    pub fn get_mut(&mut self) -> &mut T {
        &mut self.handle
    }

    /// Consumes the wrapper, returning the inner `HandleBase`.
    pub fn into_inner(self) -> T {
        self.handle
    }
}
impl<T> Evented for EventedHandle<T> where T: HandleBase {
    // Registers the handle with the poll selector and remembers the token
    // so deregister() can hand it back later.
    fn register(&self,
                poll: &Poll,
                token: Token,
                interest: Ready,
                opts: PollOpt) -> io::Result<()>
    {
        let mut this_token = self.token.lock().unwrap();
        {
            poll::selector(poll).register_handle(&self.handle, token, interest, opts)?;
            *this_token = Some(token);
        }
        Ok(())
    }
    // Re-registration is implemented as deregister + register.
    fn reregister(&self,
                poll: &Poll,
                token: Token,
                interest: Ready,
                opts: PollOpt) -> io::Result<()>
    {
        let mut this_token = self.token.lock().unwrap();
        {
            // NOTE(review): the *new* token is passed to deregister_handle
            // here rather than the previously stored one — confirm the
            // selector keys deregistration by handle, not by token.
            poll::selector(poll).deregister_handle(&self.handle, token)?;
            *this_token = None;
            poll::selector(poll).register_handle(&self.handle, token, interest, opts)?;
            *this_token = Some(token);
        }
        Ok(())
    }
    // Fails with NotFound when the handle was never registered; otherwise
    // removes it from the selector and clears the stored token.
    fn deregister(&self, poll: &Poll) -> io::Result<()> {
        let mut this_token = self.token.lock().unwrap();
        let token = if let Some(token) = *this_token { token } else {
            return Err(io::Error::new(
                io::ErrorKind::NotFound,
                "Attempted to deregister an unregistered handle."))
        };
        {
            poll::selector(poll).deregister_handle(&self.handle, token)?;
            *this_token = None;
        }
        Ok(())
    }
}
| true
|
66df62a3ab37aa3978edf752e8cec69dc1e82f0f
|
Rust
|
Wattyyy/LeetCode
|
/submissions/stone-game/solution.rs
|
UTF-8
| 482
| 2.84375
| 3
|
[
"MIT"
] |
permissive
|
use std::cmp::max;
impl Solution {
    /// Returns whether the first player can win the stone game, using an
    /// interval DP over the score *difference* the player to move can force
    /// on `piles[i..=j]`.
    pub fn stone_game(piles: Vec<i32>) -> bool {
        let n = piles.len();
        // diff[i][j]: best (current player - opponent) margin on piles[i..=j].
        let mut diff = vec![vec![0; n]; n];
        for i in (0..n).rev() {
            // Single pile: the mover takes it all.
            diff[i][i] = piles[i];
            for j in (i + 1)..n {
                // Take the left or right pile, leaving the opponent the rest.
                let take_left = piles[i] - diff[i + 1][j];
                let take_right = piles[j] - diff[i][j - 1];
                diff[i][j] = max(take_left, take_right);
            }
        }
        diff[0][n - 1] > 0
    }
}
| true
|
32d57e7b6568bfa67d164c62b5f31c26081dedb7
|
Rust
|
TheoVanderkooy/rust-practice
|
/practice_problems/src/merge_lists.rs
|
UTF-8
| 9,721
| 3.40625
| 3
|
[] |
no_license
|
use crate::list_node::*;
// first recursive approach
#[allow(dead_code)]
// Merges two sorted lists by consing a fresh node for the smaller head and
// recursing on the remainder. O(n+m) time; allocates an entirely new list.
pub fn merge_two_lists_rec(l1: Option<Box<ListNode>>, l2: Option<Box<ListNode>>) -> Option<Box<ListNode>> {
    if let ( Some(m1), Some(m2) ) = (&l1, &l2) {
        if m1.val < m2.val {
            Option::Some(Box::new(ListNode::cons(m1.val, merge_two_lists_rec(l1.unwrap().next, l2)) ) )
        } else {
            // Arguments are swapped in the recursion; merging is symmetric.
            Option::Some(Box::new(ListNode::cons(m2.val, merge_two_lists_rec(l2.unwrap().next, l1)) ) )
        }
    } else if l1.is_none() {
        // One (or both) lists exhausted: the other is already sorted.
        l2
    } else {
        l1
    }
}
// second iterative approach. actually worked!
#[allow(dead_code)]
// Iterative merge that copies values into a newly allocated output list.
// `tail` always points at the `next` slot where the following node belongs.
pub fn merge_two_lists_it(l1: Option<Box<ListNode>>, l2: Option<Box<ListNode>>) -> Option<Box<ListNode>> {
    let mut l1 = &l1;
    let mut l2 = &l2;
    let mut ret = Option::None;
    let mut tail = &mut ret;
    // take min from each
    while let ( Some(n1), Some(n2) ) = (l1, l2) {
        if n1.val < n2.val {
            *tail = Option::Some(Box::new(ListNode::cons(n1.val, Option::None)));
            tail = &mut tail.as_mut().unwrap().next;
            l1 = & l1.as_ref().unwrap().next;
        } else {
            *tail = Option::Some(Box::new(ListNode::cons(n2.val, Option::None)));
            tail = &mut tail.as_mut().unwrap().next;
            l2 = & l2.as_ref().unwrap().next;
        }
    }
    // take from whatever is left of l1
    while let Some(n1) = l1 {
        *tail = Option::Some(Box::new(ListNode::cons(n1.val, Option::None)));
        tail = &mut tail.as_mut().unwrap().next;
        l1 = & l1.as_ref().unwrap().next;
    }
    // take from whatever is left of l2
    while let Some(n2) = l2 {
        *tail = Option::Some(Box::new(ListNode::cons(n2.val, Option::None)));
        tail = &mut tail.as_mut().unwrap().next;
        l2 = & l2.as_ref().unwrap().next;
    }
    ret
}
// third iterative approach that doesn't make new nodes (reuses existing nodes)
// Iterative merge that relinks the existing nodes instead of allocating new
// ones: each step detaches the smaller head via `take()` and splices it onto
// the output tail.
pub fn merge_two_lists_inplace(l1: Option<Box<ListNode>>, l2: Option<Box<ListNode>>) -> Option<Box<ListNode>> {
    let mut l1 = l1;
    let mut l2 = l2;
    let mut ret = Option::None;
    let mut tail = &mut ret;
    // take min from each
    while let ( Some(n1), Some(n2) ) = (&l1, &l2) {
        let mut temp;
        // pop the first node off the list with the lower head
        if n1.val < n2.val {
            temp = l1.unwrap();
            l1 = temp.next.take();
        } else {
            temp = l2.unwrap();
            l2 = temp.next.take();
        }
        // append node to output list
        *tail = Option::Some(temp);
        tail = &mut tail.as_mut().unwrap().next;
    }
    // once one list runs out, append the rest of the other list
    *tail = if l1.is_some() { l1 } else { l2 };
    ret
}
// tests
#[cfg(test)]
mod tests {
    use super::*;

    type List = Option<Box<ListNode>>;

    /// Runs the shared fixtures against one merge implementation.
    fn check_merge(merge: fn(List, List) -> List) {
        // empty + empty produces no list at all
        assert!(merge(ListNode::from_vec(vec![]), ListNode::from_vec(vec![])).is_none());

        let cases: &[(&[i32], &[i32], &[i32])] = &[
            (&[], &[1, 2, 3], &[1, 2, 3]),
            (&[2, 2], &[], &[2, 2]),
            (&[1, 1, 1], &[1, 1], &[1, 1, 1, 1, 1]),
            (&[1, 2, 2], &[3, 4, 5, 8], &[1, 2, 2, 3, 4, 5, 8]),
            (&[3, 4, 4], &[1, 3], &[1, 3, 3, 4, 4]),
            (&[1, 1, 4, 5], &[2, 3], &[1, 1, 2, 3, 4, 5]),
            (&[3, 3, 5], &[1, 2, 4, 7, 8, 9], &[1, 2, 3, 3, 4, 5, 7, 8, 9]),
        ];
        for &(a, b, want) in cases {
            let l1 = ListNode::from_vec(a.to_vec());
            let l2 = ListNode::from_vec(b.to_vec());
            assert_eq!(list_to_vec(merge(l1, l2)), want.to_vec());
        }
    }

    /// Hand-built fixture 1->3->5 and 2->2->5, constructed with `cons`
    /// directly (mirrors the original "basic" tests).
    fn hand_built_pair() -> (List, List) {
        let l1 = Some(Box::new(ListNode::cons(1,
            Some(Box::new(ListNode::cons(3,
                Some(Box::new(ListNode::cons(5, None)))
            )))
        )));
        let l2 = Some(Box::new(ListNode::cons(2,
            Some(Box::new(ListNode::cons(2,
                Some(Box::new(ListNode::cons(5, None)))
            )))
        )));
        (l1, l2)
    }

    #[test]
    fn basic_merge_rec() {
        let (l1, l2) = hand_built_pair();
        assert_eq!(list_to_vec(merge_two_lists_rec(l1, l2)), vec![1, 2, 2, 3, 5, 5]);
    }

    #[test]
    fn basic_merge_it() {
        let (l1, l2) = hand_built_pair();
        assert_eq!(list_to_vec(merge_two_lists_it(l1, l2)), vec![1, 2, 2, 3, 5, 5]);
    }

    #[test]
    fn basic_merge_inplace() {
        let (l1, l2) = hand_built_pair();
        assert_eq!(list_to_vec(merge_two_lists_inplace(l1, l2)), vec![1, 2, 2, 3, 5, 5]);
    }

    #[test]
    fn test_merge_rec() {
        check_merge(merge_two_lists_rec);
    }

    #[test]
    fn test_merge_it() {
        check_merge(merge_two_lists_it);
    }

    #[test]
    fn test_merge_inplace() {
        check_merge(merge_two_lists_inplace);
    }
}
| true
|
3216ce2759d7b1089b7d1ff01207a6b13f212904
|
Rust
|
foxfriends/git-project
|
/src/commands/open/state/board/task.rs
|
UTF-8
| 2,136
| 2.6875
| 3
|
[] |
no_license
|
use cursive::{Cursive, align::*, theme::*, traits::*, views::*, event, utils::markup::StyledString};
use super::State;
use crate::model::*;
// Builds the board-card view for a single task: a panel titled with the task
// id containing the bold name + short description, a "Details" button, and
// key bindings for delete (Del/Backspace) and left/right movement (h/l).
// `state`/`task` are cloned into each closure because cursive callbacks must
// be 'static.
pub fn card(state: State, task: &Task) -> impl View {
    let mut description_text = StyledString::styled(task.name(), Effect::Bold);
    if !task.name().ends_with(|ch: char| ch.is_ascii_punctuation()) {
        // add a period if the last char was not punctuation already
        description_text.append_styled(".", Effect::Bold);
    }
    description_text.append_plain(" ");
    description_text.append_plain(task.short_description());
    let button = Button::new("Details", { let state = state.clone(); let task = task.clone(); move |s| {
        state.show_task(task.clone(), s)
    }});
    // Deletion asks for confirmation first, then drops the task from the
    // currently selected project and reloads the UI.
    let delete_task = { let state = state.clone(); let task = task.clone(); move |s: &mut Cursive| {
        state.confirm(s, format!("Delete task {}?", task.id()), { let task = task.clone(); let state = state.clone(); move |s| {
            let mut git_project = state.git_project.borrow_mut();
            let current_project = &mut git_project.projects_mut()[state.selected_project.get()];
            current_project.delete_task(task.id());
            // Drop the RefCell borrow before reload() re-borrows it.
            std::mem::drop(git_project);
            state.reload(s);
        }});
    }};
    let event_handler = OnEventView::new(button)
        .on_event(event::Key::Del, delete_task.clone())
        .on_event(event::Key.Backspace, delete_task.clone())
        .on_event('l', { let state = state.clone(); let task = task.clone(); move |s| { state.move_task_right(&task, s) }})
        .on_event('h', { let state = state.clone(); let task = task.clone(); move |s| { state.move_task_left(&task, s) }});
    // Right-align the button row beneath the description.
    let actions = LinearLayout::horizontal()
        .child(DummyView.full_width())
        .child(event_handler);
    let task_contents = LinearLayout::vertical()
        .child(PaddedView::new((0, 0, 1, 1), TextView::new(description_text)))
        .child(actions);
    Panel::new(PaddedView::new((1, 1, 0, 0), task_contents))
        .title(task.id())
        .title_position(HAlign::Left)
        .full_width()
        .with_id(task.id())
}
| true
|
355d5219536e7db76ea8127463b00e5fb257d638
|
Rust
|
rbatis/rbatis
|
/rbdc-sqlite/src/types/bytes.rs
|
UTF-8
| 648
| 2.578125
| 3
|
[
"Apache-2.0"
] |
permissive
|
use crate::decode::Decode;
use crate::encode::{Encode, IsNull};
use crate::type_info::DataType;
use crate::types::Type;
use crate::{SqliteArgumentValue, SqliteTypeInfo, SqliteValue};
use rbdc::error::Error;
// `Vec<u8>` maps to SQLite's BLOB storage class.
impl Type for Vec<u8> {
    fn type_info(&self) -> SqliteTypeInfo {
        SqliteTypeInfo(DataType::Blob)
    }
}
// Encoding moves the vector into the argument list without copying.
impl Encode for Vec<u8> {
    fn encode(self, args: &mut Vec<SqliteArgumentValue>) -> Result<IsNull, Error> {
        args.push(SqliteArgumentValue::Blob(self));
        Ok(IsNull::No)
    }
}
// Decoding copies the blob bytes out of the borrowed row value.
impl Decode for Vec<u8> {
    fn decode(value: SqliteValue) -> Result<Self, Error> {
        Ok(value.blob().to_owned())
    }
}
| true
|
a95ece95e2000736d32077718f1245c7bd01bc8a
|
Rust
|
am1ko/sw-renderer
|
/src/rasterization.rs
|
UTF-8
| 4,191
| 3.125
| 3
|
[
"MIT"
] |
permissive
|
use core::{Color, DisplayBuffer, Face, Renderable};
use na::{Vector2, Vector3};
/// Get barycentric coordinates for a point P with respect to a triangle ABC
///
/// # Arguments
///
/// * 'a' Vertex A of the triangle ABC
/// * 'b' Vertex B of the triangle ABC
/// * 'c' Vertex C of the triangle ABC
/// * 'p' Point P for which to calculate the barycentric coordinates
///
/// Barycentric coordinates (u, v, w) are defined such that uA + vB + wC = P
/// Some useful properties
/// - If u, v, w all are >= 0 then point P is inside the triangle ABC
/// - If any of u, v, w is < 0 then point P is outside the triangle ABC
/// - u, v, w can be used to interpolate the vertex attributes inside the triangle
/// - u + v + w = 1
///
fn get_barycentric(
    a: Vector2<f32>,
    b: Vector2<f32>,
    c: Vector2<f32>,
    p: Vector2<f32>,
) -> (f32, f32, f32) {
    // Express P - A in the (AB, AC) basis by solving the 2x2 normal
    // equations with Cramer's rule.
    let ab = b - a;
    let ac = c - a;
    let ap = p - a;

    let ab_ab = ab.dot(&ab);
    let ab_ac = ab.dot(&ac);
    let ac_ac = ac.dot(&ac);
    let ap_ab = ap.dot(&ab);
    let ap_ac = ap.dot(&ac);

    // denom is twice the squared triangle area; zero for degenerate ABC.
    let denom = ab_ab * ac_ac - ab_ac * ab_ac;
    let v = (ac_ac * ap_ab - ab_ac * ap_ac) / denom;
    let w = (ab_ab * ap_ac - ab_ac * ap_ab) / denom;
    (1.0 - v - w, v, w)
}
impl Renderable for Face<Vector3<f32>> {
    /// Draw a color-filled face
    // Rasterizes via bounding-box scan: every pixel in the triangle's AABB is
    // tested with barycentric coordinates; inside pixels get interpolated
    // depth and vertex colors.
    fn render(&self, buffer: &mut DisplayBuffer) {
        // Bounding box for the triangle
        // NOTE(review): negative coordinates saturate to 0 under `as usize`
        // (Rust saturating float casts) — partially off-screen behaviour
        // relies on that.
        let all_x = [self.v0.position.x, self.v1.position.x, self.v2.position.x];
        let all_y = [self.v0.position.y, self.v1.position.y, self.v2.position.y];
        let min_x = all_x.iter().fold(std::f32::MAX, |a, &b| a.min(b)) as usize;
        let max_x = all_x.iter().fold(std::f32::MIN, |a, &b| a.max(b)) as usize;
        let min_y = all_y.iter().fold(std::f32::MAX, |a, &b| a.min(b)) as usize;
        let max_y = all_y.iter().fold(std::f32::MIN, |a, &b| a.max(b)) as usize;
        // Whole-triangle clip: faces whose box exceeds the buffer are
        // dropped entirely rather than clipped per-pixel.
        if max_x >= buffer.width || max_y >= buffer.height {
            return;
        }
        for y in min_y..=max_y {
            for x in min_x..=max_x {
                // Barycentric test in 2D (z dropped via remove_row).
                let v0 = self.v0.position.remove_row(2);
                let v1 = self.v1.position.remove_row(2);
                let v2 = self.v2.position.remove_row(2);
                let p = Vector2::new(x as f32, y as f32);
                let (w0, w1, w2) = get_barycentric(v0, v1, v2, p);
                if w0 >= 0.0 && w1 >= 0.0 && w2 >= 0.0 {
                    // Interpolate depth and per-vertex color with the
                    // barycentric weights.
                    let z =
                        w0 * self.v0.position.z + w1 * self.v1.position.z + w2 * self.v2.position.z;
                    let color = Color {
                        r: (w0 * self.v0.color.r as f32
                            + w1 * self.v1.color.r as f32
                            + w2 * self.v2.color.r as f32) as u8,
                        g: (w0 * self.v0.color.g as f32
                            + w1 * self.v1.color.g as f32
                            + w2 * self.v2.color.g as f32) as u8,
                        b: (w0 * self.v0.color.b as f32
                            + w1 * self.v1.color.b as f32
                            + w2 * self.v2.color.b as f32) as u8,
                        a: 255,
                    };
                    buffer.set_pixel(x, y, z, color);
                }
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // A point strictly inside the triangle must yield all weights in (0, 1),
    // regardless of the winding order of the vertices.
    #[test]
    fn test_get_barycentric_ccw_inside() {
        let a = Vector2::new(1.0, 0.0);
        let b = Vector2::new(0.0, 1.0);
        let c = Vector2::new(-1.0, 0.0);
        let p = Vector2::new(0.0, 0.5);
        let (w0, w1, w2) = get_barycentric(a, b, c, p);
        assert!(w0 > 0.0);
        assert!(w1 > 0.0);
        assert!(w2 > 0.0);
        assert!(w0 < 1.0);
        assert!(w1 < 1.0);
        assert!(w2 < 1.0);
    }
    // Same triangle with clockwise winding: weights stay inside (0, 1).
    #[test]
    fn test_get_barycentric_cw_inside() {
        let a = Vector2::new(-1.0, 0.0);
        let b = Vector2::new(0.0, 1.0);
        let c = Vector2::new(1.0, 0.0);
        let p = Vector2::new(0.0, 0.5);
        let (w0, w1, w2) = get_barycentric(a, b, c, p);
        assert!(w0 > 0.0);
        assert!(w1 > 0.0);
        assert!(w2 > 0.0);
        assert!(w0 < 1.0);
        assert!(w1 < 1.0);
        assert!(w2 < 1.0);
    }
}
| true
|
ebcf5b547c8506cb942bfd71a03bad5719b3d5ac
|
Rust
|
BruceBrown/d3
|
/d3-core/src/channel/connection.rs
|
UTF-8
| 711
| 2.75
| 3
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
use super::*;
/// The Connection provides a weak link between the sender and receiver.
/// The value of which is dubious. It may provide some feedback mechanisms.
/// It may also provide some insights into a live system to graph the data
/// flow.
#[derive(Default)]
pub struct Connection {
    // Weak back-pointer to the peer connection so neither end keeps the
    // other alive.
    connection: Weak<Mutex<Connection>>,
}
impl Connection {
    /// Creates a pair of connections, each weakly referencing the other.
    // NOTE(review): `.lock()` is used without `.unwrap()`, which implies a
    // parking_lot-style Mutex re-exported via `super::*` — confirm.
    pub fn new() -> (Arc<Mutex<Self>>, Arc<Mutex<Self>>) {
        let c1 = Arc::new(Mutex::new(Self::default()));
        let c2 = Arc::new(Mutex::new(Self::default()));
        c1.lock().connection = Arc::downgrade(&c2);
        c2.lock().connection = Arc::downgrade(&c1);
        (c1, c2)
    }
}
// Shared, lockable handle to one end of a connection pair.
pub type ThreadSafeConnection = Arc<Mutex<Connection>>;
| true
|
770320b2fb945be6aa6425bc6d4dada830888256
|
Rust
|
Rexagon/raccoin
|
/blockchain/src/blockchain.rs
|
UTF-8
| 713
| 2.859375
| 3
|
[
"Apache-2.0"
] |
permissive
|
use serde::Serialize;
use std::iter::FromIterator;
use crate::block::Block;
#[derive(Debug, Serialize)]
pub struct BlockChain {
    // Ordered oldest-first; invariant: never empty (seeded with genesis).
    pub blocks: Vec<Block>,
}
impl BlockChain {
    /// Creates a chain seeded with its genesis block, so the chain is never
    /// empty.
    pub fn new(genesis: Block) -> Self {
        BlockChain {
            blocks: vec![genesis],
        }
    }
    /// Appends `block` if it correctly references the current head; returns
    /// whether the block was accepted.
    pub fn add_block(&mut self, block: Block) -> bool {
        // `unwrap` is safe: the chain always holds at least the genesis block.
        if !block.validate_previous(self.blocks.last().unwrap()) {
            return false;
        }
        self.blocks.push(block);
        true
    }
    /// Returns up to the last `n` blocks, oldest first.
    pub fn tail(&self, n: usize) -> Vec<Block> {
        // Clone the suffix slice directly instead of the double-`rev`
        // iterator dance; `saturating_sub` handles n > len.
        let start = self.blocks.len().saturating_sub(n);
        self.blocks[start..].to_vec()
    }
    /// Returns the current head (most recent block) of the chain.
    pub fn last(&self) -> &Block {
        self.blocks.last().unwrap()
    }
}
| true
|
73e256d1ca1e6be7ea8dd099e864323794c4fec5
|
Rust
|
crazyrex/Hecate
|
/src/meta/mod.rs
|
UTF-8
| 3,148
| 2.796875
| 3
|
[
"MIT"
] |
permissive
|
extern crate r2d2;
extern crate r2d2_postgres;
extern crate postgres;
extern crate serde_json;
extern crate std;
extern crate rocket;
/// Errors produced by the meta key/value store operations.
#[derive(PartialEq, Debug)]
pub enum MetaError {
    NotFound,
    ListError(String),
    GetError(String),
    SetError(String)
}

impl MetaError {
    /// Renders the error as a human-readable message.
    pub fn to_string(&self) -> String {
        match *self {
            MetaError::NotFound => String::from("Key/Value Not Found"),
            // `format!` already yields a String — the previous
            // `String::from(format!(..))` round-trip was redundant.
            MetaError::ListError(ref msg) => format!("Could not list Key/Value: {}", msg),
            MetaError::GetError(ref msg) => format!("Could not get Key/Value: {}", msg),
            MetaError::SetError(ref msg) => format!("Could not set Key/Value: {}", msg)
        }
    }
}
/// Returns every meta key, sorted alphabetically.
pub fn list(conn: &r2d2::PooledConnection<r2d2_postgres::PostgresConnectionManager>) -> Result<Vec<String>, MetaError> {
    match conn.query("
        SELECT key FROM meta ORDER BY key
    ", &[ ]) {
        Ok(rows) => {
            let mut names = Vec::<String>::new();
            for row in rows.iter() {
                names.push(row.get(0));
            }
            Ok(names)
        },
        Err(err) => {
            // Surface the database error message when one exists; other
            // failures (e.g. connection-level) collapse to "generic".
            match err.as_db() {
                Some(e) => { Err(MetaError::ListError(e.message.clone())) },
                _ => Err(MetaError::ListError(String::from("generic")))
            }
        }
    }
}
/// Fetches the JSON value stored under `key`.
// NOTE(review): a missing key yields `Ok(json!(null))` rather than
// `Err(MetaError::NotFound)` — the NotFound variant is unused here.
pub fn get(conn: &r2d2::PooledConnection<r2d2_postgres::PostgresConnectionManager>, key: &String) -> Result<serde_json::Value, MetaError> {
    match conn.query("
        SELECT value::JSON FROM meta WHERE key = $1;
    ", &[ &key ]) {
        Ok(rows) => {
            if rows.len() == 0 {
                Ok(json!(null))
            } else {
                Ok(rows.get(0).get(0))
            }
        },
        Err(err) => {
            // Prefer the database's own message; fall back to "generic".
            match err.as_db() {
                Some(e) => { Err(MetaError::GetError(e.message.clone())) },
                _ => Err(MetaError::GetError(String::from("generic")))
            }
        }
    }
}
/// Upserts `value` under `key` (insert, or overwrite on conflict).
pub fn set(conn: &r2d2::PooledConnection<r2d2_postgres::PostgresConnectionManager>, key: &String, value: &serde_json::Value) -> Result<bool, MetaError> {
    match conn.query("
        INSERT INTO meta (key, value) VALUES ($1, $2)
            ON CONFLICT (key) DO
                UPDATE
                    SET value = $2
                    WHERE meta.key = $1
    ", &[ &key, &value ]) {
        Ok(_) => Ok(true),
        Err(err) => {
            // Prefer the database's own message; fall back to "generic".
            match err.as_db() {
                Some(e) => { Err(MetaError::SetError(e.message.clone())) },
                _ => Err(MetaError::SetError(String::from("generic")))
            }
        }
    }
}
/// Deletes the entry stored under `key`; succeeds even if the key is absent.
// NOTE(review): failures reuse `MetaError::SetError` — a dedicated variant
// would be clearer, but adding one would break exhaustive matches elsewhere.
pub fn delete(conn: &r2d2::PooledConnection<r2d2_postgres::PostgresConnectionManager>, key: &String) -> Result<bool, MetaError> {
    match conn.query("
        DELETE FROM meta WHERE key = $1
    ", &[ &key ]) {
        Ok(_) => Ok(true),
        Err(err) => {
            match err.as_db() {
                Some(e) => { Err(MetaError::SetError(e.message.clone())) },
                _ => Err(MetaError::SetError(String::from("generic")))
            }
        }
    }
}
| true
|
ce61a678b26d8c460589afba644731effa11ebd3
|
Rust
|
goodcodedev/descr
|
/descr-gen/src/process/codegen_tosource.rs
|
UTF-8
| 4,116
| 2.828125
| 3
|
[
"MIT"
] |
permissive
|
use lang_data::data::*;
use lang_data::rule::*;
use descr_common::util::*;
use std::collections::HashMap;
// Generates the `to_source` pretty-printer module from parsed language data.
pub struct CodegenToSource<'a, 'd: 'a> {
    data: &'a LangData<'d>
}
// Index of parts-rules grouped by the ast type they construct.
struct AstRules<'a, 'd: 'a> {
    pub rules: HashMap<&'a str, Vec<&'a AstPartsRule<'d>>>
}
impl<'a, 'd: 'a> AstRules<'a, 'd> {
    /// Registers `rule` under `ast_type`, creating the bucket on first use.
    pub fn insert(&mut self, ast_type: &'d str, rule: &'a AstPartsRule<'d>) {
        // entry() does a single hash lookup, replacing the previous
        // contains_key + insert + get_mut sequence (three lookups).
        self.rules.entry(ast_type).or_default().push(rule);
    }
}
impl<'a, 'd> CodegenToSource<'a, 'd> {
    pub fn new(data: &'a LangData<'d>) -> CodegenToSource<'a, 'd> {
        CodegenToSource { data }
    }
    // Gathers every PartsRule from both ast and list data, grouped by the
    // ast type each rule produces.
    fn collect_rules(&self) -> AstRules<'a, 'd> {
        let mut rules = AstRules { rules: HashMap::new() };
        // Add rules by ast key
        for (_key, ast_data) in &self.data.ast_data {
            for rule in &ast_data.rules {
                if let &AstRule::PartsRule(ref parts_rule) = rule {
                    rules.insert(parts_rule.ast_type, parts_rule);
                }
            }
        }
        for (_key, list_data) in &self.data.list_data {
            for rule in &list_data.rules {
                if let &AstRule::PartsRule(ref parts_rule) = &rule.ast_rule {
                    rules.insert(parts_rule.ast_type, parts_rule);
                }
            }
        }
        rules
    }
    // Emits the complete Rust source of a `ToSource` struct with one method
    // per ast type/enum, each of which appends that node's textual form to a
    // string. The generated code is assembled as a plain String.
    pub fn gen(&self) -> String {
        let mut s = String::with_capacity(
            self.data.ast_data.len() * 100
            + self.data.list_data.len() * 100
        );
        let ast_rules = self.collect_rules();
        // Create code for each rule under
        // function for ast type
        s += "use super::ast::*;\n\n";
        s += "pub struct ToSource;\n";
        s += "#[allow(non_camel_case_types,non_snake_case)]\n";
        s += "#[allow(unused_variables,dead_code)]\n";
        s += "impl<'a> ToSource {\n";
        for (ast_type, rules) in &ast_rules.rules {
            // Simple structs get no dedicated method.
            if self.data.simple_structs.contains(ast_type) {
                continue;
            }
            append!(s, " pub fn " self.data.sc(ast_type) "(mut s: String, node: &'a " ast_type ") -> String {\n");
            for rule in rules {
                // Todo: Possibly create if statement
                // if there are several rules for the
                // same type, comparing optional
                // members at least
                for part in &rule.parts {
                    s += " s += \" \";\n";
                    s = part.add_to_source(s, self.data, false);
                }
            }
            s += " s\n";
            s += " }\n\n";
        }
        // Ast enums
        for (key, ast_enum) in self.data.ast_enums.sorted_iter() {
            let is_simple = self.data.simple_enums.contains(key);
            append!(s 1, "pub fn " ast_enum.sc() "(");
            if is_simple {
                s += "mut ";
            }
            append!(s, "s: String, node: &'a " ast_enum.name ") -> String {\n");
            if is_simple {
                // Simple enums inline each variant's rule parts directly
                // inside the generated match arms.
                append!(s 2, "match node {\n");
                for enum_item in &ast_enum.items {
                    append!(s 3, "&" ast_enum.name "::" enum_item " => {\n");
                    indent!(s 4);
                    let rules = ast_rules.rules.get(enum_item).unwrap();
                    for rule in rules {
                        for part in &rule.parts {
                            s += "s += \" \";\n";
                            s = part.add_to_source(s, self.data, false);
                        }
                    }
                    append!(s 3, "},\n");
                }
                s += " }\n";
                s += " s\n";
                s += " }\n\n";
                continue;
            }
            // Non-simple enums delegate each variant to its type's method.
            append!(s 2, "match node {\n");
            for enum_item in &ast_enum.items {
                append!(s 3, "&" ast_enum.name "::" enum_item "Item(ref inner) => Self::" self.data.sc(enum_item) "(s, inner),\n");
            }
            s += " }\n }\n\n";
        }
        s += "}";
        s
    }
}
| true
|
966d425885224b3f7f83e783db320593f3dc7b76
|
Rust
|
universome/aladdin
|
/src/base/timers.rs
|
UTF-8
| 1,063
| 3.25
| 3
|
[] |
no_license
|
use std::iter::Iterator;
use std::time::{Duration, Instant};
use std::thread::sleep;
/// Emits a tick at most once per `interval` seconds.
///
/// Can be polled non-blockingly via `next_if_elapsed`, or used as a blocking
/// `Iterator` that sleeps until the next tick is due.
pub struct Periodic {
    /// Minimum number of whole seconds between ticks.
    interval: u64,
    /// Time of the last tick.
    timestamp: Instant
}

impl Periodic {
    /// Creates a timer whose first tick is immediately available.
    pub fn new(interval: u32) -> Periodic {
        let interval = interval as u64;
        let now = Instant::now();
        Periodic {
            interval,
            // Backdate the last tick so the very first poll fires at once.
            // `checked_sub` guards against platforms where `Instant`'s epoch
            // is too close to "now" — plain subtraction would panic there
            // (the fallback merely delays the first tick by one interval).
            timestamp: now.checked_sub(Duration::from_secs(interval)).unwrap_or(now)
        }
    }

    /// Returns `true` (and restarts the interval) if at least `interval`
    /// whole seconds have elapsed since the previous tick, else `false`.
    pub fn next_if_elapsed(&mut self) -> bool {
        let now = Instant::now();
        let elapsed = now.duration_since(self.timestamp);
        if elapsed.as_secs() < self.interval {
            return false;
        }
        self.timestamp = now;
        true
    }
}

impl Iterator for Periodic {
    type Item = ();

    /// Blocks (sleeps) until the interval has elapsed, then yields `()`.
    /// Never returns `None`.
    fn next(&mut self) -> Option<()> {
        let now = Instant::now();
        let elapsed = now.duration_since(self.timestamp);
        if elapsed.as_secs() < self.interval {
            sleep(Duration::new(self.interval, 0) - elapsed);
            self.timestamp = Instant::now();
        } else {
            self.timestamp = now;
        }
        Some(())
    }
}
| true
|
3183226e0cfd512f382eb1f4703c8bc1a6de021a
|
Rust
|
JohnWall2016/xlsx-rs
|
/src/xlsx/zip.rs
|
UTF-8
| 1,519
| 2.703125
| 3
|
[] |
no_license
|
use zip::read::{ZipArchive, ZipFile};
use std::fs::File;
use std::path::Path;
use std::io::{Read, Cursor, Result as IOResult};
use super::base::XlsxResult;
// An in-memory zip archive: the whole file is read into a Vec<u8> up front,
// so entries can be reopened repeatedly without touching the filesystem.
pub struct Archive(ZipArchive<Cursor<Vec<u8>>>);
impl Archive {
    /// Reads the zip file at `path` fully into memory.
    pub fn new<P: AsRef<Path>>(path: P) -> XlsxResult<Self> {
        let data = File::open(path)?.read_all_to_vec()?;
        Ok(Archive(ZipArchive::new(Cursor::new(data))?))
    }
    /// Opens an entry by its full path inside the archive.
    pub fn by_name<'a>(&'a mut self, name: &str) -> XlsxResult<ZipFile> {
        Ok(self.0.by_name(name)?)
    }
    /// Iterates over the paths of all entries.
    pub fn file_names(&self) -> impl Iterator<Item=&str> {
        self.0.file_names()
    }
}
/// Convenience helpers to drain any reader in one call.
pub trait ReadAll {
    fn read_all_to_string(&mut self) -> IOResult<String>;
    fn read_all_to_vec(&mut self) -> IOResult<Vec<u8>>;
}
// Blanket implementation: every `Read` gets both helpers for free.
impl<T: Read> ReadAll for T {
    fn read_all_to_string(&mut self) -> IOResult<String> {
        let mut text = String::new();
        // read_to_string returns the byte count; we only want the text.
        self.read_to_string(&mut text).map(|_| text)
    }
    fn read_all_to_vec(&mut self) -> IOResult<Vec<u8>> {
        let mut bytes = Vec::new();
        self.read_to_end(&mut bytes).map(|_| bytes)
    }
}
#[test]
// Smoke test against the shared fixture archive: lists entry names and reads
// the same entry twice to prove entries can be reopened.
fn test_archive() -> XlsxResult<()> {
    let mut ar = super::test::test_archive()?;
    for name in ar.file_names() {
        println!("{}", name);
    }
    {
        let mut file = ar.by_name("xl/sharedStrings.xml")?;
        println!("{}", file.read_all_to_string()?);
    }
    {
        // Second read of the same entry — works because the archive is
        // backed by an in-memory cursor.
        let mut file = ar.by_name("xl/sharedStrings.xml")?;
        println!("{}", file.read_all_to_string()?);
    }
    Ok(())
}
| true
|
8ff221e3ce477ba83dd48b5aead3ee37c6da1383
|
Rust
|
CaulyKan/leetcode
|
/找出数组中的幸运数/src/lib.rs
|
UTF-8
| 499
| 2.9375
| 3
|
[] |
no_license
|
impl Solution {
    /// Returns the largest "lucky" number in `arr` — a value whose frequency
    /// equals the value itself — or -1 when none exists.
    pub fn find_lucky(arr: Vec<i32>) -> i32 {
        let mut counts = std::collections::HashMap::new();
        for v in arr {
            // entry() does a single hash lookup per element, replacing the
            // previous contains_key + get_mut/insert double lookup.
            *counts.entry(v).or_insert(0) += 1;
        }
        // A lucky value satisfies value == count (always >= 1), so taking
        // the max over matching values preserves the original -1 fallback.
        counts
            .into_iter()
            .filter(|&(value, count)| value == count)
            .map(|(value, _)| value)
            .max()
            .unwrap_or(-1)
    }
}
pub struct Solution;
| true
|
d93956e3156d4b4275829952a6f5b961f37d0a27
|
Rust
|
stm32-rs/stm32h7xx-hal
|
/src/ethernet/eth.rs
|
UTF-8
| 27,911
| 2.578125
| 3
|
[
"BSD-3-Clause",
"0BSD",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
//! Ethernet PHY layer for the STM32H7
//!
//! As well as this implementation, another notable implementation can
//! be found as part of the [quartiq/stabilizer] project. The two
//! implementations were developed independently, but both in the same
//! year (2019) and they have many similarities.
//!
//! In particular, reference @cjbe's [notes] on ordering accesses to
//! the DMA descriptors.
//!
//! > The CPU is allowed to access normal memory writes out-of-order. Here
//! > the write to the OWN flag in the DMA descriptor (normal memory) was
//! > placed after the DMA tail pointer advance (in device memory, so not
//! > reorderable). This meant the ethernet DMA engine stalled as it saw a
//! > descriptor it did not own, and only restarted and sent the packet when
//! > the next packet was released.
//! >
//! > This fix will work as long as the CPU data cache is disabled. If we
//! > want to enable the cache, the simplest method would be to mark SRAM3
//! > as uncacheable via the MPU.
//!
//! [quartiq/stabilizer]: https://github.com/quartiq/stabilizer
//! [notes]: https://github.com/quartiq/stabilizer/commit/ab1735950b2108eaa8d51eb63efadcd2e25c35c4
use core::ptr;
use crate::rcc::{rec, CoreClocks, ResetEnable};
use crate::stm32;
use smoltcp::{
self,
phy::{self, DeviceCapabilities},
time::Instant,
wire::EthernetAddress,
};
use crate::{
ethernet::{PinsRMII, StationManagement},
gpio::Speed,
};
// 6 DMAC, 6 SMAC, 4 q tag, 2 ethernet type II, 1500 ip MTU, 4 CRC, 2
// padding
const ETH_BUF_SIZE: usize = 1536;
/// Transmit and Receive Descriptor fields
#[allow(dead_code)]
mod emac_consts {
    // DES3 bits shared by transmit and receive descriptors.
    pub const EMAC_DES3_OWN: u32 = 0x8000_0000; // descriptor owned by DMA
    pub const EMAC_DES3_CTXT: u32 = 0x4000_0000; // context descriptor
    pub const EMAC_DES3_FD: u32 = 0x2000_0000; // first buffer of packet
    pub const EMAC_DES3_LD: u32 = 0x1000_0000; // last buffer of packet
    pub const EMAC_DES3_ES: u32 = 0x0000_8000; // error summary
    // Interrupt-on-completion flags.
    pub const EMAC_TDES2_IOC: u32 = 0x8000_0000;
    pub const EMAC_RDES3_IOC: u32 = 0x4000_0000;
    pub const EMAC_RDES3_PL: u32 = 0x0000_7FFF; // received packet length
    pub const EMAC_RDES3_BUF1V: u32 = 0x0100_0000; // buffer 1 address valid
    pub const EMAC_TDES2_B1L: u32 = 0x0000_3FFF; // buffer 1 length mask
    pub const EMAC_DES0_BUF1AP: u32 = 0xFFFF_FFFF; // buffer 1 address
}
use self::emac_consts::*;
/// Transmit Descriptor representation
///
/// * tdes0: transmit buffer address
/// * tdes1:
/// * tdes2: buffer lengths
/// * tdes3: control and payload/frame length
///
/// Note that Copy and Clone are derived to support initialising an
/// array of TDes, but you may not move a TDes after its address has
/// been given to the ETH_DMA engine.
#[derive(Copy, Clone)]
#[repr(C, packed)]
struct TDes {
    tdes0: u32, // transmit buffer address
    tdes1: u32, // unused here
    tdes2: u32, // buffer lengths (B1L)
    tdes3: u32, // OWN / control / payload length
}
impl TDes {
    /// Initialises this TDes to point at the given buffer.
    // Clears all four words; tdes3 = 0 means OWN is clear, i.e. owned by us.
    pub fn init(&mut self) {
        self.tdes0 = 0;
        self.tdes1 = 0;
        self.tdes2 = 0;
        self.tdes3 = 0; // Owned by us
    }
    /// Return true if this TDes is not currently owned by the DMA
    pub fn available(&self) -> bool {
        self.tdes3 & EMAC_DES3_OWN == 0
    }
}
/// Store a ring of TDes and associated buffers
#[repr(C, packed)]
struct TDesRing<const TD: usize> {
    td: [TDes; TD], // descriptors handed to the DMA engine
    tbuf: [[u32; ETH_BUF_SIZE / 4]; TD], // one frame buffer per descriptor
    tdidx: usize, // index of the next descriptor to use
}
impl<const TD: usize> TDesRing<TD> {
    /// Const constructor: all descriptors zeroed (owned by us), index 0.
    const fn new() -> Self {
        Self {
            td: [TDes {
                tdes0: 0,
                tdes1: 0,
                tdes2: 0,
                tdes3: 0,
            }; TD],
            tbuf: [[0; ETH_BUF_SIZE / 4]; TD],
            tdidx: 0,
        }
    }
    /// Initialise this TDesRing. Assume TDesRing is corrupt
    ///
    /// The current memory address of the buffers inside this TDesRing
    /// will be stored in the descriptors, so ensure the TDesRing is
    /// not moved after initialisation.
    pub fn init(&mut self) {
        for x in 0..TD {
            self.td[x].init();
        }
        self.tdidx = 0;
        // Initialise pointers in the DMA engine. (There will be a memory barrier later
        // before the DMA engine is enabled.)
        unsafe {
            let dma = &*stm32::ETHERNET_DMA::ptr();
            dma.dmactx_dlar
                .write(|w| w.bits(&self.td[0] as *const _ as u32));
            dma.dmactx_rlr.write(|w| w.tdrl().bits(TD as u16 - 1));
            dma.dmactx_dtpr
                .write(|w| w.bits(&self.td[0] as *const _ as u32));
        }
    }
    /// Return true if a TDes is available for use
    pub fn available(&self) -> bool {
        self.td[self.tdidx].available()
    }
    /// Release the next TDes to the DMA engine for transmission
    pub fn release(&mut self) {
        let x = self.tdidx;
        assert!(self.td[x].tdes3 & EMAC_DES3_OWN == 0); // Owned by us
        let address = ptr::addr_of!(self.tbuf[x]) as u32;
        // Read format
        self.td[x].tdes0 = address; // Buffer 1
        self.td[x].tdes1 = 0; // Not used
        assert!(self.td[x].tdes2 & !EMAC_TDES2_B1L == 0); // Not used
        assert!(self.td[x].tdes2 & EMAC_TDES2_B1L > 0); // Length must be valid
        self.td[x].tdes3 = 0;
        self.td[x].tdes3 |= EMAC_DES3_FD; // FD: Contains first buffer of packet
        self.td[x].tdes3 |= EMAC_DES3_LD; // LD: Contains last buffer of packet
        self.td[x].tdes3 |= EMAC_DES3_OWN; // Give the DMA engine ownership
        // Ensure changes to the descriptor are committed before
        // DMA engine sees tail pointer store
        cortex_m::asm::dsb();
        // Move the tail pointer (TPR) to the next descriptor
        let x = (x + 1) % TD;
        unsafe {
            let dma = &*stm32::ETHERNET_DMA::ptr();
            dma.dmactx_dtpr
                .write(|w| w.bits(&(self.td[x]) as *const _ as u32));
        }
        self.tdidx = x;
    }
    /// Access the buffer pointed to by the next TDes
    pub unsafe fn buf_as_slice_mut(&mut self, length: usize) -> &mut [u8] {
        let x = self.tdidx;
        // Set address in descriptor
        self.td[x].tdes0 = ptr::addr_of!(self.tbuf[x]) as u32; // Buffer 1
        // Clamp to the backing buffer size *before* programming the
        // descriptor length.
        let len = core::cmp::min(length, ETH_BUF_SIZE);
        // Set length in descriptor. Previously the unclamped `length` was
        // written to B1L here, which let the DMA engine read past `tbuf`
        // whenever a caller passed a length greater than ETH_BUF_SIZE.
        self.td[x].tdes2 = (len as u32) & EMAC_TDES2_B1L;
        // Create a raw pointer in place without an intermediate reference. Use
        // this to return a slice from the packed buffer
        let addr = ptr::addr_of_mut!(self.tbuf[x]) as *mut _;
        core::slice::from_raw_parts_mut(addr, len)
    }
}
/// Receive Descriptor representation
///
/// * rdes0: receive buffer address
/// * rdes1:
/// * rdes2:
/// * rdes3: OWN and Status
///
/// Note that Copy and Clone are derived to support initialising an
/// array of RDes, but you may not move a RDes after its address has
/// been given to the ETH_DMA engine.
#[derive(Copy, Clone)]
#[repr(C, packed)]
struct RDes {
    rdes0: u32,
    rdes1: u32,
    rdes2: u32,
    rdes3: u32,
}
impl RDes {
    /// Initialises RDes: zero every word, leaving the descriptor
    /// owned by software (OWN bit clear).
    pub fn init(&mut self) {
        self.rdes0 = 0;
        self.rdes1 = 0;
        self.rdes2 = 0;
        self.rdes3 = 0; // Owned by us
    }
    /// Return true if this RDes is acceptable to us
    pub fn valid(&self) -> bool {
        // Write-back descriptor is valid if:
        //
        // Contains first buffer of packet AND contains last buf of
        // packet AND no errors AND not a contex descriptor
        self.rdes3
            & (EMAC_DES3_FD | EMAC_DES3_LD | EMAC_DES3_ES | EMAC_DES3_CTXT)
            == (EMAC_DES3_FD | EMAC_DES3_LD)
    }
    /// Return true if this RDes is not currently owned by the DMA
    pub fn available(&self) -> bool {
        self.rdes3 & EMAC_DES3_OWN == 0 // Owned by us
    }
}
/// Store a ring of RDes and associated buffers
///
/// `rd` holds the receive descriptors shared with the ETH_DMA engine
/// and `rbuf` holds one frame buffer per descriptor. `rdidx` is the
/// index of the descriptor software will examine next. The ring must
/// not be moved after `init`.
#[repr(C, packed)]
struct RDesRing<const RD: usize> {
    rd: [RDes; RD],
    rbuf: [[u32; ETH_BUF_SIZE / 4]; RD],
    rdidx: usize,
}
impl<const RD: usize> RDesRing<RD> {
    /// Create a ring with all descriptors and buffers zeroed.
    const fn new() -> Self {
        Self {
            rd: [RDes {
                rdes0: 0,
                rdes1: 0,
                rdes2: 0,
                rdes3: 0,
            }; RD],
            rbuf: [[0; ETH_BUF_SIZE / 4]; RD],
            rdidx: 0,
        }
    }
    /// Initialise this RDesRing. Assume RDesRing is corrupt
    ///
    /// The current memory address of the buffers inside this RDesRing
    /// will be stored in the descriptors, so ensure the RDesRing is
    /// not moved after initialisation.
    pub fn init(&mut self) {
        for x in 0..RD {
            self.rd[x].init();
        }
        self.rdidx = 0;
        // Initialise pointers in the DMA engine
        unsafe {
            let dma = &*stm32::ETHERNET_DMA::ptr();
            dma.dmacrx_dlar
                .write(|w| w.bits(&self.rd[0] as *const _ as u32));
            dma.dmacrx_rlr.write(|w| w.rdrl().bits(RD as u16 - 1));
        }
        // Release descriptors to the DMA engine
        while self.available() {
            self.release()
        }
    }
    /// Return true if a RDes is available for use
    pub fn available(&self) -> bool {
        self.rd[self.rdidx].available()
    }
    /// Return true if current RDes is valid
    pub fn valid(&self) -> bool {
        self.rd[self.rdidx].valid()
    }
    /// Release the next RDes to the DMA engine
    ///
    /// Re-arms the descriptor in "read" format (buffer address, valid
    /// and interrupt flags) and hands ownership back to the DMA.
    pub fn release(&mut self) {
        let x = self.rdidx;
        assert!(self.rd[x].rdes3 & EMAC_DES3_OWN == 0); // Owned by us
        let address = ptr::addr_of!(self.rbuf[x]) as u32;
        // Read format
        self.rd[x].rdes0 = address; // Buffer 1
        self.rd[x].rdes1 = 0; // Reserved
        self.rd[x].rdes2 = 0; // Marked as invalid
        self.rd[x].rdes3 = 0;
        self.rd[x].rdes3 |= EMAC_DES3_OWN; // Give the DMA engine ownership
        self.rd[x].rdes3 |= EMAC_RDES3_BUF1V; // BUF1V: 1st buffer address is valid
        self.rd[x].rdes3 |= EMAC_RDES3_IOC; // IOC: Interrupt on complete
        // Ensure changes to the descriptor are committed before
        // DMA engine sees tail pointer store
        cortex_m::asm::dsb();
        // Move the tail pointer (TPR) to this descriptor
        unsafe {
            let dma = &*stm32::ETHERNET_DMA::ptr();
            dma.dmacrx_dtpr
                .write(|w| w.bits(&(self.rd[x]) as *const _ as u32));
        }
        // Update active descriptor
        self.rdidx = (x + 1) % RD;
    }
    /// Access the buffer pointed to by the next RDes
    ///
    /// The slice length is taken from the write-back PL (packet
    /// length) field of RDES3, clamped to the buffer size.
    ///
    /// # Safety
    ///
    /// Ensure that release() is called between subsequent calls to this
    /// function.
    #[allow(clippy::mut_from_ref)]
    pub unsafe fn buf_as_slice_mut(&self) -> &mut [u8] {
        let x = self.rdidx;
        // Write-back format
        let addr = ptr::addr_of!(self.rbuf[x]) as *mut u8;
        let len = (self.rd[x].rdes3 & EMAC_RDES3_PL) as usize;
        let len = core::cmp::min(len, ETH_BUF_SIZE);
        core::slice::from_raw_parts_mut(addr, len)
    }
}
/// Paired transmit and receive descriptor rings, sized by the const
/// generics `TD` (transmit descriptors) and `RD` (receive descriptors).
pub struct DesRing<const TD: usize, const RD: usize> {
    tx: TDesRing<TD>,
    rx: RDesRing<RD>,
}
impl<const TD: usize, const RD: usize> DesRing<TD, RD> {
    /// Create a zeroed ring pair; `const` allows placement in a
    /// `static`.
    pub const fn new() -> Self {
        DesRing {
            tx: TDesRing::new(),
            rx: RDesRing::new(),
        }
    }
}
impl<const TD: usize, const RD: usize> Default for DesRing<TD, RD> {
    fn default() -> Self {
        Self::new()
    }
}
///
/// Ethernet DMA
///
/// Owns the ETH_DMA peripheral and a `'static` reference to the
/// descriptor rings it was initialised with.
pub struct EthernetDMA<const TD: usize, const RD: usize> {
    ring: &'static mut DesRing<TD, RD>,
    eth_dma: stm32::ETHERNET_DMA,
}
///
/// Ethernet MAC
///
/// Owns the ETH_MAC peripheral plus the SMI (MDIO) parameters used to
/// talk to the external PHY: its bus address and the CSR clock-range
/// divider selection.
pub struct EthernetMAC {
    eth_mac: stm32::ETHERNET_MAC,
    eth_phy_addr: u8,
    clock_range: u8,
}
/// Create and initialise the ethernet driver.
///
/// You must move in ETH_MAC, ETH_MTL, ETH_DMA.
///
/// Sets up the descriptor structures, sets up the peripheral
/// clocks and GPIO configuration, and configures the ETH MAC and
/// DMA peripherals. Automatically sets slew rate to VeryHigh.
/// If you wish to use another configuration, please see
/// [new_unchecked](new_unchecked).
///
/// This method does not initialise the external PHY. However it does return an
/// [EthernetMAC](EthernetMAC) which implements the
/// [StationManagement](super::StationManagement) trait. This can be used to
/// communicate with the external PHY.
///
/// # Safety
///
/// `EthernetDMA` shall not be moved as it is initialised here
#[allow(clippy::too_many_arguments)]
pub fn new<const TD: usize, const RD: usize>(
    eth_mac: stm32::ETHERNET_MAC,
    eth_mtl: stm32::ETHERNET_MTL,
    eth_dma: stm32::ETHERNET_DMA,
    mut pins: impl PinsRMII,
    ring: &'static mut DesRing<TD, RD>,
    mac_addr: EthernetAddress,
    prec: rec::Eth1Mac,
    clocks: &CoreClocks,
) -> (EthernetDMA<TD, RD>, EthernetMAC) {
    // Configure the RMII pins for maximum slew rate, then delegate all
    // peripheral setup to `new_unchecked`.
    pins.set_speed(Speed::VeryHigh);
    unsafe {
        new_unchecked(eth_mac, eth_mtl, eth_dma, ring, mac_addr, prec, clocks)
    }
}
/// Create and initialise the ethernet driver.
///
/// You must move in ETH_MAC, ETH_MTL, ETH_DMA.
///
/// Sets up the descriptor structures, sets up the peripheral
/// clocks and GPIO configuration, and configures the ETH MAC and
/// DMA peripherals.
///
/// This method does not initialise the external PHY. However it does return an
/// [EthernetMAC](EthernetMAC) which implements the
/// [StationManagement](super::StationManagement) trait. This can be used to
/// communicate with the external PHY.
///
/// All the documented interrupts in the `MMC_TX_INTERRUPT_MASK` and
/// `MMC_RX_INTERRUPT_MASK` registers are masked, since these cause unexpected
/// interrupts after a number of days of heavy ethernet traffic. If these
/// interrupts are desired, you can be unmask them in your own code after this
/// method.
///
/// # Safety
///
/// `EthernetDMA` shall not be moved as it is initialised here
pub unsafe fn new_unchecked<const TD: usize, const RD: usize>(
    eth_mac: stm32::ETHERNET_MAC,
    eth_mtl: stm32::ETHERNET_MTL,
    eth_dma: stm32::ETHERNET_DMA,
    ring: &'static mut DesRing<TD, RD>,
    mac_addr: EthernetAddress,
    prec: rec::Eth1Mac,
    clocks: &CoreClocks,
) -> (EthernetDMA<TD, RD>, EthernetMAC) {
    // RCC
    //
    // Clock and kernel configuration: the order below (DMA soft reset
    // before prec.enable) is deliberate — see the comment inline.
    {
        let rcc = &*stm32::RCC::ptr();
        let syscfg = &*stm32::SYSCFG::ptr();
        // Ensure syscfg is enabled (for PMCR)
        rcc.apb4enr.modify(|_, w| w.syscfgen().set_bit());
        // Reset ETH_DMA - write 1 and wait for 0.
        // On the H723, we have to do this before prec.enable()
        // or the DMA will never come out of reset
        eth_dma.dmamr.modify(|_, w| w.swr().set_bit());
        while eth_dma.dmamr.read().swr().bit_is_set() {}
        // AHB1 ETH1MACEN
        prec.enable();
        // Also need to enable the transmission and reception clocks, which
        // don't have prec objects. They don't have prec objects because they
        // can't be reset.
        rcc.ahb1enr
            .modify(|_, w| w.eth1txen().set_bit().eth1rxen().set_bit());
        syscfg.pmcr.modify(|_, w| w.epis().bits(0b100)); // RMII
    }
    // reset ETH_MAC - write 1 then 0
    //rcc.ahb1rstr.modify(|_, w| w.eth1macrst().set_bit());
    //rcc.ahb1rstr.modify(|_, w| w.eth1macrst().clear_bit());
    cortex_m::interrupt::free(|_cs| {
        // 200 MHz
        eth_mac
            .mac1ustcr
            .modify(|_, w| w.tic_1us_cntr().bits(200 - 1));
        // Configuration Register
        eth_mac.maccr.modify(|_, w| {
            w.arpen()
                .clear_bit()
                .ipc()
                .set_bit()
                .ipg()
                .bits(0b000) // 96 bit
                .ecrsfd()
                .clear_bit()
                .dcrs()
                .clear_bit()
                .bl()
                .bits(0b00) // 19
                .prelen()
                .bits(0b00) // 7
                // CRC stripping for Type frames
                .cst()
                .set_bit()
                // Fast Ethernet speed
                .fes()
                .set_bit()
                // Duplex mode
                .dm()
                .set_bit()
                // Automatic pad/CRC stripping
                .acs()
                .set_bit()
                // Retry disable in half-duplex mode
                .dr()
                .set_bit()
        });
        // Extended configuration: leave extended inter-packet gap and
        // slow-protocol detection disabled.
        eth_mac.macecr.modify(|_, w| {
            w.eipgen()
                .clear_bit()
                .usp()
                .clear_bit()
                .spen()
                .clear_bit()
                .dcrcc()
                .clear_bit()
        });
        // Set the MAC address.
        // Writes to LR trigger both registers to be loaded into the MAC,
        // so write to LR last.
        eth_mac.maca0hr.write(|w| {
            w.addrhi().bits(
                u16::from(mac_addr.0[4]) | (u16::from(mac_addr.0[5]) << 8),
            )
        });
        eth_mac.maca0lr.write(|w| {
            w.addrlo().bits(
                u32::from(mac_addr.0[0])
                    | (u32::from(mac_addr.0[1]) << 8)
                    | (u32::from(mac_addr.0[2]) << 16)
                    | (u32::from(mac_addr.0[3]) << 24),
            )
        });
        // frame filter register
        eth_mac.macpfr.modify(|_, w| {
            w.dntu()
                .clear_bit()
                .ipfe()
                .clear_bit()
                .vtfe()
                .clear_bit()
                .hpf()
                .clear_bit()
                .saf()
                .clear_bit()
                .saif()
                .clear_bit()
                .pcf()
                .bits(0b00)
                .dbf()
                .clear_bit()
                .pm()
                .clear_bit()
                .daif()
                .clear_bit()
                .hmc()
                .clear_bit()
                .huc()
                .clear_bit()
                // Receive All
                .ra()
                .clear_bit()
                // Promiscuous mode
                .pr()
                .clear_bit()
        });
        // Watchdog timeout disabled
        eth_mac.macwtr.write(|w| w.pwe().clear_bit());
        // Flow Control Register
        eth_mac.macqtx_fcr.modify(|_, w| {
            // Pause time
            w.pt().bits(0x100)
        });
        eth_mac.macrx_fcr.modify(|_, w| w);
        // Mask away Ethernet MAC MMC RX/TX interrupts. These are statistics
        // counter interrupts and are enabled by default. We need to manually
        // disable various ethernet interrupts so they don't unintentionally
        // hang the device. The user is free to re-enable them later to provide
        // ethernet MAC-related statistics
        eth_mac.mmc_rx_interrupt_mask.modify(|_, w| {
            w.rxlpiuscim()
                .set_bit()
                .rxucgpim()
                .set_bit()
                .rxalgnerpim()
                .set_bit()
                .rxcrcerpim()
                .set_bit()
        });
        eth_mac.mmc_tx_interrupt_mask.modify(|_, w| {
            w.txlpiuscim()
                .set_bit()
                .txgpktim()
                .set_bit()
                .txmcolgpim()
                .set_bit()
                .txscolgpim()
                .set_bit()
        });
        // TODO: The MMC_TX/RX_INTERRUPT_MASK registers incorrectly mark
        // LPITRCIM as read-only, so svd2rust doens't generate bindings to
        // modify them. Instead, as a workaround, we manually manipulate the
        // bits
        eth_mac
            .mmc_tx_interrupt_mask
            .modify(|r, w| w.bits(r.bits() | (1 << 27)));
        eth_mac
            .mmc_rx_interrupt_mask
            .modify(|r, w| w.bits(r.bits() | (1 << 27)));
        // MTL receive queue configuration
        eth_mtl.mtlrx_qomr.modify(|_, w| {
            w
                // Receive store and forward
                .rsf()
                .set_bit()
                // Dropping of TCP/IP checksum error frames disable
                .dis_tcp_ef()
                .clear_bit()
                // Forward error frames
                .fep()
                .clear_bit()
                // Forward undersized good packets
                .fup()
                .clear_bit()
        });
        // MTL transmit queue configuration
        eth_mtl.mtltx_qomr.modify(|_, w| {
            w
                // Transmit store and forward
                .tsf()
                .set_bit()
        });
        // operation mode register
        eth_dma.dmamr.modify(|_, w| {
            w.intm()
                .bits(0b00)
                // Rx Tx priority ratio 1:1
                .pr()
                .bits(0b000)
                .txpr()
                .clear_bit()
                .da()
                .clear_bit()
        });
        // bus mode register
        eth_dma.dmasbmr.modify(|_, w| {
            // Address-aligned beats
            w.aal()
                .set_bit()
                // Fixed burst
                .fb()
                .set_bit()
        });
        // Channel control: no descriptor skip, PBL not multiplied by 8,
        // TCP segmentation MSS of 536
        eth_dma
            .dmaccr
            .modify(|_, w| w.dsl().bits(0).pblx8().clear_bit().mss().bits(536));
        eth_dma.dmactx_cr.modify(|_, w| {
            w
                // Tx DMA PBL
                .txpbl()
                .bits(32)
                .tse()
                .clear_bit()
                // Operate on second frame
                .osf()
                .clear_bit()
        });
        eth_dma.dmacrx_cr.modify(|_, w| {
            w
                // receive buffer size
                .rbsz()
                .bits(ETH_BUF_SIZE as u16)
                // Rx DMA PBL
                .rxpbl()
                .bits(32)
                // Disable flushing of received frames
                .rpf()
                .clear_bit()
        });
        // Initialise DMA descriptors
        ring.tx.init();
        ring.rx.init();
        // Ensure the DMA descriptors are committed
        cortex_m::asm::dsb();
        // Manage MAC transmission and reception
        eth_mac.maccr.modify(|_, w| {
            w.re()
                .bit(true) // Receiver Enable
                .te()
                .bit(true) // Transmiter Enable
        });
        eth_mtl.mtltx_qomr.modify(|_, w| w.ftq().set_bit());
        // Manage DMA transmission and reception
        eth_dma.dmactx_cr.modify(|_, w| w.st().set_bit());
        eth_dma.dmacrx_cr.modify(|_, w| w.sr().set_bit());
        // Clear any stale transmit/receive process stopped flags
        eth_dma
            .dmacsr
            .modify(|_, w| w.tps().set_bit().rps().set_bit());
    });
    // MAC layer
    // Set the MDC clock frequency in the range 1MHz - 2.5MHz
    let hclk_mhz = clocks.hclk().raw() / 1_000_000;
    let csr_clock_range = match hclk_mhz {
        0..=34 => 2,    // Divide by 16
        35..=59 => 3,   // Divide by 26
        60..=99 => 0,   // Divide by 42
        100..=149 => 1, // Divide by 62
        150..=249 => 4, // Divide by 102
        250..=310 => 5, // Divide by 124
        _ => panic!(
            "HCLK results in MDC clock > 2.5MHz even for the \
             highest CSR clock divider"
        ),
    };
    let mac = EthernetMAC {
        eth_mac,
        eth_phy_addr: 0,
        clock_range: csr_clock_range,
    };
    let dma = EthernetDMA { ring, eth_dma };
    (dma, mac)
}
impl EthernetMAC {
    /// Sets the SMI address to use for the PHY
    ///
    /// Consumes the MAC and returns it with the new PHY address
    /// installed; all other fields are carried over unchanged.
    pub fn set_phy_addr(self, eth_phy_addr: u8) -> Self {
        // Struct-update syntax keeps every other field from `self`.
        Self {
            eth_phy_addr,
            ..self
        }
    }
}
/// PHY Operations
///
/// SMI (MDIO) access: each transfer waits for the MB (busy) flag to
/// clear, programs the PHY/register addresses plus the GOC command,
/// sets MB to start the transfer, then waits for completion.
impl StationManagement for EthernetMAC {
    /// Read a register over SMI.
    fn smi_read(&mut self, reg: u8) -> u16 {
        // Wait for any in-flight transfer to finish
        while self.eth_mac.macmdioar.read().mb().bit_is_set() {}
        self.eth_mac.macmdioar.modify(|_, w| unsafe {
            w.pa()
                .bits(self.eth_phy_addr)
                .rda()
                .bits(reg)
                .goc()
                .bits(0b11) // read
                .cr()
                .bits(self.clock_range)
                .mb()
                .set_bit()
        });
        // Wait for the read to complete, then fetch the data
        while self.eth_mac.macmdioar.read().mb().bit_is_set() {}
        self.eth_mac.macmdiodr.read().md().bits()
    }
    /// Write a register over SMI.
    fn smi_write(&mut self, reg: u8, val: u16) {
        // Wait for any in-flight transfer, load the data register
        // first, then issue the write command
        while self.eth_mac.macmdioar.read().mb().bit_is_set() {}
        self.eth_mac
            .macmdiodr
            .write(|w| unsafe { w.md().bits(val) });
        self.eth_mac.macmdioar.modify(|_, w| unsafe {
            w.pa()
                .bits(self.eth_phy_addr)
                .rda()
                .bits(reg)
                .goc()
                .bits(0b01) // write
                .cr()
                .bits(self.clock_range)
                .mb()
                .set_bit()
        });
        while self.eth_mac.macmdioar.read().mb().bit_is_set() {}
    }
}
/// Define TxToken type and implement consume method
pub struct TxToken<'a, const TD: usize>(&'a mut TDesRing<TD>);
impl<'a, const TD: usize> phy::TxToken for TxToken<'a, TD> {
    /// Let `f` fill the next transmit buffer, then release the
    /// descriptor to the DMA engine for transmission.
    fn consume<R, F>(self, len: usize, f: F) -> R
    where
        F: FnOnce(&mut [u8]) -> R,
    {
        // The frame must fit in a single descriptor buffer
        assert!(len <= ETH_BUF_SIZE);
        let result = f(unsafe { self.0.buf_as_slice_mut(len) });
        self.0.release();
        result
    }
}
/// Define RxToken type and implement consume method
pub struct RxToken<'a, const RD: usize>(&'a mut RDesRing<RD>);
impl<'a, const RD: usize> phy::RxToken for RxToken<'a, RD> {
    /// Let `f` process the received frame, then release the
    /// descriptor back to the DMA engine.
    fn consume<R, F>(self, f: F) -> R
    where
        F: FnOnce(&mut [u8]) -> R,
    {
        let result = f(unsafe { self.0.buf_as_slice_mut() });
        self.0.release();
        result
    }
}
/// Implement the smoltcp Device interface
impl<const TD: usize, const RD: usize> phy::Device for EthernetDMA<TD, RD> {
    type RxToken<'a> = RxToken<'a, RD>;
    type TxToken<'a> = TxToken<'a, TD>;
    // Clippy false positive because DeviceCapabilities is non-exhaustive
    #[allow(clippy::field_reassign_with_default)]
    fn capabilities(&self) -> DeviceCapabilities {
        let mut caps = DeviceCapabilities::default();
        // ethernet frame type II (6 smac, 6 dmac, 2 ethertype),
        // sans CRC (4), 1500 IP MTU
        caps.max_transmission_unit = 1514;
        caps.max_burst_size = Some(core::cmp::min(TD, RD));
        caps
    }
    /// Hand out an rx/tx token pair when a valid frame has been
    /// received AND a transmit descriptor is free (smoltcp may reply
    /// immediately using the tx token).
    fn receive(
        &mut self,
        _timestamp: Instant,
    ) -> Option<(RxToken<RD>, TxToken<TD>)> {
        // Skip all queued packets with errors.
        while self.ring.rx.available() && !self.ring.rx.valid() {
            self.ring.rx.release()
        }
        if self.ring.rx.available() && self.ring.tx.available() {
            Some((RxToken(&mut self.ring.rx), TxToken(&mut self.ring.tx)))
        } else {
            None
        }
    }
    /// Hand out a tx token when a transmit descriptor is free.
    fn transmit(&mut self, _timestamp: Instant) -> Option<TxToken<TD>> {
        if self.ring.tx.available() {
            Some(TxToken(&mut self.ring.tx))
        } else {
            None
        }
    }
}
impl<const TD: usize, const RD: usize> EthernetDMA<TD, RD> {
    /// Return the number of packets dropped since this method was
    /// last called
    ///
    /// Reads the MFC (missed frame counter) field of DMACMFCR; the
    /// hardware counter is cleared by reading it.
    pub fn number_packets_dropped(&self) -> u32 {
        self.eth_dma.dmacmfcr.read().mfc().bits() as u32
    }
}
/// Clears the Ethernet interrupt flag
///
/// Acknowledges the normal-interrupt-summary, receive and transmit
/// flags, then performs two dummy reads so the write has reached the
/// peripheral before the handler returns.
///
/// # Safety
///
/// This method implements a single register write to DMACSR
pub unsafe fn interrupt_handler() {
    let eth_dma = &*stm32::ETHERNET_DMA::ptr();
    eth_dma
        .dmacsr
        .write(|w| w.nis().set_bit().ri().set_bit().ti().set_bit());
    let _ = eth_dma.dmacsr.read();
    let _ = eth_dma.dmacsr.read(); // Delay 2 peripheral clocks
}
/// Enables the Ethernet Interrupt. The following interrupts are enabled:
///
/// * Normal Interrupt `NIE`
/// * Receive Interrupt `RIE`
/// * Transmit Interrupt `TIE`
///
/// # Safety
///
/// This method implements a single RMW to DMACIER
pub unsafe fn enable_interrupt() {
    let eth_dma = &*stm32::ETHERNET_DMA::ptr();
    eth_dma
        .dmacier
        .modify(|_, w| w.nie().set_bit().rie().set_bit().tie().set_bit());
}
| true
|
c477f973d7958d57113c905c5036e202b5eb4f21
|
Rust
|
elasticrash/rust-renderer
|
/src/materials/material.rs
|
UTF-8
| 600
| 2.671875
| 3
|
[] |
no_license
|
use crate::hittable::HitRecord;
use crate::ray::Ray;
use crate::vec3::Color;
/// A surface material that can scatter an incoming ray.
///
/// Implementations write their results through the `attenuation` and
/// `scattered` out-parameters; the bool return presumably indicates
/// whether the ray was scattered at all — confirm against the
/// implementors.
pub trait Material: MatClone {
    fn scatter(
        &self,
        _r_in: &Ray,
        rec: &HitRecord,
        attenuation: &mut Color,
        scattered: &mut Ray,
    ) -> bool;
}
}
/// Object-safe cloning support for `Box<dyn Material>`.
///
/// `Clone` itself is not object-safe, so trait objects clone through
/// this helper instead: the blanket impl below provides `clone_box`
/// for every `'static` `Material` that is `Clone`, and the `Clone`
/// impl for `Box<dyn Material>` simply forwards to it.
pub trait MatClone {
    fn clone_box(&self) -> Box<dyn Material>;
}
impl<T> MatClone for T
where
    T: 'static + Material + Clone,
{
    fn clone_box(&self) -> Box<dyn Material> {
        Box::new(self.clone())
    }
}
impl Clone for Box<dyn Material> {
    fn clone(&self) -> Box<dyn Material> {
        self.clone_box()
    }
}
| true
|
f05a224cb2814d8cfad495f64a71063b483e9ba7
|
Rust
|
techtronics/MantaRay
|
/src/main.rs
|
UTF-8
| 7,378
| 2.53125
| 3
|
[] |
no_license
|
#![feature(macro_rules)]
extern crate cgmath;
extern crate image;
extern crate num;
use cgmath::point::Point3;
use std::io::File;
// use image::GenericImage;
// use std::sync::{Arc, Future, RWLock, TaskPool};
// use std::iter::count;
// use std::cmp::min;
use camera::OriginCamera;
use light::{Light, LightSource};
use material::{EmitterMaterial, DiffuseMaterial, ReflectiveMaterial, RefractiveMaterial, GlobalDiffuseMaterial};
use object::Object;
use scene::Scene;
use shape::{Sphere, Plane};
mod camera;
mod light;
mod material;
mod object;
mod raytracing;
mod render;
mod scene;
mod shape;
mod utils;
#[cfg(test)]
mod test_helpers;
/// Render the scene with a fixed 1000x1000 origin camera and write
/// the result to "result.png".
fn main() {
    let scene = make_scene();
    let (width, height) = (1000, 1000);
    // NOTE(review): `height: width, width: height` are swapped here;
    // harmless while the image is square, but confirm before changing
    // the resolution.
    let camera = OriginCamera {aperture: 1.5, height: width, width: height};
    let pixel_renderer = |x, y| render::pixel(&camera, &scene, x, y);
    let imbuf = render::image(width, height, pixel_renderer);
    let fout = File::create(&Path::new("result.png")).unwrap();
    let _ = image::ImageRgb8(imbuf).save(fout, image::PNG);
}
// // Currently runs 7x slower. No output.
// fn main() {
// let scene = Arc::new(make_scene());
// let (width, height) = (1000, 1000);
// let camera = OriginCamera {aperture: 1.0, height: width, width: height};
// let mut futures = Vec::from_fn((width*height) as uint, |i| {
// let task_scene = scene.clone();
// Future::spawn(proc() {
// let x = (i as u32) % width;
// let y = (i as u32) / width;
// render::pixel(&camera, task_scene.deref(), x, y)
// })
// });
// let pixel_renderer = |x: u32, y: u32| futures.get_mut((x + y*width) as uint).get();
// let imbuf = render::image(width, height, pixel_renderer);
// let fout = File::create(&Path::new("result.png")).unwrap();
// let _ = image::ImageRgb8(imbuf).save(fout, image::PNG);
// }
// // Currently runs 6x slower.
// fn main() {
// let scene = Arc::new(make_scene());
// let (width, height) = (1000, 1000);
// let camera = OriginCamera {aperture: 1.0, height: width, width: height};
// let futbuf_max_size = 1000u32;
// let mut imbuf = image::ImageBuf::new(width, height);
// let mut allocated_futures = 0u32;
// while allocated_futures < width*height {
// let futbuf_size = min(futbuf_max_size, width*height - allocated_futures);
// let mut futures = Vec::from_fn(futbuf_size as uint, |i| {
// let task_scene = scene.clone();
// Future::spawn(proc() {
// let x = (i as u32) % width;
// let y = (i as u32) / width;
// render::pixel(&camera, task_scene.deref(), x, y)
// })
// });
// allocated_futures += futbuf_size;
// for (i, future) in count(0u32, 1).zip(futures.mut_iter()) {
// let x = i % width;
// let y = i / width;
// imbuf.put_pixel(x, y, future.get());
// }
// }
// println!("allocated: {}", allocated_futures);
// let fout = File::create(&Path::new("result.png")).unwrap();
// let _ = image::ImageRgb8(imbuf).save(fout, image::PNG);
// }
// // Currently runs 1.3x to 10x slower, the more tasks in the pool the slower.
// fn main() {
// let (width, height) = (1000, 1000);
// let camera = OriginCamera {aperture: 1.0, height: width, width: height};
// let scene = Arc::new(make_scene());
// let imlock = Arc::new(RWLock::new(image::ImageBuf::new(width, height)));
// let mut counter = Arc::new(RWLock::new(width * height));
// let mut pool = TaskPool::new(100, || proc(_tid) { () } );
// for y in range(0, height) {
// for x in range(0, width) {
// let task_imlock = imlock.clone();
// let task_counter = counter.clone();
// let task_scene = scene.clone();
// pool.execute(proc(_) {
// let pixel = render::pixel(&camera, task_scene.deref(), x, y);
// {
// let mut imbuf = task_imlock.write();
// imbuf.put_pixel(x, y, pixel);
// }
// let mut count = task_counter.write();
// *count = *count - 1;
// });
// }
// }
// loop {
// let c = *counter.read();
// if c == 0 { break };
// println!("c = {}, Waiting for 1s...", c);
// std::io::timer::sleep( std::time::duration::Duration::seconds(1));
// }
// let fout = File::create(&Path::new("result.png")).unwrap();
// let _ = image::ImageRgb8(imlock.read().clone()).save(fout, image::PNG);
// }
/// Build the demo scene: a diffuse sphere, a mirror sphere and a
/// glass sphere inside a five-plane box with an emissive ceiling.
fn make_scene() -> Scene {
    let sphere = Object {
        shape: box Sphere {center: Point3::new(1.5f32, 2.5, 4.7), radius: 0.5},
        // material: box DiffuseMaterial { diffuse: Light::new(0.0, 0.0, 0.6), specular: Light::white(0.4), shininess: 50.0 }
        material: box GlobalDiffuseMaterial::new(0.0, 0.0, 0.6, 5)
    };
    let mirror = Object {
        shape: box Sphere {center: Point3::new(-1.0f32, 1.5, 5.0), radius: 1.5},
        // material: box DiffuseMaterial { diffuse: Light::new(0.0, 0.0, 0.6), specular: Light::white(0.4), shininess: 50.0 }
        material: box ReflectiveMaterial::new(1.0, 0.9, 0.3)
    };
    let glass = Object {
        shape: box Sphere {center: Point3::new(0.8f32, 0.7, 3.7), radius: 0.7},
        // material: box DiffuseMaterial { diffuse: Light::new(0.0, 0.0, 0.6), specular: Light::white(0.4), shininess: 50.0 }
        material: box RefractiveMaterial::new(1.0, 1.0, 1.0, 1.4)
    };
    // Box walls; the top plane emits light, the rest are diffuse.
    let bottom = make_global_diffuse_plane(0.0f32, -1.0, 0.0, 3.0);
    let top = make_emitter_plane(0.0f32, 1.0, 0.0, 3.0);
    let right = make_global_diffuse_plane(-1.0f32, 0.0, 0.0, 3.0);
    let left = make_global_diffuse_plane(1.0f32, 0.0, 0.0, 3.0);
    let back = make_global_diffuse_plane(0.0f32, 0.0, -1.0, 7.0);
    // NOTE(review): these two point lights (and their marker spheres
    // l1/l2) are constructed but never added to the scene below —
    // confirm whether `light_sources` should contain them.
    let (light_src1, l1) = make_light_source(-2.0, -2.0, 4.0, 2.0, 2.0, 2.0);
    let (light_src2, l2) = make_light_source(2.0, -1.0, 5.0, 2.0, 2.0, 2.0);
    Scene {
        objects: vec![bottom, top, left, right, back, mirror, glass, sphere],
        light_sources: vec![]
    }
}
// fn make_plane(a: f32, b: f32, c: f32, d: f32) -> Object {
// Object {
// shape: box Plane::from_abcd(a, b, c, d),
// material: box DiffuseMaterial { diffuse: Light::white(0.9), specular: Light::white(0.1), shininess: 50.0 }
// }
// }
/// Plane from the coefficients of ax + by + cz + d = 0, with a
/// light-emitting material.
fn make_emitter_plane(a: f32, b: f32, c: f32, d: f32) -> Object {
    Object {
        shape: box Plane::from_abcd(a, b, c, d),
        material: box EmitterMaterial::new(3.0, 3.0, 3.0)
    }
}
/// Plane from the coefficients of ax + by + cz + d = 0, with a
/// near-white global-diffuse material.
fn make_global_diffuse_plane(a: f32, b: f32, c: f32, d: f32) -> Object {
    Object {
        shape: box Plane::from_abcd(a, b, c, d),
        // material: box ReflectiveMaterial::new(1.0, 1.0, 1.0)
        material: box GlobalDiffuseMaterial::new(0.9, 0.9, 0.9, 5)
    }
}
/// Create a point light at (x, y, z) with the given RGB power, plus a
/// small emissive marker sphere (at 1/5 the power) to make the light
/// visible in renders.
fn make_light_source(x: f32, y: f32, z: f32, red: f32, green: f32, blue: f32) -> (LightSource, Object) {
    let position = Point3::new(x, y, z);
    let power = Light::new(red, green, blue);
    let light_mat = EmitterMaterial::new(red/5.0, green/5.0, blue/5.0);
    let obj = Object {
        shape: box Sphere {center: position, radius: 0.1},
        material: box light_mat
    };
    let ls = LightSource::new(position, power);
    (ls, obj)
}
| true
|
4ad57f30b063709f339da4c30d5de40d6aa26c7e
|
Rust
|
bauhaus93/dwarfs
|
/src/world/world_error.rs
|
UTF-8
| 1,183
| 2.890625
| 3
|
[] |
no_license
|
use std::fmt;
use std::error::Error;
use std::io;
use graphics::{ GraphicsError, mesh::MeshError };
/// Errors that can occur while building or rendering the world.
#[derive(Debug)]
pub enum WorldError {
    /// Wraps a graphics-layer failure.
    Graphics(GraphicsError),
    /// Wraps a failure while creating a mesh.
    MeshCreation(MeshError)
}
// These From impls let `?` convert the underlying errors into
// WorldError automatically.
impl From<GraphicsError> for WorldError {
    fn from(err: GraphicsError) -> Self {
        WorldError::Graphics(err)
    }
}
impl From<MeshError> for WorldError {
    fn from(err: MeshError) -> Self {
        WorldError::MeshCreation(err)
    }
}
impl Error for WorldError {
    // NOTE(review): `description` and `cause` are deprecated in later
    // Rust releases in favour of `Display` and `source`; kept as-is to
    // match the crate's current edition/toolchain — confirm the MSRV
    // before migrating.
    fn description(&self) -> &str {
        match *self {
            WorldError::Graphics(_) => "graphics",
            WorldError::MeshCreation(_) => "mesh creation",
        }
    }
    // Expose the wrapped error as the underlying cause.
    fn cause(&self) -> Option<&Error> {
        match *self {
            WorldError::Graphics(ref err) => Some(err),
            WorldError::MeshCreation(ref err) => Some(err),
        }
    }
}
impl fmt::Display for WorldError {
    // Format as "category/inner-error", e.g. "graphics/<cause>".
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            WorldError::Graphics(ref err) => write!(f, "{}/{}", self.description(), err),
            WorldError::MeshCreation(ref err) => write!(f, "{}/{}", self.description(), err),
        }
    }
}
| true
|
9f97c781e55d23ad5c91ba2422a071d16d5d3109
|
Rust
|
content-authenticity-initiative/img-parts
|
/src/jpeg/entropy.rs
|
UTF-8
| 1,999
| 3.28125
| 3
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
use std::io::{Read, Result, Write};
use byteorder::{ReadBytesExt, WriteBytesExt};
use super::markers;
/// Entropy-coded JPEG scan data, stored without the trailing EOI
/// marker (it is re-appended by `write_to`).
#[derive(Clone, PartialEq)]
pub struct Entropy {
    raw: Vec<u8>,
}
#[allow(clippy::len_without_is_empty)]
impl Entropy {
    /// Read entropy-coded data from `r` until the EOI marker.
    ///
    /// Every byte up to (but not including) the two-byte EOI marker
    /// is collected; a 0xFF byte followed by anything other than EOI
    /// is kept as ordinary entropy data.
    pub(crate) fn read(r: &mut dyn Read) -> Result<Entropy> {
        let mut raw = Vec::new();
        loop {
            let byte = r.read_u8()?;
            if byte == markers::P {
                // Possible marker: inspect the following byte.
                let marker_byte = r.read_u8()?;
                if marker_byte == markers::EOI {
                    // End of image; the marker itself is not stored.
                    return Ok(Entropy { raw });
                }
                // Not EOI: keep both bytes as data.
                raw.push(byte);
                raw.push(marker_byte);
            } else {
                raw.push(byte);
            }
        }
    }
    /// Encoded length: the raw data plus the two-byte EOI marker.
    pub fn len(&self) -> usize {
        2 + self.raw.len()
    }
    /// Write the raw data followed by the EOI marker to `w`.
    pub(crate) fn write_to(&self, w: &mut dyn Write) -> Result<()> {
        w.write_all(&self.raw)?;
        w.write_u8(markers::P)?;
        w.write_u8(markers::EOI)?;
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_read_entropy() {
        // Ends with FF D9 (EOI); everything before it is entropy data,
        // including the FF 00 stuffed sequences.
        let input: &[u8] = &[
            0xE2, 0xFF, 0xE2, 0x51, 0xE7, 0xFF, 0x00, 0xAA, 0xFD, 0xFF, 0x00, 0xCA, 0xFF, 0xD9,
        ];
        let output = Entropy::read(&mut &input[..]).expect("read_entropy");
        // len() includes the two-byte EOI marker, so it equals the
        // full input length while raw excludes the marker.
        assert_eq!(output.len(), input.len());
        assert_eq!(output.raw.as_slice(), &input[..input.len() - 2]);
    }
    #[test]
    fn test_write_entropy() {
        let mut raw = vec![0xE2, 0xFF, 0xE2, 0x51, 0xE7, 0xFF, 0xAA, 0xFD, 0xFF, 0xCA];
        let entropy = Entropy { raw: raw.clone() };
        let mut output = Vec::new();
        entropy.write_to(&mut output).expect("write_entropy");
        // write_to must append the EOI marker after the raw data.
        raw.push(markers::P);
        raw.push(markers::EOI);
        assert_eq!(entropy.len(), raw.len());
        assert_eq!(output.as_slice(), raw.as_slice());
    }
}
| true
|
28d414328289645ec78dd3be20450f396eb79033
|
Rust
|
sugyan/leetcode
|
/problems/0055-jump-game/lib.rs
|
UTF-8
| 499
| 3.25
| 3
|
[] |
no_license
|
pub struct Solution;
impl Solution {
    /// Returns true if the last index of `nums` is reachable from
    /// index 0, where `nums[i]` is the maximum jump length from `i`.
    ///
    /// Walks backwards keeping `n` = the number of steps needed to
    /// reach the nearest index from which the end is reachable; the
    /// start can reach the end iff `n == 1` after index 0.
    /// Empty and single-element inputs are trivially reachable.
    pub fn can_jump(nums: Vec<i32>) -> bool {
        let mut n = 1;
        // saturating_sub avoids the usize underflow panic that
        // `nums.len() - 1` caused on an empty input.
        for i in (0..nums.len().saturating_sub(1)).rev() {
            n = if nums[i] < n { n + 1 } else { 1 };
        }
        n == 1
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn example_1() {
        // Reachable: e.g. 0 -> 1 -> 4 (last index).
        // assert! instead of assert_eq!(true, ..) — clippy
        // bool_assert_comparison.
        assert!(Solution::can_jump(vec![2, 3, 1, 1, 4]));
    }
    #[test]
    fn example_2() {
        // Index 3 (value 0) is a dead end that cannot be jumped over.
        assert!(!Solution::can_jump(vec![3, 2, 1, 0, 4]));
    }
}
| true
|
9cb50343fa7d8b4683d022a87b1941f71b6aa9e3
|
Rust
|
AE9RB/basic-rust-games
|
/hexapawn/src/main.rs
|
UTF-8
| 7,664
| 3.109375
| 3
|
[] |
no_license
|
use std::collections::hash_map::Entry;
use std::collections::HashMap;
use std::fmt;
use std::io;
use std::io::Write;
use std::ops::{Index, IndexMut};
extern crate rand;
use rand::seq::SliceRandom;
use rand::thread_rng;
// All geometrically possible pawn moves for White as (from, to)
// square pairs on the calculator-style 1-9 board. Pairs differing by
// exactly 3 are forward steps; the rest are diagonal captures.
// Black's moves are derived by mirroring each square through 10 - s
// (see Move::all_black_moves).
const MOTION: [[u8; 2]; 14] = [
    [1, 4],
    [1, 5],
    [2, 4],
    [2, 5],
    [2, 6],
    [3, 5],
    [3, 6],
    [4, 7],
    [4, 8],
    [5, 7],
    [5, 8],
    [5, 9],
    [6, 8],
    [6, 9],
];
// Format-string template for printing the 3x3 board; takes nine cell
// values, top row first (callers pass squares 7,8,9 then 4,5,6 then
// 1,2,3).
macro_rules! BOARD {
    () => {
        "+---+---+---+\n\
         | {} | {} | {} |\n\
         +---+---+---+\n\
         | {} | {} | {} |\n\
         +---+---+---+\n\
         | {} | {} | {} |\n\
         +---+---+---+\n"
    };
}
/// A pawn move between two squares (calculator numbering, 1-9).
#[derive(Copy, Clone, PartialEq, Eq)]
struct Move {
    from: u8,
    to: u8,
}
impl fmt::Display for Move {
    // Rendered as "from,to", the same format read_move parses.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{},{}", self.from, self.to)
    }
}
impl Move {
    /// A move is a straight (non-capturing) step iff the squares are
    /// exactly one rank — 3 board positions — apart.
    fn is_forward(&self) -> bool {
        if self.from > self.to {
            self.from - self.to == 3
        } else {
            self.to - self.from == 3
        }
    }
    /// Every geometrically possible White move, straight from the
    /// MOTION table.
    fn all_white_moves() -> Vec<Move> {
        MOTION
            .iter()
            .map(|m| Move {
                from: m[0],
                to: m[1],
            })
            .collect()
    }
    /// Every geometrically possible Black move: White's moves
    /// mirrored through the board centre (square s maps to 10 - s).
    fn all_black_moves() -> Vec<Move> {
        MOTION
            .iter()
            .map(|m| Move {
                from: 10 - m[0],
                to: 10 - m[1],
            })
            .collect()
    }
}
/// Contents of one board square.
#[derive(Hash, Clone, Copy, PartialEq, Eq)]
enum Cell {
    Empty,
    White,
    Black,
}
impl fmt::Display for Cell {
    // White renders as "O", Black as "X", empty as a space.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Cell::Empty => write!(f, " "),
            Cell::White => write!(f, "O"),
            Cell::Black => write!(f, "X"),
        }
    }
}
/// The 3x3 board. Squares are numbered 1-9 and accessed 1-based via
/// the Index impls below. Hash + Eq let boards key the Brain's
/// learned-move table.
#[derive(Hash, Copy, Clone, PartialEq, Eq)]
struct Board {
    cells: [Cell; 9],
}
impl Index<u8> for Board {
    type Output = Cell;
    // 1-based indexing: square n is stored at cells[n - 1].
    fn index(&self, index: u8) -> &Self::Output {
        &self.cells[(index - 1) as usize]
    }
}
impl IndexMut<u8> for Board {
    fn index_mut(&mut self, index: u8) -> &mut Self::Output {
        &mut self.cells[(index - 1) as usize]
    }
}
impl Board {
    /// Starting position: White pawns on squares 1-3, Black on 7-9.
    fn new() -> Board {
        Board {
            cells: [
                Cell::White,
                Cell::White,
                Cell::White,
                Cell::Empty,
                Cell::Empty,
                Cell::Empty,
                Cell::Black,
                Cell::Black,
                Cell::Black,
            ],
        }
    }
    /// All legal moves for White in this position.
    ///
    /// A pawn may step forward only onto an empty square, and may
    /// move diagonally only to capture an enemy pawn.
    fn white_moves(&self) -> Vec<Move> {
        let mut moves = Move::all_white_moves();
        moves.retain(|m| {
            self[m.from] == Cell::White
                && if m.is_forward() {
                    // Forward steps must land on an empty square.
                    // (Previously only an enemy pawn was rejected
                    // here, which let a pawn step onto a friendly
                    // pawn and silently destroy it.)
                    self[m.to] == Cell::Empty
                } else {
                    // Diagonal moves are captures only.
                    self[m.to] == Cell::Black
                }
        });
        moves
    }
    /// All legal moves for Black in this position (mirror of
    /// `white_moves`).
    fn black_moves(&self) -> Vec<Move> {
        let mut moves = Move::all_black_moves();
        moves.retain(|m| {
            self[m.from] == Cell::Black
                && if m.is_forward() {
                    self[m.to] == Cell::Empty
                } else {
                    self[m.to] == Cell::White
                }
        });
        moves
    }
    /// Apply `mov`, leaving the source square empty (captures simply
    /// overwrite the destination).
    fn do_move(&mut self, mov: &Move) {
        self[mov.to] = self[mov.from];
        self[mov.from] = Cell::Empty;
    }
    /// True if a Black pawn reached White's home rank (squares 1-3).
    fn black_promoted(&self) -> bool {
        (1..=3).any(|i| self[i] == Cell::Black)
    }
    /// True if a White pawn reached Black's home rank (squares 7-9).
    fn white_promoted(&self) -> bool {
        (7..=9).any(|i| self[i] == Cell::White)
    }
}
impl fmt::Display for Board {
    // Print with squares 7-9 on the top row and 1-3 on the bottom,
    // matching the calculator layout shown at startup.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            BOARD!(),
            self[7], self[8], self[9], self[4], self[5], self[6], self[1], self[2], self[3]
        )
    }
}
struct Brain {
brain: HashMap<Board, Vec<Move>>,
}
impl Brain {
fn new() -> Self {
Brain {
brain: HashMap::new(),
}
}
fn get(&mut self, board: &Board) -> &mut Vec<Move> {
match self.brain.entry(*board) {
Entry::Occupied(o) => o.into_mut(),
Entry::Vacant(v) => v.insert(board.black_moves()),
}
}
}
/// Game loop: the human plays White, the learning AI plays Black.
/// After each game the win totals are printed and a new game starts.
fn main() {
    println!();
    println!(" *********************");
    println!(" *  H E X A P A W N  *");
    println!(" *********************");
    println!();
    print!(BOARD!(), 7, 8, 9, 4, 5, 6, 1, 2, 3);
    println!("It's your pawns against mine.");
    println!("The board is numbered like a calculator.");
    println!("You lose when you can no longer make a move.");
    println!("You lose when I reach the third rank.");
    println!("You go first.");
    println!();
    let mut brain = Brain::new();
    let mut black_wins = 0;
    let mut white_wins = 0;
    loop {
        // (position, move) pairs played by Black this game, used for
        // punishment learning when Black loses.
        let mut history: Vec<(Board, Move)> = Vec::new();
        let mut board = Board::new();
        loop {
            println!("{}", board);
            // White (human) to move; White loses with no moves or if
            // Black has promoted.
            let white_moves = board.white_moves();
            if white_moves.is_empty() || board.black_promoted() {
                println!("Black wins!");
                black_wins += 1;
                break;
            }
            let mov = read_move(&white_moves);
            board.do_move(&mov);
            println!("{}", board);
            // Black (AI) to move; Black loses with no moves or if
            // White has promoted.
            let black_moves = board.black_moves();
            if black_moves.is_empty() || board.white_promoted() {
                println!("White wins!");
                white_wins += 1;
                // Punish the losing line: delete Black's most recent
                // move from its position's move list; if that empties
                // the list, the previous move is punished too, and so
                // on up the game.
                // NOTE(review): pops unconditionally — this panics if
                // `history` is exhausted while every recorded position
                // has been pruned empty; confirm that cannot happen.
                loop {
                    let (b, m) = history.pop().unwrap();
                    let mvs = brain.get(&b);
                    mvs.retain(|&mm| mm != m);
                    if !mvs.is_empty() {
                        break
                    }
                }
                break;
            }
            // Pick a random not-yet-pruned move and record it.
            let mov = brain.get(&board).choose(&mut thread_rng()).unwrap();
            history.push((board,*mov));
            board.do_move(&mov);
            println!("I move: {}", mov);
        }
        println!();
        println!("Black has {} wins. You have {} wins.", black_wins, white_wins);
        if black_wins > white_wins {
            println!("The student has become the master.");
        } else {
            println!("Teach me senpai. Let's go again.");
        }
        println!();
    }
}
/// Prompts the player until they enter a legal move.
///
/// Input format is two square numbers separated by a comma
/// (`source,destination`). The entered move is accepted only if it appears
/// in `moves`, the list of currently legal white moves; otherwise the
/// player is asked again.
///
/// The parameter is `&[Move]` rather than `&Vec<Move>` (clippy `ptr_arg`);
/// existing `read_move(&white_moves)` call sites still compile unchanged
/// thanks to deref coercion.
fn read_move(moves: &[Move]) -> Move {
    loop {
        print!("Your move? ");
        // `print!` does not flush, so force the prompt out before blocking
        // on stdin.
        io::stdout().flush().unwrap();
        let mut input = String::new();
        io::stdin()
            .read_line(&mut input)
            .expect("Failed to read line");
        let vec: Vec<&str> = input.split(",").collect();
        if vec.len() == 2 {
            if let Ok(f) = vec[0].trim().parse::<u8>() {
                if let Ok(t) = vec[1].trim().parse::<u8>() {
                    let mov = Move { from: f, to: t };
                    // Well-formed input: accept only if currently legal.
                    if moves.iter().any(|m| mov == *m) {
                        return mov;
                    }
                    println!("Invalid move");
                    continue;
                }
            }
        }
        println!("Expected digit-comma-digit e.g. 0,9");
    }
}
| true
|
ed69dc53c6aab95cbcdca48fb9917fa78f55727a
|
Rust
|
wjzz/eval-in-rust
|
/src/eval/parsing.rs
|
UTF-8
| 6,052
| 3.703125
| 4
|
[] |
no_license
|
pub mod token;
use token::Token;
/// Binary arithmetic operators supported by the expression language.
#[derive(Debug, PartialEq, Eq)]
pub enum BinOperator {
    Plus,
    Mult,
}
/// Abstract syntax tree for expressions: numeric literals, variable
/// references, and binary operations.
#[derive(Debug, PartialEq, Eq)]
pub enum Expr {
    Number(u32),
    Var(String),
    BinOp(BinOperator, Box<Expr>, Box<Expr>),
}
/// A top-level statement: either a bare expression to evaluate or an
/// assignment of an expression to a variable.
#[derive(Debug, PartialEq, Eq)]
pub enum Stm {
    EvalExpr(Box<Expr>),
    Assign(String, Box<Expr>),
}
/// Recursive-descent parser state. `tokens` is stored in reverse order so
/// the next token can be taken cheaply with `Vec::pop` (see `parse_top`).
struct Parser {
    tokens: Vec<Token>,
}
impl Parser {
    /// Wraps a (reversed) token stream in a parser.
    fn new(tokens: Vec<Token>) -> Self {
        Parser { tokens }
    }

    /// Removes and returns the next token, failing on exhausted input.
    fn get_token(&mut self) -> Result<Token, String> {
        match self.tokens.pop() {
            None => Err(String::from("Unexpected end of input")),
            Some(token) => Ok(token),
        }
    }

    /// Consumes the next token and verifies that it equals `token`.
    fn expect(&mut self, token: Token) -> Result<(), String> {
        let tk = self.get_token()?;
        if tk == token {
            Ok(())
        } else {
            Err(format!("Expected {:?}, found {:?} instead", token, tk))
        }
    }

    // Simple backtracking: snapshot the remaining tokens so an alternative
    // that fails to parse can be undone with `restore_checkpoint`.
    fn checkpoint(&mut self) -> Vec<Token> {
        self.tokens.clone()
    }
    fn restore_checkpoint(&mut self, tokens: Vec<Token>) {
        self.tokens = tokens;
    }

    /// atom ::= '(' expr ')' | NUMBER | VAR
    fn parse_atom(&mut self) -> Result<Expr, String> {
        let token = self.get_token()?;
        match token {
            Token::LParen => {
                let e = self.parse_expr();
                self.expect(Token::RParen)?;
                e
            }
            Token::Number(n) => Ok(Expr::Number(n)),
            Token::Var(v) => Ok(Expr::Var(v)),
            // Fixed message: the old text rendered as "Expected { or
            // digit", but the alternatives accepted here are '(', a
            // number, or a variable.
            _ => Err(format!("Expected '(', digit, or variable, got {:?}", token)),
        }
    }

    /// factor ::= atom ('*' factor)?  — right-associative (see tests).
    fn parse_factor(&mut self) -> Result<Expr, String> {
        let lhs = self.parse_atom()?;
        let token = self.get_token()?;
        if token == Token::Mult {
            let rhs = self.parse_factor()?;
            Ok(Expr::BinOp(BinOperator::Mult, Box::new(lhs), Box::new(rhs)))
        } else {
            // One-token lookahead that wasn't ours: push it back.
            self.tokens.push(token);
            Ok(lhs)
        }
    }

    /// expr ::= factor ('+' expr)?  — right-associative (see tests).
    fn parse_expr(&mut self) -> Result<Expr, String> {
        let lhs = self.parse_factor()?;
        let token = self.get_token()?;
        if token == Token::Plus {
            let rhs = self.parse_expr()?;
            Ok(Expr::BinOp(BinOperator::Plus, Box::new(lhs), Box::new(rhs)))
        } else {
            // One-token lookahead that wasn't ours: push it back.
            self.tokens.push(token);
            Ok(lhs)
        }
    }

    /// Parses a single variable token into its name.
    fn parse_var(&mut self) -> Result<String, String> {
        if let Token::Var(v) = self.get_token()? {
            Ok(v)
        } else {
            Err(String::from("Not a variable!"))
        }
    }

    /// assignment ::= VAR '=' expr
    fn parse_assign(&mut self) -> Result<Stm, String> {
        let v = self.parse_var()?;
        self.expect(Token::Equal)?;
        let e = self.parse_expr()?;
        Ok(Stm::Assign(v, Box::new(e)))
    }

    /// stm ::= assignment | expr, resolved by backtracking: try an
    /// assignment first and fall back to a bare expression on failure.
    fn parse_stm(&mut self) -> Result<Stm, String> {
        let checkpoint = self.checkpoint();
        self.parse_assign().or_else(|_err| {
            self.restore_checkpoint(checkpoint);
            Ok(Stm::EvalExpr(Box::new(self.parse_expr()?)))
        })
    }

    /// Parses one statement and requires the input to be fully consumed
    /// (next token must be EOF).
    fn parse_all(&mut self) -> Result<Stm, String> {
        let e = self.parse_stm();
        self.expect(Token::EOF)?;
        e
    }
}
pub fn parse_top(input: &str) -> Result<Stm, String> {
let mut tokens = token::tokenize(input)?;
tokens.reverse();
Parser::new(tokens).parse_all()
}
// Unit tests for the parser. They exercise precedence ('*' binds tighter
// than '+'), associativity (both operators are right-associative),
// parenthesized grouping, and assignment statements.
#[cfg(test)]
mod tests {
    use super::*;
    use BinOperator::*;
    use Expr::*;
    use Stm::*;
    // Parses `input` and unwraps the result as a bare expression,
    // failing if it parsed as an assignment instead.
    fn parse_top_expr(input: &str) -> Result<Expr, String> {
        let r = parse_top(input)?;
        if let Stm::EvalExpr(e) = r {
            Ok(*e)
        } else {
            Err(String::from("Not an expr"))
        }
    }
    // AST construction helpers to keep the assertions terse.
    fn lit(n: u32) -> Expr {
        Number(n)
    }
    fn var(s: &str) -> Expr {
        Var(String::from(s))
    }
    fn eq(v: &str, e: Expr) -> Stm {
        Assign(String::from(v), Box::new(e))
    }
    fn plus(e1: Expr, e2: Expr) -> Expr {
        BinOp(Plus, Box::new(e1), Box::new(e2))
    }
    fn mult(e1: Expr, e2: Expr) -> Expr {
        BinOp(Mult, Box::new(e1), Box::new(e2))
    }
    #[test]
    fn test_empty() {
        assert!(parse_top_expr("").is_err());
    }
    #[test]
    fn test_literal() {
        assert_eq!(Ok(lit(10)), parse_top_expr("10"));
    }
    #[test]
    fn test_plus() {
        assert_eq!(Ok(plus(lit(2), lit(3))), parse_top_expr("2+3"));
    }
    #[test]
    fn test_plus_var_left() {
        assert_eq!(Ok(plus(var("x"), lit(3))), parse_top_expr("x+3"));
    }
    #[test]
    fn test_plus_var_right() {
        assert_eq!(Ok(plus(lit(3), var("x"))), parse_top_expr("3+x"));
    }
    // '+' is right-associative: 2+3+4 parses as 2+(3+4).
    #[test]
    fn test_plus_many() {
        assert_eq!(
            Ok(plus(lit(2), plus(lit(3), lit(4)))),
            parse_top_expr("2+3+4")
        );
    }
    #[test]
    fn test_mult() {
        assert_eq!(Ok(mult(lit(2), lit(3))), parse_top_expr("2*3"));
    }
    // '*' is right-associative as well: 2*3*4 parses as 2*(3*4).
    #[test]
    fn test_mult_many() {
        assert_eq!(
            Ok(mult(lit(2), mult(lit(3), lit(4)))),
            parse_top_expr("2*3*4")
        );
    }
    // '*' binds tighter than '+'.
    #[test]
    fn test_mult_plus() {
        assert_eq!(
            Ok(plus(lit(2), mult(lit(3), lit(4)))),
            parse_top_expr("2+3*4")
        );
    }
    #[test]
    fn test_mult_plus2() {
        assert_eq!(
            Ok(plus(mult(lit(2), lit(3)), lit(4))),
            parse_top_expr("2*3+4")
        );
    }
    #[test]
    fn test_parens1() {
        assert_eq!(Ok(lit(2)), parse_top_expr("(2)"));
    }
    #[test]
    fn test_parens2() {
        assert_eq!(Ok(plus(lit(2), lit(3))), parse_top_expr("(2+3)"));
    }
    #[test]
    fn test_parens3() {
        assert_eq!(Ok(plus(lit(2), lit(3))), parse_top_expr("(2)+3"));
    }
    #[test]
    fn test_parens4() {
        assert_eq!(Ok(mult(lit(2), lit(3))), parse_top_expr("(2)*3"));
    }
    #[test]
    fn test_assign1() {
        assert_eq!(Ok(eq("x", lit(5))), parse_top("x = 5"));
    }
    #[test]
    fn test_assign2() {
        assert_eq!(Ok(eq("x", plus(lit(1), lit(5)))), parse_top("x = 1 + 5"));
    }
}
| true
|
3dc1a423269fee4b61a849cdcac29b69d1830c4e
|
Rust
|
tetcoin/tetsy-wasm-gc
|
/wasm-gc-api/src/error.rs
|
UTF-8
| 518
| 2.640625
| 3
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
use std::error;
use std::fmt;
use tetsy_wasm::elements::Error as TetsyWasmError;
/// The error type for garbage collecting webassembly bytecode.
// Thin newtype over the underlying tetsy-wasm element error.
#[derive(Debug)]
pub struct Error(TetsyWasmError);
impl error::Error for Error {
    // NOTE(review): `Error::description` is deprecated in favor of the
    // `Display` impl below; kept here for compatibility with older
    // callers/toolchains — confirm before removing.
    fn description(&self) -> &str {
        "webassembly garbage collection failed"
    }
}
impl fmt::Display for Error {
    // Formats the wrapped error via its Debug representation.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:?}", self.0)
    }
}
pub fn from(tetsy: TetsyWasmError) -> Error {
Error(tetsy)
}
| true
|
93a3d4abdea57bd03abe7b13cf4d53432c8d10db
|
Rust
|
nvzqz/chance-rs
|
/src/rng/panic.rs
|
UTF-8
| 1,365
| 3.109375
| 3
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
use core::fmt::Debug;
use crate::prelude::*;
/// A wrapper around a [`TryRng`](trait.TryRng.html) that implements
/// [`Rng`](trait.Rng.html) via panicking if an error occurred.
// `repr(transparent)` guarantees the same memory layout as `R`, which is
// what makes the reference cast in `from_mut` sound.
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(transparent)]
pub struct PanickingRng<R: ?Sized>(pub R);
impl<R> From<R> for PanickingRng<R> {
    /// Wraps an owned generator in a `PanickingRng` by value.
    #[inline]
    fn from(rng: R) -> Self {
        PanickingRng(rng)
    }
}
impl<R: ?Sized> PanickingRng<R> {
    /// Wraps the mutable reference `rng` as a mutable reference of type
    /// `PanickingRng`.
    #[inline]
    pub fn from_mut(rng: &mut R) -> &mut Self {
        // SAFETY: `PanickingRng<R>` is `#[repr(transparent)]` over `R`,
        // so both types have identical layout and the pointer cast (and
        // resulting reference) is sound for the lifetime of `rng`.
        unsafe { &mut *(rng as *mut R as *mut Self) }
    }
}
// Delegates every `Rng` method to the underlying fallible `TryRng`
// implementation, panicking (via `unwrap`) if the generator reports an
// error. `R::Error: Debug` is required so the panic can display the error.
impl<R: ?Sized + TryRng> Rng for PanickingRng<R>
where R::Error: Debug
{
    #[inline]
    fn fill_bytes(&mut self, buf: &mut [u8]) {
        self.0.try_fill_bytes(buf).unwrap();
    }
    #[inline]
    fn next_u8(&mut self) -> u8 {
        self.0.try_next_u8().unwrap()
    }
    #[inline]
    fn next_u16(&mut self) -> u16 {
        self.0.try_next_u16().unwrap()
    }
    #[inline]
    fn next_u32(&mut self) -> u32 {
        self.0.try_next_u32().unwrap()
    }
    #[inline]
    fn next_u64(&mut self) -> u64 {
        self.0.try_next_u64().unwrap()
    }
    #[inline]
    fn next_u128(&mut self) -> u128 {
        self.0.try_next_u128().unwrap()
    }
}
| true
|
a07e21c9baeecbd4dc1bee915b40c846fe151ef5
|
Rust
|
marco-c/gecko-dev-wordified
|
/third_party/rust/rayon/src/slice/mergesort.rs
|
UTF-8
| 25,108
| 2.6875
| 3
|
[
"LicenseRef-scancode-unknown-license-reference",
"MIT",
"Apache-2.0"
] |
permissive
|
/
/
!
Parallel
merge
sort
.
/
/
!
/
/
!
This
implementation
is
copied
verbatim
from
std
:
:
slice
:
:
sort
and
then
parallelized
.
/
/
!
The
only
difference
from
the
original
is
that
the
sequential
mergesort
returns
/
/
!
MergesortResult
and
leaves
descending
arrays
intact
.
use
crate
:
:
iter
:
:
*
;
use
crate
:
:
slice
:
:
ParallelSliceMut
;
use
crate
:
:
SendPtr
;
use
std
:
:
mem
;
use
std
:
:
mem
:
:
size_of
;
use
std
:
:
ptr
;
use
std
:
:
slice
;
unsafe
fn
get_and_increment
<
T
>
(
ptr
:
&
mut
*
mut
T
)
-
>
*
mut
T
{
let
old
=
*
ptr
;
*
ptr
=
ptr
.
offset
(
1
)
;
old
}
unsafe
fn
decrement_and_get
<
T
>
(
ptr
:
&
mut
*
mut
T
)
-
>
*
mut
T
{
*
ptr
=
ptr
.
offset
(
-
1
)
;
*
ptr
}
/
/
/
When
dropped
copies
from
src
into
dest
a
sequence
of
length
len
.
struct
CopyOnDrop
<
T
>
{
src
:
*
const
T
dest
:
*
mut
T
len
:
usize
}
impl
<
T
>
Drop
for
CopyOnDrop
<
T
>
{
fn
drop
(
&
mut
self
)
{
unsafe
{
ptr
:
:
copy_nonoverlapping
(
self
.
src
self
.
dest
self
.
len
)
;
}
}
}
/
/
/
Inserts
v
[
0
]
into
pre
-
sorted
sequence
v
[
1
.
.
]
so
that
whole
v
[
.
.
]
becomes
sorted
.
/
/
/
/
/
/
This
is
the
integral
subroutine
of
insertion
sort
.
fn
insert_head
<
T
F
>
(
v
:
&
mut
[
T
]
is_less
:
&
F
)
where
F
:
Fn
(
&
T
&
T
)
-
>
bool
{
if
v
.
len
(
)
>
=
2
&
&
is_less
(
&
v
[
1
]
&
v
[
0
]
)
{
unsafe
{
/
/
There
are
three
ways
to
implement
insertion
here
:
/
/
/
/
1
.
Swap
adjacent
elements
until
the
first
one
gets
to
its
final
destination
.
/
/
However
this
way
we
copy
data
around
more
than
is
necessary
.
If
elements
are
big
/
/
structures
(
costly
to
copy
)
this
method
will
be
slow
.
/
/
/
/
2
.
Iterate
until
the
right
place
for
the
first
element
is
found
.
Then
shift
the
/
/
elements
succeeding
it
to
make
room
for
it
and
finally
place
it
into
the
/
/
remaining
hole
.
This
is
a
good
method
.
/
/
/
/
3
.
Copy
the
first
element
into
a
temporary
variable
.
Iterate
until
the
right
place
/
/
for
it
is
found
.
As
we
go
along
copy
every
traversed
element
into
the
slot
/
/
preceding
it
.
Finally
copy
data
from
the
temporary
variable
into
the
remaining
/
/
hole
.
This
method
is
very
good
.
Benchmarks
demonstrated
slightly
better
/
/
performance
than
with
the
2nd
method
.
/
/
/
/
All
methods
were
benchmarked
and
the
3rd
showed
best
results
.
So
we
chose
that
one
.
let
tmp
=
mem
:
:
ManuallyDrop
:
:
new
(
ptr
:
:
read
(
&
v
[
0
]
)
)
;
/
/
Intermediate
state
of
the
insertion
process
is
always
tracked
by
hole
which
/
/
serves
two
purposes
:
/
/
1
.
Protects
integrity
of
v
from
panics
in
is_less
.
/
/
2
.
Fills
the
remaining
hole
in
v
in
the
end
.
/
/
/
/
Panic
safety
:
/
/
/
/
If
is_less
panics
at
any
point
during
the
process
hole
will
get
dropped
and
/
/
fill
the
hole
in
v
with
tmp
thus
ensuring
that
v
still
holds
every
object
it
/
/
initially
held
exactly
once
.
let
mut
hole
=
InsertionHole
{
src
:
&
*
tmp
dest
:
&
mut
v
[
1
]
}
;
ptr
:
:
copy_nonoverlapping
(
&
v
[
1
]
&
mut
v
[
0
]
1
)
;
for
i
in
2
.
.
v
.
len
(
)
{
if
!
is_less
(
&
v
[
i
]
&
*
tmp
)
{
break
;
}
ptr
:
:
copy_nonoverlapping
(
&
v
[
i
]
&
mut
v
[
i
-
1
]
1
)
;
hole
.
dest
=
&
mut
v
[
i
]
;
}
/
/
hole
gets
dropped
and
thus
copies
tmp
into
the
remaining
hole
in
v
.
}
}
/
/
When
dropped
copies
from
src
into
dest
.
struct
InsertionHole
<
T
>
{
src
:
*
const
T
dest
:
*
mut
T
}
impl
<
T
>
Drop
for
InsertionHole
<
T
>
{
fn
drop
(
&
mut
self
)
{
unsafe
{
ptr
:
:
copy_nonoverlapping
(
self
.
src
self
.
dest
1
)
;
}
}
}
}
/
/
/
Merges
non
-
decreasing
runs
v
[
.
.
mid
]
and
v
[
mid
.
.
]
using
buf
as
temporary
storage
and
/
/
/
stores
the
result
into
v
[
.
.
]
.
/
/
/
/
/
/
#
Safety
/
/
/
/
/
/
The
two
slices
must
be
non
-
empty
and
mid
must
be
in
bounds
.
Buffer
buf
must
be
long
enough
/
/
/
to
hold
a
copy
of
the
shorter
slice
.
Also
T
must
not
be
a
zero
-
sized
type
.
unsafe
fn
merge
<
T
F
>
(
v
:
&
mut
[
T
]
mid
:
usize
buf
:
*
mut
T
is_less
:
&
F
)
where
F
:
Fn
(
&
T
&
T
)
-
>
bool
{
let
len
=
v
.
len
(
)
;
let
v
=
v
.
as_mut_ptr
(
)
;
let
v_mid
=
v
.
add
(
mid
)
;
let
v_end
=
v
.
add
(
len
)
;
/
/
The
merge
process
first
copies
the
shorter
run
into
buf
.
Then
it
traces
the
newly
copied
/
/
run
and
the
longer
run
forwards
(
or
backwards
)
comparing
their
next
unconsumed
elements
and
/
/
copying
the
lesser
(
or
greater
)
one
into
v
.
/
/
/
/
As
soon
as
the
shorter
run
is
fully
consumed
the
process
is
done
.
If
the
longer
run
gets
/
/
consumed
first
then
we
must
copy
whatever
is
left
of
the
shorter
run
into
the
remaining
/
/
hole
in
v
.
/
/
/
/
Intermediate
state
of
the
process
is
always
tracked
by
hole
which
serves
two
purposes
:
/
/
1
.
Protects
integrity
of
v
from
panics
in
is_less
.
/
/
2
.
Fills
the
remaining
hole
in
v
if
the
longer
run
gets
consumed
first
.
/
/
/
/
Panic
safety
:
/
/
/
/
If
is_less
panics
at
any
point
during
the
process
hole
will
get
dropped
and
fill
the
/
/
hole
in
v
with
the
unconsumed
range
in
buf
thus
ensuring
that
v
still
holds
every
/
/
object
it
initially
held
exactly
once
.
let
mut
hole
;
if
mid
<
=
len
-
mid
{
/
/
The
left
run
is
shorter
.
ptr
:
:
copy_nonoverlapping
(
v
buf
mid
)
;
hole
=
MergeHole
{
start
:
buf
end
:
buf
.
add
(
mid
)
dest
:
v
}
;
/
/
Initially
these
pointers
point
to
the
beginnings
of
their
arrays
.
let
left
=
&
mut
hole
.
start
;
let
mut
right
=
v_mid
;
let
out
=
&
mut
hole
.
dest
;
while
*
left
<
hole
.
end
&
&
right
<
v_end
{
/
/
Consume
the
lesser
side
.
/
/
If
equal
prefer
the
left
run
to
maintain
stability
.
let
to_copy
=
if
is_less
(
&
*
right
&
*
*
left
)
{
get_and_increment
(
&
mut
right
)
}
else
{
get_and_increment
(
left
)
}
;
ptr
:
:
copy_nonoverlapping
(
to_copy
get_and_increment
(
out
)
1
)
;
}
}
else
{
/
/
The
right
run
is
shorter
.
ptr
:
:
copy_nonoverlapping
(
v_mid
buf
len
-
mid
)
;
hole
=
MergeHole
{
start
:
buf
end
:
buf
.
add
(
len
-
mid
)
dest
:
v_mid
}
;
/
/
Initially
these
pointers
point
past
the
ends
of
their
arrays
.
let
left
=
&
mut
hole
.
dest
;
let
right
=
&
mut
hole
.
end
;
let
mut
out
=
v_end
;
while
v
<
*
left
&
&
buf
<
*
right
{
/
/
Consume
the
greater
side
.
/
/
If
equal
prefer
the
right
run
to
maintain
stability
.
let
to_copy
=
if
is_less
(
&
*
right
.
offset
(
-
1
)
&
*
left
.
offset
(
-
1
)
)
{
decrement_and_get
(
left
)
}
else
{
decrement_and_get
(
right
)
}
;
ptr
:
:
copy_nonoverlapping
(
to_copy
decrement_and_get
(
&
mut
out
)
1
)
;
}
}
/
/
Finally
hole
gets
dropped
.
If
the
shorter
run
was
not
fully
consumed
whatever
remains
of
/
/
it
will
now
be
copied
into
the
hole
in
v
.
/
/
When
dropped
copies
the
range
start
.
.
end
into
dest
.
.
.
struct
MergeHole
<
T
>
{
start
:
*
mut
T
end
:
*
mut
T
dest
:
*
mut
T
}
impl
<
T
>
Drop
for
MergeHole
<
T
>
{
fn
drop
(
&
mut
self
)
{
/
/
T
is
not
a
zero
-
sized
type
so
it
'
s
okay
to
divide
by
its
size
.
unsafe
{
let
len
=
self
.
end
.
offset_from
(
self
.
start
)
as
usize
;
ptr
:
:
copy_nonoverlapping
(
self
.
start
self
.
dest
len
)
;
}
}
}
}
/
/
/
The
result
of
merge
sort
.
#
[
must_use
]
#
[
derive
(
Clone
Copy
PartialEq
Eq
)
]
enum
MergesortResult
{
/
/
/
The
slice
has
already
been
sorted
.
NonDescending
/
/
/
The
slice
has
been
descending
and
therefore
it
was
left
intact
.
Descending
/
/
/
The
slice
was
sorted
.
Sorted
}
/
/
/
A
sorted
run
that
starts
at
index
start
and
is
of
length
len
.
#
[
derive
(
Clone
Copy
)
]
struct
Run
{
start
:
usize
len
:
usize
}
/
/
/
Examines
the
stack
of
runs
and
identifies
the
next
pair
of
runs
to
merge
.
More
specifically
/
/
/
if
Some
(
r
)
is
returned
that
means
runs
[
r
]
and
runs
[
r
+
1
]
must
be
merged
next
.
If
the
/
/
/
algorithm
should
continue
building
a
new
run
instead
None
is
returned
.
/
/
/
/
/
/
TimSort
is
infamous
for
its
buggy
implementations
as
described
here
:
/
/
/
http
:
/
/
envisage
-
project
.
eu
/
timsort
-
specification
-
and
-
verification
/
/
/
/
/
/
/
The
gist
of
the
story
is
:
we
must
enforce
the
invariants
on
the
top
four
runs
on
the
stack
.
/
/
/
Enforcing
them
on
just
top
three
is
not
sufficient
to
ensure
that
the
invariants
will
still
/
/
/
hold
for
*
all
*
runs
in
the
stack
.
/
/
/
/
/
/
This
function
correctly
checks
invariants
for
the
top
four
runs
.
Additionally
if
the
top
/
/
/
run
starts
at
index
0
it
will
always
demand
a
merge
operation
until
the
stack
is
fully
/
/
/
collapsed
in
order
to
complete
the
sort
.
#
[
inline
]
fn
collapse
(
runs
:
&
[
Run
]
)
-
>
Option
<
usize
>
{
let
n
=
runs
.
len
(
)
;
if
n
>
=
2
&
&
(
runs
[
n
-
1
]
.
start
=
=
0
|
|
runs
[
n
-
2
]
.
len
<
=
runs
[
n
-
1
]
.
len
|
|
(
n
>
=
3
&
&
runs
[
n
-
3
]
.
len
<
=
runs
[
n
-
2
]
.
len
+
runs
[
n
-
1
]
.
len
)
|
|
(
n
>
=
4
&
&
runs
[
n
-
4
]
.
len
<
=
runs
[
n
-
3
]
.
len
+
runs
[
n
-
2
]
.
len
)
)
{
if
n
>
=
3
&
&
runs
[
n
-
3
]
.
len
<
runs
[
n
-
1
]
.
len
{
Some
(
n
-
3
)
}
else
{
Some
(
n
-
2
)
}
}
else
{
None
}
}
/
/
/
Sorts
a
slice
using
merge
sort
unless
it
is
already
in
descending
order
.
/
/
/
/
/
/
This
function
doesn
'
t
modify
the
slice
if
it
is
already
non
-
descending
or
descending
.
/
/
/
Otherwise
it
sorts
the
slice
into
non
-
descending
order
.
/
/
/
/
/
/
This
merge
sort
borrows
some
(
but
not
all
)
ideas
from
TimSort
which
is
described
in
detail
/
/
/
[
here
]
(
https
:
/
/
github
.
com
/
python
/
cpython
/
blob
/
main
/
Objects
/
listsort
.
txt
)
.
/
/
/
/
/
/
The
algorithm
identifies
strictly
descending
and
non
-
descending
subsequences
which
are
called
/
/
/
natural
runs
.
There
is
a
stack
of
pending
runs
yet
to
be
merged
.
Each
newly
found
run
is
pushed
/
/
/
onto
the
stack
and
then
some
pairs
of
adjacent
runs
are
merged
until
these
two
invariants
are
/
/
/
satisfied
:
/
/
/
/
/
/
1
.
for
every
i
in
1
.
.
runs
.
len
(
)
:
runs
[
i
-
1
]
.
len
>
runs
[
i
]
.
len
/
/
/
2
.
for
every
i
in
2
.
.
runs
.
len
(
)
:
runs
[
i
-
2
]
.
len
>
runs
[
i
-
1
]
.
len
+
runs
[
i
]
.
len
/
/
/
/
/
/
The
invariants
ensure
that
the
total
running
time
is
*
O
*
(
*
n
*
\
*
log
(
*
n
*
)
)
worst
-
case
.
/
/
/
/
/
/
#
Safety
/
/
/
/
/
/
The
argument
buf
is
used
as
a
temporary
buffer
and
must
be
at
least
as
long
as
v
.
unsafe
fn
mergesort
<
T
F
>
(
v
:
&
mut
[
T
]
buf
:
*
mut
T
is_less
:
&
F
)
-
>
MergesortResult
where
T
:
Send
F
:
Fn
(
&
T
&
T
)
-
>
bool
+
Sync
{
/
/
Very
short
runs
are
extended
using
insertion
sort
to
span
at
least
this
many
elements
.
const
MIN_RUN
:
usize
=
10
;
let
len
=
v
.
len
(
)
;
/
/
In
order
to
identify
natural
runs
in
v
we
traverse
it
backwards
.
That
might
seem
like
a
/
/
strange
decision
but
consider
the
fact
that
merges
more
often
go
in
the
opposite
direction
/
/
(
forwards
)
.
According
to
benchmarks
merging
forwards
is
slightly
faster
than
merging
/
/
backwards
.
To
conclude
identifying
runs
by
traversing
backwards
improves
performance
.
let
mut
runs
=
vec
!
[
]
;
let
mut
end
=
len
;
while
end
>
0
{
/
/
Find
the
next
natural
run
and
reverse
it
if
it
'
s
strictly
descending
.
let
mut
start
=
end
-
1
;
if
start
>
0
{
start
-
=
1
;
if
is_less
(
v
.
get_unchecked
(
start
+
1
)
v
.
get_unchecked
(
start
)
)
{
while
start
>
0
&
&
is_less
(
v
.
get_unchecked
(
start
)
v
.
get_unchecked
(
start
-
1
)
)
{
start
-
=
1
;
}
/
/
If
this
descending
run
covers
the
whole
slice
return
immediately
.
if
start
=
=
0
&
&
end
=
=
len
{
return
MergesortResult
:
:
Descending
;
}
else
{
v
[
start
.
.
end
]
.
reverse
(
)
;
}
}
else
{
while
start
>
0
&
&
!
is_less
(
v
.
get_unchecked
(
start
)
v
.
get_unchecked
(
start
-
1
)
)
{
start
-
=
1
;
}
/
/
If
this
non
-
descending
run
covers
the
whole
slice
return
immediately
.
if
end
-
start
=
=
len
{
return
MergesortResult
:
:
NonDescending
;
}
}
}
/
/
Insert
some
more
elements
into
the
run
if
it
'
s
too
short
.
Insertion
sort
is
faster
than
/
/
merge
sort
on
short
sequences
so
this
significantly
improves
performance
.
while
start
>
0
&
&
end
-
start
<
MIN_RUN
{
start
-
=
1
;
insert_head
(
&
mut
v
[
start
.
.
end
]
&
is_less
)
;
}
/
/
Push
this
run
onto
the
stack
.
runs
.
push
(
Run
{
start
len
:
end
-
start
}
)
;
end
=
start
;
/
/
Merge
some
pairs
of
adjacent
runs
to
satisfy
the
invariants
.
while
let
Some
(
r
)
=
collapse
(
&
runs
)
{
let
left
=
runs
[
r
+
1
]
;
let
right
=
runs
[
r
]
;
merge
(
&
mut
v
[
left
.
start
.
.
right
.
start
+
right
.
len
]
left
.
len
buf
&
is_less
)
;
runs
[
r
]
=
Run
{
start
:
left
.
start
len
:
left
.
len
+
right
.
len
}
;
runs
.
remove
(
r
+
1
)
;
}
}
/
/
Finally
exactly
one
run
must
remain
in
the
stack
.
debug_assert
!
(
runs
.
len
(
)
=
=
1
&
&
runs
[
0
]
.
start
=
=
0
&
&
runs
[
0
]
.
len
=
=
len
)
;
/
/
The
original
order
of
the
slice
was
neither
non
-
descending
nor
descending
.
MergesortResult
:
:
Sorted
}
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
Everything
above
this
line
is
copied
from
std
:
:
slice
:
:
sort
(
with
very
minor
tweaks
)
.
/
/
Everything
below
this
line
is
parallelization
.
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
/
Splits
two
sorted
slices
so
that
they
can
be
merged
in
parallel
.
/
/
/
/
/
/
Returns
two
indices
(
a
b
)
so
that
slices
left
[
.
.
a
]
and
right
[
.
.
b
]
come
before
/
/
/
left
[
a
.
.
]
and
right
[
b
.
.
]
.
fn
split_for_merge
<
T
F
>
(
left
:
&
[
T
]
right
:
&
[
T
]
is_less
:
&
F
)
-
>
(
usize
usize
)
where
F
:
Fn
(
&
T
&
T
)
-
>
bool
{
let
left_len
=
left
.
len
(
)
;
let
right_len
=
right
.
len
(
)
;
if
left_len
>
=
right_len
{
let
left_mid
=
left_len
/
2
;
/
/
Find
the
first
element
in
right
that
is
greater
than
or
equal
to
left
[
left_mid
]
.
let
mut
a
=
0
;
let
mut
b
=
right_len
;
while
a
<
b
{
let
m
=
a
+
(
b
-
a
)
/
2
;
if
is_less
(
&
right
[
m
]
&
left
[
left_mid
]
)
{
a
=
m
+
1
;
}
else
{
b
=
m
;
}
}
(
left_mid
a
)
}
else
{
let
right_mid
=
right_len
/
2
;
/
/
Find
the
first
element
in
left
that
is
greater
than
right
[
right_mid
]
.
let
mut
a
=
0
;
let
mut
b
=
left_len
;
while
a
<
b
{
let
m
=
a
+
(
b
-
a
)
/
2
;
if
is_less
(
&
right
[
right_mid
]
&
left
[
m
]
)
{
b
=
m
;
}
else
{
a
=
m
+
1
;
}
}
(
a
right_mid
)
}
}
/
/
/
Merges
slices
left
and
right
in
parallel
and
stores
the
result
into
dest
.
/
/
/
/
/
/
#
Safety
/
/
/
/
/
/
The
dest
pointer
must
have
enough
space
to
store
the
result
.
/
/
/
/
/
/
Even
if
is_less
panics
at
any
point
during
the
merge
process
this
function
will
fully
copy
/
/
/
all
elements
from
left
and
right
into
dest
(
not
necessarily
in
sorted
order
)
.
unsafe
fn
par_merge
<
T
F
>
(
left
:
&
mut
[
T
]
right
:
&
mut
[
T
]
dest
:
*
mut
T
is_less
:
&
F
)
where
T
:
Send
F
:
Fn
(
&
T
&
T
)
-
>
bool
+
Sync
{
/
/
Slices
whose
lengths
sum
up
to
this
value
are
merged
sequentially
.
This
number
is
slightly
/
/
larger
than
CHUNK_LENGTH
and
the
reason
is
that
merging
is
faster
than
merge
sorting
so
/
/
merging
needs
a
bit
coarser
granularity
in
order
to
hide
the
overhead
of
Rayon
'
s
task
/
/
scheduling
.
const
MAX_SEQUENTIAL
:
usize
=
5000
;
let
left_len
=
left
.
len
(
)
;
let
right_len
=
right
.
len
(
)
;
/
/
Intermediate
state
of
the
merge
process
which
serves
two
purposes
:
/
/
1
.
Protects
integrity
of
dest
from
panics
in
is_less
.
/
/
2
.
Copies
the
remaining
elements
as
soon
as
one
of
the
two
sides
is
exhausted
.
/
/
/
/
Panic
safety
:
/
/
/
/
If
is_less
panics
at
any
point
during
the
merge
process
s
will
get
dropped
and
copy
the
/
/
remaining
parts
of
left
and
right
into
dest
.
let
mut
s
=
State
{
left_start
:
left
.
as_mut_ptr
(
)
left_end
:
left
.
as_mut_ptr
(
)
.
add
(
left_len
)
right_start
:
right
.
as_mut_ptr
(
)
right_end
:
right
.
as_mut_ptr
(
)
.
add
(
right_len
)
dest
}
;
if
left_len
=
=
0
|
|
right_len
=
=
0
|
|
left_len
+
right_len
<
MAX_SEQUENTIAL
{
while
s
.
left_start
<
s
.
left_end
&
&
s
.
right_start
<
s
.
right_end
{
/
/
Consume
the
lesser
side
.
/
/
If
equal
prefer
the
left
run
to
maintain
stability
.
let
to_copy
=
if
is_less
(
&
*
s
.
right_start
&
*
s
.
left_start
)
{
get_and_increment
(
&
mut
s
.
right_start
)
}
else
{
get_and_increment
(
&
mut
s
.
left_start
)
}
;
ptr
:
:
copy_nonoverlapping
(
to_copy
get_and_increment
(
&
mut
s
.
dest
)
1
)
;
}
}
else
{
/
/
Function
split_for_merge
might
panic
.
If
that
happens
s
will
get
destructed
and
copy
/
/
the
whole
left
and
right
into
dest
.
let
(
left_mid
right_mid
)
=
split_for_merge
(
left
right
is_less
)
;
let
(
left_l
left_r
)
=
left
.
split_at_mut
(
left_mid
)
;
let
(
right_l
right_r
)
=
right
.
split_at_mut
(
right_mid
)
;
/
/
Prevent
the
destructor
of
s
from
running
.
Rayon
will
ensure
that
both
calls
to
/
/
par_merge
happen
.
If
one
of
the
two
calls
panics
they
will
ensure
that
elements
still
/
/
get
copied
into
dest_left
and
dest_right
.
mem
:
:
forget
(
s
)
;
/
/
Wrap
pointers
in
SendPtr
so
that
they
can
be
sent
to
another
thread
/
/
See
the
documentation
of
SendPtr
for
a
full
explanation
let
dest_l
=
SendPtr
(
dest
)
;
let
dest_r
=
SendPtr
(
dest
.
add
(
left_l
.
len
(
)
+
right_l
.
len
(
)
)
)
;
rayon_core
:
:
join
(
move
|
|
par_merge
(
left_l
right_l
dest_l
.
get
(
)
is_less
)
move
|
|
par_merge
(
left_r
right_r
dest_r
.
get
(
)
is_less
)
)
;
}
/
/
Finally
s
gets
dropped
if
we
used
sequential
merge
thus
copying
the
remaining
elements
/
/
all
at
once
.
/
/
When
dropped
copies
arrays
left_start
.
.
left_end
and
right_start
.
.
right_end
into
dest
/
/
in
that
order
.
struct
State
<
T
>
{
left_start
:
*
mut
T
left_end
:
*
mut
T
right_start
:
*
mut
T
right_end
:
*
mut
T
dest
:
*
mut
T
}
impl
<
T
>
Drop
for
State
<
T
>
{
fn
drop
(
&
mut
self
)
{
let
size
=
size_of
:
:
<
T
>
(
)
;
let
left_len
=
(
self
.
left_end
as
usize
-
self
.
left_start
as
usize
)
/
size
;
let
right_len
=
(
self
.
right_end
as
usize
-
self
.
right_start
as
usize
)
/
size
;
/
/
Copy
array
left
followed
by
right
.
unsafe
{
ptr
:
:
copy_nonoverlapping
(
self
.
left_start
self
.
dest
left_len
)
;
self
.
dest
=
self
.
dest
.
add
(
left_len
)
;
ptr
:
:
copy_nonoverlapping
(
self
.
right_start
self
.
dest
right_len
)
;
}
}
}
}
/
/
/
Recursively
merges
pre
-
sorted
chunks
inside
v
.
/
/
/
/
/
/
Chunks
of
v
are
stored
in
chunks
as
intervals
(
inclusive
left
and
exclusive
right
bound
)
.
/
/
/
Argument
buf
is
an
auxiliary
buffer
that
will
be
used
during
the
procedure
.
/
/
/
If
into_buf
is
true
the
result
will
be
stored
into
buf
otherwise
it
will
be
in
v
.
/
/
/
/
/
/
#
Safety
/
/
/
/
/
/
The
number
of
chunks
must
be
positive
and
they
must
be
adjacent
:
the
right
bound
of
each
chunk
/
/
/
must
equal
the
left
bound
of
the
following
chunk
.
/
/
/
/
/
/
The
buffer
must
be
at
least
as
long
as
v
.
unsafe
fn
recurse
<
T
F
>
(
v
:
*
mut
T
buf
:
*
mut
T
chunks
:
&
[
(
usize
usize
)
]
into_buf
:
bool
is_less
:
&
F
)
where
T
:
Send
F
:
Fn
(
&
T
&
T
)
-
>
bool
+
Sync
{
let
len
=
chunks
.
len
(
)
;
debug_assert
!
(
len
>
0
)
;
/
/
Base
case
of
the
algorithm
.
/
/
If
only
one
chunk
is
remaining
there
'
s
no
more
work
to
split
and
merge
.
if
len
=
=
1
{
if
into_buf
{
/
/
Copy
the
chunk
from
v
into
buf
.
let
(
start
end
)
=
chunks
[
0
]
;
let
src
=
v
.
add
(
start
)
;
let
dest
=
buf
.
add
(
start
)
;
ptr
:
:
copy_nonoverlapping
(
src
dest
end
-
start
)
;
}
return
;
}
/
/
Split
the
chunks
into
two
halves
.
let
(
start
_
)
=
chunks
[
0
]
;
let
(
mid
_
)
=
chunks
[
len
/
2
]
;
let
(
_
end
)
=
chunks
[
len
-
1
]
;
let
(
left
right
)
=
chunks
.
split_at
(
len
/
2
)
;
/
/
After
recursive
calls
finish
we
'
ll
have
to
merge
chunks
(
start
mid
)
and
(
mid
end
)
from
/
/
src
into
dest
.
If
the
current
invocation
has
to
store
the
result
into
buf
we
'
ll
/
/
merge
chunks
from
v
into
buf
and
vice
versa
.
/
/
/
/
Recursive
calls
flip
into_buf
at
each
level
of
recursion
.
More
concretely
par_merge
/
/
merges
chunks
from
buf
into
v
at
the
first
level
from
v
into
buf
at
the
second
/
/
level
etc
.
let
(
src
dest
)
=
if
into_buf
{
(
v
buf
)
}
else
{
(
buf
v
)
}
;
/
/
Panic
safety
:
/
/
/
/
If
is_less
panics
at
any
point
during
the
recursive
calls
the
destructor
of
guard
will
/
/
be
executed
thus
copying
everything
from
src
into
dest
.
This
way
we
ensure
that
all
/
/
chunks
are
in
fact
copied
into
dest
even
if
the
merge
process
doesn
'
t
finish
.
let
guard
=
CopyOnDrop
{
src
:
src
.
add
(
start
)
dest
:
dest
.
add
(
start
)
len
:
end
-
start
}
;
/
/
Wrap
pointers
in
SendPtr
so
that
they
can
be
sent
to
another
thread
/
/
See
the
documentation
of
SendPtr
for
a
full
explanation
let
v
=
SendPtr
(
v
)
;
let
buf
=
SendPtr
(
buf
)
;
rayon_core
:
:
join
(
move
|
|
recurse
(
v
.
get
(
)
buf
.
get
(
)
left
!
into_buf
is_less
)
move
|
|
recurse
(
v
.
get
(
)
buf
.
get
(
)
right
!
into_buf
is_less
)
)
;
/
/
Everything
went
all
right
-
recursive
calls
didn
'
t
panic
.
/
/
Forget
the
guard
in
order
to
prevent
its
destructor
from
running
.
mem
:
:
forget
(
guard
)
;
/
/
Merge
chunks
(
start
mid
)
and
(
mid
end
)
from
src
into
dest
.
let
src_left
=
slice
:
:
from_raw_parts_mut
(
src
.
add
(
start
)
mid
-
start
)
;
let
src_right
=
slice
:
:
from_raw_parts_mut
(
src
.
add
(
mid
)
end
-
mid
)
;
par_merge
(
src_left
src_right
dest
.
add
(
start
)
is_less
)
;
}
/
/
/
Sorts
v
using
merge
sort
in
parallel
.
/
/
/
/
/
/
The
algorithm
is
stable
allocates
memory
and
O
(
n
log
n
)
worst
-
case
.
/
/
/
The
allocated
temporary
buffer
is
of
the
same
length
as
is
v
.
pub
(
super
)
fn
par_mergesort
<
T
F
>
(
v
:
&
mut
[
T
]
is_less
:
F
)
where
T
:
Send
F
:
Fn
(
&
T
&
T
)
-
>
bool
+
Sync
{
/
/
Slices
of
up
to
this
length
get
sorted
using
insertion
sort
in
order
to
avoid
the
cost
of
/
/
buffer
allocation
.
const
MAX_INSERTION
:
usize
=
20
;
/
/
The
length
of
initial
chunks
.
This
number
is
as
small
as
possible
but
so
that
the
overhead
/
/
of
Rayon
'
s
task
scheduling
is
still
negligible
.
const
CHUNK_LENGTH
:
usize
=
2000
;
/
/
Sorting
has
no
meaningful
behavior
on
zero
-
sized
types
.
if
size_of
:
:
<
T
>
(
)
=
=
0
{
return
;
}
let
len
=
v
.
len
(
)
;
/
/
Short
slices
get
sorted
in
-
place
via
insertion
sort
to
avoid
allocations
.
if
len
<
=
MAX_INSERTION
{
if
len
>
=
2
{
for
i
in
(
0
.
.
len
-
1
)
.
rev
(
)
{
insert_head
(
&
mut
v
[
i
.
.
]
&
is_less
)
;
}
}
return
;
}
/
/
Allocate
a
buffer
to
use
as
scratch
memory
.
We
keep
the
length
0
so
we
can
keep
in
it
/
/
shallow
copies
of
the
contents
of
v
without
risking
the
dtors
running
on
copies
if
/
/
is_less
panics
.
let
mut
buf
=
Vec
:
:
<
T
>
:
:
with_capacity
(
len
)
;
let
buf
=
buf
.
as_mut_ptr
(
)
;
/
/
If
the
slice
is
not
longer
than
one
chunk
would
be
do
sequential
merge
sort
and
return
.
if
len
<
=
CHUNK_LENGTH
{
let
res
=
unsafe
{
mergesort
(
v
buf
&
is_less
)
}
;
if
res
=
=
MergesortResult
:
:
Descending
{
v
.
reverse
(
)
;
}
return
;
}
/
/
Split
the
slice
into
chunks
and
merge
sort
them
in
parallel
.
/
/
However
descending
chunks
will
not
be
sorted
-
they
will
be
simply
left
intact
.
let
mut
iter
=
{
/
/
Wrap
pointer
in
SendPtr
so
that
it
can
be
sent
to
another
thread
/
/
See
the
documentation
of
SendPtr
for
a
full
explanation
let
buf
=
SendPtr
(
buf
)
;
let
is_less
=
&
is_less
;
v
.
par_chunks_mut
(
CHUNK_LENGTH
)
.
with_max_len
(
1
)
.
enumerate
(
)
.
map
(
move
|
(
i
chunk
)
|
{
let
l
=
CHUNK_LENGTH
*
i
;
let
r
=
l
+
chunk
.
len
(
)
;
unsafe
{
let
buf
=
buf
.
get
(
)
.
add
(
l
)
;
(
l
r
mergesort
(
chunk
buf
is_less
)
)
}
}
)
.
collect
:
:
<
Vec
<
_
>
>
(
)
.
into_iter
(
)
.
peekable
(
)
}
;
/
/
Now
attempt
to
concatenate
adjacent
chunks
that
were
left
intact
.
let
mut
chunks
=
Vec
:
:
with_capacity
(
iter
.
len
(
)
)
;
while
let
Some
(
(
a
mut
b
res
)
)
=
iter
.
next
(
)
{
/
/
If
this
chunk
was
not
modified
by
the
sort
procedure
.
.
.
if
res
!
=
MergesortResult
:
:
Sorted
{
while
let
Some
(
&
(
x
y
r
)
)
=
iter
.
peek
(
)
{
/
/
If
the
following
chunk
is
of
the
same
type
and
can
be
concatenated
.
.
.
if
r
=
=
res
&
&
(
r
=
=
MergesortResult
:
:
Descending
)
=
=
is_less
(
&
v
[
x
]
&
v
[
x
-
1
]
)
{
/
/
Concatenate
them
.
b
=
y
;
iter
.
next
(
)
;
}
else
{
break
;
}
}
}
/
/
Descending
chunks
must
be
reversed
.
if
res
=
=
MergesortResult
:
:
Descending
{
v
[
a
.
.
b
]
.
reverse
(
)
;
}
chunks
.
push
(
(
a
b
)
)
;
}
/
/
All
chunks
are
properly
sorted
.
/
/
Now
we
just
have
to
merge
them
together
.
unsafe
{
recurse
(
v
.
as_mut_ptr
(
)
buf
&
chunks
false
&
is_less
)
;
}
}
#[cfg(test)]
mod tests {
    use super::split_for_merge;
    use rand::distributions::Uniform;
    use rand::{thread_rng, Rng};

    #[test]
    fn test_split_for_merge() {
        /// Verifies the split points returned by `split_for_merge`: every element kept on
        /// the left of the split must be <= every element dropped from the right, and
        /// strictly < in the symmetric direction (stability of equal elements).
        fn check(left: &[u32], right: &[u32]) {
            let (l, r) = split_for_merge(left, right, &|&a, &b| a < b);
            assert!(left[..l]
                .iter()
                .all(|&x| right[r..].iter().all(|&y| x <= y)));
            assert!(right[..r]
                .iter()
                .all(|&x| left[l..].iter().all(|&y| x < y)));
        }

        // Hand-picked cases with many duplicates and empty sides.
        check(&[1, 2, 2, 2, 2, 3], &[1, 2, 2, 2, 2, 3]);
        check(&[1, 2, 2, 2, 2, 3], &[]);
        check(&[], &[1, 2, 2, 2, 2, 3]);

        // Randomized cases: small sorted slices drawn from a narrow value range so that
        // duplicates across the two sides are common.
        let rng = &mut thread_rng();

        for _ in 0..100 {
            let limit: u32 = rng.gen_range(1..21);
            let left_len: usize = rng.gen_range(0..20);
            let right_len: usize = rng.gen_range(0..20);

            let mut left = rng
                .sample_iter(&Uniform::new(0, limit))
                .take(left_len)
                .collect::<Vec<_>>();
            let mut right = rng
                .sample_iter(&Uniform::new(0, limit))
                .take(right_len)
                .collect::<Vec<_>>();
            left.sort();
            right.sort();
            check(&left, &right);
        }
    }
}
| true
|
ed3c636781a812af694f1f2cf035defe8dd97def
|
Rust
|
PyrokinesisStudio/tetris
|
/src/controls.rs
|
UTF-8
| 926
| 3
| 3
|
[] |
no_license
|
use glium::glutin::VirtualKeyCode;
use glium::glutin::VirtualKeyCode::*;
/// Maps raw keyboard input to game actions.
pub struct Controls;

/// High-level game actions derived from key presses and releases.
pub enum Action {
    // Rotation begins on a key press and ends with RotateStop on release.
    RotateCW, RotateCCW, RotateStop,
    // Horizontal movement, likewise driven by press/release pairs.
    MoveLeft, MoveRight, MoveStop,
    // Attempt to spawn the next piece.
    TrySpawn,
    // Restart the game.
    GameReset,
}
impl Controls {
    /// Translates a key press into the action it starts, if any.
    ///
    /// Arrow keys and the vi-style `H`/`J`/`K`/`L` keys are interchangeable.
    pub fn resolve_press(&self, key: VirtualKeyCode) -> Option<Action> {
        let action = match key {
            K | Up => Action::RotateCW,
            J | Down => Action::RotateCCW,
            H | Left => Action::MoveLeft,
            L | Right => Action::MoveRight,
            Space => Action::TrySpawn,
            Back => Action::GameReset,
            _ => return None,
        };
        Some(action)
    }

    /// Translates a key release into the action that stops the
    /// corresponding continuous movement, if any.
    pub fn resolve_release(&self, key: VirtualKeyCode) -> Option<Action> {
        let action = match key {
            K | J | Up | Down => Action::RotateStop,
            H | L | Left | Right => Action::MoveStop,
            _ => return None,
        };
        Some(action)
    }
}
| true
|
cc29f971da65b9730507437485f044b768dadf42
|
Rust
|
simias/cdimage
|
/src/internal.rs
|
UTF-8
| 9,845
| 3.390625
| 3
|
[] |
no_license
|
//! This module provides generic reusable structures to easily store and lookup a CD's structure in
//! an image format implementation.
//!
//! Those are only useful for implementing a new image format backend.
use std::cmp;
use std::fmt;
use std::path::PathBuf;
use subchannel::AdrControl;
use {Bcd, CdError, CdResult, Msf, Toc, Track, TrackFormat};
/// A generic CD index implementation. Each image format can specialize it by adding its own
/// `private` implementation.
pub struct Index<T> {
    /// Sector pointed at by this index. Stored as an absolute sector index (counted from
    /// MSF 00:00:00) rather than an MSF so comparisons and arithmetic stay cheap.
    sector_index: u32,
    /// Index number (INDEX 00, 01, ...) in BCD
    index: Bcd,
    /// Track number this index belongs to, in BCD
    track: Bcd,
    /// Track format this index belongs to
    format: TrackFormat,
    /// Session number this index belongs to
    session: u8,
    /// Control bits for the current track
    control: AdrControl,
    /// Generic private data associated with this index, specific to the image format backend
    private: T,
}
impl<T> Index<T> {
    /// Builds a new index entry.
    ///
    /// `start` is converted to an absolute sector index for storage.
    pub fn new(
        index: Bcd,
        start: Msf,
        track: Bcd,
        format: TrackFormat,
        session: u8,
        control: AdrControl,
        private: T,
    ) -> Index<T> {
        let sector_index = start.sector_index();
        Index {
            sector_index,
            index,
            track,
            format,
            session,
            control,
            private,
        }
    }

    /// Absolute sector index of the sector referenced by this index.
    pub fn sector_index(&self) -> u32 {
        self.sector_index
    }

    /// MSF of the sector referenced by this index.
    pub fn msf(&self) -> Msf {
        Msf::from_sector_index(self.sector_index).unwrap()
    }

    /// Shared reference to the format-specific `private` data.
    pub fn private(&self) -> &T {
        &self.private
    }

    /// Exclusive reference to the format-specific `private` data.
    pub fn private_mut(&mut self) -> &mut T {
        &mut self.private
    }

    /// Control bits for the track containing this index.
    pub fn control(&self) -> AdrControl {
        self.control
    }

    /// Index number, in BCD.
    pub fn index(&self) -> Bcd {
        self.index
    }

    /// Track number, in BCD.
    pub fn track(&self) -> Bcd {
        self.track
    }

    /// Format of the track containing this index.
    pub fn format(&self) -> TrackFormat {
        self.format
    }

    /// Session number.
    pub fn session(&self) -> u8 {
        self.session
    }

    /// `true` when this is an INDEX 00 entry, i.e. the track's pregap.
    pub fn is_pregap(&self) -> bool {
        self.index.bcd() == 0
    }
}
// Equality (and, below, ordering) considers only the position on the disc via
// `sector_index`; the `private` payload and the other metadata are ignored.
impl<T> cmp::PartialEq for Index<T> {
    fn eq(&self, other: &Index<T>) -> bool {
        self.sector_index == other.sector_index
    }
}

impl<T> cmp::Eq for Index<T> {}
impl<T> cmp::PartialOrd for Index<T> {
    /// Delegates to the total order defined by `Ord` (comparison by absolute sector index).
    ///
    /// Forwarding to `cmp` keeps `PartialOrd` trivially consistent with `Ord`, as their
    /// contracts require (clippy: `non_canonical_partial_cmp_impl`), instead of duplicating
    /// the comparison logic.
    fn partial_cmp(&self, other: &Index<T>) -> Option<cmp::Ordering> {
        Some(self.cmp(other))
    }
}
impl<T> Ord for Index<T> {
    // Total order by position on the disc (absolute sector index).
    fn cmp(&self, other: &Index<T>) -> cmp::Ordering {
        self.sector_index.cmp(&other.sector_index)
    }
}
/// A simple cache structure used to quickly look up where an
/// arbitrary MSF lives on the disc.
pub struct IndexCache<T> {
    /// Ordered vector containing all the indices in the CD, sorted by absolute sector
    /// index (which also orders them by session/track/index).
    indices: Vec<Index<T>>,
    /// First sector in the lead-out, given as a sector index instead
    /// of an MSF to avoid converting back and forth all the time.
    lead_out: u32,
}
impl<T> IndexCache<T> {
    /// Create a new `IndexCache` from a vector of indices and the MSF
    /// of the first sector in the lead-out. This method will return
    /// an error if the disc structure makes no sense (duplicate
    /// tracks, indices in the wrong order etc...).
    pub fn new(
        file: PathBuf,
        mut indices: Vec<Index<T>>,
        lead_out: Msf,
    ) -> CdResult<IndexCache<T>> {
        if indices.is_empty() {
            return Err(CdError::BadImage {
                path: file,
                desc: "Empty disc".to_string(),
            });
        }
        // Make sure the list is sorted (Index's Ord compares absolute sector indices)
        indices.sort();
        {
            // The lowest index must reference the very first sector of the disc (Track 01's
            // pregap at MSF 00:00:00). `find_index_for_msf` below relies on this invariant.
            let index0 = &indices[0];
            if index0.sector_index != 0 {
                let error = format!("Track 01's pregap starts at {}", index0.msf());
                return Err(CdError::BadImage {
                    path: file,
                    desc: error,
                });
            }
        }
        // TODO: Add more validation here.
        Ok(IndexCache {
            indices,
            lead_out: lead_out.sector_index(),
        })
    }

    /// Return the MSF of the first sector in the lead out.
    pub fn lead_out(&self) -> Msf {
        Msf::from_sector_index(self.lead_out).unwrap()
    }

    /// Return a reference to the index at position `pos` or `None` if
    /// it's out of bounds
    pub fn get(&self, pos: usize) -> Option<&Index<T>> {
        self.indices.get(pos)
    }

    /// Locate the index directly before `msf` and return its position along with a reference to
    /// the `Index` struct. Returns `None` if the index is in the lead-out.
    pub fn find_index_for_msf(&self, msf: Msf) -> Option<(usize, &Index<T>)> {
        let sector = msf.sector_index();
        if sector >= self.lead_out {
            return None;
        }
        let pos = match self
            .indices
            .binary_search_by(|index| index.sector_index.cmp(&sector))
        {
            // The MSF matched an index exactly
            Ok(i) => i,
            // No exact match, the function returns the index of
            // the first element greater than `sector` (on one
            // past the end if no greater element is found).
            //
            // `i` cannot be 0 here: the constructor guarantees the first index starts at
            // sector 0, so any non-matching `sector` compares greater and `i >= 1`.
            Err(i) => i - 1,
        };
        Some((pos, &self.indices[pos]))
    }

    /// Locate `index` for `track` and return its position along with
    /// a reference to the `Index` struct.
    pub fn find_index_for_track(&self, track: Bcd, index: Bcd) -> CdResult<(usize, &Index<T>)> {
        // Sorting by sector index also sorts by (track, index), so a binary search on that
        // composite key is valid here.
        match self
            .indices
            .binary_search_by(|idx| match idx.track().cmp(&track) {
                cmp::Ordering::Equal => idx.index().cmp(&index),
                o => o,
            }) {
            Ok(i) => Ok((i, &self.indices[i])),
            Err(_) => Err(CdError::BadTrack),
        }
    }

    /// Locate index1 for `track` and return its position along with a
    /// reference to the `Index` struct.
    pub fn find_index01_for_track(&self, track: Bcd) -> CdResult<(usize, &Index<T>)> {
        self.find_index_for_track(track, Bcd::ONE)
    }

    /// Return the length of the given track starting at INDEX 01, not
    /// counting the pregap. Also returns the position and a reference
    /// to the INDEX 01 for this track.
    pub fn track_length(&self, track: Bcd) -> CdResult<(Msf, usize, &Index<T>)> {
        let (pos01, index01) = self.find_index01_for_track(track)?;
        // Iterate over the remaining indices to find the beginning of
        // the next track
        let next_track = self.indices[pos01 + 1..]
            .iter()
            .find(|i| i.track() != track);
        let end = match next_track {
            // We found the next track, the previous sector is the
            // last one in our track.
            Some(next) => next.sector_index(),
            // Seems like we got the last track
            None => self.lead_out,
        };
        let len = Msf::from_sector_index(end - index01.sector_index()).unwrap();
        Ok((len, pos01, index01))
    }

    /// Return the absolute Msf for the position `track_msf` in
    /// `track`. Will return an error if the `track_msf` is outside of
    /// the track or if `track` doesn't exist.
    pub fn track_msf(&self, track: Bcd, track_msf: Msf) -> CdResult<Msf> {
        // We need to make sure that `track_msf` is not bigger than
        // this tracks' length.
        let (len, _, index01) = self.track_length(track)?;
        if track_msf < len {
            Ok(index01.msf() + track_msf)
        } else {
            Err(CdError::EndOfTrack)
        }
    }

    /// Build a table of contents with the current cache's contents
    pub fn toc(&self) -> CdResult<Toc> {
        // The indices are sorted, so the last entry carries the highest track number.
        let track_count = self.indices.last().map(|i| i.track).unwrap_or(Bcd::ZERO);
        let mut tracks = Vec::with_capacity(track_count.binary() as usize);
        // Track numbers are BCD-encoded and range from 01 to 99; stop at the first
        // missing track.
        for b in 1..=99 {
            let track_no = Bcd::from_binary(b).unwrap();
            match self.track_length(track_no) {
                Ok((len, _, idx)) => {
                    let track = Track {
                        track: track_no,
                        format: idx.format,
                        start: idx.msf(),
                        length: len,
                        control: idx.control,
                    };
                    tracks.push(track);
                }
                // Last track
                Err(_) => break,
            }
        }
        Toc::new(tracks)
    }
}
impl<T> fmt::Debug for IndexCache<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Print a session/track header whenever the session or track changes.
        // `force_display` starts true so the very first entry always gets both headers, and
        // it is re-armed on a session change so the first track of a new session is printed
        // even if its track number happens to match the previous one.
        let mut force_display = true;
        let mut session = 0;
        let mut track = Bcd::ZERO;
        for i in &self.indices {
            if i.session != session || force_display {
                writeln!(f, "Session {}:", i.session)?;
                session = i.session;
                force_display = true;
            }
            if i.track != track || force_display {
                writeln!(f, " Track {} {:?}:", i.track, i.format)?;
                track = i.track;
                force_display = false;
            }
            writeln!(f, " Index {}: {}", i.index, i.msf())?;
        }
        writeln!(f, "Lead-out: {}", self.lead_out())
    }
}
| true
|
3894501eda161054bd57e29fcebf926daaa0a36c
|
Rust
|
dfrankland/sfbart
|
/src/client/apis/version_information/version.rs
|
UTF-8
| 1,288
| 2.671875
| 3
|
[
"MIT"
] |
permissive
|
use crate::client::constants::PUBLIC_KEY;
use anyhow::Result;
use reqwest;
use serde::{Deserialize, Serialize};
/// Payload of the BART "version information" API response.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Version {
    pub api_version: String,
    pub copyright: String,
    pub license: String,
    pub message: String,
}
/// Wrapper matching the JSON envelope: the `Version` payload lives under the "root" key.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
struct Root {
    pub root: Version,
}
/// Builds the request URL for the version endpoint.
///
/// Uses the caller-supplied API key when given, falling back to the shared
/// public demo key otherwise.
// NOTE(review): `cmd=stns` looks copy-pasted from the stations endpoint; the version
// endpoint may not need it — confirm against the BART API docs before changing.
pub fn url<T: AsRef<str>>(key: Option<T>) -> String {
    let key = match key {
        Some(k) => String::from(k.as_ref()),
        None => String::from(PUBLIC_KEY),
    };
    format!(
        "https://api.bart.gov/api/version.aspx?cmd=stns&key={}&json=y",
        key
    )
}
pub async fn call<T: AsRef<str>>(key: Option<T>) -> Result<Version> {
let root = reqwest::get(&url(key)).await?.json::<Root>().await?;
Ok(root.root)
}
// NOTE(review): this test performs a real network request against the live BART API and
// pins exact `api_version`/copyright strings — it will break whenever the service updates
// either, or when run offline. Consider marking it #[ignore].
#[tokio::test]
async fn version() {
    assert_eq!(
        call::<&str>(None).await.unwrap(),
        Version {
            api_version: String::from("3.10"),
            copyright: String::from("Copyright 2019 Bay Area Rapid Transit District"),
            license: String::from(
                "http://www.bart.gov/schedules/developers/developer-license-agreement"
            ),
            message: String::from(""),
        }
    )
}
| true
|
9125cce27ff6cde90a024651248d1f66c320a7c5
|
Rust
|
pepyakin/remarkable-oxide
|
/service/src/extendable_range.rs
|
UTF-8
| 2,288
| 3.59375
| 4
|
[
"MIT"
] |
permissive
|
use futures::prelude::*;
/// This stream is similar to a range iterator: for a range `[lhs..rhs]` it emits all numbers in
/// sequence starting from `lhs` to `rhs`.
///
/// The feature of this stream is that `rhs` can be moved forward. In fact, the `rhs` specified by
/// the `rhs_stream`. Each time `rhs_stream` produces a number, it will extend `rhs` further, making
/// this stream to produce numbers to the new rhs. Thus extendable in the name.
pub fn extendable_range(
    start_num: u64,
    rhs_stream: impl Stream<Item = u64> + Unpin,
) -> impl Stream<Item = u64> {
    // State threaded through `unfold`: the next number to yield (`lhs`), the current upper
    // bound (`rhs`, exclusive — see the `initial_send` test: a bound of 10 yields 0..=9),
    // and the stream supplying new bounds.
    struct State<F> {
        lhs: u64,
        rhs: u64,
        rhs_stream: F,
    }
    stream::unfold(
        State {
            lhs: start_num,
            rhs: start_num,
            rhs_stream,
        },
        |mut state| async move {
            let lhs = state.lhs;
            // Range exhausted (note lhs == rhs initially, so this triggers on the very
            // first poll too): wait for a bound that actually extends the range, skipping
            // stale values. The `?` ends this stream when `rhs_stream` finishes.
            if state.lhs >= state.rhs {
                loop {
                    let next_rhs = state.rhs_stream.next().await?;
                    if next_rhs > state.rhs {
                        state.rhs = next_rhs;
                        break;
                    }
                }
            }
            state.lhs += 1;
            Some((lhs, state))
        },
    )
}
#[cfg(test)]
mod tests {
    use super::extendable_range;
    use async_std::task;
    use futures::channel::mpsc;
    use futures::prelude::*;
    // NOTE(review): `Duration` appears unused in these tests — presumably left over from a
    // timeout-based variant.
    use std::time::Duration;

    #[async_std::test]
    async fn initial_send() {
        // The bound (10) is queued on the channel before the range is consumed, so the
        // stream can run straight through 0..10 without blocking.
        let (mut rhs_stream_tx, rhs_stream_rx) = mpsc::unbounded();
        rhs_stream_tx.send(10).await.unwrap();
        let range = extendable_range(0, rhs_stream_rx)
            .take(10)
            .collect::<Vec<_>>()
            .await;
        assert_eq!(range, &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
    }

    #[async_std::test]
    async fn with_task() {
        let (mut rhs_stream_tx, rhs_stream_rx) = mpsc::unbounded();
        // Awaiting the spawned task means both bounds (5, then 10) are buffered in the
        // channel before the range below starts polling, exercising the bound-extension
        // path across two sends.
        task::spawn(async move {
            rhs_stream_tx.send(5).await.unwrap();
            rhs_stream_tx.send(10).await.unwrap();
        })
        .await;
        let range = task::spawn(
            extendable_range(0, rhs_stream_rx)
                .take(10)
                .collect::<Vec<_>>(),
        );
        assert_eq!(range.await, &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
    }
}
| true
|
9c600633a01daa09197dc86ca9001843e9c77c31
|
Rust
|
PistonDevelopers/conrod
|
/conrod_core/src/ui.rs
|
UTF-8
| 65,301
| 2.78125
| 3
|
[
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use color::Color;
use cursor;
use event;
use fnv;
use graph::{self, Graph};
use input;
use position::{self, Align, Dimensions, Direction, Padding, Point, Position, Range, Rect, Scalar};
use render;
use std;
use std::sync::atomic::{self, AtomicUsize};
use text;
use theme::Theme;
use utils;
use widget::{self, Widget};
/// A constructor type for building a `Ui` instance with a set of optional parameters.
///
/// Construct with `UiBuilder::new`, chain the optional setters, then call `build`.
pub struct UiBuilder {
    /// The initial dimensions of the window in which the `Ui` exists.
    pub window_dimensions: Dimensions,
    /// The theme used to set default styling for widgets.
    ///
    /// If this field is `None` when `build` is called, `Theme::default` will be used.
    pub maybe_theme: Option<Theme>,
    /// An estimation of the maximum number of widgets that will be used with this `Ui` instance.
    ///
    /// This value is used to determine the size with which various collections should be
    /// reserved. This may make the first cycle of widget instantiations more efficient as the
    /// collections will not be required to grow dynamically. These collections include:
    ///
    /// - the widget graph node and edge `Vec`s
    /// - the `HashSet` used to track updated widgets
    /// - the widget `DepthOrder` (a kind of toposort describing the order of widgets in their
    /// rendering order).
    ///
    /// If this field is `None` when `build` is called, these collections will be initialised with
    /// no pre-reserved size and will instead grow organically as needed.
    pub maybe_widgets_capacity: Option<usize>,
}
/// `Ui` is the most important type within Conrod and is necessary for rendering and maintaining
/// widget state.
/// # Ui Handles the following:
/// * Contains the state of all widgets which can be indexed via their widget::Id.
/// * Stores rendering state for each widget until the end of each render cycle.
/// * Contains the theme used for default styling of the widgets.
/// * Maintains the latest user input state (for mouse and keyboard).
/// * Maintains the latest window dimensions.
#[derive(Debug)]
pub struct Ui {
    /// The theme used to set default styling for widgets.
    pub theme: Theme,
    /// An index into the root widget of the graph, representing the entire window.
    pub window: widget::Id,
    /// Handles aggregation of events and providing them to Widgets
    global_input: input::Global,
    /// Manages all fonts that have been loaded by the user.
    pub fonts: text::font::Map,
    /// The Widget cache, storing state for all widgets.
    widget_graph: Graph,
    /// The widget::Id of the widget that was last updated/set.
    maybe_prev_widget_id: Option<widget::Id>,
    /// The widget::Id of the last widget used as a parent for another widget.
    maybe_current_parent_id: Option<widget::Id>,
    /// The number of frames that will be used for the `redraw_count` when `need_redraw` is
    /// triggered.
    num_redraw_frames: u8,
    /// Whether or not the `Ui` needs to be re-drawn to screen.
    ///
    /// Stored atomically; presumably so redraws can be requested through a shared
    /// reference — TODO confirm against the methods that touch it.
    redraw_count: AtomicUsize,
    /// A background color to clear the screen with before drawing if one was given.
    maybe_background_color: Option<Color>,
    /// The order in which widgets from the `widget_graph` are drawn.
    depth_order: graph::DepthOrder,
    /// The set of widgets that have been updated since the beginning of the `set_widgets` stage.
    updated_widgets: fnv::FnvHashSet<widget::Id>,
    /// The `updated_widgets` for the previous `set_widgets` stage.
    ///
    /// We use this to compare against the newly generated `updated_widgets` to see whether or not
    /// we require re-drawing.
    prev_updated_widgets: fnv::FnvHashSet<widget::Id>,
    /// Scroll events that have been emitted during a call to `Ui::set_widgets`. These are usually
    /// emitted by some widget like the `Scrollbar`.
    ///
    /// These events will be drained and pushed onto the end of the `global_input` event buffer at
    /// the end of the `Ui::set_widgets` method. This ensures that the events are received by the
    /// target widgets during the next call to `Ui::set_widgets`.
    pending_scroll_events: Vec<event::Ui>,
    /// Mouse cursor
    mouse_cursor: cursor::MouseCursor,
    // TODO: Remove the following fields as they should now be handled by `input::Global`.
    /// Window width.
    pub win_w: f64,
    /// Window height.
    pub win_h: f64,
}
/// A wrapper around the `Ui` that restricts the user from mutating the `Ui` in certain ways while
/// in the scope of the `Ui::set_widgets` function and within `Widget`s' `update` methods. Using
/// the `UiCell`, users may access the `Ui` immutably (via `Deref`) however they wish, however they
/// may only mutate the `Ui` via the `&mut self` methods provided by the `UiCell`.
///
/// The name came from its likening to a "jail cell for the `Ui`", as it restricts a user's access
/// to it. However, we realise that the name may also cause ambiguity with the std `Cell` and
/// `RefCell` types (which `UiCell` has nothing to do with). Thus, if you have a better name for
/// this type in mind, please let us know at the github repo via an issue or PR sometime before we
/// hit 1.0.0!
#[derive(Debug)]
pub struct UiCell<'a> {
    /// A mutable reference to a **Ui**.
    ui: &'a mut Ui,
}

/// Each time conrod is required to redraw the GUI, it must draw for at least the next three frames
/// to ensure that, in the case that graphics buffers are being swapped, we have filled each
/// buffer. Otherwise if we don't draw into each buffer, we will probably be subject to flickering.
// Three frames covers triple buffering, the deepest common swap-chain configuration.
pub const SAFE_REDRAW_COUNT: u8 = 3;
impl UiBuilder {
    /// Begin building a new `Ui` instance.
    ///
    /// Give the initial dimensions of the window within which the `Ui` will be instantiated as a
    /// `Scalar` (DPI agnostic) value.
    pub fn new(window_dimensions: Dimensions) -> Self {
        UiBuilder {
            window_dimensions,
            maybe_theme: None,
            maybe_widgets_capacity: None,
        }
    }

    /// Set the theme used for default widget styling.
    ///
    /// When left unset, `Theme::default` is used at build time.
    pub fn theme(mut self, value: Theme) -> Self {
        self.maybe_theme = Some(value);
        self
    }

    /// Hint at the maximum number of widgets that will be used with this `Ui` instance.
    ///
    /// The hint pre-sizes the widget graph's node and edge `Vec`s, the updated-widget
    /// `HashSet` and the widget `DepthOrder`, which can make the first cycle of widget
    /// instantiations more efficient since those collections won't need to grow
    /// dynamically. When left unset, they start empty and grow organically as required.
    pub fn widgets_capacity(mut self, value: usize) -> Self {
        self.maybe_widgets_capacity = Some(value);
        self
    }

    /// Build **Ui** from the given builder
    pub fn build(self) -> Ui {
        Ui::new(self)
    }
}
impl Ui {
/// A new, empty **Ui**.
fn new(builder: UiBuilder) -> Self {
    let UiBuilder {
        window_dimensions,
        maybe_widgets_capacity,
        maybe_theme,
    } = builder;
    // Pre-size the widget graph, depth order and updated-widget set when the caller gave a
    // capacity hint; otherwise start empty and let the collections grow organically.
    let (mut widget_graph, depth_order, updated_widgets) = match maybe_widgets_capacity {
        Some(n) => (
            Graph::with_node_capacity(n),
            graph::DepthOrder::with_node_capacity(n),
            std::collections::HashSet::with_capacity_and_hasher(
                n,
                fnv::FnvBuildHasher::default(),
            ),
        ),
        None => (
            Graph::new(),
            graph::DepthOrder::new(),
            fnv::FnvHashSet::default(),
        ),
    };
    // The placeholder root node represents the window itself.
    let window = widget_graph.add_placeholder();
    let prev_updated_widgets = updated_widgets.clone();
    Ui {
        widget_graph,
        theme: maybe_theme.unwrap_or_else(Theme::default),
        fonts: text::font::Map::new(),
        window,
        win_w: window_dimensions[0],
        win_h: window_dimensions[1],
        maybe_prev_widget_id: None,
        maybe_current_parent_id: None,
        num_redraw_frames: SAFE_REDRAW_COUNT,
        redraw_count: AtomicUsize::new(SAFE_REDRAW_COUNT as usize),
        maybe_background_color: None,
        depth_order,
        updated_widgets,
        prev_updated_widgets,
        global_input: input::Global::new(),
        pending_scroll_events: Vec::new(),
        mouse_cursor: cursor::MouseCursor::Arrow,
    }
}
/// Returns a `input::Widget` for the given widget
pub fn widget_input(&self, widget: widget::Id) -> input::Widget {
    // Fall back to a zero-area rectangle when the widget has no known position. No mouse
    // events can have occurred over an empty area, so the resulting `input::Widget` will
    // only carry mouse events if the widget has captured the mouse.
    let rect = match self.rect_of(widget) {
        Some(rect) => rect,
        None => {
            let xy = [self.win_w / 2.0, self.win_h / 2.0];
            Rect::from_xy_dim(xy, [0.0, 0.0])
        }
    };
    input::Widget::for_widget(widget, rect, &self.global_input)
}
/// The **Rect** for the widget at the given index.
///
/// Returns `None` if there is no widget for the given index.
pub fn rect_of(&self, id: widget::Id) -> Option<Rect> {
    let widget = self.widget_graph.widget(id)?;
    Some(widget.rect)
}

/// The absolute width of the widget at the given index.
///
/// Returns `None` if there is no widget for the given index.
pub fn w_of(&self, id: widget::Id) -> Option<Scalar> {
    Some(self.rect_of(id)?.w())
}

/// The absolute height of the widget at the given index.
///
/// Returns `None` if there is no widget for the given index.
pub fn h_of(&self, id: widget::Id) -> Option<Scalar> {
    Some(self.rect_of(id)?.h())
}

/// The absolute dimensions for the widget at the given index.
///
/// Returns `None` if there is no widget for the given index.
pub fn wh_of(&self, id: widget::Id) -> Option<Dimensions> {
    Some(self.rect_of(id)?.dim())
}

/// The coordinates for the widget at the given index.
///
/// Returns `None` if there is no widget for the given index.
pub fn xy_of(&self, id: widget::Id) -> Option<Point> {
    Some(self.rect_of(id)?.xy())
}

/// The `kid_area` of the widget at the given index, with its padding applied.
///
/// Returns `None` if there is no widget for the given index.
pub fn kid_area_of(&self, id: widget::Id) -> Option<Rect> {
    let widget = self.widget_graph.widget(id)?;
    Some(widget.kid_area.rect.padding(widget.kid_area.pad))
}
/// An index to the previously updated widget if there is one.
pub fn maybe_prev_widget(&self) -> Option<widget::Id> {
    self.maybe_prev_widget_id
}

/// Borrow the **Ui**'s `widget_graph`.
pub fn widget_graph(&self) -> &Graph {
    &self.widget_graph
}

/// Borrow the **Ui**'s set of updated widgets.
///
/// This set indicates which widgets have been instantiated since the beginning of the most
/// recent `Ui::set_widgets` call.
pub fn updated_widgets(&self) -> &fnv::FnvHashSet<widget::Id> {
    &self.updated_widgets
}

/// Borrow the **Ui**'s set of updated widgets.
///
/// This set indicates which widgets were instantiated during the previous call to
/// `Ui::set_widgets`.
pub fn prev_updated_widgets(&self) -> &fnv::FnvHashSet<widget::Id> {
    &self.prev_updated_widgets
}

/// Produces a type that may be used to generate new unique `widget::Id`s.
///
/// See the [**widget::id::Generator**](../widget/id/struct.Generator.html) docs for details on
/// how to use this correctly.
pub fn widget_id_generator(&mut self) -> widget::id::Generator {
    widget::id::Generator::new(&mut self.widget_graph)
}
/// Scroll the widget at the given index by the given offset amount.
///
/// The produced `Scroll` event will be applied upon the next call to `Ui::set_widgets`.
pub fn scroll_widget(&mut self, widget_id: widget::Id, offset: [Scalar; 2]) {
    let x = offset[0];
    let y = offset[1];
    // A zero offset would be a no-op; don't bother emitting an event for it.
    if x == 0.0 && y == 0.0 {
        return;
    }
    let scroll = event::Scroll {
        x,
        y,
        modifiers: self.global_input.current.modifiers,
    };
    let event = event::Ui::Scroll(Some(widget_id), scroll).into();
    self.global_input.push_event(event);
}
/// Determines which widget is currently under the mouse and sets it within the `Ui`'s
/// `input::Global`'s `input::State`.
///
/// If the `widget_under_mouse` has changed, this function will also update the
/// `widget_capturing_mouse`.
///
/// If the left mouse button is up, we assume that the widget directly under the
/// mouse cursor captures all input from the mouse.
///
/// If the left mouse button is down, we assume that the widget that was clicked
/// remains "pinned" and will continue to capture the mouse until it is
/// released.
///
/// Note: This function expects that `ui.global_input.current.mouse.xy` is up-to-date.
fn track_widget_under_mouse_and_update_capturing(&mut self) {
    // Pick the top-most widget at the mouse position according to the depth order.
    self.global_input.current.widget_under_mouse = graph::algo::pick_widgets(
        &self.depth_order.indices,
        self.global_input.current.mouse.xy,
    )
    .next(&self.widget_graph, &self.depth_order.indices, &self.theme);
    // If MouseButton::Left is up and `widget_under_mouse` has changed, capture new widget
    // under mouse. (If it is down, the previously clicked widget stays pinned — see above.)
    if self.global_input.current.mouse.buttons.left().is_up() {
        let widget_under_mouse = self.global_input.current.widget_under_mouse;
        // Check to see if we need to uncapture a widget.
        // An `Uncaptures` event is emitted so the old widget can react to losing the mouse.
        if let Some(idx) = self.global_input.current.widget_capturing_mouse {
            if widget_under_mouse != Some(idx) {
                let source = input::Source::Mouse;
                let event = event::Ui::WidgetUncapturesInputSource(idx, source).into();
                self.global_input.push_event(event);
                self.global_input.current.widget_capturing_mouse = None;
            }
        }
        // Check to see if there is a new widget capturing the mouse.
        if self.global_input.current.widget_capturing_mouse.is_none() {
            if let Some(idx) = widget_under_mouse {
                let source = input::Source::Mouse;
                let event = event::Ui::WidgetCapturesInputSource(idx, source).into();
                self.global_input.push_event(event);
                self.global_input.current.widget_capturing_mouse = Some(idx);
            }
        }
    }
}
/// Handle raw window events and update the `Ui` state accordingly.
///
/// This occurs within several stages:
///
/// 1. Convert the user's given `event` to a `RawEvent` so that the `Ui` may use it.
/// 2. Interpret the `RawEvent` for higher-level `Event`s such as `DoubleClick`,
/// `WidgetCapturesKeyboard`, etc.
/// 3. Update the `Ui`'s `global_input` `State` accordingly, depending on the `RawEvent`.
/// 4. Store newly produced `event::Ui`s within the `global_input` so that they may be filtered
/// and fed to `Widget`s next time `Ui::set_widget` is called.
///
/// This method *drives* the `Ui` forward, and is what allows for using conrod's `Ui` with any
/// window event stream.
///
/// The given `event` must implement the **ToRawEvent** trait so that it can be converted to a
/// `RawEvent` that can be used by the `Ui`.
pub fn handle_event(&mut self, event: event::Input) {
use event::Input;
use input::state::mouse::Button as MouseButton;
use input::{Button, Key, ModifierKey, Motion};
// A function for filtering `ModifierKey`s.
fn filter_modifier(key: Key) -> Option<ModifierKey> {
match key {
Key::LCtrl | Key::RCtrl => Some(ModifierKey::CTRL),
Key::LShift | Key::RShift => Some(ModifierKey::SHIFT),
Key::LAlt | Key::RAlt => Some(ModifierKey::ALT),
Key::LGui | Key::RGui => Some(ModifierKey::GUI),
_ => None,
}
}
// Here we handle all user input given to conrod.
//
// Not only do we store the `Input` event as an `Event::Raw`, we also use them to
// interpret higher level events such as `Click` or `Drag`.
//
// Finally, we also ensure that the `current_state` is up-to-date.
self.global_input.push_event(event.clone().into());
match event {
// Some button was pressed, whether keyboard, mouse or some other device.
Input::Press(button_type) => match button_type {
// Check to see whether we need to (un)capture the keyboard or mouse.
Button::Mouse(mouse_button) => {
// Create a mouse `Press` event.
let mouse_xy = self.global_input.current.mouse.xy;
let press = event::Press {
button: event::Button::Mouse(mouse_button, mouse_xy),
modifiers: self.global_input.current.modifiers,
};
let widget = self.global_input.current.widget_capturing_mouse;
let press_event = event::Ui::Press(widget, press).into();
self.global_input.push_event(press_event);
if let MouseButton::Left = mouse_button {
// Check to see if we need to uncapture the keyboard.
if let Some(idx) = self.global_input.current.widget_capturing_keyboard {
if Some(idx) != self.global_input.current.widget_under_mouse {
let source = input::Source::Keyboard;
let event = event::Ui::WidgetUncapturesInputSource(idx, source);
self.global_input.push_event(event.into());
self.global_input.current.widget_capturing_keyboard = None;
}
}
// Check to see if we need to capture the keyboard.
if let Some(idx) = self.global_input.current.widget_under_mouse {
let source = input::Source::Keyboard;
let event = event::Ui::WidgetCapturesInputSource(idx, source);
self.global_input.push_event(event.into());
self.global_input.current.widget_capturing_keyboard = Some(idx);
}
}
// Keep track of pressed buttons in the current input::State.
let xy = self.global_input.current.mouse.xy;
let widget = self.global_input.current.widget_under_mouse;
self.global_input
.current
.mouse
.buttons
.press(mouse_button, xy, widget);
}
Button::Keyboard(key) => {
// Create a keyboard `Press` event.
let press = event::Press {
button: event::Button::Keyboard(key),
modifiers: self.global_input.current.modifiers,
};
let widget = self.global_input.current.widget_capturing_keyboard;
let press_event = event::Ui::Press(widget, press).into();
self.global_input.push_event(press_event);
// If some modifier key was pressed, add it to the current modifiers.
if let Some(modifier) = filter_modifier(key) {
self.global_input.current.modifiers.insert(modifier);
}
// If `Esc` was pressed, check to see if we need to cancel a `Drag` or
// uncapture a widget.
if let Key::Escape = key {
// TODO:
// 1. Cancel `Drag` if currently under way.
// 2. If mouse is captured due to pinning widget with left mouse button,
// cancel capturing.
}
}
_ => {}
},
// Some button was released.
//
// Checks for events in the following order:
// 1. Click
// 2. DoubleClick
// 2. WidgetUncapturesMouse
Input::Release(button_type) => match button_type {
Button::Mouse(mouse_button) => {
// Create a `Release` event.
let mouse_xy = self.global_input.current.mouse.xy;
let release = event::Release {
button: event::Button::Mouse(mouse_button, mouse_xy),
modifiers: self.global_input.current.modifiers,
};
let widget = self.global_input.current.widget_capturing_mouse;
let release_event = event::Ui::Release(widget, release).into();
self.global_input.push_event(release_event);
// Check for `Click` and `DoubleClick` events.
let down = self.global_input.current.mouse.buttons[mouse_button].if_down();
if let Some((_, widget)) = down {
// The widget that's being clicked.
let clicked_widget =
self.global_input
.current
.widget_under_mouse
.and_then(|released| {
widget.and_then(|pressed| {
if pressed == released {
Some(released)
} else {
None
}
})
});
let click = event::Click {
button: mouse_button,
xy: self.global_input.current.mouse.xy,
modifiers: self.global_input.current.modifiers,
};
let click_event = event::Ui::Click(clicked_widget, click).into();
self.global_input.push_event(click_event);
let now = instant::Instant::now();
let double_click =
self.global_input
.last_click
.and_then(|(last_time, last_click)| {
// If the button of this click is different to the button
// of last click, don't create a `DoubleClick`.
if click.button != last_click.button {
return None;
}
// If the mouse has moved since the last click, don't
// create a `DoubleClick`.
if click.xy != last_click.xy {
return None;
}
// If the duration since the last click is longer than the
// double_click_threshold, don't create a `DoubleClick`.
let duration = now.duration_since(last_time);
// TODO: Work out how to get this threshold from the user's
// system preferences.
let threshold = self.theme.double_click_threshold;
if duration >= threshold {
return None;
}
Some(event::DoubleClick {
button: click.button,
xy: click.xy,
modifiers: click.modifiers,
})
});
if let Some(double_click) = double_click {
// Reset the `last_click` to `None`, as to not register another
// `DoubleClick` on the next consecutive `Click`.
self.global_input.last_click = None;
let double_click_event =
event::Ui::DoubleClick(clicked_widget, double_click).into();
self.global_input.push_event(double_click_event);
} else {
// Set the `Click` that we just stored as the `last_click`.
self.global_input.last_click = Some((now, click));
}
}
// Uncapture widget capturing mouse if MouseButton::Left is down and
// widget_under_mouse != capturing widget.
if let MouseButton::Left = mouse_button {
if let Some(idx) = self.global_input.current.widget_capturing_mouse {
if Some(idx) != self.global_input.current.widget_under_mouse {
let source = input::Source::Mouse;
let event = event::Ui::WidgetUncapturesInputSource(idx, source);
self.global_input.push_event(event.into());
self.global_input.current.widget_capturing_mouse = None;
}
}
}
// Release the given mouse_button from the input::State.
self.global_input
.current
.mouse
.buttons
.release(mouse_button);
}
Button::Keyboard(key) => {
// Create a `Release` event.
let release = event::Release {
button: event::Button::Keyboard(key),
modifiers: self.global_input.current.modifiers,
};
let widget = self.global_input.current.widget_capturing_keyboard;
let release_event = event::Ui::Release(widget, release).into();
self.global_input.push_event(release_event);
// If a modifier key was released, remove it from the current set.
if let Some(modifier) = filter_modifier(key) {
self.global_input.current.modifiers.remove(modifier);
}
}
_ => (),
},
// The window was resized.
Input::Resize(w, h) => {
// Create a `WindowResized` event.
let (w, h) = (w as Scalar, h as Scalar);
let window_resized = event::Ui::WindowResized([w, h]).into();
self.global_input.push_event(window_resized);
self.win_w = w;
self.win_h = h;
self.needs_redraw();
self.track_widget_under_mouse_and_update_capturing();
}
// The mouse cursor was moved to a new position.
//
// Checks for events in the following order:
// 1. `Drag`
// 2. `WidgetUncapturesMouse`
// 3. `WidgetCapturesMouse`
Input::Motion(motion) => {
// Create a `Motion` event.
let move_ = event::Motion {
motion: motion,
modifiers: self.global_input.current.modifiers,
};
let widget = self.global_input.current.widget_capturing_mouse;
let move_event = event::Ui::Motion(widget, move_).into();
self.global_input.push_event(move_event);
match motion {
Motion::MouseCursor { x, y } => {
// Check for drag events.
let last_mouse_xy = self.global_input.current.mouse.xy;
let mouse_xy = [x, y];
let delta_xy = utils::vec2_sub(mouse_xy, last_mouse_xy);
// For each button that is down, trigger a drag event.
let buttons = self.global_input.current.mouse.buttons.clone();
for (btn, btn_xy, widget) in buttons.pressed() {
let total_delta_xy = utils::vec2_sub(mouse_xy, btn_xy);
let distance = (total_delta_xy[0] + total_delta_xy[1]).abs().sqrt();
if distance > self.theme.mouse_drag_threshold {
let event = event::Ui::Drag(
widget,
event::Drag {
button: btn,
origin: btn_xy,
from: last_mouse_xy,
to: mouse_xy,
delta_xy: delta_xy,
total_delta_xy: total_delta_xy,
modifiers: self.global_input.current.modifiers,
},
)
.into();
self.global_input.push_event(event);
}
}
// Update the position of the mouse within the global_input's
// input::State.
self.global_input.current.mouse.xy = mouse_xy;
self.track_widget_under_mouse_and_update_capturing();
}
// Some scrolling occurred (e.g. mouse scroll wheel).
Motion::Scroll { x, y } => {
let mut scrollable_widgets = {
let depth_order = &self.depth_order.indices;
let mouse_xy = self.global_input.current.mouse.xy;
graph::algo::pick_scrollable_widgets(depth_order, mouse_xy)
};
// Iterate through the scrollable widgets from top to bottom.
//
// A scroll event will be created for the first scrollable widget
// that hasn't already reached the bound of the scroll event's
// direction.
while let Some(idx) = scrollable_widgets.next(
&self.widget_graph,
&self.depth_order.indices,
&self.theme,
) {
let (kid_area, maybe_x_scroll, maybe_y_scroll) =
match self.widget_graph.widget(idx) {
Some(widget) => (
widget.kid_area,
widget.maybe_x_scroll_state,
widget.maybe_y_scroll_state,
),
None => continue,
};
fn offset_is_at_bound<A>(
scroll: &widget::scroll::State<A>,
additional_offset: Scalar,
) -> bool {
fn approx_eq(a: Scalar, b: Scalar) -> bool {
(a - b).abs() < 0.000001
}
if additional_offset.is_sign_positive() {
let max = utils::partial_max(
scroll.offset_bounds.start,
scroll.offset_bounds.end,
);
approx_eq(scroll.offset, max)
} else {
let min = utils::partial_min(
scroll.offset_bounds.start,
scroll.offset_bounds.end,
);
approx_eq(scroll.offset, min)
}
}
let mut scroll_x = false;
let mut scroll_y = false;
// Check whether the x axis is scrollable.
if x != 0.0 {
let new_scroll = widget::scroll::State::update(
self,
idx,
&kid_area,
maybe_x_scroll,
x,
);
if let Some(prev_scroll) = maybe_x_scroll {
let (prev_is_at_bound, new_is_at_bound) = (
offset_is_at_bound(&prev_scroll, x),
offset_is_at_bound(&new_scroll, x),
);
scroll_x = !prev_is_at_bound || !new_is_at_bound;
}
}
// Check whether the y axis is scrollable.
if y != 0.0 {
let new_scroll = widget::scroll::State::update(
self,
idx,
&kid_area,
maybe_y_scroll,
y,
);
if let Some(prev_scroll) = maybe_y_scroll {
let (prev_is_at_bound, new_is_at_bound) = (
offset_is_at_bound(&prev_scroll, y),
offset_is_at_bound(&new_scroll, y),
);
scroll_y = !prev_is_at_bound || !new_is_at_bound;
}
}
// Create a `Scroll` event if either axis is scrollable.
if scroll_x || scroll_y {
let event = event::Ui::Scroll(
Some(idx),
event::Scroll {
x: x,
y: y,
modifiers: self.global_input.current.modifiers,
},
)
.into();
self.global_input.push_event(event);
// Now that we've scrolled the top, scrollable widget,
// we're done with the loop.
break;
}
}
// If no scrollable widgets could be scrolled, emit the event to
// the widget that currently captures the mouse.
if x != 0.0 || y != 0.0 {
let widget = self.global_input.current.widget_capturing_mouse;
if let Some(idx) = widget {
if let Some(widget) = self.widget_graph.widget(idx) {
// Only create the event if the widget is not
// scrollable, as the event would have already been
// created within the above loop.
if widget.maybe_x_scroll_state.is_none()
&& widget.maybe_y_scroll_state.is_none()
{
let scroll = event::Scroll {
x: x,
y: y,
modifiers: self.global_input.current.modifiers,
};
let event = event::Ui::Scroll(Some(idx), scroll);
self.global_input.push_event(event.into());
}
}
}
}
// Now that there might be a different widget under the mouse, we
// must update the capturing state.
self.track_widget_under_mouse_and_update_capturing();
}
_ => (),
}
}
Input::Text(string) => {
// Create a `Text` event.
let text = event::Text {
string: string,
modifiers: self.global_input.current.modifiers,
};
let widget = self.global_input.current.widget_capturing_keyboard;
let text_event = event::Ui::Text(widget, text).into();
self.global_input.push_event(text_event);
}
Input::Touch(touch) => match touch.phase {
input::touch::Phase::Start => {
// Find the widget under the touch.
let widget_under_touch =
graph::algo::pick_widgets(&self.depth_order.indices, touch.xy).next(
&self.widget_graph,
&self.depth_order.indices,
&self.theme,
);
// The start of the touch interaction state to be stored.
let start = input::state::touch::Start {
time: instant::Instant::now(),
xy: touch.xy,
widget: widget_under_touch,
};
// The touch interaction state to be stored in the map.
let state = input::state::touch::Touch {
start: start,
xy: touch.xy,
widget: widget_under_touch,
};
// Insert the touch state into the map.
self.global_input.current.touch.insert(touch.id, state);
// Push touch event.
let event = event::Ui::Touch(widget_under_touch, touch);
self.global_input.push_event(event.into());
// Push capture event.
if let Some(widget) = widget_under_touch {
let source = input::Source::Touch(touch.id);
let event = event::Ui::WidgetCapturesInputSource(widget, source);
self.global_input.push_event(event.into());
}
}
input::touch::Phase::Move => {
// Update the widget under the touch and return the widget capturing the touch.
let widget = match self.global_input.current.touch.get_mut(&touch.id) {
Some(touch_state) => {
touch_state.widget =
graph::algo::pick_widgets(&self.depth_order.indices, touch.xy)
.next(
&self.widget_graph,
&self.depth_order.indices,
&self.theme,
);
touch_state.xy = touch.xy;
touch_state.start.widget
}
None => None,
};
let event = event::Ui::Touch(widget, touch);
self.global_input.push_event(event.into());
}
input::touch::Phase::Cancel => {
let widget = self
.global_input
.current
.touch
.remove(&touch.id)
.and_then(|t| t.start.widget);
let event = event::Ui::Touch(widget, touch);
self.global_input.push_event(event.into());
// Generate an "uncaptures" event if necessary.
if let Some(widget) = widget {
let source = input::Source::Touch(touch.id);
let event = event::Ui::WidgetUncapturesInputSource(widget, source);
self.global_input.push_event(event.into());
}
}
input::touch::Phase::End => {
let old_touch = self
.global_input
.current
.touch
.remove(&touch.id)
.map(|touch| touch);
let widget_capturing = old_touch.as_ref().and_then(|touch| touch.start.widget);
let event = event::Ui::Touch(widget_capturing, touch);
self.global_input.push_event(event.into());
// Create a `Tap` event.
//
// If the widget at the end of the touch is the same as the widget at the start
// of the touch, that widget receives the `Tap`.
let tapped_widget =
graph::algo::pick_widgets(&self.depth_order.indices, touch.xy)
.next(&self.widget_graph, &self.depth_order.indices, &self.theme)
.and_then(|widget| match Some(widget) == widget_capturing {
true => Some(widget),
false => None,
});
let tap = event::Tap {
id: touch.id,
xy: touch.xy,
};
let event = event::Ui::Tap(tapped_widget, tap);
self.global_input.push_event(event.into());
// Generate an "uncaptures" event if necessary.
if let Some(widget) = widget_capturing {
let source = input::Source::Touch(touch.id);
let event = event::Ui::WidgetUncapturesInputSource(widget, source);
self.global_input.push_event(event.into());
}
}
},
Input::Focus(focused) if focused == true => self.needs_redraw(),
Input::Focus(_focused) => (),
Input::Redraw => self.needs_redraw(),
}
}
    /// An immutable reference to the `Ui`'s global input state.
    ///
    /// The `input::Global` aggregates incoming events and provides them to widgets. It can be
    /// used to inspect the current input state, e.g. which widgets are currently capturing the
    /// mouse or keyboard.
    pub fn global_input(&self) -> &input::Global {
        &self.global_input
    }
/// Set keyboard capturing widget
pub fn keyboard_capture(&mut self, idx: widget::Id) {
let source = input::Source::Keyboard;
if self
.global_input
.current
.widget_capturing_keyboard
.is_some()
{
let event = event::Ui::WidgetUncapturesInputSource(idx, source);
self.global_input.push_event(event.into());
self.global_input.current.widget_capturing_keyboard = None;
}
let event = event::Ui::WidgetCapturesInputSource(idx, source).into();
self.global_input.push_event(event);
self.global_input.current.widget_capturing_keyboard = Some(idx);
}
    /// Get the centred xy coords for some given `Dimension`s, `Position` and alignment.
    ///
    /// If getting the xy for a specific widget, its `widget::Id` should be specified so that we
    /// can also consider the scroll offset of the scrollable parent widgets.
    ///
    /// The `place_on_kid_area` argument specifies whether or not **Place** **Position** variants
    /// should target a **Widget**'s `kid_area`, or simply the **Widget**'s total area.
    pub fn calc_xy(
        &self,
        maybe_id: Option<widget::Id>,
        maybe_parent_id: Option<widget::Id>,
        x_position: Position,
        y_position: Position,
        dim: Dimensions,
        place_on_kid_area: bool,
    ) -> Point {
        use utils::vec2_add;
        // Retrieves the absolute **Scalar** position from the given position for a single axis.
        //
        // The axis used is specified by the given range_from_rect function which, given some
        // **Rect**, returns the relevant **Range** (`x_range`/`y_range` below). Likewise
        // `start_and_end_pad` selects the matching axis of a **Padding**.
        fn abs_from_position<R, P>(
            ui: &Ui,
            maybe_parent_id: Option<widget::Id>,
            position: Position,
            dim: Scalar,
            place_on_kid_area: bool,
            range_from_rect: R,
            start_and_end_pad: P,
        ) -> Scalar
        where
            R: FnOnce(Rect) -> Range,
            P: FnOnce(Padding) -> Range,
        {
            // Absolute positions need no further context — return them directly.
            let (relative, maybe_id) = match position {
                Position::Absolute(abs) => return abs,
                Position::Relative(relative, maybe_id) => (relative, maybe_id),
            };
            match relative {
                // A scalar offset from the middle of some other widget; falls back to the
                // previously set widget, then the window, when no id is given.
                position::Relative::Scalar(scalar) => maybe_id
                    .or(ui.maybe_prev_widget_id)
                    .or(Some(ui.window.into()))
                    .and_then(|idx| ui.rect_of(idx).map(range_from_rect))
                    .map(|other_range| other_range.middle() + scalar)
                    .unwrap_or(scalar),
                // Placed before/after another widget along this axis, offset by `amt`.
                position::Relative::Direction(direction, amt) => maybe_id
                    .or(ui.maybe_prev_widget_id)
                    .and_then(|idx| ui.rect_of(idx).map(range_from_rect))
                    .map(|other_range| {
                        let range = Range::from_pos_and_len(0.0, dim);
                        match direction {
                            Direction::Forwards => range.align_after(other_range).middle() + amt,
                            Direction::Backwards => range.align_before(other_range).middle() - amt,
                        }
                    })
                    .unwrap_or_else(|| match direction {
                        Direction::Forwards => amt,
                        Direction::Backwards => -amt,
                    }),
                // Aligned to the start/middle/end of another widget's range on this axis.
                position::Relative::Align(align) => maybe_id
                    .or(ui.maybe_prev_widget_id)
                    .or(Some(ui.window.into()))
                    .and_then(|idx| ui.rect_of(idx).map(range_from_rect))
                    .map(|other_range| {
                        let range = Range::from_pos_and_len(0.0, dim);
                        match align {
                            Align::Start => range.align_start_of(other_range).middle(),
                            Align::Middle => other_range.middle(),
                            Align::End => range.align_end_of(other_range).middle(),
                        }
                    })
                    .unwrap_or(0.0),
                // Placed *inside* a parent widget, optionally padded by the parent's
                // `kid_area` padding when `place_on_kid_area` is set.
                position::Relative::Place(place) => {
                    let parent_id = maybe_id
                        .or(maybe_parent_id)
                        .or(ui.maybe_current_parent_id)
                        .unwrap_or(ui.window.into());
                    let maybe_area = match place_on_kid_area {
                        true => ui
                            .widget_graph
                            .widget(parent_id)
                            .map(|w| w.kid_area)
                            .map(|k| (range_from_rect(k.rect), start_and_end_pad(k.pad))),
                        false => ui
                            .rect_of(parent_id)
                            .map(|rect| (range_from_rect(rect), Range::new(0.0, 0.0))),
                    };
                    maybe_area
                        .map(|(parent_range, pad)| {
                            let range = Range::from_pos_and_len(0.0, dim);
                            let parent_range = parent_range.pad_start(pad.start).pad_end(pad.end);
                            match place {
                                position::Place::Start(maybe_mgn) => {
                                    range.align_start_of(parent_range).middle()
                                        + maybe_mgn.unwrap_or(0.0)
                                }
                                position::Place::Middle => parent_range.middle(),
                                position::Place::End(maybe_mgn) => {
                                    range.align_end_of(parent_range).middle()
                                        - maybe_mgn.unwrap_or(0.0)
                                }
                            }
                        })
                        .unwrap_or(0.0)
                }
            }
        }
        // Axis selectors passed to `abs_from_position` for x and y respectively.
        fn x_range(rect: Rect) -> Range {
            rect.x
        }
        fn y_range(rect: Rect) -> Range {
            rect.y
        }
        fn x_pad(pad: Padding) -> Range {
            pad.x
        }
        fn y_pad(pad: Padding) -> Range {
            pad.y
        }
        let x = abs_from_position(
            self,
            maybe_parent_id,
            x_position,
            dim[0],
            place_on_kid_area,
            x_range,
            x_pad,
        );
        let y = abs_from_position(
            self,
            maybe_parent_id,
            y_position,
            dim[1],
            place_on_kid_area,
            y_range,
            y_pad,
        );
        let xy = [x, y];
        // Add the widget's parents' total combined scroll offset to the given xy.
        maybe_id
            .map(|idx| vec2_add(xy, graph::algo::scroll_offset(&self.widget_graph, idx)))
            .unwrap_or(xy)
    }
    /// A function within which all widgets are instantiated by the user, normally situated within
    /// the "update" stage of an event loop.
    ///
    /// Returns a `UiCell` through which widgets are `.set(...)`; the cell's `Drop` impl finalises
    /// the update cycle when it goes out of scope.
    pub fn set_widgets(&mut self) -> UiCell {
        // Reset per-cycle placement state before any widget is instantiated.
        self.maybe_prev_widget_id = None;
        self.maybe_current_parent_id = None;
        // Move the previous `updated_widgets` to `prev_updated_widgets` and clear
        // `updated_widgets` so that we're ready to store the newly updated widgets.
        {
            let Ui {
                ref mut updated_widgets,
                ref mut prev_updated_widgets,
                ..
            } = *self;
            std::mem::swap(updated_widgets, prev_updated_widgets);
            updated_widgets.clear();
        }
        let mut ui_cell = UiCell { ui: self };
        // Instantiate the root `Window` `Widget`.
        //
        // This widget acts as the parent-most widget and root node for the Ui's `widget_graph`,
        // upon which all other widgets are placed.
        {
            use {color, Borderable, Colorable, Positionable};
            type Window = widget::BorderedRectangle;
            Window::new([ui_cell.win_w, ui_cell.win_h])
                .no_parent()
                .x_y(0.0, 0.0)
                .border(0.0)
                .border_color(color::BLACK.alpha(0.0))
                .color(
                    ui_cell
                        .maybe_background_color
                        .unwrap_or(color::BLACK.alpha(0.0)),
                )
                .set(ui_cell.window, &mut ui_cell);
        }
        // All user widgets default to being children of the root window widget.
        ui_cell.ui.maybe_current_parent_id = Some(ui_cell.window.into());
        // Reset the cursor each cycle; widgets may override it during the update.
        ui_cell.set_mouse_cursor(cursor::MouseCursor::Arrow);
        ui_cell
    }
    /// Set the number of frames that the `Ui` should draw in the case that `needs_redraw` is
    /// called. The default is `3` (see the SAFE_REDRAW_COUNT docs for details).
    pub fn set_num_redraw_frames(&mut self, num_frames: u8) {
        // Stored as-is; `needs_redraw` copies this value into the atomic redraw counter.
        self.num_redraw_frames = num_frames;
    }
    /// Tells the `Ui` that it needs to re-draw everything. It does this by setting the redraw
    /// count to `num_redraw_frames`. See the docs for `set_num_redraw_frames`, SAFE_REDRAW_COUNT
    /// or `draw_if_changed` for more info on how/why the redraw count is used.
    pub fn needs_redraw(&self) {
        // The count lives in an atomic so that it can be set through a shared `&self`.
        self.redraw_count
            .store(self.num_redraw_frames as usize, atomic::Ordering::Relaxed);
    }
    /// The first of the `Primitives` yielded by `Ui::draw` or `Ui::draw_if_changed` will always
    /// be a `Rectangle` the size of the window in which conrod is hosted.
    ///
    /// This method sets the colour with which this `Rectangle` is drawn (the default being
    /// `conrod::color::TRANSPARENT`).
    pub fn clear_with(&mut self, color: Color) {
        self.maybe_background_color = Some(color);
    }
/// Draw the `Ui` in it's current state.
///
/// NOTE: If you don't need to redraw your conrod GUI every frame, it is recommended to use the
/// `Ui::draw_if_changed` method instead.
pub fn draw(&self) -> render::Primitives {
let Ui {
ref redraw_count,
ref widget_graph,
ref depth_order,
ref theme,
ref fonts,
win_w,
win_h,
..
} = *self;
// Use the depth_order indices as the order for drawing.
let indices = &depth_order.indices;
// We're about to draw everything, so take one from the redraw count.
let remaining_redraws = redraw_count.load(atomic::Ordering::Relaxed);
if remaining_redraws > 0 {
redraw_count.store(remaining_redraws - 1, atomic::Ordering::Relaxed);
}
render::Primitives::new(widget_graph, indices, theme, fonts, [win_w, win_h])
}
/// Same as the `Ui::draw` method, but *only* draws if the `redraw_count` is greater than 0.
///
/// The `redraw_count` is set to `SAFE_REDRAW_COUNT` whenever a `Widget` indicates that it
/// needs to be re-drawn.
///
/// It can also be triggered manually by the user using the `Ui::needs_redraw` method.
///
/// This method is generally preferred over `Ui::draw` as it requires far less CPU usage, only
/// redrawing to the screen if necessary.
///
/// Note that when `Ui::needs_redraw` is triggered, it sets the `redraw_count` to 3 by default.
/// This ensures that conrod is drawn to each buffer in the case that there is buffer swapping
/// happening. Let us know if you need finer control over this and we'll expose a way for you
/// to set the redraw count manually.
pub fn draw_if_changed(&self) -> Option<render::Primitives> {
if self.has_changed() {
return Some(self.draw());
}
None
}
    /// Returns whether the redraw_count is greater than 0, i.e. whether `draw_if_changed` would
    /// actually draw.
    /// See `Ui::draw_if_changed` for when this is triggered.
    pub fn has_changed(&self) -> bool {
        self.redraw_count.load(atomic::Ordering::Relaxed) > 0
    }
    /// The **Rect** that bounds the kids of the widget with the given index.
    ///
    /// Computed over the widgets updated during the *previous* update cycle.
    pub fn kids_bounding_box(&self, id: widget::Id) -> Option<Rect> {
        graph::algo::kids_bounding_box(&self.widget_graph, &self.prev_updated_widgets, id)
    }
    /// The **Rect** that represents the maximum fully visible area for the widget with the given
    /// index, including consideration of cropped scroll area.
    ///
    /// Otherwise, return None if the widget is not visible.
    pub fn visible_area(&self, id: widget::Id) -> Option<Rect> {
        // Cropping by scrollable ancestors is handled by the graph algorithm.
        graph::algo::cropped_area_of_widget(&self.widget_graph, id)
    }
    /// Get mouse cursor state.
    ///
    /// Returns the cursor most recently set via `UiCell::set_mouse_cursor`.
    pub fn mouse_cursor(&self) -> cursor::MouseCursor {
        self.mouse_cursor
    }
}
impl<'a> UiCell<'a> {
    /// The `Theme` currently in use by the `Ui`.
    pub fn theme(&self) -> &Theme {
        &self.ui.theme
    }
    /// Borrow the `Font` associated with the given `Id`, if one is loaded.
    pub fn font(&self, id: text::font::Id) -> Option<&text::Font> {
        self.ui.fonts.get(id)
    }
    /// The current `[width, height]` of the host window.
    pub fn window_dim(&self) -> Dimensions {
        [self.ui.win_w, self.ui.win_h]
    }
    /// Immutable access to the aggregated `input::Global` state of the `Ui`.
    ///
    /// All coordinates here are relative to the centre of the window.
    pub fn global_input(&self) -> &input::Global {
        &self.ui.global_input
    }
    /// Input events addressed to the widget at `id`, with all coordinates made
    /// relative to that widget.
    pub fn widget_input(&self, id: widget::Id) -> input::Widget {
        self.ui.widget_input(id)
    }
    /// A generator for fresh, unique `widget::Id`s.
    ///
    /// See the [**widget::id::Generator**](../widget/id/struct.Generator.html) docs for details on
    /// how to use this correctly.
    pub fn widget_id_generator(&mut self) -> widget::id::Generator {
        self.ui.widget_id_generator()
    }
    /// The bounding **Rect** around the children of the widget at `id`.
    ///
    /// Returns `None` if the widget has no children or if there's is no widget for the given index.
    pub fn kids_bounding_box(&self, id: widget::Id) -> Option<Rect> {
        self.ui.kids_bounding_box(id)
    }
    /// Queue a scroll of the widget at `id` by the given offset.
    ///
    /// The produced `Scroll` event is buffered in `pending_scroll_events` and applied to the
    /// widget during the next call to `Ui::set_widgets`. A zero offset is a no-op.
    pub fn scroll_widget(&mut self, id: widget::Id, offset: [Scalar; 2]) {
        let (x, y) = (offset[0], offset[1]);
        if x == 0.0 && y == 0.0 {
            return;
        }
        let scroll = event::Scroll {
            x: x,
            y: y,
            modifiers: self.ui.global_input.current.modifiers,
        };
        self.ui
            .pending_scroll_events
            .push(event::Ui::Scroll(Some(id), scroll));
    }
    /// Set the mouse cursor that should be displayed.
    pub fn set_mouse_cursor(&mut self, cursor: cursor::MouseCursor) {
        self.ui.mouse_cursor = cursor;
    }
}
/// Finalises an update cycle when the `UiCell` returned by `Ui::set_widgets` goes out of scope.
/// The order of the steps below is significant.
impl<'a> Drop for UiCell<'a> {
    fn drop(&mut self) {
        // We'll need to re-draw if we have gained or lost widgets.
        let changed = self.ui.updated_widgets != self.ui.prev_updated_widgets;
        if changed {
            self.ui.needs_redraw();
        }
        // Update the **DepthOrder** so that it reflects the **Graph**'s current state.
        {
            let Ui {
                ref widget_graph,
                ref mut depth_order,
                window,
                ref updated_widgets,
                ..
            } = *self.ui;
            depth_order.update(widget_graph, window, updated_widgets);
        }
        // Reset the global input state. Note that this is the **only** time this should be called.
        self.ui.global_input.clear_events_and_update_start_state();
        // Update which widget is under the cursor (must run after the depth order update so
        // picking sees the new front-to-back ordering).
        if changed {
            self.ui.track_widget_under_mouse_and_update_capturing();
        }
        // Move all pending `Scroll` events that have been produced since the start of this method
        // into the `global_input` event buffer.
        for scroll_event in self.ui.pending_scroll_events.drain(0..) {
            self.ui.global_input.push_event(scroll_event.into());
        }
    }
}
/// `UiCell` dereferences to the `Ui` it borrows, giving convenient read-only access to all of
/// `Ui`'s public methods.
impl<'a> ::std::ops::Deref for UiCell<'a> {
    type Target = Ui;
    fn deref(&self) -> &Ui {
        self.ui
    }
}
/// Allows a `UiCell` to be passed wherever an `AsRef<Ui>` (i.e. a `&Ui`) is accepted.
impl<'a> AsRef<Ui> for UiCell<'a> {
    fn as_ref(&self) -> &Ui {
        &self.ui
    }
}
/// A function for retrieving the `&mut Ui<B>` from a `UiCell<B>`.
///
/// This function is only for internal use to allow for some `Ui` type acrobatics in order to
/// provide a nice *safe* API for the user.
pub fn ref_mut_from_ui_cell<'a, 'b: 'a>(ui_cell: &'a mut UiCell<'b>) -> &'a mut Ui {
    // `UiCell` itself only exposes immutable `Ui` access; this free function is the
    // documented escape hatch for mutable access.
    ui_cell.ui
}
/// A mutable reference to the given `Ui`'s widget `Graph`.
///
/// Provided as a free function rather than a `Ui` method, mirroring `ref_mut_from_ui_cell`.
pub fn widget_graph_mut(ui: &mut Ui) -> &mut Graph {
    &mut ui.widget_graph
}
/// Infer a widget's `Depth` parent by examining its *x* and *y* `Position`s.
///
/// When a different parent may be inferred from either `Position`, the *x* `Position` is favoured
/// (within each arm below the *x* pattern is listed first, so it binds when both axes match).
pub fn infer_parent_from_position(ui: &Ui, x: Position, y: Position) -> Option<widget::Id> {
    use position::Relative::{Align, Direction, Place, Scalar};
    use Position::Relative;
    match (x, y) {
        // `Place` positions carry the parent's id directly.
        (Relative(Place(_), maybe_parent_id), _) | (_, Relative(Place(_), maybe_parent_id)) => {
            maybe_parent_id
        }
        // The remaining relative variants reference a *sibling* widget, so the parent is that
        // sibling's (or, failing that, the previously set widget's) depth parent.
        (Relative(Direction(_, _), maybe_id), _)
        | (_, Relative(Direction(_, _), maybe_id))
        | (Relative(Align(_), maybe_id), _)
        | (_, Relative(Align(_), maybe_id))
        | (Relative(Scalar(_), maybe_id), _)
        | (_, Relative(Scalar(_), maybe_id)) => maybe_id
            .or(ui.maybe_prev_widget_id)
            .and_then(|idx| ui.widget_graph.depth_parent(idx)),
        // Absolute positions on both axes imply no inferable parent.
        _ => None,
    }
}
/// Attempts to infer the parent of a widget from its *x*/*y* `Position`s and the current state of
/// the `Ui`.
///
/// Preference order: a parent inferred from the positions, then the `Ui`'s
/// `maybe_current_parent_id`, and finally the root `window` widget.
///
/// **Note:** This function does not check whether or not using the `window` widget would cause a
/// cycle.
pub fn infer_parent_unchecked(ui: &Ui, x_pos: Position, y_pos: Position) -> widget::Id {
    match infer_parent_from_position(ui, x_pos, y_pos) {
        Some(id) => id,
        None => ui.maybe_current_parent_id.unwrap_or(ui.window.into()),
    }
}
/// Cache some `PreUpdateCache` widget data into the widget graph.
/// Set the widget that is being cached as the new `prev_widget`.
/// Set the widget's parent as the new `current_parent`.
pub fn pre_update_cache(ui: &mut Ui, widget: widget::PreUpdateCache) {
    ui.maybe_prev_widget_id = Some(widget.id);
    ui.maybe_current_parent_id = widget.maybe_parent_id;
    let widget_id = widget.id;
    // `updated_widgets.len()` is read *before* inserting below — presumably it serves as this
    // widget's instantiation-order index for the current cycle; confirm against
    // `Graph::pre_update_cache`.
    ui.widget_graph
        .pre_update_cache(ui.window, widget, ui.updated_widgets.len());
    // Add the widget's `widget::Id` to the set of updated widgets.
    ui.updated_widgets.insert(widget_id);
}
/// Cache some `PostUpdateCache` widget data into the widget graph.
/// Set the widget that is being cached as the new `prev_widget`.
/// Set the widget's parent as the new `current_parent`.
pub fn post_update_cache<W>(ui: &mut Ui, widget: widget::PostUpdateCache<W>)
where
    W: Widget,
    W::State: 'static,
    W::Style: 'static,
{
    // Mirrors `pre_update_cache`, but runs after the widget's `update` has produced its
    // final `State` and `Style`.
    ui.maybe_prev_widget_id = Some(widget.id);
    ui.maybe_current_parent_id = widget.maybe_parent_id;
    ui.widget_graph.post_update_cache(widget);
}
| true
|
d14383111457cefa7b0ba6cd21f3b242a2afb459
|
Rust
|
katsyoshi/zatsu
|
/rust/atcoder/practice_contest/src/d.rs
|
UTF-8
| 702
| 3.203125
| 3
|
[] |
no_license
|
// Reads one line from stdin and parses every whitespace-separated token into a `T`.
// Panics (via `unwrap`) on an I/O failure or if any token fails to parse.
fn read_line<T>() -> Vec<T>
where
    T: std::str::FromStr,
    <T as std::str::FromStr>::Err: std::fmt::Debug,
{
    let mut buf = String::new();
    std::io::stdin().read_line(&mut buf).unwrap();
    buf.trim()
        .split_whitespace()
        .map(|token| token.parse().unwrap())
        .collect()
}
// Returns `true` when no element of `v` has remainder 1 modulo 2, i.e. every element is
// even (for the non-negative inputs this puzzle deals with). Takes a slice instead of
// `&Vec<i32>` (the lifetime was also needless); existing callers still work via deref
// coercion. `% 2 == 1` is kept rather than `% 2 == 0` so that negative odd numbers are
// treated exactly as before (`-3 % 2 == -1` in Rust).
fn c(v: &[i32]) -> bool {
    !v.iter().any(|&i| i % 2 == 1)
}

// While every element of `v` is even, halves all elements and increments `*n` once per
// round — i.e. `*n` ends up increased by the number of times the whole list can be
// halved. The returned `Vec` is an artifact of the recursion and is always empty at the
// top level; the return type is kept for caller compatibility.
fn f(v: &[i32], n: &mut i32) -> Vec<i32> {
    if c(v) {
        *n += 1;
        let halved: Vec<i32> = v.iter().map(|&i| i / 2).collect();
        f(&halved, n)
    } else {
        vec![]
    }
}
fn main() {
    // The first input line (the problem's size parameters) is read and discarded;
    // only the second line of numbers is used.
    let _: Vec<usize> = read_line();
    let x: Vec<i32> = read_line();
    let mut y: i32 = 0;
    // Count how many times the whole list can be halved while every entry is even.
    f(&x, &mut y);
    println!("{}", y);
}
| true
|
1040949077d225808c26ab31919ee8abdf15aa97
|
Rust
|
KuekenPartei/proofmarshal
|
/hoard/src/pile/mod.rs
|
UTF-8
| 1,418
| 2.5625
| 3
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
use crate::blob::Blob;
use crate::load::{Load, LoadIn, LoadRefIn, MaybeValid};
use crate::ptr::{TryGet, AsZone};
use crate::owned::Take;
pub use crate::offset::{Offset, OffsetMut, Error, SliceGetBlobError};
/// A pile backed by a byte buffer `B`.
///
/// NOTE(review): loading methods are only provided when `B: AsRef<[u8]>` — presumably `B` is an
/// in-memory mapping of serialized data; confirm against `crate::offset`.
#[derive(Debug, Default)]
pub struct Pile<B> {
    // The raw backing bytes.
    inner: B,
}
impl<B> Pile<B> {
    /// Wraps the given backing buffer in a `Pile`.
    pub fn new(inner: B) -> Self {
        Self { inner }
    }
}
impl<B: AsRef<[u8]>> Pile<B> {
    /// Attempts to load a `T` from the "tip" (the end) of the pile.
    ///
    /// The tip is taken to be the trailing `T::Blob::SIZE` bytes of the backing buffer;
    /// `saturating_sub` clamps the offset to 0 for undersized buffers so that the decoding
    /// step reports the error rather than this arithmetic panicking.
    pub fn try_get_tip<'p, T>(&'p self) -> Result<MaybeValid<T>, Error<SliceGetBlobError>>
    where
        T: Load,
        &'p [u8]: AsZone<T::Zone>,
    {
        let mapping: &'p [u8] = self.inner.as_ref();
        let offset = mapping.len().saturating_sub(T::Blob::SIZE);
        let offset = Offset::new(offset as u64, mapping);
        // SAFETY: assumed sound because `offset` was constructed from `mapping` itself —
        // TODO(review): confirm the exact contract of `Offset::try_take` in `crate::offset`.
        unsafe {
            offset.try_take::<T>(())
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::bag::Bag;

    // A one-byte pile whose tip is a plain `u8`.
    #[test]
    fn pile_try_get_tip_trivial() -> Result<(), Box<dyn std::error::Error>> {
        let pile = Pile::new(vec![42]);
        let tip = pile.try_get_tip::<u8>()?.trust();
        assert_eq!(tip, 42);
        Ok(())
    }

    // A pile whose tip is a `Bag` that points back into the pile: one payload byte
    // followed by eight zero bytes (presumably the serialized offset — confirm layout).
    #[test]
    fn pile_try_get_tip() -> Result<(), Box<dyn std::error::Error>> {
        let pile = Pile::new(vec![
            42,
            0,0,0,0,0,0,0,0,
        ]);
        let tip: Bag<u8, OffsetMut<&[u8]>> = pile.try_get_tip()?.trust();
        assert_eq!(tip.get(), &42);
        Ok(())
    }
}
| true
|
4088451acdb3ef90515cbdfce4b7406f9acc6cdd
|
Rust
|
EFanZh/LeetCode
|
/src/problem_1482_minimum_number_of_days_to_make_m_bouquets/binary_search_2.rs
|
UTF-8
| 2,524
| 3.125
| 3
|
[] |
no_license
|
pub struct Solution;

// ------------------------------------------------------ snip ------------------------------------------------------ //

use std::num::NonZeroU32;

impl Solution {
    /// Returns the `(min, max)` of `values` as `u32`s.
    ///
    /// Panics if `values` is empty; callers guarantee at least one bloom day before
    /// calling (see `inner`).
    fn min_max(values: &[i32]) -> (u32, u32) {
        let mut iter = values.iter().map(|&num| num as u32);
        let first = iter.next().unwrap();

        // A straightforward single pass is clearer than the previous hand-rolled
        // pairwise scan and has the same O(n) cost.
        iter.fold((first, first), |(min, max), value| {
            (min.min(value), max.max(value))
        })
    }

    /// Core search: the minimum day on which `m` bouquets of `k` *adjacent* flowers can
    /// be made, or `u32::MAX` when it is impossible.
    fn inner(bloom_day: &[i32], m: u32, k: NonZeroU32) -> u32 {
        // Not enough flowers in total -> impossible.
        if bloom_day.len() as u32 / k < m {
            u32::MAX
        } else {
            // `check(day)`: can at least `m` bouquets be made if we wait until `day`?
            // Each maximal run of bloomed flowers contributes `run_len / k` bouquets.
            let check = |middle: u32| {
                let mut length = 0;
                let mut bouquets = 0;
                for &day in bloom_day {
                    if day as u32 <= middle {
                        length += 1;
                    } else {
                        bouquets += length / k;
                        if bouquets >= m {
                            return true;
                        }
                        length = 0;
                    }
                }
                bouquets += length / k;
                bouquets >= m
            };

            // `check` is monotone in the day, so binary search the smallest feasible day
            // within [min bloom day, max bloom day]. The sum cannot overflow `u32` for
            // the problem's 10^9 bound on bloom days.
            let (mut left, mut right) = Self::min_max(bloom_day);
            while left < right {
                let middle = (left + right) / 2;
                if check(middle) {
                    right = middle;
                } else {
                    left = middle + 1;
                }
            }
            left
        }
    }

    /// Public entry point; returns `-1` (via the `u32::MAX as i32` cast) when impossible.
    pub fn min_days(bloom_day: Vec<i32>, m: i32, k: i32) -> i32 {
        Self::inner(&bloom_day, m as _, NonZeroU32::new(k as _).unwrap()) as _
    }
}
// ------------------------------------------------------ snip ------------------------------------------------------ //
// Adapter wiring this module's `Solution` into the problem's shared `Solution` trait so the
// common test harness (see `mod tests` below) can exercise this implementation.
impl super::Solution for Solution {
    fn min_days(bloom_day: Vec<i32>, m: i32, k: i32) -> i32 {
        Self::min_days(bloom_day, m, k)
    }
}
#[cfg(test)]
mod tests {
    // Runs the problem's shared test cases against this implementation.
    #[test]
    fn test_solution() {
        super::super::tests::run::<super::Solution>();
    }
}
| true
|
ab224e08a975dc405b295412af7b41bff149b9b1
|
Rust
|
cmyr/cargo-instruments
|
/src/instruments.rs
|
UTF-8
| 15,344
| 3.078125
| 3
|
[
"MIT"
] |
permissive
|
//! interfacing with the `instruments` command line tool
use std::fmt::Write;
use std::fs;
use std::path::{Path, PathBuf};
use std::process::{Command, Output};
use anyhow::{anyhow, Result};
use cargo::core::Workspace;
use semver::Version;
use crate::opt::AppConfig;
/// Holds available templates.
pub struct TemplateCatalog {
    // Templates that ship with Xcode Instruments.
    standard_templates: Vec<String>,
    // User-defined templates; may be empty (see `available_templates`).
    custom_templates: Vec<String>,
}
/// Represents the Xcode Instrument version detected.
pub enum XcodeInstruments {
    // Modern toolchains (macOS >= 10.15): profile via `xcrun xctrace`.
    XcTrace,
    // Older toolchains: the standalone `/usr/bin/instruments` binary.
    InstrumentsBinary,
}
impl XcodeInstruments {
    /// Detects which version of Xcode Instruments is installed and if it can be launched.
    pub(crate) fn detect() -> Result<XcodeInstruments> {
        let cur_version = get_macos_version()?;
        // `xctrace` shipped with Xcode on macOS 10.15 (Catalina) and later.
        let macos_xctrace_version = Version::parse("10.15.0").unwrap();
        if cur_version >= macos_xctrace_version {
            // This is the check used by Homebrew, see
            // https://github.com/Homebrew/install/blob/a1d820fc8950312c35073700d0ea88a531bc5950/install.sh#L216
            let clt_git_filepath = Path::new("/Library/Developer/CommandLineTools/usr/bin/git");
            if clt_git_filepath.exists() {
                return Ok(XcodeInstruments::XcTrace);
            }
        } else {
            let instruments_app_filepath = Path::new("/usr/bin/instruments");
            if instruments_app_filepath.exists() {
                return Ok(XcodeInstruments::InstrumentsBinary);
            }
        }
        Err(anyhow!(
            "Xcode Instruments is not installed. Please install the Xcode Command Line Tools."
        ))
    }
    /// Return a catalog of available Instruments Templates.
    ///
    /// The custom templates only appears if you have custom templates.
    pub(crate) fn available_templates(&self) -> Result<TemplateCatalog> {
        match self {
            XcodeInstruments::XcTrace => parse_xctrace_template_list(),
            XcodeInstruments::InstrumentsBinary => parse_instruments_template_list(),
        }
    }
    /// Prepare the Xcode Instruments profiling command
    ///
    /// If the `xctrace` tool is used, the prepared command looks like
    ///
    /// ```sh
    /// xcrun xctrace record --template MyTemplate \
    ///                      --time-limit 5000ms \
    ///                      --output path/to/tracefile \
    ///                      --launch \
    ///                      --
    /// ```
    ///
    /// If the older `instruments` tool is used, the prepared command looks
    /// like
    ///
    /// ```sh
    /// instruments -t MyTemplate \
    ///             -D /path/to/tracefile \
    ///             -l 5000ms
    /// ```
    ///
    /// The caller appends the target binary and its arguments; the trailing
    /// `--` (xctrace) separates them from xctrace's own flags.
    fn profiling_command(
        &self,
        template_name: &str,
        trace_filepath: &Path,
        time_limit: Option<usize>,
    ) -> Result<Command> {
        match self {
            XcodeInstruments::XcTrace => {
                let mut command = Command::new("xcrun");
                command.args(["xctrace", "record"]);
                command.args(["--template", template_name]);
                if let Some(limit_millis) = time_limit {
                    let limit_millis_str = format!("{}ms", limit_millis);
                    command.args(["--time-limit", &limit_millis_str]);
                }
                command.args(["--output", trace_filepath.to_str().unwrap()]);
                // redirect stdin & err to the user's terminal
                if let Some(tty) = get_tty()? {
                    command.args(["--target-stdin", &tty, "--target-stdout", &tty]);
                }
                command.args(["--launch", "--"]);
                Ok(command)
            }
            XcodeInstruments::InstrumentsBinary => {
                let mut command = Command::new("instruments");
                command.args(["-t", template_name]);
                command.arg("-D").arg(trace_filepath);
                if let Some(limit) = time_limit {
                    command.args(["-l", &limit.to_string()]);
                }
                Ok(command)
            }
        }
    }
}
/// Return the macOS version.
///
/// Runs `sw_vers -productVersion` (output like `11.2.3`) and parses the
/// result into a semver `Version` such as `Version{major: 11, minor: 2, patch: 3}`.
fn get_macos_version() -> Result<Version> {
    let output = Command::new("sw_vers").arg("-productVersion").output()?;
    if output.status.success() {
        semver_from_utf8(&output.stdout)
    } else {
        Err(anyhow!("macOS version cannot be determined"))
    }
}
/// Returns a semver given a slice of bytes
///
/// This function tries to construct a semver struct given a raw utf8 byte array
/// that may not contain a patch number, `"11.1"` is parsed as `"11.1.0"`.
fn semver_from_utf8(version: &[u8]) -> Result<Version> {
let to_semver = |version_string: &str| {
Version::parse(version_string).map_err(|error| {
anyhow!("cannot parse version: `{}`, because of {}", version_string, error)
})
};
let version_string = std::str::from_utf8(version)?;
match version_string.split('.').count() {
1 => to_semver(&format!("{}.0.0", version_string.trim())),
2 => to_semver(&format!("{}.0", version_string.trim())),
3 => to_semver(version_string.trim()),
_ => Err(anyhow!("invalid version: {}", version_string)),
}
}
/// Parse xctrace template listing.
///
/// Xctrace prints the list on either stderr (older versions) or stdout (recent).
/// In either case, the expected output is:
///
/// ```
/// == Standard Templates ==
/// Activity Monitor
/// Allocations
/// Animation Hitches
/// App Launch
/// Core Data
/// Counters
/// Energy Log
/// File Activity
/// Game Performance
/// Leaks
/// Logging
/// Metal System Trace
/// Network
/// SceneKit
/// SwiftUI
/// System Trace
/// Time Profiler
/// Zombies
///
/// == Custom Templates ==
/// MyTemplate
/// ```
fn parse_xctrace_template_list() -> Result<TemplateCatalog> {
    let Output { status, stdout, stderr } =
        Command::new("xcrun").args(["xctrace", "list", "templates"]).output()?;
    if !status.success() {
        return Err(anyhow!(
            "Could not list templates. Please check your Xcode Instruments installation."
        ));
    }
    // Some older versions of xctrace print results on stderr,
    // newer version print results on stdout.
    let output = if stdout.is_empty() { stderr } else { stdout };
    let templates_str = std::str::from_utf8(&output)?;
    // `by_ref` lets the same iterator be resumed below for the custom
    // section, after `take_while` has consumed through the separator.
    let mut templates_iter = templates_str.lines();
    let standard_templates = templates_iter
        .by_ref()
        .skip(1) // drop the "== Standard Templates ==" header
        .map(|line| line.trim())
        // stop at the blank line (or next "==" header) ending the section
        .take_while(|line| !line.starts_with('=') && !line.is_empty())
        .map(|line| line.into())
        .collect::<Vec<_>>();
    if standard_templates.is_empty() {
        return Err(anyhow!(
            "No available templates. Please check your Xcode Instruments installation."
        ));
    }
    // Remaining lines: skip the "== Custom Templates ==" header and any
    // blank lines, then collect the custom entries (may be empty).
    let custom_templates = templates_iter
        .map(|line| line.trim())
        .skip_while(|line| line.starts_with('=') || line.is_empty())
        .map(|line| line.into())
        .collect::<Vec<_>>();
    Ok(TemplateCatalog { standard_templates, custom_templates })
}
/// Parse /usr/bin/instruments template list.
///
/// The expected output on stdout is:
///
/// ```
/// Known Templates:
/// "Activity Monitor"
/// "Allocations"
/// "Animation Hitches"
/// "App Launch"
/// "Blank"
/// "Core Data"
/// "Counters"
/// "Energy Log"
/// "File Activity"
/// "Game Performance"
/// "Leaks"
/// "Logging"
/// "Metal System Trace"
/// "Network"
/// "SceneKit"
/// "SwiftUI"
/// "System Trace"
/// "Time Profiler"
/// "Zombies"
/// "~/Library/Application Support/Instruments/Templates/MyTemplate.tracetemplate"
/// ```
fn parse_instruments_template_list() -> Result<TemplateCatalog> {
    let Output { status, stdout, .. } =
        Command::new("instruments").args(["-s", "templates"]).output()?;
    if !status.success() {
        return Err(anyhow!(
            "Could not list templates. Please check your Xcode Instruments installation."
        ));
    }
    let templates_str = std::str::from_utf8(&stdout)?;
    // Built-in templates are the quoted names between the "Known
    // Templates:" header and the first "~/Library/..." path.
    let standard_templates = templates_str
        .lines()
        .skip(1) // drop the "Known Templates:" header
        .map(|line| line.trim().trim_matches('"'))
        .take_while(|line| !line.starts_with("~/Library/"))
        .map(|line| line.into())
        .collect::<Vec<_>>();
    if standard_templates.is_empty() {
        return Err(anyhow!(
            "No available templates. Please check your Xcode Instruments installation."
        ));
    }
    // Custom templates are listed as full .tracetemplate paths; reduce
    // each to its file stem (the template's display name).
    let custom_templates = templates_str
        .lines()
        .map(|line| line.trim().trim_matches('"'))
        .skip_while(|line| !line.starts_with("~/Library/"))
        .take_while(|line| !line.is_empty())
        .map(|line| Path::new(line).file_stem().unwrap().to_string_lossy())
        .map(|line| line.into())
        .collect::<Vec<_>>();
    Ok(TemplateCatalog { standard_templates, custom_templates })
}
/// Render the template catalog content as a string.
///
/// The returned string is similar to
///
/// ```text
/// Xcode Instruments templates:
///
/// built-in abbrev
/// --------------------------
/// Activity Monitor
/// Allocations (alloc)
/// Animation Hitches
/// App Launch
/// Core Data
/// Counters
/// Energy Log
/// File Activity (io)
/// Game Performance
/// Leaks
/// Logging
/// Metal System Trace
/// Network
/// SceneKit
/// SwiftUI
/// System Trace (sys)
/// Time Profiler (time)
/// Zombies
///
/// custom
/// --------------------------
/// MyTemplate
/// ```
pub fn render_template_catalog(catalog: &TemplateCatalog) -> String {
    let mut output: String = "Xcode Instruments templates:\n".into();
    // Column width is driven by the longest template name. Fall back to 0
    // for a completely empty catalog instead of panicking on `unwrap()`.
    let max_width = catalog
        .standard_templates
        .iter()
        .chain(catalog.custom_templates.iter())
        .map(|name| name.len())
        .max()
        .unwrap_or(0);
    // column headers
    write!(&mut output, "\n{:width$}abbrev", "built-in", width = max_width + 2).unwrap();
    write!(&mut output, "\n{:-<width$}", "", width = max_width + 8).unwrap();
    for name in &catalog.standard_templates {
        output.push('\n');
        // Append the short alias (e.g. "(time)") when one exists.
        if let Some(abbrv) = abbrev_name(name.trim_matches('"')) {
            write!(&mut output, "{:width$}({abbrev})", name, width = max_width + 2, abbrev = abbrv)
                .unwrap();
        } else {
            output.push_str(name);
        }
    }
    output.push('\n');
    // column headers
    write!(&mut output, "\n{:width$}", "custom", width = max_width + 2).unwrap();
    write!(&mut output, "\n{:-<width$}", "", width = max_width + 8).unwrap();
    for name in &catalog.custom_templates {
        output.push('\n');
        output.push_str(name);
    }
    output.push('\n');
    output
}
/// Compute the tracefile output path, creating the directory structure
/// in `target/instruments` if needed.
fn prepare_trace_filepath(
    target_filepath: &Path,
    template_name: &str,
    app_config: &AppConfig,
    workspace_root: &Path,
) -> Result<PathBuf> {
    // An explicit output path from the CLI wins; no directory is created
    // for it here.
    if let Some(ref path) = app_config.trace_filepath {
        return Ok(path.to_path_buf());
    }
    let trace_dir = workspace_root.join("target").join("instruments");
    if !trace_dir.exists() {
        fs::create_dir_all(&trace_dir)
            .map_err(|e| anyhow!("failed to create {:?}: {}", &trace_dir, e))?;
    }
    // Filename: <target>_<template>_<timestamp>.trace. The millisecond
    // component keeps rapid successive runs from colliding.
    let trace_filename = {
        let target_shortname = target_filepath
            .file_stem()
            .and_then(|s| s.to_str())
            .ok_or_else(|| anyhow!("invalid target path {:?}", target_filepath))?;
        let template_name = template_name.replace(' ', "-");
        let now = chrono::Local::now();
        format!("{}_{}_{}.trace", target_shortname, template_name, now.format("%F_%H%M%S-%3f"))
    };
    let trace_filepath = trace_dir.join(trace_filename);
    Ok(trace_filepath)
}
/// Return the complete template name, replacing abbreviation if provided.
///
/// Unknown names pass through unchanged.
fn resolve_template_name(template_name: &str) -> &str {
    const ABBREVIATIONS: [(&str, &str); 4] = [
        ("time", "Time Profiler"),
        ("alloc", "Allocations"),
        ("io", "File Activity"),
        ("sys", "System Trace"),
    ];
    ABBREVIATIONS
        .iter()
        .find(|(abbrev, _)| *abbrev == template_name)
        .map(|(_, full)| *full)
        .unwrap_or(template_name)
}
/// Return the template name abbreviation if available.
fn abbrev_name(template_name: &str) -> Option<&str> {
    let abbrev = match template_name {
        "Time Profiler" => "time",
        "Allocations" => "alloc",
        "File Activity" => "io",
        "System Trace" => "sys",
        _ => return None,
    };
    Some(abbrev)
}
/// Profile the target binary at `binary_filepath`, write results at
/// `trace_filepath` and returns its path.
pub(crate) fn profile_target(
    target_filepath: &Path,
    xctrace_tool: &XcodeInstruments,
    app_config: &AppConfig,
    workspace: &Workspace,
) -> Result<PathBuf> {
    // 1. Get the template name from config
    //    This borrows a ref to the String in Option<String>. The value can be
    //    unwrapped because in this version the template was checked earlier to
    //    be a `Some(x)`.
    let template_name = resolve_template_name(app_config.template_name.as_deref().unwrap());
    // 2. Compute the trace filepath and create its parent directory
    let workspace_root = workspace.root().to_path_buf();
    let trace_filepath = prepare_trace_filepath(
        target_filepath,
        template_name,
        app_config,
        workspace_root.as_path(),
    )?;
    // 3. Print current activity `Profiling target/debug/tries`
    {
        // Show the target path relative to the workspace when possible.
        let target_shortpath = target_filepath
            .strip_prefix(workspace_root)
            .unwrap_or(target_filepath)
            .to_string_lossy();
        let status_detail = format!("{} with template '{}'", target_shortpath, template_name);
        workspace.config().shell().status("Profiling", status_detail)?;
    }
    // 4. Build the tool invocation, append the target binary and any
    //    pass-through arguments, then run it to completion.
    let mut command =
        xctrace_tool.profiling_command(template_name, &trace_filepath, app_config.time_limit)?;
    command.arg(target_filepath);
    if !app_config.target_args.is_empty() {
        command.args(app_config.target_args.as_slice());
    }
    let output = command.output()?;
    if !output.status.success() {
        // Surface both streams: the tools report errors inconsistently.
        let stderr =
            String::from_utf8(output.stderr).unwrap_or_else(|_| "failed to capture stderr".into());
        let stdout =
            String::from_utf8(output.stdout).unwrap_or_else(|_| "failed to capture stdout".into());
        return Err(anyhow!("instruments errored: {} {}", stderr, stdout));
    }
    Ok(trace_filepath)
}
/// Get the tty of the current terminal session.
///
/// Asks `ps` for this process's controlling terminal; returns `Ok(None)`
/// when the process has no attached tty.
fn get_tty() -> Result<Option<String>> {
    let mut command = Command::new("ps");
    // `otty=` prints only the tty column, with no header line.
    command.arg("otty=").arg(std::process::id().to_string());
    Ok(String::from_utf8(command.output()?.stdout)?
        .split_whitespace()
        .next()
        .map(|tty| format!("/dev/{}", tty)))
}
#[cfg(test)]
mod test {
    use super::*;
    // Covers the zero-padding behaviour of `semver_from_utf8` for 1-,
    // 2- and 3-component version strings.
    #[test]
    fn semvers_can_be_parsed() {
        assert_eq!(semver_from_utf8(b"2.3.4").unwrap(), Version::parse("2.3.4").unwrap());
        assert_eq!(semver_from_utf8(b"11.1").unwrap(), Version::parse("11.1.0").unwrap());
        assert_eq!(semver_from_utf8(b"11").unwrap(), Version::parse("11.0.0").unwrap());
    }
}
| true
|
3bd77a117606eef9698f3148200954a60e726879
|
Rust
|
Tehforsch/amazingGrame
|
/src/render/draw.rs
|
UTF-8
| 727
| 2.53125
| 3
|
[] |
no_license
|
use piston_window;
use piston_window::ellipse::Ellipse;
use piston_window::{Context, Transformed};
use piston_window::types::Color;
use opengl_graphics::GlGraphics;
use ::point::Point;
/// Draw a filled circle centred on `pos` with the given `radius`.
pub fn circle(pos: Point, radius: f64, color: Color, context: Context, gl: &mut GlGraphics) {
    let diameter = 2.0 * radius;
    let shape = Ellipse {
        color,
        border: None,
        resolution: 16,
    };
    // The ellipse rect is anchored at its top-left corner, so shift the
    // transform from the centre to that corner.
    let transform = context.trans(pos.x - radius, pos.y - radius).transform;
    shape.draw([0.0, 0.0, diameter, diameter], &Default::default(), transform, gl);
}
/// Draw a 1px-wide line segment from `start` to `end`.
pub fn line(start: Point, end: Point, color: Color, context: Context, gl: &mut GlGraphics) {
    let coords = [start.x, start.y, end.x, end.y];
    piston_window::line(color, 1., coords, context.transform, gl);
}
| true
|
b2e308267ee7aa14375375edc45c9c8e8990dc6d
|
Rust
|
jgautier/monkey
|
/src/object.rs
|
UTF-8
| 7,348
| 3.21875
| 3
|
[] |
no_license
|
use crate::code::Instructions;
use crate::ast::BlockStatement;
use crate::ast::Node;
use std::rc::Rc;
use std::collections::HashMap;
use std::cell::RefCell;
// Keys usable in a Monkey hash literal: only the hashable object kinds.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum HashKey {
    Integer(i64),
    String(String),
    Boolean(bool)
}
impl HashKey {
    /// Human-readable rendering of the key, matching `Object::inspect`.
    pub fn inspect(&self) -> String {
        match self {
            HashKey::Integer(i) => i.to_string(),
            HashKey::Boolean(b) => b.to_string(),
            HashKey::String(string) => string.to_string(),
        }
    }
    /// Convert a hashable `Object` into a key; `None` for unhashable kinds.
    pub fn from_object(obj: &Object) -> Option<HashKey> {
        match obj {
            Object::String(val) => Some(HashKey::String(val.to_string())),
            Object::Integer(val) => Some(HashKey::Integer(*val)),
            Object::Boolean(val) => Some(HashKey::Boolean(*val)),
            _ => None,
        }
    }
}
// Runtime value produced by evaluating Monkey code.
#[derive(Debug)]
pub enum Object {
    Integer(i64),
    String(String),
    Boolean(bool),
    Null,
    // Wraps a `return` statement's value so evaluation can unwind.
    Return(Rc<Object>),
    // Tree-walking-evaluator function: parameter names, body AST, and the
    // environment captured at definition time.
    Function{ params: Vec<String>, body: BlockStatement, env: Env },
    // Native function exposed to Monkey code.
    BuiltIn(fn(Vec<Rc<Object>>) -> Rc<Object>),
    Array(Vec<Rc<Object>>),
    Hash(HashMap<HashKey, Rc<Object>>),
    Error(String),
    // Bytecode function: instructions plus two counts — NOTE(review):
    // presumably locals and parameters; confirm against the compiler.
    CompiledFunction(Instructions, usize, usize),
    // A compiled function together with its captured free variables.
    Closure(Rc<Object>, Vec<Rc<Object>>)
}
impl Object {
pub fn object_type(&self) -> String {
match self {
Object::Integer(_) => {
"INTEGER".to_string()
},
Object::Boolean(_) => {
"BOOLEAN".to_string()
},
Object::Null => {
"NULL".to_string()
},
Object::Return(_) => {
"RETURN".to_string()
},
Object::Error(_) => {
"ERROR".to_string()
},
Object::Function{..} => {
"FUNCTION".to_string()
},
Object::String(_) => {
"STRING".to_string()
},
Object::BuiltIn(_) => {
"BUILTIN".to_string()
},
Object::Array(_) => {
"ARRAY".to_string()
},
Object::Hash(_) => {
"HASH".to_string()
},
Object::CompiledFunction(_,_,_) => {
"COMPILED_FUNCTION".to_string()
},
Object::Closure(_, _) => {
"CLOSURE".to_string()
}
}
}
pub fn inspect(&self) -> String {
match self {
Object::Integer(i) => {
format!("{}", i)
},
Object::Boolean(b) => {
format!("{}", b)
},
Object::Null => {
"null".to_string()
},
Object::Return(obj) => {
obj.inspect()
},
Object::Error(err_str) => {
err_str.to_string()
},
Object::String(string) => {
string.to_string()
}
Object::Function{ params, body, ..} => {
let params = params.join(",");
let strings = vec![
format!("fn({}) {{", params), body.to_string(), "}}".to_string()
];
strings.join("\n")
},
Object::BuiltIn(_) => {
"builtin function".to_string()
}
Object::Array(arr) => {
let els = arr.iter().map(|el| el.inspect()).collect::<Vec<String>>().join(",");
format!("[{}]", els)
}
Object::Hash(hash) => {
let pairs = hash.iter().map(|pair| format!("{}: {}", pair.0.inspect(), pair.1.inspect())).collect::<Vec<String>>().join(", ");
format!("{{{}}}", pairs)
},
Object::CompiledFunction(cf, _, _) => {
format!("{:?}", cf)
},
Object::Closure(cf, free) => {
format!("{:?} {:?}", cf, free)
}
}
}
}
// Lexical scope: local bindings plus an optional enclosing scope.
#[derive(Debug)]
pub struct Environment {
    // Bindings declared directly in this scope.
    store: HashMap<String, Rc<Object>>,
    // Enclosing scope searched when a name is not found locally.
    outer: Option<Env>
}
impl Environment {
    /// Create an environment, optionally chained to an enclosing scope.
    pub fn new(outer: Option<Env>) -> Self {
        Self { store: HashMap::new(), outer }
    }
    /// Look up a binding, falling back to the enclosing scope chain.
    pub fn get(&self, var_name: String) -> Option<Rc<Object>> {
        self.store
            .get(&var_name)
            .cloned()
            .or_else(|| self.outer.as_ref().and_then(|env| env.borrow().get(var_name)))
    }
    /// Bind (or rebind) a name in this scope.
    pub fn set(&mut self, var_name: String, value: Rc<Object>) {
        self.store.insert(var_name, value);
    }
}
// Shared, mutable environment handle used throughout the evaluator.
pub type Env = Rc<RefCell<Environment>>;
/// Build the `(name, builtin)` pairs for Monkey's built-in functions.
///
/// Arity checks are exact (`!= n`) so a call with too few arguments
/// returns an `Object::Error` instead of panicking on `args[i]`.
pub fn get_built_in_vec() -> Vec<(String, Rc<Object>)> {
    vec![
        // len(<string|array>) -> Integer
        ("len".to_string(), Rc::new(Object::BuiltIn(|args: Vec<Rc<Object>>| -> Rc<Object> {
            if args.len() != 1 {
                return Rc::new(Object::Error(format!("Expected 1 argument got {}", args.len())));
            }
            match &*args[0] {
                Object::String(string) => {
                    Rc::new(Object::Integer(string.len() as i64))
                },
                Object::Array(arr) => {
                    Rc::new(Object::Integer(arr.len() as i64))
                },
                _=> {
                    Rc::new(Object::Error(format!("Expected a String or Array got a {}", args[0].object_type())))
                }
            }
        }))),
        // first(<array>) -> first element, or Null for an empty array
        ("first".to_string(), Rc::new(Object::BuiltIn(|args: Vec<Rc<Object>>| -> Rc<Object> {
            if args.len() != 1 {
                return Rc::new(Object::Error(format!("Expected 1 argument got {}", args.len())))
            }
            match &*args[0] {
                Object::Array(arr) => {
                    // Previously `arr[0]`, which panicked on an empty
                    // array; mirror `last` and return Null instead.
                    match arr.first() {
                        Some(obj) => obj.clone(),
                        None => Rc::new(Object::Null),
                    }
                },
                _=> {
                    Rc::new(Object::Error(format!("Expected a Array got a {}", args[0].object_type())))
                }
            }
        }))),
        // last(<array>) -> last element, or Null for an empty array
        ("last".to_string(), Rc::new(Object::BuiltIn(|args: Vec<Rc<Object>>| -> Rc<Object> {
            if args.len() != 1 {
                return Rc::new(Object::Error(format!("Expected 1 argument got {}", args.len())));
            }
            match &*args[0] {
                Object::Array(arr) => {
                    if let Some(obj) = arr.last() {
                        return obj.clone()
                    }
                    Rc::new(Object::Null)
                },
                _=> {
                    Rc::new(Object::Error(format!("Expected a Array got a {}", args[0].object_type())))
                }
            }
        }))),
        // rest(<array>) -> array without its first element, or Null when empty
        ("rest".to_string(), Rc::new(Object::BuiltIn(|args: Vec<Rc<Object>>| -> Rc<Object> {
            if args.len() != 1 {
                return Rc::new(Object::Error(format!("Expected 1 argument got {}", args.len())));
            }
            match &*args[0] {
                Object::Array(arr) => {
                    if let Some((_, elements)) = arr.split_first() {
                        Rc::new(Object::Array(elements.to_vec()))
                    } else {
                        Rc::new(Object::Null)
                    }
                },
                _=> {
                    Rc::new(Object::Error(format!("Expected a Array got a {}", args[0].object_type())))
                }
            }
        }))),
        // push(<array>, <value>) -> new array with value appended (persistent)
        ("push".to_string(), Rc::new(Object::BuiltIn(|args: Vec<Rc<Object>>| -> Rc<Object> {
            if args.len() != 2 {
                return Rc::new(Object::Error(format!("Expected 2 argument got {}", args.len())))
            }
            match &*args[0] {
                Object::Array(arr) => {
                    let mut new_arr = arr.to_vec();
                    new_arr.push(args[1].clone());
                    Rc::new(Object::Array(new_arr))
                },
                _=> {
                    Rc::new(Object::Error(format!("Expected a Array got a {}", args[0].object_type())))
                }
            }
        }))),
        // puts(<values...>) -> prints each argument, returns Null
        ("puts".to_string(), Rc::new(Object::BuiltIn(|args: Vec<Rc<Object>>| -> Rc<Object> {
            for arg in args {
                println!("{}", arg.inspect())
            }
            Rc::new(Object::Null)
        })))
    ]
}
/// Build a name -> builtin lookup table from the canonical vector.
pub fn get_built_in_map() -> HashMap<String, Rc<Object>> {
    get_built_in_vec().into_iter().collect()
}
| true
|
7f0d7f44ad5bd61f9645b2acfe3cb1b24f7065a2
|
Rust
|
MoAlyousef/vial
|
/examples/filters.rs
|
UTF-8
| 1,442
| 3.046875
| 3
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
use {
std::sync::atomic::{AtomicUsize, Ordering},
vial::prelude::*,
};
// Route table: the module-level `#![filter(count)]` runs `count` before
// every route below; per-route `#[filter(...)]` attributes stack on top.
routes! {
    // `count` will run before all routes in this block
    #![filter(count)]
    GET "/" => |_| "Hey there!";
    GET "/hits" => hits;
    // `count` will run again when /double is visited
    #[filter(count)]
    GET "/double" => double;
    // `echo` will be called when /echo is visited, as well as `count`
    // because it's a module-level filter
    #[filter(echo)]
    GET "/echo" => |_| "Is there an echo in here?";
}
/// GET /hits — report the current request count.
fn hits(req: Request) -> impl Responder {
    let count = req.counter().count();
    format!("Hits: {}", count)
}
// GET /double — its route declaration applies the `count` filter twice.
fn double(_req: Request) -> impl Responder {
    "Double trouble."
}
// Filter: dump the incoming request for debugging. Returning `None`
// lets the request continue on to the route handler.
fn echo(req: &mut Request) -> Option<Response> {
    println!("{:#?}", req);
    None
}
// Filter: bump the shared hit counter, never short-circuits the request.
fn count(req: &mut Request) -> Option<Response> {
    req.counter().incr();
    None
}
// Thread-safe hit counter stored in vial's shared application state.
#[derive(Debug, Default)]
struct Counter(AtomicUsize);
impl Counter {
    /// Current count rendered as a decimal string.
    fn count(&self) -> String {
        let value = self.0.load(Ordering::Relaxed);
        value.to_string()
    }
    /// Atomically bump the counter by one.
    fn incr(&self) {
        self.0.fetch_add(1, Ordering::Relaxed);
    }
}
// We do this purely for the convenience of using `req.counter()`
// instead of `req.state::<Counter>()`.
trait WithCounter {
    // Borrow the shared `Counter` from application state.
    fn counter(&self) -> &Counter;
}
// Convenience accessor so handlers and filters can write `req.counter()`.
impl WithCounter for Request {
    fn counter(&self) -> &Counter {
        self.state::<Counter>()
    }
}
fn main() {
    // Register the shared counter state, then start the server.
    use_state!(Counter::default());
    run!().unwrap();
}
| true
|
863623ff96d1cd69b973f9999f7a9e638b94ec4c
|
Rust
|
redmar/aoc2020
|
/day08/src/main.rs
|
UTF-8
| 3,262
| 3.203125
| 3
|
[] |
no_license
|
// One handheld boot-code instruction with its signed operand.
#[derive(Debug, Copy, Clone)]
enum Instruction {
    Nop(i32), // no-op; operand ignored (part 2 may flip it to Jmp)
    Acc(i32), // add the operand to the accumulator
    Jmp(i32), // jump relative to the current instruction
}
// Which puzzle part's termination rule `run_bootcode` applies.
#[derive(Debug, PartialEq)]
enum Mode {
    Part1, // stop (returning acc) on the first repeated instruction
    Part2, // repeat => None; success is executing the last instruction
}
impl Instruction {
    /// Parse a line like `"acc +3"`; panics on malformed input.
    fn parse_instruction(line: &str) -> Instruction {
        let mut parts = line.split(' ');
        let mnemonic = parts.next();
        let operand = parts.next().unwrap().parse::<i32>().unwrap();
        match mnemonic {
            Some("nop") => Instruction::Nop(operand),
            Some("acc") => Instruction::Acc(operand),
            Some("jmp") => Instruction::Jmp(operand),
            Some(_) | None => panic!("unhandled instruction"),
        }
    }
}
/// Execute `program` until the `mode`'s termination condition is met.
///
/// * `Part1`: returns `Some(acc)` as soon as any instruction is about to
///   run a second time (infinite-loop detection).
/// * `Part2`: a repeated instruction means the program loops forever, so
///   `None` is returned; executing the final instruction yields `Some(acc)`.
///
/// `overlay_instruction` substitutes a single instruction at the given
/// index without mutating `program` (part 2's nop/jmp swap).
// NOTE(review): `&[Instruction]` would be the more idiomatic parameter type.
fn run_bootcode(
    program: &Vec<Instruction>,
    mode: Mode,
    overlay_instruction: Option<(usize, Instruction)>,
) -> Option<i32> {
    let mut ic: i32 = 0;   // instruction counter (index into `program`)
    let mut acc = 0;       // the accumulator
    let mut offset: i32 = 0; // pending jump distance from the previous step
    // let mut prev_ic = 0;
    // One visited flag per instruction, for loop detection.
    let mut visited: Vec<bool> = Vec::new();
    for _ in 0..program.len() {
        visited.push(false);
    }
    loop {
        // prev_ic = ic;
        // Advance `ic` by `offset`, wrapping jumps that run off either
        // end of the program.
        // NOTE(review): the wrap arithmetic looks off by one relative to
        // a plain modulo — confirm it is never exercised by real inputs,
        // where jumps appear to stay in bounds.
        let offset_until_wrap = (program.len() as i32 - 1) - ic;
        if offset > offset_until_wrap {
            ic = 0 + offset - offset_until_wrap;
        } else if ic + offset < 0 {
            ic = (program.len() as i32 - 1) + (ic + offset);
        } else {
            ic += offset;
        }
        // About to run an already-executed instruction: infinite loop.
        if visited[ic as usize] {
            if mode == Mode::Part1 {
                // println!("visited {} twice! acc = {}", prev_ic, acc);
                return Some(acc);
            } else {
                return None;
            }
        } else {
            visited[ic as usize] = true;
        }
        // Use the overlay instruction at its index, if one was supplied.
        let current_instruction =
            if let Some((overlay_idx, overlay_instruction)) = overlay_instruction {
                if overlay_idx == (ic as usize) {
                    overlay_instruction
                } else {
                    program[ic as usize]
                }
            } else {
                program[ic as usize]
            };
        // println!("{} = {:?} \t| {}", ic, current_instruction, acc);
        match current_instruction {
            Instruction::Nop(_) => offset = 1,
            Instruction::Acc(inc) => {
                acc += inc;
                offset = 1
            }
            Instruction::Jmp(inc) => offset = inc,
        };
        // Part 2 success: the final instruction has just executed.
        if mode == Mode::Part2 && ic as usize == program.len() - 1 {
            // println!("last instruction executed! acc = {}", acc);
            return Some(acc);
        }
    }
}
fn main() {
    let input = include_str!("../input.txt");
    let program = input
        .lines()
        .map(Instruction::parse_instruction)
        .collect::<Vec<Instruction>>();
    // Part 1: accumulator value just before any instruction repeats.
    println!("part1 = {:?}", run_bootcode(&program, Mode::Part1, None));
    // Part 2: try flipping each single nop<->jmp via an overlay; the
    // first variant that runs to completion is the answer.
    for (idx, mutate_instruction) in program.iter().enumerate() {
        let overlay_instruction = match mutate_instruction {
            Instruction::Nop(x) => Some((idx, Instruction::Jmp(*x))),
            Instruction::Acc(_inc) => None, // acc instructions are never swapped
            Instruction::Jmp(x) => Some((idx, Instruction::Nop(*x))),
        };
        if let Some(acc) = run_bootcode(&program, Mode::Part2, overlay_instruction) {
            println!("part2 = {:?}", acc);
            break;
        }
    }
}
| true
|
07fe6be1903916041ffcb4271800f1a3e6011a7f
|
Rust
|
mast3rsoft/vdvault
|
/src/main.rs
|
UTF-8
| 3,271
| 3.171875
| 3
|
[] |
no_license
|
use strsim::levenshtein;
use std::fs::{read_dir, DirEntry};
use std::path::PathBuf;
use regex::*;
use std::process::*;
use rustyline::Editor;
use ansi_term::Color;
fn find_dir(to_match: &str, media: &str) -> Option<DirEntry> {
let media_folder = read_dir(media).expect("media");
let mut top_path = PathBuf::new();
let mut top_levenstein = 1000;
let mut top_entry = None;
for direntry in media_folder {
let entry = direntry.expect("Baf dir entry");
let dir_name = entry.file_name();
let dir_name = dir_name.to_str().unwrap();
let regex = format!(".*{}.*",to_match );
println!("Regex is {}", regex);
let dir_lebenstein = levenshtein(to_match, dir_name );
let mut dir_regex = RegexBuilder::new(regex.as_str());
dir_regex.ignore_whitespace(true);
dir_regex.case_insensitive(true);
let dir_regex = dir_regex.build().unwrap();
println!("Lebenstein for {} is {:?}", dir_lebenstein, entry.path() );
if dir_regex.is_match(dir_name) {
println!("Regex");
top_path = entry.path();
top_entry = Some(entry);
return top_entry
} else if dir_lebenstein < top_levenstein {
println!("Higher tan before {:#?} ", entry.file_name());
top_levenstein = dir_lebenstein;
top_path = entry.path();
top_entry = Some(entry);
}
}
let entri = &top_entry;
if entri.as_ref().unwrap().file_type().unwrap().is_dir() {
println!("Found directory {:#?}. Please be more specific... ", entri.as_ref().unwrap().file_name());
let mut more_specific_path = String::new();
let mut rl = Editor::<()>::new();
let readline = rl.readline(">>");
match readline {
Ok(line) => {
more_specific_path = line
},
Err(_) => {
println!("Bye");
std::process::exit(1);
}
}
let mut specicficPathByf = PathBuf::new();
specicficPathByf.push(entri.as_ref().unwrap().path());
println!("{:?}", specicficPathByf);
return find_dir(&more_specific_path,specicficPathByf.to_str().unwrap());
}
top_entry
}
/// Entry point: join the CLI args into a search query, fuzzy-find the
/// matching entry under the media root, and open it in VLC.
fn main() {
    let cli_args: Vec<String> = std::env::args().collect();
    if cli_args.len() == 1 {
        println!("{}", Color::Red.bold().paint("Not enough args"));
        std::process::exit(1);
    }
    // Everything after the program name forms the search query.
    let query = cli_args[1..].join(" ");
    println!("Hey thr args {:?}", query);
    // Exit with an error instead of panicking when nothing matches
    // (previously `unwrap()` on a possible `None`).
    let entry = match find_dir(query.as_str(), "/Volumes/media/Videos/") {
        Some(entry) => entry,
        None => {
            println!("{}", Color::Red.bold().paint("No match found"));
            std::process::exit(1);
        }
    };
    println!("The best match is {:#?}", entry.file_name());
    println!("Opening...");
    let entry_path = entry.path();
    let entry_path_str = entry_path
        .to_str()
        .expect("entry path is not valid UTF-8");
    std::process::Command::new("open")
        .arg("-a")
        .arg("VLC")
        .arg(entry_path_str)
        .spawn()
        .expect("Failure launching vlc");
}
| true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.