blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 5
140
| path
stringlengths 5
183
| src_encoding
stringclasses 6
values | length_bytes
int64 12
5.32M
| score
float64 2.52
4.94
| int_score
int64 3
5
| detected_licenses
listlengths 0
47
| license_type
stringclasses 2
values | text
stringlengths 12
5.32M
| download_success
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|
85b6b5b591fb8663fbb3aae9b3056207e9f98111
|
Rust
|
matklad/rust-analyzer
|
/crates/ide_completion/src/completions/fn_param.rs
|
UTF-8
| 3,883
| 3.03125
| 3
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
//! See `complete_fn_param`.
use rustc_hash::FxHashMap;
use syntax::{
ast::{self, ModuleItemOwner},
match_ast, AstNode,
};
use crate::{CompletionContext, CompletionItem, CompletionItemKind, CompletionKind, Completions};
/// Complete repeated parameters, both name and type. For example, if all
/// functions in a file have a `spam: &mut Spam` parameter, a completion with
/// `spam: &mut Spam` insert text/label and `spam` lookup string will be
/// suggested.
pub(crate) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext) {
    // Only fire when the cursor sits inside a parameter list.
    if !ctx.is_param {
        return;
    }
    // Map from full parameter text (`name: Type`) to its lookup string (just the pattern).
    let mut params = FxHashMap::default();
    // The function whose parameter list is being completed; it is skipped below
    // so we never suggest the (incomplete) parameter back to itself.
    let me = ctx.token.ancestors().find_map(ast::Fn::cast);
    // Records every parameter of `func` into `params`.
    let mut process_fn = |func: ast::Fn| {
        if Some(&func) == me.as_ref() {
            return;
        }
        func.param_list().into_iter().flat_map(|it| it.params()).for_each(|param| {
            if let Some(pat) = param.pat() {
                let text = param.syntax().text().to_string();
                let lookup = pat.syntax().text().to_string();
                // First occurrence wins; identical params across functions collapse.
                params.entry(text).or_insert(lookup);
            }
        });
    };
    // Walk up from the cursor and harvest sibling functions at every level:
    // source file, module item lists, and trait/impl assoc-item lists.
    for node in ctx.token.ancestors() {
        match_ast! {
            match node {
                ast::SourceFile(it) => it.items().filter_map(|item| match item {
                    ast::Item::Fn(it) => Some(it),
                    _ => None,
                }).for_each(&mut process_fn),
                ast::ItemList(it) => it.items().filter_map(|item| match item {
                    ast::Item::Fn(it) => Some(it),
                    _ => None,
                }).for_each(&mut process_fn),
                ast::AssocItemList(it) => it.assoc_items().filter_map(|item| match item {
                    ast::AssocItem::Fn(it) => Some(it),
                    _ => None,
                }).for_each(&mut process_fn),
                _ => continue,
            }
        };
    }
    // Emit one completion per distinct parameter text.
    params.into_iter().for_each(|(label, lookup)| {
        let mut item = CompletionItem::new(CompletionKind::Magic, ctx.source_range(), label);
        item.kind(CompletionItemKind::Binding).lookup_by(lookup);
        item.add_to(acc)
    });
}
#[cfg(test)]
mod tests {
    use expect_test::{expect, Expect};
    use crate::{tests::filtered_completion_list, CompletionKind};
    // Completes `ra_fixture` at the `$0` cursor marker and compares the
    // `Magic`-kind completion list against the expected snapshot.
    fn check(ra_fixture: &str, expect: Expect) {
        let actual = filtered_completion_list(ra_fixture, CompletionKind::Magic);
        expect.assert_eq(&actual);
    }
    // A parameter repeated in sibling functions is offered for the last param slot.
    #[test]
    fn test_param_completion_last_param() {
        check(
            r#"
fn foo(file_id: FileId) {}
fn bar(file_id: FileId) {}
fn baz(file$0) {}
"#,
            expect![[r#"
bn file_id: FileId
"#]],
        );
    }
    // Completion also works for a non-final parameter position.
    #[test]
    fn test_param_completion_nth_param() {
        check(
            r#"
fn foo(file_id: FileId) {}
fn baz(file$0, x: i32) {}
"#,
            expect![[r#"
bn file_id: FileId
"#]],
        );
    }
    // Params of sibling trait methods (assoc items) are harvested too.
    #[test]
    fn test_param_completion_trait_param() {
        check(
            r#"
pub(crate) trait SourceRoot {
pub fn contains(&self, file_id: FileId) -> bool;
pub fn module_map(&self) -> &ModuleMap;
pub fn lines(&self, file_id: FileId) -> &LineIndex;
pub fn syntax(&self, file$0)
}
"#,
            expect![[r#"
bn file_id: FileId
"#]],
        );
    }
    // An inner fn sees params of its enclosing function's siblings/ancestors.
    #[test]
    fn completes_param_in_inner_function() {
        check(
            r#"
fn outer(text: String) {
fn inner($0)
}
"#,
            expect![[r#"
bn text: String
"#]],
        )
    }
    // Non-identifier patterns (destructuring) are suggested verbatim.
    #[test]
    fn completes_non_ident_pat_param() {
        check(
            r#"
struct Bar { bar: u32 }
fn foo(Bar { bar }: Bar) {}
fn foo2($0) {}
"#,
            expect![[r#"
bn Bar { bar }: Bar
"#]],
        )
    }
}
| true
|
541cf0e5e3cb5a171b8227c74458304da2bd3bdb
|
Rust
|
oday0311/imkey-core
|
/api/src/filecoin_address.rs
|
UTF-8
| 906
| 2.546875
| 3
|
[
"Apache-2.0"
] |
permissive
|
use crate::api::{AddressParam, AddressResult};
use crate::error_handling::Result;
use crate::message_handler::encode_message;
use coin_filecoin::address::FilecoinAddress;
use prost::Message;
/// Derives a Filecoin address for the requested derivation path and network,
/// returning it as a protobuf-encoded `AddressResult` message.
pub fn get_address(param: &AddressParam) -> Result<Vec<u8>> {
    let derived = FilecoinAddress::get_address(param.path.as_ref(), param.network.as_ref())?;
    encode_message(AddressResult {
        path: param.path.to_owned(),
        chain_type: param.chain_type.to_string(),
        address: derived,
    })
}
/// Displays the Filecoin address (via `FilecoinAddress::display_address`) and
/// returns it as a protobuf-encoded `AddressResult` message.
pub fn display_filecoin_address(param: &AddressParam) -> Result<Vec<u8>> {
    let shown = FilecoinAddress::display_address(param.path.as_ref(), param.network.as_ref())?;
    let result = AddressResult {
        path: param.path.to_owned(),
        chain_type: param.chain_type.to_string(),
        address: shown,
    };
    encode_message(result)
}
| true
|
5d9d9131d02473acb5556378047ab922c88a8012
|
Rust
|
NYStud/hedr
|
/src/file_sel.rs
|
UTF-8
| 8,433
| 2.921875
| 3
|
[
"MIT"
] |
permissive
|
use std::fs;
use std::io;
use std::path::PathBuf;
use std::cmp::Ordering;
use std::ffi::{OsString, OsStr};
use super::screen::*;
use super::term::*;
use super::editor;
use super::editor::Editor;
/// A single directory entry shown in the file selector.
#[derive(Eq)]
struct FileInfo {
    pub name : OsString, // entry name, no directory component
    pub is_dir : bool,   // true when the entry is a directory
}
/// Sort order: directories first, then by name within each kind.
impl Ord for FileInfo {
    fn cmp(&self, other: &FileInfo) -> Ordering {
        // `false < true`, so comparing the flags in reverse puts directories
        // first; same-kind entries fall back to a name comparison.
        other.is_dir
            .cmp(&self.is_dir)
            .then_with(|| self.name.cmp(&other.name))
    }
}
impl PartialOrd for FileInfo {
    /// Delegates to the total order defined by `Ord`.
    fn partial_cmp(&self, other : &FileInfo) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl PartialEq for FileInfo {
    /// Two entries are equal when both name and kind match.
    ///
    /// Comparing the name alone would break the `Ord` contract: `cmp` orders a
    /// directory before a plain file of the same name (`Less`/`Greater`), so
    /// such a pair must not compare equal here (`Eq` is derived from this).
    fn eq(&self, other : &FileInfo) -> bool {
        self.name == other.name && self.is_dir == other.is_dir
    }
}
/// Modal file-selection screen drawn on top of the editor.
pub struct FileSel<'a, 'b : 'a> {
    editor : &'a mut Editor<'b>,          // editor whose screen/terminal we draw on
    quit : bool,                          // set when the dialog should close
    selected_filename : Option<OsString>, // result handed back by `select_file`
    cur_dir : Option<PathBuf>,            // directory currently listed
    files : Vec<FileInfo>,                // sorted entries of `cur_dir`
    sel_index : usize,                    // index of the highlighted entry
    top_index : usize,                    // first visible entry (scroll offset)
}
impl<'a, 'b> FileSel<'a, 'b> {
pub fn new(editor : &'a mut Editor<'b>) -> FileSel<'a, 'b> {
FileSel {
editor : editor,
quit : false,
selected_filename : None,
cur_dir : None,
files : vec![],
sel_index : 0,
top_index : 0,
}
}
fn draw_header(&mut self) {
self.editor.screen.move_cursor(1, 1);
set_color(Color::FGBlack, Color::BGGray);
print!(" Select File");
clear_eol();
self.editor.screen.move_cursor(self.editor.screen.w - 11, 1);
print!(" hedx v0.1");
clear_eol();
self.editor.screen.move_cursor(1, editor::HEADER_LINES);
reset_color();
clear_eol();
}
fn draw_footer(&mut self) {
reset_color();
let w = editor::SHORTCUT_SPACING;
let h = self.editor.screen.h;
// key shortcuts
self.editor.void_key_help(1 + 0*w, h-1);
self.editor.draw_key_help(1 + 0*w, h-0, "^C", "Cancel");
self.editor.void_key_help(1 + 1*w, h-1);
self.editor.void_key_help(1 + 1*w, h-0);
// message
self.editor.screen.move_cursor(1, self.editor.screen.h - editor::FOOTER_LINES + 1);
if let Some(ref msg) = self.editor.screen.msg {
set_color(Color::FGBlack, Color::BGGray);
print!(" {}", msg);
}
clear_eol();
}
fn draw_main_screen(&mut self) {
self.draw_header();
self.draw_footer();
let mut max_filename_len = 0;
for fi in &self.files {
let count = fi.name.to_string_lossy().chars().count();
if max_filename_len < count {
max_filename_len = count;
}
}
if self.editor.screen.w < 20 {
return;
}
if max_filename_len > (self.editor.screen.w - 20) as usize {
max_filename_len = (self.editor.screen.w - 20) as usize;
}
let mut line = editor::HEADER_LINES + 1;
let mut file_index = self.top_index;
reset_color();
while file_index < self.files.len() && line <= self.editor.screen.h - editor::FOOTER_LINES {
self.editor.screen.move_cursor(1, line);
if file_index == self.sel_index {
set_color(Color::FGBlack, Color::BGGray);
}
let fi = &self.files[file_index];
let len = fi.name.to_string_lossy().chars().count();
for (i, c) in fi.name.to_string_lossy().char_indices() {
if len > max_filename_len && i > max_filename_len-3 {
print!("...");
break;
}
print!("{}", c);
}
for _ in len..max_filename_len {
print!(" ");
}
if fi.is_dir {
print!(" (dir)");
} else {
print!(" {:12} bytes", 0);
}
reset_color();
clear_eol();
file_index += 1;
line += 1;
}
while file_index < self.files.len() && line <= self.editor.screen.h - editor::FOOTER_LINES {
self.editor.screen.move_cursor(1, line);
clear_eol();
}
flush_screen();
}
fn process_input(&mut self) {
let key = self.editor.read_key();
if key == ctrl_key!('c') {
self.quit = true;
return;
}
if key == 13 {
self.confirm_selection();
return;
}
self.editor.screen.msg_was_set = false;
if key == KEY_ARROW_UP {
self.move_sel_up();
} else if key == KEY_ARROW_DOWN {
self.move_sel_down();
}
if ! self.editor.screen.msg_was_set {
self.editor.clear_msg();
}
}
fn confirm_selection(&mut self) {
if self.sel_index >= self.files.len() {
return;
}
let file = self.files.remove(self.sel_index);
if file.is_dir {
if let Err(e) = self.change_dir(&file.name) {
self.editor.show_msg(format!("Error listing directory: {}", e));
}
reset_color();
clear_screen();
self.editor.screen.redraw_needed = true;
} else {
self.selected_filename = if let Some(ref mut dir) = self.cur_dir.take() {
dir.push(file.name.clone());
match dir.canonicalize() {
Ok(dir) => Some(dir.as_os_str().to_os_string()),
Err(_) => Some(dir.as_os_str().to_os_string())
}
} else {
Some(file.name.clone())
};
self.quit = true;
}
}
fn ensure_sel_visible(&mut self) {
let n_page_lines = (self.editor.screen.h - editor::BORDER_LINES) as usize;
if self.sel_index < self.top_index || self.sel_index >= self.top_index + n_page_lines {
if self.sel_index >= n_page_lines/2 {
self.top_index = self.sel_index - n_page_lines/2;
} else {
self.top_index = 0;
}
self.editor.screen.redraw_needed = true;
}
}
fn move_sel_up(&mut self) {
if self.sel_index > 0 {
self.sel_index -= 1;
self.ensure_sel_visible();
}
self.editor.screen.redraw_needed = true;
}
fn move_sel_down(&mut self) {
if self.sel_index+1 < self.files.len() {
self.sel_index += 1;
self.ensure_sel_visible();
}
self.editor.screen.redraw_needed = true;
}
fn change_dir(&mut self, dir : &OsStr) -> io::Result<()> {
let mut path = PathBuf::new();
if let Some(ref root) = self.cur_dir {
path.push(root);
}
path.push(dir);
let mut files = vec![];
files.push(FileInfo {
name : OsStr::new("..").to_os_string(),
is_dir : true,
});
let list = fs::read_dir(&path)?;
for file in list {
let file = file?;
let fi = FileInfo {
name : file.file_name(),
is_dir : file.path().is_dir(),
};
files.push(fi);
}
files.sort();
self.files = files;
self.sel_index = 0;
self.top_index = 0;
self.cur_dir = Some(path);
Ok(())
}
pub fn select_file(&mut self, root_dir : &OsStr) -> Option<OsString> {
reset_color();
clear_screen();
flush_screen();
if let Err(e) = self.change_dir(root_dir) {
self.editor.show_msg(format!("Error reading directory: {}", e));
} else {
self.editor.clear_msg();
}
self.editor.screen.redraw_needed = true;
while ! self.quit && ! self.editor.quit {
if self.editor.screen.redraw_needed {
self.draw_main_screen();
}
self.process_input();
}
reset_color();
clear_screen();
flush_screen();
self.editor.screen.redraw_needed = true;
self.selected_filename.take()
}
}
| true
|
37ffff8cb085b4142e431eee7088792503c966f2
|
Rust
|
IThawk/rust-project
|
/rust-master/src/test/ui/numbers-arithmetic/float.rs
|
UTF-8
| 244
| 2.578125
| 3
|
[
"MIT",
"LicenseRef-scancode-other-permissive",
"Apache-2.0",
"BSD-3-Clause",
"BSD-2-Clause",
"NCSA"
] |
permissive
|
// run-pass
// Exercises f64 arithmetic and every comparison operator; as a run-pass test
// it only needs to compile and run without trapping.
pub fn main() {
    let pi = 3.1415927f64;
    println!("{}", -pi * (pi + 2.0 / pi) - pi * 5.0);
    // The disjunction is true (pi < 10.0 holds), so "yes" is always printed.
    if pi == 5.0 || pi < 10.0 || pi <= 2.0 || pi != 22.0 / 7.0 || pi >= 10.0
        || pi > 1.0 {
        println!("yes");
    }
}
| true
|
a7cdccde0c3d21bd7bac36fd29902b24c0a268b1
|
Rust
|
ytakhs/leetcode-rs
|
/examples/archives/sum_of_root_to_leaf_binary_numbers.rs
|
UTF-8
| 2,443
| 3.453125
| 3
|
[] |
no_license
|
/// LeetCode-style binary tree node with shared, interior-mutable children.
#[derive(Debug, PartialEq, Eq)]
pub struct TreeNode {
    pub val: i32,
    pub left: Option<Rc<RefCell<TreeNode>>>,
    pub right: Option<Rc<RefCell<TreeNode>>>,
}
impl TreeNode {
    /// Creates a leaf node holding `val`.
    #[inline]
    pub fn new(val: i32) -> Self {
        TreeNode {
            val,
            left: None,
            right: None,
        }
    }
}
use std::cell::RefCell;
use std::rc::Rc;
// Namespace struct matching the LeetCode submission signature.
struct Solution {}
impl Solution {
    /// Sums every root-to-leaf path interpreted as a binary number.
    ///
    /// Each node stores one bit; walking root to leaf builds the number
    /// MSB-first, and the result is the sum over all leaves.
    pub fn sum_root_to_leaf(root: Option<Rc<RefCell<TreeNode>>>) -> i32 {
        // Depth-first walk; `current` carries the bits accumulated so far.
        fn dfs(node: Option<Rc<RefCell<TreeNode>>>, current: i32, total: &mut i32) {
            if let Some(n) = node {
                let b = n.borrow();
                let current = (current << 1) | b.val;
                if b.left.is_none() && b.right.is_none() {
                    *total += current;
                    return;
                }
                // Clone the Rc handles (cheap refcount bumps) instead of
                // `borrow_mut` + `take()`, which destructively stripped the
                // caller's tree of all its children.
                dfs(b.left.clone(), current, total);
                dfs(b.right.clone(), current, total);
            }
        }
        let mut sum = 0;
        dfs(root, 0, &mut sum);
        sum
    }
}
fn main() {
assert_eq!(
Solution::sum_root_to_leaf(Some(Rc::new(RefCell::new(TreeNode {
val: 1,
left: Some(Rc::new(RefCell::new(TreeNode {
val: 0,
left: Some(Rc::new(RefCell::new(TreeNode {
val: 0,
left: None,
right: None
}))),
right: Some(Rc::new(RefCell::new(TreeNode {
val: 1,
left: None,
right: None
}))),
}))),
right: Some(Rc::new(RefCell::new(TreeNode {
val: 1,
left: Some(Rc::new(RefCell::new(TreeNode {
val: 0,
left: None,
right: None
}))),
right: Some(Rc::new(RefCell::new(TreeNode {
val: 1,
left: None,
right: None
}))),
}))),
})))),
22
);
assert_eq!(Solution::sum_root_to_leaf(None), 0);
assert_eq!(
Solution::sum_root_to_leaf(Some(Rc::new(RefCell::new(TreeNode {
val: 1,
left: None,
right: None
})))),
1
);
}
| true
|
da86e371cd4f6de7a15e70731beb2df9aeffb382
|
Rust
|
decentninja/RustRelations
|
/src/lib.rs
|
UTF-8
| 3,528
| 3.28125
| 3
|
[] |
no_license
|
use std::collections::HashSet;
trait Relation<T, W> {
fn operation(&self, a: T, b: T) -> bool;
fn is_transitive(&self) -> bool;
fn is_reflexive(&self) -> bool;
fn is_symetric(&self) -> bool;
fn is_equivalence(&self) -> bool {
self.is_symetric() && self.is_reflexive() && self.is_transitive()
}
/// Test if reasoning is correct given a set operations ex [(1, 2), (3, 4)] where the numbers correspond to unknown variables.
fn valid(&self, reasoning: Vec<(uint, uint)>, nvariables: uint) -> bool {
if self.is_equivalence() {
true
} else {
let mut facts: HashSet<(uint, uint)> = HashSet::new();
let mut constrained: Vec<bool> = Vec::new();
constrained.grow(nvariables, false);
for &reason in reasoning.iter() {
if !facts.contains(&reason) {
if !constrained[reason.0] || !constrained[reason.1] {
// reason contains an unconstrained variable or the reason is reflexive and have no constraints, add this as a fact
constrained[reason.0] = true;
constrained[reason.1] = true;
} else {
// Both variables in relation is constrained, should check if constraint is filled before adding as fact
let mut found = false;
if self.is_reflexive() {
found = true;
}
if self.is_symetric() && !found {
for &fact in facts.iter() {
if fact.0 == fact.1 && fact.1 == fact.0 {
found = true;
}
}
}
if self.is_transitive() && !found {
let (from, to) = reason;
if find_path(from, to, &facts) >= 2 {
found = true;
}
}
if !found {
return false;
}
}
facts.insert(reason);
if self.is_symetric() {
facts.insert((reason.1, reason.0));
}
}
}
return true;
}
}
}
fn find_path(from: uint, to: uint, graph: &HashSet<(uint, uint)>) -> int {
let mut stack = Vec::new();
let mut visisted = HashSet::new();
let mut depth = 0;
stack.push(from);
while !stack.is_empty() {
let visit = stack.pop().unwrap();
if visit == to {
return depth;
}
depth += 1;
if !visisted.contains(&visit) {
visisted.insert(visit);
for &(f, t) in graph.iter() {
if f == visit {
stack.push(t);
}
}
}
}
return -1
}
#[test]
fn find_path_test() {
let mut a = HashSet::new();
a.insert((1, 2));
a.insert((2, 3));
a.insert((3, 4));
assert!(find_path(1, 4, &a) == 3);
assert!(find_path(4, 1, &a) == -1);
}
struct And;
impl Relation<bool, bool> for And {
fn operation(&self, a: bool, b: bool) -> bool {
a && b
}
fn is_transitive(&self) -> bool {
true
}
fn is_reflexive(&self) -> bool {
true
}
fn is_symetric(&self) -> bool {
true
}
}
struct Greater;
impl Relation<int, int> for Greater {
fn operation(&self, a: int, b: int) -> bool {
a > b
}
fn is_reflexive(&self) -> bool {
false
}
fn is_symetric(&self) -> bool {
false
}
fn is_transitive(&self) -> bool {
true
}
}
#[test]
fn is_equivalence() {
assert!(And.is_equivalence());
assert!(Greater.is_equivalence() == false);
}
#[test]
fn valid() {
// Test if this holds, a && b, b && a, b && c, c && b
let valid_reasoning = vec!((0, 1), (1, 0), (1, 2), (2, 1));
assert!(And.valid(valid_reasoning, 3));
// a > b, b > c, a > c
let valid_reasoning_2 = vec!((0, 1), (1, 2), (0, 2));
assert!(Greater.valid(valid_reasoning_2, 3));
}
#[test]
fn invalid() {
// test if this holds, (it should not), a > b, b > a
let invalid_reasoning = vec!((0, 1), (1, 0));
assert!(Greater.valid(invalid_reasoning, 2) == false);
}
| true
|
55264f4ccaffa57d843cdf867793b32755a2eee2
|
Rust
|
AtomicMegaNerd/pig_latin
|
/src/main.rs
|
UTF-8
| 224
| 2.953125
| 3
|
[] |
no_license
|
use pig_latin::pig_latinize;
fn main() {
println!("{}", pig_latinize("Hello again, out there wonderful world"));
println!(
"{}",
pig_latinize("Today is a fantastic day, and I am so happy!")
);
}
| true
|
14f43703a55872e7e34efeb076edf0b5b4f33622
|
Rust
|
tinmarino/libnotcurses-sys
|
/src/cell/test/reimplemented.rs
|
UTF-8
| 3,267
| 2.59375
| 3
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
//! Test `cell*_*` reimplemented functions
use serial_test::serial;
use crate::{c_api, NcAlpha, NcAlphaApi, NcCell};
/// `nccell_*_rgb{,8}` setters/getters round-trip fg and bg colors.
#[test]
#[serial]
fn rgb() {
    // rgb: a fresh cell reports zero channels; the setter keeps only the
    // low 24 RGB bits (the 0x99 high byte is dropped by the getter).
    let mut c1 = NcCell::new();
    assert_eq![0, c_api::nccell_fg_rgb(&c1)];
    assert_eq![0, c_api::nccell_bg_rgb(&c1)];
    c_api::nccell_set_fg_rgb(&mut c1, 0x99112233);
    assert_eq![0x112233, c_api::nccell_fg_rgb(&c1)];
    c_api::nccell_set_bg_rgb(&mut c1, 0x99445566);
    assert_eq![0x445566, c_api::nccell_bg_rgb(&c1)];
    // rgb8: per-component variants round-trip through out-params and also
    // return the raw channel value.
    let mut c2 = NcCell::new();
    let (mut r, mut g, mut b) = (0, 0, 0);
    c_api::nccell_set_fg_rgb8(&mut c2, 0x11, 0x22, 0x33);
    let fchannel = c_api::nccell_fg_rgb8(&c2, &mut r, &mut g, &mut b);
    assert_eq!((0x11, 0x22, 0x33), (r, g, b));
    // Mask off the not-default flag bit to compare only the RGB payload.
    assert_eq![0x112233, fchannel & !c_api::NC_BGDEFAULT_MASK];
    c_api::nccell_set_bg_rgb8(&mut c2, 0x44, 0x55, 0x66);
    let bchannel = c_api::nccell_bg_rgb8(&c2, &mut r, &mut g, &mut b);
    assert_eq!((0x44, 0x55, 0x66), (r, g, b));
    assert_eq![0x445566, bchannel & !c_api::NC_BGDEFAULT_MASK];
}
/// Alpha setters/getters round-trip `NcAlpha` values on fg/bg channels.
#[test]
#[serial]
fn alpha() {
    let mut c1 = NcCell::new();
    // Defaults are zero (opaque).
    assert_eq![0, c_api::nccell_fg_alpha(&c1)];
    assert_eq![0, c_api::nccell_bg_alpha(&c1)];
    c_api::nccell_set_fg_alpha(&mut c1, NcAlpha::TRANSPARENT);
    assert_eq![crate::NcAlpha::TRANSPARENT, c_api::nccell_fg_alpha(&c1)];
    c_api::nccell_set_bg_alpha(&mut c1, crate::NcAlpha::BLEND);
    assert_eq![crate::NcAlpha::BLEND, c_api::nccell_bg_alpha(&c1)];
}
/// `nccell_*_default_p` reports default colors until any explicit color
/// (rgb, rgb8, or palette index) is set, and again after a reset.
#[test]
#[serial]
fn default() {
    let mut c1 = NcCell::new();
    assert_eq![true, c_api::nccell_fg_default_p(&c1)];
    assert_eq![true, c_api::nccell_bg_default_p(&c1)];
    // rgb
    c_api::nccell_set_fg_rgb(&mut c1, 0x112233);
    c_api::nccell_set_bg_rgb(&mut c1, 0x445566);
    assert_eq![false, c_api::nccell_fg_default_p(&c1)];
    assert_eq![false, c_api::nccell_bg_default_p(&c1)];
    // reset
    c_api::nccell_set_fg_default(&mut c1);
    c_api::nccell_set_bg_default(&mut c1);
    assert_eq![true, c_api::nccell_fg_default_p(&c1)];
    assert_eq![true, c_api::nccell_bg_default_p(&c1)];
    // rgb8
    c_api::nccell_set_fg_rgb8(&mut c1, 0x11, 0x22, 0x33);
    c_api::nccell_set_bg_rgb8(&mut c1, 0x44, 0x55, 0x66);
    assert_eq![false, c_api::nccell_fg_default_p(&c1)];
    assert_eq![false, c_api::nccell_bg_default_p(&c1)];
    // reset
    c_api::nccell_set_fg_default(&mut c1);
    c_api::nccell_set_bg_default(&mut c1);
    // palette
    c_api::nccell_set_fg_palindex(&mut c1, 5);
    c_api::nccell_set_bg_palindex(&mut c1, 6);
    assert_eq![false, c_api::nccell_fg_default_p(&c1)];
    assert_eq![false, c_api::nccell_bg_default_p(&c1)];
}
/// Palette-index setters flip `*_palindex_p` and round-trip the index.
#[test]
#[serial]
fn palette() {
    let mut c1 = NcCell::new();
    // A fresh cell is not palette-indexed and reports index 0.
    assert_eq![false, c_api::nccell_fg_palindex_p(&c1)];
    assert_eq![false, c_api::nccell_bg_palindex_p(&c1)];
    assert_eq![0, c_api::nccell_fg_palindex(&c1)];
    assert_eq![0, c_api::nccell_bg_palindex(&c1)];
    c_api::nccell_set_fg_palindex(&mut c1, 5);
    c_api::nccell_set_bg_palindex(&mut c1, 6);
    assert_eq![true, c_api::nccell_fg_palindex_p(&c1)];
    assert_eq![true, c_api::nccell_bg_palindex_p(&c1)];
    assert_eq![5, c_api::nccell_fg_palindex(&c1)];
    assert_eq![6, c_api::nccell_bg_palindex(&c1)];
}
| true
|
46e5be9634deb6d90b6d6a152dc3d79ae0146077
|
Rust
|
robjtede/adventofcode2017
|
/day9b/src/main.rs
|
UTF-8
| 640
| 2.65625
| 3
|
[] |
no_license
|
extern crate day9b as lib;
use std::env;
use std::process;
use lib::count_garbage;
use lib::config::Config;
use lib::parser::{get_input, parse_input};
fn main() {
    // Parse CLI arguments; bail out with a message on failure.
    let config = Config::new(env::args()).unwrap_or_else(|err| {
        println!("Problem parsing arguments: {}", err);
        process::exit(1);
    });

    // Load the puzzle input described by the config.
    let input: String = get_input(&config).unwrap_or_else(|err| {
        println!("Problem getting input: {}", err);
        process::exit(1);
    });

    // Tokenize the stream and count the garbage characters.
    let answer = count_garbage(&parse_input(&input));
    println!("Answer: {}", answer);
}
| true
|
722ee177b01e063cf49b92758bc1da47b21daaa7
|
Rust
|
loyston500/CodeGodRust
|
/bot/src/commands/misc.rs
|
UTF-8
| 2,698
| 3.03125
| 3
|
[] |
no_license
|
use std::time::{Duration, SystemTime};
use serenity::client::Context;
use serenity::framework::standard::{macros::command, Args, CommandResult};
use serenity::model::channel::Message;
use crate::Database;
/// Measures round-trip latency ("heart beat"): posts "Ping?", then edits that
/// message with how long the send call took.
#[command]
pub async fn ping(ctx: &Context, msg: &Message) -> CommandResult {
    let start = SystemTime::now();
    let mut message = msg.channel_id.say(&ctx.http, "Ping?").await?;
    let end = SystemTime::now();
    // duration_since only fails if the clock went backwards; skip the edit then.
    if let Ok(duration) = end.duration_since(start) {
        message
            .edit(ctx, |m| {
                m.content(format!("Pong!\nLatency: {:?}", duration))
            })
            .await?;
    }
    Ok(())
}
/// Sets the trigger emoji of the server (admin only).
///
/// Prompts with a message, waits up to 30 seconds for the invoker to react to
/// that prompt, then persists the chosen emoji for the guild in the database.
#[command]
pub async fn setemoji(ctx: &Context, msg: &Message) -> CommandResult {
    // Guard: guild administrators only; reject use outside a guild (DMs).
    match msg.guild(&ctx).await {
        Some(guild) => {
            if !guild
                .member_permissions(&ctx, msg.author.id)
                .await?
                .administrator()
            {
                msg.channel_id
                    .say(&ctx, "I'm sorry, this command is admin only.")
                    .await?;
                return Ok(());
            }
        }
        None => {
            msg.channel_id
                .say(&ctx, "This command can only be used in a guild/server.")
                .await?;
            return Ok(());
        }
    }
    let mut resp = msg
        .channel_id
        .say(
            &ctx,
            "React to this message with the emoji you want to set.",
        )
        .await?;
    // Wait (max 30 s) for a reaction on our own prompt from the command author.
    if let Some(reaction) = resp
        .await_reaction(&ctx)
        .timeout(Duration::from_secs(30))
        .author_id(msg.author.id)
        .message_id(resp.id)
        .channel_id(resp.channel_id)
        .await
    {
        let emoji = reaction.as_inner_ref().emoji.as_data();
        // Persist the emoji for this guild; the block scopes the data lock.
        let result = {
            let data = ctx.data.read().await;
            data.get::<Database>()
                .expect("Error: database is not initialized properlly.")
                .clone()
                .read()
                .await
                .set_emoji(msg.guild_id.unwrap().0, &emoji)
                .await
        };
        // Report success or failure by editing the original prompt.
        if let Ok(_) = result {
            resp.edit(&ctx, |m| {
                m.content(format!("Successfully set the trigger emoji to {}.", emoji))
            })
            .await?;
        } else {
            resp.edit(&ctx, |m| {
                m.content("An error occured while setting the emoji, please try again.")
            })
            .await?;
        }
    } else {
        resp.edit(&ctx, |m| m.content("You did not react in time."))
            .await?;
    }
    Ok(())
}
| true
|
6fb80fe6f3f0c9a8bd410a591bb9706d29fbe7e5
|
Rust
|
Laastine/hinterland
|
/src/zombie/zombies.rs
|
UTF-8
| 3,232
| 2.6875
| 3
|
[
"GPL-1.0-or-later",
"Apache-2.0"
] |
permissive
|
use crate::shaders::Position;
use crate::zombie::ZombieDrawable;
/// Container component holding every zombie in the world.
pub struct Zombies {
    pub zombies: Vec<ZombieDrawable>,
}
impl Zombies {
    /// Spawns the fixed set of zombies at hand-placed positions around the
    /// origin; the numbered groups are successive rings placed farther out.
    pub fn new() -> Zombies {
        Zombies {
            zombies: vec![
                // 1
                ZombieDrawable::new(Position::new(500.0, 40.0)),
                ZombieDrawable::new(Position::new(-500.0, 40.0)),
                ZombieDrawable::new(Position::new(40.0, 500.0)),
                ZombieDrawable::new(Position::new(40.0, -500.0)),
                ZombieDrawable::new(Position::new(300.0, -300.0)),
                ZombieDrawable::new(Position::new(-300.0, -300.0)),
                ZombieDrawable::new(Position::new(300.0, 300.0)),
                ZombieDrawable::new(Position::new(-300.0, 300.0)),
                ZombieDrawable::new(Position::new(500.0, -500.0)),
                ZombieDrawable::new(Position::new(-500.0, -500.0)),
                ZombieDrawable::new(Position::new(-500.0, 500.0)),
                ZombieDrawable::new(Position::new(500.0, 500.0)),
                ZombieDrawable::new(Position::new(600.0, -600.0)),
                ZombieDrawable::new(Position::new(-600.0, -600.0)),
                ZombieDrawable::new(Position::new(-600.0, 600.0)),
                ZombieDrawable::new(Position::new(600.0, 600.0)),
                ZombieDrawable::new(Position::new(650.0, -650.0)),
                ZombieDrawable::new(Position::new(-650.0, -650.0)),
                ZombieDrawable::new(Position::new(-650.0, 650.0)),
                ZombieDrawable::new(Position::new(650.0, 650.0)),
                // 2
                ZombieDrawable::new(Position::new(700.0, 60.0)),
                ZombieDrawable::new(Position::new(-900.0, 60.0)),
                ZombieDrawable::new(Position::new(60.0, 700.0)),
                ZombieDrawable::new(Position::new(60.0, -700.0)),
                // 3
                ZombieDrawable::new(Position::new(750.0, 60.0)),
                ZombieDrawable::new(Position::new(-750.0, 60.0)),
                ZombieDrawable::new(Position::new(60.0, 750.0)),
                ZombieDrawable::new(Position::new(60.0, -750.0)),
                // 4
                ZombieDrawable::new(Position::new(800.0, 160.0)),
                ZombieDrawable::new(Position::new(-1000.0, 160.0)),
                ZombieDrawable::new(Position::new(160.0, 800.0)),
                ZombieDrawable::new(Position::new(160.0, -800.0)),
                // 5
                ZombieDrawable::new(Position::new(900.0, 10.0)),
                ZombieDrawable::new(Position::new(-900.0, 10.0)),
                ZombieDrawable::new(Position::new(10.0, 900.0)),
                ZombieDrawable::new(Position::new(10.0, -900.0)),
                // 6
                ZombieDrawable::new(Position::new(1000.0, 10.0)),
                ZombieDrawable::new(Position::new(-1000.0, 10.0)),
                ZombieDrawable::new(Position::new(10.0, 1000.0)),
                ZombieDrawable::new(Position::new(10.0, -1000.0)),
                // 7
                ZombieDrawable::new(Position::new(1100.0, 10.0)),
                ZombieDrawable::new(Position::new(-1100.0, 10.0)),
                ZombieDrawable::new(Position::new(10.0, 1100.0)),
                ZombieDrawable::new(Position::new(10.0, -1100.0)),
                // 8
                ZombieDrawable::new(Position::new(1200.0, 10.0)),
                ZombieDrawable::new(Position::new(-1200.0, 10.0)),
                ZombieDrawable::new(Position::new(10.0, 1200.0)),
                ZombieDrawable::new(Position::new(10.0, -1200.0)),
            ]
        }
    }
}
// Register `Zombies` as a specs ECS component backed by `VecStorage`.
impl specs::prelude::Component for Zombies {
    type Storage = specs::storage::VecStorage<Zombies>;
}
| true
|
82691d876f08bc28fe49a06a193e90b0c54fcb47
|
Rust
|
keesvv/boltchat-rs
|
/src/events/event.rs
|
UTF-8
| 742
| 2.859375
| 3
|
[
"MIT"
] |
permissive
|
use std::time::{SystemTime, UNIX_EPOCH};
use serde::{Deserialize, Serialize};
/// Event metadata that is appended to every event.
#[derive(Serialize, Deserialize, Debug)]
pub struct EventMeta {
    pub t: String, // event type tag
    pub c: u64,    // creation time — set by `new` to seconds since the UNIX epoch
}
impl EventMeta {
    /// Builds metadata of type `t`, stamped with the current UNIX time in seconds.
    pub fn new(t: &str) -> Self {
        EventMeta {
            t: String::from(t),
            // This can only fail if the system clock is set before 1970, which
            // is a broken environment — state the invariant instead of unwrap().
            c: SystemTime::now()
                .duration_since(UNIX_EPOCH)
                .expect("system clock is before the UNIX epoch")
                .as_secs(),
        }
    }
}
/// User struct to (de)serialize a user.
/// Holds the user's nick and public key.
/// Used in the join and leave events.
#[derive(Serialize, Deserialize, Debug)]
pub struct User {
    pub nick: String,
    pub pubkey: String,
}
/// Event struct to decode all events from; carries only the common metadata.
#[derive(Serialize, Deserialize, Debug)]
pub struct Event {
    pub e: EventMeta
}
| true
|
de78e7aedd9bcdea034486514ecb86588a99f6ed
|
Rust
|
edouardparis/opennode-rs
|
/opennode-client/src/lib.rs
|
UTF-8
| 2,559
| 2.515625
| 3
|
[
"MIT"
] |
permissive
|
pub mod client;
pub mod error;
use opennode::account::{Balance};
use opennode::charge;
use opennode::charge::Charge;
use opennode::withdrawal;
use opennode::withdrawal::Withdrawal;
use opennode::refund;
use opennode::refund::Refund;
use opennode::currency::Currency;
use opennode::rate::Rates;
use crate::client::Client;
use crate::error::Error;
/// Create charge
pub async fn create_charge(client: &Client, payload: charge::Payload) -> Result<Charge, Error> {
    // POST the payload to the charges endpoint and decode the created charge.
    let body = Some(payload);
    client.post("/v1/charges", body).await
}
/// Retrieve charge with the given id
pub async fn get_charge(client: &Client, id: &str) -> Result< Charge, Error> {
let path = format!("/v1/charge/{}", id);
client.get(&path, None as Option<String>).await
}
/// Retrieve paid charges.
pub async fn list_charges(client: &Client) -> Result<Vec<Charge>, Error> {
    // `None::<String>`: no query payload (idiomatic turbofish over `as` cast).
    client.get("/v1/charges", None::<String>).await
}
/// Create withdrawal
pub async fn create_withdrawal(client: &Client, payload: withdrawal::Payload) -> Result< Withdrawal, Error> {
client.post("/v2/withdrawals", Some(payload)).await
}
/// Retrieve withdrawal with the given id
pub async fn get_withdrawal(client: &Client, id: &str) -> Result<Withdrawal, Error> {
    let path = format!("/v1/withdrawal/{}", id);
    // `None::<String>`: no query payload (idiomatic turbofish over `as` cast).
    client.get(&path, None::<String>).await
}
/// Retrieve withdrawals.
pub async fn list_withdrawals(client: &Client) -> Result<Vec<Withdrawal>, Error> {
    // `None::<String>`: no query payload (idiomatic turbofish over `as` cast).
    client.get("/v1/withdrawals", None::<String>).await
}
/// Create refund
pub async fn create_refund(client: &Client, payload: refund::Payload) -> Result<Refund, Error> {
    // POST the payload to the refunds endpoint and decode the created refund.
    let body = Some(payload);
    client.post("/v1/refunds", body).await
}
/// Retrieve refund with the given id
pub async fn get_refund(client: &Client, id: &str) -> Result<Refund, Error> {
    let path = format!("/v1/refund/{}", id);
    // `None::<String>`: no query payload (idiomatic turbofish over `as` cast).
    client.get(&path, None::<String>).await
}
/// Retrieve refunds.
pub async fn list_refunds(client: &Client) -> Result<Vec<Refund>, Error> {
    // `None::<String>`: no query payload (idiomatic turbofish over `as` cast).
    client.get("/v1/refunds", None::<String>).await
}
/// Retrieve available currencies.
pub async fn list_currencies(client: &Client) -> Result<Vec<Currency>, Error> {
    // `None::<String>`: no query payload (idiomatic turbofish over `as` cast).
    client.get("/v1/currencies", None::<String>).await
}
/// Retrieve account balance.
pub async fn get_account_balance(client: &Client) -> Result<Balance, Error> {
    // `None::<String>`: no query payload (idiomatic turbofish over `as` cast).
    client.get("/v1/account/balance", None::<String>).await
}
/// Retrieve rate list.
pub async fn list_rates(client: &Client) -> Result<Rates, Error> {
    // `None::<String>`: no query payload (idiomatic turbofish over `as` cast).
    client.get("/v1/rates", None::<String>).await
}
| true
|
4cc0b107f1ae31c45d26490a273ec73641ec6654
|
Rust
|
TimelyDataflow/differential-dataflow
|
/src/algorithms/graphs/scc.rs
|
UTF-8
| 2,194
| 2.625
| 3
|
[
"MIT"
] |
permissive
|
//! Strongly connected component structure.
use std::mem;
use std::hash::Hash;
use timely::dataflow::*;
use ::{Collection, ExchangeData};
use ::operators::*;
use ::lattice::Lattice;
use ::difference::{Abelian, Multiply};
use super::propagate::propagate;
/// Iteratively removes nodes with no in-edges.
///
/// Fixed point of: keep an edge only while its source is still the destination
/// of some surviving edge.
pub fn trim<G, N, R>(graph: &Collection<G, (N,N), R>) -> Collection<G, (N,N), R>
where
    G: Scope,
    G::Timestamp: Lattice+Ord,
    N: ExchangeData+Hash,
    R: ExchangeData + Abelian,
    R: Multiply<R, Output=R>,
    R: From<i8>,
{
    graph.iterate(|edges| {
        // keep edges from active edge destinations.
        // `threshold` flattens any positive multiplicity to weight 1, so
        // `active` is the set of nodes with at least one incoming edge.
        let active =
            edges.map(|(_src,dst)| dst)
                 .threshold(|_,c| if c.is_zero() { R::from(0 as i8) } else { R::from(1 as i8) });
        graph.enter(&edges.scope())
             .semijoin(&active)
    })
}
/// Returns the subset of edges in the same strongly connected component.
pub fn strongly_connected<G, N, R>(graph: &Collection<G, (N,N), R>) -> Collection<G, (N,N), R>
where
    G: Scope,
    G::Timestamp: Lattice+Ord,
    N: ExchangeData+Hash,
    R: ExchangeData + Abelian,
    R: Multiply<R, Output=R>,
    R: From<i8>
{
    graph.iterate(|inner| {
        let edges = graph.enter(&inner.scope());
        // The same edge set with every edge reversed (swap src and dst).
        let trans = edges.map_in_place(|x| mem::swap(&mut x.0, &mut x.1));
        // Alternately restrict by forward and reverse connectivity until the
        // surviving edges stabilize inside SCCs.
        trim_edges(&trim_edges(inner, &edges), &trans)
    })
}
/// Keeps only the edges of `edges` whose two endpoints receive the same label
/// when labels are propagated along `cycle`.
fn trim_edges<G, N, R>(cycle: &Collection<G, (N,N), R>, edges: &Collection<G, (N,N), R>)
    -> Collection<G, (N,N), R>
where
    G: Scope,
    G::Timestamp: Lattice+Ord,
    N: ExchangeData+Hash,
    R: ExchangeData + Abelian,
    R: Multiply<R, Output=R>,
    R: From<i8>
{
    // Collapse each edge to (dst, dst) to extract the node set.
    let nodes = edges.map_in_place(|x| x.0 = x.1.clone())
                     .consolidate();
    // NOTE: With a node -> int function, can be improved by:
    // let labels = propagate_at(&cycle, &nodes, |x| *x as u64);
    let labels = propagate(&cycle, &nodes);
    // Join each endpoint with its label, keep edges whose labels agree, and
    // finally swap the pair back to its original orientation.
    edges.join_map(&labels, |e1,e2,l1| (e2.clone(),(e1.clone(),l1.clone())))
         .join_map(&labels, |e2,(e1,l1),l2| ((e1.clone(),e2.clone()),(l1.clone(),l2.clone())))
         .filter(|(_,(l1,l2))| l1 == l2)
         .map(|((x1,x2),_)| (x2,x1))
}
| true
|
156d7378bbd81ca228a14c54fd4a6899479f435f
|
Rust
|
jacobmischka/adventofcode-2020
|
/src/bin/13.rs
|
UTF-8
| 2,162
| 2.96875
| 3
|
[] |
no_license
|
use std::io::{self, BufRead};
fn main() {
    let stdin = io::stdin();
    let mut lines = stdin.lock().lines();

    // Line 1: the earliest time we could board a bus.
    let earliest_timestamp: u32 = lines.next().unwrap().unwrap().parse().unwrap();

    // Line 2: comma-separated bus ids; non-numeric entries ("x") become None
    // but keep their slot so part 2's offsets stay correct.
    let bus_ids: Vec<Option<u32>> = lines
        .next()
        .unwrap()
        .unwrap()
        .split(',')
        .map(str::parse::<u32>)
        .map(Result::ok)
        .collect();

    let earliest_float = earliest_timestamp as f64;

    // Wait time per bus = (next multiple of id >= earliest) - earliest; pick the
    // smallest wait. `min_by_key` keeps the first minimum, matching the original
    // hand-rolled fold's strict `<` comparison.
    let (min_id, min_diff) = bus_ids
        .iter()
        .filter_map(|x| {
            x.map(|x| {
                let mult = (earliest_float / x as f64).ceil();
                (x, (mult as u32 * x) - earliest_timestamp)
            })
        })
        .min_by_key(|&(_, diff)| diff)
        .unwrap();

    let part_1 = min_id * min_diff;
    println!("Part 1: {}", part_1);

    // (offset in the schedule, bus id) pairs for the surviving buses.
    let offsets_and_ids: Vec<_> = bus_ids
        .iter()
        .enumerate()
        .filter_map(|(offset, id)| id.map(|id| (offset as u128, id as u128)))
        .collect();

    // TODO: I'd like to actually do the math to unify myself here, but too lazy right now
    println!("Part 2: Stick this boy into WolframAlpha, lol");
    for (offset, id) in offsets_and_ids.iter() {
        print!("((x + {}) mod {}) = ", offset, id);
    }
    println!("0");

    // Naive solution for examples
    //
    // let (max_id_offset, max_id) =
    //     offsets_and_ids
    //         .iter()
    //         .fold((0, 0), |acc, pair| if pair.1 > acc.1 { *pair } else { acc });
    //
    // let mut max_relative_t = max_id;
    // loop {
    //     if offsets_and_ids
    //         .iter()
    //         .all(|(offset, id)| (max_relative_t - max_id_offset + offset) % *id == 0)
    //     {
    //         break;
    //     }
    //
    //     max_relative_t += max_id;
    // }
    //
    // let part_2 = max_relative_t - max_id_offset;
    // println!("Part 2: {}", part_2);
}
| true
|
96b722c09c84261664671bb7be56f5a94d2d245b
|
Rust
|
LoggedFuSkater/rust
|
/tests/implementation_test.rs
|
UTF-8
| 2,416
| 2.578125
| 3
|
[
"MIT"
] |
permissive
|
extern crate logged_fu_skater;
#[cfg(feature = "default-implementations")]
use logged_fu_skater::Obfuscateable;
#[test]
fn test_default_implementation() {
for test in TEST_CASES {
let result = test.input.obfp(test.padding);
assert_eq!(&result, test.expected_result, "input: {},\npadding: {}",test.input, test.padding)
}
}
/// A single table-driven test case for the obfuscation implementation.
struct TestData {
    /// Input string to obfuscate.
    input: &'static str,
    /// Amount of hex padding requested from `obfp`.
    padding: u8,
    /// Expected obfuscated output for this input/padding pair.
    expected_result: &'static str
}
/// Golden test vectors: outputs were captured from a known-good run, so any
/// behavioral change to the default implementation fails these cases.
const TEST_CASES: &[TestData] = &[
    // Empty input
    TestData {
        input: "",
        padding: 0,
        expected_result: "AbsentmindedlyMuscularChildhood",
    },
    // Test padding, positive and negative cases. Also, same input -> same output regardless of padding size.
    TestData {
        input: "asdf",
        padding: 0,
        expected_result: "HonestlyErgonomicSloth",
    },
    TestData {
        input: "asdf",
        padding: 2,
        expected_result: "HonestlyErgonomicSloth5012",
    },
    TestData {
        input: "asdf",
        padding: 4,
        expected_result: "HonestlyErgonomicSloth5012F6C6",
    },
    TestData {
        input: "asdf",
        padding: 8,
        expected_result: "HonestlyErgonomicSloth5012F6C60B27661C",
    },
    // Test a few unique UUID:s
    TestData {
        input: "ac968750-7ca2-4dde-908b-aacbbed2f470",
        padding: 1,
        expected_result: "VerticallyInterestingCarF4",
    },
    TestData {
        input: "3e3278cd-6030-400d-9c0d-ef9be0119853",
        padding: 5,
        expected_result: "StillBlueGorillaA2DEC84AEE",
    },
    TestData {
        input: "6745dc33-2fbd-4311-8884-10aab93199a9",
        padding: 7,
        expected_result: "AmazinglyBraindeadTalent7F2343BF6927EA",
    },
    // Big data blob
    TestData {
        input: "mc093284750932nv2ono2hvfnoh3fo9ch3fxh23omhf293u4hfcqoiuwnhfc093u4hfc2938hnfc209u3hfc092hu3fc092nu3hfc92u3h4fc92nu3h4nfc923h40fc92h340fu2h34fc9u2nh3409uh2304hufc2093u4hfc0\nfcn9n2j43fc 9hu23cfj32fc2\nfc234ufh2o3ihfoh4f92c3hnfc928h43c92mj3fc23\ncfhfcliuw hfroiwuehgoiwuregoiwuecpowi hcpoqiwjecpoiqwhecp9824r+9u3h4f9283 h4f8w73hfwo83fou3wh4fcpoqihfp2u3h4fc983h4fcpu3nh4fcpoh3pf2h34pfc8h3p48hcqp348hfcqp384hfcpq834nfcpq9834hfcpq3h4fc",
        padding: 0,
        expected_result: "BestSadTalent",
    },
];
| true
|
f841c3b2ed39003abd31d0dc2475384074ade6db
|
Rust
|
oatley/rust-programs
|
/arrays/src/main.rs
|
UTF-8
| 302
| 3.53125
| 4
|
[] |
no_license
|
/// Demonstrates fixed-length arrays: inferred and explicitly typed.
fn main() {
    // Arrays have a fixed length and hold items of a single type.
    let numbers = [1, 2, 3, 4, 5];
    println!("First: {}, Last: {}", numbers[0], numbers[4]);

    // The element type and length can be spelled out as [type; length].
    let typed: [i64; 3] = [10, 11, 12];
    println!("First: {}, Last: {}", typed[0], typed[2]);
}
| true
|
cf76b53143ee6cd38d850c4ed5a0557b45be989c
|
Rust
|
liuchengxu/vim-clap
|
/crates/cli/src/command/blines.rs
|
UTF-8
| 2,959
| 2.78125
| 3
|
[
"MIT",
"LicenseRef-scancode-warranty-disclaimer"
] |
permissive
|
use crate::app::Args;
use anyhow::Result;
use clap::Parser;
use filter::SequentialSource;
use maple_core::paths::AbsPathBuf;
use matcher::{Bonus, MatchResult};
use rayon::iter::ParallelBridge;
use std::borrow::Cow;
use std::io::BufRead;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
use types::ClapItem;
use utils::display_width;
/// Fuzzy filter the current vim buffer given the query.
#[derive(Parser, Debug, Clone)]
pub struct Blines {
    /// Initial query string
    #[clap(index = 1)]
    query: String,

    /// File path of current vim buffer.
    #[clap(index = 2)]
    input: AbsPathBuf,

    /// Run the filter over a parallel (rayon) item stream instead of the
    /// sequential one.
    #[clap(long)]
    par_run: bool,
}
/// One non-empty buffer line together with its 1-based line number.
#[derive(Debug)]
pub struct BlinesItem {
    /// Raw line text, without the trailing newline.
    pub raw: String,
    /// 1-based line number within the buffer.
    pub line_number: usize,
}
impl ClapItem for BlinesItem {
    // Matching runs against the raw line only (line number excluded).
    fn raw_text(&self) -> &str {
        self.raw.as_str()
    }

    // Display form prefixes the line number, e.g. "42 some text".
    fn output_text(&self) -> Cow<'_, str> {
        format!("{} {}", self.line_number, self.raw).into()
    }

    // Shift highlight indices right by the width of the line-number prefix
    // (digits plus the single separating space) so they line up with
    // `output_text` rather than `raw_text`.
    fn match_result_callback(&self, match_result: MatchResult) -> MatchResult {
        let mut match_result = match_result;
        match_result.indices.iter_mut().for_each(|x| {
            *x += display_width(self.line_number) + 1;
        });
        match_result
    }
}
impl Blines {
    /// Looks for matches of `query` in lines of the current vim buffer.
    pub fn run(&self, args: Args) -> Result<()> {
        let source_file = std::fs::File::open(&self.input)?;

        // 0-based line counter shared with the (possibly parallel) stream;
        // incremented for every line, including blank ones, so numbering
        // stays accurate.
        let index = AtomicUsize::new(0);

        // Lazily converts buffer lines to `ClapItem`s, dropping blank lines.
        let blines_item_stream = || {
            std::io::BufReader::new(source_file)
                .lines()
                .filter_map(|x| {
                    x.ok().and_then(|line: String| {
                        let index = index.fetch_add(1, Ordering::SeqCst);
                        if line.trim().is_empty() {
                            None
                        } else {
                            let item: Arc<dyn ClapItem> = Arc::new(BlinesItem {
                                raw: line,
                                line_number: index + 1,
                            });
                            Some(item)
                        }
                    })
                })
        };

        // Apply a language-specific scoring bonus when the buffer's path
        // has a usable file extension.
        let filter_context = if let Some(extension) = self
            .input
            .extension()
            .and_then(|s| s.to_str().map(|s| s.to_string()))
        {
            args.into_filter_context()
                .bonuses(vec![Bonus::Language(extension.into())])
        } else {
            args.into_filter_context()
        };

        if self.par_run {
            filter::par_dyn_run_list(
                &self.query,
                filter_context,
                blines_item_stream().par_bridge(),
            );
        } else {
            filter::dyn_run(
                &self.query,
                filter_context,
                SequentialSource::List(blines_item_stream()),
            )?;
        }

        Ok(())
    }
}
| true
|
957ea60676c41602711e4e0684c808b1447ec6a6
|
Rust
|
valarauca/grpc-example
|
/grpc-examples/src/bin/greeter_client.rs
|
UTF-8
| 1,650
| 2.875
| 3
|
[
"MIT"
] |
permissive
|
extern crate grpc_examples;
extern crate grpc;
extern crate futures;
use futures::{Async,Stream};
use grpc_examples::helloworld_grpc::*;
use grpc_examples::helloworld::*;
use std::env;
use std::time::Duration;
use std::thread;
/// Wraps the greeting name; used below as an endless client-side
/// request stream (see the `Stream` impl).
struct Name {
    name: String
}
// Allows `Name::from(string)` / `string.into()` at the call site in `main`.
impl From<String> for Name {
    fn from(name: String) -> Name {
        Name { name }
    }
}
impl Stream for Name {
    type Item = HelloRequest;
    type Error = grpc::Error;

    // Emits a fresh `HelloRequest` roughly every 500ms, forever — this
    // never yields `Ready(None)`, so the request stream is unbounded.
    // NOTE(review): `thread::sleep` blocks the thread polling this future;
    // confirm that is acceptable for the surrounding executor.
    fn poll(&mut self) -> Result<Async<Option<Self::Item>>,Self::Error> {
        thread::sleep(Duration::from_millis(500));
        let mut req = HelloRequest::new();
        req.set_name(self.name.clone());
        println!("Sending msg {}", &self.name);
        Ok(Async::Ready(Some(req)))
    }
}
unsafe impl Send for Name { }
/// Connects to the greeter service on localhost:50051 and streams
/// `HelloRequest`s while polling the response stream in a busy loop.
fn main() {
    let name = "Cody".to_string();
    let client = GreeterClient::new_plain("localhost", 50051, Default::default()).unwrap();
    // `Name` is an infinite stream of requests (one every ~500ms).
    let resp = client.say_hello(grpc::RequestOptions::new(), grpc::StreamingRequest::new(Name::from(name)));
    let mut resp = resp.drop_metadata();
    loop {
        match resp.poll() {
            Ok(Async::Ready(Option::Some(msg))) => {
                println!("Server says {:?}", msg);
            }
            // Stream ended: the server hung up.
            Ok(Async::Ready(Option::None)) => {
                panic!("Server closed connection");
            }
            // Nothing yet; sleep and retry (manual polling, no executor).
            Ok(Async::NotReady) => {
                println!("Server said nothing");
                ::std::thread::sleep(::std::time::Duration::from_millis(1000));
                continue;
            }
            Err(e) => {
                panic!("Server sent error {:?}", e);
            }
        }
    }
}
| true
|
d9611738363e7f9592354397d23b4f98c4ca572b
|
Rust
|
nilq/foon
|
/src/main.rs
|
UTF-8
| 397
| 2.890625
| 3
|
[
"MIT"
] |
permissive
|
mod foon;
use foon::*;
/// Lexes and parses a small sample foon program, printing either the AST
/// or the parse error.
fn main() {
    // NOTE(review): if foon is whitespace-sensitive, the exact indentation
    // inside this literal matters — confirm against the language grammar.
    let test = r#"
[i32; 2]: a = {10, 10}
mut [i32; 2]: b =
10000
b =
b - 100
"#;

    // Pipeline: char stream -> lexer -> token traveler -> parser.
    let lexer = lexer(&mut test.chars());
    let traveler = Traveler::new(lexer.collect());
    let mut parser = Parser::new(traveler);

    match parser.parse() {
        Err(why) => println!("error: {}", why),
        Ok(stuff) => println!("{:#?}", stuff),
    }
}
| true
|
4588ce3f07083cbe13e514e22bc3496cf0cefd18
|
Rust
|
beewyka819/Advent-of-Code-2020
|
/day-1/src/main.rs
|
UTF-8
| 1,639
| 3.328125
| 3
|
[] |
no_license
|
use std::{fs::File, io::Read, path::Path, str::FromStr};
/// AoC 2020 day 1: find 2 (part 1) and 3 (part 2) entries summing to 2020
/// and print their products.
fn main() {
    let input = read_file_to_string("input.txt".to_string());
    let nums = input
        .lines()
        .map(|i| i32::from_str(i).unwrap())
        .collect::<Vec<i32>>();

    // Part 1: first ordered pair summing to 2020.
    let dual_pairs = tuple_pairs_two(&nums);
    let (x, y) = *dual_pairs
        .iter()
        .find(|(x, y)| *x + *y == 2020)
        .expect("No valid combo");
    println!("Part 1: {}", x * y);

    // Part 2: first ordered triple summing to 2020.
    let triple_pairs = tuple_pairs_three(&nums);
    let (x, y, z) = *triple_pairs
        .iter()
        .find(|(x, y, z)| *x + *y + *z == 2020)
        .expect("No valid combo");
    println!("Part 2: {}", x * y * z);
}
/// Reads the whole file at `path` into a `String`, with surrounding
/// whitespace trimmed. Panics if the file cannot be opened or read.
fn read_file_to_string<P: AsRef<Path>>(path: P) -> String {
    let mut file = File::open(path).expect("File not found");
    let mut contents = String::new();
    file.read_to_string(&mut contents)
        .expect("Failed to read from file");
    contents.trim().to_string()
}
/// Returns every ordered pair `(x, y)` drawn from two distinct positions
/// of `nums`, in iteration order (for each `i`, every `j != i`).
///
/// Takes `&[i32]` instead of `&Vec<i32>` (deref coercion keeps existing
/// `&nums` call sites working) and preallocates the n*(n-1) result slots.
fn tuple_pairs_two(nums: &[i32]) -> Vec<(i32, i32)> {
    let n = nums.len();
    let mut pairs = Vec::with_capacity(n * n.saturating_sub(1));
    for (i, &x) in nums.iter().enumerate() {
        for (j, &y) in nums.iter().enumerate() {
            if i != j {
                pairs.push((x, y));
            }
        }
    }
    pairs
}
/// Returns every ordered triple `(x, y, z)` drawn from three pairwise
/// distinct positions of `nums`, in iteration order.
///
/// Takes `&[i32]` instead of `&Vec<i32>` (deref coercion keeps existing
/// `&nums` call sites working) and preallocates the n*(n-1)*(n-2) slots.
fn tuple_pairs_three(nums: &[i32]) -> Vec<(i32, i32, i32)> {
    let n = nums.len();
    let mut triples = Vec::with_capacity(n * n.saturating_sub(1) * n.saturating_sub(2));
    for (i, &x) in nums.iter().enumerate() {
        for (j, &y) in nums.iter().enumerate() {
            for (k, &z) in nums.iter().enumerate() {
                if i != j && i != k && j != k {
                    triples.push((x, y, z));
                }
            }
        }
    }
    triples
}
| true
|
a9c47d55499dff3d3b2ef73c4393da64bfd7b704
|
Rust
|
Sasszem/rttt
|
/src/game/ai/smart.rs
|
UTF-8
| 2,001
| 3.5625
| 4
|
[] |
no_license
|
//! Hard AI
use super::super::board::Board;
use super::super::enums::Symbol;
use super::AI;
use rand::Rng;
/// Hard AI
///
/// Strategy:
/// - winning move if there is one
/// - blocking player's winning move
/// - random corner
/// - random edge
/// - center
///
/// Stateless: all decisions are derived from the board each turn.
pub struct SmartAI {}
impl AI for SmartAI {
    /// Plays one move for the AI (the symbol opposite `player`), following
    /// the priority documented on [`SmartAI`].
    fn do_move(&self, board: &mut Board, player: Symbol) {
        let sym = Symbol::other(player);

        // Take a winning move if one exists.
        for i in 0..9 {
            if board.can_win(sym, i / 3, i % 3) {
                board.set(i / 3, i % 3, sym);
                return;
            }
        }

        // Otherwise block the opponent's winning move.
        for i in 0..9 {
            if board.can_win(player, i / 3, i % 3) {
                board.set(i / 3, i % 3, sym);
                return;
            }
        }

        // Pick in priority order: corner, edge, middle. The corner/edge
        // logic was duplicated verbatim; it is factored into a helper, and
        // the `.map().filter().count() > 0` emptiness check is replaced by
        // the idiomatic `.any()` (which also short-circuits).
        if place_random(board, sym, &CORNERS) {
            return;
        }
        if place_random(board, sym, &EDGES) {
            return;
        }

        // center
        board.set(1, 1, sym);
    }
}

/// Places `sym` on a uniformly random free cell from `cells`, returning
/// whether a move was made. The rejection-sampling loop is safe because it
/// only runs when at least one cell is free.
fn place_random(board: &mut Board, sym: Symbol, cells: &[(u32, u32); 4]) -> bool {
    if cells.iter().any(|&(i, j)| board.get(i, j) == Symbol::Nil) {
        loop {
            let (i, j) = cells[rand::thread_rng().gen_range(0, 4)];
            if board.get(i, j) == Symbol::Nil {
                board.set(i, j, sym);
                return true;
            }
        }
    }
    false
}
/// List of coordinates of corners (row, column) on the 3x3 board.
const CORNERS: [(u32, u32); 4] = [(0, 0), (0, 2), (2, 0), (2, 2)];
/// List of coordinates of edges (row, column) on the 3x3 board.
const EDGES: [(u32, u32); 4] = [(0, 1), (1, 0), (1, 2), (2, 1)];
| true
|
ba94f59460d44b4b63f85ba08066d4e148f95d3d
|
Rust
|
expenses/nebulawar
|
/src/systems/rendering.rs
|
UTF-8
| 12,749
| 2.515625
| 3
|
[] |
no_license
|
use crate::util::*;
use specs::*;
use crate::components;
use crate::context::*;
use super::*;
use cgmath::Matrix4;
/// Queues one model instance per entity that has a position, rotation,
/// size and model component.
pub struct ObjectRenderer;

impl<'a> System<'a> for ObjectRenderer {
    type SystemData = (
        Write<'a, ModelBuffers>,
        ReadStorage<'a, Position>,
        ReadStorage<'a, components::Rotation>,
        ReadStorage<'a, Size>,
        ReadStorage<'a, Model>,
    );

    fn run(&mut self, (mut buffers, pos, rot, size, model): Self::SystemData) {
        for (pos, rot, size, model) in (&pos, &rot, &size, &model).join() {
            // Compose translate * rotate * scale into one instance matrix.
            let scale = Matrix4::from_scale(size.0);
            let rotation: Matrix4<f32> = rot.0.into();
            let position = Matrix4::from_translation(pos.0) * rotation * scale;
            let instance = InstanceVertex::new(position);
            buffers.push_model(*model, instance);
        }
    }
}
/// Queues the background star billboard, placed along the system's light
/// direction at a fixed far distance so it tracks the camera.
pub struct RenderSystem;

impl<'a> System<'a> for RenderSystem {
    type SystemData = (
        Read<'a, Camera>,
        Read<'a, StarSystem>,
        Write<'a, BillboardBuffer>,
    );

    fn run(&mut self, (camera, system, mut buffer): Self::SystemData) {
        let offset = system.light * BACKGROUND_DISTANCE;
        let rotation: Matrix4<f32> = look_at(offset).into();
        // Anchor relative to the camera so the star never parallax-shifts.
        let matrix = Matrix4::from_translation(camera.position() + offset) * rotation * Matrix4::from_scale(BACKGROUND_DISTANCE / 10.0);
        buffer.push_billboard(matrix, Image::Star);
    }
}
/// Draws a selection circle around each selected entity, coloured by its
/// side; entities without a Size component get a radius of 1.0.
pub struct RenderSelected;

impl<'a> System<'a> for RenderSelected {
    type SystemData = (
        Entities<'a>,
        Read<'a, Camera>,
        Write<'a, LineBuffers>,
        Read<'a, ScreenDimensions>,
        ReadStorage<'a, Position>,
        ReadStorage<'a, Selectable>,
        ReadStorage<'a, Size>,
        ReadStorage<'a, Side>
    );

    fn run(&mut self, (entities, camera, mut buffer, screen_dims, pos, selectable, size, side): Self::SystemData) {
        for (entity, pos, selectable, side) in (&entities, &pos, &selectable, &side).join() {
            if selectable.selected {
                let size = size.get(entity).map(|size| size.0).unwrap_or(1.0);
                buffer.push_circle(pos.0, size, side.colour(), screen_dims.0, &camera);
            }
        }
    }
}
/// Draws the queued command path of every selected entity as a polyline
/// starting at the entity's current position.
pub struct RenderCommandPaths;

impl<'a> System<'a> for RenderCommandPaths {
    type SystemData = (
        Write<'a, LineBuffers>,
        Read<'a, Camera>,
        Read<'a, ScreenDimensions>,
        ReadStorage<'a, Position>,
        ReadStorage<'a, Selectable>,
        ReadStorage<'a, Commands>
    );

    fn run(&mut self, (mut buffers, camera, screen_dims, positions, selectable, commands): Self::SystemData) {
        (&positions, &selectable, &commands).join()
            .filter(|(_, selectable, _)| selectable.selected)
            .for_each(|(pos, _, commands)| {
                // Commands without a resolvable target point are skipped.
                let points = iter_owned([pos.0])
                    .chain(commands.iter().filter_map(|command| command.point(&positions)));
                buffers.push_3d_lines(points, WHITE, screen_dims.0, &camera);
            });
    }
}
/// Renders the text overlay: help screen, pause banner, global stats, and
/// details for the first selected entity.
pub struct RenderUI;

impl<'a> System<'a> for RenderUI {
    type SystemData = (
        Entities<'a>,
        Write<'a, TextBuffer>,
        Read<'a, Time>,
        Read<'a, Formation>,
        Read<'a, Paused>,
        Read<'a, Help>,
        Read<'a, Dpi>,
        ReadStorage<'a, ShipType>,
        ReadStorage<'a, Selectable>,
        ReadStorage<'a, Occupation>,
        ReadStorage<'a, Parent>,
        ReadStorage<'a, Materials>,
        ReadStorage<'a, MineableMaterials>,
        ReadStorage<'a, Health>
    );

    fn run(&mut self, (entities, mut text_buffer, time, formation, paused, help, dpi, tag, selectable, occupation, parent, materials, mineable, health): Self::SystemData) {
        // Simple line layout: each call prints one line and advances y.
        let y = &mut 10.0;
        let mut render_text = |text: &str| {
            text_buffer.push_text(text, 10.0, *y, dpi.0);
            *y += 20.0;
        };

        if help.0 {
            render_text("Controls:");
            render_text("WASD to move camera");
            render_text("Drag the right mouse button to rotate the camera");
            render_text("Scroll with the mouse wheel to move the camera closer or further away");
            render_text("Click or drag with the left mouse button to select ships");
            render_text("Hold shift while clicking/dragging to add to the selection");
            render_text("Press C or click the middle mouse button to center the camera on the selected ships");
            render_text("Right click the mouse to order the ships to do something");
            render_text("Holding shift while right clicking will queue orders");
            render_text("Holding shift while moving the mouse up and down will move the plane of movement vertically");
            render_text("Press , and . to rotate through the formation list");
            render_text("Press P to pause/unpause");
            render_text("Press / for the debug view");
            render_text("Press H to toggle this text");
            render_text("---------------------------");
        }

        if paused.0 {
            render_text("PAUSED");
        }

        // Global stats.
        render_text(&format!("Time: {:.1}", time.0));
        render_text(&format!("Population: {}", occupation.join().count()));
        render_text(&format!("Formation: {:?}", *formation));

        // Per-type ship counts.
        let (ships, ships_total) = summarize(tag.join());
        render_text(&format!("Ship count: {}", ships_total));
        for (tag, num) in ships {
            render_text(&format!("{:?}: {}", tag, num));
        }

        // Detail panel for the first selected entity (if any).
        let entity = (&entities, &selectable).join()
            .filter(|(_, selectable)| selectable.selected)
            .map(|(entity, _)| entity)
            .next();

        if let Some(entity) = entity {
            render_text("---------------------");

            if let Some(health) = health.get(entity) {
                render_text(&format!("Health: {}", health.0));
            }

            if let Some(materials) = materials.get(entity) {
                render_text(&format!("Materials: {}", materials.0));
            }

            if let Some(mineable) = mineable.get(entity) {
                render_text(&format!("Mineable Materials: {}", mineable.0));
            }

            // Crew aboard this entity, grouped by occupation.
            let people = (&occupation, &parent).join()
                .filter(|(_, parent)| parent.0 == entity)
                .map(|(occupation, _)| occupation);

            let (people, total) = summarize(people);

            render_text(&format!("Population: {}", total));

            for (tag, num) in people {
                render_text(&format!("{:?}: {}", tag, num));
            }
        }
    }
}
/// Draws the interaction icon next to the cursor when the pending
/// right-click order is a go-to-and-interact command.
pub struct RenderMouse;

impl<'a> System<'a> for RenderMouse {
    type SystemData = (
        Write<'a, LineBuffers>,
        Read<'a, RightClickOrder>,
        Read<'a, Controls>,
        Read<'a, ScreenDimensions>,
    );

    fn run(&mut self, (mut buffers, order, controls, screen_dims): Self::SystemData) {
        let (x, y) = controls.mouse();

        if let Some(Command::GoToAnd(_, interaction)) = order.command {
            // Offset the 64x64 icon below-right of the cursor.
            buffers.push_image(interaction.image(), x + 32.0, y + 32.0, 64.0, 64.0, [0.0; 4], screen_dims.0);
        }
    }
}
/// Debug overlay (active only while debug mode is on): hover marker,
/// steering-force vectors, velocity, and model bounding boxes.
pub struct RenderDebug;

impl<'a> System<'a> for RenderDebug {
    type SystemData = (
        Entities<'a>,
        Write<'a, LineBuffers>,
        Read<'a, Camera>,
        Read<'a, EntityUnderMouse>,
        Read<'a, Debug>,
        Read<'a, Meshes>,
        Read<'a, ScreenDimensions>,
        ReadStorage<'a, Position>,
        ReadStorage<'a, components::Rotation>,
        ReadStorage<'a, Size>,
        ReadStorage<'a, Model>,
        ReadStorage<'a, Velocity>,
        ReadStorage<'a, SeekForce>,
        ReadStorage<'a, AvoidanceForce>,
        ReadStorage<'a, FrictionForce>
    );

    fn run(&mut self, (entities, mut buffers, camera, entity, debug, meshes, screen_dims, pos, rot, size, model, vel, seek, avoid, friction): Self::SystemData) {
        if !debug.0 {
            return;
        }

        // Mark the world-space point under the mouse cursor.
        if let Some((_, point)) = entity.0 {
            buffers.push_circle(point, 10.0, [1.0; 3], screen_dims.0, &camera);
        }

        // Forces are tiny per-frame values; scale them up to be visible.
        let scale = 1000.0;

        for (entity, pos, rot, size, model, vel) in (&entities, &pos, &rot, &size, &model, &vel).join() {
            // Stack each force vector slightly above the previous one so
            // overlapping lines remain distinguishable.
            let step = Vector3::new(0.0, 0.05, 0.0);
            let mut position = pos.0 + step;

            if let Some(seek) = seek.get(entity) {
                buffers.push_3d_line(position, position + seek.0 * scale, [1.0, 0.0, 0.0], screen_dims.0, &camera);
                position += step;
            }

            if let Some(avoid) = avoid.get(entity) {
                buffers.push_3d_line(position, position + avoid.0 * scale, [0.0, 1.0, 0.0], screen_dims.0, &camera);
                position += step;
            }

            if let Some(friction) = friction.get(entity) {
                buffers.push_3d_line(position, position + friction.0 * scale, [0.0, 0.0, 1.0], screen_dims.0, &camera);
                position += step;
            }

            buffers.push_3d_line(position, position + vel.0 * scale / 10.0, [0.0, 1.0, 1.0], screen_dims.0, &camera);

            // render bbox: draw the three edges leaving `min` and the
            // three leaving `max` (a partial wireframe of the AABB).
            let bbox = meshes.get_bbox(*model, pos.0, rot.0, size.0);

            let min = na_point_to_vector(*bbox.mins());
            let max = na_point_to_vector(*bbox.maxs());

            for i in 0 .. 3 {
                let start = min;
                let mut end = start;
                end[i] = max[i];

                buffers.push_3d_line(start, end, WHITE, screen_dims.0, &camera);

                let start = max;
                let mut end = start;
                end[i] = min[i];

                buffers.push_3d_line(start, end, WHITE, screen_dims.0, &camera);
            }
        }
    }
}
/// Draws the rubber-band rectangle while a left-button drag selection is
/// in progress.
pub struct RenderDragSelection;

impl<'a> System<'a> for RenderDragSelection {
    type SystemData = (Write<'a, LineBuffers>, Read<'a, Controls>, Read<'a, ScreenDimensions>);

    fn run(&mut self, (mut buffers, controls, screen_dims): Self::SystemData) {
        if let Some(origin) = controls.left_dragging() {
            buffers.push_rect(origin, controls.mouse(), screen_dims.0);
        }
    }
}
/// Draws a grid on the movement plane around a pending move-to target so
/// the player can judge its height and position.
pub struct RenderMovementPlane;

impl<'a> System<'a> for RenderMovementPlane {
    type SystemData = (Write<'a, LineBuffers>, Read<'a, RightClickOrder>, Read<'a, Camera>, Read<'a, ScreenDimensions>);

    fn run(&mut self, (mut buffers, order, camera, screen_dims): Self::SystemData) {
        if let Some(Command::MoveTo(point)) = order.command {
            let distance = 20.0;

            // Snap the grid centre to the cell size on x/z (y untouched).
            let point = Vector3::new(round_to(point.x, distance), point.y, round_to(point.z, distance));

            let points = 5;
            let radius = points as f32 * distance / 2.0;

            // Draw (points + 1) lines in each direction to form the grid.
            for i in 0 .. points + 1 {
                let i = i as f32 * distance - radius;

                buffers.push_3d_line(
                    point + Vector3::new(i, 0.0, -radius),
                    point + Vector3::new(i, 0.0, radius),
                    WHITE, screen_dims.0, &camera
                );

                buffers.push_3d_line(
                    point + Vector3::new(-radius, 0.0, i),
                    point + Vector3::new(radius, 0.0, i),
                    WHITE, screen_dims.0, &camera
                );
            }
        }
    }
}
/// Delegates drawing of the on-screen message log to the `Log` resource.
pub struct RenderLogSystem;

impl<'a> System<'a> for RenderLogSystem {
    type SystemData = (
        Write<'a, TextBuffer>,
        Read<'a, Log>,
        Read<'a, ScreenDimensions>,
        Read<'a, Dpi>,
    );

    fn run(&mut self, (mut buffer, log, screen_dims, dpi): Self::SystemData) {
        log.render(&mut buffer, (screen_dims.0).y, dpi.0);
    }
}
/// Queues camera-facing billboards for entities with a position, size and
/// image, sorted by distance from the camera.
pub struct RenderBillboards;

impl<'a> System<'a> for RenderBillboards {
    type SystemData = (
        Read<'a, Camera>,
        Write<'a, BillboardBuffer>,
        ReadStorage<'a, Position>,
        ReadStorage<'a, Size>,
        ReadStorage<'a, Image>
    );

    fn run(&mut self, (camera, mut buffer, pos, size, image): Self::SystemData) {
        let cam_pos = camera.position();
        // All billboards share one rotation: facing back along the camera.
        let rotation = look_at(-camera.direction());
        let pred = |point: &Position| point.distance2(cam_pos);

        // Sorted ascending by squared distance, i.e. near-to-far.
        // NOTE(review): alpha blending usually wants far-to-near — confirm
        // the blend mode used downstream expects this order.
        let mut billboards: Vec<_> = (&pos, &size, &image).join().collect();
        billboards.sort_unstable_by(|a, b| cmp_floats(pred(a.0), pred(b.0)));

        for (pos, size, image) in billboards {
            let scale = Matrix4::from_scale(size.0);
            let rotation: Matrix4<f32> = rotation.into();
            let position = Matrix4::from_translation(pos.0) * rotation * scale;
            buffer.push_billboard(position, *image);
        }
    }
}
| true
|
f8eed20bc20f8e14b747da18b33c877401ca5c0d
|
Rust
|
cpfiffer/code
|
/rustbook/src/main.rs
|
UTF-8
| 277
| 3.078125
| 3
|
[] |
no_license
|
mod consts;
use std::collections::HashMap;
/// Demo: build a one-entry map from shared constants and print it.
fn main() {
    println!("Hello, world!");
    let mut map = HashMap::new();
    map.insert(consts::HELLO, consts::WORLD);
    for (key, value) in &map {
        println!("{} {}", key, value);
        // NOTE(review): printed once per map entry — if "What fun." was
        // meant to print once, move it after the loop.
        println!("What fun.")
    }
}
| true
|
9579c657622ede975b9f746f49a5f9e582cd64f6
|
Rust
|
AtsukiTak/bitcoinrs
|
/src/lib/blockchain/block.rs
|
UTF-8
| 1,940
| 2.703125
| 3
|
[] |
no_license
|
use bitcoin::blockdata::{block::{Block, BlockHeader}, constants::genesis_block};
use bitcoin::network::{constants::Network, serialize::BitcoinHash};
use bitcoin::util::hash::Sha256dHash;
/// A block header plus its chain height, with the header hash cached so
/// repeated `bitcoin_hash` calls are free.
#[derive(Copy, Debug, Clone, PartialEq, Eq)]
pub struct BlockData
{
    pub header: BlockHeader,
    pub height: u32,
    // Cached hash of `header`, computed once in `new`.
    hash: Sha256dHash,
}

impl BlockData
{
    /// Creates a `BlockData`, hashing the header once up front.
    pub fn new(header: BlockHeader, height: u32) -> BlockData
    {
        BlockData {
            hash: header.bitcoin_hash(),
            header,
            height,
        }
    }

    /// The genesis block header (height 0) for `network`.
    pub fn genesis(network: Network) -> BlockData
    {
        BlockData::new(genesis_block(network).header, 0)
    }

    /// Borrows the block header.
    pub fn header(&self) -> &BlockHeader
    {
        &self.header
    }

    /// The block's height in the chain.
    pub fn height(&self) -> u32
    {
        self.height
    }
}

impl BitcoinHash for BlockData
{
    // Returns the cached hash instead of re-hashing the header.
    fn bitcoin_hash(&self) -> Sha256dHash
    {
        self.hash
    }
}
/// A full block (header and transactions) plus its chain height, with the
/// block hash cached at construction time.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct FullBlockData
{
    pub block: Block,
    pub height: u32,
    // Cached hash of `block`, computed once in `new`.
    hash: Sha256dHash,
}

impl FullBlockData
{
    /// Creates a `FullBlockData`, hashing the block once up front.
    pub fn new(block: Block, height: u32) -> FullBlockData
    {
        FullBlockData {
            hash: block.bitcoin_hash(),
            block,
            height,
        }
    }

    /// The genesis block (height 0) for `network`.
    pub fn genesis(network: Network) -> FullBlockData
    {
        FullBlockData::new(genesis_block(network), 0)
    }
}

impl BitcoinHash for FullBlockData
{
    // Returns the cached hash instead of re-hashing the block.
    fn bitcoin_hash(&self) -> Sha256dHash
    {
        self.hash
    }
}
/// Common read access for the header-only and full block wrappers, so code
/// can be generic over either representation.
pub trait BlockDataLike: BitcoinHash
{
    /// Borrows the block header.
    fn header(&self) -> &BlockHeader;
    /// The block's height in the chain.
    fn height(&self) -> u32;
}
// Delegates to the inherent accessors.
impl BlockDataLike for BlockData
{
    fn header(&self) -> &BlockHeader
    {
        self.header()
    }

    fn height(&self) -> u32
    {
        self.height()
    }
}

// Reads straight from the contained full block.
impl BlockDataLike for FullBlockData
{
    fn header(&self) -> &BlockHeader
    {
        &self.block.header
    }

    fn height(&self) -> u32
    {
        self.height
    }
}
| true
|
0bb51b067c10d967481806572d00bf0803d24add
|
Rust
|
n8henrie/exercism-exercises
|
/rust/minesweeper/src/lib.rs
|
UTF-8
| 1,630
| 3.09375
| 3
|
[
"MIT"
] |
permissive
|
/// Counts the mines ('*') in the 3x3 neighbourhood around `pos` (x, y),
/// clamped to the grid; the cell at `pos` itself is included in the count.
fn get_adjacent_count(pos: (usize, usize), minefield: &[Vec<char>]) -> u32 {
    // Clamp the neighbourhood to the grid bounds. `saturating_sub` handles
    // the left/top edges; the conditionals handle the right/bottom edges.
    let min_x = pos.0.saturating_sub(1);
    let min_y = pos.1.saturating_sub(1);
    let max_x = if pos.0 + 1 < minefield[0].len() { pos.0 + 1 } else { pos.0 };
    let max_y = if pos.1 + 1 < minefield.len() { pos.1 + 1 } else { pos.1 };

    let mut count = 0;
    for y in min_y..=max_y {
        for x in min_x..=max_x {
            if minefield[y][x] == '*' {
                count += 1;
            }
        }
    }
    count
}
/// Annotates a minesweeper board: mines stay '*', empty cells become the
/// count of adjacent mines (' ' when zero).
///
/// Fixes clippy `expect_fun_call`: `.expect(&format!(...))` built the
/// panic message on every cell even on success; `unwrap_or_else` defers
/// it to the (unreachable) failure path. Also preallocates the outputs.
pub fn annotate(minefield: &[&str]) -> Vec<String> {
    let minefield: Vec<Vec<char>> = minefield.iter().map(|&s| s.chars().collect()).collect();
    let mut output = Vec::with_capacity(minefield.len());
    for (ri, row) in minefield.iter().enumerate() {
        let mut row_str = String::with_capacity(row.len());
        for (ci, &val) in row.iter().enumerate() {
            let outval = match val {
                '*' => '*',
                _ => {
                    let count = get_adjacent_count((ci, ri), &minefield);
                    if count > 0 {
                        // `from_digit` only fails for count >= 10; a 3x3
                        // neighbourhood holds at most 8 mines.
                        std::char::from_digit(count, 10)
                            .unwrap_or_else(|| panic!("invalid count: {}", count))
                    } else {
                        ' '
                    }
                }
            };
            row_str.push(outval);
        }
        output.push(row_str)
    }
    output
}
| true
|
02fb638b6d8dc2370880e917a0e1d6f725a62e7a
|
Rust
|
ErikUggeldahl/Advent2019
|
/day9/src/main.rs
|
UTF-8
| 897
| 3.078125
| 3
|
[] |
no_license
|
use std::fs::File;
use std::io::prelude::*;
use intcode::Intcode;
/// AoC 2019 day 9: run the Intcode program from input.txt with inputs
/// "1" (part 1) and "2" (part 2), printing each program's output.
fn main() {
    let mut file = File::open("input.txt").expect("Could not open input file.");
    let mut program = String::new();
    file.read_to_string(&mut program).expect("Could not read file to string.");
    // The program is one line of comma-separated integers.
    let program = program
        .trim()
        .split(',')
        .map(|i| i.parse::<i64>().unwrap())
        .collect::<Vec<_>>();

    // Part 1: feed input "1" (as bytes) and print the captured output.
    let mut computer = Intcode::new(program.clone());
    let mut output = Vec::new();
    computer.compute(&b"1"[..], &mut output);
    let result = String::from_utf8(output).expect("Could not stringify output.");
    println!("{}", result);

    // Part 2: same program, input "2".
    let mut computer = Intcode::new(program);
    let mut output = Vec::new();
    computer.compute(&b"2"[..], &mut output);
    let result = String::from_utf8(output).expect("Could not stringify output.");
    println!("{}", result);
}
| true
|
8c3f9b42853dd39c4ce9ad1b49dc964b312550b5
|
Rust
|
Lol3rrr/hc-vault
|
/src/database/get_credentials.rs
|
UTF-8
| 1,698
| 2.953125
| 3
|
[
"MIT"
] |
permissive
|
use crate::Auth;
use crate::Client;
use crate::Error;
use serde::Deserialize;
use std::time::Duration;
/// The `data` object of vault's database-credentials response.
#[derive(Deserialize)]
struct DBCreds {
    username: String,
    password: String,
}
/// Wire shape of vault's response for `database/creds/{role}`; only
/// `lease_duration` and `data` are consumed, hence `allow(dead_code)`.
#[allow(dead_code)]
#[derive(Deserialize)]
struct DBCredsResponse {
    lease_id: String,
    lease_duration: u64,
    renewable: bool,
    data: DBCreds,
}
/// This struct holds Database Credentials returned by vault
#[derive(Debug)]
pub struct DatabaseCreds {
    /// The username to use when logging in to the database
    pub username: String,
    /// The password to use when logging in to the database
    pub password: String,
    /// The duration for which these credentials are valid for
    pub duration: Duration,
}
// Field-wise equality over all three fields; equivalent to what
// `#[derive(PartialEq)]` would generate.
impl PartialEq for DatabaseCreds {
    fn eq(&self, other: &Self) -> bool {
        (&self.username, &self.password, &self.duration)
            == (&other.username, &other.password, &other.duration)
    }
}
/// This function is used to actually load the Database credentials from vault
pub async fn get_credentials(
client: &Client<impl Auth>,
name: &str,
) -> Result<DatabaseCreds, Error> {
let path = format!("database/creds/{}", name);
let response = match client
.vault_request::<String>(reqwest::Method::GET, &path, None)
.await
{
Err(e) => return Err(e),
Ok(res) => res,
};
let resp_body = match response.json::<DBCredsResponse>().await {
Err(e) => return Err(Error::from(e)),
Ok(body) => body,
};
Ok(DatabaseCreds {
username: resp_body.data.username,
password: resp_body.data.password,
duration: Duration::from_secs(resp_body.lease_duration),
})
}
| true
|
ae32a82fc9bb2a60838704a0bc13cf566fc074e6
|
Rust
|
Kerollmops/Advent-of-Code-2020
|
/solutions/day-9/src/main.rs
|
UTF-8
| 1,787
| 3.484375
| 3
|
[] |
no_license
|
use itertools::Itertools;
const INPUT: &str = include_str!("../../../inputs/day-9.txt");
/// Prints both puzzle answers for the embedded input with the standard
/// preamble length of 25.
fn main() {
    println!("part one answer is: {}", part_one(INPUT, 25));
    println!("part two answer is: {}", part_two(INPUT, 25));
}
/// Parses whitespace-separated integers; panics on any malformed token.
fn parse_numbers(input: &str) -> Vec<i64> {
    let mut numbers = Vec::new();
    for token in input.split_whitespace() {
        numbers.push(token.parse().unwrap());
    }
    numbers
}
/// Part 1: parses `input` and finds the first number that is not the sum
/// of two distinct values among the preceding `preamble` numbers.
fn part_one(input: &str, preamble: usize) -> i64 {
    let numbers = parse_numbers(input);
    part_one_inner(&numbers, preamble)
}
/// Returns the first number that is NOT expressible as a sum of two
/// distinct values from the `preamble` numbers before it.
/// Panics if every number has such a pair.
fn part_one_inner(numbers: &[i64], preamble: usize) -> i64 {
    *numbers
        .windows(preamble + 1)
        .find(|window| {
            // Split each window into the candidate (`num`) and its preamble.
            let (num, head) = window.split_last().unwrap();
            // Keep windows where no pair of distinct values sums to `num`.
            !head.iter().any(|a| head.iter().any(|b| a != b && a + b == *num))
        })
        .unwrap()
        .last()
        .unwrap()
}
/// Part 2: finds a contiguous run (of at least two numbers) summing to the
/// invalid number from part 1, and returns min + max of that run.
///
/// BUG FIX: the original sliced `numbers[start..start + i]`, which excludes
/// the element at `start + i` even though it was added to `count` — the
/// min/max were taken over an incomplete (or empty, when `i == 0`) range.
/// The range is now inclusive, and a single-element "run" (the invalid
/// number itself) is skipped rather than panicking.
fn part_two(input: &str, preamble: usize) -> i64 {
    let numbers = parse_numbers(input);
    let number = part_one_inner(&numbers, preamble);

    for start in 0..numbers.len() {
        let mut count = 0;
        for (i, x) in numbers[start..].iter().enumerate() {
            count += x;
            if count == number {
                if i > 0 {
                    let run = &numbers[start..=start + i];
                    let min = run.iter().min().unwrap();
                    let max = run.iter().max().unwrap();
                    return min + max;
                }
                // i == 0: the run is just the invalid number itself; a
                // valid run needs at least two elements, so keep scanning.
            } else if count > number {
                break
            }
        }
    }

    panic!("count not find a range for this number")
}
#[cfg(test)]
mod tests {
    use super::*;

    // Sample from the puzzle statement; with a preamble of 5 the first
    // number that is not a pair-sum of the previous five is 127.
    #[test]
    fn simple_part_one() {
        let input = "35
20
15
25
47
40
62
55
65
95
102
117
150
182
127
219
299
277
309
576";

        assert_eq!(part_one(input, 5), 127);
    }

    // Same sample; the contiguous run summing to 127 is 15+25+47+40,
    // so min + max = 15 + 47 = 62.
    #[test]
    fn simple_part_two() {
        let input = "35
20
15
25
47
40
62
55
65
95
102
117
150
182
127
219
299
277
309
576";

        assert_eq!(part_two(input, 5), 62);
    }
}
| true
|
41ff5cdba8f660d24be0f461739f3ac1f52a92fa
|
Rust
|
iCodeIN/code
|
/adventofcode2020/day2_1.rs
|
UTF-8
| 1,211
| 3.171875
| 3
|
[
"MIT"
] |
permissive
|
use std::io::{self, Read};
/// AoC 2020 day 2 part 1: counts passwords (read from stdin, one
/// "lo-hi c: password" policy line each) where the occurrence count of
/// character `c` lies within [lo, hi].
fn main() -> io::Result<()> {
    let mut buffer = String::new();
    io::stdin().read_to_string(&mut buffer)?;
    println!("{}", buffer);

    let v: Vec<&str> = buffer.split('\n').collect();

    let mut valid_count = 0;
    for e in v {
        // Split "1-3 a: abcde" into policy and password; skip malformed
        // lines (e.g. the trailing empty line).
        let ev: Vec<&str> = e.split(':').collect();
        if ev.len() != 2 {
            continue;
        }
        // NOTE(review): the password keeps its leading space after the
        // colon; harmless here since only letter counts are checked.
        let password = ev[1];
        let ev: Vec<&str> = ev[0].split(' ').collect();
        let char_to_be_included = ev[1].chars().nth(0).unwrap();
        let ev: Vec<&str> = ev[0].split('-').collect();
        let left = ev[0].parse::<i32>().unwrap();
        let right = ev[1].parse::<i32>().unwrap();

        // Count occurrences of the policy character.
        let mut counter = 0;
        for c in password.chars() {
            if c == char_to_be_included {
                counter = counter + 1;
            }
        }
        let is_valid = left <= counter && counter <= right;
        println!(
            "{} - {}, '{}', \"{}\", {}, {}",
            left, right, char_to_be_included, password, counter, is_valid
        );
        if is_valid {
            valid_count = valid_count + 1;
        }
    }
    println!("{} passwords are valid", valid_count);
    Ok(())
}
| true
|
d9a34a3ce7f6edf6de4e8785650b279a0d13eb3c
|
Rust
|
replicante-io/common
|
/util/tracing/src/config.rs
|
UTF-8
| 4,494
| 3.046875
| 3
|
[
"MIT"
] |
permissive
|
use std::collections::BTreeMap;
use serde::Deserialize;
use serde::Serialize;
/// Supported tracing backends and their configuration.
///
/// Serialized with a `backend` tag and backend-specific `options` payload.
#[derive(Clone, Default, Eq, PartialEq, Hash, Debug, Serialize, Deserialize)]
#[serde(tag = "backend", content = "options")]
pub enum Config {
    /// The `Noop` tracer (default).
    ///
    /// A tracer that discards all spans.
    /// Used when integration with distributed tracing is not needed.
    #[default]
    #[serde(rename = "noop")]
    Noop,

    /// [Zipkin] tracer backend.
    ///
    /// Spans are sent to [Zipkin] over the [Kafka] collector.
    ///
    /// [Kafka]: https://kafka.apache.org/
    /// [Zipkin]: https://zipkin.io/
    #[serde(rename = "zipkin")]
    Zipkin(ZipkinConfig),
}
/// Zipkin specific configuration options.
///
/// Tagged by `transport`, with transport-specific `options`.
#[derive(Clone, Eq, PartialEq, Hash, Debug, Serialize, Deserialize)]
#[serde(tag = "transport", content = "options")]
pub enum ZipkinConfig {
    /// Zipkin HTTP transport options.
    #[serde(rename = "http")]
    Http(ZipkinHttp),
}
/// Zipkin HTTP transport options.
#[derive(Clone, Eq, PartialEq, Hash, Debug, Serialize, Deserialize)]
pub struct ZipkinHttp {
    /// Number of buffered spans that should trigger a flush.
    #[serde(default = "ZipkinHttp::default_flush_count")]
    pub flush_count: usize,

    /// Maximum delay between span flushes in milliseconds.
    #[serde(default)]
    pub flush_timeout_millis: Option<u64>,

    /// Custom headers to attach to POST requests.
    #[serde(default)]
    pub headers: BTreeMap<String, String>,

    /// Target URL to post spans to.
    pub url: String,
}
impl ZipkinHttp {
    // Serde default provider for `flush_count`.
    fn default_flush_count() -> usize {
        100
    }
}
#[cfg(test)]
mod tests {
    // Round-trip tests for the noop backend.
    mod noop {
        use serde_yaml;

        use super::super::Config;

        #[test]
        fn deserialise() {
            let text = "backend: noop";
            let config: Config = serde_yaml::from_str(text).unwrap();
            assert_eq!(config, Config::Noop);
        }

        #[test]
        fn serialise() {
            let config = Config::Noop;
            let text = serde_yaml::to_string(&config).unwrap();
            assert_eq!(text, "backend: noop\n");
        }
    }

    // (De)serialisation tests for the zipkin/http backend, including
    // defaults and the missing-`url` failure mode.
    mod zipkin {
        use serde_yaml;

        use super::super::Config;
        use super::super::ZipkinConfig;
        use super::super::ZipkinHttp;

        #[test]
        fn deserialise() {
            let text = r#"backend: zipkin
options:
  transport: http
  options:
    flush_count: 1
    flush_timeout_millis: 2000
    url: http://localhost:1234"#;
            let config: Config = serde_yaml::from_str(text).unwrap();
            assert_eq!(
                config,
                Config::Zipkin(ZipkinConfig::Http(ZipkinHttp {
                    flush_count: 1,
                    flush_timeout_millis: Some(2000),
                    headers: Default::default(),
                    url: String::from("http://localhost:1234"),
                }))
            );
        }

        #[test]
        fn deserialise_defaults() {
            let text = r#"backend: zipkin
options:
  transport: http
  options:
    url: http://localhost:1234"#;
            let config: Config = serde_yaml::from_str(text).unwrap();
            assert_eq!(
                config,
                Config::Zipkin(ZipkinConfig::Http(ZipkinHttp {
                    flush_count: 100,
                    flush_timeout_millis: None,
                    headers: Default::default(),
                    url: String::from("http://localhost:1234"),
                }))
            );
        }

        #[test]
        #[should_panic(expected = "missing field `url`")]
        fn deserialise_fails() {
            let text = r#"backend: zipkin
options:
  transport: http
  options: {}"#;
            let _config: Config = serde_yaml::from_str(text).unwrap();
        }

        #[test]
        fn serialise() {
            let config = Config::Zipkin(ZipkinConfig::Http(ZipkinHttp {
                flush_count: 100,
                flush_timeout_millis: None,
                headers: Default::default(),
                url: String::from("http://localhost:1234"),
            }));
            let text = serde_yaml::to_string(&config).unwrap();
            assert_eq!(
                text,
                r#"backend: zipkin
options:
  transport: http
  options:
    flush_count: 100
    flush_timeout_millis: null
    headers: {}
    url: http://localhost:1234
"#
            );
        }
    }
}
| true
|
750f46f42769fc5484b107a0e341a42f3c3a2688
|
Rust
|
tramulns/zip_password_bruteforcer
|
/src/main.rs
|
UTF-8
| 2,566
| 3.03125
| 3
|
[] |
no_license
|
use clap::{App, Arg};
use std::fs::File;
use std::io::{self, BufRead};
use std::path::Path;
use std::time::Instant;
/// Tries every password from a word list against a ZIP archive and reports
/// the first password that successfully decrypts every entry, along with a
/// rough attempts-per-second figure.
fn main() -> Result<(), std::io::Error> {
    // CLI: -f/--file points at the archive, -w/--word at the password list.
    let matches = App::new("Zip Password Brute Forcer")
        .version("0.1.0")
        .arg(
            Arg::with_name("file")
                .short("f")
                .long("file")
                .takes_value(true)
                .help("ZIP File Address"),
        )
        .arg(
            Arg::with_name("word")
                .short("w")
                .long("word")
                .takes_value(true)
                .help("Password List Address"),
        )
        .get_matches();
    let zipfile = matches
        .value_of("file")
        .expect("Please check the ZIP file's path");
    let word_list = matches
        .value_of("word")
        .expect("Please check the Password List file's path");
    let file = File::open(zipfile)?;
    let mut archive = zip::ZipArchive::new(&file)?;
    // Empty `password` doubles as the "not found" sentinel checked below.
    let mut password = "".to_string();
    let mut attempts = 0;
    let start = Instant::now();
    // Only updated on success; never printed on the failure path.
    let mut duration = start.elapsed();
    if let Ok(lines) = read_lines(word_list) {
        // Labelled so an inner decryption failure can skip to the next
        // candidate password.
        'passwords: for line in lines {
            if let Ok(passes) = line {
                attempts += 1;
                let mut count_decrypt_success = 0;
                for i in 0..archive.len() {
                    let file = archive.by_index_decrypt(i, passes.as_bytes());
                    // Err / Ok(Err) means the password failed for this
                    // entry — try the next candidate.
                    match file {
                        Ok(Ok(_)) => count_decrypt_success += 1,
                        _ => continue 'passwords,
                    };
                }
                // Every entry decrypted: password found.
                if count_decrypt_success == archive.len() {
                    password = passes;
                    duration = start.elapsed();
                    break 'passwords;
                }
            }
        }
    }
    if password.is_empty() {
        println!(" [X] Sorry, Password Not Found :(");
    } else {
        println!(" [*] Password Found :)");
        println!(" [*] Password: {}", password);
        println!(
            " [***] Took {} seconds. That is, {} attempts per second.",
            duration.as_secs(),
            // Guard against division by zero when the search finished in
            // under a second.
            if duration.as_secs() == 0 {
                attempts as f64
            } else {
                attempts as f64 / duration.as_secs() as f64
            }
        )
    }
    Ok(())
}
/// Opens `filename` and returns a line iterator over a buffered reader.
///
/// # Errors
///
/// Propagates the `io::Error` from `File::open` when the file cannot be
/// opened.
fn read_lines<P>(filename: P) -> io::Result<io::Lines<io::BufReader<File>>>
where
    P: AsRef<Path>,
{
    File::open(filename).map(|file| io::BufReader::new(file).lines())
}
| true
|
5c377436e43c59c5a51e8c50f5fdab300240416c
|
Rust
|
fossabot/krator
|
/krator/src/manager/tasks.rs
|
UTF-8
| 7,859
| 2.546875
| 3
|
[
"Apache-2.0"
] |
permissive
|
//! Defines common `async` tasks used by Krator's Controller
//! [Manager](crate::manager::Manager).
use std::future::Future;
use futures::FutureExt;
use kube::{api::ApiResource, api::GroupVersionKind, Resource};
use kube_runtime::watcher::Event;
use tracing::{debug, info, warn};
use crate::{
manager::controller::ControllerBuilder,
operator::Operator,
store::Store,
util::{concrete_event, DynamicEvent, PrettyEvent},
};
use super::watch::WatchHandle;
use super::Controller;
/// Watcher task which forwards [DynamicEvent](crate::util::DynamicEvent) to
/// a [channel](tokio::sync::mpsc::channel).
///
/// Runs until the underlying watch stream ends; stream errors are logged
/// and the watch continues.
pub(crate) async fn launch_watcher(client: kube::Client, handle: WatchHandle) {
    use futures::StreamExt;
    use futures::TryStreamExt;
    info!(
        watch=?handle.watch,
        "Starting Watcher."
    );
    // Scope the dynamic API to a namespace when one was configured,
    // otherwise watch cluster-wide.
    let api: kube::Api<kube::api::DynamicObject> = match handle.watch.namespace {
        Some(namespace) => kube::Api::namespaced_with(
            client,
            &namespace,
            &ApiResource::from_gvk(&handle.watch.gvk),
        ),
        None => kube::Api::all_with(client, &ApiResource::from_gvk(&handle.watch.gvk)),
    };
    let mut watcher = kube_runtime::watcher(api, handle.watch.list_params).boxed();
    loop {
        match watcher.try_next().await {
            Ok(Some(event)) => {
                debug!(
                    event = ?PrettyEvent::from(&event),
                    "Handling event."
                );
                // NOTE(review): unwrap panics this task if the receiver was
                // dropped — confirm that is the intended shutdown behaviour.
                handle.tx.send(event).await.unwrap()
            }
            // Stream completed cleanly: stop watching.
            Ok(None) => break,
            Err(error) => warn!(?error, "Error streaming object events."),
        }
    }
}
/// Task for executing a single Controller / Operator. Listens for
/// [DynamicEvent](crate::util::DynamicEvent) on a
/// [channel](tokio::sync::mpsc::channel) and forwards them to a Krator
/// [OperatorRuntime](crate::OperatorRuntime).
///
/// # Errors
///
/// A warning will be logged if a `DynamicEvent` cannot be converted to a
/// concrete `Event<O::Manifest>`.
async fn launch_runtime<O: Operator>(
    kubeconfig: kube::Config,
    controller: O,
    mut rx: tokio::sync::mpsc::Receiver<DynamicEvent>,
    store: Store,
) {
    info!(
        group = &*O::Manifest::group(&()),
        version = &*O::Manifest::version(&()),
        kind = &*O::Manifest::kind(&()),
        "Starting OperatorRuntime."
    );
    let mut runtime =
        crate::OperatorRuntime::new_with_store(&kubeconfig, controller, Default::default(), store);
    // Process events until every sender is dropped.
    while let Some(dynamic_event) = rx.recv().await {
        debug!(
            group=&*O::Manifest::group(&()),
            version=&*O::Manifest::version(&()),
            kind=&*O::Manifest::kind(&()),
            event = ?PrettyEvent::from(&dynamic_event),
            "Handling managed event."
        );
        // Downcast the dynamic event to the operator's manifest type; events
        // that do not deserialize are logged and dropped.
        match concrete_event::<O::Manifest>(dynamic_event.clone()) {
            Ok(event) => runtime.handle_event(event).await,
            Err(e) => {
                warn!(
                    group=&*O::Manifest::group(&()),
                    version=&*O::Manifest::version(&()),
                    kind=&*O::Manifest::kind(&()),
                    error=?e,
                    "Error deserializing dynamic object: {:#?}", dynamic_event
                );
            }
        }
    }
    // Channel closed: the task exits after logging.
    warn!(
        group = &*O::Manifest::group(&()),
        version = &*O::Manifest::version(&()),
        kind = &*O::Manifest::kind(&()),
        "Managed Sender dropped."
    );
}
/// Task for monitoring `watched` or `owned` resources. Listens for
/// [DynamicEvent](crate::util::DynamicEvent) on a
/// [channel](tokio::sync::mpsc::channel) and updates
/// [Store](crate::store::Store).
///
/// # Errors
///
/// Will warn on and drop objects with no `metadata.name` field set.
///
/// # TODO
///
/// * Support notifications for `owned` resources.
async fn launch_watches(
mut rx: tokio::sync::mpsc::Receiver<DynamicEvent>,
gvk: GroupVersionKind,
store: Store,
) {
while let Some(dynamic_event) = rx.recv().await {
debug!(
gvk=?gvk,
event = ?PrettyEvent::from(&dynamic_event),
"Handling watched event."
);
match dynamic_event {
Event::Applied(dynamic_object) => {
let namespace = dynamic_object.metadata.namespace.clone();
let name = match dynamic_object.metadata.name.clone() {
Some(name) => name,
None => {
warn!(
gvk=?gvk,
"Object without name."
);
continue;
}
};
store
.insert_gvk(namespace, name, &gvk, dynamic_object)
.await;
}
Event::Deleted(dynamic_object) => {
let namespace = dynamic_object.metadata.namespace.clone();
let name = match dynamic_object.metadata.name.clone() {
Some(name) => name,
None => {
warn!(
gvk=?gvk,
"Object without name."
);
continue;
}
};
store.delete_gvk(namespace, name, &gvk).await;
}
Event::Restarted(dynamic_objects) => {
store.reset(&gvk).await;
for dynamic_object in dynamic_objects {
let namespace = dynamic_object.metadata.namespace.clone();
let name = match dynamic_object.metadata.name.clone() {
Some(name) => name,
None => {
warn!(
gvk=?gvk,
"Object without name."
);
continue;
}
};
store
.insert_gvk(namespace, name, &gvk, dynamic_object)
.await;
}
}
}
}
}
/// Shorthand for the opaque Future type of the tasks in this module. These
/// must be `awaited` in order to execute; they are boxed and pinned so
/// heterogeneous tasks can share one `Vec`.
pub(crate) type OperatorTask = std::pin::Pin<Box<dyn Future<Output = ()> + Send>>;
/// Generates the `async` tasks needed to run a single controller / operator.
///
/// In general, converts a
/// [ControllerBuilder](crate::manager::controller::ControllerBuilder) to a
/// `Vec` of [OperatorTask](crate::manager::tasks::OperatorTask) which can be
/// executed using [join_all](futures::future::join_all).
///
/// Returns the assembled [`Controller`] (watch handles for managed, owned,
/// and watched resources) alongside the not-yet-awaited tasks.
pub(crate) fn controller_tasks<C: Operator>(
    kubeconfig: kube::Config,
    controller: ControllerBuilder<C>,
    store: Store,
) -> (Controller, Vec<OperatorTask>) {
    let mut watches = Vec::new();
    let mut owns = Vec::new();
    let mut tasks = Vec::new();
    let buffer = controller.buffer();
    // Create main Operator task.
    let (manages, rx) = controller.manages().handle(buffer);
    let task = launch_runtime(kubeconfig, controller.controller, rx, store.clone()).boxed();
    tasks.push(task);
    // One store-updating task per watched resource type.
    for watch in controller.watches {
        let (handle, rx) = watch.handle(buffer);
        let task = launch_watches(rx, handle.watch.gvk.clone(), store.clone()).boxed();
        watches.push(handle);
        tasks.push(task);
    }
    // Owned resources currently reuse the same watch task (see the TODO on
    // `launch_watches` about owned-resource notifications).
    for own in controller.owns {
        let (handle, rx) = own.handle(buffer);
        let task = launch_watches(rx, handle.watch.gvk.clone(), store.clone()).boxed();
        owns.push(handle);
        tasks.push(task);
    }
    (
        Controller {
            manages,
            owns,
            watches,
        },
        tasks,
    )
}
| true
|
ce7a221afdd820a6a6905de2caa9361f235f5171
|
Rust
|
rustytools/examples
|
/streams/src/lib.rs
|
UTF-8
| 703
| 2.71875
| 3
|
[] |
no_license
|
#[macro_use]
extern crate errloc_macros;
/// A toy byte source that generates data from an internal counter (see the
/// `std::io::Read` impl elsewhere in this file).
pub struct MySource {
    // Counter driving the generated sequence; starts at zero.
    count: u32
}

impl MySource {
    /// Creates a source with the counter at zero.
    pub fn new() -> Self {
        // Idiomatic trailing expression instead of an explicit `return`.
        Self { count: 0 }
    }
}

/// `Default` mirrors `new` so the type composes with `#[derive(Default)]`
/// containers and `Option::unwrap_or_default`.
impl Default for MySource {
    fn default() -> Self {
        Self::new()
    }
}
impl std::io::Read for MySource {
    /// Emits pairs of consecutive counter values rendered as decimal ASCII
    /// ("12", then "34", ...) until the counter reaches 42, then returns
    /// `Ok(0)` (EOF) forever after.
    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
        if self.count < 42 {
            let mut st = std::string::String::new();
            // Two increments per call: each read produces two numbers
            // concatenated into one string.
            self.count += 1;
            st.push_str(self.count.to_string().as_str());
            self.count += 1;
            st.push_str(self.count.to_string().as_str());
            let vec = st.into_bytes();
            // NOTE(review): if `buf` is smaller than the rendered digits the
            // unread tail is silently dropped — confirm callers always pass
            // a large enough buffer.
            let len = vec.as_slice().read(buf).expect(errloc!());
            Ok(len)
        } else {
            Ok(0)
        }
    }
}
| true
|
1e7fec08256a07aa1806ddfc0268ecd7b6705808
|
Rust
|
JustinRyanH/Rustlike
|
/gl/tests/program.rs
|
UTF-8
| 2,487
| 2.640625
| 3
|
[
"Apache-2.0"
] |
permissive
|
extern crate glutin;
extern crate rl_gl;
use glutin::GlContext;
use rl_gl::raw::types::*;
// Test helper: builds a 256x256 headless GL context, makes it current, and
// loads the GL function pointers. Both returned values must stay alive for
// the duration of the test (dropping the context invalidates GL calls).
#[cfg(test)]
fn headless_gl_window() -> ((), glutin::HeadlessContext) {
    let width: i32 = 256;
    let height: i32 = 256;
    let window = glutin::HeadlessRendererBuilder::new(width as u32, height as u32)
        .build()
        .unwrap();
    unsafe { window.make_current().expect("Couldn't make window current") };
    // NOTE(review): `load_with` apparently returns `()` here — the first
    // tuple element exists only so callers mirror the (gl, window) shape.
    let gl = rl_gl::raw::load_with(|symbol| window.get_proc_address(symbol) as *const _);
    return (gl, window);
}
#[cfg(test)]
mod spec {
    use rl_gl::GlObject;
    use super::*;
    #[test]
    fn shaders_compile() {
        // Minimal valid vertex/fragment pair. The `\` line continuations
        // strip the newline and following indentation from the literal.
        static VS_SRC: &'static str = "#version 150\n\
            in vec2 position;\n\
            void main() {\n\
            gl_Position = vec4(position, 0.0, 1.0);\n\
            }";
        static FS_SRC: &'static str = "#version 150\n\
            out vec4 out_color;\n\
            void main() {\n\
            out_color = vec4(1.0, 1.0, 1.0, 1.0);\n\
            }";
        let (_gl, _window) = headless_gl_window();
        let vs_id: GLuint;
        let _program = {
            let vs =
                rl_gl::program::CompiledShader::new(VS_SRC, rl_gl::program::ShaderKind::Vertex)
                    .unwrap();
            let fs =
                rl_gl::program::CompiledShader::new(FS_SRC, rl_gl::program::ShaderKind::Fragment)
                    .unwrap();
            vs_id = vs.as_gl_id();
            rl_gl::program::ShaderProgram::new(&vs, &fs).unwrap()
        };
        // NOTE(review): the shader id should still be valid but flagged as
        // deleted once the CompiledShader values drop at the end of the
        // block above — confirm this matches rl_gl's drop semantics.
        assert!(rl_gl::program::questions::shader::is_shader(vs_id).is_ok());
        assert_eq!(
            rl_gl::program::questions::shader::is_deleted(vs_id).unwrap(),
            true
        );
    }
    #[test]
    fn shaders_fail_if_shader_is_bad() {
        // `vec4(position, 1.0)` supplies only 3 components — invalid GLSL,
        // so compilation is expected to fail.
        static VS_SRC: &'static str = "#version 150\n\
            in vec2 position;\n\
            void main() {\n\
            gl_Position = vec4(position, 1.0);\n\
            }";
        let (_gl, _window) = headless_gl_window();
        assert!(
            rl_gl::program::CompiledShader::new(VS_SRC, rl_gl::program::ShaderKind::Vertex).is_err()
        );
    }
}
| true
|
1794d53b899a03174f2b07fe8533825cd81c23f9
|
Rust
|
rocurley/jex
|
/src/cursor.rs
|
UTF-8
| 32,723
| 2.84375
| 3
|
[
"MIT"
] |
permissive
|
use crate::{
jq::jv::{JVArray, JVObject, JVString, OwnedObjectIterator, JV},
lines::{Leaf, LeafContent, LineCursor, UnstyledSpans},
};
use log::trace;
use regex::Regex;
use std::{borrow::Cow, cmp::Ordering, collections::HashSet, fmt, rc::Rc};
use tui::{layout::Rect, text::Spans};
// Requirements:
// * Produce the current line
// * Step forward
// * (Optionally, for searching): Step backwards
// * Can be "dehydrated" into something hashable for storing folds (other metadata?)
/// Which rendered line of the focused JSON value the cursor is on. The
/// `Ord` derive (Start < Value < End) is relied on by `ValuePath::cmp`.
#[derive(PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Clone, Copy)]
pub enum FocusPosition {
    /// Opening bracket of an array or object.
    Start,
    /// The single line of a leaf value.
    Value,
    /// Closing bracket of an array or object.
    End,
}
impl FocusPosition {
    /// Focus position for the first rendered line of `json`: the opening
    /// bracket for containers, the value itself for leaves.
    pub fn starting(json: &JV) -> Self {
        if matches!(json, JV::Array(_) | JV::Object(_)) {
            FocusPosition::Start
        } else {
            FocusPosition::Value
        }
    }
    /// Focus position for the last rendered line of `json`: the closing
    /// bracket for containers, the value itself for leaves.
    pub fn ending(json: &JV) -> Self {
        if matches!(json, JV::Array(_) | JV::Object(_)) {
            FocusPosition::End
        } else {
            FocusPosition::Value
        }
    }
}
/// One level of ancestry above the cursor's focus: the parent container,
/// the index of the focused child within it, and — for objects — the
/// focused key plus an iterator positioned right after that child.
#[derive(Clone)]
pub enum CursorFrame {
    Array {
        index: usize,
        json: JVArray,
    },
    Object {
        index: usize,
        key: JVString,
        json: JVObject,
        iterator: OwnedObjectIterator,
    },
}
impl fmt::Debug for CursorFrame {
    // Manual impl: `OwnedObjectIterator` is not `Debug`, so object frames
    // print every field except the iterator.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        match self {
            CursorFrame::Array { index, json } => fmt
                .debug_struct("Array")
                .field("index", index)
                .field("json", json)
                .finish(),
            CursorFrame::Object {
                index,
                key,
                json,
                iterator: _,
            } => fmt
                .debug_struct("Object")
                .field("index", index)
                .field("key", key)
                .field("json", json)
                .finish(),
        }
    }
}
impl PartialEq for CursorFrame {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(
CursorFrame::Array { index, json },
CursorFrame::Array {
index: other_index,
json: other_json,
},
) => (index == other_index && json == other_json),
(
CursorFrame::Object {
index, key, json, ..
},
CursorFrame::Object {
index: other_index,
key: other_key,
json: other_json,
..
},
) => (index == other_index && json == other_json && key == other_key),
_ => false,
}
}
}
impl Eq for CursorFrame {}
/// Steps the cursor "into" a container: returns the new frame for the
/// container (`None` when it is empty), the new focus (first child, or the
/// container itself when empty), and the focus position on it.
///
/// # Panics
///
/// Panics when `json` is a leaf rather than an array or object.
fn open_container(json: JV) -> (Option<CursorFrame>, JV, FocusPosition) {
    match json {
        JV::Array(arr) => {
            // The iterator is only used to probe for a first child; array
            // frames re-index via `get` rather than storing an iterator.
            let mut iterator = Box::new(arr.clone().into_iter());
            match iterator.next() {
                // Empty array: focus moves straight to the closing bracket.
                None => (None, arr.into(), FocusPosition::End),
                Some(child) => {
                    let focus_position = FocusPosition::starting(&child);
                    (
                        Some(CursorFrame::Array {
                            index: 0,
                            json: arr,
                        }),
                        child,
                        focus_position,
                    )
                }
            }
        }
        JV::Object(obj) => {
            // Object frames keep the iterator: it is already positioned
            // just past the first entry, ready for `CursorFrame::advance`.
            let mut iterator = obj.clone().into_iter();
            match iterator.next() {
                None => (None, obj.into(), FocusPosition::End),
                Some((key, child)) => {
                    let focus_position = FocusPosition::starting(&child);
                    (
                        Some(CursorFrame::Object {
                            index: 0,
                            json: obj,
                            key,
                            iterator,
                        }),
                        child,
                        focus_position,
                    )
                }
            }
        }
        _ => panic!("Can't make a cursor frame from a leaf json"),
    }
}
/// Mirror of `open_container` for backwards traversal: steps into a
/// container at its *last* child. Returns the frame (`None` when empty),
/// the new focus, and the focus position on it.
///
/// # Panics
///
/// Panics when `json` is a leaf rather than an array or object.
fn open_container_end(json: JV) -> (Option<CursorFrame>, JV, FocusPosition) {
    match json {
        JV::Array(arr) => {
            if arr.is_empty() {
                // Empty array: focus falls back to the opening bracket.
                (None, arr.into(), FocusPosition::Start)
            } else {
                let index = arr.len() - 1;
                let child = arr.get(index).expect("Array should not be empty here");
                let focus_position = FocusPosition::ending(&child);
                (
                    Some(CursorFrame::Array {
                        index: index as usize,
                        json: arr,
                    }),
                    child,
                    focus_position,
                )
            }
        }
        JV::Object(obj) => {
            // `last()` consumes the iterator to reach the final entry; the
            // frame then stores an exhausted iterator (`into_empty_iter`),
            // which is correct because there is nothing after the focus.
            let iterator = Box::new(obj.clone().into_iter());
            match iterator.last() {
                None => (None, obj.into(), FocusPosition::Start),
                Some((key, child)) => {
                    let index = obj.len() as usize - 1;
                    let focus_position = FocusPosition::ending(&child);
                    (
                        Some(CursorFrame::Object {
                            index,
                            json: obj.clone(),
                            key,
                            iterator: obj.into_empty_iter(),
                        }),
                        child,
                        focus_position,
                    )
                }
            }
        }
        _ => panic!("Can't make a cursor frame from a leaf json"),
    }
}
impl CursorFrame {
    /// Index of the focused child within this frame's container.
    pub fn index(&self) -> usize {
        match self {
            CursorFrame::Array { index, .. } => *index as usize,
            CursorFrame::Object { index, .. } => *index as usize,
        }
    }
    /// Moves focus to the next sibling. Returns the replacement frame
    /// (`None` when the container is exhausted), the new focus (the parent
    /// container itself at the end), and the focus position on it.
    fn advance(self) -> (Option<Self>, JV, FocusPosition) {
        use CursorFrame::*;
        match self {
            Array { index, json } => match json.get(index as i32 + 1) {
                // Ran off the end: focus the array's closing bracket.
                None => (None, json.into(), FocusPosition::End),
                Some(child) => {
                    let focus_position = FocusPosition::starting(&child);
                    (
                        Some(Array {
                            index: index + 1,
                            json,
                        }),
                        child,
                        focus_position,
                    )
                }
            },
            Object {
                index,
                json,
                mut iterator,
                ..
            } => match iterator.next() {
                // Iterator exhausted: focus the object's closing bracket.
                None => (None, json.into(), FocusPosition::End),
                Some((key, child)) => {
                    let focus_position = FocusPosition::starting(&child);
                    (
                        Some(Object {
                            index: index + 1,
                            key,
                            json,
                            iterator,
                        }),
                        child,
                        focus_position,
                    )
                }
            },
        }
    }
    /// Moves focus to the previous sibling. Returns the replacement frame
    /// (`None` when stepping before the first child), the new focus, and
    /// the focus position on it.
    fn regress(self) -> (Option<Self>, JV, FocusPosition) {
        use CursorFrame::*;
        match self {
            Array { index, json } => match index.checked_sub(1) {
                // Before the first child: focus the opening bracket.
                None => (None, json.into(), FocusPosition::Start),
                Some(index) => {
                    let child = json
                        .get(index as i32)
                        .expect("Stepped back and didn't find a child");
                    let focus_position = FocusPosition::ending(&child);
                    (Some(Array { index, json }), child, focus_position)
                }
            },
            Object {
                index,
                json,
                iterator: _,
                ..
            } => match index.checked_sub(1) {
                None => (None, json.into(), FocusPosition::Start),
                Some(index) => {
                    // Object iterators only run forward, so stepping back
                    // rebuilds a fresh iterator and seeks it to `index`.
                    let mut iterator = json.clone().into_iter();
                    let (key, child) = iterator
                        .nth(index)
                        .expect("Stepped back and didn't find a child");
                    let focus_position = FocusPosition::ending(&child);
                    (
                        Some(Object {
                            index,
                            key,
                            json,
                            iterator,
                        }),
                        child,
                        focus_position,
                    )
                }
            },
        }
    }
}
/// A `LeafCursor` paired with a `LineCursor` into the focused value's
/// wrapped rendering — together they address one on-screen line.
#[derive(Debug, Clone)]
pub struct GlobalCursor {
    /// Which JSON value (leaf/bracket) is focused.
    pub value_cursor: LeafCursor,
    /// Which wrapped line of that value's rendering is current.
    pub line_cursor: LineCursor,
}
impl GlobalCursor {
    /// Creates a cursor at the first rendered line of `jsons`, or `None`
    /// when `jsons` is empty.
    pub fn new(jsons: Rc<[JV]>, width: u16, folds: &HashSet<(usize, Vec<usize>)>) -> Option<Self> {
        let cursor = LeafCursor::new(jsons)?;
        let line = cursor.current_line(folds, width);
        let line_cursor = LineCursor::new_at_start(line.render(), width);
        Some(GlobalCursor {
            value_cursor: cursor,
            line_cursor,
        })
    }
    /// Creates a cursor on the last value of `jsons` (positioned at the
    /// start of that value's rendering), or `None` when `jsons` is empty.
    pub fn new_end(
        jsons: Rc<[JV]>,
        width: u16,
        folds: &HashSet<(usize, Vec<usize>)>,
    ) -> Option<Self> {
        let cursor = LeafCursor::new_end(jsons)?;
        let line = cursor.current_line(folds, width);
        let line_cursor = LineCursor::new_at_start(line.render(), width);
        Some(GlobalCursor {
            value_cursor: cursor,
            line_cursor,
        })
    }
    /// The currently addressed wrapped line.
    pub fn current_line(&self) -> UnstyledSpans {
        self.line_cursor
            .current()
            .expect("Global cursor should not be able to have invalid line cursor")
    }
    /// Renders up to `rect.height` lines starting at this cursor, advancing
    /// it as lines are emitted. Lines matching `cursor` are styled as
    /// selected.
    pub fn render_lines(
        &mut self,
        cursor: Option<&LeafCursor>,
        folds: &HashSet<(usize, Vec<usize>)>,
        rect: Rect,
    ) -> Vec<Spans<'static>> {
        let mut lines = Vec::with_capacity(rect.height as usize);
        self.resize_to(rect);
        lines.push(
            self.current_line()
                .to_spans(Some(&self.value_cursor) == cursor),
        );
        while lines.len() < rect.height as usize {
            // Stop early when the cursor runs off the end of the document.
            // (`.is_none()` replaces the original `if let None = …` —
            // clippy::redundant_pattern_matching.)
            if self.advance(folds, rect.width).is_none() {
                break;
            }
            lines.push(
                self.current_line()
                    .to_spans(Some(&self.value_cursor) == cursor),
            );
        }
        lines
    }
    /// Moves one rendered line forward: first within the current value's
    /// wrapped lines, then on to the next value. Returns `None` at the end
    /// of the document (cursor left unchanged).
    pub fn advance(&mut self, folds: &HashSet<(usize, Vec<usize>)>, width: u16) -> Option<()> {
        trace!("Advancing global cursor (width={}): {:#?}", width, self);
        let lc = &mut self.line_cursor;
        lc.move_next();
        if lc.valid() {
            trace!("Advanced global cursor {:#?}", self);
            return Some(());
        } else {
            // Undo the overshoot before falling through to the value cursor.
            lc.move_prev();
        }
        self.value_cursor.advance(folds)?;
        let line = self.value_cursor.current_line(folds, width);
        self.line_cursor = LineCursor::new_at_start(line.render(), width);
        trace!("Advanced global cursor {:#?}", self);
        Some(())
    }
    /// Moves one rendered line backward (mirror of `advance`). Returns
    /// `None` at the start of the document.
    pub fn regress(&mut self, folds: &HashSet<(usize, Vec<usize>)>, width: u16) -> Option<()> {
        let lc = &mut self.line_cursor;
        lc.move_prev();
        if lc.valid() {
            return Some(());
        } else {
            lc.move_next();
        }
        self.value_cursor.regress(folds)?;
        let line = self.value_cursor.current_line(folds, width);
        // Entering the previous value from below: start at its last line.
        self.line_cursor = LineCursor::new_at_end(line.render(), width);
        Some(())
    }
    /// Dehydrates the cursor into a hashable/orderable address.
    pub fn to_path(&self) -> GlobalPath {
        let current_line = self
            .line_cursor
            .current_line()
            .expect("GlobalCursor should not have invalid LineCursor")
        ;
        GlobalPath {
            value_path: self.value_cursor.to_path(),
            current_line,
        }
    }
    /// Re-wraps the current line to a new viewport width.
    pub fn resize_to(&mut self, rect: Rect) {
        self.line_cursor.set_width(rect.width);
    }
    /// True when positioned on the last wrapped line of the current value.
    pub fn at_line_end(&self) -> bool {
        self.line_cursor
            .at_end()
            .expect("GlobalCursor should not contain invalid LineCursor")
    }
}
/// Cursor over the sequence of renderable JSON positions: one per leaf
/// value, opening bracket, or closing bracket.
#[derive(PartialEq, Eq, Debug, Clone)]
pub struct LeafCursor {
    // Top level jsons of the view
    pub jsons: Rc<[JV]>,
    // Index locating the json this cursor is focused (somewhere) on
    pub top_index: usize,
    // Stores the ancestors of the current focus, the index of their focused child, and an iterator
    // that will continue right after that child.
    pub frames: Vec<CursorFrame>,
    // Currently focused json value
    pub focus: JV,
    // If the json is an array or object, indicates whether the currently focused line is the
    // opening or closing bracket.
    pub focus_position: FocusPosition,
}
impl LeafCursor {
    /// Creates a cursor focused on the start of the first top-level json;
    /// `None` when `jsons` is empty.
    pub fn new(jsons: Rc<[JV]>) -> Option<Self> {
        let focus = jsons.first()?.clone();
        let focus_position = FocusPosition::starting(&focus);
        Some(LeafCursor {
            jsons,
            top_index: 0,
            frames: Vec::new(),
            focus,
            focus_position,
        })
    }
    /// Creates a cursor focused on the end of the last top-level json;
    /// `None` when `jsons` is empty.
    pub fn new_end(jsons: Rc<[JV]>) -> Option<Self> {
        // Take the focus first: `last()?` bails out on an empty slice
        // before `len() - 1` can underflow and panic. (Previously the
        // subtraction ran first, panicking on empty input instead of
        // returning None like `new` does.)
        let focus = jsons.last()?.clone();
        let top_index = jsons.len() - 1;
        let focus_position = FocusPosition::ending(&focus);
        Some(LeafCursor {
            jsons,
            top_index,
            frames: Vec::new(),
            focus,
            focus_position,
        })
    }
    /// Dehydrates the cursor into a hashable path (for folds and search).
    pub fn to_path(&self) -> ValuePath {
        ValuePath {
            top_index: self.top_index,
            frames: self.frames.iter().map(CursorFrame::index).collect(),
            focus_position: self.focus_position,
        }
    }
    /// Rehydrates a cursor from a path produced by `to_path`.
    ///
    /// # Panics
    ///
    /// Panics when `path` does not match the shape of `jsons`.
    pub fn from_path(jsons: Rc<[JV]>, path: &ValuePath) -> Self {
        let mut focus = jsons[path.top_index].clone();
        let mut frames = Vec::new();
        for &index in path.frames.iter() {
            match focus {
                JV::Array(arr) => {
                    let json = arr.clone();
                    focus = arr
                        .get(index as i32)
                        .expect("Shape of path does not match shape of jsons");
                    frames.push(CursorFrame::Array { index, json });
                }
                JV::Object(obj) => {
                    let json = obj.clone();
                    let mut iterator = obj.clone().into_iter();
                    let (key, new_focus) = iterator
                        .nth(index)
                        .expect("Shape of path does not match shape of jsons");
                    focus = new_focus;
                    frames.push(CursorFrame::Object {
                        index,
                        json,
                        key,
                        iterator,
                    });
                }
                _ => panic!("Shape of path does not match shape of jsons"),
            }
        }
        LeafCursor {
            jsons,
            top_index: path.top_index,
            frames,
            focus,
            focus_position: path.focus_position,
        }
    }
    /// Object key of the focused value, when the parent is an object and
    /// the focus is not a closing bracket.
    pub fn current_key(&self) -> Option<JVString> {
        match self.focus_position {
            FocusPosition::End => None,
            _ => match self.frames.last() {
                None => None,
                Some(CursorFrame::Array { .. }) => None,
                Some(CursorFrame::Object { key, .. }) => Some(key.clone()),
            },
        }
    }
    /// Indentation (2 spaces per nesting level), clamped to the viewport.
    pub fn current_indent(&self, width: u16) -> u16 {
        let desired_indent = (self.frames.len() * 2) as u16;
        // NOTE(review): `width - 7` underflows for width < 7 — callers
        // appear to pass full viewport widths; confirm.
        std::cmp::min(desired_indent, width - 7)
    }
    /// Builds the renderable `Leaf` for the current position, taking folds
    /// into account.
    pub fn current_line<'a>(&'a self, folds: &HashSet<(usize, Vec<usize>)>, width: u16) -> Leaf {
        use FocusPosition::*;
        let folded = folds.contains(&self.to_path().strip_position());
        let content = match (&self.focus, self.focus_position, folded) {
            (JV::Object(_), Start, false) => LeafContent::ObjectStart,
            (JV::Object(_), End, false) => LeafContent::ObjectEnd,
            (JV::Object(obj), Start, true) => LeafContent::FoldedObject(obj.len() as usize),
            (JV::Array(_), Start, false) => LeafContent::ArrayStart,
            (JV::Array(_), End, false) => LeafContent::ArrayEnd,
            (JV::Array(arr), Start, true) => LeafContent::FoldedArray(arr.len() as usize),
            (JV::Null(_), Value, _) => LeafContent::Null,
            (JV::Bool(b), Value, _) => LeafContent::Bool(b.value()),
            (JV::Number(x), Value, _) => LeafContent::Number(x.value()),
            (JV::String(s), Value, _) => LeafContent::String(s.clone()),
            triple => panic!("Illegal json/focus_position/folded triple: {:?}", triple),
        };
        let key = self.current_key();
        // Trailing comma: needed unless this is the last child (or an
        // opening bracket / top-level value).
        let comma = match self.focus_position {
            FocusPosition::Start => false,
            _ => match self.frames.last() {
                None => false,
                Some(CursorFrame::Array { json, index, .. }) => *index != json.len() as usize - 1,
                Some(CursorFrame::Object { iterator, .. }) => iterator.len() != 0,
            },
        };
        let indent = self.current_indent(width);
        Leaf {
            content,
            key,
            comma,
            indent,
        }
    }
    /// Steps to the next renderable position; `None` past the last json.
    pub fn advance(&mut self, folds: &HashSet<(usize, Vec<usize>)>) -> Option<()> {
        // This gets pretty deep into nested match statements, so an english guide to what's going
        // on here.
        // Cases:
        // * We're focused on an open bracket. Push a new frame and start in on the contents of the
        // container. (open_container)
        // * We're focused on a leaf...
        //   * and we have no parent, so advance the very top level, or roll off the end.
        //   * and we have a parent... (Frame::advance)
        //     * and there are more leaves, so focus on the next leaf.
        //     * and there are no more leaves, so pop the frame, focus on the parent's close bracket
        // * We're focused on a close bracket. Advance the parent as if we were focused on a leaf.
        let is_folded = folds.contains(&self.to_path().strip_position());
        match self.focus_position {
            // Folded containers are treated as a single line, so don't open.
            FocusPosition::Start if !is_folded => {
                let (new_frame, new_focus, new_focus_position) = open_container(self.focus.clone());
                if let Some(new_frame) = new_frame {
                    self.frames.push(new_frame);
                }
                self.focus = new_focus;
                self.focus_position = new_focus_position;
            }
            _ => match self.frames.pop() {
                None => {
                    self.focus = self.jsons.get(self.top_index + 1)?.clone();
                    self.top_index += 1;
                    self.focus_position = FocusPosition::starting(&self.focus);
                }
                Some(frame) => {
                    let (new_frame, new_focus, new_focus_position) = frame.advance();
                    if let Some(new_frame) = new_frame {
                        self.frames.push(new_frame);
                    }
                    self.focus = new_focus;
                    self.focus_position = new_focus_position;
                }
            },
        }
        Some(())
    }
    /// Steps to the previous renderable position; `None` before the first.
    pub fn regress(&mut self, folds: &HashSet<(usize, Vec<usize>)>) -> Option<()> {
        // Pretty mechanical opposite of advance
        match self.focus_position {
            FocusPosition::End => {
                let (new_frame, new_focus, new_focus_position) =
                    open_container_end(self.focus.clone());
                if let Some(new_frame) = new_frame {
                    self.frames.push(new_frame);
                }
                self.focus = new_focus;
                self.focus_position = new_focus_position;
            }
            FocusPosition::Value | FocusPosition::Start => match self.frames.pop() {
                None => {
                    self.top_index = self.top_index.checked_sub(1)?;
                    self.focus = self.jsons[self.top_index].clone();
                    self.focus_position = FocusPosition::ending(&self.focus);
                }
                Some(frame) => {
                    let (new_frame, new_focus, new_focus_position) = frame.regress();
                    if let Some(new_frame) = new_frame {
                        self.frames.push(new_frame);
                    }
                    self.focus = new_focus;
                    self.focus_position = new_focus_position;
                }
            },
        }
        // Landing on a folded container collapses the focus to its single
        // rendered (Start) line.
        let is_folded = folds.contains(&self.to_path().strip_position());
        if is_folded {
            self.focus_position = FocusPosition::Start;
        }
        Some(())
    }
    /// Text of the focused leaf for regex matching; `None` for containers.
    fn leaf_to_string(&self) -> Option<Cow<str>> {
        match &self.focus {
            JV::Null(_) => Some("null".into()),
            JV::Bool(b) => Some(b.value().to_string().into()),
            JV::Number(x) => Some(x.value().to_string().into()),
            JV::String(s) => Some(s.value().into()),
            _ => None,
        }
    }
    // TODO: do something more efficient
    pub fn matches_path(&self, path: &ValuePath) -> bool {
        self.to_path() == *path
    }
    /// True when `re` matches the focused leaf's text or its object key.
    pub fn regex_matches(&self, re: &Regex) -> bool {
        if let Some(leaf) = self.leaf_to_string() {
            if re.is_match(&leaf) {
                return true;
            }
        }
        if let Some(CursorFrame::Object { key, .. }) = self.frames.last() {
            if re.is_match(key.value()) {
                return true;
            }
        }
        false
    }
    /// Searches forward (wrapping around once) for the next match of `re`.
    /// Ignores folds so matches inside folded containers are found.
    pub fn search(mut self, re: &Regex) -> Option<Self> {
        let mock_folds = HashSet::new();
        let start = self.to_path();
        while let Some(()) = self.advance(&mock_folds) {
            if self.regex_matches(re) {
                return Some(self);
            }
        }
        // Hit the end: wrap to the beginning and scan up to the start.
        let mut cursor = LeafCursor::new(self.jsons).expect("Jsons can't be empty here");
        while !cursor.matches_path(&start) {
            if cursor.regex_matches(re) {
                return Some(cursor);
            }
            cursor
                .advance(&mock_folds)
                .expect("Shouldn't hit end again before hitting initial position");
        }
        None
    }
    /// Searches backward (wrapping around once) for the previous match.
    pub fn search_back(mut self, re: &Regex) -> Option<Self> {
        let mock_folds = HashSet::new();
        let start = self.to_path();
        while let Some(()) = self.regress(&mock_folds) {
            if self.regex_matches(re) {
                return Some(self);
            }
        }
        // Hit the start: wrap to the end and scan back to the start point.
        let mut cursor = LeafCursor::new_end(self.jsons).expect("Jsons can't be empty here");
        while !cursor.matches_path(&start) {
            if cursor.regex_matches(re) {
                return Some(cursor);
            }
            cursor
                .regress(&mock_folds)
                .expect("Shouldn't hit start again before hitting initial position");
        }
        None
    }
    /// True when `self` focuses `other`'s value or anything nested inside
    /// it (frame-prefix comparison).
    pub fn descends_from_or_matches(&self, other: &Self) -> bool {
        if self.top_index != other.top_index {
            return false;
        }
        if self.frames.len() < other.frames.len() {
            return false;
        }
        self.frames
            .iter()
            .zip(other.frames.iter())
            .all(|(self_frame, other_frame)| self_frame == other_frame)
    }
}
/// Hashable, dehydrated address of a `LeafCursor` position: which top-level
/// json, the child index at each nesting level, and which line (start /
/// value / end) of the addressed value.
#[derive(PartialEq, Eq, Hash, Debug, Clone)]
pub struct ValuePath {
    top_index: usize,
    frames: Vec<usize>,
    focus_position: FocusPosition,
}
impl ValuePath {
pub fn strip_position(self) -> (usize, Vec<usize>) {
let ValuePath {
top_index,
frames,
focus_position: _,
} = self;
(top_index, frames)
}
}
impl PartialOrd for ValuePath {
    // Canonical delegation to the total order defined by `Ord` below.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl Ord for ValuePath {
    /// Document order: top-level index first, then lexicographic frame
    /// indices; a shorter path sorts before/after a longer one depending on
    /// whether it sits on the container's opening or closing bracket.
    ///
    /// # Panics
    ///
    /// Panics when one path ends at a leaf (`Value`) while the other keeps
    /// descending — the two paths index differently-shaped jsons.
    fn cmp(&self, other: &Self) -> Ordering {
        match self.top_index.cmp(&other.top_index) {
            Ordering::Equal => {}
            ordering => return ordering,
        }
        let mut self_frames = self.frames.iter();
        let mut other_frames = other.frames.iter();
        loop {
            match (self_frames.next(), other_frames.next()) {
                (Some(self_frame), Some(other_frame)) => match self_frame.cmp(other_frame) {
                    Ordering::Equal => {}
                    ordering => return ordering,
                },
                // `self` is an ancestor of `other`: its opening bracket
                // precedes everything inside, its closing bracket follows.
                (None, Some(_)) => match self.focus_position {
                    FocusPosition::Start => return Ordering::Less,
                    FocusPosition::Value => {
                        panic!("Cannot compare paths that index different jsons")
                    }
                    FocusPosition::End => return Ordering::Greater,
                },
                (Some(_), None) => match other.focus_position {
                    FocusPosition::Start => return Ordering::Greater,
                    FocusPosition::Value => {
                        panic!("Cannot compare paths that index different jsons")
                    }
                    FocusPosition::End => return Ordering::Less,
                },
                // Same value: order by line (Start < Value < End).
                (None, None) => return self.focus_position.cmp(&other.focus_position),
            }
        }
    }
}
/// Stable address of a single rendered line: the value's path plus the
/// index of the wrapped line within that value's rendering.
#[derive(PartialEq, Eq, Debug, Clone, PartialOrd, Ord)]
pub struct GlobalPath {
    pub value_path: ValuePath,
    pub current_line: usize,
}
#[cfg(test)]
mod tests {
use super::{GlobalCursor, LeafCursor};
use crate::{
jq::jv::JV,
lines::LineCursor,
testing::{arb_json, json_to_lines},
};
use pretty_assertions::assert_eq;
use proptest::proptest;
use serde_json::{json, Value};
use std::{collections::HashSet, rc::Rc};
fn check_advancing_terminates(jsons: Vec<Value>) {
let jsons: Vec<JV> = jsons.iter().map(|v| v.into()).collect();
let folds = HashSet::new();
if let Some(mut cursor) = LeafCursor::new(jsons.into()) {
let mut last_path = cursor.to_path();
while let Some(()) = cursor.advance(&folds) {
let path = cursor.to_path();
assert_ne!(last_path, path);
last_path = path;
}
}
}
#[test]
fn unit_advancing_terminates() {
check_advancing_terminates(vec![json![{}]]);
}
fn check_lines(values: Vec<Value>) {
let jsons: Vec<JV> = values.iter().map(|v| v.into()).collect();
let folds = HashSet::new();
let width = u16::MAX;
let mut expected_lines = json_to_lines(values.iter()).into_iter();
if let Some(mut cursor) = GlobalCursor::new(jsons.into(), width, &folds) {
let mut actual_lines = Vec::new();
actual_lines.push(cursor.current_line());
let expected_line = expected_lines
.next()
.expect("Expected lines shorter than actual lines");
let expected = LineCursor::new_at_start(expected_line.render(), width)
.current()
.unwrap();
assert_eq!(cursor.current_line(), expected);
while let Some(()) = cursor.advance(&folds, width) {
let expected_line = expected_lines
.next()
.expect("Expected lines shorter than actual lines");
let expected = LineCursor::new_at_start(expected_line.render(), width)
.current()
.unwrap();
assert_eq!(cursor.current_line(), expected);
}
}
assert!(expected_lines.next().is_none());
}
proptest! {
#[test]
fn prop_lines(values in proptest::collection::vec(arb_json(), 1..10)) {
check_lines(values);
}
}
#[test]
fn unit_lines() {
check_lines(vec![json!([{ "": null }])]);
}
    /// Assert that converting a cursor to a path and back reproduces it.
    fn check_path_roundtrip_inner(cursor: &LeafCursor, jsons: Rc<[JV]>) {
        let path = cursor.to_path();
        let new_cursor = LeafCursor::from_path(jsons, &path);
        assert_eq!(*cursor, new_cursor);
    }
    /// Visit every cursor position over `values` and check the path
    /// round-trip property at each one.
    fn check_path_roundtrip(values: Vec<serde_json::Value>) {
        let jsons: Vec<JV> = values.iter().map(|v| v.into()).collect();
        let jsons: Rc<[JV]> = jsons.into();
        let folds = HashSet::new();
        if let Some(mut cursor) = LeafCursor::new(jsons.clone()) {
            check_path_roundtrip_inner(&cursor, jsons.clone());
            while let Some(()) = cursor.advance(&folds) {
                check_path_roundtrip_inner(&cursor, jsons.clone());
            }
        }
    }
#[test]
fn unit_path_roundtrip() {
check_path_roundtrip(vec![json!([{ "": null }])])
}
    proptest! {
        // Property: cursor -> path -> cursor is the identity for arbitrary
        // small JSON documents.
        #[test]
        fn prop_path_roundtrip(values in proptest::collection::vec(arb_json(), 1..10)) {
            check_path_roundtrip(values)
        }
    }
    /// Assert that `advance` followed by `regress` returns the cursor to
    /// the same path (only when an advance is possible at all).
    fn check_advance_regress(
        cursor: &GlobalCursor,
        folds: &HashSet<(usize, Vec<usize>)>,
        width: u16,
    ) {
        // Work on a copy so the caller's cursor is untouched.
        let mut actual = cursor.clone();
        if actual.advance(folds, width).is_none() {
            return;
        }
        actual.regress(folds, width).unwrap();
        assert_eq!(actual.to_path(), cursor.to_path());
    }
    /// Project a cursor onto a hashable (path, line) pair, used by the
    /// loop-detection tests to recognize revisited states.
    fn hashable_cursor_key(cursor: &GlobalCursor) -> impl std::hash::Hash + Eq {
        (
            cursor.value_cursor.to_path(),
            cursor.line_cursor.current_line(),
        )
    }
proptest! {
fn prop_advance_regress(values in proptest::collection::vec(arb_json(), 1..10), width in 8u16..250) {
let jsons : Vec<JV> = values.iter().map(|v| v.into()).collect();
let jsons : Rc<[JV]> = jsons.into();
let folds = HashSet::new();
let mut seen = HashSet::new();
if let Some(mut cursor) = GlobalCursor::new(jsons.clone(), width, &folds) {
check_advance_regress(&cursor, &folds, width);
while let Some(()) = cursor.advance(&folds, width) {
let key = hashable_cursor_key(&cursor);
if seen.contains(&key) {
panic!("Infinite loop");
}
seen.insert(key);
check_advance_regress(&cursor, &folds, width);
}
}
}
}
#[test]
fn unit_advance_regress() {
let tests = vec![
(vec![json!([""])], 50),
(vec![json!("aaa\u{e000}¡")], 8),
(vec![json!([[{"\u{20f1}¡¡a": "\u{b}"}]])], 16),
(vec![json!([[{"\u{20f1}¡¡a": "\u{b}"}]])], 16),
(
vec![json!([[{"¡¡": "\u{0}\u{0}\u{7f}\u{3fffe}®\u{e000}A0\u{3fffe}𠀀\""}]])],
8,
),
];
for (values, width) in tests {
let jsons: Vec<JV> = values.iter().map(|v| v.into()).collect();
let jsons: Rc<[JV]> = jsons.into();
let folds = HashSet::new();
let mut seen = HashSet::new();
if let Some(mut cursor) = GlobalCursor::new(jsons.clone(), width, &folds) {
check_advance_regress(&cursor, &folds, width);
while let Some(()) = cursor.advance(&folds, width) {
let key = hashable_cursor_key(&cursor);
if seen.contains(&key) {
panic!("Infinite loop");
}
seen.insert(key);
check_advance_regress(&cursor, &folds, width);
}
}
}
}
    proptest! {
        // Property: `advance` strictly increases the cursor's path under
        // the derived `Ord` for paths.
        #[test]
        fn prop_path_ordering(values in proptest::collection::vec(arb_json(), 1..10)) {
            let jsons: Vec<JV> = values.iter().map(|v| v.into()).collect();
            let jsons: Rc<[JV]> = jsons.into();
            let folds = HashSet::new();
            if let Some(mut cursor) = LeafCursor::new(jsons) {
                let mut prior_path = cursor.to_path();
                while let Some(()) = cursor.advance(&folds) {
                    let new_path = cursor.to_path();
                    assert!(new_path > prior_path, "Expected {:?} > {:?}", &new_path, &prior_path);
                    prior_path = new_path;
                }
            }
        }
    }
}
| true
|
4c73614ad59c9efd619ba24e2fc44781e89553e1
|
Rust
|
tommyshem/learn-to-code
|
/learn-rustlang/coding-exercises/rust-examples/command_line/environment_variables/src/main.rs
|
UTF-8
| 258
| 3.421875
| 3
|
[] |
no_license
|
use std::env;
/// Print the value of the `HOME` environment variable, or a diagnostic if
/// it cannot be read.
fn main() {
    let key = "HOME";
    // `env::var` errors when the variable is unset or not valid Unicode.
    let report = match env::var(key) {
        Ok(val) => format!("Key = {} : value ={:?}", key, val),
        Err(e) => format!("couldn't interpret {}: {}", key, e),
    };
    println!("{}", report);
}
| true
|
4ac80ccaff601ebd41ebf24ae6e92c3eb27d51a0
|
Rust
|
vigna/sux-rs
|
/src/word_array.rs
|
UTF-8
| 4,621
| 3.1875
| 3
|
[
"Apache-2.0",
"LGPL-2.1-only"
] |
permissive
|
use crate::prelude::*;
use anyhow::{Context, Result};
use common_traits::*;
use log::info;
use mmap_rs::{Mmap, MmapMut};
use std::path::Path;
/// A simple wrapper around a slice of bytes interpreted as native-endianess words
/// with utility methods for mmapping.
pub struct WordArray<W: Word, B: AsRef<[u8]>> {
    // Backing byte storage (typically a memory map), reinterpreted as `W`s.
    data: B,
    // Zero-sized marker tying the word type `W` to this array.
    _marker: core::marker::PhantomData<W>,
}
impl<W: Word, B: AsRef<[u8]>> WordArray<W, B> {
    /// Returns the word at `index`, panicking when it is out of bounds.
    #[inline(always)]
    pub fn get(&self, index: usize) -> W {
        if index >= self.len() {
            panic!("Index out of bounds: {} >= {}", index, self.len())
        } else {
            // SAFETY: the bounds check above guarantees `index < self.len()`.
            unsafe { self.get_unchecked(index) }
        }
    }

    /// Returns the word at `index` without bounds checking.
    ///
    /// # Safety
    ///
    /// `index` must be between 0 (included) and `len()` (excluded)
    #[inline(always)]
    pub unsafe fn get_unchecked(&self, index: usize) -> W {
        debug_assert!(index < self.len());
        // NOTE(review): this assumes the backing buffer is suitably aligned
        // for `W` (true for page-aligned mmaps) — confirm for other `B`.
        *(self.data.as_ref().as_ptr() as *const W).add(index)
    }

    /// Number of complete `W` words in the backing buffer.
    #[inline(always)]
    pub fn len(&self) -> usize {
        self.data.as_ref().len() / W::BYTES
    }

    /// Returns true when the array contains no complete word.
    #[inline(always)]
    pub fn is_empty(&self) -> bool {
        // Fixed: previously this checked the *byte* buffer, which disagreed
        // with `len()` when fewer than `W::BYTES` trailing bytes were present
        // (len() == 0 but is_empty() == false).
        self.len() == 0
    }
}
impl<W: Word, B: AsRef<[u8]> + AsMut<[u8]>> WordArray<W, B> {
    /// Writes `value` at `index`, panicking when it is out of bounds.
    #[inline(always)]
    pub fn set(&mut self, index: usize, value: W) {
        if index >= self.len() {
            panic!("Index out of bounds: {} >= {}", index, self.len())
        } else {
            // SAFETY: the bounds check above guarantees `index < self.len()`.
            unsafe { self.set_unchecked(index, value) }
        }
    }

    /// Writes `value` at `index` without bounds checking.
    ///
    /// # Safety
    ///
    /// `index` must be between 0 (included) and `len()` (excluded)
    #[inline(always)]
    pub unsafe fn set_unchecked(&mut self, index: usize, value: W) {
        debug_assert!(index < self.len());
        // Fixed: write through a pointer derived from `as_mut()`. The old
        // code cast the shared `as_ref()` pointer to `*mut`, which is
        // unsound (writing through a pointer derived from a `&` reference).
        *(self.data.as_mut().as_mut_ptr() as *mut W).add(index) = value;
    }
}
impl<W: Word> WordArray<W, Mmap> {
    /// Load a `.order` file
    ///
    /// Memory-maps `path` read-only and interprets its bytes as `W` words.
    pub fn load<P: AsRef<std::path::Path>>(path: P) -> Result<Self> {
        let path = path.as_ref();
        let file_len = path.metadata()?.len();
        let file = std::fs::File::open(path)?;
        // Map the whole file; transparent huge pages are requested as a hint.
        let data = unsafe {
            mmap_rs::MmapOptions::new(file_len as _)?
                .with_flags((crate::prelude::Flags::TRANSPARENT_HUGE_PAGES).mmap_flags())
                .with_file(file, 0)
                .map()?
        };
        // Advise the kernel that access will be random (per-word lookups).
        #[cfg(target_os = "linux")]
        unsafe {
            libc::madvise(data.as_ptr() as *mut _, data.len(), libc::MADV_RANDOM)
        };
        Ok(Self {
            data,
            _marker: Default::default(),
        })
    }
}
impl<W: Word> WordArray<W, MmapMut> {
    /// Create a new `.order` file sized for `num_nodes` words of type `W`
    /// and map it read-write.
    pub fn new_file<P: AsRef<Path>>(path: P, num_nodes: u64) -> Result<Self> {
        let path = path.as_ref();
        // Compute the size of the file we are creating in bytes.
        // Fixed: size by the actual word type `W` (was hard-coded to u64,
        // which over/under-sized the file for any other `W`).
        let file_len = num_nodes * W::BYTES as u64;
        info!(
            "The file {} will be {} bytes long.",
            path.to_string_lossy(),
            file_len
        );
        // Create the file with read+write access (required for `map_mut`).
        let file = std::fs::File::options()
            .read(true)
            .write(true)
            .create(true)
            .open(path)
            .with_context(|| {
                format!("While creating the .order file: {}", path.to_string_lossy())
            })?;
        // Fallocate the file with zeros so we can fill it without ever resizing it.
        file.set_len(file_len)
            .with_context(|| "While fallocating the file with zeros")?;
        // Create a mutable mmap to the file so we can directly write it in place.
        let mmap = unsafe {
            mmap_rs::MmapOptions::new(file_len as _)?
                .with_file(file, 0)
                .map_mut()
                .with_context(|| "While mmapping the file")?
        };
        Ok(Self {
            data: mmap,
            _marker: Default::default(),
        })
    }

    /// Load a mutable `.order` file
    pub fn load_mut<P: AsRef<Path>>(path: P) -> Result<Self> {
        let path = path.as_ref();
        let file_len = path.metadata()?.len();
        // Fixed: the file must be opened with write access for a mutable
        // (shared, writable) mapping; `File::open` is read-only and would
        // make `map_mut` fail with a permission error.
        let file = std::fs::File::options().read(true).write(true).open(path)?;
        let data = unsafe {
            mmap_rs::MmapOptions::new(file_len as _)?
                .with_flags((crate::prelude::Flags::TRANSPARENT_HUGE_PAGES).mmap_flags())
                .with_file(file, 0)
                .map_mut()?
        };
        // Advise the kernel that access will be random (per-word lookups).
        #[cfg(target_os = "linux")]
        unsafe {
            libc::madvise(data.as_ptr() as *mut _, data.len(), libc::MADV_RANDOM)
        };
        Ok(Self {
            data,
            _marker: Default::default(),
        })
    }
}
| true
|
72ab4038a770042f43c8576db2341fd698354d71
|
Rust
|
sugyan/atcoder
|
/abc138/src/bin/d.rs
|
UTF-8
| 841
| 2.65625
| 3
|
[] |
no_license
|
use proconio::{fastout, input};
use std::collections::VecDeque;
/// ABC138 D: apply `q` additive operations to subtrees of a tree rooted at
/// vertex 1 and print the final value of every vertex.
#[fastout]
fn main() {
    input! {
        n: usize, q: usize,
        ab: [(usize, usize); n - 1],
        pq: [(usize, u32); q],
    }
    // Adjacency list of the tree (input vertices are 1-indexed).
    let mut adj = vec![Vec::new(); n];
    for &(a, b) in &ab {
        adj[a - 1].push(b - 1);
        adj[b - 1].push(a - 1);
    }
    // Seed each operation's increment at its subtree root.
    let mut values = vec![0u32; n];
    for &(p, x) in &pq {
        values[p - 1] += x;
    }
    // Propagate increments down from the root with a BFS, remembering each
    // node's parent so we never walk back up the tree.
    let mut queue = VecDeque::from(vec![(0usize, None)]);
    while let Some((node, parent)) = queue.pop_front() {
        for &next in &adj[node] {
            if parent != Some(next) {
                values[next] += values[node];
                queue.push_back((next, Some(node)));
            }
        }
    }
    let answers: Vec<String> = values.iter().map(|v| v.to_string()).collect();
    println!("{}", answers.join(" "));
}
| true
|
00f5973094b306fb962bfe584ecb16ba1998ba1e
|
Rust
|
kvsari/dicey-dice
|
/src/memorize.rs
|
UTF-8
| 2,048
| 3.625
| 4
|
[
"MIT"
] |
permissive
|
//! Memorization. Not quite memoization because the memory must be passed in.
use std::collections::HashMap;
use std::hash::Hash;
/// Look up `request` in `memory`, computing the value with `calculate` and
/// caching it on a miss. "Memorization" rather than memoization: the cache
/// is supplied by the caller instead of being owned here.
pub fn recall<T, R, F>(
    request: T, memory: &mut HashMap<T, R>, calculate: F
) -> R
    where T: Clone + PartialEq + Eq + Hash,
          R: Clone,
          F: Fn(T) -> R,
{
    match memory.get(&request) {
        // Cache hit: hand back a clone of the stored result.
        Some(cached) => cached.clone(),
        // Cache miss: compute, store, and return the fresh result.
        None => {
            let fresh = (calculate)(request.clone());
            memory.insert(request, fresh.clone());
            fresh
        }
    }
}
#[cfg(test)]
mod test {
    use super::*;

    /// Deterministic reference function whose results are memorized below.
    fn calculate(param1: u64, param2: u32) -> u128 {
        let param1 = param1 as u128;
        let param2 = param2 as u128;
        param1 * param1 - param2
    }

    /// Repeated recalls return cached results, and the memory only grows
    /// when a genuinely new request arrives.
    #[test]
    fn memorizes() {
        let mut memory = HashMap::new();
        assert_eq!(memory.len(), 0);
        // One recall + length check per step; `assert_eq!` (instead of
        // `assert!(a == b)`) gives useful output on failure, and the
        // closure removes eight near-identical assertion pairs.
        let mut check = |p1: u64, p2: u32, expected_len: usize| {
            assert_eq!(
                calculate(p1, p2),
                recall((p1, p2), &mut memory, |(a, b)| calculate(a, b))
            );
            assert_eq!(memory.len(), expected_len);
        };
        check(19, 2, 1);
        check(19, 2, 1);
        check(19, 2, 1);
        check(100, 101, 2);
        check(19, 2, 2);
        check(77, 3, 3);
        check(100, 101, 3);
        check(19, 2, 3);
    }
}
| true
|
e4d03e52d1944576ef65562b854fc539cebb9b4e
|
Rust
|
dobrite/game-rs
|
/src/chunk.rs
|
UTF-8
| 3,226
| 2.875
| 3
|
[] |
no_license
|
use std::collections::HashMap;
use std::cell::Cell;
use renderer::{CubeVertex, VertexBuffer};
use cube::create_cube;
// Edge length (in blocks) of a cubic chunk; currently 1.
pub const CHUNK_SIZE: uint = 1;
// Maximum world height in blocks (not referenced elsewhere in this file).
pub const WORLD_HEIGHT: u16 = 256;

/// Kind of a voxel block.
pub enum BlockType {
    Empty,
    Grass,
    Dirt,
}

/// A single voxel; currently only carries its type.
pub struct Block {
    block_type: BlockType,
}
/// A cubic section of the world plus its cached vertex buffer.
pub struct Chunk {
    // CHUNK_SIZE^3 blocks; not yet consulted when building geometry
    // (`fill_buffer` emits a cube for every cell).
    pub blocks: [[[Block, ..CHUNK_SIZE], ..CHUNK_SIZE], ..CHUNK_SIZE],
    // Lazily-created vertex buffer; `Cell` lets it be replaced via `&self`.
    pub buffer: Cell<Option<VertexBuffer>>,
    //fn create_mesh // iterate through blocks calling create_cube
    //fn create_cube //create vertexes
    //fn render translate position and call render_mesh
}

impl Clone for Chunk {
    // NOTE(review): `*self` only compiles because this pre-1.0 Rust treats
    // these fixed-size arrays (and `Cell`) as implicitly copyable.
    fn clone(&self) -> Chunk {
        *self
    }
}
impl Chunk {
    /// Append one cube per block position to `vbuffer`, translated by the
    /// chunk coordinates `(cx, cz, cy)` measured in chunk units.
    ///
    /// NOTE(review): emits a cube for every cell regardless of block type —
    /// presumably a placeholder until `BlockType::Empty` cells are skipped.
    pub fn fill_buffer(&self, cx: i32, cz: i32, cy: i32, vbuffer: &mut Vec<CubeVertex>) {
        for y in range(0u, CHUNK_SIZE) {
            for z in range(0u, CHUNK_SIZE) {
                for x in range(0u, CHUNK_SIZE) {
                    create_cube(
                        (cx as f32 * CHUNK_SIZE as f32) + x as f32,
                        (cz as f32 * CHUNK_SIZE as f32) + z as f32,
                        (cy as f32 * CHUNK_SIZE as f32) + y as f32,
                        vbuffer,
                    );
                }
            }
        }
    }
}
/// A vertical stack of chunks (declared but not yet used in this file).
pub struct ChunkColumn {
    pub chunks: Vec<Chunk>
}

/// Owns all loaded chunks, keyed by their (x, z, y) chunk coordinates
/// (the key order used by `create_chunk`).
pub struct ChunkManager {
    chunks: HashMap<(i32, i32, i32), Chunk>,
}

//pub struct Buffer {
//    buffer: gfx::BufferHandle<CubeVertex>,
//    batch: render::batch::RefBatch<_ShaderParamLink, ShaderParam>
//}
impl ChunkManager {
    /// Create an empty manager with no chunks loaded.
    pub fn new() -> ChunkManager {
        ChunkManager {
            chunks: HashMap::new(),
        }
    }
    /*
     * pub fun update(f32 dt, vec3 camera_position, vec3 camera_view) {
     *     update_async_chunker() ?
     *     update_load_list()
     *     update_setup_list()
     *     update_rebuild_list()
     *     update_flags_list
     *     update_unload_list
     *     update_visibility_list(camera_position)
     *     if(last_camera_position != camera_position || last_camera_view != camera_view) {
     *         update_render_list()
     *     }
     *
     *     last_camera_position = camera_position
     *     last_camera_view = camera_view
     * }
     */
    /*
     * pub fn update_load_list() {
     *     num_chunks_loaded = 0
     *     iterate over all chunks calling load if not loaded
     *     break early when num_chunks_loaded limit is reached
     *     clear update_load_list each frame (reupdated in update_visible_list)
     * }
     */
    /*
     * pub fn update_setup_list() {
     *     iterate over setup_list calling setup on any chunk loaded and not setup
     *     clear list each frame (reupdated in update_visiblity_list)
     * }
     */
    /// Insert a fresh, dirt-filled chunk at chunk coordinates (cx, cz, cy),
    /// replacing any chunk previously stored under that key.
    pub fn create_chunk(&mut self, cx: i32, cz: i32, cy: i32) {
        self.chunks.insert((cx, cz, cy), Chunk {
            blocks: [[[Block { block_type: Dirt }, ..CHUNK_SIZE as uint], ..CHUNK_SIZE as uint], ..CHUNK_SIZE as uint],
            buffer: Cell::new(None),
        });
    }
    /// Invoke `f` on every loaded chunk, passing its coordinates, the chunk
    /// itself, and its current (possibly absent) vertex buffer.
    /// Note the argument order swap: chunks are stored keyed (cx, cz, cy)
    /// but `f` receives (cx, cy, cz).
    pub fn each_chunk<'a>(&'a self, f: |cx: i32, cy: i32, cz: i32, c: &'a Chunk, b: Option<VertexBuffer>|) {
        for (&(cx, cz, cy), chunk) in self.chunks.iter() {
            f(cx, cy, cz, chunk, chunk.buffer.get())
        }
    }
}
| true
|
e01997c7979027d1bc5caaec98f2890abe7a3a3f
|
Rust
|
withoutboats/mock_io
|
/tests/test.rs
|
UTF-8
| 2,167
| 2.734375
| 3
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
#![feature(const_fn, static_mutex)]
#[macro_use]
extern crate mock_io;
use std::io::{Read, Write};
use std::marker::PhantomData;
use mock_io::{Io, Stdio};
#[test]
fn test_stdin() {
use_mock_stdio!();
mock_stdio::set_stdin(b"YELLOW SUBMARINE");
let mut buf = vec![0; 16];
mock_stdio::stdin().read(&mut buf).unwrap();
assert_eq!(&buf, b"YELLOW SUBMARINE");
}
#[test]
fn test_stdout() {
use_mock_stdio!();
mock_stdio::stdout().write(b"Hello, world!").unwrap();
mock_stdio::check_stdout(|stdout| assert_eq!(stdout, b"Hello, world!"));
}
#[test]
fn test_stderr() {
use_mock_stdio!();
mock_stdio::stderr().write(b"Danger, Will Robinson!").unwrap();
mock_stdio::check_stderr(|stderr| assert_eq!(stderr, b"Danger, Will Robinson!"));
}
#[test]
fn test_isolation() {
fn first_test() {
use_mock_stdio!();
mock_stdio::set_stdin(b"YELLOW SUBMARINE");
}
fn second_test() {
use_mock_stdio!();
let mut buf = vec![0; 16];
mock_stdio::stdin().read_to_end(&mut buf).unwrap();
assert_eq!(&buf, &[0; 16]);
}
first_test();
second_test();
}
struct Foo<T=Stdio> where T: for<'a> Io<'a> {
_spoopy: PhantomData<T>
}
impl<T=Stdio> Foo<T> where T: for<'a> Io<'a> {
fn print(data: &[u8]) {
T::stdout().write(data).unwrap();
}
}
#[test]
fn test_in_type_parameters() {
use_mock_stdio!();
Foo::<mock_stdio::MockStdio>::print(b"Hello, world!");
mock_stdio::check_stdout(|stdout| assert_eq!(stdout, b"Hello, world!"));
}
#[test]
#[should_panic]
fn test_without_mock_stdio() {
use_mock_stdio!();
Foo::<Stdio>::print(b"Hello, world!");
mock_stdio::check_stdout(|stdout| assert_eq!(stdout, b"Hello, world!"));
}
fn echo<T>() where T: for<'a> Io<'a> {
let mut buf = String::new();
T::stdin_read_line(&mut buf).unwrap();
T::stdout().write(buf.as_bytes()).unwrap();
}
#[test]
fn test_in_functions() {
const HELLO_WORLD: &'static [u8] = b"Hello, world!\n";
use_mock_stdio!();
mock_stdio::set_stdin(HELLO_WORLD);
echo::<mock_stdio::MockStdio>();
mock_stdio::check_stdout(|stdout| assert_eq!(stdout, HELLO_WORLD));
}
| true
|
e88847d1791047e16bd8316aee7cdddc5bfdf5f2
|
Rust
|
reposefulGrass/chip_eight
|
/src/chip8.rs
|
UTF-8
| 600
| 2.9375
| 3
|
[] |
no_license
|
use crate::memory::Memory;
use crate::cpu::Cpu;
pub const MAP_ROM_BEGIN: u16 = 0x200;
pub struct Chip8 {
cpu: Cpu,
ram: Memory,
}
impl Chip8 {
pub fn new () -> Chip8 {
Chip8 {
cpu: Cpu::new(),
ram: Memory::new(),
}
}
pub fn load_rom (&mut self, rom: &Vec<u8>) {
for (i, &byte) in rom.iter().enumerate() {
let offset = MAP_ROM_BEGIN;
self.ram.write_byte(offset + i as u16, byte);
}
}
pub fn step_instruction (&mut self) {
self.cpu.execute(&mut self.ram);
}
}
| true
|
44d61d897e23d30737426dd8fd847525439d5f49
|
Rust
|
Mirabellensaft/tmp1x2-rs
|
/src/conversion.rs
|
UTF-8
| 6,731
| 3.265625
| 3
|
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
// functions to convert temperature to and from register values
/// Convert a raw TMP1x2 temperature register pair (MSB, LSB) to degrees
/// Celsius.
///
/// Bit 0 of the LSB selects extended (13-bit) vs. normal (12-bit) mode.
/// The reading is a two's-complement count of 0.0625 °C steps.
pub fn convert_temp_from_register(msb: u8, lsb: u8) -> f32 {
    let extended_mode = (lsb & 1) != 0;
    // The register's sign bit lives in MSB bit 7.
    let negative = (msb & 0b1000_0000) != 0;
    let magnitude = u16::from(msb & 0b0111_1111);
    let value = if extended_mode {
        // 13-bit reading: 7 payload bits from the MSB, top 5 bits of the LSB.
        let raw = (magnitude << 5) | u16::from(lsb >> 3);
        // Sign-extend the 13-bit two's-complement value to 16 bits.
        if negative { raw | (0b1111 << 12) } else { raw }
    } else {
        // 12-bit reading: 7 payload bits from the MSB, top 4 bits of the LSB.
        let raw = (magnitude << 4) | u16::from(lsb >> 4);
        // Sign-extend the 12-bit two's-complement value to 16 bits.
        if negative { raw | (0b1_1111 << 11) } else { raw }
    };
    // Each step is 1/16 °C; reinterpret the bits as a signed integer.
    f32::from(value as i16) * 0.0625
}
/// Convert degrees Celsius to the (MSB, LSB) register pair in normal
/// (12-bit) mode, clamping to the representable range
/// [-128.0, 127.9375] °C.
pub fn convert_temp_to_register_normal(t: f32) -> (u8, u8) {
    // Clamp with the dedicated stdlib method instead of two manual `if`s.
    let t = t.clamp(-128.0, 127.9375);
    // Two's-complement count of 1/16 °C steps, left-aligned so the 12-bit
    // reading occupies the register's top bits.
    let value = ((t / 0.0625) as i16) << 4;
    // Split into MSB and LSB; the low nibble of the LSB is always zero.
    ((value >> 8) as u8, (value as u8 & 0b1111_0000))
}
/// Convert degrees Celsius to the (MSB, LSB) register pair in extended
/// (13-bit) mode, clamping to the representable range
/// [-256.0, 255.875] °C.
pub fn convert_temp_to_register_extended(t: f32) -> (u8, u8) {
    // Clamp with the dedicated stdlib method instead of two manual `if`s.
    let t = t.clamp(-256.0, 255.875);
    // Two's-complement count of 1/16 °C steps, left-aligned so the 13-bit
    // reading occupies the register's top bits.
    let value = ((t / 0.0625) as i16) << 3;
    // Split into MSB and LSB; the low three LSB bits are always zero.
    ((value >> 8) as u8, (value as u8 & 0b1111_1000))
}
#[cfg(test)]
mod tests {
    use super::{
        convert_temp_from_register as convert_from_reg,
        convert_temp_to_register_extended as convert_to_reg_ext,
        convert_temp_to_register_normal as convert_to_reg,
    };

    // Approximate f32 equality. All expected values below are exact
    // multiples of 0.0625, so an EPSILON-sized tolerance suffices here;
    // this would not be a sound comparison for arbitrary magnitudes.
    macro_rules! assert_near {
        ($left:expr, $right:expr) => {
            assert!(($left - $right) < core::f32::EPSILON && ($right - $left) < core::f32::EPSILON);
        };
    }

    // Sanity checks for the macro itself.
    #[test]
    fn assert_near_can_succeed() {
        assert_near!(1.0, 1.0);
    }

    #[test]
    #[should_panic]
    fn assert_near_can_fail() {
        assert_near!(1.0, 1.1);
    }

    // Datasheet-style reference values, normal (12-bit) mode.
    #[test]
    fn can_convert_temperature_from_register_normal_mode() {
        assert_near!(127.9375, convert_from_reg(0b0111_1111, 0b1111_0000));
        assert_near!(100.0, convert_from_reg(0b0110_0100, 0b0000_0000));
        assert_near!(80.0, convert_from_reg(0b0101_0000, 0b0000_0000));
        assert_near!(75.0, convert_from_reg(0b0100_1011, 0b0000_0000));
        assert_near!(50.0, convert_from_reg(0b0011_0010, 0b0000_0000));
        assert_near!(25.0, convert_from_reg(0b0001_1001, 0b0000_0000));
        assert_near!(0.25, convert_from_reg(0b0000_0000, 0b0100_0000));
        assert_near!(0.0, convert_from_reg(0b0000_0000, 0b0000_0000));
        assert_near!(-0.25, convert_from_reg(0b1111_1111, 0b1100_0000));
        assert_near!(-25.0, convert_from_reg(0b1110_0111, 0b0000_0000));
        assert_near!(-55.0, convert_from_reg(0b1100_1001, 0b0000_0000));
        assert_near!(-64.0, convert_from_reg(0b1100_0000, 0b0000_0000));
        assert_near!(-128.0, convert_from_reg(0b1000_0000, 0b0000_0000));
    }

    // Same references in extended (13-bit) mode — LSB bit 0 is set.
    #[test]
    fn can_convert_temperature_from_register_extended_mode() {
        assert_near!(255.875, convert_from_reg(0b0111_1111, 0b1111_0001));
        assert_near!(150.0, convert_from_reg(0b0100_1011, 0b0000_0001));
        assert_near!(128.0, convert_from_reg(0b0100_0000, 0b0000_0001));
        assert_near!(127.9375, convert_from_reg(0b0011_1111, 0b1111_1001));
        assert_near!(100.0, convert_from_reg(0b0011_0010, 0b0000_0001));
        assert_near!(80.0, convert_from_reg(0b0010_1000, 0b0000_0001));
        assert_near!(75.0, convert_from_reg(0b0010_0101, 0b1000_0001));
        assert_near!(50.0, convert_from_reg(0b0001_1001, 0b0000_0001));
        assert_near!(25.0, convert_from_reg(0b0000_1100, 0b1000_0001));
        assert_near!(0.25, convert_from_reg(0b0000_0000, 0b0010_0001));
        assert_near!(0.0, convert_from_reg(0b0000_0000, 0b0000_0001));
        assert_near!(-0.25, convert_from_reg(0b1111_1111, 0b1110_0001));
        assert_near!(-25.0, convert_from_reg(0b1111_0011, 0b1000_0001));
        assert_near!(-55.0, convert_from_reg(0b1110_0100, 0b1000_0001));
        assert_near!(-256.0, convert_from_reg(0b1000_0000, 0b0000_0001));
    }

    // Out-of-range inputs saturate at the mode's min/max register value.
    #[test]
    fn values_are_clamped() {
        assert_eq!((0b0111_1111, 0b1111_0000), convert_to_reg(129.0));
        assert_eq!((0b1000_0000, 0b0000_0000), convert_to_reg(-129.0));
        assert_eq!((0b0111_1111, 0b1111_0000), convert_to_reg_ext(256.0));
        assert_eq!((0b1000_0000, 0b0000_0000), convert_to_reg_ext(-257.0));
    }

    #[test]
    fn can_convert_temperature_to_register_normal_mode() {
        assert_eq!((0b0111_1111, 0b1111_0000), convert_to_reg(128.0));
        assert_eq!((0b0111_1111, 0b1111_0000), convert_to_reg(127.9375));
        assert_eq!((0b0110_0100, 0b0000_0000), convert_to_reg(100.0));
        assert_eq!((0b0101_0000, 0b0000_0000), convert_to_reg(80.0));
        assert_eq!((0b0100_1011, 0b0000_0000), convert_to_reg(75.0));
        assert_eq!((0b0011_0010, 0b0000_0000), convert_to_reg(50.0));
        assert_eq!((0b0001_1001, 0b0000_0000), convert_to_reg(25.0));
        assert_eq!((0b0000_0000, 0b0100_0000), convert_to_reg(0.25));
        assert_eq!((0b0000_0000, 0b0000_0000), convert_to_reg(0.0));
        assert_eq!((0b1111_1111, 0b1100_0000), convert_to_reg(-0.25));
        assert_eq!((0b1110_0111, 0b0000_0000), convert_to_reg(-25.0));
        assert_eq!((0b1100_1001, 0b0000_0000), convert_to_reg(-55.0));
        assert_eq!((0b1000_0000, 0b0000_0000), convert_to_reg(-128.0));
    }

    #[test]
    fn can_convert_temperature_to_register_extended_mode() {
        assert_eq!((0b0111_1111, 0b1111_0000), convert_to_reg_ext(255.875));
        assert_eq!((0b0100_1011, 0b0000_0000), convert_to_reg_ext(150.0));
        assert_eq!((0b0100_0000, 0b0000_0000), convert_to_reg_ext(128.0));
        assert_eq!((0b0011_1111, 0b1111_1000), convert_to_reg_ext(127.9375));
        assert_eq!((0b0011_0010, 0b0000_0000), convert_to_reg_ext(100.0));
        assert_eq!((0b0010_1000, 0b0000_0000), convert_to_reg_ext(80.0));
        assert_eq!((0b0010_0101, 0b1000_0000), convert_to_reg_ext(75.0));
        assert_eq!((0b0001_1001, 0b0000_0000), convert_to_reg_ext(50.0));
        assert_eq!((0b0000_1100, 0b1000_0000), convert_to_reg_ext(25.0));
        assert_eq!((0b0000_0000, 0b0010_0000), convert_to_reg_ext(0.25));
        assert_eq!((0b0000_0000, 0b0000_0000), convert_to_reg_ext(0.0));
        assert_eq!((0b1111_1111, 0b1110_0000), convert_to_reg_ext(-0.25));
        assert_eq!((0b1111_0011, 0b1000_0000), convert_to_reg_ext(-25.0));
        assert_eq!((0b1110_0100, 0b1000_0000), convert_to_reg_ext(-55.0));
        assert_eq!((0b1000_0000, 0b0000_0000), convert_to_reg_ext(-256.0));
    }
}
| true
|
52a78a0412a1031fab3b88f6a1c06277bac68ea3
|
Rust
|
bobmcwhirter/defmt
|
/decoder/src/frame.rs
|
UTF-8
| 17,158
| 2.890625
| 3
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
use std::{
convert::TryFrom,
fmt::{self, Write as _},
mem,
};
use crate::{Arg, BitflagsKey, Table};
use chrono::TimeZone;
use colored::Colorize;
use defmt_parser::{DisplayHint, Fragment, Level, ParserMode, TimePrecision, Type};
/// A log frame
#[derive(Debug, PartialEq)]
pub struct Frame<'t> {
    // Decoder table holding, among other things, the bitflags definitions
    // consulted while formatting.
    table: &'t Table,
    // Log level; when `None`, `DisplayFrame` prints no level tag.
    level: Option<Level>,
    // Numeric frame index, exposed via `index()`.
    index: u64,
    // Optional format string for the timestamp, plus its decoded arguments.
    timestamp_format: Option<&'t str>,
    timestamp_args: Vec<Arg<'t>>,
    // Format string
    format: &'t str,
    args: Vec<Arg<'t>>,
}
impl<'t> Frame<'t> {
    /// Assemble a `Frame` from its already-decoded parts. Crate-internal:
    /// frames are produced by the decoder, not by users.
    pub(crate) fn new(
        table: &'t Table,
        level: Option<Level>,
        index: u64,
        timestamp_format: Option<&'t str>,
        timestamp_args: Vec<Arg<'t>>,
        format: &'t str,
        args: Vec<Arg<'t>>,
    ) -> Self {
        Self {
            table,
            level,
            index,
            timestamp_format,
            timestamp_args,
            format,
            args,
        }
    }
    /// Returns a struct that will format this log frame (including message, timestamp, level,
    /// etc.).
    ///
    /// `colored` selects ANSI-colored level tags (see `DisplayFrame`).
    pub fn display(&'t self, colored: bool) -> DisplayFrame<'t> {
        DisplayFrame {
            frame: self,
            colored,
        }
    }
    /// Returns a timestamp formatter, or `None` when the frame carries no
    /// timestamp format string.
    pub fn display_timestamp(&'t self) -> Option<DisplayTimestamp<'t>> {
        self.timestamp_format
            .map(|_| DisplayTimestamp { frame: self })
    }
    /// Returns a struct that will format the message contained in this log frame.
    pub fn display_message(&'t self) -> DisplayMessage<'t> {
        DisplayMessage { frame: self }
    }
    /// The frame's log level, if any.
    pub fn level(&self) -> Option<Level> {
        self.level
    }

    /// The frame's numeric index.
    pub fn index(&self) -> u64 {
        self.index
    }
    /// Infallible wrapper around `format_args_real`: writing into a
    /// `String` cannot produce an `fmt::Error`.
    fn format_args(&self, format: &str, args: &[Arg], parent_hint: Option<&DisplayHint>) -> String {
        self.format_args_real(format, args, parent_hint).unwrap() // cannot fail, we only write to a `String`
    }
    /// Core formatter: expand `format` with `args` into a `String`.
    ///
    /// `parent_hint` is the display hint inherited from an enclosing
    /// parameter (e.g. when formatting nested `Format` values); a
    /// parameter's own hint takes precedence where present.
    fn format_args_real(
        &self,
        format: &str,
        args: &[Arg],
        parent_hint: Option<&DisplayHint>,
    ) -> Result<String, fmt::Error> {
        let params = defmt_parser::parse(format, ParserMode::ForwardsCompatible).unwrap();
        let mut buf = String::new();
        for param in params {
            match param {
                // Literal fragments are copied through verbatim.
                Fragment::Literal(lit) => {
                    buf.push_str(&lit);
                }
                Fragment::Parameter(param) => {
                    // The parameter's own hint wins over the inherited one.
                    let hint = param.hint.as_ref().or(parent_hint);
                    match &args[param.index] {
                        Arg::Bool(x) => write!(buf, "{}", x)?,
                        // Floats go through `ryu` for shortest round-trip output.
                        Arg::F32(x) => write!(buf, "{}", ryu::Buffer::new().format(*x))?,
                        Arg::F64(x) => write!(buf, "{}", ryu::Buffer::new().format(*x))?,
                        Arg::Uxx(x) => {
                            match param.ty {
                                Type::BitField(range) => {
                                    // Shift left to drop bits above `range.end`,
                                    // then right to drop bits below `range.start`.
                                    let left_zeroes =
                                        mem::size_of::<u128>() * 8 - range.end as usize;
                                    let right_zeroes = left_zeroes + range.start as usize;
                                    // isolate the desired bitfields
                                    let bitfields = (*x << left_zeroes) >> right_zeroes;
                                    if let Some(DisplayHint::Ascii) = hint {
                                        let bstr = bitfields
                                            .to_be_bytes()
                                            .iter()
                                            .skip(right_zeroes / 8)
                                            .copied()
                                            .collect::<Vec<u8>>();
                                        self.format_bytes(&bstr, hint, &mut buf)?
                                    } else {
                                        self.format_u128(bitfields as u128, hint, &mut buf)?;
                                    }
                                }
                                _ => match hint {
                                    Some(DisplayHint::ISO8601(precision)) => {
                                        self.format_iso8601(*x as u64, precision, &mut buf)?
                                    }
                                    // `{=?}`-style Debug defers to the parent's hint.
                                    Some(DisplayHint::Debug) => {
                                        self.format_u128(*x as u128, parent_hint, &mut buf)?
                                    }
                                    _ => self.format_u128(*x as u128, hint, &mut buf)?,
                                },
                            }
                        }
                        Arg::Ixx(x) => self.format_i128(*x as i128, hint, &mut buf)?,
                        Arg::Str(x) | Arg::Preformatted(x) => self.format_str(x, hint, &mut buf)?,
                        Arg::IStr(x) => self.format_str(x, hint, &mut buf)?,
                        // Nested format strings recurse, propagating hints.
                        Arg::Format { format, args } => match parent_hint {
                            Some(DisplayHint::Ascii) => {
                                buf.push_str(&self.format_args(format, args, parent_hint));
                            }
                            _ => buf.push_str(&self.format_args(format, args, hint)),
                        },
                        Arg::FormatSequence { args } => {
                            for arg in args {
                                buf.push_str(&self.format_args("{=?}", &[arg.clone()], hint))
                            }
                        }
                        Arg::FormatSlice { elements } => {
                            match hint {
                                // Filter Ascii Hints, which contains u8 byte slices
                                Some(DisplayHint::Ascii)
                                    if elements.iter().filter(|e| e.format == "{=u8}").count()
                                        != 0 =>
                                {
                                    let vals = elements
                                        .iter()
                                        .map(|e| match e.args.as_slice() {
                                            [Arg::Uxx(v)] => u8::try_from(*v)
                                                .expect("the value must be in u8 range"),
                                            _ => panic!(
                                                "FormatSlice should only contain one argument"
                                            ),
                                        })
                                        .collect::<Vec<u8>>();
                                    self.format_bytes(&vals, hint, &mut buf)?
                                }
                                // Default: render as a `[a, b, c]` list.
                                _ => {
                                    buf.write_str("[")?;
                                    let mut is_first = true;
                                    for element in elements {
                                        if !is_first {
                                            buf.write_str(", ")?;
                                        }
                                        is_first = false;
                                        buf.write_str(&self.format_args(
                                            element.format,
                                            &element.args,
                                            hint,
                                        ))?;
                                    }
                                    buf.write_str("]")?;
                                }
                            }
                        }
                        Arg::Slice(x) => self.format_bytes(x, hint, &mut buf)?,
                        Arg::Char(c) => write!(buf, "{}", c)?,
                    }
                }
            }
        }
        Ok(buf)
    }
    /// Append an unsigned value to `buf`, honoring the display hint
    /// (padding, binary/hex, microsecond timestamps, bitflags).
    fn format_u128(
        &self,
        x: u128,
        hint: Option<&DisplayHint>,
        buf: &mut String,
    ) -> Result<(), fmt::Error> {
        match hint {
            // `{:01$}` pads with zeroes to the runtime width `zero_pad`.
            Some(DisplayHint::NoHint { zero_pad }) => write!(buf, "{:01$}", x, zero_pad)?,
            Some(DisplayHint::Binary {
                alternate,
                zero_pad,
            }) => match alternate {
                // `alternate` adds the `0b` prefix.
                true => write!(buf, "{:#01$b}", x, zero_pad)?,
                false => write!(buf, "{:01$b}", x, zero_pad)?,
            },
            Some(DisplayHint::Hexadecimal {
                uppercase,
                alternate,
                zero_pad,
            }) => match (alternate, uppercase) {
                (false, false) => write!(buf, "{:01$x}", x, zero_pad)?,
                (false, true) => write!(buf, "{:01$X}", x, zero_pad)?,
                (true, false) => write!(buf, "{:#01$x}", x, zero_pad)?,
                (true, true) => write!(buf, "{:#01$X}", x, zero_pad)?,
            },
            // Microsecond count rendered as `seconds.micros`.
            Some(DisplayHint::Microseconds) => {
                let seconds = x / 1_000_000;
                let micros = x % 1_000_000;
                write!(buf, "{}.{:06}", seconds, micros)?;
            }
            Some(DisplayHint::Bitflags {
                name,
                package,
                disambiguator,
            }) => {
                // The bitflags hint is only used internally, in `Format` impls generated by
                // `defmt::bitflags!`.
                let key = BitflagsKey {
                    ident: name.clone(),
                    package: package.clone(),
                    disambig: disambiguator.clone(),
                };
                match self.table.bitflags.get(&key) {
                    Some(flags) => {
                        // Collect the names of all flags set in `x`; a
                        // zero-valued flag only matches when `x` is zero.
                        let set_flags = flags
                            .iter()
                            .filter(|(_, value)| {
                                if *value == 0 && x != 0 {
                                    false
                                } else {
                                    x & value == *value
                                }
                            })
                            .map(|(name, _)| name.clone())
                            .collect::<Vec<_>>();
                        if set_flags.is_empty() {
                            write!(buf, "(empty)")?;
                        } else {
                            write!(buf, "{}", set_flags.join(" | "))?;
                        }
                    }
                    None => {
                        // FIXME return an internal error here
                        write!(buf, "{}", x)?;
                    }
                }
            }
            _ => write!(buf, "{}", x)?,
        }
        Ok(())
    }
    /// Append a signed value to `buf`, honoring padding and binary/hex
    /// hints (other hints fall through to plain decimal).
    fn format_i128(
        &self,
        x: i128,
        hint: Option<&DisplayHint>,
        buf: &mut String,
    ) -> Result<(), fmt::Error> {
        match hint {
            Some(DisplayHint::NoHint { zero_pad }) => write!(buf, "{:01$}", x, zero_pad)?,
            Some(DisplayHint::Binary {
                alternate,
                zero_pad,
            }) => match alternate {
                true => write!(buf, "{:#01$b}", x, zero_pad)?,
                false => write!(buf, "{:01$b}", x, zero_pad)?,
            },
            Some(DisplayHint::Hexadecimal {
                uppercase,
                alternate,
                zero_pad,
            }) => match (alternate, uppercase) {
                (false, false) => write!(buf, "{:01$x}", x, zero_pad)?,
                (false, true) => write!(buf, "{:01$X}", x, zero_pad)?,
                (true, false) => write!(buf, "{:#01$x}", x, zero_pad)?,
                (true, true) => write!(buf, "{:#01$X}", x, zero_pad)?,
            },
            _ => write!(buf, "{}", x)?,
        }
        Ok(())
    }
    /// Append a byte slice to `buf`: as an escaped byte-string literal for
    /// the ASCII hint, as a hex/binary list for those hints, or via `Debug`
    /// otherwise.
    fn format_bytes(
        &self,
        bytes: &[u8],
        hint: Option<&DisplayHint>,
        buf: &mut String,
    ) -> Result<(), fmt::Error> {
        match hint {
            Some(DisplayHint::Ascii) => {
                // byte string literal syntax: b"Hello\xffworld"
                buf.push_str("b\"");
                for byte in bytes {
                    match byte {
                        // special escaping
                        b'\t' => buf.push_str("\\t"),
                        b'\n' => buf.push_str("\\n"),
                        b'\r' => buf.push_str("\\r"),
                        b' ' => buf.push(' '),
                        b'\"' => buf.push_str("\\\""),
                        b'\\' => buf.push_str("\\\\"),
                        _ => {
                            if byte.is_ascii_graphic() {
                                buf.push(*byte as char);
                            } else {
                                // general escaped form
                                write!(buf, "\\x{:02x}", byte).ok();
                            }
                        }
                    }
                }
                buf.push('\"');
            }
            Some(DisplayHint::Hexadecimal { .. }) | Some(DisplayHint::Binary { .. }) => {
                // `core::write!` doesn't quite produce the output we want, for example
                // `write!("{:#04x?}", bytes)` produces a multi-line output
                // `write!("{:02x?}", bytes)` is single-line but each byte doesn't include the "0x" prefix
                buf.push('[');
                let mut is_first = true;
                for byte in bytes {
                    if !is_first {
                        buf.push_str(", ");
                    }
                    is_first = false;
                    // Reuse the integer formatter so padding/prefix hints apply.
                    self.format_u128(*byte as u128, hint, buf)?;
                }
                buf.push(']');
            }
            _ => write!(buf, "{:?}", bytes)?,
        }
        Ok(())
    }
    /// Append a string to `buf`: quoted/escaped via `Debug` when the Debug
    /// hint is active, verbatim otherwise.
    fn format_str(
        &self,
        s: &str,
        hint: Option<&DisplayHint>,
        buf: &mut String,
    ) -> Result<(), fmt::Error> {
        if hint == Some(&DisplayHint::Debug) {
            write!(buf, "{:?}", s)?;
        } else {
            buf.push_str(s);
        }
        Ok(())
    }
    /// Append `timestamp` as an RFC 3339 / ISO 8601 UTC string.
    ///
    /// `precision` decides whether the raw value is interpreted as
    /// milliseconds or whole seconds since the Unix epoch.
    fn format_iso8601(
        &self,
        timestamp: u64,
        precision: &TimePrecision,
        buf: &mut String,
    ) -> Result<(), fmt::Error> {
        let format = match precision {
            TimePrecision::Millis => chrono::SecondsFormat::Millis,
            TimePrecision::Seconds => chrono::SecondsFormat::Secs,
        };
        let date_time = match precision {
            TimePrecision::Millis => chrono::Utc.timestamp_millis(timestamp as i64),
            TimePrecision::Seconds => chrono::Utc.timestamp(timestamp as i64, 0),
        };
        // `true` = use "Z" instead of "+00:00" for the UTC offset.
        write!(buf, "{}", date_time.to_rfc3339_opts(format, true))
    }
}
/// Formats only a frame's timestamp when `Display`ed.
pub struct DisplayTimestamp<'t> {
    frame: &'t Frame<'t>,
}

impl fmt::Display for DisplayTimestamp<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // `Frame::display_timestamp` only constructs this type when a
        // timestamp format exists, so the `unwrap` cannot fail.
        let args = self.frame.format_args(
            self.frame.timestamp_format.unwrap(),
            &self.frame.timestamp_args,
            None,
        );
        f.write_str(&args)
    }
}
/// Formats only a frame's message (no level or timestamp) when `Display`ed.
pub struct DisplayMessage<'t> {
    frame: &'t Frame<'t>,
}

impl fmt::Display for DisplayMessage<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let args = self
            .frame
            .format_args(self.frame.format, &self.frame.args, None);
        f.write_str(&args)
    }
}
/// Prints a `Frame` when formatted via `fmt::Display`, including all included metadata (level,
/// timestamp, ...).
pub struct DisplayFrame<'t> {
    frame: &'t Frame<'t>,
    // When true, the level tag is ANSI-colored via the `colored` crate.
    colored: bool,
}

impl fmt::Display for DisplayFrame<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Level tag ("INFO ", colored or plain), or empty for level-less frames.
        let level = if let Some(level) = self.frame.level {
            let level = if self.colored {
                match level {
                    Level::Trace => "TRACE".dimmed().to_string(),
                    Level::Debug => "DEBUG".normal().to_string(),
                    Level::Info => "INFO".green().to_string(),
                    Level::Warn => "WARN".yellow().to_string(),
                    Level::Error => "ERROR".red().to_string(),
                }
            } else {
                match level {
                    Level::Trace => "TRACE".to_string(),
                    Level::Debug => "DEBUG".to_string(),
                    Level::Info => "INFO".to_string(),
                    Level::Warn => "WARN".to_string(),
                    Level::Error => "ERROR".to_string(),
                }
            };
            format!("{} ", level)
        } else {
            "".to_string()
        };
        // Timestamp prefix, empty when the frame has no timestamp format.
        let timestamp = self
            .frame
            .timestamp_format
            .map(|fmt| {
                format!(
                    "{} ",
                    self.frame
                        .format_args(fmt, &self.frame.timestamp_args, None,),
                )
            })
            .unwrap_or_default();
        let args = self
            .frame
            .format_args(self.frame.format, &self.frame.args, None);
        // Output layout: "<timestamp> <LEVEL> <message>".
        write!(f, "{}{}{}", timestamp, level, args)
    }
}
| true
|
dad3ef5cabea9fcde2c97b62c3bee1381b1392c2
|
Rust
|
5l1v3r1/cloudman
|
/src/views/log_view.rs
|
UTF-8
| 4,895
| 2.65625
| 3
|
[
"MIT"
] |
permissive
|
extern crate cursive;
use cursive::{Rect, direction::Direction, view::scroll::{self, Core}};
use cursive::event::{Event, EventResult, Key};
use cursive::vec::Vec2;
use cursive::{Printer, view::View};
cursive::impl_scroller!(LogView::core);
/// A scrollable view over a fixed list of log lines parsed from raw bytes.
pub struct LogView {
    // Parsed log lines, one entry per displayed row.
    lines: Vec<String>,
    // Scrolling state (viewport offset/size) managed by cursive.
    core: cursive::view::scroll::Core,
}
impl LogView {
    /// Builds a scrollable `LogView` from a raw byte buffer.
    pub fn scrollable(buf: &[u8]) -> Self {
        LogView {
            lines: parse_lines(buf),
            core: Core::new(),
        }
    }
    /// Inner content size: fixed width of 80 columns, one row per line.
    fn inner_required_size(&mut self, _req: Vec2) -> Vec2 {
        Vec2::new(80, self.lines.len())
    }
    /// Keyboard handling for the scrolled content.
    fn inner_on_event(&mut self, event: Event) -> EventResult {
        let page = self.core.content_viewport().height();
        match event {
            Event::Key(Key::Up) => self.core.scroll_up(1),
            Event::Key(Key::Down) => self.core.scroll_down(1),
            Event::Key(Key::PageUp) => self.core.scroll_up(page),
            Event::Key(Key::PageDown) => self.core.scroll_down(page),
            Event::Char('g') | Event::Shift(Key::Home) | Event::Key(Key::Home) => {
                self.core.scroll_to_top()
            }
            Event::Char('H') | Event::Shift(Key::End) | Event::Key(Key::End) => {
                self.core.scroll_to_bottom()
            }
            Event::Char('/') => {
                // search (not implemented yet; event is still consumed)
            }
            _ => return EventResult::Ignored,
        }
        EventResult::Consumed(None)
    }
    /// The region that should be kept visible: the full content.
    fn inner_important_area(&self, size: Vec2) -> Rect {
        Rect::from_size((0, 0), (size.x, self.lines.len()))
    }
}
/// Feeds the raw bytes through a VTE state machine and collects the complete
/// lines accumulated by the `Log` sink.
fn parse_lines(buf: &[u8]) -> Vec<String> {
    let mut statemachine = vte::Parser::new();
    let mut sink = Log::new();
    for &byte in buf {
        statemachine.advance(&mut sink, byte);
    }
    sink.lines
}
impl View for LogView {
    /// Draws the visible slice of log lines.
    fn draw(&self, printer: &Printer<'_, '_>) {
        // Use the view reference handed to the closure instead of cloning the
        // whole line buffer on every draw call (the clone was pure overhead).
        scroll::draw_lines(self, &printer, |s, printer, i| {
            // ignore the first line, as it is incomplete
            if let Some(line) = s.lines.get(i + 1) {
                printer.print((0, 0), line);
            } else {
                printer.print((0, 0), "⍇");
            }
        });
    }
    /// Required size, accounting for scrollbars.
    fn required_size(&mut self, req: Vec2) -> Vec2 {
        scroll::required_size(
            self,
            req,
            true,
            Self::inner_required_size,
        )
    }
    fn take_focus(&mut self, _: Direction) -> bool {
        true
    }
    fn layout(&mut self, size: Vec2) {
        scroll::layout(
            self,
            size,
            true,
            |_s, _size| (),
            Self::inner_required_size,
        );
    }
    fn important_area(&self, size: Vec2) -> Rect {
        scroll::important_area(
            self,
            size,
            Self::inner_important_area,
        )
    }
    fn on_event(&mut self, event: Event) -> EventResult {
        scroll::on_event(
            self,
            event,
            Self::inner_on_event,
            Self::inner_important_area,
        )
    }
}
/// Collects printable characters into complete lines; used as the
/// `vte::Perform` sink while parsing raw terminal output.
#[derive(Default)]
struct Log {
    // Characters of the line currently being assembled.
    s: String,
    // Completed lines, in arrival order.
    lines: Vec<String>,
}
impl Log {
    /// Creates an empty `Log`.
    fn new() -> Self {
        // The derived `Default` already yields an empty buffer and no lines;
        // no need to spell the fields out by hand.
        Self::default()
    }
}
impl vte::Perform for Log {
    /// Printable characters are appended to the current line buffer.
    fn print(&mut self, c: char) {
        self.s.push(c);
    }
    /// A control byte (e.g. newline) terminates the current line.
    fn execute(&mut self, _c: u8) {
        // Take the buffer instead of cloning it; `take` leaves an empty
        // String behind, which is exactly the state we want either way.
        let line = std::mem::take(&mut self.s);
        if !line.is_empty() {
            self.lines.push(line);
        }
    }
    // The remaining VTE callbacks (escape/CSI/OSC sequences) are intentionally
    // ignored: only plain text is collected.
    fn hook(&mut self, _params: &[i64], _intermediates: &[u8], _ignore: bool, _c: char) {}
    fn put(&mut self, _byte: u8) {}
    fn unhook(&mut self) {}
    fn osc_dispatch(&mut self, _params: &[&[u8]], _bell_terminated: bool) {}
    fn csi_dispatch(&mut self, _params: &[i64], _intermediates: &[u8], _ignore: bool, _c: char) {}
    fn esc_dispatch(&mut self, _intermediates: &[u8], _ignore: bool, _byte: u8) {}
}
| true
|
ec5a60f25c3479a6d16f63e47edc61852bed094d
|
Rust
|
croaxx/ProjectEuler
|
/LeetCode/LetterCombinations/solution.rs
|
UTF-8
| 2,503
| 3.65625
| 4
|
[] |
no_license
|
// Problem description: https://leetcode.com/problems/letter-combinations-of-a-phone-number/
// Time complexity: O(const^n), n=len(digits) - generate all combinations
// Space complexity: O(const^n) - store all combinations
use std::{collections::HashMap, iter::FromIterator};
pub struct Solution {}
impl Solution {
    /// Returns every letter combination for the given phone digits (2-9).
    pub fn letter_combinations(digits: String) -> Vec<String> {
        Solver::new(digits.as_str()).solve()
    }
}
/// Backtracking solver that maps phone digits (2-9) to their keypad letters.
pub struct Solver {
    // Digit -> letters, per the classic phone keypad.
    phone_map: HashMap<u8, Vec<char>>,
    // The input digits, parsed to numeric values.
    digits: Vec<u8>,
}
impl Solver {
    /// Builds a solver for `digits`.
    ///
    /// Panics if `digits` contains a non-digit character; digits without
    /// letters (0, 1) panic later when combinations are assembled.
    pub fn new(digits: &str) -> Self {
        let phone_map: HashMap<_, _> = HashMap::from_iter([
            (2, vec!['a', 'b', 'c']),
            (3, vec!['d', 'e', 'f']),
            (4, vec!['g', 'h', 'i']),
            (5, vec!['j', 'k', 'l']),
            (6, vec!['m', 'n', 'o']),
            (7, vec!['p', 'q', 'r', 's']),
            (8, vec!['t', 'u', 'v']),
            (9, vec!['w', 'x', 'y', 'z']),
        ]);
        Self {
            digits: digits
                .chars()
                .map(|x| x.to_digit(10).unwrap() as u8)
                .collect::<Vec<_>>(),
            phone_map,
        }
    }
    /// Returns all combinations; empty input yields an empty vector.
    pub fn solve(&self) -> Vec<String> {
        let mut result = Vec::new();
        let mut combination = String::new();
        self.assemble_combinations(&mut combination, &mut result);
        result
    }
    /// Depth-first walk: extends `combination` by one letter per digit level,
    /// emitting it once every digit has been consumed.
    pub fn assemble_combinations(&self, combination: &mut String, combinations: &mut Vec<String>) {
        let idx = combination.len();
        if idx == self.digits.len() {
            // Don't emit the empty combination produced by empty input.
            if !combination.is_empty() {
                combinations.push(combination.to_owned());
            }
            return;
        }
        let letters = self
            .phone_map
            .get(&self.digits[idx])
            .expect("digit must map to letters (only 2-9 are supported)");
        for ch in letters {
            combination.push(*ch);
            self.assemble_combinations(combination, combinations);
            combination.pop();
        }
    }
}
#[cfg(test)]
mod tests {
    use crate::Solution;
    // "2" -> abc, "3" -> def: expect the 3x3 cross product in digit order.
    #[test]
    fn letter_combinations_success() {
        let digits: String = "23".to_owned();
        let result = Solution::letter_combinations(digits);
        assert_eq!(
            result,
            vec!["ad", "ae", "af", "bd", "be", "bf", "cd", "ce", "cf"]
        );
    }
    // Empty input must yield no combinations (not one empty string).
    #[test]
    fn letter_combinations_success_2() {
        let digits: String = "".to_owned();
        let result = Solution::letter_combinations(digits);
        assert_eq!(result.len(), 0);
    }
}
| true
|
b19136d695331b49ebe3dbaa783870018ac86a44
|
Rust
|
zrma/1d1rust
|
/src/boj/p25k/p25192.rs
|
UTF-8
| 1,633
| 2.953125
| 3
|
[] |
no_license
|
use crate::utils::io::read_line;
use std::io::{BufRead, Write};
#[allow(dead_code)]
// Counts greetings: each name seen for the first time since the last "ENTER"
// marker (which clears the room) is greeted exactly once.
fn solve25192(reader: &mut impl BufRead, writer: &mut impl Write) {
    let n = read_line(reader).parse::<usize>().unwrap();
    let mut cnt = 0;
    let mut set = std::collections::HashSet::new();
    for _ in 0..n {
        let s = read_line(reader);
        if s == "ENTER" {
            set.clear();
        } else if set.insert(s) {
            // `insert` returns true only when the name was not already
            // present, so the previous contains-then-insert double lookup
            // is avoided.
            cnt += 1;
        }
    }
    write!(writer, "{}", cnt).unwrap();
}
// https://www.acmicpc.net/problem/25192
// 인사성 밝은 곰곰이
// noinspection SpellCheckingInspection
#[test]
fn test_solve25192() {
    // Table-driven test: each case is (raw stdin feed, expected count).
    let cases = [
        // 8 distinct names after a single ENTER -> all greeted.
        (
            "9\nENTER\npjshwa\nchansol\nchogahui05\nlms0806\npichulia\nr4pidstart\nswoon\ntony9402",
            "8",
        ),
        // Second ENTER clears the room, so repeats are greeted again.
        (
            "7\nENTER\npjshwa\nchansol\nchogahui05\nENTER\npjshwa\nchansol",
            "5",
        ),
        // A repeated name within one session is greeted only once.
        ("3\nENTER\nlms0806\nlms0806", "1"),
    ];
    for (i, (input, want)) in cases.iter().enumerate() {
        let mut reader = input.as_bytes();
        let mut writer = vec![];
        solve25192(&mut reader, &mut writer);
        let got = String::from_utf8(writer).unwrap();
        assert_eq!(&got, want, "failed at {} with {}", i, input);
    }
}
| true
|
e1098c2908f87f790c8acbbdc60454fa33ee53a7
|
Rust
|
pebble8888/ellipticcurve
|
/src/assert_eq_str.rs
|
UTF-8
| 314
| 2.5625
| 3
|
[
"MIT"
] |
permissive
|
/// Asserts that `left.to_string()` equals `right`.
///
/// Handy for checking `Display`/`ToString` output against a string literal.
#[allow(unused_macros)]
macro_rules! assert_eq_str {
    ($left:expr, $right:expr) => {{
        // Evaluate each argument exactly once.
        let rendered = (&$left).to_string();
        assert_eq!(&rendered, &$right);
    }};
}
| true
|
0b3a05c33482f7f0438e8a9a93f3520da9439c01
|
Rust
|
zeroclassic/zeroclassic-vendored-crates
|
/vendored-sources/crypto_api/src/pbkdf.rs
|
UTF-8
| 1,612
| 2.9375
| 3
|
[
"BSD-2-Clause",
"MIT"
] |
permissive
|
use std::{ error::Error, ops::Range };
/// Information about a PBKDF implementation
#[derive(Debug, Eq, PartialEq, Clone)]
pub struct PbkdfInfo {
	/// The name
	pub name: &'static str,
	/// The supported output lengths
	pub output_len_r: Range<usize>,
	/// The supported password lengths
	pub password_len_r: Range<usize>,
	/// The supported salt lengths
	pub salt_len_r: Range<usize>,
	/// The default CPU cost
	pub cpu_cost: u64,
	/// The supported CPU costs
	// NOTE(review): this range is `Range<usize>` while `cpu_cost` is `u64`
	// and `memory_cost_r` is `Range<u64>` — confirm whether `Range<u64>`
	// was intended.
	pub cpu_cost_r: Range<usize>,
	/// The default memory cost (is `0` if the PBKDF is not memory hard)
	pub memory_cost: u64,
	/// The supported memory costs (is `0..0` if the PBKDF is not memory hard)
	pub memory_cost_r: Range<u64>,
	/// The default parallelism (is `0` if the PBKDF does not support parallelism)
	pub parallelism: u64,
	/// The supported parallelism (is `0..0` if the PBKDF does not support parallelism)
	pub parallelism_r: Range<u64>
}
/// A stateless (oneshot) PBKDF interface
pub trait Pbkdf {
	/// Returns information about the PBKDF
	fn info(&self) -> PbkdfInfo;
	/// Fills `buf` with key bytes derived from `password` and `salt`,
	/// parametrized by `cpu_cost`
	fn derive(&self, buf: &mut[u8], password: &[u8], salt: &[u8], cpu_cost: u64)
		-> Result<(), Box<dyn Error + 'static>>;
}
/// A stateless (oneshot) memory-hard PBKDF interface
pub trait MemoryHardPbkdf: Pbkdf {
	/// Fills `buf` with key bytes derived from `password` and `salt`,
	/// parametrized by `cpu_cost`, `memory_cost` and `parallelism`
	fn derive_memory_hard(&self, buf: &mut[u8], password: &[u8], salt: &[u8], cpu_cost: u64,
		memory_cost: u64, parallelism: u64) -> Result<(), Box<dyn Error + 'static>>;
}
| true
|
581e492473acfc355e372514e3cff047d01f1205
|
Rust
|
zolbatar/web-rust-scheduler
|
/src/data/schema.rs
|
UTF-8
| 1,922
| 2.640625
| 3
|
[] |
no_license
|
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
/// A geolocated activity to be scheduled.
#[derive(PartialEq, Clone, Serialize, Deserialize, Debug)]
pub struct Activity {
    pub id: String,
    pub lat: f64,
    pub lon: f64,
}
#[allow(dead_code)]
impl Activity {
    /// Creates an `Activity`. Made `pub` for consistency with the other
    /// constructors in this module (`Resource::new`, `Allocation::new`,
    /// `Route::new` are all public).
    pub fn new(id: String, lat: f64, lon: f64) -> Activity {
        Activity { id, lat, lon }
    }
}
/// A geolocated resource that can be routed to activities.
#[derive(PartialEq, Clone, Serialize, Deserialize, Debug)]
pub struct Resource {
    pub id: String,
    pub lat: f64,
    pub lon: f64,
}
#[allow(dead_code)]
impl Resource {
    /// Creates a `Resource` at the given coordinates.
    pub fn new(id: String, lat: f64, lon: f64) -> Resource {
        Resource { id, lat, lon }
    }
}
/// An activity assignment together with its travel costs.
#[derive(PartialEq, Clone, Serialize, Deserialize, Debug)]
pub struct Allocation {
    pub activity_id: String,
    pub dist: f64,
    pub time: f64,
    pub travel_to: f64,
    pub travel_from: f64,
}
#[allow(dead_code)]
impl Allocation {
    /// Creates an `Allocation`.
    ///
    /// NOTE(review): the parameter order (`travel_from` before `travel_to`)
    /// differs from the field declaration order; the shorthand init below
    /// maps each name correctly, but call sites should double-check their
    /// argument order.
    pub fn new(activity_id: String, dist: f64, time: f64, travel_from: f64, travel_to: f64) -> Allocation {
        Allocation { activity_id, dist, time, travel_to, travel_from }
    }
}
/// An ordered list of allocation ids served by one resource.
#[derive(PartialEq, Clone, Serialize, Deserialize, Debug)]
pub struct Route {
    pub resource_id: String,
    pub allocation: Vec<String>,
    pub time: f64,
}
#[allow(dead_code)]
impl Route {
    /// Creates an empty `Route` for the given resource.
    pub fn new(resource_id: String, time: f64) -> Route {
        Route { resource_id, allocation: Vec::new(), time }
    }
}
/// Solver progress and solution-quality metrics.
#[derive(PartialEq, Clone, Serialize, Deserialize, Default, Debug)]
pub struct Status {
    pub start_time: f64,
    pub new_data: bool,
    pub changed: bool,
    pub quality: f64,
    pub distance: f64,
    pub value: f64,
    pub travel_time: f64,
}
/// Top-level container for all scheduling data, keyed by entity id.
#[derive(PartialEq, Clone, Serialize, Deserialize, Default, Debug)]
pub struct SchemaData {
    pub activity: HashMap<String, Activity>,
    pub resource: HashMap<String, Resource>,
    pub allocation: HashMap<String, Allocation>,
    pub route: Vec<Route>,
    pub status: Option<Status>,
}
| true
|
568a81d4e5dc466cf1be934537ece468be3a429e
|
Rust
|
jduimovich/rustgc
|
/src/gc/mod.rs
|
UTF-8
| 10,350
| 2.59375
| 3
|
[] |
no_license
|
use std::mem;
use std::time::SystemTime;
// Heap size in machine words.
#[cfg(feature="dynamic_mem")]
const MAX_MEMORY_SLOTS: usize = 1024 * 1024 * 2;
#[cfg(not(feature="dynamic_mem"))]
const MAX_MEMORY_SLOTS: usize = 1024 * 128;
// One mark word; each BIT marks one heap slot.
type Bits = u128;
// Number of mark bits per mark word. `size_of` returns BYTES, so multiply by
// 8: the previous value (16) only ever used the low 16 bits of each `u128`
// and made the mark bitmap 8x larger than necessary. All users derive both
// the word index (obj / MARK_BITS_PER_SLOT) and the bit index
// (obj % MARK_BITS_PER_SLOT) from this constant, so the fix is consistent.
const MARK_BITS_PER_SLOT: usize = mem::size_of::<Bits>() * 8;
const MARK_BITS: usize = MAX_MEMORY_SLOTS / MARK_BITS_PER_SLOT;
// Heap storage: a Vec (feature "dynamic_mem") or a fixed inline array.
#[cfg(feature="dynamic_mem")]
type Mem = Vec<usize>;
#[cfg(not(feature="dynamic_mem"))]
type Mem = [usize; MAX_MEMORY_SLOTS] ;
// Every object starts with one header slot holding its total size in slots.
pub const OBJECT_HEADER_SLOTS: usize = 1;
/// A mark-and-sweep garbage-collected heap of `usize` slots.
pub struct Memory {
    // First block of the address-ordered free list (0 = empty list).
    head: usize,
    mem: Mem,
    // Mark bitmap used during the mark phase.
    mark_bits: [u128; MARK_BITS],
    // GC roots: these objects (and everything reachable from them) survive.
    roots: Vec<usize>,
    // Statistics.
    gc_count: usize,
    allocates: usize,
    last_gc_ms: u128,
    total_gc_ms: u128,
    lastgc_live_mem: usize,
    lastgc_free_mem: usize,
    // Debug-output toggles.
    show_gc: bool,
    show_allocates: bool,
    show_heap_map: bool,
    show_free_list: bool,
}
/// Iterating a `&Memory` yields the address of every live (allocated) object.
impl<'a> IntoIterator for &'a Memory {
    type Item = usize;
    type IntoIter = MemoryIntoIterator<'a>;
    fn into_iter(self) -> Self::IntoIter {
        // `scan == 0` signals "iteration not started yet" to `next()`.
        MemoryIntoIterator {
            mem: self,
            scan: 0,
            free: 0,
        }
    }
}
/// Walks the heap in address order, skipping blocks on the free list.
pub struct MemoryIntoIterator<'a> {
    mem: &'a Memory,
    // Current heap scan address (0 = iteration not started).
    scan: usize,
    // Next free-list block at or after `scan`.
    free: usize,
}
impl<'a> Iterator for MemoryIntoIterator<'a> {
    type Item = usize;
    fn next(&mut self) -> Option<Self::Item> {
        if self.scan == 0 {
            // First call: start at the bottom of the heap (address 1) and at
            // the head of the free list.
            self.scan = 1;
            self.free = self.mem.head;
        } else {
            self.scan = self.mem.next_object_in_heap(self.scan);
        }
        // Skip blocks that are on the free list. This relies on the free
        // list being address-ordered, as (re)built by `sweep`.
        while self.scan == self.free {
            self.scan = self.mem.next_object_in_heap(self.free);
            self.free = self.mem.get_fl_next(self.free);
        }
        if self.scan >= MAX_MEMORY_SLOTS - 1 {
            return None;
        } else {
            return Some(self.scan);
        }
    }
}
/// Allocates the heap backing store (Vec-backed variant).
#[cfg(feature = "dynamic_mem")]
fn im() -> Mem {
    vec![0; MAX_MEMORY_SLOTS]
}
/// Allocates the heap backing store (fixed-array variant).
#[cfg(not(feature = "dynamic_mem"))]
fn im() -> Mem {
    [0; MAX_MEMORY_SLOTS]
}
impl Memory {
    /// Creates a heap whose whole space (minus the reserved boundary slots)
    /// is one big free block.
    pub fn initialze_memory() -> Memory {
        let mut mem = Memory {
            head: 1,
            mem: im(),
            mark_bits: [0; MARK_BITS],
            roots: Vec::new(),
            gc_count: 0,
            allocates: 0,
            lastgc_live_mem: 0,
            lastgc_free_mem: 0,
            last_gc_ms: 0,
            total_gc_ms: 0,
            show_gc: false,
            show_allocates: false,
            show_heap_map: false,
            show_free_list: false,
        };
        mem.set_size(0, MAX_MEMORY_SLOTS); // magic memory at zero is heap_size
        mem.set_size(mem.head, MAX_MEMORY_SLOTS - 2); // set initial object size as all heap
        mem.set_fl_next(mem.head, 0);
        mem
    }
    // objects API
    // allocate_object (size) --- size is number of indexable slots
    // add/remote_root () --- add to or remove from gc root set.
    // element_size() - number of indexable slots - get_size() - OBJECT_HEADER_SLOTS
    // at_put - store into object slot at index
    // at -- fetch object slot at index
    /// Allocates an object with `unrounded_size` indexable slots. On
    /// exhaustion a GC runs and the allocation is retried once; a second
    /// failure panics with "out of memory".
    pub fn allocate_object(&mut self, unrounded_size: usize) -> usize {
        self.allocates += 1;
        let mut result = self.allocate_object_nocompress(unrounded_size);
        if result == 0 {
            self.gc();
            result = self.allocate_object_nocompress(unrounded_size);
            if result == 0 {
                self.print_freelist();
                self.print_heap();
                panic!("out of memory");
            }
        }
        result
    }
    /// Iterator over the addresses of all live objects.
    pub fn live_objects(&self) -> MemoryIntoIterator {
        return self.into_iter();
    }
    /// Registers `obj` as a GC root.
    pub fn add_root(&mut self, obj: usize) {
        self.roots.push(obj);
    }
    /// Removes the first occurrence of `obj` from the root set (no-op if
    /// absent).
    pub fn remove_root(&mut self, obj: usize) {
        for i in 0..self.roots.len() {
            if obj == self.roots[i] {
                self.roots.remove(i);
                return;
            }
        }
    }
    /// Stores `value` into slot `index` of `obj`.
    // NOTE(review): the slice below spans one slot past the object's end
    // (base + slots rather than base + slots - 1); only indices below
    // element_size(obj) are valid, so in-range calls never touch that slot.
    pub fn at_put(&mut self, obj: usize, index: usize, value: usize) {
        let slots = self.mem[obj];
        let base = obj+OBJECT_HEADER_SLOTS;
        let object =&mut self.mem[ base.. base + slots ];
        object[index] = value;
    }
    /// Fetches slot `index` of `obj`.
    pub fn at(&self, obj: usize, index: usize) -> usize {
        let slots = self.mem[obj];
        let base = obj+OBJECT_HEADER_SLOTS;
        let object =&self.mem[ base.. base + slots ];
        return object[index];
    }
    /// Number of indexable slots in `obj` (total size minus the header).
    pub fn element_size(&self, obj: usize) -> usize {
        return self.mem[obj] - OBJECT_HEADER_SLOTS;
    }
    pub fn enable_show_heap_map(&mut self, enabled: bool) {
        self.show_heap_map = enabled;
    }
    pub fn enable_show_freelist(&mut self, enabled: bool) {
        self.show_free_list = enabled;
    }
    pub fn enable_show_gc(&mut self, enabled: bool) {
        self.show_gc = enabled;
    }
    pub fn enable_show_allocates(&mut self, enabled: bool) {
        self.show_allocates = enabled;
    }
    // Rounds a slot count UP to the next even number.
    fn rounded_size(unrounded_size: usize) -> usize {
        (unrounded_size + 1) & !(1) // rounded to 2
    }
    // The object's total size (in slots) lives in its header slot.
    fn get_size(&self, obj: usize) -> usize {
        return self.mem[obj];
    }
    fn set_size(&mut self, obj: usize, size: usize) {
        self.mem[obj] = size;
    }
    // Objects are laid out back to back, so the next object starts right
    // after this one's last slot.
    fn next_object_in_heap(&self, obj: usize) -> usize {
        return obj + self.get_size(obj);
    }
    //free list is linked off the first slot
    fn get_fl_next(&self, obj: usize) -> usize {
        return self.mem[obj + 1];
    }
    fn set_fl_next(&mut self, obj: usize, next: usize) {
        self.mem[obj + 1] = next;
    }
    // Mark-bit helpers: word index and bit index are both derived from
    // MARK_BITS_PER_SLOT.
    fn mark_object(&mut self, obj: usize) {
        self.mark_bits[obj / MARK_BITS_PER_SLOT] |= 1 << (obj % MARK_BITS_PER_SLOT);
    }
    fn unmark_object(&mut self, obj: usize) {
        self.mark_bits[obj / MARK_BITS_PER_SLOT] &= !(1 << (obj % MARK_BITS_PER_SLOT));
    }
    fn is_marked(&self, obj: usize) -> bool {
        (self.mark_bits[obj / MARK_BITS_PER_SLOT] & (1 << (obj % MARK_BITS_PER_SLOT))) != 0
    }
    // First-fit allocation from the free list; returns 0 when no block is
    // strictly larger than the (rounded) request. New objects are carved off
    // the END of the chosen free block, so the block's header stays valid.
    fn allocate_object_nocompress(&mut self, unrounded_size: usize) -> usize {
        let size = Memory::rounded_size(unrounded_size + OBJECT_HEADER_SLOTS);
        let mut free = self.head;
        while free != 0 {
            let avail = self.get_size(free);
            if avail > size {
                let newsize = avail - size;
                if newsize < 2 {
                    panic!("remaining size is less than 2");
                }
                // shrink current free to smaller size
                self.set_size(free, newsize);
                // new object is on the end of current free object
                let new_object = free + newsize;
                self.set_size(new_object, size);
                for index in 0..self.element_size(new_object) {
                    self.at_put(new_object, index, 0);
                }
                if self.show_allocates {
                    println!(
                        "Success: allocate_object returning -> {} size {}",
                        new_object, size
                    );
                }
                if self.head != free {
                    if self.show_allocates {
                        println!("Reset head past intermediate free blocks \n");
                        let mut show = self.head;
                        while show != free {
                            println!("Abandon {} size {}\n", show, self.get_size(show));
                            show = self.get_fl_next(show);
                        }
                    }
                    self.head = free;
                }
                return new_object;
            }
            free = self.get_fl_next(free);
        }
        0
    }
    /// Runs a full collection: mark everything reachable from the roots,
    /// then sweep the heap, rebuilding the free list. Timing stats are
    /// recorded afterwards.
    pub fn gc(&mut self) {
        let start = SystemTime::now();
        for i in 0..self.roots.len() {
            self.mark_and_scan(self.roots[i]);
        }
        self.sweep();
        self.gc_count += 1;
        if self.show_gc {
            self.print_gc_stats();
        }
        match start.elapsed() {
            Ok(elapsed) => {
                self.last_gc_ms = elapsed.as_millis();
                self.total_gc_ms += self.last_gc_ms;
            }
            Err(e) => {
                println!("Error: {:?}", e);
            }
        }
    }
    // Sweep phase: walk the heap in address order. Marked objects are
    // unmarked (ready for the next GC); unmarked objects are linked into a
    // fresh address-ordered free list, coalescing adjacent free blocks.
    fn sweep(&mut self) {
        let mut scan = 1;
        self.head = 0;
        let mut tail = self.head;
        self.lastgc_free_mem = 0;
        self.lastgc_live_mem = 0;
        while scan < MAX_MEMORY_SLOTS - 1 {
            if self.is_marked(scan) {
                self.unmark_object(scan);
                self.lastgc_live_mem += self.get_size(scan);
            } else {
                self.lastgc_free_mem += self.get_size(scan);
                if tail == 0 {
                    self.head = scan;
                    self.set_fl_next(scan, 0);
                    tail = scan;
                } else {
                    if self.next_object_in_heap(tail) == scan {
                        // Adjacent free blocks: merge into the tail block.
                        self.set_size(tail, self.get_size(tail) + self.get_size(scan));
                    } else {
                        self.set_fl_next(tail, scan);
                        self.set_fl_next(scan, 0);
                        tail = scan;
                    }
                }
            }
            scan = self.next_object_in_heap(scan);
        }
        if self.show_free_list {
            self.print_freelist();
        }
        if self.show_heap_map {
            self.print_heap();
        }
    }
    // Recursive mark: every non-header slot is treated as a potential object
    // reference (address 0 acts as "null"). Already-marked objects stop the
    // recursion, so cycles terminate.
    fn mark_and_scan(&mut self, object: usize) {
        if object == 0 || self.is_marked(object) {
            return;
        }
        let slots = self.get_size(object);
        self.mark_object(object);
        for i in OBJECT_HEADER_SLOTS..slots {
            self.mark_and_scan(self.mem[object + i]);
        }
    }
    /// Prints cumulative GC statistics.
    pub fn print_gc_stats(&self) {
        println!(
            "{} gcs, {} object allocates, Last GC: Live {} Dead {} in {} ms, Lifetime GC {} ms\n",
            self.gc_count,
            self.allocates,
            self.lastgc_live_mem,
            self.lastgc_free_mem,
            self.last_gc_ms,
            self.total_gc_ms,
        );
    }
    // Renders an ASCII heap map ('.' = live, 'x' = free) after homing the
    // cursor with an ANSI escape.
    fn print_heap(&mut self) {
        print!("\x1B[{};{}H", 1, 1);
        let mut scan = 1;
        let mut count = 0;
        let mut free = self.head;
        while scan < MAX_MEMORY_SLOTS - 1 {
            // skip free ones, print x's //
            let mut num_chars_to_print = 0;
            let mut char_to_print = '?';
            if scan == free {
                while scan == free {
                    char_to_print = 'x';
                    num_chars_to_print += self.get_size(scan);
                    scan = self.next_object_in_heap(free);
                    free = self.get_fl_next(free);
                }
            } else {
                char_to_print = '.';
                num_chars_to_print += self.get_size(scan);
                scan = self.next_object_in_heap(scan);
            }
            for _i in 1..num_chars_to_print / 2 {
                print!("{}", char_to_print);
                count += 1;
                if count % 120 == 0 {
                    print!("\n");
                }
            }
        }
        self.print_gc_stats();
    }
    /// Dumps the free list; panics if it appears to contain a cycle (more
    /// entries than heap slots).
    pub fn print_freelist(&mut self) {
        println!("\nprint_freelist: Head = {}", self.head);
        let mut free = self.head;
        let mut count = 0;
        let mut total_free = 0;
        while free != 0 {
            let size = self.get_size(free);
            let next = self.get_fl_next(free);
            total_free += self.get_size(free);
            println!("{}: Free = {} {} slots next = {}", count, free, size, next);
            free = next;
            count += 1;
            if count > MAX_MEMORY_SLOTS {
                panic!()
            }
        }
        println!(
            "print_freelist {} elements, total free = {}\n",
            count, total_free
        );
    }
}
| true
|
fb585bb8bbe9a1b42a7188c659f8fe77b7150093
|
Rust
|
vadixidav/machine-vision-formats
|
/src/pixel_format.rs
|
UTF-8
| 7,851
| 3.171875
| 3
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
//! Implementations of specific pixel formats.
//
// TODO: Check if we should use [PFNC (Pixel Format Naming
// Convention)](https://www.emva.org/wp-content/uploads/GenICamPixelFormatValues.pdf)
// names.
// TODO: Check if names from ffmpeg (e.g. `AV_PIX_FMT_YUVA444P`) would be
// better.
// Also note the formats at
// https://docs.microsoft.com/en-us/windows/win32/medfound/video-subtype-guids.
#[cfg(not(feature = "std"))]
extern crate core as std;
use std::convert::TryFrom;
/// This type allows runtime inspection of pixel format.
#[derive(Debug, Clone, Copy, PartialEq)]
#[non_exhaustive]
pub enum PixFmt {
    /// Luminance, 1 byte per pixel (a.k.a. Gray8).
    Mono8,
    /// Luminance, 32-bit float per pixel.
    Mono32f,
    /// Red, Green, Blue, 1 byte each (3 bytes per pixel).
    RGB8,
    /// Bayer Red Green pattern, 1 byte per pixel.
    BayerRG8,
    /// Bayer Red Green pattern, 4 bytes per pixel.
    BayerRG32f,
    /// Bayer Blue Green pattern, 1 byte per pixel.
    BayerBG8,
    /// Bayer Blue Green pattern, 4 bytes per pixel.
    BayerBG32f,
    /// Bayer Green Blue pattern, 1 byte per pixel.
    BayerGB8,
    /// Bayer Green Blue pattern, 4 bytes per pixel.
    BayerGB32f,
    /// Bayer Green Red pattern, 1 byte per pixel.
    BayerGR8,
    /// Bayer Green Red pattern, 4 bytes per pixel.
    BayerGR32f,
    /// YUV 4:4:4 8-bit (3 bytes per pixel).
    YUV444,
    /// YUV 4:2:2 8-bit (2 bytes per pixel).
    YUV422,
    /// NV12 format, average 12 bits per pixel.
    NV12,
}
impl PixFmt {
    /// Convert a runtime variant into a static type.
    pub fn to_static<FMT: PixelFormat>(&self) -> Option<std::marker::PhantomData<FMT>> {
        // Succeeds only when FMT's runtime tag matches `self`.
        if pixfmt::<FMT>() == Ok(*self) {
            Some(std::marker::PhantomData)
        } else {
            None
        }
    }
    /// The average number of bits per pixel.
    pub const fn bits_per_pixel(&self) -> u8 {
        use PixFmt::*;
        // Grouped by storage width rather than one arm per variant.
        match self {
            Mono8 | BayerRG8 | BayerBG8 | BayerGB8 | BayerGR8 => 8,
            Mono32f | BayerRG32f | BayerBG32f | BayerGB32f | BayerGR32f => 32,
            RGB8 | YUV444 => 24,
            YUV422 => 16,
            NV12 => 12,
        }
    }
    /// The name of the pixel format (the canonical string form).
    pub const fn as_str(&self) -> &'static str {
        use PixFmt::*;
        match self {
            Mono8 => "Mono8",
            Mono32f => "Mono32f",
            RGB8 => "RGB8",
            BayerRG8 => "BayerRG8",
            BayerRG32f => "BayerRG32f",
            BayerBG8 => "BayerBG8",
            BayerBG32f => "BayerBG32f",
            BayerGB8 => "BayerGB8",
            BayerGB32f => "BayerGB32f",
            BayerGR8 => "BayerGR8",
            BayerGR32f => "BayerGR32f",
            YUV444 => "YUV444",
            YUV422 => "YUV422",
            NV12 => "NV12",
        }
    }
}
impl std::fmt::Display for PixFmt {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        // Display is simply the canonical format name.
        f.write_str(self.as_str())
    }
}
impl std::str::FromStr for PixFmt {
    type Err = &'static str;
    /// Parses exactly the names produced by `as_str` (round-trip safe).
    fn from_str(instr: &str) -> Result<Self, <Self as std::str::FromStr>::Err> {
        use PixFmt::*;
        match instr {
            "Mono8" => Ok(Mono8),
            "Mono32f" => Ok(Mono32f),
            "RGB8" => Ok(RGB8),
            "BayerRG8" => Ok(BayerRG8),
            "BayerRG32f" => Ok(BayerRG32f),
            "BayerBG8" => Ok(BayerBG8),
            "BayerBG32f" => Ok(BayerBG32f),
            "BayerGB8" => Ok(BayerGB8),
            "BayerGB32f" => Ok(BayerGB32f),
            "BayerGR8" => Ok(BayerGR8),
            "BayerGR32f" => Ok(BayerGR32f),
            "YUV444" => Ok(YUV444),
            "YUV422" => Ok(YUV422),
            "NV12" => Ok(NV12),
            _ => Err("Cannot parse string"),
        }
    }
}
#[test]
fn test_pixfmt_roundtrip() {
    use PixFmt::*;
    // Every variant must survive an as_str -> FromStr round trip.
    let fmts = [
        Mono8, Mono32f, RGB8, BayerRG8, BayerRG32f, BayerBG8, BayerBG32f, BayerGB8, BayerGB32f,
        BayerGR8, BayerGR32f, YUV444, YUV422, NV12,
    ];
    for fmt in &fmts {
        let fmt_str = fmt.as_str();
        dbg!(fmt_str);
        let fmt2 = std::str::FromStr::from_str(fmt_str).unwrap();
        assert_eq!(fmt, &fmt2);
    }
}
// Early-returns `Ok(PixFmt::$name)` from the enclosing function when `$orig`
// is the `PhantomData` marker for the static type `$name`.
macro_rules! try_downcast {
    ($name:ident, $orig:expr) => {{
        if let Some(_) = <dyn std::any::Any>::downcast_ref::<std::marker::PhantomData<$name>>($orig)
        {
            return Ok(PixFmt::$name);
        }
    }};
}
/// Converts a compile-time pixel-format marker into the runtime enum by
/// probing each known format via `Any::downcast_ref`.
impl<FMT> TryFrom<std::marker::PhantomData<FMT>> for PixFmt
where
    FMT: PixelFormat,
{
    type Error = &'static str;
    fn try_from(orig: std::marker::PhantomData<FMT>) -> Result<PixFmt, Self::Error> {
        // Each invocation early-returns on a match; falling through all of
        // them means FMT is an implementation this crate does not know about.
        try_downcast!(Mono8, &orig);
        try_downcast!(Mono32f, &orig);
        try_downcast!(RGB8, &orig);
        try_downcast!(BayerRG8, &orig);
        try_downcast!(BayerRG32f, &orig);
        try_downcast!(BayerBG8, &orig);
        try_downcast!(BayerBG32f, &orig);
        try_downcast!(BayerGB8, &orig);
        try_downcast!(BayerGB32f, &orig);
        try_downcast!(BayerGR8, &orig);
        try_downcast!(BayerGR32f, &orig);
        try_downcast!(YUV444, &orig);
        try_downcast!(YUV422, &orig);
        try_downcast!(NV12, &orig);
        Err("unknown PixelFormat implementation could not be converted to PixFmt")
    }
}
/// Convert a compile-time type FMT into a runtime type.
#[inline]
pub fn pixfmt<FMT: PixelFormat>() -> Result<PixFmt, &'static str> {
use std::convert::TryInto;
let concrete: std::marker::PhantomData<FMT> = std::marker::PhantomData;
concrete.try_into()
}
#[test]
fn test_compile_runtime_roundtrip() {
    // runtime -> static (`to_static`) -> runtime (`try_from`) must be the
    // identity for every format.
    macro_rules! gen_test {
        ($name:ident) => {{
            let x = PixFmt::$name;
            let y = x.to_static::<$name>().unwrap();
            let z = PixFmt::try_from(y).unwrap();
            assert_eq!(x, z);
        }};
    }
    gen_test!(Mono8);
    gen_test!(Mono32f);
    gen_test!(RGB8);
    gen_test!(BayerRG8);
    gen_test!(BayerRG32f);
    gen_test!(BayerBG8);
    gen_test!(BayerBG32f);
    gen_test!(BayerGB8);
    gen_test!(BayerGB32f);
    gen_test!(BayerGR8);
    gen_test!(BayerGR32f);
    gen_test!(YUV444);
    gen_test!(YUV422);
    gen_test!(NV12);
}
/// Implementations of this trait describe the format of raw image data.
///
/// Note that when [const generics for custom
/// types](https://blog.rust-lang.org/2021/02/26/const-generics-mvp-beta.html#const-generics-for-custom-types)
/// are introduced to the rust compiler, we intend to switch PixelFormat to use
/// that feature.
///
/// The `std::any::Any` supertrait is what enables the runtime downcasting
/// used by `TryFrom<PhantomData<FMT>> for PixFmt`.
pub trait PixelFormat: std::any::Any + Clone {}
// Declares an empty marker struct `$name` implementing `PixelFormat`, with
// `$comment` attached as its rustdoc text.
macro_rules! define_pixel_format {
    ($name:ident, $comment:literal) => {
        #[doc = $comment]
        #[derive(Clone, Debug)]
        pub struct $name {}
        impl PixelFormat for $name {}
    };
}
// Marker types for every supported pixel format; the string literals double
// as the rustdoc descriptions of each marker.
define_pixel_format!(
    Mono8,
    "Luminance, 1 byte per pixel. Sometimes also called Gray8."
);
define_pixel_format!(
    Mono32f,
    "Luminance, 32 bytes per pixel, Little-Endian, IEEE-754"
);
define_pixel_format!(
    RGB8,
    "Red, Green, Blue, 1 byte each, total 3 bytes per pixel.
Also sometimes called `RGB8packed`."
);
define_pixel_format!(BayerRG8, "Bayer Red Green pattern, 1 byte per pixel.");
define_pixel_format!(BayerRG32f, "Bayer Red Green pattern, 4 bytes per pixel.");
define_pixel_format!(BayerBG8, "Bayer Blue Green pattern, 1 byte per pixel.");
define_pixel_format!(BayerBG32f, "Bayer Blue Green pattern, 4 bytes per pixel.");
define_pixel_format!(BayerGB8, "Bayer Green Blue pattern, 1 byte per pixel.");
define_pixel_format!(BayerGB32f, "Bayer Green Blue pattern, 4 bytes per pixel.");
define_pixel_format!(BayerGR8, "Bayer Green Red pattern, 1 byte per pixel.");
define_pixel_format!(BayerGR32f, "Bayer Green Red pattern, 4 bytes per pixel.");
define_pixel_format!(YUV444, "YUV 4:4:4 8-bit, total 3 bytes per pixel.");
define_pixel_format!(YUV422, "YUV 4:2:2 8-bit, total 2 bytes per pixel.");
define_pixel_format!(NV12, "NV12 format, average 12 bits per pixel");
#[test]
fn test_debug_types() {
    // The derived `Debug` for a fieldless struct prints just the type name.
    // Assert on the output instead of silently discarding the formatted
    // string (the previous version could never fail).
    assert_eq!(format!("{:?}", BayerRG8 {}), "BayerRG8");
}
| true
|
f5f0e48344b10c1890c34522a499c5394d9fc062
|
Rust
|
ryanfowler/lines
|
/src/cli.rs
|
UTF-8
| 4,191
| 2.671875
| 3
|
[
"MIT"
] |
permissive
|
// The MIT License (MIT)
//
// Copyright (c) 2022 Ryan Fowler
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
use clap::Parser;
use num_format::{Locale, ToFormattedString};
use serde::Serialize;
use std::path::PathBuf;
use std::str::FromStr;
use std::string::ToString;
use tabled::{
settings::{
object::{Columns, Rows},
Alignment, Border, Modify, Style,
},
Table, Tabled,
};
use crate::lang;
/// Output format selector for the CLI (`-o`/`--output`).
#[derive(Clone, Debug)]
pub enum Format {
    Json,
    Table,
}
impl FromStr for Format {
    type Err = String;
    /// Parses the lowercase format name; unrecognized input is echoed back
    /// as the error value.
    fn from_str(format: &str) -> Result<Self, Self::Err> {
        match format {
            "json" => Ok(Self::Json),
            "table" => Ok(Self::Table),
            other => Err(other.to_string()),
        }
    }
}
/// Count lines of code.
#[derive(Debug, Parser)]
#[clap(version, about)]
pub struct Args {
    // NOTE: clap surfaces the `///` comments below as --help text, so they
    // are user-facing strings, not just documentation.
    /// Output format ("table" or "json").
    #[clap(short = 'o', long = "output", default_value = "table")]
    pub format: Format,
    /// Show timing information.
    #[clap(short, long)]
    pub timing: bool,
    /// Directory or file to scan.
    #[clap(default_value = ".")]
    pub path: PathBuf,
}
/// Aggregated scan results; serialized directly for `-o json`.
#[derive(Debug, Serialize)]
pub struct Output {
    pub languages: Vec<LangOut>,
    pub total_num_files: u64,
    pub total_num_lines: u64,
    // Omitted from the JSON output when timing was not requested.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub elapsed_ms: Option<u64>,
}
/// Per-language file and line counts.
#[derive(Debug, Serialize)]
pub struct LangOut {
    pub language: lang::Language,
    pub num_files: u64,
    pub num_lines: u64,
}
/// Parses the command-line arguments via clap.
pub fn get_options() -> Args {
    Args::parse()
}
/// Writes `out` to stdout in the requested format.
pub fn write_output(out: &Output, format: Format) {
    match format {
        Format::Json => write_json_pretty(out),
        Format::Table => write_table(out),
    }
}
/// Serializes `out` as pretty-printed JSON to stdout.
fn write_json_pretty(out: &Output) {
    // `Output` contains only integers, vectors and an optional integer, so
    // serialization cannot fail in practice; state that assumption instead
    // of a bare `unwrap`.
    println!(
        "{}",
        serde_json::to_string_pretty(out).expect("Output is always JSON-serializable")
    );
}
// One rendered table row; `tabled` uses the rename attributes as the column
// headers.
#[derive(Tabled)]
struct Row {
    #[tabled(rename = "Language")]
    language: &'static str,
    // Pre-formatted with thousands separators (e.g. "1,234").
    #[tabled(rename = "Files")]
    files: String,
    #[tabled(rename = "Lines")]
    lines: String,
}
/// Renders the per-language counts as a psql-style table on stdout, with a
/// Total row when more than one language was found.
fn write_table(out: &Output) {
    // One row per language, plus (possibly) the Total row.
    let mut data = Vec::with_capacity(out.languages.len() + 1);
    for lang in &out.languages {
        data.push(Row {
            language: lang.language.as_str(),
            files: lang.num_files.to_formatted_string(&Locale::en),
            lines: lang.num_lines.to_formatted_string(&Locale::en),
        });
    }
    // A Total row is redundant when a single language is listed.
    let show_total = out.languages.len() != 1;
    if show_total {
        data.push(Row {
            language: "Total",
            files: out.total_num_files.to_formatted_string(&Locale::en),
            lines: out.total_num_lines.to_formatted_string(&Locale::en),
        });
    }
    let mut table = Table::new(&data);
    table
        .with(Style::psql())
        .with(Modify::new(Columns::first()).with(Alignment::left()))
        .with(Modify::new(Columns::new(1..=2)).with(Alignment::right()))
        .with(Modify::new(Rows::first()).with(Alignment::left()));
    if show_total {
        // Separate the Total row from the body with a horizontal rule.
        table.with(Modify::new(Rows::last()).with(Border::default().top('-')));
    }
    println!("{}", table);
    if let Some(elapsed_ms) = out.elapsed_ms {
        println!("\nTook: {}ms", elapsed_ms);
    }
}
| true
|
403514ae8025493783f4a9ac9ff0ced94e2e076f
|
Rust
|
qiongtubao/search_index_test
|
/src/query/cat_query.rs
|
UTF-8
| 14,072
| 2.546875
| 3
|
[] |
no_license
|
use tantivy::query::{Query, Weight, Scorer, Explanation, BooleanQuery, RangeQuery, BooleanWeight, BitSetDocSet, Intersection, ConstScorer, TermScorer, VecDocSet};
use tantivy::{Searcher, TantivyError, SegmentReader, DocSet, Term, InvertedIndexReader, DocId, SkipResult, BitSet};
use tantivy::schema::{Field, IndexRecordOption};
use tantivy::postings::SegmentPostings;
use std::sync::{Arc, RwLock};
use tantivy::directory::DirectoryClone;
use serde_json::de::ParserNumber::U64;
use std::borrow::BorrowMut;
use std::rc::Rc;
use std::time::SystemTime;
use tantivy::termdict::{TermDictionary, TermStreamer};
use std::collections::{Bound, BTreeMap, HashMap, BinaryHeap};
use std::fmt;
/// A query wrapping a `BooleanQuery` together with a range filter
/// (`left..right` on `field`) and a result limit.
#[derive(Clone, Debug)]
pub struct CatQuery {
    query: BooleanQuery,
    field: Field,
    left: u64,
    right: u64,
    limit: usize
}
impl CatQuery {
pub fn new(query: BooleanQuery, field: Field, left: u64, right: u64, limit: usize) -> Self {
CatQuery {
query,
field,
left,
right,
limit
}
}
pub fn set_limit(&mut self, limit: usize) {
self.limit = limit;
}
}
impl Query for CatQuery {
    /// Delegates weight construction to the wrapped boolean query and carries
    /// the range/limit parameters along in a `CatWeight`.
    fn weight(&self, searcher: &Searcher, scoring_enabled: bool) -> Result<Box<Weight>, TantivyError> {
        Ok(Box::new(CatWeight {
            weight: self.query.weight(searcher, scoring_enabled)?,
            field: self.field,
            left: self.left,
            right: self.right,
            limit: self.limit
        }))
    }
}
struct CatWeight {
weight: Box<dyn Weight>,
field: Field,
left: u64,
right: u64,
limit: usize
}
//impl DocSet for Rc<dyn DocSet> {
// fn advance(&mut self) -> bool {
// self.borrow_mut().advance()
// }
//
// fn doc(&self) -> u32 {
// unimplemented!()
// }
//
// fn size_hint(&self) -> u32 {
// unimplemented!()
// }
//
// fn get_name(&mut self) -> &'static str {
// unimplemented!()
// }
//}
#[derive(Clone)]
// Shareable wrapper around a `VecDocSet`: clones share the same underlying
// doc set through `Rc<RwLock<..>>`. Not currently used by the scorers below.
struct ArcVecDocSet {
    vec_doc_set:Rc<RwLock<VecDocSet>>,
}
impl DocSet for ArcVecDocSet {
    // Mutating calls acquire the write lock; read-only calls the read lock.
    // A poisoned lock panics (empty `expect` message).
    fn advance(&mut self) -> bool {
        self.vec_doc_set.write().expect("").advance()
    }
    fn doc(&self) -> u32 {
        self.vec_doc_set.read().expect("").doc()
    }
    fn size_hint(&self) -> u32 {
        self.vec_doc_set.read().expect("").size_hint()
    }
    fn get_name(&mut self) -> &'static str {
        self.vec_doc_set.write().expect("").get_name()
    }
}
//fn intersection_all(left: &mut DocSet, right: &mut Vec<DocSet>) -> bool {
//
// return false;
//}
// Advances `left` and `right` to their next common doc, remembering docs seen
// in `left` inside `cache` so later calls can short-circuit.
// Returns true when a common doc is found (readable via `.doc()` on either
// side), false when either set is exhausted.
// NOTE(review): the cache is only consulted for the *first* candidate taken
// from `right`, and assumes a cached doc implies membership in `left` —
// confirm that callers reuse `cache` only with the same `left` set.
fn intersection_cache(left: &mut DocSet, right: &mut DocSet, cache: &mut BitSet) -> bool {
    if !right.advance() {
        return false;
    }
    let mut candidate = right.doc();
    if cache.contains(candidate) {
        return true;
    }
    loop {
        // Leapfrog: each side skips forward to the other's candidate until
        // both land on the same doc.
        match left.skip_next(candidate) {
            SkipResult::Reached => {
                return true;
            }
            SkipResult::OverStep => {
                candidate = left.doc();
                // Remember docs present in `left` for future calls.
                cache.insert(candidate);
            }
            SkipResult::End => {
                return false;
            }
        }
        match right.skip_next(candidate) {
            SkipResult::Reached => {
                return true;
            }
            SkipResult::OverStep => {
                candidate = right.doc();
            }
            SkipResult::End => {
                return false;
            }
        }
    }
}
// Advances both doc sets to their next common doc id (classic leapfrog
// intersection). Returns true when both sides are positioned on the same doc,
// false when either side runs out. Intended to be called repeatedly to walk
// the whole intersection.
fn intersection(left: &mut DocSet, right: &mut DocSet) -> bool {
    if !left.advance() {
        return false;
    }
    let mut candidate = left.doc();
    loop {
        // Alternately skip each side forward to the current candidate.
        match right.skip_next(candidate) {
            SkipResult::Reached => {
                return true;
            }
            SkipResult::OverStep => {
                candidate = right.doc();
            }
            SkipResult::End => {
                return false;
            }
        }
        match left.skip_next(candidate) {
            SkipResult::Reached => {
                return true;
            }
            SkipResult::OverStep => {
                candidate = left.doc();
            }
            SkipResult::End => {
                return false;
            }
        }
    }
}
impl CatWeight {
fn term_range<'a>(&self, term_dict: &'a TermDictionary) -> TermStreamer<'a> {
use std::collections::Bound::*;
let mut term_stream_builder = term_dict.range();
term_stream_builder = match Bound::Included(Term::from_field_u64(self.field, self.left).value_bytes().to_owned()) {
Included(ref term_val) => term_stream_builder.ge(term_val),
Excluded(ref term_val) => term_stream_builder.gt(term_val),
Unbounded => term_stream_builder,
};
term_stream_builder = match Bound::Included(Term::from_field_u64(self.field, self.right).value_bytes().to_owned()) {
Included(ref term_val) => term_stream_builder.le(term_val),
Excluded(ref term_val) => term_stream_builder.lt(term_val),
Unbounded => term_stream_builder,
};
term_stream_builder.into_stream()
}
fn scorer1(&self, reader: &SegmentReader) -> Result<Box<Scorer>, TantivyError> {
let inverted_index = reader.inverted_index(self.field);
// let fieldnorm_reader = reader.get_fieldnorms_reader(field);
let mut scorer = self.weight.scorer(reader)?;
let mut doc_vec = vec![];
let mut v = vec![];
// println!("{:?}", scorer.size_hint());
scorer.for_each(&mut |doc, score| {
v.push(doc);
});
let mut num = 0;
for i in self.left..self.right {
let term = Term::from_field_u64(self.field, i);
if let Some(mut right) = inverted_index.read_postings(&term, IndexRecordOption::Basic) {
let array :Vec<Box<dyn DocSet>> = vec![Box::new(VecDocSet::from(v.clone())), Box::new(right)];
let mut intersection_scorer = Intersection::new(array);
let start_time = SystemTime::now();
while intersection_scorer.advance() {
doc_vec.push(intersection_scorer.doc());
num = num + 1;
if num >= self.limit {
return Ok(Box::new(ConstScorer::new(VecDocSet::from(doc_vec))));
}
}
println!("run time {:?}",SystemTime::now().duration_since(start_time).expect("??"));
}
}
Ok(Box::new(ConstScorer::new(VecDocSet::from(doc_vec))))
}
fn scorer2(&self, reader: &SegmentReader) -> Result<Box<Scorer>, TantivyError> {
let inverted_index = reader.inverted_index(self.field);
let max_doc = reader.max_doc();
let mut doc_bitset = BitSet::with_max_value(max_doc);
let term_dict = inverted_index.terms();
let mut term_range = self.term_range(term_dict);
while term_range.advance() {
let term_info = term_range.value();
let mut block_segment_postings = inverted_index
.read_block_postings_from_terminfo(term_info, IndexRecordOption::Basic);
while block_segment_postings.advance() {
for &doc in block_segment_postings.docs() {
doc_bitset.insert(doc);
}
}
}
let doc_bitset = BitSetDocSet::from(doc_bitset);
let mut scorer = self.weight.scorer(reader)?;
let array :Vec<Box<dyn DocSet>> = vec![Box::new(doc_bitset), Box::new(scorer)];
Ok(Box::new(ConstScorer::new(Intersection::new(array))))
}
fn scorer3(&self, reader: &SegmentReader) -> Result<Box<Scorer>, TantivyError> {
let inverted_index = reader.inverted_index(self.field);
let max_doc = reader.max_doc();
let mut doc_bitset = BitSet::with_max_value(max_doc);
for i in self.left..self.right {
let term = Term::from_field_u64(self.field, i);
if let Some(mut right) = inverted_index.read_postings(&term, IndexRecordOption::Basic) {
while right.advance() {
doc_bitset.insert(right.doc());
}
}
}
let doc_bitset = BitSetDocSet::from(doc_bitset);
let mut scorer = self.weight.scorer(reader)?;
let array :Vec<Box<dyn DocSet>> = vec![Box::new(doc_bitset), Box::new(scorer)];
Ok(Box::new(ConstScorer::new(Intersection::new(array))))
}
//3s
fn scorer4(&self, reader: &SegmentReader) -> Result<Box<Scorer>, TantivyError> {
let mut scorer = self.weight.scorer(reader)?;
let inverted_index = reader.inverted_index(self.field);
let max_doc = reader.max_doc();
let mut doc_bitset = BitSet::with_max_value(max_doc);
let mut btree_map = BTreeMap::new();
// println!("{:?}", scorer.size_hint());
scorer.for_each(&mut |doc, score| {
btree_map.insert(doc, score);
});
let mut k = 0;
for i in self.left..self.right {
let term = Term::from_field_u64(self.field, i);
if let Some(mut right) = inverted_index.read_postings(&term, IndexRecordOption::Basic) {
while right.advance() {
let doc_id = right.doc();
if btree_map.contains_key(&doc_id) {
doc_bitset.insert(doc_id);
if k >= self.limit && self.limit != 0 {
let doc_bitset = BitSetDocSet::from(doc_bitset);
return Ok(Box::new(ConstScorer::new(doc_bitset)));
}
}
}
}
}
let doc_bitset = BitSetDocSet::from(doc_bitset);
Ok(Box::new(ConstScorer::new(doc_bitset)))
}
fn scorer5(&self, reader: &SegmentReader) -> Result<Box<Scorer>, TantivyError> {
let mut scorer = self.weight.scorer(reader)?;
let inverted_index = reader.inverted_index(self.field);
let max_doc = reader.max_doc();
let mut doc_bitmap = BitSet::with_max_value(max_doc);
while scorer.advance() {
doc_bitmap.insert(scorer.doc());
}
let mut doc_bitset = BitSet::with_max_value(max_doc);
let mut k = 0;
for i in self.left..self.right {
let term = Term::from_field_u64(self.field, i);
if let Some(mut right) = inverted_index.read_postings(&term, IndexRecordOption::Basic) {
while right.advance() {
if doc_bitmap.contains(right.doc()) {
doc_bitset.insert(right.doc());
k= k + 1;
if k >= self.limit && self.limit != 0{
return Ok(Box::new(ConstScorer::new(BitSetDocSet::from(doc_bitset))));
}
}
}
}
}
Ok(Box::new(ConstScorer::new(BitSetDocSet::from(doc_bitset))))
}
fn scorer6(&self, reader: &SegmentReader) -> Result<Box<Scorer>, TantivyError> {
let inverted_index = reader.inverted_index(self.field);
let max_doc = reader.max_doc();
let mut doc_bitset = BitSet::with_max_value(max_doc);
for i in self.left..self.right {
let term = Term::from_field_u64(self.field, i);
if let Some(mut right) = inverted_index.read_postings(&term, IndexRecordOption::Basic) {
while right.advance() {
doc_bitset.insert(right.doc());
}
}
}
let mut scorer = self.weight.scorer(reader)?;
let mut doc_map = BitSet::with_max_value(max_doc);
let mut right = BitSetDocSet::from(doc_bitset);
let mut k = 0;
while intersection(&mut scorer, &mut right) {
doc_map.insert(scorer.doc());
k = k + 1;
if self.limit > 0 && k >= self.limit {
return Ok(Box::new(ConstScorer::new( BitSetDocSet::from(doc_map))));
}
}
Ok(Box::new(ConstScorer::new( BitSetDocSet::from(doc_map))))
}
fn scorer7(&self, reader: &SegmentReader) -> Result<Box<Scorer>, TantivyError> {
let inverted_index = reader.inverted_index(self.field);
let max_doc = reader.max_doc();
let mut doc_bitset = BitSet::with_max_value(max_doc);
let term_dict = inverted_index.terms();
let mut term_range = self.term_range(term_dict);
while term_range.advance() {
let term_info = term_range.value();
let mut block_segment_postings = inverted_index
.read_block_postings_from_terminfo(term_info, IndexRecordOption::Basic);
while block_segment_postings.advance() {
for &doc in block_segment_postings.docs() {
doc_bitset.insert(doc);
}
}
}
let mut scorer = self.weight.scorer(reader)?;
let mut doc_map = BitSet::with_max_value(max_doc);
let mut right = BitSetDocSet::from(doc_bitset);
let mut k = 0;
while intersection(&mut scorer, &mut right) {
doc_map.insert(scorer.doc());
k = k + 1;
if self.limit > 0 && k >= self.limit {
return Ok(Box::new(ConstScorer::new( BitSetDocSet::from(doc_map))));
}
}
Ok(Box::new(ConstScorer::new(BitSetDocSet::from(doc_map))))
}
}
impl Weight for CatWeight {
    /// Produces the per-segment scorer; scorer7 (term-dictionary range scan
    /// plus leapfrog intersection) is the strategy currently selected.
    fn scorer(&self, reader: &SegmentReader) -> Result<Box<Scorer>, TantivyError> {
        self.scorer7(reader)
    }
    /// Explains a match: constant score 1.0 if `doc` is in the scorer's doc
    /// set, otherwise an `InvalidArgument` error.
    fn explain(&self, reader: &SegmentReader, doc: u32) -> Result<Explanation, TantivyError> {
        let mut scorer = self.scorer(reader)?;
        if scorer.skip_next(doc) != SkipResult::Reached {
            return Err( TantivyError::InvalidArgument(format!("Document #({}) does not match", doc)));
        }
        Ok(Explanation::new("CatQuery", 1.0f32))
    }
}
| true
|
91ce448b0f9de05e8cbc8b2ec7a9cb7676bededb
|
Rust
|
gnoliyil/fuchsia
|
/src/sys/lib/fidl-fuchsia-pkg-ext/src/fidl_iterator_to_stream.rs
|
UTF-8
| 5,568
| 2.796875
| 3
|
[
"BSD-2-Clause"
] |
permissive
|
// Copyright 2022 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use {
fidl_fuchsia_pkg as fpkg,
futures::{future::TryFutureExt as _, stream::Stream},
};
/// Converts a proxy to a FIDL iterator like:
///
/// protocol PayloadIterator {
/// Next() -> (vector<Payload>:MAX payloads);
/// };
///
/// into a `Stream` of `Result<Vec<Payload>, fidl::Error>`s.
///
/// The returned stream will never yield an empty `Vec`. When e.g. `PayloadIterator::Next` returns
/// an empty Vec, the returned stream will yield `None` (signaling the end of the stream).
///
/// To use with a new protocol (e.g. `PayloadIterator`), implement `FidlIterator` for
/// `PayloadIteratorProxy`.
pub fn fidl_iterator_to_stream<T: FidlIterator>(
    iterator: T,
) -> impl Stream<Item = Result<Vec<T::Item>, fidl::Error>> + Unpin {
    // try_unfold threads the proxy through each step; mapping an empty batch
    // to `None` terminates the stream (the iterator protocol's end marker).
    futures::stream::try_unfold(iterator, |iterator| {
        iterator.next().map_ok(|v| if v.is_empty() { None } else { Some((v, iterator)) })
    })
}
/// A FIDL proxy for a FIDL protocol following the iterator pattern.
pub trait FidlIterator {
    /// Element type yielded in batches by `next`.
    type Item: Unpin;
    /// Requests the next batch; an empty vector signals end of iteration.
    fn next(&self) -> fidl::client::QueryResponseFut<Vec<Self::Item>>;
}
impl FidlIterator for fpkg::BlobInfoIteratorProxy {
    type Item = fpkg::BlobInfo;
    fn next(&self) -> fidl::client::QueryResponseFut<Vec<Self::Item>> {
        // Delegates to the FIDL-generated `next` method on the proxy.
        self.next()
    }
}
#[cfg(test)]
mod tests {
    use {
        super::*,
        assert_matches::assert_matches,
        fidl::endpoints::{ControlHandle as _, Responder as _},
        fuchsia_zircon_status::Status,
        futures::{
            future::join,
            stream::{StreamExt as _, TryStreamExt as _},
        },
    };
    // Test double driving the server end of fuchsia.pkg/BlobInfoIterator.
    struct MockIteratorServer {
        reqs: fpkg::BlobInfoIteratorRequestStream,
    }
    impl MockIteratorServer {
        // Returns the mock server plus the client-side stream under test.
        fn new() -> (Self, impl Stream<Item = Result<Vec<fpkg::BlobInfo>, fidl::Error>>) {
            let (proxy, reqs) =
                fidl::endpoints::create_proxy_and_stream::<fpkg::BlobInfoIteratorMarker>().unwrap();
            (Self { reqs }, fidl_iterator_to_stream(proxy))
        }
        // On Some(resp) responds with resp, else closes channel with NO_RESOURCES.
        async fn expect_next(&mut self, resp: Option<Vec<fpkg::BlobInfo>>) {
            let fpkg::BlobInfoIteratorRequest::Next { responder } =
                self.reqs.next().await.unwrap().unwrap();
            match resp {
                Some(mut resp) => responder.send(&mut resp.iter_mut()).unwrap(),
                None => responder.control_handle().shutdown_with_epitaph(Status::NO_RESOURCES),
            }
        }
    }
    // Helper: a BlobInfo whose merkle root is 32 copies of `u`.
    fn blob_info(u: u8) -> fpkg::BlobInfo {
        fpkg::BlobInfo { blob_id: fpkg::BlobId { merkle_root: [u; 32] }, length: 0 }
    }
    #[fuchsia_async::run_singlethreaded(test)]
    async fn read_one_item() {
        let (mut server, mut stream) = MockIteratorServer::new();
        let ((), item) = join(server.expect_next(Some(vec![blob_info(1)])), stream.next()).await;
        assert_matches!(item, Some(Ok(v)) if v == vec![blob_info(1)]);
    }
    #[fuchsia_async::run_singlethreaded(test)]
    async fn read_two_items() {
        let (mut server, mut stream) = MockIteratorServer::new();
        let ((), (first, second)) = join(
            async {
                server.expect_next(Some(vec![blob_info(1)])).await;
                server.expect_next(Some(vec![blob_info(2)])).await
            },
            async { (stream.next().await, stream.next().await) },
        )
        .await;
        assert_matches!(first, Some(Ok(v)) if v == vec![blob_info(1)]);
        assert_matches!(second, Some(Ok(v)) if v == vec![blob_info(2)]);
    }
    // A channel error surfaces once as Err, then the stream ends.
    #[fuchsia_async::run_singlethreaded(test)]
    async fn error_terminates() {
        let (mut server, mut stream) = MockIteratorServer::new();
        let ((), (first, second)) =
            join(server.expect_next(None), async { (stream.next().await, stream.next().await) })
                .await;
        assert_matches!(
            first,
            Some(Err(fidl::Error::ClientChannelClosed{status, ..}))
            if status == Status::NO_RESOURCES
        );
        assert_matches!(second, None);
    }
    // An empty batch ends the stream without yielding an empty Vec.
    #[fuchsia_async::run_singlethreaded(test)]
    async fn empty_response_terminates() {
        let (mut server, mut stream) = MockIteratorServer::new();
        let ((), item) = join(server.expect_next(Some(vec![])), stream.next()).await;
        assert_matches!(item, None);
    }
    #[fuchsia_async::run_singlethreaded(test)]
    async fn read_one_item_then_terminate_successfully() {
        let (mut server, stream) = MockIteratorServer::new();
        let ((), items) = join(
            async {
                server.expect_next(Some(vec![blob_info(1)])).await;
                server.expect_next(Some(vec![])).await
            },
            stream.map_err(|_| ()).try_concat(),
        )
        .await;
        assert_eq!(items, Ok(vec![blob_info(1)]));
    }
    #[fuchsia_async::run_singlethreaded(test)]
    async fn read_one_item_then_terminate_with_error() {
        let (mut server, stream) = MockIteratorServer::new();
        let ((), items) = join(
            async {
                server.expect_next(Some(vec![blob_info(1)])).await;
                server.expect_next(None).await
            },
            stream.map_err(|_| ()).collect::<Vec<_>>(),
        )
        .await;
        assert_eq!(items, vec![Ok(vec![blob_info(1)]), Err(())]);
    }
}
| true
|
8ce3fe1f4e8578ea5e7c632414e1f226b8b44e11
|
Rust
|
sbnair/toy-rust-ipfs
|
/src/config.rs
|
UTF-8
| 1,703
| 2.640625
| 3
|
[] |
no_license
|
use crypto;
use util;
use rustc_serialize::Decodable;
use rustc_serialize::base64::{self, ToBase64};
use rustc_serialize::json::{self, Json};
use rust_multihash::Multihash;
use std::io::Read;
use std::path::PathBuf;
// Default filesystem layout of the IPFS repository and the environment
// variable that overrides its location.
pub const DEFAULT_REPO_ROOT: &'static str = "~/";
pub const DEFAULT_REPO_PATH: &'static str = ".rust-ipfs";
pub const DEFAULT_CONFIG_FILE: &'static str = "config";
pub const ENV_NAME_REPO_DIR: &'static str = "IPFS_PATH";
// Default RSA key size used by `init` when generating the node identity.
pub const DEFAULT_KEYPAIR_NUM_BITS: usize = 2048;
/// Node identity: the peer id (hash of the public key) and the
/// base64-encoded private key.
#[derive(RustcEncodable, RustcDecodable)]
pub struct Identity {
    pub peer_id: Multihash,
    pub private_key: String,
}
/// Top-level repository configuration, (de)serialized as JSON.
#[derive(RustcEncodable, RustcDecodable)]
pub struct Config {
    pub identity: Identity,
}
impl Config {
    /// Parses a `Config` from a JSON reader, mapping both parse and decode
    /// failures to human-readable `String` errors.
    pub fn from_reader<R: Read>(reader: &mut R) -> Result<Config, String> {
        let json = try!(Json::from_reader(reader)
            .map_err(|e| format!("Error parsing Json: {}", e)));
        let mut decoder = json::Decoder::new(json);
        Decodable::decode(&mut decoder)
            .map_err(|e| format!("Error decoding Config from reader: {}", e))
    }
    /// Serializes this config to a JSON string.
    pub fn to_json_string(&self) -> json::EncodeResult<String> {
        json::encode(self)
    }
}
/// Returns the path of the config file inside the given repository root,
/// i.e. `<repo_path>/config`.
pub fn repo_path_to_config_file(repo_path: PathBuf) -> PathBuf {
    repo_path.join(DEFAULT_CONFIG_FILE)
}
/// Creates a fresh `Config` with a newly generated RSA key pair:
/// the peer id is the multihash of the public key and the private key is
/// stored base64-encoded.
pub fn init(num_key_pair_bits: usize) -> Config {
    let pkey = crypto::gen_key_pair(num_key_pair_bits);
    let pub_bytes = pkey.save_pub();
    let priv_b64_string = pkey.save_priv().to_base64(base64::STANDARD);
    Config {
        identity: Identity {
            peer_id: util::hash(&pub_bytes[..]),
            private_key: priv_b64_string,
        },
    }
}
| true
|
d558092d70d87ffa3719386025e1289889dbe541
|
Rust
|
a-kenji/relm4
|
/relm4-macros/src/funcs.rs
|
UTF-8
| 3,001
| 2.640625
| 3
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
use proc_macro2::TokenStream as TokenStream2;
use quote::quote;
use syn::{spanned::Spanned, Error, ImplItemMethod, Result};
// Token streams of the optional lifecycle hooks found in the macro input;
// each is `None` when the corresponding method was not defined.
pub(super) struct Funcs {
    pub pre_init: Option<TokenStream2>,
    pub post_init: Option<TokenStream2>,
    pub pre_connect_components: Option<TokenStream2>,
    pub post_connect_components: Option<TokenStream2>,
    pub manual_view: Option<TokenStream2>,
}
impl Funcs {
    /// Collects the optional lifecycle hooks (`pre_init`, `post_init`,
    /// `pre_connect_components`, `post_connect_components`, `manual_view`)
    /// from the methods of the macro input.
    ///
    /// Each hook may be defined at most once; a duplicate or an unknown
    /// method name yields a compile error spanning the offending method.
    pub fn new(funcs: &[ImplItemMethod]) -> Result<Self> {
        let mut pre_init = None;
        let mut post_init = None;
        let mut pre_connect_components = None;
        let mut post_connect_components = None;
        let mut manual_view = None;
        for func in funcs {
            let ident = &func.sig.ident;
            let stmts = &func.block.stmts;
            // Re-emit the method body statements so they can be spliced into
            // the generated code verbatim.
            let tokens = quote! { #(#stmts)* };
            if ident == "pre_init" {
                if pre_init.is_some() {
                    return Err(Error::new(
                        func.span().unwrap().into(),
                        "pre_init method defined multiple times",
                    ));
                }
                pre_init = Some(tokens);
            } else if ident == "post_init" {
                if post_init.is_some() {
                    return Err(Error::new(
                        func.span().unwrap().into(),
                        "post_init method defined multiple times",
                    ));
                }
                post_init = Some(tokens);
            } else if ident == "pre_connect_components" {
                if pre_connect_components.is_some() {
                    return Err(Error::new(
                        func.span().unwrap().into(),
                        "pre_connect_components method defined multiple times",
                    ));
                }
                pre_connect_components = Some(tokens);
            } else if ident == "post_connect_components" {
                if post_connect_components.is_some() {
                    return Err(Error::new(
                        func.span().unwrap().into(),
                        "post_connect_components method defined multiple times",
                    ));
                }
                post_connect_components = Some(tokens);
            } else if ident == "manual_view" {
                if manual_view.is_some() {
                    return Err(Error::new(
                        func.span().unwrap().into(),
                        "manual_view method defined multiple times",
                    ));
                }
                manual_view = Some(tokens);
            } else {
                // Bug fix: this message previously omitted the two
                // *_connect_components hooks that are accepted above.
                return Err(Error::new(
                    func.span().unwrap().into(),
                    "Expected identifier pre_init, post_init, pre_connect_components, post_connect_components or manual_view",
                ));
            }
        }
        Ok(Funcs {
            pre_init,
            post_init,
            pre_connect_components,
            post_connect_components,
            manual_view,
        })
    }
}
| true
|
475d1dd165de28a9e99fa2357018ec6a61c2eb15
|
Rust
|
dsnam/LocustDB
|
/src/engine/vector_op/nonzero_indices.rs
|
UTF-8
| 1,570
| 2.609375
| 3
|
[
"Apache-2.0"
] |
permissive
|
use std::marker::PhantomData;
use engine::typed_vec::AnyVec;
use engine::vector_op::*;
use engine::*;
#[derive(Debug)]
// Operator that emits the indices (as `U`) of all strictly positive entries
// of the input vector (of element type `T`).
pub struct NonzeroIndices<T, U> {
    input: BufferRef,
    output: BufferRef,
    t: PhantomData<T>, // marks the input element type without storing it
    u: PhantomData<U>, // marks the output index type without storing it
}
impl<T: GenericIntVec<T> + IntoUsize, U: GenericIntVec<U>> NonzeroIndices<T, U> {
    /// Boxes a new operator reading from `input` and writing indices to `output`.
    pub fn boxed<'a>(input: BufferRef, output: BufferRef) -> BoxedOperator<'a> {
        Box::new(NonzeroIndices::<T, U> { input, output, t: PhantomData, u: PhantomData })
    }
}
impl<'a, T: GenericIntVec<T> + IntoUsize, U: GenericIntVec<U>> VecOperator<'a> for NonzeroIndices<T, U> {
    // Appends the index of every entry > 0 to the output buffer.
    fn execute(&mut self, _: bool, scratchpad: &mut Scratchpad<'a>) {
        let exists = scratchpad.get::<T>(self.input);
        let mut unique = scratchpad.get_mut::<U>(self.output);
        for (index, &n) in exists.iter().enumerate() {
            if n > T::zero() {
                // NOTE(review): `U::from(index).unwrap()` panics if an index
                // overflows U — presumably callers guarantee it fits.
                unique.push(U::from(index).unwrap());
            }
        }
    }
    fn init(&mut self, _: usize, _: usize, _: bool, scratchpad: &mut Scratchpad<'a>) {
        // TODO(clemens): output size estimate?
        scratchpad.set(self.output, AnyVec::owned(Vec::<U>::new()));
    }
    fn inputs(&self) -> Vec<BufferRef> { vec![self.input] }
    fn outputs(&self) -> Vec<BufferRef> { vec![self.output] }
    // Input can be streamed in batches, but output must be fully materialized.
    fn can_stream_input(&self) -> bool { true }
    fn can_stream_output(&self, _: BufferRef) -> bool { false }
    fn allocates(&self) -> bool { true }
    fn display_op(&self, _: bool) -> String {
        format!("nonzero_indices({})", self.input)
    }
}
| true
|
51d8a6d2e4d7aa8627e9305148f8c6376b28884b
|
Rust
|
bytecodealliance/wasmtime
|
/crates/fuzzing/src/generators/module.rs
|
UTF-8
| 2,461
| 3.015625
| 3
|
[
"LLVM-exception",
"Apache-2.0"
] |
permissive
|
//! Generate a Wasm module and the configuration for generating it.
use arbitrary::{Arbitrary, Unstructured};
use wasm_smith::SwarmConfig;
/// Default module-level configuration for fuzzing Wasmtime.
///
/// Internally this uses `wasm-smith`'s own `SwarmConfig` but we further refine
/// the defaults here as well.
#[derive(Debug, Clone)]
pub struct ModuleConfig {
    #[allow(missing_docs)]
    pub config: SwarmConfig,
}
impl<'a> Arbitrary<'a> for ModuleConfig {
    // Starts from wasm-smith's own swarm config, then biases feature
    // probabilities to what Wasmtime wants fuzzed.
    fn arbitrary(u: &mut Unstructured<'a>) -> arbitrary::Result<ModuleConfig> {
        let mut config = SwarmConfig::arbitrary(u)?;
        // Allow multi-memory but make it unlikely
        if u.ratio(1, 20)? {
            config.max_memories = config.max_memories.max(2);
        } else {
            config.max_memories = 1;
        }
        // Allow multi-table by default.
        if config.reference_types_enabled {
            config.max_tables = config.max_tables.max(4);
        }
        // Allow enabling some various wasm proposals by default. Note that
        // these are all unconditionally turned off even with
        // `SwarmConfig::arbitrary`.
        config.memory64_enabled = u.ratio(1, 20)?;
        // Allow the threads proposal if memory64 is not already enabled. FIXME:
        // to allow threads and memory64 to coexist, see
        // https://github.com/bytecodealliance/wasmtime/issues/4267.
        config.threads_enabled = !config.memory64_enabled && u.ratio(1, 20)?;
        // We get better differential execution when we disallow traps, so we'll
        // do that most of the time.
        config.disallow_traps = u.ratio(9, 10)?;
        Ok(ModuleConfig { config })
    }
}
impl ModuleConfig {
    /// Uses this configuration and the supplied source of data to generate a
    /// Wasm module.
    ///
    /// If a `default_fuel` is provided, the resulting module will be configured
    /// to ensure termination; as doing so will add an additional global to the
    /// module, the pooling allocator, if configured, must also have its globals
    /// limit updated.
    pub fn generate(
        &self,
        input: &mut Unstructured<'_>,
        default_fuel: Option<u32>,
    ) -> arbitrary::Result<wasm_smith::Module> {
        let mut module = wasm_smith::Module::new(self.config.clone(), input)?;
        if let Some(default_fuel) = default_fuel {
            // Injects fuel-decrementing instrumentation (adds one global).
            module.ensure_termination(default_fuel);
        }
        Ok(module)
    }
}
| true
|
4e8281513614f05c03b919b01ee7782a5cdb7f56
|
Rust
|
denzp/rust-crate-compile-test
|
/src/cargo_messages.rs
|
UTF-8
| 2,020
| 2.78125
| 3
|
[
"MIT"
] |
permissive
|
use std::path::PathBuf;
use steps::check_errors::{CompilerMessage, MessageLocation, MessageType};
// Severity of a rustc diagnostic as emitted in cargo's JSON message format.
#[derive(Debug, Clone, PartialEq, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum DiagnosticLevel {
    Error,
    Warning,
    Note,
    Help,
    // rustc emits "" for sub-diagnostics without an explicit level.
    #[serde(rename = "")]
    Empty,
}
// One top-level JSON record from `cargo --message-format=json`.
#[derive(Debug, Deserialize)]
pub struct Diagnostic {
    pub message: Option<DiagnosticMessage>,
    pub reason: String,
}
// A compiler diagnostic, possibly with nested sub-diagnostics (`children`).
#[derive(Debug, Deserialize, Clone)]
pub struct DiagnosticMessage {
    pub message: String,
    pub level: DiagnosticLevel,
    pub code: Option<DiagnosticCode>,
    pub spans: Vec<DiagnosticSpan>,
    pub children: Vec<DiagnosticMessage>,
}
// Source location of a diagnostic; `is_primary` marks the main span.
#[derive(Debug, Deserialize, Clone)]
pub struct DiagnosticSpan {
    pub file_name: String,
    pub line_start: usize,
    pub is_primary: bool,
}
// Error code such as "E0308".
#[derive(Debug, Deserialize, Clone)]
pub struct DiagnosticCode {
    pub code: String,
}
impl Default for DiagnosticSpan {
    // Placeholder span used when a diagnostic carries no location.
    fn default() -> Self {
        DiagnosticSpan {
            file_name: "unknown".into(),
            line_start: 1,
            is_primary: true,
        }
    }
}
impl<'a> From<&'a str> for DiagnosticLevel {
    // Parses the upper-case level names used in expected-message annotations;
    // anything unrecognized maps to `Empty`.
    fn from(text: &str) -> Self {
        match text {
            "ERROR" => DiagnosticLevel::Error,
            "WARNING" => DiagnosticLevel::Warning,
            "NOTE" => DiagnosticLevel::Note,
            "HELP" => DiagnosticLevel::Help,
            _ => DiagnosticLevel::Empty,
        }
    }
}
// Idiom fix: implement `From` instead of a hand-written `Into` — the blanket
// impl in `std::convert` still gives callers `DiagnosticMessage::into()`.
impl From<DiagnosticMessage> for CompilerMessage {
    /// Converts a parsed cargo diagnostic into the internal `CompilerMessage`
    /// representation, using the first primary span (if any) as the location.
    fn from(msg: DiagnosticMessage) -> CompilerMessage {
        let location = msg
            .spans
            .into_iter()
            .find(|item| item.is_primary) // replaces `.filter(..).nth(0)`
            .map(|span| MessageLocation {
                file: PathBuf::from(span.file_name),
                line: span.line_start,
            });
        CompilerMessage {
            message: MessageType::Text(msg.message),
            level: msg.level,
            code: msg.code.map(|item| item.code),
            location,
        }
    }
}
| true
|
a96a0131c60eea56df1d546308850064e60d2371
|
Rust
|
mich101mich/aoc-2017
|
/src/days/day_03.rs
|
UTF-8
| 1,632
| 2.921875
| 3
|
[] |
no_license
|
use crate::utils::*;
#[allow(unused)]
// AoC 2017 day 3, part 2: walk the square spiral, storing in each cell the
// sum of its already-filled neighbors, and print the first value written
// that exceeds the puzzle input.
pub fn run() {
    #[allow(unused_variables)]
    let input = include_str!("../input/03.txt");
    // let input = r#"1024"#;
    let num = parse(input);
    // Seed the first two spiral cells with value 1.
    let mut values: HashMap<(isize, isize), isize> = HashMap::new();
    values.insert((0, 0), 1);
    values.insert((1, 0), 1);
    let mut current = 1isize;
    let mut x = 1isize;
    let mut y = 0isize;
    // dir: 0 = up, 1 = right, 2 = down, 3 = left (counter-clockwise spiral).
    let mut dir = 0;
    while current < num {
        match dir {
            0 => y -= 1,
            1 => x += 1,
            2 => y += 1,
            3 => x -= 1,
            n => panic!("Invalid dir: {}", n),
        }
        // Sum all filled cells adjacent to (x, y); `moore_i` presumably
        // computes the Moore (chessboard) distance between two points.
        current = values
            .iter()
            .filter(|(&pos, _)| moore_i(pos, (x, y)) == 1)
            .map(|(_, &v)| v)
            .sum();
        values.insert((x, y), current);
        // Turn left at the diagonals (and one past the diagonal on the right
        // edge, which is where each new spiral ring begins).
        if dir != 1 && x.abs() == y.abs() || dir == 1 && x == y + 1 {
            dir = (dir + 3) % 4;
        }
    }
    pv!(current);
}
#[allow(unused)]
// AoC 2017 day 3, part 1: walk the spiral until reaching cell `num`, then
// print its Manhattan distance from the origin.
pub fn part_one() {
    #[allow(unused_variables)]
    let input = include_str!("../input/03.txt");
    // let input = r#"1024"#;
    let num = isize::from_str(input).unwrap();
    // Start at cell 3, position (1, -1), heading left — the state right
    // after the first two steps of the spiral.
    let mut current = 3;
    let mut x: isize = 1;
    let mut y: isize = -1;
    let mut dir = 3;
    while current < num {
        match dir {
            0 => y -= 1,
            1 => x += 1,
            2 => y += 1,
            3 => x -= 1,
            n => panic!("Invalid dir: {}", n),
        }
        current += 1;
        // Same corner-turning rule as in `run`.
        if dir != 1 && x.abs() == y.abs() || dir == 1 && x == y + 1 {
            dir = (dir + 3) % 4;
        }
    }
    pv!(x.abs() + y.abs());
}
| true
|
b8f39dc8c3961af0519e06b651ca3dde95ff5546
|
Rust
|
hgzimmerman/rust_sms
|
/src/models/users/mod.rs
|
UTF-8
| 1,230
| 2.609375
| 3
|
[] |
no_license
|
mod user_state;
mod realized_user;
pub use self::user_state::UserState;
pub use self::realized_user::RealizedUser;
use schema::users;
use diesel::pg::PgConnection;
use diesel;
use diesel::prelude::*;
/// Db interfaceable user
#[derive(Queryable, Identifiable, Clone, Debug, AsChangeset)]
pub struct User {
    pub id: i32,
    pub first_name: String,
    pub last_name: String,
    pub phone_number: String, // TODO create a phone number type?
    // groups: Vec<Group>,
    pub state: i32 // integer encoding of `UserState`
}
// Insertable form of `User` (no id; the row is created by `db_insert`).
#[derive(Insertable)]
#[table_name="users"]
pub struct NewUser {
    pub first_name: String,
    pub last_name: String,
    pub phone_number: String,
    pub state: i32 // integer encoding of `UserState`
}
impl NewUser {
    /// Creates a user record in the initial `UserState::StartState`.
    pub fn new(first_name: String, last_name: String, phone_number: String) -> NewUser {
        NewUser {
            first_name: first_name,
            last_name: last_name,
            phone_number: phone_number,
            //groups: Vec::new(),
            state: UserState::StartState.into(),
        }
    }
    /// Inserts this user into the `users` table.
    /// NOTE(review): panics on any database error via `expect`.
    pub fn db_insert(&self, connection: &PgConnection) {
        use schema::users;
        diesel::insert(self)
            .into(users::table)
            .execute(connection)
            .expect("Error saving user");
    }
}
| true
|
c791fa8f0210c1fa415f6056bdcf43ce327b00dc
|
Rust
|
daniel-e/rustml
|
/src/features.rs
|
UTF-8
| 567
| 2.8125
| 3
|
[] |
no_license
|
use math::{Mean, Var};
use math::{Dimension, Normalization};
/// Trait to estimate the mean and the variance of a set of samples.
pub trait Scale<T> {
    // Returns a scaled copy of `self` using the given normalization.
    fn scale(&self, nrm: Normalization) -> T;
}
impl Scale<Matrix<f32>> for Matrix<f32> {
    // NOTE(review): this impl is incomplete — it computes the per-column mean
    // but never uses it and does not return a Matrix, so it cannot compile as
    // written. `Matrix` is also not imported in this file. Work in progress.
    fn scale(&self, nrm: Normalization) -> Matrix<f32> {
        let mean_vec = self.mean(Dimension::Column);
    }
}
// ----------------------------------------------------------------------------
#[cfg(test)]
mod tests {
    use super::*;
    use math::Normalization;
    #[test]
    fn test_parameters() {
        // TODO: no assertions yet.
    }
}
| true
|
42f45175449ee7df7645a45db999e3adbc7c9cd7
|
Rust
|
parallaxsecond/rust-tss-esapi
|
/tss-esapi/src/structures/lists/tagged_tpm_property.rs
|
UTF-8
| 3,865
| 2.515625
| 3
|
[
"Apache-2.0"
] |
permissive
|
// Copyright 2021 Contributors to the Parsec project.
// SPDX-License-Identifier: Apache-2.0
use crate::{
constants::PropertyTag,
structures::TaggedProperty,
tss2_esys::{TPML_TAGGED_TPM_PROPERTY, TPMS_TAGGED_PROPERTY},
Error, Result, WrapperErrorKind,
};
use log::error;
use std::{convert::TryFrom, iter::IntoIterator, ops::Deref};
/// A structure holding a list of tagged tpm properties.
///
/// # Details
/// This corresponds to the TPML_TAGGED_TPM_PROPERTY structure.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TaggedTpmPropertyList {
    // Backing storage; its length is bounded by `Self::MAX_SIZE` on conversion.
    tagged_tpm_properties: Vec<TaggedProperty>,
}
impl TaggedTpmPropertyList {
    /// Maximum number of properties the TSS structure can carry.
    pub const MAX_SIZE: usize = Self::calculate_max_size();
    /// Finds the first [TaggedProperty] in the list matching the provided `property_tag`.
    pub fn find(&self, property_tag: PropertyTag) -> Option<&TaggedProperty> {
        self.tagged_tpm_properties
            .iter()
            .find(|tp| tp.property() == property_tag)
    }
    /// Private function that calculates the maximum number
    /// elements allowed in internal storage.
    const fn calculate_max_size() -> usize {
        crate::structures::capability_data::max_cap_size::<TPMS_TAGGED_PROPERTY>()
    }
}
// Deref to the inner Vec so slice/Vec methods work directly on the list.
impl Deref for TaggedTpmPropertyList {
    type Target = Vec<TaggedProperty>;
    fn deref(&self) -> &Self::Target {
        &self.tagged_tpm_properties
    }
}
impl AsRef<[TaggedProperty]> for TaggedTpmPropertyList {
    fn as_ref(&self) -> &[TaggedProperty] {
        self.tagged_tpm_properties.as_slice()
    }
}
// Fallible: rejects vectors longer than the TSS structure can hold.
impl TryFrom<Vec<TaggedProperty>> for TaggedTpmPropertyList {
    type Error = Error;
    fn try_from(tagged_tpm_properties: Vec<TaggedProperty>) -> Result<Self> {
        if tagged_tpm_properties.len() > Self::MAX_SIZE {
            error!("Failed to convert Vec<TaggedProperty> into TaggedTpmPropertyList, to many items (> {})", Self::MAX_SIZE);
            return Err(Error::local_error(WrapperErrorKind::InvalidParam));
        }
        Ok(TaggedTpmPropertyList {
            tagged_tpm_properties,
        })
    }
}
impl IntoIterator for TaggedTpmPropertyList {
    type Item = TaggedProperty;
    type IntoIter = std::vec::IntoIter<Self::Item>;
    fn into_iter(self) -> Self::IntoIter {
        self.tagged_tpm_properties.into_iter()
    }
}
// Converts the raw FFI list, validating `count` before reading that many
// entries out of the fixed-size array.
impl TryFrom<TPML_TAGGED_TPM_PROPERTY> for TaggedTpmPropertyList {
    type Error = Error;
    fn try_from(tpml_tagged_tpm_property: TPML_TAGGED_TPM_PROPERTY) -> Result<Self> {
        let count = usize::try_from(tpml_tagged_tpm_property.count).map_err(|e| {
            error!(
                "Failed to parse count in TPML_TAGGED_TPM_PROPERTY as usize: {}",
                e
            );
            Error::local_error(WrapperErrorKind::InvalidParam)
        })?;
        if count > Self::MAX_SIZE {
            error!(
                "Invalid size value in TPML_TAGGED_TPM_PROPERTY (> {})",
                Self::MAX_SIZE,
            );
            return Err(Error::local_error(WrapperErrorKind::InvalidParam));
        }
        // Convert each raw entry, short-circuiting on the first failure.
        tpml_tagged_tpm_property.tpmProperty[..count]
            .iter()
            .map(|&tp| TaggedProperty::try_from(tp))
            .collect::<Result<Vec<TaggedProperty>>>()
            .map(|tagged_tpm_properties| TaggedTpmPropertyList {
                tagged_tpm_properties,
            })
    }
}
// Converts back to the raw FFI list; cannot overflow the fixed array because
// the list's length is bounded by MAX_SIZE at construction time.
impl From<TaggedTpmPropertyList> for TPML_TAGGED_TPM_PROPERTY {
    fn from(tagged_tpm_property_list: TaggedTpmPropertyList) -> Self {
        let mut tpml_tagged_tpm_property: TPML_TAGGED_TPM_PROPERTY = Default::default();
        for tagged_property in tagged_tpm_property_list {
            tpml_tagged_tpm_property.tpmProperty[tpml_tagged_tpm_property.count as usize] =
                tagged_property.into();
            tpml_tagged_tpm_property.count += 1;
        }
        tpml_tagged_tpm_property
    }
}
| true
|
246c11bee28f1fffa7046beb97abd954518c6e5d
|
Rust
|
shadyproject/learn
|
/rust/gentle_introduction/fun1.rs
|
UTF-8
| 834
| 4.09375
| 4
|
[
"MIT"
] |
permissive
|
/// Returns the square of `value`.
fn sqr(x: f64) -> f64 {
    let value = x;
    value * value
}
/// Absolute value of `x`.
///
/// Delegates to the standard library's `f64::abs` instead of a hand-rolled
/// comparison; the original branch returned `-0.0` for an input of `+0.0`
/// (equal under `==`, but with the wrong sign bit).
fn abs(x: f64) -> f64 {
    x.abs()
}
/// Ensures a number always falls in the range [`x1`, `x2`]: values below
/// `x1` become `x1`, values above `x2` become `x2`, everything else passes
/// through unchanged.
fn clamp(x: f64, x1: f64, x2: f64) -> f64 {
    match x {
        v if v < x1 => x1,
        v if v > x2 => x2,
        v => v,
    }
}
/// Exercises `sqr`, `abs` and `clamp` with the same inputs (and identical
/// output) as the original walkthrough.
fn main() {
    println!("square is {}", sqr(2.0));

    let num = -1.0;
    println!("absolute value of {} is {}", num, abs(num));

    let min = 5.0;
    let max = 18.0;
    // Below range, above range, and inside range — printed in that order.
    for value in [1.0, 99.0, 7.0].iter() {
        println!("clamping {}: {}", value, clamp(*value, min, max));
    }
}
| true
|
a1f2d4133ec0f8f9e2c9a484dc5f43bb657af01a
|
Rust
|
ivanhrabcak/shopping-list-rust
|
/src/repositories/user.rs
|
UTF-8
| 3,599
| 2.828125
| 3
|
[] |
no_license
|
#![allow(proc_macro_derive_resolution_fallback)]
use diesel::prelude::*;
use diesel::dsl::*;
use crate::models;
use crate::repositories::schema::users;
use crate::repositories::schema::users::dsl::*;
use crate::request::Credentials;
use crypto::sha2::Sha512;
use crypto::digest::Digest;
use std::convert::TryFrom;
/// Input data for a user that has not yet been persisted.
#[derive(Clone)]
pub struct NewUser {
    // Desired login name; `create_user` rejects it if already taken.
    pub username: String,
    // Expected to be a 128-character SHA-512 hex digest -- see the
    // length check in `create_user`.
    pub password: String
}
/// Returns `true` when a row with `user.username` already exists.
/// Logs the outcome to stdout either way.
fn user_exists(user: NewUser, conn: &PgConnection) -> bool {
    let lookup = users::table
        .filter(username.eq(user.username))
        .select(id)
        .first::<i32>(conn);
    match lookup {
        Ok(existing_id) => {
            println!("User exists, with id {}", existing_id);
            true
        }
        Err(_) => {
            println!("User doesnt exist!");
            false
        }
    }
}
/// Hex-encodes the SHA-512 digest of `string`.
fn sha512_hash_string(string: String) -> String {
    let mut digest = Sha512::new();
    digest.input_str(string.as_str());
    digest.result_str()
}
/// Inserts a new user row and returns the stored record.
///
/// Fails (with `Err(())`) when the password is not a 128-char digest,
/// the username is taken, or any database operation errors out.
pub fn create_user(user: NewUser, conn: &PgConnection) -> Result<models::User, ()> {
    // Only pre-hashed passwords (SHA-512 hex, 128 chars) are accepted.
    if user.password.len() != 128 {
        println!("Wrong length!");
        return Err(());
    }
    let new_username = &user.username;
    if user_exists(user.clone(), conn) {
        return Err(());
    }
    // NOTE(review): deriving the id from the current row count is racy
    // under concurrent inserts -- a DB sequence would be safer. Preserved
    // as-is to keep behavior unchanged.
    let row_count: i64 = users::table
        .select(count(id))
        .first(conn)
        .map_err(|_| ())?;
    let new_user_id: i32 = i32::try_from(row_count).map_err(|_| ())?;
    let new_user = models::User {
        id: new_user_id,
        username: new_username.to_string(),
        password: user.password
    };
    insert_into(users)
        .values(&new_user)
        .get_result::<(i32, String, String)>(conn)
        .map(|_| new_user.clone())
        .map_err(|_| ())
}
/// Deletes `user`'s row after verifying the caller-supplied plain-text
/// password hashes to the stored digest.
pub fn remove_user(user: models::User, user_password: String, conn: &PgConnection) -> Result<(), ()>{
    if sha512_hash_string(user_password) != user.password {
        return Err(());
    }
    delete(users.filter(id.eq(user.id)))
        .execute(conn)
        .map(|_| ())
        .map_err(|_| ())
}
/// Looks up a user row by exact username; maps any diesel error to `()`.
pub fn get_user_by_username(name: String, conn: &PgConnection) -> Result<models::User, ()> {
    users
        .filter(username.eq(name))
        .get_result::<models::User>(conn)
        .map_err(|_| ())
}
/// Looks up a user row by primary key; maps any diesel error to `()`.
pub fn get_user_by_user_id(user_id: i32, conn: &PgConnection) -> Result<models::User, ()> {
    users
        .filter(id.eq(user_id))
        .get_result::<models::User>(conn)
        .map_err(|_| ())
}
/// Authenticates `credentials` and returns the matching user on success.
pub fn get_user_with_credentials(credentials: Credentials, conn: &PgConnection) -> Result<models::User, ()> {
    let user_id: i32 = users
        .filter(username.eq(credentials.username))
        .select(id)
        .get_result::<i32>(conn)
        .map_err(|_| ())?;
    let user: models::User = users.find(user_id).first(conn).map_err(|_| ())?;
    // NOTE(review): the stored digest is compared against the supplied
    // password verbatim, i.e. the caller must already send the hashed form.
    if user.password == credentials.password {
        Ok(user)
    } else {
        Err(())
    }
}
/// Replaces `user`'s password after checking the old one matches.
pub fn change_user_password(user: models::User, old_password: String, new_password: String, conn: &PgConnection) -> Result<(), ()> {
    // NOTE(review): unlike `remove_user`, the old password is compared
    // without hashing -- callers must pass the stored (hashed) form.
    if old_password != user.password {
        return Err(());
    }
    update(users.filter(id.eq(user.id)))
        .set(password.eq(new_password))
        .execute(conn)
        .map(|_| ())
        .map_err(|_| ())
}
| true
|
026d5084f6a2f3ed0b6b2e540f6b0c52e3fe909e
|
Rust
|
r-asou/databend
|
/common/datavalues/src/types/data_type_coercion.rs
|
UTF-8
| 15,279
| 2.59375
| 3
|
[
"Apache-2.0"
] |
permissive
|
// Copyright 2020 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::cmp;
use common_exception::ErrorCode;
use common_exception::Result;
use crate::prelude::DataType;
use crate::DataField;
use crate::DataValueArithmeticOperator;
/// Determine if a DataType is signed numeric or not
/// (signed integers and both float widths).
pub fn is_signed_numeric(dt: &DataType) -> bool {
    matches!(
        dt,
        DataType::Int8
            | DataType::Int16
            | DataType::Int32
            | DataType::Int64
            | DataType::Float32
            | DataType::Float64
    )
}
/// Determine if a DataType is numeric (signed, unsigned, or floating).
pub fn is_numeric(dt: &DataType) -> bool {
    matches!(
        dt,
        DataType::UInt8 | DataType::UInt16 | DataType::UInt32 | DataType::UInt64
    ) || is_signed_numeric(dt)
}
/// Determine if a DataType is an interval type (any interval unit).
pub fn is_interval(dt: &DataType) -> bool {
    matches!(dt, DataType::Interval(_))
}
/// Widens a numeric byte width by doubling it, saturating at 8 bytes
/// (the largest native width); widths >= 8 pass through unchanged.
fn next_size(size: usize) -> usize {
    if size < 8 {
        size * 2
    } else {
        size
    }
}
/// Determine if a DataType is a floating point type.
pub fn is_floating(dt: &DataType) -> bool {
    matches!(dt, DataType::Float32 | DataType::Float64)
}
/// Determine if a DataType is a date or datetime variant.
pub fn is_date_or_date_time(dt: &DataType) -> bool {
    matches!(
        dt,
        DataType::Date16 | DataType::Date32 | DataType::DateTime32(_)
    )
}
/// Determine if a DataType is an integer (numeric but not floating).
pub fn is_integer(dt: &DataType) -> bool {
    is_numeric(dt) && !is_floating(dt)
}
/// Width in bytes of a numeric `DataType`; errors on non-numeric input.
pub fn numeric_byte_size(dt: &DataType) -> Result<usize> {
    let size = match dt {
        DataType::Int8 | DataType::UInt8 => 1,
        DataType::Int16 | DataType::UInt16 => 2,
        DataType::Int32 | DataType::UInt32 | DataType::Float32 => 4,
        DataType::Int64 | DataType::UInt64 | DataType::Float64 => 8,
        other => {
            return Err(ErrorCode::BadArguments(format!(
                "Function number_byte_size argument must be numeric types, but got {:?}",
                other
            )))
        }
    };
    Ok(size)
}
/// Builds the narrowest `DataType` matching the requested signedness,
/// floatness and byte width.
///
/// Widths above 8 bytes saturate to the corresponding 64-bit type until
/// big-int/decimal support exists; unrepresentable combinations (e.g.
/// 3-byte widths) produce a `BadDataValueType` error.
pub fn construct_numeric_type(
    is_signed: bool,
    is_floating: bool,
    byte_size: usize,
) -> Result<DataType> {
    match (is_signed, is_floating, byte_size) {
        (false, false, 1) => Ok(DataType::UInt8),
        (false, false, 2) => Ok(DataType::UInt16),
        (false, false, 4) => Ok(DataType::UInt32),
        (false, false, 8) => Ok(DataType::UInt64),
        (false, true, 4) => Ok(DataType::Float32),
        (false, true, 8) => Ok(DataType::Float64),
        (true, false, 1) => Ok(DataType::Int8),
        (true, false, 2) => Ok(DataType::Int16),
        (true, false, 4) => Ok(DataType::Int32),
        (true, false, 8) => Ok(DataType::Int64),
        (true, true, 1) => Ok(DataType::Float32),
        (true, true, 2) => Ok(DataType::Float32),
        (true, true, 4) => Ok(DataType::Float32),
        (true, true, 8) => Ok(DataType::Float64),
        // TODO support bigint and decimal types, now we just let's overflow
        // Bug fix: these two oversize arms were swapped -- unsigned
        // (is_signed == false) previously mapped to Int64 and signed to
        // UInt64. Saturate to the *matching* 64-bit type instead.
        (false, false, d) if d > 8 => Ok(DataType::UInt64),
        (true, false, d) if d > 8 => Ok(DataType::Int64),
        (_, true, d) if d > 8 => Ok(DataType::Float64),
        _ => Result::Err(ErrorCode::BadDataValueType(format!(
            "Can't construct type from is_signed: {}, is_floating: {}, byte_size: {}",
            is_signed, is_floating, byte_size
        ))),
    }
}
/// Coercion rule for numerical types: The type that both lhs and rhs
/// can be casted to for numerical calculation, while maintaining
/// maximum precision.
///
/// With `allow_overflow` the result width is clamped to 8 bytes; without
/// it, combinations that would need more than 8 bytes are rejected.
pub fn numerical_coercion(
    lhs_type: &DataType,
    rhs_type: &DataType,
    allow_overflow: bool,
) -> Result<DataType> {
    // Classify both operands once up front.
    let has_float = is_floating(lhs_type) || is_floating(rhs_type);
    let has_integer = is_integer(lhs_type) || is_integer(rhs_type);
    let has_signed = is_signed_numeric(lhs_type) || is_signed_numeric(rhs_type);
    let size_of_lhs = numeric_byte_size(lhs_type)?;
    let size_of_rhs = numeric_byte_size(rhs_type)?;
    // Widest unsigned-integer operand in bytes (0 when that side is signed).
    let max_size_of_unsigned_integer = cmp::max(
        if is_signed_numeric(lhs_type) {
            0
        } else {
            size_of_lhs
        },
        if is_signed_numeric(rhs_type) {
            0
        } else {
            size_of_rhs
        },
    );
    // Widest signed operand in bytes (0 when that side is unsigned).
    let max_size_of_signed_integer = cmp::max(
        if !is_signed_numeric(lhs_type) {
            0
        } else {
            size_of_lhs
        },
        if !is_signed_numeric(rhs_type) {
            0
        } else {
            size_of_rhs
        },
    );
    // Widest integer operand in bytes (0 when that side is floating).
    let max_size_of_integer = cmp::max(
        if !is_integer(lhs_type) {
            0
        } else {
            size_of_lhs
        },
        if !is_integer(rhs_type) {
            0
        } else {
            size_of_rhs
        },
    );
    // Widest floating operand in bytes (0 when that side is an integer).
    let max_size_of_float = cmp::max(
        if !is_floating(lhs_type) {
            0
        } else {
            size_of_lhs
        },
        if !is_floating(rhs_type) {
            0
        } else {
            size_of_rhs
        },
    );
    // Double the width when precision could otherwise be lost: an integer at
    // least as wide as the float operand, or a signed type mixed with an
    // unsigned type of equal or greater width.
    let should_double = (has_float && has_integer && max_size_of_integer >= max_size_of_float)
        || (has_signed && max_size_of_unsigned_integer >= max_size_of_signed_integer);
    let mut max_size = if should_double {
        cmp::max(size_of_rhs, size_of_lhs) * 2
    } else {
        cmp::max(size_of_rhs, size_of_lhs)
    };
    // Beyond 8 bytes there is no native type: clamp if overflow is allowed,
    // otherwise reject the combination.
    if max_size > 8 {
        if allow_overflow {
            max_size = 8
        } else {
            return Result::Err(ErrorCode::BadDataValueType(format!(
                "Can't construct type from {} and {}",
                lhs_type, rhs_type
            )));
        }
    }
    construct_numeric_type(has_signed, has_float, max_size)
}
/// Result type of a binary arithmetic operator over two numeric inputs.
///
/// Errors with `BadDataValueType` when either side is non-numeric.
#[inline]
pub fn numerical_arithmetic_coercion(
    op: &DataValueArithmeticOperator,
    lhs_type: &DataType,
    rhs_type: &DataType,
) -> Result<DataType> {
    // error on any non-numeric type
    if !is_numeric(lhs_type) || !is_numeric(rhs_type) {
        return Result::Err(ErrorCode::BadDataValueType(format!(
            "DataValue Error: Unsupported ({:?}) {} ({:?})",
            lhs_type, op, rhs_type
        )));
    };
    let has_signed = is_signed_numeric(lhs_type) || is_signed_numeric(rhs_type);
    let has_float = is_floating(lhs_type) || is_floating(rhs_type);
    let max_size = cmp::max(numeric_byte_size(lhs_type)?, numeric_byte_size(rhs_type)?);
    match op {
        // Addition/multiplication can carry into the next wider size.
        DataValueArithmeticOperator::Plus | DataValueArithmeticOperator::Mul => {
            construct_numeric_type(has_signed, has_float, next_size(max_size))
        }
        DataValueArithmeticOperator::Modulo => {
            if has_float {
                return Ok(DataType::Float64);
            }
            // From clickhouse: NumberTraits.h
            // If modulo of division can yield negative number, we need larger type to accommodate it.
            // Example: toInt32(-199) % toUInt8(200) will return -199 that does not fit in Int8, only in Int16.
            let result_is_signed = is_signed_numeric(lhs_type);
            let right_size = numeric_byte_size(rhs_type)?;
            let size_of_result = if result_is_signed {
                next_size(right_size)
            } else {
                right_size
            };
            construct_numeric_type(result_is_signed, false, size_of_result)
        }
        // Subtraction can go negative, so the result is always signed.
        DataValueArithmeticOperator::Minus => {
            construct_numeric_type(true, has_float, next_size(max_size))
        }
        // Division always produces Float64.
        DataValueArithmeticOperator::Div => Ok(DataType::Float64),
    }
}
/// Result type of arithmetic where at least one operand is a
/// date/datetime: `date + x` keeps the date side's type; `date - x`
/// keeps it for numeric/interval `x`, otherwise (date minus date) the
/// result is a signed Int32 difference. Other operators are rejected.
#[inline]
pub fn datetime_arithmetic_coercion(
    op: &DataValueArithmeticOperator,
    lhs_type: &DataType,
    rhs_type: &DataType,
) -> Result<DataType> {
    let unsupported = Result::Err(ErrorCode::BadDataValueType(format!(
        "DataValue Error: Unsupported date coercion ({:?}) {} ({:?})",
        lhs_type, op, rhs_type
    )));
    if !is_date_or_date_time(lhs_type) && !is_date_or_date_time(rhs_type) {
        return unsupported;
    }
    // Normalize so `date_side` carries the date/datetime operand.
    let (date_side, other_side) = if is_date_or_date_time(lhs_type) {
        (lhs_type.clone(), rhs_type.clone())
    } else {
        (rhs_type.clone(), lhs_type.clone())
    };
    match op {
        DataValueArithmeticOperator::Plus => Ok(date_side),
        DataValueArithmeticOperator::Minus => {
            if is_numeric(&other_side) || is_interval(&other_side) {
                Ok(date_side)
            } else {
                // Date minus Date or DateTime minus DateTime
                Ok(DataType::Int32)
            }
        }
        _ => unsupported,
    }
}
/// Result type of `date/datetime [+/-] interval` (in either operand
/// order): the date/datetime side's type. Any other shape or operator
/// is rejected with `BadDataValueType`.
#[inline]
pub fn interval_arithmetic_coercion(
    op: &DataValueArithmeticOperator,
    lhs_type: &DataType,
    rhs_type: &DataType,
) -> Result<DataType> {
    let unsupported = Result::Err(ErrorCode::BadDataValueType(format!(
        "DataValue Error: Unsupported date coercion ({:?}) {} ({:?})",
        lhs_type, op, rhs_type
    )));
    // One side must be a date/datetime and the other an interval.
    let lhs_is_date = is_date_or_date_time(lhs_type);
    let shape_is_valid = (lhs_is_date && is_interval(rhs_type))
        || (is_date_or_date_time(rhs_type) && is_interval(lhs_type));
    if !shape_is_valid {
        return unsupported;
    }
    match op {
        DataValueArithmeticOperator::Plus | DataValueArithmeticOperator::Minus => {
            // The result keeps the date/datetime side's type.
            if lhs_is_date {
                Ok(lhs_type.clone())
            } else {
                Ok(rhs_type.clone())
            }
        }
        _ => unsupported,
    }
}
/// Result type of a unary arithmetic operator over one numeric input:
/// unary plus keeps the type; unary minus produces a signed type, one
/// size step wider for unsigned inputs (negating e.g. u8::MAX does not
/// fit in i8). Other operators are rejected.
#[inline]
pub fn numerical_unary_arithmetic_coercion(
    op: &DataValueArithmeticOperator,
    val_type: &DataType,
) -> Result<DataType> {
    // Unary arithmetic is only defined over numeric types.
    if !is_numeric(val_type) {
        return Result::Err(ErrorCode::BadDataValueType(format!(
            "DataValue Error: Unsupported ({:?})",
            val_type
        )));
    }
    match op {
        DataValueArithmeticOperator::Plus => Ok(val_type.clone()),
        DataValueArithmeticOperator::Minus => {
            let target_size = if is_signed_numeric(val_type) {
                numeric_byte_size(val_type)?
            } else {
                next_size(numeric_byte_size(val_type)?)
            };
            construct_numeric_type(true, is_floating(val_type), target_size)
        }
        other => Result::Err(ErrorCode::UnknownFunction(format!(
            "Unexpected operator:{:?} to unary function",
            other
        ))),
    }
}
/// Coercion rules for compare operations. This is a superset of all
/// numerical coercion rules: returns the common type both sides should
/// be casted to before comparison, or `IllegalDataType` if none exists.
pub fn compare_coercion(lhs_type: &DataType, rhs_type: &DataType) -> Result<DataType> {
    if lhs_type == rhs_type {
        // same type => equality is possible
        return Ok(lhs_type.clone());
    }
    if is_numeric(lhs_type) && is_numeric(rhs_type) {
        // Numeric vs numeric: widen, allowing the width to clamp at 8 bytes.
        return numerical_coercion(lhs_type, rhs_type, true);
    }
    // one of is null => take the non-null side
    {
        if rhs_type == &DataType::Null {
            return Ok(lhs_type.clone());
        }
        if lhs_type == &DataType::Null {
            return Ok(rhs_type.clone());
        }
    }
    // one of is String and other is number => compare as Float64
    if (is_numeric(lhs_type) && rhs_type == &DataType::String)
        || (is_numeric(rhs_type) && lhs_type == &DataType::String)
    {
        return Ok(DataType::Float64);
    }
    // one of is datetime and other is number or string => take the date side
    {
        if (is_numeric(lhs_type) || lhs_type == &DataType::String) && is_date_or_date_time(rhs_type)
        {
            return Ok(rhs_type.clone());
        }
        if (is_numeric(rhs_type) || rhs_type == &DataType::String) && is_date_or_date_time(lhs_type)
        {
            return Ok(lhs_type.clone());
        }
    }
    // both sides are date-like but of different kinds
    if is_date_or_date_time(lhs_type) || is_date_or_date_time(rhs_type) {
        // one of is datetime => prefer DateTime32, otherwise widen to Date32
        if matches!(lhs_type, DataType::DateTime32(_))
            || matches!(rhs_type, DataType::DateTime32(_))
        {
            return Ok(DataType::DateTime32(None));
        }
        return Ok(DataType::Date32);
    }
    Err(ErrorCode::IllegalDataType(format!(
        "Can not compare {} with {}",
        lhs_type, rhs_type
    )))
}
/// Aggregates data types for a multi-argument function by right-folding
/// `merge_types` over the slice: merge(args[0], merge(args[1], ...)).
/// Errors on an empty slice.
#[inline]
pub fn aggregate_types(args: &[DataType]) -> Result<DataType> {
    match args {
        [] => Result::Err(ErrorCode::BadArguments("Can't aggregate empty args")),
        [only] => Ok(only.clone()),
        [first, rest @ ..] => {
            let merged_rest = aggregate_types(rest)?;
            merge_types(first, &merged_rest)
        }
    }
}
/// Computes the common super-type of `lhs_type` and `rhs_type`.
///
/// `Null` yields the other side; `List`/`Struct` merge element-wise
/// (field names must match, nullability is OR-ed); identical types pass
/// through; differing numeric types use `numerical_coercion` without
/// overflow. Everything else is a `BadDataValueType` error.
pub fn merge_types(lhs_type: &DataType, rhs_type: &DataType) -> Result<DataType> {
    match (lhs_type, rhs_type) {
        // Null merges to whichever side is concrete.
        (DataType::Null, _) => Ok(rhs_type.clone()),
        (_, DataType::Null) => Ok(lhs_type.clone()),
        (DataType::List(a), DataType::List(b)) => {
            if a.name() != b.name() {
                return Result::Err(ErrorCode::BadDataValueType(format!(
                    "Can't merge types from {} and {}",
                    lhs_type, rhs_type
                )));
            }
            // Recursively merge the element types; nullability is OR-ed.
            let typ = merge_types(a.data_type(), b.data_type())?;
            Ok(DataType::List(Box::new(DataField::new(
                a.name(),
                typ,
                a.is_nullable() || b.is_nullable(),
            ))))
        }
        (DataType::Struct(a), DataType::Struct(b)) => {
            if a.len() != b.len() {
                return Result::Err(ErrorCode::BadDataValueType(format!(
                    "Can't merge types from {} and {}, because they have different sizes",
                    lhs_type, rhs_type
                )));
            }
            // Merge field-by-field, positionally; names must agree.
            let fields = a
                .iter()
                .zip(b.iter())
                .map(|(a, b)| {
                    if a.name() != b.name() {
                        return Result::Err(ErrorCode::BadDataValueType(format!(
                            "Can't merge types from {} and {}",
                            lhs_type, rhs_type
                        )));
                    }
                    let typ = merge_types(a.data_type(), b.data_type())?;
                    Ok(DataField::new(
                        a.name(),
                        typ,
                        a.is_nullable() || b.is_nullable(),
                    ))
                })
                .collect::<Result<Vec<_>>>()?;
            Ok(DataType::Struct(fields))
        }
        _ => {
            if lhs_type == rhs_type {
                return Ok(lhs_type.clone());
            }
            if is_numeric(lhs_type) && is_numeric(rhs_type) {
                numerical_coercion(lhs_type, rhs_type, false)
            } else {
                Result::Err(ErrorCode::BadDataValueType(format!(
                    "Can't merge types from {} and {}",
                    lhs_type, rhs_type
                )))
            }
        }
    }
}
| true
|
7b42446cf85dca87009c65a120cc8eca5a8c13f7
|
Rust
|
public/googapis
|
/googapis/genproto/google.cloud.secretmanager.v1.rs
|
UTF-8
| 71,516
| 3.09375
| 3
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
/// A [Secret][google.cloud.secretmanager.v1.Secret] is a logical secret whose value and versions can
/// be accessed.
///
/// A [Secret][google.cloud.secretmanager.v1.Secret] is made up of zero or more [SecretVersions][google.cloud.secretmanager.v1.SecretVersion] that
/// represent the secret data.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Secret {
/// Output only. The resource name of the [Secret][google.cloud.secretmanager.v1.Secret] in the format `projects/*/secrets/*`.
#[prost(string, tag = "1")]
pub name: std::string::String,
/// Required. Immutable. The replication policy of the secret data attached to the [Secret][google.cloud.secretmanager.v1.Secret].
///
/// The replication policy cannot be changed after the Secret has been created.
#[prost(message, optional, tag = "2")]
pub replication: ::std::option::Option<Replication>,
/// Output only. The time at which the [Secret][google.cloud.secretmanager.v1.Secret] was created.
#[prost(message, optional, tag = "3")]
pub create_time: ::std::option::Option<::prost_types::Timestamp>,
/// The labels assigned to this Secret.
///
/// Label keys must be between 1 and 63 characters long, have a UTF-8 encoding
/// of maximum 128 bytes, and must conform to the following PCRE regular
/// expression: `[\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}`
///
/// Label values must be between 0 and 63 characters long, have a UTF-8
/// encoding of maximum 128 bytes, and must conform to the following PCRE
/// regular expression: `[\p{Ll}\p{Lo}\p{N}_-]{0,63}`
///
/// No more than 64 labels can be assigned to a given resource.
#[prost(map = "string, string", tag = "4")]
pub labels: ::std::collections::HashMap<std::string::String, std::string::String>,
}
/// A secret version resource in the Secret Manager API.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SecretVersion {
/// Output only. The resource name of the [SecretVersion][google.cloud.secretmanager.v1.SecretVersion] in the
/// format `projects/*/secrets/*/versions/*`.
///
/// [SecretVersion][google.cloud.secretmanager.v1.SecretVersion] IDs in a [Secret][google.cloud.secretmanager.v1.Secret] start at 1 and
/// are incremented for each subsequent version of the secret.
#[prost(string, tag = "1")]
pub name: std::string::String,
/// Output only. The time at which the [SecretVersion][google.cloud.secretmanager.v1.SecretVersion] was created.
#[prost(message, optional, tag = "2")]
pub create_time: ::std::option::Option<::prost_types::Timestamp>,
/// Output only. The time this [SecretVersion][google.cloud.secretmanager.v1.SecretVersion] was destroyed.
/// Only present if [state][google.cloud.secretmanager.v1.SecretVersion.state] is
/// [DESTROYED][google.cloud.secretmanager.v1.SecretVersion.State.DESTROYED].
#[prost(message, optional, tag = "3")]
pub destroy_time: ::std::option::Option<::prost_types::Timestamp>,
/// Output only. The current state of the [SecretVersion][google.cloud.secretmanager.v1.SecretVersion].
#[prost(enumeration = "secret_version::State", tag = "4")]
pub state: i32,
}
pub mod secret_version {
/// The state of a [SecretVersion][google.cloud.secretmanager.v1.SecretVersion], indicating if it can be accessed.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
#[repr(i32)]
pub enum State {
/// Not specified. This value is unused and invalid.
Unspecified = 0,
/// The [SecretVersion][google.cloud.secretmanager.v1.SecretVersion] may be accessed.
Enabled = 1,
/// The [SecretVersion][google.cloud.secretmanager.v1.SecretVersion] may not be accessed, but the secret data
/// is still available and can be placed back into the [ENABLED][google.cloud.secretmanager.v1.SecretVersion.State.ENABLED]
/// state.
Disabled = 2,
/// The [SecretVersion][google.cloud.secretmanager.v1.SecretVersion] is destroyed and the secret data is no longer
/// stored. A version may not leave this state once entered.
Destroyed = 3,
}
}
/// A policy that defines the replication configuration of data.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Replication {
/// The replication policy for this secret.
#[prost(oneof = "replication::Replication", tags = "1, 2")]
pub replication: ::std::option::Option<replication::Replication>,
}
pub mod replication {
/// A replication policy that replicates the [Secret][google.cloud.secretmanager.v1.Secret] payload without any
/// restrictions.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Automatic {}
/// A replication policy that replicates the [Secret][google.cloud.secretmanager.v1.Secret] payload into the
/// locations specified in [Secret.replication.user_managed.replicas][]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UserManaged {
/// Required. The list of Replicas for this [Secret][google.cloud.secretmanager.v1.Secret].
///
/// Cannot be empty.
#[prost(message, repeated, tag = "1")]
pub replicas: ::std::vec::Vec<user_managed::Replica>,
}
pub mod user_managed {
/// Represents a Replica for this [Secret][google.cloud.secretmanager.v1.Secret].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Replica {
/// The canonical IDs of the location to replicate data.
/// For example: `"us-east1"`.
#[prost(string, tag = "1")]
pub location: std::string::String,
}
}
/// The replication policy for this secret.
#[derive(Clone, PartialEq, ::prost::Oneof)]
pub enum Replication {
/// The [Secret][google.cloud.secretmanager.v1.Secret] will automatically be replicated without any restrictions.
#[prost(message, tag = "1")]
Automatic(Automatic),
/// The [Secret][google.cloud.secretmanager.v1.Secret] will only be replicated into the locations specified.
#[prost(message, tag = "2")]
UserManaged(UserManaged),
}
}
// NOTE(review): this section of the file is prost-generated protobuf code
// (see the "Generated client implementations." marker below); prefer
// changing the .proto definitions and regenerating over hand edits here.
/// A secret payload resource in the Secret Manager API. This contains the
/// sensitive secret data that is associated with a [SecretVersion][google.cloud.secretmanager.v1.SecretVersion].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SecretPayload {
    /// The secret data. Must be no larger than 64KiB.
    #[prost(bytes, tag = "1")]
    pub data: std::vec::Vec<u8>,
}
/// Request message for [SecretManagerService.ListSecrets][google.cloud.secretmanager.v1.SecretManagerService.ListSecrets].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListSecretsRequest {
/// Required. The resource name of the project associated with the
/// [Secrets][google.cloud.secretmanager.v1.Secret], in the format `projects/*`.
#[prost(string, tag = "1")]
pub parent: std::string::String,
/// Optional. The maximum number of results to be returned in a single page. If
/// set to 0, the server decides the number of results to return. If the
/// number is greater than 25000, it is capped at 25000.
#[prost(int32, tag = "2")]
pub page_size: i32,
/// Optional. Pagination token, returned earlier via
/// [ListSecretsResponse.next_page_token][google.cloud.secretmanager.v1.ListSecretsResponse.next_page_token].
#[prost(string, tag = "3")]
pub page_token: std::string::String,
}
/// Response message for [SecretManagerService.ListSecrets][google.cloud.secretmanager.v1.SecretManagerService.ListSecrets].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListSecretsResponse {
/// The list of [Secrets][google.cloud.secretmanager.v1.Secret] sorted in reverse by create_time (newest
/// first).
#[prost(message, repeated, tag = "1")]
pub secrets: ::std::vec::Vec<Secret>,
/// A token to retrieve the next page of results. Pass this value in
/// [ListSecretsRequest.page_token][google.cloud.secretmanager.v1.ListSecretsRequest.page_token] to retrieve the next page.
#[prost(string, tag = "2")]
pub next_page_token: std::string::String,
/// The total number of [Secrets][google.cloud.secretmanager.v1.Secret].
#[prost(int32, tag = "3")]
pub total_size: i32,
}
/// Request message for [SecretManagerService.CreateSecret][google.cloud.secretmanager.v1.SecretManagerService.CreateSecret].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CreateSecretRequest {
/// Required. The resource name of the project to associate with the
/// [Secret][google.cloud.secretmanager.v1.Secret], in the format `projects/*`.
#[prost(string, tag = "1")]
pub parent: std::string::String,
/// Required. This must be unique within the project.
///
/// A secret ID is a string with a maximum length of 255 characters and can
/// contain uppercase and lowercase letters, numerals, and the hyphen (`-`) and
/// underscore (`_`) characters.
#[prost(string, tag = "2")]
pub secret_id: std::string::String,
/// Required. A [Secret][google.cloud.secretmanager.v1.Secret] with initial field values.
#[prost(message, optional, tag = "3")]
pub secret: ::std::option::Option<Secret>,
}
/// Request message for [SecretManagerService.AddSecretVersion][google.cloud.secretmanager.v1.SecretManagerService.AddSecretVersion].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct AddSecretVersionRequest {
/// Required. The resource name of the [Secret][google.cloud.secretmanager.v1.Secret] to associate with the
/// [SecretVersion][google.cloud.secretmanager.v1.SecretVersion] in the format `projects/*/secrets/*`.
#[prost(string, tag = "1")]
pub parent: std::string::String,
/// Required. The secret payload of the [SecretVersion][google.cloud.secretmanager.v1.SecretVersion].
#[prost(message, optional, tag = "2")]
pub payload: ::std::option::Option<SecretPayload>,
}
/// Request message for [SecretManagerService.GetSecret][google.cloud.secretmanager.v1.SecretManagerService.GetSecret].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetSecretRequest {
/// Required. The resource name of the [Secret][google.cloud.secretmanager.v1.Secret], in the format `projects/*/secrets/*`.
#[prost(string, tag = "1")]
pub name: std::string::String,
}
/// Request message for [SecretManagerService.ListSecretVersions][google.cloud.secretmanager.v1.SecretManagerService.ListSecretVersions].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListSecretVersionsRequest {
/// Required. The resource name of the [Secret][google.cloud.secretmanager.v1.Secret] associated with the
/// [SecretVersions][google.cloud.secretmanager.v1.SecretVersion] to list, in the format
/// `projects/*/secrets/*`.
#[prost(string, tag = "1")]
pub parent: std::string::String,
/// Optional. The maximum number of results to be returned in a single page. If
/// set to 0, the server decides the number of results to return. If the
/// number is greater than 25000, it is capped at 25000.
#[prost(int32, tag = "2")]
pub page_size: i32,
/// Optional. Pagination token, returned earlier via
/// ListSecretVersionsResponse.next_page_token][].
#[prost(string, tag = "3")]
pub page_token: std::string::String,
}
/// Response message for [SecretManagerService.ListSecretVersions][google.cloud.secretmanager.v1.SecretManagerService.ListSecretVersions].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListSecretVersionsResponse {
/// The list of [SecretVersions][google.cloud.secretmanager.v1.SecretVersion] sorted in reverse by
/// create_time (newest first).
#[prost(message, repeated, tag = "1")]
pub versions: ::std::vec::Vec<SecretVersion>,
/// A token to retrieve the next page of results. Pass this value in
/// [ListSecretVersionsRequest.page_token][google.cloud.secretmanager.v1.ListSecretVersionsRequest.page_token] to retrieve the next page.
#[prost(string, tag = "2")]
pub next_page_token: std::string::String,
/// The total number of [SecretVersions][google.cloud.secretmanager.v1.SecretVersion].
#[prost(int32, tag = "3")]
pub total_size: i32,
}
/// Request message for [SecretManagerService.GetSecretVersion][google.cloud.secretmanager.v1.SecretManagerService.GetSecretVersion].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetSecretVersionRequest {
/// Required. The resource name of the [SecretVersion][google.cloud.secretmanager.v1.SecretVersion] in the format
/// `projects/*/secrets/*/versions/*`.
/// `projects/*/secrets/*/versions/latest` is an alias to the `latest`
/// [SecretVersion][google.cloud.secretmanager.v1.SecretVersion].
#[prost(string, tag = "1")]
pub name: std::string::String,
}
/// Request message for [SecretManagerService.UpdateSecret][google.cloud.secretmanager.v1.SecretManagerService.UpdateSecret].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UpdateSecretRequest {
/// Required. [Secret][google.cloud.secretmanager.v1.Secret] with updated field values.
#[prost(message, optional, tag = "1")]
pub secret: ::std::option::Option<Secret>,
/// Required. Specifies the fields to be updated.
#[prost(message, optional, tag = "2")]
pub update_mask: ::std::option::Option<::prost_types::FieldMask>,
}
/// Request message for [SecretManagerService.AccessSecretVersion][google.cloud.secretmanager.v1.SecretManagerService.AccessSecretVersion].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct AccessSecretVersionRequest {
/// Required. The resource name of the [SecretVersion][google.cloud.secretmanager.v1.SecretVersion] in the format
/// `projects/*/secrets/*/versions/*`.
#[prost(string, tag = "1")]
pub name: std::string::String,
}
/// Response message for [SecretManagerService.AccessSecretVersion][google.cloud.secretmanager.v1.SecretManagerService.AccessSecretVersion].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct AccessSecretVersionResponse {
/// The resource name of the [SecretVersion][google.cloud.secretmanager.v1.SecretVersion] in the format
/// `projects/*/secrets/*/versions/*`.
#[prost(string, tag = "1")]
pub name: std::string::String,
/// Secret payload
#[prost(message, optional, tag = "2")]
pub payload: ::std::option::Option<SecretPayload>,
}
/// Request message for [SecretManagerService.DeleteSecret][google.cloud.secretmanager.v1.SecretManagerService.DeleteSecret].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DeleteSecretRequest {
/// Required. The resource name of the [Secret][google.cloud.secretmanager.v1.Secret] to delete in the format
/// `projects/*/secrets/*`.
#[prost(string, tag = "1")]
pub name: std::string::String,
}
/// Request message for [SecretManagerService.DisableSecretVersion][google.cloud.secretmanager.v1.SecretManagerService.DisableSecretVersion].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DisableSecretVersionRequest {
/// Required. The resource name of the [SecretVersion][google.cloud.secretmanager.v1.SecretVersion] to disable in the format
/// `projects/*/secrets/*/versions/*`.
#[prost(string, tag = "1")]
pub name: std::string::String,
}
/// Request message for [SecretManagerService.EnableSecretVersion][google.cloud.secretmanager.v1.SecretManagerService.EnableSecretVersion].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct EnableSecretVersionRequest {
/// Required. The resource name of the [SecretVersion][google.cloud.secretmanager.v1.SecretVersion] to enable in the format
/// `projects/*/secrets/*/versions/*`.
#[prost(string, tag = "1")]
pub name: std::string::String,
}
/// Request message for [SecretManagerService.DestroySecretVersion][google.cloud.secretmanager.v1.SecretManagerService.DestroySecretVersion].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DestroySecretVersionRequest {
/// Required. The resource name of the [SecretVersion][google.cloud.secretmanager.v1.SecretVersion] to destroy in the format
/// `projects/*/secrets/*/versions/*`.
#[prost(string, tag = "1")]
pub name: std::string::String,
}
// Generated gRPC client for `google.cloud.secretmanager.v1.SecretManagerService`
// (tonic/prost codegen). Every RPC method below follows the same generated
// pattern; do not hand-edit — regenerate from the .proto instead.
#[doc = r" Generated client implementations."]
pub mod secret_manager_service_client {
    #![allow(unused_variables, dead_code, missing_docs)]
    // `tonic::codegen::*` supplies the unqualified names used below
    // (`StdError`, `Body`, `HttpBody`, etc.).
    use tonic::codegen::*;
    #[doc = " Secret Manager Service"]
    #[doc = ""]
    #[doc = " Manages secrets and operations using those secrets. Implements a REST"]
    #[doc = " model with the following objects:"]
    #[doc = ""]
    #[doc = " * [Secret][google.cloud.secretmanager.v1.Secret]"]
    #[doc = " * [SecretVersion][google.cloud.secretmanager.v1.SecretVersion]"]
    pub struct SecretManagerServiceClient<T> {
        // Generic over the transport/service `T`; all calls go through
        // tonic's client-side gRPC wrapper.
        inner: tonic::client::Grpc<T>,
    }
    // Convenience constructor available only for the default tonic
    // transport channel.
    impl SecretManagerServiceClient<tonic::transport::Channel> {
        #[doc = r" Attempt to create a new client by connecting to a given endpoint."]
        pub async fn connect<D>(dst: D) -> Result<Self, tonic::transport::Error>
        where
            D: std::convert::TryInto<tonic::transport::Endpoint>,
            D::Error: Into<StdError>,
        {
            let conn = tonic::transport::Endpoint::new(dst)?.connect().await?;
            Ok(Self::new(conn))
        }
    }
    impl<T> SecretManagerServiceClient<T>
    where
        T: tonic::client::GrpcService<tonic::body::BoxBody>,
        T::ResponseBody: Body + HttpBody + Send + 'static,
        T::Error: Into<StdError>,
        <T::ResponseBody as HttpBody>::Error: Into<StdError> + Send,
    {
        // Wrap an arbitrary gRPC-capable service as a client.
        pub fn new(inner: T) -> Self {
            let inner = tonic::client::Grpc::new(inner);
            Self { inner }
        }
        // Same as `new`, but every outgoing request passes through the
        // given interceptor first.
        pub fn with_interceptor(inner: T, interceptor: impl Into<tonic::Interceptor>) -> Self {
            let inner = tonic::client::Grpc::with_interceptor(inner, interceptor);
            Self { inner }
        }
        // Each RPC below is the same generated three-step sequence:
        //   1. wait for the underlying service to report readiness
        //      (failures are surfaced as `Code::Unknown` statuses),
        //   2. build a prost codec and the static gRPC method path,
        //   3. issue a unary request/response exchange via `self.inner`.
        #[doc = " Lists [Secrets][google.cloud.secretmanager.v1.Secret]."]
        pub async fn list_secrets(
            &mut self,
            request: impl tonic::IntoRequest<super::ListSecretsRequest>,
        ) -> Result<tonic::Response<super::ListSecretsResponse>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.secretmanager.v1.SecretManagerService/ListSecrets",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Creates a new [Secret][google.cloud.secretmanager.v1.Secret] containing no [SecretVersions][google.cloud.secretmanager.v1.SecretVersion]."]
        pub async fn create_secret(
            &mut self,
            request: impl tonic::IntoRequest<super::CreateSecretRequest>,
        ) -> Result<tonic::Response<super::Secret>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.secretmanager.v1.SecretManagerService/CreateSecret",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Creates a new [SecretVersion][google.cloud.secretmanager.v1.SecretVersion] containing secret data and attaches"]
        #[doc = " it to an existing [Secret][google.cloud.secretmanager.v1.Secret]."]
        pub async fn add_secret_version(
            &mut self,
            request: impl tonic::IntoRequest<super::AddSecretVersionRequest>,
        ) -> Result<tonic::Response<super::SecretVersion>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.secretmanager.v1.SecretManagerService/AddSecretVersion",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Gets metadata for a given [Secret][google.cloud.secretmanager.v1.Secret]."]
        pub async fn get_secret(
            &mut self,
            request: impl tonic::IntoRequest<super::GetSecretRequest>,
        ) -> Result<tonic::Response<super::Secret>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.secretmanager.v1.SecretManagerService/GetSecret",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Updates metadata of an existing [Secret][google.cloud.secretmanager.v1.Secret]."]
        pub async fn update_secret(
            &mut self,
            request: impl tonic::IntoRequest<super::UpdateSecretRequest>,
        ) -> Result<tonic::Response<super::Secret>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.secretmanager.v1.SecretManagerService/UpdateSecret",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Deletes a [Secret][google.cloud.secretmanager.v1.Secret]."]
        pub async fn delete_secret(
            &mut self,
            request: impl tonic::IntoRequest<super::DeleteSecretRequest>,
        ) -> Result<tonic::Response<()>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.secretmanager.v1.SecretManagerService/DeleteSecret",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Lists [SecretVersions][google.cloud.secretmanager.v1.SecretVersion]. This call does not return secret"]
        #[doc = " data."]
        pub async fn list_secret_versions(
            &mut self,
            request: impl tonic::IntoRequest<super::ListSecretVersionsRequest>,
        ) -> Result<tonic::Response<super::ListSecretVersionsResponse>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.secretmanager.v1.SecretManagerService/ListSecretVersions",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Gets metadata for a [SecretVersion][google.cloud.secretmanager.v1.SecretVersion]."]
        #[doc = ""]
        #[doc = " `projects/*/secrets/*/versions/latest` is an alias to the `latest`"]
        #[doc = " [SecretVersion][google.cloud.secretmanager.v1.SecretVersion]."]
        pub async fn get_secret_version(
            &mut self,
            request: impl tonic::IntoRequest<super::GetSecretVersionRequest>,
        ) -> Result<tonic::Response<super::SecretVersion>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.secretmanager.v1.SecretManagerService/GetSecretVersion",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Accesses a [SecretVersion][google.cloud.secretmanager.v1.SecretVersion]. This call returns the secret data."]
        #[doc = ""]
        #[doc = " `projects/*/secrets/*/versions/latest` is an alias to the `latest`"]
        #[doc = " [SecretVersion][google.cloud.secretmanager.v1.SecretVersion]."]
        pub async fn access_secret_version(
            &mut self,
            request: impl tonic::IntoRequest<super::AccessSecretVersionRequest>,
        ) -> Result<tonic::Response<super::AccessSecretVersionResponse>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.secretmanager.v1.SecretManagerService/AccessSecretVersion",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Disables a [SecretVersion][google.cloud.secretmanager.v1.SecretVersion]."]
        #[doc = ""]
        #[doc = " Sets the [state][google.cloud.secretmanager.v1.SecretVersion.state] of the [SecretVersion][google.cloud.secretmanager.v1.SecretVersion] to"]
        #[doc = " [DISABLED][google.cloud.secretmanager.v1.SecretVersion.State.DISABLED]."]
        pub async fn disable_secret_version(
            &mut self,
            request: impl tonic::IntoRequest<super::DisableSecretVersionRequest>,
        ) -> Result<tonic::Response<super::SecretVersion>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.secretmanager.v1.SecretManagerService/DisableSecretVersion",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Enables a [SecretVersion][google.cloud.secretmanager.v1.SecretVersion]."]
        #[doc = ""]
        #[doc = " Sets the [state][google.cloud.secretmanager.v1.SecretVersion.state] of the [SecretVersion][google.cloud.secretmanager.v1.SecretVersion] to"]
        #[doc = " [ENABLED][google.cloud.secretmanager.v1.SecretVersion.State.ENABLED]."]
        pub async fn enable_secret_version(
            &mut self,
            request: impl tonic::IntoRequest<super::EnableSecretVersionRequest>,
        ) -> Result<tonic::Response<super::SecretVersion>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.secretmanager.v1.SecretManagerService/EnableSecretVersion",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Destroys a [SecretVersion][google.cloud.secretmanager.v1.SecretVersion]."]
        #[doc = ""]
        #[doc = " Sets the [state][google.cloud.secretmanager.v1.SecretVersion.state] of the [SecretVersion][google.cloud.secretmanager.v1.SecretVersion] to"]
        #[doc = " [DESTROYED][google.cloud.secretmanager.v1.SecretVersion.State.DESTROYED] and irrevocably destroys the"]
        #[doc = " secret data."]
        pub async fn destroy_secret_version(
            &mut self,
            request: impl tonic::IntoRequest<super::DestroySecretVersionRequest>,
        ) -> Result<tonic::Response<super::SecretVersion>, tonic::Status> {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.secretmanager.v1.SecretManagerService/DestroySecretVersion",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        // The IAM RPCs below use request/response types from the sibling
        // `iam.v1` generated module, hence the long `super::...` paths.
        #[doc = " Sets the access control policy on the specified secret. Replaces any"]
        #[doc = " existing policy."]
        #[doc = ""]
        #[doc = " Permissions on [SecretVersions][google.cloud.secretmanager.v1.SecretVersion] are enforced according"]
        #[doc = " to the policy set on the associated [Secret][google.cloud.secretmanager.v1.Secret]."]
        pub async fn set_iam_policy(
            &mut self,
            request: impl tonic::IntoRequest<super::super::super::super::iam::v1::SetIamPolicyRequest>,
        ) -> Result<tonic::Response<super::super::super::super::iam::v1::Policy>, tonic::Status>
        {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.secretmanager.v1.SecretManagerService/SetIamPolicy",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Gets the access control policy for a secret."]
        #[doc = " Returns empty policy if the secret exists and does not have a policy set."]
        pub async fn get_iam_policy(
            &mut self,
            request: impl tonic::IntoRequest<super::super::super::super::iam::v1::GetIamPolicyRequest>,
        ) -> Result<tonic::Response<super::super::super::super::iam::v1::Policy>, tonic::Status>
        {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.secretmanager.v1.SecretManagerService/GetIamPolicy",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
        #[doc = " Returns permissions that a caller has for the specified secret."]
        #[doc = " If the secret does not exist, this call returns an empty set of"]
        #[doc = " permissions, not a NOT_FOUND error."]
        #[doc = ""]
        #[doc = " Note: This operation is designed to be used for building permission-aware"]
        #[doc = " UIs and command-line tools, not for authorization checking. This operation"]
        #[doc = " may \"fail open\" without warning."]
        pub async fn test_iam_permissions(
            &mut self,
            request: impl tonic::IntoRequest<
                super::super::super::super::iam::v1::TestIamPermissionsRequest,
            >,
        ) -> Result<
            tonic::Response<super::super::super::super::iam::v1::TestIamPermissionsResponse>,
            tonic::Status,
        > {
            self.inner.ready().await.map_err(|e| {
                tonic::Status::new(
                    tonic::Code::Unknown,
                    format!("Service was not ready: {}", e.into()),
                )
            })?;
            let codec = tonic::codec::ProstCodec::default();
            let path = http::uri::PathAndQuery::from_static(
                "/google.cloud.secretmanager.v1.SecretManagerService/TestIamPermissions",
            );
            self.inner.unary(request.into_request(), path, codec).await
        }
    }
    // Clone only requires `T: Clone`; it duplicates the gRPC wrapper, not
    // any connection state beyond what `T`'s own Clone provides.
    impl<T: Clone> Clone for SecretManagerServiceClient<T> {
        fn clone(&self) -> Self {
            Self {
                inner: self.inner.clone(),
            }
        }
    }
    // Opaque Debug output: the inner transport is intentionally not shown.
    impl<T> std::fmt::Debug for SecretManagerServiceClient<T> {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            write!(f, "SecretManagerServiceClient {{ ... }}")
        }
    }
}
#[doc = r" Generated server implementations."]
pub mod secret_manager_service_server {
#![allow(unused_variables, dead_code, missing_docs)]
use tonic::codegen::*;
#[doc = "Generated trait containing gRPC methods that should be implemented for use with SecretManagerServiceServer."]
#[async_trait]
pub trait SecretManagerService: Send + Sync + 'static {
#[doc = " Lists [Secrets][google.cloud.secretmanager.v1.Secret]."]
async fn list_secrets(
&self,
request: tonic::Request<super::ListSecretsRequest>,
) -> Result<tonic::Response<super::ListSecretsResponse>, tonic::Status>;
#[doc = " Creates a new [Secret][google.cloud.secretmanager.v1.Secret] containing no [SecretVersions][google.cloud.secretmanager.v1.SecretVersion]."]
async fn create_secret(
&self,
request: tonic::Request<super::CreateSecretRequest>,
) -> Result<tonic::Response<super::Secret>, tonic::Status>;
#[doc = " Creates a new [SecretVersion][google.cloud.secretmanager.v1.SecretVersion] containing secret data and attaches"]
#[doc = " it to an existing [Secret][google.cloud.secretmanager.v1.Secret]."]
async fn add_secret_version(
&self,
request: tonic::Request<super::AddSecretVersionRequest>,
) -> Result<tonic::Response<super::SecretVersion>, tonic::Status>;
#[doc = " Gets metadata for a given [Secret][google.cloud.secretmanager.v1.Secret]."]
async fn get_secret(
&self,
request: tonic::Request<super::GetSecretRequest>,
) -> Result<tonic::Response<super::Secret>, tonic::Status>;
#[doc = " Updates metadata of an existing [Secret][google.cloud.secretmanager.v1.Secret]."]
async fn update_secret(
&self,
request: tonic::Request<super::UpdateSecretRequest>,
) -> Result<tonic::Response<super::Secret>, tonic::Status>;
#[doc = " Deletes a [Secret][google.cloud.secretmanager.v1.Secret]."]
async fn delete_secret(
&self,
request: tonic::Request<super::DeleteSecretRequest>,
) -> Result<tonic::Response<()>, tonic::Status>;
#[doc = " Lists [SecretVersions][google.cloud.secretmanager.v1.SecretVersion]. This call does not return secret"]
#[doc = " data."]
async fn list_secret_versions(
&self,
request: tonic::Request<super::ListSecretVersionsRequest>,
) -> Result<tonic::Response<super::ListSecretVersionsResponse>, tonic::Status>;
#[doc = " Gets metadata for a [SecretVersion][google.cloud.secretmanager.v1.SecretVersion]."]
#[doc = ""]
#[doc = " `projects/*/secrets/*/versions/latest` is an alias to the `latest`"]
#[doc = " [SecretVersion][google.cloud.secretmanager.v1.SecretVersion]."]
async fn get_secret_version(
&self,
request: tonic::Request<super::GetSecretVersionRequest>,
) -> Result<tonic::Response<super::SecretVersion>, tonic::Status>;
#[doc = " Accesses a [SecretVersion][google.cloud.secretmanager.v1.SecretVersion]. This call returns the secret data."]
#[doc = ""]
#[doc = " `projects/*/secrets/*/versions/latest` is an alias to the `latest`"]
#[doc = " [SecretVersion][google.cloud.secretmanager.v1.SecretVersion]."]
async fn access_secret_version(
&self,
request: tonic::Request<super::AccessSecretVersionRequest>,
) -> Result<tonic::Response<super::AccessSecretVersionResponse>, tonic::Status>;
#[doc = " Disables a [SecretVersion][google.cloud.secretmanager.v1.SecretVersion]."]
#[doc = ""]
#[doc = " Sets the [state][google.cloud.secretmanager.v1.SecretVersion.state] of the [SecretVersion][google.cloud.secretmanager.v1.SecretVersion] to"]
#[doc = " [DISABLED][google.cloud.secretmanager.v1.SecretVersion.State.DISABLED]."]
async fn disable_secret_version(
&self,
request: tonic::Request<super::DisableSecretVersionRequest>,
) -> Result<tonic::Response<super::SecretVersion>, tonic::Status>;
#[doc = " Enables a [SecretVersion][google.cloud.secretmanager.v1.SecretVersion]."]
#[doc = ""]
#[doc = " Sets the [state][google.cloud.secretmanager.v1.SecretVersion.state] of the [SecretVersion][google.cloud.secretmanager.v1.SecretVersion] to"]
#[doc = " [ENABLED][google.cloud.secretmanager.v1.SecretVersion.State.ENABLED]."]
async fn enable_secret_version(
&self,
request: tonic::Request<super::EnableSecretVersionRequest>,
) -> Result<tonic::Response<super::SecretVersion>, tonic::Status>;
#[doc = " Destroys a [SecretVersion][google.cloud.secretmanager.v1.SecretVersion]."]
#[doc = ""]
#[doc = " Sets the [state][google.cloud.secretmanager.v1.SecretVersion.state] of the [SecretVersion][google.cloud.secretmanager.v1.SecretVersion] to"]
#[doc = " [DESTROYED][google.cloud.secretmanager.v1.SecretVersion.State.DESTROYED] and irrevocably destroys the"]
#[doc = " secret data."]
async fn destroy_secret_version(
&self,
request: tonic::Request<super::DestroySecretVersionRequest>,
) -> Result<tonic::Response<super::SecretVersion>, tonic::Status>;
#[doc = " Sets the access control policy on the specified secret. Replaces any"]
#[doc = " existing policy."]
#[doc = ""]
#[doc = " Permissions on [SecretVersions][google.cloud.secretmanager.v1.SecretVersion] are enforced according"]
#[doc = " to the policy set on the associated [Secret][google.cloud.secretmanager.v1.Secret]."]
async fn set_iam_policy(
&self,
request: tonic::Request<super::super::super::super::iam::v1::SetIamPolicyRequest>,
) -> Result<tonic::Response<super::super::super::super::iam::v1::Policy>, tonic::Status>;
#[doc = " Gets the access control policy for a secret."]
#[doc = " Returns empty policy if the secret exists and does not have a policy set."]
async fn get_iam_policy(
&self,
request: tonic::Request<super::super::super::super::iam::v1::GetIamPolicyRequest>,
) -> Result<tonic::Response<super::super::super::super::iam::v1::Policy>, tonic::Status>;
#[doc = " Returns permissions that a caller has for the specified secret."]
#[doc = " If the secret does not exist, this call returns an empty set of"]
#[doc = " permissions, not a NOT_FOUND error."]
#[doc = ""]
#[doc = " Note: This operation is designed to be used for building permission-aware"]
#[doc = " UIs and command-line tools, not for authorization checking. This operation"]
#[doc = " may \"fail open\" without warning."]
async fn test_iam_permissions(
&self,
request: tonic::Request<super::super::super::super::iam::v1::TestIamPermissionsRequest>,
) -> Result<
tonic::Response<super::super::super::super::iam::v1::TestIamPermissionsResponse>,
tonic::Status,
>;
}
#[doc = " Secret Manager Service"]
#[doc = ""]
#[doc = " Manages secrets and operations using those secrets. Implements a REST"]
#[doc = " model with the following objects:"]
#[doc = ""]
#[doc = " * [Secret][google.cloud.secretmanager.v1.Secret]"]
#[doc = " * [SecretVersion][google.cloud.secretmanager.v1.SecretVersion]"]
#[derive(Debug)]
#[doc(hidden)]
pub struct SecretManagerServiceServer<T: SecretManagerService> {
inner: _Inner<T>,
}
struct _Inner<T>(Arc<T>, Option<tonic::Interceptor>);
impl<T: SecretManagerService> SecretManagerServiceServer<T> {
pub fn new(inner: T) -> Self {
let inner = Arc::new(inner);
let inner = _Inner(inner, None);
Self { inner }
}
pub fn with_interceptor(inner: T, interceptor: impl Into<tonic::Interceptor>) -> Self {
let inner = Arc::new(inner);
let inner = _Inner(inner, Some(interceptor.into()));
Self { inner }
}
}
impl<T, B> Service<http::Request<B>> for SecretManagerServiceServer<T>
where
T: SecretManagerService,
B: HttpBody + Send + Sync + 'static,
B::Error: Into<StdError> + Send + 'static,
{
type Response = http::Response<tonic::body::BoxBody>;
type Error = Never;
type Future = BoxFuture<Self::Response, Self::Error>;
fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
Poll::Ready(Ok(()))
}
fn call(&mut self, req: http::Request<B>) -> Self::Future {
let inner = self.inner.clone();
match req.uri().path() {
"/google.cloud.secretmanager.v1.SecretManagerService/ListSecrets" => {
#[allow(non_camel_case_types)]
struct ListSecretsSvc<T: SecretManagerService>(pub Arc<T>);
impl<T: SecretManagerService>
tonic::server::UnaryService<super::ListSecretsRequest>
for ListSecretsSvc<T>
{
type Response = super::ListSecretsResponse;
type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;
fn call(
&mut self,
request: tonic::Request<super::ListSecretsRequest>,
) -> Self::Future {
let inner = self.0.clone();
let fut = async move { inner.list_secrets(request).await };
Box::pin(fut)
}
}
let inner = self.inner.clone();
let fut = async move {
let interceptor = inner.1.clone();
let inner = inner.0;
let method = ListSecretsSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = if let Some(interceptor) = interceptor {
tonic::server::Grpc::with_interceptor(codec, interceptor)
} else {
tonic::server::Grpc::new(codec)
};
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
"/google.cloud.secretmanager.v1.SecretManagerService/CreateSecret" => {
#[allow(non_camel_case_types)]
struct CreateSecretSvc<T: SecretManagerService>(pub Arc<T>);
impl<T: SecretManagerService>
tonic::server::UnaryService<super::CreateSecretRequest>
for CreateSecretSvc<T>
{
type Response = super::Secret;
type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;
fn call(
&mut self,
request: tonic::Request<super::CreateSecretRequest>,
) -> Self::Future {
let inner = self.0.clone();
let fut = async move { inner.create_secret(request).await };
Box::pin(fut)
}
}
let inner = self.inner.clone();
let fut = async move {
let interceptor = inner.1.clone();
let inner = inner.0;
let method = CreateSecretSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = if let Some(interceptor) = interceptor {
tonic::server::Grpc::with_interceptor(codec, interceptor)
} else {
tonic::server::Grpc::new(codec)
};
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
"/google.cloud.secretmanager.v1.SecretManagerService/AddSecretVersion" => {
#[allow(non_camel_case_types)]
struct AddSecretVersionSvc<T: SecretManagerService>(pub Arc<T>);
impl<T: SecretManagerService>
tonic::server::UnaryService<super::AddSecretVersionRequest>
for AddSecretVersionSvc<T>
{
type Response = super::SecretVersion;
type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;
fn call(
&mut self,
request: tonic::Request<super::AddSecretVersionRequest>,
) -> Self::Future {
let inner = self.0.clone();
let fut = async move { inner.add_secret_version(request).await };
Box::pin(fut)
}
}
let inner = self.inner.clone();
let fut = async move {
let interceptor = inner.1.clone();
let inner = inner.0;
let method = AddSecretVersionSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = if let Some(interceptor) = interceptor {
tonic::server::Grpc::with_interceptor(codec, interceptor)
} else {
tonic::server::Grpc::new(codec)
};
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
"/google.cloud.secretmanager.v1.SecretManagerService/GetSecret" => {
#[allow(non_camel_case_types)]
struct GetSecretSvc<T: SecretManagerService>(pub Arc<T>);
impl<T: SecretManagerService>
tonic::server::UnaryService<super::GetSecretRequest> for GetSecretSvc<T>
{
type Response = super::Secret;
type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;
fn call(
&mut self,
request: tonic::Request<super::GetSecretRequest>,
) -> Self::Future {
let inner = self.0.clone();
let fut = async move { inner.get_secret(request).await };
Box::pin(fut)
}
}
let inner = self.inner.clone();
let fut = async move {
let interceptor = inner.1.clone();
let inner = inner.0;
let method = GetSecretSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = if let Some(interceptor) = interceptor {
tonic::server::Grpc::with_interceptor(codec, interceptor)
} else {
tonic::server::Grpc::new(codec)
};
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
"/google.cloud.secretmanager.v1.SecretManagerService/UpdateSecret" => {
#[allow(non_camel_case_types)]
struct UpdateSecretSvc<T: SecretManagerService>(pub Arc<T>);
impl<T: SecretManagerService>
tonic::server::UnaryService<super::UpdateSecretRequest>
for UpdateSecretSvc<T>
{
type Response = super::Secret;
type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;
fn call(
&mut self,
request: tonic::Request<super::UpdateSecretRequest>,
) -> Self::Future {
let inner = self.0.clone();
let fut = async move { inner.update_secret(request).await };
Box::pin(fut)
}
}
let inner = self.inner.clone();
let fut = async move {
let interceptor = inner.1.clone();
let inner = inner.0;
let method = UpdateSecretSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = if let Some(interceptor) = interceptor {
tonic::server::Grpc::with_interceptor(codec, interceptor)
} else {
tonic::server::Grpc::new(codec)
};
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
"/google.cloud.secretmanager.v1.SecretManagerService/DeleteSecret" => {
#[allow(non_camel_case_types)]
struct DeleteSecretSvc<T: SecretManagerService>(pub Arc<T>);
impl<T: SecretManagerService>
tonic::server::UnaryService<super::DeleteSecretRequest>
for DeleteSecretSvc<T>
{
type Response = ();
type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;
fn call(
&mut self,
request: tonic::Request<super::DeleteSecretRequest>,
) -> Self::Future {
let inner = self.0.clone();
let fut = async move { inner.delete_secret(request).await };
Box::pin(fut)
}
}
let inner = self.inner.clone();
let fut = async move {
let interceptor = inner.1.clone();
let inner = inner.0;
let method = DeleteSecretSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = if let Some(interceptor) = interceptor {
tonic::server::Grpc::with_interceptor(codec, interceptor)
} else {
tonic::server::Grpc::new(codec)
};
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
"/google.cloud.secretmanager.v1.SecretManagerService/ListSecretVersions" => {
#[allow(non_camel_case_types)]
struct ListSecretVersionsSvc<T: SecretManagerService>(pub Arc<T>);
impl<T: SecretManagerService>
tonic::server::UnaryService<super::ListSecretVersionsRequest>
for ListSecretVersionsSvc<T>
{
type Response = super::ListSecretVersionsResponse;
type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;
fn call(
&mut self,
request: tonic::Request<super::ListSecretVersionsRequest>,
) -> Self::Future {
let inner = self.0.clone();
let fut = async move { inner.list_secret_versions(request).await };
Box::pin(fut)
}
}
let inner = self.inner.clone();
let fut = async move {
let interceptor = inner.1.clone();
let inner = inner.0;
let method = ListSecretVersionsSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = if let Some(interceptor) = interceptor {
tonic::server::Grpc::with_interceptor(codec, interceptor)
} else {
tonic::server::Grpc::new(codec)
};
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
"/google.cloud.secretmanager.v1.SecretManagerService/GetSecretVersion" => {
#[allow(non_camel_case_types)]
struct GetSecretVersionSvc<T: SecretManagerService>(pub Arc<T>);
impl<T: SecretManagerService>
tonic::server::UnaryService<super::GetSecretVersionRequest>
for GetSecretVersionSvc<T>
{
type Response = super::SecretVersion;
type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;
fn call(
&mut self,
request: tonic::Request<super::GetSecretVersionRequest>,
) -> Self::Future {
let inner = self.0.clone();
let fut = async move { inner.get_secret_version(request).await };
Box::pin(fut)
}
}
let inner = self.inner.clone();
let fut = async move {
let interceptor = inner.1.clone();
let inner = inner.0;
let method = GetSecretVersionSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = if let Some(interceptor) = interceptor {
tonic::server::Grpc::with_interceptor(codec, interceptor)
} else {
tonic::server::Grpc::new(codec)
};
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
"/google.cloud.secretmanager.v1.SecretManagerService/AccessSecretVersion" => {
#[allow(non_camel_case_types)]
struct AccessSecretVersionSvc<T: SecretManagerService>(pub Arc<T>);
impl<T: SecretManagerService>
tonic::server::UnaryService<super::AccessSecretVersionRequest>
for AccessSecretVersionSvc<T>
{
type Response = super::AccessSecretVersionResponse;
type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;
fn call(
&mut self,
request: tonic::Request<super::AccessSecretVersionRequest>,
) -> Self::Future {
let inner = self.0.clone();
let fut = async move { inner.access_secret_version(request).await };
Box::pin(fut)
}
}
let inner = self.inner.clone();
let fut = async move {
let interceptor = inner.1.clone();
let inner = inner.0;
let method = AccessSecretVersionSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = if let Some(interceptor) = interceptor {
tonic::server::Grpc::with_interceptor(codec, interceptor)
} else {
tonic::server::Grpc::new(codec)
};
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
"/google.cloud.secretmanager.v1.SecretManagerService/DisableSecretVersion" => {
#[allow(non_camel_case_types)]
struct DisableSecretVersionSvc<T: SecretManagerService>(pub Arc<T>);
impl<T: SecretManagerService>
tonic::server::UnaryService<super::DisableSecretVersionRequest>
for DisableSecretVersionSvc<T>
{
type Response = super::SecretVersion;
type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;
fn call(
&mut self,
request: tonic::Request<super::DisableSecretVersionRequest>,
) -> Self::Future {
let inner = self.0.clone();
let fut = async move { inner.disable_secret_version(request).await };
Box::pin(fut)
}
}
let inner = self.inner.clone();
let fut = async move {
let interceptor = inner.1.clone();
let inner = inner.0;
let method = DisableSecretVersionSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = if let Some(interceptor) = interceptor {
tonic::server::Grpc::with_interceptor(codec, interceptor)
} else {
tonic::server::Grpc::new(codec)
};
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
"/google.cloud.secretmanager.v1.SecretManagerService/EnableSecretVersion" => {
#[allow(non_camel_case_types)]
struct EnableSecretVersionSvc<T: SecretManagerService>(pub Arc<T>);
impl<T: SecretManagerService>
tonic::server::UnaryService<super::EnableSecretVersionRequest>
for EnableSecretVersionSvc<T>
{
type Response = super::SecretVersion;
type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;
fn call(
&mut self,
request: tonic::Request<super::EnableSecretVersionRequest>,
) -> Self::Future {
let inner = self.0.clone();
let fut = async move { inner.enable_secret_version(request).await };
Box::pin(fut)
}
}
let inner = self.inner.clone();
let fut = async move {
let interceptor = inner.1.clone();
let inner = inner.0;
let method = EnableSecretVersionSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = if let Some(interceptor) = interceptor {
tonic::server::Grpc::with_interceptor(codec, interceptor)
} else {
tonic::server::Grpc::new(codec)
};
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
"/google.cloud.secretmanager.v1.SecretManagerService/DestroySecretVersion" => {
#[allow(non_camel_case_types)]
struct DestroySecretVersionSvc<T: SecretManagerService>(pub Arc<T>);
impl<T: SecretManagerService>
tonic::server::UnaryService<super::DestroySecretVersionRequest>
for DestroySecretVersionSvc<T>
{
type Response = super::SecretVersion;
type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;
fn call(
&mut self,
request: tonic::Request<super::DestroySecretVersionRequest>,
) -> Self::Future {
let inner = self.0.clone();
let fut = async move { inner.destroy_secret_version(request).await };
Box::pin(fut)
}
}
let inner = self.inner.clone();
let fut = async move {
let interceptor = inner.1.clone();
let inner = inner.0;
let method = DestroySecretVersionSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = if let Some(interceptor) = interceptor {
tonic::server::Grpc::with_interceptor(codec, interceptor)
} else {
tonic::server::Grpc::new(codec)
};
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
"/google.cloud.secretmanager.v1.SecretManagerService/SetIamPolicy" => {
#[allow(non_camel_case_types)]
struct SetIamPolicySvc<T: SecretManagerService>(pub Arc<T>);
impl<T: SecretManagerService>
tonic::server::UnaryService<
super::super::super::super::iam::v1::SetIamPolicyRequest,
> for SetIamPolicySvc<T>
{
type Response = super::super::super::super::iam::v1::Policy;
type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;
fn call(
&mut self,
request: tonic::Request<
super::super::super::super::iam::v1::SetIamPolicyRequest,
>,
) -> Self::Future {
let inner = self.0.clone();
let fut = async move { inner.set_iam_policy(request).await };
Box::pin(fut)
}
}
let inner = self.inner.clone();
let fut = async move {
let interceptor = inner.1.clone();
let inner = inner.0;
let method = SetIamPolicySvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = if let Some(interceptor) = interceptor {
tonic::server::Grpc::with_interceptor(codec, interceptor)
} else {
tonic::server::Grpc::new(codec)
};
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
"/google.cloud.secretmanager.v1.SecretManagerService/GetIamPolicy" => {
#[allow(non_camel_case_types)]
struct GetIamPolicySvc<T: SecretManagerService>(pub Arc<T>);
impl<T: SecretManagerService>
tonic::server::UnaryService<
super::super::super::super::iam::v1::GetIamPolicyRequest,
> for GetIamPolicySvc<T>
{
type Response = super::super::super::super::iam::v1::Policy;
type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;
fn call(
&mut self,
request: tonic::Request<
super::super::super::super::iam::v1::GetIamPolicyRequest,
>,
) -> Self::Future {
let inner = self.0.clone();
let fut = async move { inner.get_iam_policy(request).await };
Box::pin(fut)
}
}
let inner = self.inner.clone();
let fut = async move {
let interceptor = inner.1.clone();
let inner = inner.0;
let method = GetIamPolicySvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = if let Some(interceptor) = interceptor {
tonic::server::Grpc::with_interceptor(codec, interceptor)
} else {
tonic::server::Grpc::new(codec)
};
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
"/google.cloud.secretmanager.v1.SecretManagerService/TestIamPermissions" => {
#[allow(non_camel_case_types)]
struct TestIamPermissionsSvc<T: SecretManagerService>(pub Arc<T>);
impl<T: SecretManagerService>
tonic::server::UnaryService<
super::super::super::super::iam::v1::TestIamPermissionsRequest,
> for TestIamPermissionsSvc<T>
{
type Response =
super::super::super::super::iam::v1::TestIamPermissionsResponse;
type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;
fn call(
&mut self,
request: tonic::Request<
super::super::super::super::iam::v1::TestIamPermissionsRequest,
>,
) -> Self::Future {
let inner = self.0.clone();
let fut = async move { inner.test_iam_permissions(request).await };
Box::pin(fut)
}
}
let inner = self.inner.clone();
let fut = async move {
let interceptor = inner.1.clone();
let inner = inner.0;
let method = TestIamPermissionsSvc(inner);
let codec = tonic::codec::ProstCodec::default();
let mut grpc = if let Some(interceptor) = interceptor {
tonic::server::Grpc::with_interceptor(codec, interceptor)
} else {
tonic::server::Grpc::new(codec)
};
let res = grpc.unary(method, req).await;
Ok(res)
};
Box::pin(fut)
}
_ => Box::pin(async move {
Ok(http::Response::builder()
.status(200)
.header("grpc-status", "12")
.body(tonic::body::BoxBody::empty())
.unwrap())
}),
}
}
}
// Generated gRPC glue (tonic): cloning the server shares the `_Inner`
// wrapper (service + optional interceptor) rather than the service itself.
impl<T: SecretManagerService> Clone for SecretManagerServiceServer<T> {
    fn clone(&self) -> Self {
        let inner = self.inner.clone();
        Self { inner }
    }
}
// Clones both tuple fields; in the routing code above field 0 is the service
// and field 1 is bound as the interceptor.
impl<T: SecretManagerService> Clone for _Inner<T> {
    fn clone(&self) -> Self {
        Self(self.0.clone(), self.1.clone())
    }
}
// Debug output shows only the wrapped service (field 0), not the interceptor.
impl<T: std::fmt::Debug> std::fmt::Debug for _Inner<T> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{:?}", self.0)
    }
}
// Fully-qualified gRPC service name, used by tonic for HTTP/2 request routing.
impl<T: SecretManagerService> tonic::transport::NamedService for SecretManagerServiceServer<T> {
    const NAME: &'static str = "google.cloud.secretmanager.v1.SecretManagerService";
}
}
| true
|
4e2e9a185dcbffb6f7537744ee728bdc665427dc
|
Rust
|
modernserf/aoc-2017
|
/day-2/src/main.rs
|
UTF-8
| 1,352
| 3.3125
| 3
|
[] |
no_license
|
use std::cmp;
/// Entry point: parses the embedded spreadsheet and prints both Advent of
/// Code day-2 checksums.
fn main() {
    let data = parse_tsv(include_str!("input.txt"));

    let part1: u32 = data.iter().map(|row| row_checksum(row)).sum();
    println!("part 1: {}", part1);

    let part2: u32 = data.iter().map(|row| divisible_row_checksum(row)).sum();
    println!("part 2: {}", part2);
}
/// Parses tab-separated integers into rows, one `Vec<u32>` per input line.
/// Cells that fail to parse are silently dropped, and rows left with no
/// cells at all (e.g. blank lines) are excluded from the result.
fn parse_tsv(tsv: &str) -> Vec<Vec<u32>> {
    let mut rows = Vec::new();
    for line in tsv.split("\n") {
        let row: Vec<u32> = line
            .split("\t")
            .filter_map(|cell| cell.parse().ok())
            .collect();
        if !row.is_empty() {
            rows.push(row);
        }
    }
    rows
}
/// Part-1 checksum of one row: difference between its largest and smallest
/// value. Panics (via `min_max`) on an empty slice.
fn row_checksum(xs: &[u32]) -> u32 {
    let (smallest, largest) = min_max(xs);
    largest - smallest
}
/// Part-2 checksum of one row: finds the unique pair where one value evenly
/// divides the other and returns the quotient. Panics if no such pair exists.
fn divisible_row_checksum(xs: &[u32]) -> u32 {
    for (i, &x) in xs.iter().enumerate() {
        // Only pairs ahead of `x` — earlier ones were already tried.
        for &y in &xs[i + 1..] {
            let (small, large) = order(x, y);
            if large % small == 0 {
                return large / small;
            }
        }
    }
    panic!("Couldn't find divisible items in row")
}
/// Returns the `(minimum, maximum)` of a non-empty slice in a single pass.
/// Panics if `xs` is empty.
fn min_max<T: Ord + Copy>(xs: &[T]) -> (T, T) {
    let mut lo = xs[0];
    let mut hi = xs[0];
    for &x in xs {
        if x < lo {
            lo = x;
        }
        if x > hi {
            hi = x;
        }
    }
    (lo, hi)
}
/// Returns the pair `(a, b)` arranged so the smaller value comes first.
/// For equal (or incomparable) values the order is `(b, a)`.
fn order<T: PartialOrd>(a: T, b: T) -> (T, T) {
    match a < b {
        true => (a, b),
        false => (b, a),
    }
}
| true
|
1c8821498124555708c9568fc32792bed164a26f
|
Rust
|
storance/krpc-bindings-rs
|
/src/spacecenter/parts/engine.rs
|
UTF-8
| 14,598
| 3.015625
| 3
|
[
"Apache-2.0"
] |
permissive
|
use super::Part;
use crate::codec::{Decode, Encode};
use crate::spacecenter::Thruster;
use crate::{remote_type, RemoteObject, Vector3};
use std::collections::BTreeMap;
remote_type!(
    /// An engine, including ones of various types. For example liquid fuelled gimballed engines,
    /// solid rocket boosters and jet engines. Obtained by calling `Part::engine()`.
    object SpaceCenter.Engine {
        properties: {
            // remote_type! DSL: each `{ Name { get/set } }` entry presumably expands into
            // accessor methods for the kRPC property `Name` — see the macro definition in this crate.
            {
                Part {
                    /// Returns the part object for this engine.
                    ///
                    /// **Game Scenes**: All
                    get: part -> Part
                }
            }
            {
                Active {
                    /// Returns whether the engine is active.
                    ///
                    /// **Game Scenes**: All
                    get: is_active -> bool,
                    /// Sets whether the engine is active. Setting this attribute may have no effect,
                    /// depending on `Engine::can_shutdown()` and `Engine.can_restart()`.
                    ///
                    /// **Game Scenes**: All
                    set: set_active(bool)
                }
            }
            {
                Thrust {
                    /// Returns the current amount of thrust being produced by the engine, in Newtons.
                    ///
                    /// **Game Scenes**: All
                    get: thrust -> f32
                }
            }
            {
                AvailableThrust {
                    /// Returns the amount of thrust, in Newtons, that would be produced by the engine
                    /// when activated and with its throttle set to 100%. Returns zero if the engine
                    /// does not have any fuel. Takes the engine’s current `Engine::thrust_limit()`
                    /// and atmospheric conditions into account.
                    ///
                    /// **Game Scenes**: All
                    get: available_thrust -> f32
                }
            }
            {
                MaxThrust {
                    /// Returns the amount of thrust, in Newtons, that would be produced by the engine
                    /// when activated and fueled, with its throttle and throttle limiter set to 100%.
                    ///
                    /// **Game Scenes**: All
                    get: max_thrust -> f32
                }
            }
            {
                MaxVacuumThrust {
                    /// Returns the maximum amount of thrust that can be produced by the engine in a
                    /// vacuum, in Newtons. This is the amount of thrust produced by the engine when
                    /// activated, `Engine.thrust_limit()` is set to 100%, the main vessel’s throttle is
                    /// set to 100% and the engine is in a vacuum.
                    ///
                    /// **Game Scenes**: All
                    get: max_vacuum_thrust -> f32
                }
            }
            {
                ThrustLimit {
                    /// Returns the thrust limiter of the engine.
                    ///
                    /// **Game Scenes**: All
                    ///
                    /// # Returns
                    /// A value between 0 and 1.
                    get: thrust_limit -> f32,
                    /// Sets the thrust limiter of the engine. Setting this
                    /// attribute may have no effect, for example the thrust limit for a solid rocket
                    /// booster cannot be changed in flight.
                    ///
                    /// **Game Scenes**: All
                    ///
                    /// # Arguments
                    /// * `value` - The thrust limit to set as a value between 0 and 1.
                    set: set_thrust_limit(f32)
                }
            }
            {
                Thrusters {
                    /// Returns the components of the engine that generate thrust.
                    ///
                    /// **Game Scenes**: All
                    ///
                    /// # Note
                    /// For example, this corresponds to the rocket nozzel on a solid rocket booster, or
                    /// the individual nozzels on a RAPIER engine. The overall thrust produced by the
                    /// engine, as reported by `Engine::available_thrust()`, `Engine::max_thrust()` and
                    /// others, is the sum of the thrust generated by each thruster.
                    get: thrusters -> Vec<Thruster>
                }
            }
            {
                SpecificImpulse {
                    /// Returns the current specific impulse of the engine, in seconds. Returns zero if
                    /// the engine is not active.
                    ///
                    /// **Game Scenes**: All
                    get: isp -> f32
                }
            }
            {
                VacuumSpecificImpulse {
                    /// Returns the vacuum specific impulse of the engine, in seconds.
                    ///
                    /// **Game Scenes**: All
                    get: vacuum_isp -> f32
                }
            }
            {
                KerbinSeaLevelSpecificImpulse {
                    /// Returns the specific impulse of the engine at sea level on Kerbin, in seconds.
                    ///
                    /// **Game Scenes**: All
                    get: kerbin_sea_level_isp -> f32
                }
            }
            {
                PropellantNames {
                    /// Returns the names of the propellants that the engine consumes.
                    ///
                    /// **Game Scenes**: All
                    get: propellant_names -> Vec<String>
                }
            }
            {
                PropellantRatios {
                    /// Returns the ratio of resources that the engine consumes. A dictionary mapping
                    /// resource names to the ratio at which they are consumed by the engine.
                    ///
                    /// **Game Scenes**: All
                    ///
                    /// # Note
                    /// For example, if the ratios are 0.6 for LiquidFuel and 0.4 for Oxidizer, then for
                    /// every 0.6 units of LiquidFuel that the engine burns, it will burn 0.4 units
                    /// of Oxidizer.
                    get: propellant_ratios -> BTreeMap<String, f32>
                }
            }
            {
                Propellants {
                    /// Returns the propellants that the engine consumes.
                    ///
                    /// **Game Scenes**: All
                    get: propellants -> Vec<Propellant>
                }
            }
            {
                HasFuel {
                    /// Returns whether the engine has any fuel available.
                    ///
                    /// **Game Scenes**: All
                    ///
                    /// # Note
                    /// The engine must be activated for this property to update correctly.
                    get: has_fuel -> bool
                }
            }
            {
                Throttle {
                    /// Returns the current throttle setting for the engine. A value between 0 and 1.
                    /// This is not necessarily the same as the vessel’s main throttle setting, as some
                    /// engines take time to adjust their throttle (such as jet engines).
                    ///
                    /// **Game Scenes**: All
                    get: throttle -> f32
                }
            }
            {
                ThrottleLocked {
                    /// Returns whether the `Control::throttle()` affects the engine. For example, this
                    /// is true for liquid fueled rockets, and false for solid rocket boosters.
                    ///
                    /// **Game Scenes**: All
                    get: is_throttle_locked -> bool
                }
            }
            {
                CanRestart {
                    /// Returns whether he engine can be restarted once shutdown. If the engine cannot be
                    /// shutdown, returns `false`. For example, this is `true` for liquid fueled rockets
                    /// and `false` for solid rocket boosters.
                    ///
                    /// **Game Scenes**: All
                    get: can_restart -> bool
                }
            }
            {
                CanShutdown {
                    /// Returns whether the engine can be shutdown once activated. For example, this is
                    /// `true` for liquid fueled rockets and `false` for solid rocket boosters.
                    ///
                    /// **Game Scenes**: All
                    get: can_shutdown -> bool
                }
            }
            {
                HasModes {
                    /// Returns whether the engine has multiple modes of operation.
                    ///
                    /// **Game Scenes**: All
                    get: has_modes -> bool
                }
            }
            {
                Mode {
                    /// Returns the name of the current engine mode.
                    ///
                    /// **Game Scenes**: All
                    get: mode -> String,
                    /// Sets the name of the current engine mode.
                    ///
                    /// **Game Scenes**: All
                    set: set_mode(&str)
                }
            }
            {
                Modes {
                    /// Returns the available modes for the engine. A dictionary mapping mode
                    /// names to Engine objects.
                    ///
                    /// **Game Scenes**: All
                    get: modes -> BTreeMap<String, Engine>
                }
            }
            {
                AutoModeSwitch {
                    /// Returns whether the engine will automatically switch modes.
                    ///
                    /// **Game Scenes**: All
                    get: is_auto_mode_switch -> bool,
                    /// Sets whether the engine will automatically switch modes.
                    ///
                    /// **Game Scenes**: All
                    set: set_auto_mode_switch(bool)
                }
            }
            {
                Gimballed {
                    /// Returns whether the engine is gimballed.
                    ///
                    /// **Game Scenes**: All
                    get: is_gimballed -> bool
                }
            }
            {
                GimbalRange {
                    /// Returns the range over which the gimbal can move, in degrees. Returns 0 if the
                    /// engine is not gimballed.
                    ///
                    /// **Game Scenes**: All
                    get: gimbal_range -> f32
                }
            }
            {
                GimbalLocked {
                    /// Returns whether the engines gimbal is locked in place.
                    ///
                    /// **Game Scenes**: All
                    get: is_gimbal_locked -> bool,
                    /// Sets whether the engines gimbal is locked in place. Setting this attribute has no
                    /// effect if the engine is not gimballed.
                    ///
                    /// **Game Scenes**: All
                    set: set_gimbal_locked(bool)
                }
            }
            {
                GimbalLimit {
                    /// Returns the gimbal limiter of the engine. A value between 0 and 1.
                    /// Returns 0 if the gimbal is locked.
                    ///
                    /// **Game Scenes**: All
                    get: gimbal_limit -> f32,
                    /// Sets the gimbal limiter of the engine. A value between 0 and 1.
                    ///
                    /// **Game Scenes**: All
                    set: set_gimbal_limit(f32)
                }
            }
            {
                AvailableTorque {
                    /// Returns the available torque, in Newton meters, that can be produced by this
                    /// engine, in the positive and negative pitch, roll and yaw axes of the vessel.
                    /// These axes correspond to the coordinate axes of the `Vessel::reference_frame()`.
                    /// Returns zero if the engine is inactive, or not gimballed.
                    ///
                    /// **Game Scenes**: All
                    get: available_torque -> (Vector3, Vector3)
                }
            }
        }
        methods: {
            // Remote procedures with no return value; the inner name is the kRPC RPC name.
            {
                /// Toggle the current engine mode.
                ///
                /// **Game Scenes**: All
                fn toggle_mode() {
                    ToggleMode()
                }
            }
        }
    });
remote_type!(
    /// A propellant for an engine. Obtained by calling `Engine::propellants()`.
    object SpaceCenter.Propellant {
        properties: {
            // All properties below are read-only (get: only, no set:).
            {
                Name {
                    /// Returns the name of the propellant.
                    ///
                    /// **Game Scenes**: All
                    get: name -> String
                }
            }
            {
                CurrentAmount {
                    /// Returns the current amount of propellant.
                    ///
                    /// **Game Scenes**: All
                    get: current_amount -> f64
                }
            }
            {
                CurrentRequirement {
                    /// Returns the required amount of propellant.
                    ///
                    /// **Game Scenes**: All
                    get: current_requirement -> f64
                }
            }
            {
                TotalResourceAvailable {
                    /// Returns the total amount of the underlying resource currently reachable
                    /// given resource flow rules.
                    ///
                    /// **Game Scenes**: All
                    get: total_resource_available -> f64
                }
            }
            {
                TotalResourceCapacity {
                    /// Returns the total vehicle capacity for the underlying propellant resource,
                    /// restricted by resource flow rules.
                    ///
                    /// **Game Scenes**: All
                    get: total_resource_capacity -> f64
                }
            }
            {
                IgnoreForIsp {
                    /// Returns if this propellant should be ignored when calculating required mass
                    /// flow given specific impulse.
                    ///
                    /// **Game Scenes**: All
                    get: is_ignore_for_isp -> bool
                }
            }
            {
                IgnoreForThrustCurve {
                    /// Returns if this propellant should be ignored for thrust curve calculations.
                    ///
                    /// **Game Scenes**: All
                    get: is_ignore_for_thrust_curve -> bool
                }
            }
            {
                DrawStackGauge {
                    /// Returns if this propellant has a stack gauge or not.
                    ///
                    /// **Game Scenes**: All
                    get: is_draw_stack_gauge -> bool
                }
            }
            {
                IsDeprived {
                    /// Returns if this propellant is deprived.
                    ///
                    /// **Game Scenes**: All
                    get: is_deprived -> bool
                }
            }
            {
                Ratio {
                    /// Returns the propellant ratio.
                    ///
                    /// **Game Scenes**: All
                    get: ratio -> f32
                }
            }
        }
    });
| true
|
037a3304727026437e70d0a1a720f2a0db2208ec
|
Rust
|
comp590-19s/starter-ps06-thegrep-nfa
|
/nfa.rs
|
UTF-8
| 3,893
| 3.390625
| 3
|
[] |
no_license
|
pub mod helpers;
// Starter code for PS06 - thegrep
// Add Honor Code Header and Collaborators Here
use super::parser::Parser;
use super::parser::AST;
use super::tokenizer::Tokenizer;
use self::State::*;
/**
* ===== Public API =====
*/
/**
* An NFA is represented by an arena Vec of States
* and a start state.
*/
#[derive(Debug)]
pub struct NFA {
    start: StateId,     // id of the Start state within `states`
    states: Vec<State>, // arena: a StateId is an index into this Vec
}
impl NFA {
/**
* Construct an NFA from a regular expression pattern.
*/
pub fn from(regular_expression: &str) -> Result<NFA, String> {
let mut nfa = NFA::new();
let start = nfa.add(Start(None));
nfa.start = start;
// Parse the Abstract Syntax Tree of the Regular Expression
let ast = &Parser::parse(Tokenizer::new(regular_expression))?;
// The "body" of the NFA is made of the states between Start and End
let body = nfa.gen_fragment(ast);
nfa.join(nfa.start, body.start);
let end = nfa.add(End);
nfa.join_fragment(&body, end);
Ok(nfa)
}
/**
* Given an input string, simulate the NFA to determine if the
* input is accepted by the input string.
*/
pub fn accepts(&self, input: &str) -> bool {
false
}
}
/**
* ===== Internal API =====
*/
type StateId = usize;
/**
* States are the elements of our NFA Graph
* - Start is starting state
* - Match is a state with a single matching transition out
* - Split is a state with two epsilon transitions out
* - End is the final accepting state
*/
#[derive(Debug)]
enum State {
    Start(Option<StateId>),                  // entry; epsilon edge to the first real state
    Match(Char, Option<StateId>),            // consumes one char matching `Char`, then follows the edge
    Split(Option<StateId>, Option<StateId>), // two epsilon transitions out
    End,                                     // accepting state; no outgoing edges
}
/**
* Chars are the matching label of a non-epsilon edge in the
* transition diagram representation of the NFA.
*/
#[derive(Debug)]
enum Char {
    Literal(char), // matches exactly this character
    Any,           // matches any single character
}
/**
* Internal representation of a fragment of an NFA being constructed
* that keeps track of the start ID of the fragment as well as all of
* its unjoined end states.
*/
#[derive(Debug)]
struct Fragment {
    start: StateId,     // entry state of this partial NFA
    ends: Vec<StateId>, // states whose outgoing edges are still unbound
}
/**
* Private methods of the NFA structure.
*/
impl NFA {
    /**
     * Constructor establishes an empty states Vec.
     */
    fn new() -> NFA {
        NFA {
            states: vec![],
            // `start` is provisional here; `from` overwrites it once the
            // Start state has been added to the arena.
            start: 0,
        }
    }
    /**
     * Add a state to the NFA and get its arena ID back.
     */
    fn add(&mut self, state: State) -> StateId {
        let idx = self.states.len();
        self.states.push(state);
        idx
    }
    /**
     * Given an AST node, this method returns a Fragment of the NFA
     * representing it and its children.
     */
    fn gen_fragment(&mut self, ast: &AST) -> Fragment {
        match ast {
            AST::AnyChar => {
                // AnyChar becomes a single Match(Any) state whose outgoing
                // edge is left dangling for the caller to join.
                let state = self.add(Match(Char::Any, None));
                Fragment {
                    start: state,
                    ends: vec![state],
                }
            },
            // Starter code: the remaining AST variants are not implemented yet.
            node => panic!("Unimplemented branch of gen_fragment: {:?}", node)
        }
    }
    /**
     * Join all the loose ends of a fragment to another StateId.
     */
    fn join_fragment(&mut self, lhs: &Fragment, to: StateId) {
        for end in &lhs.ends {
            self.join(*end, to);
        }
    }
    /**
     * Join a loose end of one state to another by IDs.
     * Note in the Split case, only the 2nd ID (rhs) is being bound.
     * It is assumed when building an NFA with these constructs
     * that the lhs of an Split state will always be known and bound.
     */
    fn join(&mut self, from: StateId, to: StateId) {
        match self.states[from] {
            Start(ref mut next) => *next = Some(to),
            Match(_, ref mut next) => *next = Some(to),
            Split(_, ref mut next) => *next = Some(to),
            End => {} // accepting state: nothing to bind
        }
    }
}
| true
|
cd0c493ce3e1417f8acd8747d538ef738dc04f3f
|
Rust
|
mikialex/rainray
|
/src/bvh.rs
|
UTF-8
| 4,161
| 3.15625
| 3
|
[] |
no_license
|
use crate::math::*;
/// Strategy for choosing where to split a BVH node.
/// NOTE(review): nothing in this file consumes SplitMethod yet; only the
/// equal-counts strategy has a (stub) builder, `build_equal_counts`.
#[derive(Debug, Clone)]
pub enum SplitMethod {
    SAH,         // surface-area heuristic (no builder in this file)
    Middle,      // spatial-middle split (no builder in this file)
    EqualCounts, // split into halves with equal primitive counts
}
/// Axis selector for BVH splits.
/// NOTE(review): lowercase variant names are non-idiomatic Rust (`X`/`Y`/`Z`
/// would be conventional), but renaming would touch every match in this file.
pub enum Axis {
    x,
    y,
    z,
}
/// A node in the bounding-volume hierarchy. Each node covers the primitive
/// range `[primitive_start, primitive_start + primitive_count)` and owns its
/// children (if any) via `left`/`right`.
pub struct BVHNode {
    pub bounding_box: Box3,
    pub left: Option<Box<BVHNode>>,
    pub right: Option<Box<BVHNode>>,
    pub split_axis: Option<Axis>, // longest box axis, filled by computed_split_axis
    pub primitive_start: u64,
    pub primitive_count: u64,
    pub depth: u64, // NOTE(review): never incremented anywhere in this file
}
// Split-termination thresholds consulted by `BVHNode::should_split`.
const BVH_MAX_BIN_SIZE: u64 = 1;
const BVH_MAX_DEPTH: u64 = 10;
// https://matthias-endler.de/2017/boxes-and-trees/
impl BVHNode {
    /// Builds a leaf node spanning `count` primitives starting at `start`,
    /// bounded by the union of their bounding boxes. Panics (via
    /// `get_range_primitives_bounding`) when the range is out of bounds.
    pub fn build_from_range_primitives(
        primitive_list: &Vec<Primitive>,
        start: u64,
        count: u64,
    ) -> BVHNode {
        let bbox = get_range_primitives_bounding(primitive_list, start, count);
        return BVHNode {
            bounding_box: bbox,
            left: None,
            right: None,
            split_axis: None,
            primitive_start: start,
            primitive_count: count,
            depth: 0,
        };
    }
    /// Caches the longest axis of this node's bounding box as the split axis.
    pub fn computed_split_axis(&mut self) {
        self.split_axis = Some(get_longest_axis_of_bounding(&self.bounding_box))
    }
    /// NOTE(review): this predicate looks inverted. It is true when the node
    /// already holds fewer than BVH_MAX_BIN_SIZE primitives or is too deep —
    /// i.e. exactly when splitting should STOP — yet `split` proceeds only
    /// when it returns true. Likely intended:
    /// `primitive_count > BVH_MAX_BIN_SIZE && depth < BVH_MAX_DEPTH`.
    pub fn should_split(&self) -> bool {
        return self.primitive_count < BVH_MAX_BIN_SIZE || self.depth > BVH_MAX_DEPTH;
    }
    /// Sorts primitives along the chosen split axis, then asks `spliter` to
    /// create the children.
    ///
    /// NOTE(review): work in progress —
    /// * the whole `primtive_list` is sorted, not just this node's range;
    /// * only `self.left` is recursed into (never `self.right`), and the
    ///   `&node.split(...)` reference is built and immediately discarded;
    /// * `build_equal_counts` sets `left` to None, so reaching the recursion
    ///   would hit the `None => panic!("")` arm.
    pub fn split(&mut self, primtive_list: &mut [Primitive], spliter: &dyn Fn(&mut BVHNode) -> ()) {
        if !self.should_split() {
            return;
        }
        self.computed_split_axis();
        // TODO opti, maybe we should put this procedure in spliter
        match self.split_axis {
            Some(Axis::x) => primtive_list.sort_unstable_by(|a, b| a.cmp_center_x(b)),
            Some(Axis::y) => primtive_list.sort_unstable_by(|a, b| a.cmp_center_y(b)),
            Some(Axis::z) => primtive_list.sort_unstable_by(|a, b| a.cmp_center_z(b)),
            None => panic!(""), // unreachable: computed_split_axis just set it
        }
        spliter(self);
        match &mut self.left {
            Some(node) => &node.split(primtive_list, spliter),
            None => panic!(""),
        };
        // self.left.split(primtive_list, spliter);
    }
}
/// Equal-counts "spliter" callback for `BVHNode::split`.
/// NOTE(review): currently a stub — it clears both children instead of
/// creating them, so the split recursion has nothing to descend into.
fn build_equal_counts(node: &mut BVHNode) {
    node.left = None;
    node.right = None;
}
/// A primitive as seen by the BVH: its bounds, a precomputed center point
/// (used for axis sorting during splits), and an index back into the
/// caller's primitive storage.
pub struct Primitive {
    pub bounding_box: Box3,
    pub center_point: Vec3,
    pub index: u64,
}
impl Primitive {
    /// Orders two primitives by the x coordinate of their center points.
    ///
    /// Fix: the original returned `Greater` whenever the coordinate was not
    /// strictly less — including ties — so `cmp(a, b)` and `cmp(b, a)` could
    /// both be `Greater`. That violates the total-order contract required by
    /// `sort_unstable_by` (recent std sorts can panic on such comparators).
    /// Ties now compare `Equal`. NaN coordinates also fall back to `Equal`;
    /// presumably centers are always finite — TODO confirm.
    pub fn cmp_center_x(&self, other: &Primitive) -> std::cmp::Ordering {
        self.center_point
            .x
            .partial_cmp(&other.center_point.x)
            .unwrap_or(std::cmp::Ordering::Equal)
    }
    /// Orders two primitives by the y coordinate of their center points
    /// (see `cmp_center_x` for the tie/NaN handling rationale).
    pub fn cmp_center_y(&self, other: &Primitive) -> std::cmp::Ordering {
        self.center_point
            .y
            .partial_cmp(&other.center_point.y)
            .unwrap_or(std::cmp::Ordering::Equal)
    }
    /// Orders two primitives by the z coordinate of their center points
    /// (see `cmp_center_x` for the tie/NaN handling rationale).
    pub fn cmp_center_z(&self, other: &Primitive) -> std::cmp::Ordering {
        self.center_point
            .z
            .partial_cmp(&other.center_point.z)
            .unwrap_or(std::cmp::Ordering::Equal)
    }
}
/// Bounding-volume-hierarchy acceleration structure: the node tree plus the
/// primitive array that the nodes' ranges index into.
pub struct BVHAccel {
    root: BVHNode,
    primitives: Vec<Primitive>,
}
impl BVHAccel {
    /// Builds the acceleration structure: creates a root node spanning every
    /// primitive, then recursively splits it with the equal-counts strategy.
    pub fn build(primitives: Vec<Primitive>) -> BVHAccel {
        let total = primitives.len() as u64;
        let root = BVHNode::build_from_range_primitives(&primitives, 0, total);
        let mut bvh = BVHAccel { root, primitives };
        bvh.root.split(&mut bvh.primitives, &build_equal_counts);
        bvh
    }
}
/// Returns the union of the bounding boxes of `count` primitives starting at
/// `start`. Panics if the range is empty or out of bounds.
fn get_range_primitives_bounding(primitive_list: &Vec<Primitive>, start: u64, count: u64) -> Box3 {
    let lo = start as usize;
    let hi = (start + count) as usize;
    // Seed with the first primitive's box, then grow over the range.
    let mut bbox = primitive_list[lo].bounding_box.clone();
    for primitive in &primitive_list[lo..hi] {
        bbox.extend_by_box(&primitive.bounding_box);
    }
    bbox
}
/// Picks the axis along which the box is longest; ties go to z, then y
/// (strict `>` comparisons throughout).
fn get_longest_axis_of_bounding(bbox: &Box3) -> Axis {
    let dx = bbox.max.x - bbox.min.x;
    let dy = bbox.max.y - bbox.min.y;
    let dz = bbox.max.z - bbox.min.z;
    if dx > dy {
        if dx > dz {
            Axis::x
        } else {
            Axis::z
        }
    } else if dy > dz {
        Axis::y
    } else {
        Axis::z
    }
}
| true
|
35d66a900fc7e6b94158d7ad76a87fe14876a5f0
|
Rust
|
CodeSandwich/BrainRust
|
/src/memory.rs
|
UTF-8
| 725
| 2.984375
| 3
|
[
"MIT"
] |
permissive
|
use MemoryCellEditor;
/// A tape of cells addressable by a signed index. Non-negative indices live
/// in `positive_range`; index -1 maps to `negative_range[0]`, -2 to
/// `negative_range[1]`, and so on. Both halves grow on demand in `get`.
pub struct Memory<T> {
    positive_range: Vec<T>,
    negative_range: Vec<T>,
}
impl<T> Memory<T> {
    /// Creates a tape with no cells allocated on either side.
    pub fn new() -> Memory<T> {
        Memory {
            positive_range: Vec::new(),
            negative_range: Vec::new(),
        }
    }

    /// Returns a mutable reference to the cell at `index`, materialising any
    /// missing cells up to it with values produced by `cell_editor.empty()`.
    ///
    /// Negative indices map onto the negative half: -1 is slot 0, -2 is
    /// slot 1, and so on.
    pub fn get<U: MemoryCellEditor<T>>(&mut self, index: isize, cell_editor: &U) -> &mut T {
        let (range, actual_index) = if index < 0 {
            (&mut self.negative_range, (-index - 1) as usize)
        } else {
            (&mut self.positive_range, index as usize)
        };
        while range.len() <= actual_index {
            range.push(cell_editor.empty());
        }
        &mut range[actual_index]
    }
}
| true
|
6a917752b01cda3f0d768156e4ec9ef0facbc276
|
Rust
|
justin1dennison/gribbrs
|
/src/sections/grid_definition.rs
|
UTF-8
| 2,328
| 2.84375
| 3
|
[] |
no_license
|
use crate::tables::{InterpretationOfNumbersAtEndOfSection3, SourceOfGridDefinition};
use byteorder::{BigEndian, ReadBytesExt};
use serde::{Deserialize, Serialize};
use std::io::{Read, Seek};
/// Parsed GRIB2 Grid Definition Section (Section 3) header, fields in the
/// order they appear on the wire.
#[derive(Debug, PartialEq, Serialize, Deserialize)]
pub struct GridDefinition {
    pub length: u32, // total section length in bytes; the fixed header is 14, the rest is `reserved`
    pub number_of_section: u8,
    pub source: SourceOfGridDefinition,
    pub number_of_data_points: u32,
    pub number_of_optional_numbers: u8,
    pub interpretation_of_numbers_at_end_of_section_3: InterpretationOfNumbersAtEndOfSection3,
    pub template_no: u16,
    pub reserved: Vec<u8>, // trailing `length - 14` bytes, kept verbatim
}
impl<R: Read + Seek> From<R> for GridDefinition {
    /// Parses a Grid Definition section from a byte stream.
    ///
    /// Reads, in order: section length (u32 BE), section number (u8), source
    /// (u8), number of data points (u32 BE), number of optional numbers (u8),
    /// their interpretation (u8), template number (u16 BE), and finally the
    /// remaining `length - 14` reserved bytes.
    ///
    /// Panics if the stream ends early or if `length < 14` (the fixed header
    /// alone is 14 bytes).
    fn from(mut r: R) -> Self {
        let length = r
            .read_u32::<BigEndian>()
            .expect("Could not read the length of GridDefinition");
        let number_of_section = r
            .read_u8()
            .expect("Could not read the number of section for GridDefinition");
        let source = r
            .read_u8()
            .map(SourceOfGridDefinition::from)
            .expect("Could not read the section for GridDefinition");
        let number_of_data_points = r
            .read_u32::<BigEndian>()
            // Fix: corrected "GridDefintion" typo in the panic message.
            .expect("Could not read the number of points for GridDefinition");
        let number_of_optional_numbers = r
            .read_u8()
            .expect("Could not read number of optional numbers for GridDefinition");
        let interpretation_of_numbers_at_end_of_section_3 = r
            .read_u8()
            .map(InterpretationOfNumbersAtEndOfSection3::from)
            .expect("Could not read interpretation for GridDefinition");
        let template_no = r
            .read_u16::<BigEndian>()
            .expect("Could not read template number for GridDefinition");
        // Fix: `length - 14` silently wraps in release builds when a malformed
        // stream declares a length below the 14-byte fixed header, which would
        // then try to allocate/read ~4 GiB; fail loudly instead.
        let reserved_len = length
            .checked_sub(14)
            .expect("GridDefinition length must be at least 14 bytes") as usize;
        // Fix: one bulk read_exact instead of a read_u8 call per byte.
        let mut reserved = vec![0u8; reserved_len];
        r.read_exact(&mut reserved)
            .expect("Could not read reserved bytes for GridDefinition");
        GridDefinition {
            length,
            number_of_section,
            source,
            number_of_data_points,
            number_of_optional_numbers,
            interpretation_of_numbers_at_end_of_section_3,
            template_no,
            reserved,
        }
    }
}
| true
|
ff7d83802ace74f8117eb24e05fb944a1f83dafd
|
Rust
|
luteberget/junction
|
/lib/route_finder/src/routes.rs
|
UTF-8
| 11,930
| 2.625
| 3
|
[] |
no_license
|
use rolling::input::staticinfrastructure::*;
use std::collections::{HashSet, BTreeSet};
use smallvec::SmallVec;
/// Non-fatal problems found while enumerating routes; collected and returned
/// alongside the successfully built routes.
#[derive(Debug)]
pub enum ConvertRouteIssue {
    NoBoundaries, // the model has no boundary nodes to start searching from
    StateConversionFailed(RouteEntryExit, RouteEntryExit), // NOTE(review): never constructed in this file
    ExitedUnenteredSection(RouteEntryExit, ObjectId), // a TVD exit was seen without a matching enter
    RouteTooShort(RouteEntryExit, RouteEntryExit), // candidate shorter than Config::route_minimum_length
}
/// Fatal error that aborts route enumeration entirely — currently only
/// produced for switches whose links do not reference the arriving node.
#[derive(Debug)]
pub enum ConvertRouteError {
    String(String),
}
pub struct Config {
// TODO what can we put in here
//
// 0. section overlap minimum length
// 1. route minimum length
// ---
// 2. overlap policy
section_tolerance :f64,
route_minimum_length: f64,
}
impl Default for Config {
    /// Both thresholds default to 15.0 (model length units).
    fn default() -> Config {
        Config {
            section_tolerance: 15.0,
            route_minimum_length: 15.0,
        }
    }
}
/// A pending start point for the route search: the node to search from, how
/// the route is entered (signal or boundary), and the detection section the
/// entry point lies in, if any.
#[derive(Debug, Clone)]
struct RouteEntry {
    pub node: NodeId,
    pub entry: RouteEntryExit,
    pub section: Option<ObjectId>,
}
/// Mutable state of one depth-first search branch.
#[derive(Debug, Clone)]
pub struct Path {
    node: NodeId,  // current node
    length: f64,   // distance travelled so far
    entered_sections: SmallVec<[(ObjectId, f64); 2]>, // (section, length at entry) — still open
    exited_sections: SmallVec<[(ObjectId, f64, f64, Option<NodeId>); 4]>, // (section, entry length, exit length, exit node)
    switches: SmallVec<[(ObjectId, f64, SwitchPosition); 4]>, // (switch, length at switch, position taken)
    edges_taken: Vec<(NodeId, NodeId)>, // exact edge sequence of this branch
}
pub type RoutePath = Vec<(NodeId,NodeId)>;
/// Walks the infrastructure graph from every model boundary and enumerates
/// elementary routes: paths that start at a boundary or signal and end at the
/// next signal or boundary reached. Returns the routes (paired with the edge
/// path used to build each) plus any non-fatal issues found on the way.
pub fn find_routes(config: Config, model: &StaticInfrastructure) -> Result<(Vec<(Route, RoutePath)>, Vec<ConvertRouteIssue>), ConvertRouteError> {
    let mut routes = Vec::new();
    let mut issues = Vec::new();

    let boundary_nodes = model.nodes.iter().enumerate()
        .filter_map(|(i, x)| if let Edges::ModelBoundary = x.edges { Some(i) }
                    else { None });

    let mut entry_visited = HashSet::new();

    for boundary_idx in boundary_nodes {
        //println!("Boundary start {:?}", boundary_idx)
        let mut entry_stack = Vec::new();
        entry_stack.push(RouteEntry {
            node: model.nodes[boundary_idx].other_node,
            entry: RouteEntryExit::Boundary(Some(boundary_idx)),
            section: None,
        });
        entry_visited.insert(model.nodes[boundary_idx].other_node);

        while entry_stack.len() > 0 {
            let entry = entry_stack.pop().unwrap();
            let mut search_stack = Vec::new();
            // Guards against revisiting the same combination of switch
            // positions, which would otherwise loop forever on cyclic layouts.
            let mut switches_path_visited: BTreeSet<BTreeSet<(ObjectId, SwitchPosition)>> = BTreeSet::new();
            search_stack.push(Path {
                node: entry.node,
                entered_sections: entry.section.into_iter().map(|x| (x, 0.0)).collect(),
                exited_sections: SmallVec::new(),
                switches: SmallVec::new(),
                length: 0.0,
                edges_taken: vec![],
            });

            while search_stack.len() > 0 {
                let mut curr_state = search_stack.pop().unwrap();
                loop { // TODO make absolutely sure this terminates
                    let mut is_exit = false;
                    // Check what is in here
                    for obj_idx in model.nodes[curr_state.node].objects.iter() {
                        match &model.objects[*obj_idx] {
                            StaticObject::Signal { .. } if curr_state.node != entry.node => {
                                // A facing signal ends the current route...
                                let exit = RouteEntryExit::Signal(*obj_idx);
                                match make_route(&config, &curr_state, entry.entry, exit) {
                                    Ok(route) => routes.push((route, curr_state.edges_taken.clone())),
                                    Err(err) => issues.push(err),
                                }
                                // ...and also starts a new search entry, once.
                                if entry_visited.insert(curr_state.node) {
                                    entry_stack.push(RouteEntry {
                                        node: curr_state.node,
                                        entry: RouteEntryExit::Signal(*obj_idx),
                                        section: curr_state.entered_sections.iter().nth(0).map(|x| x.0),
                                    });
                                }
                                is_exit = true;
                            },
                            StaticObject::TVDLimit { enter, exit } => {
                                if let Some(s) = enter {
                                    curr_state.entered_sections.push((*s, curr_state.length));
                                }
                                if let Some(s) = exit {
                                    if let Some(i) = curr_state.entered_sections.iter().position(|y| y.0 == *s) {
                                        let e = curr_state.entered_sections.remove(i);
                                        curr_state.exited_sections.push((e.0, e.1,
                                            curr_state.length, Some(curr_state.node)));
                                    } else {
                                        issues.push(ConvertRouteIssue::ExitedUnenteredSection(entry.entry, *s));
                                    }
                                }
                            },
                            _ => {} // sight, switch, sections, are not relevant
                        }
                    }

                    if is_exit { break; }

                    match model.nodes[curr_state.node].edges {
                        Edges::Nothing => { break; },
                        Edges::ModelBoundary => {
                            let exit = RouteEntryExit::Boundary(Some(curr_state.node));
                            match make_route(&config, &curr_state, entry.entry, exit) {
                                Ok(route) => routes.push((route, curr_state.edges_taken.clone())),
                                Err(err) => issues.push(err),
                            }
                            break;
                        },
                        Edges::Single(other, d) => {
                            // Trailing switches: look at the outgoing edges from opposite node.
                            match model.nodes[other].edges {
                                Edges::Switchable(sw) => {
                                    if let Some(StaticObject::Switch { left_link, right_link, .. }) = model.objects.get(sw) {
                                        let pos = if left_link.0 == curr_state.node { SwitchPosition::Left }
                                                  else if right_link.0 == curr_state.node { SwitchPosition::Right }
                                                  else {
                                                      return Err(ConvertRouteError::String(format!("Switch misconfigured {}", sw))); };
                                        curr_state.switches.push((sw, curr_state.length, pos));
                                    } else {
                                        return Err(ConvertRouteError::String(format!("Switch misconfigured {}", sw)));
                                    }
                                },
                                _ => {},
                            };
                            curr_state.edges_taken.push((curr_state.node, other));
                            curr_state.node = model.nodes[other].other_node;
                            curr_state.length += d;
                        },
                        Edges::Switchable(sw) => {
                            // Switch ahead of us: fork the search into both legs,
                            // unless the resulting switch-position set was seen before.
                            if let Some(StaticObject::Switch { left_link, right_link, .. }) = model.objects.get(sw) {
                                let mut right_state = curr_state.clone();
                                let mut left_state = curr_state;
                                right_state.edges_taken.push((right_state.node, right_link.0));
                                left_state.edges_taken.push((left_state.node, left_link.0));
                                right_state.node = model.nodes[right_link.0].other_node;
                                left_state.node = model.nodes[left_link.0].other_node;
                                right_state.switches.push((sw, right_state.length, SwitchPosition::Right));
                                left_state.switches.push((sw, left_state.length, SwitchPosition::Left));
                                right_state.length += right_link.1;
                                left_state.length += left_link.1;
                                if switches_path_visited.insert(
                                        right_state.switches.iter().map(|(sw, _l, pos)| (*sw, *pos)).collect()) {
                                    search_stack.push(right_state);
                                }
                                if switches_path_visited.insert(
                                        left_state.switches.iter().map(|(sw, _l, pos)| (*sw, *pos)).collect()) {
                                    search_stack.push(left_state);
                                }
                                break;
                            } else {
                                return Err(ConvertRouteError::String(format!("Switch misconfigured {}", sw)));
                            }
                        },
                    };
                }
            }
        }
    }

    if !(entry_visited.len() > 0) {
        issues.push(ConvertRouteIssue::NoBoundaries);
    }

    // TODO
    // /// // Remove release of resources that were not aquired
    // /// for r in &mut routes {
    // ///     let resources = r.sections.iter().chain(r.switches.iter().map(|&(ref sw,_)| sw)).collect::<Vec<_>>();
    // ///     for &mut (_,_,ref mut res) in &mut r.releases {
    // ///         res.retain(|x| resources.contains(&x));
    // ///     }
    // /// }

    Ok((routes, issues))
}
/// Converts a finished search path into a `Route`, or reports why it cannot
/// be one.
///
/// Paths shorter than `Config::route_minimum_length` are rejected. Sections
/// overlapped for no more than `Config::section_tolerance` are dropped. The
/// first remaining section becomes the trigger for a signal entry. Boundary
/// exits get an extra 1000.0 added to the route length and its final release.
pub fn make_route(config: &Config, state: &Path, entry: RouteEntryExit, exit: RouteEntryExit)
    -> Result<Route, ConvertRouteIssue> {
    if state.length < config.route_minimum_length {
        return Err(ConvertRouteIssue::RouteTooShort(entry, exit));
    }

    // All sections touched by the path: those already exited, plus the ones
    // still open at the end (closed off at the path's total length).
    let mut sections = state.exited_sections.clone();
    sections.extend(state.entered_sections.iter().map(|&(x, l)| (x, l, state.length, None)));
    sections.retain(|&mut (_, a, b, _)| b - a > config.section_tolerance);

    let trigger = sections.first();
    let entry = match (trigger, entry) {
        (Some((tvd, _, _, _)), RouteEntryExit::Signal(x)) =>
            RouteEntryExit::SignalTrigger { signal: x, trigger_section: *tvd },
        _ => entry,
    };

    let add_length = if let RouteEntryExit::Boundary(_) = &exit { 1000.0 }
                     else { 0.0 };

    let mut cleared_length = 0.0;
    let mut releases = sections.iter().map(|(trigger, start, end, node)| {
        // Releases must not overlap: clamp each start to what is already cleared.
        let start = if cleared_length > *start { cleared_length } else { *start };
        let length = *end - start;
        cleared_length += length;
        // A release frees its own section plus every switch passed inside it.
        let mut resources = vec![*trigger];
        for (sw, pos, _side) in &state.switches {
            if start <= *pos && pos < end {
                resources.push(*sw);
            }
        }
        Release {
            end_node: *node,
            trigger: *trigger,
            length: length,
            resources: resources.into(),
        }
    }).collect::<Vec<_>>();

    // Make the release lengths add up to exactly the route length.
    let sum_releases_length = releases.iter().map(|r| r.length).sum::<f64>();
    if releases.len() > 0 && sum_releases_length != state.length {
        releases.last_mut().unwrap().length += state.length - sum_releases_length;
    }
    if let Some(last_release) = releases.last_mut() { last_release.length += add_length; }

    Ok(Route {
        entry: entry,
        exit: exit,
        length: state.length + add_length,
        resources: RouteResources {
            sections: sections.into_iter().map(|(x, _, _, _)| x).collect(),
            switch_positions: state.switches.iter().map(|(x, _, s)| (*x, *s)).collect(),
            releases: releases.into(),
        },
        overlaps: SmallVec::new(),
        swinging_overlap: false,
    })
}
| true
|
ee256dd957654adbd115c3e26651db94002b8840
|
Rust
|
snoyberg/terra-rust
|
/terra-rust-api/src/messages/bank.rs
|
UTF-8
| 995
| 2.640625
| 3
|
[
"Apache-2.0"
] |
permissive
|
//use crate::client::client_types::terra_u64_format;
use crate::core_types::{Coin, MsgInternal};
use crate::messages::Message;
use serde::Serialize;
#[derive(Serialize, Debug)]
/// Message: Send N coins from an address to another
pub struct MsgSend {
    // Coins to transfer.
    pub amount: Vec<Coin>,
    // Bech32 sender account address.
    pub from_address: String,
    // Bech32 recipient account address.
    pub to_address: String,
}

// Marker impl: allows MsgSend to be boxed into a Message envelope.
impl MsgInternal for MsgSend {}
impl MsgSend {
    /// Convenience wrapper: transfer a single coin from `from_address` to
    /// `to_address`.
    pub fn create_single(from_address: String, to_address: String, amount: Coin) -> Message {
        Self::create(from_address, to_address, vec![amount])
    }

    /// Wrap a multi-coin transfer in a `Message` envelope tagged with the
    /// Terra type URL `bank/MsgSend`.
    pub fn create(from_address: String, to_address: String, amount: Vec<Coin>) -> Message {
        Message {
            s_type: String::from("bank/MsgSend"),
            value: Box::new(MsgSend {
                amount,
                from_address,
                to_address,
            }),
        }
    }
}
| true
|
ccd71c9ee331979e7f1583cbca98ab2a7ba9aa7d
|
Rust
|
aclysma/dxmath
|
/src/math.rs
|
UTF-8
| 2,332
| 2.78125
| 3
|
[
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use std::arch::x86_64::*;

/// Two-component float storage (plain, unaligned memory layout).
#[derive(Debug)]
pub struct Float2(pub [f32; 2]);

impl Float2 {
    fn x(&self) -> f32 { self.0[0] }
    fn y(&self) -> f32 { self.0[1] }
    fn x_mut(&mut self) -> &mut f32 { &mut self.0[0] }
    fn y_mut(&mut self) -> &mut f32 { &mut self.0[1] }
}

/// Three-component float storage (plain, unaligned memory layout).
#[derive(Debug)]
pub struct Float3(pub [f32; 3]);

impl Float3 {
    fn x(&self) -> f32 { self.0[0] }
    fn y(&self) -> f32 { self.0[1] }
    fn z(&self) -> f32 { self.0[2] }
    fn x_mut(&mut self) -> &mut f32 { &mut self.0[0] }
    fn y_mut(&mut self) -> &mut f32 { &mut self.0[1] }
    fn z_mut(&mut self) -> &mut f32 { &mut self.0[2] }
}

/// Four-component float storage (plain, unaligned memory layout).
#[derive(Debug)]
pub struct Float4(pub [f32; 4]);

impl Float4 {
    fn x(&self) -> f32 { self.0[0] }
    fn y(&self) -> f32 { self.0[1] }
    fn z(&self) -> f32 { self.0[2] }
    fn w(&self) -> f32 { self.0[3] }
    fn x_mut(&mut self) -> &mut f32 { &mut self.0[0] }
    fn y_mut(&mut self) -> &mut f32 { &mut self.0[1] }
    fn z_mut(&mut self) -> &mut f32 { &mut self.0[2] }
    fn w_mut(&mut self) -> &mut f32 { &mut self.0[3] }
}

/// Work vector backed by a plain float array (scalar fallback layout).
#[derive(Debug)]
#[allow(non_camel_case_types)]
pub struct Vector_f32(pub [f32; 4]);

/// Work vector backed by an SSE `__m128` register.
#[derive(Debug)]
#[allow(non_camel_case_types)]
pub struct Vector_m128(pub __m128);

/// 4x4 matrix backed by plain float arrays (scalar fallback layout).
#[derive(Debug)]
#[allow(non_camel_case_types)]
pub struct Matrix_f32(pub [[f32; 4]; 4]);

/// 4x4 matrix backed by four SSE registers.
#[derive(Debug)]
#[allow(non_camel_case_types)]
pub struct Matrix_m128(pub [Vector_m128; 4]);

// The SSE-backed representations are the default `Vector`/`Matrix` types.
pub use Vector_m128 as Vector;
pub use Matrix_m128 as Matrix;

/// Load a `Float2` into the low two lanes of a SIMD register; the upper two
/// lanes are zeroed.
///
/// Fixed: the previous `_mm_load_ps` read 8 bytes past the end of the
/// two-element array and required 16-byte alignment that `Float2` does not
/// guarantee (undefined behavior on both counts).
#[inline(always)]
pub fn load_float2(src: &Float2) -> Vector {
    // `_mm_set_ps` takes lanes in (w, z, y, x) order.
    unsafe { Vector(_mm_set_ps(0.0, 0.0, src.0[1], src.0[0])) }
}

/// Load a `Float3` into the low three lanes; the top lane is zeroed.
///
/// Fixed: the previous `_mm_load_ps` read 4 bytes past the end of the array
/// and assumed 16-byte alignment.
#[inline(always)]
pub fn load_float3(src: &Float3) -> Vector {
    unsafe { Vector(_mm_set_ps(0.0, src.0[2], src.0[1], src.0[0])) }
}

/// Load all four lanes from a `Float4`.
///
/// Fixed: uses the unaligned `_mm_loadu_ps` -- `Float4` is only 4-byte
/// aligned, so the aligned `_mm_load_ps` could fault.
#[inline(always)]
pub fn load_float4(src: &Float4) -> Vector {
    unsafe { Vector(_mm_loadu_ps(src.0.as_ptr())) }
}

/// Store the low two lanes of `src` into a `Float2`.
///
/// Fixed: the previous `_mm_storeu_ps` wrote 16 bytes into an 8-byte
/// destination, clobbering adjacent memory.
#[inline(always)]
pub fn store_float2(dst: &mut Float2, src: &Vector) {
    // Spill all four lanes to a scratch array, then copy only the two that exist.
    let mut lanes = [0.0f32; 4];
    unsafe { _mm_storeu_ps(lanes.as_mut_ptr(), src.0) };
    dst.0.copy_from_slice(&lanes[..2]);
}

/// Store the low three lanes of `src` into a `Float3`.
///
/// Fixed: the previous `_mm_storeu_ps` wrote 16 bytes into a 12-byte
/// destination, clobbering adjacent memory.
#[inline(always)]
pub fn store_float3(dst: &mut Float3, src: &Vector) {
    let mut lanes = [0.0f32; 4];
    unsafe { _mm_storeu_ps(lanes.as_mut_ptr(), src.0) };
    dst.0.copy_from_slice(&lanes[..3]);
}

/// Store all four lanes of `src` into a `Float4` (unaligned store).
#[inline(always)]
pub fn store_float4(dst: &mut Float4, src: &Vector) {
    unsafe { _mm_storeu_ps(dst.0.as_mut_ptr(), src.0) }
}

/// 2D dot product: multiplies lanes 0 and 1 (mask high nibble 0x3) and
/// broadcasts the sum into all four lanes (mask low nibble 0xf).
/// NOTE(review): `_mm_dp_ps` requires SSE4.1 support at runtime.
pub fn vector2_dot(v1: &Vector, v2: &Vector) -> Vector {
    Vector(unsafe { _mm_dp_ps(v1.0, v2.0, 0x3f) })
}
| true
|
ddb94e0ad895718973daf569f1b7fbf40bc1267d
|
Rust
|
jonmsawyer/uva-online-judge
|
/Problem Set Volumes (100...1999)/Volume 1 (100-199)/103 - Stacking Boxes/rust/src/boxes.rs
|
UTF-8
| 4,244
| 3.5
| 4
|
[
"MIT"
] |
permissive
|
//! `rust` crate.
//!
//! Author: Jonathan Sawyer <jonmsawyer[at]gmail.com>
//!
//! Date: 2020-06-19
//!
//! This crate implements one solution to the [Stacking Boxes problem],
//! which calculates the longest string of stacking boxes.
//!
//! [Stacking Boxes problem]: https://onlinejudge.org/index.php?option=com_onlinejudge&Itemid=8&category=3&page=show_problem&problem=39
use std::cmp::Ordering;
/// A box described by its dimensions. One box is `Less` than another when it
/// nests strictly inside it (every dimension strictly smaller), `Greater`
/// when every dimension is strictly larger, and `Equal` otherwise (neither
/// fits inside the other).
#[derive(Debug, Eq)]
pub struct Box_ {
    // The box's dimensions, one entry per dimension.
    pub box_: Vec<usize>,
}

impl PartialOrd for Box_ {
    fn partial_cmp(&self, other: &Box_) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for Box_ {
    fn cmp(&self, other: &Box_) -> Ordering {
        let self_box_len = self.box_.len();
        let other_box_len = other.box_.len();
        // Boxes of different dimensionality fall back to lexicographic
        // comparison; for unequal lengths this never yields `Equal`.
        if self_box_len != other_box_len {
            return self.box_.cmp(&other.box_);
        }
        // `is_less` survives only if every dimension is strictly smaller;
        // `is_greater` only if every dimension is strictly larger.
        let mut is_less = true;
        let mut is_greater = true;
        for i in 0..self_box_len {
            if self.box_[i] >= other.box_[i] {
                is_less = false;
            }
            if self.box_[i] <= other.box_[i] {
                is_greater = false;
            }
        }
        if is_less {
            return Ordering::Less;
        }
        if is_greater {
            return Ordering::Greater;
        }
        Ordering::Equal
    }
}

impl PartialEq for Box_ {
    /// Two boxes are "equal" when neither strictly nests inside the other.
    ///
    /// Delegates to `cmp` instead of repeating the dimension-by-dimension
    /// scan: the original carried a second, hand-copied version of the same
    /// loop here, and `eq` is exactly `cmp(...) == Equal` (for unequal
    /// lengths `cmp` falls back to lexicographic order, which can never be
    /// `Equal`, matching the original's `false`).
    fn eq(&self, other: &Box_) -> bool {
        self.cmp(other) == Ordering::Equal
    }
}
/// A problem instance: a collection of boxes of a fixed dimensionality.
/// NOTE(review): the fields are not read anywhere in this file -- presumably
/// used by parsing/solving code elsewhere; confirm before removing.
#[derive(Debug, PartialEq)]
pub struct Boxes {
    // Number of boxes in the instance.
    num: usize,
    // Dimensionality shared by all boxes.
    dimensions: usize,
    boxes: Vec<Box_>,
}
#[cfg(test)]
mod test {
    use super::*;

    // Strictly smaller in every dimension => nests inside => `Less`.
    #[test]
    fn box_is_less_than_box() {
        let box1 = Box_ {
            box_: vec![1, 2, 3, 4, 5],
        };
        let box2 = Box_ {
            box_: vec![2, 3, 4, 5, 6],
        };
        assert!(box1 < box2);
        assert!(box2 > box1);
    }

    // Strictly larger in every dimension => `Greater`.
    #[test]
    fn box_is_greater_than_box() {
        let box1 = Box_ {
            box_: vec![16, 39, 10],
        };
        let box2 = Box_ {
            box_: vec![1, 3, 9],
        };
        assert!(box1 > box2);
        assert!(box2 < box1);
    }

    #[test]
    fn box_is_equal_to_box() {
        let box1 = Box_ {
            box_: vec![1, 2, 3],
        };
        let box2 = Box_ {
            box_: vec![1, 2, 3],
        };
        assert_eq!(box1, box2);
        let box1 = Box_ {
            box_: vec![1, 2, 3],
        };
        let box2 = Box_ {
            box_: vec![1, 1, 1],
        };
        // If a box can't exactly fit inside another box
        // (dimensions need to be strictly less) then it is
        // considered equal in this context.
        assert_eq!(box1, box2);
    }

    // Different dimensionality falls back to lexicographic vector order.
    #[test]
    fn box_diff_dimensions() {
        let box1 = Box_ {
            box_: vec![2, 3],
        };
        let box2 = Box_ {
            box_: vec![1, 2, 3],
        };
        assert!(box1 != box2);
        assert!(box1 > box2); // Undefined behavior. Should it pass this test?
        let box1 = Box_ {
            box_: vec![2, 3],
        };
        let box2 = Box_ {
            box_: vec![2, 3, 1],
        };
        assert!(box1 != box2);
        assert!(box1 < box2); // Undefined behavior. Should it pass this test?
        let box1 = Box_ {
            box_: vec![2, 3],
        };
        let box2 = Box_ {
            box_: vec![2, 3, 3],
        };
        assert!(box1 != box2);
    }
}
| true
|
49f22ab6570849843158f9b0b3f23808d947c9f6
|
Rust
|
DarthStrom/rust-ray-tracer-challenge
|
/src/patterns/gradient.rs
|
UTF-8
| 2,038
| 3.484375
| 3
|
[] |
no_license
|
use std::any::Any;
use crate::{
color::{self, Color},
transformations::Transform,
tuple::Tuple,
};
use super::{BoxPattern, Pattern, PatternBuilder};
/// A pattern that blends linearly from color `a` to color `b` along the
/// pattern-space x axis, using only the fractional part of x.
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct Gradient {
    // Color at the start of each unit interval (x.fract() == 0).
    pub a: Color,
    // Color approached at the end of each unit interval.
    pub b: Color,
    // Object-to-pattern space transform.
    pub transform: Transform,
}
impl Gradient {
pub fn new(a: Color, b: Color) -> Self {
Self {
a,
b,
transform: Transform::default(),
}
}
}
impl PatternBuilder for Gradient {
    /// Return a copy of this gradient that uses the given transform.
    fn with_transform(self, transform: Transform) -> Self {
        Self {
            a: self.a,
            b: self.b,
            transform,
        }
    }
}
impl Default for Gradient {
fn default() -> Self {
Self {
a: color::WHITE,
b: color::BLACK,
transform: Transform::default(),
}
}
}
impl Pattern for Gradient {
    fn box_clone(&self) -> BoxPattern {
        Box::new(*self)
    }

    // Dynamic equality: only another `Gradient` can compare equal.
    fn box_eq(&self, other: &dyn Any) -> bool {
        match other.downcast_ref::<Self>() {
            Some(gradient) => gradient == self,
            None => false,
        }
    }

    fn as_any(&self) -> &dyn Any {
        self
    }

    fn transform(&self) -> &Transform {
        &self.transform
    }

    /// Blend linearly from `a` to `b` using the fractional part of `x`,
    /// so the gradient repeats on every unit interval.
    fn pattern_at(&self, point: Tuple) -> Color {
        let fraction = point.x() - point.x().floor();
        self.a + (self.b - self.a) * fraction
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Samples along x in [0, 1) should blend linearly from `a` toward `b`.
    #[test]
    fn a_gradient_linearly_interpolates_between_colors() {
        let pattern = Gradient::new(color::WHITE, color::BLACK);
        assert_eq!(
            pattern.pattern_at(Tuple::point(0.0, 0.0, 0.0)),
            color::WHITE
        );
        assert_eq!(
            pattern.pattern_at(Tuple::point(0.25, 0.0, 0.0)),
            Color::new(0.75, 0.75, 0.75)
        );
        assert_eq!(
            pattern.pattern_at(Tuple::point(0.5, 0.0, 0.0)),
            Color::new(0.5, 0.5, 0.5)
        );
        assert_eq!(
            pattern.pattern_at(Tuple::point(0.75, 0.0, 0.0)),
            Color::new(0.25, 0.25, 0.25)
        );
    }
}
| true
|
d813e0a437a31d79aa3380946cf17c8f9afff6c1
|
Rust
|
Michael-F-Bryan/ffi_helpers
|
/src/error_handling.rs
|
UTF-8
| 9,789
| 3.671875
| 4
|
[
"MIT"
] |
permissive
|
//! Common error handling routines.
//!
//! The main error handling method employed is a thread-local variable called
//! `LAST_ERROR` which holds the most recent error as well as some convenience
//! functions for getting/clearing this variable.
//!
//! The theory is if a function fails then it should return an *"obviously
//! invalid"* value (typically `-1` or `0` when returning integers or `NULL` for
//! pointers, see the [`Nullable`] trait for more). The user can then check for
//! this and consult the most recent error for more information. Of course that
//! means all fallible operations *must* update the most recent error if they
//! fail and that you *must* check the returned value of any fallible operation.
//!
//! While it isn't as elegant as Rust's monad-style `Result<T, E>` with `?` and
//! the various combinators, it actually turns out to be a pretty robust error
//! handling technique in practice.
//!
//! > **Note:** It is highly recommended to have a skim through libgit2's
//! > [error handling docs][libgit2]. The error handling mechanism used here
//! > takes a lot of inspiration from `libgit2`.
//!
//! ## Examples
//!
//! The following shows a full example where a fallible FFI call reports
//! failure and the caller retrieves and decodes the corresponding error
//! message.
//!
//! ```rust
//! use libc::{c_char, c_int};
//! use std::slice;
//! use ffi_helpers::error_handling;
//! # use anyhow::Error;
//!
//! fn main() {
//! if unsafe { some_fallible_operation() } != 1 {
//! // Before we can retrieve the message we need to know how long it is.
//! let err_msg_length = error_handling::last_error_length();
//!
//! // then allocate a big enough buffer
//! let mut buffer = vec![0; err_msg_length as usize];
//! let bytes_written = unsafe {
//! let buf = buffer.as_mut_ptr() as *mut c_char;
//! let len = buffer.len() as c_int;
//! error_handling::error_message_utf8(buf, len)
//! };
//!
//! // then interpret the message
//! match bytes_written {
//! -1 => panic!("Our buffer wasn't big enough!"),
//! 0 => panic!("There wasn't an error message... Huh?"),
//! len if len > 0 => {
//! buffer.truncate(len as usize - 1);
//! let msg = String::from_utf8(buffer).unwrap();
//! println!("Error: {}", msg);
//! }
//! _ => unreachable!(),
//! }
//! }
//! }
//!
//! /// pretend to do some complicated operation, returning whether the
//! /// operation was successful.
//! #[no_mangle]
//! unsafe extern "C" fn some_fallible_operation() -> c_int {
//! match do_stuff() {
//! Ok(_) => 1, // do_stuff() always errors, so this is unreachable
//! Err(e) => {
//! ffi_helpers::update_last_error(e);
//! 0
//! }
//! }
//! }
//!
//! # fn do_stuff() -> Result<(), Error> { Err(anyhow::anyhow!("An error occurred")) }
//! ```
//!
//! [`Nullable`]: trait.Nullable.html
//! [libgit2]: https://github.com/libgit2/libgit2/blob/master/docs/error-handling.md
use anyhow::Error;
use libc::{c_char, c_int};
use std::{cell::RefCell, slice};
use crate::nullable::Nullable;
// Per-thread storage for the most recent error; `None` when no error is
// pending. Thread-local, so concurrent FFI calls cannot clobber each other.
thread_local! {
    static LAST_ERROR: RefCell<Option<Error>> = RefCell::new(None);
}
/// Clear the `LAST_ERROR`.
// `extern "C"` so it can be re-exported verbatim via `export_c_symbol!`.
pub extern "C" fn clear_last_error() { let _ = take_last_error(); }
/// Take the most recent error, clearing `LAST_ERROR` in the process.
///
/// Returns `None` when no error is pending.
pub fn take_last_error() -> Option<Error> {
    LAST_ERROR.with(|prev| prev.borrow_mut().take())
}
/// Update the `thread_local` error, taking ownership of the `Error`.
///
/// Any previously stored error is silently replaced.
pub fn update_last_error<E: Into<Error>>(err: E) {
    LAST_ERROR.with(|prev| *prev.borrow_mut() = Some(err.into()));
}
/// Get the length of the last error message in bytes when encoded as UTF-8,
/// including the trailing null. Returns `0` when no error is pending.
pub fn last_error_length() -> c_int {
    LAST_ERROR.with(|prev| {
        prev.borrow()
            .as_ref()
            // `{:#}` (alternate Display) matches the formatting used when the
            // message is later copied out, so the lengths agree.
            .map(|e| format!("{:#}", e).len() + 1)
            .unwrap_or(0)
    }) as c_int
}
/// Get the length of the last error message in bytes when encoded as UTF-16,
/// including the trailing null. Returns `0` when no error is pending.
/// NOTE(review): despite the doc, this counts UTF-16 *code units*, not bytes;
/// `error_message_utf16` is the function that doubles the count into bytes.
pub fn last_error_length_utf16() -> c_int {
    LAST_ERROR.with(|prev| {
        prev.borrow()
            .as_ref()
            .map(|e| format!("{:#}", e).encode_utf16().count() + 1)
            .unwrap_or(0)
    }) as c_int
}
/// Peek at the most recent error and get its error message as a Rust `String`.
///
/// Does not clear the stored error; returns `None` when nothing is pending.
pub fn error_message() -> Option<String> {
    LAST_ERROR.with(|prev| prev.borrow().as_ref().map(|e| format!("{:#}", e)))
}
/// Peek at the most recent error and write its error message (`Display` impl)
/// into the provided buffer as a UTF-8 encoded string.
///
/// This returns the number of bytes written, `0` when no error is pending,
/// or `-1` if the buffer is null, too small, or `length` is negative.
///
/// # Safety
///
/// `buf` must be valid for writes of `length` bytes.
pub unsafe fn error_message_utf8(buf: *mut c_char, length: c_int) -> c_int {
    crate::null_pointer_check!(buf);
    // A negative length would wrap to a huge `usize` in `from_raw_parts_mut`,
    // which is undefined behavior; reject it like any other bad buffer.
    if length < 0 {
        return -1;
    }
    let buffer = slice::from_raw_parts_mut(buf as *mut u8, length as usize);
    copy_error_into_buffer(buffer, |msg| msg.into())
}
/// Peek at the most recent error and write its error message (`Display` impl)
/// into the provided buffer as a UTF-16 encoded string.
///
/// This returns the number of bytes written, `0` when no error is pending,
/// or `-1` if the buffer is null, too small, or `length` is negative.
///
/// # Safety
///
/// `buf` must be valid for writes of `length` `u16` code units.
pub unsafe fn error_message_utf16(buf: *mut u16, length: c_int) -> c_int {
    crate::null_pointer_check!(buf);
    // Guard against a negative length wrapping to a huge `usize` below (UB).
    if length < 0 {
        return -1;
    }
    let buffer = slice::from_raw_parts_mut(buf, length as usize);
    let ret =
        copy_error_into_buffer(buffer, |msg| msg.encode_utf16().collect());
    if ret > 0 {
        // utf16 uses two bytes per character
        ret * 2
    } else {
        ret
    }
}
/// Copy the pending error message (if any) into `buffer`, encoding it with
/// `error_msg` and appending a trailing null element.
///
/// Returns the number of elements written (including the null), `0` when no
/// error is pending, or `-1` when the buffer is too small.
fn copy_error_into_buffer<B, F>(buffer: &mut [B], error_msg: F) -> c_int
where
    F: FnOnce(String) -> Vec<B>,
    B: Copy + Nullable,
{
    let maybe_error_message: Option<Vec<B>> =
        error_message().map(|msg| error_msg(msg));
    let err_msg = match maybe_error_message {
        Some(msg) => msg,
        None => return 0,
    };
    if err_msg.len() + 1 > buffer.len() {
        // buffer isn't big enough
        return -1;
    }
    buffer[..err_msg.len()].copy_from_slice(&err_msg);
    // Make sure to add a trailing null in case people use this as a bare char*
    buffer[err_msg.len()] = B::NULL;
    (err_msg.len() + 1) as c_int
}
#[doc(hidden)]
#[macro_export]
macro_rules! export_c_symbol {
    // Re-export one of this crate's error-handling functions as a
    // `#[no_mangle] extern "C"` symbol with the same name and signature.
    (fn $name:ident($( $arg:ident : $type:ty ),*) -> $ret:ty) => {
        #[no_mangle]
        pub unsafe extern "C" fn $name($( $arg : $type),*) -> $ret {
            $crate::error_handling::$name($( $arg ),*)
        }
    };
    // Unit-returning shorthand delegates to the arm above.
    (fn $name:ident($( $arg:ident : $type:ty ),*)) => {
        export_c_symbol!(fn $name($( $arg : $type),*) -> ());
    }
}
/// As a workaround for [rust-lang/rfcs#2771][2771], you can use this macro to
/// make sure the symbols for `ffi_helpers`'s error handling are correctly
/// exported in your `cdylib`.
///
/// [2771]: https://github.com/rust-lang/rfcs/issues/2771
#[macro_export]
macro_rules! export_error_handling_functions {
    () => {
        // Wrapped in a hidden module so the generated `extern "C"` shims do
        // not pollute the caller's namespace.
        #[allow(missing_docs)]
        #[doc(hidden)]
        pub mod __ffi_helpers_errors {
            export_c_symbol!(fn clear_last_error());
            export_c_symbol!(fn last_error_length() -> ::libc::c_int);
            export_c_symbol!(fn last_error_length_utf16() -> ::libc::c_int);
            export_c_symbol!(fn error_message_utf8(buf: *mut ::libc::c_char, length: ::libc::c_int) -> ::libc::c_int);
            export_c_symbol!(fn error_message_utf16(buf: *mut u16, length: ::libc::c_int) -> ::libc::c_int);
        }
    };
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::str;

    // Test-local reset; shadows the public `extern "C"` wrapper of the same
    // name so tests manipulate the thread-local directly.
    fn clear_last_error() {
        let _ = LAST_ERROR.with(|e| e.borrow_mut().take());
    }

    #[test]
    fn update_the_error() {
        clear_last_error();
        let err_msg = "An Error Occurred";
        let e = anyhow::anyhow!(err_msg);
        update_last_error(e);
        let got_err_msg = LAST_ERROR
            .with(|e| format!("{:#}", e.borrow_mut().take().unwrap()));
        assert_eq!(got_err_msg, err_msg);
    }

    #[test]
    fn take_the_last_error() {
        clear_last_error();
        let err_msg = "An Error Occurred";
        let e = anyhow::anyhow!(err_msg);
        update_last_error(e);
        let got_err_msg = format!("{:#}", take_last_error().unwrap());
        assert_eq!(got_err_msg, err_msg);
    }

    #[test]
    fn get_the_last_error_messages_length() {
        clear_last_error();
        // `{:#}` renders the context chain as "Some Context: An Error Occurred".
        let err_msg = "Some Context: An Error Occurred";
        let should_be = err_msg.len() + 1;
        let e = anyhow::anyhow!("An Error Occurred").context("Some Context");
        update_last_error(e);
        // Get a valid error message's length
        let got = last_error_length();
        assert_eq!(got, should_be as _);
        // Then clear the error message and make sure we get 0
        clear_last_error();
        let got = last_error_length();
        assert_eq!(got, 0);
    }

    #[test]
    fn write_the_last_error_message_into_a_buffer() {
        clear_last_error();
        let expected = "Some Context: An Error Occurred";
        let e = anyhow::anyhow!("An Error Occurred").context("Some Context");
        update_last_error(e);
        let mut buffer: Vec<u8> = vec![0; 40];
        let bytes_written = unsafe {
            error_message_utf8(
                buffer.as_mut_ptr() as *mut c_char,
                buffer.len() as _,
            )
        };
        assert!(bytes_written > 0);
        // +1 accounts for the trailing null the copy appends.
        assert_eq!(bytes_written as usize, expected.len() + 1);
        let msg =
            str::from_utf8(&buffer[..bytes_written as usize - 1]).unwrap();
        assert_eq!(msg, expected);
    }
}
| true
|
2c809e6c2356d6efdea581ad37285c720f721aec
|
Rust
|
ian-wazowski/FIXEngine-rs
|
/src/messages/mod.rs
|
UTF-8
| 5,029
| 3.171875
| 3
|
[
"Apache-2.0"
] |
permissive
|
use fields::{FIXMessageField, FIXMessageFields};
use std::any::Any;
use std::collections::HashMap;
// Marker trait: a FIX message is anything exposing field access.
pub trait FIXMessage : FIXMessageFields {}
/// An immutable FIX message: values in arrival order, plus an index from tag
/// number to the positions of that tag's occurrences in `field_values`.
pub struct RawFIXMessage {
    field_values : Vec<Box<Any>>,
    field_indices : HashMap<u32, Vec<usize>>
}
impl FIXMessageFields for RawFIXMessage {
    // Typed lookup of the first occurrence of `field`.
    fn get_value<T : Any, F : FIXMessageField<T>>(&self, field : &F) -> Result<Option<&T>, ()> {
        return self.get_value_at(field, 0);
    }

    // Typed lookup of the `index`-th occurrence of `field`; a stored value of
    // the wrong runtime type is reported as `Err(())`.
    fn get_value_at<T : Any, F : FIXMessageField<T>>(&self, field : &F, index : usize) -> Result<Option<&T>, ()> {
        return self.get_tag_value_at(field.get_tag_num(), index).and_then(|option|
            match option {
                Some(value) => {
                    match value.downcast_ref::<T>() {
                        Some(value) => Ok(Some(value)),
                        None => Err(())
                    }
                },
                None => Ok(None)
            });
    }

    // Untyped lookup of the first occurrence of `tag_num`.
    fn get_tag_value(&self, tag_num : u32) -> Result<Option<&Any>, ()> {
        return self.get_tag_value_at(tag_num, 0);
    }

    // Untyped lookup of the `index`-th occurrence of `tag_num`.
    // NOTE(review): a missing tag or out-of-range index yields `Err(())`,
    // never `Ok(None)` -- confirm the `Option` in the signature is intended.
    fn get_tag_value_at(&self, tag_num : u32, index : usize) -> Result<Option<&Any>, ()> {
        return self.field_indices.get(&tag_num).map_or(Err(()), |field_indices|
            field_indices.get(index).map_or(Err(()), |field_index|
                self.field_values.get(*field_index).map_or(Err(()), |value| Ok(Some(&**value)))));
    }
}

impl FIXMessage for RawFIXMessage {}
/// Builder for a `RawFIXMessage`; mirrors its layout and is frozen into a
/// message with `into_message`.
pub struct RawFIXMessageFields {
    field_values : Vec<Box<Any>>,
    field_indices : HashMap<u32, Vec<usize>>
}
impl RawFIXMessageFields {
    /// Create an empty builder.
    pub fn new() -> Self {
        RawFIXMessageFields { field_values: Vec::new(), field_indices: HashMap::new() }
    }

    /// Append a value for a typed field. Repeated fields keep insertion order.
    /// (`self` no longer needs `mut` here -- it is merely moved into
    /// `add_tag_value`.)
    pub fn add_value<T : Any, F : FIXMessageField<T>>(self, field : &F, value : T) -> Self {
        self.add_tag_value(field.get_tag_num(), value)
    }

    /// Append a value for a raw tag number.
    pub fn add_tag_value<T : Any>(mut self, tag_num : u32, value : T) -> Self {
        // Record where the value will land before pushing it; computing the
        // index first also avoids the borrow-scoping block the original needed.
        let index = self.field_values.len();
        self.field_indices.entry(tag_num).or_insert_with(Vec::new).push(index);
        self.field_values.push(Box::new(value));
        self
    }

    /// Freeze the builder into an immutable message.
    pub fn into_message(self) -> RawFIXMessage {
        RawFIXMessage { field_values: self.field_values, field_indices: self.field_indices }
    }
}
#[cfg(test)]
mod tests {
    use super::{RawFIXMessageFields, RawFIXMessage, FIXMessage};
    use fix::fields::{FIXMessageField, StringField};
    use std;
    use std::any::Any;

    // Interleaves three repeated fields and checks each occurrence is
    // retrievable by (field, index) in insertion order.
    #[test]
    fn test_raw_fix_message() {
        let field1 : StringField = StringField(1, "Field1");
        let field2 : StringField = StringField(2, "Field2");
        let field3 : StringField = StringField(3, "Field3");
        let fields : RawFIXMessageFields = RawFIXMessageFields::new()
            .add_value(&field1, String::from("abc1"))
            .add_value(&field2, String::from("def2"))
            .add_value(&field3, String::from("ghi3"))
            .add_value(&field2, String::from("jkl4"))
            .add_value(&field3, String::from("mno5"))
            .add_value(&field1, String::from("pqr6"))
            .add_value(&field3, String::from("stu7"))
            .add_value(&field1, String::from("vwx8"))
            .add_value(&field2, String::from("yz90"));
        let message : RawFIXMessage = fields.into_message();
        assert_field_value(&message, &field1, String::from("abc1"));
        assert_field_value_at(&message, &field1, 1, String::from("pqr6"));
        assert_field_value_at(&message, &field1, 2, String::from("vwx8"));
        assert_field_value(&message, &field2, String::from("def2"));
        assert_field_value_at(&message, &field2, 1, String::from("jkl4"));
        assert_field_value_at(&message, &field2, 2, String::from("yz90"));
        assert_field_value(&message, &field3, String::from("ghi3"));
        assert_field_value_at(&message, &field3, 1, String::from("mno5"));
        assert_field_value_at(&message, &field3, 2, String::from("stu7"));
    }

    // Helper: first occurrence must equal `expected`.
    fn assert_field_value<M : FIXMessage, T : Any + std::fmt::Debug + std::cmp::Eq, F : FIXMessageField<T>>(message : &M, field : &F, expected : T) {
        assert_field_result_eq(expected, message.get_value(field));
    }

    // Helper: `index`-th occurrence must equal `expected`.
    fn assert_field_value_at<M : FIXMessage, T : Any + std::fmt::Debug + std::cmp::Eq, F : FIXMessageField<T>>(message : &M, field : &F, index : usize, expected : T) {
        assert_field_result_eq(expected, message.get_value_at(field, index));
    }

    // Helper: unwraps the Ok(Some(_)) layers and compares values.
    fn assert_field_result_eq<T : Any + std::fmt::Debug + std::cmp::Eq>(expected : T, result : Result<Option<&T>, ()>) {
        assert!(result.is_ok());
        let option = result.unwrap();
        assert!(option.is_some());
        let actual = option.unwrap();
        assert_eq!(expected, *actual);
    }
}
| true
|
d1e78bf7f6379f0c53ff2abcd58f7d9480f87feb
|
Rust
|
Wattyyy/LeetCode
|
/submissions/rotting-oranges/solution.rs
|
UTF-8
| 1,499
| 2.859375
| 3
|
[
"MIT"
] |
permissive
|
use std::cmp::max;
use std::collections::VecDeque;
impl Solution {
    /// Multi-source BFS: every initially rotten orange enters the queue at
    /// minute 0, and each BFS layer rots its fresh 4-neighbours one minute
    /// later. Returns the minutes until no fresh orange remains, or -1 if
    /// some orange can never rot.
    pub fn oranges_rotting(grid: Vec<Vec<i32>>) -> i32 {
        let rows = grid.len();
        let cols = grid[0].len();
        let mut state = grid.clone();
        // Seed the frontier with every rotten orange (value 2) at minute 0.
        let mut frontier: VecDeque<(usize, usize, i32)> = VecDeque::new();
        for (r, row) in grid.iter().enumerate() {
            for (c, &cell) in row.iter().enumerate() {
                if cell == 2 {
                    frontier.push_back((r, c, 0));
                }
            }
        }
        let mut minutes = 0;
        while let Some((r, c, minute)) = frontier.pop_front() {
            minutes = max(minutes, minute);
            // Mark fresh neighbours rotten when enqueuing so each cell is
            // visited at most once.
            if 0 < c && state[r][c - 1] == 1 {
                state[r][c - 1] = 2;
                frontier.push_back((r, c - 1, minute + 1));
            }
            if c + 1 < cols && state[r][c + 1] == 1 {
                state[r][c + 1] = 2;
                frontier.push_back((r, c + 1, minute + 1));
            }
            if 0 < r && state[r - 1][c] == 1 {
                state[r - 1][c] = 2;
                frontier.push_back((r - 1, c, minute + 1));
            }
            if r + 1 < rows && state[r + 1][c] == 1 {
                state[r + 1][c] = 2;
                frontier.push_back((r + 1, c, minute + 1));
            }
        }
        // Any orange still fresh was unreachable from every rotten source.
        if state.iter().any(|row| row.contains(&1)) {
            return -1;
        }
        minutes
    }
}
| true
|
a4074b1b575cc74c54b6765e212d8ad39e486775
|
Rust
|
JP-Ellis/coding-problems
|
/src/daily_coding_problem/p051.rs
|
UTF-8
| 1,712
| 3.234375
| 3
|
[] |
no_license
|
use crate::{Error, Problem};
use rand::prelude::*;
use std::io::prelude::*;
// Solver type for Daily Coding Problem 51 (uniform in-place shuffle).
pub struct P;

// The problem statement, shown verbatim to the user.
const STATEMENT: &str = r#"Given a function that generates perfectly random
numbers between 1 and k (inclusive), where k is an input, write a function that
shuffles a deck of cards represented as an array using only swaps.
It should run in O(N) time.
Hint: Make sure each one of the 52! permutations of the deck is equally
likely."#;
/// Fisher-Yates shuffle: swap each element with a uniformly chosen element at
/// or before it, producing each of the n! permutations with equal probability.
fn shuffle<T>(l: &mut [T]) {
    let n = l.len();
    let mut rng = thread_rng();
    for i in (1..n).rev() {
        // The upper bound must include `i` itself. The previous
        // `gen_range(0, i)` (half-open, excludes `i`) never let an element
        // stay in place -- that is Sattolo's algorithm, which only produces
        // cyclic permutations, so the deck was NOT uniformly shuffled (the
        // identity permutation was impossible, contradicting the hint).
        let idx = rng.gen_range(0, i + 1);
        l.swap(i, idx);
    }
}
impl Problem for P {
    fn name(&self) -> &str {
        "Daily Coding Problem 51"
    }

    fn statement(&self) -> &str {
        STATEMENT
    }

    // Statistical self-check: shuffle a 52-card deck many times and verify
    // the sum of card values landing at each position stays within the
    // configured band around the expected mean.
    fn solve(&self, _out: &mut dyn Write) -> Result<(), Error> {
        // sums[i] accumulates the card values observed at position i.
        let mut sums: Vec<usize> = (0..52).map(|_| 0).collect();
        let deck: Vec<_> = (0..52).collect();
        let shuffles = 10_000;
        for _ in 0..shuffles {
            let mut new = deck.clone();
            shuffle(&mut new);
            for (i, &card) in new.iter().enumerate() {
                sums[i] += card;
            }
        }
        // Expected per-position sum: mean card value 25.5 * 10_000 = 255_000.
        // NOTE(review): 3002 appears to be roughly twice the standard
        // deviation of one position sum (~1_500) -- confirm the intended bound.
        let mean = 255000;
        let stdev = 3002;
        for sum in sums {
            if !(mean - 3 * stdev < sum && sum < mean + 3 * stdev) {
                Err(format!(
                    "Result {} is not within 3σ of {} (3σ range: {} - {})",
                    sum,
                    mean,
                    mean - 3 * stdev,
                    mean + 3 * stdev
                ))?
            }
        }
        Ok(())
    }
}
// Placeholder for nightly-only benchmarks; none are defined yet.
#[cfg(all(test, feature = "nightly"))]
mod benches {
    use test::Bencher;
}
| true
|
bad5a3f19f294071e3cf88e28ea4af879efda65c
|
Rust
|
FFY00/delsum
|
/delsum-lib/src/lib.rs
|
UTF-8
| 10,627
| 3.09375
| 3
|
[
"MIT"
] |
permissive
|
mod bitnum;
pub mod checksum;
pub(crate) mod factor;
mod keyval;
use bitnum::BitNum;
use checksum::{
crc::{CRCBuilder, CRC},
fletcher::{Fletcher, FletcherBuilder},
modsum::{ModSum, ModSumBuilder},
Digest, LinearCheck, RangePairs, Relativity, SumStr,
};
use checksum::{CheckBuilderErr, CheckReverserError};
#[cfg(feature = "parallel")]
use rayon::prelude::*;
use std::str::FromStr;
#[cfg(test)]
#[macro_use(quickcheck)]
extern crate quickcheck_macros;
/// For figuring out what type of integer to use, we need to parse the width from the
/// model string, but to parse the model string, we need to know the integer type,
/// so it is done here separately.
/// We also need the prefix to find out what algorithm to use
///
/// Returns `(algorithm prefix, width, remaining key=value text)`.
fn find_prefix_width(s: &str) -> Result<(&str, usize, &str), CheckBuilderErr> {
    let stripped = s.trim_start();
    // it is done like this to ensure that no non-whitespace (blackspace?) is left at the end of the prefix
    let pref = stripped.split_whitespace().next();
    let (prefix, rest) = match PREFIXES.iter().find(|x| Some(**x) == pref) {
        Some(p) => (*p, &stripped[p.len()..]),
        None => return Err(CheckBuilderErr::MalformedString("algorithm".to_owned())),
    };
    // Scan the key=value pairs only for "width"; everything else is parsed
    // later by the algorithm-specific builders.
    for x in keyval::KeyValIter::new(rest) {
        match x {
            Err(k) => return Err(CheckBuilderErr::MalformedString(k)),
            Ok((k, v)) => {
                if &k == "width" {
                    return usize::from_str_radix(&v, 10)
                        .map_err(|_| CheckBuilderErr::MalformedString(k))
                        .map(|width| (prefix, width, rest));
                }
            }
        }
    }
    Err(CheckBuilderErr::MissingParameter("width"))
}
/// A helper function for calling the find_segments function with strings arguments
///
/// `sum` is a comma-separated list of hex checksums, one per file in `bytes`.
fn find_segment_str<L>(
    spec: &str,
    bytes: &[Vec<u8>],
    sum: &str,
    rel: Relativity,
) -> Result<RangePairs, CheckBuilderErr>
where
    L: LinearCheck + FromStr<Err = CheckBuilderErr>,
    L::Sum: BitNum,
{
    let sum_array = sum
        .split(|x| x == ',')
        .map(L::Sum::from_hex)
        .collect::<Result<Vec<_>, _>>()
        .map_err(|_| CheckBuilderErr::MalformedString(String::default()))?;
    Ok(L::from_str(spec)?.find_segments(bytes, &sum_array, rel))
}

/// The available checksum types
static PREFIXES: &[&str] = &["fletcher", "crc", "modsum"];
/// A stringy function for determining which segments of a file have a given checksum.
///
/// It is given
/// * a string that models a checksum algorithm
/// * a vector of bytes slices (each slice containing the bytes of a file)
/// * a comma-separated string (without whitespace) containing target checksums for each file
/// * a parameter indicating whether the ends of the segments are relative to the start or the end of the file
///
/// # The Model String
/// A model string is generally of the form
/// ```text
/// [algorithm] width=[number] {more parameters}
/// ```
/// The `algorithm` parameter is either `fletcher`, `crc` or `modsum`.
/// Parameters depend solely on what kind of algorithm is used and more information is available
/// at the respective Builders.
pub fn find_checksum_segments(
    strspec: &str,
    bytes: &[Vec<u8>],
    sum: &str,
    rel: Relativity,
) -> Result<RangePairs, CheckBuilderErr> {
    let (prefix, width, rest) = find_prefix_width(strspec)?;
    // Dispatch to the smallest supported integer type that holds `width` bits.
    match (width, prefix) {
        (1..=8, "crc") => find_segment_str::<CRC<u8>>(rest, bytes, sum, rel),
        (9..=16, "crc") => find_segment_str::<CRC<u16>>(rest, bytes, sum, rel),
        (17..=32, "crc") => find_segment_str::<CRC<u32>>(rest, bytes, sum, rel),
        (33..=64, "crc") => find_segment_str::<CRC<u64>>(rest, bytes, sum, rel),
        (65..=128, "crc") => find_segment_str::<CRC<u128>>(rest, bytes, sum, rel),
        (1..=8, "modsum") => find_segment_str::<ModSum<u8>>(rest, bytes, sum, rel),
        (9..=16, "modsum") => find_segment_str::<ModSum<u16>>(rest, bytes, sum, rel),
        (17..=32, "modsum") => find_segment_str::<ModSum<u32>>(rest, bytes, sum, rel),
        (33..=64, "modsum") => find_segment_str::<ModSum<u64>>(rest, bytes, sum, rel),
        (1..=8, "fletcher") => find_segment_str::<Fletcher<u8>>(rest, bytes, sum, rel),
        (9..=16, "fletcher") => find_segment_str::<Fletcher<u16>>(rest, bytes, sum, rel),
        (17..=32, "fletcher") => find_segment_str::<Fletcher<u32>>(rest, bytes, sum, rel),
        (33..=64, "fletcher") => find_segment_str::<Fletcher<u64>>(rest, bytes, sum, rel),
        (65..=128, "fletcher") => find_segment_str::<Fletcher<u128>>(rest, bytes, sum, rel),
        _ => Err(CheckBuilderErr::ValueOutOfRange("width")),
    }
}
/// Compute the checksum of each file in `files` with algorithm `A` (parsed
/// from `strspec`), each rendered as a string of `width` bits.
fn get_checksums<A>(
    strspec: &str,
    files: &[Vec<u8>],
    width: usize,
) -> Result<Vec<String>, CheckBuilderErr>
where
    A: Digest + FromStr<Err = CheckBuilderErr>,
{
    let algo = A::from_str(strspec)?;
    let mut sums = Vec::new();
    for file in files {
        sums.push(algo.digest(file.as_slice()).unwrap().to_width_str(width));
    }
    Ok(sums)
}
/// Compute the checksums of the given files for the fully-specified algorithm
/// in the model string, dispatching on prefix and width like
/// `find_checksum_segments`.
pub fn find_checksum(strspec: &str, bytes: &[Vec<u8>]) -> Result<Vec<String>, CheckBuilderErr> {
    let (prefix, width, rest) = find_prefix_width(strspec)?;
    // look, it's not really useful to it in this case, but i really like how this looks
    match (width, prefix) {
        (1..=8, "crc") => get_checksums::<CRC<u8>>(rest, bytes, width),
        (9..=16, "crc") => get_checksums::<CRC<u16>>(rest, bytes, width),
        (17..=32, "crc") => get_checksums::<CRC<u32>>(rest, bytes, width),
        (33..=64, "crc") => get_checksums::<CRC<u64>>(rest, bytes, width),
        (65..=128, "crc") => get_checksums::<CRC<u128>>(rest, bytes, width),
        (1..=8, "modsum") => get_checksums::<ModSum<u8>>(rest, bytes, width),
        (9..=16, "modsum") => get_checksums::<ModSum<u16>>(rest, bytes, width),
        (17..=32, "modsum") => get_checksums::<ModSum<u32>>(rest, bytes, width),
        (33..=64, "modsum") => get_checksums::<ModSum<u64>>(rest, bytes, width),
        (1..=8, "fletcher") => get_checksums::<Fletcher<u8>>(rest, bytes, width),
        (9..=16, "fletcher") => get_checksums::<Fletcher<u16>>(rest, bytes, width),
        (17..=32, "fletcher") => get_checksums::<Fletcher<u32>>(rest, bytes, width),
        (33..=64, "fletcher") => get_checksums::<Fletcher<u64>>(rest, bytes, width),
        (65..=128, "fletcher") => get_checksums::<Fletcher<u128>>(rest, bytes, width),
        _ => Err(CheckBuilderErr::ValueOutOfRange("width")),
    }
}
// The partially-specified algorithm family parsed from a model string, kept
// in the widest integer type each family supports.
enum BuilderEnum {
    CRC(CRCBuilder<u128>),
    ModSum(ModSumBuilder<u64>),
    Fletcher(FletcherBuilder<u128>),
}
/// Search state for reverse-engineering a checksum algorithm: each input file
/// paired with its target checksum, plus the algorithm family to search in.
pub struct AlgorithmFinder<'a> {
    // (file contents, expected checksum) pairs.
    pairs: Vec<(&'a [u8], u128)>,
    spec: BuilderEnum,
    verbosity: u64,
}
impl<'a> AlgorithmFinder<'a> {
    /// Enumerate every candidate algorithm (as its display string) that maps
    /// each file to its paired checksum. Only the reverser matching `spec`'s
    /// family runs; the other two branches yield empty iterators.
    pub fn find_all<'b>(&'b self) -> impl Iterator<Item = Result<String, CheckReverserError>> + 'b {
        let maybe_crc = if let BuilderEnum::CRC(crc) = &self.spec {
            Some(
                checksum::crc::rev::reverse_crc(crc, self.pairs.as_slice(), self.verbosity)
                    .map(|x| x.map(|y| y.to_string())),
            )
        } else {
            None
        };
        let maybe_modsum = if let BuilderEnum::ModSum(modsum) = &self.spec {
            Some(
                checksum::modsum::rev::reverse_modsum(
                    modsum,
                    self.pairs.as_slice(),
                    self.verbosity,
                )
                .map(|x| x.map(|y| y.to_string())),
            )
        } else {
            None
        };
        let maybe_fletcher = if let BuilderEnum::Fletcher(fletcher) = &self.spec {
            Some(
                checksum::fletcher::rev::reverse_fletcher(
                    fletcher,
                    self.pairs.as_slice(),
                    self.verbosity,
                )
                .map(|x| x.map(|y| y.to_string())),
            )
        } else {
            None
        };
        // `Option::into_iter().flatten()` turns each absent branch into an
        // empty iterator so the three can be chained uniformly.
        maybe_crc
            .into_iter()
            .flatten()
            .chain(maybe_modsum.into_iter().flatten())
            .chain(maybe_fletcher.into_iter().flatten())
    }

    /// Parallel variant of [`find_all`] using rayon. The modsum reverser has
    /// no parallel implementation, so its sequential iterator is bridged.
    #[cfg(feature = "parallel")]
    pub fn find_all_para<'b>(
        &'b self,
    ) -> impl ParallelIterator<Item = Result<String, CheckReverserError>> + 'b {
        let maybe_crc = if let BuilderEnum::CRC(crc) = &self.spec {
            Some(
                checksum::crc::rev::reverse_crc_para(crc, self.pairs.as_slice(), self.verbosity)
                    .map(|x| x.map(|y| y.to_string())),
            )
        } else {
            None
        };
        let maybe_modsum = if let BuilderEnum::ModSum(modsum) = &self.spec {
            Some(
                checksum::modsum::rev::reverse_modsum(
                    modsum,
                    self.pairs.as_slice(),
                    self.verbosity,
                )
                .map(|x| x.map(|y| y.to_string()))
                .par_bridge(),
            )
        } else {
            None
        };
        let maybe_fletcher = if let BuilderEnum::Fletcher(fletcher) = &self.spec {
            Some(
                checksum::fletcher::rev::reverse_fletcher_para(
                    fletcher,
                    self.pairs.as_slice(),
                    self.verbosity,
                )
                .map(|x| x.map(|y| y.to_string())),
            )
        } else {
            None
        };
        maybe_crc
            .into_par_iter()
            .flatten()
            .chain(maybe_modsum.into_par_iter().flatten())
            .chain(maybe_fletcher.into_par_iter().flatten())
    }
}
/// Parses a checksum specification string plus a comma-separated list of hex
/// checksums into an `AlgorithmFinder` that can search for the algorithm
/// parameters matching every (bytes, checksum) pair.
///
/// Returns a `CheckBuilderErr` when the spec or the checksum list is
/// malformed, or when the number of checksums does not match `bytes`.
pub fn find_algorithm<'a>(
    strspec: &str,
    bytes: &'a [&[u8]],
    sum: &str,
    verbosity: u64,
) -> Result<AlgorithmFinder<'a>, CheckBuilderErr> {
    let (prefix, _, rest) = find_prefix_width(strspec)?;
    let prefix = prefix.to_ascii_lowercase();
    // Dispatch on the algorithm family named by the prefix.
    let spec = match prefix.as_str() {
        "crc" => BuilderEnum::CRC(CRCBuilder::<u128>::from_str(rest)?),
        "modsum" => BuilderEnum::ModSum(ModSumBuilder::<u64>::from_str(rest)?),
        "fletcher" => BuilderEnum::Fletcher(FletcherBuilder::<u128>::from_str(rest)?),
        _ => unimplemented!(),
    };
    let sums = sum
        .split(',')
        .map(u128::from_hex)
        .collect::<Result<Vec<_>, _>>()
        .map_err(|_| CheckBuilderErr::MalformedString(String::default()))?;
    // Each input needs exactly one target checksum. This used to be a
    // placeholder panic ("Help how do I error handle this?"); report it as a
    // builder error instead so callers can handle it.
    if sums.len() != bytes.len() {
        return Err(CheckBuilderErr::MalformedString(format!(
            "got {} checksums for {} inputs",
            sums.len(),
            bytes.len()
        )));
    }
    let pairs: Vec<_> = bytes.iter().cloned().zip(sums).collect();
    Ok(AlgorithmFinder {
        spec,
        pairs,
        verbosity,
    })
}
| true
|
a7082a4db46b7d73fc175be7920e266698ec6840
|
Rust
|
LukeMathWalker/advent-of-code-2019
|
/day02/src/main.rs
|
UTF-8
| 3,418
| 3.5
| 4
|
[] |
no_license
|
use itertools::iproduct;
use std::str::FromStr;
fn read_input(path: &str) -> Result<Vec<usize>, anyhow::Error> {
let input = std::fs::read_to_string(path)?;
let instructions = input
.trim()
.split(",")
.map(|s| usize::from_str(&s).expect("Failed to parse instruction"))
.collect();
Ok(instructions)
}
/// The result of executing a single instruction.
#[derive(Debug, PartialEq, Eq)]
enum Outcome {
    /// The instruction executed normally; the machine may continue.
    Success,
    /// The halt opcode (99) was reached.
    Halt,
}
/// A minimal Intcode-style machine: a tape of integers that is both the
/// program and its working memory, plus an instruction pointer.
struct TuringMachine {
    memory_tape: Vec<usize>,
    instruction_pointer: usize,
}
impl TuringMachine {
    /// Creates a machine with the instruction pointer at position 0.
    ///
    /// # Panics
    /// Panics if the tape is empty.
    fn new(memory_tape: Vec<usize>) -> Self {
        assert!(
            !memory_tape.is_empty(),
            "There has to be at least one instruction!"
        );
        Self {
            memory_tape,
            instruction_pointer: 0,
        }
    }
    /// Runs the program to completion and returns the final memory tape.
    fn execute(mut self) -> Vec<usize> {
        while self.step() != Outcome::Halt {}
        self.memory_tape
    }
    /// Executes the instruction at the current pointer.
    ///
    /// Opcodes 1 (add) and 2 (multiply) read two operand *addresses* and an
    /// output address from the three cells after the opcode; 99 halts.
    ///
    /// # Panics
    /// Panics on an unknown opcode or an out-of-bounds address.
    fn step(&mut self) -> Outcome {
        let opcode = self.memory_tape[self.instruction_pointer];
        match opcode {
            // `usize` is `Copy`, so plain indexing suffices; the original
            // cloned and re-borrowed each cell needlessly and duplicated
            // the whole arm for opcodes 1 and 2.
            1 | 2 => {
                let lhs_index = self.memory_tape[self.instruction_pointer + 1];
                let rhs_index = self.memory_tape[self.instruction_pointer + 2];
                let output_index = self.memory_tape[self.instruction_pointer + 3];
                let lhs = self.memory_tape[lhs_index];
                let rhs = self.memory_tape[rhs_index];
                self.memory_tape[output_index] = if opcode == 1 { lhs + rhs } else { lhs * rhs };
                self.instruction_pointer += 4;
                Outcome::Success
            }
            99 => Outcome::Halt,
            _ => panic!("Unknown opcode!"),
        }
    }
}
/// Restores the historical "1202 program alarm" state (noun = 12, verb = 2),
/// runs the program, and prints the value left at position 0 (part 1).
fn reproduce_1202_program_alarm(memory_tape: Vec<usize>) {
    let final_tape = run_program(12, 2, memory_tape);
    println!("Position 0: {:?}", final_tape[0]);
}
/// Writes `noun` and `verb` into cells 1 and 2, then runs the program to
/// completion, returning the final memory tape.
fn run_program(noun: usize, verb: usize, mut memory_tape: Vec<usize>) -> Vec<usize> {
    memory_tape[1] = noun;
    memory_tape[2] = verb;
    let machine = TuringMachine::new(memory_tape);
    machine.execute()
}
fn find_input_pair(desired_output: usize, memory_tape: Vec<usize>) -> Option<(usize, usize)> {
for (noun, verb) in iproduct!(0..=99, 0..=99) {
let output_tape = run_program(noun, verb, memory_tape.clone());
if output_tape[0] == desired_output {
return Some((noun, verb));
}
}
None
}
/// Solves both parts of AoC 2019 day 2: replays the "1202 program alarm"
/// state, then searches for the noun/verb pair producing 19690720.
fn main() -> Result<(), anyhow::Error> {
    let memory_tape = read_input("input.txt")?;
    reproduce_1202_program_alarm(memory_tape.clone());
    // `memory_tape` is not used after this call, so the trailing `.clone()`
    // the original performed here was redundant.
    if let Some((noun, verb)) = find_input_pair(19690720, memory_tape) {
        println!("{:?}", 100 * noun + verb);
    }
    Ok(())
}
| true
|
fa5ece3506d734d885cf0062f5f302bcf8c68837
|
Rust
|
swc-project/swc
|
/crates/dbg-swc/src/es/minifier/ensure_size.rs
|
UTF-8
| 8,135
| 2.546875
| 3
|
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
use std::{
path::{Path, PathBuf},
sync::Arc,
};
use anyhow::{Context, Result};
use clap::Args;
use rayon::prelude::*;
use swc_common::{
errors::{ColorConfig, Handler, HANDLER},
SourceFile, SourceMap, GLOBALS,
};
use tracing::info;
use crate::util::{
all_js_files, gzipped_size,
minifier::{get_esbuild_output, get_minified, get_terser_output},
print_js, wrap_task,
};
/// [Experimental] Ensure that we are performing better than other minification
/// tools.
#[derive(Debug, Args)]
pub struct EnsureSize {
    // Skip the terser comparison entirely.
    #[clap(long)]
    pub no_terser: bool,
    // Skip the esbuild comparison entirely.
    #[clap(long)]
    pub no_esbuild: bool,
    /// This can be a directory or a file.
    ///
    /// If this is a directory, all `.js` files in it will be verified.
    pub path: PathBuf,
}
impl EnsureSize {
    /// Minifies every input file with swc and (unless disabled) with terser
    /// and esbuild, then prints per-file regressions and aggregate totals.
    pub fn run(self, cm: Arc<SourceMap>) -> Result<()> {
        let all_files = all_js_files(&self.path)?;
        info!("Using {} files", all_files.len());
        // Process files in parallel. `check_file` returns `Ok(None)` when
        // neither terser nor esbuild produced output, so `results` may hold
        // fewer entries than `all_files`.
        let mut results = GLOBALS.with(|globals| {
            all_files
                .par_iter()
                .map(|js_file| GLOBALS.set(globals, || self.check_file(cm.clone(), js_file)))
                .filter_map(|v| v.transpose())
                .collect::<Result<Vec<_>>>()
        })?;
        // Sort so the worst regressions relative to terser are printed last.
        results.sort_by_key(|f| {
            if let Some(terser) = &f.terser {
                f.swc.mangled_size as isize - terser.mangled_size as isize
            } else {
                0
            }
        });
        // Per-file report: only shown where swc lost to terser on some axis.
        for f in &results {
            if let Some(terser) = &f.terser {
                if f.swc.mangled_size > terser.mangled_size
                    || f.swc.no_mangle_size > terser.no_mangle_size
                    || f.swc.gzipped_size > terser.gzipped_size
                {
                    println!();
                    println!("{}", f.fm.name);
                }
                if f.swc.gzipped_size > terser.gzipped_size {
                    println!(" Gzipped");
                    println!(" swc: {} bytes", f.swc.gzipped_size);
                    println!(" terser: {} bytes", terser.gzipped_size);
                }
                if f.swc.mangled_size > terser.mangled_size {
                    println!(" Mangled");
                    println!(" swc: {} bytes", f.swc.mangled_size);
                    println!(" terser: {} bytes", terser.mangled_size);
                }
                if f.swc.no_mangle_size > terser.no_mangle_size {
                    println!(" No-mangle");
                    println!(" swc: {} bytes", f.swc.no_mangle_size);
                    println!(" terser: {} bytes", terser.no_mangle_size);
                }
            }
        }
        {
            let swc_total = results.iter().map(|f| f.swc.mangled_size).sum::<usize>();
            let terser_total = results
                .iter()
                .flat_map(|f| f.terser.map(|v| v.mangled_size))
                .sum::<usize>();
            println!("Total");
            println!(" swc: {} bytes", swc_total);
            println!(" terser: {} bytes", terser_total);
            println!(" Size ratio: {}", swc_total as f64 / terser_total as f64);
            let swc_smaller_file_count = results
                .iter()
                .filter(|f| {
                    if let Some(terser) = &f.terser {
                        f.swc.mangled_size <= terser.mangled_size
                    } else {
                        false
                    }
                })
                .count();
            // FIX: the count and the percentage are both computed over
            // `results` (files that actually have a comparison), so the
            // "out of" figure must be `results.len()` too; it previously
            // printed `all_files.len()` while dividing by `results.len()`.
            println!(
                "swc produced smaller or equal output for {} files out of {} files, {:.2}%",
                swc_smaller_file_count,
                results.len(),
                100.0 * swc_smaller_file_count as f64 / results.len() as f64
            );
        }
        {
            let swc_total = results.iter().map(|f| f.swc.gzipped_size).sum::<usize>();
            let terser_total = results
                .iter()
                .flat_map(|f| f.terser.map(|v| v.gzipped_size))
                .sum::<usize>();
            println!("Total (gzipped)");
            println!(" swc: {} bytes", swc_total);
            println!(" terser: {} bytes", terser_total);
            println!(" Size ratio: {}", swc_total as f64 / terser_total as f64);
            let swc_smaller_file_count = results
                .iter()
                .filter(|f| {
                    if let Some(terser) = &f.terser {
                        f.swc.gzipped_size <= terser.gzipped_size
                    } else {
                        false
                    }
                })
                .count();
            // Same denominator fix as above.
            println!(
                "swc produced smaller or equal output for {} files out of {} files, {:.2}%",
                swc_smaller_file_count,
                results.len(),
                100.0 * swc_smaller_file_count as f64 / results.len() as f64
            );
        }
        Ok(())
    }
    /// Minifies one file with swc (mangled and no-mangle) and, unless
    /// disabled, with terser and esbuild. Returns `Ok(None)` when there is
    /// nothing to compare swc against.
    fn check_file(&self, cm: Arc<SourceMap>, js_file: &Path) -> Result<Option<FileSize>> {
        wrap_task(|| {
            info!("Checking {}", js_file.display());
            let fm = cm.load_file(js_file).context("failed to load file")?;
            let handler =
                Handler::with_tty_emitter(ColorConfig::Never, true, false, Some(cm.clone()));
            HANDLER.set(&handler, || {
                let code_mangled = {
                    let minified_mangled = get_minified(cm.clone(), js_file, true, true)?;
                    print_js(cm.clone(), &minified_mangled.module, true)
                        .context("failed to convert ast to code")?
                };
                let swc_no_mangle = {
                    let minified_no_mangled = get_minified(cm.clone(), js_file, true, false)?;
                    print_js(cm, &minified_no_mangled.module, true)
                        .context("failed to convert ast to code")?
                };
                // eprintln!("The output size of swc minifier: {}", code_mangled.len());
                let mut file_size = FileSize {
                    fm,
                    swc: MinifierOutput {
                        mangled_size: code_mangled.len(),
                        no_mangle_size: swc_no_mangle.len(),
                        gzipped_size: gzipped_size(&code_mangled),
                    },
                    terser: Default::default(),
                    esbuild: Default::default(),
                };
                if !self.no_terser {
                    let terser_mangled = get_terser_output(js_file, true, true)?;
                    let terser_no_mangle = get_terser_output(js_file, true, false)?;
                    file_size.terser = Some(MinifierOutput {
                        mangled_size: terser_mangled.len(),
                        no_mangle_size: terser_no_mangle.len(),
                        gzipped_size: gzipped_size(&terser_mangled),
                    });
                }
                if !self.no_esbuild {
                    let esbuild_mangled = get_esbuild_output(js_file, true)?;
                    let esbuild_no_mangle = get_esbuild_output(js_file, false)?;
                    file_size.esbuild = Some(MinifierOutput {
                        mangled_size: esbuild_mangled.len(),
                        no_mangle_size: esbuild_no_mangle.len(),
                        gzipped_size: gzipped_size(&esbuild_mangled),
                    });
                }
                if file_size.terser.is_none() && file_size.esbuild.is_none() {
                    return Ok(None);
                }
                Ok(Some(file_size))
            })
        })
        .with_context(|| format!("failed to check file: {}", js_file.display()))
    }
}
/// Size measurements for one input file across all minifiers that ran.
#[allow(unused)]
#[derive(Debug)]
struct FileSize {
    // Source file (name is used in the per-file report).
    fm: Arc<SourceFile>,
    swc: MinifierOutput,
    // `None` when the corresponding tool was disabled via CLI flags.
    terser: Option<MinifierOutput>,
    esbuild: Option<MinifierOutput>,
}
/// Output sizes (in bytes) produced by a single minifier run.
#[allow(unused)]
#[derive(Debug, Clone, Copy)]
struct MinifierOutput {
    // Minify + mangle
    mangled_size: usize,
    // Minify only
    no_mangle_size: usize,
    /// Minify + mangle + gzip
    gzipped_size: usize,
}
| true
|
c493d0c4ef24eb681d823e2a815b39cb366be6e3
|
Rust
|
johnstonskj/rust-financial
|
/fin_model/src/symbol.rs
|
UTF-8
| 2,756
| 3.71875
| 4
|
[
"MIT"
] |
permissive
|
/*!
Provides types and functions for market symbols.
In general the `Symbol` type is loosely defined, it is stored as a `String`
but there is little standardization across markets and geographies
concerning length or character set. To this end the function `is_valid`
simply takes the commonly known limits and returns true/false, although
this should probably be _maybe true_/false.
The macro [`assert_is_valid`](../macro.assert_is_valid.html) can be used by
providers as it will do nothing if a symbol is valid but return a
`request::RequestResult` if it is not.
*/
use std::collections::HashSet;
// ------------------------------------------------------------------------------------------------
// Public Types
// ------------------------------------------------------------------------------------------------
/// Type for a market ticker symbol. Consumers of symbols should use
/// `is_valid` to ensure the value they are provided is at least
/// syntactically correct.
pub type Symbol = String;

/// Where a set of symbols are used together.
pub type Symbols = HashSet<Symbol>;

/// Maximum length assuming a root length of 6 and a 2 character suffix.
pub const MAX_SYMBOL_LENGTH: usize = 8;

/// is the symbol valid; currently only string lengths are checked.
#[inline(always)]
pub fn is_valid(symbol: Symbol) -> bool {
    // Equivalent to "non-empty and at most MAX_SYMBOL_LENGTH bytes".
    matches!(symbol.len(), 1..=MAX_SYMBOL_LENGTH)
}
/// Type for a qualified ticker symbol using the same format
/// for the market and symbol itself.
// Plain data holder; both parts should individually satisfy `is_valid`.
pub struct QualifiedSymbol {
    /// the symbol for the market that qualifies `symbol`
    pub market: Symbol,
    /// the target security symbol
    pub symbol: Symbol,
}
// ------------------------------------------------------------------------------------------------
// Macros
// ------------------------------------------------------------------------------------------------
/// Short-cut to test whether a `Symbol` is valid, and if not to return
/// a `RequestResult` containing the error `RequestError::BadSymbolError`.
///
/// This macro *requires* that `is_valid` and `request::RequestError` are
/// in scope where the assertion is made.
/// ## Example
///
/// The following example shows the use of the macro, and specifically the
/// necessary imports.
///
/// ```rust,ignore
/// use fin_model::request::{RequestError, RequestResult};
/// use fin_model::symbol::{is_valid, Symbol};
///
/// fn latest_price_only(for_symbol: Symbol) -> RequestResult<f32> {
///     assert_is_valid!(for_symbol);
///     Ok(0.0)
/// }
/// ```
#[macro_export]
macro_rules! assert_is_valid {
    ($symbol:expr) => {
        // `is_valid` takes an owned Symbol (String), hence `to_string()`;
        // on failure the original expression is moved into the error.
        match is_valid($symbol.to_string()) {
            true => (),
            false => return Err(RequestError::BadSymbolError($symbol)),
        };
    };
}
| true
|
ae363734627b27a3ddb15b5951b51fde10ce12e3
|
Rust
|
mackenziestarr/opl3
|
/src/uart.rs
|
UTF-8
| 1,965
| 2.8125
| 3
|
[] |
no_license
|
use volatile::Volatile;
use bit_field::BitField;
use core;
use super::port::{Rx,Tx};
// Memory-mapped UART register block. Field order must match the hardware
// register map exactly (`repr(C, packed)` guarantees layout and no padding).
// NOTE(review): names suggest an NXP/Freescale Kinetis-style UART — confirm
// offsets against the target MCU's reference manual.
#[repr(C,packed)]
pub struct Uart {
    bdh : Volatile<u8>,   // baud rate high: upper divider bits (see `new`)
    bdl : Volatile<u8>,   // baud rate low: lower 8 divider bits
    c1 : Volatile<u8>,    // control 1
    c2 : Volatile<u8>,    // control 2: RX/TX enable bits (set in `new`)
    s1 : Volatile<u8>,    // status 1: polled by `write_str` (bits 6 and 7)
    s2 : Volatile<u8>,    // status 2
    c3 : Volatile<u8>,    // control 3
    d : Volatile<u8>,     // data register: bytes written here are transmitted
    ma1 : Volatile<u8>,   // match address 1
    ma2 : Volatile<u8>,   // match address 2
    c4 : Volatile<u8>,    // control 4: fractional baud divisor in bits 0..5
    c5 : Volatile<u8>,    // control 5
    ed : Volatile<u8>,    // extended data
    modem : Volatile<u8>, // modem control
    ir : Volatile<u8>,    // infrared control
}
impl Uart {
    /// Configures and returns UART `id` (only UART 0 is supported).
    ///
    /// `clkdiv` is `(integer divider < 8192, fractional divisor < 32)`.
    ///
    /// # Safety
    /// Creates a `&'static mut` to a fixed MMIO address; the caller must
    /// ensure no other reference to the same UART registers exists.
    ///
    /// # Panics
    /// Panics if an RX/TX pin belongs to a different UART, if either
    /// divider is out of range, or if `id` is not 0.
    pub unsafe fn new(id: u8, rx: Option<Rx>, tx: Option<Tx>, clkdiv: (u16,u8)) -> &'static mut Uart {
        // Validate that the supplied pins are routable to this UART.
        if let Some(r) = rx.as_ref() {
            if r.uart() != id {
                panic!("Invalid RX pin for UART {}", id);
            }
        }
        if let Some(t) = tx.as_ref() {
            if t.uart() != id {
                panic!("Invalid TX pin for UART {}", id);
            }
        }
        // 13-bit integer divider, 5-bit fractional divisor.
        if clkdiv.0 >= 8192 {
            panic!("Invalid UART clock divider: {}", clkdiv.0);
        }
        if clkdiv.1 >= 32 {
            panic!("Invalid UART fractional divisor: {}", clkdiv.1);
        }
        let uart = match id {
            // SAFETY(review): 0x4006A000 is assumed to be the UART0 base
            // address for this MCU — confirm against the datasheet.
            0 => &mut *(0x4006A000 as *mut Uart),
            _ => panic!("Invalid UART id: {}", id)
        };
        // Fractional baud divisor goes in C4[4:0].
        uart.c4.update(|c4| {
            c4.set_bits(0..5, clkdiv.1);
        });
        // Integer divider is split: top 5 bits in BDH[4:0], low 8 in BDL.
        uart.bdh.update(|bdh| {
            bdh.set_bits(0..5, clkdiv.0.get_bits(8..13) as u8);
        });
        uart.bdl.write(clkdiv.0.get_bits(0..8) as u8);
        // Enable receiver (bit 2) / transmitter (bit 3) only if pins given.
        uart.c2.update(|c2| {
            c2.set_bit(2, rx.is_some());
            c2.set_bit(3, tx.is_some());
        });
        uart
    }
}
impl core::fmt::Write for Uart {
    /// Blocking write: busy-waits before each byte until S1 bit 7 is set
    /// (transmit register empty), then waits for S1 bit 6 (transmission
    /// complete) before returning. Never reports an error.
    fn write_str(&mut self, s: &str) -> core::fmt::Result {
        for b in s.bytes() {
            while !self.s1.read().get_bit(7) {}
            self.d.write(b);
        }
        while !self.s1.read().get_bit(6) {}
        Ok(())
    }
}
| true
|
e7286f22d2e0c65d48e3a13079dbfc53c034e539
|
Rust
|
femnad/gel
|
/src/main.rs
|
UTF-8
| 7,115
| 2.5625
| 3
|
[] |
no_license
|
extern crate clap;
extern crate reqwest;
extern crate roxmltree;
extern crate select;
extern crate scraper;
#[macro_use]
extern crate tantivy;
use std::path::Path;
use std::process::Command;
use clap::{App, Arg, SubCommand, AppSettings};
use reqwest::StatusCode;
use roxmltree::Document;
use tantivy::schema::Schema;
use tantivy::schema::Field;
use tantivy::{Index, Score, DocAddress};
use tantivy::query::QueryParser;
use tantivy::collector::TopDocs;
const DEFAULT_INDEX_PATH_SUFFIX: &str = "gel";
/// Fetches a secret via the `pass` CLI and returns the last line of its
/// trimmed output (pass may print metadata lines before the token).
fn get_token(secret_name: String) -> String {
    let output = Command::new("pass")
        .arg(secret_name)
        .output()
        .expect("fail pass");
    let stdout = String::from_utf8(output.stdout).expect("failage");
    let lines: Vec<&str> = stdout.trim().split('\n').collect();
    lines[lines.len() - 1].to_string()
}
// A single Pinboard bookmark extracted from the API response.
#[derive(Debug)]
struct Post {
    link: String,  // bookmark URL (the "href" attribute)
    title: String, // bookmark description
}
// Options for the `index` subcommand.
struct Options {
    secret: String,     // name of the `pass` entry holding the API token
    count: u64,         // number of recent posts to fetch
    index_path: String, // directory of the tantivy index
}
/// Extracts every `<post>` element from the Pinboard XML response, mapping
/// its `href`/`description` attributes to `Post { link, title }`.
fn get_url(text: String) -> Vec<Post> {
    let doc = Document::parse(&text).expect("xml parsing fail");
    let mut posts = Vec::new();
    for node in doc.descendants() {
        if node.tag_name().name() != "post" {
            continue;
        }
        let link = node.attribute("href").expect("attribute fail").to_string();
        let title = node.attribute("description").expect("attribute fail").to_string();
        posts.push(Post { link, title });
    }
    posts
}
/// Fetches the `count` most recent Pinboard posts using the API token
/// stored in the configured `pass` secret.
fn get_posts(options: Options) -> Vec<Post> {
    let token = get_token(options.secret);
    // The secret line looks like "user: <token>"; keep the part after ": ".
    let tokens: Vec<&str> = token.split(": ").collect();
    let auth = tokens.last().unwrap().to_string();
    let url = format!(
        "https://api.pinboard.in/v1/posts/recent?auth_token={auth}&count={count}",
        auth = auth,
        count = options.count
    );
    let mut response = reqwest::get(url.as_str()).expect("failage");
    assert!(response.status() == StatusCode::OK);
    get_url(response.text().expect("response"))
}
/// Downloads the body of the linked page as a string.
fn get_text(post: Post) -> String {
    let mut response = reqwest::get(post.link.as_str()).expect("crawl fail");
    response.text().expect("text fail")
}
/// Opens the tantivy index at `schema_path`, or creates a fresh one
/// (creating the directory first if needed) when opening fails.
///
/// # Panics
/// Panics when the directory or the new index cannot be created.
fn get_index(schema_path: &Path, schema: Schema) -> Index {
    // A single `match` replaces the original `is_ok()` + `unwrap()` pair,
    // which inspected the same Result twice.
    match Index::open_in_dir(schema_path) {
        Ok(index) => index,
        Err(_) => {
            if !schema_path.exists() {
                std::fs::create_dir(schema_path).unwrap();
            }
            Index::create_in_dir(schema_path, schema).unwrap()
        }
    }
}
/// Builds the two-field schema: `title` is indexed and stored (so it can be
/// returned with hits), `body` is indexed only.
fn get_schema() -> (Field, Field, Schema) {
    let mut builder = Schema::builder();
    let title =
        builder.add_text_field("title", tantivy::schema::TEXT | tantivy::schema::STORED);
    let body = builder.add_text_field("body", tantivy::schema::TEXT);
    let schema = builder.build();
    (title, body, schema)
}
/// Downloads each post, extracts the text of all `<p>` elements, and adds a
/// (title, body) document to the tantivy index at `schema_dir`.
fn scrape_posts(posts: Vec<Post>, schema_dir: String) {
    let (title, body, schema) = get_schema();
    let schema_path = Path::new(&schema_dir);
    let index = get_index(schema_path, schema);
    // 100 MB writer heap.
    let mut index_writer = index.writer(100_000_000).expect("writer create fail");
    for post in posts {
        // Keep the title before `post` is moved into `get_text`.
        let post_title = post.title.clone();
        println!("Parsing {}", post.title);
        let text = get_text(post);
        let document = scraper::Html::parse_document(text.as_str());
        let selector = scraper::Selector::parse("p").expect("selector parse fail");
        // One line per paragraph; fragments inside a paragraph are
        // space-joined.
        let full_text = document.select(&selector).into_iter()
            .map(|paragraph| {
                paragraph.text().collect::<Vec<&str>>().join(" ")
            })
            .collect::<Vec<String>>()
            .join("\n");
        index_writer.add_document(doc!(
            title => post_title,
            body => full_text,
        ));
    }
    // Documents are not visible to searchers until committed.
    index_writer.commit().expect("commit fail");
}
/// Searches the index at `schema_path` for `term`, printing the top
/// `results` hits as "score: json-document".
fn search(term: &str, results: usize, schema_path: &str) {
    // Rebuild the schema to get Field handles matching the stored index.
    let (title, body, schema) = get_schema();
    let index = Index::open_in_dir(schema_path).expect("index open fail");
    let reader = index.reader().expect("reader fail");
    let searcher = reader.searcher();
    // Query both fields by default.
    let query_parser = QueryParser::for_index(&index, vec![title, body]);
    let query = query_parser.parse_query(term).expect("query parse fail");
    let top_docs: Vec<(Score, DocAddress)> = searcher.search(&query, &TopDocs::with_limit(results))
        .expect("search fail");
    for (score, doc_address) in top_docs {
        let retrieved_doc = searcher.doc(doc_address).expect("retrieve fail");
        println!("{score}: {doc}", score=score, doc=schema.to_json(&retrieved_doc));
    }
}
/// Returns `<platform config dir>/gel` as the default index location.
fn get_default_index_path() -> String {
    let base = dirs::config_dir().unwrap();
    format!("{}/{}", base.to_str().unwrap(), DEFAULT_INDEX_PATH_SUFFIX)
}
/// CLI entry point: `gel index` crawls recent Pinboard posts into a local
/// tantivy index; `gel search` queries that index.
fn main() {
    let default_index = get_default_index_path();
    let matches = App::new("gel")
        .setting(AppSettings::SubcommandRequiredElseHelp)
        .subcommand(SubCommand::with_name("index")
            .about("build an index")
            .arg(Arg::with_name("secret")
                .short("s")
                .long("secret")
                .help("a pass secret containing API secret")
                .takes_value(true)
                .required(true))
            .arg(Arg::with_name("index")
                .short("i")
                .long("index-path")
                .takes_value(true)
                .default_value(default_index.as_str()))
            .arg(Arg::with_name("count")
                .short("c")
                .long("count")
                .help("number of recent posts")
                .default_value("1")))
        .subcommand(SubCommand::with_name("search")
            .about("search the index")
            .arg(Arg::with_name("term")
                .index(1)
                .required(true))
            .arg(Arg::with_name("index")
                .short("i")
                .long("index-path")
                .takes_value(true)
                .default_value(default_index.as_str()))
            .arg(Arg::with_name("results")
                .short("r")
                .long("results")
                .help("number of results")
                .default_value("10")))
        .get_matches();
    if let Some(matches) = matches.subcommand_matches("index") {
        let pass_secret = matches.value_of("secret").expect("failed getting secret");
        let count: u64 = matches.value_of("count").expect("failed getting count").parse()
            .expect("failed parsing int");
        let index_path = matches.value_of("index").unwrap();
        let options = Options{secret: pass_secret.to_string(), count,
            index_path: index_path.to_string()};
        // Keep a copy of the path: `options` is moved into `get_posts`.
        let index_path = options.index_path.clone();
        let posts = get_posts(options);
        scrape_posts(posts, index_path)
    } else if let Some(matches) = matches.subcommand_matches("search") {
        let index_path = matches.value_of("index").unwrap();
        let term = matches.value_of("term").unwrap();
        let results: usize = matches.value_of("results").unwrap().parse().unwrap();
        search(term, results, index_path);
    }
}
| true
|
e3f25407d8e8760dfef35a1c849a4cee21374acb
|
Rust
|
mcountryman/wuffs-rs
|
/wuffs/src/std/compression/gzip.rs
|
UTF-8
| 2,854
| 2.578125
| 3
|
[
"Apache-2.0"
] |
permissive
|
use crate::{
boxed::{WuffsBox, WuffsBoxed},
buf::WuffsBuf,
slice::WuffsSlice,
status::{IntoResult, WuffsError},
};
use wuffs_sys::{
sizeof__wuffs_gzip__decoder, wuffs_gzip__decoder, wuffs_gzip__decoder__initialize,
wuffs_gzip__decoder__set_quirk_enabled, wuffs_gzip__decoder__transform_io,
WUFFS_GZIP__DECODER_WORKBUF_LEN_MAX_INCL_WORST_CASE as WORK_BUF_SIZE, WUFFS_VERSION,
};
/// Safe wrapper around the wuffs gzip decoder.
#[derive(Clone)]
pub struct WuffsGzipDecoder {
    // Scratch buffer sized for the wuffs worst-case work-buffer length.
    work: Vec<u8>,
    // The heap-allocated, opaque wuffs decoder state.
    inner: WuffsBox<wuffs_gzip__decoder>,
}
impl WuffsGzipDecoder {
    /// Allocates and initializes the underlying wuffs decoder plus its
    /// worst-case work buffer. Fails if wuffs rejects the initialization
    /// (e.g. version mismatch).
    pub fn new() -> Result<Self, WuffsError> {
        let mut inner = WuffsBox::new();
        unsafe {
            // SAFETY(review): relies on WuffsBox providing an allocation of
            // exactly `inner.size()` bytes; the 0x01 flag is assumed to be
            // wuffs' "already zeroed" initialize option — confirm against
            // the wuffs headers.
            wuffs_gzip__decoder__initialize(
                inner.as_mut_ptr(),
                inner.size() as _,
                WUFFS_VERSION as _,
                0x01,
            )
            .into_result()?;
        }
        Ok(Self {
            work: vec![0; WORK_BUF_SIZE as _],
            inner,
        })
    }
    /// Toggles a wuffs quirk on the underlying decoder.
    pub fn set_quirk_enabled(&mut self, quirk: u32, enabled: bool) {
        unsafe {
            // SAFETY(review): `inner` was initialized in `new`, so the
            // decoder pointer is valid for this call.
            wuffs_gzip__decoder__set_quirk_enabled(self.inner.as_mut_ptr(), quirk, enabled);
        }
    }
    /// Decompresses gzip data from `src` into `dst`.
    ///
    /// Returns `(bytes consumed from src, bytes written to dst)`; errors
    /// from the wuffs transform (including short buffers) are surfaced as
    /// `WuffsError`.
    pub fn decode(
        &mut self,
        src: &[u8],
        dst: &mut [u8],
    ) -> Result<(usize, usize), WuffsError> {
        let mut src = unsafe { WuffsBuf::from_slice_readonly(src) };
        let mut dst = WuffsBuf::from_slice(dst);
        let work = WuffsSlice::from(&mut self.work[..]);
        unsafe {
            // SAFETY(review): buffers wrap live slices that outlive the
            // call, and `inner` is an initialized decoder.
            wuffs_gzip__decoder__transform_io(
                self.inner.as_mut_ptr(),
                dst.as_mut_ptr(),
                src.as_mut_ptr(),
                work.into_inner(),
            )
            .into_result()?
        };
        Ok((src.read(), dst.written()))
    }
}
impl WuffsBoxed for wuffs_gzip__decoder {
    /// Size of the opaque decoder struct, queried from the wuffs C library
    /// (the struct layout is not exposed to Rust).
    fn size() -> usize {
        unsafe { sizeof__wuffs_gzip__decoder() as _ }
    }
}
#[cfg(test)]
mod tests {
    /// Round-trips a known gzip stream and checks both the byte counts
    /// reported by `decode` and the decompressed payload.
    #[test]
    fn test_gzip_decode() {
        let cmp = b"Hello Wuffs.\n";
        // src holds a gzip-encoded "Hello Wuffs."
        //
        // $ echo "Hello Wuffs." | gzip --no-name | xxd
        // 00000000: 1f8b 0800 0000 0000 0003 f348 cdc9 c957 ...........H...W
        // 00000010: 082f 4d4b 2bd6 e302 003c 8475 bb0d 0000 ./MK+....<.u....
        // 00000020: 00 .
        //
        // Passing --no-name to the gzip command line also means to skip the timestamp,
        // which means that its output is deterministic.
        let src = [
            0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, // 00..07
            0x00, 0x03, 0xf3, 0x48, 0xcd, 0xc9, 0xc9, 0x57, // 08..0F
            0x08, 0x2f, 0x4d, 0x4b, 0x2b, 0xd6, 0xe3, 0x02, // 10..17
            0x00, 0x3c, 0x84, 0x75, 0xbb, 0x0d, 0x00, 0x00, // 18..1F
            0x00, // 20..20
        ];
        let mut dst = [0; 1024];
        let mut gz = super::WuffsGzipDecoder::new().unwrap();
        let (read, written) = gz.decode(&src, &mut dst).unwrap();
        assert_eq!(read, src.len());
        assert_eq!(written, cmp.len());
        let dst = &dst[..written];
        assert_eq!(dst, b"Hello Wuffs.\n");
    }
}
| true
|
20c04be27d8c3e00b0938815d613299a0571297b
|
Rust
|
coreos/afterburn
|
/src/providers/microsoft/goalstate.rs
|
UTF-8
| 3,434
| 2.703125
| 3
|
[
"Apache-2.0"
] |
permissive
|
//! Logic to interact with WireServer `goalstate` endpoint.
use anyhow::{anyhow, Result};
use serde::Deserialize;
/// Top-level WireServer goal-state document (deserialized from XML).
#[derive(Debug, Deserialize, Clone)]
pub(crate) struct GoalState {
    #[serde(rename = "Container")]
    pub container: Container,
    // Revision counter assigned by the WireServer (kept as an opaque string).
    #[serde(rename = "Incarnation")]
    incarnation: String,
}
impl GoalState {
    /// Return the certificates endpoint (if any).
    ///
    /// Reads the first role instance; `first()?` replaces the original
    /// explicit `match`-on-`Option` plus `get(0)` (clippy: `get_first`).
    pub(crate) fn certs_endpoint(&self) -> Option<String> {
        self.container
            .role_instance_list
            .role_instances
            .first()?
            .configuration
            .certificates
            .clone()
    }
    /// Return this instance `ContainerId`.
    pub(crate) fn container_id(&self) -> &str {
        &self.container.container_id
    }
    /// Return this instance `InstanceId`, failing if the goal state carried
    /// an empty `RoleInstanceList`.
    pub(crate) fn instance_id(&self) -> Result<&str> {
        Ok(&self
            .container
            .role_instance_list
            .role_instances
            .first()
            .ok_or_else(|| anyhow!("empty RoleInstanceList"))?
            .instance_id)
    }
    /// Return the current `Incarnation` count for this instance.
    pub(crate) fn incarnation(&self) -> &str {
        &self.incarnation
    }
}
/// `Container` section of the goal state.
#[derive(Debug, Deserialize, Clone, Default)]
#[allow(dead_code)]
pub(crate) struct Container {
    #[serde(rename = "ContainerId")]
    pub container_id: String,
    #[serde(rename = "RoleInstanceList")]
    pub role_instance_list: RoleInstanceList,
}
/// Wrapper for the list of role instances (may be empty).
#[derive(Debug, Deserialize, Clone, Default)]
#[allow(dead_code)]
pub(crate) struct RoleInstanceList {
    #[serde(rename = "RoleInstance", default)]
    pub role_instances: Vec<RoleInstance>,
}
/// A single role instance with its configuration endpoints.
#[derive(Debug, Deserialize, Clone)]
#[allow(dead_code)]
pub(crate) struct RoleInstance {
    #[serde(rename = "Configuration")]
    pub configuration: Configuration,
    #[serde(rename = "InstanceId")]
    pub instance_id: String,
}
/// Per-instance configuration URLs.
#[derive(Debug, Deserialize, Clone)]
#[allow(dead_code)]
pub(crate) struct Configuration {
    // Optional: absent when no certificates endpoint is advertised.
    #[serde(rename = "Certificates")]
    pub certificates: Option<String>,
    #[serde(rename = "SharedConfig", default)]
    pub shared_config: String,
}
/// Payload returned by the certificates endpoint.
#[derive(Debug, Deserialize, Clone)]
#[allow(dead_code)]
pub(crate) struct CertificatesFile {
    #[serde(rename = "Data", default)]
    pub data: String,
}
/// Parsed SharedConfig document.
#[derive(Debug, Deserialize, Clone)]
#[allow(dead_code)]
pub(crate) struct SharedConfig {
    #[serde(rename = "Incarnation")]
    pub incarnation: Incarnation,
    #[serde(rename = "Instances")]
    pub instances: Instances,
}
/// Incarnation element of SharedConfig.
#[derive(Debug, Deserialize, Clone)]
#[allow(dead_code)]
pub(crate) struct Incarnation {
    pub instance: String,
}
/// Instance list element of SharedConfig.
#[derive(Debug, Deserialize, Clone)]
#[allow(dead_code)]
pub(crate) struct Instances {
    #[serde(rename = "Instance", default)]
    pub instances: Vec<Instance>,
}
/// A single instance with its network endpoints.
#[derive(Debug, Deserialize, Clone)]
#[allow(dead_code)]
pub(crate) struct Instance {
    pub id: String,
    pub address: String,
    #[serde(rename = "InputEndpoints")]
    pub input_endpoints: InputEndpoints,
}
/// Wrapper for the endpoint list.
#[derive(Debug, Deserialize, Clone)]
#[allow(dead_code)]
pub(crate) struct InputEndpoints {
    #[serde(rename = "Endpoint", default)]
    pub endpoints: Vec<Endpoint>,
}
/// One load-balanced endpoint entry.
#[derive(Debug, Deserialize, Clone)]
#[allow(dead_code)]
pub(crate) struct Endpoint {
    #[serde(rename = "loadBalancedPublicAddress", default)]
    pub load_balanced_public_address: String,
}
| true
|
df14e52f06a4ac993590b4ec7f019402b6b00eff
|
Rust
|
sourcedennis/wasm-pathtracer
|
/src/math/vec3.rs
|
UTF-8
| 4,458
| 3.765625
| 4
|
[
"BSD-3-Clause",
"Apache-2.0"
] |
permissive
|
use std::ops;
use std::fmt;
/// A vector in 3-dimensional space.
#[derive(Copy, Clone)]
pub struct Vec3 {
    pub x: f32,
    pub y: f32,
    pub z: f32,
}

impl Vec3 {
    /// The vector that lies at the origin, which has 0 length; (0,0,0).
    pub const ZERO: Vec3 = Vec3 { x: 0.0, y: 0.0, z: 0.0 };

    /// Constructs a new vector with the provided components.
    pub fn new(x: f32, y: f32, z: f32) -> Vec3 {
        Vec3 { x, y, z }
    }

    /// Constructs a new unit vector in the provided direction.
    pub fn unit(x: f32, y: f32, z: f32) -> Vec3 {
        Vec3::new(x, y, z).normalize()
    }

    /// Scales the vector such that its length becomes 1.
    pub fn normalize(self) -> Vec3 {
        self * (1.0 / self.len())
    }

    /// Computes the dot product with the provided Vec3.
    pub fn dot(self, rhs: Vec3) -> f32 {
        self.x * rhs.x + self.y * rhs.y + self.z * rhs.z
    }

    /// Returns some vector that is orthogonal to the current one.
    ///
    /// Solves `self · v = 0` for the component whose coordinate has a
    /// usable magnitude, avoiding division by a near-zero value.
    pub fn orthogonal(self) -> Vec3 {
        if self.z.abs() > 0.1 {
            let a = 1.0;
            let b = 1.0;
            let c = -(self.x * a + self.y * b) / self.z;
            Vec3::unit(a, b, c)
        } else if self.x.abs() > 0.1 {
            let b = 1.0;
            let c = 1.0;
            let a = -(self.y * b + self.z * c) / self.x;
            Vec3::unit(a, b, c)
        } else {
            let a = 1.0;
            let c = 1.0;
            let b = -(self.x * a + self.z * c) / self.y;
            Vec3::unit(a, b, c)
        }
    }

    /// Computes the cross product with the provided Vec3.
    pub fn cross(self, t: Vec3) -> Vec3 {
        Vec3::new(
            self.y * t.z - self.z * t.y,
            self.z * t.x - self.x * t.z,
            self.x * t.y - self.y * t.x,
        )
    }

    /// Returns the length.
    pub fn len(self) -> f32 {
        self.len_sq().sqrt()
    }

    /// Returns the *square* length.
    pub fn len_sq(self) -> f32 {
        self.dot(self)
    }

    /// Returns the *square* distance to the other point.
    pub fn dis_sq(self, b: Vec3) -> f32 {
        (self - b).len_sq()
    }

    /// Returns the distance to the other point.
    pub fn dis(self, b: Vec3) -> f32 {
        (self - b).len()
    }

    /// Reflects the vector along the provided normal.
    ///
    /// Computes `2(v·n)n - v`, i.e. `self` is treated as pointing *away*
    /// from the surface, as is common in shading code.
    pub fn reflect(self, normal: Vec3) -> Vec3 {
        2.0 * self.dot(normal) * normal - self
    }

    /// Applies every component as the power of `e`, i.e. (e^x, e^y, e^z).
    pub fn exp(self) -> Vec3 {
        Vec3::new(self.x.exp(), self.y.exp(), self.z.exp())
    }

    /// Rotates around the y axis by `angle` radians.
    pub fn rot_y(&self, angle: f32) -> Vec3 {
        // [  c 0 s ] [x]
        // [  0 1 0 ] [y]
        // [ -s 0 c ] [z]
        let c = angle.cos();
        let s = angle.sin();
        Vec3::new(c * self.x + s * self.z, self.y, -s * self.x + c * self.z)
    }

    /// Rotates around the x axis by `angle` radians.
    pub fn rot_x(&self, angle: f32) -> Vec3 {
        // [ 1 0  0 ] [x]
        // [ 0 c -s ] [y]
        // [ 0 s  c ] [z]
        let c = angle.cos();
        let s = angle.sin();
        Vec3::new(self.x, c * self.y - s * self.z, s * self.y + c * self.z)
    }
}

impl ops::Neg for Vec3 {
    type Output = Vec3;

    fn neg(self) -> Vec3 {
        Vec3::new(-self.x, -self.y, -self.z)
    }
}

impl ops::Add<Vec3> for Vec3 {
    type Output = Vec3;

    fn add(self, addend: Vec3) -> Vec3 {
        Vec3::new(self.x + addend.x, self.y + addend.y, self.z + addend.z)
    }
}

impl ops::Sub<Vec3> for Vec3 {
    type Output = Vec3;

    fn sub(self, subtrahend: Vec3) -> Vec3 {
        Vec3::new(self.x - subtrahend.x, self.y - subtrahend.y, self.z - subtrahend.z)
    }
}

impl ops::Mul<f32> for Vec3 {
    type Output = Vec3;

    fn mul(self, multiplier: f32) -> Vec3 {
        Vec3::new(multiplier * self.x, multiplier * self.y, multiplier * self.z)
    }
}

impl ops::Mul<Vec3> for f32 {
    type Output = Vec3;

    fn mul(self, v: Vec3) -> Vec3 {
        Vec3::new(self * v.x, self * v.y, self * v.z)
    }
}

impl ops::Mul<Vec3> for Vec3 {
    type Output = Vec3;

    /// Component-wise (Hadamard) product.
    fn mul(self, v: Vec3) -> Vec3 {
        Vec3::new(self.x * v.x, self.y * v.y, self.z * v.z)
    }
}

impl ops::Div<f32> for Vec3 {
    type Output = Vec3;

    fn div(self, divisor: f32) -> Vec3 {
        Vec3::new(self.x / divisor, self.y / divisor, self.z / divisor)
    }
}

impl ops::AddAssign<Vec3> for Vec3 {
    fn add_assign(&mut self, v: Vec3) {
        self.x += v.x;
        self.y += v.y;
        self.z += v.z;
    }
}

impl fmt::Debug for Vec3 {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "vec3({}, {}, {})", self.x, self.y, self.z)
    }
}
| true
|
afea4e02014fbb88ed551719dc76f3cb120a919a
|
Rust
|
ZhouHansen/async_rust_playground
|
/src/event_loop.rs
|
UTF-8
| 2,456
| 3.171875
| 3
|
[] |
no_license
|
use std::collections::BTreeMap;
use std::sync::mpsc;
use std::thread;
use std::time::Instant;
use super::task_exc::*;
/// A handle to a timer, used for registering wakeups
#[derive(Clone)]
pub struct ToyTimer {
    // Channel to the worker thread spawned in `ToyTimer::new`.
    pub tx: mpsc::Sender<Registration>,
}
/// A wakeup request
pub struct Registration {
    // Deadline after which `wake` should fire.
    pub at: Instant,
    pub wake: ToyWake,
}
/// State for the worker thread that processes timer events
pub struct Worker {
    pub rx: mpsc::Receiver<Registration>,
    // Pending wakeups keyed by deadline; BTreeMap keeps the earliest first.
    pub active: BTreeMap<Instant, Vec<ToyWake>>
}
impl ToyTimer {
    /// Spawns the worker thread that delivers wakeups and returns a handle
    /// for registering them.
    pub fn new() -> ToyTimer {
        let (tx, rx) = mpsc::channel();
        let worker = Worker { rx, active: BTreeMap::new() };
        thread::spawn(|| worker.work());
        ToyTimer { tx }
    }
    /// Register a new wakeup with this timer.
    ///
    /// # Panics
    /// Panics if the worker thread has shut down (send on a closed channel).
    pub fn register(&self, at: Instant, wake: ToyWake) {
        // Field-init shorthand; the original spelled the redundant
        // `wake: wake`.
        self.tx.send(Registration { at, wake }).unwrap();
    }
}
impl Worker {
    /// Queues a registration under its due time.
    pub fn enroll(&mut self, item: Registration) {
        add_active(item.at, item.wake, &mut self.active)
    }
    /// Fires every wakeup registered for `key` and removes the entry.
    /// Panics if `key` is absent; callers only pass keys observed in `active`.
    pub fn fire(&mut self, key: Instant) {
        self.active.remove(&key).unwrap().iter().for_each(
            |waker| {
                waker.wake();
            }
        )
    }
    /// Worker-thread main loop: fire timers as they come due, and block for
    /// new registrations while waiting on the next deadline. Never returns.
    pub fn work(mut self) {
        loop {
            if let Some(first) = self.active.keys().next().cloned() {
                let now = Instant::now();
                if first <= now {
                    self.fire(first);
                } else {
                    // we're not ready to fire off `first` yet, so wait until we are
                    // (or until we get a new registration, which might be for an
                    // earlier time).
                    // NOTE(review): on timeout we simply loop back around and
                    // fire `first` on the next iteration.
                    if let Ok(new_registration) = self.rx.recv_timeout(first - now) {
                        self.enroll(new_registration);
                    }
                }
            } else {
                // no existing registrations, so unconditionally block until
                // we receive one.
                let new_registration = self.rx.recv().unwrap();
                self.enroll(new_registration)
            }
        }
    }
}
/// Appends `new_value` to the wakeup list stored under `property`, creating
/// the list if this is the first registration for that instant.
fn add_active (property: Instant, new_value: ToyWake, active: &mut BTreeMap<Instant, Vec<ToyWake>>) {
    // The entry API does a single map lookup, replacing the previous
    // contains_key + get_mut/insert double-lookup pattern.
    active.entry(property).or_insert_with(Vec::new).push(new_value);
}
| true
|
daebc0b861c1d64722ff97d6b0d28221d3f09827
|
Rust
|
feather-rs/feather
|
/feather/plugin-host/src/context.rs
|
UTF-8
| 13,622
| 2.625
| 3
|
[
"Apache-2.0"
] |
permissive
|
use std::{
alloc::Layout,
cell::{Ref, RefMut},
marker::PhantomData,
mem::size_of,
panic::AssertUnwindSafe,
ptr::NonNull,
sync::atomic::{AtomicBool, Ordering},
};
use anyhow::anyhow;
use bytemuck::{Pod, Zeroable};
use feather_common::Game;
use feather_ecs::EntityBuilder;
use quill_common::Component;
use serde::de::DeserializeOwned;
use vec_arena::Arena;
use wasmer::{FromToNativeWasmType, Instance};
use crate::{host_function::WasmHostFunction, thread_pinned::ThreadPinned, PluginId};
mod native;
mod wasm;
/// Wraps a pointer into a plugin's memory space.
#[derive(Copy, Clone, PartialEq, Eq, Zeroable)]
#[repr(transparent)]
pub struct PluginPtr<T> {
    // Raw address in the plugin's memory space, stored as a `u64`.
    pub ptr: u64,
    // Ties the pointer to its pointee type without owning a `T`.
    pub _marker: PhantomData<*const T>,
}
impl<T> PluginPtr<T> {
    /// Reinterprets this plugin-space address as a host pointer.
    pub fn as_native(&self) -> *const T {
        self.ptr as usize as *const T
    }

    /// Offsets the pointer by `n` elements of `T`.
    ///
    /// # Safety
    /// Adding `n` to this pointer must produce a pointer within the same
    /// allocated object.
    #[must_use = "PluginPtr::add returns a new pointer"]
    pub unsafe fn add(self, n: usize) -> Self {
        let byte_offset = (n * size_of::<T>()) as u64;
        Self {
            ptr: self.ptr + byte_offset,
            _marker: PhantomData,
        }
    }

    /// Reinterprets the pointee type, keeping the address unchanged.
    ///
    /// # Safety
    /// The cast must be valid.
    pub unsafe fn cast<U>(self) -> PluginPtr<U> {
        PluginPtr {
            ptr: self.ptr,
            _marker: PhantomData,
        }
    }
}
unsafe impl<T: Copy + 'static> Pod for PluginPtr<T> {}
/// Wraps a pointer into a plugin's memory space.
#[derive(Copy, Clone, PartialEq, Eq, Zeroable)]
#[repr(transparent)]
pub struct PluginPtrMut<T> {
    // Raw address in the plugin's memory space, stored as a `u64`.
    pub ptr: u64,
    // Ties the pointer to its pointee type; `*mut T` marks this as the
    // mutable counterpart of `PluginPtr`.
    pub _marker: PhantomData<*mut T>,
}
impl<T> PluginPtrMut<T> {
    /// Reinterprets this plugin-space address as a mutable host pointer.
    pub fn as_native(&self) -> *mut T {
        self.ptr as usize as *mut T
    }

    /// Constructs the null pointer (address 0).
    ///
    /// # Safety
    /// A null pointer must be valid in the context it is used.
    pub unsafe fn null() -> Self {
        Self {
            ptr: 0,
            _marker: PhantomData,
        }
    }

    /// Offsets the pointer by `n` elements of `T`.
    ///
    /// # Safety
    /// Adding `n` to this pointer must produce a pointer within the same
    /// allocated object.
    #[must_use = "PluginPtrMut::add returns a new pointer"]
    pub unsafe fn add(self, n: usize) -> Self {
        let byte_offset = (n * size_of::<T>()) as u64;
        Self {
            ptr: self.ptr + byte_offset,
            _marker: PhantomData,
        }
    }

    /// Reinterprets the pointee type, keeping the address unchanged.
    ///
    /// # Safety
    /// The cast must be valid.
    pub unsafe fn cast<U>(self) -> PluginPtrMut<U> {
        PluginPtrMut {
            ptr: self.ptr,
            _marker: PhantomData,
        }
    }
}
unsafe impl<T: Copy + 'static> Pod for PluginPtrMut<T> {}
// SAFETY: `u64 as i64` / `i64 as u64` are bit-pattern-preserving casts, so
// the plugin-space address round-trips exactly through wasmer's native
// `i64` representation.
unsafe impl<T: Copy> FromToNativeWasmType for PluginPtr<T> {
    type Native = i64;
    fn from_native(native: Self::Native) -> Self {
        Self {
            ptr: native as u64,
            _marker: PhantomData,
        }
    }
    fn to_native(self) -> Self::Native {
        self.ptr as i64
    }
}
// SAFETY: `u64 as i64` / `i64 as u64` are bit-pattern-preserving casts, so
// the plugin-space address round-trips exactly through wasmer's native
// `i64` representation.
unsafe impl<T: Copy> FromToNativeWasmType for PluginPtrMut<T> {
    type Native = i64;
    fn from_native(native: Self::Native) -> Self {
        Self {
            ptr: native as u64,
            _marker: PhantomData,
        }
    }
    fn to_native(self) -> Self::Native {
        self.ptr as i64
    }
}
/// Context of a running plugin.
///
/// Provides methods to access plugin memory,
/// invoke exported functions, and access the `Game`.
///
/// This type abstracts over WASM or native plugins,
/// providing the same interface for both.
///
/// # Safety
/// The `native` version of the plugin context
/// dereferences raw pointers. We assume pointers
/// passed by plugins are valid. Most functions
/// will cause undefined behavior if these constraints
/// are violated.
///
/// We type-encode that a pointer originates from a plugin
/// using the `PluginPtr` structs. Methods that
/// dereference pointers take instances of these
/// structs. Since creating a `PluginPtr` is unsafe,
/// `PluginContext` methods don't have to be marked
/// unsafe.
///
/// On WASM targets, the plugin is never trusted,
/// and pointer accesses are checked. Undefined behavior
/// can never occur as a result of malicious plugin input.
pub struct PluginContext {
    // Backend-specific (WASM or native) state; see `Inner`.
    inner: Inner,
    /// Whether the plugin is currently being invoked
    /// on the main thread.
    /// If this is `true`, then plugin functions are on the call stack.
    invoking_on_main_thread: AtomicBool,
    /// The current `Game`.
    ///
    /// Set to `None` if `invoking_on_main_thread` is `false`.
    /// Otherwise, must point to a valid game. The pointer
    /// must be cleared after the plugin finishes executing
    /// or we risk a dangling reference.
    game: ThreadPinned<Option<NonNull<Game>>>,
    /// ID of the plugin.
    id: PluginId,
    /// Active entity builders for the plugin.
    pub entity_builders: ThreadPinned<Arena<EntityBuilder>>,
}
impl PluginContext {
    /// Creates a new WASM plugin context.
    pub fn new_wasm(id: PluginId) -> Self {
        Self {
            inner: Inner::Wasm(ThreadPinned::new(wasm::WasmPluginContext::new())),
            invoking_on_main_thread: AtomicBool::new(false),
            game: ThreadPinned::new(None),
            id,
            entity_builders: ThreadPinned::new(Arena::new()),
        }
    }
    /// Creates a new native plugin context.
    pub fn new_native(id: PluginId) -> Self {
        Self {
            inner: Inner::Native(native::NativePluginContext::new()),
            invoking_on_main_thread: AtomicBool::new(false),
            game: ThreadPinned::new(None),
            id,
            entity_builders: ThreadPinned::new(Arena::new()),
        }
    }
    /// Finishes initializing a WASM context with its instantiated module.
    ///
    /// # Panics
    /// Panics if called on a native plugin context.
    pub fn init_with_instance(&self, instance: &Instance) -> anyhow::Result<()> {
        match &self.inner {
            Inner::Wasm(w) => w.borrow_mut().init_with_instance(instance),
            Inner::Native(_) => panic!("cannot initialize native plugin context"),
        }
    }
    /// Enters the plugin context, invoking a function inside the plugin.
    ///
    /// # Panics
    /// Panics if we are already inside the plugin context.
    /// Panics if not called on the main thread.
    pub fn enter<R>(&self, game: &mut Game, callback: impl FnOnce() -> R) -> R {
        let was_already_entered = self.invoking_on_main_thread.swap(true, Ordering::SeqCst);
        assert!(!was_already_entered, "cannot recursively invoke a plugin");
        *self.game.borrow_mut() = Some(NonNull::from(game));
        // If a panic occurs, we need to catch it so
        // we clear `self.game`. Otherwise, we get
        // a dangling pointer.
        let result = std::panic::catch_unwind(AssertUnwindSafe(callback));
        self.invoking_on_main_thread.store(false, Ordering::SeqCst);
        *self.game.borrow_mut() = None;
        self.bump_reset();
        // Re-raise the plugin's panic with its original payload instead of
        // unwrapping it (which would replace the message with a generic
        // `called Result::unwrap() on an Err` panic).
        match result {
            Ok(value) => value,
            Err(payload) => std::panic::resume_unwind(payload),
        }
    }
    /// Gets a mutable reference to the `Game`.
    ///
    /// # Panics
    /// Panics if the plugin is not currently being
    /// invoked on the main thread.
    pub fn game_mut(&self) -> RefMut<Game> {
        let ptr = self.game.borrow_mut();
        RefMut::map(ptr, |ptr| {
            // (Typo fixed: message previously read "plugin is not exeuctugin".)
            let game_ptr = ptr.expect("plugin is not executing");
            assert!(self.invoking_on_main_thread.load(Ordering::Relaxed));
            // SAFETY: `game_ptr` points to a valid `Game` whenever
            // the plugin is executing. If the plugin is not
            // executing, then we already panicked when unwrapping `ptr`.
            unsafe { &mut *game_ptr.as_ptr() }
        })
    }
    /// Gets the plugin ID.
    pub fn plugin_id(&self) -> PluginId {
        self.id
    }
    /// Accesses a byte slice in the plugin's memory space.
    ///
    /// # Safety
    /// **WASM**: mutating plugin memory or invoking
    /// plugin functions while this byte slice is
    /// alive is undefined behavior.
    /// **Native**: `ptr` must be valid.
    pub unsafe fn deref_bytes(&self, ptr: PluginPtr<u8>, len: u32) -> anyhow::Result<&[u8]> {
        match &self.inner {
            Inner::Wasm(w) => {
                let w = w.borrow();
                let bytes = w.deref_bytes(ptr, len)?;
                Ok(unsafe { std::slice::from_raw_parts(bytes.as_ptr(), bytes.len()) })
            }
            Inner::Native(n) => n.deref_bytes(ptr, len),
        }
    }
    /// Accesses a byte slice in the plugin's memory space.
    ///
    /// # Safety
    /// **WASM**: accessing plugin memory or invoking
    /// plugin functions while this byte slice is
    /// alive is undefined behavior.
    /// **Native**: `ptr` must be valid and the aliasing
    /// rules must not be violated.
    pub unsafe fn deref_bytes_mut(
        &self,
        ptr: PluginPtrMut<u8>,
        len: u32,
    ) -> anyhow::Result<&mut [u8]> {
        match &self.inner {
            Inner::Wasm(w) => {
                let w = w.borrow();
                let bytes = w.deref_bytes_mut(ptr, len)?;
                Ok(unsafe { std::slice::from_raw_parts_mut(bytes.as_mut_ptr(), bytes.len()) })
            }
            Inner::Native(n) => n.deref_bytes_mut(ptr, len),
        }
    }
    /// Accesses a `Pod` value in the plugin's memory space.
    pub fn read_pod<T: Pod>(&self, ptr: PluginPtr<T>) -> anyhow::Result<T> {
        // SAFETY: we do not return a reference to these
        // bytes.
        unsafe {
            let bytes = self.deref_bytes(ptr.cast(), size_of::<T>() as u32)?;
            bytemuck::try_from_bytes(bytes)
                .map_err(|_| anyhow!("badly aligned data"))
                .map(|val| *val)
        }
    }
    /// Accesses a `bincode`-encoded value in the plugin's memory space.
    pub fn read_bincode<T: DeserializeOwned>(
        &self,
        ptr: PluginPtr<u8>,
        len: u32,
    ) -> anyhow::Result<T> {
        // SAFETY: we do not return a reference to these
        // bytes.
        unsafe {
            let bytes = self.deref_bytes(ptr.cast(), len)?;
            bincode::deserialize(bytes).map_err(From::from)
        }
    }
    /// Accesses a `json`-encoded value in the plugin's memory space.
    pub fn read_json<T: DeserializeOwned>(
        &self,
        ptr: PluginPtr<u8>,
        len: u32,
    ) -> anyhow::Result<T> {
        // SAFETY: we do not return a reference to these
        // bytes.
        unsafe {
            let bytes = self.deref_bytes(ptr.cast(), len)?;
            serde_json::from_slice(bytes).map_err(From::from)
        }
    }
    /// Deserializes a component value in the plugin's memory space.
    pub fn read_component<T: Component>(&self, ptr: PluginPtr<u8>, len: u32) -> anyhow::Result<T> {
        // SAFETY: we do not return a reference to these
        // bytes.
        unsafe {
            let bytes = self.deref_bytes(ptr.cast(), len)?;
            T::from_bytes(bytes)
                .ok_or_else(|| anyhow!("malformed component"))
                .map(|(component, _bytes_read)| component)
        }
    }
    /// Reads a string from the plugin's memory space.
    pub fn read_string(&self, ptr: PluginPtr<u8>, len: u32) -> anyhow::Result<String> {
        // SAFETY: we do not return a reference to these bytes.
        unsafe {
            let bytes = self.deref_bytes(ptr.cast(), len)?;
            let string = std::str::from_utf8(bytes)?.to_owned();
            Ok(string)
        }
    }
    /// Reads a `Vec<u8>` from the plugin's memory space.
    pub fn read_bytes(&self, ptr: PluginPtr<u8>, len: u32) -> anyhow::Result<Vec<u8>> {
        // SAFETY: we do not return a reference to these bytes.
        unsafe {
            let bytes = self.deref_bytes(ptr.cast(), len)?;
            Ok(bytes.to_owned())
        }
    }
    /// Allocates some memory within the plugin's bump
    /// allocator.
    ///
    /// The memory is reset after the plugin finishes
    /// executing the current system.
    pub fn bump_allocate(&self, layout: Layout) -> anyhow::Result<PluginPtrMut<u8>> {
        match &self.inner {
            Inner::Wasm(w) => w.borrow().bump_allocate(layout),
            Inner::Native(n) => n.bump_allocate(layout),
        }
    }
    /// Bump allocates some memory, then copies `data` into it.
    pub fn bump_allocate_and_write_bytes(&self, data: &[u8]) -> anyhow::Result<PluginPtrMut<u8>> {
        let layout = Layout::array::<u8>(data.len())?;
        let ptr = self.bump_allocate(layout)?;
        // SAFETY: our access to these bytes is isolated to the
        // current function. `ptr` is valid as it was just allocated.
        unsafe {
            self.write_bytes(ptr, data)?;
        }
        Ok(ptr)
    }
    /// Writes `data` to `ptr`.
    ///
    /// # Safety
    /// **WASM**: No concerns.
    /// **NATIVE**: `ptr` must point to a slice
    /// of at least `len` valid bytes.
    pub unsafe fn write_bytes(&self, ptr: PluginPtrMut<u8>, data: &[u8]) -> anyhow::Result<()> {
        let bytes = self.deref_bytes_mut(ptr, data.len() as u32)?;
        bytes.copy_from_slice(data);
        Ok(())
    }
    /// Writes a `Pod` type to `ptr`.
    pub fn write_pod<T: Pod>(&self, ptr: PluginPtrMut<T>, value: T) -> anyhow::Result<()> {
        // SAFETY: Unlike `write_bytes`, we know `ptr` is valid for values
        // of type `T` because of its type parameter.
        unsafe { self.write_bytes(ptr.cast(), bytemuck::bytes_of(&value)) }
    }
    /// Deallocates all bump-allocated memory.
    fn bump_reset(&self) {
        match &self.inner {
            Inner::Wasm(w) => w.borrow().bump_reset(),
            Inner::Native(n) => n.bump_reset(),
        }
    }
}
/// Backend-specific context state for a plugin.
enum Inner {
    // WASM plugin backend; wrapped in `ThreadPinned` (see `crate::thread_pinned`).
    Wasm(ThreadPinned<wasm::WasmPluginContext>),
    // Natively loaded plugin backend.
    Native(native::NativePluginContext),
}
| true
|
7ed796b533e30a6b2055ed1247984273528e8525
|
Rust
|
neunenak/hilite
|
/src/main.rs
|
UTF-8
| 3,549
| 3.0625
| 3
|
[] |
no_license
|
/* hilite - runs a command, highlighting everything it sends to stderr
* based on hilite by Mike Schiraldi <mgs21@columbia.edu>
*
*/
extern crate getopts;
use std::env;
use std::io;
use std::io::Write;
use std::io::Read;
use std::process;
// Writes `format!`-style arguments to stderr, panicking if the write fails.
macro_rules! print_stderr {
    ($($arg:tt)*) => {
        (write!(&mut io::stderr(), "{}", format_args!($($arg)*))).unwrap();
    }
}
// The highlight styles selectable via the `-s/--style` flag; `color_code`
// maps each variant to its ANSI escape sequence.
enum HighlightStyles {
    Red,
    Cyan,
    BlackUnderline,
    WhiteUnderline,
    RedUnderline,
    CyanUnderline,
    RedBackground,
    CyanBackground
}
/// Maps a highlight style to the ANSI escape sequence that enables it
/// (SGR parameters: 1 = bold, 3x = foreground colour, 4 = underline,
/// 4x = background colour).
fn color_code(style: HighlightStyles) -> &'static str {
    match style {
        HighlightStyles::Red => "\x1b[1;31m",
        HighlightStyles::Cyan => "\x1b[1;36m",
        HighlightStyles::BlackUnderline => "\x1b[4;30m",
        HighlightStyles::WhiteUnderline => "\x1b[4;37m",
        HighlightStyles::RedUnderline => "\x1b[4;31m",
        HighlightStyles::CyanUnderline => "\x1b[4;36m",
        HighlightStyles::RedBackground => "\x1b[41m",
        HighlightStyles::CyanBackground => "\x1b[46m",
    }
}
/// Runs the command given on the command line, echoing everything it writes
/// to stderr wrapped in the chosen ANSI highlight style.
fn main() {
    let args: Vec<String> = env::args().collect();
    let mut opts = getopts::Options::new();
    opts.optopt("s", "style", "STYLE is one of: red | cyan | underline-{red|cyan|black|white} | background-{red|cyan}", "STYLE");
    opts.optflag("h", "help", "Print help");
    let matches = match opts.parse(&args[1..]) {
        Ok(m) => m,
        // `panic!` with a non-literal payload is deprecated (a hard error in
        // the 2021 edition); format the error through a literal instead.
        Err(f) => panic!("{}", f)
    };
    if matches.opt_present("h") {
        let brief = format!("Usage: OPTIONS PROGRAM {}", "{-- PROGRAM_OPTIONS }");
        print!("{}", opts.usage(&brief));
        return;
    }
    if matches.free.is_empty() {
        print_stderr!("{}: specify a command to execute\n",
                      args.get(0).unwrap());
        return;
    }
    let style = match matches.opt_str("style") {
        Some(ref s) if s == "red" => HighlightStyles::Red,
        Some(ref s) if s == "cyan" => HighlightStyles::Cyan,
        Some(ref s) if s == "underline-black" => HighlightStyles::BlackUnderline,
        Some(ref s) if s == "underline-white" => HighlightStyles::WhiteUnderline,
        Some(ref s) if s == "underline-red" => HighlightStyles::RedUnderline,
        Some(ref s) if s == "underline-cyan" => HighlightStyles::CyanUnderline,
        Some(ref s) if s == "background-red" => HighlightStyles::RedBackground,
        Some(ref s) if s == "background-cyan" => HighlightStyles::CyanBackground,
        Some(ref s) => panic!("Bad option for style: {}", s),
        None => HighlightStyles::Red
    };
    let program_name = matches.free.get(0).unwrap();
    let (_, program_args) = matches.free.split_at(1);
    let running_program = process::Command::new(program_name)
        .args(program_args)
        .stderr(process::Stdio::piped())
        .spawn()
        .unwrap_or_else(|_| panic!("Failed to spawn program"));
    let mut running_program_stderr = running_program.stderr.unwrap();
    let color_header = color_code(style);
    let color_footer = "\x1b[0m"; // SGR reset
    let mut buf = [0; 4096];
    loop {
        let res = running_program_stderr.read(&mut buf[..]);
        match res {
            Ok(0) => break, // EOF: the child closed its stderr
            // Only forward the `n` bytes actually read. The previous code
            // converted the entire 4096-byte buffer on every read, echoing
            // NUL bytes and stale data after each short read.
            Ok(n) => {
                print_stderr!("{}{}{}", color_header,
                              String::from_utf8_lossy(&buf[..n]),
                              color_footer);
            },
            Err(_) => panic!("Error reading from child process")
        }
    }
}
| true
|
de54e3ec4359041b202836ac07319e207897336b
|
Rust
|
marosluuce/rchip8
|
/src/ops/jump_absolute.rs
|
UTF-8
| 822
| 3.296875
| 3
|
[] |
no_license
|
use cpu::Cpu;
use ops::op::{Matcher, Op};
use std::fmt;
// CHIP-8 `1nnn` instruction: unconditional jump to an absolute address.
pub struct JumpAbsolute {
    // Target address: the low 12 bits of the opcode.
    address: u16,
}
impl Op for JumpAbsolute {
    /// Executes the jump: returns a CPU whose program counter is the stored
    /// target address, with all other state carried over unchanged.
    fn execute(&self, cpu: Cpu) -> Cpu {
        let mut next = cpu;
        next.pc = self.address;
        next
    }
}
impl Matcher for JumpAbsolute {
    // High nibble `1` plus the 12 address bits.
    const MASK: u16 = 0x1FFF;

    /// Decodes the low 12 bits of the opcode as the jump target.
    fn new(opcode: u16) -> JumpAbsolute {
        let address = opcode & 0x0FFF;
        JumpAbsolute { address }
    }
}
impl fmt::Display for JumpAbsolute {
    /// Renders the instruction in assembly form, e.g. `JP 123`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let target = self.address;
        write!(f, "JP {:X}", target)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Opcode 0x1123 decodes to "jump to 0x123": only the program counter
    // changes; every other field of the CPU is preserved.
    #[test]
    fn executes_jump_absolute() {
        let op = JumpAbsolute::new(0x1123);
        let cpu = Cpu::new();
        assert_eq!(Cpu { pc: 0x123, ..cpu }, op.execute(cpu));
    }
}
| true
|
e1274765b3ed02ff4225f16e8e0c87d5113a4929
|
Rust
|
mswdwk/bincode
|
/tests/issues/issue_431.rs
|
UTF-8
| 791
| 2.6875
| 3
|
[
"MIT"
] |
permissive
|
#![cfg(all(feature = "std", feature = "derive"))]
extern crate std;
use bincode::{Decode, Encode};
use std::borrow::Cow;
use std::string::String;
// Outer regression-test type: a `Cow` of another lifetime-parameterised,
// derive-encoded type.
#[derive(Encode, Decode, PartialEq, Debug)]
struct T<'a, A: Clone + Encode + Decode> {
    t: Cow<'a, U<'a, A>>,
}
// Inner regression-test type: a `Cow` over a generic payload. `Clone` is
// required so `Cow<U<..>>` can produce an owned value.
#[derive(Clone, Encode, Decode, PartialEq, Debug)]
struct U<'a, A: Clone + Encode + Decode> {
    u: Cow<'a, A>,
}
// Regression test for bincode issue #431: nested `Cow`s of derive-implemented
// types must round-trip through encode/decode.
#[test]
fn test() {
    let u = U {
        u: Cow::Owned(String::from("Hello world")),
    };
    let t = T {
        t: Cow::Borrowed(&u),
    };
    let vec = bincode::encode_to_vec(&t, bincode::config::standard()).unwrap();
    let (decoded, len): (T<String>, usize) =
        bincode::decode_from_slice(&vec, bincode::config::standard()).unwrap();
    // Round-trip equality; 12 bytes total — presumably a 1-byte length prefix
    // plus the 11 UTF-8 bytes of "Hello world" (verify against bincode spec).
    assert_eq!(t, decoded);
    assert_eq!(len, 12);
}
| true
|
db850f043e04b0c317d58045ab8528cfbd3e7630
|
Rust
|
anholt/glsl
|
/src/lib.rs
|
UTF-8
| 816
| 2.84375
| 3
|
[] |
no_license
|
//! # GLSL compiler
//!
//! This crate is a GLSL450 compiler. It’s able to parse valid GLSL450 formatted source into an
//! abstract syntax tree (AST). That AST can then be transformed into SPIR-V, your own format or
//! even folded back to a raw GLSL `String` (think of a minifier, for instance).
//!
//! You’ll find two main modules:
//!
//! - `parser`, which exports most of the parsers to parse the whole or a part of GLSL source
//! (intermediary parsers)
//! - `syntax`, which exports the AST
//!
//! Feel free to inspect those modules for further information.
//!
//! # Quick parsing
//!
//! If you’re just looking for a parser that would give you the AST for a shader, you might be
//! interested in `translation_unit`.
#[macro_use]
extern crate nom;
pub mod parser;
pub mod syntax;
pub mod writer;
| true
|
59cf03ee6cb0a52282c20b39aa3df6ea5faf51ed
|
Rust
|
szabgab/slides
|
/rust/examples/vectors/map2.rs
|
UTF-8
| 209
| 2.84375
| 3
|
[] |
no_license
|
/// Demonstrates a stateful closure in a lazy iterator chain: prints each
/// character in reverse order paired with a 1-based counter.
fn main() {
    let chars = vec!['a', 'b', 'c'];
    for (count, letter) in (1..).zip(chars.into_iter().rev()) {
        println!("{:?}", (letter, count));
    }
}
| true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.