lang
stringclasses
3 values
file_path
stringlengths
5
150
repo_name
stringlengths
6
110
commit
stringlengths
40
40
file_code
stringlengths
1.52k
18.9k
prefix
stringlengths
82
16.5k
suffix
stringlengths
0
15.1k
middle
stringlengths
121
8.18k
strategy
stringclasses
8 values
context_items
listlengths
0
100
Rust
gen/src/builtin.rs
capickett/cxx
8e2faa4991c75704b33e998cd332eb45346d5336
use crate::gen::block::Block; use crate::gen::ifndef; use crate::gen::out::{Content, OutFile}; #[derive(Default, PartialEq)] pub struct Builtins<'a> { pub panic: bool, pub rust_string: bool, pub rust_str: bool, pub rust_slice: bool, pub rust_box: bool, pub rust_vec: bool, pub rust_fn: bool, pub rust_isize: bool, pub opaque: bool, pub layout: bool, pub unsafe_bitcopy: bool, pub rust_error: bool, pub manually_drop: bool, pub maybe_uninit: bool, pub trycatch: bool, pub ptr_len: bool, pub repr_fat: bool, pub rust_str_new_unchecked: bool, pub rust_str_repr: bool, pub rust_slice_new: bool, pub rust_slice_repr: bool, pub exception: bool, pub relocatable: bool, pub friend_impl: bool, pub is_complete: bool, pub deleter_if: bool, pub content: Content<'a>, } impl<'a> Builtins<'a> { pub fn new() -> Self { Builtins::default() } } pub(super) fn write(out: &mut OutFile) { if out.builtin == Default::default() { return; } let include = &mut out.include; let builtin = &mut out.builtin; let out = &mut builtin.content; if builtin.rust_string { include.array = true; include.cstdint = true; include.string = true; } if builtin.rust_str { include.array = true; include.cstdint = true; include.string = true; builtin.friend_impl = true; } if builtin.rust_vec { include.algorithm = true; include.array = true; include.cstddef = true; include.cstdint = true; include.initializer_list = true; include.iterator = true; include.new = true; include.type_traits = true; include.utility = true; builtin.panic = true; builtin.rust_slice = true; builtin.unsafe_bitcopy = true; } if builtin.rust_slice { include.array = true; include.cstddef = true; include.cstdint = true; include.iterator = true; include.type_traits = true; builtin.friend_impl = true; builtin.layout = true; builtin.panic = true; } if builtin.rust_box { include.new = true; include.type_traits = true; include.utility = true; } if builtin.rust_fn { include.utility = true; } if builtin.rust_error { include.exception = true; builtin.friend_impl = 
true; } if builtin.rust_isize { include.basetsd = true; include.sys_types = true; } if builtin.relocatable { include.type_traits = true; } if builtin.layout { include.type_traits = true; include.cstddef = true; builtin.is_complete = true; } if builtin.is_complete { include.cstddef = true; include.type_traits = true; } out.begin_block(Block::Namespace("rust")); out.begin_block(Block::InlineNamespace("cxxbridge1")); writeln!(out, "// #include \"rust/cxx.h\""); ifndef::write(out, builtin.panic, "CXXBRIDGE1_PANIC"); if builtin.rust_string { out.next_section(); writeln!(out, "struct unsafe_bitcopy_t;"); } if builtin.friend_impl { out.begin_block(Block::AnonymousNamespace); writeln!(out, "template <typename T>"); writeln!(out, "class impl;"); out.end_block(Block::AnonymousNamespace); } out.next_section(); if builtin.rust_str && !builtin.rust_string { writeln!(out, "class String;"); } if builtin.layout && !builtin.opaque { writeln!(out, "class Opaque;"); } if builtin.rust_slice { out.next_section(); writeln!(out, "template <typename T>"); writeln!(out, "::std::size_t size_of();"); writeln!(out, "template <typename T>"); writeln!(out, "::std::size_t align_of();"); } ifndef::write(out, builtin.rust_string, "CXXBRIDGE1_RUST_STRING"); ifndef::write(out, builtin.rust_str, "CXXBRIDGE1_RUST_STR"); ifndef::write(out, builtin.rust_slice, "CXXBRIDGE1_RUST_SLICE"); ifndef::write(out, builtin.rust_box, "CXXBRIDGE1_RUST_BOX"); ifndef::write(out, builtin.unsafe_bitcopy, "CXXBRIDGE1_RUST_BITCOPY"); ifndef::write(out, builtin.rust_vec, "CXXBRIDGE1_RUST_VEC"); ifndef::write(out, builtin.rust_fn, "CXXBRIDGE1_RUST_FN"); ifndef::write(out, builtin.rust_error, "CXXBRIDGE1_RUST_ERROR"); ifndef::write(out, builtin.rust_isize, "CXXBRIDGE1_RUST_ISIZE"); ifndef::write(out, builtin.opaque, "CXXBRIDGE1_RUST_OPAQUE"); ifndef::write(out, builtin.is_complete, "CXXBRIDGE1_IS_COMPLETE"); ifndef::write(out, builtin.layout, "CXXBRIDGE1_LAYOUT"); ifndef::write(out, builtin.relocatable, 
"CXXBRIDGE1_RELOCATABLE"); if builtin.rust_str_new_unchecked { out.next_section(); writeln!(out, "class Str::uninit {{}};"); writeln!(out, "inline Str::Str(uninit) noexcept {{}}"); } if builtin.rust_slice_new { out.next_section(); writeln!(out, "template <typename T>"); writeln!(out, "class Slice<T>::uninit {{}};"); writeln!(out, "template <typename T>"); writeln!(out, "inline Slice<T>::Slice(uninit) noexcept {{}}"); } out.begin_block(Block::Namespace("detail")); if builtin.maybe_uninit { include.cstddef = true; include.new = true; out.next_section(); writeln!(out, "template <typename T, typename = void *>"); writeln!(out, "struct operator_new {{"); writeln!( out, " void *operator()(::std::size_t sz) {{ return ::operator new(sz); }}", ); writeln!(out, "}};"); out.next_section(); writeln!(out, "template <typename T>"); writeln!( out, "struct operator_new<T, decltype(T::operator new(sizeof(T)))> {{", ); writeln!( out, " void *operator()(::std::size_t sz) {{ return T::operator new(sz); }}", ); writeln!(out, "}};"); } out.end_block(Block::Namespace("detail")); if builtin.manually_drop { out.next_section(); include.utility = true; writeln!(out, "template <typename T>"); writeln!(out, "union ManuallyDrop {{"); writeln!(out, " T value;"); writeln!( out, " ManuallyDrop(T &&value) : value(::std::move(value)) {{}}", ); writeln!(out, " ~ManuallyDrop() {{}}"); writeln!(out, "}};"); } if builtin.maybe_uninit { include.cstddef = true; out.next_section(); writeln!(out, "template <typename T>"); writeln!(out, "union MaybeUninit {{"); writeln!(out, " T value;"); writeln!( out, " void *operator new(::std::size_t sz) {{ return detail::operator_new<T>{{}}(sz); }}", ); writeln!(out, " MaybeUninit() {{}}"); writeln!(out, " ~MaybeUninit() {{}}"); writeln!(out, "}};"); } out.begin_block(Block::AnonymousNamespace); if builtin.repr_fat { include.array = true; include.cstdint = true; out.next_section(); out.begin_block(Block::Namespace("repr")); writeln!(out, "using Fat = 
::std::array<::std::uintptr_t, 2>;"); out.end_block(Block::Namespace("repr")); } if builtin.ptr_len { include.cstddef = true; out.next_section(); out.begin_block(Block::Namespace("repr")); writeln!(out, "struct PtrLen final {{"); writeln!(out, " void *ptr;"); writeln!(out, " ::std::size_t len;"); writeln!(out, "}};"); out.end_block(Block::Namespace("repr")); } if builtin.rust_str_new_unchecked || builtin.rust_str_repr { out.next_section(); writeln!(out, "template <>"); writeln!(out, "class impl<Str> final {{"); writeln!(out, "public:"); if builtin.rust_str_new_unchecked { writeln!( out, " static Str new_unchecked(repr::Fat repr) noexcept {{", ); writeln!(out, " Str str = Str::uninit{{}};"); writeln!(out, " str.repr = repr;"); writeln!(out, " return str;"); writeln!(out, " }}"); } if builtin.rust_str_repr { writeln!(out, " static repr::Fat repr(Str str) noexcept {{"); writeln!(out, " return str.repr;"); writeln!(out, " }}"); } writeln!(out, "}};"); } if builtin.rust_slice_new || builtin.rust_slice_repr { out.next_section(); writeln!(out, "template <typename T>"); writeln!(out, "class impl<Slice<T>> final {{"); writeln!(out, "public:"); if builtin.rust_slice_new { writeln!(out, " static Slice<T> slice(repr::Fat repr) noexcept {{"); writeln!(out, " Slice<T> slice = typename Slice<T>::uninit{{}};"); writeln!(out, " slice.repr = repr;"); writeln!(out, " return slice;"); writeln!(out, " }}"); } if builtin.rust_slice_repr { writeln!(out, " static repr::Fat repr(Slice<T> slice) noexcept {{"); writeln!(out, " return slice.repr;"); writeln!(out, " }}"); } writeln!(out, "}};"); } if builtin.rust_error { out.next_section(); writeln!(out, "template <>"); writeln!(out, "class impl<Error> final {{"); writeln!(out, "public:"); writeln!(out, " static Error error(repr::PtrLen repr) noexcept {{"); writeln!(out, " Error error;"); writeln!(out, " error.msg = static_cast<const char *>(repr.ptr);"); writeln!(out, " error.len = repr.len;"); writeln!(out, " return error;"); writeln!(out, " 
}}"); writeln!(out, "}};"); } if builtin.deleter_if { out.next_section(); writeln!(out, "template <bool> struct deleter_if {{"); writeln!(out, " template <typename T> void operator()(T *) {{}}"); writeln!(out, "}};"); out.next_section(); writeln!(out, "template <> struct deleter_if<true> {{"); writeln!( out, " template <typename T> void operator()(T *ptr) {{ ptr->~T(); }}", ); writeln!(out, "}};"); } out.end_block(Block::AnonymousNamespace); out.end_block(Block::InlineNamespace("cxxbridge1")); if builtin.trycatch { out.begin_block(Block::Namespace("behavior")); include.exception = true; include.type_traits = true; include.utility = true; writeln!(out, "class missing {{}};"); writeln!(out, "missing trycatch(...);"); writeln!(out); writeln!(out, "template <typename Try, typename Fail>"); writeln!(out, "static typename ::std::enable_if<"); writeln!( out, " ::std::is_same<decltype(trycatch(::std::declval<Try>(), ::std::declval<Fail>())),", ); writeln!(out, " missing>::value>::type"); writeln!(out, "trycatch(Try &&func, Fail &&fail) noexcept try {{"); writeln!(out, " func();"); writeln!(out, "}} catch (const ::std::exception &e) {{"); writeln!(out, " fail(e.what());"); writeln!(out, "}}"); out.end_block(Block::Namespace("behavior")); } out.end_block(Block::Namespace("rust")); if builtin.exception { include.cstddef = true; out.begin_block(Block::ExternC); writeln!( out, "const char *cxxbridge1$exception(const char *, ::std::size_t);", ); out.end_block(Block::ExternC); } }
use crate::gen::block::Block; use crate::gen::ifndef; use crate::gen::out::{Content, OutFile}; #[derive(Default, PartialEq)] pub struct Builtins<'a> { pub panic: bool, pub rust_string: bool, pub rust_str: bool, pub rust_slice: bool, pub rust_box: bool, pub rust_vec: bool, pub rust_fn: bool, pub rust_isize: bool, pub opaque: bool, pub layout: bool, pub unsafe_bitcopy: bool, pub rust_error: bool, pub manually_drop: bool, pub maybe_uninit: bool, pub trycatch: bool, pub ptr_len: bool, pub repr_fat: bool, pub rust_str_new_unchecked: bool, pub rust_str_repr: bool, pub rust_slice_new: bool, pub rust_slice_repr: bool, pub exception: bool, pub relocatable: bool, pub friend_impl: bool, pub is_complete: bool, pub deleter_if: bool, pub content: Content<'a>, } impl<'a> Builtins<'a> { pub fn new() -> Self { Builtins::default() } } pub(super) fn write(out: &mut OutFile) { if out.builtin == Default::default() { return; } let include = &mut out.include; let builtin = &mut out.builtin; let out = &mut builtin.content; if builtin.rust_string { include.array = true; include.cstdint = true; include.string = true; } if builtin.rust_str { include.array = true; include.cstdint = true; include.string = true; builtin.friend_impl = true; } if builtin.rust_vec { include.algorithm = true; include.array = true; include.cstddef = true; include.cstdint = true; include.initializer_list = true; include.iterator = true; include.new = true; include.type_traits = true; include.utility = true; builtin.panic = true; builtin.rust_slice = true; builtin.unsafe_bitcopy = true; }
if builtin.rust_box { include.new = true; include.type_traits = true; include.utility = true; } if builtin.rust_fn { include.utility = true; } if builtin.rust_error { include.exception = true; builtin.friend_impl = true; } if builtin.rust_isize { include.basetsd = true; include.sys_types = true; } if builtin.relocatable { include.type_traits = true; } if builtin.layout { include.type_traits = true; include.cstddef = true; builtin.is_complete = true; } if builtin.is_complete { include.cstddef = true; include.type_traits = true; } out.begin_block(Block::Namespace("rust")); out.begin_block(Block::InlineNamespace("cxxbridge1")); writeln!(out, "// #include \"rust/cxx.h\""); ifndef::write(out, builtin.panic, "CXXBRIDGE1_PANIC"); if builtin.rust_string { out.next_section(); writeln!(out, "struct unsafe_bitcopy_t;"); } if builtin.friend_impl { out.begin_block(Block::AnonymousNamespace); writeln!(out, "template <typename T>"); writeln!(out, "class impl;"); out.end_block(Block::AnonymousNamespace); } out.next_section(); if builtin.rust_str && !builtin.rust_string { writeln!(out, "class String;"); } if builtin.layout && !builtin.opaque { writeln!(out, "class Opaque;"); } if builtin.rust_slice { out.next_section(); writeln!(out, "template <typename T>"); writeln!(out, "::std::size_t size_of();"); writeln!(out, "template <typename T>"); writeln!(out, "::std::size_t align_of();"); } ifndef::write(out, builtin.rust_string, "CXXBRIDGE1_RUST_STRING"); ifndef::write(out, builtin.rust_str, "CXXBRIDGE1_RUST_STR"); ifndef::write(out, builtin.rust_slice, "CXXBRIDGE1_RUST_SLICE"); ifndef::write(out, builtin.rust_box, "CXXBRIDGE1_RUST_BOX"); ifndef::write(out, builtin.unsafe_bitcopy, "CXXBRIDGE1_RUST_BITCOPY"); ifndef::write(out, builtin.rust_vec, "CXXBRIDGE1_RUST_VEC"); ifndef::write(out, builtin.rust_fn, "CXXBRIDGE1_RUST_FN"); ifndef::write(out, builtin.rust_error, "CXXBRIDGE1_RUST_ERROR"); ifndef::write(out, builtin.rust_isize, "CXXBRIDGE1_RUST_ISIZE"); ifndef::write(out, 
builtin.opaque, "CXXBRIDGE1_RUST_OPAQUE"); ifndef::write(out, builtin.is_complete, "CXXBRIDGE1_IS_COMPLETE"); ifndef::write(out, builtin.layout, "CXXBRIDGE1_LAYOUT"); ifndef::write(out, builtin.relocatable, "CXXBRIDGE1_RELOCATABLE"); if builtin.rust_str_new_unchecked { out.next_section(); writeln!(out, "class Str::uninit {{}};"); writeln!(out, "inline Str::Str(uninit) noexcept {{}}"); } if builtin.rust_slice_new { out.next_section(); writeln!(out, "template <typename T>"); writeln!(out, "class Slice<T>::uninit {{}};"); writeln!(out, "template <typename T>"); writeln!(out, "inline Slice<T>::Slice(uninit) noexcept {{}}"); } out.begin_block(Block::Namespace("detail")); if builtin.maybe_uninit { include.cstddef = true; include.new = true; out.next_section(); writeln!(out, "template <typename T, typename = void *>"); writeln!(out, "struct operator_new {{"); writeln!( out, " void *operator()(::std::size_t sz) {{ return ::operator new(sz); }}", ); writeln!(out, "}};"); out.next_section(); writeln!(out, "template <typename T>"); writeln!( out, "struct operator_new<T, decltype(T::operator new(sizeof(T)))> {{", ); writeln!( out, " void *operator()(::std::size_t sz) {{ return T::operator new(sz); }}", ); writeln!(out, "}};"); } out.end_block(Block::Namespace("detail")); if builtin.manually_drop { out.next_section(); include.utility = true; writeln!(out, "template <typename T>"); writeln!(out, "union ManuallyDrop {{"); writeln!(out, " T value;"); writeln!( out, " ManuallyDrop(T &&value) : value(::std::move(value)) {{}}", ); writeln!(out, " ~ManuallyDrop() {{}}"); writeln!(out, "}};"); } if builtin.maybe_uninit { include.cstddef = true; out.next_section(); writeln!(out, "template <typename T>"); writeln!(out, "union MaybeUninit {{"); writeln!(out, " T value;"); writeln!( out, " void *operator new(::std::size_t sz) {{ return detail::operator_new<T>{{}}(sz); }}", ); writeln!(out, " MaybeUninit() {{}}"); writeln!(out, " ~MaybeUninit() {{}}"); writeln!(out, "}};"); } 
out.begin_block(Block::AnonymousNamespace); if builtin.repr_fat { include.array = true; include.cstdint = true; out.next_section(); out.begin_block(Block::Namespace("repr")); writeln!(out, "using Fat = ::std::array<::std::uintptr_t, 2>;"); out.end_block(Block::Namespace("repr")); } if builtin.ptr_len { include.cstddef = true; out.next_section(); out.begin_block(Block::Namespace("repr")); writeln!(out, "struct PtrLen final {{"); writeln!(out, " void *ptr;"); writeln!(out, " ::std::size_t len;"); writeln!(out, "}};"); out.end_block(Block::Namespace("repr")); } if builtin.rust_str_new_unchecked || builtin.rust_str_repr { out.next_section(); writeln!(out, "template <>"); writeln!(out, "class impl<Str> final {{"); writeln!(out, "public:"); if builtin.rust_str_new_unchecked { writeln!( out, " static Str new_unchecked(repr::Fat repr) noexcept {{", ); writeln!(out, " Str str = Str::uninit{{}};"); writeln!(out, " str.repr = repr;"); writeln!(out, " return str;"); writeln!(out, " }}"); } if builtin.rust_str_repr { writeln!(out, " static repr::Fat repr(Str str) noexcept {{"); writeln!(out, " return str.repr;"); writeln!(out, " }}"); } writeln!(out, "}};"); } if builtin.rust_slice_new || builtin.rust_slice_repr { out.next_section(); writeln!(out, "template <typename T>"); writeln!(out, "class impl<Slice<T>> final {{"); writeln!(out, "public:"); if builtin.rust_slice_new { writeln!(out, " static Slice<T> slice(repr::Fat repr) noexcept {{"); writeln!(out, " Slice<T> slice = typename Slice<T>::uninit{{}};"); writeln!(out, " slice.repr = repr;"); writeln!(out, " return slice;"); writeln!(out, " }}"); } if builtin.rust_slice_repr { writeln!(out, " static repr::Fat repr(Slice<T> slice) noexcept {{"); writeln!(out, " return slice.repr;"); writeln!(out, " }}"); } writeln!(out, "}};"); } if builtin.rust_error { out.next_section(); writeln!(out, "template <>"); writeln!(out, "class impl<Error> final {{"); writeln!(out, "public:"); writeln!(out, " static Error error(repr::PtrLen repr) 
noexcept {{"); writeln!(out, " Error error;"); writeln!(out, " error.msg = static_cast<const char *>(repr.ptr);"); writeln!(out, " error.len = repr.len;"); writeln!(out, " return error;"); writeln!(out, " }}"); writeln!(out, "}};"); } if builtin.deleter_if { out.next_section(); writeln!(out, "template <bool> struct deleter_if {{"); writeln!(out, " template <typename T> void operator()(T *) {{}}"); writeln!(out, "}};"); out.next_section(); writeln!(out, "template <> struct deleter_if<true> {{"); writeln!( out, " template <typename T> void operator()(T *ptr) {{ ptr->~T(); }}", ); writeln!(out, "}};"); } out.end_block(Block::AnonymousNamespace); out.end_block(Block::InlineNamespace("cxxbridge1")); if builtin.trycatch { out.begin_block(Block::Namespace("behavior")); include.exception = true; include.type_traits = true; include.utility = true; writeln!(out, "class missing {{}};"); writeln!(out, "missing trycatch(...);"); writeln!(out); writeln!(out, "template <typename Try, typename Fail>"); writeln!(out, "static typename ::std::enable_if<"); writeln!( out, " ::std::is_same<decltype(trycatch(::std::declval<Try>(), ::std::declval<Fail>())),", ); writeln!(out, " missing>::value>::type"); writeln!(out, "trycatch(Try &&func, Fail &&fail) noexcept try {{"); writeln!(out, " func();"); writeln!(out, "}} catch (const ::std::exception &e) {{"); writeln!(out, " fail(e.what());"); writeln!(out, "}}"); out.end_block(Block::Namespace("behavior")); } out.end_block(Block::Namespace("rust")); if builtin.exception { include.cstddef = true; out.begin_block(Block::ExternC); writeln!( out, "const char *cxxbridge1$exception(const char *, ::std::size_t);", ); out.end_block(Block::ExternC); } }
if builtin.rust_slice { include.array = true; include.cstddef = true; include.cstdint = true; include.iterator = true; include.type_traits = true; builtin.friend_impl = true; builtin.layout = true; builtin.panic = true; }
if_condition
[ { "content": "pub fn expand_struct(strct: &Struct, actual_derives: &mut Option<TokenStream>) -> TokenStream {\n\n let mut expanded = TokenStream::new();\n\n let mut traits = Vec::new();\n\n\n\n for derive in &strct.derives {\n\n let span = derive.span;\n\n match derive.what {\n\n ...
Rust
tests/test.rs
zharkomi/vector-merkle-tree
62385c81e5fcc2a0f419b073816f4a26cab70bea
extern crate ring; extern crate vmt; macro_rules! test_tree { ($constructor:ident) => { use ring::digest::{Algorithm, Context, Digest, SHA512}; use vmt::MerkleTree; static ALGO: &'static Algorithm = &SHA512; #[test] fn test_tree_0() { let values: Vec<&str> = vec![]; let tree = MerkleTree::$constructor(&values, ALGO); assert_eq!(true, tree.is_empty()); assert_eq!(0, tree.height()); assert_eq!(0, tree.nodes_count()); assert_eq!(0, tree.data_size()); let empty_root: Vec<u8> = vec![]; assert_eq!(empty_root, tree.get_root()); } #[test] fn test_tree_1() { let values = vec!["one"]; let tree = MerkleTree::$constructor(&values, ALGO); let _d0: Digest = vmt::get_hash(values[0].as_ref(), ALGO); let _pair = vmt::get_pair_hash(_d0.as_ref(), _d0.as_ref(), ALGO); assert_eq!(false, tree.is_empty()); assert_eq!(2, tree.height()); assert_eq!(3, tree.nodes_count()); assert_eq!(3 * ALGO.output_len, tree.data_size()); assert_eq!(_pair.as_ref(), tree.get_root()); } #[test] fn test_tree_2() { let values = vec!["one", "two"]; let tree = MerkleTree::$constructor(&values, ALGO); let _d0: Digest = vmt::get_hash(values[0].as_ref(), ALGO); let _d1: Digest = vmt::get_hash(values[1].as_ref(), ALGO); let _pair = vmt::get_pair_hash(_d0.as_ref(), _d1.as_ref(), ALGO); assert_eq!(false, tree.is_empty()); assert_eq!(2, tree.height()); assert_eq!(3, tree.nodes_count()); assert_eq!(3 * ALGO.output_len, tree.data_size()); assert_eq!(_pair.as_ref(), tree.get_root()); } #[test] fn test_tree_2_reverse() { let values1 = vec!["one", "two"]; let tree1 = MerkleTree::$constructor(&values1, ALGO); let values2 = vec!["two", "one"]; let tree2 = MerkleTree::$constructor(&values2, ALGO); assert_eq!(tree1.get_root(), tree2.get_root()); } #[test] fn test_tree_3() { let values = vec!["one", "two", "four"]; let tree = MerkleTree::$constructor(&values, ALGO); let _d0: Digest = vmt::get_hash(values[0].as_ref(), ALGO); let _d1: Digest = vmt::get_hash(values[1].as_ref(), ALGO); let _d2: Digest = 
vmt::get_hash(values[2].as_ref(), ALGO); let _d3: Digest = vmt::get_hash(values[2].as_ref(), ALGO); let _d01 = hash_pair(_d0.as_ref(), _d1.as_ref(), ALGO); let _d32 = hash_pair(_d2.as_ref(), _d3.as_ref(), ALGO); let _pair = vmt::get_pair_hash(_d32.as_ref(), _d01.as_ref(), ALGO); assert_eq!(false, tree.is_empty()); assert_eq!(3, tree.height()); assert_eq!(7, tree.nodes_count()); assert_eq!(7 * ALGO.output_len, tree.data_size()); assert_eq!(_pair.as_ref(), tree.get_root()); } #[test] fn test_tree_4() { let values = vec!["one", "two", "four", "three"]; let tree = MerkleTree::$constructor(&values, ALGO); let _d0: Digest = vmt::get_hash(values[0].as_ref(), ALGO); let _d1: Digest = vmt::get_hash(values[1].as_ref(), ALGO); let _d2: Digest = vmt::get_hash(values[2].as_ref(), ALGO); let _d3: Digest = vmt::get_hash(values[3].as_ref(), ALGO); let _d01 = hash_pair(_d0.as_ref(), _d1.as_ref(), ALGO); let _d32 = hash_pair(_d2.as_ref(), _d3.as_ref(), ALGO); let _pair = vmt::get_pair_hash(_d32.as_ref(), _d01.as_ref(), ALGO); assert_eq!(false, tree.is_empty()); assert_eq!(3, tree.height()); assert_eq!(7, tree.nodes_count()); assert_eq!(7 * ALGO.output_len, tree.data_size()); assert_eq!(_pair.as_ref(), tree.get_root()); } #[test] fn test_tree_4_reverse() { let values1 = vec!["one", "two", "three", "four"]; let tree1 = MerkleTree::$constructor(&values1, ALGO); let values2 = vec!["four", "three", "two", "one"]; let tree2 = MerkleTree::$constructor(&values2, ALGO); assert_eq!(tree1.get_root(), tree2.get_root()); } #[test] fn test_equal() { let values = vec!["one", "one", "one", "one"]; let tree = MerkleTree::$constructor(&values, ALGO); let _d0: Digest = vmt::get_hash(values[0].as_ref(), ALGO); let _d1: Digest = vmt::get_hash(values[1].as_ref(), ALGO); let _d2: Digest = vmt::get_hash(values[2].as_ref(), ALGO); let _d3: Digest = vmt::get_hash(values[3].as_ref(), ALGO); let _d01 = hash_pair(_d0.as_ref(), _d1.as_ref(), ALGO); let _d32 = hash_pair(_d2.as_ref(), _d3.as_ref(), ALGO); let 
_pair = vmt::get_pair_hash(_d32.as_ref(), _d01.as_ref(), ALGO); assert_eq!(false, tree.is_empty()); assert_eq!(3, tree.height()); assert_eq!(7, tree.nodes_count()); assert_eq!(7 * ALGO.output_len, tree.data_size()); assert_eq!(_pair.as_ref(), tree.get_root()); } #[test] fn test_proof() { let values = vec!["one", "two", "three", "four"]; let tree = MerkleTree::$constructor(&values, ALGO); for v in values { let proof = tree.build_proof(&v); assert_eq!(true, proof.is_some()); let vec = proof.unwrap(); assert_eq!(3, vec.len()); tree.validate(&vec); } let absent = vec!["qqq", "www", "eee", "rrr"]; for v in absent { let proof = tree.build_proof(&v); assert_eq!(true, proof.is_none()); } } #[test] fn test_bad_proof() { let values = vec!["one", "two", "three", "four"]; let tree = MerkleTree::$constructor(&values, ALGO); let proof = tree.build_proof(&"one"); assert_eq!(true, proof.is_some()); let _d0: Digest = vmt::get_hash("five".as_ref(), ALGO); let proof_vec = proof.unwrap(); let vec = vec![proof_vec[0], proof_vec[1], _d0.as_ref()]; assert_eq!(false, tree.validate(&vec)); } fn hash_pair(x: &[u8], y: &[u8], algo: &'static Algorithm) -> Digest { let mut ctx = Context::new(algo); ctx.update(x); ctx.update(y); ctx.finish() } } } mod test { test_tree!(new); } mod test_with_map { test_tree!(new_with_map); }
extern crate ring; extern crate vmt; macro_rules! test_tree { ($constructor:ident) => { use ring::digest::{Algorithm, Context, Digest, SHA512}; use vmt::MerkleTree; static ALGO: &'static Algorithm = &SHA512; #[test] f
assert_eq!(true, proof.is_some()); let vec = proof.unwrap(); assert_eq!(3, vec.len()); tree.validate(&vec); } let absent = vec!["qqq", "www", "eee", "rrr"]; for v in absent { let proof = tree.build_proof(&v); assert_eq!(true, proof.is_none()); } } #[test] fn test_bad_proof() { let values = vec!["one", "two", "three", "four"]; let tree = MerkleTree::$constructor(&values, ALGO); let proof = tree.build_proof(&"one"); assert_eq!(true, proof.is_some()); let _d0: Digest = vmt::get_hash("five".as_ref(), ALGO); let proof_vec = proof.unwrap(); let vec = vec![proof_vec[0], proof_vec[1], _d0.as_ref()]; assert_eq!(false, tree.validate(&vec)); } fn hash_pair(x: &[u8], y: &[u8], algo: &'static Algorithm) -> Digest { let mut ctx = Context::new(algo); ctx.update(x); ctx.update(y); ctx.finish() } } } mod test { test_tree!(new); } mod test_with_map { test_tree!(new_with_map); }
n test_tree_0() { let values: Vec<&str> = vec![]; let tree = MerkleTree::$constructor(&values, ALGO); assert_eq!(true, tree.is_empty()); assert_eq!(0, tree.height()); assert_eq!(0, tree.nodes_count()); assert_eq!(0, tree.data_size()); let empty_root: Vec<u8> = vec![]; assert_eq!(empty_root, tree.get_root()); } #[test] fn test_tree_1() { let values = vec!["one"]; let tree = MerkleTree::$constructor(&values, ALGO); let _d0: Digest = vmt::get_hash(values[0].as_ref(), ALGO); let _pair = vmt::get_pair_hash(_d0.as_ref(), _d0.as_ref(), ALGO); assert_eq!(false, tree.is_empty()); assert_eq!(2, tree.height()); assert_eq!(3, tree.nodes_count()); assert_eq!(3 * ALGO.output_len, tree.data_size()); assert_eq!(_pair.as_ref(), tree.get_root()); } #[test] fn test_tree_2() { let values = vec!["one", "two"]; let tree = MerkleTree::$constructor(&values, ALGO); let _d0: Digest = vmt::get_hash(values[0].as_ref(), ALGO); let _d1: Digest = vmt::get_hash(values[1].as_ref(), ALGO); let _pair = vmt::get_pair_hash(_d0.as_ref(), _d1.as_ref(), ALGO); assert_eq!(false, tree.is_empty()); assert_eq!(2, tree.height()); assert_eq!(3, tree.nodes_count()); assert_eq!(3 * ALGO.output_len, tree.data_size()); assert_eq!(_pair.as_ref(), tree.get_root()); } #[test] fn test_tree_2_reverse() { let values1 = vec!["one", "two"]; let tree1 = MerkleTree::$constructor(&values1, ALGO); let values2 = vec!["two", "one"]; let tree2 = MerkleTree::$constructor(&values2, ALGO); assert_eq!(tree1.get_root(), tree2.get_root()); } #[test] fn test_tree_3() { let values = vec!["one", "two", "four"]; let tree = MerkleTree::$constructor(&values, ALGO); let _d0: Digest = vmt::get_hash(values[0].as_ref(), ALGO); let _d1: Digest = vmt::get_hash(values[1].as_ref(), ALGO); let _d2: Digest = vmt::get_hash(values[2].as_ref(), ALGO); let _d3: Digest = vmt::get_hash(values[2].as_ref(), ALGO); let _d01 = hash_pair(_d0.as_ref(), _d1.as_ref(), ALGO); let _d32 = hash_pair(_d2.as_ref(), _d3.as_ref(), ALGO); let _pair = 
vmt::get_pair_hash(_d32.as_ref(), _d01.as_ref(), ALGO); assert_eq!(false, tree.is_empty()); assert_eq!(3, tree.height()); assert_eq!(7, tree.nodes_count()); assert_eq!(7 * ALGO.output_len, tree.data_size()); assert_eq!(_pair.as_ref(), tree.get_root()); } #[test] fn test_tree_4() { let values = vec!["one", "two", "four", "three"]; let tree = MerkleTree::$constructor(&values, ALGO); let _d0: Digest = vmt::get_hash(values[0].as_ref(), ALGO); let _d1: Digest = vmt::get_hash(values[1].as_ref(), ALGO); let _d2: Digest = vmt::get_hash(values[2].as_ref(), ALGO); let _d3: Digest = vmt::get_hash(values[3].as_ref(), ALGO); let _d01 = hash_pair(_d0.as_ref(), _d1.as_ref(), ALGO); let _d32 = hash_pair(_d2.as_ref(), _d3.as_ref(), ALGO); let _pair = vmt::get_pair_hash(_d32.as_ref(), _d01.as_ref(), ALGO); assert_eq!(false, tree.is_empty()); assert_eq!(3, tree.height()); assert_eq!(7, tree.nodes_count()); assert_eq!(7 * ALGO.output_len, tree.data_size()); assert_eq!(_pair.as_ref(), tree.get_root()); } #[test] fn test_tree_4_reverse() { let values1 = vec!["one", "two", "three", "four"]; let tree1 = MerkleTree::$constructor(&values1, ALGO); let values2 = vec!["four", "three", "two", "one"]; let tree2 = MerkleTree::$constructor(&values2, ALGO); assert_eq!(tree1.get_root(), tree2.get_root()); } #[test] fn test_equal() { let values = vec!["one", "one", "one", "one"]; let tree = MerkleTree::$constructor(&values, ALGO); let _d0: Digest = vmt::get_hash(values[0].as_ref(), ALGO); let _d1: Digest = vmt::get_hash(values[1].as_ref(), ALGO); let _d2: Digest = vmt::get_hash(values[2].as_ref(), ALGO); let _d3: Digest = vmt::get_hash(values[3].as_ref(), ALGO); let _d01 = hash_pair(_d0.as_ref(), _d1.as_ref(), ALGO); let _d32 = hash_pair(_d2.as_ref(), _d3.as_ref(), ALGO); let _pair = vmt::get_pair_hash(_d32.as_ref(), _d01.as_ref(), ALGO); assert_eq!(false, tree.is_empty()); assert_eq!(3, tree.height()); assert_eq!(7, tree.nodes_count()); assert_eq!(7 * ALGO.output_len, tree.data_size()); 
assert_eq!(_pair.as_ref(), tree.get_root()); } #[test] fn test_proof() { let values = vec!["one", "two", "three", "four"]; let tree = MerkleTree::$constructor(&values, ALGO); for v in values { let proof = tree.build_proof(&v);
random
[ { "content": "pub fn get_hash(x: &[u8], algo: &'static Algorithm) -> Digest {\n\n let mut ctx = Context::new(algo);\n\n ctx.update(x);\n\n ctx.finish()\n\n}", "file_path": "src/lib.rs", "rank": 0, "score": 65870.31932308526 }, { "content": "pub fn get_pair_hash(x: &[u8], y: &[u8], a...
Rust
exonum/src/encoding/protobuf/mod.rs
fedir-molchan/exonum
3501abba0f4574d21da64e5991434412234e813e
#![allow(bare_trait_objects)] #![allow(renamed_and_removed_lints)] pub use self::blockchain::{Block, ConfigReference, TxLocation}; pub use self::helpers::{BitVec, Hash, PublicKey}; pub use self::protocol::{ BlockRequest, BlockResponse, Connect, PeersRequest, Precommit, Prevote, PrevotesRequest, Propose, ProposeRequest, Status, TransactionsRequest, TransactionsResponse, }; use bit_vec; use chrono::{DateTime, TimeZone, Utc}; use protobuf::{well_known_types, Message, RepeatedField}; use crypto; use encoding::Error; use helpers::{Height, Round, ValidatorId}; use messages::BinaryForm; mod blockchain; mod helpers; mod protocol; pub trait ProtobufConvert: Sized { type ProtoStruct; fn to_pb(&self) -> Self::ProtoStruct; fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()>; } impl<T> BinaryForm for T where T: ProtobufConvert, <T as ProtobufConvert>::ProtoStruct: Message, { fn encode(&self) -> Result<Vec<u8>, Error> { Ok(self.to_pb().write_to_bytes().unwrap()) } fn decode(buffer: &[u8]) -> Result<Self, Error> { let mut pb = <Self as ProtobufConvert>::ProtoStruct::new(); pb.merge_from_bytes(buffer).unwrap(); Self::from_pb(pb).map_err(|_| "Conversion from protobuf error".into()) } } impl ProtobufConvert for crypto::Hash { type ProtoStruct = Hash; fn to_pb(&self) -> Hash { let mut hash = Hash::new(); hash.set_data(self.as_ref().to_vec()); hash } fn from_pb(pb: Hash) -> Result<Self, ()> { let data = pb.get_data(); if data.len() == crypto::HASH_SIZE { crypto::Hash::from_slice(data).ok_or(()) } else { Err(()) } } } impl ProtobufConvert for crypto::PublicKey { type ProtoStruct = PublicKey; fn to_pb(&self) -> PublicKey { let mut key = PublicKey::new(); key.set_data(self.as_ref().to_vec()); key } fn from_pb(pb: PublicKey) -> Result<Self, ()> { let data = pb.get_data(); if data.len() == crypto::PUBLIC_KEY_LENGTH { crypto::PublicKey::from_slice(data).ok_or(()) } else { Err(()) } } } impl ProtobufConvert for bit_vec::BitVec { type ProtoStruct = BitVec; fn to_pb(&self) -> BitVec { let 
mut bit_vec = BitVec::new(); bit_vec.set_data(self.to_bytes()); bit_vec.set_len(self.len() as u64); bit_vec } fn from_pb(pb: BitVec) -> Result<Self, ()> { let data = pb.get_data(); let mut bit_vec = bit_vec::BitVec::from_bytes(data); bit_vec.truncate(pb.get_len() as usize); Ok(bit_vec) } } impl ProtobufConvert for DateTime<Utc> { type ProtoStruct = well_known_types::Timestamp; fn to_pb(&self) -> well_known_types::Timestamp { let mut ts = well_known_types::Timestamp::new(); ts.set_seconds(self.timestamp()); ts.set_nanos(self.timestamp_subsec_nanos() as i32); ts } fn from_pb(pb: well_known_types::Timestamp) -> Result<Self, ()> { Utc.timestamp_opt(pb.get_seconds(), pb.get_nanos() as u32) .single() .ok_or(()) } } impl ProtobufConvert for String { type ProtoStruct = Self; fn to_pb(&self) -> Self::ProtoStruct { self.clone() } fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()> { Ok(pb) } } impl ProtobufConvert for Height { type ProtoStruct = u64; fn to_pb(&self) -> Self::ProtoStruct { self.0 } fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()> { Ok(Height(pb)) } } impl ProtobufConvert for Round { type ProtoStruct = u32; fn to_pb(&self) -> Self::ProtoStruct { self.0 } fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()> { Ok(Round(pb)) } } impl ProtobufConvert for ValidatorId { type ProtoStruct = u32; fn to_pb(&self) -> Self::ProtoStruct { u32::from(self.0) } fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()> { if pb <= u32::from(u16::max_value()) { Ok(ValidatorId(pb as u16)) } else { Err(()) } } } impl ProtobufConvert for u32 { type ProtoStruct = u32; fn to_pb(&self) -> Self::ProtoStruct { *self } fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()> { Ok(pb) } } impl ProtobufConvert for u64 { type ProtoStruct = u64; fn to_pb(&self) -> Self::ProtoStruct { *self } fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()> { Ok(pb) } } impl<T> ProtobufConvert for Vec<T> where T: ProtobufConvert, { type ProtoStruct = RepeatedField<T::ProtoStruct>; fn to_pb(&self) -> 
Self::ProtoStruct { RepeatedField::from_vec(self.into_iter().map(|v| v.to_pb()).collect()) } fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()> { pb.into_iter() .map(ProtobufConvert::from_pb) .collect::<Result<Vec<_>, _>>() } } impl ProtobufConvert for Vec<u8> { type ProtoStruct = Vec<u8>; fn to_pb(&self) -> Self::ProtoStruct { self.clone() } fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()> { Ok(pb) } }
#![allow(bare_trait_objects)] #![allow(renamed_and_removed_lints)] pub use self::blockchain::{Block, ConfigReference, TxLocation}; pub use self::helpers::{BitVec, Hash, PublicKey}; pub use self::protocol::{ BlockRequest, BlockResponse, Connect, PeersRequest, Precommit, Prevote, PrevotesRequest, Propose, ProposeRequest, Status, TransactionsRequest, TransactionsResponse, }; use bit_vec; use chrono::{DateTime, TimeZone, Utc}; use protobuf::{well_known_types, Message, RepeatedField}; use crypto; use encoding::Error; use helpers::{Height, Round, ValidatorId}; use messages::BinaryForm; mod block
= u32; fn to_pb(&self) -> Self::ProtoStruct { self.0 } fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()> { Ok(Round(pb)) } } impl ProtobufConvert for ValidatorId { type ProtoStruct = u32; fn to_pb(&self) -> Self::ProtoStruct { u32::from(self.0) } fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()> { if pb <= u32::from(u16::max_value()) { Ok(ValidatorId(pb as u16)) } else { Err(()) } } } impl ProtobufConvert for u32 { type ProtoStruct = u32; fn to_pb(&self) -> Self::ProtoStruct { *self } fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()> { Ok(pb) } } impl ProtobufConvert for u64 { type ProtoStruct = u64; fn to_pb(&self) -> Self::ProtoStruct { *self } fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()> { Ok(pb) } } impl<T> ProtobufConvert for Vec<T> where T: ProtobufConvert, { type ProtoStruct = RepeatedField<T::ProtoStruct>; fn to_pb(&self) -> Self::ProtoStruct { RepeatedField::from_vec(self.into_iter().map(|v| v.to_pb()).collect()) } fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()> { pb.into_iter() .map(ProtobufConvert::from_pb) .collect::<Result<Vec<_>, _>>() } } impl ProtobufConvert for Vec<u8> { type ProtoStruct = Vec<u8>; fn to_pb(&self) -> Self::ProtoStruct { self.clone() } fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()> { Ok(pb) } }
chain; mod helpers; mod protocol; pub trait ProtobufConvert: Sized { type ProtoStruct; fn to_pb(&self) -> Self::ProtoStruct; fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()>; } impl<T> BinaryForm for T where T: ProtobufConvert, <T as ProtobufConvert>::ProtoStruct: Message, { fn encode(&self) -> Result<Vec<u8>, Error> { Ok(self.to_pb().write_to_bytes().unwrap()) } fn decode(buffer: &[u8]) -> Result<Self, Error> { let mut pb = <Self as ProtobufConvert>::ProtoStruct::new(); pb.merge_from_bytes(buffer).unwrap(); Self::from_pb(pb).map_err(|_| "Conversion from protobuf error".into()) } } impl ProtobufConvert for crypto::Hash { type ProtoStruct = Hash; fn to_pb(&self) -> Hash { let mut hash = Hash::new(); hash.set_data(self.as_ref().to_vec()); hash } fn from_pb(pb: Hash) -> Result<Self, ()> { let data = pb.get_data(); if data.len() == crypto::HASH_SIZE { crypto::Hash::from_slice(data).ok_or(()) } else { Err(()) } } } impl ProtobufConvert for crypto::PublicKey { type ProtoStruct = PublicKey; fn to_pb(&self) -> PublicKey { let mut key = PublicKey::new(); key.set_data(self.as_ref().to_vec()); key } fn from_pb(pb: PublicKey) -> Result<Self, ()> { let data = pb.get_data(); if data.len() == crypto::PUBLIC_KEY_LENGTH { crypto::PublicKey::from_slice(data).ok_or(()) } else { Err(()) } } } impl ProtobufConvert for bit_vec::BitVec { type ProtoStruct = BitVec; fn to_pb(&self) -> BitVec { let mut bit_vec = BitVec::new(); bit_vec.set_data(self.to_bytes()); bit_vec.set_len(self.len() as u64); bit_vec } fn from_pb(pb: BitVec) -> Result<Self, ()> { let data = pb.get_data(); let mut bit_vec = bit_vec::BitVec::from_bytes(data); bit_vec.truncate(pb.get_len() as usize); Ok(bit_vec) } } impl ProtobufConvert for DateTime<Utc> { type ProtoStruct = well_known_types::Timestamp; fn to_pb(&self) -> well_known_types::Timestamp { let mut ts = well_known_types::Timestamp::new(); ts.set_seconds(self.timestamp()); ts.set_nanos(self.timestamp_subsec_nanos() as i32); ts } fn from_pb(pb: 
well_known_types::Timestamp) -> Result<Self, ()> { Utc.timestamp_opt(pb.get_seconds(), pb.get_nanos() as u32) .single() .ok_or(()) } } impl ProtobufConvert for String { type ProtoStruct = Self; fn to_pb(&self) -> Self::ProtoStruct { self.clone() } fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()> { Ok(pb) } } impl ProtobufConvert for Height { type ProtoStruct = u64; fn to_pb(&self) -> Self::ProtoStruct { self.0 } fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()> { Ok(Height(pb)) } } impl ProtobufConvert for Round { type ProtoStruct
random
[ { "content": "/// Calculates hash of a bytes slice.\n\npub fn hash(data: &[u8]) -> Hash {\n\n sha256::hash(data)\n\n}\n", "file_path": "crypto/src/crypto_lib/sodiumoxide/mod.rs", "rank": 0, "score": 275758.0190634015 }, { "content": "/// Calculates a hash of a bytes slice.\n\n///\n\n/// T...
Rust
consensus/service/src/api/grpc_error.rs
isis-mc/mobilecoin
aa5c65c042c78840ade32bf198538646ea2020ee
use crate::tx_manager::TxManagerError; use displaydoc::Display; use grpcio::{RpcStatus, RpcStatusCode}; use mc_common::logger::global_log; use mc_consensus_api::consensus_common::{ProposeTxResponse, ProposeTxResult}; use mc_consensus_enclave::Error as EnclaveError; use mc_ledger_db::Error as LedgerError; use mc_transaction_core::validation::TransactionValidationError; #[derive(Debug, Display)] pub enum ConsensusGrpcError { RpcStatus(RpcStatus), Ledger(LedgerError), OverCapacity, NotServing, Enclave(EnclaveError), TransactionValidation(TransactionValidationError), InvalidArgument(String), Other(String), } impl From<RpcStatus> for ConsensusGrpcError { fn from(src: RpcStatus) -> Self { Self::RpcStatus(src) } } impl From<LedgerError> for ConsensusGrpcError { fn from(src: LedgerError) -> Self { Self::Ledger(src) } } impl From<EnclaveError> for ConsensusGrpcError { fn from(src: EnclaveError) -> Self { match src { EnclaveError::MalformedTx(err) => Self::from(err), _ => Self::Enclave(src), } } } impl From<TransactionValidationError> for ConsensusGrpcError { fn from(src: TransactionValidationError) -> Self { Self::TransactionValidation(src) } } impl From<TxManagerError> for ConsensusGrpcError { fn from(src: TxManagerError) -> Self { match src { TxManagerError::Enclave(err) => Self::from(err), TxManagerError::TransactionValidation(err) => Self::from(err), TxManagerError::LedgerDb(err) => Self::from(err), _ => Self::Other(format!("tx manager error: {}", src)), } } } impl From<ConsensusGrpcError> for RpcStatus { fn from(src: ConsensusGrpcError) -> Self { match src { ConsensusGrpcError::RpcStatus(rpc_status) => rpc_status, ConsensusGrpcError::Ledger(err) => RpcStatus::new( RpcStatusCode::INTERNAL, Some(format!("Ledger error: {}", err)), ), ConsensusGrpcError::OverCapacity => RpcStatus::new( RpcStatusCode::UNAVAILABLE, Some("Temporarily over capacity".into()), ), ConsensusGrpcError::NotServing => RpcStatus::new( RpcStatusCode::UNAVAILABLE, Some("Temporarily not serving 
requests".into()), ), ConsensusGrpcError::Enclave(EnclaveError::Attest(err)) => { global_log::error!("Permission denied: {}", err); RpcStatus::new( RpcStatusCode::PERMISSION_DENIED, Some("Permission Denied (attestation)".into()), ) } ConsensusGrpcError::Other(err) => RpcStatus::new(RpcStatusCode::INTERNAL, Some(err)), ConsensusGrpcError::TransactionValidation(err) => { global_log::error!("Attempting to convert a ConsensusGrpcError::TransactionValidation into RpcStatus, this should not happen! Error is: {}", err); RpcStatus::new( RpcStatusCode::INTERNAL, Some(format!("Unexpected transaction validation error: {}", err)), ) } _ => RpcStatus::new( RpcStatusCode::INTERNAL, Some(format!("Internal error: {}", src)), ), } } } impl Into<Result<ProposeTxResponse, RpcStatus>> for ConsensusGrpcError { fn into(self) -> Result<ProposeTxResponse, RpcStatus> { match self { Self::TransactionValidation(err) => { let mut resp = ProposeTxResponse::new(); resp.set_result(ProposeTxResult::from(err)); Ok(resp) } _ => Err(RpcStatus::from(self)), } } }
use crate::tx_manager::TxManagerError; use displaydoc::Display; use grpcio::{RpcStatus, RpcStatusCode}; use mc_common::logger::global_log; use mc_consensus_api::consensus_common::{Pr
n(err) => { let mut resp = ProposeTxResponse::new(); resp.set_result(ProposeTxResult::from(err)); Ok(resp) } _ => Err(RpcStatus::from(self)), } } }
oposeTxResponse, ProposeTxResult}; use mc_consensus_enclave::Error as EnclaveError; use mc_ledger_db::Error as LedgerError; use mc_transaction_core::validation::TransactionValidationError; #[derive(Debug, Display)] pub enum ConsensusGrpcError { RpcStatus(RpcStatus), Ledger(LedgerError), OverCapacity, NotServing, Enclave(EnclaveError), TransactionValidation(TransactionValidationError), InvalidArgument(String), Other(String), } impl From<RpcStatus> for ConsensusGrpcError { fn from(src: RpcStatus) -> Self { Self::RpcStatus(src) } } impl From<LedgerError> for ConsensusGrpcError { fn from(src: LedgerError) -> Self { Self::Ledger(src) } } impl From<EnclaveError> for ConsensusGrpcError { fn from(src: EnclaveError) -> Self { match src { EnclaveError::MalformedTx(err) => Self::from(err), _ => Self::Enclave(src), } } } impl From<TransactionValidationError> for ConsensusGrpcError { fn from(src: TransactionValidationError) -> Self { Self::TransactionValidation(src) } } impl From<TxManagerError> for ConsensusGrpcError { fn from(src: TxManagerError) -> Self { match src { TxManagerError::Enclave(err) => Self::from(err), TxManagerError::TransactionValidation(err) => Self::from(err), TxManagerError::LedgerDb(err) => Self::from(err), _ => Self::Other(format!("tx manager error: {}", src)), } } } impl From<ConsensusGrpcError> for RpcStatus { fn from(src: ConsensusGrpcError) -> Self { match src { ConsensusGrpcError::RpcStatus(rpc_status) => rpc_status, ConsensusGrpcError::Ledger(err) => RpcStatus::new( RpcStatusCode::INTERNAL, Some(format!("Ledger error: {}", err)), ), ConsensusGrpcError::OverCapacity => RpcStatus::new( RpcStatusCode::UNAVAILABLE, Some("Temporarily over capacity".into()), ), ConsensusGrpcError::NotServing => RpcStatus::new( RpcStatusCode::UNAVAILABLE, Some("Temporarily not serving requests".into()), ), ConsensusGrpcError::Enclave(EnclaveError::Attest(err)) => { global_log::error!("Permission denied: {}", err); RpcStatus::new( RpcStatusCode::PERMISSION_DENIED, 
Some("Permission Denied (attestation)".into()), ) } ConsensusGrpcError::Other(err) => RpcStatus::new(RpcStatusCode::INTERNAL, Some(err)), ConsensusGrpcError::TransactionValidation(err) => { global_log::error!("Attempting to convert a ConsensusGrpcError::TransactionValidation into RpcStatus, this should not happen! Error is: {}", err); RpcStatus::new( RpcStatusCode::INTERNAL, Some(format!("Unexpected transaction validation error: {}", err)), ) } _ => RpcStatus::new( RpcStatusCode::INTERNAL, Some(format!("Internal error: {}", src)), ), } } } impl Into<Result<ProposeTxResponse, RpcStatus>> for ConsensusGrpcError { fn into(self) -> Result<ProposeTxResponse, RpcStatus> { match self { Self::TransactionValidatio
random
[ { "content": "## Prohibited Uses and Transfers of MobileCoins and Uses of MobileCoin Wallets\n\n\n\nYou may not:\n\n- use or transfer MobileCoins or access or use a MobileCoin Wallet in order to disguise the origin or nature of illicit proceeds of, or to further, any breach of applicable laws, or to transact or...
Rust
http/src/request/channel/message/create_message.rs
dlee13/dawn
3a1443e88bee0abc543b9cc92ed2e5665e685b84
use super::allowed_mentions::{AllowedMentions, AllowedMentionsBuilder, Unspecified}; use crate::request::prelude::*; use reqwest::{ multipart::{Form, Part}, Body, }; use std::{ collections::HashMap, error::Error, fmt::{Display, Formatter, Result as FmtResult}, }; use twilight_model::{ channel::{embed::Embed, Message}, id::ChannelId, }; #[derive(Clone, Debug)] #[non_exhaustive] pub enum CreateMessageError { ContentInvalid { content: String, }, EmbedTooLarge { embed: Box<Embed>, source: EmbedValidationError, }, } impl Display for CreateMessageError { fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult { match self { Self::ContentInvalid { .. } => f.write_str("the message content is invalid"), Self::EmbedTooLarge { .. } => f.write_str("the embed's contents are too long"), } } } impl Error for CreateMessageError { fn source(&self) -> Option<&(dyn Error + 'static)> { match self { Self::ContentInvalid { .. } => None, Self::EmbedTooLarge { source, .. } => Some(source), } } } #[derive(Default, Serialize)] pub(crate) struct CreateMessageFields { #[serde(skip_serializing_if = "Option::is_none")] content: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] embed: Option<Embed>, #[serde(skip_serializing_if = "Option::is_none")] nonce: Option<u64>, #[serde(skip_serializing_if = "Option::is_none")] payload_json: Option<Vec<u8>>, #[serde(skip_serializing_if = "Option::is_none")] tts: Option<bool>, #[serde(skip_serializing_if = "Option::is_none")] pub(crate) allowed_mentions: Option<AllowedMentions>, } pub struct CreateMessage<'a> { attachments: HashMap<String, Body>, channel_id: ChannelId, pub(crate) fields: CreateMessageFields, fut: Option<Pending<'a, Message>>, http: &'a Client, } impl<'a> CreateMessage<'a> { pub(crate) fn new(http: &'a Client, channel_id: ChannelId) -> Self { Self { attachments: HashMap::new(), channel_id, fields: CreateMessageFields { allowed_mentions: http.default_allowed_mentions(), ..CreateMessageFields::default() }, fut: None, http, } } pub fn 
content(self, content: impl Into<String>) -> Result<Self, CreateMessageError> { self._content(content.into()) } fn _content(mut self, content: String) -> Result<Self, CreateMessageError> { if !validate::content_limit(&content) { return Err(CreateMessageError::ContentInvalid { content }); } self.fields.content.replace(content); Ok(self) } pub fn embed(mut self, embed: Embed) -> Result<Self, CreateMessageError> { if let Err(source) = validate::embed(&embed) { return Err(CreateMessageError::EmbedTooLarge { embed: Box::new(embed), source, }); } self.fields.embed.replace(embed); Ok(self) } pub fn allowed_mentions( self, ) -> AllowedMentionsBuilder<'a, Unspecified, Unspecified, Unspecified> { AllowedMentionsBuilder::for_builder(self) } pub fn attachment(mut self, name: impl Into<String>, file: impl Into<Body>) -> Self { self.attachments.insert(name.into(), file.into()); self } pub fn attachments<N: Into<String>, F: Into<Body>>( mut self, attachments: impl IntoIterator<Item = (N, F)>, ) -> Self { for (name, file) in attachments { self = self.attachment(name, file); } self } pub fn nonce(mut self, nonce: u64) -> Self { self.fields.nonce.replace(nonce); self } pub fn payload_json(mut self, payload_json: impl Into<Vec<u8>>) -> Self { self.fields.payload_json.replace(payload_json.into()); self } pub fn tts(mut self, tts: bool) -> Self { self.fields.tts.replace(tts); self } fn start(&mut self) -> Result<()> { self.fut.replace(Box::pin(self.http.request( if self.attachments.is_empty() { Request::from(( crate::json_to_vec(&self.fields)?, Route::CreateMessage { channel_id: self.channel_id.0, }, )) } else { let mut form = Form::new(); for (index, (name, file)) in self.attachments.drain().enumerate() { form = form.part(format!("{}", index), Part::stream(file).file_name(name)); } let body = crate::json_to_vec(&self.fields)?; form = form.part("payload_json", Part::bytes(body)); Request::from(( form, Route::CreateMessage { channel_id: self.channel_id.0, }, )) }, ))); Ok(()) } } 
poll_req!(CreateMessage<'_>, Message);
use super::allowed_mentions::{AllowedMentions, AllowedMentionsBuilder, Unspecified}; use crate::request::prelude::*; use reqwest::{ multipart::{Form, Part}, Body, }; use std::{ collections::HashMap, error::Error, fmt::{Display, Formatter, Result as FmtResult}, }; use twilight_model::{ channel::{embed::Embed, Message}, id::ChannelId, }; #[derive(Clone, Debug)] #[non_exhaustive] pub enum CreateMessageError { ContentInvalid { content: String, }, EmbedTooLarge { embed: Box<Embed>, source: EmbedValidationError, }, } impl Display for CreateMessageError { fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult { match self { Self::ContentInvalid { .. } => f.write_str("the message content is invalid"), Self::EmbedTooLarge { .. } => f.write_str("the embed's contents are too long"), } } } impl Error for CreateMessageError { fn source(&self) -> Option<&(dyn Error + 'static)> { match self { Self::ContentInvalid { .. } => None, Self::EmbedTooLarge { source, .. } => Some(source), } } } #[derive(Default, Serialize)] pub(crate) struct CreateMessageFields { #[serde(skip_serializing_if = "Option::is_none")] content: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] embed: Option<Embed>, #[serde(skip_serializing_if = "Option::is_none")] nonce: Option<u64>, #[serde(skip_serializing_if = "Option::is_none")] payload_json: Option<Vec<u8>>, #[serde(skip_serializing_if = "Option::is_none")] tts: Option<bool>, #[serde(skip_serializing_if = "Option::is_none")] pub(crate) allowed_mentions: Option<AllowedMentions>, } pub struct CreateMessage<'a> { attachments: HashMap<String, Body>, channel_id: ChannelId, pub(crate) fields: CreateMessageFields, fut: Option<Pending<'a, Message>>, http: &'a Client, } impl<'a> CreateMessage<'a> { pub(crate) fn new(http: &'a Client, channel_id: ChannelId) -> Self { Self { attachments: HashMap::new(), channel_id, fields: CreateMessageFields { allowed_mentions: http.default_allowed_mentions(), ..CreateMessageFields::default() }, fut: None, http, } } pub fn 
content(self, content: impl Into<String>) -> Result<Self, CreateMessageError> { self._content(content.into()) } fn _content(mut self, content: String) -> Result<Self, CreateMessageError> { if !validate::content_limit(&content) { return Err(CreateMessageError::ContentInvalid { content }); } self.fields.content.replace(content); Ok(self) } pub fn embed(mut self, embed: Embed) -> Result<Self, CreateMessageError> {
self.fields.embed.replace(embed); Ok(self) } pub fn allowed_mentions( self, ) -> AllowedMentionsBuilder<'a, Unspecified, Unspecified, Unspecified> { AllowedMentionsBuilder::for_builder(self) } pub fn attachment(mut self, name: impl Into<String>, file: impl Into<Body>) -> Self { self.attachments.insert(name.into(), file.into()); self } pub fn attachments<N: Into<String>, F: Into<Body>>( mut self, attachments: impl IntoIterator<Item = (N, F)>, ) -> Self { for (name, file) in attachments { self = self.attachment(name, file); } self } pub fn nonce(mut self, nonce: u64) -> Self { self.fields.nonce.replace(nonce); self } pub fn payload_json(mut self, payload_json: impl Into<Vec<u8>>) -> Self { self.fields.payload_json.replace(payload_json.into()); self } pub fn tts(mut self, tts: bool) -> Self { self.fields.tts.replace(tts); self } fn start(&mut self) -> Result<()> { self.fut.replace(Box::pin(self.http.request( if self.attachments.is_empty() { Request::from(( crate::json_to_vec(&self.fields)?, Route::CreateMessage { channel_id: self.channel_id.0, }, )) } else { let mut form = Form::new(); for (index, (name, file)) in self.attachments.drain().enumerate() { form = form.part(format!("{}", index), Part::stream(file).file_name(name)); } let body = crate::json_to_vec(&self.fields)?; form = form.part("payload_json", Part::bytes(body)); Request::from(( form, Route::CreateMessage { channel_id: self.channel_id.0, }, )) }, ))); Ok(()) } } poll_req!(CreateMessage<'_>, Message);
if let Err(source) = validate::embed(&embed) { return Err(CreateMessageError::EmbedTooLarge { embed: Box::new(embed), source, }); }
if_condition
[ { "content": "pub fn embed(embed: &Embed) -> Result<(), EmbedValidationError> {\n\n let mut total = 0;\n\n\n\n if embed.fields.len() > EmbedValidationError::FIELD_COUNT {\n\n return Err(EmbedValidationError::TooManyFields {\n\n amount: embed.fields.len(),\n\n });\n\n }\n\n\n\n ...
Rust
megenginelite-rs/src/api.rs
MegEngine/megenginelite-rs
8d88645bfe4e1eaee144aa73433b359c69d09b98
use crate::types::*; use megenginelite_sys::MgeLiteDynLib; use std::ffi::CStr; use std::sync::{Mutex, Once}; #[doc(hidden)] pub trait IntoLiteRst { fn into_rst(self) -> LiteResult<()>; } impl IntoLiteRst for i32 { fn into_rst(self) -> LiteResult<()> { match self { 0 => Ok(()), _ => { let descp = unsafe { let api = API .as_ref() .expect("dynamic library [megenginelite] is not found"); CStr::from_ptr(api.LITE_get_last_error()) } .to_str() .unwrap() .to_owned(); Err(LiteError::MGELiteError(descp)) } } } } #[doc(hidden)] pub static mut API: Option<MgeLiteDynLib> = None; #[cfg(feature = "auto-load")] fn auto_load() -> Option<()> { use std::path::PathBuf; use std::process::Command; if let Ok(output) = Command::new("python3") .args(["-c", "import megenginelite;print(megenginelite.__file__)"]) .output() { let output = String::from_utf8(output.stdout).ok()?; let mut dir = PathBuf::from(output); dir.pop(); dir.push("libs"); for name in std::fs::read_dir(&dir).ok()? { if let Some(path) = name.ok() { let path = path.path(); if let Some(ext) = path.extension() { if ext == "so" { unsafe { load(path) }.ok(); } } } } } None } lazy_static::lazy_static! 
{ static ref INIT: Mutex<()> = Mutex::new(()); } #[cfg(feature = "auto-load")] static INIT_ONCE: Once = Once::new(); #[doc(hidden)] pub fn api() -> &'static MgeLiteDynLib { #[cfg(feature = "auto-load")] INIT_ONCE.call_once(|| { auto_load(); }); unsafe { API.as_ref() .expect("dynamic library [megenginelite] is not found") } } pub unsafe fn load<P>(path: P) -> LiteResult<()> where P: AsRef<std::ffi::OsStr>, { let mut err = None; let _l = INIT.lock().unwrap(); match MgeLiteDynLib::new(&path) { Ok(lib) => { API = Some(lib); } Err(e) => { err = Some(e); } }; if err.is_some() { return Err(LiteError::LoadingFault); } check_version() } fn check_version() -> LiteResult<()> { let mut major = 0i32; let mut minor = 0i32; let mut patch = 0i32; unsafe { let api = API .as_ref() .expect("dynamic library [megenginelite] is not found"); api.LITE_get_version(&mut major, &mut minor, &mut patch) }; let current_version = version(major, minor, patch); let min_version = version( megenginelite_sys::MAJOR, megenginelite_sys::MINOR, megenginelite_sys::PATCH, ); if current_version < min_version { return Err(LiteError::VersionNotMatch(format!( "This version is not compatible, [expected version >= {}, but get {}]", min_version, current_version ))); } Ok(()) } fn version(major: i32, minor: i32, patch: i32) -> i32 { const UNIT: i32 = 10000; major * UNIT * UNIT + minor * UNIT + patch }
use crate::types::*; use megenginelite_sys::MgeLiteDynLib; use std::ffi::CStr; use std::sync::{Mutex, Once}; #[doc(hidden)] pub trait IntoLiteRst { fn into_rst(self) -> LiteResult<()>; } impl IntoLiteRst for i32 { fn into_rst(self) -> LiteResult<()> { match self { 0 => Ok(()), _ => { let descp = unsafe { let api = API .as_ref() .expect("dynamic library [megenginelite] is not found"); CStr::from_ptr(api.LITE_get_last_error()) } .to_str() .unwrap() .to_owned(); Err(LiteError::MGELiteError(descp)) } } } } #[doc(hidden)] pub static mut API: Option<MgeLiteDynLib> = None; #[cfg(feature = "auto-load")] fn auto_load() -> Option<()> { use std::path::PathBuf; use std::process::Command; if let Ok(output) = Command::new("python3") .args(["-c", "import megenginelite;print(megenginelite.__file__)"]) .output() { let output = String::from_utf8(output.stdout).ok()?; let mut dir = PathBuf::from(output); dir.pop(); dir.push("libs"); for name in std::fs::read_dir(&dir).ok()? { if let Some(path) = name.ok() { let path = path.path(); if let Some(ext) = path.extension() { if ext == "so" { unsafe { load(path) }.ok(); } } } } } None } lazy_static::lazy_static! { static ref INIT: Mutex<()> = Mutex::new(()); } #[cfg(feature = "auto-load")] static INIT_ONCE: Once = Once::new(); #[doc(hidden)] pub fn api() -> &'static MgeLiteDynLib { #[cfg(feature = "auto-load")] INIT_ONCE.call_once(|| { auto_load(); }); unsafe { API.as_ref() .expect("dynamic library [megenginelite] is not found") } } pub unsafe fn load<P>(path: P) -> LiteResult<()> where P: AsRef<std::ffi::OsStr>, { let mut err = None; let _l = INIT.lock().unwrap(); match MgeLiteDynLib::new(&path) { Ok(lib) => { API = Some(lib); } Err(e) => { err = Some(e); } }; if err.is_some() { return Err(LiteError::LoadingFault); } check_version() }
fn version(major: i32, minor: i32, patch: i32) -> i32 { const UNIT: i32 = 10000; major * UNIT * UNIT + minor * UNIT + patch }
fn check_version() -> LiteResult<()> { let mut major = 0i32; let mut minor = 0i32; let mut patch = 0i32; unsafe { let api = API .as_ref() .expect("dynamic library [megenginelite] is not found"); api.LITE_get_version(&mut major, &mut minor, &mut patch) }; let current_version = version(major, minor, patch); let min_version = version( megenginelite_sys::MAJOR, megenginelite_sys::MINOR, megenginelite_sys::PATCH, ); if current_version < min_version { return Err(LiteError::VersionNotMatch(format!( "This version is not compatible, [expected version >= {}, but get {}]", min_version, current_version ))); } Ok(()) }
function_block-full_function
[ { "content": "/// Dump the algo policy cache to file, if the network is set to profile\n\n///\n\n/// when forward, though this the algo policy will dump to file\n\n/// `cache_path` is the file path which store the cache\n\npub fn dump_persistent_cache(path: impl AsRef<Path>) {\n\n let path = utils::path_to_c...
Rust
benches/bench.rs
sdroege/byteorder
f8e7685b3a81c52f5448fd77fb4e0535bc92f880
#![cfg_attr(feature = "i128", feature(i128))] #![feature(test)] extern crate byteorder; extern crate test; macro_rules! bench_num { ($name:ident, $read:ident, $bytes:expr, $data:expr) => ( mod $name { use byteorder::{ByteOrder, BigEndian, NativeEndian, LittleEndian}; use super::test::Bencher; use super::test::black_box as bb; const NITER: usize = 100_000; #[bench] fn read_big_endian(b: &mut Bencher) { let buf = $data; b.iter(|| { for _ in 0..NITER { bb(BigEndian::$read(&buf, $bytes)); } }); } #[bench] fn read_little_endian(b: &mut Bencher) { let buf = $data; b.iter(|| { for _ in 0..NITER { bb(LittleEndian::$read(&buf, $bytes)); } }); } #[bench] fn read_native_endian(b: &mut Bencher) { let buf = $data; b.iter(|| { for _ in 0..NITER { bb(NativeEndian::$read(&buf, $bytes)); } }); } } ); ($ty:ident, $max:ident, $read:ident, $write:ident, $size:expr, $data:expr) => ( mod $ty { use std::$ty; use byteorder::{ByteOrder, BigEndian, NativeEndian, LittleEndian}; use super::test::Bencher; use super::test::black_box as bb; const NITER: usize = 100_000; #[bench] fn read_big_endian(b: &mut Bencher) { let buf = $data; b.iter(|| { for _ in 0..NITER { bb(BigEndian::$read(&buf)); } }); } #[bench] fn read_little_endian(b: &mut Bencher) { let buf = $data; b.iter(|| { for _ in 0..NITER { bb(LittleEndian::$read(&buf)); } }); } #[bench] fn read_native_endian(b: &mut Bencher) { let buf = $data; b.iter(|| { for _ in 0..NITER { bb(NativeEndian::$read(&buf)); } }); } #[bench] fn write_big_endian(b: &mut Bencher) { let mut buf = $data; let n = $ty::$max; b.iter(|| { for _ in 0..NITER { bb(BigEndian::$write(&mut buf, n)); } }); } #[bench] fn write_little_endian(b: &mut Bencher) { let mut buf = $data; let n = $ty::$max; b.iter(|| { for _ in 0..NITER { bb(LittleEndian::$write(&mut buf, n)); } }); } #[bench] fn write_native_endian(b: &mut Bencher) { let mut buf = $data; let n = $ty::$max; b.iter(|| { for _ in 0..NITER { bb(NativeEndian::$write(&mut buf, n)); } }); } } ); } bench_num!(u16, MAX, 
read_u16, write_u16, 2, [1, 2]); bench_num!(i16, MAX, read_i16, write_i16, 2, [1, 2]); bench_num!(u32, MAX, read_u32, write_u32, 4, [1, 2, 3, 4]); bench_num!(i32, MAX, read_i32, write_i32, 4, [1, 2, 3, 4]); bench_num!(u64, MAX, read_u64, write_u64, 8, [1, 2, 3, 4, 5, 6, 7, 8]); bench_num!(i64, MAX, read_i64, write_i64, 8, [1, 2, 3, 4, 5, 6, 7, 8]); bench_num!(f32, MAX, read_f32, write_f32, 4, [1, 2, 3, 4]); bench_num!(f64, MAX, read_f64, write_f64, 8, [1, 2, 3, 4, 5, 6, 7, 8]); bench_num!(uint_1, read_uint, 1, [1]); bench_num!(uint_2, read_uint, 2, [1, 2]); bench_num!(uint_3, read_uint, 3, [1, 2, 3]); bench_num!(uint_4, read_uint, 4, [1, 2, 3, 4]); bench_num!(uint_5, read_uint, 5, [1, 2, 3, 4, 5]); bench_num!(uint_6, read_uint, 6, [1, 2, 3, 4, 5, 6]); bench_num!(uint_7, read_uint, 7, [1, 2, 3, 4, 5, 6, 7]); bench_num!(uint_8, read_uint, 8, [1, 2, 3, 4, 5, 6, 7, 8]); bench_num!(int_1, read_int, 1, [1]); bench_num!(int_2, read_int, 2, [1, 2]); bench_num!(int_3, read_int, 3, [1, 2, 3]); bench_num!(int_4, read_int, 4, [1, 2, 3, 4]); bench_num!(int_5, read_int, 5, [1, 2, 3, 4, 5]); bench_num!(int_6, read_int, 6, [1, 2, 3, 4, 5, 6]); bench_num!(int_7, read_int, 7, [1, 2, 3, 4, 5, 6, 7]); bench_num!(int_8, read_int, 8, [1, 2, 3, 4, 5, 6, 7, 8]); #[cfg(feature = "i128")] bench_num!(u128, MAX, read_u128, write_u128, 16, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); #[cfg(feature = "i128")] bench_num!(i128, MAX, read_i128, write_i128, 16, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); #[cfg(feature = "i128")] bench_num!(uint128_1, read_uint128, 1, [1]); #[cfg(feature = "i128")] bench_num!(uint128_2, read_uint128, 2, [1, 2]); #[cfg(feature = "i128")] bench_num!(uint128_3, read_uint128, 3, [1, 2, 3]); #[cfg(feature = "i128")] bench_num!(uint128_4, read_uint128, 4, [1, 2, 3, 4]); #[cfg(feature = "i128")] bench_num!(uint128_5, read_uint128, 5, [1, 2, 3, 4, 5]); #[cfg(feature = "i128")] bench_num!(uint128_6, read_uint128, 6, [1, 2, 3, 4, 5, 6]); 
#[cfg(feature = "i128")] bench_num!(uint128_7, read_uint128, 7, [1, 2, 3, 4, 5, 6, 7]); #[cfg(feature = "i128")] bench_num!(uint128_8, read_uint128, 8, [1, 2, 3, 4, 5, 6, 7, 8]); #[cfg(feature = "i128")] bench_num!(uint128_9, read_uint128, 9, [1, 2, 3, 4, 5, 6, 7, 8, 9]); #[cfg(feature = "i128")] bench_num!(uint128_10, read_uint128, 10, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]); #[cfg(feature = "i128")] bench_num!(uint128_11, read_uint128, 11, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]); #[cfg(feature = "i128")] bench_num!(uint128_12, read_uint128, 12, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]); #[cfg(feature = "i128")] bench_num!(uint128_13, read_uint128, 13, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]); #[cfg(feature = "i128")] bench_num!(uint128_14, read_uint128, 14, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14]); #[cfg(feature = "i128")] bench_num!(uint128_15, read_uint128, 15, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]); #[cfg(feature = "i128")] bench_num!(uint128_16, read_uint128, 16, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); #[cfg(feature = "i128")] bench_num!(int128_1, read_int128, 1, [1]); #[cfg(feature = "i128")] bench_num!(int128_2, read_int128, 2, [1, 2]); #[cfg(feature = "i128")] bench_num!(int128_3, read_int128, 3, [1, 2, 3]); #[cfg(feature = "i128")] bench_num!(int128_4, read_int128, 4, [1, 2, 3, 4]); #[cfg(feature = "i128")] bench_num!(int128_5, read_int128, 5, [1, 2, 3, 4, 5]); #[cfg(feature = "i128")] bench_num!(int128_6, read_int128, 6, [1, 2, 3, 4, 5, 6]); #[cfg(feature = "i128")] bench_num!(int128_7, read_int128, 7, [1, 2, 3, 4, 5, 6, 7]); #[cfg(feature = "i128")] bench_num!(int128_8, read_int128, 8, [1, 2, 3, 4, 5, 6, 7, 8]); #[cfg(feature = "i128")] bench_num!(int128_9, read_int128, 9, [1, 2, 3, 4, 5, 6, 7, 8, 9]); #[cfg(feature = "i128")] bench_num!(int128_10, read_int128, 10, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]); #[cfg(feature = "i128")] bench_num!(int128_11, read_int128, 11, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]); #[cfg(feature = 
"i128")] bench_num!(int128_12, read_int128, 12, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]); #[cfg(feature = "i128")] bench_num!(int128_13, read_int128, 13, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]); #[cfg(feature = "i128")] bench_num!(int128_14, read_int128, 14, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14]); #[cfg(feature = "i128")] bench_num!(int128_15, read_int128, 15, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]); #[cfg(feature = "i128")] bench_num!(int128_16, read_int128, 16, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]);
#![cfg_attr(feature = "i128", feature(i128))] #![feature(test)] extern crate byteorder; extern crate test; macro_rules! bench_num { ($name:ident, $read:ident, $bytes:expr, $data:expr) => ( mod $name { use byteorder::{ByteOrder, BigEndian, NativeEndian, LittleEndian}; use super::test::Bencher; use super::test::black_box as bb; const NITER: usize = 100_000; #[bench] fn read_big_endian(b: &mut Bencher) { let buf = $data; b.iter(|| { for _ in 0..NITER { bb(BigEndian::$read(&buf, $bytes)); } }); } #[bench] fn read_little_endian(b: &mut Bencher) { let buf = $data; b.iter(|| { for _ in 0..NITER { bb(LittleEndian::$read(&buf, $bytes)); } }); } #[bench] fn read_native_endian(b: &mut Bencher) { let buf = $data; b.iter(|| { for _ in 0..NITER { bb(NativeEndian::$read(&buf, $bytes)); } }); } } ); ($ty:ident, $max:ident, $read:ident, $write:ident, $size:expr, $data:expr) => ( mod $ty { use std::$ty; use byteorder::{ByteOrder, BigEndian, NativeEndian, LittleEndian}; use super::test::Bencher; use super::test::black_box as bb; const NITER: usize = 100_000; #[bench] fn read_big_endian(b: &mut Bencher) { let buf = $data; b.iter(|| { for _ in 0..NITER { bb(BigEndian::$read(&buf)); } }); } #[bench] fn read_little_endian(b: &mut Bencher) { let buf = $data; b.iter(|| { for _ in 0..NITER { bb(LittleEndian::$read(&buf)); } }); } #[bench] fn read_native_endian(b: &mut Bencher) { let buf = $data; b.iter(|| { for _ in 0..NITER { bb(NativeEndian::$read(&buf)); } }); } #[bench] fn write_big_endian(b: &mut Bencher) { let mut buf = $data; let n = $ty::$max; b.iter(|| { for _ in 0..NITER { bb(BigEndian::$write(&mut buf, n)); } }); } #[bench] fn write_little_endian(b: &mut Bencher) { let mut buf = $data; let n = $ty::$max; b.iter(|| { for _ in 0..NITER { bb(LittleEndian::$write(&mut buf, n)); } }); } #[bench] fn write_native_endian(b: &mut Bencher) { let mut buf = $data; let n = $ty::$max; b.iter(|| { for _ in 0..NITER { bb(NativeEndian::$write(&mut buf, n)); } }); } } ); } bench_num!(u16, MAX, 
read_u16, write_u16, 2, [1, 2]); bench_num!(i16, MAX, read_i16, write_i16, 2, [1, 2]); bench_num!(u32, MAX, read_u32, write_u32, 4, [1, 2, 3, 4]); bench_num!(i32, MAX, read_i32, write_i32, 4, [1, 2, 3, 4]); bench_num!(u64, MAX, read_u64, write_u64, 8, [1, 2, 3, 4, 5, 6, 7, 8]); bench_num!(i64, MAX, read_i64, write_i64, 8, [1, 2, 3, 4, 5, 6, 7, 8]); bench_num!(
0, 11]); #[cfg(feature = "i128")] bench_num!(int128_12, read_int128, 12, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]); #[cfg(feature = "i128")] bench_num!(int128_13, read_int128, 13, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]); #[cfg(feature = "i128")] bench_num!(int128_14, read_int128, 14, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14]); #[cfg(feature = "i128")] bench_num!(int128_15, read_int128, 15, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]); #[cfg(feature = "i128")] bench_num!(int128_16, read_int128, 16, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]);
f32, MAX, read_f32, write_f32, 4, [1, 2, 3, 4]); bench_num!(f64, MAX, read_f64, write_f64, 8, [1, 2, 3, 4, 5, 6, 7, 8]); bench_num!(uint_1, read_uint, 1, [1]); bench_num!(uint_2, read_uint, 2, [1, 2]); bench_num!(uint_3, read_uint, 3, [1, 2, 3]); bench_num!(uint_4, read_uint, 4, [1, 2, 3, 4]); bench_num!(uint_5, read_uint, 5, [1, 2, 3, 4, 5]); bench_num!(uint_6, read_uint, 6, [1, 2, 3, 4, 5, 6]); bench_num!(uint_7, read_uint, 7, [1, 2, 3, 4, 5, 6, 7]); bench_num!(uint_8, read_uint, 8, [1, 2, 3, 4, 5, 6, 7, 8]); bench_num!(int_1, read_int, 1, [1]); bench_num!(int_2, read_int, 2, [1, 2]); bench_num!(int_3, read_int, 3, [1, 2, 3]); bench_num!(int_4, read_int, 4, [1, 2, 3, 4]); bench_num!(int_5, read_int, 5, [1, 2, 3, 4, 5]); bench_num!(int_6, read_int, 6, [1, 2, 3, 4, 5, 6]); bench_num!(int_7, read_int, 7, [1, 2, 3, 4, 5, 6, 7]); bench_num!(int_8, read_int, 8, [1, 2, 3, 4, 5, 6, 7, 8]); #[cfg(feature = "i128")] bench_num!(u128, MAX, read_u128, write_u128, 16, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); #[cfg(feature = "i128")] bench_num!(i128, MAX, read_i128, write_i128, 16, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); #[cfg(feature = "i128")] bench_num!(uint128_1, read_uint128, 1, [1]); #[cfg(feature = "i128")] bench_num!(uint128_2, read_uint128, 2, [1, 2]); #[cfg(feature = "i128")] bench_num!(uint128_3, read_uint128, 3, [1, 2, 3]); #[cfg(feature = "i128")] bench_num!(uint128_4, read_uint128, 4, [1, 2, 3, 4]); #[cfg(feature = "i128")] bench_num!(uint128_5, read_uint128, 5, [1, 2, 3, 4, 5]); #[cfg(feature = "i128")] bench_num!(uint128_6, read_uint128, 6, [1, 2, 3, 4, 5, 6]); #[cfg(feature = "i128")] bench_num!(uint128_7, read_uint128, 7, [1, 2, 3, 4, 5, 6, 7]); #[cfg(feature = "i128")] bench_num!(uint128_8, read_uint128, 8, [1, 2, 3, 4, 5, 6, 7, 8]); #[cfg(feature = "i128")] bench_num!(uint128_9, read_uint128, 9, [1, 2, 3, 4, 5, 6, 7, 8, 9]); #[cfg(feature = "i128")] bench_num!(uint128_10, read_uint128, 10, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]); 
#[cfg(feature = "i128")] bench_num!(uint128_11, read_uint128, 11, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]); #[cfg(feature = "i128")] bench_num!(uint128_12, read_uint128, 12, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]); #[cfg(feature = "i128")] bench_num!(uint128_13, read_uint128, 13, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]); #[cfg(feature = "i128")] bench_num!(uint128_14, read_uint128, 14, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14]); #[cfg(feature = "i128")] bench_num!(uint128_15, read_uint128, 15, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]); #[cfg(feature = "i128")] bench_num!(uint128_16, read_uint128, 16, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); #[cfg(feature = "i128")] bench_num!(int128_1, read_int128, 1, [1]); #[cfg(feature = "i128")] bench_num!(int128_2, read_int128, 2, [1, 2]); #[cfg(feature = "i128")] bench_num!(int128_3, read_int128, 3, [1, 2, 3]); #[cfg(feature = "i128")] bench_num!(int128_4, read_int128, 4, [1, 2, 3, 4]); #[cfg(feature = "i128")] bench_num!(int128_5, read_int128, 5, [1, 2, 3, 4, 5]); #[cfg(feature = "i128")] bench_num!(int128_6, read_int128, 6, [1, 2, 3, 4, 5, 6]); #[cfg(feature = "i128")] bench_num!(int128_7, read_int128, 7, [1, 2, 3, 4, 5, 6, 7]); #[cfg(feature = "i128")] bench_num!(int128_8, read_int128, 8, [1, 2, 3, 4, 5, 6, 7, 8]); #[cfg(feature = "i128")] bench_num!(int128_9, read_int128, 9, [1, 2, 3, 4, 5, 6, 7, 8, 9]); #[cfg(feature = "i128")] bench_num!(int128_10, read_int128, 10, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]); #[cfg(feature = "i128")] bench_num!(int128_11, read_int128, 11, [1, 2, 3, 4, 5, 6, 7, 8, 9, 1
random
[ { "content": "#[cfg(feature = \"i128\")]\n\n#[inline]\n\nfn unextend_sign128(val: i128, nbytes: usize) -> u128 {\n\n let shift = (16 - nbytes) * 8;\n\n (val << shift) as u128 >> shift\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 0, "score": 48565.358119323115 }, { "content": "#[cfg(...
Rust
src/llvm/global_variables.rs
mrLSD/iLang
3202a32d7401460cab90b15768e42088c2de3bcb
use super::{ addrspace::AddrSpace, align::Alignment, comdat::ComDat, dll_storage_classes::DllStorageClasses, linkage_types::LinkageTypes, runtime_preemption::RuntimePreemptionSpecifier, section::Section, thread_local_storage::ThreadLocalStorage, types::Type, visibility_styles::VisibilityStyles, }; use crate::llvm::InstructionSet; #[derive(Debug, Eq, PartialEq, Clone)] pub enum UnnamedAddr { UnnamedAddr, LocalUnnamedAddr, } #[derive(Debug, Eq, PartialEq, Clone)] pub enum GlobalVariableKind { Global, Constant, } #[derive(Debug, Eq, PartialEq, Clone)] pub struct GlobalVariable { pub name: String, pub linkage: Option<LinkageTypes>, pub preemption_specifier: Option<RuntimePreemptionSpecifier>, pub visibility: Option<VisibilityStyles>, pub dll_storage_classes: Option<DllStorageClasses>, pub thread_local: Option<ThreadLocalStorage>, pub unnamed_addr: Option<UnnamedAddr>, pub addrspace: Option<AddrSpace>, pub global_variable_kind: GlobalVariableKind, pub value_type: Type, pub initializer_constant: Option<String>, pub section: Option<Section>, pub comdat: Option<ComDat>, pub alignment: Option<Alignment>, pub metadata: Option<String>, pub ctx: Option<u64>, } impl GlobalVariable { pub fn get_value_name(&self) -> Option<String> { if let Some(ctx) = self.ctx { Some(format!("@{:?}", ctx)) } else { Some(format!("@{}", self.name)) } } } impl InstructionSet for GlobalVariable { fn set_context(&mut self, ctx: u64) { self.ctx = Some(ctx); } fn is_assignment(&self) -> bool { true } fn is_global(&self) -> bool { true } fn get_type(&self) -> Option<Type> { Some(self.value_type.clone()) } fn get_value(&self) -> Option<String> { self.get_value_name() } } impl std::fmt::Display for UnnamedAddr { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let s = match self { UnnamedAddr::UnnamedAddr => "unnamed_addr", UnnamedAddr::LocalUnnamedAddr => "local_unnamed_addr", }; write!(f, "{}", s) } } impl std::fmt::Display for GlobalVariableKind { fn fmt(&self, f: &mut 
std::fmt::Formatter) -> std::fmt::Result { let s = match self { GlobalVariableKind::Global => "global", GlobalVariableKind::Constant => "constant", }; write!(f, "{}", s) } } impl std::fmt::Display for GlobalVariable { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let mut s = if let Some(ctx) = self.ctx { format!("@{:?} =", ctx) } else { format!("@{} =", self.name) }; if self.linkage.is_some() { s = format!("{} {}", s, self.linkage.as_ref().unwrap()); } if self.preemption_specifier.is_some() { s = format!("{} {}", s, self.preemption_specifier.as_ref().unwrap()); } if self.visibility.is_some() { s = format!("{} {}", s, self.visibility.as_ref().unwrap()); } if self.dll_storage_classes.is_some() { s = format!("{} {}", s, self.dll_storage_classes.as_ref().unwrap()); } if self.thread_local.is_some() { s = format!("{} {}", s, self.thread_local.as_ref().unwrap()); } if self.unnamed_addr.is_some() { s = format!("{} {}", s, self.unnamed_addr.as_ref().unwrap()); } if self.addrspace.is_some() { s = format!("{} {}", s, self.addrspace.as_ref().unwrap()); } s = format!("{} {} {}", s, self.global_variable_kind, self.value_type); if self.initializer_constant.is_some() { s = format!("{} {}", s, self.initializer_constant.as_ref().unwrap()); } if self.section.is_some() { s = format!("{}, {}", s, self.section.as_ref().unwrap()); } if self.comdat.is_some() { s = format!("{}, {}", s, self.comdat.as_ref().unwrap()); } if self.alignment.is_some() { s = format!("{}, {}", s, self.alignment.as_ref().unwrap()); } if self.metadata.is_some() { s = format!("{}, {}", s, self.metadata.as_ref().unwrap()); } write!(f, "{}", s) } }
use super::{ addrspace::AddrSpace, align::Alignment, comdat::ComDat, dll_storage_classes::DllStorageClasses, linkage_types::LinkageTypes, runtime_preemption::RuntimePreemptionSpecifier, section::Section, thread_local_storage::ThreadLocalStorage, types::Type, visibility_styles::VisibilityStyles, }; use crate::llvm::InstructionSet; #[derive(Debug, Eq, PartialEq, Clone)] pub enum UnnamedAddr { UnnamedAddr, LocalUnnamedAddr, } #[derive(Debug, Eq, PartialEq, Clone)] pub enum GlobalVariableKind { Global, Constant, } #[derive(Debug, Eq, PartialEq, Clone)] pub struct GlobalVariable { pub name: String, pub linkage: Option<LinkageTypes>, pub preemption_specifier: Option<RuntimePreemptionSpecifier>, pub visibility: Option<VisibilityStyles>, pub dll_storage_classes: Option<DllStorageClasses>, pub thread_local: Option<ThreadLocalStorage>, pub unnamed_addr: Option<UnnamedAddr>, pub addrspace: Option<AddrSpace>, pub global_variable_kind: GlobalVariableKind, pub value_type: Type, pub initializer_constant: Option<String>, pub section: Option<Section>, pub comdat: Option<ComDat>, pub alignment: Option<Alignment>, pub metadata: Option<String>, pub ctx: Option<u64>, } impl GlobalVariable {
} impl InstructionSet for GlobalVariable { fn set_context(&mut self, ctx: u64) { self.ctx = Some(ctx); } fn is_assignment(&self) -> bool { true } fn is_global(&self) -> bool { true } fn get_type(&self) -> Option<Type> { Some(self.value_type.clone()) } fn get_value(&self) -> Option<String> { self.get_value_name() } } impl std::fmt::Display for UnnamedAddr { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let s = match self { UnnamedAddr::UnnamedAddr => "unnamed_addr", UnnamedAddr::LocalUnnamedAddr => "local_unnamed_addr", }; write!(f, "{}", s) } } impl std::fmt::Display for GlobalVariableKind { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let s = match self { GlobalVariableKind::Global => "global", GlobalVariableKind::Constant => "constant", }; write!(f, "{}", s) } } impl std::fmt::Display for GlobalVariable { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let mut s = if let Some(ctx) = self.ctx { format!("@{:?} =", ctx) } else { format!("@{} =", self.name) }; if self.linkage.is_some() { s = format!("{} {}", s, self.linkage.as_ref().unwrap()); } if self.preemption_specifier.is_some() { s = format!("{} {}", s, self.preemption_specifier.as_ref().unwrap()); } if self.visibility.is_some() { s = format!("{} {}", s, self.visibility.as_ref().unwrap()); } if self.dll_storage_classes.is_some() { s = format!("{} {}", s, self.dll_storage_classes.as_ref().unwrap()); } if self.thread_local.is_some() { s = format!("{} {}", s, self.thread_local.as_ref().unwrap()); } if self.unnamed_addr.is_some() { s = format!("{} {}", s, self.unnamed_addr.as_ref().unwrap()); } if self.addrspace.is_some() { s = format!("{} {}", s, self.addrspace.as_ref().unwrap()); } s = format!("{} {} {}", s, self.global_variable_kind, self.value_type); if self.initializer_constant.is_some() { s = format!("{} {}", s, self.initializer_constant.as_ref().unwrap()); } if self.section.is_some() { s = format!("{}, {}", s, self.section.as_ref().unwrap()); } if 
self.comdat.is_some() { s = format!("{}, {}", s, self.comdat.as_ref().unwrap()); } if self.alignment.is_some() { s = format!("{}, {}", s, self.alignment.as_ref().unwrap()); } if self.metadata.is_some() { s = format!("{}, {}", s, self.metadata.as_ref().unwrap()); } write!(f, "{}", s) } }
pub fn get_value_name(&self) -> Option<String> { if let Some(ctx) = self.ctx { Some(format!("@{:?}", ctx)) } else { Some(format!("@{}", self.name)) } }
function_block-full_function
[ { "content": "type LetValueName = String;\n\n\n\n/// Value and their type representation\n\n#[derive(Debug, Clone)]\n\npub struct ValueType {\n\n pub value: LetValueName,\n\n pub value_type: Option<BuildInTypes>,\n\n}\n\n\n\nimpl<'a> Codegen<'a> {\n\n #[allow(clippy::ptr_arg)]\n\n fn new(ast: &'a Ma...
Rust
src/mint.rs
Rational-As-Fuck/metaboss
89ee1b6123ca0b743d72d220f27fcffa7e728d16
use anyhow::{anyhow, Result}; use glob::glob; use log::{error, info}; use metaplex_token_metadata::instruction::{ create_master_edition, create_metadata_accounts, update_metadata_accounts, }; use rayon::prelude::*; use reqwest; use retry::{delay::Exponential, retry}; use serde_json::Value; use solana_client::rpc_client::RpcClient; use solana_sdk::{ pubkey::Pubkey, signature::Signature, signer::{keypair::Keypair, Signer}, system_instruction::create_account, transaction::Transaction, }; use spl_associated_token_account::{create_associated_token_account, get_associated_token_address}; use spl_token::{ instruction::{initialize_mint, mint_to}, ID as TOKEN_PROGRAM_ID, }; use std::{fs::File, path::Path, str::FromStr}; use crate::data::NFTData; use crate::parse::*; use crate::{constants::*, parse::convert_local_to_remote_data}; const MINT_LAYOUT: u64 = 82; pub fn mint_list( client: &RpcClient, keypair: String, receiver: Option<String>, list_dir: Option<String>, external_metadata_uris: Option<String>, immutable: bool, primary_sale_happened: bool, ) -> Result<()> { if !is_only_one_option(&list_dir, &external_metadata_uris) { return Err(anyhow!( "Only one of --list-dir or --external-metadata-uris can be specified" )); } if let Some(list_dir) = list_dir { mint_from_files( client, keypair, receiver, list_dir, immutable, primary_sale_happened, )?; } else if let Some(external_metadata_uris) = external_metadata_uris { mint_from_uris( client, keypair, receiver, external_metadata_uris, immutable, primary_sale_happened, )?; } else { return Err(anyhow!( "Either --list-dir or --external-metadata-uris must be specified" )); } Ok(()) } pub fn mint_from_files( client: &RpcClient, keypair: String, receiver: Option<String>, list_dir: String, immutable: bool, primary_sale_happened: bool, ) -> Result<()> { let path = Path::new(&list_dir).join("*.json"); let pattern = path.to_str().ok_or(anyhow!("Invalid directory path"))?; let (paths, errors): (Vec<_>, Vec<_>) = 
glob(pattern)?.into_iter().partition(Result::is_ok); let paths: Vec<_> = paths.into_iter().map(Result::unwrap).collect(); let errors: Vec<_> = errors.into_iter().map(Result::unwrap_err).collect(); paths.par_iter().for_each(|path| { match mint_one( client, &keypair, &receiver, Some(path), None, immutable, primary_sale_happened, ) { Ok(_) => (), Err(e) => error!("Failed to mint {:?}: {}", &path, e), } }); if !errors.is_empty() { error!("Failed to read some of the files with the following errors:"); for error in errors { error!("{}", error); } } Ok(()) } pub fn mint_from_uris( client: &RpcClient, keypair: String, receiver: Option<String>, external_metadata_uris_path: String, immutable: bool, primary_sale_happened: bool, ) -> Result<()> { let f = File::open(external_metadata_uris_path)?; let external_metadata_uris: Vec<String> = serde_json::from_reader(f)?; external_metadata_uris .par_iter() .for_each(|uri| { match mint_one( client, &keypair, &receiver, None::<String>, Some(uri), immutable, primary_sale_happened, ) { Ok(_) => (), Err(e) => error!("Failed to mint {:?}: {}", &uri, e), } }); Ok(()) } pub fn mint_one<P: AsRef<Path>>( client: &RpcClient, keypair: &String, receiver: &Option<String>, nft_data_file: Option<P>, external_metadata_uri: Option<&String>, immutable: bool, primary_sale_happened: bool, ) -> Result<()> { if !is_only_one_option(&nft_data_file, &external_metadata_uri) { return Err(anyhow!( "You must supply either --nft_data_file or --external-metadata-uris but not both" )); } let keypair = parse_keypair(&keypair)?; let receiver = if let Some(address) = receiver { Pubkey::from_str(&address)? } else { keypair.pubkey() }; let nft_data: NFTData = if let Some(nft_data_file) = nft_data_file { let f = File::open(nft_data_file)?; serde_json::from_reader(f)? 
} else if let Some(external_metadata_uri) = external_metadata_uri { let body: Value = reqwest::blocking::get(external_metadata_uri)?.json()?; let creators_json = body .get("properties") .ok_or_else(|| anyhow!("Bad JSON"))? .get("creators") .ok_or_else(|| anyhow!("Bad JSON"))?; let name = parse_name(&body)?; let creators = parse_creators(&creators_json)?; let symbol = parse_symbol(&body)?; let seller_fee_basis_points = parse_seller_fee_basis_points(&body)?; NFTData { name, symbol, creators: Some(creators), uri: external_metadata_uri.to_string(), seller_fee_basis_points, } } else { return Err(anyhow!( "You must supply either --nft_data_file or --external-metadata-uris but not both" )); }; let (tx_id, mint_account) = mint( client, keypair, receiver, nft_data, immutable, primary_sale_happened, )?; info!("Tx id: {:?}\nMint account: {:?}", &tx_id, &mint_account); let message = format!("Tx id: {:?}\nMint account: {:?}", &tx_id, &mint_account,); println!("{}", message); Ok(()) } pub fn mint( client: &RpcClient, funder: Keypair, receiver: Pubkey, nft_data: NFTData, immutable: bool, primary_sale_happened: bool, ) -> Result<(Signature, Pubkey)> { let metaplex_program_id = Pubkey::from_str(METAPLEX_PROGRAM_ID)?; let mint = Keypair::new(); let data = convert_local_to_remote_data(nft_data)?; let min_rent = client.get_minimum_balance_for_rent_exemption(MINT_LAYOUT as usize)?; let create_mint_account_ix = create_account( &funder.pubkey(), &mint.pubkey(), min_rent, MINT_LAYOUT, &TOKEN_PROGRAM_ID, ); let init_mint_ix = initialize_mint( &TOKEN_PROGRAM_ID, &mint.pubkey(), &funder.pubkey(), Some(&funder.pubkey()), 0, )?; let assoc = get_associated_token_address(&receiver, &mint.pubkey()); let create_assoc_account_ix = create_associated_token_account(&funder.pubkey(), &receiver, &mint.pubkey()); let mint_to_ix = mint_to( &TOKEN_PROGRAM_ID, &mint.pubkey(), &assoc, &funder.pubkey(), &[], 1, )?; let metadata_seeds = &[ "metadata".as_bytes(), &metaplex_program_id.to_bytes(), 
&mint.pubkey().to_bytes(), ]; let (metadata_account, _pda) = Pubkey::find_program_address(metadata_seeds, &metaplex_program_id); let master_edition_seeds = &[ "metadata".as_bytes(), &metaplex_program_id.to_bytes(), &mint.pubkey().to_bytes(), "edition".as_bytes(), ]; let (master_edition_account, _pda) = Pubkey::find_program_address(master_edition_seeds, &metaplex_program_id); let create_metadata_account_ix = create_metadata_accounts( metaplex_program_id, metadata_account, mint.pubkey(), funder.pubkey(), funder.pubkey(), funder.pubkey(), data.name, data.symbol, data.uri, data.creators, data.seller_fee_basis_points, true, !immutable, ); let create_master_edition_account_ix = create_master_edition( metaplex_program_id, master_edition_account, mint.pubkey(), funder.pubkey(), funder.pubkey(), metadata_account, funder.pubkey(), Some(0), ); let mut instructions = vec![ create_mint_account_ix, init_mint_ix, create_assoc_account_ix, mint_to_ix, create_metadata_account_ix, create_master_edition_account_ix, ]; if primary_sale_happened { let ix = update_metadata_accounts( metaplex_program_id, metadata_account, funder.pubkey(), None, None, Some(true), ); instructions.push(ix); } let (recent_blockhash, _) = client.get_recent_blockhash()?; let tx = Transaction::new_signed_with_payer( &instructions, Some(&funder.pubkey()), &[&funder, &mint], recent_blockhash, ); let res = retry( Exponential::from_millis_with_factor(250, 2.0).take(3), || client.send_and_confirm_transaction(&tx), ); let sig = res?; Ok((sig, mint.pubkey())) }
use anyhow::{anyhow, Result}; use glob::glob; use log::{error, info}; use metaplex_token_metadata::instruction::{ create_master_edition, create_metadata_accounts, update_metadata_accounts, }; use rayon::prelude::*; use reqwest; use retry::{delay::Exponential, retry}; use serde_json::Value; use solana_client::rpc_client::RpcClient; use solana_sdk::{ pubkey::Pubkey, signature::Signature, signer::{keypair::Keypair, Signer}, system_instruction::create_account, transaction::Transaction, }; use spl_associated_token_account::{create_associated_token_account, get_associated_token_address}; use spl_token::{ instruction::{initialize_mint, mint_to}, ID as TOKEN_PROGRAM_ID, }; use std::{fs::File, path::Path, str::FromStr}; use crate::data::NFTData; use crate::parse::*; use crate::{constants::*, parse::convert_local_to_remote_data}; const MINT_LAYOUT: u64 = 82; pub fn mint_list( client: &RpcClient, keypair: String, receiver: Option<String>, list_dir: Option<String>, external_metadata_uris: Option<String>, immutable: bool, primary_sale_happened: bool, ) -> Result<()> { if !is_only_one_option(&list_dir, &external_metadata_uris) { return Err(anyhow!( "Only one of --list-dir or --external-metadata-uris can be specified" )); } if let Some(list_dir) = list_dir { mint_from_files( client, keypair, receiver, list_dir, immutable, primary_sale_happened, )?; } else if let Some(external_metadata_uris) = external_metadata_uris { mint_from_uris( client, keypair, receiver, external_metadata_uris, immutable, primary_sale_happened, )?; } else { return Err(anyhow!( "Either --list-dir or --external-metadata-uris must be specified" )); } Ok(()) } pub fn mint_from_files( client: &RpcClient, keypair: String, receiver: Option<String>, list_dir: String, immutable: bool, primary_sale_happened: bool, ) -> Result<()> { let path = Path::new(&list_dir).join("*.json"); let pattern = path.to_str().ok_or(anyhow!("Invalid directory path"))?; let (paths, errors): (Vec<_>, Vec<_>) = 
glob(pattern)?.into_iter().partition(Result::is_ok); let paths: Vec<_> = paths.into_iter().map(Result::unwrap).collect(); let errors: Vec<_> = errors.into_iter().map(Result::unwrap_err).collect(); paths.par_iter().for_each(|path| { match
{ Ok(_) => (), Err(e) => error!("Failed to mint {:?}: {}", &path, e), } }); if !errors.is_empty() { error!("Failed to read some of the files with the following errors:"); for error in errors { error!("{}", error); } } Ok(()) } pub fn mint_from_uris( client: &RpcClient, keypair: String, receiver: Option<String>, external_metadata_uris_path: String, immutable: bool, primary_sale_happened: bool, ) -> Result<()> { let f = File::open(external_metadata_uris_path)?; let external_metadata_uris: Vec<String> = serde_json::from_reader(f)?; external_metadata_uris .par_iter() .for_each(|uri| { match mint_one( client, &keypair, &receiver, None::<String>, Some(uri), immutable, primary_sale_happened, ) { Ok(_) => (), Err(e) => error!("Failed to mint {:?}: {}", &uri, e), } }); Ok(()) } pub fn mint_one<P: AsRef<Path>>( client: &RpcClient, keypair: &String, receiver: &Option<String>, nft_data_file: Option<P>, external_metadata_uri: Option<&String>, immutable: bool, primary_sale_happened: bool, ) -> Result<()> { if !is_only_one_option(&nft_data_file, &external_metadata_uri) { return Err(anyhow!( "You must supply either --nft_data_file or --external-metadata-uris but not both" )); } let keypair = parse_keypair(&keypair)?; let receiver = if let Some(address) = receiver { Pubkey::from_str(&address)? } else { keypair.pubkey() }; let nft_data: NFTData = if let Some(nft_data_file) = nft_data_file { let f = File::open(nft_data_file)?; serde_json::from_reader(f)? } else if let Some(external_metadata_uri) = external_metadata_uri { let body: Value = reqwest::blocking::get(external_metadata_uri)?.json()?; let creators_json = body .get("properties") .ok_or_else(|| anyhow!("Bad JSON"))? 
.get("creators") .ok_or_else(|| anyhow!("Bad JSON"))?; let name = parse_name(&body)?; let creators = parse_creators(&creators_json)?; let symbol = parse_symbol(&body)?; let seller_fee_basis_points = parse_seller_fee_basis_points(&body)?; NFTData { name, symbol, creators: Some(creators), uri: external_metadata_uri.to_string(), seller_fee_basis_points, } } else { return Err(anyhow!( "You must supply either --nft_data_file or --external-metadata-uris but not both" )); }; let (tx_id, mint_account) = mint( client, keypair, receiver, nft_data, immutable, primary_sale_happened, )?; info!("Tx id: {:?}\nMint account: {:?}", &tx_id, &mint_account); let message = format!("Tx id: {:?}\nMint account: {:?}", &tx_id, &mint_account,); println!("{}", message); Ok(()) } pub fn mint( client: &RpcClient, funder: Keypair, receiver: Pubkey, nft_data: NFTData, immutable: bool, primary_sale_happened: bool, ) -> Result<(Signature, Pubkey)> { let metaplex_program_id = Pubkey::from_str(METAPLEX_PROGRAM_ID)?; let mint = Keypair::new(); let data = convert_local_to_remote_data(nft_data)?; let min_rent = client.get_minimum_balance_for_rent_exemption(MINT_LAYOUT as usize)?; let create_mint_account_ix = create_account( &funder.pubkey(), &mint.pubkey(), min_rent, MINT_LAYOUT, &TOKEN_PROGRAM_ID, ); let init_mint_ix = initialize_mint( &TOKEN_PROGRAM_ID, &mint.pubkey(), &funder.pubkey(), Some(&funder.pubkey()), 0, )?; let assoc = get_associated_token_address(&receiver, &mint.pubkey()); let create_assoc_account_ix = create_associated_token_account(&funder.pubkey(), &receiver, &mint.pubkey()); let mint_to_ix = mint_to( &TOKEN_PROGRAM_ID, &mint.pubkey(), &assoc, &funder.pubkey(), &[], 1, )?; let metadata_seeds = &[ "metadata".as_bytes(), &metaplex_program_id.to_bytes(), &mint.pubkey().to_bytes(), ]; let (metadata_account, _pda) = Pubkey::find_program_address(metadata_seeds, &metaplex_program_id); let master_edition_seeds = &[ "metadata".as_bytes(), &metaplex_program_id.to_bytes(), 
&mint.pubkey().to_bytes(), "edition".as_bytes(), ]; let (master_edition_account, _pda) = Pubkey::find_program_address(master_edition_seeds, &metaplex_program_id); let create_metadata_account_ix = create_metadata_accounts( metaplex_program_id, metadata_account, mint.pubkey(), funder.pubkey(), funder.pubkey(), funder.pubkey(), data.name, data.symbol, data.uri, data.creators, data.seller_fee_basis_points, true, !immutable, ); let create_master_edition_account_ix = create_master_edition( metaplex_program_id, master_edition_account, mint.pubkey(), funder.pubkey(), funder.pubkey(), metadata_account, funder.pubkey(), Some(0), ); let mut instructions = vec![ create_mint_account_ix, init_mint_ix, create_assoc_account_ix, mint_to_ix, create_metadata_account_ix, create_master_edition_account_ix, ]; if primary_sale_happened { let ix = update_metadata_accounts( metaplex_program_id, metadata_account, funder.pubkey(), None, None, Some(true), ); instructions.push(ix); } let (recent_blockhash, _) = client.get_recent_blockhash()?; let tx = Transaction::new_signed_with_payer( &instructions, Some(&funder.pubkey()), &[&funder, &mint], recent_blockhash, ); let res = retry( Exponential::from_millis_with_factor(250, 2.0).take(3), || client.send_and_confirm_transaction(&tx), ); let sig = res?; Ok((sig, mint.pubkey())) }
mint_one( client, &keypair, &receiver, Some(path), None, immutable, primary_sale_happened, )
call_expression
[ { "content": "pub fn sign_one(client: &RpcClient, keypair: String, account: String) -> Result<()> {\n\n let creator = parse_keypair(&keypair)?;\n\n let account_pubkey = Pubkey::from_str(&account)?;\n\n\n\n let metadata_pubkey = get_metadata_pda(account_pubkey);\n\n\n\n info!(\n\n \"Signing me...
Rust
starknet-core/src/types/block.rs
xJonathanLEI/starknet-rs
23f3b072e85ebd44437addc86449a3ba805dc2c7
use super::{ super::serde::unsigned_field_element::{UfeHex, UfeHexOption}, ConfirmedTransactionReceipt, FieldElement, TransactionType, }; use serde::Deserialize; use serde_with::serde_as; pub enum BlockId { Hash(FieldElement), Number(u64), Pending, Latest, } #[derive(Debug, Deserialize, PartialEq)] #[serde(rename_all = "SCREAMING_SNAKE_CASE")] #[cfg_attr(test, serde(deny_unknown_fields))] pub enum BlockStatus { Pending, Aborted, Reverted, AcceptedOnL2, AcceptedOnL1, } #[serde_as] #[derive(Debug, Deserialize)] #[cfg_attr(test, serde(deny_unknown_fields))] pub struct Block { #[serde(default)] #[serde_as(as = "UfeHexOption")] pub block_hash: Option<FieldElement>, pub block_number: Option<u64>, #[serde_as(as = "UfeHex")] pub parent_block_hash: FieldElement, pub timestamp: u64, #[serde(default)] #[serde_as(as = "UfeHexOption")] pub sequencer_address: Option<FieldElement>, #[serde(default)] #[serde_as(as = "UfeHexOption")] pub state_root: Option<FieldElement>, pub status: BlockStatus, #[serde_as(as = "UfeHex")] pub gas_price: FieldElement, pub transactions: Vec<TransactionType>, pub transaction_receipts: Vec<ConfirmedTransactionReceipt>, } #[cfg(test)] mod tests { use super::super::transaction::EntryPointType; use super::*; #[test] #[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)] fn test_block_deser_with_transactions() { let raw = include_str!("../../test-data/raw_gateway_responses/get_block/1_with_transactions.txt"); let block: Block = serde_json::from_str(raw).unwrap(); assert_eq!(block.block_number.unwrap(), 39232); assert_eq!(block.status, BlockStatus::AcceptedOnL1); assert_eq!( block.state_root.unwrap(), FieldElement::from_hex_be( "06cb132715b8687f1c1d79a7282975986fb0a9c166d64b384cfad965a602fe02" ) .unwrap() ); assert_eq!(block.transactions.len(), 3); assert_eq!(block.transaction_receipts.len(), 3); if let TransactionType::Deploy(tx) = &block.transactions[0] { assert_eq!(tx.constructor_calldata.len(), 2); } else { panic!("Did not deserialize 
Transaction::Deploy properly"); } if let TransactionType::InvokeFunction(tx) = &block.transactions[1] { assert_eq!(tx.entry_point_type, EntryPointType::External); assert_eq!(tx.calldata.len(), 7); } else { panic!("Did not deserialize Transaction::InvokeFunction properly"); } let receipt = &block.transaction_receipts[0]; assert_eq!(receipt.execution_resources.n_steps, 68); } #[test] #[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)] fn test_block_deser_with_messages() { let raw = include_str!("../../test-data/raw_gateway_responses/get_block/2_with_messages.txt"); let block: Block = serde_json::from_str(raw).unwrap(); assert_eq!(block.block_number.unwrap(), 122387); assert_eq!(block.transaction_receipts.len(), 49); let receipt = &block.transaction_receipts[22]; assert_eq!(receipt.l2_to_l1_messages.len(), 1); assert_eq!(receipt.l2_to_l1_messages[0].payload.len(), 2); } #[test] #[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)] fn test_block_deser_with_events() { let raw = include_str!("../../test-data/raw_gateway_responses/get_block/3_with_events.txt"); let block: Block = serde_json::from_str(raw).unwrap(); assert_eq!(block.block_number.unwrap(), 47543); assert_eq!(block.transaction_receipts.len(), 4); let receipt = &block.transaction_receipts[3]; assert_eq!(receipt.events.len(), 1); assert_eq!(receipt.events[0].data.len(), 2); } #[test] #[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)] fn test_block_deser_pending() { let raw = include_str!("../../test-data/raw_gateway_responses/get_block/4_pending.txt"); let block: Block = serde_json::from_str(raw).unwrap(); assert!(block.block_hash.is_none()); assert!(block.block_number.is_none()); assert!(block.state_root.is_none()); assert_eq!(block.status, BlockStatus::Pending); } #[test] #[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)] fn test_block_deser_new_attributes_0_8_2() { let new_block: Block = 
serde_json::from_str(include_str!( "../../test-data/raw_gateway_responses/get_block/6_with_sequencer_address.txt" )) .unwrap(); assert!(new_block.sequencer_address.is_some()); let old_block: Block = serde_json::from_str(include_str!( "../../test-data/raw_gateway_responses/get_block/2_with_messages.txt" )) .unwrap(); assert!(old_block.sequencer_address.is_none()); } #[test] #[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)] fn test_block_deser_with_declare_tx() { let raw = include_str!("../../test-data/raw_gateway_responses/get_block/7_with_declare_tx.txt"); let block: Block = serde_json::from_str(raw).unwrap(); let tx = match &block.transactions[26] { TransactionType::Declare(tx) => tx, _ => panic!("Unexpected tx type"), }; assert_eq!(tx.sender_address, FieldElement::ONE); } }
use super::{ super::serde::unsigned_field_element::{UfeHex, UfeHexOption}, ConfirmedTransactionReceipt, FieldElement, TransactionType, }; use serde::Deserialize; use serde_with::serde_as; pub enum BlockId { Hash(FieldElement), Number(u64), Pending, Latest, } #[derive(Debug, Deserialize, PartialEq)] #[serde(rename_all = "SCREAMING_SNAKE_CASE")] #[cfg_attr(test, serde(deny_unknown_fields))] pub enum BlockStatus { Pending, Aborted, Reverted, AcceptedOnL2, AcceptedOnL1, } #[serde_as] #[derive(Debug, Deserialize)] #[cfg_attr(test, serde(deny_unknown_fields))] pub struct Block { #[serde(default)] #[serde_as(as = "UfeHexOption")] pub block_hash: Option<FieldElement>, pub block_number: Option<u64>, #[serde_as(as = "UfeHex")] pub parent_block_hash: FieldElement, pub timestamp: u64, #[serde(default)] #[serde_as(as = "UfeHexOption")] pub sequencer_address: Option<FieldElement>, #[serde(default)] #[serde_as(as = "UfeHexOption")] pub state_root: Option<FieldElement>, pub status: BlockStatus, #[serde_as(as = "UfeHex")] pub gas_price: FieldElement, pub transactions: Vec<TransactionType>, pub transaction_receipts: Vec<ConfirmedTransactionReceipt>, } #[cfg(test)] mod tests { use super::super::transaction::EntryPointType; use super::*; #[test] #[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)] fn test_block_deser_with_transactions() { let raw = include_str!("../../test-data/raw_gateway_responses/get_block/1_with_transactions.txt"); let block: Block = serde_json::from_str(raw).unwrap(); assert_eq!(block.block_number.unwrap(), 39232); assert_eq!(block.status, BlockStatus::AcceptedOnL1); assert_eq!( block.state_root.unwrap(), FieldElement::from_hex_be( "06cb132715b8687f1c1d79a7282975986fb0a9c166d64b384cfad965a602fe02" ) .unwrap() ); assert_eq!(block.transactions.len(), 3); assert_eq!(block.transaction_receipts.len(), 3); if let TransactionType::Deploy(tx) = &block.transactions[0] { assert_eq!(tx.constructor_calldata.len(), 2); } else { panic!("Did not deserialize 
Transaction::Deploy properly"); } if let TransactionType::InvokeFunction(tx) = &block.transactions[1] { assert_eq!(tx.entry_point_type, EntryPointType::External); assert_eq!(tx.calldata.len(), 7); } else { panic!("Did not deserialize Transaction::InvokeFunction properly"); } let receipt = &block.transaction_receipts[0]; assert_eq!(receipt.execution_resources.n_steps, 68); } #[test] #[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)] fn test_block_deser_with_messages() { let raw = include_str!("../../test-data/raw_gateway_responses/get_block/2_with_messages.txt"); let block: Block = serde_json::from_str(raw).unwrap(); assert_eq!(block.block_number.unwrap(), 122387); assert_eq!(block.transaction_receipts.len(), 49); let receipt = &block.transaction_receipts[22]; assert_eq!(receipt.l2_to_l1_messages.len(), 1); assert_eq!(receipt.l2_to_l1_messages[0].payload.len(), 2); } #[test] #[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)] fn test_block_deser_with_events() { let raw = include_str!("../../test-data/raw_gateway_responses/get_block/3_with_events.txt"); let block: Block = serde_json::from_str(raw).unwrap(); assert_eq!(block.block_number.unwrap(), 47543); assert_eq!(block.transaction_receipts.len(), 4); let receipt = &block.transaction_receipts[3]; assert_eq!(receipt.events.len(), 1); assert_eq!(receipt.events[0].data.len(), 2); } #[test] #[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)] fn test_block_deser_pending() { let raw = include_str!("../../test-data/raw_gateway_responses/get_block/4_pending.txt"); let block: Block = serde_json::from_str(raw).unwrap(); assert!(block.block_hash.is_none()); assert!(block.block_number.is_none()); assert!(block.state_root.is_none()); assert_eq!(block.status, BlockStatus::Pending); } #[test] #[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)] fn test_block_deser_new_attributes_0_8_2() { let new_block: Block = 
serde_json::from_str(include_str!( "../../test-data/raw_gateway_responses/get_block/6_with_sequencer_address.txt" )) .
#[test] #[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)] fn test_block_deser_with_declare_tx() { let raw = include_str!("../../test-data/raw_gateway_responses/get_block/7_with_declare_tx.txt"); let block: Block = serde_json::from_str(raw).unwrap(); let tx = match &block.transactions[26] { TransactionType::Declare(tx) => tx, _ => panic!("Unexpected tx type"), }; assert_eq!(tx.sender_address, FieldElement::ONE); } }
unwrap(); assert!(new_block.sequencer_address.is_some()); let old_block: Block = serde_json::from_str(include_str!( "../../test-data/raw_gateway_responses/get_block/2_with_messages.txt" )) .unwrap(); assert!(old_block.sequencer_address.is_none()); }
function_block-function_prefix_line
[ { "content": "#[serde_as]\n\n#[derive(Serialize, Deserialize)]\n\nstruct Felt(#[serde_as(as = \"UfeHex\")] pub FieldElement);\n\n\n", "file_path": "starknet-providers/src/jsonrpc/mod.rs", "rank": 0, "score": 146793.51411672964 }, { "content": "pub fn mul_mod_floor(\n\n multiplicand: &Fiel...
Rust
common/functions/src/scalars/logics/logic.rs
youngsofun/databend
82689b1f3eb9da9e4243045090815ff1124a0a38
use std::sync::Arc; use common_datavalues2::BooleanType; use common_datavalues2::ColumnBuilder; use common_datavalues2::ColumnRef; use common_datavalues2::ColumnViewer; use common_datavalues2::ColumnsWithField; use common_datavalues2::DataTypePtr; use common_datavalues2::NullableColumnBuilder; use common_datavalues2::NullableType; use common_exception::ErrorCode; use common_exception::Result; use super::xor::LogicXorFunction; use super::LogicAndFunction; use super::LogicNotFunction; use super::LogicOrFunction; use crate::scalars::cast_column_field; use crate::scalars::Function2; use crate::scalars::Function2Factory; #[derive(Clone)] pub struct LogicFunction { op: LogicOperator, } #[derive(Clone, Debug)] pub enum LogicOperator { Not, And, Or, Xor, } impl LogicFunction { pub fn try_create(op: LogicOperator) -> Result<Box<dyn Function2>> { Ok(Box::new(Self { op })) } pub fn register(factory: &mut Function2Factory) { factory.register("and", LogicAndFunction::desc()); factory.register("or", LogicOrFunction::desc()); factory.register("not", LogicNotFunction::desc()); factory.register("xor", LogicXorFunction::desc()); } fn eval_not(&self, columns: &ColumnsWithField, input_rows: usize) -> Result<ColumnRef> { let mut nullable = false; if columns[0].data_type().is_nullable() { nullable = true; } let dt = if nullable { Arc::new(NullableType::create(BooleanType::arc())) } else { BooleanType::arc() }; let col = cast_column_field(&columns[0], &dt)?; if nullable { let col_viewer = ColumnViewer::<bool>::create(&col)?; let mut builder = NullableColumnBuilder::<bool>::with_capacity(input_rows); for idx in 0..input_rows { builder.append(!col_viewer.value(idx), col_viewer.valid_at(idx)); } Ok(builder.build(input_rows)) } else { let col_viewer = ColumnViewer::<bool>::create(&col)?; let mut builder = ColumnBuilder::<bool>::with_capacity(input_rows); for idx in 0..input_rows { builder.append(!col_viewer.value(idx)); } Ok(builder.build(input_rows)) } } fn eval_and_not_or(&self, columns: 
&ColumnsWithField, input_rows: usize) -> Result<ColumnRef> { let mut nullable = false; if columns[0].data_type().is_nullable() || columns[1].data_type().is_nullable() { nullable = true; } let dt = if nullable { Arc::new(NullableType::create(BooleanType::arc())) } else { BooleanType::arc() }; let lhs = cast_column_field(&columns[0], &dt)?; let rhs = cast_column_field(&columns[1], &dt)?; if nullable { let lhs_viewer = ColumnViewer::<bool>::create(&lhs)?; let rhs_viewer = ColumnViewer::<bool>::create(&rhs)?; let mut builder = NullableColumnBuilder::<bool>::with_capacity(input_rows); macro_rules! calcute_with_null { ($input_rows:expr, $lhs_viewer: expr, $rhs_viewer: expr, $builder: expr, $func: expr) => { for idx in 0..$input_rows { let (val, valid) = $func( $lhs_viewer.value(idx), $rhs_viewer.value(idx), $lhs_viewer.valid_at(idx), $rhs_viewer.valid_at(idx), ); $builder.append(val, valid); } }; } match self.op { LogicOperator::And => calcute_with_null!( input_rows, lhs_viewer, rhs_viewer, builder, |lhs: bool, rhs: bool, l_valid: bool, r_valid: bool| -> (bool, bool) { (lhs & rhs, l_valid & r_valid) } ), LogicOperator::Or => calcute_with_null!( input_rows, lhs_viewer, rhs_viewer, builder, |lhs: bool, rhs: bool, _l_valid: bool, _r_valid: bool| -> (bool, bool) { (lhs || rhs, lhs || rhs) } ), LogicOperator::Xor => calcute_with_null!( input_rows, lhs_viewer, rhs_viewer, builder, |lhs: bool, rhs: bool, l_valid: bool, r_valid: bool| -> (bool, bool) { (lhs ^ rhs, l_valid & r_valid) } ), LogicOperator::Not => return Err(ErrorCode::LogicalError("never happen")), }; Ok(builder.build(input_rows)) } else { let lhs_viewer = ColumnViewer::<bool>::create(&lhs)?; let rhs_viewer = ColumnViewer::<bool>::create(&rhs)?; let mut builder = ColumnBuilder::<bool>::with_capacity(input_rows); macro_rules! 
calcute { ($input_rows:expr, $lhs_viewer: expr, $rhs_viewer: expr, $builder: expr, $func: expr) => { for idx in 0..$input_rows { $builder.append($func($lhs_viewer.value(idx), $rhs_viewer.value(idx))); } }; } match self.op { LogicOperator::And => calcute!( input_rows, lhs_viewer, rhs_viewer, builder, |lhs: bool, rhs: bool| -> bool { lhs & rhs } ), LogicOperator::Or => calcute!( input_rows, lhs_viewer, rhs_viewer, builder, |lhs: bool, rhs: bool| -> bool { lhs || rhs } ), LogicOperator::Xor => calcute!( input_rows, lhs_viewer, rhs_viewer, builder, |lhs: bool, rhs: bool| -> bool { lhs ^ rhs } ), LogicOperator::Not => return Err(ErrorCode::LogicalError("never happen")), }; Ok(builder.build(input_rows)) } } } impl Function2 for LogicFunction { fn name(&self) -> &str { "LogicFunction" } fn return_type(&self, args: &[&DataTypePtr]) -> Result<DataTypePtr> { match self.op { LogicOperator::Not => { if args[0].is_nullable() { Ok(Arc::new(NullableType::create(BooleanType::arc()))) } else { Ok(BooleanType::arc()) } } _ => { if args[0].is_nullable() || args[1].is_nullable() { Ok(Arc::new(NullableType::create(BooleanType::arc()))) } else { Ok(BooleanType::arc()) } } } } fn eval(&self, columns: &ColumnsWithField, input_rows: usize) -> Result<ColumnRef> { match self.op { LogicOperator::Not => self.eval_not(columns, input_rows), _ => self.eval_and_not_or(columns, input_rows), } } fn passthrough_null(&self) -> bool { !matches!(self.op, LogicOperator::Or) } } impl std::fmt::Display for LogicFunction { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{:?}", self.op) } }
use std::sync::Arc; use common_datavalues2::BooleanType; use common_datavalues2::ColumnBuilder; use common_datavalues2::ColumnRef; use common_datavalues2::ColumnViewer; use common_datavalues2::ColumnsWithField; use common_datavalues2::DataTypePtr; use common_datavalues2::NullableColumnBuilder; use common_datavalues2::NullableType; use common_exception::ErrorCode; use common_exception::Result; use super::xor::LogicXorFunction; use super::LogicAndFunction; use super::LogicNotFunction; use super::LogicOrFunction; use crate::scalars::cast_column_field; use crate::scalars::Function2; use crate::scalars::Function2Factory; #[derive(Clone)] pub struct LogicFunction { op: LogicOperator, } #[derive(Clone, Debug)] pub enum LogicOperator { Not, And, Or, Xor, } impl LogicFunction { pub fn try_create(op: LogicOperator) -> Result<Box<dyn Function2>> { Ok(Box::new(Self { op })) } pub fn register(factory: &mut Function2Factory) { factory.register("and", LogicAndFunction::desc()); factory.register("or", LogicOrFunction::desc()); factory.register("not", LogicNotFunction::desc()); factory.register("xor", LogicXorFunction::desc()); } fn eval_not(&self, columns: &ColumnsWithField, input_rows: usize) -> Result<ColumnRef> { let mut nullable = false; if columns[0].data_type().is_nullable() { nullable = true; } let dt = if nullable { Arc::new(NullableType::create(BooleanType::arc())) } else { BooleanType::arc() }; let col = cast_column_field(&columns[0], &dt)?; if nullable { let col_viewer = ColumnViewer::<bool>::create(&col)?; let mut builder = NullableColumnBuilder::<bool>::with_capacity(input_rows); for idx in 0..input_rows { builder.append(!col_viewer.value(idx), col_viewer.valid_at(idx)); } Ok(builder.build(input_rows)) } else { let col_viewer = ColumnViewer::<bool>::create(&col)?; let mut builder = ColumnBuilder::<bool>::with_capacity(input_rows); for idx in 0..input_rows { builder.append(!col_viewer.value(idx)); } Ok(builder.build(input_rows)) } } fn eval_and_not_or(&self, columns: 
&ColumnsWithField, input_rows: usize) -> Result<ColumnRef> { let mut nullable = false; if columns[0].data_type().is_nullable() || columns[1].data_type().is_nullable() { nullable = true; } let dt = if nullable { Arc::new(NullableType::create(BooleanType::arc())) } else { BooleanType::arc() }; let lhs = cast_column_field(&columns[0], &dt)?; let rhs = cast_column_field(&columns[1], &dt)?; if nullable { let lhs_viewer = ColumnViewer::<bool>::create(&lhs)?; let rhs_viewer = ColumnViewer::<bool>::create(&rhs)?; let mut builder = NullableColumnBuilder::<bool>::with_capacity(input_rows); macro_rules! calcute_with_null { ($input_rows:expr, $lhs_viewer: expr, $rhs_viewer: expr, $builder: expr, $func: expr) => { for idx in 0..$input_rows { let (val, valid) = $func( $lhs_viewer.value(idx), $rhs_viewer.value(idx), $lhs_viewer.valid_at(idx), $rhs_viewer.valid_at(idx), ); $builder.append(val, valid); } }; } match self.op { LogicOperator::And => calcute_with_null!( input_rows, lhs_viewer, rhs_viewer, builder, |lhs: bool, rhs: bool, l_valid: bool, r_valid: bool| -> (bool, bool) { (lhs & rhs, l_valid & r_valid) } ), LogicOperator::Or => calcute_with_null!( input_rows, lhs_viewer, rhs_viewer, builder, |lhs: bool, rhs: bool, _l_valid: bool, _r_valid: bool| -> (bool, bool) { (lhs || rhs, lhs || rhs) } ), LogicOperator::Xor => calcute_with_null!( input_rows, lhs_viewer, rhs_viewer, builder, |lhs: bool, rhs: bool, l_valid: bool, r_valid: bool| -> (bool, bool) { (lhs ^ rhs, l_valid & r_valid) } ), LogicOperator::Not => return Err(ErrorCode::LogicalError("never happen")), }; Ok(builder.build(input_rows)) } else { let lhs_viewer = ColumnViewer::<bool>::create(&lhs)?; let rhs_viewer = ColumnViewer::<bool>::create(&rhs)?; let mut builder = ColumnBuilder::<bool>::with_capacity(input_rows); macro_rules! 
calcute { ($input_rows:expr, $lhs_viewer: expr, $rhs_viewer: expr, $builder: expr, $func: expr) => { for idx in 0..$input_rows { $builder.append($func($lhs_viewer.value(idx), $rhs_viewer.value(idx))); } }; } match self.op { LogicOperator::And => calcute!( input_rows, lhs_viewer, rhs_viewer, builder, |lhs: bool, rhs: bool| -> bool { lhs & rhs } ), LogicOperator::Or => calcute!( input_rows, lhs_viewer, rhs_viewer, builder, |lhs: bool, rhs: bool| -> bool { lhs || rhs } ), LogicOperator::Xor => calcute!( input_rows, lhs_viewer, rhs_viewer, builder, |lhs: bool, rhs: bool| -> bool { lhs ^ rhs } ), LogicOperator::Not => return Err(ErrorCode::LogicalError("never happen")), }; Ok(builder.build(input_rows)) } } } impl Function2 for LogicFunction { fn name(&self) -> &str { "LogicFunction" }
fn eval(&self, columns: &ColumnsWithField, input_rows: usize) -> Result<ColumnRef> { match self.op { LogicOperator::Not => self.eval_not(columns, input_rows), _ => self.eval_and_not_or(columns, input_rows), } } fn passthrough_null(&self) -> bool { !matches!(self.op, LogicOperator::Or) } } impl std::fmt::Display for LogicFunction { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{:?}", self.op) } }
fn return_type(&self, args: &[&DataTypePtr]) -> Result<DataTypePtr> { match self.op { LogicOperator::Not => { if args[0].is_nullable() { Ok(Arc::new(NullableType::create(BooleanType::arc()))) } else { Ok(BooleanType::arc()) } } _ => { if args[0].is_nullable() || args[1].is_nullable() { Ok(Arc::new(NullableType::create(BooleanType::arc()))) } else { Ok(BooleanType::arc()) } } } }
function_block-full_function
[ { "content": "pub fn col(name: &str) -> Expression {\n\n Expression::Column(name.to_string())\n\n}\n", "file_path": "common/planners/src/plan_expression_column.rs", "rank": 0, "score": 377708.0555833581 }, { "content": "pub fn match_text(text: &'static str) -> impl FnMut(Input) -> IResult...
Rust
src/spi.rs
jonas-schievink/stm32f0xx-hal
a7daf77cf0e9707e0d9b8f441ffbe82d80654fb5
use core::marker::PhantomData; use core::{ops::Deref, ptr}; pub use embedded_hal::spi::{Mode, Phase, Polarity}; use crate::pac::SPI1; #[cfg(any( feature = "stm32f030x8", feature = "stm32f030xc", feature = "stm32f042", feature = "stm32f048", feature = "stm32f051", feature = "stm32f058", feature = "stm32f070xb", feature = "stm32f071", feature = "stm32f072", feature = "stm32f078", feature = "stm32f091", feature = "stm32f098", ))] use crate::pac::SPI2; use crate::gpio::*; use crate::rcc::{Clocks, Rcc}; use crate::time::Hertz; pub struct EightBit; pub struct SixteenBit; #[derive(Debug)] pub enum Error { Overrun, ModeFault, Crc, #[doc(hidden)] _Extensible, } pub struct Spi<SPI, SCKPIN, MISOPIN, MOSIPIN, WIDTH> { spi: SPI, pins: (SCKPIN, MISOPIN, MOSIPIN), _width: PhantomData<WIDTH>, } pub trait SckPin<SPI> {} pub trait MisoPin<SPI> {} pub trait MosiPin<SPI> {} macro_rules! spi_pins { ($($SPI:ident => { sck => [$($sck:ty),+ $(,)*], miso => [$($miso:ty),+ $(,)*], mosi => [$($mosi:ty),+ $(,)*], })+) => { $( $( impl SckPin<crate::pac::$SPI> for $sck {} )+ $( impl MisoPin<crate::pac::$SPI> for $miso {} )+ $( impl MosiPin<crate::pac::$SPI> for $mosi {} )+ )+ } } spi_pins! { SPI1 => { sck => [gpioa::PA5<Alternate<AF0>>, gpiob::PB3<Alternate<AF0>>], miso => [gpioa::PA6<Alternate<AF0>>, gpiob::PB4<Alternate<AF0>>], mosi => [gpioa::PA7<Alternate<AF0>>, gpiob::PB5<Alternate<AF0>>], } } #[cfg(any( feature = "stm32f030x4", feature = "stm32f030x6", feature = "stm32f031", feature = "stm32f038", ))] spi_pins! { SPI1 => { sck => [gpiob::PB13<Alternate<AF0>>], miso => [gpiob::PB14<Alternate<AF0>>], mosi => [gpiob::PB15<Alternate<AF0>>], } } #[cfg(any( feature = "stm32f030x8", feature = "stm32f030xc", feature = "stm32f042", feature = "stm32f048", feature = "stm32f051", feature = "stm32f058", feature = "stm32f070xb", feature = "stm32f071", feature = "stm32f072", feature = "stm32f078", feature = "stm32f091", feature = "stm32f098", ))] spi_pins! 
{ SPI2 => { sck => [gpiob::PB13<Alternate<AF0>>], miso => [gpiob::PB14<Alternate<AF0>>], mosi => [gpiob::PB15<Alternate<AF0>>], } } #[cfg(any( feature = "stm32f030xc", feature = "stm32f070xb", feature = "stm32f071", feature = "stm32f072", feature = "stm32f078", feature = "stm32f091", feature = "stm32f098", ))] spi_pins! { SPI2 => { sck => [gpiob::PB10<Alternate<AF5>>], miso => [gpioc::PC2<Alternate<AF1>>], mosi => [gpioc::PC3<Alternate<AF1>>], } } #[cfg(any( feature = "stm32f071", feature = "stm32f072", feature = "stm32f078", feature = "stm32f091", feature = "stm32f098", ))] spi_pins! { SPI2 => { sck => [gpiod::PD1<Alternate<AF1>>], miso => [gpiod::PD3<Alternate<AF1>>], mosi => [gpiod::PD4<Alternate<AF1>>], } } macro_rules! spi { ($($SPI:ident: ($spi:ident, $spiXen:ident, $spiXrst:ident, $apbenr:ident, $apbrstr:ident),)+) => { $( impl<SCKPIN, MISOPIN, MOSIPIN> Spi<$SPI, SCKPIN, MISOPIN, MOSIPIN, EightBit> { pub fn $spi<F>( spi: $SPI, pins: (SCKPIN, MISOPIN, MOSIPIN), mode: Mode, speed: F, rcc: &mut Rcc, ) -> Self where SCKPIN: SckPin<$SPI>, MISOPIN: MisoPin<$SPI>, MOSIPIN: MosiPin<$SPI>, F: Into<Hertz>, { /* Enable clock for SPI */ rcc.regs.$apbenr.modify(|_, w| w.$spiXen().set_bit()); /* Reset SPI */ rcc.regs.$apbrstr.modify(|_, w| w.$spiXrst().set_bit()); rcc.regs.$apbrstr.modify(|_, w| w.$spiXrst().clear_bit()); Spi::<$SPI, SCKPIN, MISOPIN, MOSIPIN, EightBit> { spi, pins, _width: PhantomData }.spi_init(mode, speed, rcc.clocks).into_8bit_width() } } )+ } } spi! { SPI1: (spi1, spi1en, spi1rst, apb2enr, apb2rstr), } #[cfg(any( feature = "stm32f030x8", feature = "stm32f030xc", feature = "stm32f042", feature = "stm32f048", feature = "stm32f051", feature = "stm32f058", feature = "stm32f070xb", feature = "stm32f071", feature = "stm32f072", feature = "stm32f078", feature = "stm32f091", feature = "stm32f098", ))] spi! 
{ SPI2: (spi2, spi2en, spi2rst, apb1enr, apb1rstr), } #[allow(dead_code)] type SpiRegisterBlock = crate::pac::spi1::RegisterBlock; impl<SPI, SCKPIN, MISOPIN, MOSIPIN, WIDTH> Spi<SPI, SCKPIN, MISOPIN, MOSIPIN, WIDTH> where SPI: Deref<Target = SpiRegisterBlock>, { fn spi_init<F>(self, mode: Mode, speed: F, clocks: Clocks) -> Self where F: Into<Hertz>, { /* Make sure the SPI unit is disabled so we can configure it */ self.spi.cr1.modify(|_, w| w.spe().clear_bit()); let br = match clocks.pclk().0 / speed.into().0 { 0 => unreachable!(), 1..=2 => 0b000, 3..=5 => 0b001, 6..=11 => 0b010, 12..=23 => 0b011, 24..=47 => 0b100, 48..=95 => 0b101, 96..=191 => 0b110, _ => 0b111, }; self.spi.cr1.write(|w| { w.cpha() .bit(mode.phase == Phase::CaptureOnSecondTransition) .cpol() .bit(mode.polarity == Polarity::IdleHigh) .mstr() .set_bit() .br() .bits(br) .lsbfirst() .clear_bit() .ssm() .set_bit() .ssi() .set_bit() .rxonly() .clear_bit() .bidimode() .clear_bit() .spe() .set_bit() }); self } pub fn into_8bit_width(self) -> Spi<SPI, SCKPIN, MISOPIN, MOSIPIN, EightBit> { self.spi .cr2 .write(|w| w.frxth().set_bit().ds().eight_bit().ssoe().clear_bit()); Spi { spi: self.spi, pins: self.pins, _width: PhantomData, } } pub fn into_16bit_width(self) -> Spi<SPI, SCKPIN, MISOPIN, MOSIPIN, SixteenBit> { self.spi .cr2 .write(|w| w.frxth().set_bit().ds().sixteen_bit().ssoe().clear_bit()); Spi { spi: self.spi, pins: self.pins, _width: PhantomData, } } fn set_send_only(&mut self) { self.spi .cr1 .modify(|_, w| w.bidimode().set_bit().bidioe().set_bit()); } fn set_bidi(&mut self) { self.spi .cr1 .modify(|_, w| w.bidimode().clear_bit().bidioe().clear_bit()); } fn check_read(&mut self) -> nb::Result<(), Error> { let sr = self.spi.sr.read(); Err(if sr.ovr().bit_is_set() { nb::Error::Other(Error::Overrun) } else if sr.modf().bit_is_set() { nb::Error::Other(Error::ModeFault) } else if sr.crcerr().bit_is_set() { nb::Error::Other(Error::Crc) } else if sr.rxne().bit_is_set() { return Ok(()); } else { 
nb::Error::WouldBlock }) } fn send_buffer_size(&mut self) -> u8 { match self.spi.sr.read().ftlvl().bits() { 0 => 4, 1 => 3, 2 => 2, _ => 0, } } fn check_send(&mut self) -> nb::Result<(), Error> { let sr = self.spi.sr.read(); Err(if sr.ovr().bit_is_set() { nb::Error::Other(Error::Overrun) } else if sr.modf().bit_is_set() { nb::Error::Other(Error::ModeFault) } else if sr.crcerr().bit_is_set() { nb::Error::Other(Error::Crc) } else if sr.txe().bit_is_set() { return Ok(()); } else { nb::Error::WouldBlock }) } fn read_u8(&mut self) -> u8 { unsafe { ptr::read_volatile(&self.spi.dr as *const _ as *const u8) } } fn send_u8(&mut self, byte: u8) { unsafe { ptr::write_volatile(&self.spi.dr as *const _ as *mut u8, byte) } } fn read_u16(&mut self) -> u16 { unsafe { ptr::read_volatile(&self.spi.dr as *const _ as *const u16) } } fn send_u16(&mut self, byte: u16) { unsafe { ptr::write_volatile(&self.spi.dr as *const _ as *mut u16, byte) } } pub fn release(self) -> (SPI, (SCKPIN, MISOPIN, MOSIPIN)) { (self.spi, self.pins) } } impl<SPI, SCKPIN, MISOPIN, MOSIPIN> ::embedded_hal::blocking::spi::Transfer<u8> for Spi<SPI, SCKPIN, MISOPIN, MOSIPIN, EightBit> where SPI: Deref<Target = SpiRegisterBlock>, { type Error = Error; fn transfer<'w>(&mut self, words: &'w mut [u8]) -> Result<&'w [u8], Self::Error> { self.set_bidi(); for word in words.iter_mut() { nb::block!(self.check_send())?; self.send_u8(word.clone()); nb::block!(self.check_read())?; *word = self.read_u8(); } Ok(words) } } impl<SPI, SCKPIN, MISOPIN, MOSIPIN> ::embedded_hal::blocking::spi::Write<u8> for Spi<SPI, SCKPIN, MISOPIN, MOSIPIN, EightBit> where SPI: Deref<Target = SpiRegisterBlock>, { type Error = Error; fn write(&mut self, words: &[u8]) -> Result<(), Self::Error> { let mut bufcap: u8 = 0; self.set_send_only(); nb::block!(self.check_send())?; for word in words { while bufcap == 0 { bufcap = self.send_buffer_size(); } self.send_u8(*word); bufcap -= 1; } self.check_send().ok(); Ok(()) } } impl<SPI, SCKPIN, MISOPIN, MOSIPIN> 
::embedded_hal::blocking::spi::Transfer<u16> for Spi<SPI, SCKPIN, MISOPIN, MOSIPIN, SixteenBit> where SPI: Deref<Target = SpiRegisterBlock>, { type Error = Error; fn transfer<'w>(&mut self, words: &'w mut [u16]) -> Result<&'w [u16], Self::Error> { self.set_bidi(); for word in words.iter_mut() { nb::block!(self.check_send())?; self.send_u16(*word); nb::block!(self.check_read())?; *word = self.read_u16(); } Ok(words) } } impl<SPI, SCKPIN, MISOPIN, MOSIPIN> ::embedded_hal::blocking::spi::Write<u16> for Spi<SPI, SCKPIN, MISOPIN, MOSIPIN, SixteenBit> where SPI: Deref<Target = SpiRegisterBlock>, { type Error = Error; fn write(&mut self, words: &[u16]) -> Result<(), Self::Error> { self.set_send_only(); for word in words { nb::block!(self.check_send())?; self.send_u16(word.clone()); } self.check_send().ok(); Ok(()) } }
use core::marker::PhantomData; use core::{ops::Deref, ptr}; pub use embedded_hal::spi::{Mode, Phase, Polarity}; use crate::pac::SPI1; #[cfg(any( feature = "stm32f030x8", feature = "stm32f030xc", feature = "stm32f042", feature = "stm32f048", feature = "stm32f051", feature = "stm32f058", feature = "stm32f070xb", feature = "stm32f071", feature = "stm32f072", feature = "stm32f078", feature = "stm32f091", feature = "stm32f098", ))] use crate::pac::SPI2; use crate::gpio::*; use crate::rcc::{Clocks, Rcc}; use crate::time::Hertz; pub struct EightBit; pub struct SixteenBit; #[derive(Debug)] pub enum Error { Overrun, ModeFault, Crc, #[doc(hidden)] _Extensible, } pub struct Spi<SPI, SCKPIN, MISOPIN, MOSIPIN, WIDTH> { spi: SPI, pins: (SCKPIN, MISOPIN, MOSIPIN), _width: PhantomData<WIDTH>, } pub trait SckPin<SPI> {} pub trait MisoPin<SPI> {} pub trait MosiPin<SPI> {} macro_rules! spi_pins { ($($SPI:ident => { sck => [$($sck:ty),+ $(,)*], miso => [$($miso:ty),+ $(,)*], mosi => [$($mosi:ty),+ $(,)*], })+) => { $( $( impl SckPin<crate::pac::$SPI> for $sck {} )+ $( impl MisoPin<crate::pac::$SPI> for $miso {} )+ $( impl MosiPin<crate::pac::$SPI> for $mosi {} )+ )+ } } spi_pins! { SPI1 => { sck => [gpioa::PA5<Alternate<AF0>>, gpiob::PB3<Alternate<AF0>>], miso => [gpioa::PA6<Alternate<AF0>>, gpiob::PB4<Alternate<AF0>>], mosi => [gpioa::PA7<Alternate<AF0>>, gpiob::PB5<Alternate<AF0>>], } } #[cfg(any( feature = "stm32f030x4", feature = "stm32f030x6", feature = "stm32f031", feature = "stm32f038", ))] spi_pins! { SPI1 => { sck => [gpiob::PB13<Alternate<AF0>>], miso => [gpiob::PB14<Alternate<AF0>>], mosi => [gpiob::PB15<Alternate<AF0>>], } } #[cfg(any( feature = "stm32f030x8", feature = "stm32f030xc", feature = "stm32f042", feature = "stm32f048", feature = "stm32f051", feature = "stm32f058", feature = "stm32f070xb", feature = "stm32f071", feature = "stm32f072", feature = "stm32f078", feature = "stm32f091", feature = "stm32f098", ))] spi_pins! 
{ SPI2 => { sck => [gpiob::PB13<Alternate<AF0>>], miso => [gpiob::PB14<Alternate<AF0>>], mosi => [gpiob::PB15<Alternate<AF0>>], } } #[cfg(any( feature = "stm32f030xc", feature = "stm32f070xb", feature = "stm32f071", feature = "stm32f072", feature = "stm32f078", feature = "stm32f091", feature = "stm32f098", ))] spi_pins! { SPI2 => { sck => [gpiob::PB10<Alternate<AF5>>], miso => [gpioc::PC2<Alternate<AF1>>], mosi => [gpioc::PC3<Alternate<AF1>>], } } #[cfg(any( feature = "stm32f071", feature = "stm32f072", feature = "stm32f078", feature = "stm32f091", feature = "stm32f098", ))] spi_pins! { SPI2 => { sck => [gpiod::PD1<Alternate<AF1>>], miso => [gpiod::PD3<Alternate<AF1>>], mosi => [gpiod::PD4<Alternate<AF1>>], } } macro_rules! spi { ($($SPI:ident: ($spi:ident, $spiXen:ident, $spiXrst:ident, $apbenr:ident, $apbrstr:ident),)+) => { $( impl<SCKPIN, MISOPIN, MOSIPIN> Spi<$SPI, SCKPIN, MISOPIN, MOSIPIN, EightBit> { pub fn $spi<F>( spi: $SPI, pins: (SCKPIN, MISOPIN, MOSIPIN), mode: Mode, speed: F, rcc: &mut Rcc, ) -> Self where SCKPIN: SckPin<$SPI>, MISOPIN: MisoPin<$SPI>, MOSIPIN: MosiPin<$SPI>, F: Into<Hertz>, { /* Enable clock for SPI */ rcc.regs.$apbenr.modify(|_, w| w.$spiXen().set_bit()); /* Reset SPI */ rcc.regs.$apbrstr.modify(|_, w| w.$spiXrst().set_bit()); rcc.regs.$apbrstr.modify(|_, w| w.$spiXrst().clear_bit()); Spi::<$SPI, SCKPIN, MISOPIN, MOSIPIN, EightBit> { spi, pins, _width: PhantomData }.spi_init(mode, speed, rcc.clocks).into_8bit_width() } } )+ } } spi! { SPI1: (spi1, spi1en, spi1rst, apb2enr, apb2rstr), } #[cfg(any( feature = "stm32f030x8", feature = "stm32f030xc", feature = "stm32f042", feature = "stm32f048", feature = "stm32f051", feature = "stm32f058", feature = "stm32f070xb", feature = "stm32f071", feature = "stm32f072", feature = "stm32f078", feature = "stm32f091", feature = "stm32f098", ))] spi! 
{ SPI2: (spi2, spi2en, spi2rst, apb1enr, apb1rstr), } #[allow(dead_code)] type SpiRegisterBlock = crate::pac::spi1::RegisterBlock; impl<SPI, SCKPIN, MISOPIN, MOSIPIN, WIDTH> Spi<SPI, SCKPIN, MISOPIN, MOSIPIN, WIDTH> where SPI: Deref<Target = SpiRegisterBlock>, { fn spi_init<F>(self, mode: Mode, speed: F, clocks: Clocks) -> Self where F: Into<Hertz>, { /* Make sure the SPI unit is disabled so we can configure it */ self.spi.cr1.modify(|_, w| w.spe().clear_bit()); let br = match clocks.pclk().0 / speed.into().0 { 0 => unreachable!(), 1..=2 => 0b000, 3..=5 => 0b001, 6..=11 => 0b010, 12..=23 => 0b011, 24..=47 => 0b100, 48..=95 => 0b101, 96..=191 => 0b110, _ => 0b111, }; self.spi.cr1.write(|w| { w.cpha() .bit(mode.phase == Phase::CaptureOnSecondTransition) .cpol() .bit(mode.polarity == Polarity::IdleHigh) .mstr() .set_bit() .br() .bits(br) .lsbfirst() .clear_bit() .ssm() .set_bit() .ssi() .set_bit() .rxonly() .clear_bit() .bidimode() .clear_bit() .spe() .set_bit() }); self } pub fn into_8bit_width(self) -> Spi<SPI, SCKPIN, MISOPIN, MOSIPIN, EightBit> { self.spi .cr2 .write(|w| w.frxth().set_bit().ds().eight_bit().ssoe().clear_bit()); Spi { spi: self.spi, pins: self.pins, _width: PhantomData, } } pub fn into_16bit_width(self) -> Spi<SPI, SCKPIN, MISOPIN, MOSIPIN, SixteenBit> { self.spi .cr2 .write(|w| w.frxth().set_bit().ds().sixteen_bit().ssoe().clear_bit()); Spi { spi: self.spi, pins: self.pins, _width: PhantomData, } } fn set_send_only(&mut self) { self.spi .cr1 .modify(|_, w| w.bidimode().set_bit().bidioe().set_bit()); } fn set_bidi(&mut self) { self.spi .cr1 .modify(|_, w| w.bidimode().clear_bit().bidioe().clear_bit()); } fn check_read(&mut self) -> nb::Result<(), Error> { let sr = self.spi.sr.read(); Err(if sr.ovr().bit_is_set() { nb::Error::Other(Error::Overrun) } else if sr.modf().bit_is_set() { nb::Error::Other(Error::ModeFault) } else if sr.crcerr().bit_is_set() { nb::Error::Other(Error::Crc) } else if sr.rxne().bit_is_set() { return Ok(()); } else { 
nb::Error::WouldBlock }) } fn send_buffer_size(&mut self) -> u8 { match self.spi.sr.read().ftlvl().bits() { 0 => 4, 1 => 3, 2 => 2, _ => 0, } } fn check_send(&mut self) -> nb::Result<(), Error> { let sr = self.spi.sr.read();
} fn read_u8(&mut self) -> u8 { unsafe { ptr::read_volatile(&self.spi.dr as *const _ as *const u8) } } fn send_u8(&mut self, byte: u8) { unsafe { ptr::write_volatile(&self.spi.dr as *const _ as *mut u8, byte) } } fn read_u16(&mut self) -> u16 { unsafe { ptr::read_volatile(&self.spi.dr as *const _ as *const u16) } } fn send_u16(&mut self, byte: u16) { unsafe { ptr::write_volatile(&self.spi.dr as *const _ as *mut u16, byte) } } pub fn release(self) -> (SPI, (SCKPIN, MISOPIN, MOSIPIN)) { (self.spi, self.pins) } } impl<SPI, SCKPIN, MISOPIN, MOSIPIN> ::embedded_hal::blocking::spi::Transfer<u8> for Spi<SPI, SCKPIN, MISOPIN, MOSIPIN, EightBit> where SPI: Deref<Target = SpiRegisterBlock>, { type Error = Error; fn transfer<'w>(&mut self, words: &'w mut [u8]) -> Result<&'w [u8], Self::Error> { self.set_bidi(); for word in words.iter_mut() { nb::block!(self.check_send())?; self.send_u8(word.clone()); nb::block!(self.check_read())?; *word = self.read_u8(); } Ok(words) } } impl<SPI, SCKPIN, MISOPIN, MOSIPIN> ::embedded_hal::blocking::spi::Write<u8> for Spi<SPI, SCKPIN, MISOPIN, MOSIPIN, EightBit> where SPI: Deref<Target = SpiRegisterBlock>, { type Error = Error; fn write(&mut self, words: &[u8]) -> Result<(), Self::Error> { let mut bufcap: u8 = 0; self.set_send_only(); nb::block!(self.check_send())?; for word in words { while bufcap == 0 { bufcap = self.send_buffer_size(); } self.send_u8(*word); bufcap -= 1; } self.check_send().ok(); Ok(()) } } impl<SPI, SCKPIN, MISOPIN, MOSIPIN> ::embedded_hal::blocking::spi::Transfer<u16> for Spi<SPI, SCKPIN, MISOPIN, MOSIPIN, SixteenBit> where SPI: Deref<Target = SpiRegisterBlock>, { type Error = Error; fn transfer<'w>(&mut self, words: &'w mut [u16]) -> Result<&'w [u16], Self::Error> { self.set_bidi(); for word in words.iter_mut() { nb::block!(self.check_send())?; self.send_u16(*word); nb::block!(self.check_read())?; *word = self.read_u16(); } Ok(words) } } impl<SPI, SCKPIN, MISOPIN, MOSIPIN> ::embedded_hal::blocking::spi::Write<u16> for 
Spi<SPI, SCKPIN, MISOPIN, MOSIPIN, SixteenBit> where SPI: Deref<Target = SpiRegisterBlock>, { type Error = Error; fn write(&mut self, words: &[u16]) -> Result<(), Self::Error> { self.set_send_only(); for word in words { nb::block!(self.check_send())?; self.send_u16(word.clone()); } self.check_send().ok(); Ok(()) } }
Err(if sr.ovr().bit_is_set() { nb::Error::Other(Error::Overrun) } else if sr.modf().bit_is_set() { nb::Error::Other(Error::ModeFault) } else if sr.crcerr().bit_is_set() { nb::Error::Other(Error::Crc) } else if sr.txe().bit_is_set() { return Ok(()); } else { nb::Error::WouldBlock })
call_expression
[]
Rust
core/http/src/uri/absolute.rs
benjaminch/Rocket
7f1731089849987052a1a545464188dace2b0d0c
use std::borrow::Cow; use std::fmt::{self, Display}; use crate::ext::IntoOwned; use crate::parse::{Extent, IndexedStr}; use crate::uri::{Authority, Origin, Error, as_utf8_unchecked}; #[derive(Debug, Clone)] pub struct Absolute<'a> { source: Option<Cow<'a, str>>, scheme: IndexedStr<'a>, authority: Option<Authority<'a>>, origin: Option<Origin<'a>>, } impl IntoOwned for Absolute<'_> { type Owned = Absolute<'static>; fn into_owned(self) -> Self::Owned { Absolute { source: self.source.into_owned(), scheme: self.scheme.into_owned(), authority: self.authority.into_owned(), origin: self.origin.into_owned(), } } } impl<'a> Absolute<'a> { #[inline] pub(crate) unsafe fn raw( source: Cow<'a, [u8]>, scheme: Extent<&'a [u8]>, authority: Option<Authority<'a>>, origin: Option<Origin<'a>>, ) -> Absolute<'a> { Absolute { authority, origin, source: Some(as_utf8_unchecked(source)), scheme: scheme.into(), } } #[cfg(test)] pub(crate) fn new( scheme: &'a str, authority: Option<Authority<'a>>, origin: Option<Origin<'a>> ) -> Absolute<'a> { Absolute { authority, origin, source: None, scheme: Cow::Borrowed(scheme).into(), } } pub fn parse(string: &'a str) -> Result<Absolute<'a>, Error<'a>> { crate::parse::uri::absolute_from_str(string) } #[inline(always)] pub fn scheme(&self) -> &str { self.scheme.from_cow_source(&self.source) } #[inline(always)] pub fn authority(&self) -> Option<&Authority<'a>> { self.authority.as_ref() } #[inline(always)] pub fn origin(&self) -> Option<&Origin<'a>> { self.origin.as_ref() } #[inline(always)] pub fn with_authority(mut self, authority: Authority<'a>) -> Self { self.set_authority(authority); self } #[inline(always)] pub fn set_authority(&mut self, authority: Authority<'a>) { self.authority = Some(authority); } #[inline(always)] pub fn with_origin(mut self, origin: Origin<'a>) -> Self { self.set_origin(origin); self } #[inline(always)] pub fn set_origin(&mut self, origin: Origin<'a>) { self.origin = Some(origin); } } impl<'a, 'b> PartialEq<Absolute<'b>> for 
Absolute<'a> { fn eq(&self, other: &Absolute<'b>) -> bool { self.scheme() == other.scheme() && self.authority() == other.authority() && self.origin() == other.origin() } } impl Display for Absolute<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.scheme())?; match self.authority { Some(ref authority) => write!(f, "://{}", authority)?, None => write!(f, ":")? } if let Some(ref origin) = self.origin { write!(f, "{}", origin)?; } Ok(()) } }
use std::borrow::Cow; use std::fmt::{self, Display}; use crate::ext::IntoOwned; use crate::parse::{Extent, IndexedStr}; use crate::uri::{Authority, Origin, Error, as_utf8_unchecked}; #[derive(Debug, Clone)] pub struct Absolute<'a> { source: Option<Cow<'a, str>>, scheme: IndexedStr<'a>, authority: Option<Authority<'a>>, origin: Option<Origin<'a>>, } impl IntoOwned for Absolute<'_> { type Owned = Absolute<'static>; fn into_owned(self) -> Self::Owned { Absolute { source: self.source.into_owned(), scheme: self.scheme.into_owned(), authority: self.authority.into_owned(), origin: self.origin.into_owned(), } } } impl<'a> Absolute<'a> { #[inline] pub(crate) unsafe fn raw( source: Cow<'a, [u8]>, scheme: Extent<&'a [u8]>, authority: Option<Authority<'a>>, origin: Option<Origin<'a>>, ) -> Absolute<'a> { Absolute { authority, origin, source: Some(as_utf8_unchecked(source)), scheme: scheme.into(), } } #[cfg(test)] pub(crate) fn new( scheme: &'a str, authority: Option<Authority<'a>>, origin: Option<Origin<'a>> ) -> Absolute<'a> { Absolute { authority, origin, source: None, scheme: Cow::Borrowed(scheme).into(), } } pub fn parse(string: &'a str) -> Result<Absolute<'a>, Error<'a>> { crate::parse::uri::absolute_from_str(string) } #[inline(always)] pub fn scheme(&self) -> &str { self.scheme.from_cow_source(&self.source) } #[inline(always)] pub fn authority(&self) -> Option<&Authority<'a>> { self.authority.as_ref() } #[inline(always)] pub fn origin(&self) -> Option<&Origin<'a>> { self.origin.as_ref() } #[inline(always)] pub fn with_authority(mut self, authority: Authority<'a>) -> Self { self.set_authority(authority); self } #[inline(always)] pub fn set_authority(&mut self, authority: Authority<'a>) { self.authority = Some(authority); } #[inline(always)] pub fn with_origin(mut self, origin: Origin<'a>) -> Self { self.set_origin(origin); self } #[inline(always)] pub fn set_origin(&mut self, origin: Origin<'a>) { self.origin = Some(origin); } } impl<'a, 'b> PartialEq<Absolute<'b>> for 
Absolute<'a> { fn eq(&self, other: &Absolute<'b>) -> bool { self.scheme() == other.scheme() && self.authority() == other.authority() && self.origin() == other.origin() } } impl Display for Absolute<'_> {
}
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.scheme())?; match self.authority { Some(ref authority) => write!(f, "://{}", authority)?, None => write!(f, ":")? } if let Some(ref origin) = self.origin { write!(f, "{}", origin)?; } Ok(()) }
function_block-full_function
[ { "content": "#[inline]\n\npub fn origin_from_str(s: &str) -> Result<Origin<'_>, Error<'_>> {\n\n Ok(parse!(origin: RawInput::new(s.as_bytes()))?)\n\n}\n\n\n", "file_path": "core/http/src/parse/uri/mod.rs", "rank": 0, "score": 387481.30215801205 }, { "content": "#[inline]\n\npub fn author...
Rust
2018/01/src/main.rs
RadicalZephyr/advent-of-rust
34207a74305adbf1aae4e317c14f8cad8cb7811d
use std::collections::HashSet; use std::env; use std::fs::File; use std::io::BufReader; use std::io::{self, BufRead}; use std::num; #[derive(Debug)] enum Error { ArgumentError, EmptyInput, IoError(io::Error), ParseError(num::ParseIntError), } impl From<io::Error> for Error { fn from(error: io::Error) -> Self { Error::IoError(error) } } impl From<num::ParseIntError> for Error { fn from(error: num::ParseIntError) -> Self { Error::ParseError(error) } } fn parse<'a>( changes: impl Iterator<Item = &'a (impl AsRef<str> + 'a)>, ) -> Result<Vec<isize>, num::ParseIntError> { changes .map(|c| c.as_ref().parse()) .collect::<Result<Vec<isize>, num::ParseIntError>>() } fn frequency<'a>(changes: impl Iterator<Item = &'a isize>) -> isize { changes.sum() } fn first_repeat<'a>(changes: impl Iterator<Item = &'a isize> + Clone) -> Option<isize> { let mut frequency = 0; let mut frequencies = HashSet::new(); frequencies.insert(frequency); for change in changes.cycle() { frequency += change; if frequencies.contains(&frequency) { return Some(frequency); } frequencies.insert(frequency); } None } fn main() -> Result<(), Error> { let input = env::args().nth(1).ok_or(Error::ArgumentError)?; let file = File::open(input)?; let reader = BufReader::new(file); let lines = reader.lines().collect::<io::Result<Vec<String>>>()?; let changes = parse(lines.iter())?; let repeat = first_repeat(changes.iter()).ok_or(Error::EmptyInput)?; println!("Part 1: {}", frequency(changes.iter())); println!("Part 2: {}", repeat); Ok(()) } #[cfg(test)] mod test { use super::{first_repeat, frequency, parse}; #[test] fn parse_errors_with_invalid_digits() { let invalid_digits = vec!["", "a", "$", "‽"]; for invalid_digit in invalid_digits { assert!(parse(vec![invalid_digit].iter()).is_err()); } } #[test] fn parse_handles_positive_digits() { let digits = vec!["+1", "+1", "+1"]; let expected = Ok(vec![1, 1, 1]); assert_eq!(expected, parse(digits.iter())); } #[test] fn parse_handles_negative_digits() { let digits = 
vec!["-1", "-2", "-3"]; let expected = Ok(vec![-1, -2, -3]); assert_eq!(expected, parse(digits.iter())); } #[test] fn parse_handles_positive_and_negative_digits() { let digits = vec!["+1", "+1", "-2"]; let expected = Ok(vec![1, 1, -2]); assert_eq!(expected, parse(digits.iter())); } #[test] fn frequency_is_zero_with_no_changes() { let changes: Vec<isize> = vec![]; assert_eq!(0, frequency(changes.iter())); } #[test] fn frequency_handles_positive_changes() { let changes = vec![1, 1, 1]; assert_eq!(3, frequency(changes.iter())); } #[test] fn frequency_handles_negative_changes() { let changes = vec![-1, -2, -3]; assert_eq!(-6, frequency(changes.iter())); } #[test] fn frequency_handles_positive_and_negative_changes() { let changes = vec![1, 1, -2]; assert_eq!(0, frequency(changes.iter())); } #[test] fn first_repeat_is_none_with_no_changes() { let changes = vec![]; assert_eq!(None, first_repeat(changes.iter())); } #[test] fn first_repeat_handles_finite_input() { let inputs = vec![ (vec![1, -1], Some(0)), (vec![3, 3, 4, -2, -4], Some(10)), (vec![-6, 3, 8, 5, -6], Some(5)), (vec![7, 7, -2, -7, -4], Some(14)), ]; for (given, expected) in inputs { assert_eq!(expected, first_repeat(given.iter())); } } }
use std::collections::HashSet; use std::env; use std::fs::File; use std::io::BufReader; use std::io::{self, BufRead}; use std::num; #[derive(Debug)] enum Error { ArgumentError, EmptyInput, IoError(io::Error), ParseError(num::ParseIntError), } impl From<io::Error> for Error {
.map(|c| c.as_ref().parse()) .collect::<Result<Vec<isize>, num::ParseIntError>>() } fn frequency<'a>(changes: impl Iterator<Item = &'a isize>) -> isize { changes.sum() } fn first_repeat<'a>(changes: impl Iterator<Item = &'a isize> + Clone) -> Option<isize> { let mut frequency = 0; let mut frequencies = HashSet::new(); frequencies.insert(frequency); for change in changes.cycle() { frequency += change; if frequencies.contains(&frequency) { return Some(frequency); } frequencies.insert(frequency); } None } fn main() -> Result<(), Error> { let input = env::args().nth(1).ok_or(Error::ArgumentError)?; let file = File::open(input)?; let reader = BufReader::new(file); let lines = reader.lines().collect::<io::Result<Vec<String>>>()?; let changes = parse(lines.iter())?; let repeat = first_repeat(changes.iter()).ok_or(Error::EmptyInput)?; println!("Part 1: {}", frequency(changes.iter())); println!("Part 2: {}", repeat); Ok(()) } #[cfg(test)] mod test { use super::{first_repeat, frequency, parse}; #[test] fn parse_errors_with_invalid_digits() { let invalid_digits = vec!["", "a", "$", "‽"]; for invalid_digit in invalid_digits { assert!(parse(vec![invalid_digit].iter()).is_err()); } } #[test] fn parse_handles_positive_digits() { let digits = vec!["+1", "+1", "+1"]; let expected = Ok(vec![1, 1, 1]); assert_eq!(expected, parse(digits.iter())); } #[test] fn parse_handles_negative_digits() { let digits = vec!["-1", "-2", "-3"]; let expected = Ok(vec![-1, -2, -3]); assert_eq!(expected, parse(digits.iter())); } #[test] fn parse_handles_positive_and_negative_digits() { let digits = vec!["+1", "+1", "-2"]; let expected = Ok(vec![1, 1, -2]); assert_eq!(expected, parse(digits.iter())); } #[test] fn frequency_is_zero_with_no_changes() { let changes: Vec<isize> = vec![]; assert_eq!(0, frequency(changes.iter())); } #[test] fn frequency_handles_positive_changes() { let changes = vec![1, 1, 1]; assert_eq!(3, frequency(changes.iter())); } #[test] fn frequency_handles_negative_changes() { let 
changes = vec![-1, -2, -3]; assert_eq!(-6, frequency(changes.iter())); } #[test] fn frequency_handles_positive_and_negative_changes() { let changes = vec![1, 1, -2]; assert_eq!(0, frequency(changes.iter())); } #[test] fn first_repeat_is_none_with_no_changes() { let changes = vec![]; assert_eq!(None, first_repeat(changes.iter())); } #[test] fn first_repeat_handles_finite_input() { let inputs = vec![ (vec![1, -1], Some(0)), (vec![3, 3, 4, -2, -4], Some(10)), (vec![-6, 3, 8, 5, -6], Some(5)), (vec![7, 7, -2, -7, -4], Some(14)), ]; for (given, expected) in inputs { assert_eq!(expected, first_repeat(given.iter())); } } }
fn from(error: io::Error) -> Self { Error::IoError(error) } } impl From<num::ParseIntError> for Error { fn from(error: num::ParseIntError) -> Self { Error::ParseError(error) } } fn parse<'a>( changes: impl Iterator<Item = &'a (impl AsRef<str> + 'a)>, ) -> Result<Vec<isize>, num::ParseIntError> { changes
random
[ { "content": "#[derive(Debug)]\n\nenum Error {\n\n ArgumentError,\n\n IoError(io::Error),\n\n}\n\n\n\nimpl From<io::Error> for Error {\n\n fn from(error: io::Error) -> Self {\n\n Error::IoError(error)\n\n }\n\n}\n\n\n", "file_path": "2018/02/src/main.rs", "rank": 1, "score": 86601...
Rust
query/src/sql/planner/binder/ddl/table.rs
DatafuseDev/fusequery
70de90ad5b05f0c2159921c7e9866da73d7bb217
use std::collections::BTreeMap; use common_ast::ast::*; use common_datavalues::DataField; use common_datavalues::DataSchemaRef; use common_datavalues::DataSchemaRefExt; use common_datavalues::NullableType; use common_datavalues::TypeFactory; use common_exception::ErrorCode; use common_exception::Result; use common_meta_app::schema::TableMeta; use common_planners::*; use crate::sql::binder::scalar::ScalarBinder; use crate::sql::binder::Binder; use crate::sql::is_reserved_opt_key; use crate::sql::plans::Plan; use crate::sql::BindContext; use crate::sql::ColumnBinding; use crate::sql::OPT_KEY_DATABASE_ID; impl<'a> Binder { async fn analyze_create_table_schema( &self, source: &CreateTableSource<'a>, ) -> Result<DataSchemaRef> { let bind_context = BindContext::new(); match source { CreateTableSource::Columns(columns) => { let mut scalar_binder = ScalarBinder::new(&bind_context, self.ctx.clone(), self.metadata.clone()); let mut fields = Vec::with_capacity(columns.len()); for column in columns.iter() { let name = column.name.name.clone(); let mut data_type = TypeFactory::instance() .get(column.data_type.to_string())? 
.clone(); if column.nullable { data_type = NullableType::new_impl(data_type); } let field = DataField::new(name.as_str(), data_type).with_default_expr({ if let Some(default_expr) = &column.default_expr { scalar_binder.bind(default_expr).await?; Some(default_expr.to_string()) } else { None } }); fields.push(field); } Ok(DataSchemaRefExt::create(fields)) } CreateTableSource::Like { catalog, database, table, } => { let catalog = catalog .as_ref() .map(|catalog| catalog.name.to_lowercase()) .unwrap_or_else(|| self.ctx.get_current_catalog()); let database = database.as_ref().map_or_else( || self.ctx.get_current_catalog(), |ident| ident.name.clone(), ); let table_name = table.name.as_str(); let table = self .ctx .get_table(catalog.as_str(), database.as_str(), table_name) .await?; Ok(table.schema()) } } } fn insert_table_option_with_validation( &self, options: &mut BTreeMap<String, String>, key: String, value: String, ) -> Result<()> { if is_reserved_opt_key(&key) { Err(ErrorCode::BadOption(format!("the following table options are reserved, please do not specify them in the CREATE TABLE statement: {}", key ))) } else if options.insert(key.clone(), value).is_some() { Err(ErrorCode::BadOption(format!( "Duplicated table option: {key}" ))) } else { Ok(()) } } async fn validate_expr(&self, schema: DataSchemaRef, expr: &Expr<'a>) -> Result<()> { let mut bind_context = BindContext::new(); for field in schema.fields() { let column = ColumnBinding { table_name: None, column_name: field.name().clone(), index: 0, data_type: field.data_type().clone(), visible_in_unqualified_wildcard: false, }; bind_context.columns.push(column); } let mut scalar_binder = ScalarBinder::new(&bind_context, self.ctx.clone(), self.metadata.clone()); scalar_binder.bind(expr).await?; Ok(()) } pub(in crate::sql::planner::binder) async fn bind_create_table( &mut self, stmt: &CreateTableStmt<'a>, ) -> Result<Plan> { let CreateTableStmt { if_not_exists, catalog, database, table, source, table_options, 
cluster_by, as_query, comment: _, } = stmt; let catalog = catalog .as_ref() .map(|catalog| catalog.name.to_lowercase()) .unwrap_or_else(|| self.ctx.get_current_catalog()); let database = database .as_ref() .map(|ident| ident.name.to_lowercase()) .unwrap_or_else(|| self.ctx.get_current_database()); let table = table.name.to_lowercase(); let mut engine = Engine::Fuse; let mut options: BTreeMap<String, String> = BTreeMap::new(); for table_option in table_options.iter() { match table_option { TableOption::Engine(table_engine) => { engine = table_engine.clone(); } TableOption::Comment(comment) => self.insert_table_option_with_validation( &mut options, "COMMENT".to_string(), comment.clone(), )?, } } let schema = match (&source, &as_query) { (Some(source), None) => { self.analyze_create_table_schema(source).await? } (None, Some(query)) => { let init_bind_context = BindContext::new(); let (_s_expr, bind_context) = self.bind_query(&init_bind_context, query).await?; let fields = bind_context .columns .iter() .map(|column_binding| { DataField::new( column_binding.column_name.as_str(), column_binding.data_type.clone(), ) }) .collect(); DataSchemaRefExt::create(fields) } _ => Err(ErrorCode::UnImplement("Unsupported CREATE TABLE statement"))?, }; let mut table_meta = TableMeta { schema: schema.clone(), engine: engine.to_string(), options: options.clone(), ..Default::default() }; if engine == Engine::Fuse { let catalog = self.ctx.get_catalog(catalog.as_str())?; let db = catalog .get_database(self.ctx.get_tenant().as_str(), database.as_str()) .await?; let db_id = db.get_db_info().ident.db_id; table_meta .options .insert(OPT_KEY_DATABASE_ID.to_owned(), db_id.to_string()); } let mut cluster_keys = Vec::with_capacity(cluster_by.len()); for cluster_key in cluster_by.iter() { self.validate_expr(schema.clone(), cluster_key).await?; cluster_keys.push(cluster_key.to_string()); } if !cluster_keys.is_empty() { let cluster_keys_sql = format!("({})", cluster_keys.join(", ")); table_meta = 
table_meta.push_cluster_key(cluster_keys_sql); } let plan = CreateTablePlan { if_not_exists: *if_not_exists, tenant: self.ctx.get_tenant(), catalog, database, table, table_meta, cluster_keys, as_select: if as_query.is_some() { Err(ErrorCode::UnImplement( "Unsupported CREATE TABLE ... AS ...", ))? } else { None }, }; Ok(Plan::CreateTable(Box::new(plan))) } }
use std::collections::BTreeMap; use common_ast::ast::*; use common_datavalues::DataField; use common_datavalues::DataSchemaRef; use common_datavalues::DataSchemaRefExt; use common_datavalues::NullableType; use common_datavalues::TypeFactory; use common_exception::ErrorCode; use common_exception::Result; use common_meta_app::schema::TableMeta; use common_planners::*; use crate::sql::binder::scalar::ScalarBinder; use crate::sql::binder::Binder; use crate::sql::is_reserved_opt_key; use crate::sql::plans::Plan; use crate::sql::BindContext; use crate::sql::ColumnBinding; use crate::sql::OPT_KEY_DATABASE_ID; impl<'a> Binder { async fn analyze_create_table_schema( &self, source: &CreateTableSource<'a>, ) -> Result<DataSchemaRef> { let bind_context = BindContext::new(); match source { CreateTableSource::Columns(columns) => { let mut scalar_binder = ScalarBinder::new(&bind_context, self.ctx.clone(), self.metadata.clone()); let mut fields = Vec::with_capacity(columns.len()); for column in columns.iter() { let name = column.name.name.clone(); let mut data_type = TypeFactory::instance() .get(column.data_type.to_string())? .clone(); if column.nullable { data_type = NullableType::new_impl(data_type); } let field = DataField::new(name.as_str(), data_type).with_default_expr({ if let Some(default_expr) = &column.default_expr { scalar_binder.bind(default_expr).await?; Some(default_expr.to_string()) } else { None } }); fields.push(field); } Ok(DataSchem
.unwrap_or_else(|| self.ctx.get_current_catalog()); let database = database .as_ref() .map(|ident| ident.name.to_lowercase()) .unwrap_or_else(|| self.ctx.get_current_database()); let table = table.name.to_lowercase(); let mut engine = Engine::Fuse; let mut options: BTreeMap<String, String> = BTreeMap::new(); for table_option in table_options.iter() { match table_option { TableOption::Engine(table_engine) => { engine = table_engine.clone(); } TableOption::Comment(comment) => self.insert_table_option_with_validation( &mut options, "COMMENT".to_string(), comment.clone(), )?, } } let schema = match (&source, &as_query) { (Some(source), None) => { self.analyze_create_table_schema(source).await? } (None, Some(query)) => { let init_bind_context = BindContext::new(); let (_s_expr, bind_context) = self.bind_query(&init_bind_context, query).await?; let fields = bind_context .columns .iter() .map(|column_binding| { DataField::new( column_binding.column_name.as_str(), column_binding.data_type.clone(), ) }) .collect(); DataSchemaRefExt::create(fields) } _ => Err(ErrorCode::UnImplement("Unsupported CREATE TABLE statement"))?, }; let mut table_meta = TableMeta { schema: schema.clone(), engine: engine.to_string(), options: options.clone(), ..Default::default() }; if engine == Engine::Fuse { let catalog = self.ctx.get_catalog(catalog.as_str())?; let db = catalog .get_database(self.ctx.get_tenant().as_str(), database.as_str()) .await?; let db_id = db.get_db_info().ident.db_id; table_meta .options .insert(OPT_KEY_DATABASE_ID.to_owned(), db_id.to_string()); } let mut cluster_keys = Vec::with_capacity(cluster_by.len()); for cluster_key in cluster_by.iter() { self.validate_expr(schema.clone(), cluster_key).await?; cluster_keys.push(cluster_key.to_string()); } if !cluster_keys.is_empty() { let cluster_keys_sql = format!("({})", cluster_keys.join(", ")); table_meta = table_meta.push_cluster_key(cluster_keys_sql); } let plan = CreateTablePlan { if_not_exists: *if_not_exists, tenant: 
self.ctx.get_tenant(), catalog, database, table, table_meta, cluster_keys, as_select: if as_query.is_some() { Err(ErrorCode::UnImplement( "Unsupported CREATE TABLE ... AS ...", ))? } else { None }, }; Ok(Plan::CreateTable(Box::new(plan))) } }
aRefExt::create(fields)) } CreateTableSource::Like { catalog, database, table, } => { let catalog = catalog .as_ref() .map(|catalog| catalog.name.to_lowercase()) .unwrap_or_else(|| self.ctx.get_current_catalog()); let database = database.as_ref().map_or_else( || self.ctx.get_current_catalog(), |ident| ident.name.clone(), ); let table_name = table.name.as_str(); let table = self .ctx .get_table(catalog.as_str(), database.as_str(), table_name) .await?; Ok(table.schema()) } } } fn insert_table_option_with_validation( &self, options: &mut BTreeMap<String, String>, key: String, value: String, ) -> Result<()> { if is_reserved_opt_key(&key) { Err(ErrorCode::BadOption(format!("the following table options are reserved, please do not specify them in the CREATE TABLE statement: {}", key ))) } else if options.insert(key.clone(), value).is_some() { Err(ErrorCode::BadOption(format!( "Duplicated table option: {key}" ))) } else { Ok(()) } } async fn validate_expr(&self, schema: DataSchemaRef, expr: &Expr<'a>) -> Result<()> { let mut bind_context = BindContext::new(); for field in schema.fields() { let column = ColumnBinding { table_name: None, column_name: field.name().clone(), index: 0, data_type: field.data_type().clone(), visible_in_unqualified_wildcard: false, }; bind_context.columns.push(column); } let mut scalar_binder = ScalarBinder::new(&bind_context, self.ctx.clone(), self.metadata.clone()); scalar_binder.bind(expr).await?; Ok(()) } pub(in crate::sql::planner::binder) async fn bind_create_table( &mut self, stmt: &CreateTableStmt<'a>, ) -> Result<Plan> { let CreateTableStmt { if_not_exists, catalog, database, table, source, table_options, cluster_by, as_query, comment: _, } = stmt; let catalog = catalog .as_ref() .map(|catalog| catalog.name.to_lowercase())
random
[ { "content": "pub fn col(name: &str) -> Expression {\n\n Expression::Column(name.to_string())\n\n}\n", "file_path": "common/planners/src/plan_expression_column.rs", "rank": 0, "score": 242307.14682482358 }, { "content": "fn run_lexer(file: &mut File, source: &str) {\n\n let tokens = To...
Rust
gee/src/auto/queue.rs
jeandudey/granite-rs
a06d131240dbcf2935eb8d9659d8e65ab0d38df4
use crate::Collection; use glib::object::Cast; use glib::object::IsA; use glib::signal::connect_raw; use glib::signal::SignalHandlerId; use glib::translate::*; use std::boxed::Box as Box_; use std::fmt; use std::mem::transmute; glib::wrapper! { pub struct Queue(Interface<ffi::GeeQueue>) @requires Collection; match fn { get_type => || ffi::gee_queue_get_type(), } } pub const NONE_QUEUE: Option<&Queue> = None; pub trait QueueExt: 'static { #[doc(alias = "gee_queue_drain")] fn drain<P: IsA<Collection>>(recipient: &P, amount: i32) -> i32; #[doc(alias = "gee_queue_get_capacity")] fn get_capacity() -> i32; #[doc(alias = "gee_queue_get_remaining_capacity")] fn get_remaining_capacity() -> i32; #[doc(alias = "gee_queue_get_is_full")] fn get_is_full() -> bool; fn connect_property_capacity_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_property_remaining_capacity_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_property_is_full_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; } impl<O: IsA<Queue>> QueueExt for O { fn drain<P: IsA<Collection>>(recipient: &P, amount: i32) -> i32 { unsafe { ffi::gee_queue_drain(recipient.as_ref().to_glib_none().0, amount) } } fn get_capacity() -> i32 { unsafe { ffi::gee_queue_get_capacity() } } fn get_remaining_capacity() -> i32 { unsafe { ffi::gee_queue_get_remaining_capacity() } } fn get_is_full() -> bool { unsafe { from_glib(ffi::gee_queue_get_is_full()) } } fn connect_property_capacity_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_capacity_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GeeQueue, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer) where P: IsA<Queue> { let f: &F = &*(f as *const F); f(&Queue::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw(self.as_ptr() as *mut _, b"notify::capacity\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" 
fn()>(notify_capacity_trampoline::<Self, F> as *const ())), Box_::into_raw(f)) } } fn connect_property_remaining_capacity_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_remaining_capacity_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GeeQueue, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer) where P: IsA<Queue> { let f: &F = &*(f as *const F); f(&Queue::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw(self.as_ptr() as *mut _, b"notify::remaining-capacity\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>(notify_remaining_capacity_trampoline::<Self, F> as *const ())), Box_::into_raw(f)) } } fn connect_property_is_full_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_is_full_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GeeQueue, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer) where P: IsA<Queue> { let f: &F = &*(f as *const F); f(&Queue::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw(self.as_ptr() as *mut _, b"notify::is-full\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>(notify_is_full_trampoline::<Self, F> as *const ())), Box_::into_raw(f)) } } } impl fmt::Display for Queue { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str("Queue") } }
use crate::Collection; use glib::object::Cast; use glib::object::IsA; use glib::signal::connect_raw; use glib::signal::SignalHandlerId; use glib::translate::*; use std::boxed::Box as Box_; use std::fmt; use std::mem::transmute; glib::wrapper! { pub struct Queue(Interface<ffi::GeeQueue>) @requires Collection; match fn { get_type => || ffi::gee_queue_get_type(), } } pub const NONE_QUEUE: Option<&Queue> = None; pub trait QueueExt: 'static {
x_<F> = Box_::new(f); connect_raw(self.as_ptr() as *mut _, b"notify::is-full\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>(notify_is_full_trampoline::<Self, F> as *const ())), Box_::into_raw(f)) } } } impl fmt::Display for Queue { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str("Queue") } }
#[doc(alias = "gee_queue_drain")] fn drain<P: IsA<Collection>>(recipient: &P, amount: i32) -> i32; #[doc(alias = "gee_queue_get_capacity")] fn get_capacity() -> i32; #[doc(alias = "gee_queue_get_remaining_capacity")] fn get_remaining_capacity() -> i32; #[doc(alias = "gee_queue_get_is_full")] fn get_is_full() -> bool; fn connect_property_capacity_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_property_remaining_capacity_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_property_is_full_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; } impl<O: IsA<Queue>> QueueExt for O { fn drain<P: IsA<Collection>>(recipient: &P, amount: i32) -> i32 { unsafe { ffi::gee_queue_drain(recipient.as_ref().to_glib_none().0, amount) } } fn get_capacity() -> i32 { unsafe { ffi::gee_queue_get_capacity() } } fn get_remaining_capacity() -> i32 { unsafe { ffi::gee_queue_get_remaining_capacity() } } fn get_is_full() -> bool { unsafe { from_glib(ffi::gee_queue_get_is_full()) } } fn connect_property_capacity_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_capacity_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GeeQueue, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer) where P: IsA<Queue> { let f: &F = &*(f as *const F); f(&Queue::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw(self.as_ptr() as *mut _, b"notify::capacity\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>(notify_capacity_trampoline::<Self, F> as *const ())), Box_::into_raw(f)) } } fn connect_property_remaining_capacity_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_remaining_capacity_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GeeQueue, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer) where P: IsA<Queue> { let f: &F = &*(f as *const F); 
f(&Queue::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw(self.as_ptr() as *mut _, b"notify::remaining-capacity\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>(notify_remaining_capacity_trampoline::<Self, F> as *const ())), Box_::into_raw(f)) } } fn connect_property_is_full_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_is_full_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GeeQueue, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer) where P: IsA<Queue> { let f: &F = &*(f as *const F); f(&Queue::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Bo
random
[ { "content": "pub trait CollectionExt: 'static {\n\n //#[doc(alias = \"gee_collection_contains\")]\n\n //fn contains(item: /*Unimplemented*/Fundamental: Pointer) -> bool;\n\n\n\n //#[doc(alias = \"gee_collection_add\")]\n\n //fn add(item: /*Unimplemented*/Fundamental: Pointer) -> bool;\n\n\n\n //...
Rust
bootloader-efi/src/arch/aarch64/mod.rs
hyperswine/Redox2
b090d53c3270d6eabf7d2fce5077d1bb0c78052a
use core::{mem, ptr}; use orbclient::{Color, Renderer}; use std::fs::find; use std::proto::Protocol; use uefi::guid::Guid; use uefi::status::{Error, Result}; use crate::display::{Display, ScaledDisplay, Output}; use crate::image::{self, Image}; use crate::key::{key, Key}; use crate::redoxfs; use crate::text::TextDisplay; use self::memory_map::memory_map; use self::paging::paging; mod memory_map; mod paging; mod partitions; static KERNEL: &'static str = concat!("\\", env!("BASEDIR"), "\\kernel"); static SPLASHBMP: &'static [u8] = include_bytes!("../../../res/splash.bmp"); static KERNEL_OFFSET: u64 = 0xFFFF_FF00_0000_0000; static KERNEL_PHYSICAL: u64 = 0x4000_0000; static mut KERNEL_SIZE: u64 = 0; static mut KERNEL_ENTRY: u64 = 0; static mut DTB_PHYSICAL: u64 = 0; #[no_mangle] pub extern "C" fn __chkstk() { } unsafe fn exit_boot_services(key: usize) { let handle = std::handle(); let uefi = std::system_table(); let _ = (uefi.BootServices.ExitBootServices)(handle, key); } unsafe fn enter() -> ! { let entry_fn: extern "C" fn(dtb: u64) -> ! = mem::transmute(( KERNEL_PHYSICAL + KERNEL_ENTRY - KERNEL_OFFSET )); entry_fn(DTB_PHYSICAL); } fn get_correct_block_io() -> Result<redoxfs::Disk> { let mut handles = vec! 
[uefi::Handle(0); 128]; let mut size = handles.len() * mem::size_of::<uefi::Handle>(); (std::system_table().BootServices.LocateHandle)(uefi::boot::LocateSearchType::ByProtocol, &uefi::guid::BLOCK_IO_GUID, 0, &mut size, handles.as_mut_ptr())?; let max_size = size / mem::size_of::<uefi::Handle>(); let actual_size = std::cmp::min(handles.len(), max_size); for handle in handles.into_iter().take(actual_size) { let block_io = redoxfs::Disk::handle_protocol(handle)?; if !block_io.0.Media.LogicalPartition { continue; } let part = partitions::PartitionProto::handle_protocol(handle)?.0; if part.sys == 1 { continue; } assert_eq!({part.rev}, partitions::PARTITION_INFO_PROTOCOL_REVISION); if part.ty == partitions::PartitionProtoDataTy::Gpt as u32 { let gpt = unsafe { part.info.gpt }; assert_ne!(gpt.part_ty_guid, partitions::ESP_GUID, "detected esp partition again"); if gpt.part_ty_guid == partitions::REDOX_FS_GUID || gpt.part_ty_guid == partitions::LINUX_FS_GUID { return Ok(block_io); } } else if part.ty == partitions::PartitionProtoDataTy::Mbr as u32 { let mbr = unsafe { part.info.mbr }; if mbr.ty == 0x83 { return Ok(block_io); } } else { continue; } } panic!("Couldn't find handle for partition"); } static DTB_GUID: Guid = Guid(0xb1b621d5, 0xf19c, 0x41a5, [0x83, 0x0b, 0xd9, 0x15, 0x2c, 0x69, 0xaa, 0xe0]); fn find_dtb() -> Result<()> { let cfg_tables = std::system_table().config_tables(); for cfg_table in cfg_tables.iter() { if cfg_table.VendorGuid == DTB_GUID { unsafe { DTB_PHYSICAL = cfg_table.VendorTable as u64; println!("DTB: {:X}", DTB_PHYSICAL); } return Ok(()); } } println!("Failed to find DTB"); Err(Error::NotFound) } fn redoxfs() -> Result<redoxfs::FileSystem> { redoxfs::FileSystem::open(get_correct_block_io()?) 
} const MB: usize = 1024 * 1024; fn inner() -> Result<()> { find_dtb()?; { println!("Loading Kernel..."); let (kernel, mut env): (Vec<u8>, String) = { let (_i, mut kernel_file) = find(KERNEL)?; let info = kernel_file.info()?; let len = info.FileSize; let mut kernel = Vec::with_capacity(len as usize); let mut buf = vec![0; 4 * MB]; loop { let percent = kernel.len() as u64 * 100 / len; print!("\r{}% - {} MB", percent, kernel.len() / MB); let count = kernel_file.read(&mut buf)?; if count == 0 { break; } kernel.extend(&buf[.. count]); } println!(""); (kernel, String::new()) }; println!("Copying Kernel..."); unsafe { KERNEL_SIZE = kernel.len() as u64; println!("Size: {}", KERNEL_SIZE); KERNEL_ENTRY = *(kernel.as_ptr().offset(0x18) as *const u64); println!("Entry: {:X}", KERNEL_ENTRY); ptr::copy(kernel.as_ptr(), KERNEL_PHYSICAL as *mut u8, kernel.len()); } println!("Done!"); } unsafe { let key = memory_map(); exit_boot_services(key); } unsafe { asm!("msr daifset, #2"); paging(); } unsafe { enter(); } } fn select_mode(output: &mut Output) -> Result<u32> { loop { for i in 0..output.0.Mode.MaxMode { let mut mode_ptr = ::core::ptr::null_mut(); let mut mode_size = 0; (output.0.QueryMode)(output.0, i, &mut mode_size, &mut mode_ptr)?; let mode = unsafe { &mut *mode_ptr }; let w = mode.HorizontalResolution; let h = mode.VerticalResolution; print!("\r{}x{}: Is this OK? (y)es/(n)o", w, h); if key(true)? 
== Key::Character('y') { println!(""); return Ok(i); } } } } fn pretty_pipe<T, F: FnMut() -> Result<T>>(splash: &Image, f: F) -> Result<T> { let mut display = Display::new(Output::one()?); let mut display = ScaledDisplay::new(&mut display); { let bg = Color::rgb(0x4a, 0xa3, 0xfd); display.set(bg); { let x = (display.width() as i32 - splash.width() as i32)/2; let y = 16; splash.draw(&mut display, x, y); } { let prompt = format!( "Redox Bootloader {} {}", env!("CARGO_PKG_VERSION"), env!("TARGET").split('-').next().unwrap_or("") ); let mut x = (display.width() as i32 - prompt.len() as i32 * 8)/2; let y = display.height() as i32 - 32; for c in prompt.chars() { display.char(x, y, c, Color::rgb(0xff, 0xff, 0xff)); x += 8; } } display.sync(); } { let cols = 80; let off_x = (display.width() as i32 - cols as i32 * 8)/2; let off_y = 16 + splash.height() as i32 + 16; let rows = (display.height() as i32 - 64 - off_y - 1) as usize/16; display.rect(off_x, off_y, cols as u32 * 8, rows as u32 * 16, Color::rgb(0, 0, 0)); display.sync(); let mut text = TextDisplay::new(display); text.off_x = off_x; text.off_y = off_y; text.cols = cols; text.rows = rows; text.pipe(f) } } pub fn main() -> Result<()> { inner()?; /* TODO if let Ok(mut output) = Output::one() { let mut splash = Image::new(0, 0); { println!("Loading Splash..."); if let Ok(image) = image::bmp::parse(&SPLASHBMP) { splash = image; } println!(" Done"); } /* TODO let mode = pretty_pipe(&splash, || { select_mode(&mut output) })?; (output.0.SetMode)(output.0, mode)?; */ pretty_pipe(&splash, inner)?; } else { inner()?; } */ Ok(()) }
use core::{mem, ptr}; use orbclient::{Color, Renderer}; use std::fs::find; use std::proto::Protocol; use uefi::guid::Guid; use uefi::status::{Error, Result}; use crate::display::{Display, ScaledDisplay, Output}; use crate::image::{self, Image}; use crate::key::{key, Key}; use crate::redoxfs; use crate::text::TextDisplay; use self::memory_map::memory_map; use self::paging::paging; mod memory_map; mod paging; mod partitions; static KERNEL: &'static str = concat!("\\", env!("BASEDIR"), "\\kernel"); static SPLASHBMP: &'static [u8] = include_bytes!("../../../res/splash.bmp"); static KERNEL_OFFSET: u64 = 0xFFFF_FF00_0000_0000; static KERNEL_PHYSICAL: u64 = 0x4000_0000; static mut KERNEL_SIZE: u64 = 0; st
if count == 0 { break; } kernel.extend(&buf[.. count]); } println!(""); (kernel, String::new()) }; println!("Copying Kernel..."); unsafe { KERNEL_SIZE = kernel.len() as u64; println!("Size: {}", KERNEL_SIZE); KERNEL_ENTRY = *(kernel.as_ptr().offset(0x18) as *const u64); println!("Entry: {:X}", KERNEL_ENTRY); ptr::copy(kernel.as_ptr(), KERNEL_PHYSICAL as *mut u8, kernel.len()); } println!("Done!"); } unsafe { let key = memory_map(); exit_boot_services(key); } unsafe { asm!("msr daifset, #2"); paging(); } unsafe { enter(); } } fn select_mode(output: &mut Output) -> Result<u32> { loop { for i in 0..output.0.Mode.MaxMode { let mut mode_ptr = ::core::ptr::null_mut(); let mut mode_size = 0; (output.0.QueryMode)(output.0, i, &mut mode_size, &mut mode_ptr)?; let mode = unsafe { &mut *mode_ptr }; let w = mode.HorizontalResolution; let h = mode.VerticalResolution; print!("\r{}x{}: Is this OK? (y)es/(n)o", w, h); if key(true)? == Key::Character('y') { println!(""); return Ok(i); } } } } fn pretty_pipe<T, F: FnMut() -> Result<T>>(splash: &Image, f: F) -> Result<T> { let mut display = Display::new(Output::one()?); let mut display = ScaledDisplay::new(&mut display); { let bg = Color::rgb(0x4a, 0xa3, 0xfd); display.set(bg); { let x = (display.width() as i32 - splash.width() as i32)/2; let y = 16; splash.draw(&mut display, x, y); } { let prompt = format!( "Redox Bootloader {} {}", env!("CARGO_PKG_VERSION"), env!("TARGET").split('-').next().unwrap_or("") ); let mut x = (display.width() as i32 - prompt.len() as i32 * 8)/2; let y = display.height() as i32 - 32; for c in prompt.chars() { display.char(x, y, c, Color::rgb(0xff, 0xff, 0xff)); x += 8; } } display.sync(); } { let cols = 80; let off_x = (display.width() as i32 - cols as i32 * 8)/2; let off_y = 16 + splash.height() as i32 + 16; let rows = (display.height() as i32 - 64 - off_y - 1) as usize/16; display.rect(off_x, off_y, cols as u32 * 8, rows as u32 * 16, Color::rgb(0, 0, 0)); display.sync(); let mut text = 
TextDisplay::new(display); text.off_x = off_x; text.off_y = off_y; text.cols = cols; text.rows = rows; text.pipe(f) } } pub fn main() -> Result<()> { inner()?; /* TODO if let Ok(mut output) = Output::one() { let mut splash = Image::new(0, 0); { println!("Loading Splash..."); if let Ok(image) = image::bmp::parse(&SPLASHBMP) { splash = image; } println!(" Done"); } /* TODO let mode = pretty_pipe(&splash, || { select_mode(&mut output) })?; (output.0.SetMode)(output.0, mode)?; */ pretty_pipe(&splash, inner)?; } else { inner()?; } */ Ok(()) }
atic mut KERNEL_ENTRY: u64 = 0; static mut DTB_PHYSICAL: u64 = 0; #[no_mangle] pub extern "C" fn __chkstk() { } unsafe fn exit_boot_services(key: usize) { let handle = std::handle(); let uefi = std::system_table(); let _ = (uefi.BootServices.ExitBootServices)(handle, key); } unsafe fn enter() -> ! { let entry_fn: extern "C" fn(dtb: u64) -> ! = mem::transmute(( KERNEL_PHYSICAL + KERNEL_ENTRY - KERNEL_OFFSET )); entry_fn(DTB_PHYSICAL); } fn get_correct_block_io() -> Result<redoxfs::Disk> { let mut handles = vec! [uefi::Handle(0); 128]; let mut size = handles.len() * mem::size_of::<uefi::Handle>(); (std::system_table().BootServices.LocateHandle)(uefi::boot::LocateSearchType::ByProtocol, &uefi::guid::BLOCK_IO_GUID, 0, &mut size, handles.as_mut_ptr())?; let max_size = size / mem::size_of::<uefi::Handle>(); let actual_size = std::cmp::min(handles.len(), max_size); for handle in handles.into_iter().take(actual_size) { let block_io = redoxfs::Disk::handle_protocol(handle)?; if !block_io.0.Media.LogicalPartition { continue; } let part = partitions::PartitionProto::handle_protocol(handle)?.0; if part.sys == 1 { continue; } assert_eq!({part.rev}, partitions::PARTITION_INFO_PROTOCOL_REVISION); if part.ty == partitions::PartitionProtoDataTy::Gpt as u32 { let gpt = unsafe { part.info.gpt }; assert_ne!(gpt.part_ty_guid, partitions::ESP_GUID, "detected esp partition again"); if gpt.part_ty_guid == partitions::REDOX_FS_GUID || gpt.part_ty_guid == partitions::LINUX_FS_GUID { return Ok(block_io); } } else if part.ty == partitions::PartitionProtoDataTy::Mbr as u32 { let mbr = unsafe { part.info.mbr }; if mbr.ty == 0x83 { return Ok(block_io); } } else { continue; } } panic!("Couldn't find handle for partition"); } static DTB_GUID: Guid = Guid(0xb1b621d5, 0xf19c, 0x41a5, [0x83, 0x0b, 0xd9, 0x15, 0x2c, 0x69, 0xaa, 0xe0]); fn find_dtb() -> Result<()> { let cfg_tables = std::system_table().config_tables(); for cfg_table in cfg_tables.iter() { if cfg_table.VendorGuid == DTB_GUID { unsafe { 
DTB_PHYSICAL = cfg_table.VendorTable as u64; println!("DTB: {:X}", DTB_PHYSICAL); } return Ok(()); } } println!("Failed to find DTB"); Err(Error::NotFound) } fn redoxfs() -> Result<redoxfs::FileSystem> { redoxfs::FileSystem::open(get_correct_block_io()?) } const MB: usize = 1024 * 1024; fn inner() -> Result<()> { find_dtb()?; { println!("Loading Kernel..."); let (kernel, mut env): (Vec<u8>, String) = { let (_i, mut kernel_file) = find(KERNEL)?; let info = kernel_file.info()?; let len = info.FileSize; let mut kernel = Vec::with_capacity(len as usize); let mut buf = vec![0; 4 * MB]; loop { let percent = kernel.len() as u64 * 100 / len; print!("\r{}% - {} MB", percent, kernel.len() / MB); let count = kernel_file.read(&mut buf)?;
random
[]
Rust
contrib/rust/src/softmax_builder.rs
verycumbersome/dynet
b477636c62e22efdaa024acf497080fc6a6dae1f
use std::ptr::{self, NonNull}; use dynet_sys; use super::{ ApiResult, ComputationGraph, Expression, Parameter, ParameterCollection, Result, Wrap, }; pub trait SoftmaxBuilder: Wrap<dynet_sys::dynetSoftmaxBuilder_t> { fn new_graph(&mut self, cg: &mut ComputationGraph, update: bool) { unsafe { check_api_status!(dynet_sys::dynetResetSoftmaxBuilderGraph( self.as_mut_ptr(), cg.as_mut_ptr(), update as u32, )); } } fn neg_log_softmax_one<E: AsRef<Expression>>(&mut self, rep: E, classidx: u32) -> Expression { unsafe { let mut expr_ptr: *mut dynet_sys::dynetExpression_t = ptr::null_mut(); check_api_status!(dynet_sys::dynetApplySoftmaxBuilderNegLogSoftmaxOne( self.as_mut_ptr(), rep.as_ref().as_ptr(), classidx, &mut expr_ptr, )); Expression::from_raw(expr_ptr, true) } } fn neg_log_softmax<E: AsRef<Expression>>(&mut self, rep: E, classidxs: &[u32]) -> Expression { unsafe { let mut expr_ptr: *mut dynet_sys::dynetExpression_t = ptr::null_mut(); check_api_status!(dynet_sys::dynetApplySoftmaxBuilderNegLogSoftmax( self.as_mut_ptr(), rep.as_ref().as_ptr(), classidxs.as_ptr(), classidxs.len(), &mut expr_ptr, )); Expression::from_raw(expr_ptr, true) } } fn sample<E: AsRef<Expression>>(&mut self, rep: E) -> u32 { unsafe { let mut retval: u32 = 0; check_api_status!(dynet_sys::dynetSampleFromSoftmaxBuilder( self.as_mut_ptr(), rep.as_ref().as_ptr(), &mut retval, )); retval } } fn full_log_distribution<E: AsRef<Expression>>(&mut self, rep: E) -> Expression { unsafe { let mut expr_ptr: *mut dynet_sys::dynetExpression_t = ptr::null_mut(); check_api_status!(dynet_sys::dynetGetSoftmaxBuilderFullLogDistribution( self.as_mut_ptr(), rep.as_ref().as_ptr(), &mut expr_ptr, )); Expression::from_raw(expr_ptr, true) } } fn full_logits<E: AsRef<Expression>>(&mut self, rep: E) -> Expression { unsafe { let mut expr_ptr: *mut dynet_sys::dynetExpression_t = ptr::null_mut(); check_api_status!(dynet_sys::dynetGetSoftmaxBuilderFullLogits( self.as_mut_ptr(), rep.as_ref().as_ptr(), &mut expr_ptr, )); 
Expression::from_raw(expr_ptr, true) } } fn get_parameter_collection(&mut self) -> ParameterCollection { unsafe { let mut pc_ptr: *mut dynet_sys::dynetParameterCollection_t = ptr::null_mut(); check_api_status!(dynet_sys::dynetGetSoftmaxBuilderParameterCollection( self.as_mut_ptr(), &mut pc_ptr, )); ParameterCollection::from_raw(pc_ptr, false) } } } macro_rules! impl_softmax_builder { ($name:ident) => { impl_wrap_owned!($name, dynetSoftmaxBuilder_t); impl_drop!($name, dynetDeleteSoftmaxBuilder); impl SoftmaxBuilder for $name {} }; } #[derive(Debug)] pub struct StandardSoftmaxBuilder { inner: NonNull<dynet_sys::dynetSoftmaxBuilder_t>, } impl_softmax_builder!(StandardSoftmaxBuilder); impl StandardSoftmaxBuilder { pub fn new( rep_dim: u32, num_classes: u32, pc: &mut ParameterCollection, bias: bool, ) -> StandardSoftmaxBuilder { unsafe { let mut builder_ptr: *mut dynet_sys::dynetSoftmaxBuilder_t = ptr::null_mut(); check_api_status!(dynet_sys::dynetCreateStandardSoftmaxBuilder( rep_dim, num_classes, pc.as_mut_ptr(), bias as u32, &mut builder_ptr, )); StandardSoftmaxBuilder::from_raw(builder_ptr, true) } } pub fn from_parameters(p_w: &mut Parameter, p_b: &mut Parameter) -> StandardSoftmaxBuilder { unsafe { let mut builder_ptr: *mut dynet_sys::dynetSoftmaxBuilder_t = ptr::null_mut(); check_api_status!(dynet_sys::dynetCreateStandardSoftmaxBuilderFromParameters( p_w.as_mut_ptr(), p_b.as_mut_ptr(), &mut builder_ptr, )); StandardSoftmaxBuilder::from_raw(builder_ptr, true) } } }
use std::ptr::{self, NonNull}; use dynet_sys; use super::{ ApiResult, ComputationGraph, Expression, Parameter, ParameterCollection, Result, Wrap, }; pub trait SoftmaxBuilder: Wrap<dynet_sys::dynetSoftmaxBuilder_t> { fn new_graph(&mut self, cg: &mut ComputationGraph, update: bool) { unsafe { check_api_status!(dynet_sys::dynetResetSoftmaxBuilderGraph( self.as_mut_ptr(), cg.as_mut_ptr(), update as u32, )); } } fn neg_log_softmax_one<E: AsRef<Expression>>(&mut self, rep: E, classidx: u32) -> Exp
pression { unsafe { let mut expr_ptr: *mut dynet_sys::dynetExpression_t = ptr::null_mut(); check_api_status!(dynet_sys::dynetGetSoftmaxBuilderFullLogDistribution( self.as_mut_ptr(), rep.as_ref().as_ptr(), &mut expr_ptr, )); Expression::from_raw(expr_ptr, true) } } fn full_logits<E: AsRef<Expression>>(&mut self, rep: E) -> Expression { unsafe { let mut expr_ptr: *mut dynet_sys::dynetExpression_t = ptr::null_mut(); check_api_status!(dynet_sys::dynetGetSoftmaxBuilderFullLogits( self.as_mut_ptr(), rep.as_ref().as_ptr(), &mut expr_ptr, )); Expression::from_raw(expr_ptr, true) } } fn get_parameter_collection(&mut self) -> ParameterCollection { unsafe { let mut pc_ptr: *mut dynet_sys::dynetParameterCollection_t = ptr::null_mut(); check_api_status!(dynet_sys::dynetGetSoftmaxBuilderParameterCollection( self.as_mut_ptr(), &mut pc_ptr, )); ParameterCollection::from_raw(pc_ptr, false) } } } macro_rules! impl_softmax_builder { ($name:ident) => { impl_wrap_owned!($name, dynetSoftmaxBuilder_t); impl_drop!($name, dynetDeleteSoftmaxBuilder); impl SoftmaxBuilder for $name {} }; } #[derive(Debug)] pub struct StandardSoftmaxBuilder { inner: NonNull<dynet_sys::dynetSoftmaxBuilder_t>, } impl_softmax_builder!(StandardSoftmaxBuilder); impl StandardSoftmaxBuilder { pub fn new( rep_dim: u32, num_classes: u32, pc: &mut ParameterCollection, bias: bool, ) -> StandardSoftmaxBuilder { unsafe { let mut builder_ptr: *mut dynet_sys::dynetSoftmaxBuilder_t = ptr::null_mut(); check_api_status!(dynet_sys::dynetCreateStandardSoftmaxBuilder( rep_dim, num_classes, pc.as_mut_ptr(), bias as u32, &mut builder_ptr, )); StandardSoftmaxBuilder::from_raw(builder_ptr, true) } } pub fn from_parameters(p_w: &mut Parameter, p_b: &mut Parameter) -> StandardSoftmaxBuilder { unsafe { let mut builder_ptr: *mut dynet_sys::dynetSoftmaxBuilder_t = ptr::null_mut(); check_api_status!(dynet_sys::dynetCreateStandardSoftmaxBuilderFromParameters( p_w.as_mut_ptr(), p_b.as_mut_ptr(), &mut builder_ptr, )); 
StandardSoftmaxBuilder::from_raw(builder_ptr, true) } } }
ression { unsafe { let mut expr_ptr: *mut dynet_sys::dynetExpression_t = ptr::null_mut(); check_api_status!(dynet_sys::dynetApplySoftmaxBuilderNegLogSoftmaxOne( self.as_mut_ptr(), rep.as_ref().as_ptr(), classidx, &mut expr_ptr, )); Expression::from_raw(expr_ptr, true) } } fn neg_log_softmax<E: AsRef<Expression>>(&mut self, rep: E, classidxs: &[u32]) -> Expression { unsafe { let mut expr_ptr: *mut dynet_sys::dynetExpression_t = ptr::null_mut(); check_api_status!(dynet_sys::dynetApplySoftmaxBuilderNegLogSoftmax( self.as_mut_ptr(), rep.as_ref().as_ptr(), classidxs.as_ptr(), classidxs.len(), &mut expr_ptr, )); Expression::from_raw(expr_ptr, true) } } fn sample<E: AsRef<Expression>>(&mut self, rep: E) -> u32 { unsafe { let mut retval: u32 = 0; check_api_status!(dynet_sys::dynetSampleFromSoftmaxBuilder( self.as_mut_ptr(), rep.as_ref().as_ptr(), &mut retval, )); retval } } fn full_log_distribution<E: AsRef<Expression>>(&mut self, rep: E) -> Ex
random
[ { "content": "/// Computes moment along a specific dimension.\n\n///\n\n/// # Arguments\n\n///\n\n/// * x - Input mini-batched expression.\n\n/// * dims - Dimensions along which to reduce.\n\n/// * r - Order of the moment.\n\n/// * b - Whether to include batch dimension.\n\n/// * n - If > 0, overwrite the `n` i...
Rust
src/main.rs
frol/cargo-hack
2befd3982c39aef6f446827346b365aee6b2ce67
#![forbid(unsafe_code)] #![warn(future_incompatible, rust_2018_idioms, single_use_lifetimes, unreachable_pub)] #![warn(clippy::default_trait_access, clippy::wildcard_imports)] #[macro_use] mod term; #[macro_use] mod process; mod cargo; mod cli; mod context; mod features; mod fs; mod manifest; mod metadata; mod remove_dev_deps; mod restore; mod rustup; mod version; use std::fmt::Write; use anyhow::{bail, Result}; use crate::{ cargo::Cargo, context::Context, features::Feature, metadata::PackageId, process::ProcessBuilder, restore::Restore, rustup::Rustup, }; fn main() { if let Err(e) = try_main() { error!("{:#}", e); std::process::exit(1) } } fn try_main() -> Result<()> { let args = &cli::raw(); let cx = &Context::new(args)?; exec_on_workspace(cx) } fn exec_on_workspace(cx: &Context<'_>) -> Result<()> { let mut progress = Progress::default(); let packages = determine_package_list(cx, &mut progress)?; let restore = Restore::new(cx); if let Some(range) = &cx.version_range { progress.total *= range.len(); let mut line = process!("cargo"); if cx.verbose { line.display_manifest_path(); } { let toolchain = &range[0]; rustup::install_toolchain(toolchain, cx.target, true)?; let mut line = line.clone(); line.leading_arg(toolchain); line.args(&["generate-lockfile"]); if let Some(pid) = cx.current_package() { let package = cx.packages(pid); line.arg("--manifest-path"); line.arg( package .manifest_path .strip_prefix(&cx.current_dir) .unwrap_or(&package.manifest_path), ); } line.exec_with_output()?; } range.iter().enumerate().try_for_each(|(i, toolchain)| { if i != 0 { rustup::install_toolchain(toolchain, cx.target, true)?; } if cx.clean_per_version { cargo_clean(cx, None)?; } let mut line = line.clone(); line.leading_arg(toolchain); line.with_args(cx); packages.iter().try_for_each(|(id, kind)| { exec_on_package(cx, id, kind, &line, &restore, &mut progress) }) }) } else { let mut line = cx.cargo(); line.with_args(cx); packages.iter().try_for_each(|(id, kind)| { 
exec_on_package(cx, id, kind, &line, &restore, &mut progress) }) } } #[derive(Default)] struct Progress { total: usize, count: usize, } enum Kind<'a> { NoSubcommand, SkipAsPrivate, Normal, Each { features: Vec<&'a Feature> }, Powerset { features: Vec<Vec<&'a Feature>> }, } fn determine_kind<'a>(cx: &'a Context<'_>, id: &PackageId, progress: &mut Progress) -> Kind<'a> { if cx.ignore_private && cx.is_private(id) { info!("skipped running on private package `{}`", cx.name_verbose(id)); return Kind::SkipAsPrivate; } if cx.subcommand.is_none() { return Kind::NoSubcommand; } if !cx.each_feature && !cx.feature_powerset { progress.total += 1; return Kind::Normal; } let package = cx.packages(id); let filter = |&f: &&Feature| { !cx.exclude_features.iter().any(|s| f == *s) && !cx.group_features.iter().any(|g| g.matches(f.name())) }; let features = if cx.include_features.is_empty() { let feature_list = cx.pkg_features(id); cx.exclude_features.iter().for_each(|d| { if !feature_list.contains(d) { warn!("specified feature `{}` not found in package `{}`", d, package.name); } }); let mut features: Vec<_> = feature_list.normal().iter().filter(filter).collect(); if let Some(opt_deps) = &cx.optional_deps { for &d in opt_deps { if !feature_list.optional_deps().iter().any(|f| f == d) { warn!( "specified optional dependency `{}` not found in package `{}`", d, package.name ); } } features.extend(feature_list.optional_deps().iter().filter(|f| { filter(f) && (opt_deps.is_empty() || opt_deps.iter().any(|x| *f == *x)) })); } if cx.include_deps_features { features.extend(feature_list.deps_features().iter().filter(filter)); } if !cx.group_features.is_empty() { features.extend(cx.group_features.iter()); } features } else { cx.include_features.iter().filter(filter).collect() }; if cx.each_feature { if (package.features.is_empty() || !cx.include_features.is_empty()) && features.is_empty() { progress.total += 1; Kind::Normal } else { progress.total += features.len() + !cx.exclude_no_default_features 
as usize + !cx.exclude_all_features as usize; Kind::Each { features } } } else if cx.feature_powerset { let features = features::feature_powerset(features, cx.depth, &package.features); if (package.features.is_empty() || !cx.include_features.is_empty()) && features.is_empty() { progress.total += 1; Kind::Normal } else { progress.total += features.len() - 1 + !cx.exclude_no_default_features as usize + !cx.exclude_all_features as usize; Kind::Powerset { features } } } else { unreachable!() } } fn determine_package_list<'a>( cx: &'a Context<'_>, progress: &mut Progress, ) -> Result<Vec<(&'a PackageId, Kind<'a>)>> { Ok(if cx.workspace { for spec in &cx.exclude { if !cx.workspace_members().any(|id| cx.packages(id).name == *spec) { warn!( "excluded package(s) `{}` not found in workspace `{}`", spec, cx.workspace_root().display() ); } } cx.workspace_members() .filter(|id| !cx.exclude.contains(&&*cx.packages(id).name)) .map(|id| (id, determine_kind(cx, id, progress))) .collect() } else if !cx.package.is_empty() { if let Some(spec) = cx .package .iter() .find(|&&spec| !cx.workspace_members().any(|id| cx.packages(id).name == spec)) { bail!("package ID specification `{}` matched no packages", spec) } cx.workspace_members() .filter(|id| cx.package.contains(&&*cx.packages(id).name)) .map(|id| (id, determine_kind(cx, id, progress))) .collect() } else if cx.current_package().is_none() { cx.workspace_members().map(|id| (id, determine_kind(cx, id, progress))).collect() } else { let current_package = &cx.packages(cx.current_package().unwrap()).name; cx.workspace_members() .find(|id| cx.packages(id).name == *current_package) .map(|id| vec![(id, determine_kind(cx, id, progress))]) .unwrap_or_default() }) } fn exec_on_package( cx: &Context<'_>, id: &PackageId, kind: &Kind<'_>, line: &ProcessBuilder<'_>, restore: &Restore, progress: &mut Progress, ) -> Result<()> { if let Kind::SkipAsPrivate = kind { return Ok(()); } let package = cx.packages(id); let mut line = line.clone(); 
line.append_features_from_args(cx, id); line.arg("--manifest-path"); line.arg(package.manifest_path.strip_prefix(&cx.current_dir).unwrap_or(&package.manifest_path)); if cx.no_dev_deps || cx.remove_dev_deps { let new = cx.manifests(id).remove_dev_deps(); let mut handle = restore.set_manifest(cx, id); fs::write(&package.manifest_path, new)?; exec_actual(cx, id, kind, &mut line, progress)?; handle.close() } else { exec_actual(cx, id, kind, &mut line, progress) } } fn exec_actual( cx: &Context<'_>, id: &PackageId, kind: &Kind<'_>, line: &mut ProcessBuilder<'_>, progress: &mut Progress, ) -> Result<()> { match kind { Kind::NoSubcommand => return Ok(()), Kind::SkipAsPrivate => unreachable!(), Kind::Normal => { return exec_cargo(cx, id, line, progress); } Kind::Each { .. } | Kind::Powerset { .. } => {} } let mut line = line.clone(); if !cx.no_default_features { line.arg("--no-default-features"); } if !cx.exclude_no_default_features { exec_cargo(cx, id, &mut line, progress)?; } match kind { Kind::Each { features } => { features .iter() .try_for_each(|f| exec_cargo_with_features(cx, id, &line, progress, Some(f)))?; } Kind::Powerset { features } => { features .iter() .skip(1) .try_for_each(|f| exec_cargo_with_features(cx, id, &line, progress, f))?; } _ => unreachable!(), } if !cx.exclude_all_features { line.arg("--all-features"); exec_cargo(cx, id, &mut line, progress)?; } Ok(()) } fn exec_cargo_with_features( cx: &Context<'_>, id: &PackageId, line: &ProcessBuilder<'_>, progress: &mut Progress, features: impl IntoIterator<Item = impl AsRef<str>>, ) -> Result<()> { let mut line = line.clone(); line.append_features(features); exec_cargo(cx, id, &mut line, progress) } fn exec_cargo( cx: &Context<'_>, id: &PackageId, line: &mut ProcessBuilder<'_>, progress: &mut Progress, ) -> Result<()> { progress.count += 1; if cx.clean_per_run { cargo_clean(cx, Some(id))?; } let mut msg = String::new(); if cx.verbose { write!(msg, "running {}", line).unwrap(); } else { write!(msg, "running {} 
on {}", line, cx.packages(id).name).unwrap(); } write!(msg, " ({}/{})", progress.count, progress.total).unwrap(); info!("{}", msg); line.exec() } fn cargo_clean(cx: &Context<'_>, id: Option<&PackageId>) -> Result<()> { let mut line = cx.cargo(); line.arg("clean"); if let Some(id) = id { line.arg("--package"); line.arg(&cx.packages(id).name); } if cx.verbose { info!("running {}", line); } line.exec() }
#![forbid(unsafe_code)] #![warn(future_incompatible, rust_2018_idioms, single_use_lifetimes, unreachable_pub)] #![warn(clippy::default_trait_access, clippy::wildcard_imports)] #[macro_use] mod term; #[macro_use] mod process; mod cargo; mod cli; mod context; mod features; mod fs; mod manifest; mod metadata; mod remove_dev_deps; mod restore; mod rustup; mod version; use std::fmt::Write; use anyhow::{bail, Result}; use crate::{ cargo::Cargo, context::Context, features::Feature, metadata::PackageId, process::ProcessBuilder, restore::Restore, rustup::Rustup, }; fn main() { if let Err(e) = try_main() { error!("{:#}", e); std::process::exit(1) } } fn try_main() -> Result<()> { let args = &cli::raw(); let cx = &Context::new(args)?; exec_on_workspace(cx) } fn exec_on_workspace(cx: &Context<'_>) -> Result<()> { let mut progress = Progress::default(); let packages = determine_package_list(cx, &mut progress)?; let restore = Restore::new(cx); if let Some(range) = &cx.version_range { progress.total *= range.len(); let mut line = process!("cargo"); if cx.verbose { line.display_manifest_path(); } { let toolchain = &range[0]; rustup::install_toolchain(toolchain, cx.target, true)?; let mut line = line.clone(); line.leading_arg(toolchain); line.args(&["generate-lockfile"]); if let Some(pid) = cx.current_package() { let package = cx.packages(pid); line.arg("--manifest-path"); line.arg( package .manifest_path .strip_prefix(&cx.current_dir) .unwrap_or(&package.manifest_path), ); } line.exec_with_output()?; } range.iter().enumerate().try_for_each(|(i, toolchain)| { if i != 0 { rustup::install_toolchain(toolchain, cx.target, true)?; } if cx.clean_per_version { cargo_clean(cx, None)?; } let mut line = line.clone(); line.leading_arg(toolchain); line.with_args(cx); packages.iter().try_for_each(|(id, kind)| { exec_on_package(cx, id, kind, &line, &restore, &mut progress) }) }) } else { let mut line = cx.cargo(); line.with_args(cx); packages.iter().try_for_each(|(id, kind)| { 
exec_on_package(cx, id, kind, &line, &restore, &mut progress) }) } } #[derive(Default)] struct Progress { total: usize, count: usize, } enum Kind<'a> { NoSubcommand, SkipAsPrivate, Normal, Each { features: Vec<&'a Feature> }, Powerset { features: Vec<Vec<&'a Feature>> }, } fn determine_kind<'a>(cx: &'a Context<'_>, id: &PackageId, progress: &mut Progress) -> Kind<'a> { if cx.ignore_private && cx.is_private(id) { info!("skipped running on private package `{}`", cx.name_verbose(id)); return Kind::SkipAsPrivate; } if cx.subcommand.is_none() { return Kind::NoSubcommand; } if !cx.each_feature && !cx.feature_powerset { progress.total += 1; return Kind::Normal; } let package = cx.packages(id); let filter = |&f: &&Feature| { !cx.exclude_features.iter().any(|s| f == *s) && !cx.group_features.iter().any(|g| g.matches(f.name())) }; let features = if cx.include_features.is_empty() { let feature_list = cx.pkg_features(id); cx.exclude_features.iter().for_each(|d| { if !feature_list.contains(d) { warn!("specified feature `{}` not found in package `{}`", d, package.name); } }); let mut features: Vec<_> = feature_list.normal().iter().filter(filter).collect(); if let Some(opt_deps) = &cx.optional_deps { for &d in opt_deps { if !feature_list.optional_deps().iter().any(|f| f == d) { warn!( "specified optional dependency `{}` not found in package `{}`", d, package.name ); } } features.extend(feature_list.optional_deps().iter().filter(|f| { filter(f) && (opt_deps.is_empty() || opt_deps.iter().any(|x| *f == *x)) })); } if cx.include_deps_features { features.extend(feature_list.deps_features().iter().filter(filter)); } if !cx.group_features.is_empty() { features.extend(cx.group_features.iter()); } features } else { cx.include_features.iter().filter(filter).collect() }; if cx.each_feature { if (package.features.is_empty() || !cx.include_features.is_empty()) && features.is_empty() { progress.total += 1; Kind::Normal } else { progress.total += features.len() + !cx.exclude_no_default_features 
as usize + !cx.exclude_all_features as usize; Kind::Each { features } } } else if cx.feature_powerset { let features = features::feature_powerset(features, cx.depth, &package.features); if (package.features.is_empty() || !cx.include_features.is_empty()) && features.is_empty() { progress.total += 1; Kind::Normal } else { progress.total += features.len() - 1 + !cx.exclude_no_default_features as usize + !cx.exclude_all_features as usize; Kind::Powerset { features } } } else { unreachable!() } } fn determine_package_list<'a>( cx: &'a Context<'_>, progress: &mut Progress, ) -> Result<Vec<(&'a PackageId, Kind<'a>)>> { Ok(if cx.workspace { for spec in &cx.exclude { if !cx.workspace_members().any(|id| cx.packages(id).name == *spec) { warn!( "excluded package(s) `{}` not found in workspace `{}`", spec, cx.workspace_root().display() ); } } cx.workspace_members() .filter(|id| !cx.exclude.contains(&&*cx.packages(id).name)) .map(|id| (id, determine_kind(cx, id, progress))) .collect() } else if !cx.package.is_empty() { if let Some(spec) = cx .package .iter() .find(|&&spec| !cx.workspace_members().any(|id| cx.packages(id).name == spec)) { bail!("package ID specification `{}` matched no packages", spec) } cx.workspace_members() .filter(|id| cx.package.contains(&&*cx.packages(id).name)) .map(|id| (id, determine_kind(cx, id, progress))) .collect() } else if cx.current_package().is_none() { cx.workspace_members().map(|id| (id, determine_kind(cx, id, progress))).collect() } else { let current_package = &cx.packages(cx.current_package().unwrap()).name; cx.workspace_members() .find(|id| cx.packages(id).name == *current_package) .map(|id| vec![(id, determine_kind(cx, id, progress))]) .unwrap_or_default() }) } fn exec_on_package( cx: &Context<'_>, id: &PackageId, kind: &Kind<'_>, line: &ProcessBuilder<'_>, restore: &Restore, progress: &mut Progress, ) -> Result<()> { if let Kind::SkipAsPrivate = kind { return Ok(()); } let package = cx.packages(id); let mut line = line.clone(); 
line.append_features_from_args(cx, id); line.arg("--manifest-path"); line.arg(package.manifest_path.strip_prefix(&cx.current_dir).unwrap_or(&package.manifest_path)); if cx.no_dev_deps || cx.remove_dev_deps { let new = cx.manifests(id).remove_dev_deps(); let mut handle = restore.set_manifest(cx, id); fs::write(&package.manifest_path, new)?; exec_actual(cx, id, kind, &mut line, progress)?; handle.close() } else { exec_actual(cx, id, kind, &mut line, progress) } }
fn exec_cargo_with_features( cx: &Context<'_>, id: &PackageId, line: &ProcessBuilder<'_>, progress: &mut Progress, features: impl IntoIterator<Item = impl AsRef<str>>, ) -> Result<()> { let mut line = line.clone(); line.append_features(features); exec_cargo(cx, id, &mut line, progress) } fn exec_cargo( cx: &Context<'_>, id: &PackageId, line: &mut ProcessBuilder<'_>, progress: &mut Progress, ) -> Result<()> { progress.count += 1; if cx.clean_per_run { cargo_clean(cx, Some(id))?; } let mut msg = String::new(); if cx.verbose { write!(msg, "running {}", line).unwrap(); } else { write!(msg, "running {} on {}", line, cx.packages(id).name).unwrap(); } write!(msg, " ({}/{})", progress.count, progress.total).unwrap(); info!("{}", msg); line.exec() } fn cargo_clean(cx: &Context<'_>, id: Option<&PackageId>) -> Result<()> { let mut line = cx.cargo(); line.arg("clean"); if let Some(id) = id { line.arg("--package"); line.arg(&cx.packages(id).name); } if cx.verbose { info!("running {}", line); } line.exec() }
fn exec_actual( cx: &Context<'_>, id: &PackageId, kind: &Kind<'_>, line: &mut ProcessBuilder<'_>, progress: &mut Progress, ) -> Result<()> { match kind { Kind::NoSubcommand => return Ok(()), Kind::SkipAsPrivate => unreachable!(), Kind::Normal => { return exec_cargo(cx, id, line, progress); } Kind::Each { .. } | Kind::Powerset { .. } => {} } let mut line = line.clone(); if !cx.no_default_features { line.arg("--no-default-features"); } if !cx.exclude_no_default_features { exec_cargo(cx, id, &mut line, progress)?; } match kind { Kind::Each { features } => { features .iter() .try_for_each(|f| exec_cargo_with_features(cx, id, &line, progress, Some(f)))?; } Kind::Powerset { features } => { features .iter() .skip(1) .try_for_each(|f| exec_cargo_with_features(cx, id, &line, progress, f))?; } _ => unreachable!(), } if !cx.exclude_all_features { line.arg("--all-features"); exec_cargo(cx, id, &mut line, progress)?; } Ok(()) }
function_block-full_function
[ { "content": "fn powerset<T: Copy>(iter: impl IntoIterator<Item = T>, depth: Option<usize>) -> Vec<Vec<T>> {\n\n iter.into_iter().fold(vec![vec![]], |mut acc, elem| {\n\n let ext = acc.clone().into_iter().map(|mut curr| {\n\n curr.push(elem);\n\n curr\n\n });\n\n if...
Rust
src/main.rs
phase/pokerus
27742d188930d62e76a2c7125692f32090acfbaf
extern crate bitbit; extern crate logos; extern crate png; use std::borrow::Cow; use std::env::args; use std::error::Error; use std::fs; use std::fs::File; use std::io::{BufRead, BufReader, Write}; use std::process::exit; use crate::tileset::{parse_metatile_config, Tile, TileStorage}; mod rom; mod tileset; const VERSION: &'static str = env!("CARGO_PKG_VERSION"); fn main() { let mut args: Vec<String> = args().collect(); args.remove(0); match inner_main(args) { Ok(message) => { println!("{}", message); exit(0); } Err(message) => { print_help(); println!("Failed previous command:\n{}", message); exit(1); } } } fn inner_main(args: Vec<String>) -> Result<String, String> { if let Some(arg) = args.get(0) { match arg.as_str() { "tileset" => { let primary = match args.get(1) { Some(arg) => { match arg.as_str() { "primary" => { true } "secondary" => { false } _ => return Err("missing primary/secondary argument".to_string()) } } None => return Err("missing primary/secondary argument".to_string()) }; let output_path = match args.get(2) { Some(arg) => { arg } None => return Err("missing output folder".to_string()) }.clone(); let metatile_definitions = match args.get(3) { Some(arg) => { let file = File::open(arg).expect("no such file"); let buf = BufReader::new(file); let lines: Vec<String> = buf.lines() .map(|l| l.expect("Could not parse line")) .collect(); parse_metatile_config(lines) } None => return Err("missing metatile file".to_string()) }.clone(); if args.len() < 5 { return Err("missing input tilesets".to_string()); } let inputs = &args[4..]; let mut storage = TileStorage::new(output_path.clone(), primary); for tileset in inputs { storage.add_image(tileset.clone()).expect("failed to add tileset to storage"); } storage.output(); let mut metatiles: Vec<u8> = Vec::new(); for (metatile_file_name, metatile_id) in metatile_definitions { let metatile = storage.encoded_metatiles.get(&(metatile_file_name.clone(), metatile_id)) .expect(&format!("failed to get encoded metatile: {} 
{}", metatile_file_name, metatile_id)).clone(); metatiles.append(&mut metatile.clone()); } let path = format!("{}/metatiles.bin", storage.output_folder); fs::remove_file(&path); let mut file = File::create(path).expect("failed to create metatiles.bin file"); file.write_all(&metatiles).expect("failed to write metatiles to file"); return Ok(format!("Tileset and palettes written to {}", output_path).to_string()); } "palette" => { let image = match args.get(1) { Some(arg) => { arg } None => return Err("missing image file".to_string()) }.clone(); let output = match args.get(2) { Some(arg) => { arg } None => return Err("missing output file".to_string()) }.clone(); return match TileStorage::read_palette(image) { Ok(palette) => { TileStorage::output_palette(&palette, output.clone()); Ok(format!("Palette file written to {}", output).to_string()) } Err(error) => Err(format!("error reading palette: {}", error.description())) }; } _ => { print_help(); } } } else { print_help(); } Ok("".to_string()) } fn print_help() { println!("*.*.*.* Pokerus v{} *.*.*.* ", VERSION); println!("Available Commands:"); println!("- pokerus"); println!(" Launches the GUI. (WIP)"); println!("- pokerus tileset <primary/secondary> <output_folder> <metatile_definitions> <input_images...>"); println!(" Merges tilesets and their palettes into one image."); println!(" Useful for importing into Porymap."); println!("- pokerus palette <image> <output.pal>"); println!(" Extract the palette of an image to a .pal file."); }
extern crate bitbit; extern crate logos; extern crate png; use std::borrow::Cow; use std::env::args; use std::error::Error; use std::fs; use std::fs::File; use std::io::{BufRead, BufReader, Write}; use std::process::exit; use crate::tileset::{parse_metatile_config, Tile, TileStorage}; mod rom; mod tileset; const VERSION: &'static str = env!("CARGO_PKG_VERSION"); fn main() { let mut args: Vec<String> = args().collect(); args.remove(0); match inner_main(args) { Ok(message) => { println!("{}", message); exit(0); } Err(message) => { print_help(); println!("Failed previous command:\n{}", message); exit(1); } } } fn inner_main(args: Vec<String>) -> Result<String, String> { if let Some(arg) = args.get(0) { match arg.as_str() { "tileset" => { let primary = match args.get(1) { Some(arg) => { match arg.as_str() { "primary" => { true } "secondary" => { false } _ => return Err("missing primary/secondary argument".to_string()) } } None => return Err("missing primary/secondary argument".to_string()) }; let output_path = match args.get(2) { Some(arg) => { arg } None => return Err("missing output folder".to_string()) }.clone(); let metatile_definitions = match args.get(3) { Some(arg) => {
}.clone(); if args.len() < 5 { return Err("missing input tilesets".to_string()); } let inputs = &args[4..]; let mut storage = TileStorage::new(output_path.clone(), primary); for tileset in inputs { storage.add_image(tileset.clone()).expect("failed to add tileset to storage"); } storage.output(); let mut metatiles: Vec<u8> = Vec::new(); for (metatile_file_name, metatile_id) in metatile_definitions { let metatile = storage.encoded_metatiles.get(&(metatile_file_name.clone(), metatile_id)) .expect(&format!("failed to get encoded metatile: {} {}", metatile_file_name, metatile_id)).clone(); metatiles.append(&mut metatile.clone()); } let path = format!("{}/metatiles.bin", storage.output_folder); fs::remove_file(&path); let mut file = File::create(path).expect("failed to create metatiles.bin file"); file.write_all(&metatiles).expect("failed to write metatiles to file"); return Ok(format!("Tileset and palettes written to {}", output_path).to_string()); } "palette" => { let image = match args.get(1) { Some(arg) => { arg } None => return Err("missing image file".to_string()) }.clone(); let output = match args.get(2) { Some(arg) => { arg } None => return Err("missing output file".to_string()) }.clone(); return match TileStorage::read_palette(image) { Ok(palette) => { TileStorage::output_palette(&palette, output.clone()); Ok(format!("Palette file written to {}", output).to_string()) } Err(error) => Err(format!("error reading palette: {}", error.description())) }; } _ => { print_help(); } } } else { print_help(); } Ok("".to_string()) } fn print_help() { println!("*.*.*.* Pokerus v{} *.*.*.* ", VERSION); println!("Available Commands:"); println!("- pokerus"); println!(" Launches the GUI. 
(WIP)"); println!("- pokerus tileset <primary/secondary> <output_folder> <metatile_definitions> <input_images...>"); println!(" Merges tilesets and their palettes into one image."); println!(" Useful for importing into Porymap."); println!("- pokerus palette <image> <output.pal>"); println!(" Extract the palette of an image to a .pal file."); }
let file = File::open(arg).expect("no such file"); let buf = BufReader::new(file); let lines: Vec<String> = buf.lines() .map(|l| l.expect("Could not parse line")) .collect(); parse_metatile_config(lines) } None => return Err("missing metatile file".to_string())
random
[ { "content": "pub fn parse_metatile_config(lines: Vec<String>) -> Vec<(String, usize)> {\n\n let mut file_map: HashMap<String, String> = HashMap::new();\n\n let mut metatile_refs: Vec<(String, usize)> = Vec::new();\n\n for line in lines {\n\n if line.len() < 3 || line.starts_with('#') {\n\n ...
Rust
api/src/auth.rs
anjaamrein/ngm
a039de268ed856d93a098b0d04c1ce76266d49ca
use anyhow::Context; use axum::{ async_trait, extract::{FromRequest, RequestParts, TypedHeader}, headers::{authorization::Bearer, Authorization}, }; use jsonwebtoken::jwk::{AlgorithmParameters, JwkSet}; use jsonwebtoken::{DecodingKey, Validation}; use once_cell::sync::OnceCell; use serde::{Deserialize, Serialize}; use crate::Error; static JWKS: OnceCell<JwkSet> = OnceCell::new(); static AUD: OnceCell<String> = OnceCell::new(); static ISS: OnceCell<String> = OnceCell::new(); #[derive(clap::Parser)] pub struct Auth { #[clap(env)] pub cognito_client_id: String, #[clap(env)] pub cognito_pool_id: String, #[clap(env, default_value = "eu-west-1")] pub cognito_aws_region: String, } impl Auth { pub async fn initialize(&self) -> anyhow::Result<()> { let url = format!( "https://cognito-idp.{}.amazonaws.com/{}/.well-known/jwks.json", self.cognito_aws_region, self.cognito_pool_id ); let keyset = reqwest::get(url).await?.json().await?; JWKS.get_or_init(|| keyset); let audience = self.cognito_client_id.clone(); AUD.get_or_init(|| audience); let issuer = format!( "https://cognito-idp.{}.amazonaws.com/{}", self.cognito_aws_region, self.cognito_pool_id ); ISS.get_or_init(|| issuer); Ok(()) } } #[derive(Debug, Serialize, Deserialize)] pub struct Claims { aud: String, exp: usize, iss: String, pub email: String, } #[async_trait] impl<B> FromRequest<B> for Claims where B: Send, { type Rejection = Error; async fn from_request(req: &mut RequestParts<B>) -> Result<Self, Self::Rejection> { let TypedHeader(Authorization(bearer)) = TypedHeader::<Authorization<Bearer>>::from_request(req) .await .map_err(|_| Error::Unauthorized)?; let token = bearer.token(); let header = jsonwebtoken::decode_header(token) .map_err(|_| Error::Jwt("Failed to decode token header"))?; let kid = header .kid .ok_or(Error::Jwt("Token is missing `kid` parameter"))?; let jwk = JWKS .get() .context("Once cell `JWKS` not initialized")? 
.find(&kid) .ok_or(Error::Jwt("No matching key found in keyset"))?; match jwk.algorithm { AlgorithmParameters::RSA(ref rsa) => { let decoding_key = DecodingKey::from_rsa_components(&rsa.n, &rsa.e) .map_err(|_| Error::Jwt("Failed to create decoding key"))?; let algorithm = jwk .common .algorithm .ok_or(Error::Jwt("JWK is missing `algorithm` parameter"))?; let mut validation = Validation::new(algorithm); validation.set_audience(&[AUD.get().context("Once cell `AUD` not initialized")?]); validation.set_issuer(&[ISS.get().context("Once cell `ISS` not initialized")?]); let decoded_token = jsonwebtoken::decode::<Claims>(token, &decoding_key, &validation).map_err( |_e| { Error::Jwt("Failed to decode token") }, )?; return Ok(decoded_token.claims); } _ => return Err(Error::Jwt("Unreachable!")), } } }
use anyhow::Context; use axum::{ async_trait, extract::{FromRequest, RequestParts, TypedHeader}, headers::{authorization::Bearer, Authorization}, }; use jsonwebtoken::jwk::{AlgorithmParameters, JwkSet}; use jsonwebtoken::{DecodingKey, Validation}; use once_cell::sync::OnceCell; use serde::{Deserialize, Serialize}; use crate::Error; static JWKS: OnceCell<JwkSet> = OnceCell::new(); static AUD: OnceCell<String> = OnceCell::new(); static ISS: OnceCell<String> = OnceCell::new(); #[derive(clap::Parser)] pub struct Auth { #[clap(env)] pub cognito_client_id: String, #[clap(env)] pub cognito_pool_id: String, #[clap(env, default_value = "eu-west-1")] pub cognito_aws_region: String, } impl Auth { pub async fn initialize(&self) -> anyhow::Result<()> { let url = format!( "https://cognito-idp.{}.amazonaws.com/{}/.well-known/jwks.json", self.cognito_aws_region, self.cognito_pool_id ); let keyset = reqwest::get(url).await?.json().await?; JWKS.get_or_init(|| keyset); let audience = self.cognito_client_id.clone(); AUD.get_or_init(|| audience); let issuer = format!( "https://cognito-idp.{}.amazonaws.com/{}", self.cognito_aws_region, self.cognito_pool_id ); ISS.get_or_init(|| issuer); Ok(()) } } #[derive(Debug, Serialize, Deserialize)] pub struct Claims { aud: String, exp: usize, iss: String, pub email: String, } #[async_trait] impl<B> FromRequest<B> for Claims where B: Send, { type Rejection = Error; async fn from_request(req: &mut RequestParts<B>) -> Result<Self, Self::Rejection> { let TypedHeader(Authorization(bearer)) = TypedHeader::<Authorization<Bearer>>::from_request(req) .await .map_err(|_| Error::Unauthorized)?; let token = bearer.token(); let header = jsonwebtoken::decode_header(token) .map_err(|_| Error::Jwt("Failed to decode token header"))?; let kid = header .kid .ok_or(Error::Jwt("Token is missing `kid` parameter"))?; let jwk = JWKS .get() .context("Once cell `J
Error::Jwt("Failed to decode token") }, )?; return Ok(decoded_token.claims); } _ => return Err(Error::Jwt("Unreachable!")), } } }
WKS` not initialized")? .find(&kid) .ok_or(Error::Jwt("No matching key found in keyset"))?; match jwk.algorithm { AlgorithmParameters::RSA(ref rsa) => { let decoding_key = DecodingKey::from_rsa_components(&rsa.n, &rsa.e) .map_err(|_| Error::Jwt("Failed to create decoding key"))?; let algorithm = jwk .common .algorithm .ok_or(Error::Jwt("JWK is missing `algorithm` parameter"))?; let mut validation = Validation::new(algorithm); validation.set_audience(&[AUD.get().context("Once cell `AUD` not initialized")?]); validation.set_issuer(&[ISS.get().context("Once cell `ISS` not initialized")?]); let decoded_token = jsonwebtoken::decode::<Claims>(token, &decoding_key, &validation).map_err( |_e| {
random
[ { "content": " static getAccessToken(): string | null {\n\n return localStorage.getItem(cognitoAccessToken);\n", "file_path": "ui/src/auth.ts", "rank": 0, "score": 146769.28736785732 }, { "content": "const header = `\n\n/* eslint-disable */\n\n// DO NOT EDIT: this is file is automaticall...
Rust
src/middleware/identity.rs
DenisKolodin/actix-web
a1958deaae7910f901d2ce4e6ecd8636869dbe15
use std::rc::Rc; use cookie::{Cookie, CookieJar, Key}; use futures::future::{err as FutErr, ok as FutOk, FutureResult}; use futures::Future; use time::Duration; use error::{Error, Result}; use http::header::{self, HeaderValue}; use httprequest::HttpRequest; use httpresponse::HttpResponse; use middleware::{Middleware, Response, Started}; pub trait RequestIdentity { fn identity(&self) -> Option<&str>; fn remember(&mut self, identity: String); fn forget(&mut self); } impl<S> RequestIdentity for HttpRequest<S> { fn identity(&self) -> Option<&str> { if let Some(id) = self.extensions().get::<IdentityBox>() { return id.0.identity(); } None } fn remember(&mut self, identity: String) { if let Some(id) = self.extensions_mut().get_mut::<IdentityBox>() { return id.0.remember(identity); } } fn forget(&mut self) { if let Some(id) = self.extensions_mut().get_mut::<IdentityBox>() { return id.0.forget(); } } } pub trait Identity: 'static { fn identity(&self) -> Option<&str>; fn remember(&mut self, key: String); fn forget(&mut self); fn write(&mut self, resp: HttpResponse) -> Result<Response>; } pub trait IdentityPolicy<S>: Sized + 'static { type Identity: Identity; type Future: Future<Item = Self::Identity, Error = Error>; fn from_request(&self, request: &mut HttpRequest<S>) -> Self::Future; } pub struct IdentityService<T> { backend: T, } impl<T> IdentityService<T> { pub fn new(backend: T) -> Self { IdentityService { backend } } } struct IdentityBox(Box<Identity>); #[doc(hidden)] unsafe impl Send for IdentityBox {} #[doc(hidden)] unsafe impl Sync for IdentityBox {} impl<S: 'static, T: IdentityPolicy<S>> Middleware<S> for IdentityService<T> { fn start(&self, req: &mut HttpRequest<S>) -> Result<Started> { let mut req = req.clone(); let fut = self.backend .from_request(&mut req) .then(move |res| match res { Ok(id) => { req.extensions_mut().insert(IdentityBox(Box::new(id))); FutOk(None) } Err(err) => FutErr(err), }); Ok(Started::Future(Box::new(fut))) } fn response( &self, req: &mut 
HttpRequest<S>, resp: HttpResponse, ) -> Result<Response> { if let Some(mut id) = req.extensions_mut().remove::<IdentityBox>() { id.0.write(resp) } else { Ok(Response::Done(resp)) } } } #[doc(hidden)] pub struct CookieIdentity { changed: bool, identity: Option<String>, inner: Rc<CookieIdentityInner>, } impl Identity for CookieIdentity { fn identity(&self) -> Option<&str> { self.identity.as_ref().map(|s| s.as_ref()) } fn remember(&mut self, value: String) { self.changed = true; self.identity = Some(value); } fn forget(&mut self) { self.changed = true; self.identity = None; } fn write(&mut self, mut resp: HttpResponse) -> Result<Response> { if self.changed { let _ = self.inner.set_cookie(&mut resp, self.identity.take()); } Ok(Response::Done(resp)) } } struct CookieIdentityInner { key: Key, name: String, path: String, domain: Option<String>, secure: bool, max_age: Option<Duration>, } impl CookieIdentityInner { fn new(key: &[u8]) -> CookieIdentityInner { CookieIdentityInner { key: Key::from_master(key), name: "actix-identity".to_owned(), path: "/".to_owned(), domain: None, secure: true, max_age: None, } } fn set_cookie(&self, resp: &mut HttpResponse, id: Option<String>) -> Result<()> { let some = id.is_some(); { let id = id.unwrap_or_else(String::new); let mut cookie = Cookie::new(self.name.clone(), id); cookie.set_path(self.path.clone()); cookie.set_secure(self.secure); cookie.set_http_only(true); if let Some(ref domain) = self.domain { cookie.set_domain(domain.clone()); } if let Some(max_age) = self.max_age { cookie.set_max_age(max_age); } let mut jar = CookieJar::new(); if some { jar.private(&self.key).add(cookie); } else { jar.add_original(cookie.clone()); jar.private(&self.key).remove(cookie); } for cookie in jar.delta() { let val = HeaderValue::from_str(&cookie.to_string())?; resp.headers_mut().append(header::SET_COOKIE, val); } } Ok(()) } fn load<S>(&self, req: &mut HttpRequest<S>) -> Option<String> { if let Ok(cookies) = req.cookies() { for cookie in cookies { 
if cookie.name() == self.name { let mut jar = CookieJar::new(); jar.add_original(cookie.clone()); let cookie_opt = jar.private(&self.key).get(&self.name); if let Some(cookie) = cookie_opt { return Some(cookie.value().into()); } } } } None } } pub struct CookieIdentityPolicy(Rc<CookieIdentityInner>); impl CookieIdentityPolicy { pub fn new(key: &[u8]) -> CookieIdentityPolicy { CookieIdentityPolicy(Rc::new(CookieIdentityInner::new(key))) } pub fn path<S: Into<String>>(mut self, value: S) -> CookieIdentityPolicy { Rc::get_mut(&mut self.0).unwrap().path = value.into(); self } pub fn name<S: Into<String>>(mut self, value: S) -> CookieIdentityPolicy { Rc::get_mut(&mut self.0).unwrap().name = value.into(); self } pub fn domain<S: Into<String>>(mut self, value: S) -> CookieIdentityPolicy { Rc::get_mut(&mut self.0).unwrap().domain = Some(value.into()); self } pub fn secure(mut self, value: bool) -> CookieIdentityPolicy { Rc::get_mut(&mut self.0).unwrap().secure = value; self } pub fn max_age(mut self, value: Duration) -> CookieIdentityPolicy { Rc::get_mut(&mut self.0).unwrap().max_age = Some(value); self } } impl<S> IdentityPolicy<S> for CookieIdentityPolicy { type Identity = CookieIdentity; type Future = FutureResult<CookieIdentity, Error>; fn from_request(&self, req: &mut HttpRequest<S>) -> Self::Future { let identity = self.0.load(req); FutOk(CookieIdentity { identity, changed: false, inner: Rc::clone(&self.0), }) } }
use std::rc::Rc; use cookie::{Cookie, CookieJar, Key}; use futures::future::{err as FutErr, ok as FutOk, FutureResult}; use futures::Future; use time::Duration; use error::{Error, Result}; use http::header::{self, HeaderValue}; use httprequest::HttpRequest; use httpresponse::HttpResponse; use middleware::{Middleware, Response, Started}; pub trait RequestIdentity { fn identity(&self) -> Option<&str>; fn remember(&mut self, identity: String); fn forget(&mut self); } impl<S> RequestIdentity for HttpRequest<S> { fn identity(&self) -> Option<&str> { if let Some(id) = self.extensions().get::<IdentityBox>() { return id.0.identity(); } None } fn remember(&mut self, identity: String) { if let Some(id) = self.extensions_mut().get_mut::<IdentityBox>() { return id.0.remember(identity); } } fn forget(&mut self) { if let Some(id) = self.extensions_mut().get_mut::<IdentityBox>() { return id.0.forget(); } } } pub trait Identity: 'static { fn identity(&self) -> Option<&str>; fn remember(&mut self, key: String); fn forget(&mut self); fn write(&mut self, resp: HttpResponse) -> Result<Response>; } pub trait IdentityPolicy<S>: Sized + 'static { type Identity: Identity; type Future: Future<Item = Self::Identity, Error = Error>; fn from_request(&self, request: &mut HttpRequest<S>) -> Self::Future; } pub struct IdentityService<T> { backend: T, } impl<T> IdentityService<T> { pub fn new(backend: T) -> Self { IdentityService { backend } } } struct IdentityBox(Box<Identity>); #[doc(hidden)] unsafe impl Send for IdentityBox {} #[doc(hidden)] unsafe impl Sync for IdentityBox {} impl<S: 'static, T: IdentityPolicy<S>> Middleware<S> for IdentityService<T> { fn start(&self, req: &mut HttpRequest<S>) -> Result<Started> { let mut req = req.clone(); let fut = self.backend .from_request(&mut req) .then(move |res| match res { Ok(id) => { req.extensions_mut().insert(IdentityBox(Box::new(id))); FutOk(None) } Err(err) => FutErr(err), }); Ok(Started::Future(Box::new(fut))) } fn response( &
} #[doc(hidden)] pub struct CookieIdentity { changed: bool, identity: Option<String>, inner: Rc<CookieIdentityInner>, } impl Identity for CookieIdentity { fn identity(&self) -> Option<&str> { self.identity.as_ref().map(|s| s.as_ref()) } fn remember(&mut self, value: String) { self.changed = true; self.identity = Some(value); } fn forget(&mut self) { self.changed = true; self.identity = None; } fn write(&mut self, mut resp: HttpResponse) -> Result<Response> { if self.changed { let _ = self.inner.set_cookie(&mut resp, self.identity.take()); } Ok(Response::Done(resp)) } } struct CookieIdentityInner { key: Key, name: String, path: String, domain: Option<String>, secure: bool, max_age: Option<Duration>, } impl CookieIdentityInner { fn new(key: &[u8]) -> CookieIdentityInner { CookieIdentityInner { key: Key::from_master(key), name: "actix-identity".to_owned(), path: "/".to_owned(), domain: None, secure: true, max_age: None, } } fn set_cookie(&self, resp: &mut HttpResponse, id: Option<String>) -> Result<()> { let some = id.is_some(); { let id = id.unwrap_or_else(String::new); let mut cookie = Cookie::new(self.name.clone(), id); cookie.set_path(self.path.clone()); cookie.set_secure(self.secure); cookie.set_http_only(true); if let Some(ref domain) = self.domain { cookie.set_domain(domain.clone()); } if let Some(max_age) = self.max_age { cookie.set_max_age(max_age); } let mut jar = CookieJar::new(); if some { jar.private(&self.key).add(cookie); } else { jar.add_original(cookie.clone()); jar.private(&self.key).remove(cookie); } for cookie in jar.delta() { let val = HeaderValue::from_str(&cookie.to_string())?; resp.headers_mut().append(header::SET_COOKIE, val); } } Ok(()) } fn load<S>(&self, req: &mut HttpRequest<S>) -> Option<String> { if let Ok(cookies) = req.cookies() { for cookie in cookies { if cookie.name() == self.name { let mut jar = CookieJar::new(); jar.add_original(cookie.clone()); let cookie_opt = jar.private(&self.key).get(&self.name); if let Some(cookie) = 
cookie_opt { return Some(cookie.value().into()); } } } } None } } pub struct CookieIdentityPolicy(Rc<CookieIdentityInner>); impl CookieIdentityPolicy { pub fn new(key: &[u8]) -> CookieIdentityPolicy { CookieIdentityPolicy(Rc::new(CookieIdentityInner::new(key))) } pub fn path<S: Into<String>>(mut self, value: S) -> CookieIdentityPolicy { Rc::get_mut(&mut self.0).unwrap().path = value.into(); self } pub fn name<S: Into<String>>(mut self, value: S) -> CookieIdentityPolicy { Rc::get_mut(&mut self.0).unwrap().name = value.into(); self } pub fn domain<S: Into<String>>(mut self, value: S) -> CookieIdentityPolicy { Rc::get_mut(&mut self.0).unwrap().domain = Some(value.into()); self } pub fn secure(mut self, value: bool) -> CookieIdentityPolicy { Rc::get_mut(&mut self.0).unwrap().secure = value; self } pub fn max_age(mut self, value: Duration) -> CookieIdentityPolicy { Rc::get_mut(&mut self.0).unwrap().max_age = Some(value); self } } impl<S> IdentityPolicy<S> for CookieIdentityPolicy { type Identity = CookieIdentity; type Future = FutureResult<CookieIdentity, Error>; fn from_request(&self, req: &mut HttpRequest<S>) -> Self::Future { let identity = self.0.load(req); FutOk(CookieIdentity { identity, changed: false, inner: Rc::clone(&self.0), }) } }
self, req: &mut HttpRequest<S>, resp: HttpResponse, ) -> Result<Response> { if let Some(mut id) = req.extensions_mut().remove::<IdentityBox>() { id.0.write(resp) } else { Ok(Response::Done(resp)) } }
function_block-function_prefix_line
[ { "content": "/// Do websocket handshake and start actor\n\npub fn start<A, S>(req: HttpRequest<S>, actor: A) -> Result<HttpResponse, Error>\n\nwhere\n\n A: Actor<Context = WebsocketContext<A, S>> + StreamHandler<Message, ProtocolError>,\n\n S: 'static,\n\n{\n\n let mut resp = handshake(&req)?;\n\n ...
Rust
util/fee-estimator/src/estimator.rs
brson/ckb
b9bf40024b8a5acd9b8871dba669c89f38be297d
use crate::tx_confirm_stat::TxConfirmStat; use crate::FeeRate; use ckb_logger::debug; use ckb_types::packed::Byte32; use std::collections::HashMap; pub const MAX_CONFIRM_BLOCKS: usize = 1000; const MIN_BUCKET_FEERATE: f64 = 1000f64; const MAX_BUCKET_FEERATE: f64 = 1e7; const FEE_SPACING: f64 = 1.05f64; const MIN_ESTIMATE_SAMPLES: usize = 20; const MIN_ESTIMATE_CONFIRM_RATE: f64 = 0.85f64; const DEFAULT_DECAY_FACTOR: f64 = 0.993; #[derive(Clone)] struct TxRecord { height: u64, bucket_index: usize, fee_rate: FeeRate, } #[derive(Clone)] pub struct Estimator { best_height: u64, start_height: u64, tx_confirm_stat: TxConfirmStat, tracked_txs: HashMap<Byte32, TxRecord>, } impl Default for Estimator { fn default() -> Self { Self::new() } } impl Estimator { pub fn new() -> Self { let mut buckets = Vec::new(); let mut bucket_fee_boundary = MIN_BUCKET_FEERATE; while bucket_fee_boundary <= MAX_BUCKET_FEERATE { buckets.push(FeeRate::from_u64(bucket_fee_boundary as u64)); bucket_fee_boundary *= FEE_SPACING; } Estimator { best_height: 0, start_height: 0, tx_confirm_stat: TxConfirmStat::new(&buckets, MAX_CONFIRM_BLOCKS, DEFAULT_DECAY_FACTOR), tracked_txs: Default::default(), } } fn process_block_tx(&mut self, height: u64, tx_hash: &Byte32) -> bool { if let Some(tx) = self.drop_tx_inner(tx_hash, false) { let blocks_to_confirm = height.saturating_sub(tx.height) as usize; self.tx_confirm_stat .add_confirmed_tx(blocks_to_confirm, tx.fee_rate); true } else { false } } pub fn process_block(&mut self, height: u64, txs: impl Iterator<Item = Byte32>) { if height <= self.best_height { return; } self.best_height = height; self.tx_confirm_stat.move_track_window(height); self.tx_confirm_stat.decay(); let processed_txs = txs.filter(|tx| self.process_block_tx(height, tx)).count(); if self.start_height == 0 && processed_txs > 0 { self.start_height = self.best_height; debug!("Fee estimator start recording at {}", self.start_height); } } pub fn track_tx(&mut self, tx_hash: Byte32, fee_rate: 
FeeRate, height: u64) { if self.tracked_txs.contains_key(&tx_hash) { return; } if height != self.best_height { return; } if let Some(bucket_index) = self.tx_confirm_stat.add_unconfirmed_tx(height, fee_rate) { self.tracked_txs.insert( tx_hash, TxRecord { height, bucket_index, fee_rate, }, ); } } fn drop_tx_inner(&mut self, tx_hash: &Byte32, count_failure: bool) -> Option<TxRecord> { self.tracked_txs.remove(tx_hash).map(|tx_record| { self.tx_confirm_stat.remove_unconfirmed_tx( tx_record.height, self.best_height, tx_record.bucket_index, count_failure, ); tx_record }) } pub fn drop_tx(&mut self, tx_hash: &Byte32) -> bool { self.drop_tx_inner(tx_hash, true).is_some() } pub fn estimate(&self, expect_confirm_blocks: usize) -> FeeRate { self.tx_confirm_stat.estimate_median( expect_confirm_blocks, MIN_ESTIMATE_SAMPLES, MIN_ESTIMATE_CONFIRM_RATE, ) } }
use crate::tx_confirm_stat::TxConfirmStat; use crate::FeeRate; use ckb_logger::debug; use ckb_types::packed::Byte32; use std::collections::HashMap; pub const MAX_CONFIRM_BLOCKS: usize = 1000; const MIN_BUCKET_FEERATE: f64 = 1000f64; const MAX_BUCKET_FEERATE: f64 = 1e7; const FEE_SPACING: f64 = 1.05f64; const MIN_ESTIMATE_SAMPLES: usize = 20; const MIN_ESTIMATE_CONFIRM_RATE: f64 = 0.85f64; const DEFAULT_DECAY_FACTOR: f64 = 0.993; #[derive(Clone)] struct TxRecord { height: u64, bucket_index: usize, fee_rate: FeeRate, } #[derive(Clone)] pub struct Estimator { best_height: u64, start_height: u64, tx_confirm_stat: TxConfirmStat, tracked_txs: HashMap<Byte32, TxRecord>, } impl Default for Estimator { fn default() -> Self { Self::new() } } impl Estimator { pub fn new() -> Self { let mut buckets = Vec::new(); let mut bucket_fee_boundary = MIN_BUCKET_FEERATE; while bucket_fee_boundary <= MAX_BUCKET_FEERATE { buckets.push(FeeRate::from_u64(bucket_fee_boundary as u64)); bucket_fee_boundary *= FEE_SPACING; } Estimator { best_height: 0, start_height: 0, tx_confirm_stat: TxConfirmStat::new(&buckets, MAX_CONFIRM_BLOCKS, DEFAULT_DECAY_FACTOR), tracked_txs: Default::default(), } } fn process_block_tx(&mut self, height: u64, tx_hash: &Byte32) -> bool { if let Some(tx) = self.drop_tx_inner(tx_hash, false) { let blocks_to_confirm = height.saturating_sub(tx.height) as usize; self.tx_confirm_stat .add_confirmed_tx(blocks_to_confirm, tx.fee_rate); true } else { false } } pub fn process_block(&mut self, height: u64, txs: impl Iterator<Item = Byte32>) { if height <= self.best_height { return; } self.best_height = height; self.tx_confirm_stat.move_track_window(height); self.tx_confirm_stat.decay(); let processed_txs = txs.
_tx(&mut self, tx_hash: Byte32, fee_rate: FeeRate, height: u64) { if self.tracked_txs.contains_key(&tx_hash) { return; } if height != self.best_height { return; } if let Some(bucket_index) = self.tx_confirm_stat.add_unconfirmed_tx(height, fee_rate) { self.tracked_txs.insert( tx_hash, TxRecord { height, bucket_index, fee_rate, }, ); } } fn drop_tx_inner(&mut self, tx_hash: &Byte32, count_failure: bool) -> Option<TxRecord> { self.tracked_txs.remove(tx_hash).map(|tx_record| { self.tx_confirm_stat.remove_unconfirmed_tx( tx_record.height, self.best_height, tx_record.bucket_index, count_failure, ); tx_record }) } pub fn drop_tx(&mut self, tx_hash: &Byte32) -> bool { self.drop_tx_inner(tx_hash, true).is_some() } pub fn estimate(&self, expect_confirm_blocks: usize) -> FeeRate { self.tx_confirm_stat.estimate_median( expect_confirm_blocks, MIN_ESTIMATE_SAMPLES, MIN_ESTIMATE_CONFIRM_RATE, ) } }
filter(|tx| self.process_block_tx(height, tx)).count(); if self.start_height == 0 && processed_txs > 0 { self.start_height = self.best_height; debug!("Fee estimator start recording at {}", self.start_height); } } pub fn track
random
[]
Rust
clap-utils/src/input_parsers.rs
kevzettler/solana
ce4304cc9a087f1f3defa29aac16e5751f9657bf
use crate::keypair::{ keypair_from_seed_phrase, pubkey_from_path, resolve_signer_from_path, signer_from_path, ASK_KEYWORD, SKIP_SEED_PHRASE_VALIDATION_ARG, }; use chrono::DateTime; use clap::ArgMatches; use solana_remote_wallet::remote_wallet::RemoteWalletManager; use solana_sdk::{ clock::UnixTimestamp, commitment_config::CommitmentConfig, genesis_config::ClusterType, native_token::sol_to_lamports, pubkey::Pubkey, signature::{read_keypair_file, Keypair, Signature, Signer}, }; use std::{str::FromStr, sync::Arc}; pub fn values_of<T>(matches: &ArgMatches<'_>, name: &str) -> Option<Vec<T>> where T: std::str::FromStr, <T as std::str::FromStr>::Err: std::fmt::Debug, { matches .values_of(name) .map(|xs| xs.map(|x| x.parse::<T>().unwrap()).collect()) } pub fn value_of<T>(matches: &ArgMatches<'_>, name: &str) -> Option<T> where T: std::str::FromStr, <T as std::str::FromStr>::Err: std::fmt::Debug, { if let Some(value) = matches.value_of(name) { value.parse::<T>().ok() } else { None } } pub fn unix_timestamp_from_rfc3339_datetime( matches: &ArgMatches<'_>, name: &str, ) -> Option<UnixTimestamp> { matches.value_of(name).and_then(|value| { DateTime::parse_from_rfc3339(value) .ok() .map(|date_time| date_time.timestamp()) }) } pub fn keypair_of(matches: &ArgMatches<'_>, name: &str) -> Option<Keypair> { if let Some(value) = matches.value_of(name) { if value == ASK_KEYWORD { let skip_validation = matches.is_present(SKIP_SEED_PHRASE_VALIDATION_ARG.name); keypair_from_seed_phrase(name, skip_validation, true).ok() } else { read_keypair_file(value).ok() } } else { None } } pub fn keypairs_of(matches: &ArgMatches<'_>, name: &str) -> Option<Vec<Keypair>> { matches.values_of(name).map(|values| { values .filter_map(|value| { if value == ASK_KEYWORD { let skip_validation = matches.is_present(SKIP_SEED_PHRASE_VALIDATION_ARG.name); keypair_from_seed_phrase(name, skip_validation, true).ok() } else { read_keypair_file(value).ok() } }) .collect() }) } pub fn pubkey_of(matches: &ArgMatches<'_>, 
name: &str) -> Option<Pubkey> { value_of(matches, name).or_else(|| keypair_of(matches, name).map(|keypair| keypair.pubkey())) } pub fn pubkeys_of(matches: &ArgMatches<'_>, name: &str) -> Option<Vec<Pubkey>> { matches.values_of(name).map(|values| { values .map(|value| { value.parse::<Pubkey>().unwrap_or_else(|_| { read_keypair_file(value) .expect("read_keypair_file failed") .pubkey() }) }) .collect() }) } pub fn pubkeys_sigs_of(matches: &ArgMatches<'_>, name: &str) -> Option<Vec<(Pubkey, Signature)>> { matches.values_of(name).map(|values| { values .map(|pubkey_signer_string| { let mut signer = pubkey_signer_string.split('='); let key = Pubkey::from_str(signer.next().unwrap()).unwrap(); let sig = Signature::from_str(signer.next().unwrap()).unwrap(); (key, sig) }) .collect() }) } #[allow(clippy::type_complexity)] pub fn signer_of( matches: &ArgMatches<'_>, name: &str, wallet_manager: &mut Option<Arc<RemoteWalletManager>>, ) -> Result<(Option<Box<dyn Signer>>, Option<Pubkey>), Box<dyn std::error::Error>> { if let Some(location) = matches.value_of(name) { let signer = signer_from_path(matches, location, name, wallet_manager)?; let signer_pubkey = signer.pubkey(); Ok((Some(signer), Some(signer_pubkey))) } else { Ok((None, None)) } } pub fn pubkey_of_signer( matches: &ArgMatches<'_>, name: &str, wallet_manager: &mut Option<Arc<RemoteWalletManager>>, ) -> Result<Option<Pubkey>, Box<dyn std::error::Error>> { if let Some(location) = matches.value_of(name) { Ok(Some(pubkey_from_path( matches, location, name, wallet_manager, )?)) } else { Ok(None) } } pub fn pubkeys_of_multiple_signers( matches: &ArgMatches<'_>, name: &str, wallet_manager: &mut Option<Arc<RemoteWalletManager>>, ) -> Result<Option<Vec<Pubkey>>, Box<dyn std::error::Error>> { if let Some(pubkey_matches) = matches.values_of(name) { let mut pubkeys: Vec<Pubkey> = vec![]; for signer in pubkey_matches { pubkeys.push(pubkey_from_path(matches, signer, name, wallet_manager)?); } Ok(Some(pubkeys)) } else { Ok(None) } } 
pub fn resolve_signer( matches: &ArgMatches<'_>, name: &str, wallet_manager: &mut Option<Arc<RemoteWalletManager>>, ) -> Result<Option<String>, Box<dyn std::error::Error>> { Ok(resolve_signer_from_path( matches, matches.value_of(name).unwrap(), name, wallet_manager, )?) } pub fn lamports_of_sol(matches: &ArgMatches<'_>, name: &str) -> Option<u64> { value_of(matches, name).map(sol_to_lamports) } pub fn cluster_type_of(matches: &ArgMatches<'_>, name: &str) -> Option<ClusterType> { value_of(matches, name) } pub fn commitment_of(matches: &ArgMatches<'_>, name: &str) -> Option<CommitmentConfig> { matches.value_of(name).map(|value| match value { "max" => CommitmentConfig::max(), "recent" => CommitmentConfig::recent(), "root" => CommitmentConfig::root(), "single" => CommitmentConfig::single(), "singleGossip" => CommitmentConfig::single_gossip(), _ => CommitmentConfig::default(), }) } #[cfg(test)] mod tests { use super::*; use clap::{App, Arg}; use solana_sdk::signature::write_keypair_file; use std::fs; fn app<'ab, 'v>() -> App<'ab, 'v> { App::new("test") .arg( Arg::with_name("multiple") .long("multiple") .takes_value(true) .multiple(true), ) .arg(Arg::with_name("single").takes_value(true).long("single")) .arg(Arg::with_name("unit").takes_value(true).long("unit")) } fn tmp_file_path(name: &str, pubkey: &Pubkey) -> String { use std::env; let out_dir = env::var("FARF_DIR").unwrap_or_else(|_| "farf".to_string()); format!("{}/tmp/{}-{}", out_dir, name, pubkey.to_string()) } #[test] fn test_values_of() { let matches = app() .clone() .get_matches_from(vec!["test", "--multiple", "50", "--multiple", "39"]); assert_eq!(values_of(&matches, "multiple"), Some(vec![50, 39])); assert_eq!(values_of::<u64>(&matches, "single"), None); let pubkey0 = solana_sdk::pubkey::new_rand(); let pubkey1 = solana_sdk::pubkey::new_rand(); let matches = app().clone().get_matches_from(vec![ "test", "--multiple", &pubkey0.to_string(), "--multiple", &pubkey1.to_string(), ]); assert_eq!( values_of(&matches, 
"multiple"), Some(vec![pubkey0, pubkey1]) ); } #[test] fn test_value_of() { let matches = app() .clone() .get_matches_from(vec!["test", "--single", "50"]); assert_eq!(value_of(&matches, "single"), Some(50)); assert_eq!(value_of::<u64>(&matches, "multiple"), None); let pubkey = solana_sdk::pubkey::new_rand(); let matches = app() .clone() .get_matches_from(vec!["test", "--single", &pubkey.to_string()]); assert_eq!(value_of(&matches, "single"), Some(pubkey)); } #[test] fn test_keypair_of() { let keypair = Keypair::new(); let outfile = tmp_file_path("test_keypair_of.json", &keypair.pubkey()); let _ = write_keypair_file(&keypair, &outfile).unwrap(); let matches = app() .clone() .get_matches_from(vec!["test", "--single", &outfile]); assert_eq!( keypair_of(&matches, "single").unwrap().pubkey(), keypair.pubkey() ); assert!(keypair_of(&matches, "multiple").is_none()); let matches = app() .clone() .get_matches_from(vec!["test", "--single", "random_keypair_file.json"]); assert!(keypair_of(&matches, "single").is_none()); fs::remove_file(&outfile).unwrap(); } #[test] fn test_pubkey_of() { let keypair = Keypair::new(); let outfile = tmp_file_path("test_pubkey_of.json", &keypair.pubkey()); let _ = write_keypair_file(&keypair, &outfile).unwrap(); let matches = app() .clone() .get_matches_from(vec!["test", "--single", &outfile]); assert_eq!(pubkey_of(&matches, "single"), Some(keypair.pubkey())); assert_eq!(pubkey_of(&matches, "multiple"), None); let matches = app() .clone() .get_matches_from(vec!["test", "--single", &keypair.pubkey().to_string()]); assert_eq!(pubkey_of(&matches, "single"), Some(keypair.pubkey())); let matches = app() .clone() .get_matches_from(vec!["test", "--single", "random_keypair_file.json"]); assert_eq!(pubkey_of(&matches, "single"), None); fs::remove_file(&outfile).unwrap(); } #[test] fn test_pubkeys_of() { let keypair = Keypair::new(); let outfile = tmp_file_path("test_pubkeys_of.json", &keypair.pubkey()); let _ = write_keypair_file(&keypair, 
&outfile).unwrap(); let matches = app().clone().get_matches_from(vec![ "test", "--multiple", &keypair.pubkey().to_string(), "--multiple", &outfile, ]); assert_eq!( pubkeys_of(&matches, "multiple"), Some(vec![keypair.pubkey(), keypair.pubkey()]) ); fs::remove_file(&outfile).unwrap(); } #[test] fn test_pubkeys_sigs_of() { let key1 = solana_sdk::pubkey::new_rand(); let key2 = solana_sdk::pubkey::new_rand(); let sig1 = Keypair::new().sign_message(&[0u8]); let sig2 = Keypair::new().sign_message(&[1u8]); let signer1 = format!("{}={}", key1, sig1); let signer2 = format!("{}={}", key2, sig2); let matches = app().clone().get_matches_from(vec![ "test", "--multiple", &signer1, "--multiple", &signer2, ]); assert_eq!( pubkeys_sigs_of(&matches, "multiple"), Some(vec![(key1, sig1), (key2, sig2)]) ); } #[test] fn test_lamports_of_sol() { let matches = app() .clone() .get_matches_from(vec!["test", "--single", "50"]); assert_eq!(lamports_of_sol(&matches, "single"), Some(50_000_000_000)); assert_eq!(lamports_of_sol(&matches, "multiple"), None); let matches = app() .clone() .get_matches_from(vec!["test", "--single", "1.5"]); assert_eq!(lamports_of_sol(&matches, "single"), Some(1_500_000_000)); assert_eq!(lamports_of_sol(&matches, "multiple"), None); let matches = app() .clone() .get_matches_from(vec!["test", "--single", "0.03"]); assert_eq!(lamports_of_sol(&matches, "single"), Some(30_000_000)); } }
use crate::keypair::{ keypair_from_seed_phrase, pubkey_from_path, resolve_signer_from_path, signer_from_path, ASK_KEYWORD, SKIP_SEED_PHRASE_VALIDATION_ARG, }; use chrono::DateTime; use clap::ArgMatches; use solana_remote_wallet::remote_wallet::RemoteWalletManager; use solana_sdk::{ clock::UnixTimestamp, commitment_config::CommitmentConfig, genesis_config::ClusterType, native_token::sol_to_lamports, pubkey::Pubkey, signature::{read_keypair_file, Keypair, Signature, Signer}, }; use std::{str::FromStr, sync::Arc}; pub fn values_of<T>(matches: &ArgMatches<'_>, name: &str) -> Option<Vec<T>> where T: std::str::FromStr, <T as std::str::FromStr>::Err: std::fmt::Debug, { matches .values_of(name) .map(|xs| xs.map(|x| x.parse::<T>().unwrap()).collect()) } pub fn value_of<T>(matches: &ArgMatches<'_>, name: &str) -> Option<T> where T: std::str::FromStr, <T as std::str::FromStr>::Err: std::fmt::Debug, { if let Some(value) = matches.value_of(name) { value.parse::<T>().ok() } else { None } }
pub fn keypair_of(matches: &ArgMatches<'_>, name: &str) -> Option<Keypair> { if let Some(value) = matches.value_of(name) { if value == ASK_KEYWORD { let skip_validation = matches.is_present(SKIP_SEED_PHRASE_VALIDATION_ARG.name); keypair_from_seed_phrase(name, skip_validation, true).ok() } else { read_keypair_file(value).ok() } } else { None } } pub fn keypairs_of(matches: &ArgMatches<'_>, name: &str) -> Option<Vec<Keypair>> { matches.values_of(name).map(|values| { values .filter_map(|value| { if value == ASK_KEYWORD { let skip_validation = matches.is_present(SKIP_SEED_PHRASE_VALIDATION_ARG.name); keypair_from_seed_phrase(name, skip_validation, true).ok() } else { read_keypair_file(value).ok() } }) .collect() }) } pub fn pubkey_of(matches: &ArgMatches<'_>, name: &str) -> Option<Pubkey> { value_of(matches, name).or_else(|| keypair_of(matches, name).map(|keypair| keypair.pubkey())) } pub fn pubkeys_of(matches: &ArgMatches<'_>, name: &str) -> Option<Vec<Pubkey>> { matches.values_of(name).map(|values| { values .map(|value| { value.parse::<Pubkey>().unwrap_or_else(|_| { read_keypair_file(value) .expect("read_keypair_file failed") .pubkey() }) }) .collect() }) } pub fn pubkeys_sigs_of(matches: &ArgMatches<'_>, name: &str) -> Option<Vec<(Pubkey, Signature)>> { matches.values_of(name).map(|values| { values .map(|pubkey_signer_string| { let mut signer = pubkey_signer_string.split('='); let key = Pubkey::from_str(signer.next().unwrap()).unwrap(); let sig = Signature::from_str(signer.next().unwrap()).unwrap(); (key, sig) }) .collect() }) } #[allow(clippy::type_complexity)] pub fn signer_of( matches: &ArgMatches<'_>, name: &str, wallet_manager: &mut Option<Arc<RemoteWalletManager>>, ) -> Result<(Option<Box<dyn Signer>>, Option<Pubkey>), Box<dyn std::error::Error>> { if let Some(location) = matches.value_of(name) { let signer = signer_from_path(matches, location, name, wallet_manager)?; let signer_pubkey = signer.pubkey(); Ok((Some(signer), Some(signer_pubkey))) } else { 
Ok((None, None)) } } pub fn pubkey_of_signer( matches: &ArgMatches<'_>, name: &str, wallet_manager: &mut Option<Arc<RemoteWalletManager>>, ) -> Result<Option<Pubkey>, Box<dyn std::error::Error>> { if let Some(location) = matches.value_of(name) { Ok(Some(pubkey_from_path( matches, location, name, wallet_manager, )?)) } else { Ok(None) } } pub fn pubkeys_of_multiple_signers( matches: &ArgMatches<'_>, name: &str, wallet_manager: &mut Option<Arc<RemoteWalletManager>>, ) -> Result<Option<Vec<Pubkey>>, Box<dyn std::error::Error>> { if let Some(pubkey_matches) = matches.values_of(name) { let mut pubkeys: Vec<Pubkey> = vec![]; for signer in pubkey_matches { pubkeys.push(pubkey_from_path(matches, signer, name, wallet_manager)?); } Ok(Some(pubkeys)) } else { Ok(None) } } pub fn resolve_signer( matches: &ArgMatches<'_>, name: &str, wallet_manager: &mut Option<Arc<RemoteWalletManager>>, ) -> Result<Option<String>, Box<dyn std::error::Error>> { Ok(resolve_signer_from_path( matches, matches.value_of(name).unwrap(), name, wallet_manager, )?) 
} pub fn lamports_of_sol(matches: &ArgMatches<'_>, name: &str) -> Option<u64> { value_of(matches, name).map(sol_to_lamports) } pub fn cluster_type_of(matches: &ArgMatches<'_>, name: &str) -> Option<ClusterType> { value_of(matches, name) } pub fn commitment_of(matches: &ArgMatches<'_>, name: &str) -> Option<CommitmentConfig> { matches.value_of(name).map(|value| match value { "max" => CommitmentConfig::max(), "recent" => CommitmentConfig::recent(), "root" => CommitmentConfig::root(), "single" => CommitmentConfig::single(), "singleGossip" => CommitmentConfig::single_gossip(), _ => CommitmentConfig::default(), }) } #[cfg(test)] mod tests { use super::*; use clap::{App, Arg}; use solana_sdk::signature::write_keypair_file; use std::fs; fn app<'ab, 'v>() -> App<'ab, 'v> { App::new("test") .arg( Arg::with_name("multiple") .long("multiple") .takes_value(true) .multiple(true), ) .arg(Arg::with_name("single").takes_value(true).long("single")) .arg(Arg::with_name("unit").takes_value(true).long("unit")) } fn tmp_file_path(name: &str, pubkey: &Pubkey) -> String { use std::env; let out_dir = env::var("FARF_DIR").unwrap_or_else(|_| "farf".to_string()); format!("{}/tmp/{}-{}", out_dir, name, pubkey.to_string()) } #[test] fn test_values_of() { let matches = app() .clone() .get_matches_from(vec!["test", "--multiple", "50", "--multiple", "39"]); assert_eq!(values_of(&matches, "multiple"), Some(vec![50, 39])); assert_eq!(values_of::<u64>(&matches, "single"), None); let pubkey0 = solana_sdk::pubkey::new_rand(); let pubkey1 = solana_sdk::pubkey::new_rand(); let matches = app().clone().get_matches_from(vec![ "test", "--multiple", &pubkey0.to_string(), "--multiple", &pubkey1.to_string(), ]); assert_eq!( values_of(&matches, "multiple"), Some(vec![pubkey0, pubkey1]) ); } #[test] fn test_value_of() { let matches = app() .clone() .get_matches_from(vec!["test", "--single", "50"]); assert_eq!(value_of(&matches, "single"), Some(50)); assert_eq!(value_of::<u64>(&matches, "multiple"), None); let 
pubkey = solana_sdk::pubkey::new_rand(); let matches = app() .clone() .get_matches_from(vec!["test", "--single", &pubkey.to_string()]); assert_eq!(value_of(&matches, "single"), Some(pubkey)); } #[test] fn test_keypair_of() { let keypair = Keypair::new(); let outfile = tmp_file_path("test_keypair_of.json", &keypair.pubkey()); let _ = write_keypair_file(&keypair, &outfile).unwrap(); let matches = app() .clone() .get_matches_from(vec!["test", "--single", &outfile]); assert_eq!( keypair_of(&matches, "single").unwrap().pubkey(), keypair.pubkey() ); assert!(keypair_of(&matches, "multiple").is_none()); let matches = app() .clone() .get_matches_from(vec!["test", "--single", "random_keypair_file.json"]); assert!(keypair_of(&matches, "single").is_none()); fs::remove_file(&outfile).unwrap(); } #[test] fn test_pubkey_of() { let keypair = Keypair::new(); let outfile = tmp_file_path("test_pubkey_of.json", &keypair.pubkey()); let _ = write_keypair_file(&keypair, &outfile).unwrap(); let matches = app() .clone() .get_matches_from(vec!["test", "--single", &outfile]); assert_eq!(pubkey_of(&matches, "single"), Some(keypair.pubkey())); assert_eq!(pubkey_of(&matches, "multiple"), None); let matches = app() .clone() .get_matches_from(vec!["test", "--single", &keypair.pubkey().to_string()]); assert_eq!(pubkey_of(&matches, "single"), Some(keypair.pubkey())); let matches = app() .clone() .get_matches_from(vec!["test", "--single", "random_keypair_file.json"]); assert_eq!(pubkey_of(&matches, "single"), None); fs::remove_file(&outfile).unwrap(); } #[test] fn test_pubkeys_of() { let keypair = Keypair::new(); let outfile = tmp_file_path("test_pubkeys_of.json", &keypair.pubkey()); let _ = write_keypair_file(&keypair, &outfile).unwrap(); let matches = app().clone().get_matches_from(vec![ "test", "--multiple", &keypair.pubkey().to_string(), "--multiple", &outfile, ]); assert_eq!( pubkeys_of(&matches, "multiple"), Some(vec![keypair.pubkey(), keypair.pubkey()]) ); fs::remove_file(&outfile).unwrap(); 
} #[test] fn test_pubkeys_sigs_of() { let key1 = solana_sdk::pubkey::new_rand(); let key2 = solana_sdk::pubkey::new_rand(); let sig1 = Keypair::new().sign_message(&[0u8]); let sig2 = Keypair::new().sign_message(&[1u8]); let signer1 = format!("{}={}", key1, sig1); let signer2 = format!("{}={}", key2, sig2); let matches = app().clone().get_matches_from(vec![ "test", "--multiple", &signer1, "--multiple", &signer2, ]); assert_eq!( pubkeys_sigs_of(&matches, "multiple"), Some(vec![(key1, sig1), (key2, sig2)]) ); } #[test] fn test_lamports_of_sol() { let matches = app() .clone() .get_matches_from(vec!["test", "--single", "50"]); assert_eq!(lamports_of_sol(&matches, "single"), Some(50_000_000_000)); assert_eq!(lamports_of_sol(&matches, "multiple"), None); let matches = app() .clone() .get_matches_from(vec!["test", "--single", "1.5"]); assert_eq!(lamports_of_sol(&matches, "single"), Some(1_500_000_000)); assert_eq!(lamports_of_sol(&matches, "multiple"), None); let matches = app() .clone() .get_matches_from(vec!["test", "--single", "0.03"]); assert_eq!(lamports_of_sol(&matches, "single"), Some(30_000_000)); } }
pub fn unix_timestamp_from_rfc3339_datetime( matches: &ArgMatches<'_>, name: &str, ) -> Option<UnixTimestamp> { matches.value_of(name).and_then(|value| { DateTime::parse_from_rfc3339(value) .ok() .map(|date_time| date_time.timestamp()) }) }
function_block-full_function
[ { "content": "// Pretty print a \"name value\"\n\npub fn println_name_value(name: &str, value: &str) {\n\n let styled_value = if value == \"\" {\n\n style(\"(not set)\").italic()\n\n } else {\n\n style(value)\n\n };\n\n println!(\"{} {}\", style(name).bold(), styled_value);\n\n}\n\n\n"...
Rust
src/material.rs
PicoJr/rray
b1cb3c30a1d31a3b4fe96316266bf2f2bf7350d3
use crate::color::RRgb; use crate::ray::{random_in_unit_sphere, Ray, RayHit, RT}; use nalgebra::Vector3; use rand::distributions::Uniform; use rand::prelude::ThreadRng; use rand::Rng; #[derive(Clone)] pub(crate) enum Material { Dieletric(Dieletric), Lambertian(Lambertian), Metal(Metal), Light(Light), } impl Scatterer for Material { fn scatter( &self, ray: &Ray<f32>, ray_hit: &RayHit, thread_rng: &mut ThreadRng, ) -> Option<(RRgb, Ray<f32>)> { match self { Material::Dieletric(dieletric) => dieletric.scatter(ray, ray_hit, thread_rng), Material::Lambertian(lambertian) => lambertian.scatter(ray, ray_hit, thread_rng), Material::Metal(metal) => metal.scatter(ray, ray_hit, thread_rng), Material::Light(_) => None, } } } impl Emitter for Material { fn emit(&self) -> RRgb { match self { Material::Dieletric(_) => RRgb::new(0., 0., 0.), Material::Lambertian(_) => RRgb::new(0., 0., 0.), Material::Metal(_) => RRgb::new(0., 0., 0.), Material::Light(light) => light.emit(), } } } pub(crate) trait Scatterer { fn scatter( &self, ray: &Ray<RT>, ray_hit: &RayHit, thread_rng: &mut ThreadRng, ) -> Option<(RRgb, Ray<RT>)>; } pub(crate) trait Emitter { fn emit(&self) -> RRgb; } #[derive(Clone)] pub(crate) struct Light { pub emitted: RRgb, } impl Emitter for Light { fn emit(&self) -> RRgb { self.emitted.clone() } } #[derive(Clone)] pub(crate) struct Lambertian { pub albedo: RRgb, } impl Scatterer for Lambertian { fn scatter( &self, _ray: &Ray<f32>, ray_hit: &RayHit, thread_rng: &mut ThreadRng, ) -> Option<(RRgb, Ray<f32>)> { let scatter_direction = ray_hit.normal + random_in_unit_sphere(thread_rng); let scattered = Ray::new(ray_hit.point, scatter_direction); Some((self.albedo.clone(), scattered)) } } #[derive(Clone)] pub(crate) struct Metal { pub albedo: RRgb, } fn reflect(v: &Vector3<RT>, normal: &Vector3<RT>) -> Vector3<RT> { v - normal.scale((2. 
as RT) * v.dot(normal)) } impl Scatterer for Metal { fn scatter( &self, ray: &Ray<f32>, ray_hit: &RayHit, _thread_rng: &mut ThreadRng, ) -> Option<(RRgb, Ray<f32>)> { let reflected = reflect(&ray.direction().normalize(), &ray_hit.normal); let scattered = Ray::new(ray_hit.point, reflected); if scattered.direction().dot(&ray_hit.normal) > (0. as RT) { Some((self.albedo.clone(), scattered)) } else { None } } } fn refract(uv: &Vector3<RT>, normal: &Vector3<RT>, etai_over_eta: RT) -> Vector3<RT> { let cos_theta = -uv.dot(normal); let r_out_perp = etai_over_eta * (uv + normal.scale(cos_theta)); let r_out_parallel = normal.scale(-(1.0 as RT - r_out_perp.norm_squared()).abs().sqrt()); r_out_perp + r_out_parallel } #[derive(Clone)] pub(crate) struct Dieletric { pub refraction_index: f64, } fn schlick(cosine: f64, refraction_index: f64) -> f64 { let r0 = (1f64 - refraction_index) / (1f64 + refraction_index); let r0 = r0 * r0; r0 + (1f64 - r0) * (1f64 - cosine).powf(5f64) } impl Scatterer for Dieletric { fn scatter( &self, ray: &Ray<f32>, ray_hit: &RayHit, thread_rng: &mut ThreadRng, ) -> Option<(RRgb, Ray<f32>)> { let attenuation = RRgb::new(1f64, 1f64, 1f64); let etai_over_etat = if ray_hit.front_face { 1f64 / self.refraction_index } else { self.refraction_index }; let unit_direction = ray.direction().normalize(); let cos_theta = f64::min(-unit_direction.dot(&ray_hit.normal) as f64, 1f64); let sin_theta = (1f64 - cos_theta * cos_theta).sqrt(); let reflected_probability = schlick(cos_theta, etai_over_etat); let side = Uniform::new(0., 1.); let randomly_reflected = thread_rng.sample(side) < reflected_probability; let scattered = if randomly_reflected || etai_over_etat * sin_theta > 1f64 { reflect(&unit_direction, &ray_hit.normal) } else { refract(&unit_direction, &ray_hit.normal, etai_over_etat as f32) }; Some((attenuation, Ray::new(ray_hit.point, scattered))) } }
use crate::color::RRgb; use crate::ray::{random_in_unit_sphere, Ray, RayHit, RT}; use nalgebra::Vector3; use rand::distributions::Uniform; use rand::prelude::ThreadRng; use rand::Rng; #[derive(Clone)] pub(crate) enum Material { Dieletric(Dieletric), Lambertian(Lambertian), Metal(Metal), Light(Light), } impl Scatterer for Material { fn scatter( &self, ray: &Ray<f32>,
} impl Emitter for Material { fn emit(&self) -> RRgb { match self { Material::Dieletric(_) => RRgb::new(0., 0., 0.), Material::Lambertian(_) => RRgb::new(0., 0., 0.), Material::Metal(_) => RRgb::new(0., 0., 0.), Material::Light(light) => light.emit(), } } } pub(crate) trait Scatterer { fn scatter( &self, ray: &Ray<RT>, ray_hit: &RayHit, thread_rng: &mut ThreadRng, ) -> Option<(RRgb, Ray<RT>)>; } pub(crate) trait Emitter { fn emit(&self) -> RRgb; } #[derive(Clone)] pub(crate) struct Light { pub emitted: RRgb, } impl Emitter for Light { fn emit(&self) -> RRgb { self.emitted.clone() } } #[derive(Clone)] pub(crate) struct Lambertian { pub albedo: RRgb, } impl Scatterer for Lambertian { fn scatter( &self, _ray: &Ray<f32>, ray_hit: &RayHit, thread_rng: &mut ThreadRng, ) -> Option<(RRgb, Ray<f32>)> { let scatter_direction = ray_hit.normal + random_in_unit_sphere(thread_rng); let scattered = Ray::new(ray_hit.point, scatter_direction); Some((self.albedo.clone(), scattered)) } } #[derive(Clone)] pub(crate) struct Metal { pub albedo: RRgb, } fn reflect(v: &Vector3<RT>, normal: &Vector3<RT>) -> Vector3<RT> { v - normal.scale((2. as RT) * v.dot(normal)) } impl Scatterer for Metal { fn scatter( &self, ray: &Ray<f32>, ray_hit: &RayHit, _thread_rng: &mut ThreadRng, ) -> Option<(RRgb, Ray<f32>)> { let reflected = reflect(&ray.direction().normalize(), &ray_hit.normal); let scattered = Ray::new(ray_hit.point, reflected); if scattered.direction().dot(&ray_hit.normal) > (0. 
as RT) { Some((self.albedo.clone(), scattered)) } else { None } } } fn refract(uv: &Vector3<RT>, normal: &Vector3<RT>, etai_over_eta: RT) -> Vector3<RT> { let cos_theta = -uv.dot(normal); let r_out_perp = etai_over_eta * (uv + normal.scale(cos_theta)); let r_out_parallel = normal.scale(-(1.0 as RT - r_out_perp.norm_squared()).abs().sqrt()); r_out_perp + r_out_parallel } #[derive(Clone)] pub(crate) struct Dieletric { pub refraction_index: f64, } fn schlick(cosine: f64, refraction_index: f64) -> f64 { let r0 = (1f64 - refraction_index) / (1f64 + refraction_index); let r0 = r0 * r0; r0 + (1f64 - r0) * (1f64 - cosine).powf(5f64) } impl Scatterer for Dieletric { fn scatter( &self, ray: &Ray<f32>, ray_hit: &RayHit, thread_rng: &mut ThreadRng, ) -> Option<(RRgb, Ray<f32>)> { let attenuation = RRgb::new(1f64, 1f64, 1f64); let etai_over_etat = if ray_hit.front_face { 1f64 / self.refraction_index } else { self.refraction_index }; let unit_direction = ray.direction().normalize(); let cos_theta = f64::min(-unit_direction.dot(&ray_hit.normal) as f64, 1f64); let sin_theta = (1f64 - cos_theta * cos_theta).sqrt(); let reflected_probability = schlick(cos_theta, etai_over_etat); let side = Uniform::new(0., 1.); let randomly_reflected = thread_rng.sample(side) < reflected_probability; let scattered = if randomly_reflected || etai_over_etat * sin_theta > 1f64 { reflect(&unit_direction, &ray_hit.normal) } else { refract(&unit_direction, &ray_hit.normal, etai_over_etat as f32) }; Some((attenuation, Ray::new(ray_hit.point, scattered))) } }
ray_hit: &RayHit, thread_rng: &mut ThreadRng, ) -> Option<(RRgb, Ray<f32>)> { match self { Material::Dieletric(dieletric) => dieletric.scatter(ray, ray_hit, thread_rng), Material::Lambertian(lambertian) => lambertian.scatter(ray, ray_hit, thread_rng), Material::Metal(metal) => metal.scatter(ray, ray_hit, thread_rng), Material::Light(_) => None, } }
function_block-function_prefix_line
[ { "content": "fn bvh_direction(v: Vector3<RT>) -> bvh::nalgebra::Vector3<RT> {\n\n bvh::nalgebra::Vector3::new(v.x, v.y, v.z)\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub(crate) struct Ray<T: Scalar> {\n\n origin: Point3<T>,\n\n direction: Vector3<T>,\n\n}\n\n\n\nimpl Ray<RT> {\n\n pub(crate) fn new(o...
Rust
serialization-tests/tests/serialization.rs
saona-raimundo/petgraph
9ff688872b467d3e1b5adef19f5c52f519d3279c
extern crate petgraph; #[macro_use] extern crate quickcheck; extern crate bincode; extern crate itertools; extern crate serde_json; #[macro_use] extern crate defmac; use std::collections::HashSet; use std::fmt::Debug; use std::iter::FromIterator; use itertools::assert_equal; use itertools::{repeat_n, Itertools}; use petgraph::graph::{edge_index, node_index, IndexType}; use petgraph::prelude::*; use petgraph::visit::EdgeRef; use petgraph::visit::IntoEdgeReferences; use petgraph::visit::NodeIndexable; use petgraph::EdgeType; pub fn assert_graph_eq<N, N2, E, Ty, Ix>(g: &Graph<N, E, Ty, Ix>, h: &Graph<N2, E, Ty, Ix>) where N: PartialEq<N2> + Debug, N2: PartialEq<N2> + Debug, E: PartialEq + Debug, Ty: EdgeType, Ix: IndexType, { assert_eq!(g.node_count(), h.node_count()); assert_eq!(g.edge_count(), h.edge_count()); assert_equal( g.raw_nodes().iter().map(|n| &n.weight), h.raw_nodes().iter().map(|n| &n.weight), ); assert_equal( g.raw_edges().iter().map(|n| &n.weight), h.raw_edges().iter().map(|n| &n.weight), ); for e1 in g.edge_references() { let (a2, b2) = h.edge_endpoints(e1.id()).unwrap(); assert_eq!(e1.source(), a2); assert_eq!(e1.target(), b2); } for index in g.node_indices() { let outgoing1 = <HashSet<_>>::from_iter(g.neighbors(index)); let outgoing2 = <HashSet<_>>::from_iter(h.neighbors(index)); assert_eq!(outgoing1, outgoing2); let incoming1 = <HashSet<_>>::from_iter(g.neighbors_directed(index, Incoming)); let incoming2 = <HashSet<_>>::from_iter(h.neighbors_directed(index, Incoming)); assert_eq!(incoming1, incoming2); } } pub fn assert_stable_graph_eq<N, E>(g: &StableGraph<N, E>, h: &StableGraph<N, E>) where N: PartialEq + Debug, E: PartialEq + Debug, { assert_eq!(g.node_count(), h.node_count()); assert_eq!(g.edge_count(), h.edge_count()); assert_equal( (0..g.node_bound()).map(|i| g.node_weight(node_index(i))), (0..h.node_bound()).map(|i| h.node_weight(node_index(i))), ); let last_edge_g = g.edge_references().next_back(); let last_edge_h = 
h.edge_references().next_back(); assert_eq!(last_edge_g.is_some(), last_edge_h.is_some()); if let (Some(lg), Some(lh)) = (last_edge_g, last_edge_h) { let lgi = lg.id().index(); let lhi = lh.id().index(); assert_equal( (0..lgi).map(|i| g.edge_weight(edge_index(i))), (0..lhi).map(|i| h.edge_weight(edge_index(i))), ); } for e1 in g.edge_references() { let (a2, b2) = h.edge_endpoints(e1.id()).unwrap(); assert_eq!(e1.source(), a2); assert_eq!(e1.target(), b2); } for index in g.node_indices() { let outgoing1 = <HashSet<_>>::from_iter(g.neighbors(index)); let outgoing2 = <HashSet<_>>::from_iter(h.neighbors(index)); assert_eq!(outgoing1, outgoing2); let incoming1 = <HashSet<_>>::from_iter(g.neighbors_directed(index, Incoming)); let incoming2 = <HashSet<_>>::from_iter(h.neighbors_directed(index, Incoming)); assert_eq!(incoming1, incoming2); } } fn make_graph<Ty, Ix>() -> Graph<&'static str, i32, Ty, Ix> where Ty: EdgeType, Ix: IndexType, { let mut g = Graph::default(); let a = g.add_node("A"); let b = g.add_node("B"); let c = g.add_node("C"); let d = g.add_node("D"); let e = g.add_node("E"); let f = g.add_node("F"); g.extend_with_edges(&[ (a, b, 7), (c, a, 9), (a, d, 14), (b, c, 10), (d, c, 2), (d, e, 9), (b, f, 15), (c, f, 11), (e, f, 6), ]); g.remove_node(d); g } fn make_stable_graph<Ty, Ix>() -> StableGraph<String, i32, Ty, Ix> where Ty: EdgeType, Ix: IndexType, { let mut g = StableGraph::default(); let indices: Vec<_> = (0..1024).map(|i| g.add_node(format!("{}", i))).collect(); for i in 1..256 { g.extend_with_edges((0..1024).map(|j| (indices[j], indices[(j + i) % 1024], i as i32))); } for i in (0..1024).step_by(10) { g.remove_node(indices[i]); } g } defmac!(tojson ref g => serde_json::to_string(g).unwrap()); defmac!(fromjson ref data => serde_json::from_str(data).unwrap()); defmac!(rejson ref g => fromjson!(tojson!(g))); #[test] fn json_graph_str_i32() { let g1: DiGraph<_, _> = make_graph(); let g2: Graph<String, i32> = rejson!(&g1); assert_graph_eq(&g1, &g2); 
assert_graph_eq(&g2, &g1); } #[test] fn json_graph_nils() { let g1 = make_graph().map(|_, _| (), |_, _| ()); let g2: Graph<(), ()> = rejson!(&g1); assert_graph_eq(&g1, &g2); assert_graph_eq(&g2, &g1); } const DIGRAPH_NILS: &str = r#"{ "nodes":[null,null,null,null,null], "edge_property": "directed", "edges":[[0,1,null],[2,0,null],[1,3,null],[1,2,null],[2,3,null],[4,3,null]] }"#; const DIGRAPH_NILS_INDEX_OOB: &str = r#"{ "nodes":[null,null,null,null,null], "edge_property": "directed", "edges":[[0,1,null],[2,5,null],[1,3,null],[1,2,null],[2,3,null],[4,3,null]] }"#; const DIGRAPH_NILS_INDEX_OUTSIDE_U8: &str = r#"{ "nodes":[null,null,null,null,null], "edge_property": "directed", "edges":[[0,1,null],[2,300,null],[1,3,null],[1,2,null],[2,3,null],[4,3,null]] }"#; const DIGRAPH_STRI32: &str = r#"{ "nodes":["A","B","C","D","E","F"], "edge_property": "directed", "edges":[[0,1,7],[2,0,9],[0,3,14],[1,2,10],[3,2,2],[3,4,9],[1,5,15],[2,5,11],[4,5,6]] }"#; type DiGraphNils = DiGraph<(), ()>; type UnGraphNils = UnGraph<(), ()>; type DiGraphNilsU8 = DiGraph<(), (), u8>; type DiGraphStrI32 = DiGraph<String, i32>; #[test] fn from_json_digraph_nils() { let _: DiGraphNils = fromjson!(&DIGRAPH_NILS); } #[test] #[should_panic(expected = "edge property mismatch")] fn from_json_graph_nils_edge_property_mismatch() { let _: UnGraphNils = fromjson!(&DIGRAPH_NILS); } #[test] #[should_panic(expected = "does not exist")] fn from_json_graph_nils_index_oob() { let _: DiGraphNils = fromjson!(&DIGRAPH_NILS_INDEX_OOB); } #[test] #[should_panic(expected = "expected u8")] fn from_json_graph_nils_index_too_large() { let _: DiGraphNilsU8 = fromjson!(&DIGRAPH_NILS_INDEX_OUTSIDE_U8); } #[test] fn from_json_graph_directed_str_i32() { let _: DiGraphStrI32 = fromjson!(&DIGRAPH_STRI32); } #[test] #[should_panic(expected = "expected unit")] fn from_json_graph_from_edge_type_1() { let _: DiGraphNils = fromjson!(&DIGRAPH_STRI32); } #[test] #[should_panic(expected = "expected a string")] fn 
from_json_graph_from_edge_type_2() { let _: DiGraphStrI32 = fromjson!(&DIGRAPH_NILS); } #[test] fn from_json_digraph_str_i32() { let g4nodes = ["A", "B", "C", "D", "E", "F"]; let g4edges = [ [0, 1, 7], [2, 0, 9], [0, 3, 14], [1, 2, 10], [3, 2, 2], [3, 4, 9], [1, 5, 15], [2, 5, 11], [4, 5, 6], ]; type GSI = DiGraph<String, i32>; type GSISmall = DiGraph<String, i32, u8>; let g4: GSI = fromjson!(&DIGRAPH_STRI32); for ni in g4.node_indices() { assert_eq!(&g4nodes[ni.index()], &g4[ni]); } for e in g4.edge_references() { let edge_data = g4edges[e.id().index()]; let (s, t) = g4.edge_endpoints(e.id()).unwrap(); assert_eq!(edge_data[0] as usize, s.index()); assert_eq!(edge_data[1] as usize, t.index()); assert_eq!(edge_data[2], g4[e.id()]); } let _g4small: GSISmall = fromjson!(&DIGRAPH_STRI32); } #[test] fn from_json_nodes_too_big() { use serde_json::from_str; let j1_big = &format!( "{}{}{}", r#" {"nodes": [ "#, repeat_n(0, 300).format(", "), r#" ], "edge_property": "directed", "edges": [] } "# ); type G8 = DiGraph<i32, (), u8>; type G16 = DiGraph<i32, (), u16>; type G32 = DiGraph<i32, (), u32>; type G64 = DiGraph<i32, (), usize>; type H1 = DiGraph<i32, i32>; assert!(from_str::<G8>(j1_big).is_err()); let _: G16 = fromjson!(&j1_big); let _: G32 = fromjson!(&j1_big); let _: G64 = fromjson!(&j1_big); let _: H1 = fromjson!(&j1_big); } #[test] fn from_json_edges_too_big() { use serde_json::from_str; let j1_big = format!( "{}{}{}", r#" {"nodes": [0], "edge_property": "directed", "edges": ["#, repeat_n("[0, 0, 1]", (1 << 16) - 1).format(", "), "]}" ); type G8 = DiGraph<i32, i32, u8>; type G16 = DiGraph<i32, i32, u16>; type G32 = DiGraph<i32, i32, u32>; type G64 = DiGraph<i32, i32, usize>; assert!(from_str::<G8>(&j1_big).is_err()); assert!(from_str::<G16>(&j1_big).is_err()); let _: G32 = fromjson!(&j1_big); let _: G64 = fromjson!(&j1_big); } #[test] fn json_stable_graph_str() { let g1 = make_stable_graph(); let g2: StableGraph<String, i32> = rejson!(&g1); let g1 = g1.map(|_, s| 
s.to_string(), |_, &w| w); assert_stable_graph_eq(&g1, &g2); } #[test] fn json_stable_graph_nils() { let g1 = make_stable_graph().map(|_, _| (), |_, _| ()); let g2 = rejson!(&g1); assert_stable_graph_eq(&g1, &g2); } defmac!(encode ref g => bincode::serialize(g).unwrap()); defmac!(decode ref data => bincode::deserialize(data).unwrap()); defmac!(recode ref g => decode!(encode!(g))); #[test] fn bincode_stablegraph_to_graph_i32_0() { let g1 = StableGraph::<i32, i32>::new(); let g2: Graph<i32, i32> = recode!(&g1); assert_graph_eq(&g2, &Graph::<i32, i32>::default()); } #[test] fn bincode_graph_to_stablegraph_i32_0() { let g1 = Graph::<i32, i32>::new(); let g2: StableGraph<i32, i32> = recode!(&g1); assert_stable_graph_eq(&g2, &StableGraph::<i32, i32>::default()); } #[test] fn bincode_graph_to_graph_i32_1() { let mut g1 = Graph::<i32, i32>::new(); let x = 1729; g1.add_node(x); let g2: Graph<i32, i32> = recode!(&g1); assert_graph_eq(&g1, &g2); } #[test] fn bincode_stablegraph_added2_removed2() { let mut g1 = StableGraph::<i32, i32>::new(); let x = 1729; let a = g1.add_node(x); let b = g1.add_node(x + 1); g1.remove_node(a); g1.remove_node(b); let g2: StableGraph<i32, i32> = recode!(&g1); assert_stable_graph_eq(&g1, &g2); } #[test] fn bincode_stablegraph_added3_removed2() { let mut g1 = StableGraph::<i32, i32>::new(); let x = 1729; let a = g1.add_node(x); let b = g1.add_node(x + 1); let _c = g1.add_node(x + 2); g1.remove_node(a); g1.remove_node(b); let g2: StableGraph<i32, i32> = recode!(&g1); assert_stable_graph_eq(&g1, &g2); } #[test] fn bincode_stablegraph_to_graph_i32_1() { let mut g1 = StableGraph::<i32, i32>::new(); let x = 1729; g1.add_node(x); let g2: Graph<i32, i32> = recode!(&g1); assert_eq!(g2.node_count(), 1); assert_eq!(g2.edge_count(), 0); assert_eq!(g2[node_index(0)], x); } quickcheck! 
{ fn json_graph_to_stablegraph_to_graph(g1: Graph<i32, i32>) -> () { let sg: StableGraph<i32, i32> = rejson!(&g1); let g2: Graph<i32, i32> = rejson!(&sg); assert_graph_eq(&g1, &g2); } fn json_stablegraph_to_stablegraph(g1: StableGraph<i32, i32>) -> () { let sg: StableGraph<i32, i32> = rejson!(&g1); assert_stable_graph_eq(&g1, &sg); } fn json_graph_to_bigger_graph(g1: DiGraph<i32, i32, u16>) -> () { let g2: DiGraph<i32, i32, usize> = rejson!(&g1); let g3: DiGraph<i32, i32, u16> = rejson!(&g2); assert_graph_eq(&g1, &g3); } fn bincode_graph_to_graph_nils(g1: Graph<(), ()>) -> () { let g2: Graph<(), ()> = recode!(&g1); assert_graph_eq(&g1, &g2); } fn bincode_graph_to_stablegraph_to_graph_nils(g1: Graph<(), ()>) -> () { let data = encode!(&g1); let sg: StableGraph<(), ()> = decode!(&data); let data2 = encode!(&sg); let g2: Graph<(), ()> = decode!(&data2); assert_eq!(data, data2); assert_graph_eq(&g1, &g2); } fn bincode_graph_to_stablegraph_to_graph_u16(g1: DiGraph<i32, i32, u16>) -> () { let data = encode!(&g1); let sg: StableDiGraph<i32, i32, u16> = decode!(&data); let data2 = encode!(&sg); let g2: DiGraph<i32, i32, u16> = decode!(&data2); assert_eq!(data, data2); assert_graph_eq(&g1, &g2); } fn bincode_stablegraph_to_stablegraph(g1: StableGraph<i32, i32>) -> () { let g2: StableGraph<i32, i32> = recode!(&g1); assert_stable_graph_eq(&g1, &g2); } }
extern crate petgraph; #[macro_use] extern crate quickcheck; extern crate bincode; extern crate itertools; extern crate serde_json; #[macro_use] extern crate defmac; use std::collections::HashSet; use std::fmt::Debug; use std::iter::FromIterator; use itertools::assert_equal; use itertools::{repeat_n, Itertools}; use petgraph::graph::{edge_index, node_index, IndexType}; use petgraph::prelude::*; use petgraph::visit::EdgeRef; use petgraph::visit::IntoEdgeReferences; use petgraph::visit::NodeIndexable; use petgraph::EdgeType; pub fn assert_graph_eq<N, N2, E, Ty, Ix>(g: &Graph<N, E, Ty, Ix>, h: &Graph<N2, E, Ty, Ix>) where N: PartialEq<N2> + Debug, N2: PartialEq<N2> + Debug, E: PartialEq + Debug, Ty: EdgeType, Ix: IndexType, { assert_eq!(g.node_count(), h.node_count()); assert_eq!(g.edge_count(), h.edge_count()); assert_equal( g.raw_nodes().iter().map(|n| &n.weight), h.raw_nodes().iter().map(|n| &n.weight), ); assert_equal( g.raw_edges().iter().map(|n| &n.weight), h.raw_edges().iter().map(|n| &n.weight), ); for e1 in g.edge_references() { let (a2, b2) = h.edge_endpoints(e1.id()).unwrap(); assert_eq!(e1.source(), a2); assert_eq!(e1.target(), b2); } for index in g.node_indices() { let outgoing1 = <HashSet<_>>::from_iter(g.neighbors(index)); let outgoing2 = <HashSet<_>>::from_iter(h.neighbors(index)); assert_eq!(outgoing1, outgoing2); let incoming1 = <HashSet<_>>::from_iter(g.neighbors_directed(index, Incoming)); let incoming2 = <HashSet<_>>::from_iter(h.neighbors_directed(index, Incoming)); assert_eq!(incoming1, incoming2); } } pub fn assert_stable_graph_eq<N, E>(g: &StableGraph<N, E>, h: &StableGraph<N, E>) where N: PartialEq + Debug, E: PartialEq + Debug, { assert_eq!(g.node_count(), h.node_count()); assert_eq!(g.edge_count(), h.edge_count()); assert_equal( (0..g.node_bound()).map(|i| g.node_weight(node_index(i))), (0..h.node_bound()).map(|i| h.node_weight(node_index(i))), ); let last_edge_g = g.edge_references().next_back(); let last_edge_h = 
h.edge_references().next_back(); assert_eq!(last_edge_g.is_some(), last_edge_h.is_some()); if let (Some(lg), Some(lh)) = (last_edge_g, last_edge_h) { let lgi = lg.id().index(); let lhi = lh.id().index(); assert_equal( (0..lgi).map(|i| g.edge_weight(edge_index(i))), (0..lhi).map(|i| h.edge_weight(edge_index(i))), ); } for e1 in g.edge_references() { let (a2, b2) = h.edge_endpoints(e1.id()).unwrap(); assert_eq!(e1.source(), a2); assert_eq!(e1.target(), b2); } for index in g.node_indices() { let outgoing1 = <HashSet<_>>::from_iter(g.neighbors(index)); let outgoing2 = <HashSet<_>>::from_iter(h.neighbors(index)); assert_eq!(outgoing1, outgoing2); let incoming1 = <HashSet<_>>::from_iter(g.neighbors_directed(index, Incoming)); let incoming2 = <HashSet<_>>::from_iter(h.neighbors_directed(index, Incoming)); assert_eq!(incoming1, incoming2); } } fn make_graph<Ty, Ix>() -> Graph<&'static str, i32, Ty, Ix> where Ty: EdgeType, Ix: IndexType, { let mut g = Graph::default(); let a = g.add_node("A"); let b = g.add_node("B"); let c = g.add_node("C"); let d = g.add_node("D"); let e = g.add_node("E"); let f = g.add_node("F"); g.extend_with_edges(&[ (a, b, 7), (c, a, 9), (a, d, 14), (b, c, 10), (d, c, 2), (d, e, 9), (b, f, 15), (c, f, 11), (e, f, 6), ]); g.remove_node(d); g } fn make_stable_graph<Ty, Ix>() -> StableGraph<String, i32, Ty, Ix> where Ty: EdgeType, Ix: IndexType, { let mut g = StableGraph::default(); let indices: Vec<_> = (0..1024).map(|i| g.add_node(format!("{}", i))).collect(); for i in 1..256 { g.extend_with_edges((0..1024).map(|j| (indices[j], indices[(j + i) % 1024], i as i32))); } for i in (0..1024).step_by(10) { g.remove_node(indices[i]); } g } defmac!(tojson ref g => serde_json::to_string(g).unwrap()); defmac!(fromjson ref data => serde_json::from_str(data).unwrap()); defmac!(rejson ref g => fromjson!(tojson!(g))); #[test] fn json_graph_str_i32() { let g1: DiGraph<_, _> = make_graph(); let g2: Graph<String, i32> = rejson!(&g1); assert_graph_eq(&g1, &g2); 
assert_graph_eq(&g2, &g1); } #[test] fn json_graph_nils() { let g1 = make_graph().map(|_, _| (), |_, _| ()); let g2: Graph<(), ()> = rejson!(&g1); assert_graph_eq(&g1, &g2); assert_graph_eq(&g2, &g1); } const DIGRAPH_NILS: &str = r#"{ "nodes":[null,null,null,null,null], "edge_property": "directed", "edges":[[0,1,null],[2,0,null],[1,3,null],[1,2,null],[2,3,null],[4,3,null]] }"#; const DIGRAPH_NILS_INDEX_OOB: &str = r#"{ "nodes":[null,null,null,null,null], "edge_property": "directed", "edges":[[0,1,null],[2,5,null],[1,3,null],[1,2,null],[2,3,null],[4,3,null]] }"#; const DIGRAPH_NILS_INDEX_OUTSIDE_U8: &str = r#"{ "nodes":[null,null,null,null,null], "edge_property": "directed", "edges":[[0,1,null],[2,300,null],[1,3,null],[1,2,null],[2,3,null],[4,3,null]] }"#; const DIGRAPH_STRI32: &str = r#"{ "nodes":["A","B","C","D","E","F"], "edge_property": "directed", "edges":[[0,1,7],[2,0,9],[0,3,14],[1,2,10],[3,2,2],[3,4,9],[1,5,15],[2,5,11],[4,5,6]] }"#; type DiGraphNils = DiGraph<(), ()>; type UnGraphNils = UnGraph<(), ()>; type DiGraphNilsU8 = DiGraph<(), (), u8>; type DiGraphStrI32 = DiGraph<String, i32>; #[test] fn from_json_digraph_nils() { let _: DiGraphNils = fromjson!(&DIGRAPH_NILS); } #[test] #[should_panic(expected = "edge property mismatch")] fn from_json_graph_nils_edge_property_mismatch() { let _: UnGraphNils = fromjson!(&DIGRAPH_NILS); } #[test] #[should_panic(expected = "does not exist")] fn from_json_graph_nils_index_oob() { let _: DiGraphNils = fromjson!(&DIGRAPH_NILS_INDEX_OOB); } #[test] #[should_panic(expected = "expected u8")] fn from_json_graph_nils_index_too_large() { let _: DiGraphNilsU8 = fromjson!(&DIGRAPH_NILS_INDEX_OUTSIDE_U8); } #[test] fn from_json_graph_directed_str_i32() { let _: DiGraphStrI32 = fromjson!(&DIGRAPH_STRI32); } #[test] #[should_panic(expected = "expected unit")] fn from_json_graph_from_edge_type_1() { let _: DiGraphNils = fromjson!(&DIGRAPH_STRI32); } #[test] #[should_panic(expected = "expected a string")] fn 
from_json_graph_from_edge_type_2() { let _: DiGraphStrI32 = fromjson!(&DIGRAPH_NILS); } #[test] fn from_json_digraph_str_i32() { let g4nodes = ["A", "B", "C", "D", "E", "F"]; let g4edges = [ [0, 1, 7], [2, 0, 9], [0, 3, 14], [1, 2, 10], [3, 2, 2], [3, 4, 9], [1, 5, 15], [2, 5, 11], [4, 5, 6], ]; type GSI = DiGraph<String, i32>; type GSISmall = DiGraph<String, i32, u8>; let g4: GSI = fromjson!(&DIGRAPH_STRI32); for ni in g4.node_indices() { assert_eq!(&g4nodes[ni.index()], &g4[ni]); } for e in g4.edge_references() { let edge_data = g4edges[e.id().index()]; let (s, t) = g4.edge_endpoints(e.id()).unwrap(); assert_eq!(edge_data[0] as usize, s.index()); assert_eq!(edge_data[1] as usize, t.index()); assert_eq!(edge_data[2], g4[e.id()]); } let _g4small: GSISmall = fromjson!(&DIGRAPH_STRI32); } #[test] fn from_json_nodes_too_big() { use serde_json::from_str; let j1_big = &format!( "{}{}{}", r#" {"nodes": [ "#, repeat_n(0, 300).format(", "), r#" ], "edge_property": "directed", "edges": [] } "# ); type G8 = DiGraph<i32, (), u8>; type G16 = DiGraph<i32, (), u16>; type G32 = DiGraph<i32, (), u32>; type G64 = DiGraph<i32, (), usize>; type H1 = DiGraph<i32, i32>; assert!(from_str::<G8>(j1_big).is_err()); let _: G16 = fromjson!(&j1_big); let _: G32 = fromjson!(&j1_big); let _: G64 = fromjson!(&j1_big); let _: H1 = fromjson!(&j1_big); } #[test] fn from_json_edges_too_big() { use serde_json::from_str; let j1_big = format!( "{}{}{}", r#" {"nodes": [0], "edge_property": "directed", "edges": ["#, repeat_n("[0, 0, 1]", (1 << 16) - 1).format(", "), "]}" ); type G8 = DiGraph<i32, i32, u8>; type G16 = DiGraph<i32, i32, u16>; type G32 = DiGraph<i32, i32, u32>; type G64 = DiGraph<i32, i32, usize>; assert!(from_str::<G8>(&j1_big).is_err()); assert!(from_str::<G16>(&j1_big).is_err()); let _: G32 = fromjson!(&j1_big); let _: G64 = fromjson!(&j1_big); } #[test] fn json_stable_graph_str() { let g1 = make_stable_graph(); let g2: StableGraph<String, i32> = rejson!(&g1); let g1 = g1.map(|_, s| 
s.to_string(), |_, &w| w); assert_stable_graph_eq(&g1, &g2); } #[test] fn json_stable_graph_nils() { let g1 = make_stable_graph().map(|_, _| (), |_, _| ()); let g2 = rejson!(&g1); assert_stable_graph_eq(&g1, &g2); } defmac!(encode ref g => bincode::serialize(g).unwrap()); defmac!(decode ref data => bincode::deserialize(data).unwrap()); defmac!(recode ref g => decode!(encode!(g))); #[test] fn bincode_stablegraph_to_graph_i32_0() { let g1 = StableGraph::<i32, i32>::new(); let g2: Graph<i32, i32> = recode!(&g1); assert_graph_eq(&g2, &Graph::<i32, i32>::default()); } #[test] fn bincode_graph_to_stablegraph_i32_0() { let g1 = Graph::<i32, i32>::new(); let g2: StableGraph<i32, i32> = recode!(&g1); assert_stable_graph_eq(&g2, &StableGraph::<i32, i32>::default()); } #[test] fn bincode_graph_to_graph_i32_1() { let mut g1 = Graph::<i32, i32>::new(); let x = 1729; g1.add_node(x); let g2: Graph<i32, i32> = recode!(&g1); assert_graph_eq(&g1, &g2); } #[test]
#[test] fn bincode_stablegraph_added3_removed2() { let mut g1 = StableGraph::<i32, i32>::new(); let x = 1729; let a = g1.add_node(x); let b = g1.add_node(x + 1); let _c = g1.add_node(x + 2); g1.remove_node(a); g1.remove_node(b); let g2: StableGraph<i32, i32> = recode!(&g1); assert_stable_graph_eq(&g1, &g2); } #[test] fn bincode_stablegraph_to_graph_i32_1() { let mut g1 = StableGraph::<i32, i32>::new(); let x = 1729; g1.add_node(x); let g2: Graph<i32, i32> = recode!(&g1); assert_eq!(g2.node_count(), 1); assert_eq!(g2.edge_count(), 0); assert_eq!(g2[node_index(0)], x); } quickcheck! { fn json_graph_to_stablegraph_to_graph(g1: Graph<i32, i32>) -> () { let sg: StableGraph<i32, i32> = rejson!(&g1); let g2: Graph<i32, i32> = rejson!(&sg); assert_graph_eq(&g1, &g2); } fn json_stablegraph_to_stablegraph(g1: StableGraph<i32, i32>) -> () { let sg: StableGraph<i32, i32> = rejson!(&g1); assert_stable_graph_eq(&g1, &sg); } fn json_graph_to_bigger_graph(g1: DiGraph<i32, i32, u16>) -> () { let g2: DiGraph<i32, i32, usize> = rejson!(&g1); let g3: DiGraph<i32, i32, u16> = rejson!(&g2); assert_graph_eq(&g1, &g3); } fn bincode_graph_to_graph_nils(g1: Graph<(), ()>) -> () { let g2: Graph<(), ()> = recode!(&g1); assert_graph_eq(&g1, &g2); } fn bincode_graph_to_stablegraph_to_graph_nils(g1: Graph<(), ()>) -> () { let data = encode!(&g1); let sg: StableGraph<(), ()> = decode!(&data); let data2 = encode!(&sg); let g2: Graph<(), ()> = decode!(&data2); assert_eq!(data, data2); assert_graph_eq(&g1, &g2); } fn bincode_graph_to_stablegraph_to_graph_u16(g1: DiGraph<i32, i32, u16>) -> () { let data = encode!(&g1); let sg: StableDiGraph<i32, i32, u16> = decode!(&data); let data2 = encode!(&sg); let g2: DiGraph<i32, i32, u16> = decode!(&data2); assert_eq!(data, data2); assert_graph_eq(&g1, &g2); } fn bincode_stablegraph_to_stablegraph(g1: StableGraph<i32, i32>) -> () { let g2: StableGraph<i32, i32> = recode!(&g1); assert_stable_graph_eq(&g1, &g2); } }
fn bincode_stablegraph_added2_removed2() { let mut g1 = StableGraph::<i32, i32>::new(); let x = 1729; let a = g1.add_node(x); let b = g1.add_node(x + 1); g1.remove_node(a); g1.remove_node(b); let g2: StableGraph<i32, i32> = recode!(&g1); assert_stable_graph_eq(&g1, &g2); }
function_block-function_prefix_line
[ { "content": "fn mst_graph<N, E, Ty, Ix>(g: &Graph<N, E, Ty, Ix>) -> Graph<N, E, Undirected, Ix>\n\nwhere\n\n Ty: EdgeType,\n\n Ix: IndexType,\n\n N: Clone,\n\n E: Clone + PartialOrd,\n\n{\n\n Graph::from_elements(min_spanning_tree(&g))\n\n}\n\n\n\nuse std::fmt;\n\n\n\nquickcheck! {\n\n fn mst...
Rust
src/header/ethernet.rs
ajguerrer/tygress
6c05e4a27dfe825a6cc3b89ec4f80905c8ef51a2
use core::fmt; use super::as_header; use crate::error::{Error, Result}; #[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Clone)] #[repr(C)] pub struct EthernetII { dst: EtherAddr, src: EtherAddr, ty: EtherTypeRepr, } impl EthernetII { #[inline] pub fn split_header(bytes: &[u8]) -> Result<(&Self, &[u8])> { let (header, payload) = as_header!(EthernetII, bytes)?; header.ty.check()?; Ok((header, payload)) } #[inline] pub fn source(&self) -> EtherAddr { self.src } #[inline] pub fn destination(&self) -> EtherAddr { self.dst } #[inline] pub fn ethertype(&self) -> EtherType { self.ty.get() } } impl fmt::Display for EthernetII { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "EthernetII src: {}, dst: {}, type: {}", self.src, self.dst, self.ty ) } } #[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Copy, Clone)] #[repr(transparent)] pub struct EtherAddr([u8; 6]); impl EtherAddr { pub const BROADCAST: EtherAddr = EtherAddr([0xFF; 6]); #[inline] pub fn new(bytes: [u8; 6]) -> Self { Self(bytes) } #[inline] pub fn as_bytes(&self) -> &[u8] { self.0.as_ref() } #[inline] pub const fn is_unicast(&self) -> bool { self.0[0] & 0x01 == 0 } #[inline] pub const fn is_multicast(&self) -> bool { !self.is_unicast() } #[inline] pub const fn is_broadcast(&self) -> bool { self.0[0] == 0xFF && self.0[1] == 0xFF && self.0[2] == 0xFF && self.0[3] == 0xFF && self.0[4] == 0xFF && self.0[5] == 0xFF } #[inline] pub const fn is_universal(&self) -> bool { self.0[0] & 0x02 == 0 } #[inline] pub const fn is_local(&self) -> bool { !self.is_universal() } } impl fmt::Display for EtherAddr { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let bytes = self.0; write!( f, "{:02x}:{:02x}:{:02x}:{:02x}:{:02x}:{:02x}", bytes[0], bytes[1], bytes[2], bytes[3], bytes[4], bytes[5] ) } } #[non_exhaustive] #[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Copy, Clone)] #[repr(u16)] pub enum EtherType { Ipv4 = 0x0800, Arp = 0x0806, Ipv6 = 0x86DD, } impl From<EtherType> for u16 { 
#[inline] fn from(val: EtherType) -> Self { val as u16 } } impl TryFrom<u16> for EtherType { type Error = Error; #[inline] fn try_from(value: u16) -> Result<Self> { match value { value if value == Self::Ipv4 as u16 => Ok(Self::Ipv4), value if value == Self::Arp as u16 => Ok(Self::Arp), value if value == Self::Ipv6 as u16 => Ok(Self::Ipv6), _ => Err(Error::Unsupported), } } } impl fmt::Display for EtherType { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Debug::fmt(&self, f) } } #[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Clone)] #[repr(transparent)] struct EtherTypeRepr([u8; 2]); impl EtherTypeRepr { const IPV4: EtherTypeRepr = EtherTypeRepr(u16::to_be_bytes(EtherType::Ipv4 as u16)); const ARP: EtherTypeRepr = EtherTypeRepr(u16::to_be_bytes(EtherType::Arp as u16)); const IPV6: EtherTypeRepr = EtherTypeRepr(u16::to_be_bytes(EtherType::Ipv6 as u16)); #[inline] const fn check(&self) -> Result<()> { match *self { Self::IPV4 | Self::ARP | Self::IPV6 => Ok(()), _ => Err(Error::Unsupported), } } #[inline] const fn get(&self) -> EtherType { match *self { Self::IPV4 => EtherType::Ipv4, Self::ARP => EtherType::Arp, Self::IPV6 => EtherType::Ipv6, _ => unreachable!(), } } } impl From<EtherType> for EtherTypeRepr { #[inline] fn from(value: EtherType) -> Self { EtherTypeRepr(u16::to_be_bytes(value as u16)) } } impl fmt::Display for EtherTypeRepr { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Display::fmt(&self.get(), f) } } #[cfg(test)] mod tests { use crate::error::Error; use super::*; #[test] fn short_header() { let bytes = [0; 13]; assert_eq!( EthernetII::split_header(&bytes).unwrap_err(), Error::Truncated ); } #[test] fn invalid_ethertype() { let bytes = [0; 14]; assert_eq!( EthernetII::split_header(&bytes).unwrap_err(), Error::Unsupported ); } #[test] fn valid_ethertypes() { let bytes = [&[0; 12][..], &[0x08, 0x00][..]].concat(); let (header, _) = EthernetII::split_header(&bytes).unwrap(); assert_eq!(header.ethertype(), 
EtherType::Ipv4); let bytes = [&[0; 12][..], &[0x08, 0x06][..]].concat(); let (header, _) = EthernetII::split_header(&bytes).unwrap(); assert_eq!(header.ethertype(), EtherType::Arp); let bytes = [&[0; 12][..], &[0x86, 0xDD][..]].concat(); let (header, _) = EthernetII::split_header(&bytes).unwrap(); assert_eq!(header.ethertype(), EtherType::Ipv6); } #[test] fn ether_addr() { let mut addr = EtherAddr([0xFF; 6]); assert!(addr.is_broadcast()); assert_eq!((true, false), (addr.is_local(), addr.is_universal())); assert_eq!((true, false), (addr.is_multicast(), addr.is_unicast())); addr.0[0] = 0x0EF; assert!(!addr.is_broadcast()); assert_eq!((true, false), (addr.is_local(), addr.is_universal())); assert_eq!((true, false), (addr.is_multicast(), addr.is_unicast())); addr.0[0] = 0x0FE; assert!(!addr.is_broadcast()); assert_eq!((true, false), (addr.is_local(), addr.is_universal())); assert_eq!((false, true), (addr.is_multicast(), addr.is_unicast())); addr.0[0] = 0x0FD; assert!(!addr.is_broadcast()); assert_eq!((false, true), (addr.is_local(), addr.is_universal())); assert_eq!((true, false), (addr.is_multicast(), addr.is_unicast())); addr.0[0] = 0xFC; assert!(!addr.is_broadcast()); assert_eq!((false, true), (addr.is_local(), addr.is_universal())); assert_eq!((false, true), (addr.is_multicast(), addr.is_unicast())); } }
use core::fmt; use super::as_header; use crate::error::{Error, Result}; #[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Clone)] #[repr(C)] pub struct EthernetII { dst: EtherAddr, src: EtherAddr, ty: EtherTypeRepr, } impl EthernetII { #[inline] pub fn split_header(bytes: &[u8]) -> Result<(&Self, &[u8])> { let (header, payload) = as_header!(EthernetII, bytes)?; header.ty.check()?; Ok((header, payload)) } #[inline] pub fn source(&self) -> EtherAddr { self.src } #[inline] pub fn destination(&self) -> EtherAddr { self.dst } #[inline] pub fn ethertype(&self) -> EtherType { self.ty.get() } } impl fmt::Display for EthernetII { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "EthernetII src: {}, dst: {}, type: {}", self.src, self.dst, self.ty ) } } #[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Copy, Clone)] #[repr(transparent)] pub struct EtherAddr([u8; 6]); impl EtherAddr { pub const BROADCAST: EtherAddr = EtherAddr([0xFF; 6]); #[inline] pub fn new(bytes: [u8; 6]) -> Self { Self(bytes) } #[inline] pub fn as_bytes(&self) -> &[u8] { self.0.as_ref() } #[inline] pub const fn is_unicast(&self) -> bool { self.0[0] & 0x01 == 0 } #[inline] pub const fn is_multicast(&self) -> bool { !self.is_unicast() } #[inline] pub const fn is_broadcast(&self) -> bool { self.0[0] == 0xFF && self.0[1] == 0xFF && self.0[2] == 0xFF && self.0[3] == 0xFF && self.0[4] == 0xFF && self.0[5] == 0xFF } #[inline] pub const fn is_universal(&self) -> bool { self.0[0] & 0x02 == 0 } #[inline] pub const fn is_local(&self) -> bool { !self.is_universal() } } impl fmt::Display for EtherAddr {
} #[non_exhaustive] #[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Copy, Clone)] #[repr(u16)] pub enum EtherType { Ipv4 = 0x0800, Arp = 0x0806, Ipv6 = 0x86DD, } impl From<EtherType> for u16 { #[inline] fn from(val: EtherType) -> Self { val as u16 } } impl TryFrom<u16> for EtherType { type Error = Error; #[inline] fn try_from(value: u16) -> Result<Self> { match value { value if value == Self::Ipv4 as u16 => Ok(Self::Ipv4), value if value == Self::Arp as u16 => Ok(Self::Arp), value if value == Self::Ipv6 as u16 => Ok(Self::Ipv6), _ => Err(Error::Unsupported), } } } impl fmt::Display for EtherType { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Debug::fmt(&self, f) } } #[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Clone)] #[repr(transparent)] struct EtherTypeRepr([u8; 2]); impl EtherTypeRepr { const IPV4: EtherTypeRepr = EtherTypeRepr(u16::to_be_bytes(EtherType::Ipv4 as u16)); const ARP: EtherTypeRepr = EtherTypeRepr(u16::to_be_bytes(EtherType::Arp as u16)); const IPV6: EtherTypeRepr = EtherTypeRepr(u16::to_be_bytes(EtherType::Ipv6 as u16)); #[inline] const fn check(&self) -> Result<()> { match *self { Self::IPV4 | Self::ARP | Self::IPV6 => Ok(()), _ => Err(Error::Unsupported), } } #[inline] const fn get(&self) -> EtherType { match *self { Self::IPV4 => EtherType::Ipv4, Self::ARP => EtherType::Arp, Self::IPV6 => EtherType::Ipv6, _ => unreachable!(), } } } impl From<EtherType> for EtherTypeRepr { #[inline] fn from(value: EtherType) -> Self { EtherTypeRepr(u16::to_be_bytes(value as u16)) } } impl fmt::Display for EtherTypeRepr { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Display::fmt(&self.get(), f) } } #[cfg(test)] mod tests { use crate::error::Error; use super::*; #[test] fn short_header() { let bytes = [0; 13]; assert_eq!( EthernetII::split_header(&bytes).unwrap_err(), Error::Truncated ); } #[test] fn invalid_ethertype() { let bytes = [0; 14]; assert_eq!( EthernetII::split_header(&bytes).unwrap_err(), 
Error::Unsupported ); } #[test] fn valid_ethertypes() { let bytes = [&[0; 12][..], &[0x08, 0x00][..]].concat(); let (header, _) = EthernetII::split_header(&bytes).unwrap(); assert_eq!(header.ethertype(), EtherType::Ipv4); let bytes = [&[0; 12][..], &[0x08, 0x06][..]].concat(); let (header, _) = EthernetII::split_header(&bytes).unwrap(); assert_eq!(header.ethertype(), EtherType::Arp); let bytes = [&[0; 12][..], &[0x86, 0xDD][..]].concat(); let (header, _) = EthernetII::split_header(&bytes).unwrap(); assert_eq!(header.ethertype(), EtherType::Ipv6); } #[test] fn ether_addr() { let mut addr = EtherAddr([0xFF; 6]); assert!(addr.is_broadcast()); assert_eq!((true, false), (addr.is_local(), addr.is_universal())); assert_eq!((true, false), (addr.is_multicast(), addr.is_unicast())); addr.0[0] = 0x0EF; assert!(!addr.is_broadcast()); assert_eq!((true, false), (addr.is_local(), addr.is_universal())); assert_eq!((true, false), (addr.is_multicast(), addr.is_unicast())); addr.0[0] = 0x0FE; assert!(!addr.is_broadcast()); assert_eq!((true, false), (addr.is_local(), addr.is_universal())); assert_eq!((false, true), (addr.is_multicast(), addr.is_unicast())); addr.0[0] = 0x0FD; assert!(!addr.is_broadcast()); assert_eq!((false, true), (addr.is_local(), addr.is_universal())); assert_eq!((true, false), (addr.is_multicast(), addr.is_unicast())); addr.0[0] = 0xFC; assert!(!addr.is_broadcast()); assert_eq!((false, true), (addr.is_local(), addr.is_universal())); assert_eq!((false, true), (addr.is_multicast(), addr.is_unicast())); } }
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let bytes = self.0; write!( f, "{:02x}:{:02x}:{:02x}:{:02x}:{:02x}:{:02x}", bytes[0], bytes[1], bytes[2], bytes[3], bytes[4], bytes[5] ) }
function_block-full_function
[ { "content": "#[inline]\n\npub fn verify_checksum(bytes: &[u8]) -> Result<()> {\n\n let sum: u32 = bytes\n\n .chunks_exact(2)\n\n // chunks_exact(2) always maps to arrays of 2 bytes as a slice so the conversion should\n\n // never fail.\n\n .map(|bytes| u32::from(u16::from_be_byte...
Rust
crates/holochain/src/core/workflow/call_zome_workflow.rs
guillemcordoba/holochain
fa4acd2067176757327328446368b1e09bfa2a34
use super::app_validation_workflow; use super::app_validation_workflow::AppValidationError; use super::app_validation_workflow::Outcome; use super::error::WorkflowResult; use super::sys_validation_workflow::sys_validate_element; use crate::conductor::api::CellConductorApi; use crate::conductor::api::CellConductorApiT; use crate::conductor::interface::SignalBroadcaster; use crate::conductor::ConductorHandle; use crate::core::queue_consumer::TriggerSender; use crate::core::ribosome::error::RibosomeResult; use crate::core::ribosome::guest_callback::post_commit::send_post_commit; use crate::core::ribosome::RibosomeT; use crate::core::ribosome::ZomeCallHostAccess; use crate::core::ribosome::ZomeCallInvocation; use crate::core::workflow::error::WorkflowError; use holochain_keystore::MetaLairClient; use holochain_p2p::HolochainP2pDna; use holochain_state::host_fn_workspace::HostFnWorkspace; use holochain_state::host_fn_workspace::SourceChainWorkspace; use holochain_state::source_chain::SourceChainError; use holochain_zome_types::element::Element; use holochain_types::prelude::*; use tracing::instrument; #[cfg(test)] mod validation_test; pub type ZomeCallResult = RibosomeResult<ZomeCallResponse>; pub struct CallZomeWorkflowArgs<RibosomeT> { pub ribosome: RibosomeT, pub invocation: ZomeCallInvocation, pub signal_tx: SignalBroadcaster, pub conductor_handle: ConductorHandle, pub is_root_zome_call: bool, pub cell_id: CellId, } #[instrument(skip( workspace, network, keystore, args, trigger_publish_dht_ops, trigger_integrate_dht_ops ))] pub async fn call_zome_workflow<Ribosome>( workspace: SourceChainWorkspace, network: HolochainP2pDna, keystore: MetaLairClient, args: CallZomeWorkflowArgs<Ribosome>, trigger_publish_dht_ops: TriggerSender, trigger_integrate_dht_ops: TriggerSender, ) -> WorkflowResult<ZomeCallResult> where Ribosome: RibosomeT + 'static, { let coordinator_zome = args .ribosome .dna_def() .get_coordinator_zome(args.invocation.zome.zome_name()) .ok(); let 
should_write = args.is_root_zome_call; let conductor_handle = args.conductor_handle.clone(); let result = call_zome_workflow_inner(workspace.clone(), network.clone(), keystore.clone(), args) .await?; if should_write { let is_empty = workspace.source_chain().is_empty()?; let countersigning_op = workspace.source_chain().countersigning_op()?; let flushed_headers: Vec<SignedHeaderHashed> = HostFnWorkspace::from(workspace.clone()) .flush(&network) .await?; if !is_empty { match countersigning_op { Some(op) => { if let Err(error_response) = super::countersigning_workflow::countersigning_publish(&network, op).await { return Ok(Ok(error_response)); } } None => { trigger_publish_dht_ops.trigger(&"trigger_publish_dht_ops"); trigger_integrate_dht_ops.trigger(&"trigger_integrate_dht_ops"); } } } if let Some(coordinator_zome) = coordinator_zome { send_post_commit( conductor_handle, workspace, network, keystore, flushed_headers, vec![coordinator_zome], ) .await?; } } Ok(result) } async fn call_zome_workflow_inner<Ribosome>( workspace: SourceChainWorkspace, network: HolochainP2pDna, keystore: MetaLairClient, args: CallZomeWorkflowArgs<Ribosome>, ) -> WorkflowResult<ZomeCallResult> where Ribosome: RibosomeT + 'static, { let CallZomeWorkflowArgs { ribosome, invocation, signal_tx, conductor_handle, cell_id, .. 
} = args; let call_zome_handle = CellConductorApi::new(conductor_handle.clone(), cell_id).into_call_zome_handle(); tracing::trace!("Before zome call"); let host_access = ZomeCallHostAccess::new( workspace.clone().into(), keystore, network.clone(), signal_tx, call_zome_handle, ); let (ribosome, result) = call_zome_function_authorized(ribosome, host_access, invocation).await?; tracing::trace!("After zome call"); let validation_result = inline_validation(workspace.clone(), network, conductor_handle, ribosome).await; if matches!( validation_result, Err(WorkflowError::SourceChainError( SourceChainError::InvalidCommit(_) )) ) { let scratch_elements = workspace.source_chain().scratch_elements()?; if scratch_elements.len() == 1 { let lock = holochain_state::source_chain::lock_for_entry( scratch_elements[0].entry().as_option(), )?; if !lock.is_empty() && workspace .source_chain() .is_chain_locked(Vec::with_capacity(0)) .await? && !workspace.source_chain().is_chain_locked(lock).await? { if let Err(error) = workspace.source_chain().unlock_chain().await { tracing::error!(?error); } } } } validation_result?; Ok(result) } pub async fn call_zome_function_authorized<R>( ribosome: R, host_access: ZomeCallHostAccess, invocation: ZomeCallInvocation, ) -> WorkflowResult<(R, RibosomeResult<ZomeCallResponse>)> where R: RibosomeT + 'static, { if invocation.is_authorized(&host_access).await? { tokio::task::spawn_blocking(|| { let r = ribosome.call_zome_function(host_access, invocation); Ok((ribosome, r)) }) .await? 
} else { Ok(( ribosome, Ok(ZomeCallResponse::Unauthorized( invocation.cell_id.clone(), invocation.zome.zome_name().clone(), invocation.fn_name.clone(), invocation.provenance.clone(), )), )) } } pub async fn inline_validation<Ribosome>( workspace: SourceChainWorkspace, network: HolochainP2pDna, conductor_handle: ConductorHandle, ribosome: Ribosome, ) -> WorkflowResult<()> where Ribosome: RibosomeT + 'static, { let to_app_validate = { let scratch_elements = workspace.source_chain().scratch_elements()?; let mut to_app_validate: Vec<Element> = Vec::with_capacity(scratch_elements.len()); for element in scratch_elements { sys_validate_element(&element, &workspace, network.clone(), &(*conductor_handle)) .await .or_else(|outcome_or_err| outcome_or_err.invalid_call_zome_commit())?; to_app_validate.push(element); } to_app_validate }; let mut cascade = holochain_cascade::Cascade::from_workspace_network(&workspace, network.clone()); for mut chain_element in to_app_validate { for op_type in header_to_op_types(chain_element.header()) { let op = app_validation_workflow::element_to_op(chain_element, op_type, &mut cascade).await; let (op, activity_entry) = match op { Ok(op) => op, Err(outcome_or_err) => return map_outcome(Outcome::try_from(outcome_or_err)), }; let outcome = app_validation_workflow::validate_op( &op, workspace.clone().into(), &network, &ribosome, ) .await; let outcome = outcome.or_else(Outcome::try_from); map_outcome(outcome)?; chain_element = app_validation_workflow::op_to_element(op, activity_entry); } } Ok(()) } fn map_outcome( outcome: Result<app_validation_workflow::Outcome, AppValidationError>, ) -> WorkflowResult<()> { match outcome.map_err(SourceChainError::other)? 
{ app_validation_workflow::Outcome::Accepted => {} app_validation_workflow::Outcome::Rejected(reason) => { return Err(SourceChainError::InvalidCommit(reason).into()); } app_validation_workflow::Outcome::AwaitingDeps(hashes) => { return Err(SourceChainError::InvalidCommit(format!("{:?}", hashes)).into()); } } Ok(()) }
use super::app_validation_workflow; use super::app_validation_workflow::AppValidationError; use super::app_validation_workflow::Outcome; use super::error::WorkflowResult; use super::sys_validation_workflow::sys_validate_element; use crate::conductor::api::CellConductorApi; use crate::conductor::api::CellConductorApiT; use crate::conductor::interface::SignalBroadcaster; use crate::conductor::ConductorHandle; use crate::core::queue_consumer::TriggerSender; use crate::core::ribosome::error::RibosomeResult; use crate::core::ribosome::guest_callback::post_commit::send_post_commit; use crate::core::ribosome::RibosomeT; use crate::core::ribosome::ZomeCallHostAccess; use crate::core::ribosome::ZomeCallInvocation; use crate::core::workflow::error::WorkflowError; use holochain_keystore::MetaLairClient; use holochain_p2p::HolochainP2pDna; use holochain_state::host_fn_workspace::HostFnWorkspace; use holochain_state::host_fn_workspace::SourceChainWorkspace; use holochain_state::source_chain::SourceChainError; use holochain_zome_types::element::Element; use holochain_types::prelude::*; use tracing::instrument; #[cfg(test)] mod validation_test; pub type ZomeCallResult = RibosomeResult<ZomeCallResponse>; pub struct CallZomeWorkflowArgs<RibosomeT> { pub ribosome: RibosomeT, pub invocation: ZomeCallInvocation, pub signal_tx: SignalBroadcaster, pub conductor_handle: ConductorHandle, pub is_root_zome_call: bool, pub cell_id: CellId, } #[instrument(skip( workspace, network, keystore, args, trigger_publish_dht_ops, trigger_integrate_dht_ops ))] pub async fn call_zome_workflow<Ribosome>( workspace: SourceChainWorkspace, network: HolochainP2pDna, keystore: MetaLairClient, args: CallZomeWorkflowArgs<Ribosome>, trigger_publish_dht_ops: TriggerSender, trigger_integrate_dht_ops: TriggerSender, ) -> WorkflowResult<ZomeCallResult> where Ribosome: RibosomeT + 'static, { let coordinator_zome = args .ribosome .dna_def() .get_coordinator_zome(args.invocation.zome.zome_name()) .ok(); let 
should_write = args.is_root_zome_call; let conductor_handle = args.conductor_handle.clone(); let result = call_zome_workflow_inner(workspace.clone(), network.clone(), keystore.clone(), args) .await?; if should_write { let is_empty = workspace.source_chain().is_empty()?; let countersigning_op = workspace.source_chain().countersigning_op()?; let flushed_headers: Vec<SignedHeaderHashed> = HostFnWorkspace::from(workspace.clone()) .flush(&network) .await?; if !is_empty { match countersigning_op { Some(op) => { if let Err(error_response) = super::countersigning_workflow::countersigning_publish(&network, op).await { return Ok(Ok(error_response)); } } None => { trigger_publish_dht_ops.trigger(&"trigger_publish_dht_ops"); trigger_integrate_dht_ops.trigger(&"trigger_integrate_dht_ops"); } } } if let Some(coordinator_zome) = coordinator_zome { send_post_commit( conductor_handle, workspace, network, keystore, flushed_headers, vec![coordinator_zome], ) .await?; } } Ok(result) } async fn call_zome_workflow_inner<Ribosome>( workspace: SourceChainWorkspace, network: HolochainP2pDna, keystore: MetaLairClient, args: CallZomeWorkflowArgs<Ribosome>, ) -> WorkflowResult<ZomeCallResult> where Ribosome: RibosomeT + 'static, { let CallZomeWorkflowArgs { ribosome, invocation, signal_tx, conductor_handle, cell_id, .. 
} = args; let call_zome_handle = CellConductorApi::new(conductor_handle.clone(), cell_id).into_call_zome_handle(); tracing::trace!("Before zome call"); let host_access = ZomeCallHostAccess::new( workspace.clone().into(), keystore, network.clone(), signal_tx, call_zome_handle, ); let (ribosome, result) = call_zome_function_authorized(ribosome, host_access, invocation).await?; tracing::trace!("After zome call"); let validation_result = inline_validation(workspace.clone(), network, conductor_handle, ribosome).await; if matches!( validation_result, Err(WorkflowError::SourceChainError( SourceChainError::InvalidCommit(_) )) ) { let scratch_elements = workspace.source_chain().scratch_elements()?; if scratch_elements.len() == 1 { let lock = holochain_state::source_chain::lock_for_entry( scratch_elements[0].entry().as_option(), )?;
} } validation_result?; Ok(result) } pub async fn call_zome_function_authorized<R>( ribosome: R, host_access: ZomeCallHostAccess, invocation: ZomeCallInvocation, ) -> WorkflowResult<(R, RibosomeResult<ZomeCallResponse>)> where R: RibosomeT + 'static, { if invocation.is_authorized(&host_access).await? { tokio::task::spawn_blocking(|| { let r = ribosome.call_zome_function(host_access, invocation); Ok((ribosome, r)) }) .await? } else { Ok(( ribosome, Ok(ZomeCallResponse::Unauthorized( invocation.cell_id.clone(), invocation.zome.zome_name().clone(), invocation.fn_name.clone(), invocation.provenance.clone(), )), )) } } pub async fn inline_validation<Ribosome>( workspace: SourceChainWorkspace, network: HolochainP2pDna, conductor_handle: ConductorHandle, ribosome: Ribosome, ) -> WorkflowResult<()> where Ribosome: RibosomeT + 'static, { let to_app_validate = { let scratch_elements = workspace.source_chain().scratch_elements()?; let mut to_app_validate: Vec<Element> = Vec::with_capacity(scratch_elements.len()); for element in scratch_elements { sys_validate_element(&element, &workspace, network.clone(), &(*conductor_handle)) .await .or_else(|outcome_or_err| outcome_or_err.invalid_call_zome_commit())?; to_app_validate.push(element); } to_app_validate }; let mut cascade = holochain_cascade::Cascade::from_workspace_network(&workspace, network.clone()); for mut chain_element in to_app_validate { for op_type in header_to_op_types(chain_element.header()) { let op = app_validation_workflow::element_to_op(chain_element, op_type, &mut cascade).await; let (op, activity_entry) = match op { Ok(op) => op, Err(outcome_or_err) => return map_outcome(Outcome::try_from(outcome_or_err)), }; let outcome = app_validation_workflow::validate_op( &op, workspace.clone().into(), &network, &ribosome, ) .await; let outcome = outcome.or_else(Outcome::try_from); map_outcome(outcome)?; chain_element = app_validation_workflow::op_to_element(op, activity_entry); } } Ok(()) } fn map_outcome( outcome: 
Result<app_validation_workflow::Outcome, AppValidationError>, ) -> WorkflowResult<()> { match outcome.map_err(SourceChainError::other)? { app_validation_workflow::Outcome::Accepted => {} app_validation_workflow::Outcome::Rejected(reason) => { return Err(SourceChainError::InvalidCommit(reason).into()); } app_validation_workflow::Outcome::AwaitingDeps(hashes) => { return Err(SourceChainError::InvalidCommit(format!("{:?}", hashes)).into()); } } Ok(()) }
if !lock.is_empty() && workspace .source_chain() .is_chain_locked(Vec::with_capacity(0)) .await? && !workspace.source_chain().is_chain_locked(lock).await? { if let Err(error) = workspace.source_chain().unlock_chain().await { tracing::error!(?error); } }
if_condition
[ { "content": "/// Helper function for the common case of returning this boxed future type.\n\npub fn ok_fut<E1, R: Send + 'static>(result: R) -> Result<MustBoxFuture<'static, R>, E1> {\n\n use futures::FutureExt;\n\n Ok(async move { result }.boxed().into())\n\n}\n\n\n", "file_path": "crates/kitsune_p2...
Rust
src/watchpoints.rs
mewbak/haybale
d22f0eb7ae09bbbca75da6b409dfb8036ecc3fe2
use crate::backend::{Backend, BV}; use crate::error::Result; use crate::state::State; use log::info; use std::collections::HashMap; use std::fmt; #[derive(Eq, PartialEq, Clone, Debug, Hash)] pub struct Watchpoint { low: u64, high: u64, } impl Watchpoint { pub fn new(addr: u64, bytes: u64) -> Self { if bytes == 0 { panic!("Watchpoint::new: `bytes` cannot be 0"); } Self { low: addr, high: addr + bytes - 1, } } } impl fmt::Display for Watchpoint { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "[{:#x}, {:#x}]", self.low, self.high) } } #[derive(Clone)] pub struct Watchpoints(HashMap<String, (Watchpoint, bool)>); impl Watchpoints { pub fn new() -> Self { Self(HashMap::new()) } pub fn from_iter(iter: impl IntoIterator<Item = (String, Watchpoint)>) -> Self { Self(iter.into_iter().map(|(name, w)| (name, (w, true))).collect()) } pub fn add(&mut self, name: impl Into<String>, watchpoint: Watchpoint) -> bool { self.0.insert(name.into(), (watchpoint, true)).is_some() } pub fn remove(&mut self, name: &str) -> bool { self.0.remove(name).is_some() } pub fn disable(&mut self, name: &str) -> bool { match self.0.get_mut(name) { Some(v) => { v.1 = false; true }, None => false, } } pub fn enable(&mut self, name: &str) -> bool { match self.0.get_mut(name) { Some(v) => { v.1 = true; true }, None => false, } } pub(crate) fn process_watchpoint_triggers<B: Backend>( &self, state: &State<B>, addr: &B::BV, bits: u32, is_write: bool, ) -> Result<bool> { let mut retval = false; if !self.0.is_empty() { let addr_width = addr.get_width(); let op_lower = addr; let bytes = if bits < 8 { 1 } else { bits / 8 }; let op_upper = addr.add(&state.bv_from_u32(bytes - 1, addr_width)); for (name, (watchpoint, enabled)) in self.0.iter() { if *enabled && self.is_watchpoint_triggered(state, watchpoint, op_lower, &op_upper)? 
{ retval = true; info!("Memory watchpoint {:?} {} {} by {:?}", name, watchpoint, if is_write { "written" } else { "read" }, state.cur_loc); } } } Ok(retval) } pub(crate) fn is_watchpoint_triggered<B: Backend>( &self, state: &State<B>, watchpoint: &Watchpoint, interval_lower: &B::BV, interval_upper: &B::BV, ) -> Result<bool> { let width = interval_lower.get_width(); assert_eq!(width, interval_upper.get_width()); let watchpoint_lower = state.bv_from_u64(watchpoint.low, width); let watchpoint_upper = state.bv_from_u64(watchpoint.high, width); let interval_lower_contained = interval_lower.ugte(&watchpoint_lower).and(&interval_lower.ulte(&watchpoint_upper)); let interval_upper_contained = interval_upper.ugte(&watchpoint_lower).and(&interval_upper.ulte(&watchpoint_upper)); let contains_entire_watchpoint = interval_lower.ulte(&watchpoint_lower).and(&interval_upper.ugte(&watchpoint_upper)); state.sat_with_extra_constraints(std::iter::once( &interval_lower_contained.or(&interval_upper_contained).or(&contains_entire_watchpoint) )) } } #[cfg(test)] mod tests { use super::*; use crate::backend::BtorBackend; use crate::config::Config; use crate::state::Location; use crate::project::Project; use llvm_ir::*; fn blank_state<'p>(project: &'p Project, funcname: &str) -> State<'p, BtorBackend> { let (func, module) = project.get_func_by_name(funcname).expect("Failed to find function"); let start_loc = Location { module, func, bbname: "test_bb".to_owned().into(), instr: 0, }; State::new(project, start_loc, Config::default()) } fn blank_project(modname: impl Into<String>, func: Function) -> Project { Project::from_module(Module { name: modname.into(), source_file_name: String::new(), data_layout: String::new(), target_triple: None, functions: vec![func], global_vars: vec![], global_aliases: vec![], named_struct_types: HashMap::new(), inline_assembly: String::new(), }) } fn blank_function(name: impl Into<String>) -> Function { Function::new(name) } #[test] fn watchpoints() -> Result<()> 
{ let func = blank_function("test_func"); let project = blank_project("test_mod", func); let state = blank_state(&project, "test_func"); let mut watchpoints = Watchpoints::new(); watchpoints.add("w1", Watchpoint::new(0x1000, 8)); watchpoints.add("w2", Watchpoint::new(0x2000, 32)); let addr = state.bv_from_u32(0x1000, 64); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 8, true)?); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 64, true)?); let addr = state.bv_from_u32(0x1002, 64); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 8, true)?); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 64, true)?); let addr = state.bv_from_u32(0x0fff, 64); assert!(!watchpoints.process_watchpoint_triggers(&state, &addr, 8, true)?); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 64, true)?); let addr = state.bv_from_u32(0x1008, 64); assert!(!watchpoints.process_watchpoint_triggers(&state, &addr, 8, true)?); let addr = state.bv_from_u32(0x0ff0, 64); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 0x100 * 8, true)?); assert!(watchpoints.disable("w1")); let addr = state.bv_from_u32(0x1002, 64); assert!(!watchpoints.process_watchpoint_triggers(&state, &addr, 8, true)?); assert!(watchpoints.enable("w1")); assert!(!watchpoints.disable("foo")); assert!(!watchpoints.enable("foo")); let addr = state.bv_from_u32(0x2000, 64); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 8, true)?); let addr = state.bv_from_u32(0x2010, 64); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 8, true)?); let addr = state.bv_from_u32(0x0ff0, 64); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 0x10000, true)?); let addr = state.bv_from_u32(0x1f00, 64); assert!(!watchpoints.process_watchpoint_triggers(&state, &addr, 16, true)?); assert!(watchpoints.remove("w2")); let addr = state.bv_from_u32(0x2000, 64); assert!(!watchpoints.process_watchpoint_triggers(&state, &addr, 8, true)?); 
assert!(!watchpoints.enable("w2")); Ok(()) } }
use crate::backend::{Backend, BV}; use crate::error::Result; use crate::state::State; use log::info; use std::collections::HashMap; use std::fmt; #[derive(Eq, PartialEq, Clone, Debug, Hash)] pub struct Watchpoint { low: u64, high: u64, } impl Watchpoint { pub fn new(addr: u64, bytes: u64) -> Self { if bytes == 0 { panic!("Watchpoint::new: `bytes` cannot be 0"); } Self { low: addr, high: addr + bytes - 1, } } } impl fmt::Display for Watchpoint { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "[{:#x}, {:#x}]", self.low, self.high) } } #[derive(Clone)] pub struct Watchpoints(HashMap<String, (Watchpoint, bool)>); impl Watchpoints { pub fn new() -> Self { Self(HashMap::new()) } pub fn from_iter(iter: impl IntoIterator<Item = (String, Watchpoint)>) -> Self { Self(iter.into_iter().map(|(name, w)| (name, (w, true))).collect()) } pub fn add(&mut self, name: impl Into<String>, watchpoint: Watchpoint) -> bool { self.0.insert(name.into(), (watchpoint, true)).is_some() } pub fn remove(&mut self, name: &str) -> bool { self.0.remove(name).is_some() } pub fn disable(&mut self, name: &str) -> bool { match self.0.get_mut(name) { Some(v) => { v.1 = false; true }, None => false, } } pub fn enable(&mut self, name: &str) -> bool { match self.0.get_mut(name) { Some(v) => { v.1 = true; true }, None => false, } } pub(crate) fn process_watchpoint_triggers<B: Backend>( &self, state: &State<B>, addr: &B::BV,
pub(crate) fn is_watchpoint_triggered<B: Backend>( &self, state: &State<B>, watchpoint: &Watchpoint, interval_lower: &B::BV, interval_upper: &B::BV, ) -> Result<bool> { let width = interval_lower.get_width(); assert_eq!(width, interval_upper.get_width()); let watchpoint_lower = state.bv_from_u64(watchpoint.low, width); let watchpoint_upper = state.bv_from_u64(watchpoint.high, width); let interval_lower_contained = interval_lower.ugte(&watchpoint_lower).and(&interval_lower.ulte(&watchpoint_upper)); let interval_upper_contained = interval_upper.ugte(&watchpoint_lower).and(&interval_upper.ulte(&watchpoint_upper)); let contains_entire_watchpoint = interval_lower.ulte(&watchpoint_lower).and(&interval_upper.ugte(&watchpoint_upper)); state.sat_with_extra_constraints(std::iter::once( &interval_lower_contained.or(&interval_upper_contained).or(&contains_entire_watchpoint) )) } } #[cfg(test)] mod tests { use super::*; use crate::backend::BtorBackend; use crate::config::Config; use crate::state::Location; use crate::project::Project; use llvm_ir::*; fn blank_state<'p>(project: &'p Project, funcname: &str) -> State<'p, BtorBackend> { let (func, module) = project.get_func_by_name(funcname).expect("Failed to find function"); let start_loc = Location { module, func, bbname: "test_bb".to_owned().into(), instr: 0, }; State::new(project, start_loc, Config::default()) } fn blank_project(modname: impl Into<String>, func: Function) -> Project { Project::from_module(Module { name: modname.into(), source_file_name: String::new(), data_layout: String::new(), target_triple: None, functions: vec![func], global_vars: vec![], global_aliases: vec![], named_struct_types: HashMap::new(), inline_assembly: String::new(), }) } fn blank_function(name: impl Into<String>) -> Function { Function::new(name) } #[test] fn watchpoints() -> Result<()> { let func = blank_function("test_func"); let project = blank_project("test_mod", func); let state = blank_state(&project, "test_func"); let mut watchpoints = 
Watchpoints::new(); watchpoints.add("w1", Watchpoint::new(0x1000, 8)); watchpoints.add("w2", Watchpoint::new(0x2000, 32)); let addr = state.bv_from_u32(0x1000, 64); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 8, true)?); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 64, true)?); let addr = state.bv_from_u32(0x1002, 64); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 8, true)?); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 64, true)?); let addr = state.bv_from_u32(0x0fff, 64); assert!(!watchpoints.process_watchpoint_triggers(&state, &addr, 8, true)?); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 64, true)?); let addr = state.bv_from_u32(0x1008, 64); assert!(!watchpoints.process_watchpoint_triggers(&state, &addr, 8, true)?); let addr = state.bv_from_u32(0x0ff0, 64); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 0x100 * 8, true)?); assert!(watchpoints.disable("w1")); let addr = state.bv_from_u32(0x1002, 64); assert!(!watchpoints.process_watchpoint_triggers(&state, &addr, 8, true)?); assert!(watchpoints.enable("w1")); assert!(!watchpoints.disable("foo")); assert!(!watchpoints.enable("foo")); let addr = state.bv_from_u32(0x2000, 64); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 8, true)?); let addr = state.bv_from_u32(0x2010, 64); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 8, true)?); let addr = state.bv_from_u32(0x0ff0, 64); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 0x10000, true)?); let addr = state.bv_from_u32(0x1f00, 64); assert!(!watchpoints.process_watchpoint_triggers(&state, &addr, 16, true)?); assert!(watchpoints.remove("w2")); let addr = state.bv_from_u32(0x2000, 64); assert!(!watchpoints.process_watchpoint_triggers(&state, &addr, 8, true)?); assert!(!watchpoints.enable("w2")); Ok(()) } }
bits: u32, is_write: bool, ) -> Result<bool> { let mut retval = false; if !self.0.is_empty() { let addr_width = addr.get_width(); let op_lower = addr; let bytes = if bits < 8 { 1 } else { bits / 8 }; let op_upper = addr.add(&state.bv_from_u32(bytes - 1, addr_width)); for (name, (watchpoint, enabled)) in self.0.iter() { if *enabled && self.is_watchpoint_triggered(state, watchpoint, op_lower, &op_upper)? { retval = true; info!("Memory watchpoint {:?} {} {} by {:?}", name, watchpoint, if is_write { "written" } else { "read" }, state.cur_loc); } } } Ok(retval) }
function_block-function_prefix_line
[ { "content": "/// Reallocate the given `addr` to be at least the number of bytes given by the `Operand`.\n\n///\n\n/// Returns the address of the allocation, which may or may not be the same\n\n/// address which was passed in.\n\npub fn realloc<B: Backend>(state: &mut State<B>, addr: &Operand, num_bytes: &Opera...
Rust
src/kv_manager/kv.rs
axelarnetwork/tofnd
ec8b9a00a652ae47da6b2102284f8dd979dbbae5
use crate::encrypted_sled::{self, Password}; use super::{ error::{KvError::*, KvResult}, sled_bindings::{handle_delete, handle_exists, handle_get, handle_put, handle_reserve}, types::{ Command::{self, *}, KeyReservation, DEFAULT_KV_NAME, DEFAULT_KV_PATH, }, }; use serde::{de::DeserializeOwned, Serialize}; use std::{fmt::Debug, path::PathBuf}; use tokio::sync::{mpsc, oneshot}; use tracing::{info, warn}; #[derive(Clone)] pub struct Kv<V> { sender: mpsc::UnboundedSender<Command<V>>, } impl<V: 'static> Kv<V> where V: Debug + Send + Sync + Serialize + DeserializeOwned, { pub fn new(root_path: PathBuf, password: Password) -> KvResult<Self> { let kv_path = root_path.join(DEFAULT_KV_PATH).join(DEFAULT_KV_NAME); let kv_path = kv_path.to_string_lossy().to_string(); Self::with_db_name(kv_path, password) } pub fn with_db_name(full_db_name: String, password: Password) -> KvResult<Self> { let (sender, rx) = mpsc::unbounded_channel(); let kv = get_kv_store(&full_db_name, password)?; tokio::spawn(kv_cmd_handler(rx, kv)); Ok(Self { sender }) } pub async fn reserve_key(&self, key: String) -> KvResult<KeyReservation> { let (resp_tx, resp_rx) = oneshot::channel(); self.sender .send(ReserveKey { key, resp: resp_tx }) .map_err(|err| SendErr(err.to_string()))?; resp_rx.await?.map_err(ReserveErr) } pub async fn unreserve_key(&self, reservation: KeyReservation) { let _ = self.sender.send(UnreserveKey { reservation }); } pub async fn put(&self, reservation: KeyReservation, value: V) -> KvResult<()> { let (resp_tx, resp_rx) = oneshot::channel(); self.sender .send(Put { reservation, value, resp: resp_tx, }) .map_err(|e| SendErr(e.to_string()))?; resp_rx.await?.map_err(PutErr) } pub async fn get(&self, key: &str) -> KvResult<V> { let (resp_tx, resp_rx) = oneshot::channel(); self.sender .send(Get { key: key.to_string(), resp: resp_tx, }) .map_err(|e| SendErr(e.to_string()))?; resp_rx.await?.map_err(GetErr) } pub async fn delete(&self, key: &str) -> KvResult<()> { let (resp_tx, resp_rx) = 
oneshot::channel(); self.sender .send(Delete { key: key.to_string(), resp: resp_tx, }) .map_err(|e| SendErr(e.to_string()))?; resp_rx.await?.map_err(DeleteErr) } pub async fn exists(&self, key: &str) -> KvResult<bool> { let (resp_tx, resp_rx) = oneshot::channel(); self.sender .send(Exists { key: key.to_string(), resp: resp_tx, }) .map_err(|e| SendErr(e.to_string()))?; resp_rx.await?.map_err(ExistsErr) } } pub fn get_kv_store( db_name: &str, password: Password, ) -> encrypted_sled::Result<encrypted_sled::Db> { info!("START: decrypt kvstore"); let kv = encrypted_sled::Db::open(db_name, password)?; info!("DONE: decrypt kvstore"); if kv.was_recovered() { info!("kv_manager found existing db [{}]", db_name); } else { info!( "kv_manager cannot open existing db [{}]. creating new db", db_name ); } Ok(kv) } async fn kv_cmd_handler<V: 'static>( mut rx: mpsc::UnboundedReceiver<Command<V>>, kv: encrypted_sled::Db, ) where V: Serialize + DeserializeOwned, { while let Some(cmd) = rx.recv().await { match cmd { ReserveKey { key, resp } => { if resp.send(handle_reserve(&kv, key)).is_err() { warn!("receiver dropped"); } } UnreserveKey { reservation } => { let _ = kv.remove(&reservation.key); } Put { reservation, value, resp, } => { if resp.send(handle_put(&kv, reservation, value)).is_err() { warn!("receiver dropped"); } } Get { key, resp } => { if resp.send(handle_get(&kv, key)).is_err() { warn!("receiver dropped"); } } Exists { key, resp } => { if resp.send(handle_exists(&kv, &key)).is_err() { warn!("receiver dropped"); } } Delete { key, resp } => { if resp.send(handle_delete(&kv, key)).is_err() { warn!("receiver dropped"); } } } } info!("kv_manager stop"); }
use crate::encrypted_sled::{self, Password}; use super::{ error::{KvError::*, KvResult}, sled_bindings::{handle_delete, handle_exists, handle_get, handle_put, handle_reserve}, types::{ Command::{self, *}, KeyReservation, DEFAULT_KV_NAME, DEFAULT_KV_PATH, }, }; use serde::{de::DeserializeOwned, Serialize}; use std::{fmt::Debug, path::PathBuf}; use tokio::sync::{mpsc, oneshot}; use tracing::{info, warn}; #[derive(Clone)] pub struct Kv<V> { sender: mpsc::UnboundedSender<Command<V>>, } impl<V: 'static> Kv<V> where V: Debug + Send + Sync + Serialize + DeserializeOwned, { pub fn new(root_path: PathBuf, password: Password) -> KvResult<Self> { let kv_path = root_path.join(DEFAULT_KV_PATH).join(DEFAULT_KV_NAME); let kv_path = kv_path.to_string_lossy().to_string(); Self::with_db_name(kv_path, password) } pub fn with_db_name(full_db_name: String, password: Password) -> KvResult<Self> { let (sender, rx) = mpsc::unbounded_channel(); let kv = get_kv_store(&full_db_name, password)?; tokio::spawn(kv_cmd_handler(rx, kv)); Ok(Self { sender }) } pub async fn reserve_key(&self, key: String) -> KvResult<KeyReservation> { let (resp_tx, resp_rx) = oneshot::channel(); self.sender .send(ReserveKey { key, resp: resp_tx }) .map_err(|err| SendErr(err.to_string()))?; resp_rx.await?.map_err(ReserveErr) } pub async fn unreserve_key(&self, reservation: KeyReservation) { let _ = self.sender.send(UnreserveKey { reservation }); } pub async fn put(&self, reservation: KeyReservation, value: V) -> KvResult<()> { let (resp_tx, resp_rx) = oneshot::channel(); self.sender .send(Put { reservation, value, resp: resp_tx, }) .map_err(|e| SendErr(e.to_string()))?; resp_rx.await?.map_err(PutErr) } pub async fn get(&self, key: &str) -> KvResult<V> { let (resp_tx, resp_rx) = oneshot::channel(); self.sender .send(Get { key: key.to_string(), resp: resp_tx, }) .map_err(|e| SendErr(e.to_string()))?; resp_rx.await?.map_err(GetErr) }
pub async fn exists(&self, key: &str) -> KvResult<bool> { let (resp_tx, resp_rx) = oneshot::channel(); self.sender .send(Exists { key: key.to_string(), resp: resp_tx, }) .map_err(|e| SendErr(e.to_string()))?; resp_rx.await?.map_err(ExistsErr) } } pub fn get_kv_store( db_name: &str, password: Password, ) -> encrypted_sled::Result<encrypted_sled::Db> { info!("START: decrypt kvstore"); let kv = encrypted_sled::Db::open(db_name, password)?; info!("DONE: decrypt kvstore"); if kv.was_recovered() { info!("kv_manager found existing db [{}]", db_name); } else { info!( "kv_manager cannot open existing db [{}]. creating new db", db_name ); } Ok(kv) } async fn kv_cmd_handler<V: 'static>( mut rx: mpsc::UnboundedReceiver<Command<V>>, kv: encrypted_sled::Db, ) where V: Serialize + DeserializeOwned, { while let Some(cmd) = rx.recv().await { match cmd { ReserveKey { key, resp } => { if resp.send(handle_reserve(&kv, key)).is_err() { warn!("receiver dropped"); } } UnreserveKey { reservation } => { let _ = kv.remove(&reservation.key); } Put { reservation, value, resp, } => { if resp.send(handle_put(&kv, reservation, value)).is_err() { warn!("receiver dropped"); } } Get { key, resp } => { if resp.send(handle_get(&kv, key)).is_err() { warn!("receiver dropped"); } } Exists { key, resp } => { if resp.send(handle_exists(&kv, &key)).is_err() { warn!("receiver dropped"); } } Delete { key, resp } => { if resp.send(handle_delete(&kv, key)).is_err() { warn!("receiver dropped"); } } } } info!("kv_manager stop"); }
pub async fn delete(&self, key: &str) -> KvResult<()> { let (resp_tx, resp_rx) = oneshot::channel(); self.sender .send(Delete { key: key.to_string(), resp: resp_tx, }) .map_err(|e| SendErr(e.to_string()))?; resp_rx.await?.map_err(DeleteErr) }
function_block-full_function
[ { "content": "// Provided by the requester and used by the manager task to send the command response back to the requester.\n\ntype Responder<T> = tokio::sync::oneshot::Sender<super::error::InnerKvResult<T>>;\n\n\n\n#[derive(Debug)]\n\npub(super) enum Command<V> {\n\n ReserveKey {\n\n key: String,\n\n...
Rust
src/desktop/request.rs
jtojnar/ashpd
7cd1792916b9e169cd015f975f4f453fe5c2207c
use std::{ collections::HashMap, convert::TryFrom, fmt::{self, Debug}, marker::PhantomData, }; use serde::{ de::{self, Error as SeError, Visitor}, Deserialize, Deserializer, Serialize, }; use serde_repr::{Deserialize_repr, Serialize_repr}; use zvariant::OwnedValue; use zvariant_derive::Type; use super::DESTINATION; use crate::{ desktop::HandleToken, helpers::{call_method, receive_signal}, Error, }; #[derive(Debug)] pub(crate) enum Response<T> where T: for<'de> Deserialize<'de> + zvariant::Type, { Ok(T), Err(ResponseError), } impl<T> zvariant::Type for Response<T> where T: for<'de> Deserialize<'de> + zvariant::Type, { fn signature() -> zvariant::Signature<'static> { <(ResponseType, HashMap<&str, OwnedValue>)>::signature() } } impl<'de, T> Deserialize<'de> for Response<T> where T: for<'d> Deserialize<'d> + zvariant::Type, { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { struct ResponseVisitor<T>(PhantomData<fn() -> (ResponseType, T)>); impl<'de, T> Visitor<'de> for ResponseVisitor<T> where T: Deserialize<'de>, { type Value = (ResponseType, Option<T>); fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { write!( formatter, "a tuple composed of the response status along with the response" ) } fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error> where A: de::SeqAccess<'de>, { let type_: ResponseType = seq.next_element()?.ok_or_else(|| A::Error::custom( "Failed to deserialize the response. Expected a numeric (u) value as the first item of the returned tuple", ))?; if type_ == ResponseType::Success { let data: T = seq.next_element()?.ok_or_else(|| A::Error::custom( "Failed to deserialize the response. 
Expected a vardict (a{sv}) with the returned results", ))?; Ok((type_, Some(data))) } else { Ok((type_, None)) } } } let visitor = ResponseVisitor::<T>(PhantomData); let response: (ResponseType, Option<T>) = deserializer.deserialize_tuple(2, visitor)?; Ok(response.into()) } } #[doc(hidden)] impl<T> From<(ResponseType, Option<T>)> for Response<T> where T: for<'de> Deserialize<'de> + zvariant::Type, { fn from(f: (ResponseType, Option<T>)) -> Self { match f.0 { ResponseType::Success => { Response::Ok(f.1.expect("Expected a valid response, found nothing.")) } ResponseType::Cancelled => Response::Err(ResponseError::Cancelled), ResponseType::Other => Response::Err(ResponseError::Other), } } } #[derive(Serialize, Deserialize, Type)] pub(crate) struct BasicResponse(HashMap<String, OwnedValue>); impl Debug for BasicResponse { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("BasicResponse").finish() } } #[derive(Debug, Copy, PartialEq, Hash, Clone)] pub enum ResponseError { Cancelled, Other, } impl std::error::Error for ResponseError {} impl std::fmt::Display for ResponseError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Self::Cancelled => f.write_str("Cancelled"), Self::Other => f.write_str("Other"), } } } #[derive(Serialize_repr, Deserialize_repr, PartialEq, Debug, Type)] #[repr(u32)] #[doc(hidden)] enum ResponseType { Success = 0, Cancelled = 1, Other = 2, } #[doc(hidden)] impl From<ResponseError> for ResponseType { fn from(err: ResponseError) -> Self { match err { ResponseError::Other => Self::Other, ResponseError::Cancelled => Self::Cancelled, } } } #[doc(alias = "org.freedesktop.portal.Request")] pub(crate) struct RequestProxy<'a>(zbus::Proxy<'a>); impl<'a> RequestProxy<'a> { pub async fn new( connection: &zbus::Connection, path: zvariant::ObjectPath<'a>, ) -> Result<RequestProxy<'a>, Error> { let proxy = zbus::ProxyBuilder::new_bare(connection) .interface("org.freedesktop.portal.Request")? .path(path)? 
.destination(DESTINATION)? .build() .await?; Ok(Self(proxy)) } pub async fn from_unique_name( connection: &zbus::Connection, handle_token: &HandleToken, ) -> Result<RequestProxy<'a>, Error> { let unique_name = connection.unique_name().unwrap(); let unique_identifier = unique_name.trim_start_matches(':').replace('.', "_"); let path = zvariant::ObjectPath::try_from(format!( "/org/freedesktop/portal/desktop/request/{}/{}", unique_identifier, handle_token )) .unwrap(); tracing::info!("Creating a org.freedesktop.portal.Request {}", path); RequestProxy::new(connection, path).await } pub fn inner(&self) -> &zbus::Proxy<'_> { &self.0 } #[doc(alias = "Response")] #[allow(dead_code)] pub async fn receive_response<R>(&self) -> Result<R, Error> where R: for<'de> Deserialize<'de> + zvariant::Type + Debug, { let response = receive_signal::<Response<R>>(&self.0, "Response").await?; match response { Response::Err(e) => Err(e.into()), Response::Ok(r) => Ok(r), } } #[allow(dead_code)] #[doc(alias = "Close")] pub async fn close(&self) -> Result<(), Error> { call_method(&self.0, "Close", &()).await } } impl<'a> Debug for RequestProxy<'a> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_tuple("RequestProxy") .field(&self.inner().path().as_str()) .finish() } }
use std::{ collections::HashMap, convert::TryFrom, fmt::{self, Debug}, marker::PhantomData, }; use serde::{ de::{self, Error as SeError, Visitor}, Deserialize, Deserializer, Serialize, }; use serde_repr::{Deserialize_repr, Serialize_repr}; use zvariant::OwnedValue; use zvariant_derive::Type; use super::DESTINATION; use crate::{ desktop::HandleToken, helpers::{call_method, receive_signal}, Error, }; #[derive(Debug)] pub(crate) enum Response<T> where T: for<'de> Deserialize<'de> + zvariant::Type, { Ok(T), Err(ResponseError), } impl<T> zvariant::Type for Response<T> where T: for<'de> Deserialize<'de> + zvariant::Type, { fn signature() -> zvariant::Signature<'static> { <(ResponseType, HashMap<&str, OwnedValue>)>::signature() } } impl<'de, T> Deserialize<'de> for Response<T> where T: for<'d> Deserialize<'d> + zvariant::Type, { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { struct ResponseVisitor<T>(PhantomData<fn() -> (ResponseType, T)>); impl<'de, T> Visitor<'de> for ResponseVisitor<T> where T: Deserialize<'de>, { type Value = (ResponseType, Option<T>); fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { write!( formatter, "a tuple composed of the response status along with the response" ) } fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error> where A: de::SeqAccess<'de>, { let type_: ResponseType = seq.next_element()?.ok_or_else(|| A::Error::custom( "Failed to deserialize the response. Expected a numeric (u) value as the first item of the returned tuple", ))?; if type_ == ResponseType::Success { let data: T = seq.next_element()?.ok_or_else(|| A::Error::custom( "Failed to deserialize the response. 
Expected a vardict (a{sv}) with the returned results", ))?; Ok((type_, Some(data))) } else { Ok((type_, None)) } } } let visitor = ResponseVisitor::<T>(PhantomData); let response: (ResponseType, Option<T>) = deserializer.deserialize_tuple(2, visitor)?; Ok(response.into()) } } #[doc(hidden)] impl<T> From<(ResponseType, Option<T>)> for Response<T> where T: for<'de> Deserialize<'de> + zvariant::Type, { fn from(f: (ResponseType, Option<T>)) -> Self { match f.0 { ResponseType::Success => { Response::Ok(f.1.expect("Expected a valid response, found nothing.")) } ResponseType::Cancelled => Response::Err(ResponseError::Cancelled), ResponseType::Other => Response::Err(ResponseError::Other), } } } #[derive(Serialize, Deserialize, Type)] pub(crate) struct BasicResponse(HashMap<String, OwnedValue>); impl Debug for BasicResponse { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("BasicResponse").finish() } } #[derive(Debug, Copy, PartialEq, Hash, Clone)] pub enum ResponseError { Cancelled, Other, } impl std::error::Error for ResponseError {} impl std::fmt::Display for ResponseError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Self::Cancelled => f.write_str("Cancelled"), Self::Other => f.write_str("Other"), } } } #[derive(Serialize_repr, Deserialize_repr, PartialEq, Debug, Type)] #[repr(u32)] #[doc(hidden)] enum ResponseType { Success = 0, Cancelled = 1, Other = 2, } #[doc(hidden)] impl From<ResponseError> for ResponseType { fn from(err: ResponseError) -> Self { match err { ResponseError::Other => Self::Other, ResponseError::Cancelled => Self::Cancelled, } } } #[doc(alias = "org.freedesktop.portal.Request")] pub(crate) struct RequestProxy<'a>(zbus::Proxy<'a>); impl<'a> RequestProxy<'a> { pub async fn new( connection: &zbus::Connection, path: zvariant::ObjectPath<'a>, ) -> Result<RequestProxy<'a>, Error> {
Ok(Self(proxy)) } pub async fn from_unique_name( connection: &zbus::Connection, handle_token: &HandleToken, ) -> Result<RequestProxy<'a>, Error> { let unique_name = connection.unique_name().unwrap(); let unique_identifier = unique_name.trim_start_matches(':').replace('.', "_"); let path = zvariant::ObjectPath::try_from(format!( "/org/freedesktop/portal/desktop/request/{}/{}", unique_identifier, handle_token )) .unwrap(); tracing::info!("Creating a org.freedesktop.portal.Request {}", path); RequestProxy::new(connection, path).await } pub fn inner(&self) -> &zbus::Proxy<'_> { &self.0 } #[doc(alias = "Response")] #[allow(dead_code)] pub async fn receive_response<R>(&self) -> Result<R, Error> where R: for<'de> Deserialize<'de> + zvariant::Type + Debug, { let response = receive_signal::<Response<R>>(&self.0, "Response").await?; match response { Response::Err(e) => Err(e.into()), Response::Ok(r) => Ok(r), } } #[allow(dead_code)] #[doc(alias = "Close")] pub async fn close(&self) -> Result<(), Error> { call_method(&self.0, "Close", &()).await } } impl<'a> Debug for RequestProxy<'a> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_tuple("RequestProxy") .field(&self.inner().path().as_str()) .finish() } }
let proxy = zbus::ProxyBuilder::new_bare(connection) .interface("org.freedesktop.portal.Request")? .path(path)? .destination(DESTINATION)? .build() .await?;
assignment_statement
[ { "content": "#[cfg(feature = \"feature_pipewire\")]\n\nfn pipewire_node_id_inner<F: FnOnce(u32) + Clone + 'static>(\n\n fd: RawFd,\n\n callback: F,\n\n) -> Result<(), pw::Error> {\n\n use pw::prelude::*;\n\n let mainloop = pw::MainLoop::new()?;\n\n let context = pw::Context::new(&mainloop)?;\n\n...
Rust
language/move-prover/src/prover_task_runner.rs
CVeniamin/diem
6fad0d397f683bdc9a80e4c6dbe74f5370ac45b4
use crate::cli::Options; use async_trait::async_trait; use futures::{future::FutureExt, pin_mut, select}; use log::debug; use rand::Rng; use regex::Regex; use std::{ process::Output, sync::{ mpsc::{channel, Sender}, Arc, }, }; use tokio::{ process::Command, sync::{broadcast, broadcast::Receiver, Semaphore}, }; #[derive(Debug, Clone)] enum BroadcastMsg { Stop, } const MAX_PERMITS: usize = usize::MAX >> 4; #[async_trait] pub trait ProverTask { type TaskResult: Send + 'static; type TaskId: Send + Copy + 'static; fn init(&mut self, num_instances: usize) -> Vec<Self::TaskId>; async fn run(&mut self, task_id: Self::TaskId, sem: Arc<Semaphore>) -> Self::TaskResult; fn is_success(&self, task_result: &Self::TaskResult) -> bool; } pub struct ProverTaskRunner(); impl ProverTaskRunner { pub fn run_tasks<T>( mut task: T, num_instances: usize, sequential: bool, ) -> (T::TaskId, T::TaskResult) where T: ProverTask + Clone + Send + 'static, { let rt = tokio::runtime::Builder::new() .threaded_scheduler() .enable_all() .build() .unwrap(); let sem = if sequential { Arc::new(Semaphore::new(1)) } else { Arc::new(Semaphore::new(MAX_PERMITS)) }; let (worker_tx, master_rx) = channel(); let (master_tx, _): ( tokio::sync::broadcast::Sender<BroadcastMsg>, Receiver<BroadcastMsg>, ) = broadcast::channel(num_instances); let task_ids = task.init(num_instances); for task_id in task_ids { let s = sem.clone(); let send_n = worker_tx.clone(); let worker_rx = master_tx.subscribe(); let cloned_task = task.clone(); rt.spawn(async move { Self::run_task_until_cancelled(cloned_task, task_id, send_n, worker_rx, s).await; }); } let mut num_working_instances = num_instances; loop { let res = master_rx.recv(); if let Ok((task_id, result)) = res { if num_working_instances == 1 { return (task_id, result); } else if task.is_success(&result) { let _ = master_tx.send(BroadcastMsg::Stop); return (task_id, result); } debug! 
{"previous instance failed, waiting for another worker to report..."} num_working_instances -= 1; } } } async fn run_task_until_cancelled<T>( mut task: T, task_id: T::TaskId, tx: Sender<(T::TaskId, T::TaskResult)>, rx: Receiver<BroadcastMsg>, sem: Arc<Semaphore>, ) where T: ProverTask, { let task_fut = task.run(task_id, sem).fuse(); let watchdog_fut = Self::watchdog(rx).fuse(); pin_mut!(task_fut, watchdog_fut); select! { _ = watchdog_fut => { } res = task_fut => { let _ = tx.send((task_id, res)); }, } } async fn watchdog(mut rx: Receiver<BroadcastMsg>) { let _ = rx.recv().await; } } #[derive(Debug, Clone)] pub struct RunBoogieWithSeeds { pub options: Options, pub boogie_file: String, } #[async_trait] impl ProverTask for RunBoogieWithSeeds { type TaskResult = Output; type TaskId = usize; fn init(&mut self, num_instances: usize) -> Vec<Self::TaskId> { if num_instances == 1 { return vec![self.options.backend.random_seed]; } let mut rng = rand::thread_rng(); (0..num_instances) .map(|_| rng.gen::<u8>() as usize) .collect() } async fn run(&mut self, task_id: Self::TaskId, sem: Arc<Semaphore>) -> Self::TaskResult { let _guard = sem.acquire().await; let args = self.get_boogie_command(task_id); debug!("runing Boogie command with seed {}", task_id); Command::new(&args[0]) .args(&args[1..]) .kill_on_drop(true) .output() .await .unwrap() } fn is_success(&self, task_result: &Self::TaskResult) -> bool { if !task_result.status.success() { return false; } let output = String::from_utf8_lossy(&task_result.stdout); self.contains_compilation_error(&output) || !self.contains_timeout(&output) } } impl RunBoogieWithSeeds { pub fn get_boogie_command(&mut self, seed: usize) -> Vec<String> { self.options .backend .boogie_flags .push(format!("-proverOpt:O:smt.random_seed={}", seed)); self.options.get_boogie_command(&self.boogie_file) } fn contains_compilation_error(&self, output: &str) -> bool { let regex = Regex::new(r"(?m)^.*\((?P<line>\d+),(?P<col>\d+)\).*(Error:|error:).*$").unwrap(); 
regex.is_match(output) } fn contains_timeout(&self, output: &str) -> bool { let regex = Regex::new(r"(?m)^.*\((?P<line>\d+),(?P<col>\d+)\).*Verification.*(inconclusive|out of resource|timed out).*$") .unwrap(); regex.is_match(output) } }
use crate::cli::Options; use async_trait::async_trait; use futures::{future::FutureExt, pin_mut, select}; use log::debug; use rand::Rng; use regex::Regex; use std::{ process::Output, sync::{ mpsc::{channel, Sender}, Arc, }, }; use tokio::{ process::Command, sync::{broadcast, broadcast::Receiver, Semaphore}, }; #[derive(Debug, Clone)] enum BroadcastMsg { Stop, } const MAX_PERMITS: usize = usize::MAX >> 4; #[async_trait] pub trait ProverTask { type TaskResult: Send + 'static; type TaskId: Send + Copy + 'static; fn init(&mut self, num_instances: usize) -> Vec<Self::TaskId>; async fn run(&mut self, task_id: Self::TaskId, sem: Arc<Semaphore>) -> Self::TaskResult; fn is_success(&self, task_result: &Self::TaskResult) -> bool; } pub struct ProverTaskRunner(); impl ProverTaskRunner { pub fn run_tasks<T>( mut task: T, num_instances: usize, sequential: bool, ) -> (T::TaskId, T::TaskResult) where T: ProverTask + Clone + Send + 'static, { let rt = tokio::runtime::Builder::new() .threaded_scheduler() .enable_all() .build() .unwrap(); let sem = if sequential { Arc::new(Semaphore::new(1)) } else { Arc::new(Semaphore::new(MAX_PERMITS)) }; let (worker_tx, master_rx) = channel(); let (master_tx, _): ( tokio::sync::broadcast::Sender<BroadcastMsg>, Receiver<BroadcastMsg>, ) = broadcast::channel(num_instances); let task_ids = task.init(num_instances); for task_id in task_ids { let s = sem.clone(); let send_n = worker_tx.clone(); let worker_rx = master_tx.subscribe(); let cloned_task = task.clone(); rt.spawn(async move { Self::run_task_until_cancelled(cloned_task, task_id, send_n, worker_rx, s).await; }); } let mut num_working_instances = num_instances; loop { let res = master_rx.recv(); if let Ok((task_id, result)) = res { if num_working_instances == 1 { return (task_id, result); } else if task.is_success(&result) { let _ = master_tx.send(BroadcastMsg::Stop); return (task_id, result); } debug! 
{"previous instance failed, waiting for another worker to report..."} num_working_instances -= 1; } } } async fn run_task_until_cancelled<T>( mut task: T, task_id: T::TaskId, tx: Sender<(T::TaskId, T::TaskResult)>, rx: Receiver<BroadcastMsg>, sem: Arc<Semaphore>, ) where T: ProverTask, { let task_fut = task.run(task_id, sem).fuse(); let watchdog_fut = Self::watchdog(rx).fuse(); pin_mut!(task_fut, watchdog_fut); select! { _ = watchdog_fut => { } res = task_fut => { let _ = tx.send((task_id, res)); }, } } async fn watchdog(mut rx: Receiver<BroadcastMsg>) { let _ = rx.recv().await; } } #[derive(Debug, Clone)] pub struct RunBoogieWithSeeds { pub options: Options, pub boogie_file: String, } #[async_trait] impl ProverTask for RunBoogieWithSeeds { type TaskResult = Output; type TaskId = usize; fn init(&mut self, num_instances: usize) -> Vec<Self::TaskId> { if num_instances == 1 { return vec![self.options.backend.random_seed]; } let mut rng = rand::thread_rng(); (0..num_instances) .map(|_| rng.gen::<u8>() as usize) .collect() } async fn run(&mut self, task_id: Self::TaskId, sem: Arc<Semaphore>) -> Self::TaskResult { let _guard = sem.acquire().await; let args = self.get_boogie_command(task_id); debug!("runing Boogie command with seed {}", task_id); Command::new(&args[0]) .args(&args[1..]) .kill_on_drop(true) .output() .await .unwrap() } fn is_success(&self, task_result: &Self::TaskResult) -> bool { if !task_result.status.success() { return false; } let output = String::from_utf8_lossy(&task_result.stdout); self.contains_compilation_error(&output) || !self.contains_timeout(&output) } } impl RunBoogieWithSeeds { pub fn get_boogie_command(&mut self, seed: usize) -> Vec<String> { self.options .backend .boogie_flags .push(format!("-proverOpt:O:smt.random_seed={}", seed)); self.options.get_boogie_command(&self.boogie_file) } fn contains_compilation_error(&self, output: &str) -> bool { let regex = Regex::new(r"(?m)^.*\((?P<line>\d+),(?P<col>\d+)\).*(Error:|error:).*$").unwrap(); 
regex.is_match(output) } fn contains_timeout(&self, output: &str) -> bool { let regex = Rege
}
x::new(r"(?m)^.*\((?P<line>\d+),(?P<col>\d+)\).*Verification.*(inconclusive|out of resource|timed out).*$") .unwrap(); regex.is_match(output) }
function_block-function_prefixed
[ { "content": "pub trait RetryStrategy: std::fmt::Debug + Send + Sync {\n\n fn max_retries(&self, err: &Error) -> u32;\n\n fn delay(&self, err: &Error, retries: u32) -> Duration;\n\n fn is_retriable(&self, err: &Error) -> bool;\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Retry {\n\n pub max_retries: u...
Rust
cube3x3x3/src/main.rs
lePerdu/twisted
6f3330fbb594beb9f06d8bfeb307cb60ca8035b9
extern crate pretty_env_logger; extern crate twisted; use std::io::{self, Write}; use twisted::coord::Coord; use twisted::cube::cube3::{ coord::{ CornerOrientCoord, CornerPosCoord, EEdgePosCoord, ESliceAndEOCoord, ESliceEdgePosCoord, EdgeOrientCoord, Phase1Coord, Phase2Coord, Phase2MinusECoord, UdEdgePosCoord, }, notation::Cube3Notation, Cube3Perm, CubeTurn, G1CubeTurn, }; use twisted::move_table::{BasicMoveTable, CompositeMoveTable, MoveTable}; use twisted::notation::{NotationMove, NotationStr}; use twisted::prune_table::{CompositePruneTable, FullPruneTable, PruneTable, ZeroPruneTable}; use twisted::puzzle::PuzzlePerm; use twisted::solver::{solve_cube, SolutionIter}; type Notation = NotationStr<Cube3Notation>; fn do_phase_solve<C, M, P>( move_table: &M, prune_table: &P, perm: &Cube3Perm, target: C, ) -> Option<Notation> where C: Coord<Cube3Perm>, M: MoveTable<Puzzle = Cube3Perm, Coord = C>, P: PruneTable<Puzzle = Cube3Perm, Coord = C, Move = M::Move>, M::Move: Into<NotationMove<Cube3Notation>>, { SolutionIter::new(move_table, prune_table, target, perm) .next() .map(|sol| { Notation::from( sol.iter() .map(|m| (*m).into()) .collect::<Vec<NotationMove<Cube3Notation>>>(), ) }) } fn do_solve<M1, P1, M2, P2>( phase1_move_table: &M1, phase1_prune_table: &P1, phase2_move_table: &M2, phase2_prune_table: &P2, notation: Notation, ) where M1: MoveTable<Puzzle = Cube3Perm, Coord = Phase1Coord, Move = CubeTurn>, P1: PruneTable<Puzzle = Cube3Perm, Coord = Phase1Coord, Move = CubeTurn>, M2: MoveTable<Puzzle = Cube3Perm, Coord = Phase2Coord, Move = G1CubeTurn>, P2: PruneTable<Puzzle = Cube3Perm, Coord = Phase2Coord, Move = G1CubeTurn>, { let perm = notation.permutation(); let phase1_sol_generator = SolutionIter::new( phase1_move_table, phase1_prune_table, Phase1Coord::default(), &perm, ); for phase1_sol_moves in phase1_sol_generator.take(5) { let phase1_solution = Notation::from( phase1_sol_moves .iter() .map(|m| (*m).into()) .collect::<Vec<NotationMove<Cube3Notation>>>(), ); let 
phase1_perm = perm.sequence(&phase1_solution.permutation()); let g1_coord = Phase1Coord::from_perm(&phase1_perm); if g1_coord != Phase1Coord::default() { eprintln!("Error, invalid solution (Coord = {:?})", g1_coord); } let (phase2_solution, solved_perm) = match do_phase_solve( phase2_move_table, phase2_prune_table, &phase1_perm, Phase2Coord::default(), ) { Some(sol) => { let solved = phase1_perm.sequence(&sol.permutation()); let solved_coord = Phase2Coord::from_perm(&solved); if solved_coord != Phase2Coord::default() { eprintln!("Error, invalid solution (Coord = {:?})", solved_coord); } (sol, solved) } None => { println!("No phase 2 solution found"); return; } }; let l1 = phase1_solution.len(); let l2 = phase2_solution.len(); println!( "Solution ({} + {} = {}): {} {}", l1, l2, l1 + l2, phase1_solution, phase2_solution ); } println!(); /* let (phase1_solution, phase1_perm) = match do_phase_solve( phase1_move_table, phase1_prune_table, &perm, Phase1Coord::default(), ) { Some(sol) => { println!("Phase 1 solution: {}", sol); // Check it let solved = perm.sequence(&sol.permutation()); let solved_coord = Phase1Coord::from_perm(&solved); if solved_coord != Phase1Coord::default() { println!("Error, invalid solution (Coord = {:?})", solved_coord); } (sol, solved) } None => { println!("No phase 1 solution found"); return; } }; let (phase2_solution, solved_perm) = match do_phase_solve( phase2_move_table, phase2_prune_table, &phase1_perm, Phase2Coord::default(), ) { Some(sol) => { println!("Phase 2 solution: {}", sol); // Check it let solved = phase1_perm.sequence(&sol.permutation()); let solved_coord = Phase2Coord::from_perm(&solved); if solved_coord != Phase2Coord::default() { println!("Error, invalid solution (Coord = {:?})", solved_coord); } (sol, solved) } None => { println!("No phase 2 solution found"); return; } }; println!("Solution: {} {}", phase1_solution, phase2_solution); */ } fn main() { pretty_env_logger::init(); let mut stdout = io::stdout(); let stdin = 
io::stdin(); let mut input_buf = String::new(); println!("Initializing tables..."); println!("Corner orient..."); let co_table: BasicMoveTable<Cube3Perm, CornerOrientCoord, CubeTurn> = BasicMoveTable::create(); println!("Edge orient..."); let eo_table: BasicMoveTable<Cube3Perm, EdgeOrientCoord, CubeTurn> = BasicMoveTable::create(); println!("E edge location..."); let phase1_eslice_table: BasicMoveTable<Cube3Perm, EEdgePosCoord, CubeTurn> = BasicMoveTable::create(); println!("Edge orient and E edge table..."); let phase1_edge_table = CompositeMoveTable::new(&phase1_eslice_table, &eo_table).to_basic(); let phase1_move_table = CompositeMoveTable::new(&co_table, &phase1_edge_table); println!("Corner orient prune..."); let co_prune_table = FullPruneTable::create(&co_table, CornerOrientCoord::default()); println!("Edge orient prune..."); let phase1_edge_prune_table = FullPruneTable::create(&phase1_edge_table, ESliceAndEOCoord::default()); let phase1_prune_table = CompositePruneTable::new(&co_prune_table, &phase1_edge_prune_table); println!("Corner permutation..."); let cp_table = BasicMoveTable::create(); println!("UD edge permutation..."); let ud_ep_table = BasicMoveTable::create(); println!("E Edge permutation..."); let phase2_eslice_table = BasicMoveTable::create(); let phase2_minus_e_table = CompositeMoveTable::new(&cp_table, &ud_ep_table); let phase2_move_table = CompositeMoveTable::new(&phase2_minus_e_table, &phase2_eslice_table); println!("Phase2 prune..."); let cp_prune_table = FullPruneTable::create(&cp_table, CornerPosCoord::default()); let ud_ep_prune_table = FullPruneTable::create(&ud_ep_table, UdEdgePosCoord::default()); let phase2_minus_e_prune_table = CompositePruneTable::new(&cp_prune_table, &ud_ep_prune_table); let e_slice_prune_table = FullPruneTable::create(&phase2_eslice_table, ESliceEdgePosCoord::default()); let phase2_prune_table = CompositePruneTable::new(&phase2_minus_e_prune_table, &e_slice_prune_table); println!("Done"); loop { 
input_buf.clear(); print!("Scramble: "); stdout.flush().expect("Error flushing stream"); match stdin.read_line(&mut input_buf) { Ok(_) => { if input_buf.is_empty() { break; } match input_buf.parse() { Ok(notation) => do_solve( &phase1_move_table, &phase1_prune_table, &phase2_move_table, &phase2_prune_table, notation, ), Err(_) => { println!("Invalid cube notation"); } } } Err(err) => { eprint!("{}", err); break; } } } }
extern crate pretty_env_logger; extern crate twisted; use std::io::{self, Write}; use twisted::coord::Coord; use twisted::cube::cube3::{ coord::{ CornerOrientCoord, CornerPosCoord, EEdgePosCoord, ESliceAndEOCoord, ESliceEdgePosCoord, EdgeOrientCoord, Phase1Coord, Phase2Coord, Phase2MinusECoord, UdEdgePosCoord, }, notation::Cube3Notation, Cube3Perm, CubeTurn, G1CubeTurn, }; use twisted::move_table::{BasicMoveTable, CompositeMoveTable, MoveTable}; use twisted::notation::{NotationMove, NotationStr}; use twisted::prune_table::{CompositePruneTable, FullPruneTable, PruneTable, ZeroPruneTable}; use twisted::puzzle::PuzzlePerm; use twisted::solver::{solve_cube, SolutionIter}; type Notation = NotationStr<Cube3Notation>; fn do_phase_solve<C, M, P>( move_table: &M, prune_table: &P, perm: &Cube3Perm, target: C, ) -> Option<Notation> where C: Coord<Cube3Perm>, M: MoveTable<Puzzle = Cube3Perm, Coord = C>, P: PruneTable<Puzzle = Cube3Perm, Coord = C, Move = M::Move>, M::Move: Into<NotationMove<Cube3Notation>>, { SolutionIter::new(move_table, prune_table, target, perm) .next() .map(|sol| { Notation::from( sol.iter() .map(|m| (*m).into()) .collect::<Vec<NotationMove<Cube3Notation>>>(), ) }) } fn do_solve<M1, P1, M2, P2>( phase1_move_table: &M1, phase1_prune_table: &P1, phase2_move_table: &M2, phase2_prune_table: &P2, notation: Notation, ) where M1: MoveTable<Puzzle = Cube3Perm, Coord = Phase1Coord, Move = CubeTurn>, P1: PruneTable<Puzzle = Cube3Perm, Coord = Phase1Coord, Move = CubeTurn>, M2: MoveTable<Puzzle = Cube3Perm, Coord = Phase2Coord, Move = G1CubeTurn>, P2: PruneTable<Puzzle = Cube3Perm, Coord = Phase2Coord, Move = G1CubeTurn>, { let perm = notation.permutation(); let phase1_sol_generator = SolutionIter::new( phase1_move_table, phase1_prune_table, Phase1Coord::default(), &perm, ); for phase1_sol_moves in phase1_sol_generator.take(5) { let phase1_solution = Notation::from( phase1_sol_moves .iter() .map(|m| (*m).into()) .collect::<Vec<NotationMove<Cube3Notation>>>(), ); let 
phase1_perm = perm.sequence(&phase1_solution.permutation()); let g1_coord = Phase1Coord::from_perm(&phase1_perm); if g1_coord != Phase1Coord::default() { eprintln!("Error, invalid solution (Coord = {:?})", g1_coord); } let (phase2_solution, solved_perm) = match do_phase_solve( phase2_move_table, phase2_prune_table, &phase1_perm, Phase2Coord::default(), ) { Some(sol) => { let solved = phase1_perm.sequence(&sol.permutation()); let solved_coord = Phase2Coord::from_perm(&solved); if solved_coord != Phase2Coord::default() { eprintln!("Error, invalid solution (Coord = {:?})", solved_coord); } (sol, solved) } None => { println!("No phase 2 solution found"); return; } }; let l1 = phase1_solution.len(); let l2 = phase2_solution.len(); println!( "Solution ({} + {} = {}): {} {}", l1, l2, l1 + l2, phase1_solution, phase2_solution ); } println!(); /* let (phase1_solution, phase1_perm) = match do_phase_solve( phase1_move_table, phase1_prune_table, &perm, Phase1Coord::default(), ) { Some(sol) => { println!("Phase 1 solution: {}", sol); // Check it let solved = perm.sequence(&sol.permutation()); let solved_coord = Phase1Coord::from_perm(&solved); if solved_coo
Ok(notation) => do_solve( &phase1_move_table, &phase1_prune_table, &phase2_move_table, &phase2_prune_table, notation, ), Err(_) => { println!("Invalid cube notation"); } } } Err(err) => { eprint!("{}", err); break; } } } }
rd != Phase1Coord::default() { println!("Error, invalid solution (Coord = {:?})", solved_coord); } (sol, solved) } None => { println!("No phase 1 solution found"); return; } }; let (phase2_solution, solved_perm) = match do_phase_solve( phase2_move_table, phase2_prune_table, &phase1_perm, Phase2Coord::default(), ) { Some(sol) => { println!("Phase 2 solution: {}", sol); // Check it let solved = phase1_perm.sequence(&sol.permutation()); let solved_coord = Phase2Coord::from_perm(&solved); if solved_coord != Phase2Coord::default() { println!("Error, invalid solution (Coord = {:?})", solved_coord); } (sol, solved) } None => { println!("No phase 2 solution found"); return; } }; println!("Solution: {} {}", phase1_solution, phase2_solution); */ } fn main() { pretty_env_logger::init(); let mut stdout = io::stdout(); let stdin = io::stdin(); let mut input_buf = String::new(); println!("Initializing tables..."); println!("Corner orient..."); let co_table: BasicMoveTable<Cube3Perm, CornerOrientCoord, CubeTurn> = BasicMoveTable::create(); println!("Edge orient..."); let eo_table: BasicMoveTable<Cube3Perm, EdgeOrientCoord, CubeTurn> = BasicMoveTable::create(); println!("E edge location..."); let phase1_eslice_table: BasicMoveTable<Cube3Perm, EEdgePosCoord, CubeTurn> = BasicMoveTable::create(); println!("Edge orient and E edge table..."); let phase1_edge_table = CompositeMoveTable::new(&phase1_eslice_table, &eo_table).to_basic(); let phase1_move_table = CompositeMoveTable::new(&co_table, &phase1_edge_table); println!("Corner orient prune..."); let co_prune_table = FullPruneTable::create(&co_table, CornerOrientCoord::default()); println!("Edge orient prune..."); let phase1_edge_prune_table = FullPruneTable::create(&phase1_edge_table, ESliceAndEOCoord::default()); let phase1_prune_table = CompositePruneTable::new(&co_prune_table, &phase1_edge_prune_table); println!("Corner permutation..."); let cp_table = BasicMoveTable::create(); println!("UD edge permutation..."); let ud_ep_table 
= BasicMoveTable::create(); println!("E Edge permutation..."); let phase2_eslice_table = BasicMoveTable::create(); let phase2_minus_e_table = CompositeMoveTable::new(&cp_table, &ud_ep_table); let phase2_move_table = CompositeMoveTable::new(&phase2_minus_e_table, &phase2_eslice_table); println!("Phase2 prune..."); let cp_prune_table = FullPruneTable::create(&cp_table, CornerPosCoord::default()); let ud_ep_prune_table = FullPruneTable::create(&ud_ep_table, UdEdgePosCoord::default()); let phase2_minus_e_prune_table = CompositePruneTable::new(&cp_prune_table, &ud_ep_prune_table); let e_slice_prune_table = FullPruneTable::create(&phase2_eslice_table, ESliceEdgePosCoord::default()); let phase2_prune_table = CompositePruneTable::new(&phase2_minus_e_prune_table, &e_slice_prune_table); println!("Done"); loop { input_buf.clear(); print!("Scramble: "); stdout.flush().expect("Error flushing stream"); match stdin.read_line(&mut input_buf) { Ok(_) => { if input_buf.is_empty() { break; } match input_buf.parse() {
random
[ { "content": "pub fn apply_coord<C, T, P>(coord: C, items_in_order: impl Iterator<Item = T>, items: &mut [P])\n\nwhere\n\n C: PrimInt,\n\n T: Copy + Eq,\n\n P: Copy,\n\n{\n\n let mut coord = coord;\n\n\n\n for (index, _) in items_in_order.enumerate().skip(1) {\n\n let base = C::from(index)...
Rust
src/tui/vterm/vterm.rs
CodeSteak/hs_app
4ec5359ed393fe4a9cbe84254426da9c25b04828
use super::*; use unicode_segmentation::UnicodeSegmentation; pub struct VTerm { width: isize, lines: Vec<Vec<VChar>>, pub tab_size: isize, pub tab_char: VChar, } impl Widget for VTerm { fn size(&mut self) -> (isize, isize) { let w = self.width; let h = self.lines.len(); (w as isize, h as isize) } fn try_set_size(&mut self, w: isize, _h: isize) { for line in self.lines.iter() { if line.len() > w as usize { return; } } self.width = w; } fn get(&mut self, x: isize, y: isize) -> Option<VChar> { let line = self.lines.get(y as usize)?; line.get(x as usize)?.clone().into() } } impl VTerm { pub fn new(width: isize) -> Self { VTerm { width, lines: Default::default(), tab_size: 4, tab_char: VChar { char: ' ', foreground: Color::None, background: Color::None, }, } } pub fn write_words(&mut self, content: &str) { self.write_words_color(content, Color::None); } pub fn write_words_color(&mut self, content: &str, color: Color) { for word in content.split_word_bounds() { self.write_single_word_color(word, color); } } pub fn write(&mut self, content: &str) { self.write_color(content, Color::None); } pub fn write_color(&mut self, content: &str, color: Color) { for c in content.chars() { self.write_char_color(c, color); } } pub fn write_single_word(&mut self, content: &str) { self.write_single_word_color(content, Color::None); } pub fn write_single_word_color(&mut self, content: &str, color: Color) { if self.width == 0 { return; } let word_len = content.len(); let space_left = (self.width - (self .lines .last() .map(|vec| (vec.len() as isize) % (self.width)) .unwrap_or(0))) as usize; if space_left < word_len && word_len < self.width as usize { self.write_char_color('\n', color); } for c in content.chars() { self.write_char_color(c, color); } } pub fn write_vchar(&mut self, ch: VChar) { self.write_char_color(ch.char, ch.foreground); } pub fn write_char_color(&mut self, ch: char, color: Color) { match ch { '\n' => { self.lines .push(Vec::with_capacity((self.width as usize).min(1024))); 
return; } '\t' => { let cursor_pos = self .lines .last() .map(|vec| (vec.len() as isize)) .unwrap_or(0); if self.tab_size == 0 || self.width == 0 { return; } let mut indent = self.tab_size - (cursor_pos % (self.tab_size)); if cursor_pos + indent >= self.width { self.write_char_color('\n', color); indent = self.tab_size; } for _ in 0..indent { let tc = self.tab_char; self.write_char_color(tc.char, tc.foreground); } return; } '\x00'..='\x19' => { return; } _ => (), }; if self .lines .last() .map(|vec| vec.len() >= self.width as usize) .unwrap_or(true) { self.lines .push(Vec::with_capacity((self.width as usize).min(1024))); } let current_line: &mut Vec<VChar> = self.lines.last_mut().unwrap(); current_line.push(VChar { char: ch, foreground: color, background: Color::None, }); } }
use super::*; use unicode_segmentation::UnicodeSegmentation; pub struct VTerm { width: isize, lines: Vec<Vec<VChar>>, pub tab_size: isize, pub tab_char: VChar, } impl Widget for VTerm { fn size(&mut self) -> (isize, isize) { let w = self.width; let h = self.lines.len(); (w as isize, h as isize) } fn try_set_size(&mut self, w: isize, _h: isize) { for line in self.lines.iter() { if line.len() > w as usize { return; } } self.width = w; } fn get(&mut self, x: isize, y: isize) -> Option<VChar> { let line = self.lines.get(y as usize)?; line.get(x as usize)?.clone().into() } } impl VTerm { pub fn new(width: isize) -> Self { VTerm { width, lines: Default::default(), tab_size: 4, tab_char: VChar { char: ' ', foreground: Color::None, background: Color::None, }, } } pub fn write_words(&mut self, content: &str) { self.write_words_color(content, Color::None); } pub fn write_words_color(&mut self, content: &str, color: Color) { for word in content.split_word_bounds() { self.write_single_word_color(word, color); } } pub fn write(&mut self, content: &str) { self.write_color(content, Color::None); } pub fn write_color(&mut self, content: &str, color: Color) { for c in content.chars() { self.write_char_color(c, color); } } pub fn write_single_word(&mut self, content: &str) { self.write_single_word_color(content, Color::None); } pub fn write_single_word_color(&mut self, content: &str, color: Color) { if self.width == 0 { return; } let word_len = content.len(); let space_left = (self.width - (self .lines .last() .map(|vec| (vec.len() as isize) % (self.width)) .unwrap_or(0))) as usize; if space_left < word_len && word_len < self.width as usize { self.write_char_color('\n', color); } for c in content.chars() { self.write_char_color(c, color); } } pub fn write_vchar(&mut self, ch: VChar) { self.write_char_color(ch.char, ch.foreground); } pub fn write_char_color(&mut self, ch: char, color: Color) { match ch { '\n' => { self.lines .push(Vec::with_capacity((self.width as usize).min(1024))); 
return; } '\t' => { let cursor_pos = self .lines .last() .map(|vec| (vec.len() as isize)) .unwrap_or(0); if self.tab_size == 0 || self.width == 0 { return; } let mut indent = self.tab_size - (cursor_pos % (self.tab_size));
for _ in 0..indent { let tc = self.tab_char; self.write_char_color(tc.char, tc.foreground); } return; } '\x00'..='\x19' => { return; } _ => (), }; if self .lines .last() .map(|vec| vec.len() >= self.width as usize) .unwrap_or(true) { self.lines .push(Vec::with_capacity((self.width as usize).min(1024))); } let current_line: &mut Vec<VChar> = self.lines.last_mut().unwrap(); current_line.push(VChar { char: ch, foreground: color, background: Color::None, }); } }
if cursor_pos + indent >= self.width { self.write_char_color('\n', color); indent = self.tab_size; }
if_condition
[ { "content": "pub trait WithBackground<W: Widget> {\n\n fn with_background(self, c : Color) -> Background<W>;\n\n}\n\n\n\nimpl<W : Widget+Sized> WithBackground<W> for W {\n\n fn with_background(self, c : Color) -> Background<W> {\n\n Background(c, self)\n\n }\n\n}", "file_path": "src/tui/vte...
Rust
storage/src/context.rs
fafk/tezedge
a42d44b30f938a976731367c857a58633386a668
use failure::Fail; use crypto::hash::{BlockHash, ContextHash, HashType}; use crate::{BlockStorage, BlockStorageReader, StorageError}; use crate::persistent::{ContextList, ContextMap}; use crate::skip_list::{Bucket, SkipListError}; #[derive(Debug, Fail)] pub enum ContextError { #[fail(display = "Failed to save commit error: {}", error)] CommitWriteError { error: SkipListError }, #[fail(display = "Failed to read from context error: {}", error)] ContextReadError { error: SkipListError }, #[fail(display = "Failed to assign context_hash: {:?} to block_hash: {}, error: {}", context_hash, block_hash, error)] ContextHashAssignError { context_hash: String, block_hash: String, error: StorageError, }, #[fail(display = "InvalidContextHash for context diff to commit, expected_context_hash: {:?}, context_hash: {:?}", expected_context_hash, context_hash)] InvalidContextHashError { expected_context_hash: Option<String>, context_hash: Option<String>, }, #[fail(display = "Unknown context_hash: {:?}", context_hash)] UnknownContextHashError { context_hash: String, }, #[fail(display = "Failed to read block for context_hash: {:?}, error: {}", context_hash, error)] ReadBlockError { context_hash: String, error: StorageError, }, } impl From<SkipListError> for ContextError { fn from(error: SkipListError) -> Self { ContextError::CommitWriteError { error } } } #[macro_export] macro_rules! 
ensure_eq_context_hash { ($x:expr, $y:expr) => {{ let checkouted_diff_context_hash = &$y.predecessor_index.context_hash; if !($x.eq(checkouted_diff_context_hash)) { return Err(ContextError::InvalidContextHashError { expected_context_hash: $x.as_ref().map(|ch| HashType::ContextHash.bytes_to_string(&ch)), context_hash: checkouted_diff_context_hash.as_ref().map(|ch| HashType::ContextHash.bytes_to_string(&ch)), }); } }} } pub trait ContextApi { fn init_from_start(&self) -> ContextDiff; fn checkout(&self, context_hash: &ContextHash) -> Result<ContextDiff, ContextError>; fn commit(&mut self, block_hash: &BlockHash, parent_context_hash: &Option<ContextHash>, new_context_hash: &ContextHash, context_diff: &ContextDiff) -> Result<(), ContextError>; fn delete_to_diff(&self, context_hash: &Option<ContextHash>, key_prefix_to_delete: &Vec<String>, context_diff: &mut ContextDiff) -> Result<(), ContextError>; fn remove_recursively_to_diff(&self, context_hash: &Option<ContextHash>, key_prefix_to_remove: &Vec<String>, context_diff: &mut ContextDiff) -> Result<(), ContextError>; fn copy_to_diff(&self, context_hash: &Option<ContextHash>, from_key: &Vec<String>, to_key: &Vec<String>, context_diff: &mut ContextDiff) -> Result<(), ContextError>; fn get_key(&self, context_index: &ContextIndex, key: &Vec<String>) -> Result<Option<Bucket<Vec<u8>>>, ContextError>; } fn to_key(key: &Vec<String>) -> String { key.join("/") } fn key_starts_with(key: &String, prefix: &Vec<String>) -> bool { key.starts_with(&to_key(prefix)) } fn replace_key(key: &String, matched: &Vec<String>, replacer: &Vec<String>) -> String { key.replace(&to_key(matched), &to_key(replacer)) } pub struct ContextIndex { level: Option<usize>, context_hash: Option<ContextHash>, } impl ContextIndex { pub fn new(level: Option<usize>, context_hash: Option<ContextHash>) -> Self { ContextIndex { level, context_hash } } } pub struct ContextDiff { predecessor_index: ContextIndex, diff: ContextMap, } impl ContextDiff { pub fn 
new(predecessor_level: Option<usize>, predecessor_context_hash: Option<ContextHash>, diff: ContextMap) -> Self { ContextDiff { predecessor_index: ContextIndex::new(predecessor_level, predecessor_context_hash), diff, } } pub fn set(&mut self, context_hash: &Option<ContextHash>, key: &Vec<String>, value: &Vec<u8>) -> Result<(), ContextError> { ensure_eq_context_hash!(context_hash, &self); &self.diff.insert(to_key(key), Bucket::Exists(value.clone())); Ok(()) } } pub struct TezedgeContext { block_storage: BlockStorage, storage: ContextList, } impl TezedgeContext { pub fn new(block_storage: BlockStorage, storage: ContextList) -> Self { TezedgeContext { block_storage, storage } } fn level_by_context_hash(&self, context_hash: &ContextHash) -> Result<usize, ContextError> { let block = self.block_storage .get_by_context_hash(context_hash) .map_err(|e| ContextError::ReadBlockError { context_hash: HashType::ContextHash.bytes_to_string(context_hash), error: e })?; if block.is_none() { return Err(ContextError::UnknownContextHashError { context_hash: HashType::ContextHash.bytes_to_string(context_hash) }); } let block = block.unwrap(); Ok(block.header.level() as usize) } fn get_by_key_prefix(&self, context_index: &ContextIndex, key: &Vec<String>) -> Result<Option<ContextMap>, ContextError> { if context_index.context_hash.is_none() && context_index.level.is_none() { return Ok(None); } let level = if let Some(context_index_level) = context_index.level { context_index_level } else { self.level_by_context_hash(context_index.context_hash.as_ref().unwrap())? 
}; let list = self.storage.read().expect("lock poisoning"); list .get_prefix(level, &to_key(key)) .map_err(|se| ContextError::ContextReadError { error: se }) } } impl ContextApi for TezedgeContext { fn init_from_start(&self) -> ContextDiff { ContextDiff::new(None, None, Default::default()) } fn checkout(&self, context_hash: &ContextHash) -> Result<ContextDiff, ContextError> { let level = self.level_by_context_hash(&context_hash)?; Ok( ContextDiff::new( Some(level), Some(context_hash.clone()), Default::default(), ) ) } fn commit(&mut self, block_hash: &BlockHash, parent_context_hash: &Option<ContextHash>, new_context_hash: &ContextHash, context_diff: &ContextDiff) -> Result<(), ContextError> { ensure_eq_context_hash!(parent_context_hash, &context_diff); let mut writer = self.storage.write().expect("lock poisoning"); writer.push(&context_diff.diff)?; self.block_storage .assign_to_context(block_hash, new_context_hash) .map_err(|e| ContextError::ContextHashAssignError { block_hash: HashType::BlockHash.bytes_to_string(block_hash), context_hash: HashType::ContextHash.bytes_to_string(new_context_hash), error: e, })?; Ok(()) } fn delete_to_diff(&self, context_hash: &Option<ContextHash>, key_prefix_to_delete: &Vec<String>, context_diff: &mut ContextDiff) -> Result<(), ContextError> { ensure_eq_context_hash!(context_hash, &context_diff); self.remove_recursively_to_diff(context_hash, key_prefix_to_delete, context_diff) } fn remove_recursively_to_diff(&self, context_hash: &Option<ContextHash>, key_prefix_to_remove: &Vec<String>, context_diff: &mut ContextDiff) -> Result<(), ContextError> { ensure_eq_context_hash!(context_hash, &context_diff); let context_map_diff = &mut context_diff.diff; context_map_diff.retain(|k, v| { if key_starts_with(k, key_prefix_to_remove) == true { match v { Bucket::Deleted => true, _ => false } } else { true } }); let context = self.get_by_key_prefix(&context_diff.predecessor_index, key_prefix_to_remove)?; if context.is_some() { let context = 
context.unwrap(); for key in context.keys() { context_map_diff.insert(key.clone(), Bucket::Deleted); } } Ok(()) } fn copy_to_diff(&self, context_hash: &Option<ContextHash>, from_key: &Vec<String>, to_key: &Vec<String>, context_diff: &mut ContextDiff) -> Result<(), ContextError> { ensure_eq_context_hash!(context_hash, &context_diff); let mut final_context_to_copy = self.get_by_key_prefix(&context_diff.predecessor_index, from_key)?.unwrap_or(ContextMap::default()); for (key, bucket) in &context_diff.diff { if key_starts_with(key, from_key) == true { match bucket { Bucket::Exists(_) => final_context_to_copy.insert(key.clone(), bucket.clone()), | Bucket::Deleted => final_context_to_copy.remove(key), _ => None }; } } for (key, bucket) in final_context_to_copy { match bucket { Bucket::Exists(_) => { let destination_key = replace_key(&key, from_key, to_key); context_diff.diff.insert(destination_key, bucket.clone()); () } _ => () }; } Ok(()) } fn get_key(&self, context_index: &ContextIndex, key: &Vec<String>) -> Result<Option<Bucket<Vec<u8>>>, ContextError> { if context_index.context_hash.is_none() && context_index.level.is_none() { return Ok(None); } let level = if let Some(context_index_level) = context_index.level { context_index_level } else { self.level_by_context_hash(context_index.context_hash.as_ref().unwrap())? }; let list = self.storage.read().expect("lock poisoning"); list .get_key(level, &to_key(key)) .map_err(|se| ContextError::ContextReadError { error: se }) } }
use failure::Fail; use crypto::hash::{BlockHash, ContextHash, HashType}; use crate::{BlockStorage, BlockStorageReader, StorageError}; use crate::persistent::{ContextList, ContextMap}; use crate::skip_list::{Bucket, SkipListError}; #[derive(Debug, Fail)] pub enum ContextError { #[fail(display = "Failed to save commit error: {}", error)] CommitWriteError { error: SkipListError }, #[fail(display = "Failed to read from context error: {}", error)] ContextReadError { error: SkipListError }, #[fail(display = "Failed to assign context_hash: {:?} to block_hash: {}, error: {}", context_hash, block_hash, error)] ContextHashAssignError { context_hash: String, block_hash: String, error: StorageError, }, #[fail(display = "InvalidContextHash for context diff to commit, expected_context_hash: {:?}, context_hash: {:?}", expected_context_hash, context_hash)] InvalidContextHashError { expected_context_hash: Option<String>, context_hash: Option<String>, }, #[fail(display = "Unknown context_hash: {:?}", context_hash)] UnknownContextHashError { context_hash: String, }, #[fail(display = "Failed to read block for context_hash: {:?}, error: {}", context_hash, error)] ReadBlockError { context_hash: String, error: StorageError, }, } impl From<SkipListError> for ContextError { fn from(error: SkipListError) -> Self { ContextError::CommitWriteError { error } } } #[macro_export] macro_rules! 
ensure_eq_context_hash { ($x:expr, $y:expr) => {{ let checkouted_diff_context_hash = &$y.predecessor_index.context_hash; if !($x.eq(checkouted_diff_context_hash)) { return Err(ContextError::InvalidContextHashError { expected_context_hash: $x.as_ref().map(|ch| HashType::ContextHash.bytes_to_string(&ch)), context_hash: checkouted_diff_context_hash.as_ref().map(|ch| HashType::ContextHash.bytes_to_string(&ch)), }); } }} } pub trait ContextApi { fn init_from_start(&self) -> ContextDiff; fn checkout(&self, context_hash: &ContextHash) -> Result<ContextDiff, ContextError>; fn commit(&mut self, block_hash: &BlockHash, parent_context_hash: &Option<ContextHash>, new_context_hash: &ContextHash, context_diff: &ContextDiff) -> Result<(), ContextError>; fn delete_to_diff(&self, context_hash: &Option<ContextHash>, key_prefix_to_delete: &Vec<String>, context_diff: &mut ContextDiff) -> Result<(), ContextError>; fn remove_recursively_to_diff(&self, context_hash: &Option<ContextHash>, key_prefix_to_remove: &Vec<String>, context_diff: &mut ContextDiff) -> Result<(), ContextError>; fn copy_to_diff(&self, context_hash: &Option<ContextHash>, from_key: &Vec<String>, to_key: &Vec<String>, context_diff: &mut ContextDiff) -> Result<(), ContextError>; fn get_key(&self, context_index: &ContextIndex, key: &Vec<String>) -> Result<Option<Bucket<Vec<u8>>>, ContextError>; } fn to_key(key: &Vec<String>) -> String { key.join("/") } fn key_starts_with(key: &String, prefix: &Vec<String>) -> bool { key.starts_with(&to_key(prefix)) } fn replace_key(key: &String, matched: &Vec<String>, replacer: &Vec<String>) -> String { key.replace(&to_key(matched), &to_key(replacer)) } pub struct ContextIndex { level: Option<usize>, context_hash: Option<ContextHash>, } impl ContextIndex { pub fn new(level: Option<usize>, context_hash: Option<ContextHash>) -> Self { ContextIndex { level, context_hash } } } pub struct ContextDiff { predecessor_index: ContextIndex, diff: ContextMap, } impl ContextDiff { pub fn 
new(predecessor_level: Option<usize>, predecessor_context_hash: Option<ContextHash>, diff: ContextMap) -> Self { ContextDiff { predecessor_index: ContextIndex::new(predecessor_level, predecessor_context_hash), diff, } } pub fn set(&mut self, context_hash: &Option<ContextHash>, key: &Vec<String>, value: &Vec<u8>) -> Result<(), ContextError> { ensure_eq_context_hash!(context_hash, &self); &self.diff.insert(to_key(key), Bucket::Exists(value.clone())); Ok(()) } } pub struct TezedgeContext { block_storage: BlockStorage, storage: ContextList, } impl TezedgeContext { pub fn new(block_storage: BlockStorage, storage: ContextList) -> Self { TezedgeContext { block_storage, storage } } fn level_by_context_hash(&self, context_hash: &ContextHash) -> Result<usize, ContextError> { let block = self.block_storage .get_by_context_hash(context_hash) .map_err(|e| ContextError::ReadBlockError { context_hash: HashType::ContextHash.bytes_to_string(context_hash), error: e })?; if block.is_none() { return Err(ContextError::UnknownContextHashError { context_hash: HashType::ContextHash.bytes_to_string(context_hash) }); } let block = block.unwrap(); Ok(block.header.level() as usize) } fn get_by_key_prefix(&self, context_index: &ContextIndex, key: &Vec<String>) -> Result<Option<ContextMap>, ContextError> { if context_index.context_hash.is_none() && context_index.level.is_none() { return Ok(None); } let level = if let Some(context_index_level) = context_index.level { context_index_level } else { self.level_by_context_hash(context_index.context_hash.as_ref().unwrap())? 
}; let list = self.storage.read().expect("lock poisoning"); list .get_prefix(level, &to_key(key)) .map_err(|se| ContextError::ContextReadError { error: se }) } } impl ContextApi for TezedgeContext { fn init_from_start(&self) -> ContextDiff { ContextDiff::new(None, None, Default::default()) } fn checkout(&self, context_hash: &ContextHash) -> Result<ContextDiff, ContextError> { let level = self.level_by_context_hash(&context_hash)?; Ok( ContextDiff::new( Some(level), Some(context_hash.clone()), Default::default(), ) ) } fn commit(&mut self, block_hash: &BlockHash, parent_context_hash: &Option<ContextHash>, new_context_hash: &ContextHash, context_diff: &ContextDiff) -> Result<(), ContextError> { ensure_eq_context_hash!(parent_context_hash, &cont
fn delete_to_diff(&self, context_hash: &Option<ContextHash>, key_prefix_to_delete: &Vec<String>, context_diff: &mut ContextDiff) -> Result<(), ContextError> { ensure_eq_context_hash!(context_hash, &context_diff); self.remove_recursively_to_diff(context_hash, key_prefix_to_delete, context_diff) } fn remove_recursively_to_diff(&self, context_hash: &Option<ContextHash>, key_prefix_to_remove: &Vec<String>, context_diff: &mut ContextDiff) -> Result<(), ContextError> { ensure_eq_context_hash!(context_hash, &context_diff); let context_map_diff = &mut context_diff.diff; context_map_diff.retain(|k, v| { if key_starts_with(k, key_prefix_to_remove) == true { match v { Bucket::Deleted => true, _ => false } } else { true } }); let context = self.get_by_key_prefix(&context_diff.predecessor_index, key_prefix_to_remove)?; if context.is_some() { let context = context.unwrap(); for key in context.keys() { context_map_diff.insert(key.clone(), Bucket::Deleted); } } Ok(()) } fn copy_to_diff(&self, context_hash: &Option<ContextHash>, from_key: &Vec<String>, to_key: &Vec<String>, context_diff: &mut ContextDiff) -> Result<(), ContextError> { ensure_eq_context_hash!(context_hash, &context_diff); let mut final_context_to_copy = self.get_by_key_prefix(&context_diff.predecessor_index, from_key)?.unwrap_or(ContextMap::default()); for (key, bucket) in &context_diff.diff { if key_starts_with(key, from_key) == true { match bucket { Bucket::Exists(_) => final_context_to_copy.insert(key.clone(), bucket.clone()), | Bucket::Deleted => final_context_to_copy.remove(key), _ => None }; } } for (key, bucket) in final_context_to_copy { match bucket { Bucket::Exists(_) => { let destination_key = replace_key(&key, from_key, to_key); context_diff.diff.insert(destination_key, bucket.clone()); () } _ => () }; } Ok(()) } fn get_key(&self, context_index: &ContextIndex, key: &Vec<String>) -> Result<Option<Bucket<Vec<u8>>>, ContextError> { if context_index.context_hash.is_none() && context_index.level.is_none() { 
return Ok(None); } let level = if let Some(context_index_level) = context_index.level { context_index_level } else { self.level_by_context_hash(context_index.context_hash.as_ref().unwrap())? }; let list = self.storage.read().expect("lock poisoning"); list .get_key(level, &to_key(key)) .map_err(|se| ContextError::ContextReadError { error: se }) } }
ext_diff); let mut writer = self.storage.write().expect("lock poisoning"); writer.push(&context_diff.diff)?; self.block_storage .assign_to_context(block_hash, new_context_hash) .map_err(|e| ContextError::ContextHashAssignError { block_hash: HashType::BlockHash.bytes_to_string(block_hash), context_hash: HashType::ContextHash.bytes_to_string(new_context_hash), error: e, })?; Ok(()) }
function_block-function_prefixed
[ { "content": "#[test]\n\npub fn list_get_values_by_prefix() -> Result<(), failure::Error> {\n\n let tmp_storage = TmpStorage::create(\"__skip_list:list_get_values_by_prefix\").expect(\"Storage error\");\n\n let mut list: Box<dyn TypedSkipList<String, i32>> = Box::new(DatabaseBackedSkipList::new(9, tmp_sto...
Rust
src/oid.rs
Clockwork757/github-types
914a78eba90b2035d071723804d149c379b7bf0f
use std::fmt; use std::ops; use hex::{FromHex, FromHexError, ToHex}; use serde::de::{self, Deserialize, Deserializer, Visitor}; use serde::ser::{self, Serialize, Serializer}; #[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Default)] pub struct Oid([u8; 20]); impl Oid { pub fn from_hex(s: &str) -> Result<Self, ()> { Ok(Oid(<[u8; 20]>::from_hex(s).map_err(|_| ())?)) } pub const EMPTY_TREE: Oid = Oid([ 0x4b, 0x82, 0x5d, 0xc6, 0x42, 0xcb, 0x6e, 0xb9, 0xa0, 0x60, 0xe5, 0x4b, 0xf8, 0xd6, 0x92, 0x88, 0xfb, 0xee, 0x49, 0x04, ]); pub const ZERO: Oid = Oid([ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, ]); } impl fmt::UpperHex for Oid { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.write_hex_upper(f) } } impl fmt::LowerHex for Oid { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.write_hex(f) } } impl fmt::Display for Oid { #[inline] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::LowerHex>::fmt(self, f) } } impl fmt::Debug for Oid { #[inline] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Display>::fmt(self, f) } } impl ops::Deref for Oid { type Target = [u8; 20]; fn deref(&self) -> &Self::Target { &self.0 } } impl Serialize for Oid { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { if serializer.is_human_readable() { let mut hex = String::new(); self.0 .as_ref() .write_hex(&mut hex) .map_err(ser::Error::custom)?; serializer.serialize_str(&hex) } else { serializer.serialize_bytes(self.0.as_ref()) } } } impl<'de> Deserialize<'de> for Oid { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { struct OidVisitor; impl<'de> Visitor<'de> for OidVisitor { type Value = Oid; fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "hex string or 20 bytes") } fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: de::Error, { let 
v = <[u8; 20]>::from_hex(v).map_err(|e| match e { FromHexError::InvalidHexCharacter { c, .. } => { E::invalid_value( de::Unexpected::Char(c), &"string with only hexadecimal characters", ) } FromHexError::InvalidStringLength => E::invalid_length( v.len(), &"hex string with a valid length", ), FromHexError::OddLength => E::invalid_length( v.len(), &"hex string with an even length", ), })?; Ok(Oid(v)) } fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E> where E: de::Error, { if v.len() != 20 { return Err(E::invalid_length(v.len(), &"20 bytes")); } let mut inner = <[u8; 20]>::default(); inner.copy_from_slice(v); Ok(Oid(inner)) } } if deserializer.is_human_readable() { deserializer.deserialize_str(OidVisitor) } else { deserializer.deserialize_bytes(OidVisitor) } } }
use std::fmt; use std::ops; use hex::{FromHex, FromHexError, ToHex}; use serde::de::{self, Deserialize, Deserializer, Visitor}; use serde::ser::{self, Serialize, Serializer}; #[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Default)] pub struct Oid([u8; 20]); impl Oid { pub fn from_hex(s: &str) -> Result<Self, ()> { Ok(Oid(<[u8; 20]>::from_hex(s).map_err(|_| ())?)) } pub const EMPTY_TREE: Oid = Oid([ 0x4b, 0x82, 0x5d, 0xc6, 0x42, 0xcb, 0x6e, 0xb9, 0xa0, 0x60, 0xe5, 0x4b, 0xf8, 0xd6, 0x92, 0x88, 0xfb, 0xee, 0x49, 0x04, ]); pub const ZERO: Oid = Oid([ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, ]); } impl fmt::UpperHex for Oid { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.write_hex_upper(f) } } impl fmt::LowerHex for Oid { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.write_hex(f) } } impl fmt::Display for Oid { #[inline] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::LowerHex>::fmt(self, f) } } impl fmt::Debug for Oid { #[inline] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Display>::fmt(self, f) } } impl ops::Deref for Oid { type Target = [u8; 20]; fn deref(&self) -> &Self::Target { &self.0 } } impl Serialize for Oid { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { if serializer.is_human_readable() { let mut hex = String::new(); self.0 .as_ref() .write_hex(&mut hex) .map_err(ser::Error::custom)?; serializer.serialize_str(&hex) } else { serializer.serialize_bytes(self.0.as_ref()) } } } impl<'de> Deserialize<'de> for Oid { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { struct OidVisitor; impl<'de> Visitor<'de> for OidVisitor { type Value = Oid; fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "hex string or 20 bytes") } fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: de::Error, { let 
v = <[u8; 20]>::from_hex(v).map_err(|e| match e {
}
FromHexError::InvalidHexCharacter { c, .. } => { E::invalid_value( de::Unexpected::Char(c), &"string with only hexadecimal characters", ) } FromHexError::InvalidStringLength => E::invalid_length( v.len(), &"hex string with a valid length", ), FromHexError::OddLength => E::invalid_length( v.len(), &"hex string with an even length", ), })?; Ok(Oid(v)) } fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E> where E: de::Error, { if v.len() != 20 { return Err(E::invalid_length(v.len(), &"20 bytes")); } let mut inner = <[u8; 20]>::default(); inner.copy_from_slice(v); Ok(Oid(inner)) } } if deserializer.is_human_readable() { deserializer.deserialize_str(OidVisitor) } else { deserializer.deserialize_bytes(OidVisitor) } }
function_block-function_prefixed
[ { "content": "pub trait AppEvent {\n\n /// Returns the installation ID for the event.\n\n fn installation(&self) -> Option<u64> {\n\n None\n\n }\n\n}\n\n\n\n#[derive(\n\n Deserialize, Serialize, Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash,\n\n)]\n\n#[serde(rename_all = \"snake_ca...
Rust
vrp-pragmatic/src/checker/assignment.rs
dooley/vrp
0007543128fcf6d261c1e08be4a006aaa90a331f
#[cfg(test)] #[path = "../../tests/unit/checker/assignment_test.rs"] mod assignment_test; use super::*; use crate::format::solution::activity_matcher::try_match_job; use crate::format::{get_coord_index, get_job_index}; use std::collections::HashSet; pub fn check_assignment(ctx: &CheckerContext) -> Result<(), String> { check_vehicles(ctx)?; check_jobs_presence(ctx)?; check_jobs_match(ctx)?; Ok(()) } fn check_vehicles(ctx: &CheckerContext) -> Result<(), String> { let all_vehicles: HashSet<_> = ctx.problem.fleet.vehicles.iter().flat_map(|v| v.vehicle_ids.iter()).collect(); let mut used_vehicles = HashSet::<(String, usize)>::new(); ctx.solution.tours.iter().try_for_each(|tour| { if !all_vehicles.contains(&tour.vehicle_id) { return Err(format!("Used vehicle with unknown id: {}", tour.vehicle_id)); } if !(used_vehicles.insert((tour.vehicle_id.to_string(), tour.shift_index))) { Err(format!("Vehicle with '{}' id used more than once for shift {}", tour.vehicle_id, tour.shift_index)) } else { Ok(()) } })?; Ok(()) } fn check_jobs_presence(ctx: &CheckerContext) -> Result<(), String> { struct JobAssignment { pub tour_info: (String, usize), pub pickups: Vec<usize>, pub deliveries: Vec<usize>, pub replacements: Vec<usize>, pub services: Vec<usize>, } let new_assignment = |tour_info: (String, usize)| JobAssignment { tour_info, pickups: vec![], deliveries: vec![], replacements: vec![], services: vec![], }; let activity_types: HashSet<_> = vec!["pickup", "delivery", "service", "replacement"].into_iter().collect(); let all_jobs = ctx.problem.plan.jobs.iter().map(|job| (job.id.clone(), job.clone())).collect::<HashMap<_, _>>(); let mut used_jobs = HashMap::<String, JobAssignment>::new(); ctx.solution.tours.iter().try_for_each(|tour| { tour.stops .iter() .flat_map(|stop| stop.activities.iter()) .enumerate() .filter(|(_, activity)| activity_types.contains(&activity.activity_type.as_str())) .try_for_each(|(idx, activity)| { let tour_info = (tour.vehicle_id.clone(), tour.shift_index); let 
asgn = used_jobs.entry(activity.job_id.clone()).or_insert_with(|| new_assignment(tour_info.clone())); if asgn.tour_info != tour_info { return Err(format!("Job served in multiple tours: '{}'", activity.job_id)); } match activity.activity_type.as_str() { "pickup" => asgn.pickups.push(idx), "delivery" => asgn.deliveries.push(idx), "service" => asgn.services.push(idx), "replacement" => asgn.replacements.push(idx), _ => {} } Ok(()) }) })?; used_jobs.iter().try_for_each(|(id, asgn)| { let job = all_jobs.get(id).ok_or_else(|| format!("Cannot find job with id {}", id))?; let expected_tasks = job.pickups.as_ref().map_or(0, |p| p.len()) + job.deliveries.as_ref().map_or(0, |d| d.len()) + job.services.as_ref().map_or(0, |s| s.len()) + job.replacements.as_ref().map_or(0, |r| r.len()); let assigned_tasks = asgn.pickups.len() + asgn.deliveries.len() + asgn.services.len() + asgn.replacements.len(); if expected_tasks != assigned_tasks { return Err(format!( "Not all tasks served for '{}', expected: {}, assigned: {}", id, expected_tasks, assigned_tasks )); } if !asgn.deliveries.is_empty() && asgn.pickups.iter().max() > asgn.deliveries.iter().min() { return Err(format!("Found pickup after delivery for '{}'", id)); } Ok(()) })?; let all_unassigned_jobs = ctx .solution .unassigned .iter() .flat_map(|jobs| jobs.iter().filter(|job| !job.job_id.ends_with("_break"))) .map(|job| job.job_id.clone()) .collect::<Vec<_>>(); let unique_unassigned_jobs = all_unassigned_jobs.iter().cloned().collect::<HashSet<_>>(); if unique_unassigned_jobs.len() != all_unassigned_jobs.len() { return Err("Duplicated job ids in the list of unassigned jobs".to_string()); } unique_unassigned_jobs.iter().try_for_each(|job_id| { if !all_jobs.contains_key(job_id) { return Err(format!("Unknown job id in the list of unassigned jobs: '{}'", job_id)); } if used_jobs.contains_key(job_id) { return Err(format!("Job present as assigned and unassigned: '{}'", job_id)); } Ok(()) })?; let all_used_job = 
unique_unassigned_jobs.into_iter().chain(used_jobs.into_iter().map(|(id, _)| id)).collect::<Vec<_>>(); if all_used_job.len() != all_jobs.len() { return Err(format!( "Amount of jobs present in problem and solution doesn't match: {} vs {}", all_jobs.len(), all_used_job.len() )); } Ok(()) } fn check_jobs_match(ctx: &CheckerContext) -> Result<(), String> { let job_ids = ctx .solution .tours .iter() .flat_map(move |tour| { tour.stops.iter().flat_map(move |stop| { stop.activities .iter() .filter(move |activity| { try_match_job( tour, stop, activity, get_job_index(&ctx.core_problem), get_coord_index(&ctx.core_problem), ) .is_err() }) .map(|activity| { format!( "{}:{}", activity.job_id.clone(), activity.job_tag.as_ref().unwrap_or(&"<no tag>".to_string()) ) }) }) }) .collect::<Vec<_>>(); if !job_ids.is_empty() { return Err(format!("cannot match activities to jobs: {}", job_ids.join(", "))); } Ok(()) }
#[cfg(test)] #[path = "../../tests/unit/checker/assignment_test.rs"] mod assignment_test; use super::*; use crate::format::solution::activity_matcher::try_match_job; use crate::format::{get_coord_index, get_job_index}; use std::collections::HashSet; pub fn check_assignment(ctx: &CheckerContext) -> Result<(), String> { check_vehicles(ctx)?; check_jobs_presence(ctx)?; check_jobs_match(ctx)?; Ok(()) } fn check_vehicles(ctx: &CheckerContext) -> Result<(), String> { let all_vehicles: HashSet<_> = ctx.problem.fleet.vehicles.iter().flat_map(|v| v.vehicle_ids.iter()).collect(); let mut used_vehicles = HashSet::<(String, usize)>::new(); ctx.solution.tours.iter().try_for_each(|tour| { if !all_vehicles.contains(&tour.vehicle_id) { return Err(format!("Used vehicle with unknown id: {}", tour.vehicle_id)); } if !(used_vehicles.insert((tour.vehicle_id.to_string(), tour.shift_index))) { Err(format!("Vehicle with '{}' id used more than once for shift {}", tour.vehicle_id, tour.shift_index)) } else { Ok(()) } })?; Ok(()) }
fn check_jobs_match(ctx: &CheckerContext) -> Result<(), String> { let job_ids = ctx .solution .tours .iter() .flat_map(move |tour| { tour.stops.iter().flat_map(move |stop| { stop.activities .iter() .filter(move |activity| { try_match_job( tour, stop, activity, get_job_index(&ctx.core_problem), get_coord_index(&ctx.core_problem), ) .is_err() }) .map(|activity| { format!( "{}:{}", activity.job_id.clone(), activity.job_tag.as_ref().unwrap_or(&"<no tag>".to_string()) ) }) }) }) .collect::<Vec<_>>(); if !job_ids.is_empty() { return Err(format!("cannot match activities to jobs: {}", job_ids.join(", "))); } Ok(()) }
fn check_jobs_presence(ctx: &CheckerContext) -> Result<(), String> { struct JobAssignment { pub tour_info: (String, usize), pub pickups: Vec<usize>, pub deliveries: Vec<usize>, pub replacements: Vec<usize>, pub services: Vec<usize>, } let new_assignment = |tour_info: (String, usize)| JobAssignment { tour_info, pickups: vec![], deliveries: vec![], replacements: vec![], services: vec![], }; let activity_types: HashSet<_> = vec!["pickup", "delivery", "service", "replacement"].into_iter().collect(); let all_jobs = ctx.problem.plan.jobs.iter().map(|job| (job.id.clone(), job.clone())).collect::<HashMap<_, _>>(); let mut used_jobs = HashMap::<String, JobAssignment>::new(); ctx.solution.tours.iter().try_for_each(|tour| { tour.stops .iter() .flat_map(|stop| stop.activities.iter()) .enumerate() .filter(|(_, activity)| activity_types.contains(&activity.activity_type.as_str())) .try_for_each(|(idx, activity)| { let tour_info = (tour.vehicle_id.clone(), tour.shift_index); let asgn = used_jobs.entry(activity.job_id.clone()).or_insert_with(|| new_assignment(tour_info.clone())); if asgn.tour_info != tour_info { return Err(format!("Job served in multiple tours: '{}'", activity.job_id)); } match activity.activity_type.as_str() { "pickup" => asgn.pickups.push(idx), "delivery" => asgn.deliveries.push(idx), "service" => asgn.services.push(idx), "replacement" => asgn.replacements.push(idx), _ => {} } Ok(()) }) })?; used_jobs.iter().try_for_each(|(id, asgn)| { let job = all_jobs.get(id).ok_or_else(|| format!("Cannot find job with id {}", id))?; let expected_tasks = job.pickups.as_ref().map_or(0, |p| p.len()) + job.deliveries.as_ref().map_or(0, |d| d.len()) + job.services.as_ref().map_or(0, |s| s.len()) + job.replacements.as_ref().map_or(0, |r| r.len()); let assigned_tasks = asgn.pickups.len() + asgn.deliveries.len() + asgn.services.len() + asgn.replacements.len(); if expected_tasks != assigned_tasks { return Err(format!( "Not all tasks served for '{}', expected: {}, assigned: {}", id, 
expected_tasks, assigned_tasks )); } if !asgn.deliveries.is_empty() && asgn.pickups.iter().max() > asgn.deliveries.iter().min() { return Err(format!("Found pickup after delivery for '{}'", id)); } Ok(()) })?; let all_unassigned_jobs = ctx .solution .unassigned .iter() .flat_map(|jobs| jobs.iter().filter(|job| !job.job_id.ends_with("_break"))) .map(|job| job.job_id.clone()) .collect::<Vec<_>>(); let unique_unassigned_jobs = all_unassigned_jobs.iter().cloned().collect::<HashSet<_>>(); if unique_unassigned_jobs.len() != all_unassigned_jobs.len() { return Err("Duplicated job ids in the list of unassigned jobs".to_string()); } unique_unassigned_jobs.iter().try_for_each(|job_id| { if !all_jobs.contains_key(job_id) { return Err(format!("Unknown job id in the list of unassigned jobs: '{}'", job_id)); } if used_jobs.contains_key(job_id) { return Err(format!("Job present as assigned and unassigned: '{}'", job_id)); } Ok(()) })?; let all_used_job = unique_unassigned_jobs.into_iter().chain(used_jobs.into_iter().map(|(id, _)| id)).collect::<Vec<_>>(); if all_used_job.len() != all_jobs.len() { return Err(format!( "Amount of jobs present in problem and solution doesn't match: {} vs {}", all_jobs.len(), all_used_job.len() )); } Ok(()) }
function_block-full_function
[ { "content": "/// Checks that vehicle load is assigned correctly. The following rules are checked:\n\n/// * max vehicle's capacity is not violated\n\n/// * load change is correct\n\npub fn check_vehicle_load(context: &CheckerContext) -> Result<(), String> {\n\n context.solution.tours.iter().try_for_each(|tou...
Rust
src/handlers/users/mod.rs
jsvana/uwiki
d00c93d75b8803814e104cf20ad98976f272c0f1
use std::convert::TryInto; use std::iter; use std::time::{SystemTime, UNIX_EPOCH}; use anyhow::{anyhow, Result}; use bcrypt::{hash, verify, DEFAULT_COST}; use handlebars::Handlebars; use rand::distributions::Alphanumeric; use rand::{thread_rng, Rng}; use serde_json::json; use sqlx::{Pool, Postgres}; use warp::http::{StatusCode, Uri}; use warp_sessions::{MemoryStore, SessionWithStore}; use crate::handlers::util::{ attempt_to_set_flash, error_html, error_redirect, get_and_clear_flash, HandlerReturn, Image, Page, User, UserState, }; use crate::{value_or_error_html, value_or_error_redirect, Config}; pub async fn render_create( templates: Handlebars<'_>, session_with_store: SessionWithStore<MemoryStore>, ) -> Result<HandlerReturn, warp::Rejection> { let text = match templates.render("users/create", &json!({})) { Ok(text) => text, Err(e) => { format!("<html>Error rendering new user template: {}</html>", e) } }; Ok(( Box::new(warp::reply::with_status( warp::reply::html(text), StatusCode::OK, )), session_with_store, )) } pub async fn create( request: uwiki_types::AddUserRequest, db: Pool<Postgres>, mut session_with_store: SessionWithStore<MemoryStore>, ) -> Result<HandlerReturn, warp::Rejection> { let hashed_password = value_or_error_redirect!( hash(request.password, DEFAULT_COST), Uri::from_static("/"), "Error hashing password", session_with_store ); session_with_store = attempt_to_set_flash( &format!("Requested new user {}", request.username), session_with_store, ); match sqlx::query!( "INSERT INTO users (username, password) VALUES ($1, $2)", request.username, hashed_password, ) .execute(&db) .await { Ok(_) => Ok(( Box::new(warp::redirect::see_other(Uri::from_static("/"))), session_with_store, )), Err(e) => Ok(error_redirect( Uri::from_static("/"), format!("Internal error (error persisting data): {}", e), session_with_store, )), } } async fn set_user_state( target_user_id: i32, target_state: UserState, db: Pool<Postgres>, templates: Handlebars<'_>, session_with_store: 
SessionWithStore<MemoryStore>, ) -> Result<HandlerReturn, warp::Rejection> { let mut tx = value_or_error_html!( db.begin().await, "Error communicating with database", StatusCode::INTERNAL_SERVER_ERROR, &templates, session_with_store ); let token = value_or_error_redirect!( session_with_store .session .get::<String>("sid") .ok_or_else(|| anyhow!("missing sid token")), Uri::from_static("/"), "Not logged in".to_string(), session_with_store ); let admin = value_or_error_redirect!( sqlx::query!( "SELECT users.admin AS admin \ FROM tokens \ LEFT JOIN users \ ON users.id = tokens.user_id \ WHERE tokens.token = $1 \ AND expiration >= CAST(EXTRACT(epoch FROM CURRENT_TIMESTAMP) AS INTEGER)", token, ) .fetch_one(&mut tx) .await, Uri::from_static("/"), "Not logged in".to_string(), session_with_store ) .admin; if !admin { return Ok(error_redirect( Uri::from_static("/me"), "You do not have admin permissions".to_string(), session_with_store, )); } match sqlx::query!( "UPDATE users SET state = $1 WHERE id = $2", target_state.to_string(), target_user_id ) .execute(&db) .await { Ok(_) => Ok(( Box::new(warp::redirect::see_other(Uri::from_static("/me"))), session_with_store, )), Err(e) => Ok(error_redirect( Uri::from_static("/me"), format!("Internal error (error persisting data): {}", e), session_with_store, )), } } pub async fn approve( user_id: i32, db: Pool<Postgres>, templates: Handlebars<'_>, session_with_store: SessionWithStore<MemoryStore>, ) -> Result<HandlerReturn, warp::Rejection> { set_user_state( user_id, UserState::Active, db, templates, session_with_store, ) .await } pub async fn reject( user_id: i32, db: Pool<Postgres>, templates: Handlebars<'_>, session_with_store: SessionWithStore<MemoryStore>, ) -> Result<HandlerReturn, warp::Rejection> { set_user_state( user_id, UserState::Rejected, db, templates, session_with_store, ) .await } pub async fn render_login( templates: Handlebars<'_>, session_with_store: SessionWithStore<MemoryStore>, ) -> Result<(impl warp::Reply, 
SessionWithStore<MemoryStore>), warp::Rejection> { let (flash, session_with_store) = get_and_clear_flash(session_with_store); let text = match templates.render("users/login", &json!({ "flash": flash })) { Ok(text) => text, Err(e) => { format!("<html>Error: {}</html>", e) } }; Ok(( warp::reply::with_status(warp::reply::html(text), StatusCode::OK), session_with_store, )) } pub async fn login( request: uwiki_types::AuthenticateRequest, db: Pool<Postgres>, config: Config, mut session_with_store: SessionWithStore<MemoryStore>, ) -> Result<HandlerReturn, warp::Rejection> { let mut tx = value_or_error_redirect!( db.begin().await, Uri::from_static("/login"), "Error authenticating", session_with_store ); let user = value_or_error_redirect!( sqlx::query!( "SELECT id, password, state FROM users WHERE username = $1", request.username, ) .fetch_one(&mut tx) .await, Uri::from_static("/login"), "Invalid username or password", session_with_store ); if user.state != "active" { return Ok(error_redirect( Uri::from_static("/"), "Account not yet marked active".to_string(), session_with_store, )); } if let Ok(false) | Err(_) = verify(request.password, &user.password) { return Ok(error_redirect( Uri::from_static("/login"), "Invalid username or password".to_string(), session_with_store, )); } let token: String = { let mut rng = thread_rng(); iter::repeat(()) .map(|()| rng.sample(Alphanumeric)) .map(char::from) .take(60) .collect() }; let token = format!("lgn:{}", token); if let Err(e) = session_with_store.session.insert("sid", token.clone()) { return Ok(error_redirect( Uri::from_static("/login"), format!("Internal error (failed to persist token to session): {}", e), session_with_store, )); } let now = value_or_error_redirect!( SystemTime::now().duration_since(UNIX_EPOCH), Uri::from_static("/login"), "Internal error (time went backwards)", session_with_store ); let expiration: i32 = value_or_error_redirect!( (now + config.token_ttl).as_secs().try_into(), Uri::from_static("/login"), 
"Internal error (expiration timestamp too large)", session_with_store ); if let Err(e) = sqlx::query!( "INSERT INTO tokens (user_id, token, expiration) VALUES ($1, $2, $3)", user.id, token, expiration, ) .execute(&mut tx) .await { return Ok(error_redirect( Uri::from_static("/login"), format!("Internal error (error generating token): {}", e), session_with_store, )); } session_with_store = attempt_to_set_flash("Logged in successfully", session_with_store); match tx.commit().await { Ok(_) => Ok(( Box::new(warp::redirect::see_other(Uri::from_static("/me"))), session_with_store, )), Err(e) => Ok(error_redirect( Uri::from_static("/login"), format!("Internal error (error persisting data): {}", e), session_with_store, )), } } pub async fn render( db: Pool<Postgres>, templates: Handlebars<'_>, session_with_store: SessionWithStore<MemoryStore>, ) -> Result<(Box<dyn warp::Reply>, SessionWithStore<MemoryStore>), warp::Rejection> { let (flash, session_with_store) = get_and_clear_flash(session_with_store); let token = match session_with_store.session.get::<String>("sid") { Some(token) => token, None => { return Ok(error_redirect( Uri::from_static("/"), "Not logged in".to_string(), session_with_store, )); } }; let mut tx = value_or_error_html!( db.begin().await, "Error generating user page", StatusCode::INTERNAL_SERVER_ERROR, &templates, session_with_store ); let row = value_or_error_redirect!( sqlx::query!( "SELECT \ users.id AS user_id, \ users.username AS username, \ users.admin AS admin \ FROM tokens \ LEFT JOIN users ON users.id = tokens.user_id WHERE token = $1 \ AND expiration >= CAST(EXTRACT(epoch FROM CURRENT_TIMESTAMP) AS INTEGER)", token, ) .fetch_one(&mut tx) .await, Uri::from_static("/"), "Not logged in".to_string(), session_with_store ); let (user_id, username, admin) = (row.user_id, row.username, row.admin); let pages = value_or_error_html!( sqlx::query_as!( Page, "SELECT slug, title FROM pages \ WHERE owner_id = $1", user_id ) .fetch_all(&db) .await, "Unable to 
fetch owned pages", StatusCode::INTERNAL_SERVER_ERROR, &templates, session_with_store ); let pages = match pages.len() { 0 => None, _ => Some(pages), }; let images = value_or_error_html!( sqlx::query_as!( Image, "SELECT CONCAT(slug, '.', extension) AS slug_with_extension, slug, alt_text FROM images \ WHERE owner_id = $1", user_id ) .fetch_all(&db) .await, "Unable to fetch owned images", StatusCode::INTERNAL_SERVER_ERROR, &templates, session_with_store ); let images = match images.len() { 0 => None, _ => Some(images), }; let approvals = if admin { let approvals = value_or_error_html!( sqlx::query_as!( User, "SELECT \ username, \ id, \ TO_CHAR(created_at, 'MM/DD/YYYY HH24:MI:SS') AS created_at \ FROM users \ WHERE state = 'pending' \ ORDER BY created_at DESC", ) .fetch_all(&mut tx) .await, "Unable to fetch account approvals", StatusCode::INTERNAL_SERVER_ERROR, &templates, session_with_store ); match approvals.len() { 0 => None, _ => Some(approvals), } } else { None }; let text = match templates.render( "users/render", &json!({ "flash": flash, "pages": pages, "images": images, "approvals": approvals, "current_username": username}), ) { Ok(text) => text, Err(e) => { format!("<html>Error rendering user template: {}</html>", e) } }; Ok(( Box::new(warp::reply::with_status( warp::reply::html(text), StatusCode::OK, )), session_with_store, )) }
use std::convert::TryInto; use std::iter; use std::time::{SystemTime, UNIX_EPOCH}; use anyhow::{anyhow, Result}; use bcrypt::{hash, verify, DEFAULT_COST}; use handlebars::Handlebars; use rand::distributions::Alphanumeric; use rand::{thread_rng, Rng}; use serde_json::json; use sqlx::{Pool, Postgres}; use warp::http::{StatusCode, Uri}; use warp_sessions::{MemoryStore, SessionWithStore}; use crate::handlers::util::{ attempt_to_set_flash, error_html, error_redirect, get_and_clear_flash, HandlerReturn, Image, Page, User, UserState, }; use crate::{value_or_error_html, value_or_error_redirect, Config}; pub async fn render_create( templates: Handlebars<'_>, session_with_store: SessionWithStore<MemoryStore>, ) -> Result<HandlerReturn, warp::Rejection> { let text = match templates.render("users/create", &json!({})) { Ok(text) => text, Err(e) => { format!("<html>Error rendering new user template: {}</html>", e) } }; Ok(( Box::new(warp::reply::with_status( warp::reply::html(text), StatusCode::OK, )), session_with_store, )) } pub async fn create( request: uwiki_types::AddUserRequest, db: Pool<Postgres>, mut session_with_store: SessionWithStore<MemoryStore>, ) -> Result<HandlerReturn, warp::Rejection> { let hashed_password = value_or_error_redirect!( hash(request.password, DEFAULT_COST), Uri::from_static("/"), "Error hashing password", session_with_store ); session_with_store = attempt_to_set_flash( &format!("Requested new user {}", request.username), session_with_store, ); match sqlx::query!( "INSERT INTO users (username, password) VALUES ($1, $2)", request.username, hashed_password, ) .execute(&db) .await { Ok(_) => Ok(( Box::new(warp::redirect::see_other(Uri::from_static("/"))), session_with_store, )), Err(e) => Ok(error_redirect( Uri::from_static("/"), format!("Internal error (error persisting data): {}", e), session_with_store, )), } } async fn set_user_state( target_user_id: i32, target_state: UserState,
pub async fn approve( user_id: i32, db: Pool<Postgres>, templates: Handlebars<'_>, session_with_store: SessionWithStore<MemoryStore>, ) -> Result<HandlerReturn, warp::Rejection> { set_user_state( user_id, UserState::Active, db, templates, session_with_store, ) .await } pub async fn reject( user_id: i32, db: Pool<Postgres>, templates: Handlebars<'_>, session_with_store: SessionWithStore<MemoryStore>, ) -> Result<HandlerReturn, warp::Rejection> { set_user_state( user_id, UserState::Rejected, db, templates, session_with_store, ) .await } pub async fn render_login( templates: Handlebars<'_>, session_with_store: SessionWithStore<MemoryStore>, ) -> Result<(impl warp::Reply, SessionWithStore<MemoryStore>), warp::Rejection> { let (flash, session_with_store) = get_and_clear_flash(session_with_store); let text = match templates.render("users/login", &json!({ "flash": flash })) { Ok(text) => text, Err(e) => { format!("<html>Error: {}</html>", e) } }; Ok(( warp::reply::with_status(warp::reply::html(text), StatusCode::OK), session_with_store, )) } pub async fn login( request: uwiki_types::AuthenticateRequest, db: Pool<Postgres>, config: Config, mut session_with_store: SessionWithStore<MemoryStore>, ) -> Result<HandlerReturn, warp::Rejection> { let mut tx = value_or_error_redirect!( db.begin().await, Uri::from_static("/login"), "Error authenticating", session_with_store ); let user = value_or_error_redirect!( sqlx::query!( "SELECT id, password, state FROM users WHERE username = $1", request.username, ) .fetch_one(&mut tx) .await, Uri::from_static("/login"), "Invalid username or password", session_with_store ); if user.state != "active" { return Ok(error_redirect( Uri::from_static("/"), "Account not yet marked active".to_string(), session_with_store, )); } if let Ok(false) | Err(_) = verify(request.password, &user.password) { return Ok(error_redirect( Uri::from_static("/login"), "Invalid username or password".to_string(), session_with_store, )); } let token: String = { let mut 
rng = thread_rng(); iter::repeat(()) .map(|()| rng.sample(Alphanumeric)) .map(char::from) .take(60) .collect() }; let token = format!("lgn:{}", token); if let Err(e) = session_with_store.session.insert("sid", token.clone()) { return Ok(error_redirect( Uri::from_static("/login"), format!("Internal error (failed to persist token to session): {}", e), session_with_store, )); } let now = value_or_error_redirect!( SystemTime::now().duration_since(UNIX_EPOCH), Uri::from_static("/login"), "Internal error (time went backwards)", session_with_store ); let expiration: i32 = value_or_error_redirect!( (now + config.token_ttl).as_secs().try_into(), Uri::from_static("/login"), "Internal error (expiration timestamp too large)", session_with_store ); if let Err(e) = sqlx::query!( "INSERT INTO tokens (user_id, token, expiration) VALUES ($1, $2, $3)", user.id, token, expiration, ) .execute(&mut tx) .await { return Ok(error_redirect( Uri::from_static("/login"), format!("Internal error (error generating token): {}", e), session_with_store, )); } session_with_store = attempt_to_set_flash("Logged in successfully", session_with_store); match tx.commit().await { Ok(_) => Ok(( Box::new(warp::redirect::see_other(Uri::from_static("/me"))), session_with_store, )), Err(e) => Ok(error_redirect( Uri::from_static("/login"), format!("Internal error (error persisting data): {}", e), session_with_store, )), } } pub async fn render( db: Pool<Postgres>, templates: Handlebars<'_>, session_with_store: SessionWithStore<MemoryStore>, ) -> Result<(Box<dyn warp::Reply>, SessionWithStore<MemoryStore>), warp::Rejection> { let (flash, session_with_store) = get_and_clear_flash(session_with_store); let token = match session_with_store.session.get::<String>("sid") { Some(token) => token, None => { return Ok(error_redirect( Uri::from_static("/"), "Not logged in".to_string(), session_with_store, )); } }; let mut tx = value_or_error_html!( db.begin().await, "Error generating user page", 
StatusCode::INTERNAL_SERVER_ERROR, &templates, session_with_store ); let row = value_or_error_redirect!( sqlx::query!( "SELECT \ users.id AS user_id, \ users.username AS username, \ users.admin AS admin \ FROM tokens \ LEFT JOIN users ON users.id = tokens.user_id WHERE token = $1 \ AND expiration >= CAST(EXTRACT(epoch FROM CURRENT_TIMESTAMP) AS INTEGER)", token, ) .fetch_one(&mut tx) .await, Uri::from_static("/"), "Not logged in".to_string(), session_with_store ); let (user_id, username, admin) = (row.user_id, row.username, row.admin); let pages = value_or_error_html!( sqlx::query_as!( Page, "SELECT slug, title FROM pages \ WHERE owner_id = $1", user_id ) .fetch_all(&db) .await, "Unable to fetch owned pages", StatusCode::INTERNAL_SERVER_ERROR, &templates, session_with_store ); let pages = match pages.len() { 0 => None, _ => Some(pages), }; let images = value_or_error_html!( sqlx::query_as!( Image, "SELECT CONCAT(slug, '.', extension) AS slug_with_extension, slug, alt_text FROM images \ WHERE owner_id = $1", user_id ) .fetch_all(&db) .await, "Unable to fetch owned images", StatusCode::INTERNAL_SERVER_ERROR, &templates, session_with_store ); let images = match images.len() { 0 => None, _ => Some(images), }; let approvals = if admin { let approvals = value_or_error_html!( sqlx::query_as!( User, "SELECT \ username, \ id, \ TO_CHAR(created_at, 'MM/DD/YYYY HH24:MI:SS') AS created_at \ FROM users \ WHERE state = 'pending' \ ORDER BY created_at DESC", ) .fetch_all(&mut tx) .await, "Unable to fetch account approvals", StatusCode::INTERNAL_SERVER_ERROR, &templates, session_with_store ); match approvals.len() { 0 => None, _ => Some(approvals), } } else { None }; let text = match templates.render( "users/render", &json!({ "flash": flash, "pages": pages, "images": images, "approvals": approvals, "current_username": username}), ) { Ok(text) => text, Err(e) => { format!("<html>Error rendering user template: {}</html>", e) } }; Ok(( Box::new(warp::reply::with_status( 
warp::reply::html(text), StatusCode::OK, )), session_with_store, )) }
db: Pool<Postgres>, templates: Handlebars<'_>, session_with_store: SessionWithStore<MemoryStore>, ) -> Result<HandlerReturn, warp::Rejection> { let mut tx = value_or_error_html!( db.begin().await, "Error communicating with database", StatusCode::INTERNAL_SERVER_ERROR, &templates, session_with_store ); let token = value_or_error_redirect!( session_with_store .session .get::<String>("sid") .ok_or_else(|| anyhow!("missing sid token")), Uri::from_static("/"), "Not logged in".to_string(), session_with_store ); let admin = value_or_error_redirect!( sqlx::query!( "SELECT users.admin AS admin \ FROM tokens \ LEFT JOIN users \ ON users.id = tokens.user_id \ WHERE tokens.token = $1 \ AND expiration >= CAST(EXTRACT(epoch FROM CURRENT_TIMESTAMP) AS INTEGER)", token, ) .fetch_one(&mut tx) .await, Uri::from_static("/"), "Not logged in".to_string(), session_with_store ) .admin; if !admin { return Ok(error_redirect( Uri::from_static("/me"), "You do not have admin permissions".to_string(), session_with_store, )); } match sqlx::query!( "UPDATE users SET state = $1 WHERE id = $2", target_state.to_string(), target_user_id ) .execute(&db) .await { Ok(_) => Ok(( Box::new(warp::redirect::see_other(Uri::from_static("/me"))), session_with_store, )), Err(e) => Ok(error_redirect( Uri::from_static("/me"), format!("Internal error (error persisting data): {}", e), session_with_store, )), } }
function_block-function_prefix_line
[ { "content": "pub fn error_html(\n\n message: &str,\n\n status_code: StatusCode,\n\n templates: &Handlebars,\n\n session_with_store: SessionWithStore<MemoryStore>,\n\n) -> HandlerReturn {\n\n let (flash, session_with_store) = get_and_clear_flash(session_with_store);\n\n\n\n let text = match te...
Rust
core/lib/storage/src/chain/mempool/mod.rs
huitseeker/zksync
5b936b1855a08033cca7f75d6f87fde106c6e8fd
use std::{collections::VecDeque, convert::TryFrom, time::Instant}; use itertools::Itertools; use zksync_types::{ mempool::SignedTxVariant, tx::{TxEthSignature, TxHash}, SignedZkSyncTx, }; use self::records::MempoolTx; use crate::{QueryResult, StorageProcessor}; pub mod records; #[derive(Debug)] pub struct MempoolSchema<'a, 'c>(pub &'a mut StorageProcessor<'c>); impl<'a, 'c> MempoolSchema<'a, 'c> { pub async fn load_txs(&mut self) -> QueryResult<VecDeque<SignedTxVariant>> { let start = Instant::now(); let txs: Vec<MempoolTx> = sqlx::query_as!( MempoolTx, "SELECT * FROM mempool_txs ORDER BY created_at", ) .fetch_all(self.0.conn()) .await?; fn batch_id_optional(batch_id: i64) -> Option<i64> { match batch_id { 0 => None, _ => Some(batch_id), } }; let mut prev_batch_id = txs .first() .map(|tx| batch_id_optional(tx.batch_id)) .flatten(); let grouped_txs = txs.into_iter().group_by(|tx| { prev_batch_id = batch_id_optional(tx.batch_id); prev_batch_id }); let mut txs = Vec::new(); for (batch_id, group) in grouped_txs.into_iter() { let deserialized_txs: Vec<SignedZkSyncTx> = group .map(|tx_object| -> QueryResult<SignedZkSyncTx> { let tx = serde_json::from_value(tx_object.tx)?; let sign_data = match tx_object.eth_sign_data { None => None, Some(sign_data_value) => serde_json::from_value(sign_data_value)?, }; Ok(SignedZkSyncTx { tx, eth_sign_data: sign_data, }) }) .collect::<Result<Vec<SignedZkSyncTx>, anyhow::Error>>()?; match batch_id { Some(batch_id) => { let variant = SignedTxVariant::batch(deserialized_txs, batch_id, None); txs.push(variant); } None => { let mut variants = deserialized_txs .into_iter() .map(SignedTxVariant::from) .collect(); txs.append(&mut variants); } } } for tx in &mut txs { if let SignedTxVariant::Batch(batch) = tx { let eth_signature = sqlx::query!( "SELECT eth_signature FROM txs_batches_signatures WHERE batch_id = $1", batch.batch_id ) .fetch_optional(self.0.conn()) .await? 
.map(|value| { serde_json::from_value(value.eth_signature) .expect("failed to decode TxEthSignature") }); batch.eth_signature = eth_signature; } } txs.sort_by_key(|tx| match tx { SignedTxVariant::Tx(tx) => tx.tx.nonce(), SignedTxVariant::Batch(batch) => batch .txs .last() .expect("batch must contain at least one transaction") .tx .nonce(), }); metrics::histogram!("sql.chain.mempool.load_txs", start.elapsed()); Ok(txs.into()) } pub async fn insert_batch( &mut self, txs: &[SignedZkSyncTx], eth_signature: Option<TxEthSignature>, ) -> QueryResult<i64> { let start = Instant::now(); if txs.is_empty() { anyhow::bail!("Cannot insert an empty batch"); } let batch_id = { let first_tx_data = txs[0].clone(); let tx_hash = hex::encode(first_tx_data.hash().as_ref()); let tx = serde_json::to_value(&first_tx_data.tx) .expect("Unserializable TX provided to the database"); let eth_sign_data = first_tx_data .eth_sign_data .as_ref() .map(|sd| serde_json::to_value(sd).expect("failed to encode EthSignData")); sqlx::query!( "INSERT INTO mempool_txs (tx_hash, tx, created_at, eth_sign_data) VALUES ($1, $2, $3, $4)", tx_hash, tx, chrono::Utc::now(), eth_sign_data, ) .execute(self.0.conn()) .await?; sqlx::query_as!( MempoolTx, "SELECT * FROM mempool_txs ORDER BY batch_id DESC LIMIT 1", ) .fetch_optional(self.0.conn()) .await? .ok_or_else(|| anyhow::format_err!("Can't get maximal batch_id from mempool_txs"))? 
.batch_id }; for tx_data in txs[1..].iter() { let tx_hash = hex::encode(tx_data.hash().as_ref()); let tx = serde_json::to_value(&tx_data.tx) .expect("Unserializable TX provided to the database"); let eth_sign_data = tx_data .eth_sign_data .as_ref() .map(|sd| serde_json::to_value(sd).expect("failed to encode EthSignData")); sqlx::query!( "INSERT INTO mempool_txs (tx_hash, tx, created_at, eth_sign_data, batch_id) VALUES ($1, $2, $3, $4, $5)", tx_hash, tx, chrono::Utc::now(), eth_sign_data, batch_id, ) .execute(self.0.conn()) .await?; } if let Some(signature) = eth_signature { let signature = serde_json::to_value(signature)?; sqlx::query!( "INSERT INTO txs_batches_signatures VALUES($1, $2)", batch_id, signature ) .execute(self.0.conn()) .await?; } metrics::histogram!("sql.chain.mempool.insert_batch", start.elapsed()); Ok(batch_id) } pub async fn insert_tx(&mut self, tx_data: &SignedZkSyncTx) -> QueryResult<()> { let start = Instant::now(); let tx_hash = hex::encode(tx_data.tx.hash().as_ref()); let tx = serde_json::to_value(&tx_data.tx)?; let batch_id = 0; let eth_sign_data = tx_data .eth_sign_data .as_ref() .map(|sd| serde_json::to_value(sd).expect("failed to encode EthSignData")); sqlx::query!( "INSERT INTO mempool_txs (tx_hash, tx, created_at, eth_sign_data, batch_id) VALUES ($1, $2, $3, $4, $5)", tx_hash, tx, chrono::Utc::now(), eth_sign_data, batch_id, ) .execute(self.0.conn()) .await?; metrics::histogram!("sql.chain.mempool.insert_tx", start.elapsed()); Ok(()) } pub async fn remove_tx(&mut self, tx: &[u8]) -> QueryResult<()> { let start = Instant::now(); let tx_hash = hex::encode(tx); sqlx::query!( "DELETE FROM mempool_txs WHERE tx_hash = $1", &tx_hash ) .execute(self.0.conn()) .await?; metrics::histogram!("sql.chain.mempool.remove_tx", start.elapsed()); Ok(()) } pub async fn remove_txs(&mut self, txs: &[TxHash]) -> QueryResult<()> { let start = Instant::now(); let tx_hashes: Vec<_> = txs.iter().map(hex::encode).collect(); sqlx::query!( "DELETE FROM mempool_txs 
WHERE tx_hash = ANY($1)", &tx_hashes ) .execute(self.0.conn()) .await?; metrics::histogram!("sql.chain.mempool.remove_txs", start.elapsed()); Ok(()) } pub async fn contains_tx(&mut self, tx_hash: TxHash) -> QueryResult<bool> { let start = Instant::now(); let tx_hash = hex::encode(tx_hash.as_ref()); let row = sqlx::query!( "SELECT count(*) from mempool_txs WHERE tx_hash = $1", &tx_hash ) .fetch_one(self.0.conn()) .await? .count; let contains = row.filter(|&counter| counter > 0).is_some(); metrics::histogram!("sql.chain", start.elapsed(), "mempool" => "contains_tx"); Ok(contains) } pub async fn get_tx(&mut self, tx_hash: TxHash) -> QueryResult<Option<SignedZkSyncTx>> { let start = Instant::now(); let tx_hash = hex::encode(tx_hash.as_ref()); let mempool_tx = sqlx::query_as!( MempoolTx, "SELECT * from mempool_txs WHERE tx_hash = $1", &tx_hash ) .fetch_optional(self.0.conn()) .await?; metrics::histogram!("sql.chain", start.elapsed(), "mempool" => "get_tx"); mempool_tx .map(SignedZkSyncTx::try_from) .transpose() .map_err(anyhow::Error::from) } pub async fn collect_garbage(&mut self) -> QueryResult<()> { let start = Instant::now(); let all_txs: Vec<_> = self.load_txs().await?.into_iter().collect(); let mut tx_hashes_to_remove = Vec::new(); for tx in all_txs { let should_remove = match &tx { SignedTxVariant::Tx(tx) => { let tx_hash = tx.hash(); self.0 .chain() .operations_ext_schema() .get_tx_by_hash(tx_hash.as_ref()) .await .expect("DB issue while restoring the mempool state") .is_some() } SignedTxVariant::Batch(batch) => { let tx_hash = batch.txs[0].hash(); self.0 .chain() .operations_ext_schema() .get_tx_by_hash(tx_hash.as_ref()) .await .expect("DB issue while restoring the mempool state") .is_some() } }; if should_remove { tx_hashes_to_remove.extend(tx.hashes()) } } self.remove_txs(&tx_hashes_to_remove).await?; metrics::histogram!("sql.chain.mempool.collect_garbage", start.elapsed()); Ok(()) } }
use std::{collections::VecDeque, convert::TryFrom, time::Instant}; use itertools::Itertools; use zksync_types::{ mempool::SignedTxVariant, tx::{TxEthSignature, TxHash}, SignedZkSyncTx, }; use self::records::MempoolTx; use crate::{QueryResult, StorageProcessor}; pub mod records; #[derive(Debug)] pub struct MempoolSchema<'a, 'c>(pub &'a mut Storage
let row = sqlx::query!( "SELECT count(*) from mempool_txs WHERE tx_hash = $1", &tx_hash ) .fetch_one(self.0.conn()) .await? .count; let contains = row.filter(|&counter| counter > 0).is_some(); metrics::histogram!("sql.chain", start.elapsed(), "mempool" => "contains_tx"); Ok(contains) } pub async fn get_tx(&mut self, tx_hash: TxHash) -> QueryResult<Option<SignedZkSyncTx>> { let start = Instant::now(); let tx_hash = hex::encode(tx_hash.as_ref()); let mempool_tx = sqlx::query_as!( MempoolTx, "SELECT * from mempool_txs WHERE tx_hash = $1", &tx_hash ) .fetch_optional(self.0.conn()) .await?; metrics::histogram!("sql.chain", start.elapsed(), "mempool" => "get_tx"); mempool_tx .map(SignedZkSyncTx::try_from) .transpose() .map_err(anyhow::Error::from) } pub async fn collect_garbage(&mut self) -> QueryResult<()> { let start = Instant::now(); let all_txs: Vec<_> = self.load_txs().await?.into_iter().collect(); let mut tx_hashes_to_remove = Vec::new(); for tx in all_txs { let should_remove = match &tx { SignedTxVariant::Tx(tx) => { let tx_hash = tx.hash(); self.0 .chain() .operations_ext_schema() .get_tx_by_hash(tx_hash.as_ref()) .await .expect("DB issue while restoring the mempool state") .is_some() } SignedTxVariant::Batch(batch) => { let tx_hash = batch.txs[0].hash(); self.0 .chain() .operations_ext_schema() .get_tx_by_hash(tx_hash.as_ref()) .await .expect("DB issue while restoring the mempool state") .is_some() } }; if should_remove { tx_hashes_to_remove.extend(tx.hashes()) } } self.remove_txs(&tx_hashes_to_remove).await?; metrics::histogram!("sql.chain.mempool.collect_garbage", start.elapsed()); Ok(()) } }
Processor<'c>); impl<'a, 'c> MempoolSchema<'a, 'c> { pub async fn load_txs(&mut self) -> QueryResult<VecDeque<SignedTxVariant>> { let start = Instant::now(); let txs: Vec<MempoolTx> = sqlx::query_as!( MempoolTx, "SELECT * FROM mempool_txs ORDER BY created_at", ) .fetch_all(self.0.conn()) .await?; fn batch_id_optional(batch_id: i64) -> Option<i64> { match batch_id { 0 => None, _ => Some(batch_id), } }; let mut prev_batch_id = txs .first() .map(|tx| batch_id_optional(tx.batch_id)) .flatten(); let grouped_txs = txs.into_iter().group_by(|tx| { prev_batch_id = batch_id_optional(tx.batch_id); prev_batch_id }); let mut txs = Vec::new(); for (batch_id, group) in grouped_txs.into_iter() { let deserialized_txs: Vec<SignedZkSyncTx> = group .map(|tx_object| -> QueryResult<SignedZkSyncTx> { let tx = serde_json::from_value(tx_object.tx)?; let sign_data = match tx_object.eth_sign_data { None => None, Some(sign_data_value) => serde_json::from_value(sign_data_value)?, }; Ok(SignedZkSyncTx { tx, eth_sign_data: sign_data, }) }) .collect::<Result<Vec<SignedZkSyncTx>, anyhow::Error>>()?; match batch_id { Some(batch_id) => { let variant = SignedTxVariant::batch(deserialized_txs, batch_id, None); txs.push(variant); } None => { let mut variants = deserialized_txs .into_iter() .map(SignedTxVariant::from) .collect(); txs.append(&mut variants); } } } for tx in &mut txs { if let SignedTxVariant::Batch(batch) = tx { let eth_signature = sqlx::query!( "SELECT eth_signature FROM txs_batches_signatures WHERE batch_id = $1", batch.batch_id ) .fetch_optional(self.0.conn()) .await? 
.map(|value| { serde_json::from_value(value.eth_signature) .expect("failed to decode TxEthSignature") }); batch.eth_signature = eth_signature; } } txs.sort_by_key(|tx| match tx { SignedTxVariant::Tx(tx) => tx.tx.nonce(), SignedTxVariant::Batch(batch) => batch .txs .last() .expect("batch must contain at least one transaction") .tx .nonce(), }); metrics::histogram!("sql.chain.mempool.load_txs", start.elapsed()); Ok(txs.into()) } pub async fn insert_batch( &mut self, txs: &[SignedZkSyncTx], eth_signature: Option<TxEthSignature>, ) -> QueryResult<i64> { let start = Instant::now(); if txs.is_empty() { anyhow::bail!("Cannot insert an empty batch"); } let batch_id = { let first_tx_data = txs[0].clone(); let tx_hash = hex::encode(first_tx_data.hash().as_ref()); let tx = serde_json::to_value(&first_tx_data.tx) .expect("Unserializable TX provided to the database"); let eth_sign_data = first_tx_data .eth_sign_data .as_ref() .map(|sd| serde_json::to_value(sd).expect("failed to encode EthSignData")); sqlx::query!( "INSERT INTO mempool_txs (tx_hash, tx, created_at, eth_sign_data) VALUES ($1, $2, $3, $4)", tx_hash, tx, chrono::Utc::now(), eth_sign_data, ) .execute(self.0.conn()) .await?; sqlx::query_as!( MempoolTx, "SELECT * FROM mempool_txs ORDER BY batch_id DESC LIMIT 1", ) .fetch_optional(self.0.conn()) .await? .ok_or_else(|| anyhow::format_err!("Can't get maximal batch_id from mempool_txs"))? 
.batch_id }; for tx_data in txs[1..].iter() { let tx_hash = hex::encode(tx_data.hash().as_ref()); let tx = serde_json::to_value(&tx_data.tx) .expect("Unserializable TX provided to the database"); let eth_sign_data = tx_data .eth_sign_data .as_ref() .map(|sd| serde_json::to_value(sd).expect("failed to encode EthSignData")); sqlx::query!( "INSERT INTO mempool_txs (tx_hash, tx, created_at, eth_sign_data, batch_id) VALUES ($1, $2, $3, $4, $5)", tx_hash, tx, chrono::Utc::now(), eth_sign_data, batch_id, ) .execute(self.0.conn()) .await?; } if let Some(signature) = eth_signature { let signature = serde_json::to_value(signature)?; sqlx::query!( "INSERT INTO txs_batches_signatures VALUES($1, $2)", batch_id, signature ) .execute(self.0.conn()) .await?; } metrics::histogram!("sql.chain.mempool.insert_batch", start.elapsed()); Ok(batch_id) } pub async fn insert_tx(&mut self, tx_data: &SignedZkSyncTx) -> QueryResult<()> { let start = Instant::now(); let tx_hash = hex::encode(tx_data.tx.hash().as_ref()); let tx = serde_json::to_value(&tx_data.tx)?; let batch_id = 0; let eth_sign_data = tx_data .eth_sign_data .as_ref() .map(|sd| serde_json::to_value(sd).expect("failed to encode EthSignData")); sqlx::query!( "INSERT INTO mempool_txs (tx_hash, tx, created_at, eth_sign_data, batch_id) VALUES ($1, $2, $3, $4, $5)", tx_hash, tx, chrono::Utc::now(), eth_sign_data, batch_id, ) .execute(self.0.conn()) .await?; metrics::histogram!("sql.chain.mempool.insert_tx", start.elapsed()); Ok(()) } pub async fn remove_tx(&mut self, tx: &[u8]) -> QueryResult<()> { let start = Instant::now(); let tx_hash = hex::encode(tx); sqlx::query!( "DELETE FROM mempool_txs WHERE tx_hash = $1", &tx_hash ) .execute(self.0.conn()) .await?; metrics::histogram!("sql.chain.mempool.remove_tx", start.elapsed()); Ok(()) } pub async fn remove_txs(&mut self, txs: &[TxHash]) -> QueryResult<()> { let start = Instant::now(); let tx_hashes: Vec<_> = txs.iter().map(hex::encode).collect(); sqlx::query!( "DELETE FROM mempool_txs 
WHERE tx_hash = ANY($1)", &tx_hashes ) .execute(self.0.conn()) .await?; metrics::histogram!("sql.chain.mempool.remove_txs", start.elapsed()); Ok(()) } pub async fn contains_tx(&mut self, tx_hash: TxHash) -> QueryResult<bool> { let start = Instant::now(); let tx_hash = hex::encode(tx_hash.as_ref());
random
[ { "content": "pub fn bench_signatures(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"Signature verify\");\n\n group.throughput(Throughput::Elements(1));\n\n group.bench_function(\n\n \"bench_signature_verify_zksync_musig\",\n\n bench_signature_zksync_musig_verify,\n\n );\...
Rust
datafusion/src/physical_plan/expressions/average.rs
andts/arrow-datafusion
1c39f5ce865e3e1225b4895196073be560a93e82
use std::any::Any; use std::convert::TryFrom; use std::sync::Arc; use crate::error::{DataFusionError, Result}; use crate::physical_plan::{Accumulator, AggregateExpr, PhysicalExpr}; use crate::scalar::{ ScalarValue, MAX_PRECISION_FOR_DECIMAL128, MAX_SCALE_FOR_DECIMAL128, }; use arrow::compute; use arrow::datatypes::DataType; use arrow::{ array::{ArrayRef, UInt64Array}, datatypes::Field, }; use super::{format_state_name, sum}; #[derive(Debug)] pub struct Avg { name: String, expr: Arc<dyn PhysicalExpr>, data_type: DataType, } pub fn avg_return_type(arg_type: &DataType) -> Result<DataType> { match arg_type { DataType::Decimal(precision, scale) => { let new_precision = MAX_PRECISION_FOR_DECIMAL128.min(*precision + 4); let new_scale = MAX_SCALE_FOR_DECIMAL128.min(*scale + 4); Ok(DataType::Decimal(new_precision, new_scale)) } DataType::Int8 | DataType::Int16 | DataType::Int32 | DataType::Int64 | DataType::UInt8 | DataType::UInt16 | DataType::UInt32 | DataType::UInt64 | DataType::Float32 | DataType::Float64 => Ok(DataType::Float64), other => Err(DataFusionError::Plan(format!( "AVG does not support {:?}", other ))), } } pub(crate) fn is_avg_support_arg_type(arg_type: &DataType) -> bool { matches!( arg_type, DataType::UInt8 | DataType::UInt16 | DataType::UInt32 | DataType::UInt64 | DataType::Int8 | DataType::Int16 | DataType::Int32 | DataType::Int64 | DataType::Float32 | DataType::Float64 | DataType::Decimal(_, _) ) } impl Avg { pub fn new( expr: Arc<dyn PhysicalExpr>, name: impl Into<String>, data_type: DataType, ) -> Self { assert!(matches!( data_type, DataType::Float64 | DataType::Decimal(_, _) )); Self { name: name.into(), expr, data_type, } } } impl AggregateExpr for Avg { fn as_any(&self) -> &dyn Any { self } fn field(&self) -> Result<Field> { Ok(Field::new(&self.name, self.data_type.clone(), true)) } fn create_accumulator(&self) -> Result<Box<dyn Accumulator>> { Ok(Box::new(AvgAccumulator::try_new( &self.data_type, )?)) } fn state_fields(&self) -> Result<Vec<Field>> { 
Ok(vec![ Field::new( &format_state_name(&self.name, "count"), DataType::UInt64, true, ), Field::new( &format_state_name(&self.name, "sum"), self.data_type.clone(), true, ), ]) } fn expressions(&self) -> Vec<Arc<dyn PhysicalExpr>> { vec![self.expr.clone()] } fn name(&self) -> &str { &self.name } } #[derive(Debug)] pub struct AvgAccumulator { sum: ScalarValue, count: u64, } impl AvgAccumulator { pub fn try_new(datatype: &DataType) -> Result<Self> { Ok(Self { sum: ScalarValue::try_from(datatype)?, count: 0, }) } } impl Accumulator for AvgAccumulator { fn state(&self) -> Result<Vec<ScalarValue>> { Ok(vec![ScalarValue::from(self.count), self.sum.clone()]) } fn update(&mut self, _values: &[ScalarValue]) -> Result<()> { unimplemented!("update_batch is implemented instead"); } fn update_batch(&mut self, values: &[ArrayRef]) -> Result<()> { let values = &values[0]; self.count += (values.len() - values.data().null_count()) as u64; self.sum = sum::sum(&self.sum, &sum::sum_batch(values)?)?; Ok(()) } fn merge(&mut self, _states: &[ScalarValue]) -> Result<()> { unimplemented!("merge_batch is implemented instead"); } fn merge_batch(&mut self, states: &[ArrayRef]) -> Result<()> { let counts = states[0].as_any().downcast_ref::<UInt64Array>().unwrap(); self.count += compute::sum(counts).unwrap_or(0); self.sum = sum::sum(&self.sum, &sum::sum_batch(&states[1])?)?; Ok(()) } fn evaluate(&self) -> Result<ScalarValue> { match self.sum { ScalarValue::Float64(e) => { Ok(ScalarValue::Float64(e.map(|f| f / self.count as f64))) } ScalarValue::Decimal128(value, precision, scale) => { Ok(match value { None => ScalarValue::Decimal128(None, precision, scale), Some(v) => ScalarValue::Decimal128( Some(v / self.count as i128), precision, scale, ), }) } _ => Err(DataFusionError::Internal( "Sum should be f64 on average".to_string(), )), } } } #[cfg(test)] mod tests { use super::*; use crate::physical_plan::expressions::col; use crate::{error::Result, generic_test_op}; use 
arrow::record_batch::RecordBatch; use arrow::{array::*, datatypes::*}; #[test] fn test_avg_return_data_type() -> Result<()> { let data_type = DataType::Decimal(10, 5); let result_type = avg_return_type(&data_type)?; assert_eq!(DataType::Decimal(14, 9), result_type); let data_type = DataType::Decimal(36, 10); let result_type = avg_return_type(&data_type)?; assert_eq!(DataType::Decimal(38, 14), result_type); Ok(()) } #[test] fn avg_decimal() -> Result<()> { let mut decimal_builder = DecimalBuilder::new(6, 10, 0); for i in 1..7 { decimal_builder.append_value(i as i128)?; } let array: ArrayRef = Arc::new(decimal_builder.finish()); generic_test_op!( array, DataType::Decimal(10, 0), Avg, ScalarValue::Decimal128(Some(35000), 14, 4), DataType::Decimal(14, 4) ) } #[test] fn avg_decimal_with_nulls() -> Result<()> { let mut decimal_builder = DecimalBuilder::new(5, 10, 0); for i in 1..6 { if i == 2 { decimal_builder.append_null()?; } else { decimal_builder.append_value(i)?; } } let array: ArrayRef = Arc::new(decimal_builder.finish()); generic_test_op!( array, DataType::Decimal(10, 0), Avg, ScalarValue::Decimal128(Some(32500), 14, 4), DataType::Decimal(14, 4) ) } #[test] fn avg_decimal_all_nulls() -> Result<()> { let mut decimal_builder = DecimalBuilder::new(5, 10, 0); for _i in 1..6 { decimal_builder.append_null()?; } let array: ArrayRef = Arc::new(decimal_builder.finish()); generic_test_op!( array, DataType::Decimal(10, 0), Avg, ScalarValue::Decimal128(None, 14, 4), DataType::Decimal(14, 4) ) } #[test] fn avg_i32() -> Result<()> { let a: ArrayRef = Arc::new(Int32Array::from(vec![1, 2, 3, 4, 5])); generic_test_op!( a, DataType::Int32, Avg, ScalarValue::from(3_f64), DataType::Float64 ) } #[test] fn avg_i32_with_nulls() -> Result<()> { let a: ArrayRef = Arc::new(Int32Array::from(vec![ Some(1), None, Some(3), Some(4), Some(5), ])); generic_test_op!( a, DataType::Int32, Avg, ScalarValue::from(3.25f64), DataType::Float64 ) } #[test] fn avg_i32_all_nulls() -> Result<()> { let a: 
ArrayRef = Arc::new(Int32Array::from(vec![None, None])); generic_test_op!( a, DataType::Int32, Avg, ScalarValue::Float64(None), DataType::Float64 ) } #[test] fn avg_u32() -> Result<()> { let a: ArrayRef = Arc::new(UInt32Array::from(vec![1_u32, 2_u32, 3_u32, 4_u32, 5_u32])); generic_test_op!( a, DataType::UInt32, Avg, ScalarValue::from(3.0f64), DataType::Float64 ) } #[test] fn avg_f32() -> Result<()> { let a: ArrayRef = Arc::new(Float32Array::from(vec![1_f32, 2_f32, 3_f32, 4_f32, 5_f32])); generic_test_op!( a, DataType::Float32, Avg, ScalarValue::from(3_f64), DataType::Float64 ) } #[test] fn avg_f64() -> Result<()> { let a: ArrayRef = Arc::new(Float64Array::from(vec![1_f64, 2_f64, 3_f64, 4_f64, 5_f64])); generic_test_op!( a, DataType::Float64, Avg, ScalarValue::from(3_f64), DataType::Float64 ) } fn aggregate( batch: &RecordBatch, agg: Arc<dyn AggregateExpr>, ) -> Result<ScalarValue> { let mut accum = agg.create_accumulator()?; let expr = agg.expressions(); let values = expr .iter() .map(|e| e.evaluate(batch)) .map(|r| r.map(|v| v.into_array(batch.num_rows()))) .collect::<Result<Vec<_>>>()?; accum.update_batch(&values)?; accum.evaluate() } }
use std::any::Any; use std::convert::TryFrom; use std::sync::Arc; use crate::error::{DataFusionError, Result}; use crate::physical_plan::{Accumulator, AggregateExpr, PhysicalExpr}; use crate::scalar::{ ScalarValue, MAX_PRECISION_FOR_DECIMAL128, MAX_SCALE_FOR_DECIMAL128, }; use arrow::compute; use arrow::datatypes::DataType; use arrow::{ array::{ArrayRef, UInt64Array}, datatypes::Field, }; use super::{format_state_name, sum}; #[derive(Debug)] pub struct Avg { name: String, expr: Arc<dyn PhysicalExpr>, data_type: DataType, } pub fn avg_return_type(arg_type: &DataType) -> Result<DataType> { match arg_type { DataType::Decimal(precision, scale) => { let new_precision = MAX_PRECISION_FOR_DECIMAL128.min(*precision + 4); let new_scale = MAX_SCALE_FOR_DECIMAL128.min(*scale + 4); Ok(DataType::Decimal(new_precision, new_scale)) } DataType::Int8 | DataType::Int16 | DataType::Int32 | DataType::Int64 | DataType::UInt8 | DataType::UInt16 | DataType::UInt32 | DataType::UInt64 | DataType::Float32 | DataType::Float64 => Ok(DataType::Float64), other => Err(DataFusionError::Plan(format!( "AVG does not support {:?}", other ))), } } pub(crate) fn is_avg_support_arg_type(arg_type: &DataType) -> bool { matches!( arg_type, DataType::UInt8 | DataType::UInt16 | DataType::UInt32 | DataType::UInt64 | DataType::Int8 | DataType::Int16 | DataType::Int32 | DataType::Int64 | DataType::Float32 | DataType::Float64 | DataType::Decimal(_, _) ) } impl Avg { pub fn new( expr: Arc<dyn PhysicalExpr>, name: impl Into<String>, data_type: DataType, ) -> Self { assert!(matches!( data_type, DataType::Float64 | DataType::Decimal(_, _) )); Self { name: name.into(), expr, data_type, } } } impl AggregateExpr for Avg { fn as_any(&self) -> &dyn Any { self } fn field(&self) -> Result<Field> { Ok(Field::new(&self.name, self.data_type.clone(), true)) } fn create_accumulator(&self) -> Result<Box<dyn Accumulator>> { Ok(Box::new(AvgAccumulator::try_new( &self.data_type, )?)) } fn state_fields(&self) -> Result<Vec<Field>> { 
Ok(vec![ Field::new( &format_state_name(&self.name, "count"), DataType::UInt64, true, ), Field::new( &format_state_name(&self.name, "sum"), self.data_type.clone(), true, ), ]) } fn expressions(&self) -> Vec<Arc<dyn PhysicalExpr>> { vec![self.expr.clone()] } fn name(&self) -> &str { &self.name } } #[derive(Debug)] pub struct AvgAccumulator { sum: ScalarValue, count: u64, } impl AvgAccumulator { pub fn try_new(datatype: &DataType) -> Result<Self> { Ok(Self { sum: ScalarValue::try_from(datatype)?, count: 0, }) } } impl Accumulator for AvgAccumulator { fn state(
-> Result<()> { let a: ArrayRef = Arc::new(Int32Array::from(vec![None, None])); generic_test_op!( a, DataType::Int32, Avg, ScalarValue::Float64(None), DataType::Float64 ) } #[test] fn avg_u32() -> Result<()> { let a: ArrayRef = Arc::new(UInt32Array::from(vec![1_u32, 2_u32, 3_u32, 4_u32, 5_u32])); generic_test_op!( a, DataType::UInt32, Avg, ScalarValue::from(3.0f64), DataType::Float64 ) } #[test] fn avg_f32() -> Result<()> { let a: ArrayRef = Arc::new(Float32Array::from(vec![1_f32, 2_f32, 3_f32, 4_f32, 5_f32])); generic_test_op!( a, DataType::Float32, Avg, ScalarValue::from(3_f64), DataType::Float64 ) } #[test] fn avg_f64() -> Result<()> { let a: ArrayRef = Arc::new(Float64Array::from(vec![1_f64, 2_f64, 3_f64, 4_f64, 5_f64])); generic_test_op!( a, DataType::Float64, Avg, ScalarValue::from(3_f64), DataType::Float64 ) } fn aggregate( batch: &RecordBatch, agg: Arc<dyn AggregateExpr>, ) -> Result<ScalarValue> { let mut accum = agg.create_accumulator()?; let expr = agg.expressions(); let values = expr .iter() .map(|e| e.evaluate(batch)) .map(|r| r.map(|v| v.into_array(batch.num_rows()))) .collect::<Result<Vec<_>>>()?; accum.update_batch(&values)?; accum.evaluate() } }
&self) -> Result<Vec<ScalarValue>> { Ok(vec![ScalarValue::from(self.count), self.sum.clone()]) } fn update(&mut self, _values: &[ScalarValue]) -> Result<()> { unimplemented!("update_batch is implemented instead"); } fn update_batch(&mut self, values: &[ArrayRef]) -> Result<()> { let values = &values[0]; self.count += (values.len() - values.data().null_count()) as u64; self.sum = sum::sum(&self.sum, &sum::sum_batch(values)?)?; Ok(()) } fn merge(&mut self, _states: &[ScalarValue]) -> Result<()> { unimplemented!("merge_batch is implemented instead"); } fn merge_batch(&mut self, states: &[ArrayRef]) -> Result<()> { let counts = states[0].as_any().downcast_ref::<UInt64Array>().unwrap(); self.count += compute::sum(counts).unwrap_or(0); self.sum = sum::sum(&self.sum, &sum::sum_batch(&states[1])?)?; Ok(()) } fn evaluate(&self) -> Result<ScalarValue> { match self.sum { ScalarValue::Float64(e) => { Ok(ScalarValue::Float64(e.map(|f| f / self.count as f64))) } ScalarValue::Decimal128(value, precision, scale) => { Ok(match value { None => ScalarValue::Decimal128(None, precision, scale), Some(v) => ScalarValue::Decimal128( Some(v / self.count as i128), precision, scale, ), }) } _ => Err(DataFusionError::Internal( "Sum should be f64 on average".to_string(), )), } } } #[cfg(test)] mod tests { use super::*; use crate::physical_plan::expressions::col; use crate::{error::Result, generic_test_op}; use arrow::record_batch::RecordBatch; use arrow::{array::*, datatypes::*}; #[test] fn test_avg_return_data_type() -> Result<()> { let data_type = DataType::Decimal(10, 5); let result_type = avg_return_type(&data_type)?; assert_eq!(DataType::Decimal(14, 9), result_type); let data_type = DataType::Decimal(36, 10); let result_type = avg_return_type(&data_type)?; assert_eq!(DataType::Decimal(38, 14), result_type); Ok(()) } #[test] fn avg_decimal() -> Result<()> { let mut decimal_builder = DecimalBuilder::new(6, 10, 0); for i in 1..7 { decimal_builder.append_value(i as i128)?; } let array: 
ArrayRef = Arc::new(decimal_builder.finish()); generic_test_op!( array, DataType::Decimal(10, 0), Avg, ScalarValue::Decimal128(Some(35000), 14, 4), DataType::Decimal(14, 4) ) } #[test] fn avg_decimal_with_nulls() -> Result<()> { let mut decimal_builder = DecimalBuilder::new(5, 10, 0); for i in 1..6 { if i == 2 { decimal_builder.append_null()?; } else { decimal_builder.append_value(i)?; } } let array: ArrayRef = Arc::new(decimal_builder.finish()); generic_test_op!( array, DataType::Decimal(10, 0), Avg, ScalarValue::Decimal128(Some(32500), 14, 4), DataType::Decimal(14, 4) ) } #[test] fn avg_decimal_all_nulls() -> Result<()> { let mut decimal_builder = DecimalBuilder::new(5, 10, 0); for _i in 1..6 { decimal_builder.append_null()?; } let array: ArrayRef = Arc::new(decimal_builder.finish()); generic_test_op!( array, DataType::Decimal(10, 0), Avg, ScalarValue::Decimal128(None, 14, 4), DataType::Decimal(14, 4) ) } #[test] fn avg_i32() -> Result<()> { let a: ArrayRef = Arc::new(Int32Array::from(vec![1, 2, 3, 4, 5])); generic_test_op!( a, DataType::Int32, Avg, ScalarValue::from(3_f64), DataType::Float64 ) } #[test] fn avg_i32_with_nulls() -> Result<()> { let a: ArrayRef = Arc::new(Int32Array::from(vec![ Some(1), None, Some(3), Some(4), Some(5), ])); generic_test_op!( a, DataType::Int32, Avg, ScalarValue::from(3.25f64), DataType::Float64 ) } #[test] fn avg_i32_all_nulls()
random
[]
Rust
src/libfourcc/lib.rs
pfalabella/rust
3deb2c1aa6bb255ebe62b294be5e3c580e19bb9b
/*! Syntax extension to generate FourCCs. Once loaded, fourcc!() is called with a single 4-character string, and an optional ident that is either `big`, `little`, or `target`. The ident represents endianness, and specifies in which direction the characters should be read. If the ident is omitted, it is assumed to be `big`, i.e. left-to-right order. It returns a u32. # Examples To load the extension and use it: ```rust,ignore #[phase(plugin)] extern crate fourcc; fn main() { let val = fourcc!("\xC0\xFF\xEE!"); assert_eq!(val, 0xC0FFEE21u32); let little_val = fourcc!("foo ", little); assert_eq!(little_val, 0x21EEFFC0u32); } ``` # References * [Wikipedia: FourCC](http://en.wikipedia.org/wiki/FourCC) */ #![crate_id = "fourcc#0.11.0-pre"] #![experimental] #![crate_type = "rlib"] #![crate_type = "dylib"] #![license = "MIT/ASL2"] #![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", html_favicon_url = "http://www.rust-lang.org/favicon.ico", html_root_url = "http://doc.rust-lang.org/")] #![feature(plugin_registrar, managed_boxes)] extern crate syntax; extern crate rustc; use syntax::ast; use syntax::attr::contains; use syntax::codemap::{Span, mk_sp}; use syntax::ext::base; use syntax::ext::base::{ExtCtxt, MacExpr}; use syntax::ext::build::AstBuilder; use syntax::parse; use syntax::parse::token; use syntax::parse::token::InternedString; use rustc::plugin::Registry; use std::gc::Gc; #[plugin_registrar] pub fn plugin_registrar(reg: &mut Registry) { reg.register_macro("fourcc", expand_syntax_ext); } pub fn expand_syntax_ext(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> Box<base::MacResult> { let (expr, endian) = parse_tts(cx, tts); let little = match endian { None => false, Some(Ident{ident, span}) => match token::get_ident(ident).get() { "little" => true, "big" => false, "target" => target_endian_little(cx, sp), _ => { cx.span_err(span, "invalid endian directive in fourcc!"); target_endian_little(cx, sp) } } }; let s = match expr.node 
{ ast::ExprLit(ref lit) => match lit.node { ast::LitStr(ref s, _) => { if s.get().char_len() != 4 { cx.span_err(expr.span, "string literal with len != 4 in fourcc!"); } s } _ => { cx.span_err(expr.span, "unsupported literal in fourcc!"); return base::DummyResult::expr(sp) } }, _ => { cx.span_err(expr.span, "non-literal in fourcc!"); return base::DummyResult::expr(sp) } }; let mut val = 0u32; for codepoint in s.get().chars().take(4) { let byte = if codepoint as u32 > 0xFF { cx.span_err(expr.span, "fourcc! literal character out of range 0-255"); 0u8 } else { codepoint as u8 }; val = if little { (val >> 8) | ((byte as u32) << 24) } else { (val << 8) | (byte as u32) }; } let e = cx.expr_lit(sp, ast::LitUint(val as u64, ast::TyU32)); MacExpr::new(e) } struct Ident { ident: ast::Ident, span: Span } fn parse_tts(cx: &ExtCtxt, tts: &[ast::TokenTree]) -> (Gc<ast::Expr>, Option<Ident>) { let p = &mut parse::new_parser_from_tts(cx.parse_sess(), cx.cfg(), tts.iter() .map(|x| (*x).clone()) .collect()); let ex = p.parse_expr(); let id = if p.token == token::EOF { None } else { p.expect(&token::COMMA); let lo = p.span.lo; let ident = p.parse_ident(); let hi = p.last_span.hi; Some(Ident{ident: ident, span: mk_sp(lo, hi)}) }; if p.token != token::EOF { p.unexpected(); } (ex, id) } fn target_endian_little(cx: &ExtCtxt, sp: Span) -> bool { let meta = cx.meta_name_value(sp, InternedString::new("target_endian"), ast::LitStr(InternedString::new("little"), ast::CookedStr)); contains(cx.cfg().as_slice(), meta) } #[test] fn dummy_test() { }
/*! Syntax extension to generate FourCCs. Once loaded, fourcc!() is called with a single 4-character string, and an optional ident that is either `big`, `little`, or `target`. The ident represents endianness, and specifies in which direction the characters should be read. If the ident is omitted, it is assumed to be `big`, i.e. left-to-right order. It returns a u32. # Examples To load the extension and use it: ```rust,ignore #[phase(plugin)] extern crate fourcc; fn main() { let val = fourcc!("\xC0\xFF\xEE!"); assert_eq!(val, 0xC0FFEE21u32); let little_val = fourcc!("foo ", little); assert_eq!(little_val, 0x21EEFFC0u32); } ``` # References * [Wikipedia: FourCC](http://en.wikipedia.org/wiki/FourCC) */ #![crate_id = "fourcc#0.11.0-pre"] #![experimental] #![crate_type = "rlib"] #![crate_type = "dylib"] #![license = "MIT/ASL2"] #![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", html_favicon_url = "http://www.rust-lang.org/favicon.ico", html_root_url = "http://doc.rust-lang.org/")] #![feature(plugin_registrar, managed_boxes)] extern crate syntax; extern crate rustc; use syntax::ast; use syntax::attr::contains; use syntax::codemap::{Span, mk_sp}; use syntax::ext::base; use syntax::ext::base::{ExtCtxt, MacExpr}; use syntax::ext::build::AstBuilder; use syntax::parse; use syntax::parse::token; use syntax::parse::token::InternedString; use rustc::plugin::Registry; use std::gc::Gc; #[plugin_registrar] pub fn plugin_registrar(reg: &mut Registry) { reg.register_macro("fourcc", expand_syntax_ext); } pub fn expand_syntax_ext(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> Box<base::MacResult> { let (expr, endian) = parse_tts(cx, tts); let little = match endian { None => false, Some(Ident{ident, span}) =>
}; let s = match expr.node { ast::ExprLit(ref lit) => match lit.node { ast::LitStr(ref s, _) => { if s.get().char_len() != 4 { cx.span_err(expr.span, "string literal with len != 4 in fourcc!"); } s } _ => { cx.span_err(expr.span, "unsupported literal in fourcc!"); return base::DummyResult::expr(sp) } }, _ => { cx.span_err(expr.span, "non-literal in fourcc!"); return base::DummyResult::expr(sp) } }; let mut val = 0u32; for codepoint in s.get().chars().take(4) { let byte = if codepoint as u32 > 0xFF { cx.span_err(expr.span, "fourcc! literal character out of range 0-255"); 0u8 } else { codepoint as u8 }; val = if little { (val >> 8) | ((byte as u32) << 24) } else { (val << 8) | (byte as u32) }; } let e = cx.expr_lit(sp, ast::LitUint(val as u64, ast::TyU32)); MacExpr::new(e) } struct Ident { ident: ast::Ident, span: Span } fn parse_tts(cx: &ExtCtxt, tts: &[ast::TokenTree]) -> (Gc<ast::Expr>, Option<Ident>) { let p = &mut parse::new_parser_from_tts(cx.parse_sess(), cx.cfg(), tts.iter() .map(|x| (*x).clone()) .collect()); let ex = p.parse_expr(); let id = if p.token == token::EOF { None } else { p.expect(&token::COMMA); let lo = p.span.lo; let ident = p.parse_ident(); let hi = p.last_span.hi; Some(Ident{ident: ident, span: mk_sp(lo, hi)}) }; if p.token != token::EOF { p.unexpected(); } (ex, id) } fn target_endian_little(cx: &ExtCtxt, sp: Span) -> bool { let meta = cx.meta_name_value(sp, InternedString::new("target_endian"), ast::LitStr(InternedString::new("little"), ast::CookedStr)); contains(cx.cfg().as_slice(), meta) } #[test] fn dummy_test() { }
match token::get_ident(ident).get() { "little" => true, "big" => false, "target" => target_endian_little(cx, sp), _ => { cx.span_err(span, "invalid endian directive in fourcc!"); target_endian_little(cx, sp) } }
if_condition
[ { "content": "pub fn expand_syntax_ext(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])\n\n -> Box<base::MacResult> {\n\n let mut res_str = String::new();\n\n for (i, e) in tts.iter().enumerate() {\n\n if i & 1 == 1 {\n\n match *e {\n\n ast::TTTo...
Rust
src/svg.rs
sleibrock/wad2map
1f1095e1ebadae26ded818e7fa7dbe57ac9a9b33
use std::fs::File; use std::io::Write; use std::error::Error; pub enum Color { Red, Blue, Green, Yellow, Black, White, Grey, None } pub fn color_to_string(c: &Color) -> String { match *c { Color::Red => "red".to_owned(), Color::None => "none".to_owned(), Color::Blue => "blue".to_owned(), Color::Grey => "grey".to_owned(), Color::Green => "green".to_owned(), Color::Black => "black".to_owned(), Color::White => "white".to_owned(), Color::Yellow => "yellow".to_owned(), } } pub trait SVGObject { fn to_string(&self) -> String; } pub struct SVG { pub width: u64, pub height: u64, pub view_width: u64, pub view_height: u64, pub objects: Vec<Box<SVGObject>>, } pub struct SVGLine { pub x1: u64, pub y1: u64, pub x2: u64, pub y2: u64, pub stroke: u64, pub color: Color, } pub struct SVGRect { pub x: u64, pub y: u64, pub w: u64, pub h: u64, pub fill: Color, } pub struct SVGCircle { pub cx: u64, pub cy: u64, pub radius: u64, } pub struct SVGVertex { pub x: u64, pub y: u64, } pub struct SVGPoly { pub color: Color, pub stroke: u64, pub vertices: Vec<SVGVertex>, } impl SVGLine { pub fn new( x1: u64, y1: u64, x2: u64, y2: u64, w: u64, color: Color ) -> SVGLine { SVGLine{x1: x1, y1: y1, x2: x2, y2: y2, stroke: w, color: color} } } impl SVGObject for SVGLine { fn to_string(&self) -> String { format!( "<line x1=\"{}\" y1=\"{}\" x2=\"{}\" y2=\"{}\" stroke=\"{}\" stroke-width=\"{}\" />", self.x1, self.y1, self.x2, self.y2, color_to_string(&self.color), self.stroke, ) } } impl SVGRect { pub fn new(x: u64, y: u64, w: u64, h: u64, fill: Color) -> SVGRect { SVGRect{x: x, y: y, w: w, h: h, fill: fill} } } impl SVGObject for SVGRect { fn to_string(&self) -> String { format!( "<rect x=\"{}\" y=\"{}\" width=\"{}\" height=\"{}\" fill=\"{}\" />", self.x, self.y, self.w, self.h, color_to_string(&self.fill), ) } } impl SVGCircle { pub fn new(cx: u64, cy: u64, r: u64) -> SVGCircle { SVGCircle{cx: cx, cy: cy, radius: r} } } impl SVGObject for SVGCircle { fn to_string(&self) -> String { format!( "<circle 
cx=\"{}\" cy=\"{}\" r=\"{}\" fill=\"{}\" />", self.cx, self.cy, self.radius, "none" ) } } impl SVGVertex { pub fn new(x: u64, y: u64) -> SVGVertex { SVGVertex { x: x, y: y } } pub fn to_string(&self) -> String { format!("{},{}", self.x, self.y) } } impl SVGPoly { pub fn new(c: Color, stroke: u64) -> SVGPoly { let v: Vec<SVGVertex> = Vec::new(); SVGPoly{color: c, stroke: stroke, vertices: v} } pub fn addv(&mut self, x: u64, y: u64) { self.vertices.push(SVGVertex::new(x, y)); } } impl SVGObject for SVGPoly { fn to_string(&self) -> String { String::from("not implemented") } } impl SVG { pub fn new(w: u64, h: u64, vx: u64, vy: u64) -> SVG { return SVG { width: w, height: h, view_width: vx, view_height: vy, objects: Vec::new(), }; } pub fn add_object(&mut self, sobj: Box<SVGObject>) -> usize { self.objects.push(sobj); return self.objects.len(); } pub fn to_file(&mut self, fname: &str) -> Result<u8, String> { let head = format!( "<svg width=\"{}\" height=\"{}\" viewBox=\"0 0 {} {}\" xmlns=\"http://www.w3.org/2000/svg\" version=\"1.1\">", self.width, self.height, self.view_width, self.view_height, ); let tail = String::from("</svg>"); let mut buf: Vec<String> = Vec::new(); buf.push(head); for obj in &self.objects { buf.push(obj.to_string().to_owned()); } buf.push(tail); let mut f = match File::create(fname) { Ok(new_file) => new_file, Err(why) => { return Err(format!( "Couldn't create '{:?}': {}", fname, why.description() )); } }; for stringthing in buf { match f.write(stringthing.as_ref()) { Ok(_) => {} _ => { return Err(format!("Failed to write bytes to file")); } }; } Ok(0) } } #[cfg(test)] mod tests { #[test] fn test_create_svg() { use svg::*; let mut s = SVG::new(1024, 1024, 1024, 1024); let rect = SVGRect::new(0, 0, 1024, 1024, Color::White); let line = SVGLine::new(0, 0, 1024, 1024, 5, Color::Black); let line2 = SVGLine::new(1024, 0, 0, 1024, 10, Color::Black); s.add_object(Box::new(rect)); s.add_object(Box::new(line)); s.add_object(Box::new(line2)); 
s.to_file("test.svg"); } }
use std::fs::File; use std::io::Write; use std::error::Error; pub enum Color { Red, Blue, Green, Yellow, Black, White, Grey, None } pub fn color_to_string(c: &Color) -> String { match *c { Color::Red => "red".to_owned(), Color::None => "none".to_owned(), Color::Blue => "blue".to_owned(), Color::Grey => "grey".to_owned(), Color::Green => "green".to_owned(), Color::Black => "black".to_owned(), Color::White => "white".to_owned(), Color::Yellow => "yellow".to_owned(), } } pub trait SVGObject { fn to_string(&self) -> String; } pub struct SVG { pub width: u64, pub height: u64, pub view_width: u64, pub view_height: u64, pub objects: Vec<Box<SVGObject>>, } pub struct SVGLine { pub x1: u64, pub y1: u64, pub x2: u64, pub y2: u64, pub stroke: u64, pub color: Color, } pub struct SVGRect { pub x: u64, pub y: u64, pub w: u64, pub h: u64, pub fill: Color, } pub struct SVGCircle { pub cx: u64, pub cy: u64, pub radius: u64, } pub struct SVGVertex { pub x: u64, pub y: u64, } pub struct SVGPoly { pub color: Color, pub stroke: u64, pub vertices: Vec<SVGVertex>, } impl SVGLine { pub fn new( x1: u64, y1: u64, x2: u64, y2: u64, w: u64, color: Color ) -> SVGLine { SVGLine{x1: x1, y1: y1, x2: x2, y2: y2, stroke: w, color: color} } } impl SVGObject for SVGLine { fn to_string(&self) -> String { format!(
{ format!("{},{}", self.x, self.y) } } impl SVGPoly { pub fn new(c: Color, stroke: u64) -> SVGPoly { let v: Vec<SVGVertex> = Vec::new(); SVGPoly{color: c, stroke: stroke, vertices: v} } pub fn addv(&mut self, x: u64, y: u64) { self.vertices.push(SVGVertex::new(x, y)); } } impl SVGObject for SVGPoly { fn to_string(&self) -> String { String::from("not implemented") } } impl SVG { pub fn new(w: u64, h: u64, vx: u64, vy: u64) -> SVG { return SVG { width: w, height: h, view_width: vx, view_height: vy, objects: Vec::new(), }; } pub fn add_object(&mut self, sobj: Box<SVGObject>) -> usize { self.objects.push(sobj); return self.objects.len(); } pub fn to_file(&mut self, fname: &str) -> Result<u8, String> { let head = format!( "<svg width=\"{}\" height=\"{}\" viewBox=\"0 0 {} {}\" xmlns=\"http://www.w3.org/2000/svg\" version=\"1.1\">", self.width, self.height, self.view_width, self.view_height, ); let tail = String::from("</svg>"); let mut buf: Vec<String> = Vec::new(); buf.push(head); for obj in &self.objects { buf.push(obj.to_string().to_owned()); } buf.push(tail); let mut f = match File::create(fname) { Ok(new_file) => new_file, Err(why) => { return Err(format!( "Couldn't create '{:?}': {}", fname, why.description() )); } }; for stringthing in buf { match f.write(stringthing.as_ref()) { Ok(_) => {} _ => { return Err(format!("Failed to write bytes to file")); } }; } Ok(0) } } #[cfg(test)] mod tests { #[test] fn test_create_svg() { use svg::*; let mut s = SVG::new(1024, 1024, 1024, 1024); let rect = SVGRect::new(0, 0, 1024, 1024, Color::White); let line = SVGLine::new(0, 0, 1024, 1024, 5, Color::Black); let line2 = SVGLine::new(1024, 0, 0, 1024, 10, Color::Black); s.add_object(Box::new(rect)); s.add_object(Box::new(line)); s.add_object(Box::new(line2)); s.to_file("test.svg"); } }
"<line x1=\"{}\" y1=\"{}\" x2=\"{}\" y2=\"{}\" stroke=\"{}\" stroke-width=\"{}\" />", self.x1, self.y1, self.x2, self.y2, color_to_string(&self.color), self.stroke, ) } } impl SVGRect { pub fn new(x: u64, y: u64, w: u64, h: u64, fill: Color) -> SVGRect { SVGRect{x: x, y: y, w: w, h: h, fill: fill} } } impl SVGObject for SVGRect { fn to_string(&self) -> String { format!( "<rect x=\"{}\" y=\"{}\" width=\"{}\" height=\"{}\" fill=\"{}\" />", self.x, self.y, self.w, self.h, color_to_string(&self.fill), ) } } impl SVGCircle { pub fn new(cx: u64, cy: u64, r: u64) -> SVGCircle { SVGCircle{cx: cx, cy: cy, radius: r} } } impl SVGObject for SVGCircle { fn to_string(&self) -> String { format!( "<circle cx=\"{}\" cy=\"{}\" r=\"{}\" fill=\"{}\" />", self.cx, self.cy, self.radius, "none" ) } } impl SVGVertex { pub fn new(x: u64, y: u64) -> SVGVertex { SVGVertex { x: x, y: y } } pub fn to_string(&self) -> String
random
[ { "content": "// flip a value in a certain axis\n\n// if the axis is set to zero, just return the initial value\n\nfn flatten(v: u64, m: u64) -> u64 {\n\n match m == 0 {\n\n true => v,\n\n _ => m - v,\n\n }\n\n}\n\n\n\n\n", "file_path": "src/mapmaker.rs", "rank": 2, "score": 7...
Rust
src/libterm/lib.rs
Ryman/rust
11571cd9c1cde63c3b46ca65e608b84647785ac8
#![crate_id = "term#0.11-pre"] #![comment = "Simple ANSI color library"] #![license = "MIT/ASL2"] #![crate_type = "rlib"] #![crate_type = "dylib"] #![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", html_favicon_url = "http://www.rust-lang.org/favicon.ico", html_root_url = "http://static.rust-lang.org/doc/master")] #![feature(macro_rules)] #![deny(missing_doc)] extern crate collections; use std::io; use std::os; use terminfo::TermInfo; use terminfo::searcher::open; use terminfo::parser::compiled::{parse, msys_terminfo}; use terminfo::parm::{expand, Number, Variables}; pub mod terminfo; pub mod color { pub type Color = u16; pub static BLACK: Color = 0u16; pub static RED: Color = 1u16; pub static GREEN: Color = 2u16; pub static YELLOW: Color = 3u16; pub static BLUE: Color = 4u16; pub static MAGENTA: Color = 5u16; pub static CYAN: Color = 6u16; pub static WHITE: Color = 7u16; pub static BRIGHT_BLACK: Color = 8u16; pub static BRIGHT_RED: Color = 9u16; pub static BRIGHT_GREEN: Color = 10u16; pub static BRIGHT_YELLOW: Color = 11u16; pub static BRIGHT_BLUE: Color = 12u16; pub static BRIGHT_MAGENTA: Color = 13u16; pub static BRIGHT_CYAN: Color = 14u16; pub static BRIGHT_WHITE: Color = 15u16; } pub mod attr { pub enum Attr { Bold, Dim, Italic(bool), Underline(bool), Blink, Standout(bool), Reverse, Secure, ForegroundColor(super::color::Color), BackgroundColor(super::color::Color) } } fn cap_for_attr(attr: attr::Attr) -> &'static str { match attr { attr::Bold => "bold", attr::Dim => "dim", attr::Italic(true) => "sitm", attr::Italic(false) => "ritm", attr::Underline(true) => "smul", attr::Underline(false) => "rmul", attr::Blink => "blink", attr::Standout(true) => "smso", attr::Standout(false) => "rmso", attr::Reverse => "rev", attr::Secure => "invis", attr::ForegroundColor(_) => "setaf", attr::BackgroundColor(_) => "setab" } } pub struct Terminal<T> { num_colors: u16, out: T, ti: Box<TermInfo>, } impl<T: Writer> Terminal<T> { pub fn new(out: 
T) -> Result<Terminal<T>, ~str> { let term = match os::getenv("TERM") { Some(t) => t, None => return Err("TERM environment variable undefined".to_owned()) }; let mut file = match open(term) { Ok(file) => file, Err(err) => { if "cygwin" == term { return Ok(Terminal { out: out, ti: msys_terminfo(), num_colors: 8 }); } return Err(err); } }; let inf = try!(parse(&mut file, false)); let nc = if inf.strings.find_equiv(&("setaf")).is_some() && inf.strings.find_equiv(&("setab")).is_some() { inf.numbers.find_equiv(&("colors")).map_or(0, |&n| n) } else { 0 }; return Ok(Terminal {out: out, ti: inf, num_colors: nc}); } pub fn fg(&mut self, color: color::Color) -> io::IoResult<bool> { let color = self.dim_if_necessary(color); if self.num_colors > color { let s = expand(self.ti .strings .find_equiv(&("setaf")) .unwrap() .as_slice(), [Number(color as int)], &mut Variables::new()); if s.is_ok() { try!(self.out.write(s.unwrap().as_slice())); return Ok(true) } } Ok(false) } pub fn bg(&mut self, color: color::Color) -> io::IoResult<bool> { let color = self.dim_if_necessary(color); if self.num_colors > color { let s = expand(self.ti .strings .find_equiv(&("setab")) .unwrap() .as_slice(), [Number(color as int)], &mut Variables::new()); if s.is_ok() { try!(self.out.write(s.unwrap().as_slice())); return Ok(true) } } Ok(false) } pub fn attr(&mut self, attr: attr::Attr) -> io::IoResult<bool> { match attr { attr::ForegroundColor(c) => self.fg(c), attr::BackgroundColor(c) => self.bg(c), _ => { let cap = cap_for_attr(attr); let parm = self.ti.strings.find_equiv(&cap); if parm.is_some() { let s = expand(parm.unwrap().as_slice(), [], &mut Variables::new()); if s.is_ok() { try!(self.out.write(s.unwrap().as_slice())); return Ok(true) } } Ok(false) } } } pub fn supports_attr(&self, attr: attr::Attr) -> bool { match attr { attr::ForegroundColor(_) | attr::BackgroundColor(_) => { self.num_colors > 0 } _ => { let cap = cap_for_attr(attr); self.ti.strings.find_equiv(&cap).is_some() } } } pub fn 
reset(&mut self) -> io::IoResult<()> { let mut cap = self.ti.strings.find_equiv(&("sgr0")); if cap.is_none() { cap = self.ti.strings.find_equiv(&("sgr")); if cap.is_none() { cap = self.ti.strings.find_equiv(&("op")); } } let s = cap.map_or(Err("can't find terminfo capability `sgr0`".to_owned()), |op| { expand(op.as_slice(), [], &mut Variables::new()) }); if s.is_ok() { return self.out.write(s.unwrap().as_slice()) } Ok(()) } fn dim_if_necessary(&self, color: color::Color) -> color::Color { if color >= self.num_colors && color >= 8 && color < 16 { color-8 } else { color } } pub fn unwrap(self) -> T { self.out } pub fn get_ref<'a>(&'a self) -> &'a T { &self.out } pub fn get_mut<'a>(&'a mut self) -> &'a mut T { &mut self.out } } impl<T: Writer> Writer for Terminal<T> { fn write(&mut self, buf: &[u8]) -> io::IoResult<()> { self.out.write(buf) } fn flush(&mut self) -> io::IoResult<()> { self.out.flush() } }
#![crate_id = "term#0.11-pre"] #![comment = "Simple ANSI color library"] #![license = "MIT/ASL2"] #![crate_type = "rlib"] #![crate_type = "dylib"] #![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", html_favicon_url = "http://www.rust-lang.org/favicon.ico", html_root_url = "http://static.rust-lang.org/doc/master")] #![feature(macro_rules)] #![deny(missing_doc)] extern crate collections; use std::io; use std::os; use terminfo::TermInfo; use terminfo::searcher::open; use terminfo::parser::compiled::{parse, msys_terminfo}; use terminfo::parm::{expand, Number, Variables}; pub mod terminfo; pub mod color { pub type Color = u16; pub static BLACK: Color = 0u16; pub static RED: Color = 1u16; pub static GREEN: Color = 2u16; pub static YELLOW: Color = 3u16; pub static BLUE: Color = 4u16; pub static MAGENTA: Color = 5u16; pub static CYAN: Color = 6u16; pub static WHITE: Color = 7u16; pub static BRIGHT_BLACK: Color = 8u16; pub static BRIGHT_RED: Color = 9u16; pub static BRIGHT_GREEN: Color = 10u16; pub static BRIGHT_YELLOW: Color = 11u16; pub static BRIGHT_BLUE: Color = 12u16; pub static BRIGHT_MAGENTA: Color = 13u16; pub static BRIGHT_CYAN: Color = 14u16; pub static BRIGHT_WHITE: Color = 15u16; } pub mod attr { pub enum Attr { Bold, Dim, Italic(bool), Underline(bool), Blink, Standout(bool), Reverse, Secure, ForegroundColor(super::color::Color), BackgroundColor(super::color::Color) } } fn cap_for_attr(attr: attr::Attr) -> &'static str { match attr { attr::Bold => "bold", attr::Dim => "dim", attr::Italic(true) => "sitm", attr::Italic(false) => "ritm", attr::Underline(true) => "smul", attr::Underline(false) => "rmul", attr::Blink => "blink", attr::Standout(true) => "smso", attr::Standout(false) => "rmso", attr::Reverse => "rev", attr::Secure => "invis", attr::ForegroundColor(_) => "setaf", attr::BackgroundColor(_) => "setab" } } pub struct Terminal<T> { num_colors: u16, out: T, ti: Box<TermInfo>, } impl<T: Writer> Terminal<T> { pub fn new(out: 
T) -> Result<Terminal<T>, ~str> { let term = match os::getenv("TERM") { Some(t) => t, None => return Err("TERM environment variable undefined".to_owned()) }; let mut file = match open(term) { Ok(file) => file, Err(err) => { if "cygwin" == term { return Ok(Terminal { out: out, ti: msys_terminfo(), num_colors: 8 }); } return Err(err); } }; let inf = try!(parse(&mut file, fal
pub fn fg(&mut self, color: color::Color) -> io::IoResult<bool> { let color = self.dim_if_necessary(color); if self.num_colors > color { let s = expand(self.ti .strings .find_equiv(&("setaf")) .unwrap() .as_slice(), [Number(color as int)], &mut Variables::new()); if s.is_ok() { try!(self.out.write(s.unwrap().as_slice())); return Ok(true) } } Ok(false) } pub fn bg(&mut self, color: color::Color) -> io::IoResult<bool> { let color = self.dim_if_necessary(color); if self.num_colors > color { let s = expand(self.ti .strings .find_equiv(&("setab")) .unwrap() .as_slice(), [Number(color as int)], &mut Variables::new()); if s.is_ok() { try!(self.out.write(s.unwrap().as_slice())); return Ok(true) } } Ok(false) } pub fn attr(&mut self, attr: attr::Attr) -> io::IoResult<bool> { match attr { attr::ForegroundColor(c) => self.fg(c), attr::BackgroundColor(c) => self.bg(c), _ => { let cap = cap_for_attr(attr); let parm = self.ti.strings.find_equiv(&cap); if parm.is_some() { let s = expand(parm.unwrap().as_slice(), [], &mut Variables::new()); if s.is_ok() { try!(self.out.write(s.unwrap().as_slice())); return Ok(true) } } Ok(false) } } } pub fn supports_attr(&self, attr: attr::Attr) -> bool { match attr { attr::ForegroundColor(_) | attr::BackgroundColor(_) => { self.num_colors > 0 } _ => { let cap = cap_for_attr(attr); self.ti.strings.find_equiv(&cap).is_some() } } } pub fn reset(&mut self) -> io::IoResult<()> { let mut cap = self.ti.strings.find_equiv(&("sgr0")); if cap.is_none() { cap = self.ti.strings.find_equiv(&("sgr")); if cap.is_none() { cap = self.ti.strings.find_equiv(&("op")); } } let s = cap.map_or(Err("can't find terminfo capability `sgr0`".to_owned()), |op| { expand(op.as_slice(), [], &mut Variables::new()) }); if s.is_ok() { return self.out.write(s.unwrap().as_slice()) } Ok(()) } fn dim_if_necessary(&self, color: color::Color) -> color::Color { if color >= self.num_colors && color >= 8 && color < 16 { color-8 } else { color } } pub fn unwrap(self) -> T { self.out } pub 
fn get_ref<'a>(&'a self) -> &'a T { &self.out } pub fn get_mut<'a>(&'a mut self) -> &'a mut T { &mut self.out } } impl<T: Writer> Writer for Terminal<T> { fn write(&mut self, buf: &[u8]) -> io::IoResult<()> { self.out.write(buf) } fn flush(&mut self) -> io::IoResult<()> { self.out.flush() } }
se)); let nc = if inf.strings.find_equiv(&("setaf")).is_some() && inf.strings.find_equiv(&("setab")).is_some() { inf.numbers.find_equiv(&("colors")).map_or(0, |&n| n) } else { 0 }; return Ok(Terminal {out: out, ti: inf, num_colors: nc}); }
function_block-function_prefixed
[ { "content": "enum Color { cyan, magenta, yellow, black }\n\n\n\nimpl Equal for Color {\n\n fn isEq(a: Color, b: Color) -> bool {\n\n match (a, b) {\n\n (cyan, cyan) => { true }\n\n (magenta, magenta) => { true }\n\n (yellow, yellow) => { true }\n\n (blac...
Rust
src/error.rs
ygf11/bincode
f33abb21b45ff20b63be2a5ab134fce0d6d86d59
#[non_exhaustive] #[derive(Debug)] pub enum EncodeError { UnexpectedEnd, RefCellAlreadyBorrowed { inner: core::cell::BorrowError, type_name: &'static str, }, Other(&'static str), #[cfg(feature = "alloc")] OtherString(alloc::string::String), #[cfg(feature = "std")] InvalidPathCharacters, #[cfg(feature = "std")] Io { error: std::io::Error, index: usize, }, #[cfg(feature = "std")] LockFailed { type_name: &'static str, }, #[cfg(feature = "std")] InvalidSystemTime { inner: std::time::SystemTimeError, time: std::time::SystemTime, }, #[cfg(feature = "serde")] SequenceMustHaveLength, } impl core::fmt::Display for EncodeError { fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { write!(f, "{:?}", self) } } #[non_exhaustive] #[derive(Debug, PartialEq)] pub enum DecodeError { UnexpectedEnd, LimitExceeded, InvalidIntegerType { expected: IntegerType, found: IntegerType, }, NonZeroTypeIsZero { non_zero_type: IntegerType, }, UnexpectedVariant { type_name: &'static str, allowed: AllowedEnumVariants, found: u32, }, Utf8(core::str::Utf8Error), InvalidCharEncoding([u8; 4]), InvalidBooleanValue(u8), ArrayLengthMismatch { required: usize, found: usize, }, EmptyEnum { type_name: &'static str, }, InvalidDuration { secs: u64, nanos: u32, }, InvalidSystemTime { duration: core::time::Duration, }, #[cfg(feature = "std")] CStrNulError { inner: std::ffi::FromBytesWithNulError, }, #[cfg(feature = "std")] CStringNulError { inner: std::ffi::FromVecWithNulError, }, #[cfg(feature = "alloc")] OtherString(alloc::string::String), #[cfg(feature = "serde")] SerdeAnyNotSupported, #[cfg(feature = "serde")] SerdeIdentifierNotSupported, #[cfg(feature = "serde")] SerdeIgnoredAnyNotSupported, #[cfg(feature = "serde")] CannotBorrowOwnedData, } impl core::fmt::Display for DecodeError { fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { write!(f, "{:?}", self) } } impl DecodeError { pub(crate) fn change_integer_type_to_signed(self) -> DecodeError { match self { 
Self::InvalidIntegerType { expected, found } => Self::InvalidIntegerType { expected: expected.into_signed(), found: found.into_signed(), }, other => other, } } } #[non_exhaustive] #[derive(Debug, PartialEq)] pub enum AllowedEnumVariants { #[allow(missing_docs)] Range { min: u32, max: u32 }, Allowed(&'static [u32]), } #[non_exhaustive] #[derive(Debug, PartialEq, Eq)] #[allow(missing_docs)] pub enum IntegerType { U8, U16, U32, U64, U128, Usize, I8, I16, I32, I64, I128, Isize, Reserved, } impl IntegerType { pub(crate) fn into_signed(self) -> Self { match self { Self::U8 => Self::I8, Self::U16 => Self::I16, Self::U32 => Self::I32, Self::U64 => Self::I64, Self::U128 => Self::I128, Self::Usize => Self::Isize, other => other, } } }
#[non_exhaustive] #[derive(Debug)] pub enum EncodeError { UnexpectedEnd, RefCellAlreadyBorrowed { inner: core::cell::BorrowError, type_name: &'static str, }, Other(&'static str), #[cfg(feature = "alloc")] OtherString(alloc::string::String), #[cfg(feature = "std")] InvalidPathCharacters, #[cfg(feature = "std")] Io { error: std::io::Error, index: usize, }, #[cfg(feature = "std")] LockFailed { type_name: &'static str, }, #[cfg(feature = "std")] InvalidSystemTime { inner: std::time::SystemTimeError, time: std::time::SystemTime, }, #[cfg(feature = "serde")] SequenceMustHaveLength, } impl core::fmt::Display for EncodeError { fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { write!(f, "{:?}", self) } } #[non_exhaustive] #[derive(Debug, PartialEq)] pub enum DecodeError { UnexpectedEnd, LimitExceeded, InvalidIntegerType { expected: IntegerType, found: IntegerType, }, NonZeroTypeIsZero { non_zero_type: IntegerType, }, UnexpectedVariant { type_name: &'static str, allowed: AllowedEnumVariants, found: u32, }, Utf8(core::str::Utf8Error), InvalidCharEncoding([u8; 4]), InvalidBooleanValue(u8), ArrayLengthMismatch { required: usize, found: usize, }, EmptyEnum { type_name: &'static str, }, InvalidDuration { secs: u64, nanos: u32, }, InvalidSystemTime { duration: core::time::Duration, }, #[cfg(feature = "std")] CStrNulError { inner: std::ffi::FromBytesWithNulError, }, #[cfg(feature = "std")] CStringNulError { inner: std::ffi::FromVecWithNulError, }, #[cfg(feature = "alloc")] OtherString(alloc::string::String), #[cfg(feature = "serde")] SerdeAnyNotSupported, #[cfg(feature = "serde")] SerdeIdentifierNotSupported, #[cfg(feature = "serde")] SerdeIgnoredAnyNotSupported, #[cfg(feature = "serde")] CannotBorrowOwnedData, } impl core::fmt::Display for DecodeError { fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { write!(f, "{:?}", self) } } impl DecodeError { pub(crate) fn change_integer_type_to_signed(self) -> DecodeError { match self { 
Self::InvalidIntegerType { expected, found } => Self::InvalidIntegerType { expected: expected.into_signed(), found: found.into_signed(), }, other => other, } } } #[non_exhaustive] #[derive(Debug, PartialEq)] pub enum AllowedEnumVariants { #[allow(missing_docs)] Range { min: u32, max: u32 }, Allowed(&'static [u32]), } #[non_exhaustive] #[derive(Debug, PartialEq, Eq)] #[allow(missing_docs)] pub enum IntegerType { U8, U16, U32, U64, U128, Usize, I8, I16, I32, I64, I128, Isize, Reserved, } impl IntegerType { pub(crate) fn into_signed(self) -> Self { match self { Self::U8 => Self::I8, Self::U16 =>
=> Self::Isize, other => other, } } }
Self::I16, Self::U32 => Self::I32, Self::U64 => Self::I64, Self::U128 => Self::I128, Self::Usize
function_block-random_span
[ { "content": "/// Encode a `serde` `Serialize` type into a given byte slice with the bincode algorithm\n\npub fn encode_to_slice<T, C>(t: T, slice: &mut [u8], config: C) -> Result<usize, EncodeError>\n\nwhere\n\n T: Serialize,\n\n C: Config,\n\n{\n\n let mut encoder =\n\n crate::enc::EncoderImpl...
Rust
server/src/realtime/mod.rs
kalgynirae/sudoku
e32241a732a53fdfff08a7a5298e2281ecee5af6
pub mod protocol; pub mod tasks; use futures::prelude::*; use futures::stream::{SplitSink, SplitStream}; use log::{debug, error, warn}; use std::sync::Arc; use tokio::sync::Mutex; use warp::filters::BoxedFilter; use warp::reject::Reject; use warp::ws::{Message, WebSocket}; use warp::{Filter, Reply}; use crate::cursors::SessionCursor; use crate::global_state::GlobalState; use crate::realtime::protocol::{ serialize_response, write_to_socket, ResponseMessage, SocketWriteError, }; use crate::realtime::tasks::error::ApiTaskError; use crate::realtime::tasks::{CursorNotifyReceiver, DiffBroadcastReceiver, RequestReceiver}; use crate::room::{ClientSyncId, RoomId, RoomState, Session}; use crate::sql; #[derive(Debug)] struct InternalErrorReject; impl Reject for InternalErrorReject {} pub fn get_filter( global_state: Arc<GlobalState>, db_pool: Arc<sql::Pool>, ) -> BoxedFilter<(impl Reply,)> { warp::path!("api" / "v1" / "realtime" / ..) .and( warp::path::param::<RoomId>() .map(Some) .or(warp::any().map(|| None)) .unify(), ) .and(warp::any().map(move || global_state.clone())) .and(warp::any().map(move || db_pool.clone())) .and_then( |room_id, global_state: Arc<GlobalState>, db_pool: Arc<_>| async move { Result::<Arc<Mutex<RoomState>>, warp::reject::Rejection>::Ok(match room_id { Some(room_id) => global_state .get_room(&db_pool, &room_id) .await .map_err(|_| warp::reject::custom(InternalErrorReject))? 
.ok_or_else(warp::reject::not_found)?, None => { let room_id = RoomId::random(); let room_state = Arc::new(Mutex::new(RoomState::new(room_id))); global_state.insert_room(room_id, room_state.clone()).await; room_state } }) }, ) .and(warp::path::end()) .and(warp::ws()) .map(|room_state: Arc<Mutex<RoomState>>, ws: warp::ws::Ws| { ws.max_send_queue(1 * 1024 * 1024) .max_message_size(512 * 1024) .max_frame_size(512 * 1024) .on_upgrade(move |web_socket| handle_realtime_api(web_socket, room_state)) }) .boxed() } async fn handle_realtime_api(ws: WebSocket, room_state: Arc<Mutex<RoomState>>) { let (ws_tx, ws_rx) = ws.split(); let ws_tx = Arc::new(Mutex::new(ws_tx)); let ws_rx = Arc::new(Mutex::new(ws_rx)); let Session { session_id, diff_rx, cursor: SessionCursor { tx: cursor_tx, rx: cursor_rx, }, } = match room_state.lock().await.new_session() { Ok(session) => session, Err(err) => { let response_result = serialize_response(ResponseMessage::from(err)); if let Ok(response) = response_result { let _possible_error = write_to_socket(&ws_tx, response).await; } close_websocket(ws_tx, ws_rx).await; return; } }; let last_received_sync_id: Arc<Mutex<Option<ClientSyncId>>> = Arc::new(Mutex::new(None)); let last_sent_sync_id: Arc<Mutex<Option<ClientSyncId>>> = Arc::new(Mutex::new(None)); debug!("sending init message to client"); let write_result = async { let init_msg = { let rs = room_state.lock().await; ResponseMessage::Init { room_id: rs.room_id.to_string(), board_state: rs.board.clone(), } }; write_to_socket(&ws_tx, serialize_response(init_msg)?).await } .await; if write_result.is_err() { debug!("failed to send init message, so closing socket instead"); close_websocket(ws_tx, ws_rx).await; return; } let request_receiver = RequestReceiver { room_state: room_state.clone(), ws_tx: ws_tx.clone(), ws_rx: ws_rx.clone(), session_id, last_received_sync_id: last_received_sync_id.clone(), cursor_tx, } .run(); let diff_broadcast_receiver = DiffBroadcastReceiver { room_state: 
room_state.clone(), ws_tx: ws_tx.clone(), diff_rx, session_id, last_received_sync_id: last_received_sync_id.clone(), last_sent_sync_id: last_sent_sync_id.clone(), } .run(); let cursor_notify_receiver = CursorNotifyReceiver { ws_tx: ws_tx.clone(), cursor_rx, } .run(); let result = tokio::select! { r = request_receiver => r, r = diff_broadcast_receiver => r, r = cursor_notify_receiver => r, }; match result { Err(err) => match err { ApiTaskError::CursorReceive(_) => { error!("{}", err); } ApiTaskError::SocketWrite(SocketWriteError::Serialization(_)) => { error!("{}", err); } ApiTaskError::SocketWrite(SocketWriteError::Warp(_)) => { warn!("{}", err); } }, Ok(_) => {} } close_websocket(ws_tx, ws_rx).await; } async fn close_websocket( ws_tx: Arc<Mutex<SplitSink<WebSocket, Message>>>, ws_rx: Arc<Mutex<SplitStream<WebSocket>>>, ) { debug!("gracefully closing websocket"); let ws_tx = Arc::try_unwrap(ws_tx) .expect("there should be one ref to ws_tx once our tasks are finished") .into_inner(); let ws_rx = Arc::try_unwrap(ws_rx) .expect("there should be one ref to ws_rx once our tasks are finished") .into_inner(); if let Err(err) = ws_tx .reunite(ws_rx) .expect("ws_tx and ws_rx are always from the same stream") .close() .await { debug!("failed to close websocket: {}", err); } }
pub mod protocol; pub mod tasks; use futures::prelude::*; use futures::stream::{SplitSink, SplitStream}; use log::{debug, error, warn}; use std::sync::Arc; use tokio::sync::Mutex; use warp::filters::BoxedFilter; use warp::reject::Reject; use warp::ws::{Message, WebSocket}; use warp::{Filter, Reply}; use crate
room_state: room_state.clone(), ws_tx: ws_tx.clone(), ws_rx: ws_rx.clone(), session_id, last_received_sync_id: last_received_sync_id.clone(), cursor_tx, } .run(); let diff_broadcast_receiver = DiffBroadcastReceiver { room_state: room_state.clone(), ws_tx: ws_tx.clone(), diff_rx, session_id, last_received_sync_id: last_received_sync_id.clone(), last_sent_sync_id: last_sent_sync_id.clone(), } .run(); let cursor_notify_receiver = CursorNotifyReceiver { ws_tx: ws_tx.clone(), cursor_rx, } .run(); let result = tokio::select! { r = request_receiver => r, r = diff_broadcast_receiver => r, r = cursor_notify_receiver => r, }; match result { Err(err) => match err { ApiTaskError::CursorReceive(_) => { error!("{}", err); } ApiTaskError::SocketWrite(SocketWriteError::Serialization(_)) => { error!("{}", err); } ApiTaskError::SocketWrite(SocketWriteError::Warp(_)) => { warn!("{}", err); } }, Ok(_) => {} } close_websocket(ws_tx, ws_rx).await; } async fn close_websocket( ws_tx: Arc<Mutex<SplitSink<WebSocket, Message>>>, ws_rx: Arc<Mutex<SplitStream<WebSocket>>>, ) { debug!("gracefully closing websocket"); let ws_tx = Arc::try_unwrap(ws_tx) .expect("there should be one ref to ws_tx once our tasks are finished") .into_inner(); let ws_rx = Arc::try_unwrap(ws_rx) .expect("there should be one ref to ws_rx once our tasks are finished") .into_inner(); if let Err(err) = ws_tx .reunite(ws_rx) .expect("ws_tx and ws_rx are always from the same stream") .close() .await { debug!("failed to close websocket: {}", err); } }
::cursors::SessionCursor; use crate::global_state::GlobalState; use crate::realtime::protocol::{ serialize_response, write_to_socket, ResponseMessage, SocketWriteError, }; use crate::realtime::tasks::error::ApiTaskError; use crate::realtime::tasks::{CursorNotifyReceiver, DiffBroadcastReceiver, RequestReceiver}; use crate::room::{ClientSyncId, RoomId, RoomState, Session}; use crate::sql; #[derive(Debug)] struct InternalErrorReject; impl Reject for InternalErrorReject {} pub fn get_filter( global_state: Arc<GlobalState>, db_pool: Arc<sql::Pool>, ) -> BoxedFilter<(impl Reply,)> { warp::path!("api" / "v1" / "realtime" / ..) .and( warp::path::param::<RoomId>() .map(Some) .or(warp::any().map(|| None)) .unify(), ) .and(warp::any().map(move || global_state.clone())) .and(warp::any().map(move || db_pool.clone())) .and_then( |room_id, global_state: Arc<GlobalState>, db_pool: Arc<_>| async move { Result::<Arc<Mutex<RoomState>>, warp::reject::Rejection>::Ok(match room_id { Some(room_id) => global_state .get_room(&db_pool, &room_id) .await .map_err(|_| warp::reject::custom(InternalErrorReject))? 
.ok_or_else(warp::reject::not_found)?, None => { let room_id = RoomId::random(); let room_state = Arc::new(Mutex::new(RoomState::new(room_id))); global_state.insert_room(room_id, room_state.clone()).await; room_state } }) }, ) .and(warp::path::end()) .and(warp::ws()) .map(|room_state: Arc<Mutex<RoomState>>, ws: warp::ws::Ws| { ws.max_send_queue(1 * 1024 * 1024) .max_message_size(512 * 1024) .max_frame_size(512 * 1024) .on_upgrade(move |web_socket| handle_realtime_api(web_socket, room_state)) }) .boxed() } async fn handle_realtime_api(ws: WebSocket, room_state: Arc<Mutex<RoomState>>) { let (ws_tx, ws_rx) = ws.split(); let ws_tx = Arc::new(Mutex::new(ws_tx)); let ws_rx = Arc::new(Mutex::new(ws_rx)); let Session { session_id, diff_rx, cursor: SessionCursor { tx: cursor_tx, rx: cursor_rx, }, } = match room_state.lock().await.new_session() { Ok(session) => session, Err(err) => { let response_result = serialize_response(ResponseMessage::from(err)); if let Ok(response) = response_result { let _possible_error = write_to_socket(&ws_tx, response).await; } close_websocket(ws_tx, ws_rx).await; return; } }; let last_received_sync_id: Arc<Mutex<Option<ClientSyncId>>> = Arc::new(Mutex::new(None)); let last_sent_sync_id: Arc<Mutex<Option<ClientSyncId>>> = Arc::new(Mutex::new(None)); debug!("sending init message to client"); let write_result = async { let init_msg = { let rs = room_state.lock().await; ResponseMessage::Init { room_id: rs.room_id.to_string(), board_state: rs.board.clone(), } }; write_to_socket(&ws_tx, serialize_response(init_msg)?).await } .await; if write_result.is_err() { debug!("failed to send init message, so closing socket instead"); close_websocket(ws_tx, ws_rx).await; return; } let request_receiver = RequestReceiver {
random
[ { "content": "pub fn serialize_response(msg: ResponseMessage) -> Result<Message, SocketWriteError> {\n\n let text = serde_json::to_string(&msg)?;\n\n Ok(Message::text(text))\n\n}\n\n\n\npub async fn write_to_socket(\n\n ws_tx: &Mutex<SplitSink<WebSocket, Message>>,\n\n msg: Message,\n\n) -> Result<(...
Rust
src/auto/exception.rs
gtk-rs/javascriptcore-rs
313a15205b5b3d1daa6983faa3948a6d02af42cb
use crate::Context; use glib::{object::IsA, translate::*}; use std::fmt; glib::wrapper! { #[doc(alias = "JSCException")] pub struct Exception(Object<ffi::JSCException, ffi::JSCExceptionClass>); match fn { type_ => || ffi::jsc_exception_get_type(), } } impl Exception { #[doc(alias = "jsc_exception_new")] pub fn new(context: &impl IsA<Context>, message: &str) -> Exception { unsafe { from_glib_full(ffi::jsc_exception_new( context.as_ref().to_glib_none().0, message.to_glib_none().0, )) } } #[doc(alias = "jsc_exception_new_with_name")] #[doc(alias = "new_with_name")] pub fn with_name(context: &impl IsA<Context>, name: &str, message: &str) -> Exception { unsafe { from_glib_full(ffi::jsc_exception_new_with_name( context.as_ref().to_glib_none().0, name.to_glib_none().0, message.to_glib_none().0, )) } } } impl fmt::Display for Exception { #[inline] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str(&ExceptionExt::to_str(self)) } } pub const NONE_EXCEPTION: Option<&Exception> = None; pub trait ExceptionExt: 'static { #[doc(alias = "jsc_exception_get_backtrace_string")] #[doc(alias = "get_backtrace_string")] fn backtrace_string(&self) -> Option<glib::GString>; #[doc(alias = "jsc_exception_get_column_number")] #[doc(alias = "get_column_number")] fn column_number(&self) -> u32; #[doc(alias = "jsc_exception_get_line_number")] #[doc(alias = "get_line_number")] fn line_number(&self) -> u32; #[doc(alias = "jsc_exception_get_message")] #[doc(alias = "get_message")] fn message(&self) -> Option<glib::GString>; #[doc(alias = "jsc_exception_get_name")] #[doc(alias = "get_name")] fn name(&self) -> Option<glib::GString>; #[doc(alias = "jsc_exception_get_source_uri")] #[doc(alias = "get_source_uri")] fn source_uri(&self) -> Option<glib::GString>; #[doc(alias = "jsc_exception_report")] fn report(&self) -> Option<glib::GString>; #[doc(alias = "jsc_exception_to_string")] #[doc(alias = "to_string")] fn to_str(&self) -> glib::GString; } impl<O: IsA<Exception>> ExceptionExt for O 
{ fn backtrace_string(&self) -> Option<glib::GString> { unsafe { from_glib_none(ffi::jsc_exception_get_backtrace_string( self.as_ref().to_glib_none().0, )) } } fn column_number(&self) -> u32 { unsafe { ffi::jsc_exception_get_column_number(self.as_ref().to_glib_none().0) } } fn line_number(&self) -> u32 { unsafe { ffi::jsc_exception_get_line_number(self.as_ref().to_glib_none().0) } } fn message(&self) -> Option<glib::GString> { unsafe { from_glib_none(ffi::jsc_exception_get_message( self.as_ref().to_glib_none().0, )) } } fn name(&self) -> Option<glib::GString> { unsafe { from_glib_none(ffi::jsc_exception_get_name(self.as_ref().to_glib_none().0)) } } fn source_uri(&self) -> Option<glib::GString> { unsafe { from_glib_none(ffi::jsc_exception_get_source_uri( self.as_ref().to_glib_none().0, )) } } fn report(&self) -> Option<glib::GString> { unsafe { from_glib_full(ffi::jsc_exception_report(self.as_ref().to_glib_none().0)) } } fn to_str(&self) -> glib::GString { unsafe { from_glib_full(ffi::jsc_exception_to_string(self.as_ref().to_glib_none().0)) } } }
use crate::Context; use glib::{object::IsA, translate::*}; use std::fmt; glib::wrapper! { #[doc(alias = "JSCException")] pub struct Exception(Object<ffi::JSCException, ffi::JSCExceptionClass>); match fn { type_ => || ffi::jsc_exception_get_type(), } } impl Exception { #[doc(alias = "jsc_exception_new")] pub fn new(context: &impl IsA<Context>, message: &str) -> Exception { unsafe { from_glib_full(ffi::jsc_exception_new( context.as_ref().to_glib_none().0, message.to_glib_none().0, )) } } #[doc(alias = "jsc_exception_new_with_name")] #[doc(alias = "new_with_name")] pub fn with_name(context: &impl IsA<Context>, name: &str, message: &str) -> Exception { unsafe {
} } } impl fmt::Display for Exception { #[inline] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str(&ExceptionExt::to_str(self)) } } pub const NONE_EXCEPTION: Option<&Exception> = None; pub trait ExceptionExt: 'static { #[doc(alias = "jsc_exception_get_backtrace_string")] #[doc(alias = "get_backtrace_string")] fn backtrace_string(&self) -> Option<glib::GString>; #[doc(alias = "jsc_exception_get_column_number")] #[doc(alias = "get_column_number")] fn column_number(&self) -> u32; #[doc(alias = "jsc_exception_get_line_number")] #[doc(alias = "get_line_number")] fn line_number(&self) -> u32; #[doc(alias = "jsc_exception_get_message")] #[doc(alias = "get_message")] fn message(&self) -> Option<glib::GString>; #[doc(alias = "jsc_exception_get_name")] #[doc(alias = "get_name")] fn name(&self) -> Option<glib::GString>; #[doc(alias = "jsc_exception_get_source_uri")] #[doc(alias = "get_source_uri")] fn source_uri(&self) -> Option<glib::GString>; #[doc(alias = "jsc_exception_report")] fn report(&self) -> Option<glib::GString>; #[doc(alias = "jsc_exception_to_string")] #[doc(alias = "to_string")] fn to_str(&self) -> glib::GString; } impl<O: IsA<Exception>> ExceptionExt for O { fn backtrace_string(&self) -> Option<glib::GString> { unsafe { from_glib_none(ffi::jsc_exception_get_backtrace_string( self.as_ref().to_glib_none().0, )) } } fn column_number(&self) -> u32 { unsafe { ffi::jsc_exception_get_column_number(self.as_ref().to_glib_none().0) } } fn line_number(&self) -> u32 { unsafe { ffi::jsc_exception_get_line_number(self.as_ref().to_glib_none().0) } } fn message(&self) -> Option<glib::GString> { unsafe { from_glib_none(ffi::jsc_exception_get_message( self.as_ref().to_glib_none().0, )) } } fn name(&self) -> Option<glib::GString> { unsafe { from_glib_none(ffi::jsc_exception_get_name(self.as_ref().to_glib_none().0)) } } fn source_uri(&self) -> Option<glib::GString> { unsafe { from_glib_none(ffi::jsc_exception_get_source_uri( self.as_ref().to_glib_none().0, )) } 
} fn report(&self) -> Option<glib::GString> { unsafe { from_glib_full(ffi::jsc_exception_report(self.as_ref().to_glib_none().0)) } } fn to_str(&self) -> glib::GString { unsafe { from_glib_full(ffi::jsc_exception_to_string(self.as_ref().to_glib_none().0)) } } }
from_glib_full(ffi::jsc_exception_new_with_name( context.as_ref().to_glib_none().0, name.to_glib_none().0, message.to_glib_none().0, ))
call_expression
[ { "content": "fn get_var(name: &str, default: &str) -> Result<Vec<String>, Box<dyn Error>> {\n\n match env::var(name) {\n\n Ok(value) => Ok(shell_words::split(&value)?),\n\n Err(env::VarError::NotPresent) => Ok(shell_words::split(default)?),\n\n Err(err) => Err(format!(\"{} {}\", name, err).into()),\n...
Rust
algebra-core/src/serialize/mod.rs
AleoHQ/zexe
d190054f587791da790d372be62867809e251f12
mod error; mod flags; pub use crate::io::{Read, Write}; pub use error::*; pub use flags::*; #[cfg(feature = "derive")] #[doc(hidden)] pub use algebra_core_derive::*; use crate::Vec; pub trait CanonicalSerializeWithFlags: CanonicalSerialize { fn serialize_with_flags<W: Write, F: Flags>( &self, writer: &mut W, flags: F, ) -> Result<(), SerializationError>; } pub trait CanonicalSerialize { fn serialize<W: Write>(&self, writer: &mut W) -> Result<(), SerializationError>; fn serialized_size(&self) -> usize; #[inline] fn serialize_uncompressed<W: Write>(&self, writer: &mut W) -> Result<(), SerializationError> { self.serialize(writer) } #[inline] fn uncompressed_size(&self) -> usize { self.serialized_size() } } pub trait CanonicalDeserializeWithFlags: Sized { fn deserialize_with_flags<R: Read, F: Flags>( reader: &mut R, ) -> Result<(Self, F), SerializationError>; } pub trait CanonicalDeserialize: Sized { fn deserialize<R: Read>(reader: &mut R) -> Result<Self, SerializationError>; #[inline] fn deserialize_uncompressed<R: Read>(reader: &mut R) -> Result<Self, SerializationError> { Self::deserialize(reader) } } impl CanonicalSerialize for u64 { #[inline] fn serialize<W: Write>(&self, writer: &mut W) -> Result<(), SerializationError> { Ok(writer.write_all(&self.to_le_bytes())?) 
} #[inline] fn serialized_size(&self) -> usize { 8 } } impl CanonicalDeserialize for u64 { #[inline] fn deserialize<R: Read>(reader: &mut R) -> Result<Self, SerializationError> { let mut bytes = [0u8; 8]; reader.read_exact(&mut bytes)?; Ok(u64::from_le_bytes(bytes)) } } impl<T: CanonicalSerialize> CanonicalSerialize for Vec<T> { #[inline] fn serialize<W: Write>(&self, writer: &mut W) -> Result<(), SerializationError> { let len = self.len() as u64; len.serialize(writer)?; for item in self.iter() { item.serialize(writer)?; } Ok(()) } #[inline] fn serialized_size(&self) -> usize { 8 + self .iter() .map(|item| item.serialized_size()) .sum::<usize>() } #[inline] fn serialize_uncompressed<W: Write>(&self, writer: &mut W) -> Result<(), SerializationError> { let len = self.len() as u64; len.serialize(writer)?; for item in self.iter() { item.serialize_uncompressed(writer)?; } Ok(()) } #[inline] fn uncompressed_size(&self) -> usize { 8 + self .iter() .map(|item| item.uncompressed_size()) .sum::<usize>() } } impl<T: CanonicalDeserialize> CanonicalDeserialize for Vec<T> { #[inline] fn deserialize<R: Read>(reader: &mut R) -> Result<Self, SerializationError> { let len = u64::deserialize(reader)?; let mut values = vec![]; for _ in 0..len { values.push(T::deserialize(reader)?); } Ok(values) } #[inline] fn deserialize_uncompressed<R: Read>(reader: &mut R) -> Result<Self, SerializationError> { let len = u64::deserialize(reader)?; let mut values = vec![]; for _ in 0..len { values.push(T::deserialize_uncompressed(reader)?); } Ok(values) } } #[inline] pub fn buffer_bit_byte_size(modulus_bits: usize) -> (usize, usize) { let byte_size = (modulus_bits + 7) / 8; ((byte_size * 8), byte_size) } macro_rules! 
impl_prime_field_serializer { ($field: ident, $params: ident, $byte_size: expr) => { impl<P: $params> CanonicalSerializeWithFlags for $field<P> { #[allow(unused_qualifications)] fn serialize_with_flags<W: crate::io::Write, F: crate::serialize::Flags>( &self, writer: &mut W, flags: F, ) -> Result<(), crate::serialize::SerializationError> { const BYTE_SIZE: usize = $byte_size; let (output_bit_size, output_byte_size) = crate::serialize::buffer_bit_byte_size($field::<P>::size_in_bits()); if F::len() > (output_bit_size - P::MODULUS_BITS as usize) { return Err(crate::serialize::SerializationError::NotEnoughSpace); } let mut bytes = [0u8; BYTE_SIZE]; self.write(&mut bytes[..])?; bytes[output_byte_size - 1] |= flags.u8_bitmask(); writer.write_all(&bytes[..output_byte_size])?; Ok(()) } } impl<P: $params> CanonicalSerialize for $field<P> { #[allow(unused_qualifications)] #[inline] fn serialize<W: crate::io::Write>( &self, writer: &mut W, ) -> Result<(), crate::serialize::SerializationError> { self.serialize_with_flags(writer, crate::serialize::EmptyFlags) } #[inline] fn serialized_size(&self) -> usize { let (_, output_byte_size) = crate::serialize::buffer_bit_byte_size($field::<P>::size_in_bits()); output_byte_size } } impl<P: $params> CanonicalDeserializeWithFlags for $field<P> { #[allow(unused_qualifications)] fn deserialize_with_flags<R: crate::io::Read, F: crate::serialize::Flags>( reader: &mut R, ) -> Result<(Self, F), crate::serialize::SerializationError> { const BYTE_SIZE: usize = $byte_size; let (output_bit_size, output_byte_size) = crate::serialize::buffer_bit_byte_size($field::<P>::size_in_bits()); if F::len() > (output_bit_size - P::MODULUS_BITS as usize) { return Err(crate::serialize::SerializationError::NotEnoughSpace); } let mut masked_bytes = [0; BYTE_SIZE]; reader.read_exact(&mut masked_bytes[..output_byte_size])?; let flags = F::from_u8_remove_flags(&mut masked_bytes[output_byte_size - 1]); Ok((Self::read(&masked_bytes[..])?, flags)) } } impl<P: $params> 
CanonicalDeserialize for $field<P> { #[allow(unused_qualifications)] fn deserialize<R: crate::io::Read>( reader: &mut R, ) -> Result<Self, crate::serialize::SerializationError> { const BYTE_SIZE: usize = $byte_size; let (_, output_byte_size) = crate::serialize::buffer_bit_byte_size($field::<P>::size_in_bits()); let mut masked_bytes = [0; BYTE_SIZE]; reader.read_exact(&mut masked_bytes[..output_byte_size])?; Ok(Self::read(&masked_bytes[..])?) } } }; } macro_rules! impl_sw_curve_serializer { ($params: ident) => { impl<P: $params> CanonicalSerialize for GroupAffine<P> { #[allow(unused_qualifications)] #[inline] fn serialize<W: crate::io::Write>( &self, writer: &mut W, ) -> Result<(), crate::serialize::SerializationError> { if self.is_zero() { let flags = crate::serialize::SWFlags::infinity(); P::BaseField::zero().serialize_with_flags(writer, flags) } else { let flags = crate::serialize::SWFlags::from_y_sign(self.y > -self.y); self.x.serialize_with_flags(writer, flags) } } #[inline] fn serialized_size(&self) -> usize { self.x.serialized_size() } #[allow(unused_qualifications)] #[inline] fn serialize_uncompressed<W: crate::io::Write>( &self, writer: &mut W, ) -> Result<(), crate::serialize::SerializationError> { let flags = if self.is_zero() { crate::serialize::SWFlags::infinity() } else { crate::serialize::SWFlags::default() }; self.x.serialize(writer)?; self.y.serialize_with_flags(writer, flags)?; Ok(()) } #[inline] fn uncompressed_size(&self) -> usize { self.x.serialized_size() + self.y.serialized_size() } } impl<P: $params> CanonicalDeserialize for GroupAffine<P> { #[allow(unused_qualifications)] fn deserialize<R: crate::io::Read>( reader: &mut R, ) -> Result<Self, crate::serialize::SerializationError> { let (x, flags): (P::BaseField, crate::serialize::SWFlags) = CanonicalDeserializeWithFlags::deserialize_with_flags(reader)?; if flags.is_infinity() { Ok(Self::zero()) } else { let p = GroupAffine::<P>::get_point_from_x(x, flags.is_positive().unwrap()) 
.ok_or(crate::serialize::SerializationError::InvalidData)?; if !p.is_in_correct_subgroup_assuming_on_curve() { return Err(crate::serialize::SerializationError::InvalidData); } Ok(p) } } #[allow(unused_qualifications)] fn deserialize_uncompressed<R: crate::io::Read>( reader: &mut R, ) -> Result<Self, crate::serialize::SerializationError> { let x: P::BaseField = CanonicalDeserialize::deserialize(reader)?; let (y, flags): (P::BaseField, crate::serialize::SWFlags) = CanonicalDeserializeWithFlags::deserialize_with_flags(reader)?; let p = GroupAffine::<P>::new(x, y, flags.is_infinity()); if !p.is_in_correct_subgroup_assuming_on_curve() { return Err(crate::serialize::SerializationError::InvalidData); } Ok(p) } } }; } macro_rules! impl_edwards_curve_serializer { ($params: ident) => { impl<P: $params> CanonicalSerialize for GroupAffine<P> { #[allow(unused_qualifications)] #[inline] fn serialize<W: crate::io::Write>( &self, writer: &mut W, ) -> Result<(), crate::serialize::SerializationError> { if self.is_zero() { let flags = crate::serialize::EdwardsFlags::default(); P::BaseField::zero().serialize_with_flags(writer, flags) } else { let flags = crate::serialize::EdwardsFlags::from_y_sign(self.y > -self.y); self.x.serialize_with_flags(writer, flags) } } #[inline] fn serialized_size(&self) -> usize { CanonicalSerialize::serialized_size(&self.x) } #[allow(unused_qualifications)] #[inline] fn serialize_uncompressed<W: crate::io::Write>( &self, writer: &mut W, ) -> Result<(), crate::serialize::SerializationError> { self.x.serialize_uncompressed(writer)?; self.y.serialize_uncompressed(writer)?; Ok(()) } #[inline] fn uncompressed_size(&self) -> usize { self.x.uncompressed_size() + self.y.uncompressed_size() } } impl<P: $params> CanonicalDeserialize for GroupAffine<P> { #[allow(unused_qualifications)] fn deserialize<R: crate::io::Read>( reader: &mut R, ) -> Result<Self, crate::serialize::SerializationError> { let (x, flags): (P::BaseField, crate::serialize::EdwardsFlags) = 
CanonicalDeserializeWithFlags::deserialize_with_flags(reader)?; if x == P::BaseField::zero() { Ok(Self::zero()) } else { let p = GroupAffine::<P>::get_point_from_x(x, flags.is_positive()) .ok_or(crate::serialize::SerializationError::InvalidData)?; if !p.is_in_correct_subgroup_assuming_on_curve() { return Err(crate::serialize::SerializationError::InvalidData); } Ok(p) } } #[allow(unused_qualifications)] fn deserialize_uncompressed<R: crate::io::Read>( reader: &mut R, ) -> Result<Self, crate::serialize::SerializationError> { let x: P::BaseField = CanonicalDeserialize::deserialize(reader)?; let y: P::BaseField = CanonicalDeserialize::deserialize(reader)?; let p = GroupAffine::<P>::new(x, y); if !p.is_in_correct_subgroup_assuming_on_curve() { return Err(crate::serialize::SerializationError::InvalidData); } Ok(p) } } }; } #[cfg(test)] mod test { use crate::{io::Cursor, CanonicalDeserialize, CanonicalSerialize}; #[test] fn test_primitives() { let a = 192830918u64; let mut serialized = vec![0u8; a.serialized_size()]; let mut cursor = Cursor::new(&mut serialized[..]); a.serialize(&mut cursor).unwrap(); let mut cursor = Cursor::new(&serialized[..]); let b = u64::deserialize(&mut cursor).unwrap(); assert_eq!(a, b); } }
mod error; mod flags; pub use crate::io::{Read, Write}; pub use error::*; pub use flags::*; #[cfg(feature = "derive")] #[doc(hidden)] pub use algebra_core_derive::*; use crate::Vec; pub trait CanonicalSerializeWithFlags: CanonicalSerialize { fn serialize_with_flags<W: Write, F: Flags>( &self, writer: &mut W, flags: F, ) -> Result<(), SerializationError>; } pub trait CanonicalSerialize { fn serialize<W: Write>(&self, writer: &mut W) -> Result<(), SerializationError>; fn serialized_size(&self) -> usize; #[inline] fn serialize_uncompressed<W: Write>(&self, writer: &mut W) -> Result<(), SerializationError> { self.serialize(writer) } #[inline] fn uncompressed_size(&self) -> usize { self.serialized_size() } } pub trait CanonicalDeserializeWithFlags: Sized { fn deserialize_with_flags<R: Read, F: Flags>( reader: &mut R, ) -> Result<(Self, F), SerializationError>; } pub trait CanonicalDeserialize: Sized { fn deserialize<R: Read>(reader: &mut R) -> Result<Self, SerializationError>; #[inline] fn deserialize_uncompressed<R: Read>(reader: &mut R) -> Result<Self, SerializationError> { Self::deserialize(reader) } } impl CanonicalSerialize for u64 { #[inline] fn serialize<W: Write>(&self, writer: &mut W) -> Result<(), SerializationError> { Ok(writer.write_all(&self.to_le_bytes())?) 
} #[inline] fn serialized_size(&self) -> usize { 8 } } impl CanonicalDeserialize for u64 { #[inline] fn deserialize<R: Read>(reader: &mut R) -> Result<Self, SerializationError> { let mut bytes = [0u8; 8]; reader.read_exact(&mut bytes)?; Ok(u64::from_le_bytes(bytes)) } } impl<T: CanonicalSerialize> CanonicalSerialize for Vec<T> { #[inline] fn serialize<W: Write>(&self, writer: &mut W) -> Result<(), SerializationError> { let len = self.len() as u64; len.serialize(writer)?; for item in self.iter() { item.serialize(writer)?; } Ok(()) } #[inline] fn serialized_size(&self) -> usize { 8 + self .iter() .map(|item| item.serialized_size()) .sum::<usize>() } #[inline] fn serialize_uncompressed<W: Write>(&self, writer: &mut W) -> Result<(), SerializationError> { let len = self.len() as u64; len.serialize(writer)?; for item in self.iter() { item.serialize_uncompressed(writer)?; } Ok(()) } #[inline] fn uncompressed_size(&self) -> usize { 8 + self .iter() .map(|item| item.uncompressed_size()) .sum::<usize>() } } impl<T: CanonicalDeserialize> CanonicalDeserialize for Vec<T> { #[inline] fn deserialize<R: Read>(reader: &mut R) -> Result<Self, SerializationError> { let len = u64::deserialize(reader)?; let mut values = vec![]; for _ in 0..len { values.push(T::deserialize(reader)?); } Ok(values) } #[inline] fn deserialize_uncompressed<R: Read>(reader: &mut R) -> Result<Self, SerializationError> { let len = u64::deserialize(reader)?; let mut values = vec![]; for _ in 0..len { values.push(T::deserialize_uncompressed(reader)?); } Ok(values) } } #[inline] pub fn buffer_bit_byte_size(modulus_bits: usize) -> (usize, usize) { let byte_size = (modulus_bits + 7) / 8; ((byte_size * 8), byte_size) } macro_rules! 
impl_prime_field_serializer { ($field: ident, $params: ident, $byte_size: expr) => { impl<P: $params> CanonicalSerializeWithFlags for $field<P> { #[allow(unused_qualifications)] fn serialize_with_flags<W: crate::io::Write, F: crate::serialize::Flags>( &self, writer: &mut W, flags: F, ) -> Result<(), crate::serialize::SerializationError> { const BYTE_SIZE: usize = $byte_size; let (output_bit_size, output_byte_size) = crate::serialize::buffer_bit_byte_size($field::<P>::size_in_bits()); if F::len() > (output_bit_size - P::MODULUS_BITS as usize) { return Err(crate::serialize::SerializationError::NotEnoughSpace); } let mut bytes = [0u8; BYTE_SIZE]; self.write(&mut bytes[..])?; bytes[output_byte_size - 1] |= flags.u8_bitmask(); writer.write_all(&bytes[..output_byte_size])?; Ok(()) } } impl<P: $params> CanonicalSerialize for $field<P> { #[allow(unused_qualifications)] #[inline] fn serialize<W: crate::io::Write>( &self, writer: &mut W, ) -> Result<(), crate::serialize::SerializationError> { self.serialize_with_flags(writer, crate::serialize::EmptyFlags) } #[inline] fn serialized_size(&self) -> usize { let (_, output_byte_size) = crate::serialize::buffer_bit_byte_size($field::<P>::size_in_bits()); output_byte_size } } impl<P: $params> CanonicalDeserializeWithFlags for $field<P> { #[allow(unused_qualifications)] fn deserialize_with_flags<R: crate::io::Read, F: crate::serialize::Flags>( reader: &mut R, ) -> Result<(Self, F), crate::serialize::SerializationError> { const BYTE_SIZE: usize = $byte_size; let (output_bit_size, output_byte_size) = crate::serialize::buffer_bit_byte_size($field::<P>::size_i
; Ok(Self::read(&masked_bytes[..])?) } } }; } macro_rules! impl_sw_curve_serializer { ($params: ident) => { impl<P: $params> CanonicalSerialize for GroupAffine<P> { #[allow(unused_qualifications)] #[inline] fn serialize<W: crate::io::Write>( &self, writer: &mut W, ) -> Result<(), crate::serialize::SerializationError> { if self.is_zero() { let flags = crate::serialize::SWFlags::infinity(); P::BaseField::zero().serialize_with_flags(writer, flags) } else { let flags = crate::serialize::SWFlags::from_y_sign(self.y > -self.y); self.x.serialize_with_flags(writer, flags) } } #[inline] fn serialized_size(&self) -> usize { self.x.serialized_size() } #[allow(unused_qualifications)] #[inline] fn serialize_uncompressed<W: crate::io::Write>( &self, writer: &mut W, ) -> Result<(), crate::serialize::SerializationError> { let flags = if self.is_zero() { crate::serialize::SWFlags::infinity() } else { crate::serialize::SWFlags::default() }; self.x.serialize(writer)?; self.y.serialize_with_flags(writer, flags)?; Ok(()) } #[inline] fn uncompressed_size(&self) -> usize { self.x.serialized_size() + self.y.serialized_size() } } impl<P: $params> CanonicalDeserialize for GroupAffine<P> { #[allow(unused_qualifications)] fn deserialize<R: crate::io::Read>( reader: &mut R, ) -> Result<Self, crate::serialize::SerializationError> { let (x, flags): (P::BaseField, crate::serialize::SWFlags) = CanonicalDeserializeWithFlags::deserialize_with_flags(reader)?; if flags.is_infinity() { Ok(Self::zero()) } else { let p = GroupAffine::<P>::get_point_from_x(x, flags.is_positive().unwrap()) .ok_or(crate::serialize::SerializationError::InvalidData)?; if !p.is_in_correct_subgroup_assuming_on_curve() { return Err(crate::serialize::SerializationError::InvalidData); } Ok(p) } } #[allow(unused_qualifications)] fn deserialize_uncompressed<R: crate::io::Read>( reader: &mut R, ) -> Result<Self, crate::serialize::SerializationError> { let x: P::BaseField = CanonicalDeserialize::deserialize(reader)?; let (y, flags): 
(P::BaseField, crate::serialize::SWFlags) = CanonicalDeserializeWithFlags::deserialize_with_flags(reader)?; let p = GroupAffine::<P>::new(x, y, flags.is_infinity()); if !p.is_in_correct_subgroup_assuming_on_curve() { return Err(crate::serialize::SerializationError::InvalidData); } Ok(p) } } }; } macro_rules! impl_edwards_curve_serializer { ($params: ident) => { impl<P: $params> CanonicalSerialize for GroupAffine<P> { #[allow(unused_qualifications)] #[inline] fn serialize<W: crate::io::Write>( &self, writer: &mut W, ) -> Result<(), crate::serialize::SerializationError> { if self.is_zero() { let flags = crate::serialize::EdwardsFlags::default(); P::BaseField::zero().serialize_with_flags(writer, flags) } else { let flags = crate::serialize::EdwardsFlags::from_y_sign(self.y > -self.y); self.x.serialize_with_flags(writer, flags) } } #[inline] fn serialized_size(&self) -> usize { CanonicalSerialize::serialized_size(&self.x) } #[allow(unused_qualifications)] #[inline] fn serialize_uncompressed<W: crate::io::Write>( &self, writer: &mut W, ) -> Result<(), crate::serialize::SerializationError> { self.x.serialize_uncompressed(writer)?; self.y.serialize_uncompressed(writer)?; Ok(()) } #[inline] fn uncompressed_size(&self) -> usize { self.x.uncompressed_size() + self.y.uncompressed_size() } } impl<P: $params> CanonicalDeserialize for GroupAffine<P> { #[allow(unused_qualifications)] fn deserialize<R: crate::io::Read>( reader: &mut R, ) -> Result<Self, crate::serialize::SerializationError> { let (x, flags): (P::BaseField, crate::serialize::EdwardsFlags) = CanonicalDeserializeWithFlags::deserialize_with_flags(reader)?; if x == P::BaseField::zero() { Ok(Self::zero()) } else { let p = GroupAffine::<P>::get_point_from_x(x, flags.is_positive()) .ok_or(crate::serialize::SerializationError::InvalidData)?; if !p.is_in_correct_subgroup_assuming_on_curve() { return Err(crate::serialize::SerializationError::InvalidData); } Ok(p) } } #[allow(unused_qualifications)] fn 
deserialize_uncompressed<R: crate::io::Read>( reader: &mut R, ) -> Result<Self, crate::serialize::SerializationError> { let x: P::BaseField = CanonicalDeserialize::deserialize(reader)?; let y: P::BaseField = CanonicalDeserialize::deserialize(reader)?; let p = GroupAffine::<P>::new(x, y); if !p.is_in_correct_subgroup_assuming_on_curve() { return Err(crate::serialize::SerializationError::InvalidData); } Ok(p) } } }; } #[cfg(test)] mod test { use crate::{io::Cursor, CanonicalDeserialize, CanonicalSerialize}; #[test] fn test_primitives() { let a = 192830918u64; let mut serialized = vec![0u8; a.serialized_size()]; let mut cursor = Cursor::new(&mut serialized[..]); a.serialize(&mut cursor).unwrap(); let mut cursor = Cursor::new(&serialized[..]); let b = u64::deserialize(&mut cursor).unwrap(); assert_eq!(a, b); } }
n_bits()); if F::len() > (output_bit_size - P::MODULUS_BITS as usize) { return Err(crate::serialize::SerializationError::NotEnoughSpace); } let mut masked_bytes = [0; BYTE_SIZE]; reader.read_exact(&mut masked_bytes[..output_byte_size])?; let flags = F::from_u8_remove_flags(&mut masked_bytes[output_byte_size - 1]); Ok((Self::read(&masked_bytes[..])?, flags)) } } impl<P: $params> CanonicalDeserialize for $field<P> { #[allow(unused_qualifications)] fn deserialize<R: crate::io::Read>( reader: &mut R, ) -> Result<Self, crate::serialize::SerializationError> { const BYTE_SIZE: usize = $byte_size; let (_, output_byte_size) = crate::serialize::buffer_bit_byte_size($field::<P>::size_in_bits()); let mut masked_bytes = [0; BYTE_SIZE]; reader.read_exact(&mut masked_bytes[..output_byte_size])?
random
[ { "content": "pub fn batch_inversion<F: Field>(v: &mut [F]) {\n\n // Montgomery’s Trick and Fast Implementation of Masked AES\n\n // Genelle, Prouff and Quisquater\n\n // Section 3.2\n\n\n\n // First pass: compute [a, ab, abc, ...]\n\n let mut prod = Vec::with_capacity(v.len());\n\n let mut tm...
Rust
arci-ros/src/ros_robot_client.rs
mertcookimg/openrr
4bdb49d483383e912b37907c6f651b52f2b7a742
use std::sync::Arc; use arci::*; use parking_lot::Mutex; use crate::msg; #[derive(Clone)] pub struct RosRobotClient(Arc<RosRobotClientInner>); struct RosRobotClientInner { joint_names: Vec<String>, trajectory_publisher: Option<rosrust::Publisher<msg::trajectory_msgs::JointTrajectory>>, _joint_state_subscriber: rosrust::Subscriber, joint_state_message: Arc<Mutex<msg::sensor_msgs::JointState>>, complete_condition: Mutex<Arc<dyn CompleteCondition>>, } impl From<TrajectoryPoint> for msg::trajectory_msgs::JointTrajectoryPoint { fn from(tp: TrajectoryPoint) -> Self { let mut message = msg::trajectory_msgs::JointTrajectoryPoint { positions: tp.positions, ..Default::default() }; message.time_from_start.sec = tp.time_from_start.as_secs() as i32; message.time_from_start.nsec = tp.time_from_start.subsec_nanos() as i32; message } } impl RosRobotClient { pub fn new( joint_names: Vec<String>, joint_state_topic_name: &str, trajectory_topic_name: &str, ) -> Self { let joint_state_message = Arc::new(Mutex::new(msg::sensor_msgs::JointState::default())); let joint_state_message_for_sub = joint_state_message.clone(); let _joint_state_subscriber = rosrust::subscribe( joint_state_topic_name, 1, move |joint_state: msg::sensor_msgs::JointState| { let mut aaa = joint_state_message_for_sub.lock(); *aaa = joint_state; }, ) .unwrap(); while joint_state_message.lock().name.is_empty() { rosrust::ros_info!("waiting joint state publisher"); std::thread::sleep(std::time::Duration::from_millis(100)); } let trajectory_publisher = if trajectory_topic_name.is_empty() { None } else { let publisher = rosrust::publish(trajectory_topic_name, 1).unwrap(); let rate = rosrust::rate(10.0); while rosrust::is_ok() && publisher.subscriber_count() == 0 { rosrust::ros_info!("waiting trajectory subscriber"); rate.sleep(); } Some(publisher) }; Self(Arc::new(RosRobotClientInner { joint_names, trajectory_publisher, _joint_state_subscriber, joint_state_message, complete_condition: 
Mutex::new(Arc::new(TotalJointDiffCondition::default())), })) } } impl JointTrajectoryClient for RosRobotClient { fn joint_names(&self) -> Vec<String> { self.0.joint_names.clone() } fn current_joint_positions(&self) -> Result<Vec<f64>, Error> { let message = self.0.joint_state_message.lock(); Ok(message.position.clone()) } fn send_joint_positions( &self, positions: Vec<f64>, duration: std::time::Duration, ) -> Result<WaitFuture, Error> { if let Some(ref publisher) = self.0.trajectory_publisher { if self.0.joint_names.len() != positions.len() { return Err(arci::Error::LengthMismatch { model: self.0.joint_names.len(), input: positions.len(), }); } let point = msg::trajectory_msgs::JointTrajectoryPoint { positions: positions.to_vec(), time_from_start: rosrust::Duration::from_nanos(duration.as_nanos() as i64), ..Default::default() }; let traj = msg::trajectory_msgs::JointTrajectory { joint_names: self.0.joint_names.clone(), points: vec![point], ..Default::default() }; publisher.send(traj).unwrap(); let this = self.clone(); Ok(WaitFuture::new(async move { let complete_condition = this.0.complete_condition.lock().clone(); complete_condition .wait(&this, &positions, duration.as_secs_f64()) .await })) } else { Ok(WaitFuture::ready()) } } fn send_joint_trajectory(&self, trajectory: Vec<TrajectoryPoint>) -> Result<WaitFuture, Error> { if let Some(ref publisher) = self.0.trajectory_publisher { let traj = msg::trajectory_msgs::JointTrajectory { joint_names: self.0.joint_names.clone(), points: trajectory.iter().map(|t| (*t).clone().into()).collect(), ..Default::default() }; publisher.send(traj).unwrap(); let this = self.clone(); Ok(WaitFuture::new(async move { let complete_condition = this.0.complete_condition.lock().clone(); complete_condition .wait( &this, &trajectory.last().unwrap().positions, trajectory.last().unwrap().time_from_start.as_secs_f64(), ) .await })) } else { Ok(WaitFuture::ready()) } } } impl SetCompleteCondition for RosRobotClient { fn 
set_complete_condition(&mut self, condition: Box<dyn CompleteCondition>) { *self.0.complete_condition.lock() = condition.into(); } }
use std::sync::Arc; use arci::*; use parking_lot::Mutex; use crate::msg; #[derive(Clone)] pub struct RosRobotClient(Arc<RosRobotClientInner>); struct RosRobotClientInner { joint_names: Vec<String>, trajectory_publisher: Option<rosrust::Publisher<msg::trajectory_msgs::JointTrajectory>>, _joint_state_subscriber: rosrust::Subscriber, joint_state_message: Arc<Mutex<msg::sensor_msgs::JointState>>, complete_condition: Mutex<Arc<dyn CompleteCondition>>, } impl From<TrajectoryPoint> for msg::trajectory_msgs::JointTrajectoryPoint { fn from(tp: TrajectoryPoint) -> Self { let mut message = msg::trajectory_msgs::JointTrajectoryPoint { positions: tp.positions, ..Default::default() }; message.time_from_start.sec = tp.time_from_start.as_secs() as i32; message.time_from_start.nsec = tp.time_from_start.subsec_nanos() as i32; message } } impl RosRobotClient { pub fn new( joint_names: Vec<String>, joint_state_topic_name: &str, trajectory_topic_name: &str, ) -> Self { let joint_state_message = Arc::new(Mutex::new(msg::sensor_msgs::JointState::default())); let joint_state_message_for_sub = joint_state_message.clone(); let _joint_state_subscriber = rosrust::subscribe( joint_state_topic_name, 1, move |joint_state: msg::sensor_msgs::JointState| { let mut aaa = joint_state_message_for_sub.lock(); *aaa = joint_state; }, ) .unwrap(); while joint_state_message.lock().name.is_empty() { rosrust::ros_info!("waiting joint state publisher"); std::thread::sleep(std::time::Duration::from_millis(100)); } let trajectory_publisher = if trajectory_topic_name.is_empty() { None } else { let publisher = rosrust::publish(trajectory_topic_name, 1).unwrap(); let rate = rosrust::rate(10.0); while rosrust::is_ok() && publisher.subscriber_count() == 0 { rosrust::ros_info!("waiting trajectory subscriber"); rate.sleep(); } Some(publisher) }; Self(Arc::new(RosRobotClientInner { joint_names, trajectory_publisher, _joint_state_subscriber, joint_state_message, complete_condition: 
Mutex::new(Arc::new(TotalJointDiffCondition::default())),
complete_condition.lock().clone(); complete_condition .wait( &this, &trajectory.last().unwrap().positions, trajectory.last().unwrap().time_from_start.as_secs_f64(), ) .await })) } else { Ok(WaitFuture::ready()) } } } impl SetCompleteCondition for RosRobotClient { fn set_complete_condition(&mut self, condition: Box<dyn CompleteCondition>) { *self.0.complete_condition.lock() = condition.into(); } }
})) } } impl JointTrajectoryClient for RosRobotClient { fn joint_names(&self) -> Vec<String> { self.0.joint_names.clone() } fn current_joint_positions(&self) -> Result<Vec<f64>, Error> { let message = self.0.joint_state_message.lock(); Ok(message.position.clone()) } fn send_joint_positions( &self, positions: Vec<f64>, duration: std::time::Duration, ) -> Result<WaitFuture, Error> { if let Some(ref publisher) = self.0.trajectory_publisher { if self.0.joint_names.len() != positions.len() { return Err(arci::Error::LengthMismatch { model: self.0.joint_names.len(), input: positions.len(), }); } let point = msg::trajectory_msgs::JointTrajectoryPoint { positions: positions.to_vec(), time_from_start: rosrust::Duration::from_nanos(duration.as_nanos() as i64), ..Default::default() }; let traj = msg::trajectory_msgs::JointTrajectory { joint_names: self.0.joint_names.clone(), points: vec![point], ..Default::default() }; publisher.send(traj).unwrap(); let this = self.clone(); Ok(WaitFuture::new(async move { let complete_condition = this.0.complete_condition.lock().clone(); complete_condition .wait(&this, &positions, duration.as_secs_f64()) .await })) } else { Ok(WaitFuture::ready()) } } fn send_joint_trajectory(&self, trajectory: Vec<TrajectoryPoint>) -> Result<WaitFuture, Error> { if let Some(ref publisher) = self.0.trajectory_publisher { let traj = msg::trajectory_msgs::JointTrajectory { joint_names: self.0.joint_names.clone(), points: trajectory.iter().map(|t| (*t).clone().into()).collect(), ..Default::default() }; publisher.send(traj).unwrap(); let this = self.clone(); Ok(WaitFuture::new(async move { let complete_condition = this.0.
random
[ { "content": "/// # To copy joint name and position between `from` and `to`\n\n///\n\n/// Copy position of same joint name.\n\n/// This function returns Ok() or Err().\n\n///\n\n/// # When this function through Error?\n\n///\n\n/// length of joint names and positions is difference.\n\n///\n\n/// # Sample code\n...
Rust
plonky2/src/gates/exponentiation.rs
mfaulk/plonky2
2cedd1b02a718d19115560647ba1f741eab83260
use std::marker::PhantomData; use plonky2_field::extension_field::Extendable; use plonky2_field::field_types::Field; use plonky2_field::ops::Square; use plonky2_field::packed_field::PackedField; use crate::gates::gate::Gate; use crate::gates::packed_util::PackedEvaluableBase; use crate::gates::util::StridedConstraintConsumer; use crate::hash::hash_types::RichField; use crate::iop::ext_target::ExtensionTarget; use crate::iop::generator::{GeneratedValues, SimpleGenerator, WitnessGenerator}; use crate::iop::target::Target; use crate::iop::wire::Wire; use crate::iop::witness::{PartitionWitness, Witness}; use crate::plonk::circuit_builder::CircuitBuilder; use crate::plonk::circuit_data::CircuitConfig; use crate::plonk::vars::{ EvaluationTargets, EvaluationVars, EvaluationVarsBase, EvaluationVarsBaseBatch, EvaluationVarsBasePacked, }; #[derive(Clone, Debug)] pub(crate) struct ExponentiationGate<F: RichField + Extendable<D>, const D: usize> { pub num_power_bits: usize, pub _phantom: PhantomData<F>, } impl<F: RichField + Extendable<D>, const D: usize> ExponentiationGate<F, D> { pub fn new(num_power_bits: usize) -> Self { Self { num_power_bits, _phantom: PhantomData, } } pub fn new_from_config(config: &CircuitConfig) -> Self { let num_power_bits = Self::max_power_bits(config.num_wires, config.num_routed_wires); Self::new(num_power_bits) } fn max_power_bits(num_wires: usize, num_routed_wires: usize) -> usize { let max_for_routed_wires = num_routed_wires - 2; let max_for_wires = (num_wires - 2) / 2; max_for_routed_wires.min(max_for_wires) } pub fn wire_base(&self) -> usize { 0 } pub fn wire_power_bit(&self, i: usize) -> usize { debug_assert!(i < self.num_power_bits); 1 + i } pub fn wire_output(&self) -> usize { 1 + self.num_power_bits } pub fn wire_intermediate_value(&self, i: usize) -> usize { debug_assert!(i < self.num_power_bits); 2 + self.num_power_bits + i } } impl<F: RichField + Extendable<D>, const D: usize> Gate<F, D> for ExponentiationGate<F, D> { fn id(&self) -> 
String { format!("{:?}<D={}>", self, D) } fn eval_unfiltered(&self, vars: EvaluationVars<F, D>) -> Vec<F::Extension> { let base = vars.local_wires[self.wire_base()]; let power_bits: Vec<_> = (0..self.num_power_bits) .map(|i| vars.local_wires[self.wire_power_bit(i)]) .collect(); let intermediate_values: Vec<_> = (0..self.num_power_bits) .map(|i| vars.local_wires[self.wire_intermediate_value(i)]) .collect(); let output = vars.local_wires[self.wire_output()]; let mut constraints = Vec::with_capacity(self.num_constraints()); for i in 0..self.num_power_bits { let prev_intermediate_value = if i == 0 { F::Extension::ONE } else { intermediate_values[i - 1].square() }; let cur_bit = power_bits[self.num_power_bits - i - 1]; let not_cur_bit = F::Extension::ONE - cur_bit; let computed_intermediate_value = prev_intermediate_value * (cur_bit * base + not_cur_bit); constraints.push(computed_intermediate_value - intermediate_values[i]); } constraints.push(output - intermediate_values[self.num_power_bits - 1]); constraints } fn eval_unfiltered_base_one( &self, _vars: EvaluationVarsBase<F>, _yield_constr: StridedConstraintConsumer<F>, ) { panic!("use eval_unfiltered_base_packed instead"); } fn eval_unfiltered_base_batch(&self, vars_base: EvaluationVarsBaseBatch<F>) -> Vec<F> { self.eval_unfiltered_base_batch_packed(vars_base) } fn eval_unfiltered_recursively( &self, builder: &mut CircuitBuilder<F, D>, vars: EvaluationTargets<D>, ) -> Vec<ExtensionTarget<D>> { let base = vars.local_wires[self.wire_base()]; let power_bits: Vec<_> = (0..self.num_power_bits) .map(|i| vars.local_wires[self.wire_power_bit(i)]) .collect(); let intermediate_values: Vec<_> = (0..self.num_power_bits) .map(|i| vars.local_wires[self.wire_intermediate_value(i)]) .collect(); let output = vars.local_wires[self.wire_output()]; let mut constraints = Vec::with_capacity(self.num_constraints()); let one = builder.one_extension(); for i in 0..self.num_power_bits { let prev_intermediate_value = if i == 0 { one } else { 
builder.square_extension(intermediate_values[i - 1]) }; let cur_bit = power_bits[self.num_power_bits - i - 1]; let mul_by = builder.select_ext_generalized(cur_bit, base, one); let intermediate_value_diff = builder.mul_sub_extension(prev_intermediate_value, mul_by, intermediate_values[i]); constraints.push(intermediate_value_diff); } let output_diff = builder.sub_extension(output, intermediate_values[self.num_power_bits - 1]); constraints.push(output_diff); constraints } fn generators( &self, gate_index: usize, _local_constants: &[F], ) -> Vec<Box<dyn WitnessGenerator<F>>> { let gen = ExponentiationGenerator::<F, D> { gate_index, gate: self.clone(), }; vec![Box::new(gen.adapter())] } fn num_wires(&self) -> usize { self.wire_intermediate_value(self.num_power_bits - 1) + 1 } fn num_constants(&self) -> usize { 0 } fn degree(&self) -> usize { 4 } fn num_constraints(&self) -> usize { self.num_power_bits + 1 } } impl<F: RichField + Extendable<D>, const D: usize> PackedEvaluableBase<F, D> for ExponentiationGate<F, D> { fn eval_unfiltered_base_packed<P: PackedField<Scalar = F>>( &self, vars: EvaluationVarsBasePacked<P>, mut yield_constr: StridedConstraintConsumer<P>, ) { let base = vars.local_wires[self.wire_base()]; let power_bits: Vec<_> = (0..self.num_power_bits) .map(|i| vars.local_wires[self.wire_power_bit(i)]) .collect(); let intermediate_values: Vec<_> = (0..self.num_power_bits) .map(|i| vars.local_wires[self.wire_intermediate_value(i)]) .collect(); let output = vars.local_wires[self.wire_output()]; for i in 0..self.num_power_bits { let prev_intermediate_value = if i == 0 { P::ONES } else { intermediate_values[i - 1].square() }; let cur_bit = power_bits[self.num_power_bits - i - 1]; let not_cur_bit = P::ONES - cur_bit; let computed_intermediate_value = prev_intermediate_value * (cur_bit * base + not_cur_bit); yield_constr.one(computed_intermediate_value - intermediate_values[i]); } yield_constr.one(output - intermediate_values[self.num_power_bits - 1]); } } 
#[derive(Debug)] struct ExponentiationGenerator<F: RichField + Extendable<D>, const D: usize> { gate_index: usize, gate: ExponentiationGate<F, D>, } impl<F: RichField + Extendable<D>, const D: usize> SimpleGenerator<F> for ExponentiationGenerator<F, D> { fn dependencies(&self) -> Vec<Target> { let local_target = |input| Target::wire(self.gate_index, input); let mut deps = Vec::with_capacity(self.gate.num_power_bits + 1); deps.push(local_target(self.gate.wire_base())); for i in 0..self.gate.num_power_bits { deps.push(local_target(self.gate.wire_power_bit(i))); } deps } fn run_once(&self, witness: &PartitionWitness<F>, out_buffer: &mut GeneratedValues<F>) { let local_wire = |input| Wire { gate: self.gate_index, input, }; let get_local_wire = |input| witness.get_wire(local_wire(input)); let num_power_bits = self.gate.num_power_bits; let base = get_local_wire(self.gate.wire_base()); let power_bits = (0..num_power_bits) .map(|i| get_local_wire(self.gate.wire_power_bit(i))) .collect::<Vec<_>>(); let mut intermediate_values = Vec::new(); let mut current_intermediate_value = F::ONE; for i in 0..num_power_bits { if power_bits[num_power_bits - i - 1] == F::ONE { current_intermediate_value *= base; } intermediate_values.push(current_intermediate_value); current_intermediate_value *= current_intermediate_value; } for i in 0..num_power_bits { let intermediate_value_wire = local_wire(self.gate.wire_intermediate_value(i)); out_buffer.set_wire(intermediate_value_wire, intermediate_values[i]); } let output_wire = local_wire(self.gate.wire_output()); out_buffer.set_wire(output_wire, intermediate_values[num_power_bits - 1]); } } #[cfg(test)] mod tests { use std::marker::PhantomData; use anyhow::Result; use plonky2_field::field_types::Field; use plonky2_field::goldilocks_field::GoldilocksField; use plonky2_util::log2_ceil; use rand::Rng; use crate::gates::exponentiation::ExponentiationGate; use crate::gates::gate::Gate; use crate::gates::gate_testing::{test_eval_fns, test_low_degree}; 
use crate::hash::hash_types::HashOut; use crate::plonk::circuit_data::CircuitConfig; use crate::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; use crate::plonk::vars::EvaluationVars; const MAX_POWER_BITS: usize = 17; #[test] fn wire_indices() { let gate = ExponentiationGate::<GoldilocksField, 4> { num_power_bits: 5, _phantom: PhantomData, }; assert_eq!(gate.wire_base(), 0); assert_eq!(gate.wire_power_bit(0), 1); assert_eq!(gate.wire_power_bit(4), 5); assert_eq!(gate.wire_output(), 6); assert_eq!(gate.wire_intermediate_value(0), 7); assert_eq!(gate.wire_intermediate_value(4), 11); } #[test] fn low_degree() { let config = CircuitConfig { num_wires: 120, num_routed_wires: 30, ..CircuitConfig::standard_recursion_config() }; test_low_degree::<GoldilocksField, _, 4>(ExponentiationGate::new_from_config(&config)); } #[test] fn eval_fns() -> Result<()> { const D: usize = 2; type C = PoseidonGoldilocksConfig; type F = <C as GenericConfig<D>>::F; test_eval_fns::<F, C, _, D>(ExponentiationGate::new_from_config( &CircuitConfig::standard_recursion_config(), )) } #[test] fn test_gate_constraint() { const D: usize = 2; type C = PoseidonGoldilocksConfig; type F = <C as GenericConfig<D>>::F; type FF = <C as GenericConfig<D>>::FE; fn get_wires(base: F, power: u64) -> Vec<FF> { let mut power_bits = Vec::new(); let mut cur_power = power; while cur_power > 0 { power_bits.push(cur_power % 2); cur_power /= 2; } let num_power_bits = power_bits.len(); let power_bits_f: Vec<_> = power_bits .iter() .map(|b| F::from_canonical_u64(*b)) .collect(); let mut v = vec![base]; v.extend(power_bits_f); let mut intermediate_values = Vec::new(); let mut current_intermediate_value = F::ONE; for i in 0..num_power_bits { if power_bits[num_power_bits - i - 1] == 1 { current_intermediate_value *= base; } intermediate_values.push(current_intermediate_value); current_intermediate_value *= current_intermediate_value; } let output_value = intermediate_values[num_power_bits - 1]; v.push(output_value); 
v.extend(intermediate_values); v.iter().map(|&x| x.into()).collect::<Vec<_>>() } let mut rng = rand::thread_rng(); let base = F::TWO; let power = rng.gen::<usize>() % (1 << MAX_POWER_BITS); let num_power_bits = log2_ceil(power + 1); let gate = ExponentiationGate::<F, D> { num_power_bits, _phantom: PhantomData, }; let vars = EvaluationVars { local_constants: &[], local_wires: &get_wires(base, power as u64), public_inputs_hash: &HashOut::rand(), }; assert!( gate.eval_unfiltered(vars).iter().all(|x| x.is_zero()), "Gate constraints are not satisfied." ); } }
use std::marker::PhantomData; use plonky2_field::extension_field::Extendable; use plonky2_field::field_types::Field; use plonky2_field::ops::Square; use plonky2_field::packed_field::PackedField; use crate::gates::gate::Gate; use crate::gates::packed_util::PackedEvaluableBase; use crate::gates::util::StridedConstraintConsumer; use crate::hash::hash_types::RichField; use crate::iop::ext_target::ExtensionTarget; use crate::iop::generator::{GeneratedValues, SimpleGenerator, WitnessGenerator}; use crate::iop::target::Target; use crate::iop::wire::Wire; use crate::iop::witness::{PartitionWitness, Witness}; use crate::plonk::circuit_builder::CircuitBuilder; use crate::plonk::circuit_data::CircuitConfig; use crate::plonk::vars::{ EvaluationTargets, EvaluationVars, EvaluationVarsBase, EvaluationVarsBaseBatch, EvaluationVarsBasePacked, }; #[derive(Clone, Debug)] pub(crate) struct ExponentiationGate<F: RichField + Extendable<D>, const D: usize> { pub num_power_bits: usize, pub _phantom: PhantomData<F>, } impl<F: RichField + Extendable<D>, const D: usize> ExponentiationGate<F, D> { pub fn new(num_power_bits: usize) -> Self { Self { num_power_bits, _phantom: PhantomData, } } pub fn new_from_config(config: &CircuitConfig) -> Self { let num_power_bits = Self::max_power_bits(config.num_wires, config.num_routed_wires); Self::new(num_power_bits) } fn max_power_bits(num_wires: usize, num_routed_wires: usize) -> usize { let max_for_routed_wires = num_routed_wires - 2; let max_for_wires = (num_wires - 2) / 2; max_for_routed_wires.min(max_for_wires) } pub fn wire_base(&self) -> usize { 0 } pub fn wire_power_bit(&self, i: usize) -> usize { debug_assert!(i < self.num_power_bits); 1 + i } pub fn wire_output(&self) -> usize { 1 + self.num_power_bits } pub fn wire_intermediate_value(&self, i: usize) -> usize { debug_assert!(i < self.num_power_bits); 2 + self.num_power_bits + i } } impl<F: RichField + Extendable<D>, const D: usize> Gate<F, D> for ExponentiationGate<F, D> { fn id(&self) -> 
String { format!("{:?}<D={}>", self, D) } fn eval_unfiltered(&self, vars: EvaluationVars<F, D>) -> Vec<F::Extension> { let base = vars.local_wires[self.wire_base()]; let power_bits: Vec<_> = (0..self.num_power_bits) .map(|i| vars.local_wires[self.wire_power_bit(i)]) .collect(); let intermediate_values: Vec<_> = (0..self.num_power_bits) .map(|i| vars.local_wires[self.wire_intermediate_value(i)]) .collect(); let output = vars.local_wires[self.wire_output()]; let mut constraints = Vec::with_capacity(self.num_constraints()); for i in 0..self.num_power_bits { let prev_intermediate_value = if i == 0 { F::Extension::ONE } else { intermediate_values[i - 1].square() }; let cur_bit = power_bits[self.num_power_bits - i - 1]; let not_cur_bit = F::Extension::ONE - cur_bit; let computed_intermediate_value = prev_intermediate_value * (cur_bit * base + not_cur_bit); constraints.push(computed_intermediate_value - intermediate_values[i]); } constraints.push(output - intermediate_values[self.num_power_bits - 1]); constraints } fn eval_unfiltered_base_one( &self, _vars: EvaluationVarsBase<F>, _yield_constr: StridedConstraintConsumer<F>, ) { panic!("use eval_unfiltered_base_packed instead"); } fn eval_unfiltered_base_batch(&self, vars_base: EvaluationVarsBaseBatch<F>) -> Vec<F> { self.eval_unfiltered_base_batch_packed(vars_base) } fn eval_unfiltered_recursively( &self, builder: &mut CircuitBuilder<F, D>, vars: EvaluationTargets<D>, ) -> Vec<ExtensionTarget<D>> { let base = vars.local_wires[self.wire_base()]; let power_bits: Vec<_> = (0..self.num_power_bits) .map(|i| vars.local_wires[self.wire_power_bit(i)]) .collect(); let intermediate_values: Vec<_> = (0..self.num_power_bits) .map(|i| vars.local_wires[self.wire_intermediate_value(i)]) .collect(); let output = vars.local_wires[self.wire_output()]; let mut constraints = Vec::with_capacity(self.num_constraints()); let one = builder.one_extension(); for i in 0..self.num_power_bits { let prev_intermediate_value = if i == 0 { one } else { 
builder.square_extension(intermediate_values[i - 1]) }; let cur_bit = power_bits[self.num_power_bits - i - 1]; let mul_by = builder.select_ext_generalized(cur_bit, base, one); let intermediate_value_diff = builder.mul_sub_extension(prev_intermediate_value, mul_by, intermediate_values[i]); constraints.push(intermediate_value_diff); } let output_diff = builder.sub_extension(output, intermediate_values[self.num_power_bits - 1]); constraints.push(output_diff); constraints } fn generators( &self, gate_index: usize, _local_constants: &[F], ) -> Vec<Box<dyn WitnessGenerator<F>>> { let gen = ExponentiationGenerator::<F, D> { gate_index, gate: self.clone(), }; vec![Box::new(gen.adapter())] } fn num_wires(&self) -> usize { self.wire_intermediate_value(self.num_power_bits - 1) + 1 } fn num_constants(&self) -> usize { 0 } fn degree(&self) -> usize { 4 } fn num_constraints(&self) -> usize { self.num_power_bits + 1 } } impl<F: RichField + Extendable<D>, const D: usize> PackedEvaluableBase<F, D> for ExponentiationGate<F, D> { fn eval_unfiltered_base_packed<P: PackedField<Scalar = F>>( &self, vars: EvaluationVarsBasePacked<P>, mut yield_constr: StridedConstraintConsumer<P>, ) { let base = vars.local_wires[self.wire_base()]; let power_bits: Vec<_> = (0..self.num_power_bits) .map(|i| vars.local_wires[self.wire_power_bit(i)]) .collect(); let intermediate_values: Vec<_> = (0..self.num_power_bits) .map(|i| vars.local_wires[self.wire_intermediate_value(i)]) .collect(); let output = vars.local_wires[self.wire_output()]; for i in 0..self.num_power_bits { let prev_intermediate_value = if i == 0 { P::ONES } else { intermediate_values[i - 1].square() }; let cur_bit = power_bits[self.num_power_bits - i - 1]; let not_cur_bit = P::ONES - cur_bit; let computed_intermediate_value = prev_intermediate_value * (cur_bit * base + not_cur_bit); yield_constr.one(computed_intermediate_value - intermediate_values[i]); } yield_constr.one(output - intermediate_values[self.num_power_bits - 1]); } } 
#[derive(Debug)] struct ExponentiationGenerator<F: RichField + Extendable<D>, const D: usize> { gate_index: usize, gate: ExponentiationGate<F, D>, } impl<F: RichField + Extendable<D>, const D: usize> SimpleGenerator<F> for ExponentiationGenerator<F, D> { fn dependencies(&self) -> Vec<Target> { let local_target = |input| Target::wire(self.gate_index, input); let mut deps = Vec::with_capacity(self.gate.num_power_bits + 1); deps.push(local_target(self.gate.wire_base())); for i in 0..self.gate.num_power_bits { deps.push(local_target(self.gate.wire_power_bit(i))); } deps } fn run_once(&self, witness: &PartitionWitness<F>, out_buffer: &mut GeneratedValues<F>) { let local_wire = |input| Wire { gate: self.gate_index, input, }; let get_local_wire = |input| witness.get_wire(local_wire(input)); let num_power_bits = self.gate.num_power_bits; let base = get_local_wire(self.gate.wire_base()); let power_bits = (0..num_power_bits) .map(|i| get_local_wire(self.gate.wire_power_bit(i))) .collect::<Vec<_>>(); let mut intermediate_values = Vec::new(); let mut current_intermediate_value = F::ONE; for i in 0..num_power_bits { if power_bits[num_power_bits - i - 1] == F::ONE { current_intermediate_value *= base; } intermediate_values.push(current_intermediate_value); current_intermediate_value *= current_intermediate_value; } for i in 0..num_power_bits { let intermediate_value_wire = local_wire(self.gate.wire_intermediate_value(i)); out_buffer.set_wire(intermediate_value_wire, intermediate_values[i]); } let output_wire = local_wire(self.gate.wire_output()); out_buffer.set_wire(output_wire, intermediate_values[num_power_bits - 1]); } } #[cfg(test)] mod tests { use std::marker::PhantomData; use anyhow::Result; use plonky2_field::field_types::Field; use plonky2_field::goldilocks_field::GoldilocksField; use plonky2_util::log2_ceil; use rand::Rng; use crate::gates::exponentiation::ExponentiationGate; use crate::gates::gate::Gate; use crate::gates::gate_testing::{test_eval_fns, test_low_degree}; 
use crate::hash::hash_types::HashOut; use crate::plonk::circuit_data::CircuitConfig; use crate::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; use crate::plonk::vars::EvaluationVars; const MAX_POWER_BITS: usize = 17; #[test] fn wire_indices() { let gate = ExponentiationGate::<GoldilocksField, 4> { num_power_bits: 5, _phantom: PhantomData, }; assert_eq!(gate.wire_base(), 0); assert_eq!(gate.wire_power_bit(0), 1); assert_eq!(gate.wire_power_bit(4), 5); assert_eq!(gate.wire_output(), 6); assert_eq!(gate.wire_intermediate_value(0), 7); assert_eq!(gate.wire_intermediate_value(4), 11); } #[test] fn low_degree() { let config = CircuitConfig { num_wires: 120, num_routed_wires: 30, ..CircuitConfig::standard_recursion_config() }; test_low_degree::<GoldilocksField, _, 4>(ExponentiationGate::new_from_config(&config)); } #[test] fn eval_fns() -> Result<()> { const D: usize = 2; type C = PoseidonGoldilocksConfig; type F = <C as GenericConfig<D>>::F; test_eval_fns::<F, C, _, D>(ExponentiationGate::new_from_config( &CircuitConfig::standard_recursion_config(), )) } #[test] fn test_gate_constraint() { const D: usize = 2; type C = PoseidonGoldilocksConfig; type F = <C as GenericConfig<D>>::F; type FF = <C as GenericConfig<D>>::FE; fn get_wires(base: F, power: u64) -> Vec<FF> { let mut power_bits = Vec::new(); let mut cur_power = power; while cur_power > 0 { power_bits.push(cur_power % 2); cur_power /= 2; } let num_power_bits = power_bits.len(); let power_bits_f: Vec<_> = power_bits .iter() .map(|b| F::from_canonical_u64(*b)) .collect(); let mut v = vec![base]; v.extend(power_bits_f); let mut intermediate_values = Vec::new(); let mut cur
let output_value = intermediate_values[num_power_bits - 1]; v.push(output_value); v.extend(intermediate_values); v.iter().map(|&x| x.into()).collect::<Vec<_>>() } let mut rng = rand::thread_rng(); let base = F::TWO; let power = rng.gen::<usize>() % (1 << MAX_POWER_BITS); let num_power_bits = log2_ceil(power + 1); let gate = ExponentiationGate::<F, D> { num_power_bits, _phantom: PhantomData, }; let vars = EvaluationVars { local_constants: &[], local_wires: &get_wires(base, power as u64), public_inputs_hash: &HashOut::rand(), }; assert!( gate.eval_unfiltered(vars).iter().all(|x| x.is_zero()), "Gate constraints are not satisfied." ); } }
rent_intermediate_value = F::ONE; for i in 0..num_power_bits { if power_bits[num_power_bits - i - 1] == 1 { current_intermediate_value *= base; } intermediate_values.push(current_intermediate_value); current_intermediate_value *= current_intermediate_value; }
function_block-random_span
[ { "content": "/// Tests that the constraints imposed by the given gate are low-degree by applying them to random\n\n/// low-degree witness polynomials.\n\npub fn test_low_degree<F: RichField + Extendable<D>, G: Gate<F, D>, const D: usize>(gate: G) {\n\n let rate_bits = log2_ceil(gate.degree() + 1);\n\n\n\n ...
Rust
src/parser.rs
Popog/dtml-rs
43224bba007b951d348061fa05856092e68b66dc
use std::error; use std::iter::{Iterator, once}; use token::{ContentType, Token, NonText, TokenWithComments, TextAccumulator, is_whitespace}; use tokenizer; use tuple::{LazyTuple, parent_element, LazyTupleContainer}; type TokenWithCommentsResult<S, E> = Result<TokenWithComments<S>, tokenizer::Error<E>>; type TokenResult<S, E> = Result<Token<S>, tokenizer::Error<E>>; #[derive(Debug)] pub enum Error<S: Eq, E: error::Error> { UnexpectedEOF, UnexpectedToken(Token<S>), TokenizerError(tokenizer::Error<E>), } fn map_error<T, S: Eq, E: error::Error>(r: Option<Result<T, tokenizer::Error<E>>>) -> Result<Option<T>, Error<S, E>> { match r { None => Ok(None), Some(Err(e)) => Err(Error::TokenizerError(e)), Some(Ok(t)) => Ok(Some(t)), } } #[derive(PartialEq, Eq)] enum Terminator { EOF, Close, Divider, } pub fn parse<S, E, I, C> (i: &mut I) -> Result<LazyTuple<S, C>, Error<S, E>> where S: Eq+AsRef<str>, E: error::Error, I: Iterator<Item = TokenResult<S, E>>, C: LazyTupleContainer<S>, <C as LazyTupleContainer<S>>::Elements: Extend<LazyTuple<S, C>>, { let mut text = TextAccumulator::new(); let t = try!(map_error(i.by_ref().filter_map(|i| text.filter(i)).next())); match (text.uncommitted_type(), t) { (_, None) => { text.commit(); Ok(LazyTuple::Value(text)) }, (ContentType::Text, Some(NonText::Open)) => { let (text, t) = try!(parse_text_helper(i, text, 1, false)); match t { Terminator::EOF => Ok(LazyTuple::Value(text)), Terminator::Close => Err(Error::UnexpectedToken(Token::Close)), Terminator::Divider => Err(Error::UnexpectedToken(Token::Divider)), } }, (_, Some(NonText::Open)) => { text.clear_uncommitted(); match try!(parse_helper(i, text)) { LazyTuple::Value(text) => { let (text, t) = try!(parse_text_helper(i, text, 0, true)); match t { Terminator::EOF => Ok(LazyTuple::Value(text)), Terminator::Close => Err(Error::UnexpectedToken(Token::Close)), Terminator::Divider => Err(Error::UnexpectedToken(Token::Divider)), } }, tuple => { let t = 
try!(map_error(i.skip_while(is_whitespace).next())); if let Some(t) = t { Err(Error::UnexpectedToken(t)) } else { Ok(tuple) } }, } }, (_, Some(NonText::Close)) => return Err(Error::UnexpectedToken(Token::Close)), (_, Some(NonText::Divider)) => return Err(Error::UnexpectedToken(Token::Divider)), (ContentType::Text, Some(NonText::Null)) => return Err(Error::UnexpectedToken(Token::Null)), (_, Some(NonText::Null)) => { let t = try!(map_error(i.skip_while(is_whitespace).next())); if let Some(t) = t { Err(Error::UnexpectedToken(t)) } else { Ok(LazyTuple::Null) } }, } } fn parse_helper<S, E, I, C> (i: &mut I, text: TextAccumulator<S>) -> Result<LazyTuple<S, C>, Error<S, E>> where S: Eq+AsRef<str>, E: error::Error, I: Iterator<Item = TokenResult<S, E>>, C: LazyTupleContainer<S>, <C as LazyTupleContainer<S>>::Elements: Extend<LazyTuple<S, C>>, { let mut elements = match try!(parse_section_helper(i, text)) { ElementType::Empty => return Ok(LazyTuple::Parent(Default::default())), ElementType::Divided(t) => parent_element(t), ElementType::TerminatedText(t) => return Ok(t), ElementType::TerminatedWhitespace(t) => return Ok(t), ElementType::Terminated(t) => return Ok(LazyTuple::Parent(parent_element(t))), }; loop { match try!(parse_section_helper(i, TextAccumulator::new())) { ElementType::Empty => return Ok(LazyTuple::Parent(elements)), ElementType::Divided(t) => elements.extend(once(t)), ElementType::TerminatedText(t) => { elements.extend(once(t)); return Ok(LazyTuple::Parent(elements)); }, ElementType::TerminatedWhitespace(_) => return Ok(LazyTuple::Parent(elements)), ElementType::Terminated(t) => { elements.extend(once(t)); return Ok(LazyTuple::Parent(elements)); }, } } } enum ElementType<T>{ Empty, Divided(T), TerminatedWhitespace(T), TerminatedText(T), Terminated(T), } fn parse_section_helper<S, E, I, C> (i: &mut I, mut text: TextAccumulator<S>) -> Result<ElementType<LazyTuple<S, C>>, Error<S, E>> where S: Eq+AsRef<str>, E: error::Error, I: Iterator<Item = TokenResult<S, 
E>>, C: LazyTupleContainer<S>, <C as LazyTupleContainer<S>>::Elements: Extend<LazyTuple<S, C>>, { let t = try!(map_error(i.by_ref().filter_map(|i| text.filter(i)).next())); match (text.uncommitted_type(), t) { (_, None) => Err(Error::UnexpectedEOF), (ContentType::Text, Some(NonText::Open)) => { text.commit(); let (text, t) = try!(parse_text_helper(i, text, 1, false)); match t { Terminator::EOF => Err(Error::UnexpectedEOF), Terminator::Close => Ok(ElementType::Terminated(LazyTuple::Value(text))), Terminator::Divider => Err(Error::UnexpectedToken(Token::Divider)), } }, (_, Some(NonText::Open)) => { text.clear_uncommitted(); match try!(parse_helper(i, text)) { LazyTuple::Value(text) => { let (text, t) = try!(parse_text_helper(i, text, 0, true)); match t { Terminator::EOF => Err(Error::UnexpectedEOF), Terminator::Close => Ok(ElementType::TerminatedText(LazyTuple::Value(text))), Terminator::Divider => Ok(ElementType::Divided(LazyTuple::Value(text))), } }, tuple => { match try!(map_error(i.skip_while(is_whitespace).next())) { None => Err(Error::UnexpectedEOF), Some(Token::Close) => Ok(ElementType::Terminated(tuple)), Some(Token::Divider) => Ok(ElementType::Divided(tuple)), Some(t) => Err(Error::UnexpectedToken(t)), } }, } }, (ContentType::Empty, Some(NonText::Close)) => Ok(ElementType::Empty), (ContentType::Whitespace, Some(NonText::Close)) => { text.commit(); Ok(ElementType::TerminatedWhitespace(LazyTuple::Value(text))) }, (ContentType::Text, Some(NonText::Close)) => { text.commit(); Ok(ElementType::TerminatedText(LazyTuple::Value(text))) }, (_, Some(NonText::Divider)) => Ok(ElementType::Divided(LazyTuple::Value(text))), (ContentType::Text, Some(NonText::Null)) => Err(Error::UnexpectedToken(Token::Null)), (_, Some(NonText::Null)) => { text.clear_uncommitted(); match try!(map_error(i.skip_while(is_whitespace).next())) { None => Err(Error::UnexpectedEOF), Some(Token::Close) => Ok(ElementType::Terminated(LazyTuple::Null)), Some(Token::Divider) => 
Ok(ElementType::Divided(LazyTuple::Null)), Some(t) => Err(Error::UnexpectedToken(t)), } }, } } fn parse_text_helper<S, E, I> (i: &mut I, mut text: TextAccumulator<S>, mut depth: usize, mut post_close: bool) -> Result<(TextAccumulator<S>, Terminator), Error<S, E>> where S: Eq+AsRef<str>, E: error::Error, I: Iterator<Item = TokenResult<S, E>>, { loop { match try!(map_error(i.by_ref().filter_map(|i| text.filter(i)).next())) { None => { if depth == 0 { if post_close && text.uncommitted_type() == ContentType::Whitespace { text.clear_uncommitted(); } else { text.commit(); } return Ok((text, Terminator::EOF)); } return Err(Error::UnexpectedEOF) }, Some(NonText::Divider) => { if depth == 0 { if post_close && text.uncommitted_type() == ContentType::Whitespace { text.clear_uncommitted(); } else { text.commit(); } return Ok((text, Terminator::Divider)); } return Err(Error::UnexpectedToken(Token::Divider)); } Some(NonText::Open) => { if text.uncommitted_type() == ContentType::Text { text.commit(); } else { text.clear_uncommitted(); } depth += 1; post_close = false; }, Some(NonText::Null) => return Err(Error::UnexpectedToken(Token::Null)), Some(NonText::Close) => { match text.uncommitted_type() { ContentType::Empty if !post_close => return Err(Error::UnexpectedToken(Token::Close)), ContentType::Whitespace if post_close => text.clear_uncommitted(), _ => text.commit(), } if depth == 0 { return Ok((text, Terminator::Close)); } depth -= 1; post_close = true; }, } } } /* d888888b d88888b .d8888. d888888b `~~88~~' 88' 88' YP `~~88~~' 88 88ooooo `8bo. 88 88 88~~~~~ `Y8b. 88 88 88. 
db 8D 88 YP Y88888P `8888Y' YP */ #[cfg(test)] mod test { use tokenizer::{Tokenizer, StringCharReader, StrIndexCharReader}; use token::{filter_comments}; use tuple::{Tuple, VecTuple, TupleContainer}; use parser::parse; fn test_parser<C: Eq+TupleContainer>(a: Tuple<C>, b: Tuple<C>) { assert!(PartialEq::eq(&a,&b), "`{}` != `{}`", a, b); } fn new_parser_1(s: &str) -> Tuple<VecTuple> { match parse::<_,_,_,VecTuple>(&mut Tokenizer::new(StringCharReader::new(s.chars())).filter_map(filter_comments)) { Ok(t) => t.eval(), Err(e) => {panic!("failed {:?}", e)}, } } #[test] fn test_parser_empty() { test_parser(Tuple::Parent(vec![]), new_parser_1("[]")); test_parser(Tuple::Parent(vec![]), new_parser_1(" []")); test_parser(Tuple::Parent(vec![]), new_parser_1("[] ")); test_parser(Tuple::Parent(vec![]), new_parser_1(" [] ")); } #[test] fn test_parser_monad() { test_parser(Tuple::Value(r"".to_string()), new_parser_1(r"")); test_parser(Tuple::Value(r" ".to_string()), new_parser_1(r" ")); test_parser(Tuple::Value(r"test".to_string()), new_parser_1(r"test")); test_parser(Tuple::Value(r" test".to_string()), new_parser_1(r" test")); test_parser(Tuple::Null, new_parser_1(r"\0")); } #[test] fn test_parser_tuple_1() { test_parser( Tuple::Parent(vec![Tuple::Value(r"".to_string()),]), new_parser_1(r"[|]"), ); test_parser(new_parser_1(r"[|]"), new_parser_1(r" [|]")); test_parser(new_parser_1(r"[|]"), new_parser_1(r"[| ]")); test_parser(new_parser_1(r"[|]"), new_parser_1(r" [| ]")); test_parser(new_parser_1(r"[|]"), new_parser_1(r"[|] ")); test_parser(new_parser_1(r"[|]"), new_parser_1(r" [|] ")); test_parser(new_parser_1(r"[|]"), new_parser_1(r"[| ] ")); test_parser(new_parser_1(r"[|]"), new_parser_1(r" [| ] ")); } #[test] fn test_parser_tuple_2() { test_parser( Tuple::Parent(vec![Tuple::Value(r"test".to_string()),]), new_parser_1(r"[test|]"), ); test_parser(new_parser_1(r"[test|]"), new_parser_1(r" [test|]")); test_parser(new_parser_1(r"[test|]"), new_parser_1(r"[test| ]")); 
test_parser(new_parser_1(r"[test|]"), new_parser_1(r" [test| ]")); test_parser(new_parser_1(r"[test|]"), new_parser_1(r"[test|] ")); test_parser(new_parser_1(r"[test|]"), new_parser_1(r" [test|] ")); test_parser(new_parser_1(r"[test|]"), new_parser_1(r"[test| ] ")); test_parser(new_parser_1(r"[test|]"), new_parser_1(r" [test| ] ")); } #[test] fn test_parser_tuple_3() { test_parser( Tuple::Parent(vec![ Tuple::Value(r"hello".to_string()), Tuple::Value(r"world".to_string()), ]), new_parser_1(r"[hello|world]"), ); test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r" [hello|world]")); test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r"[hello|world] ")); test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r" [hello|world] ")); test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r" [hello|world|]")); test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r"[hello|world|] ")); test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r" [hello|world|] ")); test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r"[hello|world| ] ")); test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r" [hello|world| ] ")); test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r" [[hello]|world] ")); test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r" [[hello]|[world]] ")); } #[test] fn test_parser_tuple_4() { test_parser( Tuple::Parent(vec![ Tuple::Value(r"test".to_string()), Tuple::Null, ]), new_parser_1(r"[test|\0|]"), ); test_parser( Tuple::Parent(vec![ Tuple::Value(r"test".to_string()), Tuple::Null, ]), new_parser_1(r"[test|\0]"), ); test_parser( Tuple::Parent(vec![ Tuple::Parent(vec![]), Tuple::Value(r"test".to_string()), Tuple::Null, ]), new_parser_1(r"[[]|test|\0|]"), ); test_parser( Tuple::Parent(vec![ Tuple::Parent(vec![ Tuple::Parent(vec![]), Tuple::Value(r"foo".to_string()), ]), Tuple::Value(r"test".to_string()), Tuple::Null, ]), new_parser_1(r"[[[]|foo|]|test|\0|]"), ); } }
use std::error; use std::iter::{Iterator, once}; use token::{ContentType, Token, NonText, TokenWithComments, TextAccumulator, is_whitespace}; use tokenizer; use tuple::{LazyTuple, parent_element, LazyTupleContainer}; type TokenWithCommentsResult<S, E> = Result<TokenWithComments<S>, tokenizer::Error<E>>; type TokenResult<S, E> = Result<Token<S>, tokenizer::Error<E>>; #[derive(Debug)] pub enum Error<S: Eq, E: error::Error> { UnexpectedEOF, UnexpectedToken(Token<S>), TokenizerError(tokenizer::Error<E>), } fn map_error<T, S: Eq, E: error::Error>(r: Option<Result<T, tokenizer::Error<E>>>) -> Result<Option<T>, Error<S, E>> { match r { None => Ok(None), Some(Err(e)) => Err(Error::TokenizerError(e)), Some(Ok(t)) => Ok(Some(t)), } } #[derive(PartialEq, Eq)] enum Terminator { EOF, Close, Divider, } pub fn parse<S, E, I, C> (i: &mut I) -> Result<LazyTuple<S, C>, Error<S, E>> where S: Eq+AsRef<str>, E: error::Error, I: Iterator<Item = TokenResult<S, E>>, C: LazyTupleContainer<S>, <C as LazyTupleContainer<S>>::Elements: Extend<LazyTuple<S, C>>, { let mut text = TextAccumulator::new(); let t = try!(map_error(i.by_ref().filter_map(|i| text.filter(i)).next())); match (text.uncommitted_type(), t) { (_, None) => { text.commit(); Ok(LazyTuple::Value(text)) }, (ContentType::Text, Some(NonText::Open)) => { let (text, t) = try!(parse_text_helper(i, text, 1, false)); match t { Terminator::EOF => Ok(LazyTuple::Value(text)), Terminator::Close => Err(Error::UnexpectedToken(Token::Close)), Terminator::Divider => Err(Error::UnexpectedToken(Token::Divider)), } }, (_, Some(NonText::Open)) => { text.clear_uncommitted(); match try!(parse_helper(i, text)) { LazyTuple::Value(text) => { let (text, t) = try!(parse_text_helper(i, text, 0, true)); match t { Terminator::EOF => Ok(LazyTuple::Value(text)), Terminator::Close => Err(Error::UnexpectedToken(Token::Close)), Terminator::Divider => Err(Error::UnexpectedToken(Token::Divider)), } }, tuple => { let t = 
try!(map_error(i.skip_while(is_whitespace).next())); if let So
=> return Err(Error::UnexpectedToken(Token::Null)), (_, Some(NonText::Null)) => { let t = try!(map_error(i.skip_while(is_whitespace).next())); if let Some(t) = t { Err(Error::UnexpectedToken(t)) } else { Ok(LazyTuple::Null) } }, } } fn parse_helper<S, E, I, C> (i: &mut I, text: TextAccumulator<S>) -> Result<LazyTuple<S, C>, Error<S, E>> where S: Eq+AsRef<str>, E: error::Error, I: Iterator<Item = TokenResult<S, E>>, C: LazyTupleContainer<S>, <C as LazyTupleContainer<S>>::Elements: Extend<LazyTuple<S, C>>, { let mut elements = match try!(parse_section_helper(i, text)) { ElementType::Empty => return Ok(LazyTuple::Parent(Default::default())), ElementType::Divided(t) => parent_element(t), ElementType::TerminatedText(t) => return Ok(t), ElementType::TerminatedWhitespace(t) => return Ok(t), ElementType::Terminated(t) => return Ok(LazyTuple::Parent(parent_element(t))), }; loop { match try!(parse_section_helper(i, TextAccumulator::new())) { ElementType::Empty => return Ok(LazyTuple::Parent(elements)), ElementType::Divided(t) => elements.extend(once(t)), ElementType::TerminatedText(t) => { elements.extend(once(t)); return Ok(LazyTuple::Parent(elements)); }, ElementType::TerminatedWhitespace(_) => return Ok(LazyTuple::Parent(elements)), ElementType::Terminated(t) => { elements.extend(once(t)); return Ok(LazyTuple::Parent(elements)); }, } } } enum ElementType<T>{ Empty, Divided(T), TerminatedWhitespace(T), TerminatedText(T), Terminated(T), } fn parse_section_helper<S, E, I, C> (i: &mut I, mut text: TextAccumulator<S>) -> Result<ElementType<LazyTuple<S, C>>, Error<S, E>> where S: Eq+AsRef<str>, E: error::Error, I: Iterator<Item = TokenResult<S, E>>, C: LazyTupleContainer<S>, <C as LazyTupleContainer<S>>::Elements: Extend<LazyTuple<S, C>>, { let t = try!(map_error(i.by_ref().filter_map(|i| text.filter(i)).next())); match (text.uncommitted_type(), t) { (_, None) => Err(Error::UnexpectedEOF), (ContentType::Text, Some(NonText::Open)) => { text.commit(); let (text, t) = 
try!(parse_text_helper(i, text, 1, false)); match t { Terminator::EOF => Err(Error::UnexpectedEOF), Terminator::Close => Ok(ElementType::Terminated(LazyTuple::Value(text))), Terminator::Divider => Err(Error::UnexpectedToken(Token::Divider)), } }, (_, Some(NonText::Open)) => { text.clear_uncommitted(); match try!(parse_helper(i, text)) { LazyTuple::Value(text) => { let (text, t) = try!(parse_text_helper(i, text, 0, true)); match t { Terminator::EOF => Err(Error::UnexpectedEOF), Terminator::Close => Ok(ElementType::TerminatedText(LazyTuple::Value(text))), Terminator::Divider => Ok(ElementType::Divided(LazyTuple::Value(text))), } }, tuple => { match try!(map_error(i.skip_while(is_whitespace).next())) { None => Err(Error::UnexpectedEOF), Some(Token::Close) => Ok(ElementType::Terminated(tuple)), Some(Token::Divider) => Ok(ElementType::Divided(tuple)), Some(t) => Err(Error::UnexpectedToken(t)), } }, } }, (ContentType::Empty, Some(NonText::Close)) => Ok(ElementType::Empty), (ContentType::Whitespace, Some(NonText::Close)) => { text.commit(); Ok(ElementType::TerminatedWhitespace(LazyTuple::Value(text))) }, (ContentType::Text, Some(NonText::Close)) => { text.commit(); Ok(ElementType::TerminatedText(LazyTuple::Value(text))) }, (_, Some(NonText::Divider)) => Ok(ElementType::Divided(LazyTuple::Value(text))), (ContentType::Text, Some(NonText::Null)) => Err(Error::UnexpectedToken(Token::Null)), (_, Some(NonText::Null)) => { text.clear_uncommitted(); match try!(map_error(i.skip_while(is_whitespace).next())) { None => Err(Error::UnexpectedEOF), Some(Token::Close) => Ok(ElementType::Terminated(LazyTuple::Null)), Some(Token::Divider) => Ok(ElementType::Divided(LazyTuple::Null)), Some(t) => Err(Error::UnexpectedToken(t)), } }, } } fn parse_text_helper<S, E, I> (i: &mut I, mut text: TextAccumulator<S>, mut depth: usize, mut post_close: bool) -> Result<(TextAccumulator<S>, Terminator), Error<S, E>> where S: Eq+AsRef<str>, E: error::Error, I: Iterator<Item = TokenResult<S, E>>, { loop { 
match try!(map_error(i.by_ref().filter_map(|i| text.filter(i)).next())) { None => { if depth == 0 { if post_close && text.uncommitted_type() == ContentType::Whitespace { text.clear_uncommitted(); } else { text.commit(); } return Ok((text, Terminator::EOF)); } return Err(Error::UnexpectedEOF) }, Some(NonText::Divider) => { if depth == 0 { if post_close && text.uncommitted_type() == ContentType::Whitespace { text.clear_uncommitted(); } else { text.commit(); } return Ok((text, Terminator::Divider)); } return Err(Error::UnexpectedToken(Token::Divider)); } Some(NonText::Open) => { if text.uncommitted_type() == ContentType::Text { text.commit(); } else { text.clear_uncommitted(); } depth += 1; post_close = false; }, Some(NonText::Null) => return Err(Error::UnexpectedToken(Token::Null)), Some(NonText::Close) => { match text.uncommitted_type() { ContentType::Empty if !post_close => return Err(Error::UnexpectedToken(Token::Close)), ContentType::Whitespace if post_close => text.clear_uncommitted(), _ => text.commit(), } if depth == 0 { return Ok((text, Terminator::Close)); } depth -= 1; post_close = true; }, } } } /* d888888b d88888b .d8888. d888888b `~~88~~' 88' 88' YP `~~88~~' 88 88ooooo `8bo. 88 88 88~~~~~ `Y8b. 88 88 88. 
db 8D 88 YP Y88888P `8888Y' YP */

#[cfg(test)]
mod test {
    use tokenizer::{Tokenizer, StringCharReader, StrIndexCharReader};
    use token::{filter_comments};
    use tuple::{Tuple, VecTuple, TupleContainer};
    use parser::parse;
    // NOTE(review): `StrIndexCharReader` appears unused in this module —
    // confirm it is not needed by sibling tests before removing the import.

    /// Asserts that two evaluated tuples compare equal, showing both on failure.
    fn test_parser<C: Eq + TupleContainer>(a: Tuple<C>, b: Tuple<C>) {
        assert!(a == b, "`{}` != `{}`", a, b);
    }

    /// Tokenizes `s` with comments stripped, parses it as a `VecTuple`, and
    /// evaluates the resulting lazy tuple; panics on any tokenizer/parser error.
    fn new_parser_1(s: &str) -> Tuple<VecTuple> {
        let mut tokens =
            Tokenizer::new(StringCharReader::new(s.chars())).filter_map(filter_comments);
        match parse::<_, _, _, VecTuple>(&mut tokens) {
            Ok(t) => t.eval(),
            Err(e) => panic!("failed {:?}", e),
        }
    }

    /// `[]` parses to an empty parent tuple, ignoring surrounding whitespace.
    #[test]
    fn test_parser_empty() {
        test_parser(Tuple::Parent(vec![]), new_parser_1("[]"));
        test_parser(Tuple::Parent(vec![]), new_parser_1(" []"));
        test_parser(Tuple::Parent(vec![]), new_parser_1("[] "));
        test_parser(Tuple::Parent(vec![]), new_parser_1(" [] "));
    }

    /// Bare text parses to a single value (whitespace preserved); `\0` parses
    /// to the null tuple.
    #[test]
    fn test_parser_monad() {
        test_parser(Tuple::Value(r"".to_string()), new_parser_1(r""));
        test_parser(Tuple::Value(r" ".to_string()), new_parser_1(r" "));
        test_parser(Tuple::Value(r"test".to_string()), new_parser_1(r"test"));
        test_parser(Tuple::Value(r" test".to_string()), new_parser_1(r" test"));
        test_parser(Tuple::Null, new_parser_1(r"\0"));
    }

    /// `[|]` is a parent holding one empty value; whitespace around the
    /// brackets and before the closing bracket does not change the result.
    #[test]
    fn test_parser_tuple_1() {
        test_parser(
            Tuple::Parent(vec![Tuple::Value(r"".to_string())]),
            new_parser_1(r"[|]"),
        );
        test_parser(new_parser_1(r"[|]"), new_parser_1(r" [|]"));
        test_parser(new_parser_1(r"[|]"), new_parser_1(r"[| ]"));
        test_parser(new_parser_1(r"[|]"), new_parser_1(r" [| ]"));
        test_parser(new_parser_1(r"[|]"), new_parser_1(r"[|] "));
        test_parser(new_parser_1(r"[|]"), new_parser_1(r" [|] "));
        test_parser(new_parser_1(r"[|]"), new_parser_1(r"[| ] "));
        test_parser(new_parser_1(r"[|]"), new_parser_1(r" [| ] "));
    }

    /// Same as `test_parser_tuple_1` but with a non-empty single element.
    #[test]
    fn test_parser_tuple_2() {
        test_parser(
            Tuple::Parent(vec![Tuple::Value(r"test".to_string())]),
            new_parser_1(r"[test|]"),
        );
        test_parser(new_parser_1(r"[test|]"), new_parser_1(r" [test|]"));
        test_parser(new_parser_1(r"[test|]"), new_parser_1(r"[test| ]"));
        test_parser(new_parser_1(r"[test|]"), new_parser_1(r" [test| ]"));
        test_parser(new_parser_1(r"[test|]"), new_parser_1(r"[test|] "));
        test_parser(new_parser_1(r"[test|]"), new_parser_1(r" [test|] "));
        test_parser(new_parser_1(r"[test|]"), new_parser_1(r"[test| ] "));
        test_parser(new_parser_1(r"[test|]"), new_parser_1(r" [test| ] "));
    }

    /// Two-element tuples: a trailing divider is redundant, and wrapping a
    /// single value in brackets collapses to the value itself.
    #[test]
    fn test_parser_tuple_3() {
        test_parser(
            Tuple::Parent(vec![
                Tuple::Value(r"hello".to_string()),
                Tuple::Value(r"world".to_string()),
            ]),
            new_parser_1(r"[hello|world]"),
        );
        test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r" [hello|world]"));
        test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r"[hello|world] "));
        test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r" [hello|world] "));
        test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r" [hello|world|]"));
        test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r"[hello|world|] "));
        test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r" [hello|world|] "));
        test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r"[hello|world| ] "));
        test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r" [hello|world| ] "));
        test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r" [[hello]|world] "));
        test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r" [[hello]|[world]] "));
    }

    /// Mixed tuples containing nulls, empty parents, and nested parents.
    #[test]
    fn test_parser_tuple_4() {
        test_parser(
            Tuple::Parent(vec![Tuple::Value(r"test".to_string()), Tuple::Null]),
            new_parser_1(r"[test|\0|]"),
        );
        test_parser(
            Tuple::Parent(vec![Tuple::Value(r"test".to_string()), Tuple::Null]),
            new_parser_1(r"[test|\0]"),
        );
        test_parser(
            Tuple::Parent(vec![
                Tuple::Parent(vec![]),
                Tuple::Value(r"test".to_string()),
                Tuple::Null,
            ]),
            new_parser_1(r"[[]|test|\0|]"),
        );
        test_parser(
            Tuple::Parent(vec![
                Tuple::Parent(vec![
                    Tuple::Parent(vec![]),
                    Tuple::Value(r"foo".to_string()),
                ]),
                Tuple::Value(r"test".to_string()),
                Tuple::Null,
            ]),
            new_parser_1(r"[[[]|foo|]|test|\0|]"),
        );
    }
}
me(t) = t { Err(Error::UnexpectedToken(t)) } else { Ok(tuple) } }, } }, (_, Some(NonText::Close)) => return Err(Error::UnexpectedToken(Token::Close)), (_, Some(NonText::Divider)) => return Err(Error::UnexpectedToken(Token::Divider)), (ContentType::Text, Some(NonText::Null))
function_block-random_span
[ { "content": "fn map_error<C, E: error::Error>(r: Option<Result<C, E>>) -> Result<C, Error<E>> {\n\n match r {\n\n None => return Err(Error::UnexpectedEOF),\n\n Some(Err(e)) => return Err(From::from(e)),\n\n Some(Ok(t)) => Ok(t),\n\n }\n\n}\n\n\n\n/*\n\n .o88b. db db .d8b. d8888b....
Rust
geom/src/segment.rs
HalfVoxel/lyon
5e42176d5a7ad78a23c735c2946caed15955ac82
use crate::generic_math::{Point, Rect, Vector};
use crate::scalar::{One, Scalar};
use crate::{CubicBezierSegment, LineSegment, QuadraticBezierSegment};
use std::ops::Range;

/// Common interface for curve segments (line segments and bézier curves).
pub trait Segment: Copy + Sized {
    type Scalar: Scalar;

    /// Start of the curve.
    fn from(&self) -> Point<Self::Scalar>;

    /// End of the curve.
    fn to(&self) -> Point<Self::Scalar>;

    /// Sample the curve at parameter `t`.
    fn sample(&self, t: Self::Scalar) -> Point<Self::Scalar>;

    /// Sample the x coordinate of the curve at parameter `t`.
    fn x(&self, t: Self::Scalar) -> Self::Scalar {
        self.sample(t).x
    }

    /// Sample the y coordinate of the curve at parameter `t`.
    fn y(&self, t: Self::Scalar) -> Self::Scalar {
        self.sample(t).y
    }

    /// Sample the derivative of the curve at parameter `t`.
    fn derivative(&self, t: Self::Scalar) -> Vector<Self::Scalar>;

    /// Sample the x component of the derivative at parameter `t`.
    fn dx(&self, t: Self::Scalar) -> Self::Scalar {
        self.derivative(t).x
    }

    /// Sample the y component of the derivative at parameter `t`.
    fn dy(&self, t: Self::Scalar) -> Self::Scalar {
        self.derivative(t).y
    }

    /// Split the curve into two sub-curves at parameter `t`.
    fn split(&self, t: Self::Scalar) -> (Self, Self);

    /// The sub-curve before parameter `t`.
    fn before_split(&self, t: Self::Scalar) -> Self;

    /// The sub-curve after parameter `t`.
    fn after_split(&self, t: Self::Scalar) -> Self;

    /// The sub-curve covering the given parameter range.
    fn split_range(&self, t_range: Range<Self::Scalar>) -> Self;

    /// The same curve with its direction reversed.
    fn flip(&self) -> Self;

    /// An approximation of the curve's length, accurate to `tolerance`.
    fn approximate_length(&self, tolerance: Self::Scalar) -> Self::Scalar;
}

/// Bounding rectangles and per-axis bounding ranges of a curve.
pub trait BoundingRect {
    type Scalar: Scalar;

    /// The smallest rectangle that contains the curve.
    fn bounding_rect(&self) -> Rect<Self::Scalar>;

    /// A conservative rectangle that contains the curve; may be larger than
    /// `bounding_rect` but defaults to it.
    fn fast_bounding_rect(&self) -> Rect<Self::Scalar> {
        self.bounding_rect()
    }

    fn bounding_range_x(&self) -> (Self::Scalar, Self::Scalar);
    fn bounding_range_y(&self) -> (Self::Scalar, Self::Scalar);
    fn fast_bounding_range_x(&self) -> (Self::Scalar, Self::Scalar);
    fn fast_bounding_range_y(&self) -> (Self::Scalar, Self::Scalar);
}

/// Segments that can be flattened incrementally.
pub trait FlatteningStep: Segment {
    /// The parameter up to which the beginning of the curve can be
    /// approximated by a line segment within `tolerance`; a value >= 1 means
    /// the whole remaining curve can be.
    fn flattening_step(&self, tolerance: Self::Scalar) -> Self::Scalar;
}

/// Invokes `call_back` with the endpoint of each line segment approximating
/// `curve` within `tolerance` (the curve's start point is not emitted).
pub(crate) fn for_each_flattened<T, F>(curve: &T, tolerance: T::Scalar, call_back: &mut F)
where
    T: FlatteningStep,
    F: FnMut(Point<T::Scalar>),
{
    // Repeatedly advance by one flattening step until a single step covers
    // the rest of the curve.
    let mut remainder = curve.clone();
    loop {
        let t = remainder.flattening_step(tolerance);
        if t >= T::Scalar::one() {
            call_back(remainder.to());
            break;
        }
        remainder = remainder.after_split(t);
        call_back(remainder.from());
    }
}

/// Like `for_each_flattened`, additionally passing each point's parameter
/// expressed in the original curve's parameter space.
pub(crate) fn for_each_flattened_with_t<T, F>(curve: &T, tolerance: T::Scalar, call_back: &mut F)
where
    T: FlatteningStep,
    F: FnMut(Point<T::Scalar>, T::Scalar),
{
    let end = curve.to();
    let mut remainder = curve.clone();
    // `t0` tracks the global parameter while `remainder` shrinks with each
    // split, so every local step has to be rescaled onto the remaining range.
    let mut t0 = T::Scalar::ZERO;
    loop {
        let step = remainder.flattening_step(tolerance);
        if step >= T::Scalar::ONE {
            break;
        }
        remainder = remainder.after_split(step);
        t0 += step * (T::Scalar::ONE - t0);
        call_back(remainder.from(), t0);
    }
    call_back(end, T::Scalar::ONE);
}

/// An iterator over the successive line-segment endpoints that approximate a
/// curve within a given tolerance.
pub struct Flattened<S, T> {
    curve: T,
    tolerance: S,
    done: bool,
}

impl<S: Scalar, T: FlatteningStep> Flattened<S, T> {
    /// Creates the iterator. Panics if `tolerance` is not strictly positive.
    pub fn new(curve: T, tolerance: S) -> Self {
        assert!(tolerance > S::ZERO);
        Flattened {
            curve,
            tolerance,
            done: false,
        }
    }
}

impl<S: Scalar, T: FlatteningStep<Scalar = S>> Iterator for Flattened<S, T> {
    type Item = Point<S>;

    fn next(&mut self) -> Option<Point<S>> {
        if self.done {
            return None;
        }
        let t = self.curve.flattening_step(self.tolerance);
        if t >= S::ONE {
            // One more segment reaches the end of the curve: emit the
            // endpoint and stop.
            self.done = true;
            Some(self.curve.to())
        } else {
            self.curve = self.curve.after_split(t);
            Some(self.curve.from())
        }
    }
}

/// Implements `Segment` for a type by forwarding every trait method to an
/// inherent method of the same name.
macro_rules! impl_segment {
    ($S:ty) => {
        type Scalar = $S;
        fn from(&self) -> Point<$S> {
            self.from()
        }
        fn to(&self) -> Point<$S> {
            self.to()
        }
        fn sample(&self, t: $S) -> Point<$S> {
            self.sample(t)
        }
        fn x(&self, t: $S) -> $S {
            self.x(t)
        }
        fn y(&self, t: $S) -> $S {
            self.y(t)
        }
        fn derivative(&self, t: $S) -> Vector<$S> {
            self.derivative(t)
        }
        fn dx(&self, t: $S) -> $S {
            self.dx(t)
        }
        fn dy(&self, t: $S) -> $S {
            self.dy(t)
        }
        fn split(&self, t: $S) -> (Self, Self) {
            self.split(t)
        }
        fn before_split(&self, t: $S) -> Self {
            self.before_split(t)
        }
        fn after_split(&self, t: $S) -> Self {
            self.after_split(t)
        }
        fn split_range(&self, t_range: Range<$S>) -> Self {
            self.split_range(t_range)
        }
        fn flip(&self) -> Self {
            self.flip()
        }
        fn approximate_length(&self, tolerance: $S) -> $S {
            self.approximate_length(tolerance)
        }
    };
}

/// Either a line segment, a quadratic bézier, or a cubic bézier.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum BezierSegment<S> {
    Linear(LineSegment<S>),
    Quadratic(QuadraticBezierSegment<S>),
    Cubic(CubicBezierSegment<S>),
}

impl<S: Scalar> BezierSegment<S> {
    /// Sample the segment at parameter `t`.
    #[inline]
    pub fn sample(&self, t: S) -> Point<S> {
        match self {
            BezierSegment::Linear(segment) => segment.sample(t),
            BezierSegment::Quadratic(segment) => segment.sample(t),
            BezierSegment::Cubic(segment) => segment.sample(t),
        }
    }

    /// Start point of the segment.
    #[inline]
    pub fn from(&self) -> Point<S> {
        match self {
            BezierSegment::Linear(segment) => segment.from,
            BezierSegment::Quadratic(segment) => segment.from,
            BezierSegment::Cubic(segment) => segment.from,
        }
    }

    /// End point of the segment.
    #[inline]
    pub fn to(&self) -> Point<S> {
        match self {
            BezierSegment::Linear(segment) => segment.to,
            BezierSegment::Quadratic(segment) => segment.to,
            BezierSegment::Cubic(segment) => segment.to,
        }
    }

    /// Whether the underlying segment reports itself linear within
    /// `tolerance` (always true for the linear variant).
    #[inline]
    pub fn is_linear(&self, tolerance: S) -> bool {
        match self {
            BezierSegment::Linear(..) => true,
            BezierSegment::Quadratic(segment) => segment.is_linear(tolerance),
            BezierSegment::Cubic(segment) => segment.is_linear(tolerance),
        }
    }

    /// The line segment joining this segment's endpoints.
    #[inline]
    pub fn baseline(&self) -> LineSegment<S> {
        match self {
            BezierSegment::Linear(segment) => *segment,
            BezierSegment::Quadratic(segment) => segment.baseline(),
            BezierSegment::Cubic(segment) => segment.baseline(),
        }
    }

    /// Split the segment into two sub-segments at parameter `t`, preserving
    /// the variant.
    pub fn split(&self, t: S) -> (BezierSegment<S>, BezierSegment<S>) {
        match self {
            BezierSegment::Linear(segment) => {
                let (a, b) = segment.split(t);
                (BezierSegment::Linear(a), BezierSegment::Linear(b))
            }
            BezierSegment::Quadratic(segment) => {
                let (a, b) = segment.split(t);
                (BezierSegment::Quadratic(a), BezierSegment::Quadratic(b))
            }
            BezierSegment::Cubic(segment) => {
                let (a, b) = segment.split(t);
                (BezierSegment::Cubic(a), BezierSegment::Cubic(b))
            }
        }
    }
}

impl<S> From<LineSegment<S>> for BezierSegment<S> {
    fn from(s: LineSegment<S>) -> Self {
        BezierSegment::Linear(s)
    }
}

impl<S> From<QuadraticBezierSegment<S>> for BezierSegment<S> {
    fn from(s: QuadraticBezierSegment<S>) -> Self {
        BezierSegment::Quadratic(s)
    }
}

impl<S> From<CubicBezierSegment<S>> for BezierSegment<S> {
    fn from(s: CubicBezierSegment<S>) -> Self {
        BezierSegment::Cubic(s)
    }
}
use crate::generic_math::{Point, Rect, Vector}; use crate::scalar::{One, Scalar}; use crate::{CubicBezierSegment, LineSegment, QuadraticBezierSegment}; use std::ops::Range; pub trait Segment: Copy + Sized { type Scalar: Scalar; fn from(&self) -> Point<Self::Scalar>; fn to(&self) -> Point<Self::Scalar>; fn sample(&self, t: Self::Scalar) -> Point<Self::Scalar>; fn x(&self, t: Self::Scalar) -> Self::Scalar { self.sample(t).x } fn y(&self, t: Self::Scalar) -> Self::Scalar { self.sample(t).y } fn derivative(&self, t: Self::Scalar) -> Vector<Self::Scalar>; fn dx(&self, t: Self::Scalar) -> Self::Scalar { self.derivative(t).x } fn dy(&self, t: Self::Scalar) -> Self::Scalar { self.derivative(t).y } fn split(&self, t: Self::Scalar) -> (Self, Self); fn before_split(&self, t: Self::Scalar) -> Self; fn after_split(&self, t: Self::Scalar) -> Self; fn split_range(&self, t_range: Range<Self::Scalar>) -> Self; fn flip(&self) -> Self; fn approximate_length(&self, tolerance: Self::Scalar) -> Self::Scalar; } pub trait BoundingRect { type Scalar: Scalar; fn bounding_rect(&self) -> Rect<Self::Scalar>; fn fast_bounding_rect(&self) -> Rect<Self::Scalar> { self.bounding_rect() } fn bounding_range_x(&self) -> (Self::Scalar, Self::Scalar); fn bounding_range_y(&self) -> (Self::Scalar, Self::Scalar); fn fast_bounding_range_x(&self) -> (Self::Scalar, Self::Scalar); fn fast_bounding_range_y(&self) -> (Self::Scalar, Self::Scalar); } pub trait FlatteningStep: Segment { fn flattening_step(&self, tolerance: Self::Scalar) -> Self::Scalar; } pub(crate) fn for_each_flattened<T, F>(curve: &T, tolerance: T::Scalar, call_back: &mut F) where T: FlatteningStep, F: FnMut(Point<T::Scalar>), { let mut iter = curve.clone(); loop { let t = iter.flattening_step(tolerance); if t >= T::Scalar::one() { call_back(iter.to()); break; } iter = iter.after_split(t); call_back(iter.from()); } } pub(crate) fn for_each_flattened_with_t<T, F>(curve: &T, tolerance: T::Scalar, call_back: &mut F) where T: FlatteningStep, F: 
FnMut(Point<T::Scalar>, T::Scalar), { let end = curve.to(); let mut curve = curve.clone(); let mut t0 = T::Scalar::ZERO; loop { let step = curve.flattening_step(tolerance); if step >= T::Scalar::ONE { break; } curve = curve.after_split(step); t0 += step * (T::Scalar::ONE - t0); call_back(curve.from(), t0); } call_back(end, T::Scalar::ONE); } pub struct Flattened<S, T> { curve: T, tolerance: S, done: bool, } impl<S: Scalar, T: FlatteningStep> Flattened<S, T> { pub fn new(curve: T, tolerance: S) -> Self { assert!(tolerance > S::ZERO); Flattened { curve: curve, tolerance: tolerance, done: false, } } } impl<S: Scalar, T: FlatteningStep<Scalar = S>> Iterator for Flattened<S, T> { type Item = Point<S>; fn next(&mut self) -> Option<Point<S>> { if self.done { return None; } let t = self.curve.flattening_step(self.tolerance); if t >= S::ONE { self.done = true; return Some(self.curve.to()); } self.curve = self.curve.after_split(t); return Some(self.curve.from()); } } macro_rules! impl_segment { ($S:ty) => ( type Scalar = $S; fn from(&self) -> Point<$S> { self.from() } fn to(&self) -> Point<$S> { self.to() } fn sample(&self, t: $S) -> Point<$S> { self.sample(t) } fn x(&self, t: $S) -> $S { self.x(t) } fn y(&self, t: $S) -> $S { self.y(t) } fn derivative(&self, t: $S) -> Vector<$S> { self.derivative(t) } fn dx(&self, t: $S) -> $S { self.dx(t) } fn dy(&self, t: $S) -> $S { self.dy(t) } fn split(&self, t: $S) -> (Self, Self) { self.split(t) } fn before_split(&self, t: $S) -> Self { self.before_split(t) } fn after_split(&self, t: $S) -> Self { self.after_split(t) } fn split_range(&self, t_range: Range<$S>) -> Self { self.split_range(t_range) } fn flip(&self) -> Self { self.flip() } fn approximate_length(&self, tolerance: $S) -> $S { self.approximate_length(tolerance) } ) } #[derive(Copy, Clone, Debug, PartialEq)] pub enum BezierSegment<S> { Linear(LineSegment<S>), Quadratic(QuadraticBezierSegment<S>), Cubic(CubicBezierSegment<S>), } impl<S: Scalar> BezierSegment<S> { #[inline] 
pub fn sample(&self, t: S) -> Point<S> { match self { BezierSegment::Linear(segment) => segment.sample(t), BezierSegment::Quadratic(segment) => segment.sample(t), BezierSegment::Cubic(segment) => segment.sample(t), } } #[inline] pub fn from(&self) -> Point<S> { match self { BezierSegment::Linear(segment) => segment.from, BezierSegment::Quadratic(segment) => segment.from, BezierSegment::Cubic(segment) => segment.from, } } #[inline] pub fn to(&self) -> Point<S> { match self { BezierSegment::Linear(segment) => segment.to, BezierSegment::Quadratic(segment) => segment.to, BezierSegment::Cubic(segment) => segment.to, } } #[inline] pub fn is_linear(&self, tolerance: S) -> bool { match self { BezierSegment::Linear(..) => true, BezierSegment::Quadratic(segment) => segment.is_linear(tolerance), BezierSegment::Cubic(segment) => segment.is_linear(tolerance), } } #[inline] pub fn baseline(&self) -> LineSegment<S> { match self { BezierSegment::Linear(segment) => *segment, BezierSegment::Quadratic(segment) => segment.baseline(), BezierSegment::Cubic(segment) => segment.baseline(), } } pub fn split(&self, t: S) -> (BezierSegment<S>, BezierSegment<S>) { match self { BezierSegment::Linear(segment) => { let (a, b) = segment.split(t); (BezierSegment::Linear(a), BezierSegment::Linear(b)) } BezierSegmen
} impl<S> From<LineSegment<S>> for BezierSegment<S> { fn from(s: LineSegment<S>) -> Self { BezierSegment::Linear(s) } } impl<S> From<QuadraticBezierSegment<S>> for BezierSegment<S> { fn from(s: QuadraticBezierSegment<S>) -> Self { BezierSegment::Quadratic(s) } } impl<S> From<CubicBezierSegment<S>> for BezierSegment<S> { fn from(s: CubicBezierSegment<S>) -> Self { BezierSegment::Cubic(s) } }
t::Quadratic(segment) => { let (a, b) = segment.split(t); (BezierSegment::Quadratic(a), BezierSegment::Quadratic(b)) } BezierSegment::Cubic(segment) => { let (a, b) = segment.split(t); (BezierSegment::Cubic(a), BezierSegment::Cubic(b)) } } }
function_block-function_prefixed
[ { "content": "pub fn flatten_cubic_bezier_with_t<S: Scalar, F>(curve: &CubicBezierSegment<S>, tolerance: S, callback: &mut F)\n\nwhere\n\n F: FnMut(Point<S>, S),\n\n{\n\n debug_assert!(tolerance >= S::EPSILON);\n\n let quadratics_tolerance = tolerance * S::value(0.2);\n\n let flattening_tolerance = ...
Rust
src/mouse.rs
gifnksm/sabios
a0729dbdaafbbc318c6bc13636a3a17a842c782b
use crate::{
    graphics::{Color, Draw, Offset, Point, ScreenInfo},
    layer,
    prelude::*,
    sync::{mpsc, OnceCell},
    window::Window,
};
use core::future::Future;
use enumflags2::{bitflags, BitFlags};

// Pixels drawn in this color are treated as transparent by the window layer.
const TRANSPARENT_COLOR: Color = Color::RED;
const MOUSE_CURSOR_WIDTH: usize = 15;
const MOUSE_CURSOR_HEIGHT: usize = 24;
const MOUSE_CURSOR_SIZE: Point<i32> =
    Point::new(MOUSE_CURSOR_WIDTH as i32, MOUSE_CURSOR_HEIGHT as i32);

// Cursor bitmap: '@' = outline (black), '.' = fill (white), ' ' = transparent.
// NOTE(review): every row must be exactly MOUSE_CURSOR_WIDTH (15) bytes to
// match the array type; the padding spaces appear collapsed in this copy of
// the file — verify the row widths against the original source.
const MOUSE_CURSOR_SHAPE: [[u8; MOUSE_CURSOR_WIDTH]; MOUSE_CURSOR_HEIGHT] = [
    *b"@ ",
    *b"@@ ",
    *b"@.@ ",
    *b"@..@ ",
    *b"@...@ ",
    *b"@....@ ",
    *b"@.....@ ",
    *b"@......@ ",
    *b"@.......@ ",
    *b"@........@ ",
    *b"@.........@ ",
    *b"@..........@ ",
    *b"@...........@ ",
    *b"@............@ ",
    *b"@......@@@@@@@@",
    *b"@......@ ",
    *b"@....@@.@ ",
    *b"@...@ @.@ ",
    *b"@..@ @.@ ",
    *b"@.@ @.@ ",
    *b"@@ @.@ ",
    *b"@ @.@ ",
    *b" @.@ ",
    *b" @@@ ",
];

// Mouse buttons encoded as bit flags (one bit per button).
#[bitflags]
#[repr(u8)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum MouseButton {
    Left = 0b001,
    Right = 0b010,
    Middle = 0b100,
}

// Raw report from the mouse device: the buttons currently held plus the
// relative displacement since the previous report.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct RawMouseEvent {
    buttons: BitFlags<MouseButton>,
    displacement: Offset<i32>,
}

// Processed event forwarded to the layer system.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) struct MouseEvent {
    pub(crate) down: BitFlags<MouseButton>, // buttons newly pressed
    pub(crate) up: BitFlags<MouseButton>,   // buttons newly released
    pub(crate) pos: Point<i32>,             // cursor position after the move
    pub(crate) pos_diff: Offset<i32>,       // movement since the last event
}

// Queue feeding raw reports from `observer` into `handler_task`.
static MOUSE_EVENT_TX: OnceCell<mpsc::Sender<RawMouseEvent>> = OnceCell::uninit();

// Driver callback: converts a raw report into a RawMouseEvent and enqueues it.
// Failures (queue unset or full) are logged rather than propagated —
// presumably because this runs in a context that cannot fail; confirm caller.
pub(crate) extern "C" fn observer(buttons: u8, displacement_x: i8, displacement_y: i8) {
    let buttons = BitFlags::<MouseButton>::from_bits_truncate(buttons);
    let event = RawMouseEvent {
        buttons,
        displacement: Offset::new(i32::from(displacement_x), i32::from(displacement_y)),
    };
    let res = MOUSE_EVENT_TX.try_get().and_then(|tx| tx.send(event));
    if let Err(err) = res {
        error!("failed to enqueue to the queue: {}", err);
    }
}

// Paints the cursor bitmap onto `drawer`, one pixel per shape byte.
fn draw(drawer: &mut dyn Draw) {
    for (dy, row) in (0..).zip(MOUSE_CURSOR_SHAPE) {
        for (dx, ch) in (0..).zip(row) {
            let p = Point::new(dx, dy);
            match ch {
                b'@' => drawer.draw(p, Color::BLACK),
                b'.' => drawer.draw(p, Color::WHITE),
                b' ' => drawer.draw(p, TRANSPARENT_COLOR),
                _ => {}
            }
        }
    }
}

// Builds the async task that owns the cursor window: creates the window,
// then consumes raw reports from the queue, moves the window, and forwards
// processed MouseEvents to the layer system.
pub(crate) fn handler_task() -> impl Future<Output = Result<()>> {
    // The channel is created before the future so `observer` can enqueue
    // events as soon as this function returns, even if the future has not
    // been polled yet.
    let (tx, mut rx) = mpsc::channel(100);
    MOUSE_EVENT_TX.init_once(|| tx);
    async move {
        let mut cursor_pos = Point::new(300, 200);
        let screen_info = ScreenInfo::get();
        let mut window = Window::builder()
            .pos(cursor_pos)
            .size(MOUSE_CURSOR_SIZE)
            .transparent_color(Some(TRANSPARENT_COLOR))
            .height(usize::MAX) // keep the cursor above every other layer
            .build()?;
        let cursor_layer_id = window.layer_id();
        draw(&mut window);
        window.flush().await?;
        // Announce the initial cursor position with an empty button state.
        let tx = layer::event_tx();
        tx.mouse_event(
            cursor_layer_id,
            MouseEvent {
                down: BitFlags::empty(),
                up: BitFlags::empty(),
                pos: cursor_pos,
                pos_diff: Offset::new(0, 0),
            },
        )
        .await?;
        let mut buttons = BitFlags::empty();
        while let Some(event) = rx.next().await {
            let prev_cursor_pos = cursor_pos;
            let prev_buttons = buttons;
            // Adopt the new position only when `clamp` against the screen
            // area yields one; otherwise the cursor stays where it was.
            if let Some(pos) = (cursor_pos + event.displacement).clamp(screen_info.area()) {
                cursor_pos = pos;
            }
            buttons = event.buttons;
            // Derive edge-triggered press/release sets from the two level
            // states.
            let down = buttons & !prev_buttons;
            let up = prev_buttons & !buttons;
            let pos_diff = cursor_pos - prev_cursor_pos;
            if prev_cursor_pos != cursor_pos {
                window.move_to(cursor_pos).await?;
            }
            tx.mouse_event(
                cursor_layer_id,
                MouseEvent {
                    down,
                    up,
                    pos: cursor_pos,
                    pos_diff,
                },
            )
            .await?;
        }
        Ok(())
    }
}
use crate::{ graphics::{Color, Draw, Offset, Point, ScreenInfo}, layer, prelude::*, sync::{mpsc, OnceCell}, window::Window, }; use core::future::Future; use enumflags2::{bitflags, BitFlags}; const TRANSPARENT_COLOR: Color = Color::RED; const MOUSE_CURSOR_WIDTH: usize
_pos).await?; } tx.mouse_event( cursor_layer_id, MouseEvent { down, up, pos: cursor_pos, pos_diff, }, ) .await?; } Ok(()) } }
= 15; const MOUSE_CURSOR_HEIGHT: usize = 24; const MOUSE_CURSOR_SIZE: Point<i32> = Point::new(MOUSE_CURSOR_WIDTH as i32, MOUSE_CURSOR_HEIGHT as i32); const MOUSE_CURSOR_SHAPE: [[u8; MOUSE_CURSOR_WIDTH]; MOUSE_CURSOR_HEIGHT] = [ *b"@ ", *b"@@ ", *b"@.@ ", *b"@..@ ", *b"@...@ ", *b"@....@ ", *b"@.....@ ", *b"@......@ ", *b"@.......@ ", *b"@........@ ", *b"@.........@ ", *b"@..........@ ", *b"@...........@ ", *b"@............@ ", *b"@......@@@@@@@@", *b"@......@ ", *b"@....@@.@ ", *b"@...@ @.@ ", *b"@..@ @.@ ", *b"@.@ @.@ ", *b"@@ @.@ ", *b"@ @.@ ", *b" @.@ ", *b" @@@ ", ]; #[bitflags] #[repr(u8)] #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub(crate) enum MouseButton { Left = 0b001, Right = 0b010, Middle = 0b100, } #[derive(Debug, Clone, Copy, PartialEq, Eq)] struct RawMouseEvent { buttons: BitFlags<MouseButton>, displacement: Offset<i32>, } #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub(crate) struct MouseEvent { pub(crate) down: BitFlags<MouseButton>, pub(crate) up: BitFlags<MouseButton>, pub(crate) pos: Point<i32>, pub(crate) pos_diff: Offset<i32>, } static MOUSE_EVENT_TX: OnceCell<mpsc::Sender<RawMouseEvent>> = OnceCell::uninit(); pub(crate) extern "C" fn observer(buttons: u8, displacement_x: i8, displacement_y: i8) { let buttons = BitFlags::<MouseButton>::from_bits_truncate(buttons); let event = RawMouseEvent { buttons, displacement: Offset::new(i32::from(displacement_x), i32::from(displacement_y)), }; let res = MOUSE_EVENT_TX.try_get().and_then(|tx| tx.send(event)); if let Err(err) = res { error!("failed to enqueue to the queue: {}", err); } } fn draw(drawer: &mut dyn Draw) { for (dy, row) in (0..).zip(MOUSE_CURSOR_SHAPE) { for (dx, ch) in (0..).zip(row) { let p = Point::new(dx, dy); match ch { b'@' => drawer.draw(p, Color::BLACK), b'.' 
=> drawer.draw(p, Color::WHITE), b' ' => drawer.draw(p, TRANSPARENT_COLOR), _ => {} } } } } pub(crate) fn handler_task() -> impl Future<Output = Result<()>> { let (tx, mut rx) = mpsc::channel(100); MOUSE_EVENT_TX.init_once(|| tx); async move { let mut cursor_pos = Point::new(300, 200); let screen_info = ScreenInfo::get(); let mut window = Window::builder() .pos(cursor_pos) .size(MOUSE_CURSOR_SIZE) .transparent_color(Some(TRANSPARENT_COLOR)) .height(usize::MAX) .build()?; let cursor_layer_id = window.layer_id(); draw(&mut window); window.flush().await?; let tx = layer::event_tx(); tx.mouse_event( cursor_layer_id, MouseEvent { down: BitFlags::empty(), up: BitFlags::empty(), pos: cursor_pos, pos_diff: Offset::new(0, 0), }, ) .await?; let mut buttons = BitFlags::empty(); while let Some(event) = rx.next().await { let prev_cursor_pos = cursor_pos; let prev_buttons = buttons; if let Some(pos) = (cursor_pos + event.displacement).clamp(screen_info.area()) { cursor_pos = pos; } buttons = event.buttons; let down = buttons & !prev_buttons; let up = prev_buttons & !buttons; let pos_diff = cursor_pos - prev_cursor_pos; if prev_cursor_pos != cursor_pos { window.move_to(cursor
random
[ { "content": "fn draw(drawer: &mut dyn Draw, size: Size<i32>) {\n\n drawer.fill_rect(\n\n Rectangle::new(Point::new(0, 0), Size::new(size.x, size.y - 50)),\n\n BG_COLOR,\n\n );\n\n drawer.fill_rect(\n\n Rectangle::new(Point::new(0, size.y - 50), Size::new(size.x, 50)),\n\n C...
Rust
piet-gpu/bin/winit.rs
linebender/piet-gpu
086e547aef2edbdb595f73b10770a0b5f0853058
use piet::kurbo::Point; use piet::{RenderContext, Text, TextAttribute, TextLayoutBuilder}; use piet_gpu_hal::{Error, ImageLayout, Instance, Session, SubmittedCmdBuf}; use piet_gpu::{test_scenes, PietGpuRenderContext, Renderer}; use clap::{App, Arg}; use winit::{ event::{Event, WindowEvent}, event_loop::{ControlFlow, EventLoop}, window::WindowBuilder, }; const NUM_FRAMES: usize = 2; const WIDTH: usize = 2048; const HEIGHT: usize = 1536; fn main() -> Result<(), Error> { let matches = App::new("piet-gpu test") .arg(Arg::with_name("INPUT").index(1)) .arg(Arg::with_name("flip").short("f").long("flip")) .arg( Arg::with_name("scale") .short("s") .long("scale") .takes_value(true), ) .get_matches(); let event_loop = EventLoop::new(); let window = WindowBuilder::new() .with_inner_size(winit::dpi::LogicalSize { width: (WIDTH / 2) as f64, height: (HEIGHT / 2) as f64, }) .with_resizable(false) .build(&event_loop)?; let (instance, surface) = Instance::new(Some(&window))?; let mut info_string = "info".to_string(); unsafe { let device = instance.device(surface.as_ref())?; let mut swapchain = instance.swapchain(WIDTH / 2, HEIGHT / 2, &device, surface.as_ref().unwrap())?; let session = Session::new(device); let mut current_frame = 0; let present_semaphores = (0..NUM_FRAMES) .map(|_| session.create_semaphore()) .collect::<Result<Vec<_>, Error>>()?; let query_pools = (0..NUM_FRAMES) .map(|_| session.create_query_pool(8)) .collect::<Result<Vec<_>, Error>>()?; let mut submitted: [Option<SubmittedCmdBuf>; NUM_FRAMES] = Default::default(); let mut renderer = Renderer::new(&session, WIDTH, HEIGHT, NUM_FRAMES)?; event_loop.run(move |event, _, control_flow| { *control_flow = ControlFlow::Poll; match event { Event::WindowEvent { event, window_id } if window_id == window.id() => { match event { WindowEvent::CloseRequested => { *control_flow = ControlFlow::Exit; } _ => (), } } Event::MainEventsCleared => { window.request_redraw(); } Event::RedrawRequested(window_id) if window_id == window.id() 
=> { let frame_idx = current_frame % NUM_FRAMES; if let Some(submitted) = submitted[frame_idx].take() { submitted.wait().unwrap(); let ts = session.fetch_query_pool(&query_pools[frame_idx]).unwrap(); info_string = format!( "{:.3}ms :: e:{:.3}ms|alloc:{:.3}ms|cp:{:.3}ms|bd:{:.3}ms|bin:{:.3}ms|cr:{:.3}ms|r:{:.3}ms", ts[6] * 1e3, ts[0] * 1e3, (ts[1] - ts[0]) * 1e3, (ts[2] - ts[1]) * 1e3, (ts[3] - ts[2]) * 1e3, (ts[4] - ts[3]) * 1e3, (ts[5] - ts[4]) * 1e3, (ts[6] - ts[5]) * 1e3, ); } let mut ctx = PietGpuRenderContext::new(); if let Some(input) = matches.value_of("INPUT") { let mut scale = matches .value_of("scale") .map(|scale| scale.parse().unwrap()) .unwrap_or(8.0); if matches.is_present("flip") { scale = -scale; } test_scenes::render_svg(&mut ctx, input, scale); } else { test_scenes::render_anim_frame(&mut ctx, current_frame); } render_info_string(&mut ctx, &info_string); if let Err(e) = renderer.upload_render_ctx(&mut ctx, frame_idx) { println!("error in uploading: {}", e); } let (image_idx, acquisition_semaphore) = swapchain.next().unwrap(); let swap_image = swapchain.image(image_idx); let query_pool = &query_pools[frame_idx]; let mut cmd_buf = session.cmd_buf().unwrap(); cmd_buf.begin(); renderer.record(&mut cmd_buf, &query_pool, frame_idx); cmd_buf.image_barrier( &swap_image, ImageLayout::Undefined, ImageLayout::BlitDst, ); cmd_buf.blit_image(&renderer.image_dev, &swap_image); cmd_buf.image_barrier(&swap_image, ImageLayout::BlitDst, ImageLayout::Present); cmd_buf.finish(); submitted[frame_idx] = Some(session .run_cmd_buf( cmd_buf, &[&acquisition_semaphore], &[&present_semaphores[frame_idx]], ) .unwrap()); swapchain .present(image_idx, &[&present_semaphores[frame_idx]]) .unwrap(); current_frame += 1; } Event::LoopDestroyed => { for cmd_buf in &mut submitted { if let Some(cmd_buf) = cmd_buf.take() { cmd_buf.wait().unwrap(); } } } _ => (), } }) } } fn render_info_string(rc: &mut impl RenderContext, info: &str) { let layout = rc .text() 
.new_text_layout(info.to_string()) .default_attribute(TextAttribute::FontSize(40.0)) .build() .unwrap(); rc.draw_text(&layout, Point::new(110.0, 50.0)); }
use piet::kurbo::Point; use piet::{RenderContext, Text, TextAttribute, TextLayoutBuilder}; use piet_gpu_hal::{Error, ImageLayout, Instance, Session, SubmittedCmdBuf}; use piet_gpu::{test_scenes, PietGpuRenderContext, Renderer}; use clap::{App, Arg}; use winit::{ event::{Event, WindowEvent}, event_loop::{ControlFlow, EventLoop}, window::WindowBuilder, }; const NUM_FRAMES: usize = 2; const WIDTH: usize = 2048; const HEIGHT: usize = 1536; fn main() -> Result<(), Error> { let matches = App::new("piet-gpu test") .arg(Arg::with_name("INPUT").index(1)) .arg(Arg::with_name("flip").short("f").long("flip")) .arg( Arg::with_name("scale") .short("s") .long("scale") .takes_value(true), ) .get_matches(); let event_loop = EventLoop::new(); let window = WindowBuilder::new() .with_inner_size(winit::dpi::LogicalSize { width: (WIDTH / 2) as f64, height: (HEIGHT / 2) as f64, }) .with_resizable(false) .build(&event_loop)?; let (instance, surface) = Instance::new(Some(&window))?; let mut info_string = "info".to_string(); unsafe { let device = instance.device(surface.as_ref())?; let mut swapchain = instance.swapchain(WIDTH / 2, HEIGHT / 2, &device, surface.as_ref().unwrap())?; let session = Session::new(device); let mut current_frame = 0; let present_semaphores = (0..NUM_FRAMES) .map(|_| session.create_semaphore()) .collect::<Result<Vec<_>, Error>>()?; let query_pools = (0..NUM_FRAMES) .map(|_| session.create_query_pool(8)) .collect::<Result<Vec<_>, Error>>()?; let mut submitted: [Option<SubmittedCmdBuf>; NUM_FRAMES] = Default::default(); let mut renderer = Renderer::new(&session, WIDTH, HEIGHT, NUM_FRAMES)?; event_loop.run(move |event, _, control_flow| { *control_flow = ControlFlow::Poll; match event { Event::WindowEvent { event, window_id } if window_id == window.id() => { match event { WindowEvent::CloseRequested => { *control_flow = ControlFlow::Exit; } _ => (), } } Event::MainEventsCleared => { window.request_redraw(); } Event::RedrawRequested(window_id) if window_id == window.id() 
=> { let frame_idx = current_frame % NUM_FRAMES; if let Some(submitted) = submitted[frame_idx].take() { submitted.wait().unwrap(); let ts = session.fetch_query_pool(&query_pools[frame_idx]).unwrap(); info_string = format!( "{:.3}ms :: e:{:.3}ms|alloc:{:.3}ms|cp:{:.3}ms|bd:{:.3}ms|bin:{:.3}ms|cr:{:.3}ms|r:{:.3}ms", ts[6] * 1e3, ts[0] * 1e3, (ts[1] - ts[0]) * 1e3, (ts[2] - ts[1]) * 1e3, (ts[3] - ts[2]) * 1e3, (ts[4] - ts[3]) * 1e3, (ts[5] - ts[4]) * 1e3, (ts[6] - ts[5]) * 1e3, ); } let mut ctx = PietGpuRenderContext::new(); if let Some(input) = matches.value_of("INPUT") { let mut scale = matches .value_of("scale") .map(|scale| scale.parse().unwrap()) .unwrap_or(8.0); if matches.is_present("flip") { scale = -scale; } test_scenes::render_svg(&mut ctx, input, scale); } else { test_scenes::render_anim_frame(&mut ctx, current_frame); } render_info_string(&mut ctx, &info_string); if let Err(e) = renderer.upload_render_ctx(&mut ctx, frame_idx) { println!("error in uploading: {}", e); } let (image_idx, acquisition_semaphore) = swapchain.next().unwrap(); let swap_image = swapchain.image(image_idx); let query_pool = &query_pools[frame_idx]; let mut cmd_buf = session.cmd_buf().unwrap(); cmd_buf.begin(); renderer.record(&mut cmd_buf, &query_pool, frame_idx); cmd_buf.image_barrier(
+= 1; } Event::LoopDestroyed => { for cmd_buf in &mut submitted { if let Some(cmd_buf) = cmd_buf.take() { cmd_buf.wait().unwrap(); } } } _ => (), } }) } } fn render_info_string(rc: &mut impl RenderContext, info: &str) { let layout = rc .text() .new_text_layout(info.to_string()) .default_attribute(TextAttribute::FontSize(40.0)) .build() .unwrap(); rc.draw_text(&layout, Point::new(110.0, 50.0)); }
&swap_image, ImageLayout::Undefined, ImageLayout::BlitDst, ); cmd_buf.blit_image(&renderer.image_dev, &swap_image); cmd_buf.image_barrier(&swap_image, ImageLayout::BlitDst, ImageLayout::Present); cmd_buf.finish(); submitted[frame_idx] = Some(session .run_cmd_buf( cmd_buf, &[&acquisition_semaphore], &[&present_semaphores[frame_idx]], ) .unwrap()); swapchain .present(image_idx, &[&present_semaphores[frame_idx]]) .unwrap(); current_frame
random
[ { "content": "pub fn render_svg(rc: &mut impl RenderContext, filename: &str, scale: f64) {\n\n let xml_str = std::fs::read_to_string(filename).unwrap();\n\n let start = std::time::Instant::now();\n\n let svg = PicoSvg::load(&xml_str, scale).unwrap();\n\n println!(\"parsing time: {:?}\", start.elapse...
Rust
shell/tests/common/test_data.rs
tizoc/tezedge
f44cbd00ab73e443593cb8c089cae2732acbfa81
use std::collections::HashMap; use std::convert::TryInto; use anyhow::format_err; use crypto::hash::{BlockHash, ContextHash, OperationHash}; use tezos_api::environment::TezosEnvironment; use tezos_api::ffi::ApplyBlockRequest; use tezos_messages::p2p::binary_message::MessageHash; use tezos_messages::p2p::encoding::block_header::Level; use tezos_messages::p2p::encoding::prelude::{ BlockHeader, Operation, OperationsForBlock, OperationsForBlocksMessage, }; use crate::common::samples::OperationsForBlocksMessageKey; pub struct Db { pub tezos_env: TezosEnvironment, requests: Vec<String>, headers: HashMap<BlockHash, (Level, ContextHash)>, operations: HashMap<OperationsForBlocksMessageKey, OperationsForBlocksMessage>, operation_hashes: HashMap<OperationHash, Level>, } impl Db { pub(crate) fn init_db( (requests, operations, tezos_env): ( Vec<String>, HashMap<OperationsForBlocksMessageKey, OperationsForBlocksMessage>, TezosEnvironment, ), ) -> Db { let mut headers: HashMap<BlockHash, (Level, ContextHash)> = HashMap::new(); let mut operation_hashes: HashMap<OperationHash, Level> = HashMap::new(); for (idx, request) in requests.iter().enumerate() { let level = to_level(idx); let request = crate::common::samples::from_captured_bytes(request) .expect("Failed to parse request"); let block = request .block_header .message_typed_hash() .expect("Failed to decode message_hash"); let context_hash: ContextHash = request.block_header.context().clone(); headers.insert(block, (level, context_hash)); for ops in request.operations { for op in ops { operation_hashes.insert( op.message_typed_hash() .expect("Failed to compute message hash"), level, ); } } } Db { tezos_env, requests, headers, operations, operation_hashes, } } pub fn get(&self, block_hash: &BlockHash) -> Result<Option<BlockHeader>, anyhow::Error> { match self.headers.get(block_hash) { Some((level, _)) => Ok(Some(self.captured_requests(*level)?.block_header)), None => Ok(None), } } pub fn get_operation( &self, operation_hash: 
&OperationHash, ) -> Result<Option<Operation>, anyhow::Error> { match self.operation_hashes.get(operation_hash) { Some(level) => { let mut found = None; for ops in self.captured_requests(*level)?.operations { for op in ops { if op.message_typed_hash::<OperationHash>()?.eq(operation_hash) { found = Some(op); break; } } } Ok(found) } None => Ok(None), } } pub fn get_operations( &self, block_hash: &BlockHash, ) -> Result<Vec<Vec<Operation>>, anyhow::Error> { match self.headers.get(block_hash) { Some((level, _)) => Ok(self.captured_requests(*level)?.operations), None => Ok(vec![]), } } pub fn get_operations_for_block( &self, block: &OperationsForBlock, ) -> Result<Option<OperationsForBlocksMessage>, anyhow::Error> { match self.operations.get(&OperationsForBlocksMessageKey::new( block.block_hash().clone(), block.validation_pass(), )) { Some(operations) => Ok(Some(operations.clone())), None => Ok(None), } } pub fn block_hash(&self, searched_level: Level) -> Result<BlockHash, anyhow::Error> { let block_hash = self .headers .iter() .find(|(_, (level, _))| searched_level.eq(level)) .map(|(k, _)| k.clone()); match block_hash { Some(block_hash) => Ok(block_hash), None => Err(format_err!( "No block_hash found for level: {}", searched_level )), } } pub fn block_header(&self, searched_level: Level) -> Result<BlockHeader, anyhow::Error> { match self.get(&self.block_hash(searched_level)?)? 
{ Some(header) => Ok(header), None => Err(format_err!( "No block_header found for level: {}", searched_level )), } } pub fn context_hash(&self, searched_level: Level) -> Result<ContextHash, anyhow::Error> { let context_hash = self .headers .iter() .find(|(_, (level, _))| searched_level.eq(level)) .map(|(_, (_, context_hash))| context_hash.clone()); match context_hash { Some(context_hash) => Ok(context_hash), None => Err(format_err!("No header found for level: {}", searched_level)), } } fn captured_requests(&self, level: Level) -> Result<ApplyBlockRequest, anyhow::Error> { crate::common::samples::from_captured_bytes(&self.requests[to_index(level)]) } } fn to_index(level: Level) -> usize { (level - 1) .try_into() .expect("Failed to convert level to usize") } fn to_level(idx: usize) -> Level { (idx + 1) .try_into() .expect("Failed to convert index to Level") }
use std::collections::HashMap; use std::convert::TryInto; use anyhow::format_err; use crypto::hash::{BlockHash, ContextHash, OperationHash}; use tezos_api::environment::TezosEnvironment; use tezos_api::ffi::ApplyBlockRequest; use tezos_messages::p2p::binary_message::MessageHash; use tezos_messages::p2p::encoding::block_header::Level; use tezos_messages::p2p::encoding::prelude::{ BlockHeader, Operation, OperationsForBlock, OperationsForBlocksMessage, }; use crate::common::samples::OperationsForBlocksMessageKey; pub struct Db { pub tezos_env: TezosEnvironment, requests: Vec<String>, headers: HashMap<BlockHash, (Level, ContextHash)>, operations: HashMap<OperationsForBlocksMessageKey, OperationsForBlocksMessage>, operation_hashes: HashMap<OperationHash, Level>, } impl Db { pub(crate) fn init_db( (requests, operations, tezos_env): ( Vec<String>, HashMap<OperationsForBlocksMessageKey, OperationsForBlocksMessage>, TezosEnvironment, ), ) -> Db { let mut headers: HashMap<BlockHash, (Level, ContextHash)> = HashMap::new(); let mut operation_hashes: HashMap<OperationHash, Level> = HashMap::new(); for (idx, request)
message hash"), level, ); } } } Db { tezos_env, requests, headers, operations, operation_hashes, } } pub fn get(&self, block_hash: &BlockHash) -> Result<Option<BlockHeader>, anyhow::Error> { match self.headers.get(block_hash) { Some((level, _)) => Ok(Some(self.captured_requests(*level)?.block_header)), None => Ok(None), } } pub fn get_operation( &self, operation_hash: &OperationHash, ) -> Result<Option<Operation>, anyhow::Error> { match self.operation_hashes.get(operation_hash) { Some(level) => { let mut found = None; for ops in self.captured_requests(*level)?.operations { for op in ops { if op.message_typed_hash::<OperationHash>()?.eq(operation_hash) { found = Some(op); break; } } } Ok(found) } None => Ok(None), } } pub fn get_operations( &self, block_hash: &BlockHash, ) -> Result<Vec<Vec<Operation>>, anyhow::Error> { match self.headers.get(block_hash) { Some((level, _)) => Ok(self.captured_requests(*level)?.operations), None => Ok(vec![]), } } pub fn get_operations_for_block( &self, block: &OperationsForBlock, ) -> Result<Option<OperationsForBlocksMessage>, anyhow::Error> { match self.operations.get(&OperationsForBlocksMessageKey::new( block.block_hash().clone(), block.validation_pass(), )) { Some(operations) => Ok(Some(operations.clone())), None => Ok(None), } } pub fn block_hash(&self, searched_level: Level) -> Result<BlockHash, anyhow::Error> { let block_hash = self .headers .iter() .find(|(_, (level, _))| searched_level.eq(level)) .map(|(k, _)| k.clone()); match block_hash { Some(block_hash) => Ok(block_hash), None => Err(format_err!( "No block_hash found for level: {}", searched_level )), } } pub fn block_header(&self, searched_level: Level) -> Result<BlockHeader, anyhow::Error> { match self.get(&self.block_hash(searched_level)?)? 
{ Some(header) => Ok(header), None => Err(format_err!( "No block_header found for level: {}", searched_level )), } } pub fn context_hash(&self, searched_level: Level) -> Result<ContextHash, anyhow::Error> { let context_hash = self .headers .iter() .find(|(_, (level, _))| searched_level.eq(level)) .map(|(_, (_, context_hash))| context_hash.clone()); match context_hash { Some(context_hash) => Ok(context_hash), None => Err(format_err!("No header found for level: {}", searched_level)), } } fn captured_requests(&self, level: Level) -> Result<ApplyBlockRequest, anyhow::Error> { crate::common::samples::from_captured_bytes(&self.requests[to_index(level)]) } } fn to_index(level: Level) -> usize { (level - 1) .try_into() .expect("Failed to convert level to usize") } fn to_level(idx: usize) -> Level { (idx + 1) .try_into() .expect("Failed to convert index to Level") }
in requests.iter().enumerate() { let level = to_level(idx); let request = crate::common::samples::from_captured_bytes(request) .expect("Failed to parse request"); let block = request .block_header .message_typed_hash() .expect("Failed to decode message_hash"); let context_hash: ContextHash = request.block_header.context().clone(); headers.insert(block, (level, context_hash)); for ops in request.operations { for op in ops { operation_hashes.insert( op.message_typed_hash() .expect("Failed to compute
random
[ { "content": "pub fn log_level() -> Level {\n\n env::var(\"LOG_LEVEL\")\n\n .unwrap_or_else(|_| \"info\".to_string())\n\n .parse::<Level>()\n\n .unwrap()\n\n}\n\n\n", "file_path": "shell/tests/common/mod.rs", "rank": 0, "score": 253031.1164835876 }, { "content": "pub ...
Rust
programs/vest/src/vest_instruction.rs
garious/silk
947a339714752dfa6ecc06d03da65696bee512d6
use crate::{id, vest_state::VestState}; use bincode::serialized_size; use chrono::prelude::{Date, DateTime, Utc}; use num_derive::FromPrimitive; use serde_derive::{Deserialize, Serialize}; use solana_sdk::{ instruction::{AccountMeta, Instruction, InstructionError}, program_utils::DecodeError, pubkey::Pubkey, system_instruction, }; use thiserror::Error; #[derive(Error, Debug, Clone, PartialEq, FromPrimitive)] pub enum VestError { #[error("destination missing")] DestinationMissing, #[error("unauthorized")] Unauthorized, } impl From<VestError> for InstructionError { fn from(e: VestError) -> Self { InstructionError::CustomError(e as u32) } } impl<T> DecodeError<T> for VestError { fn type_of() -> &'static str { "VestError" } } #[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Clone)] pub enum VestInstruction { InitializeAccount { terminator_pubkey: Pubkey, payee_pubkey: Pubkey, start_date_time: DateTime<Utc>, date_pubkey: Pubkey, total_lamports: u64, }, SetTerminator(Pubkey), SetPayee(Pubkey), RedeemTokens, Terminate, Renege(u64), VestAll, } fn initialize_account( terminator_pubkey: &Pubkey, payee_pubkey: &Pubkey, contract_pubkey: &Pubkey, start_date: Date<Utc>, date_pubkey: &Pubkey, total_lamports: u64, ) -> Instruction { let keys = vec![AccountMeta::new(*contract_pubkey, false)]; Instruction::new( id(), &VestInstruction::InitializeAccount { terminator_pubkey: *terminator_pubkey, payee_pubkey: *payee_pubkey, start_date_time: start_date.and_hms(0, 0, 0), date_pubkey: *date_pubkey, total_lamports, }, keys, ) } pub fn create_account( payer_pubkey: &Pubkey, terminator_pubkey: &Pubkey, contract_pubkey: &Pubkey, payee_pubkey: &Pubkey, start_date: Date<Utc>, date_pubkey: &Pubkey, lamports: u64, ) -> Vec<Instruction> { let space = serialized_size(&VestState::default()).unwrap(); vec![ system_instruction::create_account(&payer_pubkey, contract_pubkey, lamports, space, &id()), initialize_account( terminator_pubkey, payee_pubkey, contract_pubkey, start_date, date_pubkey, 
lamports, ), ] } pub fn set_terminator(contract: &Pubkey, old_pubkey: &Pubkey, new_pubkey: &Pubkey) -> Instruction { let account_metas = vec![ AccountMeta::new(*contract, false), AccountMeta::new(*old_pubkey, true), ]; Instruction::new( id(), &VestInstruction::SetTerminator(*new_pubkey), account_metas, ) } pub fn set_payee(contract: &Pubkey, old_pubkey: &Pubkey, new_pubkey: &Pubkey) -> Instruction { let account_metas = vec![ AccountMeta::new(*contract, false), AccountMeta::new(*old_pubkey, true), ]; Instruction::new(id(), &VestInstruction::SetPayee(*new_pubkey), account_metas) } pub fn redeem_tokens(contract: &Pubkey, date_pubkey: &Pubkey, to: &Pubkey) -> Instruction { let account_metas = vec![ AccountMeta::new(*contract, false), AccountMeta::new_readonly(*date_pubkey, false), AccountMeta::new(*to, false), ]; Instruction::new(id(), &VestInstruction::RedeemTokens, account_metas) } pub fn terminate(contract: &Pubkey, from: &Pubkey, to: &Pubkey) -> Instruction { let mut account_metas = vec![ AccountMeta::new(*contract, false), AccountMeta::new(*from, true), ]; if from != to { account_metas.push(AccountMeta::new(*to, false)); } Instruction::new(id(), &VestInstruction::Terminate, account_metas) } pub fn renege(contract: &Pubkey, from: &Pubkey, to: &Pubkey, lamports: u64) -> Instruction { let mut account_metas = vec![ AccountMeta::new(*contract, false), AccountMeta::new(*from, true), ]; if from != to { account_metas.push(AccountMeta::new(*to, false)); } Instruction::new(id(), &VestInstruction::Renege(lamports), account_metas) } pub fn vest_all(contract: &Pubkey, from: &Pubkey) -> Instruction { let account_metas = vec![ AccountMeta::new(*contract, false), AccountMeta::new(*from, true), ]; Instruction::new(id(), &VestInstruction::VestAll, account_metas) }
use crate::{id, vest_state::VestState}; use bincode::serialized_size; use chrono::prelude::{Date, DateTime, Utc}; use num_derive::FromPrimitive; use serde_derive::{Deserialize, Serialize}; use solana_sdk::{ instruction::{AccountMeta, Instruction, InstructionError}, program_utils::DecodeError, pubkey::Pubkey, system_instruction, }; use thiserror::Error; #[derive(Error, Debug, Clone, PartialEq, FromPrimitive)] pub enum VestError { #[error("destination missing")] DestinationMissing, #[error("unauthorized")] Unauthorized, } impl From<VestError> for InstructionError { fn from(e: VestError) -> Self { InstructionError::CustomError(e as u32) } } impl<T> DecodeError<T> for VestError { fn type_of() -> &'static str { "VestError" } } #[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Clone)] pub enum VestInstruction { InitializeAccount { terminator_pubkey: Pubkey, payee_pubkey: Pubkey, start_date_time: DateTime<Utc>, date_pubkey: Pubkey, total_lamports: u64, }, SetTerminator(Pubkey), SetPayee(Pubkey), RedeemTokens, Terminate, Renege(u64), VestAll, } fn initialize_account( terminator_pubkey: &Pubkey, payee_pubkey: &Pubkey, contract_pubkey: &Pubkey, start_date: Date<Utc>, date_pubkey: &Pubkey, total_lamports: u64, ) -> Instruction { let keys = vec![AccountMeta::new(*contract_pubkey, false)]; Instruction::new( id(), &VestInstruction::InitializeAccount { terminator_pubkey: *terminator_pubkey, payee_pubkey: *payee_pubkey, start_date_time: start_date.and_hms(0, 0, 0), date_pubkey: *date_pubkey, total_lamports, }, keys, ) } pub fn create_account( payer_pubkey: &Pubkey, terminator_pubkey: &Pubkey, contract_pubkey: &Pubkey, payee_pubkey: &Pubkey, start_date: Date<Utc>, date_pubkey: &Pubkey, lamports: u64, ) -> Vec<Instruction> { let space = serialized_size(&VestState::default()).unwrap(); vec![ system_instruction::create_account(&payer_pubkey, contract_pubkey, lamports, space, &id()), initialize_account( terminator_pubkey, payee_pubkey, contract_pubkey, start_date, date_pubkey, 
lamports, ), ] } pub fn set_terminator(contract: &Pubkey, old_pubkey: &Pubkey, new_pubkey: &Pubkey) -> Instruction { let account_metas = vec![ AccountMeta::new(*contract, false), AccountMeta::new(*old_pubkey, true), ]; Instruction::new( id(), &VestInstruction::SetTerminator(*new_pubkey), account_metas, ) } pub fn set_payee(contract: &Pubkey, old_pubkey: &Pubkey, new_pubkey: &Pubkey) -> Instruction { let account_metas = vec![ AccountMeta::new(*contract, false), AccountMeta::new(*old_pubkey, true), ]; Instruction::new(i
Instruction { let account_metas = vec![ AccountMeta::new(*contract, false), AccountMeta::new(*from, true), ]; Instruction::new(id(), &VestInstruction::VestAll, account_metas) }
d(), &VestInstruction::SetPayee(*new_pubkey), account_metas) } pub fn redeem_tokens(contract: &Pubkey, date_pubkey: &Pubkey, to: &Pubkey) -> Instruction { let account_metas = vec![ AccountMeta::new(*contract, false), AccountMeta::new_readonly(*date_pubkey, false), AccountMeta::new(*to, false), ]; Instruction::new(id(), &VestInstruction::RedeemTokens, account_metas) } pub fn terminate(contract: &Pubkey, from: &Pubkey, to: &Pubkey) -> Instruction { let mut account_metas = vec![ AccountMeta::new(*contract, false), AccountMeta::new(*from, true), ]; if from != to { account_metas.push(AccountMeta::new(*to, false)); } Instruction::new(id(), &VestInstruction::Terminate, account_metas) } pub fn renege(contract: &Pubkey, from: &Pubkey, to: &Pubkey, lamports: u64) -> Instruction { let mut account_metas = vec![ AccountMeta::new(*contract, false), AccountMeta::new(*from, true), ]; if from != to { account_metas.push(AccountMeta::new(*to, false)); } Instruction::new(id(), &VestInstruction::Renege(lamports), account_metas) } pub fn vest_all(contract: &Pubkey, from: &Pubkey) ->
random
[ { "content": "/// Create and sign new SystemInstruction::Transfer transaction to many destinations\n\npub fn transfer_many(from_pubkey: &Pubkey, to_lamports: &[(Pubkey, u64)]) -> Vec<Instruction> {\n\n to_lamports\n\n .iter()\n\n .map(|(to_pubkey, lamports)| transfer(from_pubkey, to_pubkey, *la...
Rust
src/parser/hir/syntax_shape/expression/number.rs
thegedge/nushell
2716bb020f537470511f1036b1ef95c029a455d7
use crate::parser::hir::syntax_shape::{ expand_atom, parse_single_node, ExpandContext, ExpandExpression, ExpansionRule, FallibleColorSyntax, FlatShape, }; use crate::parser::{ hir, hir::{RawNumber, TokensIterator}, RawToken, }; use crate::prelude::*; #[derive(Debug, Copy, Clone)] pub struct NumberShape; impl ExpandExpression for NumberShape { fn expand_expr<'a, 'b>( &self, token_nodes: &mut TokensIterator<'_>, context: &ExpandContext, ) -> Result<hir::Expression, ShellError> { parse_single_node(token_nodes, "Number", |token, token_span, err| { Ok(match token { RawToken::GlobPattern | RawToken::Operator(..) => return Err(err.error()), RawToken::Variable(tag) if tag.slice(context.source) == "it" => { hir::Expression::it_variable(tag, token_span) } RawToken::ExternalCommand(tag) => { hir::Expression::external_command(tag, token_span) } RawToken::ExternalWord => { return Err(ShellError::invalid_external_word(Tag { span: token_span, anchor: None, })) } RawToken::Variable(tag) => hir::Expression::variable(tag, token_span), RawToken::Number(number) => { hir::Expression::number(number.to_number(context.source), token_span) } RawToken::Bare => hir::Expression::bare(token_span), RawToken::String(tag) => hir::Expression::string(tag, token_span), }) }) } } impl FallibleColorSyntax for NumberShape { type Info = (); type Input = (); fn color_syntax<'a, 'b>( &self, _input: &(), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, shapes: &mut Vec<Spanned<FlatShape>>, ) -> Result<(), ShellError> { let atom = token_nodes.spanned(|token_nodes| { expand_atom(token_nodes, "number", context, ExpansionRule::permissive()) }); let atom = match atom { Spanned { item: Err(_), span } => { shapes.push(FlatShape::Error.spanned(span)); return Ok(()); } Spanned { item: Ok(atom), .. 
} => atom, }; atom.color_tokens(shapes); Ok(()) } } #[derive(Debug, Copy, Clone)] pub struct IntShape; impl ExpandExpression for IntShape { fn expand_expr<'a, 'b>( &self, token_nodes: &mut TokensIterator<'_>, context: &ExpandContext, ) -> Result<hir::Expression, ShellError> { parse_single_node(token_nodes, "Integer", |token, token_span, err| { Ok(match token { RawToken::GlobPattern | RawToken::Operator(..) => return Err(err.error()), RawToken::ExternalWord => { return Err(ShellError::invalid_external_word(token_span)) } RawToken::Variable(span) if span.slice(context.source) == "it" => { hir::Expression::it_variable(span, token_span) } RawToken::ExternalCommand(span) => { hir::Expression::external_command(span, token_span) } RawToken::Variable(span) => hir::Expression::variable(span, token_span), RawToken::Number(number @ RawNumber::Int(_)) => { hir::Expression::number(number.to_number(context.source), token_span) } RawToken::Number(_) => return Err(err.error()), RawToken::Bare => hir::Expression::bare(token_span), RawToken::String(span) => hir::Expression::string(span, token_span), }) }) } } impl FallibleColorSyntax for IntShape { type Info = (); type Input = (); fn color_syntax<'a, 'b>( &self, _input: &(), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, shapes: &mut Vec<Spanned<FlatShape>>, ) -> Result<(), ShellError> { let atom = token_nodes.spanned(|token_nodes| { expand_atom(token_nodes, "integer", context, ExpansionRule::permissive()) }); let atom = match atom { Spanned { item: Err(_), span } => { shapes.push(FlatShape::Error.spanned(span)); return Ok(()); } Spanned { item: Ok(atom), .. } => atom, }; atom.color_tokens(shapes); Ok(()) } }
use crate::parser::hir::syntax_shape::{ expand_atom, parse_single_node, ExpandContext, ExpandExpression, ExpansionRule, FallibleColorSyntax, FlatShape, }; use crate::parser::{ hir, hir::{RawNumber, TokensIterator}, RawToken, }; use crate::prelude::*; #[derive(Debug, Copy, Clone)] pub struct NumberShape; impl ExpandExpression for NumberShape { fn expand_expr<'a, 'b>( &self, token_nodes: &mut TokensIterator<'_>, context: &ExpandContext, ) -> Result<hir::Expression, ShellError> { parse_single_node(token_nodes, "Number", |token, token_span, err| { Ok(match token { RawToken::GlobPattern | RawToken::Operator(..) => return Err(err.error()), RawToken::Variable(tag) if tag.slice(context.source) == "it" => { hir::Expression::it_variable(tag, token_span) } RawToken::ExternalCommand(tag) => { hir::Expression::external_command(tag, token_span) } RawToken::ExternalWord => { return Err(ShellError::invalid_external_word(Tag { span: token_span, anchor: None, })) } RawToken::Variable(tag) => hir::Expression::variable(tag, token_span), RawToken::Number(number) => { hir::Expression::number(number.to_number(context.source), token_span) } RawToken::Bare => hir::Expression::bare(token_span), RawToken::String(tag) => hir::Expression::string(tag, token_span), }) }) } } impl FallibleColorSyntax for NumberShape { type Info = (); type Input = (); fn color_syntax<'a, 'b>( &self, _input: &(), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, shapes: &mut Vec<Spanned<FlatShape>>, ) -> Result<(), ShellError> { let atom = token_nodes.spanned(|token_nodes| { expand_atom(token_nodes, "number", context, ExpansionRule::permissive()) }); let atom = match atom { Spanned { item: Err(_), span } => { shapes.push(FlatShape::Error.spanned(span)); return Ok(()); } Spanned { item: Ok(atom), .. 
} => atom, }; atom.color_tokens(shapes); Ok(()) } } #[derive(Debug, Copy, Clone)] pub struct IntShape; impl ExpandExpression for IntShape { fn expand_expr<'a, 'b>( &self, token_nodes: &mut TokensIterator<'_>, context: &ExpandContext, ) -> Result<hir::Expression, ShellError> { parse_single_node(token_nodes, "Integer", |token, token_span, err| { Ok(match token { RawToken::GlobPattern | RawToken::Operator(..) => return Err(err.error()), RawToken::ExternalWord => { return Err(ShellError::invalid_external_word(token_span)) } RawToken::Variable(span) if span.slice(context.source) == "it" => { hir::Expression::it_variable(span, token_span) } RawToken::ExternalCommand(span) => { hir::Expression::external_command(span, token_span) } RawToken::Variable(span) => hir::Expression::variable(span, token_span), RawToken::Number(number @ RawNumber::Int(_)) => { hir::Expression::number(number.to_number(
) => hir::Expression::string(span, token_span), }) }) } } impl FallibleColorSyntax for IntShape { type Info = (); type Input = (); fn color_syntax<'a, 'b>( &self, _input: &(), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, shapes: &mut Vec<Spanned<FlatShape>>, ) -> Result<(), ShellError> { let atom = token_nodes.spanned(|token_nodes| { expand_atom(token_nodes, "integer", context, ExpansionRule::permissive()) }); let atom = match atom { Spanned { item: Err(_), span } => { shapes.push(FlatShape::Error.spanned(span)); return Ok(()); } Spanned { item: Ok(atom), .. } => atom, }; atom.color_tokens(shapes); Ok(()) } }
context.source), token_span) } RawToken::Number(_) => return Err(err.error()), RawToken::Bare => hir::Expression::bare(token_span), RawToken::String(span
function_block-random_span
[]
Rust
src/writable.rs
engiwengi/speedy
1489137636c850461529e4b34c56b07818eebab7
use std::io::{ self, Write }; use std::fs::File; use std::path::Path; use crate::writer::Writer; use crate::context::{Context, DefaultContext}; use crate::endianness::Endianness; use crate::Error; use crate::error::{ error_end_of_output_buffer, error_output_buffer_is_too_small }; struct BufferCollector< 'a, C: Context > { context: C, buffer: &'a mut [u8], position: usize } impl< 'a, C: Context > Writer< C > for BufferCollector< 'a, C > { #[inline] fn write_bytes( &mut self, slice: &[u8] ) -> Result< (), C::Error > { let buffer = self.buffer.get_mut( self.position..self.position + slice.len() ).ok_or_else( error_end_of_output_buffer )?; buffer.copy_from_slice( slice ); self.position += slice.len(); Ok(()) } #[inline] fn context( &self ) -> &C { &self.context } #[inline] fn context_mut( &mut self ) -> &mut C { &mut self.context } #[inline(always)] fn can_write_at_least( &self, size: usize ) -> Option< bool > { Some( self.buffer.get( self.position..self.position + size ).is_some() ) } } struct WritingCollector< C: Context, T: Write > { context: C, writer: T } impl< C: Context, T: Write > Writer< C > for WritingCollector< C, T > { #[inline] fn write_bytes( &mut self, slice: &[u8] ) -> Result< (), C::Error > { self.writer.write_all( slice ).map_err( |error| { let error = Error::from_io_error( error ); <C::Error as From< Error >>::from( error ) }) } #[inline] fn context( &self ) -> &C { &self.context } #[inline] fn context_mut( &mut self ) -> &mut C { &mut self.context } } struct SizeCalculatorCollector { size: usize } impl< C: Context > Writer< C > for SizeCalculatorCollector { #[inline] fn write_bytes( &mut self, slice: &[u8] ) -> Result< (), C::Error > { self.size += slice.len(); Ok(()) } #[inline] fn write_u8( &mut self, _: u8 ) -> Result< (), C::Error > { self.size += 1; Ok(()) } #[inline] fn write_u16( &mut self, _: u16 ) -> Result< (), C::Error > { self.size += 2; Ok(()) } #[inline] fn write_u32( &mut self, _: u32 ) -> Result< (), C::Error > { self.size += 4; 
Ok(()) } #[inline] fn write_u64( &mut self, _: u64 ) -> Result< (), C::Error > { self.size += 8; Ok(()) } #[inline] fn endianness( &self ) -> Endianness { Endianness::NATIVE } #[inline] fn context( &self ) -> &C { panic!(); } #[inline] fn context_mut( &mut self ) -> &mut C { panic!(); } } pub trait Writable< C: Context > { fn write_to< T: ?Sized + Writer< C > >( &self, writer: &mut T ) -> Result< (), C::Error >; #[inline] fn write_to_buffer( &self, buffer: &mut [u8] ) -> Result< (), C::Error > where Self: DefaultContext< Context = C >, C: Default { self.write_to_buffer_with_ctx( Default::default(), buffer ) } #[inline] fn write_to_vec( &self) -> Result< Vec< u8 >, C::Error > where Self: DefaultContext< Context = C >, C: Default { self.write_to_vec_with_ctx( Default::default() ) } #[inline] fn write_to_stream< S: Write >( &self, stream: S ) -> Result< (), C::Error > where Self: DefaultContext< Context = C >, C: Default { self.write_to_stream_with_ctx( Default::default(), stream ) } #[inline] fn write_to_file( &self, path: impl AsRef< Path > ) -> Result< (), C::Error > where Self: DefaultContext< Context = C >, C: Default { self.write_to_file_with_ctx( Default::default(), path ) } #[inline] fn write_to_buffer_with_ctx( &self, context: C, buffer: &mut [u8] ) -> Result< (), C::Error > { let bytes_needed = self.bytes_needed()?; let buffer_length = buffer.len(); let buffer = buffer.get_mut( 0..bytes_needed ).ok_or_else( || error_output_buffer_is_too_small( buffer_length, bytes_needed ) )?; let mut writer = BufferCollector { context, buffer, position: 0 }; self.write_to( &mut writer )?; Ok(()) } #[inline] fn write_to_vec_with_ctx( &self, context: C ) -> Result< Vec< u8 >, C::Error > { let capacity = self.bytes_needed()?; let mut vec = Vec::with_capacity( capacity ); unsafe { vec.set_len( capacity ); } let mut writer = BufferCollector { context, buffer: vec.as_mut_slice(), position: 0 }; self.write_to( &mut writer )?; let position = writer.position; unsafe { vec.set_len( 
position ); } debug_assert_eq!( position, capacity ); Ok( vec ) } #[inline] fn write_to_stream_with_ctx< S: Write >( &self, context: C, stream: S ) -> Result< (), C::Error > { let mut writer = WritingCollector { context, writer: stream }; self.write_to( &mut writer ) } #[inline] fn write_to_file_with_ctx( &self, context: C, path: impl AsRef< Path > ) -> Result< (), C::Error > { let stream = File::create( path ).map_err( |error| { let error = Error::from_io_error( error ); <C::Error as From< Error >>::from( error ) })?; let stream = io::BufWriter::new( stream ); self.write_to_stream_with_ctx( context, stream ) } #[inline] fn bytes_needed( &self ) -> Result< usize, C::Error > { let mut writer = SizeCalculatorCollector { size: 0 }; self.write_to( &mut writer )?; Ok( writer.size ) } #[doc(hidden)] #[inline] fn speedy_is_primitive() -> bool { false } #[doc(hidden)] #[inline] unsafe fn speedy_slice_as_bytes( _: &[Self] ) -> &[u8] where Self: Sized { panic!(); } }
use std::io::{ self, Write }; use std::fs::File; use std::path::Path; use crate::writer::Writer; use crate::context::{Context, DefaultContext}; use crate::endianness::Endianness; use crate::Error; use crate::error::{ error_end_of_output_buffer, error_output_buffer_is_too_small }; struct BufferCollector< 'a, C: Context > { context: C, buffer: &'a mut [u8], position: usize } impl< 'a, C: Context > Writer< C > for BufferCollector< 'a, C > { #[inline] fn write_bytes( &mut self, slice: &[u8] ) -> Result< (), C::Error > { let buffer = self.buffer.get_mut( self.position..self.position + slice.len() ).ok_or_else( error_end_of_output_buffer )?; buffer.copy_from_slice( slice ); self.position += slice.len(); Ok(()) } #[inline] fn context( &self ) -> &C { &self.context } #[inline] fn context_mut( &mut self ) -> &mut C { &mut self.context } #[inline(always)] fn can_write_at_least( &self, size: usize ) -> Option< bool > { Some( self.buffer.get( self.po
()?; let mut vec = Vec::with_capacity( capacity ); unsafe { vec.set_len( capacity ); } let mut writer = BufferCollector { context, buffer: vec.as_mut_slice(), position: 0 }; self.write_to( &mut writer )?; let position = writer.position; unsafe { vec.set_len( position ); } debug_assert_eq!( position, capacity ); Ok( vec ) } #[inline] fn write_to_stream_with_ctx< S: Write >( &self, context: C, stream: S ) -> Result< (), C::Error > { let mut writer = WritingCollector { context, writer: stream }; self.write_to( &mut writer ) } #[inline] fn write_to_file_with_ctx( &self, context: C, path: impl AsRef< Path > ) -> Result< (), C::Error > { let stream = File::create( path ).map_err( |error| { let error = Error::from_io_error( error ); <C::Error as From< Error >>::from( error ) })?; let stream = io::BufWriter::new( stream ); self.write_to_stream_with_ctx( context, stream ) } #[inline] fn bytes_needed( &self ) -> Result< usize, C::Error > { let mut writer = SizeCalculatorCollector { size: 0 }; self.write_to( &mut writer )?; Ok( writer.size ) } #[doc(hidden)] #[inline] fn speedy_is_primitive() -> bool { false } #[doc(hidden)] #[inline] unsafe fn speedy_slice_as_bytes( _: &[Self] ) -> &[u8] where Self: Sized { panic!(); } }
sition..self.position + size ).is_some() ) } } struct WritingCollector< C: Context, T: Write > { context: C, writer: T } impl< C: Context, T: Write > Writer< C > for WritingCollector< C, T > { #[inline] fn write_bytes( &mut self, slice: &[u8] ) -> Result< (), C::Error > { self.writer.write_all( slice ).map_err( |error| { let error = Error::from_io_error( error ); <C::Error as From< Error >>::from( error ) }) } #[inline] fn context( &self ) -> &C { &self.context } #[inline] fn context_mut( &mut self ) -> &mut C { &mut self.context } } struct SizeCalculatorCollector { size: usize } impl< C: Context > Writer< C > for SizeCalculatorCollector { #[inline] fn write_bytes( &mut self, slice: &[u8] ) -> Result< (), C::Error > { self.size += slice.len(); Ok(()) } #[inline] fn write_u8( &mut self, _: u8 ) -> Result< (), C::Error > { self.size += 1; Ok(()) } #[inline] fn write_u16( &mut self, _: u16 ) -> Result< (), C::Error > { self.size += 2; Ok(()) } #[inline] fn write_u32( &mut self, _: u32 ) -> Result< (), C::Error > { self.size += 4; Ok(()) } #[inline] fn write_u64( &mut self, _: u64 ) -> Result< (), C::Error > { self.size += 8; Ok(()) } #[inline] fn endianness( &self ) -> Endianness { Endianness::NATIVE } #[inline] fn context( &self ) -> &C { panic!(); } #[inline] fn context_mut( &mut self ) -> &mut C { panic!(); } } pub trait Writable< C: Context > { fn write_to< T: ?Sized + Writer< C > >( &self, writer: &mut T ) -> Result< (), C::Error >; #[inline] fn write_to_buffer( &self, buffer: &mut [u8] ) -> Result< (), C::Error > where Self: DefaultContext< Context = C >, C: Default { self.write_to_buffer_with_ctx( Default::default(), buffer ) } #[inline] fn write_to_vec( &self) -> Result< Vec< u8 >, C::Error > where Self: DefaultContext< Context = C >, C: Default { self.write_to_vec_with_ctx( Default::default() ) } #[inline] fn write_to_stream< S: Write >( &self, stream: S ) -> Result< (), C::Error > where Self: DefaultContext< Context = C >, C: Default { 
self.write_to_stream_with_ctx( Default::default(), stream ) } #[inline] fn write_to_file( &self, path: impl AsRef< Path > ) -> Result< (), C::Error > where Self: DefaultContext< Context = C >, C: Default { self.write_to_file_with_ctx( Default::default(), path ) } #[inline] fn write_to_buffer_with_ctx( &self, context: C, buffer: &mut [u8] ) -> Result< (), C::Error > { let bytes_needed = self.bytes_needed()?; let buffer_length = buffer.len(); let buffer = buffer.get_mut( 0..bytes_needed ).ok_or_else( || error_output_buffer_is_too_small( buffer_length, bytes_needed ) )?; let mut writer = BufferCollector { context, buffer, position: 0 }; self.write_to( &mut writer )?; Ok(()) } #[inline] fn write_to_vec_with_ctx( &self, context: C ) -> Result< Vec< u8 >, C::Error > { let capacity = self.bytes_needed
random
[ { "content": "#[inline]\n\npub fn write_length_u8< C, W >( length: usize, writer: &mut W ) -> Result< (), C::Error >\n\n where C: Context,\n\n W: ?Sized + Writer< C >\n\n{\n\n if length as u64 > std::u8::MAX as u64 {\n\n return Err( error_out_of_range_length() );\n\n }\n\n\n\n write...
Rust
src/diagnostic.rs
DanSnow/clang-rs
55bb2aa91de51d691cd04571c9c5368f64e5639e
use std::fmt; use std::mem; use std::cmp::{self, Ordering}; use clang_sys::*; use utility; use super::{TranslationUnit}; use super::source::{SourceLocation, SourceRange}; #[derive(Clone, Debug, PartialEq, Eq)] pub enum FixIt<'tu> { Deletion(SourceRange<'tu>), Insertion(SourceLocation<'tu>, String), Replacement(SourceRange<'tu>, String), } #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] #[repr(C)] pub enum Severity { Ignored = 0, Note = 1, Warning = 2, Error = 3, Fatal = 4, } #[derive(Copy, Clone)] pub struct Diagnostic<'tu> { ptr: CXDiagnostic, tu: &'tu TranslationUnit<'tu>, } impl<'tu> Diagnostic<'tu> { #[doc(hidden)] pub fn from_ptr(ptr: CXDiagnostic, tu: &'tu TranslationUnit<'tu>) -> Diagnostic<'tu> { assert!(!ptr.is_null()); Diagnostic { ptr, tu } } pub fn get_severity(&self) -> Severity { unsafe { mem::transmute(clang_getDiagnosticSeverity(self.ptr)) } } pub fn get_text(&self) -> String { unsafe { utility::to_string(clang_getDiagnosticSpelling(self.ptr)) } } pub fn get_location(&self) -> SourceLocation<'tu> { unsafe { SourceLocation::from_raw(clang_getDiagnosticLocation(self.ptr), self.tu) } } pub fn get_ranges(&self) -> Vec<SourceRange<'tu>> { iter!( clang_getDiagnosticNumRanges(self.ptr), clang_getDiagnosticRange(self.ptr), ).map(|r| SourceRange::from_raw(r, self.tu)).collect() } pub fn get_fix_its(&self) -> Vec<FixIt<'tu>> { unsafe { (0..clang_getDiagnosticNumFixIts(self.ptr)).map(|i| { let mut range = mem::uninitialized(); let fixit = clang_getDiagnosticFixIt(self.ptr, i, &mut range); let string = utility::to_string(fixit); let range = SourceRange::from_raw(range, self.tu); if string.is_empty() { FixIt::Deletion(range) } else if range.get_start() == range.get_end() { FixIt::Insertion(range.get_start(), string) } else { FixIt::Replacement(range, string) } }).collect() } } pub fn get_children(&self) -> Vec<Diagnostic> { let ptr = unsafe { clang_getChildDiagnostics(self.ptr) }; iter!( clang_getNumDiagnosticsInSet(ptr), 
clang_getDiagnosticInSet(ptr), ).map(|d| Diagnostic::from_ptr(d, self.tu)).collect() } pub fn formatter(&self) -> DiagnosticFormatter<'tu> { DiagnosticFormatter::new(*self) } } #[doc(hidden)] impl<'tu> cmp::PartialEq for Diagnostic<'tu> { fn eq(&self, other: &Diagnostic<'tu>) -> bool { self.ptr == other.ptr } } impl<'tu> cmp::PartialOrd for Diagnostic<'tu> { fn partial_cmp(&self, other: &Diagnostic<'tu>) -> Option<Ordering> { Some(self.get_severity().cmp(&other.get_severity())) } } impl<'tu> fmt::Debug for Diagnostic<'tu> { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.debug_struct("Diagnostic") .field("location", &self.get_location()) .field("severity", &self.get_severity()) .field("text", &self.get_text()) .finish() } } impl<'tu> fmt::Display for Diagnostic<'tu> { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { write!(formatter, "{}", DiagnosticFormatter::new(*self).format()) } } builder! { builder DiagnosticFormatter: CXDiagnosticDisplayOptions { diagnostic: Diagnostic<'tu>; OPTIONS: pub source_location: CXDiagnostic_DisplaySourceLocation, pub column: CXDiagnostic_DisplayColumn, pub source_ranges: CXDiagnostic_DisplaySourceRanges, pub option: CXDiagnostic_DisplayOption, pub category_id: CXDiagnostic_DisplayCategoryId, pub category_name: CXDiagnostic_DisplayCategoryName, } } impl<'tu> DiagnosticFormatter<'tu> { fn new(diagnostic: Diagnostic<'tu>) -> DiagnosticFormatter<'tu> { let flags = unsafe { clang_defaultDiagnosticDisplayOptions() }; DiagnosticFormatter { diagnostic, flags } } pub fn format(&self) -> String { unsafe { utility::to_string(clang_formatDiagnostic(self.diagnostic.ptr, self.flags)) } } }
use std::fmt; use std::mem; use std::cmp::{self, Ordering}; use clang_sys::*; use utility; use super::{TranslationUnit}; use super::source::{SourceLocation, SourceRange}; #[derive(Clone, Debug, PartialEq, Eq)] pub enum FixIt<'tu> { Deletion(SourceRange<'tu>), Insertion(SourceLocation<'tu>, String), Replacement(SourceRange<'tu>, String), } #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] #[repr(C)] pub enum Severity { Ignored = 0, Note = 1, Warning = 2, Error = 3, Fatal = 4, } #[derive(Copy, Clone)] pub struct Diagnostic<'tu> { ptr: CXDiagnostic, tu: &'tu TranslationUnit<'tu>, } impl<'tu> Diagnostic<'tu> { #[doc(hidden)] pub fn from_ptr(ptr: CXDiagnostic, tu: &'tu TranslationUnit<'tu>) -> Diagnostic<'tu> { assert!(!ptr.is_null()); Diagnostic { ptr, tu } } pub fn get_severity(&self) -> Severity { unsafe { mem::transmute(clang_getDiagnosticSeverity(self.ptr)) } } pub fn get_text(&self) -> String { unsafe { utility::to_string(clang_getDiagnosticSpelling(self.ptr)) } } pub fn get_location(&self) -> SourceLocation<'tu> { unsafe { SourceLocation::from_raw(clang_getDiagnosticLocation(self.ptr), self.tu) } } pub fn get_ranges(&self) -> Ve
pub fn get_fix_its(&self) -> Vec<FixIt<'tu>> { unsafe { (0..clang_getDiagnosticNumFixIts(self.ptr)).map(|i| { let mut range = mem::uninitialized(); let fixit = clang_getDiagnosticFixIt(self.ptr, i, &mut range); let string = utility::to_string(fixit); let range = SourceRange::from_raw(range, self.tu); if string.is_empty() { FixIt::Deletion(range) } else if range.get_start() == range.get_end() { FixIt::Insertion(range.get_start(), string) } else { FixIt::Replacement(range, string) } }).collect() } } pub fn get_children(&self) -> Vec<Diagnostic> { let ptr = unsafe { clang_getChildDiagnostics(self.ptr) }; iter!( clang_getNumDiagnosticsInSet(ptr), clang_getDiagnosticInSet(ptr), ).map(|d| Diagnostic::from_ptr(d, self.tu)).collect() } pub fn formatter(&self) -> DiagnosticFormatter<'tu> { DiagnosticFormatter::new(*self) } } #[doc(hidden)] impl<'tu> cmp::PartialEq for Diagnostic<'tu> { fn eq(&self, other: &Diagnostic<'tu>) -> bool { self.ptr == other.ptr } } impl<'tu> cmp::PartialOrd for Diagnostic<'tu> { fn partial_cmp(&self, other: &Diagnostic<'tu>) -> Option<Ordering> { Some(self.get_severity().cmp(&other.get_severity())) } } impl<'tu> fmt::Debug for Diagnostic<'tu> { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.debug_struct("Diagnostic") .field("location", &self.get_location()) .field("severity", &self.get_severity()) .field("text", &self.get_text()) .finish() } } impl<'tu> fmt::Display for Diagnostic<'tu> { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { write!(formatter, "{}", DiagnosticFormatter::new(*self).format()) } } builder! 
{ builder DiagnosticFormatter: CXDiagnosticDisplayOptions { diagnostic: Diagnostic<'tu>; OPTIONS: pub source_location: CXDiagnostic_DisplaySourceLocation, pub column: CXDiagnostic_DisplayColumn, pub source_ranges: CXDiagnostic_DisplaySourceRanges, pub option: CXDiagnostic_DisplayOption, pub category_id: CXDiagnostic_DisplayCategoryId, pub category_name: CXDiagnostic_DisplayCategoryName, } } impl<'tu> DiagnosticFormatter<'tu> { fn new(diagnostic: Diagnostic<'tu>) -> DiagnosticFormatter<'tu> { let flags = unsafe { clang_defaultDiagnosticDisplayOptions() }; DiagnosticFormatter { diagnostic, flags } } pub fn format(&self) -> String { unsafe { utility::to_string(clang_formatDiagnostic(self.diagnostic.ptr, self.flags)) } } }
c<SourceRange<'tu>> { iter!( clang_getDiagnosticNumRanges(self.ptr), clang_getDiagnosticRange(self.ptr), ).map(|r| SourceRange::from_raw(r, self.tu)).collect() }
function_block-function_prefixed
[ { "content": "pub fn to_string(clang: CXString) -> String {\n\n unsafe {\n\n let c = CStr::from_ptr(clang_getCString(clang));\n\n let rust = c.to_str().expect(\"invalid Rust string\").into();\n\n clang_disposeString(clang);\n\n rust\n\n }\n\n}\n\n\n", "file_path": "src/util...
Rust
dev-utils/src/commands/test_vector_command.rs
crypto-raymond/chain-1
f0b612fc5cadf42308d3377f364f5751cca893a6
use parity_scale_codec::Encode; use serde::Serialize; use chain_core::common::Proof; use chain_core::init::address::{CroAddress, RedeemAddress}; use chain_core::init::coin::Coin; use chain_core::init::network::Network; use chain_core::state::account::{ CouncilNodeMeta, DepositBondTx, NodeMetadata, StakedStateAddress, StakedStateOpAttributes, StakedStateOpWitness, UnbondTx, WithdrawUnbondedTx, }; use chain_core::state::tendermint::TendermintValidatorPubKey; use chain_core::state::validator::NodeJoinRequestTx; use chain_core::tx::data::access::{TxAccess, TxAccessPolicy}; use chain_core::tx::data::address::ExtendedAddr; use chain_core::tx::data::attribute::TxAttributes; use chain_core::tx::data::input::TxoPointer; use chain_core::tx::data::output::TxOut; use chain_core::tx::data::{Tx, TxId}; use chain_core::tx::witness::tree::RawXOnlyPubkey; use chain_core::tx::witness::{TxInWitness, TxWitness}; use chain_core::tx::TransactionId; use chain_core::tx::{PlainTxAux, TxAux, TxPublicAux}; use client_common::key::PrivateKeyAction; use client_common::{MultiSigAddress, PrivateKey, PublicKey, Result, Transaction}; use client_core::service::{HDAccountType, HdKey}; use client_core::HDSeed; use secp256k1::Secp256k1; use secp256k1::{key::XOnlyPublicKey, SecretKey}; use test_common::chain_env::mock_confidential_init; #[derive(Debug)] pub struct TestVectorCommand { network: Network, seed: Vec<u8>, } impl TestVectorCommand { pub fn new(network: String, seed: String) -> Self { let network = if network == "devnet" { Network::Devnet } else if network == "testnet" { Network::Testnet } else if network == "mainnet" { Network::Mainnet } else { unreachable!() }; let seed = hex::decode(&seed).expect("invali seed"); Self { network, seed } } pub fn execute(&self) -> Result<()> { let mut vector_factory = VectorFactory::new(self.network, self.seed.clone()); vector_factory.create_test_vectors() } } #[derive(Debug, Serialize)] struct WithdrawUnboundedVector { from_address: String, to_address: 
String, coin_amount: String, witness: String, plain_tx_aux: String, tx_id: String, view_keys: Vec<String>, } #[derive(Debug, Serialize)] struct TransferVector { to_address: String, return_address: String, transfer_amount: String, return_amount: String, inputs: Vec<String>, outputs: Vec<String>, witness: String, plain_tx_aux: String, tx_id: String, } #[derive(Debug, Serialize)] struct DepositStakeVector { staking_address: String, witness: String, transaction: String, tx_id: String, } #[derive(Debug, Serialize)] struct NodeJoinVector { staking_address: String, tendermint_validator_pubkey: String, witness: String, tx: String, tx_id: String, } #[derive(Debug, Serialize)] struct UnboundedStakeVector { staking_address: String, witness: String, tx: String, tx_id: String, } #[derive(Default, Debug, Serialize)] struct TestVectors { wallet_view_key: Option<String>, withdraw_unbonded_vector: Option<WithdrawUnboundedVector>, transfer_vector: Option<TransferVector>, deposit_stake_vector: Option<DepositStakeVector>, nodejoin_vector: Option<NodeJoinVector>, unbonded_stake_vector: Option<UnboundedStakeVector>, } struct TestVectorWallet { hd_key: HdKey, view_key: (PublicKey, PrivateKey), transfer_addresses: Vec<(ExtendedAddr, PublicKey, PrivateKey)>, staking_address: Option<(StakedStateAddress, PublicKey, PrivateKey)>, } impl TestVectorWallet { pub fn create_keypair( &self, network: Network, account_type: HDAccountType, ) -> (PublicKey, PrivateKey) { let index = match account_type { HDAccountType::Transfer => self.hd_key.transfer_index, HDAccountType::Staking => self.hd_key.staking_index, HDAccountType::Viewkey => self.hd_key.viewkey_index, }; self.hd_key .seed .derive_key_pair(network, account_type.index(), index) .unwrap() } pub fn create_transfer_address( &mut self, network: Network, ) -> Result<(ExtendedAddr, PublicKey, PrivateKey)> { let (pub_key, priv_key) = self.create_keypair(network, HDAccountType::Transfer); self.hd_key.transfer_index += 1; let public_keys = 
vec![pub_key.clone()]; let multi_sig_address = MultiSigAddress::new(public_keys, pub_key.clone(), 1)?; let address_info = (multi_sig_address.into(), pub_key, priv_key); self.transfer_addresses.push(address_info.clone()); Ok(address_info) } pub fn create_staking_address(&mut self, network: Network) { let (pub_key, priv_key) = self.create_keypair(network, HDAccountType::Staking); self.hd_key.staking_index += 1; let addr = StakedStateAddress::from(RedeemAddress::from(&pub_key)); self.staking_address = Some((addr, pub_key, priv_key)); } pub fn gen_proof(public_key: PublicKey) -> Result<Option<Proof<RawXOnlyPubkey>>> { let public_keys = vec![public_key.clone()]; let multi_sig_address = MultiSigAddress::new(public_keys.clone(), public_key, 1)?; multi_sig_address.generate_proof(public_keys) } } pub struct VectorFactory { network: Network, chain_hex_id: u8, wallet: TestVectorWallet, test_vectors: TestVectors, } impl VectorFactory { pub fn new(network: Network, seed: Vec<u8>) -> Self { let hd_seed = HDSeed { bytes: seed }; let hd_key = HdKey { seed: hd_seed, staking_index: 0, transfer_index: 0, viewkey_index: 0, }; let (view_key, priv_key) = hd_key .seed .derive_key_pair(network, HDAccountType::Viewkey.index(), 0) .expect("invalid seed"); let mut wallet = TestVectorWallet { hd_key, view_key: (view_key, priv_key), transfer_addresses: vec![], staking_address: None, }; let _ = wallet.create_transfer_address(network); wallet.create_staking_address(network); let chain_hex_id = match network { Network::Testnet => 0x42, Network::Mainnet => 0x2A, Network::Devnet => 0x0, }; let test_vectors = TestVectors::default(); Self { network, chain_hex_id, wallet, test_vectors, } } pub fn create_withdraw_unbonded_tx(&mut self) -> Result<TxId> { let amount = Coin::from(1000); let view_key = self.wallet.view_key.clone(); let nonce = 0; let (from_addr, _, sign_key) = self.wallet.staking_address.clone().unwrap(); let to_addr = self.wallet.transfer_addresses[0].0.clone(); let output = 
TxOut::new_with_timelock(to_addr.clone(), amount, 0); let attributes = TxAttributes::new_with_access( self.chain_hex_id, vec![TxAccessPolicy::new(view_key.0.into(), TxAccess::AllData)], ); let transaction = WithdrawUnbondedTx::new(nonce, vec![output], attributes); let tx = Transaction::WithdrawUnbondedStakeTransaction(transaction.clone()); let txid = tx.id(); let witness = sign_key.sign(&tx).map(StakedStateOpWitness::new)?; let plain_tx_aux = PlainTxAux::WithdrawUnbondedStakeTx(transaction); let withdraw_unbonded_vector = WithdrawUnboundedVector { to_address: to_addr.to_cro(self.network).unwrap(), from_address: format!("{}", from_addr), coin_amount: format!("{:?}", amount), witness: hex::encode(witness.encode()), plain_tx_aux: hex::encode(plain_tx_aux.encode()), tx_id: hex::encode(txid), view_keys: vec![hex::encode(self.wallet.view_key.0.serialize())], }; self.test_vectors.withdraw_unbonded_vector = Some(withdraw_unbonded_vector); Ok(txid) } pub fn create_transfer_tx(&mut self, withdraw_unbonded_tx_id: TxId) -> Result<()> { let public_key = self.wallet.transfer_addresses[0].1.clone(); let sign_key = self.wallet.transfer_addresses[0].2.clone(); let (return_address, _, _) = self.wallet.create_transfer_address(self.network)?; let (to_address, _, _) = self.wallet.create_transfer_address(self.network)?; let inputs = vec![TxoPointer::new(withdraw_unbonded_tx_id, 0)]; let transfer_amount = Coin::from(100); let return_amount = Coin::from(900); let outputs = vec![ TxOut::new(return_address.clone(), return_amount), TxOut::new(to_address.clone(), transfer_amount), ]; let view_keys = vec![self.wallet.view_key.clone()]; let access_policies = view_keys .iter() .map(|key| TxAccessPolicy { view_key: key.0.clone().into(), access: TxAccess::AllData, }) .collect(); let attributes = TxAttributes::new_with_access(self.chain_hex_id, access_policies); let tx = Tx::new_with(inputs.clone(), outputs.clone(), attributes); let tx_id = tx.id(); let proof = 
TestVectorWallet::gen_proof(public_key)?.unwrap(); let witness: TxWitness = vec![TxInWitness::TreeSig( sign_key.schnorr_sign(&Transaction::TransferTransaction(tx.clone()))?, proof, )] .into(); let plain_tx_aux = PlainTxAux::TransferTx(tx, witness.clone()); let transfer_vector = TransferVector { to_address: to_address.to_cro(self.network).unwrap(), return_address: return_address.to_cro(self.network).unwrap(), transfer_amount: format!("{:?}", transfer_amount), return_amount: format!("{:?}", return_amount), inputs: inputs.iter().map(|i| hex::encode(i.encode())).collect(), outputs: outputs.iter().map(|o| hex::encode(o.encode())).collect(), witness: hex::encode(witness.encode()), plain_tx_aux: hex::encode(plain_tx_aux.encode()), tx_id: hex::encode(tx_id), }; self.test_vectors.transfer_vector = Some(transfer_vector); Ok(()) } fn create_deposit_stake_tx(&mut self, withdraw_unbonded_tx_id: TxId) -> Result<()> { let public_key = self.wallet.transfer_addresses[0].1.clone(); let sign_key = self.wallet.transfer_addresses[0].2.clone(); let utxo = TxoPointer::new(withdraw_unbonded_tx_id, 0); let staking_address = self.wallet.staking_address.clone().unwrap().0; let attributes = StakedStateOpAttributes::new(self.chain_hex_id); let tx = DepositBondTx::new(vec![utxo], staking_address, attributes); let proof = TestVectorWallet::gen_proof(public_key)?.unwrap(); let witness: TxWitness = vec![TxInWitness::TreeSig( sign_key.schnorr_sign(&Transaction::DepositStakeTransaction(tx.clone()))?, proof, )] .into(); let tx_id = tx.id(); let deposit_vector = DepositStakeVector { staking_address: format!("{}", staking_address), witness: hex::encode(witness.encode()), transaction: hex::encode(tx.encode()), tx_id: hex::encode(tx_id), }; self.test_vectors.deposit_stake_vector = Some(deposit_vector); Ok(()) } fn create_nodejoin_tx(&mut self) -> Result<()> { let (staking_address, _, sign_key) = self.wallet.staking_address.clone().unwrap(); let pk = 
hex::decode("d75a980182b10ab7d54bfed3c964073a0ee172f3daa62325af021a68f707511a") .unwrap(); let mut pkl = [0u8; 32]; pkl.copy_from_slice(&pk); let tendermint_validator_pubkey = TendermintValidatorPubKey::Ed25519(pkl); let tx = NodeJoinRequestTx::new( 1, staking_address, StakedStateOpAttributes::new(self.chain_hex_id), NodeMetadata::CouncilNode(CouncilNodeMeta::new_with_details( "example".to_string(), Some("security@example.com".to_string()), tendermint_validator_pubkey.clone(), mock_confidential_init(), )), ); let txid = tx.id(); let witness = sign_key .sign(&Transaction::NodejoinTransaction(tx.clone())) .map(StakedStateOpWitness::new)?; let nodejoin_tx = TxAux::PublicTx(TxPublicAux::NodeJoinTx(tx, witness.clone())); let nodejoin_vector = NodeJoinVector { staking_address: format!("{}", staking_address), tendermint_validator_pubkey: hex::encode(tendermint_validator_pubkey.encode()), witness: hex::encode(witness.encode()), tx: hex::encode(nodejoin_tx.encode()), tx_id: hex::encode(&txid), }; self.test_vectors.nodejoin_vector = Some(nodejoin_vector); Ok(()) } fn create_unbonded_stake_tx(&mut self) -> Result<()> { let (staking_address, _, sign_key) = self.wallet.staking_address.clone().unwrap(); let tx = UnbondTx::new( staking_address, 0, Coin::from(1000), StakedStateOpAttributes::new(self.chain_hex_id), ); let txid = tx.id(); let transaction = Transaction::UnbondStakeTransaction(tx.clone()); let witness = sign_key.sign(&transaction).map(StakedStateOpWitness::new)?; let unbond_tx = TxAux::PublicTx(TxPublicAux::UnbondStakeTx(tx, witness.clone())); let unbonded_stake_vector = UnboundedStakeVector { staking_address: format!("{}", staking_address), witness: hex::encode(witness.encode()), tx: hex::encode(unbond_tx.encode()), tx_id: hex::encode(&txid), }; self.test_vectors.unbonded_stake_vector = Some(unbonded_stake_vector); Ok(()) } pub fn create_test_vectors(&mut self) -> Result<()> { self.test_vectors.wallet_view_key = Some(hex::encode(self.wallet.view_key.0.serialize())); 
let tx_id = self.create_withdraw_unbonded_tx().unwrap(); self.create_transfer_tx(tx_id)?; self.create_deposit_stake_tx(tx_id)?; self.create_nodejoin_tx()?; self.create_unbonded_stake_tx()?; println!( "view secret key: {}", hex::encode(self.wallet.view_key.1.serialize()) ); if let Some((ref address, ref public, ref secret)) = self.wallet.staking_address { println!("staking address: {:?}", address); println!("secret: {}", hex::encode(secret.serialize())); println!("public key: {}", hex::encode(public.serialize())); } for (address, public, secret) in self.wallet.transfer_addresses.iter() { println!("transfer address"); println!("mainnet: {}", address.to_cro(Network::Mainnet).unwrap()); println!( "public testnet: {}", address.to_cro(Network::Testnet).unwrap() ); let xonly = XOnlyPublicKey::from_secret_key(&Secp256k1::new(), &SecretKey::from(secret)); println!("secret: {}", hex::encode(secret.serialize())); println!("public key: {}", hex::encode(public.serialize())); println!("X only public key: {}", hex::encode(&xonly.serialize())); } println!( "{}", serde_json::to_string_pretty(&self.test_vectors).unwrap() ); Ok(()) } } #[cfg(test)] mod tests { use super::*; #[test] fn test_vectors() { let seed = hex::decode("9ee5468093cf78ce008ace0b676b606d94548f8eac79e727e3cb0500ae739facca7bb5ee1f3dd698bc6fcd044117905d42d90fadf324c6187e1faba7e662410f").unwrap(); println!("seed: {:?}", hex::encode(seed.clone())); let mut work_flow = VectorFactory::new(Network::Devnet, seed); assert!(work_flow.create_test_vectors().is_ok()); } }
use parity_scale_codec::Encode; use serde::Serialize; use chain_core::common::Proof; use chain_core::init::address::{CroAddress, RedeemAddress}; use chain_core::init::coin::Coin; use chain_core::init::network::Network; use chain_core::state::account::{ CouncilNodeMeta, DepositBondTx, NodeMetadata, StakedStateAddress, StakedStateOpAttributes, StakedStateOpWitness, UnbondTx, WithdrawUnbondedTx, }; use chain_core::state::tendermint::TendermintValidatorPubKey; use chain_core::state::validator::NodeJoinRequestTx; use chain_core::tx::data::access::{TxAccess, TxAccessPolicy}; use chain_core::tx::data::address::ExtendedAddr; use chain_core::tx::data::attribute::TxAttributes; use chain_core::tx::data::input::TxoPointer; use chain_core::tx::data::output::TxOut; use chain_core::tx::data::{Tx, TxId}; use chain_core::tx::witness::tree::RawXOnlyPubkey; use chain_core::tx::witness::{TxInWitness, TxWitness}; use chain_core::tx::TransactionId; use chain_core::tx::{PlainTxAux, TxAux, TxPublicAux}; use client_common::key::PrivateKeyAction; use client_common::{MultiSigAddress, PrivateKey, PublicKey, Result, Transaction}; use client_core::service::{HDAccountType, HdKey}; use client_core::HDSeed; use secp256k1::Secp256k1; use secp256k1::{key::XOnlyPublicKey, SecretKey}; use test_common::chain_env::mock_confidential_init; #[derive(Debug)] pub struct TestVectorCommand { network: Network, seed: Vec<u8>, } impl TestVectorCommand { pub fn new(network: String, seed: String) -> Self { let network = if network == "devnet" { Network::Devnet } else if network == "testnet" { Network::Testnet } else if network == "mainnet" { Network::Mainnet } else { unreachable!() }; let seed = hex::decode(&seed).expect("invali seed"); Self { network, seed } } pub fn execute(&self) -> Result<()> { let mut vector_factory = VectorFactory::new(self.network, self.seed.clone()); vector_factory.create_test_vectors() } } #[derive(Debug, Serialize)] struct WithdrawUnboundedVector { from_address: String, to_address: 
String, coin_amount: String, witness: String, plain_tx_aux: String, tx_id: String, view_keys: Vec<String>, } #[derive(Debug, Serialize)] struct TransferVector { to_address: String, return_address: String, transfer_amount: String, return_amount: String, inputs: Vec<String>, outputs: Vec<String>, witness: String, plain_tx_aux: String, tx_id: String, } #[derive(Debug, Serialize)] struct DepositStakeVector { staking_address: String, witness: String, transaction: String, tx_id: String, } #[derive(Debug, Serialize)] struct NodeJoinVector { staking_address: String, tendermint_validator_pubkey: String, witness: String, tx: String, tx_id: String, } #[derive(Debug, Serialize)] struct UnboundedStakeVector { staking_address: String, witness: String, tx: String, tx_id: String, } #[derive(Default, Debug, Serialize)] struct TestVectors { wallet_view_key: Option<String>, withdraw_unbonded_vector: Option<WithdrawUnboundedVector>, transfer_vector: Option<TransferVector>, deposit_stake_vector: Option<DepositStakeVector>, nodejoin_vector: Option<NodeJoinVector>, unbonded_stake_vector: Option<UnboundedStakeVector>, } struct TestVectorWallet { hd_key: HdKey, view_key: (PublicKey, PrivateKey), transfer_addresses: Vec<(ExtendedAddr, PublicKey, PrivateKey)>, staking_address: Option<(StakedStateAddress, PublicKey, PrivateKey)>, } impl TestVectorWallet { pub fn create_keypair( &self, network: Network, account_type: HDAccountType, ) -> (PublicKey, PrivateKey) { let index = match account_type { HDAccountType::Transfer => self.hd_key.transfer_index, HDAccountType::Staking => self.hd_key.staking_index, HDAccountType::Viewkey => self.hd_key.viewkey_index, }; self.hd_key .seed .derive_key_pair(network, account_type.index(), index) .unwrap() } pub fn create_transfer_address( &mut self, network: Network, ) -> Result<(ExtendedAddr, PublicKey, PrivateKey)> { let (pub_key, priv_key) = self.create_keypair(network, HDAccountType::Transfer); self.hd_key.transfer_index += 1; let public_keys = 
vec![pub_key.clone()]; let multi_sig_address = MultiSigAddress::new(public_keys, pub_key.clone(), 1)?; let address_info = (multi_sig_address.into(), pub_key, priv_key); self.transfer_addresses.push(address_info.clone()); Ok(address_info) } pub fn create_staking_address(&mut self, network: Network) { let (pub_key, priv_key) = self.create_keypair(network, HDAccountType::Staking); self.hd_key.staking_index += 1; let addr = StakedStateAddress::from(RedeemAddress::from(&pub_key)); self.staking_address = Some((addr, pub_key, priv_key)); } pub fn gen_proof(public_key: PublicKey) -> Result<Option<Proof<RawXOnlyPubkey>>> { let public_keys = vec![public_key.clone()]; let multi_sig_address = MultiSigAddress::new(public_keys.clone(), public_key, 1)?; multi_sig_address.generate_proof(public_keys) } } pub struct VectorFactory { network: Network, chain_hex_id: u8, wallet: TestVectorWallet, test_vectors: TestVectors, } impl VectorFactory { pub fn new(network: Network, seed: Vec<u8>) -> Self { let hd_seed = HDSeed { bytes: seed }; let hd_key = HdKey { seed: hd_seed, staking_index: 0, transfer_index: 0, viewkey_index: 0, }; let (view_key, priv_key) = hd_key .seed .derive_key_pair(network, HDAccountType::Viewkey.index(), 0) .expect("invalid seed"); let mut wallet = TestVectorWallet { hd_key, view_key: (view_key, priv_key), transfer_addresses: vec![], staking_address: None, }; let _ = wallet.create_transfer_address(network); wallet.create_staking_address(network); let chain_hex_id = match network { Network::Testnet => 0x42, Network::Mainnet => 0x2A, Network::Devnet => 0x0, }; let test_vectors = TestVectors::default(); Self { network, chain_hex_id, wallet, test_vectors, } } pub fn create_withdraw_unbonded_tx(&mut self) -> Result<TxId> { let amount = Coin::from(1000); let view_key = self.wallet.view_key.clone(); let nonce = 0; let (from_addr, _, sign_key) = self.wallet.staking_address.clone().unwrap(); let to_addr = self.wallet.transfer_addresses[0].0.clone(); let output = 
TxOut::new_with_timelock(to_addr.clone(), amount, 0); let attributes = TxAttributes::new_with_access( self.chain_hex_id, vec![TxAccessPolicy::new(view_key.0.into(), TxAccess::AllData)], ); let transaction = WithdrawUnbondedTx::new(nonce, vec![output], attributes); let tx = Transaction::WithdrawUnbondedStakeTransaction(transaction.clone()); let txid = tx.id(); let witness = sign_key.sign(&tx).map(StakedStateOpWitness::new)?; let plain_tx_aux = PlainTxAux::WithdrawUnbondedStakeTx(transaction); let withdraw_unbonded_vector = WithdrawUnboundedVector { to_address: to_addr.to_cro(self.network).unwrap(), from_address: format!("{}", from_addr), coin_amount: format!("{:?}", amount), witness: hex::encode(witness.encode()), plain_tx_aux: hex::encode(plain_tx_aux.encode()), tx_id: hex::encode(txid), view_keys: vec![hex::encode(self.wallet.view_key.0.serialize())], }; self.test_vectors.withdraw_unbonded_vector = Some(withdraw_unbonded_vector); Ok(txid) } pub fn create_transfer_tx(&mut self, withdraw_unbonded_tx_id: TxId) -> Result<()> { let public_key = self.wallet.transfer_addresses[0].1.clone(); let sign_key = self.wallet.transfer_addresses[0].2.clone(); let (return_address, _, _) = self.wallet.create_transfer_address(self.network)?; let (to_address, _, _) = self.wallet.create_transfer_address(self.network)?; let inputs = vec![TxoPointer::new(withdraw_unbonded_tx_id, 0)]; let transfer_amount = Coin::from(100); let return_amount = Coin::from(900); let outputs = vec![ TxOut::new(return_address.clone(), return_amount), TxOut::new(to_address.clone(), transfer_amount), ]; let view_keys = vec![self.wallet.view_key.clone()]; let access_policies = view_keys .iter() .map(|key| TxAccessPolicy { view_key: key.0.clone().into(), access: TxAccess::AllData, }) .collect(); let attributes = TxAttributes::new_with_access(self.chain_hex_id, access_policies); let tx = Tx::new_with(inputs.clone(), outputs.clone(), attributes); let tx_id = tx.id(); let proof = 
TestVectorWallet::gen_proof(public_key)?.unwrap(); let witness: TxWitness = vec![TxInWitness::TreeSig( sign_key.schnorr_sign(&Transaction::TransferTransaction(tx.clone()))?, proof, )] .into(); let plain_tx_aux = PlainTxAux::TransferTx(tx, witness.clone()); let transfer_vector = TransferVector { to_address: to_address.to_cro(self.network).unwrap(), return_address: return_address.to_cro(self.network).unwrap(), transfer_amount: format!("{:?}", transfer_amount), return_amount: format!("{:?}", return_amount), inputs: inputs.iter().map(|i| hex::encode(i.encode())).collect(), outputs: outputs.iter().map(|o| hex::encode(o.encode())).collect(), witness: hex::encode(witness.encode()), plain_tx_aux: hex::encode(plain_tx_aux.encode()), tx_id: hex::encode(tx_id), }; self.test_vectors.transfer_vector = Some(transfer_vector); Ok(()) } fn create_deposit_stake_tx(&mut self, withdraw_unbonded_tx_id: TxId) -> Result<()> { let public_key = self.wallet.transfer_addresses[0].1.clone(); let sign_key = self.wallet.transfer_addresses[0].2.clone(); let utxo = TxoPointer::new(withdraw_unbonded_tx_id, 0); let staking_address = self.wallet.staking_address.clone().unwrap().0; let attributes = StakedStateOpAttributes::new(self.chain_hex_id); let tx = DepositBondTx::new(vec![utxo], staking_address, attributes); let proof = TestVectorWallet::gen_proof(public_key)?.unwrap(); let witness: TxWitness = vec![TxInWitness::TreeSig( sign_key.schnorr_sign(&Transaction::DepositStakeTransaction(tx.clone()))?, proof, )] .into(); let tx_id = tx.id(); let deposit_vector = DepositStakeVector { staking_address: format!("{}", staking_address), witness: hex::encode(witness.encode()), transaction: hex::encode(tx.encode()), tx_id: hex::encode(tx_id), }; self.test_vectors.deposit_stake_vector = Some(deposit_vector); Ok(()) } fn create_nodejoin_tx(&mut self) -> Result<()> { let (staking_address, _, sign_key) = self.wallet.staking_address.clone().unwrap(); let pk = 
hex::decode("d75a980182b10ab7d54bfed3c964073a0ee172f3daa62325af021a68f707511a") .unwrap(); let mut pkl = [0u8; 32]; pkl.copy_from_slice(&pk); let tendermint_validator_pubkey = TendermintValidatorPubKey::Ed25519(pkl); let tx = NodeJoinRequestTx::new( 1, staking_address, StakedStateOpAttributes::new(self.chain_hex_id), NodeMetadata::CouncilNode(CouncilNodeMeta::new_with_details( "example".to_string(), Some("security@example.com".to_string()), tendermint_validator_pubkey.clone(), mock_confidential_init(), )), ); let txid = tx.id(); let witness = sign_key .sign(&Transaction::NodejoinTransaction(tx.clone())) .map(StakedStateOpWitness::new)?; let nodejoin_tx = TxAux::PublicTx(TxPublicAux::NodeJoinTx(tx, witness.clone())); let nodejoin_vector = NodeJoinVector { staking_address: format!("{}", staking_address), tendermint_validator_pubkey: hex::encode(tendermint_validator_pubkey.encode()), witness: hex::encode(witness.encode()), tx: hex::encode(nodejoin_tx.encode()), tx_id: hex::encode(&txid), }; self.test_vectors.nodejoin_vector = Some(nodejoin_vector); Ok(()) } fn create_unbonded_stake_tx(&mut self) -> Result<()> { let (staking_address, _, sign_key) = self.wallet.staking_address.clone().unwrap(); let tx = UnbondTx::new( staking_address, 0, Coin::from(1000), StakedStateOpAttributes::new(self.chain_hex_id), ); let txid = tx.id(); let transaction = Transaction::UnbondStakeTransaction(tx.clone()); let witness = sign_key.sign(&transaction).map(StakedStateOpWitness::new)?; let unbond_tx = TxAux::PublicTx(TxPublicAux::UnbondStakeTx(tx, witness.clone())); let unbonded_stake_vector = UnboundedStakeVector { staking_address: format!("{}", staking_address), witness: hex::encode(witness.encode()), tx: hex::encode(unbond_tx.encode()), tx_id: hex::encode(&txid), }; self.test_vectors.unbonded_stake_vector = Some(unbonded_stake_vector); Ok(()) }
} #[cfg(test)] mod tests { use super::*; #[test] fn test_vectors() { let seed = hex::decode("9ee5468093cf78ce008ace0b676b606d94548f8eac79e727e3cb0500ae739facca7bb5ee1f3dd698bc6fcd044117905d42d90fadf324c6187e1faba7e662410f").unwrap(); println!("seed: {:?}", hex::encode(seed.clone())); let mut work_flow = VectorFactory::new(Network::Devnet, seed); assert!(work_flow.create_test_vectors().is_ok()); } }
pub fn create_test_vectors(&mut self) -> Result<()> { self.test_vectors.wallet_view_key = Some(hex::encode(self.wallet.view_key.0.serialize())); let tx_id = self.create_withdraw_unbonded_tx().unwrap(); self.create_transfer_tx(tx_id)?; self.create_deposit_stake_tx(tx_id)?; self.create_nodejoin_tx()?; self.create_unbonded_stake_tx()?; println!( "view secret key: {}", hex::encode(self.wallet.view_key.1.serialize()) ); if let Some((ref address, ref public, ref secret)) = self.wallet.staking_address { println!("staking address: {:?}", address); println!("secret: {}", hex::encode(secret.serialize())); println!("public key: {}", hex::encode(public.serialize())); } for (address, public, secret) in self.wallet.transfer_addresses.iter() { println!("transfer address"); println!("mainnet: {}", address.to_cro(Network::Mainnet).unwrap()); println!( "public testnet: {}", address.to_cro(Network::Testnet).unwrap() ); let xonly = XOnlyPublicKey::from_secret_key(&Secp256k1::new(), &SecretKey::from(secret)); println!("secret: {}", hex::encode(secret.serialize())); println!("public key: {}", hex::encode(public.serialize())); println!("X only public key: {}", hex::encode(&xonly.serialize())); } println!( "{}", serde_json::to_string_pretty(&self.test_vectors).unwrap() ); Ok(()) }
function_block-full_function
[ { "content": "pub fn store_tx_witness(db: &mut impl StoreKV, txid: &TxId, witness_payload: &[u8]) {\n\n insert_item(db, LookupItem::TxWitness, *txid, witness_payload.to_vec());\n\n}\n\n\n", "file_path": "chain-storage/src/api.rs", "rank": 0, "score": 481254.94713891984 }, { "content": "#[...
Rust
spectrum_primitives/src/prg/group.rs
znewman01/spectrum-impl
389b463afa6463bc4a5de6884157730e9cf0b59e
use super::*; use crate::util::Sampleable; use crate::{ algebra::{Group, Monoid, SpecialExponentMonoid}, Bytes, }; use itertools::Itertools; use serde::{Deserialize, Serialize}; use std::convert::TryFrom; use std::fmt::Debug; use std::hash::Hash; use std::iter::repeat; use std::ops::{Add, BitXor, BitXorAssign}; #[cfg(any(test, feature = "testing"))] use proptest::{collection::SizeRange, prelude::*}; #[cfg(any(test, feature = "testing"))] use proptest_derive::Arbitrary; #[derive(Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)] pub struct ElementVector<G>(pub Vec<G>); impl<G: Group> ElementVector<G> { pub fn new(inner: Vec<G>) -> Self { ElementVector(inner) } pub fn len(&self) -> usize { self.0.len() } pub fn is_empty(&self) -> bool { self.0.is_empty() } } impl<G> Add for ElementVector<G> where G: Add<Output = G>, { type Output = Self; fn add(self, rhs: Self) -> Self::Output { let inner = Iterator::zip(self.0.into_iter(), rhs.0.into_iter()) .map(|(x, y)| x + y) .collect(); Self(inner) } } impl<G: Monoid> Monoid for ElementVector<G> { fn zero() -> Self { panic!("not enough information (don't know the right length)"); } } impl<G> SpecialExponentMonoid for ElementVector<G> where G: SpecialExponentMonoid, G::Exponent: Clone, { type Exponent = G::Exponent; fn pow(&self, exp: Self::Exponent) -> Self { Self(self.0.iter().map(|x| x.pow(exp.clone())).collect()) } } #[cfg(any(test, feature = "testing"))] impl<G> Arbitrary for ElementVector<G> where G: Debug + Arbitrary + Group + 'static, { type Parameters = Option<usize>; type Strategy = BoxedStrategy<Self>; fn arbitrary_with(size: Self::Parameters) -> Self::Strategy { let range = size .map(SizeRange::from) .unwrap_or_else(|| SizeRange::from(1..5)); prop::collection::vec( any::<G>().prop_filter("nonzero", |g| g != &G::zero()), range, ) .prop_map(ElementVector::new) .boxed() } } impl<G> ElementVector<G> where G: Group + Into<Vec<u8>>, { pub fn hash_all(self) -> Vec<u8> { let mut hasher = blake3::Hasher::new(); for 
element in self.0 { let chunk: Vec<u8> = element.into(); hasher.update(&chunk); } let data: [u8; 32] = hasher.finalize().into(); data.to_vec() } } #[cfg_attr(any(test, feature = "testing"), derive(Arbitrary))] #[derive(Clone, PartialEq, Debug, Serialize, Deserialize)] pub struct GroupPrg<G: Group + 'static> { generators: ElementVector<G>, } impl<G: Group> GroupPrg<G> { fn len(&self) -> usize { self.generators.0.len() } } impl<G> GroupPrg<G> where G: Group + Sampleable, { pub fn new(generators: ElementVector<G>) -> Self { GroupPrg { generators } } pub fn from_seed(num_elements: usize, seed: <G as Sampleable>::Seed) -> Self { let elements = G::sample_many_from_seed(&seed, num_elements); GroupPrg::new(ElementVector(elements)) } pub fn random(num_elements: usize) -> Self { use std::iter::repeat_with; let elements = repeat_with(G::sample).take(num_elements).collect(); GroupPrg::new(ElementVector(elements)) } } impl<G> Prg for GroupPrg<G> where G: Group + SpecialExponentMonoid + Clone, G::Exponent: Sampleable + Clone, { type Seed = G::Exponent; type Output = ElementVector<G>; fn new_seed() -> Self::Seed { Self::Seed::sample() } fn eval(&self, seed: &Self::Seed) -> Self::Output { ElementVector( self.generators .0 .iter() .cloned() .map(|g| g.pow(seed.clone())) .collect(), ) } fn null_output(&self) -> Self::Output { ElementVector(repeat(G::zero()).take(self.len()).collect()) } fn output_size(&self) -> usize { self.generators.len() } } impl<G> SeedHomomorphicPrg for GroupPrg<G> where G: Group + SpecialExponentMonoid + Clone, G::Exponent: Sampleable + Monoid + Clone, { fn null_seed() -> Self::Seed { <G as SpecialExponentMonoid>::Exponent::zero() } fn combine_seeds(&self, seeds: Vec<Self::Seed>) -> Self::Seed { seeds .into_iter() .fold(Self::null_seed(), std::ops::Add::add) } fn combine_outputs(&self, outputs: &[&ElementVector<G>]) -> ElementVector<G> { let mut combined = self.null_output(); for output in outputs { for (acc, val) in combined.0.iter_mut().zip(output.0.iter()) 
{ *acc = acc.clone() + val.clone(); } } combined } } impl<G> TryFrom<Bytes> for ElementVector<G> where G: Group + TryFrom<Bytes>, { type Error = &'static str; fn try_from(value: Bytes) -> Result<Self, Self::Error> { let len = value.len(); value .into_iter() .chunks(32) .into_iter() .map(|chunk| G::try_from(Into::<Bytes>::into(chunk.collect::<Vec<_>>()))) .collect::<Result<Vec<G>, _>>() .map(|vec| { assert_eq!(vec.len() * 32, len); vec }) .map(ElementVector::new) .map_err(|_| "conversion from bytes failed") } } impl<G> TryFrom<Vec<u8>> for ElementVector<G> where G: Group + TryFrom<Vec<u8>> + std::fmt::Debug, <G as TryFrom<Vec<u8>>>::Error: std::fmt::Debug, { type Error = &'static str; fn try_from(value: Vec<u8>) -> Result<Self, Self::Error> { let chunk_size = 32; value .into_iter() .chunks(chunk_size) .into_iter() .map(|chunk| G::try_from(chunk.collect::<Vec<u8>>())) .collect::<Result<Vec<G>, _>>() .map_err(|_| "conversion failed") .map(ElementVector::new) } } impl<G> From<ElementVector<G>> for Bytes where G: Group + Into<Bytes>, { fn from(value: ElementVector<G>) -> Bytes { let chunk_size = 32; let mut all_bytes = Vec::with_capacity(chunk_size * value.0.len()); for element in value.0.into_iter() { let bytes: Bytes = element.into(); let bytes: Vec<u8> = bytes.into(); let bytes = Bytes::from(bytes[0..32].to_vec()); all_bytes.append(&mut bytes.into()); } Bytes::from(all_bytes) } } impl<G> BitXor<ElementVector<G>> for ElementVector<G> where G: Group, { type Output = ElementVector<G>; #[allow(clippy::suspicious_arithmetic_impl)] fn bitxor(self, rhs: ElementVector<G>) -> ElementVector<G> { ElementVector( self.0 .into_iter() .zip(rhs.0.into_iter()) .map(|(element1, element2)| element1 + element2) .collect(), ) } } impl<G> From<ElementVector<G>> for Vec<u8> where G: Group + Into<Vec<u8>>, { fn from(value: ElementVector<G>) -> Vec<u8> { let chunk_size = 32; let mut all_bytes = Vec::with_capacity(chunk_size * value.0.len()); for element in value.0.into_iter() { let mut 
bytes: Vec<u8> = element.into(); all_bytes.append(&mut bytes); } all_bytes } } impl<G> BitXorAssign<ElementVector<G>> for ElementVector<G> where G: Group + Clone, { #[allow(clippy::suspicious_op_assign_impl)] fn bitxor_assign(&mut self, rhs: ElementVector<G>) { self.0 .iter_mut() .zip(rhs.0.into_iter()) .for_each(|(element1, element2)| *element1 = element1.clone() + element2); } }
use super::*; use crate::util::Sampleable; use crate::{ algebra::{Group, Monoid, SpecialExponentMonoid}, Bytes, }; use itertools::Itertools; use serde::{Deserialize, Serialize}; use std::convert::TryFrom; use std::fmt::Debug; use std::hash::Hash; use std::iter::repeat; use std::ops::{Add, BitXor, BitXorAssign}; #[cfg(any(test, feature = "testing"))] use proptest::{collection::SizeR
element2)| element1 + element2) .collect(), ) } } impl<G> From<ElementVector<G>> for Vec<u8> where G: Group + Into<Vec<u8>>, { fn from(value: ElementVector<G>) -> Vec<u8> { let chunk_size = 32; let mut all_bytes = Vec::with_capacity(chunk_size * value.0.len()); for element in value.0.into_iter() { let mut bytes: Vec<u8> = element.into(); all_bytes.append(&mut bytes); } all_bytes } } impl<G> BitXorAssign<ElementVector<G>> for ElementVector<G> where G: Group + Clone, { #[allow(clippy::suspicious_op_assign_impl)] fn bitxor_assign(&mut self, rhs: ElementVector<G>) { self.0 .iter_mut() .zip(rhs.0.into_iter()) .for_each(|(element1, element2)| *element1 = element1.clone() + element2); } }
ange, prelude::*}; #[cfg(any(test, feature = "testing"))] use proptest_derive::Arbitrary; #[derive(Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)] pub struct ElementVector<G>(pub Vec<G>); impl<G: Group> ElementVector<G> { pub fn new(inner: Vec<G>) -> Self { ElementVector(inner) } pub fn len(&self) -> usize { self.0.len() } pub fn is_empty(&self) -> bool { self.0.is_empty() } } impl<G> Add for ElementVector<G> where G: Add<Output = G>, { type Output = Self; fn add(self, rhs: Self) -> Self::Output { let inner = Iterator::zip(self.0.into_iter(), rhs.0.into_iter()) .map(|(x, y)| x + y) .collect(); Self(inner) } } impl<G: Monoid> Monoid for ElementVector<G> { fn zero() -> Self { panic!("not enough information (don't know the right length)"); } } impl<G> SpecialExponentMonoid for ElementVector<G> where G: SpecialExponentMonoid, G::Exponent: Clone, { type Exponent = G::Exponent; fn pow(&self, exp: Self::Exponent) -> Self { Self(self.0.iter().map(|x| x.pow(exp.clone())).collect()) } } #[cfg(any(test, feature = "testing"))] impl<G> Arbitrary for ElementVector<G> where G: Debug + Arbitrary + Group + 'static, { type Parameters = Option<usize>; type Strategy = BoxedStrategy<Self>; fn arbitrary_with(size: Self::Parameters) -> Self::Strategy { let range = size .map(SizeRange::from) .unwrap_or_else(|| SizeRange::from(1..5)); prop::collection::vec( any::<G>().prop_filter("nonzero", |g| g != &G::zero()), range, ) .prop_map(ElementVector::new) .boxed() } } impl<G> ElementVector<G> where G: Group + Into<Vec<u8>>, { pub fn hash_all(self) -> Vec<u8> { let mut hasher = blake3::Hasher::new(); for element in self.0 { let chunk: Vec<u8> = element.into(); hasher.update(&chunk); } let data: [u8; 32] = hasher.finalize().into(); data.to_vec() } } #[cfg_attr(any(test, feature = "testing"), derive(Arbitrary))] #[derive(Clone, PartialEq, Debug, Serialize, Deserialize)] pub struct GroupPrg<G: Group + 'static> { generators: ElementVector<G>, } impl<G: Group> GroupPrg<G> { fn len(&self) 
-> usize { self.generators.0.len() } } impl<G> GroupPrg<G> where G: Group + Sampleable, { pub fn new(generators: ElementVector<G>) -> Self { GroupPrg { generators } } pub fn from_seed(num_elements: usize, seed: <G as Sampleable>::Seed) -> Self { let elements = G::sample_many_from_seed(&seed, num_elements); GroupPrg::new(ElementVector(elements)) } pub fn random(num_elements: usize) -> Self { use std::iter::repeat_with; let elements = repeat_with(G::sample).take(num_elements).collect(); GroupPrg::new(ElementVector(elements)) } } impl<G> Prg for GroupPrg<G> where G: Group + SpecialExponentMonoid + Clone, G::Exponent: Sampleable + Clone, { type Seed = G::Exponent; type Output = ElementVector<G>; fn new_seed() -> Self::Seed { Self::Seed::sample() } fn eval(&self, seed: &Self::Seed) -> Self::Output { ElementVector( self.generators .0 .iter() .cloned() .map(|g| g.pow(seed.clone())) .collect(), ) } fn null_output(&self) -> Self::Output { ElementVector(repeat(G::zero()).take(self.len()).collect()) } fn output_size(&self) -> usize { self.generators.len() } } impl<G> SeedHomomorphicPrg for GroupPrg<G> where G: Group + SpecialExponentMonoid + Clone, G::Exponent: Sampleable + Monoid + Clone, { fn null_seed() -> Self::Seed { <G as SpecialExponentMonoid>::Exponent::zero() } fn combine_seeds(&self, seeds: Vec<Self::Seed>) -> Self::Seed { seeds .into_iter() .fold(Self::null_seed(), std::ops::Add::add) } fn combine_outputs(&self, outputs: &[&ElementVector<G>]) -> ElementVector<G> { let mut combined = self.null_output(); for output in outputs { for (acc, val) in combined.0.iter_mut().zip(output.0.iter()) { *acc = acc.clone() + val.clone(); } } combined } } impl<G> TryFrom<Bytes> for ElementVector<G> where G: Group + TryFrom<Bytes>, { type Error = &'static str; fn try_from(value: Bytes) -> Result<Self, Self::Error> { let len = value.len(); value .into_iter() .chunks(32) .into_iter() .map(|chunk| G::try_from(Into::<Bytes>::into(chunk.collect::<Vec<_>>()))) .collect::<Result<Vec<G>, 
_>>() .map(|vec| { assert_eq!(vec.len() * 32, len); vec }) .map(ElementVector::new) .map_err(|_| "conversion from bytes failed") } } impl<G> TryFrom<Vec<u8>> for ElementVector<G> where G: Group + TryFrom<Vec<u8>> + std::fmt::Debug, <G as TryFrom<Vec<u8>>>::Error: std::fmt::Debug, { type Error = &'static str; fn try_from(value: Vec<u8>) -> Result<Self, Self::Error> { let chunk_size = 32; value .into_iter() .chunks(chunk_size) .into_iter() .map(|chunk| G::try_from(chunk.collect::<Vec<u8>>())) .collect::<Result<Vec<G>, _>>() .map_err(|_| "conversion failed") .map(ElementVector::new) } } impl<G> From<ElementVector<G>> for Bytes where G: Group + Into<Bytes>, { fn from(value: ElementVector<G>) -> Bytes { let chunk_size = 32; let mut all_bytes = Vec::with_capacity(chunk_size * value.0.len()); for element in value.0.into_iter() { let bytes: Bytes = element.into(); let bytes: Vec<u8> = bytes.into(); let bytes = Bytes::from(bytes[0..32].to_vec()); all_bytes.append(&mut bytes.into()); } Bytes::from(all_bytes) } } impl<G> BitXor<ElementVector<G>> for ElementVector<G> where G: Group, { type Output = ElementVector<G>; #[allow(clippy::suspicious_arithmetic_impl)] fn bitxor(self, rhs: ElementVector<G>) -> ElementVector<G> { ElementVector( self.0 .into_iter() .zip(rhs.0.into_iter()) .map(|(element1,
random
[ { "content": "/// A monoid with custom exponentiation for a particular exponent type.\n\npub trait SpecialExponentMonoid: Monoid {\n\n type Exponent: Monoid;\n\n\n\n /// Raise `self` to the `exp`th power.\n\n fn pow(&self, exp: Self::Exponent) -> Self;\n\n}\n\n\n\n#[cfg(test)]\n\nmacro_rules! check_mon...
Rust
solana/pyth2wormhole/program/src/types/mod.rs
dendisuhubdy/wormhole
29cd5a3934aaf489a1b7aa45495414c5cb974c82
pub mod pyth_extensions; use std::{ convert::{ TryFrom, TryInto, }, io::Read, mem, }; use borsh::BorshSerialize; use pyth_client::{ AccountType, CorpAction, Ema, Price, PriceStatus, PriceType, }; use solana_program::{ clock::UnixTimestamp, program_error::ProgramError, pubkey::Pubkey, }; use solitaire::{ trace, ErrBox, Result as SoliResult, SolitaireError, }; use self::pyth_extensions::{ P2WCorpAction, P2WEma, P2WPriceStatus, P2WPriceType, }; pub const P2W_MAGIC: &'static [u8] = b"P2WH"; pub const P2W_FORMAT_VERSION: u16 = 1; pub const PUBKEY_LEN: usize = 32; #[repr(u8)] pub enum PayloadId { PriceAttestation = 1, } #[derive(Clone, Default, Debug, Eq, PartialEq)] #[cfg_attr(feature = "wasm", derive(serde_derive::Serialize, serde_derive::Deserialize))] pub struct PriceAttestation { pub product_id: Pubkey, pub price_id: Pubkey, pub price_type: P2WPriceType, pub price: i64, pub expo: i32, pub twap: P2WEma, pub twac: P2WEma, pub confidence_interval: u64, pub status: P2WPriceStatus, pub corp_act: P2WCorpAction, pub timestamp: UnixTimestamp, } impl PriceAttestation { pub fn from_pyth_price_bytes( price_id: Pubkey, timestamp: UnixTimestamp, value: &[u8], ) -> Result<Self, SolitaireError> { let price = parse_pyth_price(value)?; Ok(PriceAttestation { product_id: Pubkey::new(&price.prod.val[..]), price_id, price_type: (&price.ptype).into(), price: price.agg.price, twap: (&price.twap).into(), twac: (&price.twac).into(), expo: price.expo, confidence_interval: price.agg.conf, status: (&price.agg.status).into(), corp_act: (&price.agg.corp_act).into(), timestamp: timestamp, }) } pub fn serialize(&self) -> Vec<u8> { #[deny(warnings)] let PriceAttestation { product_id, price_id, price_type, price, expo, twap, twac, confidence_interval, status, corp_act, timestamp, } = self; let mut buf = P2W_MAGIC.to_vec(); buf.extend_from_slice(&P2W_FORMAT_VERSION.to_be_bytes()[..]); buf.push(PayloadId::PriceAttestation as u8); buf.extend_from_slice(&product_id.to_bytes()[..]); 
buf.extend_from_slice(&price_id.to_bytes()[..]); buf.push(price_type.clone() as u8); buf.extend_from_slice(&price.to_be_bytes()[..]); buf.extend_from_slice(&expo.to_be_bytes()[..]); buf.append(&mut twap.serialize()); buf.append(&mut twac.serialize()); buf.extend_from_slice(&confidence_interval.to_be_bytes()[..]); buf.push(status.clone() as u8); buf.push(corp_act.clone() as u8); buf.extend_from_slice(&timestamp.to_be_bytes()[..]); buf } pub fn deserialize(mut bytes: impl Read) -> Result<Self, ErrBox> { use P2WCorpAction::*; use P2WPriceStatus::*; use P2WPriceType::*; println!("Using {} bytes for magic", P2W_MAGIC.len()); let mut magic_vec = vec![0u8; P2W_MAGIC.len()]; bytes.read_exact(magic_vec.as_mut_slice())?; if magic_vec.as_slice() != P2W_MAGIC { return Err(format!( "Invalid magic {:02X?}, expected {:02X?}", magic_vec, P2W_MAGIC, ) .into()); } let mut version_vec = vec![0u8; mem::size_of_val(&P2W_FORMAT_VERSION)]; bytes.read_exact(version_vec.as_mut_slice())?; let mut version = u16::from_be_bytes(version_vec.as_slice().try_into()?); if version != P2W_FORMAT_VERSION { return Err(format!( "Unsupported format version {}, expected {}", version, P2W_FORMAT_VERSION ) .into()); } let mut payload_id_vec = vec![0u8; mem::size_of::<PayloadId>()]; bytes.read_exact(payload_id_vec.as_mut_slice())?; if PayloadId::PriceAttestation as u8 != payload_id_vec[0] { return Err(format!( "Invalid Payload ID {}, expected {}", payload_id_vec[0], PayloadId::PriceAttestation as u8, ) .into()); } let mut product_id_vec = vec![0u8; PUBKEY_LEN]; bytes.read_exact(product_id_vec.as_mut_slice())?; let product_id = Pubkey::new(product_id_vec.as_slice()); let mut price_id_vec = vec![0u8; PUBKEY_LEN]; bytes.read_exact(price_id_vec.as_mut_slice())?; let price_id = Pubkey::new(price_id_vec.as_slice()); let mut price_type_vec = vec![0u8; mem::size_of::<P2WPriceType>()]; bytes.read_exact(price_type_vec.as_mut_slice())?; let price_type = match price_type_vec[0] { a if a == Price as u8 => Price, a if a 
== P2WPriceType::Unknown as u8 => P2WPriceType::Unknown, other => { return Err(format!("Invalid price_type value {}", other).into()); } }; let mut price_vec = vec![0u8; mem::size_of::<i64>()]; bytes.read_exact(price_vec.as_mut_slice())?; let price = i64::from_be_bytes(price_vec.as_slice().try_into()?); let mut expo_vec = vec![0u8; mem::size_of::<i32>()]; bytes.read_exact(expo_vec.as_mut_slice())?; let expo = i32::from_be_bytes(expo_vec.as_slice().try_into()?); let twap = P2WEma::deserialize(&mut bytes)?; let twac = P2WEma::deserialize(&mut bytes)?; println!("twac OK"); let mut confidence_interval_vec = vec![0u8; mem::size_of::<u64>()]; bytes.read_exact(confidence_interval_vec.as_mut_slice())?; let confidence_interval = u64::from_be_bytes(confidence_interval_vec.as_slice().try_into()?); let mut status_vec = vec![0u8; mem::size_of::<P2WPriceType>()]; bytes.read_exact(status_vec.as_mut_slice())?; let status = match status_vec[0] { a if a == P2WPriceStatus::Unknown as u8 => P2WPriceStatus::Unknown, a if a == Trading as u8 => Trading, a if a == Halted as u8 => Halted, a if a == Auction as u8 => Auction, other => { return Err(format!("Invalid status value {}", other).into()); } }; let mut corp_act_vec = vec![0u8; mem::size_of::<P2WPriceType>()]; bytes.read_exact(corp_act_vec.as_mut_slice())?; let corp_act = match corp_act_vec[0] { a if a == NoCorpAct as u8 => NoCorpAct, other => { return Err(format!("Invalid corp_act value {}", other).into()); } }; let mut timestamp_vec = vec![0u8; mem::size_of::<UnixTimestamp>()]; bytes.read_exact(timestamp_vec.as_mut_slice())?; let timestamp = UnixTimestamp::from_be_bytes(timestamp_vec.as_slice().try_into()?); Ok( Self { product_id, price_id, price_type, price, expo, twap, twac, confidence_interval, status, corp_act, timestamp }) } } fn parse_pyth_price(price_data: &[u8]) -> SoliResult<&Price> { if price_data.len() != mem::size_of::<Price>() { trace!(&format!( "parse_pyth_price: buffer length mismatch ({} expected, got {})", 
mem::size_of::<Price>(), price_data.len() )); return Err(ProgramError::InvalidAccountData.into()); } let price_account = pyth_client::cast::<Price>(price_data); if price_account.atype != AccountType::Price as u32 { trace!(&format!( "parse_pyth_price: AccountType mismatch ({} expected, got {})", mem::size_of::<Price>(), price_data.len() )); return Err(ProgramError::InvalidAccountData.into()); } Ok(price_account) } #[cfg(test)] mod tests { use super::*; use pyth_client::{ AccKey, AccountType, PriceComp, PriceInfo, }; macro_rules! empty_acckey { () => { AccKey { val: [0u8; 32] } }; } macro_rules! empty_priceinfo { () => { PriceInfo { price: 0, conf: 0, status: PriceStatus::Unknown, corp_act: CorpAction::NoCorpAct, pub_slot: 0, } }; } macro_rules! empty_pricecomp { () => { PriceComp { publisher: empty_acckey!(), agg: empty_priceinfo!(), latest: empty_priceinfo!(), } }; } macro_rules! empty_ema { () => { (&P2WEma::default()).into() }; } macro_rules! empty_price { () => { Price { magic: pyth_client::MAGIC, ver: pyth_client::VERSION, atype: AccountType::Price as u32, size: 0, ptype: PriceType::Price, expo: 0, num: 0, num_qt: 0, last_slot: 0, valid_slot: 0, drv1: 0, drv2: 0, drv3: 0, twap: empty_ema!(), twac: empty_ema!(), prod: empty_acckey!(), next: empty_acckey!(), prev_slot: 0, prev_price: 0, prev_conf: 0, agg: empty_priceinfo!(), comp: [ empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), ], 
} }; } #[test] fn test_parse_pyth_price_wrong_size_slices() { assert!(parse_pyth_price(&[]).is_err()); assert!(parse_pyth_price(vec![0u8; 1].as_slice()).is_err()); } #[test] fn test_normal_values() -> SoliResult<()> { let price = Price { expo: 5, agg: PriceInfo { price: 42, ..empty_priceinfo!() }, ..empty_price!() }; let price_vec = vec![price]; let (_, bytes, _) = unsafe { price_vec.as_slice().align_to::<u8>() }; parse_pyth_price(bytes)?; Ok(()) } #[test] fn test_serialize_deserialize() -> Result<(), ErrBox> { let product_id_bytes = [21u8; 32]; let price_id_bytes = [222u8; 32]; println!("Hex product_id: {:02X?}", &product_id_bytes); println!("Hex price_id: {:02X?}", &price_id_bytes); let attestation: PriceAttestation = PriceAttestation { product_id: Pubkey::new_from_array(product_id_bytes), price_id: Pubkey::new_from_array(price_id_bytes), price: (0xdeadbeefdeadbabe as u64) as i64, price_type: P2WPriceType::Price, twap: P2WEma { val: -42, numer: 15, denom: 37, }, twac: P2WEma { val: 42, numer: 1111, denom: 2222, }, expo: -3, status: P2WPriceStatus::Trading, confidence_interval: 101, corp_act: P2WCorpAction::NoCorpAct, timestamp: 123456789i64, }; println!("Regular: {:#?}", &attestation); println!("Hex: {:#02X?}", &attestation); let bytes = attestation.serialize(); println!("Hex Bytes: {:02X?}", bytes); assert_eq!(PriceAttestation::deserialize(bytes.as_slice())?, attestation); Ok(()) } }
pub mod pyth_extensions; use std::{ convert::{ TryFrom, TryInto, }, io::Read, mem, }; use borsh::BorshSerialize; use pyth_client::{ AccountType, CorpAction, Ema, Price, PriceStatus, PriceType, }; use solana_program::{ clock::UnixTimestamp, program_error::ProgramError, pubkey::Pubkey, }; use solitaire::{ trace, ErrBox, Result as SoliResult, SolitaireError, }; use self::pyth_extensions::{ P2WCorpAction, P2WEma, P2WPriceStatus, P2WPriceType, }; pub const P2W_MAGIC: &'static [u8] = b"P2WH"; pub const P2W_FORMAT_VERSION: u16 = 1; pub const PUBKEY_LEN: usize = 32; #[repr(u8)] pub enum PayloadId { PriceAttestation = 1, } #[derive(Clone, Default, Debug, Eq, PartialEq)] #[cfg_attr(feature = "wasm", derive(serde_derive::Serialize, serde_derive::Deserialize))] pub struct PriceAttestation { pub product_id: Pubkey, pub price_id: Pubkey, pub price_type: P2WPriceType, pub price: i64, pub expo: i32, pub twap: P2WEma, pub twac: P2WEma, pub confidence_interval: u64, pub status: P2WPriceStatus, pub corp_act: P2WCorpAction, pub timestamp: UnixTimestamp, } impl PriceAttestation { pub fn from_pyth_price_bytes( price_id: Pubkey, timestamp: UnixTimestamp, value: &[u8], ) -> Result<Self, SolitaireError> { let price = parse_pyth_price(value)?; Ok(PriceAttestation { product_id: Pubkey::new(&price.prod.val[..]), price_id, price_type: (&price.ptype).into(), price: price.agg.price, twap: (&price.twap).into(), twac: (&price.twac).into(), expo: price.expo, confidence_interval: price.agg.conf, status: (&price.agg.status).into(), corp_act: (&price.agg.corp_act).into(), timestamp: timestamp, }) } pub fn serialize(&self) -> Vec<u8> { #[deny(warnings)] let PriceAttestation { product_id, price_id, price_type, price, expo, twap, twac, confidence_interval, status, corp_act, timestamp, } = self; let mut buf = P2W_MAGIC.to_vec(); buf.extend_from_slice(&P2W_FORMAT_VERSION.to_be_bytes()[..]); buf.push(PayloadId::PriceAttestation as u8); buf.extend_from_slice(&product_id.to_bytes()[..]); 
buf.extend_from_slice(&price_id.to_bytes()[..]); buf.push(price_type.clone() as u8); buf.extend_from_slice(&price.to_be_bytes()[..]); buf.extend_from_slice(&expo.to_be_bytes()[..]); buf.append(&mut twap.serialize()); buf.append(&mut twac.serialize()); buf.extend_from_slice(&confidence_interval.to_be_bytes()[..]); buf.push(status.clone() as u8); buf.push(corp_act.clone() as u8); buf.extend_from_slice(&timestamp.to_be_bytes()[..]); buf } pub fn deserialize(mut bytes: impl Read) -> Result<Self, ErrBox> { use P2WCorpAction::*; use P2WPriceStatus::*; use P2WPriceType::*; println!("Using {} bytes for magic", P2W_MAGIC.len()); let mut magic_vec = vec![0u8; P2W_MAGIC.len()]; bytes.read_exact(magic_vec.as_mut_slice())?; if magic_vec.as_slice() != P2W_MAGIC { return Err(format!( "Invalid magic {:02X?}, expected {:02X?}", magic_vec, P2W_MAGIC, ) .into()); } let mut version_vec = vec![0u8; mem::size_of_val(&P2W_FORMAT_VERSION)]; bytes.read_exact(version_vec.as_mut_slice())?; let mut version = u16::from_be_bytes(version_vec.as_slice().try_into()?); if version != P2W_FORMAT_VERSION { return Err(format!( "Unsupported format version {}, expected {}", version, P2W_FORMAT_VERSION ) .into()); } let mut payload_id_vec = vec![0u8; mem::size_of::<PayloadId>()]; bytes.read_exact(payload_id_vec.as_mut_slice())?; if PayloadId::PriceAttestation as u8 != payload_id_vec[0] { return Err(format!( "Invalid Payload ID {}, expected {}", payload_id_vec[0], PayloadId::PriceAttestation as u8, ) .into()); } let mut product_id_vec = vec![0u8; PUBKEY_LEN]; bytes.read_exact(product_id_vec.as_mut_slice())?; let product_id = Pubkey::new(product_id_vec.as_slice()); let mut price_id_vec = vec![0u8; PUBKEY_LEN]; bytes.read_exact(price_id_vec.as_mut_slice())?; let price_id = Pubkey::new(price_id_vec.as_slice()); let mut price_type_vec = vec![0u8; mem::size_of::<P2WPriceType>()]; bytes.read_exact(price_type_vec.as_mut_slice())?; let price_type = match price_type_vec[0] { a if a == Price as u8 => Price, a if a 
== P2WPriceType::Unknown as u8 => P2WPriceType::Unknown, other => { return Err(format!("Invalid price_type value {}", other).into()); } }; let mut price_vec = vec![0u8; mem::size_of::<i64>()]; bytes.read_exact(price_vec.as_mut_slice())?; let price = i64::from_be_bytes(price_vec.as_slice().try_into()?); let mut expo_vec = vec![0u8; mem::size_of::<i32>()]; bytes.read_exact(expo_vec.as_mut_slice())?; let expo = i32::from_be_bytes(expo_vec.as_slice().try_into()?); let twap = P2WEma::deserialize(&mut bytes)?; let twac = P2WEma::deserialize(&mut bytes)?; println!("twac OK"); let mut confidence_interval_vec = vec![0u8; mem::size_of::<u64>()]; bytes.read_exact(confidence_interval_vec.as_mut_slice())?; let confidence_interval = u64::from_be_bytes(confidence_interval_vec.as_slice().try_into()?); let mut status_vec = vec![0u8; mem::size_of::<P2WPriceType>()]; bytes.read_exact(status_vec.as_mut_slice())?; let status = match status_vec[0] { a if a == P2WPriceStatus::Unknown as u8 => P2WPriceStatus::Unknown, a if a == Trading as u8 => Trading, a if a == Halted as u8 => Halted, a if a == Auction as u8 => Auction, other => { return Err(format!("Invalid status value {}", other).into()); } }; let mut corp_act_vec = vec![0u8; mem::size_of::<P2WPriceType>()]; bytes.read_exact(corp_act_vec.as_mut_slice())?; let corp_act = match corp_act_vec[0] { a if a == NoCorpAct as u8 => NoCorpAct, other => { return Err(format!("Invalid corp_act value {}", other).into()); } }; let mut timestamp_vec = vec![0u8; mem::size_of::<UnixTimestamp>()]; bytes.read_exact(timestamp_vec.as_mut_slice())?; let timestamp = UnixTimestamp::from_be_bytes(timestamp_vec.as_slice().try_into()?); Ok( Self { product_id, price_id, price_type, price, expo, twap, twac, confidence_interval, status, corp_act, timestamp }) } } fn parse_pyth_price(price_data: &[u8]) -> SoliResult<&Price> { if price_data.len() != mem::size_of::<Price>() { trace!(&format!( "parse_pyth_price: buffer length mismatch ({} expected, got {})", 
mem::size_of::<Price>(), price_data.len() )); return Err(ProgramError::InvalidAccountData.into()); } let price_account = pyth_client::cast::<Price>(price_data); if price_account.atype != AccountType::Price as u32 { trace!(&format!( "parse_pyth_price: AccountType mismatch ({} expected, got {})", mem::size_of::<Price>(), price_data.len() )); return Err(ProgramError::InvalidAccountData.into()); } Ok(price_account) } #[cfg(test)] mod tests { use super::*; use pyth_client::{ AccKey, AccountType, PriceComp, PriceInfo, }; macro_rules! empty_acckey { () => { AccKey { val: [0u8; 32] } }; } macro_rules! empty_priceinfo { () => { PriceInfo { price: 0, conf: 0, status: PriceStatus::Unknown, corp_act: CorpAction::NoCorpAct, pub_slot: 0, } }; } macro_rules! empty_pricecomp { () => { PriceComp { publisher: empty_acckey!(), agg: empty_priceinfo!(), latest: empty_priceinfo!(), } }; } macro_rules! empty_ema { () => { (&P2WEma::default()).into() }; } macro_rules! empty_price { () => { Price { magic: pyth_client::MAGIC, ver: pyth_client::VERSION, atype: AccountType::Price as u32, size: 0, ptype: PriceType::Price, expo: 0, num: 0, num_qt: 0, last_slot: 0, valid_slot: 0, drv1: 0, drv2: 0, drv3: 0, twap: empty_ema!(), twac: empty_ema!(), prod: empty_acckey!(), next: empty_acckey!(), prev_slot: 0, prev_price: 0, prev_conf: 0, agg: empty_priceinfo!(), comp: [ empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), ], 
} }; } #[test] fn test_parse_pyth_price_wrong_size_slices() { assert!(parse_pyth_price(&[]).is_err()); assert!(parse_pyth_price(vec![0u8; 1].as_slice()).is_err()); } #[test] fn test_normal_values() -> SoliResult<()> { let price = Price { expo: 5, agg: PriceInfo { price: 42, ..empty_priceinfo!() }, ..empty_price!() }; let price_vec = vec![price]; let (_, bytes, _) = unsafe { price_vec.as_slice().align_to::<u8>() }; parse_pyth_price(bytes)?; Ok(()) } #[test] fn test_serialize_deserialize() -> Result<(), ErrBox> { let product_id_bytes = [21u8; 32]; let price_id_bytes = [222u8; 32]; println!("Hex product_id: {:02X?}", &product_id_bytes); println!("Hex price_id: {:02X?}", &price_id_bytes); let attestation: PriceAttestation = PriceAttestation { product_id: Pubkey::new_from_array(product_id_bytes), price_id: Pubkey::new_from_array(price_id_bytes), price: (0xdeadbeefdeadbabe as u64) as i64, price_type: P2WPriceType::Price, twap: P2WEma { val: -42, numer: 15, denom: 37, }, twac: P2WEma { val: 42, numer: 1111, denom: 2222, }, expo: -
}
3, status: P2WPriceStatus::Trading, confidence_interval: 101, corp_act: P2WCorpAction::NoCorpAct, timestamp: 123456789i64, }; println!("Regular: {:#?}", &attestation); println!("Hex: {:#02X?}", &attestation); let bytes = attestation.serialize(); println!("Hex Bytes: {:02X?}", bytes); assert_eq!(PriceAttestation::deserialize(bytes.as_slice())?, attestation); Ok(()) }
function_block-function_prefixed
[ { "content": "#[wasm_bindgen]\n\npub fn parse_attestation(bytes: Vec<u8>) -> JsValue {\n\n let a = PriceAttestation::deserialize(bytes.as_slice()).unwrap();\n\n \n\n JsValue::from_serde(&a).unwrap()\n\n}\n", "file_path": "solana/pyth2wormhole/program/src/wasm.rs", "rank": 0, "score": 363306...
Rust
src/testing/sl4f/src/bluetooth/avrcp_facade.rs
EnderNightLord-ChromeBook/zircon-rpi
b09b1eb3aa7a127c65568229fe10edd251869283
use super::types::{CustomAvcPanelCommand, CustomPlayStatus}; use crate::common_utils::common::macros::{fx_err_and_bail, with_line}; use anyhow::Error; use fidl::endpoints::create_endpoints; use fidl_fuchsia_bluetooth_avrcp::{ ControllerMarker, ControllerProxy, PeerManagerMarker, PeerManagerProxy, }; use fuchsia_component::client; use fuchsia_syslog::{fx_log_err, fx_log_info}; use parking_lot::RwLock; #[derive(Debug)] struct AvrcpFacadeInner { avrcp_service_proxy: Option<PeerManagerProxy>, controller_proxy: Option<ControllerProxy>, } #[derive(Debug)] pub struct AvrcpFacade { inner: RwLock<AvrcpFacadeInner>, } impl AvrcpFacade { pub fn new() -> AvrcpFacade { AvrcpFacade { inner: RwLock::new(AvrcpFacadeInner { avrcp_service_proxy: None, controller_proxy: None, }), } } async fn create_avrcp_service_proxy(&self) -> Result<PeerManagerProxy, Error> { let tag = "AvrcpFacade::create_avrcp_service_proxy"; match self.inner.read().avrcp_service_proxy.clone() { Some(avrcp_service_proxy) => { fx_log_info!( tag: &with_line!(tag), "Current AVRCP service proxy: {:?}", avrcp_service_proxy ); Ok(avrcp_service_proxy) } None => { let avrcp_service_proxy = client::connect_to_service::<PeerManagerMarker>(); if let Err(err) = avrcp_service_proxy { fx_err_and_bail!( &with_line!(tag), format_err!("Failed to create AVRCP service proxy: {}", err) ); } avrcp_service_proxy } } } pub async fn init_avrcp(&self, target_id: String) -> Result<(), Error> { let tag = "AvrcpFacade::init_avrcp"; self.inner.write().avrcp_service_proxy = Some(self.create_avrcp_service_proxy().await?); let avrcp_service_proxy = match &self.inner.read().avrcp_service_proxy { Some(p) => p.clone(), None => fx_err_and_bail!(&with_line!(tag), "No AVRCP service proxy created"), }; let (cont_client, cont_server) = create_endpoints::<ControllerMarker>()?; let _status = avrcp_service_proxy.get_controller_for_target(&target_id.as_str(), cont_server).await?; self.inner.write().controller_proxy = 
Some(cont_client.into_proxy().expect("Error obtaining controller client proxy")); Ok(()) } pub async fn get_media_attributes(&self) -> Result<String, Error> { let tag = "AvrcpFacade::get_media_attributes"; match self.inner.read().controller_proxy.clone() { Some(proxy) => match proxy.get_media_attributes().await? { Ok(media_attribs) => Ok(format!("Media attributes: {:#?}", media_attribs)), Err(e) => fx_err_and_bail!( &with_line!(tag), format!("Error fetching media attributes: {:?}", e) ), }, None => fx_err_and_bail!(&with_line!(tag), "No AVRCP service proxy available"), } } pub async fn get_play_status(&self) -> Result<CustomPlayStatus, Error> { let tag = "AvrcpFacade::get_play_status"; match self.inner.read().controller_proxy.clone() { Some(proxy) => match proxy.get_play_status().await? { Ok(play_status) => Ok(CustomPlayStatus::new(&play_status)), Err(e) => fx_err_and_bail!( &with_line!(tag), format!("Error fetching play status: {:?}", e) ), }, None => fx_err_and_bail!(&with_line!(tag), "No AVRCP service proxy available"), } } pub async fn send_command(&self, command: CustomAvcPanelCommand) -> Result<(), Error> { let tag = "AvrcpFacade::send_command"; let result = match self.inner.read().controller_proxy.clone() { Some(proxy) => proxy.send_command(command.into()).await?, None => fx_err_and_bail!(&with_line!(tag), "No AVRCP service proxy available"), }; match result { Ok(res) => Ok(res), Err(err) => { fx_err_and_bail!(&with_line!(tag), format!("Error sending command:{:?}", err)) } } } fn clear(&self) { self.inner.write().avrcp_service_proxy = None; self.inner.write().controller_proxy = None; } pub async fn cleanup(&self) -> Result<(), Error> { self.clear(); Ok(()) } } #[cfg(test)] mod tests { use super::*; use fidl::endpoints::create_proxy_and_stream; use fidl_fuchsia_bluetooth_avrcp::{ControllerRequest, PlayStatus}; use fuchsia_async as fasync; use futures::prelude::*; use futures::Future; use lazy_static::lazy_static; use matches::assert_matches; lazy_static! 
{ static ref PLAY_STATUS: CustomPlayStatus = CustomPlayStatus { song_length: Some(120), song_position: Some(10), playback_status: Some(4), }; } struct MockAvrcpTester { expected_state: Vec<Box<dyn FnOnce(ControllerRequest) + Send + 'static>>, } impl MockAvrcpTester { fn new() -> Self { Self { expected_state: vec![] } } fn push(mut self, request: impl FnOnce(ControllerRequest) + Send + 'static) -> Self { self.expected_state.push(Box::new(request)); self } fn build_controller(self) -> (AvrcpFacade, impl Future<Output = ()>) { let (proxy, mut stream) = create_proxy_and_stream::<ControllerMarker>().unwrap(); let fut = async move { for expected in self.expected_state { expected(stream.next().await.unwrap().unwrap()); } assert_matches!(stream.next().await, None); }; ( AvrcpFacade { inner: RwLock::new(AvrcpFacadeInner { controller_proxy: Some(proxy), avrcp_service_proxy: None, }), }, fut, ) } fn expect_get_play_status(self, result: CustomPlayStatus) -> Self { self.push(move |req| match req { ControllerRequest::GetPlayStatus { responder } => { responder.send(&mut Ok(PlayStatus::from(result))).unwrap(); } _ => {} }) } } #[fasync::run_singlethreaded(test)] async fn test_get_play_status() { let (facade, play_status_fut) = MockAvrcpTester::new().expect_get_play_status(*PLAY_STATUS).build_controller(); let facade_fut = async move { let play_status = facade.get_play_status().await.unwrap(); assert_eq!(play_status, *PLAY_STATUS); }; future::join(facade_fut, play_status_fut).await; } }
use super::types::{CustomAvcPanelCommand, CustomPlayStatus}; use crate::common_utils::common::macros::{fx_err_and_bail, with_line}; use anyhow::Error; use fidl::endpoints::create_endpoints; use fidl_fuchsia_bluetooth_avrcp::{ ControllerMarker, ControllerProxy, PeerManagerMarker, PeerManagerProxy, }; use fuchsia_component::client; use fuchsia_syslog::{fx_log_err, fx_log_info}; use parking_lot::RwLock; #[derive(Debug)] struct AvrcpFacadeInner { avrcp_service_proxy: Option<PeerManagerProxy>, controller_proxy: Option<ControllerProxy>, } #[derive(Debug)] pub struct AvrcpFacade { inner: RwLock<AvrcpFacadeInner>, } impl AvrcpFacade { pub fn new() -> AvrcpFacade { AvrcpFacade { inner: RwLock::new(AvrcpFacadeInner { avrcp_service_proxy: None, controller_proxy: None, }), } } async fn create_avrcp_service_proxy(&self) -> Result<PeerManagerProxy, Error> { let tag = "AvrcpFacade::create_avrcp_service_proxy"; match self.inner.read().avrcp_service_proxy.clone() { Some(avrcp_service_proxy) => { fx_log_info!( tag: &with_line!(tag), "Current AVRCP service proxy: {:?}", avrcp_service_proxy ); Ok(avrcp_service_proxy) } None => { let avrcp_service_proxy = client::connect_to_service::<PeerManagerMarker>(); if let Err(err) = avrcp_service_proxy { fx_err_and_bail!( &with_line!(tag), format_err!("Failed to create AVRCP service proxy: {}", err) ); } avrcp_service_proxy } } } pub async fn init_avrcp(&self, target_id: String) -> Result<(), Error> { let tag = "AvrcpFacade::init_avrcp"; self.inner.write().avrcp_service_proxy = Some(self.create_avrcp_service_proxy().await?); let avrcp_service_proxy = match &self.inner.read().avrcp_service_proxy { Some(p) => p.clone(), None => fx_err_and_bail!(&with_line!(tag), "No AVRCP service proxy created"), }; let (cont_client, cont_server) = create_endpoints::<ControllerMarker>()?; let _status = avrcp_service_proxy.get_controller_for_target(&target_id.as_str(), cont_server).await?; self.inner.write().controller_proxy = 
Some(cont_client.into_proxy().expect("Error obtaining controller client proxy")); Ok(()) } pub async fn get_media_attributes(&self) -> Result<String, Error> { let tag = "AvrcpFacade::get_media_attributes"; match self.inner.read().controller_proxy.clone() { Some(proxy) => match proxy.get_media_attributes().await? { Ok(media_attribs) => Ok(format!("Media attributes: {:#?}", media_attribs)), Err(e) => fx_err_and_bail!( &with_line!(tag), format!("Error fetching media attributes: {:?}", e) ), }, None => fx_err_and_bail!(&with_line!(tag), "No AVRCP service proxy available"), } } pub async fn get_play_status(&self) -> Result<CustomPlayStatus, Error> { let tag = "AvrcpFacade::get_play_status";
} pub async fn send_command(&self, command: CustomAvcPanelCommand) -> Result<(), Error> { let tag = "AvrcpFacade::send_command"; let result = match self.inner.read().controller_proxy.clone() { Some(proxy) => proxy.send_command(command.into()).await?, None => fx_err_and_bail!(&with_line!(tag), "No AVRCP service proxy available"), }; match result { Ok(res) => Ok(res), Err(err) => { fx_err_and_bail!(&with_line!(tag), format!("Error sending command:{:?}", err)) } } } fn clear(&self) { self.inner.write().avrcp_service_proxy = None; self.inner.write().controller_proxy = None; } pub async fn cleanup(&self) -> Result<(), Error> { self.clear(); Ok(()) } } #[cfg(test)] mod tests { use super::*; use fidl::endpoints::create_proxy_and_stream; use fidl_fuchsia_bluetooth_avrcp::{ControllerRequest, PlayStatus}; use fuchsia_async as fasync; use futures::prelude::*; use futures::Future; use lazy_static::lazy_static; use matches::assert_matches; lazy_static! { static ref PLAY_STATUS: CustomPlayStatus = CustomPlayStatus { song_length: Some(120), song_position: Some(10), playback_status: Some(4), }; } struct MockAvrcpTester { expected_state: Vec<Box<dyn FnOnce(ControllerRequest) + Send + 'static>>, } impl MockAvrcpTester { fn new() -> Self { Self { expected_state: vec![] } } fn push(mut self, request: impl FnOnce(ControllerRequest) + Send + 'static) -> Self { self.expected_state.push(Box::new(request)); self } fn build_controller(self) -> (AvrcpFacade, impl Future<Output = ()>) { let (proxy, mut stream) = create_proxy_and_stream::<ControllerMarker>().unwrap(); let fut = async move { for expected in self.expected_state { expected(stream.next().await.unwrap().unwrap()); } assert_matches!(stream.next().await, None); }; ( AvrcpFacade { inner: RwLock::new(AvrcpFacadeInner { controller_proxy: Some(proxy), avrcp_service_proxy: None, }), }, fut, ) } fn expect_get_play_status(self, result: CustomPlayStatus) -> Self { self.push(move |req| match req { ControllerRequest::GetPlayStatus { responder 
} => { responder.send(&mut Ok(PlayStatus::from(result))).unwrap(); } _ => {} }) } } #[fasync::run_singlethreaded(test)] async fn test_get_play_status() { let (facade, play_status_fut) = MockAvrcpTester::new().expect_get_play_status(*PLAY_STATUS).build_controller(); let facade_fut = async move { let play_status = facade.get_play_status().await.unwrap(); assert_eq!(play_status, *PLAY_STATUS); }; future::join(facade_fut, play_status_fut).await; } }
match self.inner.read().controller_proxy.clone() { Some(proxy) => match proxy.get_play_status().await? { Ok(play_status) => Ok(CustomPlayStatus::new(&play_status)), Err(e) => fx_err_and_bail!( &with_line!(tag), format!("Error fetching play status: {:?}", e) ), }, None => fx_err_and_bail!(&with_line!(tag), "No AVRCP service proxy available"), }
if_condition
[]
Rust
azure-functions/src/rpc/client.rs
rylev/azure-functions-rs
7bc1e1d977da8bca669e7d802d401c689448e852
use crate::logger; use crate::registry::Registry; use azure_functions_shared::rpc::protocol; use futures::future::{lazy, ok}; use futures::sync::mpsc; use futures::{Future, Sink, Stream}; use grpcio::{ChannelBuilder, ClientDuplexReceiver, EnvBuilder, WriteFlags}; use log::{self, error}; use std::cell::RefCell; use std::panic::{self, AssertUnwindSafe}; use std::sync::{Arc, Mutex}; use std::thread; use tokio_threadpool; pub type Sender = mpsc::Sender<protocol::StreamingMessage>; type Receiver = ClientDuplexReceiver<protocol::StreamingMessage>; const UNKNOWN: &str = "<unknown>"; thread_local!(static FUNCTION_NAME: RefCell<&'static str> = RefCell::new(UNKNOWN)); pub struct Client { worker_id: String, max_message_len: Option<i32>, client: Option<protocol::FunctionRpcClient>, sender: Option<Sender>, receiver: Option<Receiver>, host_version: Option<String>, } impl Client { pub fn new(worker_id: String, max_message_len: Option<i32>) -> Client { Client { worker_id, max_message_len, client: None, sender: None, receiver: None, host_version: None, } } pub fn host_version(&self) -> Option<&str> { self.host_version.as_ref().map(|x| x.as_str()) } pub fn sender(&self) -> Option<Sender> { self.sender.clone() } pub fn connect(mut self, host: &str, port: u32) -> impl Future<Item = Client, Error = ()> { let mut channel = ChannelBuilder::new(Arc::new(EnvBuilder::new().build())); if let Some(len) = self.max_message_len { if len > 0 { channel = channel .max_receive_message_len(len) .max_send_message_len(len); } } let (rpc_tx, rpc_rx) = self .client .get_or_insert(protocol::FunctionRpcClient::new( channel.connect(&format!("{}:{}", host, port)), )) .event_stream() .unwrap(); let (tx, rx) = mpsc::channel(1); self.sender = Some(tx); self.receiver = Some(rpc_rx); thread::spawn(move || { let mut rx = rx; let mut rpc_tx = rpc_tx; while let (Some(message), r) = rx.into_future().wait().unwrap() { rpc_tx = rpc_tx .send((message, WriteFlags::default())) .wait() .expect("failed to send message to 
host"); rx = r; } }); let mut message = protocol::StreamingMessage::new(); message.mut_start_stream().worker_id = self.worker_id.to_owned(); self.send(message) .and_then(|c| c.read()) .and_then(|(mut c, msg)| { let msg = msg.expect("host disconnected during worker initialization"); if !msg.has_worker_init_request() { panic!("expected a worker init request, but received: {:?}.", msg); } c.host_version = Some(msg.get_worker_init_request().host_version.clone()); let mut msg = protocol::StreamingMessage::new(); { let worker_init_res = msg.mut_worker_init_response(); worker_init_res.worker_version = env!("CARGO_PKG_VERSION").to_owned(); let result = worker_init_res.mut_result(); result.status = protocol::StatusResult_Status::Success; } c.send(msg) }) } pub fn send( mut self, message: protocol::StreamingMessage, ) -> impl Future<Item = Client, Error = ()> { self.sender .take() .unwrap() .send(message) .map_err(|err| panic!("failed to send message: {:?}.", err)) .and_then(move |sender| { self.sender = Some(sender); ok(self) }) } pub fn read( mut self, ) -> impl Future<Item = (Client, Option<protocol::StreamingMessage>), Error = ()> { self.receiver .take() .unwrap() .into_future() .map_err(|(err, _)| panic!("failed to receive message: {:?}.", err)) .and_then(move |(msg, r)| { self.receiver = Some(r); ok((self, msg)) }) } pub fn process_all_messages( mut self, registry: &Arc<Mutex<Registry<'static>>>, ) -> impl Future<Item = Client, Error = ()> { let pool = tokio_threadpool::ThreadPool::new(); log::set_boxed_logger(Box::new(logger::Logger::new( log::Level::Trace, self.sender.clone().unwrap(), ))) .expect("Failed to set the global logger instance"); panic::set_hook(Box::new(|info| match info.location() { Some(location) => { error!( "Azure Function '{}' panicked with '{}', {}:{}:{}", FUNCTION_NAME.with(|f| *f.borrow()), info.payload() .downcast_ref::<&str>() .cloned() .unwrap_or_else(|| info .payload() .downcast_ref::<String>() .map(|x| x.as_str()) .unwrap_or(UNKNOWN)), 
location.file(), location.line(), location.column() ); } None => { error!( "Azure Function '{}' panicked with '{}'", FUNCTION_NAME.with(|f| *f.borrow()), info.payload() .downcast_ref::<&str>() .cloned() .unwrap_or_else(|| info .payload() .downcast_ref::<String>() .map(|x| x.as_str()) .unwrap_or(UNKNOWN)), ); } })); log::set_max_level(log::LevelFilter::Trace); loop { let (c, msg) = self.read().wait().expect("Failed to read message"); self = c; if msg.is_none() { break; } let msg = msg.unwrap(); if msg.has_worker_terminate() { break; } let sender = self.sender().unwrap(); let reg = registry.clone(); pool.spawn(lazy(move || { Client::handle_request(&reg, sender, msg); Ok(()) })); } pool.shutdown_on_idle().and_then(|_| ok(self)) } fn handle_function_load_request( registry: &Arc<Mutex<Registry<'static>>>, sender: Sender, req: &protocol::FunctionLoadRequest, ) { let mut message = protocol::StreamingMessage::new(); { let response = message.mut_function_load_response(); response.function_id = req.function_id.clone(); response.set_result(match req.metadata.as_ref() { Some(metadata) => { let mut result = protocol::StatusResult::new(); if registry .lock() .unwrap() .register(&req.function_id, &metadata.name) { result.status = protocol::StatusResult_Status::Success; } else { result.status = protocol::StatusResult_Status::Failure; result.result = format!("Function '{}' does not exist.", metadata.name); } result } None => { let mut result = protocol::StatusResult::new(); result.status = protocol::StatusResult_Status::Failure; result.result = "Function load request metadata is missing.".to_string(); result } }); } sender .send(message) .wait() .expect("Failed to send message to response thread"); } fn handle_invocation_request( registry: &Arc<Mutex<Registry<'static>>>, sender: Sender, req: &mut protocol::InvocationRequest, ) { let mut message = protocol::StreamingMessage::new(); let res = match registry .lock() .unwrap() .get(&req.function_id) .and_then(|func| { Some( match 
panic::catch_unwind(AssertUnwindSafe(|| { FUNCTION_NAME.with(|n| { *n.borrow_mut() = &func.name; }); logger::INVOCATION_ID.with(|id| { id.borrow_mut().replace_range(.., &req.invocation_id); }); (func .invoker .as_ref() .expect("function must have an invoker"))( &func.name, req ) })) { Ok(res) => res, Err(_) => { let mut res = protocol::InvocationResponse::new(); res.set_invocation_id(req.invocation_id.clone()); let mut result = protocol::StatusResult::new(); result.status = protocol::StatusResult_Status::Failure; result.result = "Azure Function panicked: see log for more information." .to_string(); res.set_result(result); res } }, ) }) { Some(res) => res, None => { let mut res = protocol::InvocationResponse::new(); res.set_invocation_id(req.invocation_id.clone()); let mut result = protocol::StatusResult::new(); result.status = protocol::StatusResult_Status::Failure; result.result = format!("Function with id '{}' does not exist.", req.function_id); res.set_result(result); res } }; FUNCTION_NAME.with(|n| { *n.borrow_mut() = UNKNOWN; }); logger::INVOCATION_ID.with(|id| { id.borrow_mut().clear(); }); message.set_invocation_response(res); sender .send(message) .wait() .expect("Failed to send message to response thread"); } fn handle_worker_status_request(sender: Sender, _req: &protocol::WorkerStatusRequest) { let mut message = protocol::StreamingMessage::new(); { message.mut_worker_status_response(); } sender .send(message) .wait() .expect("Failed to send message to response thread"); } fn handle_request( registry: &Arc<Mutex<Registry<'static>>>, sender: Sender, mut msg: protocol::StreamingMessage, ) { if msg.has_function_load_request() { Client::handle_function_load_request( &registry, sender, msg.get_function_load_request(), ); return; } if msg.has_invocation_request() { Client::handle_invocation_request(&registry, sender, msg.mut_invocation_request()); return; } if msg.has_worker_status_request() { Client::handle_worker_status_request(sender, 
msg.get_worker_status_request()); return; } if msg.has_file_change_event_request() { return; } if msg.has_invocation_cancel() { return; } panic!("Unexpected message from host: {:?}.", msg); } }
use crate::logger; use crate::registry::Registry; use azure_functions_shared::rpc::protocol; use futures::future::{lazy, ok}; use futures::sync::mpsc; use futures::{Future, Sink, Stream}; use grpcio::{ChannelBuilder, ClientDuplexReceiver, EnvBuilder, WriteFlags}; use log::{self, error}; use std::cell::RefCell; use std::panic::{self, AssertUnwindSafe}; use std::sync::{Arc, Mutex}; use std::thread; use tokio_threadpool; pub type Sender = mpsc::Sender<protocol::StreamingMessage>; type Receiver = ClientDuplexReceiver<protocol::StreamingMessage>; const UNKNOWN: &str = "<unknown>"; thread_local!(static FUNCTION_NAME: RefCell<&'static str> = RefCell::new(UNKNOWN)); pub struct Client { worker_id: String, max_message_len: Option<i32>, client: Option<protocol::FunctionRpcClient>, sender: Option<Sender>, receiver: Option<Receiver>, host_version: Option<String>, } impl Client { pub fn new(worker_id: String, max_message_len: Option<i32>) -> Client { Client { worker_id, max_message_len, client: None, sender: None, receiver: None, host_version: None, } } pub fn host_version(&self) -> Option<&str> { self.host_version.as_ref().map(|x| x.as_str()) } pub fn sender(&self) -> Option<Sender> { self.sender.clone() } pub fn connect(mut self, host: &str, port: u32) -> impl Future<Item = Client, Error = ()> { let mut channel = ChannelBuilder::new(Arc::new(EnvBuilder::new().build())); if let Some(len) = self.max_message_len { if len > 0 { channel = channel .max_receive_message_len(len) .max_send_message_len(len); } } let (rpc_tx, rpc_rx) = self .client .get_or_insert(protocol::FunctionRpcClient::new( channel.connect(&format!("{}:{}", host, port)), )) .event_stream() .unwrap(); let (tx, rx) = mpsc::channel(1); self.sender = Some(tx); self.receiver = Some(rpc_rx); thread::spawn(move || { let mut rx = rx; let mut rpc_tx = rpc_tx; while let (Some(message), r) = rx.into_future().wait().unwrap() { rpc_tx = rpc_tx .send((message, WriteFlags::default())) .wait() .expect("failed to send message to 
host"); rx = r; } }); let mut message = protocol::StreamingMessage::new(); message.mut_start_stream().worker_id = self.worker_id.to_owned(); self.send(message) .and_then(|c| c.read()) .and_then(|(mut c, msg)| { let msg = msg.expect("host disconnected during worker initialization"); if !msg.has_worker_init_request() { panic!("expected a worker init request, but received: {:?}.", msg); } c.host_version = Some(msg.get_worker_init_request().host_version.clone()); let mut msg = protocol::StreamingMessage::new(); { let worker_init_res = msg.mut_worker_init_response(); worker_init_res.worker_version = env!("CARGO_PKG_VERSION").to_owned(); let result = worker_init_res.mut_result(); result.status = protocol::StatusResult_Status::Success; } c.send(msg) }) } pub fn send( mut self, message: protocol::StreamingMessage, ) -> impl Future<Item = Client, Error = ()> { self.sender .
pub fn read( mut self, ) -> impl Future<Item = (Client, Option<protocol::StreamingMessage>), Error = ()> { self.receiver .take() .unwrap() .into_future() .map_err(|(err, _)| panic!("failed to receive message: {:?}.", err)) .and_then(move |(msg, r)| { self.receiver = Some(r); ok((self, msg)) }) } pub fn process_all_messages( mut self, registry: &Arc<Mutex<Registry<'static>>>, ) -> impl Future<Item = Client, Error = ()> { let pool = tokio_threadpool::ThreadPool::new(); log::set_boxed_logger(Box::new(logger::Logger::new( log::Level::Trace, self.sender.clone().unwrap(), ))) .expect("Failed to set the global logger instance"); panic::set_hook(Box::new(|info| match info.location() { Some(location) => { error!( "Azure Function '{}' panicked with '{}', {}:{}:{}", FUNCTION_NAME.with(|f| *f.borrow()), info.payload() .downcast_ref::<&str>() .cloned() .unwrap_or_else(|| info .payload() .downcast_ref::<String>() .map(|x| x.as_str()) .unwrap_or(UNKNOWN)), location.file(), location.line(), location.column() ); } None => { error!( "Azure Function '{}' panicked with '{}'", FUNCTION_NAME.with(|f| *f.borrow()), info.payload() .downcast_ref::<&str>() .cloned() .unwrap_or_else(|| info .payload() .downcast_ref::<String>() .map(|x| x.as_str()) .unwrap_or(UNKNOWN)), ); } })); log::set_max_level(log::LevelFilter::Trace); loop { let (c, msg) = self.read().wait().expect("Failed to read message"); self = c; if msg.is_none() { break; } let msg = msg.unwrap(); if msg.has_worker_terminate() { break; } let sender = self.sender().unwrap(); let reg = registry.clone(); pool.spawn(lazy(move || { Client::handle_request(&reg, sender, msg); Ok(()) })); } pool.shutdown_on_idle().and_then(|_| ok(self)) } fn handle_function_load_request( registry: &Arc<Mutex<Registry<'static>>>, sender: Sender, req: &protocol::FunctionLoadRequest, ) { let mut message = protocol::StreamingMessage::new(); { let response = message.mut_function_load_response(); response.function_id = req.function_id.clone(); 
response.set_result(match req.metadata.as_ref() { Some(metadata) => { let mut result = protocol::StatusResult::new(); if registry .lock() .unwrap() .register(&req.function_id, &metadata.name) { result.status = protocol::StatusResult_Status::Success; } else { result.status = protocol::StatusResult_Status::Failure; result.result = format!("Function '{}' does not exist.", metadata.name); } result } None => { let mut result = protocol::StatusResult::new(); result.status = protocol::StatusResult_Status::Failure; result.result = "Function load request metadata is missing.".to_string(); result } }); } sender .send(message) .wait() .expect("Failed to send message to response thread"); } fn handle_invocation_request( registry: &Arc<Mutex<Registry<'static>>>, sender: Sender, req: &mut protocol::InvocationRequest, ) { let mut message = protocol::StreamingMessage::new(); let res = match registry .lock() .unwrap() .get(&req.function_id) .and_then(|func| { Some( match panic::catch_unwind(AssertUnwindSafe(|| { FUNCTION_NAME.with(|n| { *n.borrow_mut() = &func.name; }); logger::INVOCATION_ID.with(|id| { id.borrow_mut().replace_range(.., &req.invocation_id); }); (func .invoker .as_ref() .expect("function must have an invoker"))( &func.name, req ) })) { Ok(res) => res, Err(_) => { let mut res = protocol::InvocationResponse::new(); res.set_invocation_id(req.invocation_id.clone()); let mut result = protocol::StatusResult::new(); result.status = protocol::StatusResult_Status::Failure; result.result = "Azure Function panicked: see log for more information." 
.to_string(); res.set_result(result); res } }, ) }) { Some(res) => res, None => { let mut res = protocol::InvocationResponse::new(); res.set_invocation_id(req.invocation_id.clone()); let mut result = protocol::StatusResult::new(); result.status = protocol::StatusResult_Status::Failure; result.result = format!("Function with id '{}' does not exist.", req.function_id); res.set_result(result); res } }; FUNCTION_NAME.with(|n| { *n.borrow_mut() = UNKNOWN; }); logger::INVOCATION_ID.with(|id| { id.borrow_mut().clear(); }); message.set_invocation_response(res); sender .send(message) .wait() .expect("Failed to send message to response thread"); } fn handle_worker_status_request(sender: Sender, _req: &protocol::WorkerStatusRequest) { let mut message = protocol::StreamingMessage::new(); { message.mut_worker_status_response(); } sender .send(message) .wait() .expect("Failed to send message to response thread"); } fn handle_request( registry: &Arc<Mutex<Registry<'static>>>, sender: Sender, mut msg: protocol::StreamingMessage, ) { if msg.has_function_load_request() { Client::handle_function_load_request( &registry, sender, msg.get_function_load_request(), ); return; } if msg.has_invocation_request() { Client::handle_invocation_request(&registry, sender, msg.mut_invocation_request()); return; } if msg.has_worker_status_request() { Client::handle_worker_status_request(sender, msg.get_worker_status_request()); return; } if msg.has_file_change_event_request() { return; } if msg.has_invocation_cancel() { return; } panic!("Unexpected message from host: {:?}.", msg); } }
take() .unwrap() .send(message) .map_err(|err| panic!("failed to send message: {:?}.", err)) .and_then(move |sender| { self.sender = Some(sender); ok(self) }) }
function_block-function_prefix_line
[ { "content": "pub fn read_crate_name(path: &str) -> Result<String, String> {\n\n let mut _file =\n\n File::open(path).map_err(|e| format!(\"Failed to open {}: {}\", \"Cargo.toml\".cyan(), e))?;\n\n\n\n let mut contents = String::new();\n\n _file\n\n .read_to_string(&mut contents)\n\n ...
Rust
src/adc0/verid.rs
conorpp/lpc55-pac
eb30d633de05113362de01095123d70c54e63ef4
#[doc = "Reader of register VERID"] pub type R = crate::R<u32, super::VERID>; #[doc = "Resolution\n\nValue on reset: 1"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum RES_A { #[doc = "0: Up to 13-bit differential/12-bit single ended resolution supported."] RES_0 = 0, #[doc = "1: Up to 16-bit differential/16-bit single ended resolution supported."] RES_1 = 1, } impl From<RES_A> for bool { #[inline(always)] fn from(variant: RES_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `RES`"] pub type RES_R = crate::R<bool, RES_A>; impl RES_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> RES_A { match self.bits { false => RES_A::RES_0, true => RES_A::RES_1, } } #[doc = "Checks if the value of the field is `RES_0`"] #[inline(always)] pub fn is_res_0(&self) -> bool { *self == RES_A::RES_0 } #[doc = "Checks if the value of the field is `RES_1`"] #[inline(always)] pub fn is_res_1(&self) -> bool { *self == RES_A::RES_1 } } #[doc = "Differential Supported\n\nValue on reset: 1"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum DIFFEN_A { #[doc = "0: Differential operation not supported."] DIFFEN_0 = 0, #[doc = "1: Differential operation supported. 
CMDLa\\[CTYPE\\] controls fields implemented."] DIFFEN_1 = 1, } impl From<DIFFEN_A> for bool { #[inline(always)] fn from(variant: DIFFEN_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `DIFFEN`"] pub type DIFFEN_R = crate::R<bool, DIFFEN_A>; impl DIFFEN_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> DIFFEN_A { match self.bits { false => DIFFEN_A::DIFFEN_0, true => DIFFEN_A::DIFFEN_1, } } #[doc = "Checks if the value of the field is `DIFFEN_0`"] #[inline(always)] pub fn is_diffen_0(&self) -> bool { *self == DIFFEN_A::DIFFEN_0 } #[doc = "Checks if the value of the field is `DIFFEN_1`"] #[inline(always)] pub fn is_diffen_1(&self) -> bool { *self == DIFFEN_A::DIFFEN_1 } } #[doc = "Multi Vref Implemented\n\nValue on reset: 1"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum MVI_A { #[doc = "0: Single voltage reference high (VREFH) input supported."] MVI_0 = 0, #[doc = "1: Multiple voltage reference high (VREFH) inputs supported."] MVI_1 = 1, } impl From<MVI_A> for bool { #[inline(always)] fn from(variant: MVI_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `MVI`"] pub type MVI_R = crate::R<bool, MVI_A>; impl MVI_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> MVI_A { match self.bits { false => MVI_A::MVI_0, true => MVI_A::MVI_1, } } #[doc = "Checks if the value of the field is `MVI_0`"] #[inline(always)] pub fn is_mvi_0(&self) -> bool { *self == MVI_A::MVI_0 } #[doc = "Checks if the value of the field is `MVI_1`"] #[inline(always)] pub fn is_mvi_1(&self) -> bool { *self == MVI_A::MVI_1 } } #[doc = "Channel Scale Width\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] #[repr(u8)] pub enum CSW_A { #[doc = "0: Channel scaling not supported."] CSW_0 = 0, #[doc = "1: Channel scaling supported. 1-bit CSCALE control field."] CSW_1 = 1, #[doc = "6: Channel scaling supported. 
6-bit CSCALE control field."] CSW_6 = 6, } impl From<CSW_A> for u8 { #[inline(always)] fn from(variant: CSW_A) -> Self { variant as _ } } #[doc = "Reader of field `CSW`"] pub type CSW_R = crate::R<u8, CSW_A>; impl CSW_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> crate::Variant<u8, CSW_A> { use crate::Variant::*; match self.bits { 0 => Val(CSW_A::CSW_0), 1 => Val(CSW_A::CSW_1), 6 => Val(CSW_A::CSW_6), i => Res(i), } } #[doc = "Checks if the value of the field is `CSW_0`"] #[inline(always)] pub fn is_csw_0(&self) -> bool { *self == CSW_A::CSW_0 } #[doc = "Checks if the value of the field is `CSW_1`"] #[inline(always)] pub fn is_csw_1(&self) -> bool { *self == CSW_A::CSW_1 } #[doc = "Checks if the value of the field is `CSW_6`"] #[inline(always)] pub fn is_csw_6(&self) -> bool { *self == CSW_A::CSW_6 } } #[doc = "Voltage Reference 1 Range Control Bit Implemented\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum VR1RNGI_A { #[doc = "0: Range control not required. CFG\\[VREF1RNG\\] is not implemented."] VR1RNGI_0 = 0, #[doc = "1: Range control required. 
CFG\\[VREF1RNG\\] is implemented."] VR1RNGI_1 = 1, } impl From<VR1RNGI_A> for bool { #[inline(always)] fn from(variant: VR1RNGI_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `VR1RNGI`"] pub type VR1RNGI_R = crate::R<bool, VR1RNGI_A>; impl VR1RNGI_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> VR1RNGI_A { match self.bits { false => VR1RNGI_A::VR1RNGI_0, true => VR1RNGI_A::VR1RNGI_1, } } #[doc = "Checks if the value of the field is `VR1RNGI_0`"] #[inline(always)] pub fn is_vr1rngi_0(&self) -> bool { *self == VR1RNGI_A::VR1RNGI_0 } #[doc = "Checks if the value of the field is `VR1RNGI_1`"] #[inline(always)] pub fn is_vr1rngi_1(&self) -> bool { *self == VR1RNGI_A::VR1RNGI_1 } } #[doc = "Internal ADC Clock implemented\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum IADCKI_A { #[doc = "0: Internal clock source not implemented."] IADCKI_0 = 0, #[doc = "1: Internal clock source (and CFG\\[ADCKEN\\]) implemented."] IADCKI_1 = 1, } impl From<IADCKI_A> for bool { #[inline(always)] fn from(variant: IADCKI_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `IADCKI`"] pub type IADCKI_R = crate::R<bool, IADCKI_A>; impl IADCKI_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> IADCKI_A { match self.bits { false => IADCKI_A::IADCKI_0, true => IADCKI_A::IADCKI_1, } } #[doc = "Checks if the value of the field is `IADCKI_0`"] #[inline(always)] pub fn is_iadcki_0(&self) -> bool { *self == IADCKI_A::IADCKI_0 } #[doc = "Checks if the value of the field is `IADCKI_1`"] #[inline(always)] pub fn is_iadcki_1(&self) -> bool { *self == IADCKI_A::IADCKI_1 } } #[doc = "Calibration Function Implemented\n\nValue on reset: 1"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum CALOFSI_A { #[doc = "0: Calibration Not Implemented."] CALOFSI_0 = 0, #[doc = "1: Calibration Implemented."] CALOFSI_1 = 1, } impl From<CALOFSI_A> for bool { #[inline(always)] fn from(variant: 
CALOFSI_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `CALOFSI`"] pub type CALOFSI_R = crate::R<bool, CALOFSI_A>; impl CALOFSI_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> CALOFSI_A { match self.bits { false => CALOFSI_A::CALOFSI_0, true => CALOFSI_A::CALOFSI_1, } } #[doc = "Checks if the value of the field is `CALOFSI_0`"] #[inline(always)] pub fn is_calofsi_0(&self) -> bool { *self == CALOFSI_A::CALOFSI_0 } #[doc = "Checks if the value of the field is `CALOFSI_1`"] #[inline(always)] pub fn is_calofsi_1(&self) -> bool { *self == CALOFSI_A::CALOFSI_1 } } #[doc = "Number of Single Ended Outputs Supported\n\nValue on reset: 1"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum NUM_SEC_A { #[doc = "0: This design supports one single ended conversion at a time."] NUM_SEC_0 = 0, #[doc = "1: This design supports two simultanious single ended conversions."] NUM_SEC_1 = 1, } impl From<NUM_SEC_A> for bool { #[inline(always)] fn from(variant: NUM_SEC_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `NUM_SEC`"] pub type NUM_SEC_R = crate::R<bool, NUM_SEC_A>; impl NUM_SEC_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> NUM_SEC_A { match self.bits { false => NUM_SEC_A::NUM_SEC_0, true => NUM_SEC_A::NUM_SEC_1, } } #[doc = "Checks if the value of the field is `NUM_SEC_0`"] #[inline(always)] pub fn is_num_sec_0(&self) -> bool { *self == NUM_SEC_A::NUM_SEC_0 } #[doc = "Checks if the value of the field is `NUM_SEC_1`"] #[inline(always)] pub fn is_num_sec_1(&self) -> bool { *self == NUM_SEC_A::NUM_SEC_1 } } #[doc = "Number of FIFOs\n\nValue on reset: 2"] #[derive(Clone, Copy, Debug, PartialEq)] #[repr(u8)] pub enum NUM_FIFO_A { #[doc = "0: N/A"] NUM_FIFO_0 = 0, #[doc = "1: This design supports one result FIFO."] NUM_FIFO_1 = 1, #[doc = "2: This design supports two result FIFOs."] NUM_FIFO_2 = 2, #[doc = "3: This design supports three result FIFOs."] NUM_FIFO_3 = 3, #[doc = 
"4: This design supports four result FIFOs."] NUM_FIFO_4 = 4, } impl From<NUM_FIFO_A> for u8 { #[inline(always)] fn from(variant: NUM_FIFO_A) -> Self { variant as _ } } #[doc = "Reader of field `NUM_FIFO`"] pub type NUM_FIFO_R = crate::R<u8, NUM_FIFO_A>; impl NUM_FIFO_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> crate::Variant<u8, NUM_FIFO_A> { use crate::Variant::*; match self.bits { 0 => Val(NUM_FIFO_A::NUM_FIFO_0), 1 => Val(NUM_FIFO_A::NUM_FIFO_1), 2 => Val(NUM_FIFO_A::NUM_FIFO_2), 3 => Val(NUM_FIFO_A::NUM_FIFO_3), 4 => Val(NUM_FIFO_A::NUM_FIFO_4), i => Res(i), } } #[doc = "Checks if the value of the field is `NUM_FIFO_0`"] #[inline(always)] pub fn is_num_fifo_0(&self) -> bool { *self == NUM_FIFO_A::NUM_FIFO_0 } #[doc = "Checks if the value of the field is `NUM_FIFO_1`"] #[inline(always)] pub fn is_num_fifo_1(&self) -> bool { *self == NUM_FIFO_A::NUM_FIFO_1 } #[doc = "Checks if the value of the field is `NUM_FIFO_2`"] #[inline(always)] pub fn is_num_fifo_2(&self) -> bool { *self == NUM_FIFO_A::NUM_FIFO_2 } #[doc = "Checks if the value of the field is `NUM_FIFO_3`"] #[inline(always)] pub fn is_num_fifo_3(&self) -> bool { *self == NUM_FIFO_A::NUM_FIFO_3 } #[doc = "Checks if the value of the field is `NUM_FIFO_4`"] #[inline(always)] pub fn is_num_fifo_4(&self) -> bool { *self == NUM_FIFO_A::NUM_FIFO_4 } } #[doc = "Reader of field `MINOR`"] pub type MINOR_R = crate::R<u8, u8>; #[doc = "Reader of field `MAJOR`"] pub type MAJOR_R = crate::R<u8, u8>; impl R { #[doc = "Bit 0 - Resolution"] #[inline(always)] pub fn res(&self) -> RES_R { RES_R::new((self.bits & 0x01) != 0) } #[doc = "Bit 1 - Differential Supported"] #[inline(always)] pub fn diffen(&self) -> DIFFEN_R { DIFFEN_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 3 - Multi Vref Implemented"] #[inline(always)] pub fn mvi(&self) -> MVI_R { MVI_R::new(((self.bits >> 3) & 0x01) != 0) } #[doc = "Bits 4:6 - Channel Scale Width"] #[inline(always)] pub fn csw(&self) -> 
CSW_R { CSW_R::new(((self.bits >> 4) & 0x07) as u8) } #[doc = "Bit 8 - Voltage Reference 1 Range Control Bit Implemented"] #[inline(always)] pub fn vr1rngi(&self) -> VR1RNGI_R { VR1RNGI_R::new(((self.bits >> 8) & 0x01) != 0) } #[doc = "Bit 9 - Internal ADC Clock implemented"] #[inline(always)] pub fn iadcki(&self) -> IADCKI_R { IADCKI_R::new(((self.bits >> 9) & 0x01) != 0) } #[doc = "Bit 10 - Calibration Function Implemented"] #[inline(always)] pub fn calofsi(&self) -> CALOFSI_R { CALOFSI_R::new(((self.bits >> 10) & 0x01) != 0) } #[doc = "Bit 11 - Number of Single Ended Outputs Supported"] #[inline(always)] pub fn num_sec(&self) -> NUM_SEC_R { NUM_SEC_R::new(((self.bits >> 11) & 0x01) != 0) } #[doc = "Bits 12:14 - Number of FIFOs"] #[inline(always)] pub fn num_fifo(&self) -> NUM_FIFO_R { NUM_FIFO_R::new(((self.bits >> 12) & 0x07) as u8) } #[doc = "Bits 16:23 - Minor Version Number"] #[inline(always)] pub fn minor(&self) -> MINOR_R { MINOR_R::new(((self.bits >> 16) & 0xff) as u8) } #[doc = "Bits 24:31 - Major Version Number"] #[inline(always)] pub fn major(&self) -> MAJOR_R { MAJOR_R::new(((self.bits >> 24) & 0xff) as u8) } }
#[doc = "Reader of register VERID"] pub type R = crate::R<u32, super::VERID>; #[doc = "Resolution\n\nValue on reset: 1"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum RES_A { #[doc = "0: Up to 13-bit differential/12-bit single ended resolution supported."] RES_0 = 0, #[doc = "1: Up to 16-bit differential/16-bit single ended resolution supported."] RES_1 = 1, } impl From<RES_A> for bool { #[inline(always)] fn from(variant: RES_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `RES`"] pub type RES_R = crate::R<bool, RES_A>; impl RES_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> RES_A { match self.bits { false => RES_A::RES_0, true => RES_A::RES_1, } } #[doc = "Checks if the value of the field is `RES_0`"] #[inline(always)] pub fn is_res_0(&self) -> bool { *self == RES_A::RES_0 } #[doc = "Checks if the value of the field is `RES_1`"] #[inline(always)] pub fn is_res_1(&self) -> bool { *self == RES_A::RES_1 } } #[doc = "Differential Supported\n\nValue on reset: 1"] #[
VR1RNGI_0 = 0, #[doc = "1: Range control required. CFG\\[VREF1RNG\\] is implemented."] VR1RNGI_1 = 1, } impl From<VR1RNGI_A> for bool { #[inline(always)] fn from(variant: VR1RNGI_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `VR1RNGI`"] pub type VR1RNGI_R = crate::R<bool, VR1RNGI_A>; impl VR1RNGI_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> VR1RNGI_A { match self.bits { false => VR1RNGI_A::VR1RNGI_0, true => VR1RNGI_A::VR1RNGI_1, } } #[doc = "Checks if the value of the field is `VR1RNGI_0`"] #[inline(always)] pub fn is_vr1rngi_0(&self) -> bool { *self == VR1RNGI_A::VR1RNGI_0 } #[doc = "Checks if the value of the field is `VR1RNGI_1`"] #[inline(always)] pub fn is_vr1rngi_1(&self) -> bool { *self == VR1RNGI_A::VR1RNGI_1 } } #[doc = "Internal ADC Clock implemented\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum IADCKI_A { #[doc = "0: Internal clock source not implemented."] IADCKI_0 = 0, #[doc = "1: Internal clock source (and CFG\\[ADCKEN\\]) implemented."] IADCKI_1 = 1, } impl From<IADCKI_A> for bool { #[inline(always)] fn from(variant: IADCKI_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `IADCKI`"] pub type IADCKI_R = crate::R<bool, IADCKI_A>; impl IADCKI_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> IADCKI_A { match self.bits { false => IADCKI_A::IADCKI_0, true => IADCKI_A::IADCKI_1, } } #[doc = "Checks if the value of the field is `IADCKI_0`"] #[inline(always)] pub fn is_iadcki_0(&self) -> bool { *self == IADCKI_A::IADCKI_0 } #[doc = "Checks if the value of the field is `IADCKI_1`"] #[inline(always)] pub fn is_iadcki_1(&self) -> bool { *self == IADCKI_A::IADCKI_1 } } #[doc = "Calibration Function Implemented\n\nValue on reset: 1"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum CALOFSI_A { #[doc = "0: Calibration Not Implemented."] CALOFSI_0 = 0, #[doc = "1: Calibration Implemented."] CALOFSI_1 = 1, } impl From<CALOFSI_A> 
for bool { #[inline(always)] fn from(variant: CALOFSI_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `CALOFSI`"] pub type CALOFSI_R = crate::R<bool, CALOFSI_A>; impl CALOFSI_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> CALOFSI_A { match self.bits { false => CALOFSI_A::CALOFSI_0, true => CALOFSI_A::CALOFSI_1, } } #[doc = "Checks if the value of the field is `CALOFSI_0`"] #[inline(always)] pub fn is_calofsi_0(&self) -> bool { *self == CALOFSI_A::CALOFSI_0 } #[doc = "Checks if the value of the field is `CALOFSI_1`"] #[inline(always)] pub fn is_calofsi_1(&self) -> bool { *self == CALOFSI_A::CALOFSI_1 } } #[doc = "Number of Single Ended Outputs Supported\n\nValue on reset: 1"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum NUM_SEC_A { #[doc = "0: This design supports one single ended conversion at a time."] NUM_SEC_0 = 0, #[doc = "1: This design supports two simultanious single ended conversions."] NUM_SEC_1 = 1, } impl From<NUM_SEC_A> for bool { #[inline(always)] fn from(variant: NUM_SEC_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `NUM_SEC`"] pub type NUM_SEC_R = crate::R<bool, NUM_SEC_A>; impl NUM_SEC_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> NUM_SEC_A { match self.bits { false => NUM_SEC_A::NUM_SEC_0, true => NUM_SEC_A::NUM_SEC_1, } } #[doc = "Checks if the value of the field is `NUM_SEC_0`"] #[inline(always)] pub fn is_num_sec_0(&self) -> bool { *self == NUM_SEC_A::NUM_SEC_0 } #[doc = "Checks if the value of the field is `NUM_SEC_1`"] #[inline(always)] pub fn is_num_sec_1(&self) -> bool { *self == NUM_SEC_A::NUM_SEC_1 } } #[doc = "Number of FIFOs\n\nValue on reset: 2"] #[derive(Clone, Copy, Debug, PartialEq)] #[repr(u8)] pub enum NUM_FIFO_A { #[doc = "0: N/A"] NUM_FIFO_0 = 0, #[doc = "1: This design supports one result FIFO."] NUM_FIFO_1 = 1, #[doc = "2: This design supports two result FIFOs."] NUM_FIFO_2 = 2, #[doc = "3: This design supports 
three result FIFOs."] NUM_FIFO_3 = 3, #[doc = "4: This design supports four result FIFOs."] NUM_FIFO_4 = 4, } impl From<NUM_FIFO_A> for u8 { #[inline(always)] fn from(variant: NUM_FIFO_A) -> Self { variant as _ } } #[doc = "Reader of field `NUM_FIFO`"] pub type NUM_FIFO_R = crate::R<u8, NUM_FIFO_A>; impl NUM_FIFO_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> crate::Variant<u8, NUM_FIFO_A> { use crate::Variant::*; match self.bits { 0 => Val(NUM_FIFO_A::NUM_FIFO_0), 1 => Val(NUM_FIFO_A::NUM_FIFO_1), 2 => Val(NUM_FIFO_A::NUM_FIFO_2), 3 => Val(NUM_FIFO_A::NUM_FIFO_3), 4 => Val(NUM_FIFO_A::NUM_FIFO_4), i => Res(i), } } #[doc = "Checks if the value of the field is `NUM_FIFO_0`"] #[inline(always)] pub fn is_num_fifo_0(&self) -> bool { *self == NUM_FIFO_A::NUM_FIFO_0 } #[doc = "Checks if the value of the field is `NUM_FIFO_1`"] #[inline(always)] pub fn is_num_fifo_1(&self) -> bool { *self == NUM_FIFO_A::NUM_FIFO_1 } #[doc = "Checks if the value of the field is `NUM_FIFO_2`"] #[inline(always)] pub fn is_num_fifo_2(&self) -> bool { *self == NUM_FIFO_A::NUM_FIFO_2 } #[doc = "Checks if the value of the field is `NUM_FIFO_3`"] #[inline(always)] pub fn is_num_fifo_3(&self) -> bool { *self == NUM_FIFO_A::NUM_FIFO_3 } #[doc = "Checks if the value of the field is `NUM_FIFO_4`"] #[inline(always)] pub fn is_num_fifo_4(&self) -> bool { *self == NUM_FIFO_A::NUM_FIFO_4 } } #[doc = "Reader of field `MINOR`"] pub type MINOR_R = crate::R<u8, u8>; #[doc = "Reader of field `MAJOR`"] pub type MAJOR_R = crate::R<u8, u8>; impl R { #[doc = "Bit 0 - Resolution"] #[inline(always)] pub fn res(&self) -> RES_R { RES_R::new((self.bits & 0x01) != 0) } #[doc = "Bit 1 - Differential Supported"] #[inline(always)] pub fn diffen(&self) -> DIFFEN_R { DIFFEN_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 3 - Multi Vref Implemented"] #[inline(always)] pub fn mvi(&self) -> MVI_R { MVI_R::new(((self.bits >> 3) & 0x01) != 0) } #[doc = "Bits 4:6 - Channel Scale 
Width"] #[inline(always)] pub fn csw(&self) -> CSW_R { CSW_R::new(((self.bits >> 4) & 0x07) as u8) } #[doc = "Bit 8 - Voltage Reference 1 Range Control Bit Implemented"] #[inline(always)] pub fn vr1rngi(&self) -> VR1RNGI_R { VR1RNGI_R::new(((self.bits >> 8) & 0x01) != 0) } #[doc = "Bit 9 - Internal ADC Clock implemented"] #[inline(always)] pub fn iadcki(&self) -> IADCKI_R { IADCKI_R::new(((self.bits >> 9) & 0x01) != 0) } #[doc = "Bit 10 - Calibration Function Implemented"] #[inline(always)] pub fn calofsi(&self) -> CALOFSI_R { CALOFSI_R::new(((self.bits >> 10) & 0x01) != 0) } #[doc = "Bit 11 - Number of Single Ended Outputs Supported"] #[inline(always)] pub fn num_sec(&self) -> NUM_SEC_R { NUM_SEC_R::new(((self.bits >> 11) & 0x01) != 0) } #[doc = "Bits 12:14 - Number of FIFOs"] #[inline(always)] pub fn num_fifo(&self) -> NUM_FIFO_R { NUM_FIFO_R::new(((self.bits >> 12) & 0x07) as u8) } #[doc = "Bits 16:23 - Minor Version Number"] #[inline(always)] pub fn minor(&self) -> MINOR_R { MINOR_R::new(((self.bits >> 16) & 0xff) as u8) } #[doc = "Bits 24:31 - Major Version Number"] #[inline(always)] pub fn major(&self) -> MAJOR_R { MAJOR_R::new(((self.bits >> 24) & 0xff) as u8) } }
derive(Clone, Copy, Debug, PartialEq)] pub enum DIFFEN_A { #[doc = "0: Differential operation not supported."] DIFFEN_0 = 0, #[doc = "1: Differential operation supported. CMDLa\\[CTYPE\\] controls fields implemented."] DIFFEN_1 = 1, } impl From<DIFFEN_A> for bool { #[inline(always)] fn from(variant: DIFFEN_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `DIFFEN`"] pub type DIFFEN_R = crate::R<bool, DIFFEN_A>; impl DIFFEN_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> DIFFEN_A { match self.bits { false => DIFFEN_A::DIFFEN_0, true => DIFFEN_A::DIFFEN_1, } } #[doc = "Checks if the value of the field is `DIFFEN_0`"] #[inline(always)] pub fn is_diffen_0(&self) -> bool { *self == DIFFEN_A::DIFFEN_0 } #[doc = "Checks if the value of the field is `DIFFEN_1`"] #[inline(always)] pub fn is_diffen_1(&self) -> bool { *self == DIFFEN_A::DIFFEN_1 } } #[doc = "Multi Vref Implemented\n\nValue on reset: 1"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum MVI_A { #[doc = "0: Single voltage reference high (VREFH) input supported."] MVI_0 = 0, #[doc = "1: Multiple voltage reference high (VREFH) inputs supported."] MVI_1 = 1, } impl From<MVI_A> for bool { #[inline(always)] fn from(variant: MVI_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `MVI`"] pub type MVI_R = crate::R<bool, MVI_A>; impl MVI_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> MVI_A { match self.bits { false => MVI_A::MVI_0, true => MVI_A::MVI_1, } } #[doc = "Checks if the value of the field is `MVI_0`"] #[inline(always)] pub fn is_mvi_0(&self) -> bool { *self == MVI_A::MVI_0 } #[doc = "Checks if the value of the field is `MVI_1`"] #[inline(always)] pub fn is_mvi_1(&self) -> bool { *self == MVI_A::MVI_1 } } #[doc = "Channel Scale Width\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] #[repr(u8)] pub enum CSW_A { #[doc = "0: Channel scaling not supported."] CSW_0 = 0, #[doc = "1: Channel scaling 
supported. 1-bit CSCALE control field."] CSW_1 = 1, #[doc = "6: Channel scaling supported. 6-bit CSCALE control field."] CSW_6 = 6, } impl From<CSW_A> for u8 { #[inline(always)] fn from(variant: CSW_A) -> Self { variant as _ } } #[doc = "Reader of field `CSW`"] pub type CSW_R = crate::R<u8, CSW_A>; impl CSW_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> crate::Variant<u8, CSW_A> { use crate::Variant::*; match self.bits { 0 => Val(CSW_A::CSW_0), 1 => Val(CSW_A::CSW_1), 6 => Val(CSW_A::CSW_6), i => Res(i), } } #[doc = "Checks if the value of the field is `CSW_0`"] #[inline(always)] pub fn is_csw_0(&self) -> bool { *self == CSW_A::CSW_0 } #[doc = "Checks if the value of the field is `CSW_1`"] #[inline(always)] pub fn is_csw_1(&self) -> bool { *self == CSW_A::CSW_1 } #[doc = "Checks if the value of the field is `CSW_6`"] #[inline(always)] pub fn is_csw_6(&self) -> bool { *self == CSW_A::CSW_6 } } #[doc = "Voltage Reference 1 Range Control Bit Implemented\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum VR1RNGI_A { #[doc = "0: Range control not required. CFG\\[VREF1RNG\\] is not implemented."]
random
[ { "content": "fn main() {\n\n if env::var_os(\"CARGO_FEATURE_RT\").is_some() {\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"device.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"device.x\"))\n\n .unwrap();\n\n...
Rust
ark-bcs/src/ldt/mod.rs
arkworks-rs/bcs
baf903d87c0ee98d82e931610439b3d0e03a982f
use ark_std::marker::PhantomData; use ark_crypto_primitives::merkle_tree::Config as MTConfig; use ark_ff::PrimeField; use ark_ldt::domain::Radix2CosetDomain; use ark_sponge::{Absorb, CryptographicSponge}; use crate::{ bcs::{simulation_transcript::SimulationTranscript, transcript::Transcript}, iop::{ bookkeeper::NameSpace, message::{MessagesCollection, MsgRoundRef}, oracles::RoundOracle, }, Error, }; #[cfg(feature = "r1cs")] pub mod constraints; pub mod rl_ldt; pub trait LDT<F: PrimeField + Absorb> { type LDTParameters: Clone; fn codeword_domain(param: &Self::LDTParameters) -> Option<Radix2CosetDomain<F>>; fn localization_param(param: &Self::LDTParameters) -> Option<usize>; fn prove<MT: MTConfig<Leaf = [F]>, S: CryptographicSponge>( namespace: NameSpace, param: &Self::LDTParameters, transcript: &mut Transcript<MT, S, F>, codewords: &[MsgRoundRef], ) -> Result<(), Error> where MT::InnerDigest: Absorb; fn register_iop_structure<MT: MTConfig<Leaf = [F]>, S: CryptographicSponge>( namespace: NameSpace, param: &Self::LDTParameters, num_rs_oracles: usize, transcript: &mut SimulationTranscript<MT, S, F>, ) where MT::InnerDigest: Absorb; fn query_and_decide<S: CryptographicSponge, O: RoundOracle<F>>( namespace: NameSpace, param: &Self::LDTParameters, sponge: &mut S, codewords: &[MsgRoundRef], transcript_messages: &mut MessagesCollection<F, O>, ) -> Result<(), Error>; } pub struct NoLDT<F: PrimeField + Absorb> { _do_nothing: PhantomData<F>, } impl<F: PrimeField + Absorb> NoLDT<F> { pub fn parameter( evaluation_domain: Radix2CosetDomain<F>, localization_parameter: usize, ) -> (Radix2CosetDomain<F>, usize) { (evaluation_domain, localization_parameter) } } impl<F: PrimeField + Absorb> LDT<F> for NoLDT<F> { type LDTParameters = Option<(Radix2CosetDomain<F>, usize)>; fn prove<MT: MTConfig<Leaf = [F]>, S: CryptographicSponge>( _namespace: NameSpace, _param: &Self::LDTParameters, _transcript: &mut Transcript<MT, S, F>, _codewords: &[MsgRoundRef], ) -> Result<(), Error> where 
MT::InnerDigest: Absorb, { Ok(()) } fn register_iop_structure<MT: MTConfig<Leaf = [F]>, S: CryptographicSponge>( _namespace: NameSpace, _param: &Self::LDTParameters, _num_codewords_oracles: usize, _transcript: &mut SimulationTranscript<MT, S, F>, ) where MT::InnerDigest: Absorb, { } fn query_and_decide<S: CryptographicSponge, O: RoundOracle<F>>( _namespace: NameSpace, _param: &Self::LDTParameters, _sponge: &mut S, codewords: &[MsgRoundRef], transcript_messages: &mut MessagesCollection<F, O>, ) -> Result<(), Error> { let no_rs_code = codewords.iter().all(|round| { transcript_messages .get_prover_round_info(*round) .num_reed_solomon_codes_oracles() == 0 }); assert!( no_rs_code, "NoLDT enforces that main protocol does not send any RS code." ); Ok(()) } fn codeword_domain(_param: &Self::LDTParameters) -> Option<Radix2CosetDomain<F>> { None } fn localization_param(_param: &Self::LDTParameters) -> Option<usize> { None } }
use ark_std::marker::PhantomData; use ark_crypto_primitives::merkle_tree::Config as MTConfig; use ark_ff::PrimeField; use ark_ldt::domain::Radix2CosetDomain; use ark_sponge::{Absorb, CryptographicSponge}; use crate::{ bcs::{simulation_transcript::SimulationTranscript, transcript::Transcript}, iop::{ bookkeeper::NameSpace, message::{MessagesCollection, MsgRoundRef}, oracles::RoundOracle, }, Error, }; #[cfg(feature = "r1cs")] pub mod constraints; pub mod rl_ldt; pub trait LDT<F: PrimeField + Absorb> { type LDTParameters: Clone; fn codeword_domain(param: &Self::LDTParameters) -> Option<Radix2CosetDomain<F>>; fn localization_param(param: &Self::LDTParameters) -> Option<usize>; fn prove<MT: MTConfig<Leaf = [F]>, S: CryptographicSponge>( namespace: NameSpace, param: &Self::LDTParameters, transcript: &mut Transcript<MT, S, F>, codewords: &[MsgRoundRef], ) -> Result<(), Error> where MT::InnerDigest: Absorb; fn register_iop_structure<MT: MTConfig<Leaf = [F]>, S: CryptographicSponge>( namespace: NameSpace, param: &Self::LDTParameters, num_rs_oracles: usize, transcript: &mut SimulationTranscript<MT, S, F>, ) where MT::InnerDigest: Absorb; fn query_and_decide<S: CryptographicSponge, O: RoundOracle<F>>( namespace: NameSpace, param: &Self::LDTParameters, sponge: &mut S, codewords: &[MsgRoundRef], transcript_messages: &mut MessagesCollection<F, O>, ) -> Result<(), Error>; } pub struct NoLDT<F: PrimeField + Absorb> { _do_nothing: PhantomData<F>, } impl<F: PrimeField + Absorb> NoLDT<F> {
} impl<F: PrimeField + Absorb> LDT<F> for NoLDT<F> { type LDTParameters = Option<(Radix2CosetDomain<F>, usize)>; fn prove<MT: MTConfig<Leaf = [F]>, S: CryptographicSponge>( _namespace: NameSpace, _param: &Self::LDTParameters, _transcript: &mut Transcript<MT, S, F>, _codewords: &[MsgRoundRef], ) -> Result<(), Error> where MT::InnerDigest: Absorb, { Ok(()) } fn register_iop_structure<MT: MTConfig<Leaf = [F]>, S: CryptographicSponge>( _namespace: NameSpace, _param: &Self::LDTParameters, _num_codewords_oracles: usize, _transcript: &mut SimulationTranscript<MT, S, F>, ) where MT::InnerDigest: Absorb, { } fn query_and_decide<S: CryptographicSponge, O: RoundOracle<F>>( _namespace: NameSpace, _param: &Self::LDTParameters, _sponge: &mut S, codewords: &[MsgRoundRef], transcript_messages: &mut MessagesCollection<F, O>, ) -> Result<(), Error> { let no_rs_code = codewords.iter().all(|round| { transcript_messages .get_prover_round_info(*round) .num_reed_solomon_codes_oracles() == 0 }); assert!( no_rs_code, "NoLDT enforces that main protocol does not send any RS code." ); Ok(()) } fn codeword_domain(_param: &Self::LDTParameters) -> Option<Radix2CosetDomain<F>> { None } fn localization_param(_param: &Self::LDTParameters) -> Option<usize> { None } }
pub fn parameter( evaluation_domain: Radix2CosetDomain<F>, localization_parameter: usize, ) -> (Radix2CosetDomain<F>, usize) { (evaluation_domain, localization_parameter) }
function_block-full_function
[ { "content": "/// The verifier for public coin IOP has two phases. This is intended to be\n\n/// used as an endpoint protocol. Any subprotocol does not need to implement\n\n/// this trait. Any implementation of this trait can be transformed to SNARG by\n\n/// BCS.\n\n/// * **Commit Phase**: Verifier send messa...
Rust
src/flash/fcfg_b0_ssize0.rs
jeandudey/cc13x2-rs
215918099301ec75e9dfad531f5cf46e13077a39
#[doc = "Reader of register FCFG_B0_SSIZE0"] pub type R = crate::R<u32, super::FCFG_B0_SSIZE0>; #[doc = "Writer for register FCFG_B0_SSIZE0"] pub type W = crate::W<u32, super::FCFG_B0_SSIZE0>; #[doc = "Register FCFG_B0_SSIZE0 `reset()`'s with value 0x002c_0008"] impl crate::ResetValue for super::FCFG_B0_SSIZE0 { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0x002c_0008 } } #[doc = "Reader of field `RESERVED28`"] pub type RESERVED28_R = crate::R<u8, u8>; #[doc = "Write proxy for field `RESERVED28`"] pub struct RESERVED28_W<'a> { w: &'a mut W, } impl<'a> RESERVED28_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x0f << 28)) | (((value as u32) & 0x0f) << 28); self.w } } #[doc = "Reader of field `B0_NUM_SECTORS`"] pub type B0_NUM_SECTORS_R = crate::R<u16, u16>; #[doc = "Write proxy for field `B0_NUM_SECTORS`"] pub struct B0_NUM_SECTORS_W<'a> { w: &'a mut W, } impl<'a> B0_NUM_SECTORS_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u16) -> &'a mut W { self.w.bits = (self.w.bits & !(0x0fff << 16)) | (((value as u32) & 0x0fff) << 16); self.w } } #[doc = "Reader of field `RESERVED4`"] pub type RESERVED4_R = crate::R<u16, u16>; #[doc = "Write proxy for field `RESERVED4`"] pub struct RESERVED4_W<'a> { w: &'a mut W, } impl<'a> RESERVED4_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u16) -> &'a mut W { self.w.bits = (self.w.bits & !(0x0fff << 4)) | (((value as u32) & 0x0fff) << 4); self.w } } #[doc = "Reader of field `B0_SECT_SIZE`"] pub type B0_SECT_SIZE_R = crate::R<u8, u8>; #[doc = "Write proxy for field `B0_SECT_SIZE`"] pub struct B0_SECT_SIZE_W<'a> { w: &'a mut W, } impl<'a> B0_SECT_SIZE_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !0x0f) | ((value 
as u32) & 0x0f); self.w } } impl R { #[doc = "Bits 28:31 - 31:28\\] Internal. Only to be used through TI provided API."] #[inline(always)] pub fn reserved28(&self) -> RESERVED28_R { RESERVED28_R::new(((self.bits >> 28) & 0x0f) as u8) } #[doc = "Bits 16:27 - 27:16\\] Internal. Only to be used through TI provided API."] #[inline(always)] pub fn b0_num_sectors(&self) -> B0_NUM_SECTORS_R { B0_NUM_SECTORS_R::new(((self.bits >> 16) & 0x0fff) as u16) } #[doc = "Bits 4:15 - 15:4\\] Internal. Only to be used through TI provided API."] #[inline(always)] pub fn reserved4(&self) -> RESERVED4_R { RESERVED4_R::new(((self.bits >> 4) & 0x0fff) as u16) } #[doc = "Bits 0:3 - 3:0\\] Internal. Only to be used through TI provided API."] #[inline(always)] pub fn b0_sect_size(&self) -> B0_SECT_SIZE_R { B0_SECT_SIZE_R::new((self.bits & 0x0f) as u8) } } impl W { #[doc = "Bits 28:31 - 31:28\\] Internal. Only to be used through TI provided API."] #[inline(always)] pub fn reserved28(&mut self) -> RESERVED28_W { RESERVED28_W { w: self } } #[doc = "Bits 16:27 - 27:16\\] Internal. Only to be used through TI provided API."] #[inline(always)] pub fn b0_num_sectors(&mut self) -> B0_NUM_SECTORS_W { B0_NUM_SECTORS_W { w: self } } #[doc = "Bits 4:15 - 15:4\\] Internal. Only to be used through TI provided API."] #[inline(always)] pub fn reserved4(&mut self) -> RESERVED4_W { RESERVED4_W { w: self } } #[doc = "Bits 0:3 - 3:0\\] Internal. Only to be used through TI provided API."] #[inline(always)] pub fn b0_sect_size(&mut self) -> B0_SECT_SIZE_W { B0_SECT_SIZE_W { w: self } } }
#[doc = "Reader of register FCFG_B0_SSIZE0"] pub type R = crate::R<u32, super::FCFG_B0_SSIZE0>; #[doc = "Writer for register FCFG_B0_SSIZE0"] pub type W = crate::W<u32, super::FCFG_B0_SSIZE0>; #[doc = "Register FCFG_B0_SSIZE0 `reset()`'s with value 0x002c_0008"] impl crate::ResetValue for super::FCFG_B0_SSIZE0 { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0x002c_0008 } } #[doc = "Reader of field `RESERVED28`"] pub type RESERVED28_R = crate::R<u8, u8>; #[doc = "Write proxy for field `RESERVED28`"] pub struct RESERVED28_W<'a> { w: &'a mut W, } impl<'a> RESERVED28_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x0f << 28)) | (((value as u32) & 0x0f) << 28); self.w } } #[doc = "Reader of field `B0_NUM_SECTORS`"] pub type B0_NUM_SECTORS_R = crate::R<u16, u16>; #[doc = "Write proxy for field `B0_NUM_SECTORS`"] pub struct B0_NUM_SECTORS_W<'a> { w: &'a mut W, } impl<'a> B0_NUM_SECTORS_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u16) -> &'a mut W { self.w.bits = (self.w.bits & !(0x0fff << 16)) | (((value as u32) & 0x0fff) << 16); self.w } } #[doc = "Reader of field `RESERVED4`"] pub type RESERVED4_R = crate::R<u16, u16>; #[doc = "Write proxy for field `RESERVED4`"] pub struct RESERVED4_W<'a> { w: &'a mut W, } impl<'a> RESERVED4_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u16) -> &'a mut W { self.w.bits = (self.w.bits & !(0x0fff << 4)) | (((value as u32) & 0x0fff) << 4); self.w } } #[doc = "Reader of field `B0_SECT_SIZE`"] pub type B0_SECT_SIZE_R = crate::R<u8, u8>; #[doc = "Write proxy for field `B0_SECT_SIZE`"] pub stru
gh TI provided API."] #[inline(always)] pub fn b0_num_sectors(&mut self) -> B0_NUM_SECTORS_W { B0_NUM_SECTORS_W { w: self } } #[doc = "Bits 4:15 - 15:4\\] Internal. Only to be used through TI provided API."] #[inline(always)] pub fn reserved4(&mut self) -> RESERVED4_W { RESERVED4_W { w: self } } #[doc = "Bits 0:3 - 3:0\\] Internal. Only to be used through TI provided API."] #[inline(always)] pub fn b0_sect_size(&mut self) -> B0_SECT_SIZE_W { B0_SECT_SIZE_W { w: self } } }
ct B0_SECT_SIZE_W<'a> { w: &'a mut W, } impl<'a> B0_SECT_SIZE_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !0x0f) | ((value as u32) & 0x0f); self.w } } impl R { #[doc = "Bits 28:31 - 31:28\\] Internal. Only to be used through TI provided API."] #[inline(always)] pub fn reserved28(&self) -> RESERVED28_R { RESERVED28_R::new(((self.bits >> 28) & 0x0f) as u8) } #[doc = "Bits 16:27 - 27:16\\] Internal. Only to be used through TI provided API."] #[inline(always)] pub fn b0_num_sectors(&self) -> B0_NUM_SECTORS_R { B0_NUM_SECTORS_R::new(((self.bits >> 16) & 0x0fff) as u16) } #[doc = "Bits 4:15 - 15:4\\] Internal. Only to be used through TI provided API."] #[inline(always)] pub fn reserved4(&self) -> RESERVED4_R { RESERVED4_R::new(((self.bits >> 4) & 0x0fff) as u16) } #[doc = "Bits 0:3 - 3:0\\] Internal. Only to be used through TI provided API."] #[inline(always)] pub fn b0_sect_size(&self) -> B0_SECT_SIZE_R { B0_SECT_SIZE_R::new((self.bits & 0x0f) as u8) } } impl W { #[doc = "Bits 28:31 - 31:28\\] Internal. Only to be used through TI provided API."] #[inline(always)] pub fn reserved28(&mut self) -> RESERVED28_W { RESERVED28_W { w: self } } #[doc = "Bits 16:27 - 27:16\\] Internal. Only to be used throu
random
[ { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Res...
Rust
chain-impl-mockchain/src/stake/role.rs
Emurgo/rust-cardano
31b508cbabcee2ef4ff9596abe2b04aede34e1a0
use crate::key::{deserialize_public_key, serialize_public_key, Hash}; use crate::leadership::genesis::GenesisPraosLeader; use chain_core::mempack::{ReadBuf, ReadError, Readable}; use chain_core::property; use chain_crypto::{Ed25519, PublicKey}; #[derive(Debug, Clone, PartialEq, Eq)] pub struct StakeKeyInfo { pub(crate) pool: Option<StakePoolId>, } #[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct StakePoolId(Hash); #[derive(Debug, Clone, PartialEq, Eq)] pub struct StakePoolInfo { pub serial: u128, pub owners: Vec<StakeKeyId>, pub initial_key: GenesisPraosLeader, } impl StakePoolInfo { pub fn to_id(&self) -> StakePoolId { let mut v = Vec::new(); v.extend_from_slice(&self.serial.to_be_bytes()); for o in &self.owners { v.extend_from_slice(o.0.as_ref()) } v.extend_from_slice(self.initial_key.kes_public_key.as_ref()); v.extend_from_slice(self.initial_key.vrf_public_key.as_ref()); StakePoolId(Hash::hash_bytes(&v)) } } #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct StakeKeyId(pub(crate) PublicKey<Ed25519>); impl From<PublicKey<Ed25519>> for StakeKeyId { fn from(key: PublicKey<Ed25519>) -> Self { StakeKeyId(key) } } impl property::Serialize for StakeKeyId { type Error = std::io::Error; fn serialize<W: std::io::Write>(&self, writer: W) -> Result<(), Self::Error> { serialize_public_key(&self.0, writer) } } impl Readable for StakeKeyId { fn read<'a>(reader: &mut ReadBuf<'a>) -> Result<Self, ReadError> { deserialize_public_key(reader).map(StakeKeyId) } } impl property::Serialize for StakePoolId { type Error = std::io::Error; fn serialize<W: std::io::Write>(&self, mut writer: W) -> Result<(), Self::Error> { writer.write_all(self.0.as_ref()) } } impl Readable for StakePoolId { fn read<'a>(buf: &mut ReadBuf<'a>) -> Result<Self, ReadError> { Hash::read(buf).map(StakePoolId) } } impl property::Serialize for GenesisPraosLeader { type Error = std::io::Error; fn serialize<W: std::io::Write>(&self, mut writer: W) -> Result<(), 
Self::Error> { serialize_public_key(&self.kes_public_key, &mut writer)?; serialize_public_key(&self.vrf_public_key, &mut writer)?; Ok(()) } } impl Readable for GenesisPraosLeader { fn read<'a>(reader: &mut ReadBuf<'a>) -> Result<Self, ReadError> { let kes_public_key = deserialize_public_key(reader)?; let vrf_public_key = deserialize_public_key(reader)?; Ok(GenesisPraosLeader { vrf_public_key, kes_public_key, }) } } impl property::Serialize for StakePoolInfo { type Error = std::io::Error; fn serialize<W: std::io::Write>(&self, writer: W) -> Result<(), Self::Error> { assert!(self.owners.len() < 256); use chain_core::packer::Codec; let mut codec = Codec::new(writer); codec.put_u128(self.serial)?; codec.put_u8(self.owners.len() as u8)?; for o in &self.owners { serialize_public_key(&o.0, &mut codec)?; } self.initial_key.serialize(&mut codec)?; Ok(()) } } impl Readable for StakePoolInfo { fn read<'a>(buf: &mut ReadBuf<'a>) -> Result<Self, ReadError> { let serial = buf.get_u128()?; let owner_nb = buf.get_u8()? 
as usize; let mut owners = Vec::with_capacity(owner_nb); for _ in 0..owner_nb { let pub_key = deserialize_public_key(buf)?; owners.push(StakeKeyId(pub_key)) } let initial_key = GenesisPraosLeader::read(buf)?; Ok(StakePoolInfo { serial, owners, initial_key, }) } } impl From<Hash> for StakePoolId { fn from(hash: Hash) -> Self { StakePoolId(hash) } } impl From<chain_crypto::Blake2b256> for StakePoolId { fn from(hash: chain_crypto::Blake2b256) -> Self { StakePoolId(hash.into()) } } impl std::fmt::Display for StakePoolId { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { std::fmt::Display::fmt(&self.0, f) } } #[cfg(test)] mod test { use super::*; use chain_crypto::KeyPair; use quickcheck::{Arbitrary, Gen}; impl Arbitrary for StakeKeyId { fn arbitrary<G: Gen>(g: &mut G) -> Self { let kp: KeyPair<Ed25519> = Arbitrary::arbitrary(g); StakeKeyId::from(kp.into_keys().1) } } impl Arbitrary for StakePoolId { fn arbitrary<G: Gen>(g: &mut G) -> Self { StakePoolId(Arbitrary::arbitrary(g)) } } }
use crate::key::{deserialize_public_key, serialize_public_key, Hash}; use crate::leadership::genesis::GenesisPraosLeader; use chain_core::mempack::{ReadBuf, ReadError, Readable}; use chain_core::property; use chain_crypto::{Ed25519, PublicKey}; #[derive(Debug, Clone, PartialEq, Eq)] pub struct StakeKeyInfo { pub(crate) pool: Option<StakePoolId>, } #[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct StakePoolId(Hash); #[derive(Debug, Clone, PartialEq, Eq)] pub struct StakePoolInfo { pub serial: u128, pub owners: Vec<StakeKeyId>, pub initial_key: GenesisPraosLeader, } impl StakePoolInfo { pub fn to_id(&self) -> StakePoolId { let mut v = Vec::new(); v.extend_from_slice(&self.serial.to_be_bytes()); for o in &self.owners { v.extend_from_slice(o.0.as_ref()) } v.extend_from_slice(self.initial_key.kes_public_key.as_ref()); v.extend_from_slice(self.initial_key.vrf_public_key.as_ref()); StakePoolId(Hash::hash_bytes(&v)) } } #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct StakeKeyId(pub(crate) PublicKey<Ed25519>); impl From<PublicKey<Ed25519>> for StakeKeyId { fn from(key: PublicKey<Ed25519>) -> Self { StakeKeyId(key) } } impl property::Serialize for StakeKeyId { type Error = std::io::Error; fn serialize<W: std::io::Write>(&self, writer: W) -> Result<(), Self::Error> { serialize_public_key(&self.0, writer) } } impl Readable for StakeKeyId { fn read<'a>(reader: &mut ReadBuf<'a>) -> Result<Self, ReadError> { deserialize_public_key(reader).map(StakeKeyId) } } impl property::Serialize for StakePoolId { type Error = std::io::Error; fn serialize<W: std::io::Write>(&self, mut writer: W) -> Result<(), Self::Error> { writer.write_all(self.0.as_ref()) } } impl Readable for StakePoolId { fn read<'a>(buf: &mut ReadBuf<'a>) -> Result<Self, ReadError> { Hash::read(buf).map(StakePoolId) } } impl property::Serialize for GenesisPraosLeader { type Error = std::io::Error; fn serialize<W: std::io::Write>(&self, mut writer: W) -> Result<(), 
Self::Error> { serialize_public_key(&self.kes_public_key, &mut writer)?; serialize_public_key(&self.vrf_public_key, &mut writer)?; Ok(()) } } impl Readable for GenesisPraosLeader { fn read<'a>(reader: &mut ReadBuf<'a>) -> Result<Self, ReadError> { let kes_public_key = deserialize_public_key(reader)?;
} impl property::Serialize for StakePoolInfo { type Error = std::io::Error; fn serialize<W: std::io::Write>(&self, writer: W) -> Result<(), Self::Error> { assert!(self.owners.len() < 256); use chain_core::packer::Codec; let mut codec = Codec::new(writer); codec.put_u128(self.serial)?; codec.put_u8(self.owners.len() as u8)?; for o in &self.owners { serialize_public_key(&o.0, &mut codec)?; } self.initial_key.serialize(&mut codec)?; Ok(()) } } impl Readable for StakePoolInfo { fn read<'a>(buf: &mut ReadBuf<'a>) -> Result<Self, ReadError> { let serial = buf.get_u128()?; let owner_nb = buf.get_u8()? as usize; let mut owners = Vec::with_capacity(owner_nb); for _ in 0..owner_nb { let pub_key = deserialize_public_key(buf)?; owners.push(StakeKeyId(pub_key)) } let initial_key = GenesisPraosLeader::read(buf)?; Ok(StakePoolInfo { serial, owners, initial_key, }) } } impl From<Hash> for StakePoolId { fn from(hash: Hash) -> Self { StakePoolId(hash) } } impl From<chain_crypto::Blake2b256> for StakePoolId { fn from(hash: chain_crypto::Blake2b256) -> Self { StakePoolId(hash.into()) } } impl std::fmt::Display for StakePoolId { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { std::fmt::Display::fmt(&self.0, f) } } #[cfg(test)] mod test { use super::*; use chain_crypto::KeyPair; use quickcheck::{Arbitrary, Gen}; impl Arbitrary for StakeKeyId { fn arbitrary<G: Gen>(g: &mut G) -> Self { let kp: KeyPair<Ed25519> = Arbitrary::arbitrary(g); StakeKeyId::from(kp.into_keys().1) } } impl Arbitrary for StakePoolId { fn arbitrary<G: Gen>(g: &mut G) -> Self { StakePoolId(Arbitrary::arbitrary(g)) } } }
let vrf_public_key = deserialize_public_key(reader)?; Ok(GenesisPraosLeader { vrf_public_key, kes_public_key, }) }
function_block-function_prefix_line
[ { "content": "/// Trait identifying the block identifier type.\n\npub trait BlockId: Eq + Ord + Clone + Debug + Hash + Serialize + Deserialize {\n\n /// A special ID used to denote a non-existent block (e.g. the\n\n /// parent of the first block).\n\n fn zero() -> Self;\n\n}\n\n\n", "file_path": "c...
Rust
sdk/cosmos/src/clients/database_client.rs
duysqubix/azure-sdk-for-rust
fe07e29f11e95acbf830289499eb373b4efa0006
use super::*; use crate::authorization_policy::CosmosContext; use crate::operations::*; use crate::resources::ResourceType; use crate::{requests, ReadonlyString}; use azure_core::pipeline::Pipeline; use azure_core::prelude::Continuation; use azure_core::{AddAsHeader, Context, HttpClient, PipelineContext}; use futures::stream::unfold; use futures::Stream; #[derive(Debug, Clone)] pub struct DatabaseClient { cosmos_client: CosmosClient, database_name: ReadonlyString, } impl DatabaseClient { pub(crate) fn new<S: Into<ReadonlyString>>( cosmos_client: CosmosClient, database_name: S, ) -> Self { Self { cosmos_client, database_name: database_name.into(), } } pub fn cosmos_client(&self) -> &CosmosClient { &self.cosmos_client } pub fn database_name(&self) -> &str { &self.database_name } pub async fn get_database( &self, ctx: Context, options: GetDatabaseOptions, ) -> crate::Result<GetDatabaseResponse> { let mut request = self .cosmos_client() .prepare_request_pipeline(&format!("dbs/{}", self.database_name()), http::Method::GET); let mut pipeline_context = PipelineContext::new(ctx, ResourceType::Databases.into()); options.decorate_request(&mut request)?; let response = self .pipeline() .send(&mut pipeline_context, &mut request) .await? .validate(http::StatusCode::OK) .await?; Ok(GetDatabaseResponse::try_from(response).await?) 
} pub fn list_collections(&self) -> requests::ListCollectionsBuilder<'_> { requests::ListCollectionsBuilder::new(self) } pub fn delete_database(&self) -> requests::DeleteDatabaseBuilder<'_> { requests::DeleteDatabaseBuilder::new(self) } pub async fn create_collection<S: AsRef<str>>( &self, ctx: Context, collection_name: S, options: CreateCollectionOptions, ) -> crate::Result<CreateCollectionResponse> { let mut request = self.cosmos_client().prepare_request_pipeline( &format!("dbs/{}/colls", self.database_name()), http::Method::POST, ); let mut pipeline_context = PipelineContext::new(ctx, ResourceType::Collections.into()); options.decorate_request(&mut request, collection_name.as_ref())?; let response = self .pipeline() .send(&mut pipeline_context, &mut request) .await? .validate(http::StatusCode::CREATED) .await?; Ok(CreateCollectionResponse::try_from(response).await?) } pub fn list_users( &self, ctx: Context, options: ListUsersOptions, ) -> impl Stream<Item = crate::Result<ListUsersResponse>> + '_ { macro_rules! r#try { ($expr:expr $(,)?) 
=> { match $expr { Result::Ok(val) => val, Result::Err(err) => { return Some((Err(err.into()), State::Done)); } } }; } #[derive(Debug, Clone, PartialEq)] enum State { Init, Continuation(String), Done, } unfold(State::Init, move |state: State| { let this = self.clone(); let ctx = ctx.clone(); let options = options.clone(); async move { let response = match state { State::Init => { let mut request = this.cosmos_client().prepare_request_pipeline( &format!("dbs/{}/users", this.database_name()), http::Method::GET, ); let mut pipeline_context = PipelineContext::new(ctx.clone(), ResourceType::Users.into()); r#try!(options.decorate_request(&mut request)); let response = r#try!( this.pipeline() .send(&mut pipeline_context, &mut request) .await ); let response = r#try!(response.validate(http::StatusCode::OK).await); ListUsersResponse::try_from(response).await } State::Continuation(continuation_token) => { let continuation = Continuation::new(continuation_token.as_str()); let mut request = this.cosmos_client().prepare_request_pipeline( &format!("dbs/{}/users", self.database_name()), http::Method::GET, ); let mut pipeline_context = PipelineContext::new(ctx.clone(), ResourceType::Users.into()); r#try!(options.decorate_request(&mut request)); r#try!(continuation.add_as_header2(&mut request)); let response = r#try!( this.pipeline() .send(&mut pipeline_context, &mut request) .await ); let response = r#try!(response.validate(http::StatusCode::OK).await); ListUsersResponse::try_from(response).await } State::Done => return None, }; let response = r#try!(response); let next_state = response .continuation_token .clone() .map(State::Continuation) .unwrap_or_else(|| State::Done); Some((Ok(response), next_state)) } }) } pub fn into_collection_client<S: Into<ReadonlyString>>( self, collection_name: S, ) -> CollectionClient { CollectionClient::new(self, collection_name) } pub fn into_user_client<S: Into<ReadonlyString>>(self, user_name: S) -> UserClient { UserClient::new(self, user_name) } 
pub(crate) fn prepare_request_with_database_name( &self, method: http::Method, ) -> http::request::Builder { self.cosmos_client().prepare_request( &format!("dbs/{}", self.database_name()), method, ResourceType::Databases, ) } pub(crate) fn http_client(&self) -> &dyn HttpClient { self.cosmos_client().http_client() } fn pipeline(&self) -> &Pipeline<CosmosContext> { self.cosmos_client.pipeline() } }
use super::*; use crate::authorization_policy::CosmosContext; use crate::operations::*; use crate::resources::ResourceType; use crate::{requests, ReadonlyString}; use azure_core::pipeline::Pipeline; use azure_core::prelude::Continuation; use azure_core::{AddAsHeader, Context, HttpClient, PipelineContext}; use futures::stream::unfold; use futures::Stream; #[derive(Debug, Clone)] pub struct DatabaseClient { cosmos_client: CosmosClient, database_name: ReadonlyString, } impl DatabaseClient { pub(crate) fn new<S: Into<ReadonlyString>>( cosmos_client: CosmosClient, database_name: S, ) -> Self { Self { cosmos_client, database_name: database_name.into(), } } pub fn cosmos_client(&self) -> &CosmosClient { &self.cosmos_client } pub fn database_name(&self) -> &str { &self.database_name } pub async fn get_database( &self, ctx: Context, options: GetDatabaseOptions, ) -> crate::Result<GetDatabaseResponse> { let mut request = self .cosmos_client() .prepare_request_pipeline(&format!("dbs/{}", self.database_name()), http::Method::GET); let mut pipeline_context = PipelineContext::new(ctx, ResourceType::Databases.into()); options.decorate_request(&mut request)?; let response = self .pipeline() .send(&mut pipeline_context, &mut request) .await? .validate(http::StatusCode::OK) .await?; Ok(GetDatabaseResponse::try_from(response).await?) 
} pub fn list_collections(&self) -> requests::ListCollectionsBuilder<'_> { requests::ListCollectionsBuilder::new(self) } pub fn delete_database(&self) -> requests::DeleteDatabaseBuilder<'_> { requests::DeleteDatabaseBuilder::new(self) } pub async fn create_collection<S: AsRef<str>>( &self, ctx: Context, collection_name: S, options: CreateCollectionOptions, ) -> crate::Result<CreateCollectionResponse> { let mut request = self.cosmos_client().prepare_request_pipeline( &format!("dbs/{}/colls", self.database_name()), http::Method::POST, ); let mut pipeline_context = PipelineContext::new(ctx, ResourceType::Collections.into()); options.decorate_request(&mut request, collection_name.as_ref())?; let response = self .pipeline() .send(&mut pipeline_context, &mut request) .await? .validate(http::StatusCode::CREATED) .await?; Ok(CreateCollectionResponse::try_from(response).await?) } pub fn list_users( &self, ctx: Context, options: ListUsersOptions, ) -> impl Stream<Item = crate::Result<ListUsersResponse>> + '_ { macro_rules! r#try { ($expr:expr $(,)?) => { match $expr { Result::Ok(val) => val, Result::Err(err) => { return Some((Err(err.into()), State::Done)); } } }; } #[derive(Debug, Clone, PartialEq)] enum State { Init, Continuation(String), Done, }
} pub fn into_collection_client<S: Into<ReadonlyString>>( self, collection_name: S, ) -> CollectionClient { CollectionClient::new(self, collection_name) } pub fn into_user_client<S: Into<ReadonlyString>>(self, user_name: S) -> UserClient { UserClient::new(self, user_name) } pub(crate) fn prepare_request_with_database_name( &self, method: http::Method, ) -> http::request::Builder { self.cosmos_client().prepare_request( &format!("dbs/{}", self.database_name()), method, ResourceType::Databases, ) } pub(crate) fn http_client(&self) -> &dyn HttpClient { self.cosmos_client().http_client() } fn pipeline(&self) -> &Pipeline<CosmosContext> { self.cosmos_client.pipeline() } }
unfold(State::Init, move |state: State| { let this = self.clone(); let ctx = ctx.clone(); let options = options.clone(); async move { let response = match state { State::Init => { let mut request = this.cosmos_client().prepare_request_pipeline( &format!("dbs/{}/users", this.database_name()), http::Method::GET, ); let mut pipeline_context = PipelineContext::new(ctx.clone(), ResourceType::Users.into()); r#try!(options.decorate_request(&mut request)); let response = r#try!( this.pipeline() .send(&mut pipeline_context, &mut request) .await ); let response = r#try!(response.validate(http::StatusCode::OK).await); ListUsersResponse::try_from(response).await } State::Continuation(continuation_token) => { let continuation = Continuation::new(continuation_token.as_str()); let mut request = this.cosmos_client().prepare_request_pipeline( &format!("dbs/{}/users", self.database_name()), http::Method::GET, ); let mut pipeline_context = PipelineContext::new(ctx.clone(), ResourceType::Users.into()); r#try!(options.decorate_request(&mut request)); r#try!(continuation.add_as_header2(&mut request)); let response = r#try!( this.pipeline() .send(&mut pipeline_context, &mut request) .await ); let response = r#try!(response.validate(http::StatusCode::OK).await); ListUsersResponse::try_from(response).await } State::Done => return None, }; let response = r#try!(response); let next_state = response .continuation_token .clone() .map(State::Continuation) .unwrap_or_else(|| State::Done); Some((Ok(response), next_state)) } })
call_expression
[]
Rust
src/process/intermediate.rs
Ian-Yy/youki
d209d75512631d676ca0409a19b893d4314f3830
use crate::{namespaces::Namespaces, process::channel, process::fork}; use anyhow::{Context, Error, Result}; use cgroups::common::CgroupManager; use nix::unistd::{Gid, Pid, Uid}; use oci_spec::runtime::{LinuxNamespaceType, LinuxResources}; use procfs::process::Process; use std::convert::From; use super::args::ContainerArgs; use super::init::container_init; pub fn container_intermediate( args: ContainerArgs, receiver_from_main: &mut channel::ReceiverFromMain, sender_to_main: &mut channel::SenderIntermediateToMain, ) -> Result<()> { let command = &args.syscall; let spec = &args.spec; let linux = spec.linux().as_ref().context("no linux in spec")?; let namespaces = Namespaces::from(linux.namespaces().as_ref()); if let Some(user_namespace) = namespaces.get(LinuxNamespaceType::User) { namespaces .unshare_or_setns(user_namespace) .with_context(|| format!("Failed to enter user namespace: {:?}", user_namespace))?; if user_namespace.path().is_none() { log::debug!("creating new user namespace"); prctl::set_dumpable(true).unwrap(); sender_to_main.identifier_mapping_request()?; receiver_from_main.wait_for_mapping_ack()?; prctl::set_dumpable(false).unwrap(); } command.set_id(Uid::from_raw(0), Gid::from_raw(0)).context( "Failed to configure uid and gid root in the beginning of a new user namespace", )?; } let proc = spec.process().as_ref().context("no process in spec")?; if let Some(rlimits) = proc.rlimits() { for rlimit in rlimits { command.set_rlimit(rlimit).context("failed to set rlimit")?; } } if let Some(pid_namespace) = namespaces.get(LinuxNamespaceType::Pid) { namespaces .unshare_or_setns(pid_namespace) .with_context(|| format!("Failed to enter pid namespace: {:?}", pid_namespace))?; } if args.rootless.is_none() { apply_cgroups( args.cgroup_manager.as_ref(), linux.resources().as_ref(), args.init, ) .context("failed to apply cgroups")? 
} let (sender_to_intermediate, receiver_from_init) = &mut channel::init_to_intermediate()?; let pid = fork::container_fork(|| { receiver_from_init .close() .context("Failed to close receiver in init process")?; container_init(args, sender_to_intermediate) })?; sender_to_intermediate .close() .context("Failed to close sender in the intermediate process")?; receiver_from_init .wait_for_init_ready() .context("Failed to wait for the child")?; sender_to_main .intermediate_ready(pid) .context("Failed to send child ready from intermediate process")?; Ok(()) } fn apply_cgroups<C: CgroupManager + ?Sized>( cmanager: &C, resources: Option<&LinuxResources>, init: bool, ) -> Result<(), Error> { let pid = Pid::from_raw(Process::myself()?.pid()); cmanager .add_task(pid) .with_context(|| format!("failed to add task {} to cgroup manager", pid))?; if let Some(resources) = resources { if init { let controller_opt = cgroups::common::ControllerOpt { resources, freezer_state: None, oom_score_adj: None, disable_oom_killer: false, }; cmanager .apply(&controller_opt) .context("failed to apply resource limits to cgroup")?; } } Ok(()) } #[cfg(test)] mod tests { use super::apply_cgroups; use anyhow::Result; use cgroups::test_manager::TestManager; use nix::unistd::Pid; use oci_spec::runtime::LinuxResources; use procfs::process::Process; #[test] fn apply_cgroup_init() -> Result<()> { let cmanager = TestManager::default(); let resources = LinuxResources::default(); apply_cgroups(&cmanager, Some(&resources), true)?; assert!(cmanager.get_add_task_args().len() == 1); assert_eq!( cmanager.get_add_task_args()[0], Pid::from_raw(Process::myself()?.pid()) ); assert!(cmanager.apply_called()); Ok(()) } #[test] fn apply_cgroup_tenant() -> Result<()> { let cmanager = TestManager::default(); let resources = LinuxResources::default(); apply_cgroups(&cmanager, Some(&resources), false)?; assert_eq!( cmanager.get_add_task_args()[0], Pid::from_raw(Process::myself()?.pid()) ); assert!(!cmanager.apply_called()); 
Ok(()) } #[test] fn apply_cgroup_no_resources() -> Result<()> { let cmanager = TestManager::default(); apply_cgroups(&cmanager, None, true)?; assert_eq!( cmanager.get_add_task_args()[0], Pid::from_raw(Process::myself()?.pid()) ); assert!(!cmanager.apply_called()); Ok(()) } }
use crate::{namespaces::Namespaces, process::channel, process::fork}; use anyhow::{Context, Error, Result}; use cgroups::common::CgroupManager; use nix::unistd::{Gid, Pid, Uid}; use oci_spec::runtime::{LinuxNamespaceType, LinuxResources}; use procfs::process::Process; use std::convert::From; use super::args::ContainerArgs; use super::init::container_init; pub fn container_intermediate( args: ContainerArgs, receiver_from_main: &mut channel::ReceiverFromMain, sender_to_main: &mut channel::SenderIntermediateToMain, ) -> Result<()> { let command = &args.syscall; let spec = &args.spec; let linux = spec.linux().as_ref().context("no linux in spec")?; let namespaces = Namespaces::from(linux.namespaces().as_ref()); if let Some(user_namespace) = namespaces.get(LinuxNamespaceType::User) { namespaces .unshare_or_setns(user_namespace) .with_context(|| format!("Failed to enter user namespace: {:?}", user_namespace))?; if user_namespace.path().is_none() { log::debug!("creating new user namespace"); prctl::set_dumpable(true).unwrap(); sender_to_main.identifier_mapping_request()?; receiver_from_main.wait_for_mapping_ack()?; prctl::set_dumpable(false).unwrap(); } command.set_id(Uid::from_raw(0), Gid::from_raw(0)).context( "Failed to configure uid and gid root in the beginning of a new user namespace", )?; } let proc = spec.process().as_ref().context("no process in spec")?; if let Some(rlimits) = proc.rlimits() { for rlimit in rlimits { command.set_rlimit(rlimit).context("failed to set rlimit")?; } } if let Some(pid_namespace) = namespaces.get(LinuxNamespaceType::Pid) { namespaces .unshare_or_setns(pid_namespace) .with_context(|| format!("Failed to enter pid namespace: {:?}", pid_namespace))?; } if args.rootless.is_none() { apply_cgroups( args.cgroup_manager.as_ref(), linux.resources().as_ref(), args.init, ) .context("failed to apply cgroups")? 
} let (sender_to_intermediate, receiver_from_init) = &mut channel::init_to_intermediate()?; let pid = fork::container_fork(|| { receiver_from_init .close() .context("Failed to close receiver in init process")?; container_init(args, sender_to_intermediate) })?; sender_to_intermediate .close() .context("Failed to close sender in the intermediate process")?; receiver_from_init .wait_for_init_ready() .context("Failed to wait for the child")?; sender_to_main .intermediate_ready(pid) .context("Failed to send child ready from intermediate process")?; Ok(()) } fn apply_cgroups<C: CgroupManager + ?Sized>( cmanager: &C, resources: Option<&LinuxResources>, init: bool, ) -> Result<(), Error> { let pid = Pid::from_raw(Process::myself()?.pid()); cmanager .add_task(pid) .with_context(|| format!("failed to add task {} to cgroup manager", pid))?; if let Some(resources) = resources { if init { let controller_opt = cgroups::common::ControllerOpt { resources, freezer_state: None, oom_score_adj: None, disable_oom_killer: false, }; cmanager .apply(&controller_opt) .context("failed to apply resource limits to cgroup")?; } } Ok(()) } #[cfg(test)] mod tests { use super::apply_cgroups; use anyhow::Result; use cgroups::test_manager::TestManager; use nix::unistd::Pid; use oci_spec::runtime::LinuxResources; use procfs::process::Process; #[test] fn apply_cgroup_init() -> Result<()> { let cmanager = TestManager::default(); let resources = LinuxResources::default(); apply_cgroups(&cmanager, Some(&resources), true)?; assert!(cmanager.get_add_task_args().len() == 1); assert_eq!( cmanager.get_add_task_args()[0], Pid::from_raw(Process::myself()?.pid()) ); assert!(cmanager.apply_called()); Ok(()) } #[test] fn apply_cgroup_tenant() -> Result<()> { let cmanager = TestManager::default();
#[test] fn apply_cgroup_no_resources() -> Result<()> { let cmanager = TestManager::default(); apply_cgroups(&cmanager, None, true)?; assert_eq!( cmanager.get_add_task_args()[0], Pid::from_raw(Process::myself()?.pid()) ); assert!(!cmanager.apply_called()); Ok(()) } }
let resources = LinuxResources::default(); apply_cgroups(&cmanager, Some(&resources), false)?; assert_eq!( cmanager.get_add_task_args()[0], Pid::from_raw(Process::myself()?.pid()) ); assert!(!cmanager.apply_called()); Ok(()) }
function_block-function_prefix_line
[ { "content": "pub fn init_to_intermediate() -> Result<(SenderInitToIntermediate, ReceiverFromInit)> {\n\n let (sender, receiver) = new_pipe()?;\n\n Ok((\n\n SenderInitToIntermediate { sender },\n\n ReceiverFromInit { receiver },\n\n ))\n\n}\n\n\n\npub struct SenderInitToIntermediate {\n\n...
Rust
examples/test.rs
AntonHermann/synth
74c53d72ee1a690cd055417e48e2c114b0e1061d
extern crate pitch_calc as pitch; extern crate portaudio; extern crate sample; extern crate synth; use portaudio as pa; use pitch::{Letter, LetterOctave}; use synth::Synth; pub type AudioSample = f32; pub type Input = AudioSample; pub type Output = AudioSample; const CHANNELS: i32 = 2; const FRAMES: u32 = 64; const SAMPLE_HZ: f64 = 44_100.0; fn main() { run().unwrap() } fn run() -> Result<(), pa::Error> { let mut synth = { use synth::{Point, Oscillator, oscillator, Envelope}; let amp_env = Envelope::from(vec!( Point::new(0.0 , 0.0 , 0.0), Point::new(0.01 , 1.0 , 0.0), Point::new(0.45 , 1.0 , 0.0), Point::new(0.81 , 0.8 , 0.0), Point::new(1.0 , 0.0 , 0.0), )); let freq_env = Envelope::from(vec!( Point::new(0.0 , 0.0 , 0.0), Point::new(0.00136 , 1.0 , 0.0), Point::new(0.015 , 0.02 , 0.0), Point::new(0.045 , 0.005 , 0.0), Point::new(0.1 , 0.0022 , 0.0), Point::new(0.35 , 0.0011 , 0.0), Point::new(1.0 , 0.0 , 0.0), )); let oscillator = Oscillator::new(oscillator::waveform::Square, amp_env, freq_env, ()); Synth::retrigger(()) .oscillator(oscillator) .duration(6000.0) .base_pitch(LetterOctave(Letter::C, 1).hz()) .loop_points(0.49, 0.51) .fade(500.0, 500.0) .num_voices(16) .volume(0.2) .detune(0.5) .spread(1.0) }; let note = LetterOctave(Letter::C, 1); let note_velocity = 1.0; synth.note_on(note, note_velocity); let note_duration = 4.0; let mut is_note_off = false; let mut timer: f64 = 0.0; let mut prev_time = None; let callback = move |pa::OutputStreamCallbackArgs { buffer, time, .. 
}| { let buffer: &mut [[f32; CHANNELS as usize]] = sample::slice::to_frame_slice_mut(buffer).unwrap(); sample::slice::equilibrium(buffer); synth.fill_slice(buffer, SAMPLE_HZ as f64); if timer < 6.0 { let last_time = prev_time.unwrap_or(time.current); let dt = time.current - last_time; timer += dt; prev_time = Some(time.current); if timer > note_duration { if !is_note_off { synth.note_off(note); is_note_off = true; } } pa::Continue } else { pa::Complete } }; let pa = try!(pa::PortAudio::new()); let settings = try!(pa.default_output_stream_settings::<f32>(CHANNELS, SAMPLE_HZ, FRAMES)); let mut stream = try!(pa.open_non_blocking_stream(settings, callback)); try!(stream.start()); while let Ok(true) = stream.is_active() { std::thread::sleep(std::time::Duration::from_millis(16)); } Ok(()) }
extern crate pitch_calc as pitch; extern crate portaudio; extern crate sample; extern crate synth; use portaudio as pa; use pitch::{Letter, LetterOctave}; use synth::Synth; pub type AudioSample = f32; pub type Input = AudioSample; pub type Output = AudioSample; const CHANNELS: i32 = 2; const FRAMES: u32 = 64; const SAMPLE_HZ: f64 = 44_100.0; fn main() { run().unwrap() } fn run() -> Result<(), pa::Error> { let mut synth = { use synth::{Point, Oscillator, oscillator, Envelope}; let amp_env = Envelope::from(vec!( Point::new(0.0 , 0.0 , 0.0), Point::new(0.01 , 1.0 , 0.0), Point::new(0.45 , 1.0 , 0.0), Point::new(0.81 , 0.8 , 0.0), Point::new(1.0 , 0.0 , 0.0), )); let freq_env = Envelope::from(vec!( Point::new(0.0 , 0.0 , 0.0), Point::new(0.00136 , 1.0 , 0.0), Point::new(0.015 , 0.02 , 0.0), Point::new(0.045 , 0.005 , 0.0), Point::new(0.1 , 0.0022 , 0.0), Point::new(0.35 , 0.0011 , 0.0), Point::new(1.0 , 0.0 , 0.0), )); let oscillator = Oscillator::new(oscillator::waveform::Square, amp_env, freq_env, ()); Synth::retrigger(()) .oscillator(oscillator) .duration(6000.0) .base_pitch(LetterOctave(Letter::C, 1).hz()) .loop_points(0.49, 0.51) .fade(500.0, 500.0) .num_voices(16) .volume(0.2) .detune(0.5) .spread(1.0) }; let note = LetterOctave(Letter::C, 1); let note_velocity = 1.0; synth.note_on(note, note_velocity); let note_duration = 4.0; let mut is_note_off = false; let mut timer: f64 = 0.0; let mut prev_time = None; let callback = move |pa::OutputStreamCallbackArgs { buffer, time, .. }| { let buffer: &mut [[f32; CHANNELS as usize]] = sample::slice::to_frame_slice_mut(buffer).unwrap(); sample::slice::equilibrium(buffer); synth.fill_slice(buffer, SAMPLE_HZ as f64); if timer < 6.0 { let last_time = prev_time.unwrap_or(time.current); let dt = time.current - last_time; timer += dt; prev_time = Some(time.current);
pa::Continue } else { pa::Complete } }; let pa = try!(pa::PortAudio::new()); let settings = try!(pa.default_output_stream_settings::<f32>(CHANNELS, SAMPLE_HZ, FRAMES)); let mut stream = try!(pa.open_non_blocking_stream(settings, callback)); try!(stream.start()); while let Ok(true) = stream.is_active() { std::thread::sleep(std::time::Duration::from_millis(16)); } Ok(()) }
if timer > note_duration { if !is_note_off { synth.note_off(note); is_note_off = true; } }
if_condition
[ { "content": "#[test]\n\nfn test_dynamic_synth() {\n\n use dynamic::Synth;\n\n\n\n extern crate serde_json;\n\n\n\n let synth = Synth::dynamic_retrigger();\n\n let serialized = serde_json::to_string(&synth).unwrap();\n\n\n\n println!(\"{}\", serialized);\n\n \n\n let deserialized: Synth = s...
Rust
src/schema/text_options.rs
elbow-jason/tantivy
45e62d43293ed8c66ef027104a35cf25afe2e995
use schema::IndexRecordOption; use std::borrow::Cow; use std::ops::BitOr; #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] pub struct TextOptions { indexing: Option<TextFieldIndexing>, stored: bool, } impl TextOptions { pub fn get_indexing_options(&self) -> Option<&TextFieldIndexing> { self.indexing.as_ref() } pub fn is_stored(&self) -> bool { self.stored } pub fn set_stored(mut self) -> TextOptions { self.stored = true; self } pub fn set_indexing_options(mut self, indexing: TextFieldIndexing) -> TextOptions { self.indexing = Some(indexing); self } } impl Default for TextOptions { fn default() -> TextOptions { TextOptions { indexing: None, stored: false, } } } #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] pub struct TextFieldIndexing { record: IndexRecordOption, tokenizer: Cow<'static, str>, } impl Default for TextFieldIndexing { fn default() -> TextFieldIndexing { TextFieldIndexing { tokenizer: Cow::Borrowed("default"), record: IndexRecordOption::Basic, } } } impl TextFieldIndexing { pub fn set_tokenizer(mut self, tokenizer_name: &str) -> TextFieldIndexing { self.tokenizer = Cow::Owned(tokenizer_name.to_string()); self } pub fn tokenizer(&self) -> &str { &self.tokenizer } pub fn set_index_option(mut self, index_option: IndexRecordOption) -> TextFieldIndexing { self.record = index_option; self } pub fn index_option(&self) -> IndexRecordOption { self.record } } pub const STRING: TextOptions = TextOptions { indexing: Some(TextFieldIndexing { tokenizer: Cow::Borrowed("raw"), record: IndexRecordOption::Basic, }), stored: false, }; pub const TEXT: TextOptions = TextOptions { indexing: Some(TextFieldIndexing { tokenizer: Cow::Borrowed("default"), record: IndexRecordOption::WithFreqsAndPositions, }), stored: false, }; pub const STORED: TextOptions = TextOptions { indexing: None, stored: true, }; impl BitOr for TextOptions { type Output = TextOptions; fn bitor(self, other: TextOptions) -> TextOptions { let mut res = TextOptions::default(); 
res.indexing = self.indexing.or(other.indexing); res.stored = self.stored | other.stored; res } } #[cfg(test)] mod tests { use schema::*; #[test] fn test_field_options() { { let field_options = STORED | TEXT; assert!(field_options.is_stored()); assert!(field_options.get_indexing_options().is_some()); } { let mut schema_builder = Schema::builder(); schema_builder.add_text_field("body", TEXT); let schema = schema_builder.build(); let field = schema.get_field("body").unwrap(); let field_entry = schema.get_field_entry(field); match field_entry.field_type() { &FieldType::Str(ref text_options) => { assert!(text_options.get_indexing_options().is_some()); assert_eq!( text_options.get_indexing_options().unwrap().tokenizer(), "default" ); } _ => { panic!(""); } } } } #[test] fn test_cmp_index_record_option() { assert!(IndexRecordOption::WithFreqsAndPositions > IndexRecordOption::WithFreqs); assert!(IndexRecordOption::WithFreqs > IndexRecordOption::Basic); } }
use schema::IndexRecordOption; use std::borrow::Cow; use std::ops::BitOr; #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] pub struct TextOptions { indexing: Option<TextFieldIndexing>, stored: bool, } impl TextOptions { pub fn get_indexing_options(&self) -> Option<&TextFieldIndexing> { self.indexing.as_ref() } pub fn is_stored(&self) -> bool { self.stored } pub fn set_stored(mut self) -> TextOptions { self.stored = true; self } pub fn set_indexing_options(mut self, indexing: TextFieldIndexing) -> TextOptions { self.indexing = Some(indexing); self } } impl Default for TextOptions { fn default() -> TextOptions { TextOptions { indexing: None, stored: false, } } } #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] pub struct TextFieldIndexing { record: IndexRecordOption, tokenizer: Cow<'static, str>, } impl Default for TextFieldIndexing { fn default() -> TextFieldIndexing { TextFieldIndexing { tokenizer: Cow::Borrowed("default"), record: IndexRecordOption::Basic, } } } impl TextFieldIndexing { pub fn set_tokenizer(mut self, tokenizer_name: &str) -> TextFieldIndexing { self.tokenizer = Cow::Owned(tokenizer_name.to_string()); self } pub fn tokenizer(&self) -> &str { &self.tokenizer } pub fn set_index_option(mut self, index_option: IndexRecordOption) -> TextFieldIndexing { self.record = index_option; self } pub fn index_option(&self) -> IndexRecordOption { self.record } } pub const STRING: TextOptions = TextOptions { indexing: Some(TextFieldIndexing { tokenizer: Cow::Borrowed("raw"), record: IndexRecordOption::Basic, }), stored: false, }; pub const TEXT: TextOptions = TextOptions { indexing: Some(TextFieldIndexing { tokenizer: Cow::Borrowed("default"), record: IndexRecordOption::WithFreqsAndPositions, }), stored: false, }; pub const STORED: TextOptions = TextOptions { indexing: None, stored: true, }; impl BitOr for TextOptions { type Output = TextOptions; fn bitor(self, other: TextOptions) -> TextOptions { let mut res = TextOptions::default(); 
res.indexing = self.indexing.or(other.indexing); res.stored = self.stored | other.stored; res } } #[cfg(test)] mod tests { use schema::*; #[test] fn test_field_options() { { let field_options = STORED | TEXT; assert!(field_options.is_stored()); assert!(field_options.get_indexing_options().is_some()); } { let mut schema_builder = Schema::builder(); schema_builder.add_text_field("body", TEXT); let schema = schema_builder.build(); let field = schema.get_field("body").unwrap(); let field_entry = schema.get_field_entry(field);
} } #[test] fn test_cmp_index_record_option() { assert!(IndexRecordOption::WithFreqsAndPositions > IndexRecordOption::WithFreqs); assert!(IndexRecordOption::WithFreqs > IndexRecordOption::Basic); } }
match field_entry.field_type() { &FieldType::Str(ref text_options) => { assert!(text_options.get_indexing_options().is_some()); assert_eq!( text_options.get_indexing_options().unwrap().tokenizer(), "default" ); } _ => { panic!(""); } }
if_condition
[ { "content": "// writes a lowercased version of text into output.\n\nfn to_lowercase_unicode(text: &mut String, output: &mut String) {\n\n output.clear();\n\n for c in text.chars() {\n\n // Contrary to the std, we do not take care of sigma special case.\n\n // This will have an normalization...
Rust
tests/route_middleware.rs
chrisdickinson/tide
f884167f29e36993afd31f477ce77138bd915b7c
use http_types::headers::HeaderName; use std::convert::TryInto; use tide::http::{self, url::Url, Method}; use tide::Middleware; use test_utils::BoxFuture; mod test_utils; #[derive(Debug)] struct TestMiddleware(HeaderName, &'static str); impl TestMiddleware { fn with_header_name(name: &'static str, value: &'static str) -> Self { Self(name.try_into().unwrap(), value) } } impl<State: Send + Sync + 'static> Middleware<State> for TestMiddleware { fn handle<'a>( &'a self, req: tide::Request<State>, next: tide::Next<'a, State>, ) -> BoxFuture<'a, tide::Result<tide::Response>> { Box::pin(async move { let res = next.run(req).await?; Ok(res.set_header(self.0.clone(), self.1)) }) } } async fn echo_path<State>(req: tide::Request<State>) -> tide::Result<String> { Ok(req.url().path().to_string()) } #[async_std::test] async fn route_middleware() { let mut app = tide::new(); let mut foo_route = app.at("/foo"); foo_route .middleware(TestMiddleware::with_header_name("X-Foo", "foo")) .get(echo_path); foo_route .at("/bar") .middleware(TestMiddleware::with_header_name("X-Bar", "bar")) .get(echo_path); foo_route .post(echo_path) .reset_middleware() .put(echo_path); let req = http::Request::new(Method::Get, Url::parse("http://localhost/foo").unwrap()); let res: http::Response = app.respond(req).await.unwrap(); assert_eq!(res["X-Foo"], "foo"); let req = http::Request::new(Method::Post, Url::parse("http://localhost/foo").unwrap()); let res: http::Response = app.respond(req).await.unwrap(); assert_eq!(res["X-Foo"], "foo"); let req = http::Request::new(Method::Put, Url::parse("http://localhost/foo").unwrap()); let res: http::Response = app.respond(req).await.unwrap(); assert!(res.header("X-Foo").is_none()); let req = http::Request::new(Method::Get, Url::parse("http://localhost/foo/bar").unwrap()); let res: http::Response = app.respond(req).await.unwrap(); assert_eq!(res["X-Foo"], "foo"); assert_eq!(res["x-bar"], "bar"); } #[async_std::test] async fn app_and_route_middleware() { let mut app = 
tide::new(); app.middleware(TestMiddleware::with_header_name("X-Root", "root")); app.at("/foo") .middleware(TestMiddleware::with_header_name("X-Foo", "foo")) .get(echo_path); app.at("/bar") .middleware(TestMiddleware::with_header_name("X-Bar", "bar")) .get(echo_path); let req = http::Request::new(Method::Get, Url::parse("http://localhost/foo").unwrap()); let res: http::Response = app.respond(req).await.unwrap(); assert_eq!(res["X-Root"], "root"); assert_eq!(res["x-foo"], "foo"); assert!(res.header("x-bar").is_none()); let req = http::Request::new(Method::Get, Url::parse("http://localhost/bar").unwrap()); let res: http::Response = app.respond(req).await.unwrap(); assert_eq!(res["X-Root"], "root"); assert!(res.header("x-foo").is_none()); assert_eq!(res["X-Bar"], "bar"); } #[async_std::test] async fn nested_app_with_route_middleware() { let mut inner = tide::new(); inner.middleware(TestMiddleware::with_header_name("X-Inner", "inner")); inner .at("/baz") .middleware(TestMiddleware::with_header_name("X-Baz", "baz")) .get(echo_path); let mut app = tide::new(); app.middleware(TestMiddleware::with_header_name("X-Root", "root")); app.at("/foo") .middleware(TestMiddleware::with_header_name("X-Foo", "foo")) .get(echo_path); app.at("/bar") .middleware(TestMiddleware::with_header_name("X-Bar", "bar")) .nest(inner); let req = http::Request::new(Method::Get, Url::parse("http://localhost/foo").unwrap()); let res: http::Response = app.respond(req).await.unwrap(); assert_eq!(res["X-Root"], "root"); assert!(res.header("X-Inner").is_none()); assert_eq!(res["X-Foo"], "foo"); assert!(res.header("X-Bar").is_none()); assert!(res.header("X-Baz").is_none()); let req = http::Request::new(Method::Get, Url::parse("http://localhost/bar/baz").unwrap()); let res: http::Response = app.respond(req).await.unwrap(); assert_eq!(res["X-Root"], "root"); assert_eq!(res["X-Inner"], "inner"); assert!(res.header("X-Foo").is_none()); assert_eq!(res["X-Bar"], "bar"); assert_eq!(res["X-Baz"], "baz"); } 
#[async_std::test] async fn subroute_not_nested() { let mut app = tide::new(); app.at("/parent") .middleware(TestMiddleware::with_header_name("X-Parent", "Parent")) .get(echo_path); app.at("/parent/child") .middleware(TestMiddleware::with_header_name("X-Child", "child")) .get(echo_path); let req = http::Request::new( Method::Get, Url::parse("http://localhost/parent/child").unwrap(), ); let res: http::Response = app.respond(req).await.unwrap(); assert!(res.header("X-Parent").is_none()); assert_eq!(res["x-child"], "child"); }
use http_types::headers::HeaderName; use std::convert::TryInto; use tide::http::{self, url::Url, Method}; use tide::Middleware; use test_utils::BoxFuture; mod test_utils; #[derive(Debug)] struct TestMiddleware(HeaderName, &'static str); impl TestMiddleware { fn with_header_name(name: &'static str, value: &'static str) -> Self { Self(name.try_into().unwrap(), value) } } impl<State: Send + Sync + 'static> Middleware<State> for TestMiddleware { fn handle<'a>( &'a self, req: tide::Request<State>, next: tide::Next<'a, State>, ) -> BoxFuture<'a, tide::Result<tide::Response>> { Box::pin(async move { let res = next.run(req).await?; Ok(res.set_header(self.0.clone(), self.1)) }) } } async fn echo_path<State>(req: tide::Request<State>) -> tide::Result<String> { Ok(req.url().path().to_string()) } #[async_std::test] async fn route_middleware() { let mut app = tide::new(); let mut foo_route = app.at("/foo"); foo_route .middleware(TestMiddleware::with_header_name("X-Foo", "foo")) .get(echo_path); foo_route .at("/bar") .middleware(TestMiddleware::with_header_name("X-Bar", "bar")) .get(echo_path); foo_route .post(echo_path) .reset_middleware() .put(echo_path); let req = http::Request::new(Method::Get, Url::parse("http://localhost/foo").unwrap()); let res: http::Response = app.respond(req).await.unwrap(); assert_eq!(res["X-Foo"], "foo"); let req = http::Request::new(Method::Post, Url::parse("http://localhost/foo").unwrap()); let res: http::Response = app.respond(req).await.unwrap(); assert_eq!(res["X-Foo"], "foo"); let req = http::Request::new(Method::Put, Url::parse("http://localhost/foo").unwrap()); let res: http::Response = app.respond(req).await.unwrap(); assert!(res.header("X-Foo").is_none()); let req = http::Request::new(Method::Get, Url::parse("http://localhost/foo/bar").unwrap()); let res: http::Response = app.respond(req).await.unwrap(); assert_eq!(res["X-Foo"], "foo"); assert_eq!(res["x-bar"], "bar"); } #[async_std::test]
#[async_std::test] async fn nested_app_with_route_middleware() { let mut inner = tide::new(); inner.middleware(TestMiddleware::with_header_name("X-Inner", "inner")); inner .at("/baz") .middleware(TestMiddleware::with_header_name("X-Baz", "baz")) .get(echo_path); let mut app = tide::new(); app.middleware(TestMiddleware::with_header_name("X-Root", "root")); app.at("/foo") .middleware(TestMiddleware::with_header_name("X-Foo", "foo")) .get(echo_path); app.at("/bar") .middleware(TestMiddleware::with_header_name("X-Bar", "bar")) .nest(inner); let req = http::Request::new(Method::Get, Url::parse("http://localhost/foo").unwrap()); let res: http::Response = app.respond(req).await.unwrap(); assert_eq!(res["X-Root"], "root"); assert!(res.header("X-Inner").is_none()); assert_eq!(res["X-Foo"], "foo"); assert!(res.header("X-Bar").is_none()); assert!(res.header("X-Baz").is_none()); let req = http::Request::new(Method::Get, Url::parse("http://localhost/bar/baz").unwrap()); let res: http::Response = app.respond(req).await.unwrap(); assert_eq!(res["X-Root"], "root"); assert_eq!(res["X-Inner"], "inner"); assert!(res.header("X-Foo").is_none()); assert_eq!(res["X-Bar"], "bar"); assert_eq!(res["X-Baz"], "baz"); } #[async_std::test] async fn subroute_not_nested() { let mut app = tide::new(); app.at("/parent") .middleware(TestMiddleware::with_header_name("X-Parent", "Parent")) .get(echo_path); app.at("/parent/child") .middleware(TestMiddleware::with_header_name("X-Child", "child")) .get(echo_path); let req = http::Request::new( Method::Get, Url::parse("http://localhost/parent/child").unwrap(), ); let res: http::Response = app.respond(req).await.unwrap(); assert!(res.header("X-Parent").is_none()); assert_eq!(res["x-child"], "child"); }
async fn app_and_route_middleware() { let mut app = tide::new(); app.middleware(TestMiddleware::with_header_name("X-Root", "root")); app.at("/foo") .middleware(TestMiddleware::with_header_name("X-Foo", "foo")) .get(echo_path); app.at("/bar") .middleware(TestMiddleware::with_header_name("X-Bar", "bar")) .get(echo_path); let req = http::Request::new(Method::Get, Url::parse("http://localhost/foo").unwrap()); let res: http::Response = app.respond(req).await.unwrap(); assert_eq!(res["X-Root"], "root"); assert_eq!(res["x-foo"], "foo"); assert!(res.header("x-bar").is_none()); let req = http::Request::new(Method::Get, Url::parse("http://localhost/bar").unwrap()); let res: http::Response = app.respond(req).await.unwrap(); assert_eq!(res["X-Root"], "root"); assert!(res.header("x-foo").is_none()); assert_eq!(res["X-Bar"], "bar"); }
function_block-full_function
[ { "content": "/// An HTTP request handler.\n\n///\n\n/// This trait is automatically implemented for `Fn` types, and so is rarely implemented\n\n/// directly by Tide users.\n\n///\n\n/// In practice, endpoints are functions that take a `Request<State>` as an argument and\n\n/// return a type `T` that implements...
Rust
account/src/account.rs
WormholeStudio/starcoin
9f7025537cccb613834f1659649cd753e0becdd7
use crate::account_manager::gen_private_key; use crate::account_storage::AccountStorage; use anyhow::{format_err, Result}; use starcoin_account_api::error::AccountError; use starcoin_account_api::{ AccountInfo, AccountPrivateKey, AccountPublicKey, AccountResult, Setting, }; use starcoin_crypto::PrivateKey; use starcoin_logger::prelude::*; use starcoin_storage::storage::StorageInstance; use starcoin_types::account_address; use starcoin_types::account_address::AccountAddress; use starcoin_types::genesis_config::ChainId; use starcoin_types::sign_message::{SignedMessage, SigningMessage}; use starcoin_types::transaction::authenticator::AuthenticationKey; use starcoin_types::transaction::{RawUserTransaction, SignedUserTransaction}; pub struct Account { addr: AccountAddress, public_key: AccountPublicKey, private_key: Option<AccountPrivateKey>, setting: Setting, store: AccountStorage, } impl Account { pub fn create( address: AccountAddress, private_key: AccountPrivateKey, password: String, storage: AccountStorage, ) -> AccountResult<Self> { storage.update_key(address, &private_key, password.as_str())?; let setting = Setting::default(); storage.update_setting(address, setting.clone())?; Ok(Self { addr: address, public_key: private_key.public_key(), private_key: Some(private_key), setting, store: storage, }) } pub fn create_readonly( address: AccountAddress, public_key: AccountPublicKey, storage: AccountStorage, ) -> AccountResult<Self> { storage.update_public_key(address, public_key.clone())?; let setting = Setting::readonly(); storage.update_setting(address, setting.clone())?; Ok(Self { addr: address, public_key, private_key: None, setting, store: storage, }) } pub fn load( addr: AccountAddress, password: Option<String>, storage: AccountStorage, ) -> AccountResult<Option<Self>> { let setting = storage.load_setting(addr)?; let private_key = if setting.is_readonly { None } else { let decrypted_key = storage .decrypt_private_key(addr, password.unwrap_or_else(|| 
"".to_string())) .map_err(|e| { warn!( "Try to unlock {} with a invalid password, err: {:?}", addr, e ); AccountError::InvalidPassword(addr) })?; let private_key = match decrypted_key { None => return Ok(None), Some(p) => p, }; Some(private_key) }; let saved_public_key = storage.public_key(addr)?; let saved_public_key = saved_public_key.ok_or_else(|| { AccountError::StoreError(format_err!("public key not found for address {}", addr)) })?; Ok(Some(Self { addr, public_key: saved_public_key, private_key, setting, store: storage, })) } pub fn set_default(&mut self) -> Result<()> { self.setting.is_default = true; self.store.set_default_address(Some(self.addr))?; self.store.update_setting(self.addr, self.setting.clone())?; Ok(()) } pub fn info(&self) -> AccountInfo { AccountInfo::new( self.addr, self.public_key.clone(), self.setting.is_default, self.setting.is_readonly, ) } pub fn sign_message( &self, message: SigningMessage, chain_id: ChainId, ) -> Result<SignedMessage> { let authenticator = self .private_key .as_ref() .map(|private_key| private_key.sign_message(&message)) .ok_or_else(|| format_err!("Readonly account can not sign message."))?; Ok(SignedMessage::new( self.addr, message, authenticator, chain_id, )) } pub fn sign_txn(&self, raw_txn: RawUserTransaction) -> Result<SignedUserTransaction> { let signature = self .private_key .as_ref() .map(|private_key| private_key.sign(&raw_txn)) .ok_or_else(|| format_err!("Readonly account can not sign txn"))?; Ok(SignedUserTransaction::new(raw_txn, signature)) } pub fn destroy(self) -> Result<()> { self.store.destroy_account(self.addr) } pub fn address(&self) -> &AccountAddress { &self.addr } pub fn private_key(&self) -> Option<&AccountPrivateKey> { self.private_key.as_ref() } pub fn public_key(&self) -> AccountPublicKey { self.public_key.clone() } pub fn auth_key(&self) -> AuthenticationKey { self.public_key.authentication_key() } pub fn random() -> Result<Self> { let private_key = gen_private_key(); let public_key = 
private_key.public_key(); let address = account_address::from_public_key(&public_key); let storage = AccountStorage::new(StorageInstance::new_cache_instance()); Self::create(address, private_key.into(), "".to_string(), storage).map_err(|e| e.into()) } }
use crate::account_manager::gen_private_key; use crate::account_storage::AccountStorage; use anyhow::{format_err, Result}; use starcoin_account_api::error::AccountError; use starcoin_account_api::{ AccountInfo, AccountPrivateKey, AccountPublicKey, AccountResult, Setting, }; use starcoin_crypto::PrivateKey; use starcoin_logger::prelude::*; use starcoin_storage::storage::StorageInstance; use starcoin_types::account_address; use starcoin_types::account_address::AccountAddress; use starcoin_types::genesis_config::ChainId; use starcoin_types::sign_message::{SignedMessage, SigningMessage}; use starcoin_types::transaction::authenticator::AuthenticationKey; use starcoin_types::transaction::{RawUserTransaction, SignedUserTransaction}; pub struct Account { addr: AccountAddress, public_key: AccountPublicKey, private_key: Option<AccountPrivateKey>, setting: Setting, store: AccountStorage, } impl Account { pub f
pub fn create_readonly( address: AccountAddress, public_key: AccountPublicKey, storage: AccountStorage, ) -> AccountResult<Self> { storage.update_public_key(address, public_key.clone())?; let setting = Setting::readonly(); storage.update_setting(address, setting.clone())?; Ok(Self { addr: address, public_key, private_key: None, setting, store: storage, }) } pub fn load( addr: AccountAddress, password: Option<String>, storage: AccountStorage, ) -> AccountResult<Option<Self>> { let setting = storage.load_setting(addr)?; let private_key = if setting.is_readonly { None } else { let decrypted_key = storage .decrypt_private_key(addr, password.unwrap_or_else(|| "".to_string())) .map_err(|e| { warn!( "Try to unlock {} with a invalid password, err: {:?}", addr, e ); AccountError::InvalidPassword(addr) })?; let private_key = match decrypted_key { None => return Ok(None), Some(p) => p, }; Some(private_key) }; let saved_public_key = storage.public_key(addr)?; let saved_public_key = saved_public_key.ok_or_else(|| { AccountError::StoreError(format_err!("public key not found for address {}", addr)) })?; Ok(Some(Self { addr, public_key: saved_public_key, private_key, setting, store: storage, })) } pub fn set_default(&mut self) -> Result<()> { self.setting.is_default = true; self.store.set_default_address(Some(self.addr))?; self.store.update_setting(self.addr, self.setting.clone())?; Ok(()) } pub fn info(&self) -> AccountInfo { AccountInfo::new( self.addr, self.public_key.clone(), self.setting.is_default, self.setting.is_readonly, ) } pub fn sign_message( &self, message: SigningMessage, chain_id: ChainId, ) -> Result<SignedMessage> { let authenticator = self .private_key .as_ref() .map(|private_key| private_key.sign_message(&message)) .ok_or_else(|| format_err!("Readonly account can not sign message."))?; Ok(SignedMessage::new( self.addr, message, authenticator, chain_id, )) } pub fn sign_txn(&self, raw_txn: RawUserTransaction) -> Result<SignedUserTransaction> { let signature = 
self .private_key .as_ref() .map(|private_key| private_key.sign(&raw_txn)) .ok_or_else(|| format_err!("Readonly account can not sign txn"))?; Ok(SignedUserTransaction::new(raw_txn, signature)) } pub fn destroy(self) -> Result<()> { self.store.destroy_account(self.addr) } pub fn address(&self) -> &AccountAddress { &self.addr } pub fn private_key(&self) -> Option<&AccountPrivateKey> { self.private_key.as_ref() } pub fn public_key(&self) -> AccountPublicKey { self.public_key.clone() } pub fn auth_key(&self) -> AuthenticationKey { self.public_key.authentication_key() } pub fn random() -> Result<Self> { let private_key = gen_private_key(); let public_key = private_key.public_key(); let address = account_address::from_public_key(&public_key); let storage = AccountStorage::new(StorageInstance::new_cache_instance()); Self::create(address, private_key.into(), "".to_string(), storage).map_err(|e| e.into()) } }
n create( address: AccountAddress, private_key: AccountPrivateKey, password: String, storage: AccountStorage, ) -> AccountResult<Self> { storage.update_key(address, &private_key, password.as_str())?; let setting = Setting::default(); storage.update_setting(address, setting.clone())?; Ok(Self { addr: address, public_key: private_key.public_key(), private_key: Some(private_key), setting, store: storage, }) }
function_block-function_prefixed
[ { "content": "#[test]\n\npub fn test_readonly_account() -> Result<()> {\n\n let tempdir = tempfile::tempdir()?;\n\n let storage = AccountStorage::create_from_path(tempdir.path(), RocksdbConfig::default())?;\n\n let manager = AccountManager::new(storage.clone(), ChainId::test())?;\n\n let mut key_gen...
Rust
src/style/styles/styledobject.rs
tkaden4/crossterm
dd01de870beed9717de1afc6d12e145be4e931bc
use std::io::Write; use std::{self, fmt}; #[cfg(unix)] use super::super::Attribute; use style::{Color, ObjectStyle}; pub struct StyledObject<D> { pub object_style: ObjectStyle, pub content: D, } impl<D> StyledObject<D> { pub fn with(mut self, foreground_color: Color) -> StyledObject<D> { self.object_style = self.object_style.fg(foreground_color); self } pub fn on(mut self, background_color: Color) -> StyledObject<D> { self.object_style = self.object_style.bg(background_color); self } #[cfg(unix)] pub fn attr(mut self, attr: Attribute) -> StyledObject<D> { &self.object_style.add_attr(attr); self } #[cfg(unix)] #[inline(always)] pub fn bold(self) -> StyledObject<D> { self.attr(Attribute::Bold) } #[cfg(unix)] #[inline(always)] pub fn dim(self) -> StyledObject<D> { self.attr(Attribute::Dim) } #[cfg(unix)] #[inline(always)] pub fn italic(self) -> StyledObject<D> { self.attr(Attribute::Italic) } #[cfg(unix)] #[inline(always)] pub fn underlined(self) -> StyledObject<D> { self.attr(Attribute::Underlined) } #[cfg(unix)] #[inline(always)] pub fn slow_blink(self) -> StyledObject<D> { self.attr(Attribute::SlowBlink) } #[cfg(unix)] #[inline(always)] pub fn rapid_blink(self) -> StyledObject<D> { self.attr(Attribute::RapidBlink) } #[cfg(unix)] #[inline(always)] pub fn reverse(self) -> StyledObject<D> { self.attr(Attribute::Reverse) } #[cfg(unix)] #[inline(always)] pub fn hidden(self) -> StyledObject<D> { self.attr(Attribute::Hidden) } #[cfg(unix)] #[inline(always)] pub fn crossed_out(self) -> StyledObject<D> { self.attr(Attribute::CrossedOut) } } macro_rules! 
impl_fmt { ($name:ident) => { impl<D: fmt::$name> fmt::$name for StyledObject<D> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let mut colored_terminal = super::super::color(); let mut reset = true; if let Some(bg) = self.object_style.bg_color { colored_terminal.set_bg(bg); reset = true; } if let Some(fg) = self.object_style.fg_color { colored_terminal.set_fg(fg); reset = true; } #[cfg(unix)] for attr in self.object_style.attrs.iter() { write!(f, csi!("{}m"), *attr as i16); reset = true; } fmt::$name::fmt(&self.content, f)?; std::io::stdout().flush().expect("Flush stdout failed"); if reset { colored_terminal.reset(); } Ok(()) } } }; } impl_fmt!(Debug); impl_fmt!(Display);
use std::io::Write; use std::{self, fmt}; #[cfg(unix)] use super::super::Attribute; use style::{Color, ObjectStyle}; pub struct StyledObject<D> { pub object_style: ObjectStyle, pub content: D, } impl<D> StyledObject<D> { pub fn with(mut self, foreground_color: Color) -> StyledObject<D> { self.object_style = self.object_style.fg(foreground_color); self } pub fn on(mut self, background_color: Color) -> StyledObject<D> { self.object_style = self.object_style.bg(background_color); self } #[cfg(unix)] pub fn attr(mut self, attr: Attribute) -> StyledObject<D> { &self.object_style.add_attr(attr); self } #[cfg(unix)] #[inline(always)] pub fn bold(self) -> StyledObject<D> { self.attr(Attribute::Bold) } #[cfg(unix)] #[inline(always)] pub fn dim(self) -> StyledObject<D> { self.attr(Attribute::Dim) } #[cfg(unix)] #[inline(always)] pub fn italic(self) -> StyledObject<D> { self.attr(Attribute::Italic) } #[cfg(unix)] #[inline(always)] pub fn underlined(self) -> StyledObject<D> { self.attr(Attribute::Underlined) } #[cfg(unix)] #[inline(always)] pub fn slow_blink(self) -> StyledObject<D> { self.attr(Attribute::SlowBlink) } #[cfg(unix)] #[inline(always)] pub fn rapid_blink(self) -> StyledObject<D> { self.attr(Attribute::RapidBlink) } #[cfg(unix)] #[inline(always)] pub fn reverse(self) -> StyledObject<D> { self.attr(Attribute::Reverse) } #[cfg(unix)] #[inline(always)] pub fn hidden(self) -> StyledObject<D> { self.attr(Attribute::Hidden) } #[cfg(unix)] #[inline(always)] pub fn crossed_out(self) -> StyledObject<D> { self.attr(Attribute::CrossedOut) } } macro_rules! impl_fmt { ($name:ident) => { impl<D: fmt::$name> fmt::$name for StyledObject<D> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let mut colored_terminal = super::super::color(); let mut reset = true; if let Some(bg) = self.object_style.bg_color { colored_terminal.set_bg(bg); reset = true; } if let Some(fg) = self.object_style.fg_c
for attr in self.object_style.attrs.iter() { write!(f, csi!("{}m"), *attr as i16); reset = true; } fmt::$name::fmt(&self.content, f)?; std::io::stdout().flush().expect("Flush stdout failed"); if reset { colored_terminal.reset(); } Ok(()) } } }; } impl_fmt!(Debug); impl_fmt!(Display);
olor { colored_terminal.set_fg(fg); reset = true; } #[cfg(unix)]
random
[ { "content": "/// Get an TerminalColor implementation whereon color related actions can be performed.\n\n///\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// extern crate crossterm;\n\n///\n\n/// use self::crossterm::style::{color, Color};\n\n///\n\n/// // Get colored terminal instance\n\n/// let mut colored_termi...
Rust
tools/publisher/src/sort.rs
Jacco/smithy-rs
8a5b48062bb76138844c6c912f355076c3ad75ba
/* * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ use crate::package::{Package, PackageHandle}; use anyhow::Result; use std::collections::{BTreeMap, BTreeSet}; pub fn dependency_order(packages: Vec<Package>) -> Result<Vec<Package>> { let mut order = Vec::new(); let mut packages: BTreeMap<PackageHandle, Package> = packages .into_iter() .map(|p| (p.handle.clone(), p)) .collect(); let mut visited = BTreeSet::new(); let mut to_visit: Vec<&Package> = packages.iter().map(|e| e.1).collect(); to_visit.sort_by(|a, b| { (*a).local_dependencies .len() .cmp(&(*b).local_dependencies.len()) }); while let Some(package) = to_visit.iter().find(|e| !visited.contains(&e.handle)) { dependency_order_visit( &package.handle, &packages, &mut BTreeSet::new(), &mut visited, &mut order, )?; } Ok(order .into_iter() .map(&mut |handle| packages.remove(&handle).unwrap()) .collect()) } #[derive(Debug, thiserror::Error)] enum Error { #[error("dependency cycle detected")] DependencyCycle, } fn dependency_order_visit( package_handle: &PackageHandle, packages: &BTreeMap<PackageHandle, Package>, stack: &mut BTreeSet<PackageHandle>, visited: &mut BTreeSet<PackageHandle>, result: &mut Vec<PackageHandle>, ) -> Result<(), Error> { visited.insert(package_handle.clone()); stack.insert(package_handle.clone()); let local_dependencies = &packages[package_handle].local_dependencies; for dependency in local_dependencies { if visited.contains(dependency) && stack.contains(dependency) { return Err(Error::DependencyCycle); } if package_handle != dependency && packages.contains_key(dependency) && !visited.contains(dependency) { dependency_order_visit(dependency, packages, stack, visited, result)?; } } result.push(package_handle.clone()); Ok(()) } #[cfg(test)] mod tests { use super::*; use semver::Version; fn package(name: &str, dependencies: &[&str]) -> Package { Package::new( PackageHandle::new(name, Version::parse("1.0.0").unwrap()), 
format!("{}/Cargo.toml", name), dependencies .iter() .map(|d| PackageHandle::new(*d, Version::parse("1.0.0").unwrap())) .collect(), ) } #[test] pub fn test_dependency_order() { let packages = vec![ package("E", &["B", "C", "A"]), package("B", &[]), package("F", &["E", "D"]), package("C", &["A"]), package("A", &[]), package("D", &["C"]), ]; let result = dependency_order(packages).unwrap(); assert_eq!( "ABCDEF", result.iter().fold(String::new(), |mut acc, p| { acc.push_str(&p.handle.name); acc }) ); } #[test] pub fn test_dependency_cycles() { let packages = vec![ package("A", &["C"]), package("B", &["A"]), package("C", &["B"]), ]; let error = dependency_order(packages).err().expect("cycle"); assert_eq!("dependency cycle detected", format!("{}", error)); } }
/* * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ use crate::package::{Package, PackageHandle}; use anyhow::Result; use std::collections::{BTreeMap, BTreeSet}; pub fn dependency_order(packages: Vec<Package>) -> Result<Vec<Package>> { let mut order = Vec::new(); let mut packages: BTreeMap<PackageHandle, Package> = packages .into_iter() .map(|p| (p.handle.clone(), p)) .collect(); let mut visited = BTreeSet::new(); let mut to_visit: Vec<&Package> = packages.iter().map(|e| e.1).collect(); to_visit.sort_by(|a, b| { (*a).local_dependencies .len() .cmp(&(*b).local_dependencies.len()) }); while let Some(package) = to_visit.iter().find(|e| !visited.contains(&e.handle)) { dependency_order_visit( &package.handle, &packages, &mut BTreeSet::new(), &mut visited, &mut order, )?; } Ok(order .into_iter() .map(&mut |handle| packages.remove(&handle).unwrap()) .collect()) } #[derive(Debug, thiserror::Error)] enum Error { #[error("dependency cycle detected")] DependencyCycle, } fn dependency_order_visit( package_handle: &PackageHandle, packages: &BTreeMap<PackageHandle, Package>, stack: &mut BTreeSet<PackageHandle>, visited: &mut BTreeSet<PackageHandle>, result: &mut Vec<PackageHandle>, ) -> Result<(), Error> { visited.insert(package_handle.clone()); stack.insert(package_handle.clone()); let local_dependencies = &packages[package_handle].local_dependencies; for dependency in local_dependencies { if visited.contains(dependency) && stack.contains(dependency) { return Err(Error::DependencyCycle); } if package_handle != dependency && packages.contains_key(dependency) && !visited.contains(dependency) { dependency_order_visit(dependency, packages, stack, visited, result)?; } } result.push(package_handle.clone()); Ok(()) } #[cfg(test)] mod tests { use super::*; use semver::Version;
#[test] pub fn test_dependency_order() { let packages = vec![ package("E", &["B", "C", "A"]), package("B", &[]), package("F", &["E", "D"]), package("C", &["A"]), package("A", &[]), package("D", &["C"]), ]; let result = dependency_order(packages).unwrap(); assert_eq!( "ABCDEF", result.iter().fold(String::new(), |mut acc, p| { acc.push_str(&p.handle.name); acc }) ); } #[test] pub fn test_dependency_cycles() { let packages = vec![ package("A", &["C"]), package("B", &["A"]), package("C", &["B"]), ]; let error = dependency_order(packages).err().expect("cycle"); assert_eq!("dependency cycle detected", format!("{}", error)); } }
fn package(name: &str, dependencies: &[&str]) -> Package { Package::new( PackageHandle::new(name, Version::parse("1.0.0").unwrap()), format!("{}/Cargo.toml", name), dependencies .iter() .map(|d| PackageHandle::new(*d, Version::parse("1.0.0").unwrap())) .collect(), ) }
function_block-full_function
[ { "content": "/// Writes the given `headers` to a `buffer`.\n\npub fn write_headers_to<B: BufMut>(headers: &[Header], mut buffer: B) -> Result<(), Error> {\n\n for header in headers {\n\n header.write_to(&mut buffer)?;\n\n }\n\n Ok(())\n\n}\n\n\n\n/// Event Stream message.\n\n#[non_exhaustive]\n...
Rust
src/sdif/ctype.rs
geky/lpc55s6x-pac
766a1eec50a670a5872aa1a8c7637a9d5b9d6478
#[doc = "Reader of register CTYPE"] pub type R = crate::R<u32, super::CTYPE>; #[doc = "Writer for register CTYPE"] pub type W = crate::W<u32, super::CTYPE>; #[doc = "Register CTYPE `reset()`'s with value 0"] impl crate::ResetValue for super::CTYPE { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `CARD0_WIDTH0`"] pub type CARD0_WIDTH0_R = crate::R<bool, bool>; #[doc = "Write proxy for field `CARD0_WIDTH0`"] pub struct CARD0_WIDTH0_W<'a> { w: &'a mut W, } impl<'a> CARD0_WIDTH0_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } #[doc = "Reader of field `CARD1_WIDTH0`"] pub type CARD1_WIDTH0_R = crate::R<bool, bool>; #[doc = "Write proxy for field `CARD1_WIDTH0`"] pub struct CARD1_WIDTH0_W<'a> { w: &'a mut W, } impl<'a> CARD1_WIDTH0_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1); self.w } } #[doc = "Reader of field `CARD0_WIDTH1`"] pub type CARD0_WIDTH1_R = crate::R<bool, bool>; #[doc = "Write proxy for field `CARD0_WIDTH1`"] pub struct CARD0_WIDTH1_W<'a> { w: &'a mut W, } impl<'a> CARD0_WIDTH1_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to 
the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16); self.w } } #[doc = "Reader of field `CARD1_WIDTH1`"] pub type CARD1_WIDTH1_R = crate::R<bool, bool>; #[doc = "Write proxy for field `CARD1_WIDTH1`"] pub struct CARD1_WIDTH1_W<'a> { w: &'a mut W, } impl<'a> CARD1_WIDTH1_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17); self.w } } impl R { #[doc = "Bit 0 - Indicates if card 0 is 1-bit or 4-bit: 0 - 1-bit mode 1 - 4-bit mode 1 and 4-bit modes only work when 8-bit mode in CARD0_WIDTH1 is not enabled (bit 16 in this register is set to 0)."] #[inline(always)] pub fn card0_width0(&self) -> CARD0_WIDTH0_R { CARD0_WIDTH0_R::new((self.bits & 0x01) != 0) } #[doc = "Bit 1 - Indicates if card 1 is 1-bit or 4-bit: 0 - 1-bit mode 1 - 4-bit mode 1 and 4-bit modes only work when 8-bit mode in CARD1_WIDTH1 is not enabled (bit 16 in this register is set to 0)."] #[inline(always)] pub fn card1_width0(&self) -> CARD1_WIDTH0_R { CARD1_WIDTH0_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 16 - Indicates if card 0 is 8-bit: 0 - Non 8-bit mode 1 - 8-bit mode."] #[inline(always)] pub fn card0_width1(&self) -> CARD0_WIDTH1_R { CARD0_WIDTH1_R::new(((self.bits >> 16) & 0x01) != 0) } #[doc = "Bit 17 - Indicates if card 1 is 8-bit: 0 - Non 8-bit mode 1 - 8-bit mode."] #[inline(always)] pub fn card1_width1(&self) -> CARD1_WIDTH1_R { CARD1_WIDTH1_R::new(((self.bits >> 17) & 0x01) != 0) } } impl W { #[doc = "Bit 0 - Indicates if card 0 is 1-bit or 4-bit: 0 - 1-bit mode 1 - 4-bit mode 1 and 4-bit modes only work when 8-bit mode in CARD0_WIDTH1 is 
not enabled (bit 16 in this register is set to 0)."] #[inline(always)] pub fn card0_width0(&mut self) -> CARD0_WIDTH0_W { CARD0_WIDTH0_W { w: self } } #[doc = "Bit 1 - Indicates if card 1 is 1-bit or 4-bit: 0 - 1-bit mode 1 - 4-bit mode 1 and 4-bit modes only work when 8-bit mode in CARD1_WIDTH1 is not enabled (bit 16 in this register is set to 0)."] #[inline(always)] pub fn card1_width0(&mut self) -> CARD1_WIDTH0_W { CARD1_WIDTH0_W { w: self } } #[doc = "Bit 16 - Indicates if card 0 is 8-bit: 0 - Non 8-bit mode 1 - 8-bit mode."] #[inline(always)] pub fn card0_width1(&mut self) -> CARD0_WIDTH1_W { CARD0_WIDTH1_W { w: self } } #[doc = "Bit 17 - Indicates if card 1 is 8-bit: 0 - Non 8-bit mode 1 - 8-bit mode."] #[inline(always)] pub fn card1_width1(&mut self) -> CARD1_WIDTH1_W { CARD1_WIDTH1_W { w: self } } }
#[doc = "Reader of register CTYPE"] pub type R = crate::R<u32, super::CTYPE>; #[doc = "Writer for register CTYPE"] pub type W = crate::W<u32, super::CTYPE>; #[doc = "Register CTYPE `reset()`'s with value 0"] impl crate::ResetValue for super::CTYPE { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `CARD0_WIDTH0`"] pub type CARD0_WIDTH0_R = crate::R<bool, bool>; #[doc = "Write proxy for field `CARD0_WIDTH0`"] pub struct CARD0_WIDTH0_W<'a> { w: &'a mut W, } impl<'a> CARD0_WIDTH0_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } #[doc = "Reader of field `CARD1_WIDTH0`"] pub type CARD1_WIDTH0_R = crate::R<bool, bool>; #[doc = "Write proxy for field `CARD1_WIDTH0`"] pub struct CARD1_WIDTH0_W<'a> { w: &'a mut W, } impl<'a> CARD1_WIDTH0_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1); self.w } } #[doc = "Reader of field `CARD0_WIDTH1`"] pub type CARD0_WIDTH1_R = crate::R<bool, bool>; #[doc = "Write proxy for field `CARD0_WIDTH1`"] pub struct CARD0_WIDTH1_W<'a> { w: &'a mut W, } impl<'a> CARD0_WIDTH1_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to 
the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16); self.w } } #[doc = "Reader of field `CARD1_WIDTH1`"] pub type CARD1_WIDTH1_R = crate::R<bool, bool>; #[doc = "Write proxy for field `CARD1_WIDTH1`"] pub struct CARD1_WIDTH1_W<'a> { w: &'a mut W, } impl<'a> CARD1_WIDTH1_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17); self.w } } impl R { #[doc = "Bit 0 - Indicates if card 0 is 1-bit or 4-bit: 0 - 1-bit mode 1 - 4-bit mode 1 and 4-bit modes only work when 8-bit mode in CARD0_WIDTH1 is not enabled (bit 16 in this register is set to 0)."] #[inline(always)] pub fn card0_width0(&self) -> CARD0_WIDTH0_R { CARD0_WIDTH0_R::new((self.bits & 0x01) != 0) } #[doc = "Bit 1 - Indicates if card 1 is 1-bit or 4-bit: 0 - 1-bit mode 1 - 4-bit mode 1 and 4-bit modes only work when 8-bit mode in CARD1_WIDTH1 is not enabled (bit 16 in this register is set to 0)."] #[inline(always)] pub fn card1_width0(&self) -> CARD1_WIDTH0_R { CARD1_WIDTH0_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 16 - Indicates if card 0 is 8-bit: 0 - Non 8-bit mode 1 - 8-bit mode."] #[inline(always)] pub fn card0_width1(&self) -> CARD0_WIDTH1_R { CARD0_WIDTH1_R::new(((self.bits >> 16) & 0x01) != 0) } #[doc = "Bit 17 - Indicates if card 1 is 8-b
pub fn card1_width0(&mut self) -> CARD1_WIDTH0_W { CARD1_WIDTH0_W { w: self } } #[doc = "Bit 16 - Indicates if card 0 is 8-bit: 0 - Non 8-bit mode 1 - 8-bit mode."] #[inline(always)] pub fn card0_width1(&mut self) -> CARD0_WIDTH1_W { CARD0_WIDTH1_W { w: self } } #[doc = "Bit 17 - Indicates if card 1 is 8-bit: 0 - Non 8-bit mode 1 - 8-bit mode."] #[inline(always)] pub fn card1_width1(&mut self) -> CARD1_WIDTH1_W { CARD1_WIDTH1_W { w: self } } }
it: 0 - Non 8-bit mode 1 - 8-bit mode."] #[inline(always)] pub fn card1_width1(&self) -> CARD1_WIDTH1_R { CARD1_WIDTH1_R::new(((self.bits >> 17) & 0x01) != 0) } } impl W { #[doc = "Bit 0 - Indicates if card 0 is 1-bit or 4-bit: 0 - 1-bit mode 1 - 4-bit mode 1 and 4-bit modes only work when 8-bit mode in CARD0_WIDTH1 is not enabled (bit 16 in this register is set to 0)."] #[inline(always)] pub fn card0_width0(&mut self) -> CARD0_WIDTH0_W { CARD0_WIDTH0_W { w: self } } #[doc = "Bit 1 - Indicates if card 1 is 1-bit or 4-bit: 0 - 1-bit mode 1 - 4-bit mode 1 and 4-bit modes only work when 8-bit mode in CARD1_WIDTH1 is not enabled (bit 16 in this register is set to 0)."] #[inline(always)]
random
[ { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Res...
Rust
sway-core/src/semantic_analysis/ast_node/expression/match_expression/analysis/patstack.rs
FuelLabs/sway
0190b5dac4735fd2a34528e48cc2e0c9606b5ce8
use std::{cmp::Ordering, fmt, slice::Iter, vec::IntoIter}; use itertools::Itertools; use sway_types::Span; use crate::{ error::{err, ok}, CompileError, CompileResult, }; use super::pattern::Pattern; #[derive(Clone, Debug, PartialEq, Eq)] pub(crate) struct PatStack { pats: Vec<Pattern>, } impl PatStack { pub(crate) fn empty() -> Self { PatStack { pats: vec![] } } pub(crate) fn from_pattern(p: Pattern) -> Self { PatStack { pats: vec![p] } } pub(crate) fn fill_wildcards(n: usize) -> Self { let mut pats = vec![]; for _ in 0..n { pats.push(Pattern::Wildcard); } PatStack { pats } } pub(crate) fn first(&self, span: &Span) -> CompileResult<Pattern> { let warnings = vec![]; let mut errors = vec![]; match self.pats.first() { Some(first) => ok(first.to_owned(), warnings, errors), None => { errors.push(CompileError::Internal("empty PatStack", span.clone())); err(warnings, errors) } } } pub(crate) fn split_first(&self, span: &Span) -> CompileResult<(Pattern, PatStack)> { let warnings = vec![]; let mut errors = vec![]; match self.pats.split_first() { Some((first, pat_stack_contents)) => { let pat_stack = PatStack { pats: pat_stack_contents.to_vec(), }; ok((first.to_owned(), pat_stack), warnings, errors) } None => { errors.push(CompileError::Internal("empty PatStack", span.clone())); err(warnings, errors) } } } pub(crate) fn split_at(&self, n: usize, span: &Span) -> CompileResult<(PatStack, PatStack)> { let warnings = vec![]; let mut errors = vec![]; if n > self.len() { errors.push(CompileError::Internal( "attempting to split OOB", span.clone(), )); return err(warnings, errors); } let (a, b) = self.pats.split_at(n); let x = PatStack { pats: a.to_vec() }; let y = PatStack { pats: b.to_vec() }; ok((x, y), warnings, errors) } pub(crate) fn push(&mut self, other: Pattern) { self.pats.push(other) } fn get_mut(&mut self, n: usize, span: &Span) -> CompileResult<&mut Pattern> { let warnings = vec![]; let mut errors = vec![]; match self.pats.get_mut(n) { Some(elem) => ok(elem, warnings, 
errors), None => { errors.push(CompileError::Internal( "cant retrieve mutable reference to element", span.clone(), )); err(warnings, errors) } } } pub(crate) fn append(&mut self, others: &mut PatStack) { self.pats.append(&mut others.pats); } pub(crate) fn prepend(&mut self, other: Pattern) { self.pats.insert(0, other); } pub(crate) fn len(&self) -> usize { self.pats.len() } pub(crate) fn is_empty(&self) -> bool { self.flatten().filter_out_wildcards().pats.is_empty() } pub(crate) fn contains(&self, pat: &Pattern) -> bool { self.pats.contains(pat) } fn contains_or_pattern(&self) -> bool { for pat in self.pats.iter() { if let Pattern::Or(_) = pat { return true; } } false } pub(crate) fn iter(&self) -> Iter<'_, Pattern> { self.pats.iter() } pub(crate) fn into_iter(self) -> IntoIter<Pattern> { self.pats.into_iter() } pub(crate) fn flatten(&self) -> PatStack { let mut flattened = PatStack::empty(); for pat in self.pats.iter() { flattened.append(&mut pat.flatten()); } flattened } pub(crate) fn sort(self) -> PatStack { let mut sorted = self.pats; sorted.sort(); PatStack::from(sorted) } pub(crate) fn filter_out_wildcards(&self) -> PatStack { let mut pats = PatStack::empty(); for pat in self.pats.iter() { match pat { Pattern::Wildcard => {} pat => pats.push(pat.to_owned()), } } pats } pub(crate) fn serialize_multi_patterns(self, span: &Span) -> CompileResult<Vec<PatStack>> { let mut warnings = vec![]; let mut errors = vec![]; let mut output: Vec<PatStack> = vec![]; let mut stack: Vec<PatStack> = vec![self]; while !stack.is_empty() { let top = match stack.pop() { Some(top) => top, None => { errors.push(CompileError::Internal("can't pop Vec", span.clone())); return err(warnings, errors); } }; if !top.contains_or_pattern() { output.push(top); } else { for (i, pat) in top.clone().into_iter().enumerate() { if let Pattern::Or(elems) = pat { for elem in elems.into_iter() { let mut top = top.clone(); let r = check!( top.get_mut(i, span), return err(warnings, errors), warnings, 
errors ); let _ = std::mem::replace(r, elem); stack.push(top); } } } } } output.reverse(); ok(output, warnings, errors) } pub(crate) fn remove_duplicates(self) -> PatStack { let mut new_pats = vec![]; for pat in self.pats.into_iter() { if !new_pats.contains(&pat) { new_pats.push(pat); } } PatStack::from(new_pats) } } impl From<Vec<Pattern>> for PatStack { fn from(pats: Vec<Pattern>) -> Self { PatStack { pats } } } impl fmt::Display for PatStack { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let s = self .flatten() .sort() .remove_duplicates() .into_iter() .map(|x| format!("{}", x)) .join(", "); write!(f, "{}", s) } } impl std::cmp::Ord for PatStack { fn cmp(&self, other: &Self) -> Ordering { let sorted_self = self.clone().sort(); let sorted_other = other.clone().sort(); sorted_self.pats.cmp(&sorted_other.pats) } } impl std::cmp::PartialOrd for PatStack { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) } }
use std::{cmp::Ordering, fmt, slice::Iter, vec::IntoIter}; use itertools::Itertools; use sway_types::Span; use crate::{ error::{err, ok}, CompileError, CompileResult, }; use super::pattern::Pattern; #[derive(Clone, Debug, PartialEq, Eq)] pub(crate) struct PatStack { pats: Vec<Pattern>, } impl PatStack { pub(crate) fn empty() -> Self { PatStack { pats: vec![] } } pub(crate) fn from_pattern(p: Pattern) -> Self { PatStack { pats: vec![p] } } pub(crate) fn fill_wildcards(n: usize) -> Self { let mut pats = vec![]; for _ in 0..n { pats.push(Pattern::Wildcard); } PatStack { pats } } pub(crate) fn first(&self, span: &Span) -> CompileResult<Pattern> { let warnings = vec![]; let mut errors = vec![]; match self.pats.first() { Some(first) => ok(first.to_owned(), warnings, errors), None => { errors.push(CompileError::Internal("empty PatStack", span.clone())); err(warnings, errors) } } } pub(crate) fn split_first(&self, span: &Span) -> CompileResult<(Pattern, PatStack)> { let warnings = vec![]; let mut errors = vec![]; match self.pats.split_first() { Some((first, pat_stack_contents)) => { let pat_stack = PatStack { pats: pat_stack_contents.to_vec(), }; ok((first.to_owned(), pat_stack), warnings, errors) } None => { errors.push(CompileError::Internal("empty PatStack", span.clone())); err(warnings, errors) } } } pub(crate) fn split_at(&self, n: usize, span: &Span) -> CompileResult<(PatStack, PatStack)> { let warnings = vec![]; let mut errors = vec![]; if n > self.len() { errors.push(CompileError::Internal( "attempting to split OOB", span.clone(), )); return err(warnings, errors); } let (a, b) = self.pats.split_at(n); let x = PatStack { pats: a.to_vec() }; let y = PatStack { pats: b.to_vec() }; ok((x, y), warnings, errors) } pub(crate) fn push(&mut self, other: Pattern) { self.pats.push(other) } fn get_mut(&mut self, n: usize, span: &Span) -> CompileResult<&mut Pattern> { let warnings = vec![]; let mut errors = vec![]; match self.pats.get_mut(n) { Some(elem) => ok(elem, warnings, 
errors), None => { errors.push(CompileError::Internal( "cant retrieve mutable reference to element", span.clone(), )); err(warnings, errors) } } } pub(crate) fn append(&mut self, others: &mut PatStack) { self.pats.append(&mut others.pats); } pub(crate) fn prepend(&mut self, other: Pattern) { self.pats.insert(0, other); } pub(crate) fn len(&self) -> usize { self.pats.len() } pub(crate) fn is_empty(&self) -> bool { self.flatten().filter_out_wildcards().pats.is_empty() } pub(crate) fn contains(&self, pat: &Pattern) -> bool { self.pats.contains(pat) } fn contains_or_pattern(&self) -> bool { for pat in self.pats.iter() { if let Pattern::Or(_) = pat { return true; } } false } pub(crate) fn iter(&self) -> Iter<'_, Pattern> { self.pats.iter() } pub(crate) fn into_iter(self) -> IntoIter<Pattern> { self.pats.into_iter() } pub(crate) fn flatten(&self) -> PatStack { let mut flattened = PatStack::empty(); for pat in self.pats.iter() { flattened.append(&mut pat.flatten()); } flattened } pub(crate) fn sort(self) -> PatStack { let mut sorted = self.pats; sorted.sort(); PatStack::from(sorted) } pub(crate) fn filter_out_wildcards(&self) -> PatStack { let mut pats = PatStack::empty(); for pat in self.pats.iter() { match pat { Pattern::Wildcard => {} pat => pats.push(pat.to_owned()), } } pats }
for (i, pat) in top.clone().into_iter().enumerate() { if let Pattern::Or(elems) = pat { for elem in elems.into_iter() { let mut top = top.clone(); let r = check!( top.get_mut(i, span), return err(warnings, errors), warnings, errors ); let _ = std::mem::replace(r, elem); stack.push(top); } } } } } output.reverse(); ok(output, warnings, errors) } pub(crate) fn remove_duplicates(self) -> PatStack { let mut new_pats = vec![]; for pat in self.pats.into_iter() { if !new_pats.contains(&pat) { new_pats.push(pat); } } PatStack::from(new_pats) } } impl From<Vec<Pattern>> for PatStack { fn from(pats: Vec<Pattern>) -> Self { PatStack { pats } } } impl fmt::Display for PatStack { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let s = self .flatten() .sort() .remove_duplicates() .into_iter() .map(|x| format!("{}", x)) .join(", "); write!(f, "{}", s) } } impl std::cmp::Ord for PatStack { fn cmp(&self, other: &Self) -> Ordering { let sorted_self = self.clone().sort(); let sorted_other = other.clone().sort(); sorted_self.pats.cmp(&sorted_other.pats) } } impl std::cmp::PartialOrd for PatStack { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) } }
pub(crate) fn serialize_multi_patterns(self, span: &Span) -> CompileResult<Vec<PatStack>> { let mut warnings = vec![]; let mut errors = vec![]; let mut output: Vec<PatStack> = vec![]; let mut stack: Vec<PatStack> = vec![self]; while !stack.is_empty() { let top = match stack.pop() { Some(top) => top, None => { errors.push(CompileError::Internal("can't pop Vec", span.clone())); return err(warnings, errors); } }; if !top.contains_or_pattern() { output.push(top); } else {
random
[ { "content": "/// Given a `PatStack` *pⁱ* from `Matrix` *P*, compute the resulting row of the\n\n/// default `Matrix` *D(P)*.\n\n///\n\n/// A row in the default `Matrix` \"shrinks itself\" or \"eliminates itself\"\n\n/// depending on if its possible to make general claims the first element of the\n\n/// row *pⁱ...
Rust
tests/std.rs
passcod/async-process
bc9719e64a5d82d7b122b7e1a59dfb5db79148a5
use std::env; use std::io; use std::str; use async_process::{Command, Output, Stdio}; use futures_lite::{future, prelude::*}; #[test] fn smoke() { future::block_on(async { let p = if cfg!(target_os = "windows") { Command::new("cmd").args(&["/C", "exit 0"]).spawn() } else { Command::new("true").spawn() }; assert!(p.is_ok()); let mut p = p.unwrap(); assert!(p.status().await.unwrap().success()); }) } #[test] fn smoke_failure() { match Command::new("if-this-is-a-binary-then-the-world-has-ended").spawn() { Ok(..) => panic!(), Err(..) => {} } } #[test] fn exit_reported_right() { future::block_on(async { let p = if cfg!(target_os = "windows") { Command::new("cmd").args(&["/C", "exit 1"]).spawn() } else { Command::new("false").spawn() }; assert!(p.is_ok()); let mut p = p.unwrap(); assert!(p.status().await.unwrap().code() == Some(1)); drop(p.status().await); }) } #[test] #[cfg(unix)] fn signal_reported_right() { use std::os::unix::process::ExitStatusExt; future::block_on(async { let mut p = Command::new("/bin/sh") .arg("-c") .arg("read a") .stdin(Stdio::piped()) .spawn() .unwrap(); p.kill().unwrap(); match p.status().await.unwrap().signal() { Some(9) => {} result => panic!("not terminated by signal 9 (instead, {:?})", result), } }) } pub async fn run_output(mut cmd: Command) -> String { let p = cmd.spawn(); assert!(p.is_ok()); let mut p = p.unwrap(); assert!(p.stdout.is_some()); let mut ret = String::new(); p.stdout .as_mut() .unwrap() .read_to_string(&mut ret) .await .unwrap(); assert!(p.status().await.unwrap().success()); return ret; } #[test] fn stdout_works() { future::block_on(async { if cfg!(target_os = "windows") { let mut cmd = Command::new("cmd"); cmd.args(&["/C", "echo foobar"]).stdout(Stdio::piped()); assert_eq!(run_output(cmd).await, "foobar\r\n"); } else { let mut cmd = Command::new("echo"); cmd.arg("foobar").stdout(Stdio::piped()); assert_eq!(run_output(cmd).await, "foobar\n"); } }) } #[test] #[cfg_attr(windows, ignore)] fn set_current_dir_works() { 
future::block_on(async { let mut cmd = Command::new("/bin/sh"); cmd.arg("-c") .arg("pwd") .current_dir("/") .stdout(Stdio::piped()); assert_eq!(run_output(cmd).await, "/\n"); }) } #[test] #[cfg_attr(windows, ignore)] fn stdin_works() { future::block_on(async { let mut p = Command::new("/bin/sh") .arg("-c") .arg("read line; echo $line") .stdin(Stdio::piped()) .stdout(Stdio::piped()) .spawn() .unwrap(); p.stdin .as_mut() .unwrap() .write("foobar".as_bytes()) .await .unwrap(); drop(p.stdin.take()); let mut out = String::new(); p.stdout .as_mut() .unwrap() .read_to_string(&mut out) .await .unwrap(); assert!(p.status().await.unwrap().success()); assert_eq!(out, "foobar\n"); }) } #[test] fn test_process_status() { future::block_on(async { let mut status = if cfg!(target_os = "windows") { Command::new("cmd") .args(&["/C", "exit 1"]) .status() .await .unwrap() } else { Command::new("false").status().await.unwrap() }; assert!(status.code() == Some(1)); status = if cfg!(target_os = "windows") { Command::new("cmd") .args(&["/C", "exit 0"]) .status() .await .unwrap() } else { Command::new("true").status().await.unwrap() }; assert!(status.success()); }) } #[test] fn test_process_output_fail_to_start() { future::block_on(async { match Command::new("/no-binary-by-this-name-should-exist") .output() .await { Err(e) => assert_eq!(e.kind(), io::ErrorKind::NotFound), Ok(..) 
=> panic!(), } }) } #[test] fn test_process_output_output() { future::block_on(async { let Output { status, stdout, stderr, } = if cfg!(target_os = "windows") { Command::new("cmd") .args(&["/C", "echo hello"]) .output() .await .unwrap() } else { Command::new("echo").arg("hello").output().await.unwrap() }; let output_str = str::from_utf8(&stdout).unwrap(); assert!(status.success()); assert_eq!(output_str.trim().to_string(), "hello"); assert_eq!(stderr, Vec::new()); }) } #[test] fn test_process_output_error() { future::block_on(async { let Output { status, stdout, stderr, } = if cfg!(target_os = "windows") { Command::new("cmd") .args(&["/C", "mkdir ."]) .output() .await .unwrap() } else { Command::new("mkdir").arg("./").output().await.unwrap() }; assert!(status.code() == Some(1)); assert_eq!(stdout, Vec::new()); assert!(!stderr.is_empty()); }) } #[test] fn test_finish_once() { future::block_on(async { let mut prog = if cfg!(target_os = "windows") { Command::new("cmd").args(&["/C", "exit 1"]).spawn().unwrap() } else { Command::new("false").spawn().unwrap() }; assert!(prog.status().await.unwrap().code() == Some(1)); }) } #[test] fn test_finish_twice() { future::block_on(async { let mut prog = if cfg!(target_os = "windows") { Command::new("cmd").args(&["/C", "exit 1"]).spawn().unwrap() } else { Command::new("false").spawn().unwrap() }; assert!(prog.status().await.unwrap().code() == Some(1)); assert!(prog.status().await.unwrap().code() == Some(1)); }) } #[test] fn test_wait_with_output_once() { future::block_on(async { let prog = if cfg!(target_os = "windows") { Command::new("cmd") .args(&["/C", "echo hello"]) .stdout(Stdio::piped()) .spawn() .unwrap() } else { Command::new("echo") .arg("hello") .stdout(Stdio::piped()) .spawn() .unwrap() }; let Output { status, stdout, stderr, } = prog.output().await.unwrap(); let output_str = str::from_utf8(&stdout).unwrap(); assert!(status.success()); assert_eq!(output_str.trim().to_string(), "hello"); assert_eq!(stderr, Vec::new()); 
}) } #[cfg(all(unix, not(target_os = "android")))] pub fn env_cmd() -> Command { Command::new("env") } #[cfg(target_os = "android")] pub fn env_cmd() -> Command { let mut cmd = Command::new("/system/bin/sh"); cmd.arg("-c").arg("set"); cmd } #[cfg(windows)] pub fn env_cmd() -> Command { let mut cmd = Command::new("cmd"); cmd.arg("/c").arg("set"); cmd } #[test] fn test_override_env() { future::block_on(async { let mut cmd = env_cmd(); cmd.env_clear().env("RUN_TEST_NEW_ENV", "123"); if let Some(p) = env::var_os("PATH") { cmd.env("PATH", &p); } let result = cmd.output().await.unwrap(); let output = String::from_utf8_lossy(&result.stdout).to_string(); assert!( output.contains("RUN_TEST_NEW_ENV=123"), "didn't find RUN_TEST_NEW_ENV inside of:\n\n{}", output ); }) } #[test] fn test_add_to_env() { future::block_on(async { let result = env_cmd() .env("RUN_TEST_NEW_ENV", "123") .output() .await .unwrap(); let output = String::from_utf8_lossy(&result.stdout).to_string(); assert!( output.contains("RUN_TEST_NEW_ENV=123"), "didn't find RUN_TEST_NEW_ENV inside of:\n\n{}", output ); }) } #[test] fn test_capture_env_at_spawn() { future::block_on(async { let mut cmd = env_cmd(); cmd.env("RUN_TEST_NEW_ENV1", "123"); env::set_var("RUN_TEST_NEW_ENV2", "456"); let result = cmd.output().await.unwrap(); env::remove_var("RUN_TEST_NEW_ENV2"); let output = String::from_utf8_lossy(&result.stdout).to_string(); assert!( output.contains("RUN_TEST_NEW_ENV1=123"), "didn't find RUN_TEST_NEW_ENV1 inside of:\n\n{}", output ); assert!( output.contains("RUN_TEST_NEW_ENV2=456"), "didn't find RUN_TEST_NEW_ENV2 inside of:\n\n{}", output ); }) } #[test] #[cfg(unix)] fn child_status_preserved_with_kill_on_drop() { future::block_on(async { let p = Command::new("true").kill_on_drop(true).spawn().unwrap(); let res = p.output().await; assert!(res.unwrap().status.success()); }) }
use std::env; use std::io; use std::str; use async_process::{Command, Output, Stdio}; use futures_lite::{future, prelude::*}; #[test] fn smoke() { future::block_on(async { let p = if cfg!(target_os = "windows") { Command::new("cmd").args(&["/C", "exit 0"]).spawn() } else { Command::new("true").spawn() }; assert!(p.is_ok()); let mut p = p.unwrap(); assert!(p.status().await.unwrap().success()); }) } #[test] fn smoke_failure() {
#[test] fn exit_reported_right() { future::block_on(async { let p = if cfg!(target_os = "windows") { Command::new("cmd").args(&["/C", "exit 1"]).spawn() } else { Command::new("false").spawn() }; assert!(p.is_ok()); let mut p = p.unwrap(); assert!(p.status().await.unwrap().code() == Some(1)); drop(p.status().await); }) } #[test] #[cfg(unix)] fn signal_reported_right() { use std::os::unix::process::ExitStatusExt; future::block_on(async { let mut p = Command::new("/bin/sh") .arg("-c") .arg("read a") .stdin(Stdio::piped()) .spawn() .unwrap(); p.kill().unwrap(); match p.status().await.unwrap().signal() { Some(9) => {} result => panic!("not terminated by signal 9 (instead, {:?})", result), } }) } pub async fn run_output(mut cmd: Command) -> String { let p = cmd.spawn(); assert!(p.is_ok()); let mut p = p.unwrap(); assert!(p.stdout.is_some()); let mut ret = String::new(); p.stdout .as_mut() .unwrap() .read_to_string(&mut ret) .await .unwrap(); assert!(p.status().await.unwrap().success()); return ret; } #[test] fn stdout_works() { future::block_on(async { if cfg!(target_os = "windows") { let mut cmd = Command::new("cmd"); cmd.args(&["/C", "echo foobar"]).stdout(Stdio::piped()); assert_eq!(run_output(cmd).await, "foobar\r\n"); } else { let mut cmd = Command::new("echo"); cmd.arg("foobar").stdout(Stdio::piped()); assert_eq!(run_output(cmd).await, "foobar\n"); } }) } #[test] #[cfg_attr(windows, ignore)] fn set_current_dir_works() { future::block_on(async { let mut cmd = Command::new("/bin/sh"); cmd.arg("-c") .arg("pwd") .current_dir("/") .stdout(Stdio::piped()); assert_eq!(run_output(cmd).await, "/\n"); }) } #[test] #[cfg_attr(windows, ignore)] fn stdin_works() { future::block_on(async { let mut p = Command::new("/bin/sh") .arg("-c") .arg("read line; echo $line") .stdin(Stdio::piped()) .stdout(Stdio::piped()) .spawn() .unwrap(); p.stdin .as_mut() .unwrap() .write("foobar".as_bytes()) .await .unwrap(); drop(p.stdin.take()); let mut out = String::new(); p.stdout .as_mut() 
.unwrap() .read_to_string(&mut out) .await .unwrap(); assert!(p.status().await.unwrap().success()); assert_eq!(out, "foobar\n"); }) } #[test] fn test_process_status() { future::block_on(async { let mut status = if cfg!(target_os = "windows") { Command::new("cmd") .args(&["/C", "exit 1"]) .status() .await .unwrap() } else { Command::new("false").status().await.unwrap() }; assert!(status.code() == Some(1)); status = if cfg!(target_os = "windows") { Command::new("cmd") .args(&["/C", "exit 0"]) .status() .await .unwrap() } else { Command::new("true").status().await.unwrap() }; assert!(status.success()); }) } #[test] fn test_process_output_fail_to_start() { future::block_on(async { match Command::new("/no-binary-by-this-name-should-exist") .output() .await { Err(e) => assert_eq!(e.kind(), io::ErrorKind::NotFound), Ok(..) => panic!(), } }) } #[test] fn test_process_output_output() { future::block_on(async { let Output { status, stdout, stderr, } = if cfg!(target_os = "windows") { Command::new("cmd") .args(&["/C", "echo hello"]) .output() .await .unwrap() } else { Command::new("echo").arg("hello").output().await.unwrap() }; let output_str = str::from_utf8(&stdout).unwrap(); assert!(status.success()); assert_eq!(output_str.trim().to_string(), "hello"); assert_eq!(stderr, Vec::new()); }) } #[test] fn test_process_output_error() { future::block_on(async { let Output { status, stdout, stderr, } = if cfg!(target_os = "windows") { Command::new("cmd") .args(&["/C", "mkdir ."]) .output() .await .unwrap() } else { Command::new("mkdir").arg("./").output().await.unwrap() }; assert!(status.code() == Some(1)); assert_eq!(stdout, Vec::new()); assert!(!stderr.is_empty()); }) } #[test] fn test_finish_once() { future::block_on(async { let mut prog = if cfg!(target_os = "windows") { Command::new("cmd").args(&["/C", "exit 1"]).spawn().unwrap() } else { Command::new("false").spawn().unwrap() }; assert!(prog.status().await.unwrap().code() == Some(1)); }) } #[test] fn test_finish_twice() { 
future::block_on(async { let mut prog = if cfg!(target_os = "windows") { Command::new("cmd").args(&["/C", "exit 1"]).spawn().unwrap() } else { Command::new("false").spawn().unwrap() }; assert!(prog.status().await.unwrap().code() == Some(1)); assert!(prog.status().await.unwrap().code() == Some(1)); }) } #[test] fn test_wait_with_output_once() { future::block_on(async { let prog = if cfg!(target_os = "windows") { Command::new("cmd") .args(&["/C", "echo hello"]) .stdout(Stdio::piped()) .spawn() .unwrap() } else { Command::new("echo") .arg("hello") .stdout(Stdio::piped()) .spawn() .unwrap() }; let Output { status, stdout, stderr, } = prog.output().await.unwrap(); let output_str = str::from_utf8(&stdout).unwrap(); assert!(status.success()); assert_eq!(output_str.trim().to_string(), "hello"); assert_eq!(stderr, Vec::new()); }) } #[cfg(all(unix, not(target_os = "android")))] pub fn env_cmd() -> Command { Command::new("env") } #[cfg(target_os = "android")] pub fn env_cmd() -> Command { let mut cmd = Command::new("/system/bin/sh"); cmd.arg("-c").arg("set"); cmd } #[cfg(windows)] pub fn env_cmd() -> Command { let mut cmd = Command::new("cmd"); cmd.arg("/c").arg("set"); cmd } #[test] fn test_override_env() { future::block_on(async { let mut cmd = env_cmd(); cmd.env_clear().env("RUN_TEST_NEW_ENV", "123"); if let Some(p) = env::var_os("PATH") { cmd.env("PATH", &p); } let result = cmd.output().await.unwrap(); let output = String::from_utf8_lossy(&result.stdout).to_string(); assert!( output.contains("RUN_TEST_NEW_ENV=123"), "didn't find RUN_TEST_NEW_ENV inside of:\n\n{}", output ); }) } #[test] fn test_add_to_env() { future::block_on(async { let result = env_cmd() .env("RUN_TEST_NEW_ENV", "123") .output() .await .unwrap(); let output = String::from_utf8_lossy(&result.stdout).to_string(); assert!( output.contains("RUN_TEST_NEW_ENV=123"), "didn't find RUN_TEST_NEW_ENV inside of:\n\n{}", output ); }) } #[test] fn test_capture_env_at_spawn() { future::block_on(async { let mut cmd = 
env_cmd(); cmd.env("RUN_TEST_NEW_ENV1", "123"); env::set_var("RUN_TEST_NEW_ENV2", "456"); let result = cmd.output().await.unwrap(); env::remove_var("RUN_TEST_NEW_ENV2"); let output = String::from_utf8_lossy(&result.stdout).to_string(); assert!( output.contains("RUN_TEST_NEW_ENV1=123"), "didn't find RUN_TEST_NEW_ENV1 inside of:\n\n{}", output ); assert!( output.contains("RUN_TEST_NEW_ENV2=456"), "didn't find RUN_TEST_NEW_ENV2 inside of:\n\n{}", output ); }) } #[test] #[cfg(unix)] fn child_status_preserved_with_kill_on_drop() { future::block_on(async { let p = Command::new("true").kill_on_drop(true).spawn().unwrap(); let res = p.output().await; assert!(res.unwrap().status.success()); }) }
match Command::new("if-this-is-a-binary-then-the-world-has-ended").spawn() { Ok(..) => panic!(), Err(..) => {} } }
function_block-function_prefix_line
[ { "content": "//! Windows-specific extensions.\n\n\n\nuse std::os::windows::process::CommandExt as _;\n\n\n\nuse crate::Command;\n\n\n\n/// Windows-specific extensions to the [`Command`] builder.\n", "file_path": "src/windows.rs", "rank": 19, "score": 23251.021665620086 }, { "content": "/// ...
Rust
src/main.rs
AldanTanneo/lotr-mod-discord-bot
624ae132fefa5bb4b1f5762629f24c9f398e9891
pub mod announcement; pub mod api; pub mod check; pub mod commands; pub mod constants; pub mod database; pub mod event_handler; pub mod qa_answers; pub mod role_cache; pub mod utils; use mysql_async::OptsBuilder; use serenity::client::bridge::gateway::GatewayIntents; use serenity::client::ClientBuilder; use serenity::framework::standard::{macros::group, StandardFramework}; use serenity::http::client::Http; use serenity::prelude::*; use std::env; use std::sync::Arc; use api::ReqwestClient; use check::{after_hook, dispatch_error_hook}; use commands::{ admin::*, announcements::*, bug_reports::*, custom_commands::*, general::*, help::*, meme::*, qa_setup::*, roles::*, servers::*, wiki::*, }; use constants::{BOT_ID, OWNER_ID}; use database::{ config::{get_prefix, PrefixCache}, qa_data::QaChannelsCache, DatabasePool, }; use event_handler::Handler; use role_cache::RoleCache; #[group] #[commands( help, renewed, curseforge, prefix, forge, coremod, invite, server_ip, online, donate, facebook, discord, user_info, role, listroles, instagram )] struct General; #[group] #[commands( qa_moderator, qa_answer_channel, qa_question_channel, qa_disable, qa_summary, qa_cache )] #[prefix("q&a")] #[default_command(qa_summary)] struct QA; #[group] #[commands(floppa, aeugh, dagohon, colour)] struct Meme; #[group] #[commands(wiki, tolkien, minecraft)] struct Wiki; #[group] #[commands(track, buglist, bug, resolve)] struct BugReports; #[group] #[commands( admin, floppadd, blacklist, announce, floppadmin, listguilds, define, shutdown )] struct Moderation; #[group] #[commands(custom_command)] #[default_command(custom_command)] struct CustomCommand; #[derive(Clone)] pub struct FrameworkKey(Arc<StandardFramework>); impl TypeMapKey for FrameworkKey { type Value = Self; } impl std::ops::Deref for FrameworkKey { type Target = StandardFramework; fn deref(&self) -> &Self::Target { self.0.as_ref() } } impl FrameworkKey { pub fn new(framework: StandardFramework) -> Self { Self(Arc::new(framework)) } pub 
fn as_arc(&self) -> Arc<StandardFramework> { self.0.clone() } } #[tokio::main] async fn main() { let token = env::var("DISCORD_TOKEN").expect("Expected a token in the environment"); let application_id: u64 = env::var("APPLICATION_ID") .expect("Expected an application id in the environment") .parse() .expect("APPLICATION_ID must be a valid u64"); let db_name: String = env::var("DB_NAME").expect("Expected an environment variable DB_NAME"); let db_user: String = env::var("DB_USER").expect("Expected an environment variable DB_USER"); let db_password: String = env::var("DB_PASSWORD").expect("Expected an environment variable DB_PASSWORD"); let db_server: String = env::var("DB_SERVER").expect("Expected an environment variable DB_SERVER"); let db_port: u16 = env::var("DB_PORT") .expect("Expected an environment variable DB_PORT") .parse() .expect("DB_PORT must be a valid u16"); let pool = DatabasePool::new( OptsBuilder::default() .user(Some(db_user)) .db_name(Some(db_name)) .ip_or_hostname(db_server) .pass(Some(db_password)) .tcp_port(db_port), ); let reqwest_client = ReqwestClient::new(); let role_cache = RoleCache::new(); let prefix_cache = PrefixCache::new(); let qa_channels_cache = QaChannelsCache::new(); let framework = StandardFramework::new() .configure(|c| { c.prefix("") .dynamic_prefix(|ctx, msg| { Box::pin(async move { get_prefix(ctx, msg.guild_id.unwrap_or_default()).await }) }) .on_mention(Some(BOT_ID)) .owners(vec![OWNER_ID].into_iter().collect()) .case_insensitivity(true) .delimiters(vec![' ', '\n']) }) .on_dispatch_error(dispatch_error_hook) .after(after_hook) .group(&MEME_GROUP) .group(&WIKI_GROUP) .group(&MODERATION_GROUP) .group(&BUGREPORTS_GROUP) .group(&GENERAL_GROUP) .group(&QA_GROUP) .group(&CUSTOMCOMMAND_GROUP) .bucket("basic", |b| b.delay(2).time_span(10).limit(3)) .await; let mut http = Http::new(reqwest_client.as_arc(), &format!("Bot {}", &token)); http.application_id = application_id; let framework = FrameworkKey::new(framework); let mut client = 
ClientBuilder::new_with_http(http) .event_handler(Handler) .framework_arc(framework.as_arc()) .intents(GatewayIntents::non_privileged() | GatewayIntents::GUILD_MEMBERS) .type_map_insert::<DatabasePool>(pool) .type_map_insert::<ReqwestClient>(reqwest_client) .type_map_insert::<RoleCache>(role_cache) .type_map_insert::<PrefixCache>(prefix_cache) .type_map_insert::<QaChannelsCache>(qa_channels_cache) .type_map_insert::<FrameworkKey>(framework) .await .expect("Error creating client"); { let shard_manager = client.shard_manager.clone(); tokio::spawn(async move { tokio::signal::ctrl_c().await.unwrap(); println!("Shutting down..."); shard_manager.clone().lock().await.shutdown_all().await; }); } #[cfg(unix)] { let shard_manager = client.shard_manager.clone(); tokio::spawn(async move { tokio::signal::unix::signal(tokio::signal::unix::SignalKind::terminate()) .unwrap() .recv() .await .unwrap(); println!("Shutting down..."); shard_manager.lock().await.shutdown_all().await; }); } if let Err(why) = client.start().await { println!("An error occurred while running the client: {:?}", why); } }
pub mod announcement; pub mod api; pub mod check; pub mod commands; pub mod constants; pub mod database; pub mod event_handler; pub mod qa_answers; pub mod role_cache; pub mod utils; use mysql_async::OptsBuilder; use serenity::client::bridge::gateway::GatewayIntents; use serenity::client::ClientBuilder; use serenity::framework::standard::{macros::group, StandardFramework}; use serenity::http::client::Http; use serenity::prelude::*; use std::env; use std::sync::Arc; use api::ReqwestClient; use check::{after_hook, dispatch_error_hook}; use commands::{ admin::*, announcements::*, bug_reports::*, custom_commands::*, general::*, help::*, meme::*, qa_setup::*, roles::*, servers::*, wiki::*, }; use constants::{BOT_ID, OWNER_ID}; use database::{ config::{get_prefix, PrefixCache}, qa_data::QaChannelsCache, DatabasePool, }; use event_handler::Handler; use role_cache::RoleCache; #[group] #[commands( help, renewed, curseforge, prefix, forge, coremod, invite, server_ip, online, donate, facebook, discord, user_info, role, listroles, instagram )] struct General; #[group] #[commands( qa_moderator, qa_answer_channel, qa_question_channel, qa_disable, qa_summary, qa_cache )] #[prefix("q&a")] #[default_command(qa_summary)] struct QA; #[group] #[commands(floppa, aeugh, dagohon, colour)] struct Meme; #[group] #[commands(wiki, tolkien, minecraft)] struct Wiki; #[group] #[commands(track, buglist, bug, resolve)] struct BugReports; #[group] #[commands( admin, floppadd, blacklist, announce, floppadmin, listguilds, define, shutdown )] struct Moderation; #[group] #[commands(custom_command)] #[default_command(custom_command)] struct CustomCommand; #[derive(Clone)] pub struct FrameworkKey(Arc<StandardFramework>); impl TypeMapKey for FrameworkKey { type Value = Self; } impl std::ops::Deref for FrameworkKey { type Target = StandardFramework; fn deref(&self) -> &Self::Target { self.0.as_ref() } } impl FrameworkKey { pub fn new(framework: StandardFramework) -> Self { Self(Arc::new(framework)) } pub 
fn as_arc(&self) -> Arc<StandardFramework> { self.0.clone() } } #[tokio::main] async fn main() { let token = env::var("DISCORD_TOKEN").expect("Expected a token in the environment"); let application_id: u64 = env::var("APPLICATION_ID") .expect("Expected an application id in the environment") .parse() .expect("APPLICATION_ID must be a valid u64"); let db_name: String = env::var("DB_NAME").expect("Expected an environment variable DB_NAME"); let db_user: String = env::var("DB_USER").expect("Expected an environment variable DB_USER"); let db_password: String = env::var("DB_PASSWORD").expect("Expected an environment variable DB_PASSWORD"); let db_server: String = env::var("DB_SERVER").expect("Expected an environment variable DB_SERVER"); let db_port: u16 = env::var("DB_PORT") .expect("Expected an environment variable DB_PORT") .parse() .expect("DB_PORT must be a valid u16"); let pool = DatabasePool::new( OptsBuilder::default() .user(Some(db_user)) .db_name(Some(db_name)) .ip_or_hostname(db_server) .pass(Some(db_password)) .tcp_port(db_port), ); let reqwest_client = ReqwestClient::new(); let role_cache = RoleCache::new(); let prefix_cache = PrefixCache::new(); let qa_channels_cache = QaChannelsCache::new(); let framework = StandardFramework::new() .configure(|c| { c.prefix("") .dynamic_prefix(|ctx, msg| { Box::pin(async move { get_prefix(ctx, msg.guild_id.unwrap_or_default()).await }) }) .on_mention(Some(BOT_ID)) .owners(vec![OWNER_ID].into_iter().collect()) .case_insensitivity(true) .delimiters(vec![' ', '\n']) }) .on_dispatch_error(dispatch_error_hook) .after(after_hook) .group(&MEME_GROUP) .group(&WIKI_GROUP) .group(&MODERATION_GROUP) .group(&BUGREPORTS_GROUP) .group(&GENERAL_GROUP) .group(&QA_GROUP) .group(&CUSTOMCOMMAND_GROUP) .bucket("basic", |b| b.delay(2).time_span(10).limit(3)) .await; let mut http = Http::new(reqwest_client.as_arc(), &format!("Bot {}", &token)); http.application_id = application_id; let framework = FrameworkKey::new(framework); let mut client = 
ClientBuilder::new_with_http(http) .event_handler(Handler) .framework_arc(framework.as_arc()) .intents(GatewayIntents::non_privileged() | GatewayIntents::GUILD_MEMBERS) .type_map_insert::<DatabasePool>(pool) .type_map_insert::<ReqwestClient>(reqwest_client) .type_map_insert::<RoleCache>(role_cache) .type_map_insert::<PrefixCache>(prefix_cache) .type_map_insert::<QaChannelsCache>(qa_channels_cache) .type_map_insert::<FrameworkKey>(framework) .await .expect("Error creating client"); { let shard_manager = client.shard_manager.clone(); tokio::spawn(async move { tokio::signal::ctrl_c().await.unwrap(); println!("Shutting down..."); shard_manager.clone().lock().await.shutdown_all().await; }); } #[cfg(unix)] { let shard_manager = client.shard_manager.clone(); tokio::spawn(async move { tokio::signal::unix::signal(tokio::signal::unix::SignalKind::terminate()) .unwrap() .recv() .await .unwrap(); println!("Shutting down..."); shard_manager.lock().await.shutdown_all().await; }); }
}
if let Err(why) = client.start().await { println!("An error occurred while running the client: {:?}", why); }
if_condition
[ { "content": "pub fn to_json_safe_string(s: impl ToString) -> String {\n\n // serialize as string to get string escapes\n\n let s = serde_json::ser::to_string(&serde_json::Value::String(s.to_string())).unwrap();\n\n // remove the surrounding quotes\n\n s[1..s.len() - 1].to_string()\n\n}\n\n\n\n#[cfg...
Rust
nj-core/src/buffer.rs
sehz/node-bindgen
6450525dbd3202310b1e153adb1314062842477c
use std::ptr; use std::ops::Deref; use log::trace; use crate::TryIntoJs; use crate::JSValue; use crate::sys::{napi_value, napi_ref, napi_env}; use crate::val::JsEnv; use crate::NjError; pub struct ArrayBuffer { data: Vec<u8>, } use std::fmt; use std::fmt::Debug; impl Debug for ArrayBuffer { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_fmt(format_args!("ArrayBuffer len: {}", self.data.len())) } } impl ArrayBuffer { pub fn new(data: Vec<u8>) -> Self { Self { data } } extern "C" fn finalize_buffer( _env: napi_env, _finalize_data: *mut ::std::os::raw::c_void, finalize_hint: *mut ::std::os::raw::c_void, ) { trace!("finalize array buffer"); unsafe { let ptr: *mut Vec<u8> = finalize_hint as *mut Vec<u8>; let _rust = Box::from_raw(ptr); } } } impl TryIntoJs for ArrayBuffer { fn try_to_js(self, js_env: &JsEnv) -> Result<napi_value, NjError> { let len = self.data.len(); let box_data = Box::new(self.data); let mut napi_buffer = ptr::null_mut(); let data_buffer = box_data.as_ptr(); let data_box_ptr = Box::into_raw(box_data) as *mut core::ffi::c_void; crate::napi_call_result!(crate::sys::napi_create_external_arraybuffer( js_env.inner(), data_buffer as *mut core::ffi::c_void, len, Some(Self::finalize_buffer), data_box_ptr, &mut napi_buffer ))?; Ok(napi_buffer) } } impl<'a> JSValue<'a> for &'a [u8] { fn convert_to_rust(env: &'a JsEnv, js_value: napi_value) -> Result<Self, NjError> { if !env.is_buffer(js_value)? 
{ return Err(NjError::InvalidType( "Buffer".to_owned(), env.value_type_string(js_value)?.to_owned(), )); } let buffer = env.get_buffer_info(js_value)?; Ok(buffer) } } pub struct JSArrayBuffer { env: JsEnv, napi_ref: napi_ref, buffer: &'static [u8], } unsafe impl Send for JSArrayBuffer {} impl JSArrayBuffer { pub fn as_bytes(&self) -> &[u8] { &self.buffer } } impl JSValue<'_> for JSArrayBuffer { fn convert_to_rust(env: &JsEnv, napi_value: napi_value) -> Result<Self, NjError> { use std::mem::transmute; let napi_ref = env.create_reference(napi_value, 1)?; let buffer: &'static [u8] = unsafe { transmute::<&[u8], &'static [u8]>(env.convert_to_rust(napi_value)?) }; Ok(Self { env: *env, napi_ref, buffer, }) } } impl Drop for JSArrayBuffer { fn drop(&mut self) { self.env .delete_reference(self.napi_ref) .expect("reference can't be deleted to array buf"); } } impl Deref for JSArrayBuffer { type Target = [u8]; fn deref(&self) -> &Self::Target { &self.buffer } }
use std::ptr; use std::ops::Deref; use log::trace; use crate::TryIntoJs; use crate::JSValue; use crate::sys::{napi_value, napi_ref, napi_env}; use crate::val::JsEnv; use crate::NjError; pub struct ArrayBuffer { data: Vec<u8>, } use std::fmt; use std::fmt::Debug; impl Debug for ArrayBuffer { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_fmt(format_args!("ArrayBuffer len: {}", self.data.len())) } } impl ArrayBuffer { pub fn new(data: Vec<u8>) -> Self { Self { data } } extern "C" fn finalize_buffer( _env: napi_env, _finalize_data: *mut ::std::os::raw::c_void, finalize_hint: *mut ::std::os::raw::c_void, ) { trace!("finalize array buffer"); unsafe { let ptr: *mut Vec<u8> = finalize_hint as *mut Vec<u8>; let _rust = Box::from_raw(ptr); } } } impl TryIntoJs for ArrayBuffer { fn try_to_js(self, js_env: &JsEnv) -> Result<napi_value, NjError> { let len = self.data.len(); let box_data = Box::new(self.data); let mut napi_buffer = ptr::null_mut(); let data_buffer = box_data.as_ptr(); let data_box_ptr = Box::into_raw(box_data) as *mut core::ffi::c_void; crate::napi_call_result!(crate::sys::napi_create_external_arraybuffer( js_env.inner(), data_buffer as *mut core::ffi::c_void, len, Some(Self::finalize_buffer), data_box_ptr, &mut napi_buffer ))?; Ok(napi_buffer) } } impl<'a> JSValue<'a> for &'a [u8] { fn convert_to_rust(env: &'a JsEnv, js_value: napi_value) -> Result<Self, NjError> { if !env.is_buffer(js_value)? { return
; } let buffer = env.get_buffer_info(js_value)?; Ok(buffer) } } pub struct JSArrayBuffer { env: JsEnv, napi_ref: napi_ref, buffer: &'static [u8], } unsafe impl Send for JSArrayBuffer {} impl JSArrayBuffer { pub fn as_bytes(&self) -> &[u8] { &self.buffer } } impl JSValue<'_> for JSArrayBuffer { fn convert_to_rust(env: &JsEnv, napi_value: napi_value) -> Result<Self, NjError> { use std::mem::transmute; let napi_ref = env.create_reference(napi_value, 1)?; let buffer: &'static [u8] = unsafe { transmute::<&[u8], &'static [u8]>(env.convert_to_rust(napi_value)?) }; Ok(Self { env: *env, napi_ref, buffer, }) } } impl Drop for JSArrayBuffer { fn drop(&mut self) { self.env .delete_reference(self.napi_ref) .expect("reference can't be deleted to array buf"); } } impl Deref for JSArrayBuffer { type Target = [u8]; fn deref(&self) -> &Self::Target { &self.buffer } }
Err(NjError::InvalidType( "Buffer".to_owned(), env.value_type_string(js_value)?.to_owned(), ))
call_expression
[ { "content": "#[node_bindgen]\n\nfn test3(data: JSArrayBuffer) -> Result<String, NjError> {\n\n let message = String::from_utf8(data.to_vec())?;\n\n Ok(format!(\"reply {}\", message))\n\n}\n\n\n", "file_path": "examples/buffer/src/lib.rs", "rank": 0, "score": 210644.66397764176 }, { "c...
Rust
network/src/peer_store/peer_store_impl.rs
brson/ckb
b9bf40024b8a5acd9b8871dba669c89f38be297d
use crate::{ errors::{PeerStoreError, Result}, network_group::{Group, NetworkGroup}, peer_store::{ addr_manager::AddrManager, ban_list::BanList, types::{ip_to_network, AddrInfo, BannedAddr, MultiaddrExt, PeerInfo}, Behaviour, Multiaddr, PeerScoreConfig, ReportResult, Status, ADDR_COUNT_LIMIT, ADDR_TIMEOUT_MS, }, PeerId, SessionType, }; use ipnetwork::IpNetwork; use std::cell::{Ref, RefCell}; use std::collections::{hash_map::Entry, HashMap}; #[derive(Default)] pub struct PeerStore { addr_manager: AddrManager, ban_list: RefCell<BanList>, peers: RefCell<HashMap<PeerId, PeerInfo>>, score_config: PeerScoreConfig, } impl PeerStore { pub fn new(addr_manager: AddrManager, ban_list: BanList) -> Self { PeerStore { addr_manager, ban_list: RefCell::new(ban_list), peers: Default::default(), score_config: Default::default(), } } pub fn add_connected_peer( &mut self, peer_id: PeerId, addr: Multiaddr, session_type: SessionType, ) -> Result<()> { let now_ms = faketime::unix_time_as_millis(); match self.peers.get_mut().entry(peer_id.to_owned()) { Entry::Occupied(mut entry) => { let mut peer = entry.get_mut(); peer.connected_addr = addr.clone(); peer.last_connected_at_ms = now_ms; peer.session_type = session_type; } Entry::Vacant(entry) => { let peer = PeerInfo::new(peer_id.to_owned(), addr.clone(), session_type, now_ms); entry.insert(peer); } } let score = self.score_config.default_score; if session_type.is_outbound() { self.addr_manager.add(AddrInfo::new( peer_id, addr.extract_ip_addr()?, addr.exclude_p2p(), now_ms, score, )); } Ok(()) } pub fn add_addr(&mut self, peer_id: PeerId, addr: Multiaddr) -> Result<()> { self.check_purge()?; let score = self.score_config.default_score; self.addr_manager.add(AddrInfo::new( peer_id, addr.extract_ip_addr()?, addr.exclude_p2p(), 0, score, )); Ok(()) } pub fn addr_manager(&self) -> &AddrManager { &self.addr_manager } pub fn mut_addr_manager(&mut self) -> &mut AddrManager { &mut self.addr_manager } pub fn report(&mut self, peer_id: &PeerId, 
behaviour: Behaviour) -> Result<ReportResult> { if let Some(peer) = { let peers = self.peers.borrow(); peers.get(peer_id).map(ToOwned::to_owned) } { let key = peer.connected_addr.extract_ip_addr()?; let mut peer_addr = self.addr_manager.get_mut(&key).expect("peer addr exists"); let score = peer_addr.score.saturating_add(behaviour.score()); peer_addr.score = score; if score < self.score_config.ban_score { self.ban_addr( &peer.connected_addr, self.score_config.ban_timeout_ms, format!("report behaviour {:?}", behaviour), )?; return Ok(ReportResult::Banned); } } Ok(ReportResult::Ok) } pub fn remove_disconnected_peer(&mut self, peer_id: &PeerId) -> Option<PeerInfo> { self.peers.borrow_mut().remove(peer_id) } pub fn peer_status(&self, peer_id: &PeerId) -> Status { if self.peers.borrow().contains_key(peer_id) { Status::Connected } else { Status::Disconnected } } pub fn fetch_addrs_to_attempt(&mut self, count: usize) -> Vec<AddrInfo> { let now_ms = faketime::unix_time_as_millis(); let ban_list = self.ban_list.borrow(); let peers = self.peers.borrow(); self.addr_manager .fetch_random(count, |peer_addr: &AddrInfo| { !ban_list.is_addr_banned(&peer_addr.addr) && !peers.contains_key(&peer_addr.peer_id) && !peer_addr.tried_in_last_minute(now_ms) }) } pub fn fetch_addrs_to_feeler(&mut self, count: usize) -> Vec<AddrInfo> { let now_ms = faketime::unix_time_as_millis(); let addr_expired_ms = now_ms - ADDR_TIMEOUT_MS; let ban_list = self.ban_list.borrow(); let peers = self.peers.borrow(); self.addr_manager .fetch_random(count, |peer_addr: &AddrInfo| { !ban_list.is_addr_banned(&peer_addr.addr) && !peers.contains_key(&peer_addr.peer_id) && !peer_addr.tried_in_last_minute(now_ms) && !peer_addr.had_connected(addr_expired_ms) }) } pub fn fetch_random_addrs(&mut self, count: usize) -> Vec<AddrInfo> { let now_ms = faketime::unix_time_as_millis(); let addr_expired_ms = now_ms - ADDR_TIMEOUT_MS; let ban_list = self.ban_list.borrow(); let peers = self.peers.borrow(); self.addr_manager 
.fetch_random(count, |peer_addr: &AddrInfo| { !ban_list.is_addr_banned(&peer_addr.addr) && (peers.contains_key(&peer_addr.peer_id) || peer_addr.had_connected(addr_expired_ms)) }) } pub(crate) fn ban_addr( &mut self, addr: &Multiaddr, timeout_ms: u64, ban_reason: String, ) -> Result<()> { let network = ip_to_network(addr.extract_ip_addr()?.ip); self.ban_network(network, timeout_ms, ban_reason) } pub(crate) fn ban_network( &mut self, network: IpNetwork, timeout_ms: u64, ban_reason: String, ) -> Result<()> { let now_ms = faketime::unix_time_as_millis(); let ban_addr = BannedAddr { address: network, ban_until: now_ms + timeout_ms, created_at: now_ms, ban_reason, }; self.mut_ban_list().ban(ban_addr); Ok(()) } pub fn is_addr_banned(&self, addr: &Multiaddr) -> bool { self.ban_list().is_addr_banned(addr) } pub fn ban_list(&self) -> Ref<BanList> { self.ban_list.borrow() } pub fn mut_ban_list(&mut self) -> &mut BanList { self.ban_list.get_mut() } pub fn clear_ban_list(&self) { self.ban_list.replace(Default::default()); } fn check_purge(&mut self) -> Result<()> { if self.addr_manager.count() < ADDR_COUNT_LIMIT { return Ok(()); } let now_ms = faketime::unix_time_as_millis(); let candidate_peers: Vec<_> = { let mut peers_by_network_group: HashMap<Group, Vec<_>> = HashMap::default(); for addr in self.addr_manager.addrs_iter() { let network_group = addr.addr.network_group(); peers_by_network_group .entry(network_group) .or_default() .push(addr); } let ban_score = self.score_config.ban_score; peers_by_network_group .values() .max_by_key(|peers| peers.len()) .expect("largest network group") .iter() .filter(move |addr| addr.is_terrible(now_ms) || addr.score <= ban_score) .map(|addr| addr.ip_port()) .collect() }; if candidate_peers.is_empty() { return Err(PeerStoreError::EvictionFailed.into()); } for key in candidate_peers { self.addr_manager.remove(&key); } Ok(()) } }
use crate::{ errors::{PeerStoreError, Result}, network_group::{Group, NetworkGroup}, peer_store::{ addr_manager::AddrManager, ban_list::BanList, types::{ip_to_network, AddrInfo, BannedAddr, MultiaddrExt, PeerInfo}, Behaviour, Multiaddr, PeerScoreConfig, ReportResult, Status, ADDR_COUNT_LIMIT, ADDR_TIMEOUT_MS, }, PeerId, SessionType, }; use ipnetwork::IpNetwork; use std::cell::{Ref, RefCell}; use std::collections::{hash_map::Entry, HashMap}; #[derive(Default)] pub struct PeerStore { addr_manager: AddrManager, ban_list: RefCell<BanList>, peers: RefCell<HashMap<PeerId, PeerInfo>>, score_config: PeerScoreConfig, } impl PeerStore { pub fn new(addr_manager: AddrManager, ban_list: BanList) -> Self { PeerStore { addr_manager, ban_list: RefCell::new(ban_list), peers: Default::default(), score_config: Default::default(), } } pub fn add_connected_peer( &mut self, peer_id: PeerId, addr: Multiaddr, session_type: SessionType, ) -> Result<()> { let now_ms = faketime::unix_time_as_millis();
let score = self.score_config.default_score; if session_type.is_outbound() { self.addr_manager.add(AddrInfo::new( peer_id, addr.extract_ip_addr()?, addr.exclude_p2p(), now_ms, score, )); } Ok(()) } pub fn add_addr(&mut self, peer_id: PeerId, addr: Multiaddr) -> Result<()> { self.check_purge()?; let score = self.score_config.default_score; self.addr_manager.add(AddrInfo::new( peer_id, addr.extract_ip_addr()?, addr.exclude_p2p(), 0, score, )); Ok(()) } pub fn addr_manager(&self) -> &AddrManager { &self.addr_manager } pub fn mut_addr_manager(&mut self) -> &mut AddrManager { &mut self.addr_manager } pub fn report(&mut self, peer_id: &PeerId, behaviour: Behaviour) -> Result<ReportResult> { if let Some(peer) = { let peers = self.peers.borrow(); peers.get(peer_id).map(ToOwned::to_owned) } { let key = peer.connected_addr.extract_ip_addr()?; let mut peer_addr = self.addr_manager.get_mut(&key).expect("peer addr exists"); let score = peer_addr.score.saturating_add(behaviour.score()); peer_addr.score = score; if score < self.score_config.ban_score { self.ban_addr( &peer.connected_addr, self.score_config.ban_timeout_ms, format!("report behaviour {:?}", behaviour), )?; return Ok(ReportResult::Banned); } } Ok(ReportResult::Ok) } pub fn remove_disconnected_peer(&mut self, peer_id: &PeerId) -> Option<PeerInfo> { self.peers.borrow_mut().remove(peer_id) } pub fn peer_status(&self, peer_id: &PeerId) -> Status { if self.peers.borrow().contains_key(peer_id) { Status::Connected } else { Status::Disconnected } } pub fn fetch_addrs_to_attempt(&mut self, count: usize) -> Vec<AddrInfo> { let now_ms = faketime::unix_time_as_millis(); let ban_list = self.ban_list.borrow(); let peers = self.peers.borrow(); self.addr_manager .fetch_random(count, |peer_addr: &AddrInfo| { !ban_list.is_addr_banned(&peer_addr.addr) && !peers.contains_key(&peer_addr.peer_id) && !peer_addr.tried_in_last_minute(now_ms) }) } pub fn fetch_addrs_to_feeler(&mut self, count: usize) -> Vec<AddrInfo> { let now_ms = 
faketime::unix_time_as_millis(); let addr_expired_ms = now_ms - ADDR_TIMEOUT_MS; let ban_list = self.ban_list.borrow(); let peers = self.peers.borrow(); self.addr_manager .fetch_random(count, |peer_addr: &AddrInfo| { !ban_list.is_addr_banned(&peer_addr.addr) && !peers.contains_key(&peer_addr.peer_id) && !peer_addr.tried_in_last_minute(now_ms) && !peer_addr.had_connected(addr_expired_ms) }) } pub fn fetch_random_addrs(&mut self, count: usize) -> Vec<AddrInfo> { let now_ms = faketime::unix_time_as_millis(); let addr_expired_ms = now_ms - ADDR_TIMEOUT_MS; let ban_list = self.ban_list.borrow(); let peers = self.peers.borrow(); self.addr_manager .fetch_random(count, |peer_addr: &AddrInfo| { !ban_list.is_addr_banned(&peer_addr.addr) && (peers.contains_key(&peer_addr.peer_id) || peer_addr.had_connected(addr_expired_ms)) }) } pub(crate) fn ban_addr( &mut self, addr: &Multiaddr, timeout_ms: u64, ban_reason: String, ) -> Result<()> { let network = ip_to_network(addr.extract_ip_addr()?.ip); self.ban_network(network, timeout_ms, ban_reason) } pub(crate) fn ban_network( &mut self, network: IpNetwork, timeout_ms: u64, ban_reason: String, ) -> Result<()> { let now_ms = faketime::unix_time_as_millis(); let ban_addr = BannedAddr { address: network, ban_until: now_ms + timeout_ms, created_at: now_ms, ban_reason, }; self.mut_ban_list().ban(ban_addr); Ok(()) } pub fn is_addr_banned(&self, addr: &Multiaddr) -> bool { self.ban_list().is_addr_banned(addr) } pub fn ban_list(&self) -> Ref<BanList> { self.ban_list.borrow() } pub fn mut_ban_list(&mut self) -> &mut BanList { self.ban_list.get_mut() } pub fn clear_ban_list(&self) { self.ban_list.replace(Default::default()); } fn check_purge(&mut self) -> Result<()> { if self.addr_manager.count() < ADDR_COUNT_LIMIT { return Ok(()); } let now_ms = faketime::unix_time_as_millis(); let candidate_peers: Vec<_> = { let mut peers_by_network_group: HashMap<Group, Vec<_>> = HashMap::default(); for addr in self.addr_manager.addrs_iter() { let 
network_group = addr.addr.network_group(); peers_by_network_group .entry(network_group) .or_default() .push(addr); } let ban_score = self.score_config.ban_score; peers_by_network_group .values() .max_by_key(|peers| peers.len()) .expect("largest network group") .iter() .filter(move |addr| addr.is_terrible(now_ms) || addr.score <= ban_score) .map(|addr| addr.ip_port()) .collect() }; if candidate_peers.is_empty() { return Err(PeerStoreError::EvictionFailed.into()); } for key in candidate_peers { self.addr_manager.remove(&key); } Ok(()) } }
match self.peers.get_mut().entry(peer_id.to_owned()) { Entry::Occupied(mut entry) => { let mut peer = entry.get_mut(); peer.connected_addr = addr.clone(); peer.last_connected_at_ms = now_ms; peer.session_type = session_type; } Entry::Vacant(entry) => { let peer = PeerInfo::new(peer_id.to_owned(), addr.clone(), session_type, now_ms); entry.insert(peer); } }
if_condition
[]
Rust
derive/src/lib.rs
znly/async-graphql
16d38c521f5855914d9a830076731ef515b8d4d3
#![allow(clippy::cognitive_complexity)] #![forbid(unsafe_code)] extern crate proc_macro; mod args; mod r#enum; mod input_object; mod interface; mod merged_object; mod merged_subscription; mod object; mod output_type; mod scalar; mod simple_object; mod subscription; mod union; mod utils; use crate::utils::{add_container_attrs, parse_derive}; use proc_macro::TokenStream; use quote::quote; use syn::parse_macro_input; use syn::{AttributeArgs, ItemImpl}; #[proc_macro_attribute] #[allow(non_snake_case)] pub fn Object(args: TokenStream, input: TokenStream) -> TokenStream { let object_args = match args::Object::parse(parse_macro_input!(args as AttributeArgs)) { Ok(object_args) => object_args, Err(err) => return err.to_compile_error().into(), }; let mut item_impl = parse_macro_input!(input as ItemImpl); match object::generate(&object_args, &mut item_impl) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } } #[proc_macro_attribute] #[allow(non_snake_case)] pub fn SimpleObject(args: TokenStream, input: TokenStream) -> TokenStream { add_container_attrs( quote!(GQLSimpleObject), parse_macro_input!(args as AttributeArgs), input.into(), ) .unwrap_or_else(|err| err.to_compile_error()) .into() } #[proc_macro_derive(GQLSimpleObject, attributes(field, graphql))] pub fn derive_simple_object(input: TokenStream) -> TokenStream { let (args, input) = match parse_derive(input.into()) { Ok(r) => r, Err(err) => return err.to_compile_error().into(), }; let object_args = match args::Object::parse(parse_macro_input!(args as AttributeArgs)) { Ok(object_args) => object_args, Err(err) => return err.to_compile_error().into(), }; match simple_object::generate(&object_args, &input) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } } #[proc_macro_attribute] #[allow(non_snake_case)] pub fn Enum(args: TokenStream, input: TokenStream) -> TokenStream { add_container_attrs( quote!(GQLEnum, Copy, Clone, Eq, PartialEq), parse_macro_input!(args as AttributeArgs), 
input.into(), ) .unwrap_or_else(|err| err.to_compile_error()) .into() } #[proc_macro_derive(GQLEnum, attributes(item, graphql))] pub fn derive_enum(input: TokenStream) -> TokenStream { let (args, input) = match parse_derive(input.into()) { Ok(r) => r, Err(err) => return err.to_compile_error().into(), }; let enum_args = match args::Enum::parse(parse_macro_input!(args as AttributeArgs)) { Ok(enum_args) => enum_args, Err(err) => return err.to_compile_error().into(), }; match r#enum::generate(&enum_args, &input) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } } #[proc_macro_attribute] #[allow(non_snake_case)] pub fn InputObject(args: TokenStream, input: TokenStream) -> TokenStream { add_container_attrs( quote!(GQLInputObject), parse_macro_input!(args as AttributeArgs), input.into(), ) .unwrap_or_else(|err| err.to_compile_error()) .into() } #[proc_macro_derive(GQLInputObject, attributes(field, graphql))] pub fn derive_input_object(input: TokenStream) -> TokenStream { let (args, input) = match parse_derive(input.into()) { Ok(r) => r, Err(err) => return err.to_compile_error().into(), }; let object_args = match args::InputObject::parse(parse_macro_input!(args as AttributeArgs)) { Ok(object_args) => object_args, Err(err) => return err.to_compile_error().into(), }; match input_object::generate(&object_args, &input) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } } #[proc_macro_attribute] #[allow(non_snake_case)] pub fn Interface(args: TokenStream, input: TokenStream) -> TokenStream { add_container_attrs( quote!(GQLInterface), parse_macro_input!(args as AttributeArgs), input.into(), ) .unwrap_or_else(|err| err.to_compile_error()) .into() } #[proc_macro_derive(GQLInterface, attributes(graphql))] pub fn derive_interface(input: TokenStream) -> TokenStream { let (args, input) = match parse_derive(input.into()) { Ok(r) => r, Err(err) => return err.to_compile_error().into(), }; let interface_args = match 
args::Interface::parse(parse_macro_input!(args as AttributeArgs)) { Ok(interface_args) => interface_args, Err(err) => return err.to_compile_error().into(), }; match interface::generate(&interface_args, &input) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } } #[proc_macro_attribute] #[allow(non_snake_case)] pub fn Union(args: TokenStream, input: TokenStream) -> TokenStream { add_container_attrs( quote!(GQLUnion), parse_macro_input!(args as AttributeArgs), input.into(), ) .unwrap_or_else(|err| err.to_compile_error()) .into() } #[proc_macro_derive(GQLUnion, attributes(graphql))] pub fn derive_union(input: TokenStream) -> TokenStream { let (args, input) = match parse_derive(input.into()) { Ok(r) => r, Err(err) => return err.to_compile_error().into(), }; let union_args = match args::Interface::parse(parse_macro_input!(args as AttributeArgs)) { Ok(union_args) => union_args, Err(err) => return err.to_compile_error().into(), }; match union::generate(&union_args, &input) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } } #[proc_macro_attribute] #[allow(non_snake_case)] pub fn Subscription(args: TokenStream, input: TokenStream) -> TokenStream { let object_args = match args::Object::parse(parse_macro_input!(args as AttributeArgs)) { Ok(object_args) => object_args, Err(err) => return err.to_compile_error().into(), }; let mut item_impl = parse_macro_input!(input as ItemImpl); match subscription::generate(&object_args, &mut item_impl) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } } #[proc_macro_attribute] #[allow(non_snake_case)] pub fn Scalar(args: TokenStream, input: TokenStream) -> TokenStream { let scalar_args = match args::Scalar::parse(parse_macro_input!(args as AttributeArgs)) { Ok(scalar_args) => scalar_args, Err(err) => return err.to_compile_error().into(), }; let mut item_impl = parse_macro_input!(input as ItemImpl); match scalar::generate(&scalar_args, &mut item_impl) { Ok(expanded) => 
expanded, Err(err) => err.to_compile_error().into(), } } #[proc_macro_attribute] #[allow(non_snake_case)] pub fn MergedObject(args: TokenStream, input: TokenStream) -> TokenStream { add_container_attrs( quote!(GQLMergedObject), parse_macro_input!(args as AttributeArgs), input.into(), ) .unwrap_or_else(|err| err.to_compile_error()) .into() } #[proc_macro_derive(GQLMergedObject, attributes(item, graphql))] pub fn derive_merged_object(input: TokenStream) -> TokenStream { let (args, input) = match parse_derive(input.into()) { Ok(r) => r, Err(err) => return err.to_compile_error().into(), }; let object_args = match args::Object::parse(parse_macro_input!(args as AttributeArgs)) { Ok(object_args) => object_args, Err(err) => return err.to_compile_error().into(), }; match merged_object::generate(&object_args, &input) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } } #[proc_macro_attribute] #[allow(non_snake_case)] pub fn MergedSubscription(args: TokenStream, input: TokenStream) -> TokenStream { add_container_attrs( quote!(GQLMergedObject), parse_macro_input!(args as AttributeArgs), input.into(), ) .unwrap_or_else(|err| err.to_compile_error()) .into() } #[proc_macro_derive(GQLMergedSubscription, attributes(item, graphql))] pub fn derive_merged_subscription(input: TokenStream) -> TokenStream { let (args, input) = match parse_derive(input.into()) { Ok(r) => r, Err(err) => return err.to_compile_error().into(), }; let object_args = match args::Object::parse(parse_macro_input!(args as AttributeArgs)) { Ok(object_args) => object_args, Err(err) => return err.to_compile_error().into(), }; match merged_subscription::generate(&object_args, &input) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } }
#![allow(clippy::cognitive_complexity)] #![forbid(unsafe_code)] extern crate proc_macro; mod args; mod r#enum; mod input_object; mod interface; mod merged_object; mod merged_subscription; mod object; mod output_type; mod scalar; mod simple_object; mod subscription; mod union; mod utils; use crate::utils::{add_container_attrs, parse_derive}; use proc_macro::TokenStream; use quote::quote; use syn::parse_macro_input; use syn::{AttributeArgs, ItemImpl}; #[proc_macro_attribute] #[allow(non_snake_case)] pub fn Object(args: TokenStream, input: TokenStream) -> TokenStream { let object_args = match args::Object::parse(parse_macro_input!(args as AttributeArgs)) { Ok(object_args) => object_args, Err(err) => return err.to_compile_error().into(), }; let mut item_impl = parse_macro_input!(input as ItemImpl); match object::generate(&object_args, &mut item_impl) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } } #[proc_macro_attribute] #[allow(non_snake_case)] pub fn SimpleObject(args: TokenStream, input: TokenStream) -> TokenStream { add_container_attrs( quote!(GQLSimpleObject), parse_macro_input!(args as AttributeArgs), input.into(), ) .unwrap_or_else(|err| err.to_compile_error()) .into() } #[proc_macro_derive(GQLSimpleObject, attributes(field, graphql))] pub fn derive_simple_object(input: TokenStream) -> TokenStream { let (args, inpu
#[proc_macro_attribute] #[allow(non_snake_case)] pub fn Enum(args: TokenStream, input: TokenStream) -> TokenStream { add_container_attrs( quote!(GQLEnum, Copy, Clone, Eq, PartialEq), parse_macro_input!(args as AttributeArgs), input.into(), ) .unwrap_or_else(|err| err.to_compile_error()) .into() } #[proc_macro_derive(GQLEnum, attributes(item, graphql))] pub fn derive_enum(input: TokenStream) -> TokenStream { let (args, input) = match parse_derive(input.into()) { Ok(r) => r, Err(err) => return err.to_compile_error().into(), }; let enum_args = match args::Enum::parse(parse_macro_input!(args as AttributeArgs)) { Ok(enum_args) => enum_args, Err(err) => return err.to_compile_error().into(), }; match r#enum::generate(&enum_args, &input) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } } #[proc_macro_attribute] #[allow(non_snake_case)] pub fn InputObject(args: TokenStream, input: TokenStream) -> TokenStream { add_container_attrs( quote!(GQLInputObject), parse_macro_input!(args as AttributeArgs), input.into(), ) .unwrap_or_else(|err| err.to_compile_error()) .into() } #[proc_macro_derive(GQLInputObject, attributes(field, graphql))] pub fn derive_input_object(input: TokenStream) -> TokenStream { let (args, input) = match parse_derive(input.into()) { Ok(r) => r, Err(err) => return err.to_compile_error().into(), }; let object_args = match args::InputObject::parse(parse_macro_input!(args as AttributeArgs)) { Ok(object_args) => object_args, Err(err) => return err.to_compile_error().into(), }; match input_object::generate(&object_args, &input) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } } #[proc_macro_attribute] #[allow(non_snake_case)] pub fn Interface(args: TokenStream, input: TokenStream) -> TokenStream { add_container_attrs( quote!(GQLInterface), parse_macro_input!(args as AttributeArgs), input.into(), ) .unwrap_or_else(|err| err.to_compile_error()) .into() } #[proc_macro_derive(GQLInterface, attributes(graphql))] pub fn 
derive_interface(input: TokenStream) -> TokenStream { let (args, input) = match parse_derive(input.into()) { Ok(r) => r, Err(err) => return err.to_compile_error().into(), }; let interface_args = match args::Interface::parse(parse_macro_input!(args as AttributeArgs)) { Ok(interface_args) => interface_args, Err(err) => return err.to_compile_error().into(), }; match interface::generate(&interface_args, &input) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } } #[proc_macro_attribute] #[allow(non_snake_case)] pub fn Union(args: TokenStream, input: TokenStream) -> TokenStream { add_container_attrs( quote!(GQLUnion), parse_macro_input!(args as AttributeArgs), input.into(), ) .unwrap_or_else(|err| err.to_compile_error()) .into() } #[proc_macro_derive(GQLUnion, attributes(graphql))] pub fn derive_union(input: TokenStream) -> TokenStream { let (args, input) = match parse_derive(input.into()) { Ok(r) => r, Err(err) => return err.to_compile_error().into(), }; let union_args = match args::Interface::parse(parse_macro_input!(args as AttributeArgs)) { Ok(union_args) => union_args, Err(err) => return err.to_compile_error().into(), }; match union::generate(&union_args, &input) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } } #[proc_macro_attribute] #[allow(non_snake_case)] pub fn Subscription(args: TokenStream, input: TokenStream) -> TokenStream { let object_args = match args::Object::parse(parse_macro_input!(args as AttributeArgs)) { Ok(object_args) => object_args, Err(err) => return err.to_compile_error().into(), }; let mut item_impl = parse_macro_input!(input as ItemImpl); match subscription::generate(&object_args, &mut item_impl) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } } #[proc_macro_attribute] #[allow(non_snake_case)] pub fn Scalar(args: TokenStream, input: TokenStream) -> TokenStream { let scalar_args = match args::Scalar::parse(parse_macro_input!(args as AttributeArgs)) { Ok(scalar_args) => 
scalar_args, Err(err) => return err.to_compile_error().into(), }; let mut item_impl = parse_macro_input!(input as ItemImpl); match scalar::generate(&scalar_args, &mut item_impl) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } } #[proc_macro_attribute] #[allow(non_snake_case)] pub fn MergedObject(args: TokenStream, input: TokenStream) -> TokenStream { add_container_attrs( quote!(GQLMergedObject), parse_macro_input!(args as AttributeArgs), input.into(), ) .unwrap_or_else(|err| err.to_compile_error()) .into() } #[proc_macro_derive(GQLMergedObject, attributes(item, graphql))] pub fn derive_merged_object(input: TokenStream) -> TokenStream { let (args, input) = match parse_derive(input.into()) { Ok(r) => r, Err(err) => return err.to_compile_error().into(), }; let object_args = match args::Object::parse(parse_macro_input!(args as AttributeArgs)) { Ok(object_args) => object_args, Err(err) => return err.to_compile_error().into(), }; match merged_object::generate(&object_args, &input) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } } #[proc_macro_attribute] #[allow(non_snake_case)] pub fn MergedSubscription(args: TokenStream, input: TokenStream) -> TokenStream { add_container_attrs( quote!(GQLMergedObject), parse_macro_input!(args as AttributeArgs), input.into(), ) .unwrap_or_else(|err| err.to_compile_error()) .into() } #[proc_macro_derive(GQLMergedSubscription, attributes(item, graphql))] pub fn derive_merged_subscription(input: TokenStream) -> TokenStream { let (args, input) = match parse_derive(input.into()) { Ok(r) => r, Err(err) => return err.to_compile_error().into(), }; let object_args = match args::Object::parse(parse_macro_input!(args as AttributeArgs)) { Ok(object_args) => object_args, Err(err) => return err.to_compile_error().into(), }; match merged_subscription::generate(&object_args, &input) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } }
t) = match parse_derive(input.into()) { Ok(r) => r, Err(err) => return err.to_compile_error().into(), }; let object_args = match args::Object::parse(parse_macro_input!(args as AttributeArgs)) { Ok(object_args) => object_args, Err(err) => return err.to_compile_error().into(), }; match simple_object::generate(&object_args, &input) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } }
function_block-function_prefixed
[]
Rust
examples/noise_handshake.rs
niklaslong/pea2pea
4ff273bd6f0e9703d8347f5fccddd21a37a33f0d
mod common; use bytes::Bytes; use parking_lot::{Mutex, RwLock}; use tokio::{ io::{AsyncReadExt, AsyncWriteExt}, time::sleep, }; use tracing::*; use tracing_subscriber::filter::LevelFilter; use pea2pea::{ protocols::{Handshaking, Reading, Writing}, Connection, ConnectionSide, Node, NodeConfig, Pea2Pea, }; use std::{ collections::HashMap, convert::TryInto, io, net::SocketAddr, str, sync::Arc, time::Duration, }; const NOISE_BUF_LEN: usize = 65535; struct NoiseState { state: snow::TransportState, buffer: Box<[u8]>, } #[derive(Clone)] struct SecureNode { node: Node, noise_states: Arc<RwLock<HashMap<SocketAddr, Arc<Mutex<NoiseState>>>>>, } impl Pea2Pea for SecureNode { fn node(&self) -> &Node { &self.node } } fn read_message(buffer: &[u8]) -> io::Result<Option<&[u8]>> { if buffer.len() >= 2 { let payload_len = u16::from_be_bytes(buffer[..2].try_into().unwrap()) as usize; if payload_len == 0 { return Err(io::ErrorKind::InvalidData.into()); } if buffer[2..].len() >= payload_len { Ok(Some(&buffer[2..][..payload_len])) } else { Ok(None) } } else { Ok(None) } } fn packet_message(message: &[u8]) -> Bytes { let mut bytes = Vec::with_capacity(2 + message.len()); let u16_len_header = (message.len() as u16).to_be_bytes(); bytes.extend_from_slice(&u16_len_header); bytes.extend_from_slice(message); bytes.into() } impl SecureNode { async fn new(name: &str) -> io::Result<Self> { let config = NodeConfig { name: Some(name.into()), listener_ip: "127.0.0.1".parse().unwrap(), conn_read_buffer_size: NOISE_BUF_LEN + 2, ..Default::default() }; let node = Node::new(Some(config)).await?; Ok(Self { node, noise_states: Default::default(), }) } } #[async_trait::async_trait] impl Handshaking for SecureNode { async fn perform_handshake(&self, mut conn: Connection) -> io::Result<Connection> { const HANDSHAKE_PATTERN: &str = "Noise_XXpsk3_25519_ChaChaPoly_BLAKE2s"; const PRE_SHARED_KEY: &[u8] = b"I dont care for codes of conduct"; let builder = snow::Builder::new(HANDSHAKE_PATTERN.parse().unwrap()); 
let static_key = builder.generate_keypair().unwrap().private; let noise_builder = builder .local_private_key(&static_key) .psk(3, PRE_SHARED_KEY); let mut buffer: Box<[u8]> = vec![0u8; NOISE_BUF_LEN].into(); let mut buf = [0u8; NOISE_BUF_LEN]; let state = match !conn.side { ConnectionSide::Initiator => { let mut noise = noise_builder.build_initiator().unwrap(); let len = noise.write_message(&[], &mut buffer).unwrap(); conn.writer() .write_all(&packet_message(&buffer[..len])) .await?; debug!(parent: conn.node.span(), "sent e (XX handshake part 1/3)"); let len = conn.reader().read(&mut buf).await?; let message = read_message(&buf[..len])?.unwrap(); noise.read_message(message, &mut buffer).unwrap(); debug!(parent: conn.node.span(), "received e, ee, s, es (XX handshake part 2/3)"); let len = noise.write_message(&[], &mut buffer).unwrap(); conn.writer() .write_all(&packet_message(&buffer[..len])) .await?; debug!(parent: conn.node.span(), "sent s, se, psk (XX handshake part 3/3)"); noise.into_transport_mode().unwrap() } ConnectionSide::Responder => { let mut noise = noise_builder.build_responder().unwrap(); let len = conn.reader().read(&mut buf).await?; let message = read_message(&buf[..len])?.unwrap(); noise.read_message(message, &mut buffer).unwrap(); debug!(parent: conn.node.span(), "received e (XX handshake part 1/3)"); let len = noise.write_message(&[], &mut buffer).unwrap(); conn.writer() .write_all(&packet_message(&buffer[..len])) .await?; debug!(parent: conn.node.span(), "sent e, ee, s, es (XX handshake part 2/3)"); let len = conn.reader().read(&mut buf).await?; let message = read_message(&buf[..len])?.unwrap(); noise.read_message(message, &mut buffer).unwrap(); debug!(parent: conn.node.span(), "received s, se, psk (XX handshake part 3/3)"); noise.into_transport_mode().unwrap() } }; debug!(parent: conn.node.span(), "XX handshake complete"); let noise_state = NoiseState { state, buffer }; self.noise_states .write() .insert(conn.addr, 
Arc::new(Mutex::new(noise_state))); Ok(conn) } } #[async_trait::async_trait] impl Reading for SecureNode { type Message = String; fn read_message( &self, source: SocketAddr, buffer: &[u8], ) -> io::Result<Option<(Self::Message, usize)>> { let bytes = read_message(buffer)?; if let Some(bytes) = bytes { let noise = Arc::clone(self.noise_states.read().get(&source).unwrap()); let NoiseState { state, buffer } = &mut *noise.lock(); let len = state.read_message(bytes, buffer).ok().unwrap(); let decrypted_message = String::from_utf8(buffer[..len].to_vec()).unwrap(); Ok(Some((decrypted_message, bytes.len() + 2))) } else { Ok(None) } } async fn process_message(&self, source: SocketAddr, message: Self::Message) -> io::Result<()> { info!(parent: self.node().span(), "decrypted a message from {}: \"{}\"", source, message); Ok(()) } } impl Writing for SecureNode { fn write_message( &self, target: SocketAddr, payload: &[u8], conn_buffer: &mut [u8], ) -> io::Result<usize> { let to_encrypt = str::from_utf8(payload).unwrap(); info!(parent: self.node.span(), "sending an encrypted message to {}: \"{}\"", target, to_encrypt); let noise = Arc::clone(&self.noise_states.read().get(&target).unwrap()); let NoiseState { state, buffer } = &mut *noise.lock(); let len = state.write_message(payload, buffer).unwrap(); let encrypted_message = &buffer[..len]; conn_buffer[..2].copy_from_slice(&(len as u16).to_be_bytes()); conn_buffer[2..][..len].copy_from_slice(&encrypted_message); Ok(2 + len) } } #[tokio::main] async fn main() { common::start_logger(LevelFilter::TRACE); let initiator = SecureNode::new("initiator").await.unwrap(); let responder = SecureNode::new("responder").await.unwrap(); for node in &[&initiator, &responder] { node.enable_handshaking(); node.enable_reading(); node.enable_writing(); } initiator .node() .connect(responder.node().listening_addr()) .await .unwrap(); sleep(Duration::from_millis(10)).await; let msg = b"why hello there, fellow noise protocol user; I'm the initiator"; 
initiator .node() .send_direct_message(responder.node().listening_addr(), msg[..].into()) .await .unwrap(); let initiator_addr = responder.node().connected_addrs()[0]; let msg = b"why hello there, fellow noise protocol user; I'm the responder"; responder .node() .send_direct_message(initiator_addr, msg[..].into()) .await .unwrap(); sleep(Duration::from_millis(10)).await; }
mod common; use bytes::Bytes; use parking_lot::{Mutex, RwLock}; use tokio::{ io::{AsyncReadExt, AsyncWriteExt}, time::sleep, }; use tracing::*; use tracing_subscriber::filter::LevelFilter; use pea2pea::{ protocols::{Handshaking, Reading, Writing}, Connection, ConnectionSide, Node, NodeConfig, Pea2Pea, }; use std::{ collections::HashMap, convert::TryInto, io, net::SocketAddr, str, sync::Arc, time::Duration, }; const NOISE_BUF_LEN: usize = 65535; struct NoiseState { state: snow::TransportState, buffer: Box<[u8]>, } #[derive(Clone)] struct SecureNode { node: Node, noise_states: Arc<RwLock<HashMap<SocketAddr, Arc<Mutex<NoiseState>>>>>, } impl Pea2Pea for SecureNode { fn node(&self) -> &Node { &self.node } } fn read_message(buffer: &[u8]) -> io::Result<Option<&[u8]>> { if buffer.len() >= 2 { let payload_len = u16::from_be_bytes(buffer[..2].try_into().unwrap()) as usize; if payload_len == 0 { return Err(io::ErrorKind::InvalidData.into()); } if buffer[2..].len() >= payload_len { Ok(Some(&buffer[2..][..payload_len])) } else { Ok(None) } } else { Ok(None) } } fn packet_message(message: &[u8]) -> Bytes { let mut bytes = Vec::with_capacity(2 + message.len()); let u16_len_header = (message.len() as u16).to_be_bytes(); bytes.extend_from_slice(&u16_len_header); bytes.extend_from_slice(message); bytes.into() } impl SecureNode { async fn new(name: &str) -> io::Result<Self> { let config = NodeConfig { name: Some(name.into()), listener_ip: "127.0.0.1".parse().unwrap(), conn_read_buffer_size: NOISE_BUF_LEN + 2, ..Default::default() }; let node = Node::new(Some(config)).await?; Ok(Self { node, noise_states: Default::default(), }) } } #[async_trait::async_trait] impl Handshaking for SecureNode { async fn perform_handshake(&self, mut conn: Connection) -> io::Result<Connection> { const HANDSHAKE_PATTERN: &str = "Noise_XXpsk3_25519_ChaChaPoly_BLAKE2s"; const PRE_SHARED_KEY: &[u8] = b"I dont care for codes of conduct"; let builder = snow::Builder::new(HANDSHAKE_PATTERN.parse().unwrap()); 
let static_key = builder.generate_keypair().unwrap().private; let noise_builder = builder .local_private_key(&static_key) .psk(3, PRE_SHARED_KEY); let mut buffer: Box<[u8]> = vec![0u8; NOISE_BUF_LEN].into(); let mut buf = [0u8; NOISE_BUF_LEN]; let state = match !conn.side { ConnectionSide::Initiator => { let mut noise = noise_builder.build_initiator().unwrap(); let len = noise.write_message(&[], &mut buffer).unwrap(); conn.writer() .write_all(&packet_message(&buffer[..len])) .await?; debug!(parent: conn.node.span(), "sent e (XX handshake part 1/3)"); let len = conn.reader().read(&mut buf).await?; let message = read_message(&buf[..len])?.unwrap(); noise.read_message(message, &mut buffer).unwrap(); debug!(parent: conn.node.span(), "received e, ee, s, es (XX handshake part 2/3)"); let len = noise.write_message(&[], &mut buffer).unwrap(); conn.writer() .write_all(&packet_message(&buffer[..len])) .await?; debug!(parent: conn.node.span(), "sent s, se, psk (XX handshake part 3/3)"); noise.into_transport_mode().unwrap() } ConnectionSide::Responder => { let mut noise = noise_builder.build_responder().unwrap(); let len = conn.reader().read(&mut buf).await?; let message = read_message(&buf[..len])?.unwrap(); noise.read_message(message, &mut buffer).unwrap(); debug!(parent: conn.node.span(), "received e (XX handshake part 1/3)"); let len = noise.write_message(&[], &mut buffer).unwrap(); conn.writer() .write_all(&packet_message(&buffer[..len])) .await?; debug!(parent: conn.node.span(), "sent e, ee, s, es (XX handshake part 2/3)"); let len = conn.reader().read(&mut buf).await?; let message = read_message(&buf[..len])?.unwrap(); noise.read_message(message, &mut buffer).unwrap(); debug!(parent: conn.node.span(), "received s, se, psk (XX handshake part 3/3)"); noise.into_transport_mode().unwrap() } }; debug!(parent: conn.node.span(), "XX handshake complete"); let noise_state = NoiseState { state, buffer }; self.noise_states .write() .insert(conn.addr, 
Arc::new(Mutex::new(noise_state))); Ok(conn) } } #[async_trait::async_trait] impl Reading for SecureNode { type Message = String;
async fn process_message(&self, source: SocketAddr, message: Self::Message) -> io::Result<()> { info!(parent: self.node().span(), "decrypted a message from {}: \"{}\"", source, message); Ok(()) } } impl Writing for SecureNode { fn write_message( &self, target: SocketAddr, payload: &[u8], conn_buffer: &mut [u8], ) -> io::Result<usize> { let to_encrypt = str::from_utf8(payload).unwrap(); info!(parent: self.node.span(), "sending an encrypted message to {}: \"{}\"", target, to_encrypt); let noise = Arc::clone(&self.noise_states.read().get(&target).unwrap()); let NoiseState { state, buffer } = &mut *noise.lock(); let len = state.write_message(payload, buffer).unwrap(); let encrypted_message = &buffer[..len]; conn_buffer[..2].copy_from_slice(&(len as u16).to_be_bytes()); conn_buffer[2..][..len].copy_from_slice(&encrypted_message); Ok(2 + len) } } #[tokio::main] async fn main() { common::start_logger(LevelFilter::TRACE); let initiator = SecureNode::new("initiator").await.unwrap(); let responder = SecureNode::new("responder").await.unwrap(); for node in &[&initiator, &responder] { node.enable_handshaking(); node.enable_reading(); node.enable_writing(); } initiator .node() .connect(responder.node().listening_addr()) .await .unwrap(); sleep(Duration::from_millis(10)).await; let msg = b"why hello there, fellow noise protocol user; I'm the initiator"; initiator .node() .send_direct_message(responder.node().listening_addr(), msg[..].into()) .await .unwrap(); let initiator_addr = responder.node().connected_addrs()[0]; let msg = b"why hello there, fellow noise protocol user; I'm the responder"; responder .node() .send_direct_message(initiator_addr, msg[..].into()) .await .unwrap(); sleep(Duration::from_millis(10)).await; }
fn read_message( &self, source: SocketAddr, buffer: &[u8], ) -> io::Result<Option<(Self::Message, usize)>> { let bytes = read_message(buffer)?; if let Some(bytes) = bytes { let noise = Arc::clone(self.noise_states.read().get(&source).unwrap()); let NoiseState { state, buffer } = &mut *noise.lock(); let len = state.read_message(bytes, buffer).ok().unwrap(); let decrypted_message = String::from_utf8(buffer[..len].to_vec()).unwrap(); Ok(Some((decrypted_message, bytes.len() + 2))) } else { Ok(None) } }
function_block-full_function
[ { "content": "pub fn read_len_prefixed_message(len_size: usize, buffer: &[u8]) -> io::Result<Option<&[u8]>> {\n\n if buffer.len() >= len_size {\n\n let payload_len = match len_size {\n\n 2 => u16::from_le_bytes(buffer[..len_size].try_into().unwrap()) as usize,\n\n 4 => u32::from_...
Rust
src/shade.rs
ucarion/gfx
0701f16155ec5af4a2abff4e2b8740523861d1a4
use std::error::Error; use std::fmt; pub use gfx_device_gl::Version as GlslVersion; #[cfg(target_os = "windows")] pub use gfx_device_dx11::ShaderModel as DxShaderModel; #[cfg(feature = "metal")] pub use gfx_device_metal::ShaderModel as MetalShaderModel; #[derive(Copy, Clone, Debug, PartialEq)] pub enum Backend { Glsl(GlslVersion), GlslEs(GlslVersion), #[cfg(target_os = "windows")] Hlsl(DxShaderModel), #[cfg(feature = "metal")] Msl(MetalShaderModel), #[cfg(feature = "vulkan")] Vulkan, } pub const EMPTY: &'static [u8] = &[]; #[derive(Copy, Clone, PartialEq, Debug)] pub struct Source<'a> { pub glsl_120: &'a [u8], pub glsl_130: &'a [u8], pub glsl_140: &'a [u8], pub glsl_150: &'a [u8], pub glsl_400: &'a [u8], pub glsl_430: &'a [u8], pub glsl_es_100: &'a [u8], pub glsl_es_200: &'a [u8], pub glsl_es_300: &'a [u8], pub hlsl_30: &'a [u8], pub hlsl_40: &'a [u8], pub hlsl_41: &'a [u8], pub hlsl_50: &'a [u8], pub msl_10: &'a [u8], pub msl_11: &'a [u8], pub vulkan: &'a [u8], } #[derive(Clone, Copy, Debug, PartialEq)] pub struct SelectError(Backend); impl fmt::Display for SelectError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "An error occurred when selecting the {:?} backend", self.0) } } impl Error for SelectError { fn description(&self) -> &str { "An error occurred when selecting a backend" } } impl<'a> Source<'a> { pub fn empty() -> Source<'a> { Source { glsl_120: EMPTY, glsl_130: EMPTY, glsl_140: EMPTY, glsl_150: EMPTY, glsl_400: EMPTY, glsl_430: EMPTY, glsl_es_100: EMPTY, glsl_es_200: EMPTY, glsl_es_300: EMPTY, hlsl_30: EMPTY, hlsl_40: EMPTY, hlsl_41: EMPTY, hlsl_50: EMPTY, msl_10: EMPTY, msl_11: EMPTY, vulkan: EMPTY, } } pub fn select(&self, backend: Backend) -> Result<&'a [u8], SelectError> { Ok(match backend { Backend::Glsl(version) => { let v = version.major * 100 + version.minor; match *self { Source { glsl_430: s, .. } if s != EMPTY && v >= 430 => s, Source { glsl_400: s, .. } if s != EMPTY && v >= 400 => s, Source { glsl_150: s, .. 
} if s != EMPTY && v >= 150 => s, Source { glsl_140: s, .. } if s != EMPTY && v >= 140 => s, Source { glsl_130: s, .. } if s != EMPTY && v >= 130 => s, Source { glsl_120: s, .. } if s != EMPTY && v >= 120 => s, _ => return Err(SelectError(backend)), } } Backend::GlslEs(version) => { let v = version.major * 100 + version.minor; match *self { Source { glsl_es_100: s, .. } if s != EMPTY && v >= 100 => s, Source { glsl_es_200: s, .. } if s != EMPTY && v >= 200 => s, Source { glsl_es_300: s, .. } if s != EMPTY && v >= 300 => s, _ => return Err(SelectError(backend)), } } #[cfg(target_os = "windows")] Backend::Hlsl(model) => { match *self { Source { hlsl_50: s, .. } if s != EMPTY && model >= 50 => s, Source { hlsl_41: s, .. } if s != EMPTY && model >= 41 => s, Source { hlsl_40: s, .. } if s != EMPTY && model >= 40 => s, Source { hlsl_30: s, .. } if s != EMPTY && model >= 30 => s, _ => return Err(SelectError(backend)), } } #[cfg(feature = "metal")] Backend::Msl(revision) => { match *self { Source { msl_11: s, .. } if s != EMPTY && revision >= 11 => s, Source { msl_10: s, .. } if s != EMPTY && revision >= 10 => s, _ => return Err(SelectError(backend)), } } #[cfg(feature = "vulkan")] Backend::Vulkan => { match *self { Source { vulkan: s, .. } if s != EMPTY => s, _ => return Err(SelectError(backend)), } } }) } }
use std::error::Error; use std::fmt; pub use gfx_device_gl::Version as GlslVersion; #[cfg(target_os = "windows")] pub use gfx_device_dx11::ShaderModel as DxShaderModel; #[cfg(feature = "metal")] pub use gfx_device_metal::ShaderModel as MetalShaderModel; #[derive(Copy, Clone, Debug, PartialEq)] pub enum Backend { Glsl(GlslVersion), GlslEs(GlslVersion), #[cfg(target_os = "windows")] Hlsl(DxShaderModel), #[cfg(feature = "metal")] Msl(MetalShaderModel), #[cfg(feature = "vulkan")] Vulkan, } pub const EMPTY: &'static [u8] = &[]; #[derive(Copy, Clone, PartialEq, Debug)] pub struct Source<'a> { pub glsl_120: &'a [u8], pub glsl_130: &'a [u8], pub glsl_140: &'a [u8], pub glsl_150: &'a [u8], pub glsl_400: &'a [u8], pub glsl_430: &'a [u8], pub glsl_es_100: &'a [u8], pub glsl_es_200: &'a [u8], pub glsl_es_300: &'a [u8], pub hlsl_30: &'a [u8], pub hlsl_40: &'a [u8], pub hlsl_41: &'a [u8], pub hlsl_50: &'a [u8], pub msl_10: &'a [u8], pub msl_11: &'a [u8], pub vulkan: &'a [u8], } #[derive(Clone, Copy, Debug, PartialEq)] pub struct SelectError(Backend); impl fmt::Display for SelectError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "An error occurred when selecting the {:?} backend", self.0) } } impl Error for SelectError { fn description(&self) -> &str { "An error occurred when selecting a backend" } } impl<'a> Source<'a> { pub fn empty() -> Source<'a> { Source { glsl_120: EMPTY, glsl_130: EMPTY, glsl_140: EMPTY, glsl_150: EMPTY, glsl_400: EMPTY, glsl_430: EMPTY, glsl_es_100: EMPTY, glsl_es_200: EMPTY, glsl_es_300: EMPTY, hlsl_30: EMPTY, hlsl_40: EMPTY, hlsl_41: EMPTY, hlsl_50: EMPTY, msl_10: EMPTY, msl_11: EMPTY, vulkan: EMPTY, } }
}
pub fn select(&self, backend: Backend) -> Result<&'a [u8], SelectError> { Ok(match backend { Backend::Glsl(version) => { let v = version.major * 100 + version.minor; match *self { Source { glsl_430: s, .. } if s != EMPTY && v >= 430 => s, Source { glsl_400: s, .. } if s != EMPTY && v >= 400 => s, Source { glsl_150: s, .. } if s != EMPTY && v >= 150 => s, Source { glsl_140: s, .. } if s != EMPTY && v >= 140 => s, Source { glsl_130: s, .. } if s != EMPTY && v >= 130 => s, Source { glsl_120: s, .. } if s != EMPTY && v >= 120 => s, _ => return Err(SelectError(backend)), } } Backend::GlslEs(version) => { let v = version.major * 100 + version.minor; match *self { Source { glsl_es_100: s, .. } if s != EMPTY && v >= 100 => s, Source { glsl_es_200: s, .. } if s != EMPTY && v >= 200 => s, Source { glsl_es_300: s, .. } if s != EMPTY && v >= 300 => s, _ => return Err(SelectError(backend)), } } #[cfg(target_os = "windows")] Backend::Hlsl(model) => { match *self { Source { hlsl_50: s, .. } if s != EMPTY && model >= 50 => s, Source { hlsl_41: s, .. } if s != EMPTY && model >= 41 => s, Source { hlsl_40: s, .. } if s != EMPTY && model >= 40 => s, Source { hlsl_30: s, .. } if s != EMPTY && model >= 30 => s, _ => return Err(SelectError(backend)), } } #[cfg(feature = "metal")] Backend::Msl(revision) => { match *self { Source { msl_11: s, .. } if s != EMPTY && revision >= 11 => s, Source { msl_10: s, .. } if s != EMPTY && revision >= 10 => s, _ => return Err(SelectError(backend)), } } #[cfg(feature = "vulkan")] Backend::Vulkan => { match *self { Source { vulkan: s, .. } if s != EMPTY => s, _ => return Err(SelectError(backend)), } } }) }
function_block-full_function
[ { "content": "// texture loading boilerplate\n\npub fn load_texture<R, F>(factory: &mut F, data: &[u8])\n\n -> Result<gfx::handle::ShaderResourceView<R, [f32; 4]>, String>\n\n where R: gfx::Resources, F: gfx::Factory<R>\n\n{\n\n use gfx::format::Rgba8;\n\n use gfx::texture as t;\...
Rust
pac/atsam3x8h/src/uotghs/devicr.rs
compenguy/atsam3x8e
79168e405c0c8ce9005866c85df5dcc77e235f5e
#[doc = "Writer for register DEVICR"] pub type W = crate::W<u32, super::DEVICR>; #[doc = "Write proxy for field `SUSPC`"] pub struct SUSPC_W<'a> { w: &'a mut W, } impl<'a> SUSPC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } #[doc = "Write proxy for field `MSOFC`"] pub struct MSOFC_W<'a> { w: &'a mut W, } impl<'a> MSOFC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1); self.w } } #[doc = "Write proxy for field `SOFC`"] pub struct SOFC_W<'a> { w: &'a mut W, } impl<'a> SOFC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2); self.w } } #[doc = "Write proxy for field `EORSTC`"] pub struct EORSTC_W<'a> { w: &'a mut W, } impl<'a> EORSTC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut 
W { self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3); self.w } } #[doc = "Write proxy for field `WAKEUPC`"] pub struct WAKEUPC_W<'a> { w: &'a mut W, } impl<'a> WAKEUPC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4); self.w } } #[doc = "Write proxy for field `EORSMC`"] pub struct EORSMC_W<'a> { w: &'a mut W, } impl<'a> EORSMC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5); self.w } } #[doc = "Write proxy for field `UPRSMC`"] pub struct UPRSMC_W<'a> { w: &'a mut W, } impl<'a> UPRSMC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6); self.w } } impl W { #[doc = "Bit 0 - Suspend Interrupt Clear"] #[inline(always)] pub fn suspc(&mut self) -> SUSPC_W { SUSPC_W { w: self } } #[doc = "Bit 1 - Micro Start of Frame Interrupt Clear"] #[inline(always)] pub fn msofc(&mut self) -> MSOFC_W { MSOFC_W { w: self } } #[doc = "Bit 2 - Start of Frame Interrupt Clear"] #[inline(always)] pub fn sofc(&mut self) -> SOFC_W { SOFC_W { w: 
self } } #[doc = "Bit 3 - End of Reset Interrupt Clear"] #[inline(always)] pub fn eorstc(&mut self) -> EORSTC_W { EORSTC_W { w: self } } #[doc = "Bit 4 - Wake-Up Interrupt Clear"] #[inline(always)] pub fn wakeupc(&mut self) -> WAKEUPC_W { WAKEUPC_W { w: self } } #[doc = "Bit 5 - End of Resume Interrupt Clear"] #[inline(always)] pub fn eorsmc(&mut self) -> EORSMC_W { EORSMC_W { w: self } } #[doc = "Bit 6 - Upstream Resume Interrupt Clear"] #[inline(always)] pub fn uprsmc(&mut self) -> UPRSMC_W { UPRSMC_W { w: self } } }
#[doc = "Writer for register DEVICR"] pub type W = crate::W<u32, super::DEVICR>; #[doc = "Write proxy for field `SUSPC`"] pub struct SUSPC_W<'a> { w: &'a mut W, } impl<'a> SUSPC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } #[doc = "Write proxy for field `MSOFC`"] pub struct MSOFC_W<'a> { w: &'a mut W, } impl<'a> MSOFC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1); self.w } } #[doc = "Write proxy for field `SOFC`"] pub struct SOFC_W<'a> { w: &'a mut W, } impl<'a> SOFC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2); self.w } } #[doc = "Write proxy for field `EORSTC`"] pub struct EORSTC
} #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4); self.w } } #[doc = "Write proxy for field `EORSMC`"] pub struct EORSMC_W<'a> { w: &'a mut W, } impl<'a> EORSMC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5); self.w } } #[doc = "Write proxy for field `UPRSMC`"] pub struct UPRSMC_W<'a> { w: &'a mut W, } impl<'a> UPRSMC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6); self.w } } impl W { #[doc = "Bit 0 - Suspend Interrupt Clear"] #[inline(always)] pub fn suspc(&mut self) -> SUSPC_W { SUSPC_W { w: self } } #[doc = "Bit 1 - Micro Start of Frame Interrupt Clear"] #[inline(always)] pub fn msofc(&mut self) -> MSOFC_W { MSOFC_W { w: self } } #[doc = "Bit 2 - Start of Frame Interrupt Clear"] #[inline(always)] pub fn sofc(&mut self) -> SOFC_W { SOFC_W { w: self } } #[doc = "Bit 3 - End of Reset Interrupt Clear"] #[inline(always)] pub fn eorstc(&mut self) -> EORSTC_W { EORSTC_W { w: self } } #[doc = "Bit 4 - Wake-Up Interrupt Clear"] #[inline(always)] pub fn wakeupc(&mut self) -> WAKEUPC_W { WAKEUPC_W { w: self } } #[doc = "Bit 5 - End of Resume Interrupt Clear"] #[inline(always)] pub fn eorsmc(&mut self) -> EORSMC_W { EORSMC_W { w: self } } #[doc = "Bit 6 - 
Upstream Resume Interrupt Clear"] #[inline(always)] pub fn uprsmc(&mut self) -> UPRSMC_W { UPRSMC_W { w: self } } }
_W<'a> { w: &'a mut W, } impl<'a> EORSTC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3); self.w } } #[doc = "Write proxy for field `WAKEUPC`"] pub struct WAKEUPC_W<'a> { w: &'a mut W, } impl<'a> WAKEUPC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false)
random
[]
Rust
src/custom_fd.rs
nsforth/actorio
b15dacfadfb93b201d54f2b3758fd1a84a044e0a
use crate::{AsSocketId, MaybeSocketOwner, ActorioContext, SocketHolder, SocketId}; use mio::event::Source; use mio::unix::SourceFd; use mio::{Interest, Registry, Token}; use std::io::Error; use std::os::unix::io::RawFd; type CustomFdEventHandler<'a, A> = Box<dyn FnMut(&mut A, &mut ActorioContext<'a, A>, &CustomFdId) + 'a>; #[derive(Hash, PartialEq, Eq, Debug)] pub struct CustomFdId(SocketId); impl AsSocketId for CustomFdId { fn as_socket_id(&self) -> &SocketId { &self.0 } } impl From<SocketId> for CustomFdId { fn from(socket_id: SocketId) -> Self { CustomFdId(socket_id) } } pub struct CustomFd<'a, A> { custom_fd: RawFd, on_readable: Option<CustomFdEventHandler<'a, A>>, on_writeable: Option<CustomFdEventHandler<'a, A>>, } impl<'a, A> Drop for CustomFd<'a, A> { fn drop(&mut self) { unsafe { libc::close(self.custom_fd) }; } } impl<'a, A> CustomFd<'a, A> { pub fn get_raw_fd(&self) -> RawFd { self.custom_fd } pub fn new(raw_fd: RawFd) -> CustomFdInit { CustomFdInit::new::<A>(raw_fd) } pub(crate) fn process_read( act_ctx: &mut ActorioContext<'a, A>, application: &mut A, custom_fd_id: CustomFdId, ) { if let Some(custom_fd) = act_ctx.try_get_socket(&custom_fd_id) { let mut on_readable = custom_fd.on_readable.take().unwrap(); on_readable(application, act_ctx, &custom_fd_id); if let Some(custom_fd) = act_ctx.try_get_socket(&custom_fd_id) { if custom_fd.on_readable.is_none() { custom_fd.on_readable = Some(on_readable); } }; }; } pub(crate) fn process_write( act_ctx: &mut ActorioContext<'a, A>, application: &mut A, custom_fd_id: CustomFdId, ) { if let Some(custom_fd) = act_ctx.try_get_socket(&custom_fd_id) { let mut on_writeable = custom_fd.on_writeable.take().unwrap(); on_writeable(application, act_ctx, &custom_fd_id); if let Some(custom_fd) = act_ctx.try_get_socket(&custom_fd_id) { if custom_fd.on_writeable.is_none() { custom_fd.on_writeable = Some(on_writeable); } }; }; } pub(crate) fn has_read_handler(&self) -> bool { self.on_readable.is_some() } pub(crate) fn 
has_write_handler(&self) -> bool { self.on_writeable.is_some() } } impl<'a, A> Source for CustomFd<'a, A> { fn register( &mut self, registry: &Registry, token: Token, interests: Interest, ) -> Result<(), Error> { registry.register(&mut SourceFd(&self.custom_fd), token, interests) } fn reregister( &mut self, registry: &Registry, token: Token, interests: Interest, ) -> Result<(), Error> { registry.reregister(&mut SourceFd(&self.custom_fd), token, interests) } fn deregister(&mut self, registry: &Registry) -> Result<(), Error> { registry.deregister(&mut SourceFd(&self.custom_fd)) } } pub struct CustomFdInit(RawFd); impl CustomFdInit { fn new<A>(raw_fd: RawFd) -> Self { CustomFdInit(raw_fd) } pub fn on_readable<'a, A>( self, on_readable: impl FnMut(&mut A, &mut ActorioContext<A>, &CustomFdId) + 'a, ) -> CustomFdInitWithReadHandler<'a, A> { CustomFdInitWithReadHandler { custom_fd: self.0, on_readable: Box::new(on_readable), } } pub fn on_writeable<'a, A>( self, on_writeable: impl FnMut(&mut A, &mut ActorioContext<A>, &CustomFdId) + 'a, ) -> CustomFdInitWithWriteHandler<'a, A> { CustomFdInitWithWriteHandler { custom_fd: self.0, on_writeable: Box::new(on_writeable), } } } pub struct CustomFdInitWithReadHandler<'a, A> { custom_fd: RawFd, on_readable: CustomFdEventHandler<'a, A>, } impl<'a, A> CustomFdInitWithReadHandler<'a, A> { pub fn on_writeable( self, on_writeable: impl FnMut(&mut A, &mut ActorioContext<A>, &CustomFdId) + 'a, ) -> CustomFdInitFinal<'a, A> { CustomFdInitFinal { custom_fd: self.custom_fd, on_readable: Some(self.on_readable), on_writeable: Some(Box::new(on_writeable)), } } pub fn register(self, act_ctx: &mut ActorioContext<'a, A>) -> Result<CustomFdId, Error> { let custom_fd_init_final = CustomFdInitFinal { custom_fd: self.custom_fd, on_readable: Some(self.on_readable), on_writeable: None, }; custom_fd_init_final.register(act_ctx) } } pub struct CustomFdInitWithWriteHandler<'a, A> { custom_fd: RawFd, on_writeable: CustomFdEventHandler<'a, A>, } impl<'a, A> 
CustomFdInitWithWriteHandler<'a, A> { pub fn on_readable( self, on_readable: impl FnMut(&mut A, &mut ActorioContext<A>, &CustomFdId) + 'a, ) -> CustomFdInitFinal<'a, A> { CustomFdInitFinal { custom_fd: self.custom_fd, on_readable: Some(Box::new(on_readable)), on_writeable: Some(self.on_writeable), } } pub fn register(self, act_ctx: &mut ActorioContext<'a, A>) -> Result<CustomFdId, Error> { let custom_fd_init_final = CustomFdInitFinal { custom_fd: self.custom_fd, on_readable: None, on_writeable: Some(self.on_writeable), }; custom_fd_init_final.register(act_ctx) } } pub struct CustomFdInitFinal<'a, A> { custom_fd: RawFd, on_readable: Option<CustomFdEventHandler<'a, A>>, on_writeable: Option<CustomFdEventHandler<'a, A>>, } impl<'a, A> CustomFdInitFinal<'a, A> { pub fn register(self, act_ctx: &mut ActorioContext<'a, A>) -> Result<CustomFdId, Error> { act_ctx .register_socket_holder(SocketHolder::new( self.on_readable.is_some(), self.on_writeable.is_some(), From::from(CustomFd { custom_fd: self.custom_fd, on_readable: self.on_readable, on_writeable: self.on_writeable, }), )) .map(From::from) } }
use crate::{AsSocketId, MaybeSocketOwner, ActorioContext, SocketHolder, SocketId}; use mio::event::Source; use mio::unix::SourceFd; use mio::{Interest, Registry, Token}; use std::io::Error; use std::os::unix::io::RawFd; type CustomFdEventHandler<'a, A> = Box<dyn FnMut(&mut A, &mut ActorioContext<'a, A>, &CustomFdId) + 'a>; #[derive(Hash, PartialEq, Eq, Debug)] pub struct CustomFdId(SocketId); impl AsSocketId for CustomFdId { fn as_socket_id(&self) -> &SocketId { &self.0 } } impl From<SocketId> for CustomFdId { fn from(socket_id: SocketId) -> Self { CustomFdId(socket_id) } } pub struct CustomFd<'a, A> { custom_fd: RawFd, on_readable: Option<CustomFdEventHandler<'a, A>>, on_writeable: Option<CustomFdEventHandler<'a, A>>, } impl<'a, A> Drop for CustomFd<'a, A> { fn drop(&mut self) { unsafe { libc::close(self.custom_fd) }; } } impl<'a, A> CustomFd<'a, A> { pub fn get_raw_fd(&self) -> RawFd { self.custom_fd } pub fn new(raw_fd: RawFd) -> CustomFdInit { CustomFdInit::new::<A>(raw_fd) } pub(crate) fn process_read( act_ctx: &mut ActorioContext<'a, A>, application: &mut A, custom_fd_id: CustomFdId, ) { if let Some(custom_fd) = act_ctx.try_get_socket(&custom_fd_id) { let mut on_readable = custom_fd.on_readable.take().unwrap(); on_readable(application, act_ctx, &custom_fd_id); if let Some(custom_fd) = act_ctx.try_get_socket(&custom_fd_id) { if custom_fd.on_readable.is_none() { custom_fd.on_readable = Some(on_readable); } }; }; } pub(crate) fn pr
pub(crate) fn has_read_handler(&self) -> bool { self.on_readable.is_some() } pub(crate) fn has_write_handler(&self) -> bool { self.on_writeable.is_some() } } impl<'a, A> Source for CustomFd<'a, A> { fn register( &mut self, registry: &Registry, token: Token, interests: Interest, ) -> Result<(), Error> { registry.register(&mut SourceFd(&self.custom_fd), token, interests) } fn reregister( &mut self, registry: &Registry, token: Token, interests: Interest, ) -> Result<(), Error> { registry.reregister(&mut SourceFd(&self.custom_fd), token, interests) } fn deregister(&mut self, registry: &Registry) -> Result<(), Error> { registry.deregister(&mut SourceFd(&self.custom_fd)) } } pub struct CustomFdInit(RawFd); impl CustomFdInit { fn new<A>(raw_fd: RawFd) -> Self { CustomFdInit(raw_fd) } pub fn on_readable<'a, A>( self, on_readable: impl FnMut(&mut A, &mut ActorioContext<A>, &CustomFdId) + 'a, ) -> CustomFdInitWithReadHandler<'a, A> { CustomFdInitWithReadHandler { custom_fd: self.0, on_readable: Box::new(on_readable), } } pub fn on_writeable<'a, A>( self, on_writeable: impl FnMut(&mut A, &mut ActorioContext<A>, &CustomFdId) + 'a, ) -> CustomFdInitWithWriteHandler<'a, A> { CustomFdInitWithWriteHandler { custom_fd: self.0, on_writeable: Box::new(on_writeable), } } } pub struct CustomFdInitWithReadHandler<'a, A> { custom_fd: RawFd, on_readable: CustomFdEventHandler<'a, A>, } impl<'a, A> CustomFdInitWithReadHandler<'a, A> { pub fn on_writeable( self, on_writeable: impl FnMut(&mut A, &mut ActorioContext<A>, &CustomFdId) + 'a, ) -> CustomFdInitFinal<'a, A> { CustomFdInitFinal { custom_fd: self.custom_fd, on_readable: Some(self.on_readable), on_writeable: Some(Box::new(on_writeable)), } } pub fn register(self, act_ctx: &mut ActorioContext<'a, A>) -> Result<CustomFdId, Error> { let custom_fd_init_final = CustomFdInitFinal { custom_fd: self.custom_fd, on_readable: Some(self.on_readable), on_writeable: None, }; custom_fd_init_final.register(act_ctx) } } pub struct 
CustomFdInitWithWriteHandler<'a, A> { custom_fd: RawFd, on_writeable: CustomFdEventHandler<'a, A>, } impl<'a, A> CustomFdInitWithWriteHandler<'a, A> { pub fn on_readable( self, on_readable: impl FnMut(&mut A, &mut ActorioContext<A>, &CustomFdId) + 'a, ) -> CustomFdInitFinal<'a, A> { CustomFdInitFinal { custom_fd: self.custom_fd, on_readable: Some(Box::new(on_readable)), on_writeable: Some(self.on_writeable), } } pub fn register(self, act_ctx: &mut ActorioContext<'a, A>) -> Result<CustomFdId, Error> { let custom_fd_init_final = CustomFdInitFinal { custom_fd: self.custom_fd, on_readable: None, on_writeable: Some(self.on_writeable), }; custom_fd_init_final.register(act_ctx) } } pub struct CustomFdInitFinal<'a, A> { custom_fd: RawFd, on_readable: Option<CustomFdEventHandler<'a, A>>, on_writeable: Option<CustomFdEventHandler<'a, A>>, } impl<'a, A> CustomFdInitFinal<'a, A> { pub fn register(self, act_ctx: &mut ActorioContext<'a, A>) -> Result<CustomFdId, Error> { act_ctx .register_socket_holder(SocketHolder::new( self.on_readable.is_some(), self.on_writeable.is_some(), From::from(CustomFd { custom_fd: self.custom_fd, on_readable: self.on_readable, on_writeable: self.on_writeable, }), )) .map(From::from) } }
ocess_write( act_ctx: &mut ActorioContext<'a, A>, application: &mut A, custom_fd_id: CustomFdId, ) { if let Some(custom_fd) = act_ctx.try_get_socket(&custom_fd_id) { let mut on_writeable = custom_fd.on_writeable.take().unwrap(); on_writeable(application, act_ctx, &custom_fd_id); if let Some(custom_fd) = act_ctx.try_get_socket(&custom_fd_id) { if custom_fd.on_writeable.is_none() { custom_fd.on_writeable = Some(on_writeable); } }; }; }
function_block-function_prefixed
[ { "content": "type TCPEventHandler<'a, A> = Box<dyn FnMut(&mut A, &mut ActorioContext<'a, A>, &TCPConnId) + 'a>;\n\n\n\n#[derive(Hash, PartialEq, Eq, Debug)]\n\npub struct TCPConnId(SocketId);\n\n\n\nimpl AsSocketId for TCPConnId {\n\n fn as_socket_id(&self) -> &SocketId {\n\n &self.0\n\n }\n\n}\n\...
Rust
src/lib.rs
dvdplm/rsmq-rust
20fe18287247a81c803b8b539ea1027c9de54736
use failure::{Error, format_err}; use bb8::Pool; use bb8_redis::RedisConnectionManager; use std::{default::Default, ops::DerefMut}; use redis::{from_redis_value, RedisError, RedisResult, Value, ErrorKind as RedisErrorKind}; #[derive(Clone, Debug)] pub struct Queue { pub qname: String, pub vt: u64, pub delay: u64, pub maxsize: i64, pub totalrecv: u64, pub totalsent: u64, pub created: u64, pub modified: u64, pub msgs: u64, pub hiddenmsgs: u64, } impl Queue { pub fn new(qname: &str, vt: Option<u64>, delay: Option<u64>, maxsize: Option<i64>) -> Queue { let mut q = Queue { ..Default::default() }; q.qname = qname.into(); q.vt = vt.unwrap_or(30); q.delay = delay.unwrap_or(0); q.maxsize = maxsize.unwrap_or(65536); q } } impl Default for Queue { fn default() -> Queue { Queue { qname: "".into(), vt: 30, delay: 0, maxsize: 65536, totalrecv: 0, totalsent: 0, created: 0, modified: 0, msgs: 0, hiddenmsgs: 0, } } } #[derive(Clone, Debug)] pub struct Message { pub id: String, pub message: String, pub rc: u64, pub fr: u64, pub sent: u64, } impl Message { pub fn new() -> Message { Message { id: "".into(), message: "".into(), sent: 0, fr: 0, rc: 0, } } } impl redis::FromRedisValue for Message { fn from_redis_value(v: &Value) -> RedisResult<Message> { match *v { Value::Bulk(ref items) => { if items.len() == 0 { return Err(RedisError::from((RedisErrorKind::TryAgain, "No messages to receive"))); } let mut m = Message::new(); m.id = from_redis_value(&items[0])?; m.message = from_redis_value(&items[1])?; m.rc = from_redis_value(&items[2])?; m.fr = from_redis_value(&items[3])?; m.sent = match u64::from_str_radix(&m.id[0..10], 36) { Ok(ts) => ts, Err(e) => return Err(RedisError::from(( RedisErrorKind::TypeError, "timestamp parsing error", format!("Could not convert '{:?}' to a timestamp. 
Error: {}", &m.id[0..10], e) ))) }; Ok(m) } _ => Err(RedisError::from((RedisErrorKind::IoError, "Redis did not return a Value::Bulk"))), } } } pub struct Rsmq { pool: Pool<RedisConnectionManager>, name_space: String, } impl std::fmt::Debug for Rsmq { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "redis namespace: {}, {:?}", self.name_space, self.pool) } } impl Rsmq { pub async fn new<T: redis::IntoConnectionInfo>(params: T, name_space: &str) -> Result<Rsmq, Error> { let manager = RedisConnectionManager::new(params)?; let pool = bb8::Pool::builder().build(manager).await?; let name_space = if name_space != "" { name_space.into() } else { "rsmq".into() }; Ok(Rsmq { pool, name_space }) } pub async fn create_queue(&self, opts: Queue) -> Result<u8, Error> { let con = self.pool.get() .await? .as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let qky = self.queue_hash_key(&opts.qname); let (ts, _): (u32, u32) = redis::cmd("TIME").query_async(con).await?; let (res, ): (u8, ) = redis::pipe() .atomic() .cmd("HSETNX").arg(&qky).arg("vt").arg(opts.vt).ignore() .cmd("HSETNX").arg(&qky).arg("delay").arg(opts.delay).ignore() .cmd("HSETNX").arg(&qky).arg("maxsize").arg(opts.maxsize).ignore() .cmd("HSETNX").arg(&qky).arg("totalrecv").arg(0).ignore() .cmd("HSETNX").arg(&qky).arg("totalsent").arg(0).ignore() .cmd("HSETNX").arg(&qky).arg("created").arg(ts).ignore() .cmd("HSETNX").arg(&qky).arg("modified").arg(ts).ignore() .cmd("SADD").arg(format!("{}:QUEUES", self.name_space)).arg(opts.qname) .query_async(con) .await?; Ok(res) } pub async fn delete_queue(&self, qname: &str) -> Result<Value, Error> { let con = self.pool.get() .await? 
.as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let key = self.message_zset_key(qname); redis::pipe() .atomic() .cmd("DEL").arg(format!("{}:Q", &key)).ignore() .cmd("DEL").arg(&key).ignore() .cmd("SREM").arg(format!("{}:QUEUES", self.name_space)).arg(qname).ignore() .query_async(con) .await .map_err(|e| e.into()) } pub async fn list_queues(&self) -> Result<Vec<String>, Error> { let con = self.pool.get() .await? .as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let key = format!("{}:QUEUES", self.name_space); redis::cmd("SMEMBERS") .arg(key) .query_async(con) .await .map_err(|e| e.into()) } async fn get_queue(&self, qname: &str, set_uid: bool) -> Result<(Queue, u64, Option<String>), Error> { let con = self.pool.get() .await? .as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let qkey = self.queue_hash_key(qname); let ((vt, delay, maxsize), (secs, micros)): ((u64, u64, i64), (u64, u64)) = redis::pipe() .atomic() .cmd("HMGET").arg(qkey).arg("vt").arg("delay").arg("maxsize") .cmd("TIME") .query_async(con) .await?; let ts_micros = secs * 1_000_000 + micros; let ts = ts_micros / 1_000; let q = Queue { qname: qname.into(), vt, delay, maxsize, ..Default::default() }; let uid = if set_uid { let ts_rad36 = radix::RadixNum::from(ts_micros).with_radix(36).unwrap().as_str().to_lowercase().to_string(); Some(ts_rad36 + &make_id_22()) } else { None }; Ok((q, ts, uid)) } pub async fn change_message_visibility(&self, qname: &str, msgid: &str, hidefor: u64) -> Result<u64, Error> { const LUA: &'static str = r#" local msg = redis.call("ZSCORE", KEYS[1], KEYS[2]) if not msg then return 0 end redis.call("ZADD", KEYS[1], KEYS[3], KEYS[2]) return 1"#; let (_, ts, _) = self.get_queue(&qname, false).await?; let key = self.message_zset_key(qname); let expires_at = ts + hidefor * 1000u64; let con = self.pool.get() .await? 
.as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; redis::Script::new(LUA) .key(key) .key(msgid) .key(expires_at) .invoke_async::<_, ()>(con) .await?; Ok(expires_at) } pub async fn send_message(&self, qname: &str, message: &str, delay: Option<u64>) -> Result<String, Error> { let (q, ts, uid) = self.get_queue(&qname, true).await?; let uid = uid.ok_or(format_err!("Did not get a proper uid back from Redis"))?; let delay = delay.unwrap_or(q.delay); if q.maxsize != -1 && message.as_bytes().len() > q.maxsize as usize { let custom_error = std::io::Error::new(std::io::ErrorKind::Other, "Message is too long"); let redis_err = RedisError::from(custom_error); return Err(redis_err.into()); } let key = self.message_zset_key(qname); let qky = self.queue_hash_key(qname); let con = self.pool.get() .await? .as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; redis::pipe().atomic() .cmd("ZADD").arg(&key).arg(ts + delay * 1000).arg(&uid).ignore() .cmd("HSET").arg(&qky).arg(&uid).arg(message).ignore() .cmd("HINCRBY").arg(&qky).arg("totalsent").arg(1).ignore() .query_async::<_, ()>(con) .await?; Ok(uid) } pub async fn delete_message(&self, qname: &str, msgid: &str) -> Result<bool, Error> { let key = self.message_zset_key(qname); let con = self.pool.get() .await? 
.as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let (delete_count, deleted_fields_count): (u32, u32) = redis::pipe() .atomic() .cmd("ZREM") .arg(&key) .arg(msgid) .cmd("HDEL") .arg(format!("{}:Q", &key)) .arg(msgid) .arg(format!("{}:rc", &key)) .arg(format!("{}:fr", &key)) .query_async(con) .await?; if delete_count == 1 && deleted_fields_count > 0 { Ok(true) } else { Ok(false) } } pub async fn pop_message(&self, qname: &str) -> Result<Message, Error> { const LUA: &'static str = r##" local msg = redis.call("ZRANGEBYSCORE", KEYS[1], "-inf", KEYS[2], "LIMIT", "0", "1") if #msg == 0 then return {} end redis.call("HINCRBY", KEYS[1] .. ":Q", "totalrecv", 1) local mbody = redis.call("HGET", KEYS[1] .. ":Q", msg[1]) local rc = redis.call("HINCRBY", KEYS[1] .. ":Q", msg[1] .. ":rc", 1) local o = {msg[1], mbody, rc} if rc==1 then table.insert(o, KEYS[2]) else local fr = redis.call("HGET", KEYS[1] .. ":Q", msg[1] .. ":fr") table.insert(o, fr) end redis.call("ZREM", KEYS[1], msg[1]) redis.call("HDEL", KEYS[1] .. ":Q", msg[1], msg[1] .. ":rc", msg[1] .. ":fr") return o "##; let (_, ts, _) = self.get_queue(qname, false).await?; let key = self.message_zset_key(qname); let con = self.pool.get() .await? .as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let m: Message = redis::Script::new(LUA) .key(key) .key(ts) .invoke_async(con) .await?; Ok(m) } pub async fn receive_message(&self, qname: &str, hidefor: Option<u64>) -> Result<Message, Error> { const LUA: &'static str = r##" local msg = redis.call("ZRANGEBYSCORE", KEYS[1], "-inf", KEYS[2], "LIMIT", "0", "1") if #msg == 0 then return {} end redis.call("ZADD", KEYS[1], KEYS[3], msg[1]) redis.call("HINCRBY", KEYS[1] .. ":Q", "totalrecv", 1) local mbody = redis.call("HGET", KEYS[1] .. ":Q", msg[1]) local rc = redis.call("HINCRBY", KEYS[1] .. ":Q", msg[1] .. 
":rc", 1) local o = {msg[1], mbody, rc} if rc==1 then redis.call("HSET", KEYS[1] .. ":Q", msg[1] .. ":fr", KEYS[2]) table.insert(o, KEYS[2]) else local fr = redis.call("HGET", KEYS[1] .. ":Q", msg[1] .. ":fr") table.insert(o, fr) end return o "##; let (q, ts, _) = self.get_queue(&qname, false).await?; let hidefor = hidefor.unwrap_or(q.vt); let key = self.message_zset_key(qname); let expires_at = ts + hidefor * 1000u64; let con = self.pool.get() .await? .as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let m: Message = redis::Script::new(LUA) .key(key) .key(ts) .key(expires_at) .invoke_async(con) .await?; Ok(m) } pub async fn get_queue_attributes(&self, qname: &str) -> Result<Queue, Error> { let con = self.pool.get() .await? .as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let key = self.message_zset_key(qname); let qkey = self.queue_hash_key(qname); let (time, _): (String, u32) = redis::cmd("TIME") .query_async(con) .await?; let ts_str = format!("{}000", time); let out: ((u64, u64, i64, u64, u64, u64, u64), u64, u64) = redis::pipe().atomic() .cmd("HMGET") .arg(qkey) .arg("vt") .arg("delay") .arg("maxsize") .arg("totalrecv") .arg("totalsent") .arg("created") .arg("modified") .cmd("ZCARD") .arg(&key) .cmd("ZCOUNT") .arg(&key) .arg(ts_str) .arg("+inf") .query_async(con) .await?; let (vt, delay, maxsize, totalrecv, totalsent, created, modified) = out.0; let msgs = out.1; let hiddenmsgs = out.2; let q = Queue { qname: qname.into(), vt, delay, maxsize, totalrecv, totalsent, created, modified, msgs, hiddenmsgs, }; Ok(q) } pub async fn set_queue_attributes( &self, qname: &str, vt: Option<u64>, delay: Option<u64>, maxsize: Option<i64>, ) -> Result<Queue, Error> { let con = self.pool.get() .await? 
.as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let qkey = self.queue_hash_key(qname); let mut pipe = redis::pipe(); if vt.is_some() { pipe.cmd("HSET").arg(&qkey).arg("vt").arg(vt).ignore(); } if delay.is_some() { pipe.cmd("HSET").arg(&qkey).arg("delay").arg(delay).ignore(); } if maxsize.is_some() { pipe.cmd("HSET").arg(&qkey).arg("maxsize").arg(maxsize).ignore(); } pipe.atomic().query_async::<_, ()>(con).await?; let q = self.get_queue_attributes(qname).await?; Ok(q) } fn queue_hash_key(&self, qname: &str) -> String { format!("{}:{}:Q", self.name_space, qname) } fn message_zset_key(&self, qname: &str) -> String { format!("{}:{}", self.name_space, qname) } } fn make_id_22() -> String { use rand::{Rng, distributions::Alphanumeric}; rand::thread_rng() .sample_iter(&Alphanumeric) .take(22) .collect::<String>() }
use failure::{Error, format_err}; use bb8::Pool; use bb8_redis::RedisConnectionManager; use std::{default::Default, ops::DerefMut}; use redis::{from_redis_value, RedisError, RedisResult, Value, ErrorKind as RedisErrorKind}; #[derive(Clone, Debug)] pub struct Queue { pub qname: String, pub vt: u64, pub delay: u64, pub maxsize: i64, pub totalrecv: u64, pub totalsent: u64, pub created: u64, pub modified: u64, pub msgs: u64, pub hiddenmsgs: u64, } impl Queue { pub fn new(qname: &str, vt: Option<u64>, delay: Option<u64>, maxsize: Option<i64>) -> Queue { let mut q = Queu
} impl Default for Queue { fn default() -> Queue { Queue { qname: "".into(), vt: 30, delay: 0, maxsize: 65536, totalrecv: 0, totalsent: 0, created: 0, modified: 0, msgs: 0, hiddenmsgs: 0, } } } #[derive(Clone, Debug)] pub struct Message { pub id: String, pub message: String, pub rc: u64, pub fr: u64, pub sent: u64, } impl Message { pub fn new() -> Message { Message { id: "".into(), message: "".into(), sent: 0, fr: 0, rc: 0, } } } impl redis::FromRedisValue for Message { fn from_redis_value(v: &Value) -> RedisResult<Message> { match *v { Value::Bulk(ref items) => { if items.len() == 0 { return Err(RedisError::from((RedisErrorKind::TryAgain, "No messages to receive"))); } let mut m = Message::new(); m.id = from_redis_value(&items[0])?; m.message = from_redis_value(&items[1])?; m.rc = from_redis_value(&items[2])?; m.fr = from_redis_value(&items[3])?; m.sent = match u64::from_str_radix(&m.id[0..10], 36) { Ok(ts) => ts, Err(e) => return Err(RedisError::from(( RedisErrorKind::TypeError, "timestamp parsing error", format!("Could not convert '{:?}' to a timestamp. Error: {}", &m.id[0..10], e) ))) }; Ok(m) } _ => Err(RedisError::from((RedisErrorKind::IoError, "Redis did not return a Value::Bulk"))), } } } pub struct Rsmq { pool: Pool<RedisConnectionManager>, name_space: String, } impl std::fmt::Debug for Rsmq { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "redis namespace: {}, {:?}", self.name_space, self.pool) } } impl Rsmq { pub async fn new<T: redis::IntoConnectionInfo>(params: T, name_space: &str) -> Result<Rsmq, Error> { let manager = RedisConnectionManager::new(params)?; let pool = bb8::Pool::builder().build(manager).await?; let name_space = if name_space != "" { name_space.into() } else { "rsmq".into() }; Ok(Rsmq { pool, name_space }) } pub async fn create_queue(&self, opts: Queue) -> Result<u8, Error> { let con = self.pool.get() .await? 
.as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let qky = self.queue_hash_key(&opts.qname); let (ts, _): (u32, u32) = redis::cmd("TIME").query_async(con).await?; let (res, ): (u8, ) = redis::pipe() .atomic() .cmd("HSETNX").arg(&qky).arg("vt").arg(opts.vt).ignore() .cmd("HSETNX").arg(&qky).arg("delay").arg(opts.delay).ignore() .cmd("HSETNX").arg(&qky).arg("maxsize").arg(opts.maxsize).ignore() .cmd("HSETNX").arg(&qky).arg("totalrecv").arg(0).ignore() .cmd("HSETNX").arg(&qky).arg("totalsent").arg(0).ignore() .cmd("HSETNX").arg(&qky).arg("created").arg(ts).ignore() .cmd("HSETNX").arg(&qky).arg("modified").arg(ts).ignore() .cmd("SADD").arg(format!("{}:QUEUES", self.name_space)).arg(opts.qname) .query_async(con) .await?; Ok(res) } pub async fn delete_queue(&self, qname: &str) -> Result<Value, Error> { let con = self.pool.get() .await? .as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let key = self.message_zset_key(qname); redis::pipe() .atomic() .cmd("DEL").arg(format!("{}:Q", &key)).ignore() .cmd("DEL").arg(&key).ignore() .cmd("SREM").arg(format!("{}:QUEUES", self.name_space)).arg(qname).ignore() .query_async(con) .await .map_err(|e| e.into()) } pub async fn list_queues(&self) -> Result<Vec<String>, Error> { let con = self.pool.get() .await? .as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let key = format!("{}:QUEUES", self.name_space); redis::cmd("SMEMBERS") .arg(key) .query_async(con) .await .map_err(|e| e.into()) } async fn get_queue(&self, qname: &str, set_uid: bool) -> Result<(Queue, u64, Option<String>), Error> { let con = self.pool.get() .await? 
.as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let qkey = self.queue_hash_key(qname); let ((vt, delay, maxsize), (secs, micros)): ((u64, u64, i64), (u64, u64)) = redis::pipe() .atomic() .cmd("HMGET").arg(qkey).arg("vt").arg("delay").arg("maxsize") .cmd("TIME") .query_async(con) .await?; let ts_micros = secs * 1_000_000 + micros; let ts = ts_micros / 1_000; let q = Queue { qname: qname.into(), vt, delay, maxsize, ..Default::default() }; let uid = if set_uid { let ts_rad36 = radix::RadixNum::from(ts_micros).with_radix(36).unwrap().as_str().to_lowercase().to_string(); Some(ts_rad36 + &make_id_22()) } else { None }; Ok((q, ts, uid)) } pub async fn change_message_visibility(&self, qname: &str, msgid: &str, hidefor: u64) -> Result<u64, Error> { const LUA: &'static str = r#" local msg = redis.call("ZSCORE", KEYS[1], KEYS[2]) if not msg then return 0 end redis.call("ZADD", KEYS[1], KEYS[3], KEYS[2]) return 1"#; let (_, ts, _) = self.get_queue(&qname, false).await?; let key = self.message_zset_key(qname); let expires_at = ts + hidefor * 1000u64; let con = self.pool.get() .await? .as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; redis::Script::new(LUA) .key(key) .key(msgid) .key(expires_at) .invoke_async::<_, ()>(con) .await?; Ok(expires_at) } pub async fn send_message(&self, qname: &str, message: &str, delay: Option<u64>) -> Result<String, Error> { let (q, ts, uid) = self.get_queue(&qname, true).await?; let uid = uid.ok_or(format_err!("Did not get a proper uid back from Redis"))?; let delay = delay.unwrap_or(q.delay); if q.maxsize != -1 && message.as_bytes().len() > q.maxsize as usize { let custom_error = std::io::Error::new(std::io::ErrorKind::Other, "Message is too long"); let redis_err = RedisError::from(custom_error); return Err(redis_err.into()); } let key = self.message_zset_key(qname); let qky = self.queue_hash_key(qname); let con = self.pool.get() .await? 
.as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; redis::pipe().atomic() .cmd("ZADD").arg(&key).arg(ts + delay * 1000).arg(&uid).ignore() .cmd("HSET").arg(&qky).arg(&uid).arg(message).ignore() .cmd("HINCRBY").arg(&qky).arg("totalsent").arg(1).ignore() .query_async::<_, ()>(con) .await?; Ok(uid) } pub async fn delete_message(&self, qname: &str, msgid: &str) -> Result<bool, Error> { let key = self.message_zset_key(qname); let con = self.pool.get() .await? .as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let (delete_count, deleted_fields_count): (u32, u32) = redis::pipe() .atomic() .cmd("ZREM") .arg(&key) .arg(msgid) .cmd("HDEL") .arg(format!("{}:Q", &key)) .arg(msgid) .arg(format!("{}:rc", &key)) .arg(format!("{}:fr", &key)) .query_async(con) .await?; if delete_count == 1 && deleted_fields_count > 0 { Ok(true) } else { Ok(false) } } pub async fn pop_message(&self, qname: &str) -> Result<Message, Error> { const LUA: &'static str = r##" local msg = redis.call("ZRANGEBYSCORE", KEYS[1], "-inf", KEYS[2], "LIMIT", "0", "1") if #msg == 0 then return {} end redis.call("HINCRBY", KEYS[1] .. ":Q", "totalrecv", 1) local mbody = redis.call("HGET", KEYS[1] .. ":Q", msg[1]) local rc = redis.call("HINCRBY", KEYS[1] .. ":Q", msg[1] .. ":rc", 1) local o = {msg[1], mbody, rc} if rc==1 then table.insert(o, KEYS[2]) else local fr = redis.call("HGET", KEYS[1] .. ":Q", msg[1] .. ":fr") table.insert(o, fr) end redis.call("ZREM", KEYS[1], msg[1]) redis.call("HDEL", KEYS[1] .. ":Q", msg[1], msg[1] .. ":rc", msg[1] .. ":fr") return o "##; let (_, ts, _) = self.get_queue(qname, false).await?; let key = self.message_zset_key(qname); let con = self.pool.get() .await? 
.as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let m: Message = redis::Script::new(LUA) .key(key) .key(ts) .invoke_async(con) .await?; Ok(m) } pub async fn receive_message(&self, qname: &str, hidefor: Option<u64>) -> Result<Message, Error> { const LUA: &'static str = r##" local msg = redis.call("ZRANGEBYSCORE", KEYS[1], "-inf", KEYS[2], "LIMIT", "0", "1") if #msg == 0 then return {} end redis.call("ZADD", KEYS[1], KEYS[3], msg[1]) redis.call("HINCRBY", KEYS[1] .. ":Q", "totalrecv", 1) local mbody = redis.call("HGET", KEYS[1] .. ":Q", msg[1]) local rc = redis.call("HINCRBY", KEYS[1] .. ":Q", msg[1] .. ":rc", 1) local o = {msg[1], mbody, rc} if rc==1 then redis.call("HSET", KEYS[1] .. ":Q", msg[1] .. ":fr", KEYS[2]) table.insert(o, KEYS[2]) else local fr = redis.call("HGET", KEYS[1] .. ":Q", msg[1] .. ":fr") table.insert(o, fr) end return o "##; let (q, ts, _) = self.get_queue(&qname, false).await?; let hidefor = hidefor.unwrap_or(q.vt); let key = self.message_zset_key(qname); let expires_at = ts + hidefor * 1000u64; let con = self.pool.get() .await? .as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let m: Message = redis::Script::new(LUA) .key(key) .key(ts) .key(expires_at) .invoke_async(con) .await?; Ok(m) } pub async fn get_queue_attributes(&self, qname: &str) -> Result<Queue, Error> { let con = self.pool.get() .await? 
.as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let key = self.message_zset_key(qname); let qkey = self.queue_hash_key(qname); let (time, _): (String, u32) = redis::cmd("TIME") .query_async(con) .await?; let ts_str = format!("{}000", time); let out: ((u64, u64, i64, u64, u64, u64, u64), u64, u64) = redis::pipe().atomic() .cmd("HMGET") .arg(qkey) .arg("vt") .arg("delay") .arg("maxsize") .arg("totalrecv") .arg("totalsent") .arg("created") .arg("modified") .cmd("ZCARD") .arg(&key) .cmd("ZCOUNT") .arg(&key) .arg(ts_str) .arg("+inf") .query_async(con) .await?; let (vt, delay, maxsize, totalrecv, totalsent, created, modified) = out.0; let msgs = out.1; let hiddenmsgs = out.2; let q = Queue { qname: qname.into(), vt, delay, maxsize, totalrecv, totalsent, created, modified, msgs, hiddenmsgs, }; Ok(q) } pub async fn set_queue_attributes( &self, qname: &str, vt: Option<u64>, delay: Option<u64>, maxsize: Option<i64>, ) -> Result<Queue, Error> { let con = self.pool.get() .await? .as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let qkey = self.queue_hash_key(qname); let mut pipe = redis::pipe(); if vt.is_some() { pipe.cmd("HSET").arg(&qkey).arg("vt").arg(vt).ignore(); } if delay.is_some() { pipe.cmd("HSET").arg(&qkey).arg("delay").arg(delay).ignore(); } if maxsize.is_some() { pipe.cmd("HSET").arg(&qkey).arg("maxsize").arg(maxsize).ignore(); } pipe.atomic().query_async::<_, ()>(con).await?; let q = self.get_queue_attributes(qname).await?; Ok(q) } fn queue_hash_key(&self, qname: &str) -> String { format!("{}:{}:Q", self.name_space, qname) } fn message_zset_key(&self, qname: &str) -> String { format!("{}:{}", self.name_space, qname) } } fn make_id_22() -> String { use rand::{Rng, distributions::Alphanumeric}; rand::thread_rng() .sample_iter(&Alphanumeric) .take(22) .collect::<String>() }
e { ..Default::default() }; q.qname = qname.into(); q.vt = vt.unwrap_or(30); q.delay = delay.unwrap_or(0); q.maxsize = maxsize.unwrap_or(65536); q }
function_block-function_prefixed
[ { "content": "#[test]\n\nfn criterion_benchmark() {\n\n\tlet rsmq = block_on(Rsmq::new(\"redis://127.0.0.1/\", \"rsmq\"))\n\n\t\t.expect(\"Can't instantiate RSMQ\");\n\n\tlet q = Queue::new(\"bench-queue\", Some(60), Some(0), Some(1200));\n\n\tblock_on(rsmq.create_queue(q))\n\n\t\t.expect(\"queue creation faile...
Rust
day3/src/main.rs
thomas9911/aoc-2021
a226244802b69cef33ebed33a44a7537805d0d64
use std::collections::VecDeque; use std::convert::TryFrom; use std::fs::read_to_string; use std::path::Path; fn fetch_file_path() -> &'static str { if Path::new("src/input.txt").exists() { "src/input.txt" } else { "day3/src/input.txt" } } fn parse_input(path: &str) -> Option<Vec<VecDeque<u8>>> { let text = read_to_string(path).ok()?; let data: Vec<VecDeque<u8>> = text .lines() .map(|line| { line.chars() .map(|digit| { u8::try_from(digit.to_digit(2).expect("invalid binary")) .expect("one or zero always fits in u8") }) .collect() }) .collect(); Some(data) } fn transpose_input(mut input: Vec<VecDeque<u8>>) -> Vec<Vec<u8>> { let mut transposed_vec = Vec::new(); let size = input.len(); for _ in 0..input[0].len() { let mut tmp = Vec::with_capacity(size); for line in &mut input { tmp.push((*line).pop_front().expect("invalid input columns")); } transposed_vec.push(tmp) } transposed_vec } fn most_common_bits(data: &[Vec<u8>]) -> Vec<u8> { data.iter().map(|x| most_common_bit(x)).collect() } fn most_common_bit(list: &[u8]) -> u8 { let sum = list.iter().map(|x| *x as u64).sum::<u64>() as f32; let avg = sum / list.len() as f32; avg.round() as u8 } fn invert_bits(input: Vec<u8>) -> Vec<u8> { input.into_iter().map(|x| 1 - x).collect() } fn binary_to_number(input: &[u8]) -> i64 { let mut number = 0; for (position, bit) in input.iter().rev().enumerate() { number += (*bit as i64) * 2_i64.pow(position as u32); } number } fn find_rating<F, G>(mut input: Vec<VecDeque<u8>>, mut filter0: F, mut filter1: G) -> i64 where F: FnMut(&VecDeque<u8>, usize) -> bool, G: FnMut(&VecDeque<u8>, usize) -> bool, { let length = input[0].len(); for i in 0..length { let mut ones = 0; let mut zeroes = 0; for line in input.clone() { if line[i] == 1 { ones += 1 } else { zeroes += 1 } } input = if ones >= zeroes { input.into_iter().filter(|x| filter0(x, i)).collect() } else { input.into_iter().filter(|x| filter1(x, i)).collect() }; if input.len() == 1 { break; } } binary_to_number(&input[0].make_contiguous()) 
} fn main() -> Result<(), Box<dyn std::error::Error>> { let input_file = fetch_file_path(); println!("part one: {:?}", part_one(input_file)?); println!("part two: {:?}", part_two(input_file)?); Ok(()) } fn part_one(input_file: &str) -> Result<i64, Box<dyn std::error::Error>> { let input = parse_input(input_file).ok_or(String::from("invalid input"))?; let input = transpose_input(input); let most_common_bits = most_common_bits(&input); let gamma = binary_to_number(&most_common_bits); let least_common_bits = invert_bits(most_common_bits); let epsilon = binary_to_number(&least_common_bits); Ok(gamma * epsilon) } fn part_two(input_file: &str) -> Result<i64, Box<dyn std::error::Error>> { let input = parse_input(input_file).ok_or(String::from("invalid input"))?; let oxygen_generator_rating = find_rating(input.clone(), |x, i| x[i] == 1, |x, i| x[i] == 0); let co2_scrubber_rating = find_rating(input.clone(), |x, i| x[i] != 1, |x, i| x[i] != 0); Ok(oxygen_generator_rating * co2_scrubber_rating) } #[test] fn day3_one() { assert_eq!(1997414, part_one(fetch_file_path()).unwrap()) } #[test] fn day3_two() { assert_eq!(1032597, part_two(fetch_file_path()).unwrap()) }
use std::collections::VecDeque; use std::convert::TryFrom; use std::fs::read_to_string; use std::path::Path; fn fetch_file_path() -> &'static str { if Path::new("src/input.txt").exists() { "src/input.txt" } else { "day3/src/input.txt" } } fn parse_input(path: &str) -> Option<Vec<VecDeque<u8>>> { let text = read_to_string(path).ok()?; let data: Vec<VecDeque<u8>> = text .lines() .map(|line| { line.chars() .map(|digit| { u8::try_from(digit.to_digit(2).expect("invalid binary")) .expect("one or zero always fits in u8") }) .collect() }) .collect(); Some(data) } fn transpose_input(mut input: Vec<VecDeque<u8>>) -> Vec<Vec<u8>> { let mut transposed_vec = Vec::new(); let size = input.len(); for _ in 0..input[0].len() { let mut tmp = Vec::with_capacity(size); for line in &mut input { tmp.push((*line).pop_front().expect("invalid input columns")); } transposed_vec.push(tmp) } transposed_vec } fn most_common_bits(data: &[Vec<u8>]) -> Vec<u8> { data.iter().map(|x| most_common_bit(x)).collect() } fn most_common_bit(list: &[u8]) -> u8 { let sum = list.iter().map(|x| *x as u64).sum::<u64>() as f32; let avg = sum / list.len() as f32; avg.round() as u8 } fn invert_bits(input: Vec<u8>) -> Vec<u8> { input.into_iter().map(|x| 1 - x).collect() } fn binary_to_number(input: &[u8]) -> i64 { let mut number = 0; for (position, bit) in input.iter().rev().enumerate() { number += (*bit as i64) * 2_i64.pow(position as u32); } number } fn find_rating<F, G>(mut input: Vec<VecDeque<u8>>, mut filter0: F, mut filter1: G) -> i64 where F: FnMut(&VecDeque<u8>, usize) -> bool, G: FnMut(&VecDeque<u8>, usize) -> bool, { let length = input[0].len(); for i in 0..length { let mut ones = 0; let mut zeroes = 0; for line in input.clone() { if line[i] == 1 { ones += 1 } else { zeroes += 1 } } input = if ones >= zeroes { input.into_iter().filter(|x| filter0(x, i)).collect() } else { input.into_iter().filter(|x| filter1(x, i)).collect() }; if input.len() == 1 { break; } } binary_to_number(&input[0].make_contiguous()) 
}
fn part_one(input_file: &str) -> Result<i64, Box<dyn std::error::Error>> { let input = parse_input(input_file).ok_or(String::from("invalid input"))?; let input = transpose_input(input); let most_common_bits = most_common_bits(&input); let gamma = binary_to_number(&most_common_bits); let least_common_bits = invert_bits(most_common_bits); let epsilon = binary_to_number(&least_common_bits); Ok(gamma * epsilon) } fn part_two(input_file: &str) -> Result<i64, Box<dyn std::error::Error>> { let input = parse_input(input_file).ok_or(String::from("invalid input"))?; let oxygen_generator_rating = find_rating(input.clone(), |x, i| x[i] == 1, |x, i| x[i] == 0); let co2_scrubber_rating = find_rating(input.clone(), |x, i| x[i] != 1, |x, i| x[i] != 0); Ok(oxygen_generator_rating * co2_scrubber_rating) } #[test] fn day3_one() { assert_eq!(1997414, part_one(fetch_file_path()).unwrap()) } #[test] fn day3_two() { assert_eq!(1032597, part_two(fetch_file_path()).unwrap()) }
fn main() -> Result<(), Box<dyn std::error::Error>> { let input_file = fetch_file_path(); println!("part one: {:?}", part_one(input_file)?); println!("part two: {:?}", part_two(input_file)?); Ok(()) }
function_block-full_function
[ { "content": "fn parse_i64(input: &str) -> Result<i64, String> {\n\n input.parse::<i64>().map_err(|x| x.to_string())\n\n}\n\n\n\nimpl FromStr for Direction {\n\n type Err = String;\n\n\n\n fn from_str(input: &str) -> Result<Direction, Self::Err> {\n\n use Direction::*;\n\n\n\n let directi...
Rust
enroll/src/main.rs
galenguyer/gatekeeper-utils
46462104c62327eec80439f1e0e3c308b2cd8a59
extern crate serde; extern crate serde_json; extern crate libgatekeeper_sys; extern crate reqwest; use std::env; use clap::{App, Arg}; use libgatekeeper_sys::{Nfc, Realm}; use serde_json::json; use std::time::Duration; use std::thread; use std::io; use std::fmt; use serde::{Serialize, Deserialize}; use reqwest::StatusCode; use reqwest::header::AUTHORIZATION; use libgatekeeper_sys::NfcDevice; #[derive(Debug)] pub enum GatekeeperError { Unknown, } impl std::error::Error for GatekeeperError {} impl fmt::Display for GatekeeperError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { return f.write_str(match self { GatekeeperError::Unknown => "Haha ligma", }); } } #[derive(Debug, Serialize, Deserialize)] #[allow(non_snake_case)] struct KeyCreated { keyId: String, uid: String, doorsId: String, drinkId: String, memberProjectsId: String, } #[derive(Debug, Serialize, Deserialize)] #[allow(non_snake_case)] struct UserLookup { id: String, groups: Vec<String>, disabled: bool, } #[derive(Clone)] struct RealmKeys { auth_key: String, read_key: String, update_key: String, public_key: String, private_key: String, slot_name: String, slot: u8, } struct Provisions { doors: RealmKeys, drink: RealmKeys, member_projects: RealmKeys, prefix: String, system_secret: String, token: String, } fn create_realm(keys: RealmKeys, association: String) -> Realm { return Realm::new( keys.slot, &keys.slot_name.clone(), &association, &keys.auth_key, &keys.read_key, &keys.update_key, &keys.public_key, &keys.private_key ).unwrap(); } fn resolve_id(client: &reqwest::blocking::Client, prefix: String, token: String, username: String) -> Result<UserLookup, Box<dyn std::error::Error>> { let res = client.get( prefix + "/users/uuid-by-uid/" + &username.to_string() ).header(AUTHORIZATION, token).send()?; return match res.status() { StatusCode::OK => match res.json::<UserLookup>() { Ok(user) => Ok(user), Err(_) => Err(Box::new(GatekeeperError::Unknown)), }, StatusCode::NOT_FOUND => { println!("User {} doesn't 
exist!", username); Err(Box::new(GatekeeperError::Unknown)) }, status => { println!("Couldn't lookup user {}! {:?}", username, status); Err(Box::new(GatekeeperError::Unknown)) }, }; } fn check_uid(client: &reqwest::blocking::Client, prefix: String, token: String, association: String) -> Result<String, Box<dyn std::error::Error>> { let res = client.get( prefix + "/keys/by-association/" + &association.to_string() ).header(AUTHORIZATION, token).send()?; return match res.status() { StatusCode::OK => match res.json::<KeyCreated>() { Ok(key) => Ok(key.uid), Err(_) => Err(Box::new(GatekeeperError::Unknown)), }, StatusCode::NOT_FOUND => { println!("Key {} doesn't exist!", association); Err(Box::new(GatekeeperError::Unknown)) }, status => { println!("Couldn't lookup key {}! {:?}", association, status); Err(Box::new(GatekeeperError::Unknown)) }, } } fn main() { dotenv::dotenv().ok(); let matches = App::new("Gatekeeper Door") .version("0.1.0") .author("Steven Mirabito <steven@stevenmirabito.com>") .about("Door lock client software for the Gatekeeper access control system") .arg(Arg::with_name("DEVICE") .help("Device connection string (e.g. 
'pn532_uart:/dev/ttyUSB0')") .required(true) .index(1)) .get_matches(); let conn_str = matches.value_of("DEVICE").unwrap().to_string(); let mut nfc = Nfc::new().ok_or("failed to create NFC context").unwrap(); let mut device = nfc.gatekeeper_device(conn_str).ok_or("failed to get gatekeeper device").unwrap(); let client = reqwest::blocking::Client::new(); let provisions = Provisions { doors: RealmKeys { slot: 0, slot_name: "Doors".to_string(), auth_key: env::var("GK_REALM_DOORS_AUTH_KEY").unwrap(), read_key: env::var("GK_REALM_DOORS_READ_KEY").unwrap(), update_key: env::var("GK_REALM_DOORS_UPDATE_KEY").unwrap(), public_key: env::var("GK_REALM_DOORS_PUBLIC_KEY").unwrap(), private_key: env::var("GK_REALM_DOORS_PRIVATE_KEY").unwrap() }, drink: RealmKeys { slot: 1, slot_name: "Drink".to_string(), auth_key: env::var("GK_REALM_DRINK_AUTH_KEY").unwrap(), read_key: env::var("GK_REALM_DRINK_READ_KEY").unwrap(), update_key: env::var("GK_REALM_DRINK_UPDATE_KEY").unwrap(), public_key: env::var("GK_REALM_DRINK_PUBLIC_KEY").unwrap(), private_key: env::var("GK_REALM_DRINK_PRIVATE_KEY").unwrap() }, member_projects: RealmKeys { slot: 2, slot_name: "Member Projects".to_string(), auth_key: env::var("GK_REALM_MEMBER_PROJECTS_AUTH_KEY").unwrap(), read_key: env::var("GK_REALM_MEMBER_PROJECTS_READ_KEY").unwrap(), update_key: env::var("GK_REALM_MEMBER_PROJECTS_UPDATE_KEY").unwrap(), public_key: env::var("GK_REALM_MEMBER_PROJECTS_PUBLIC_KEY").unwrap(), private_key: env::var("GK_REALM_MEMBER_PROJECTS_PRIVATE_KEY").unwrap() }, system_secret: env::var("GK_SYSTEM_SECRET").unwrap_or("b00".to_string()), prefix: env::var("GK_HTTP_ENDPOINT").unwrap_or("http://localhost:3000".to_string()) + "/admin", token: env::var("GK_ADMIN_SECRETS").unwrap() }; loop { let mut username: String = "".to_string(); println!("Enter username:"); if let Ok(_) = io::stdin().read_line(&mut username) { if let Err(err) = create_tag(&client, &mut username, &provisions, &mut device) { eprintln!("Couldn't create tag for user! 
{:?}", err); } } } } fn create_tag(client: &reqwest::blocking::Client, username: &mut String, provisions: &Provisions, device: &mut NfcDevice) -> Result<(), Box<dyn std::error::Error>> { username.pop(); let resolution = resolve_id( &client, provisions.prefix.clone(), provisions.token.clone(), username.clone() )?; let uuid = resolution.id; println!("Ok, enrolling {}", username); println!("Ready to register for {}! Please scan a tag to enroll it", username); loop { let tag = device.first_tag(); if let Some(mut tag) = tag { let uid = match tag.authenticate( &mut create_realm( provisions.doors.clone(), "".to_string() ) ) { Ok(association) => check_uid( &client, provisions.prefix.clone(), provisions.token.clone(), association ).ok(), Err(_) => None, }; let uid_str = match &uid { Some(uid) => Some(uid.as_str()), None => None, }; if let Some(uid_str) = uid_str { println!("Formatting tag with uid {}", uid_str); } else { println!("Formatting tag with no care for UID"); } match tag.format( uid_str, Some(&provisions.system_secret.clone()) ) { Ok(_) => { println!("Formatted tag"); }, Err(err) => { println!("Failed formatting tag: {:?}", err); continue; } } let new_uid = match uid_str { Some(uid) => uid.to_string(), None => tag.get_uid().unwrap(), }; println!("Formatted tag, now telling server about new key with uid {}!", new_uid.clone()); let res = client.put(provisions.prefix.clone() + "/keys") .json(&json!({ "userId": uuid, "uid": new_uid.clone(), })) .header(AUTHORIZATION, provisions.token.clone()).send()?; let data = res.json::<KeyCreated>()?; let mut realms: Vec<&mut Realm> = Vec::new(); let mut doors = create_realm( provisions.doors.clone(), data.doorsId.clone() ); realms.push(&mut doors); let mut drink = create_realm( provisions.drink.clone(), data.drinkId.clone() ); realms.push(&mut drink); let mut member_projects = create_realm( provisions.member_projects.clone(), data.memberProjectsId.clone() ); realms.push(&mut member_projects); match 
tag.issue(&provisions.system_secret.clone(), uid_str, realms) { Ok(_) => { let res_result = client.patch( provisions.prefix.clone() + "/keys/" + &data.keyId ).header(AUTHORIZATION, provisions.token.clone()).json(&json!({ "enabled": true })).send(); match res_result { Ok(res) => match res.status() { StatusCode::NO_CONTENT => println!("Issued for {}!", username), status => { println!("Failed to associate key with user! {:?}", status); continue; } }, Err(error) => { println!("Failed to associate key with user! {:?}", error); continue; } } break; }, Err(err) => { println!("Failed issuing... {:?}", err); } } } thread::sleep(Duration::from_millis(200)); } return Ok(()); }
extern crate serde; extern crate serde_json; extern crate libgatekeeper_sys; extern crate reqwest; use std::env; use clap::{App, Arg}; use libgatekeeper_sys::{Nfc, Realm}; use serde_json::json; use std::time::Duration; use std::thread; use std::io; use std::fmt; use serde::{Serialize, Deserialize}; use reqwest::StatusCode; use reqwest::header::AUTHORIZATION; use libgatekeeper_sys::NfcDevice; #[derive(Debug)] pub enum GatekeeperError { Unknown, } impl std::error::Error for GatekeeperError {} impl fmt::Display for GatekeeperError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { return f.write_str(match self { GatekeeperError::Unknown => "Haha ligma", }); } } #[derive(Debug, Serialize, Deserialize)] #[allow(non_snake_case)] struct KeyCreated { keyId: String, uid: String, doorsId: String, drinkId: String, memberProjectsId: String, } #[derive(Debug, Serialize, Deserialize)] #[allow(non_snake_case)] struct UserLookup { id: String, groups: Vec<String>, disabled: bool, } #[derive(Clone)] struct RealmKeys { auth_key: String, read_key: String, update_key: String, public_key: String, private_key: String, slot_name: String, slot: u8, } struct Provisions { doors: RealmKeys, drink: RealmKeys, member_projects: RealmKeys, prefix: String, system_secret: String, token: String, } fn create_realm(keys: RealmKeys, association: String) -> Realm { return Realm::new( keys.slot, &keys.slot_name.clone(), &association, &keys.auth_key, &keys.read_key, &keys.update_key, &keys.public_key, &keys.private_key ).unwrap(); } fn resolve_id(client: &reqwest::blocking::Client, prefix: String, token: String, username: String) -> Result<UserLookup, Box<dyn std::error::Error>> {
return match res.status() { StatusCode::OK => match res.json::<UserLookup>() { Ok(user) => Ok(user), Err(_) => Err(Box::new(GatekeeperError::Unknown)), }, StatusCode::NOT_FOUND => { println!("User {} doesn't exist!", username); Err(Box::new(GatekeeperError::Unknown)) }, status => { println!("Couldn't lookup user {}! {:?}", username, status); Err(Box::new(GatekeeperError::Unknown)) }, }; } fn check_uid(client: &reqwest::blocking::Client, prefix: String, token: String, association: String) -> Result<String, Box<dyn std::error::Error>> { let res = client.get( prefix + "/keys/by-association/" + &association.to_string() ).header(AUTHORIZATION, token).send()?; return match res.status() { StatusCode::OK => match res.json::<KeyCreated>() { Ok(key) => Ok(key.uid), Err(_) => Err(Box::new(GatekeeperError::Unknown)), }, StatusCode::NOT_FOUND => { println!("Key {} doesn't exist!", association); Err(Box::new(GatekeeperError::Unknown)) }, status => { println!("Couldn't lookup key {}! {:?}", association, status); Err(Box::new(GatekeeperError::Unknown)) }, } } fn main() { dotenv::dotenv().ok(); let matches = App::new("Gatekeeper Door") .version("0.1.0") .author("Steven Mirabito <steven@stevenmirabito.com>") .about("Door lock client software for the Gatekeeper access control system") .arg(Arg::with_name("DEVICE") .help("Device connection string (e.g. 
'pn532_uart:/dev/ttyUSB0')") .required(true) .index(1)) .get_matches(); let conn_str = matches.value_of("DEVICE").unwrap().to_string(); let mut nfc = Nfc::new().ok_or("failed to create NFC context").unwrap(); let mut device = nfc.gatekeeper_device(conn_str).ok_or("failed to get gatekeeper device").unwrap(); let client = reqwest::blocking::Client::new(); let provisions = Provisions { doors: RealmKeys { slot: 0, slot_name: "Doors".to_string(), auth_key: env::var("GK_REALM_DOORS_AUTH_KEY").unwrap(), read_key: env::var("GK_REALM_DOORS_READ_KEY").unwrap(), update_key: env::var("GK_REALM_DOORS_UPDATE_KEY").unwrap(), public_key: env::var("GK_REALM_DOORS_PUBLIC_KEY").unwrap(), private_key: env::var("GK_REALM_DOORS_PRIVATE_KEY").unwrap() }, drink: RealmKeys { slot: 1, slot_name: "Drink".to_string(), auth_key: env::var("GK_REALM_DRINK_AUTH_KEY").unwrap(), read_key: env::var("GK_REALM_DRINK_READ_KEY").unwrap(), update_key: env::var("GK_REALM_DRINK_UPDATE_KEY").unwrap(), public_key: env::var("GK_REALM_DRINK_PUBLIC_KEY").unwrap(), private_key: env::var("GK_REALM_DRINK_PRIVATE_KEY").unwrap() }, member_projects: RealmKeys { slot: 2, slot_name: "Member Projects".to_string(), auth_key: env::var("GK_REALM_MEMBER_PROJECTS_AUTH_KEY").unwrap(), read_key: env::var("GK_REALM_MEMBER_PROJECTS_READ_KEY").unwrap(), update_key: env::var("GK_REALM_MEMBER_PROJECTS_UPDATE_KEY").unwrap(), public_key: env::var("GK_REALM_MEMBER_PROJECTS_PUBLIC_KEY").unwrap(), private_key: env::var("GK_REALM_MEMBER_PROJECTS_PRIVATE_KEY").unwrap() }, system_secret: env::var("GK_SYSTEM_SECRET").unwrap_or("b00".to_string()), prefix: env::var("GK_HTTP_ENDPOINT").unwrap_or("http://localhost:3000".to_string()) + "/admin", token: env::var("GK_ADMIN_SECRETS").unwrap() }; loop { let mut username: String = "".to_string(); println!("Enter username:"); if let Ok(_) = io::stdin().read_line(&mut username) { if let Err(err) = create_tag(&client, &mut username, &provisions, &mut device) { eprintln!("Couldn't create tag for user! 
{:?}", err); } } } } fn create_tag(client: &reqwest::blocking::Client, username: &mut String, provisions: &Provisions, device: &mut NfcDevice) -> Result<(), Box<dyn std::error::Error>> { username.pop(); let resolution = resolve_id( &client, provisions.prefix.clone(), provisions.token.clone(), username.clone() )?; let uuid = resolution.id; println!("Ok, enrolling {}", username); println!("Ready to register for {}! Please scan a tag to enroll it", username); loop { let tag = device.first_tag(); if let Some(mut tag) = tag { let uid = match tag.authenticate( &mut create_realm( provisions.doors.clone(), "".to_string() ) ) { Ok(association) => check_uid( &client, provisions.prefix.clone(), provisions.token.clone(), association ).ok(), Err(_) => None, }; let uid_str = match &uid { Some(uid) => Some(uid.as_str()), None => None, }; if let Some(uid_str) = uid_str { println!("Formatting tag with uid {}", uid_str); } else { println!("Formatting tag with no care for UID"); } match tag.format( uid_str, Some(&provisions.system_secret.clone()) ) { Ok(_) => { println!("Formatted tag"); }, Err(err) => { println!("Failed formatting tag: {:?}", err); continue; } } let new_uid = match uid_str { Some(uid) => uid.to_string(), None => tag.get_uid().unwrap(), }; println!("Formatted tag, now telling server about new key with uid {}!", new_uid.clone()); let res = client.put(provisions.prefix.clone() + "/keys") .json(&json!({ "userId": uuid, "uid": new_uid.clone(), })) .header(AUTHORIZATION, provisions.token.clone()).send()?; let data = res.json::<KeyCreated>()?; let mut realms: Vec<&mut Realm> = Vec::new(); let mut doors = create_realm( provisions.doors.clone(), data.doorsId.clone() ); realms.push(&mut doors); let mut drink = create_realm( provisions.drink.clone(), data.drinkId.clone() ); realms.push(&mut drink); let mut member_projects = create_realm( provisions.member_projects.clone(), data.memberProjectsId.clone() ); realms.push(&mut member_projects); match 
tag.issue(&provisions.system_secret.clone(), uid_str, realms) { Ok(_) => { let res_result = client.patch( provisions.prefix.clone() + "/keys/" + &data.keyId ).header(AUTHORIZATION, provisions.token.clone()).json(&json!({ "enabled": true })).send(); match res_result { Ok(res) => match res.status() { StatusCode::NO_CONTENT => println!("Issued for {}!", username), status => { println!("Failed to associate key with user! {:?}", status); continue; } }, Err(error) => { println!("Failed to associate key with user! {:?}", error); continue; } } break; }, Err(err) => { println!("Failed issuing... {:?}", err); } } } thread::sleep(Duration::from_millis(200)); } return Ok(()); }
let res = client.get( prefix + "/users/uuid-by-uid/" + &username.to_string() ).header(AUTHORIZATION, token).send()?;
assignment_statement
[ { "content": "# Gatekeeper Utilities\n\n\n\nGatekeeper powers access to the doors to [Computer Science House](https://csh.rit.edu/)'s special rooms.\n\n\n\nThis repository contains tools for enrolling and manipulating tags.\n\n\n\n# Building\n\n\n\nTo download and build this project on a development machine, ru...
Rust
src/client.rs
jo/wcb-rs
263fc1af02874fab770c5d192294d3d37efd96fa
use crate::cli; use webcryptobox::*; use std::io::{Error, Read, Write}; use std::path::PathBuf; use std::{fs, io}; fn read_file(filename: &PathBuf) -> Vec<u8> { fs::read(&filename).unwrap() } fn read_hex(key: &String) -> Vec<u8> { hex::decode(key).unwrap() } fn read_file_or_stdin(filename: &Option<PathBuf>) -> Vec<u8> { match &filename { Some(path) => read_file(&path), None => { let mut data = Vec::new(); io::stdin().read_to_end(&mut data).unwrap(); data } } } fn read_base64_file_or_stdin(filename: &Option<PathBuf>) -> Vec<u8> { let mut data = read_file_or_stdin(&filename); data.retain(|&x| { x == 43 || (x >= 47 && x <= 57) || x == 61 || (x >= 65 && x <= 90) || (x >= 97 && x <= 122) }); base64::decode(&data).unwrap() } fn write_file_or_stdout(filename: &Option<PathBuf>, data: &Vec<u8>) { match &filename { Some(path) => fs::write(path, data).expect("Unable to write file"), None => io::stdout().write_all(data).expect("Unable to write to stdout") } } fn write_hex_file_or_stdout(filename: &Option<PathBuf>, data: &Vec<u8>) { let data_hex = hex::encode(data); match &filename { Some(path) => fs::write(path, data_hex).expect("Unable to write file"), None => println!("{}", data_hex), } } fn write_base64_file_or_stdout(filename: &Option<PathBuf>, data: &Vec<u8>) { let data_base64 = base64::encode(data); match &filename { Some(path) => fs::write(path, data_base64).expect("Unable to write file"), None => println!("{}", data_base64), } } pub struct Wcb { args: cli::Args, } impl Wcb { pub fn new(args: cli::Args) -> Self { Wcb { args } } pub fn run(&self) -> Result<(), Error> { match &self.args.command { cli::Commands::PrivateKey { output_filename } => { let key = generate_private_key().unwrap(); let pem = export_private_key_pem(key).unwrap(); write_file_or_stdout(&output_filename, &pem); } cli::Commands::Key { output_filename } => { let key = generate_key().unwrap(); write_hex_file_or_stdout(&output_filename, &key) } cli::Commands::PublicKey { filename, output_filename, } => { 
let pem = read_file_or_stdin(&filename); let key = import_private_key_pem(&pem).unwrap(); let public_key = get_public_key(&key).unwrap(); let pem = export_public_key_pem(&public_key).unwrap(); write_file_or_stdout(&output_filename, &pem); } cli::Commands::Fingerprint { filename, sha_type, output_filename, } => { let pem = read_file_or_stdin(&filename); let data = match pem.starts_with(b"-----BEGIN PRIVATE KEY-----") { true => { let key = import_private_key_pem(&pem).unwrap(); match sha_type { cli::ShaType::Sha1 => sha1_fingerprint_from_private_key(&key).unwrap(), cli::ShaType::Sha256 => { sha256_fingerprint_from_private_key(&key).unwrap() } } } _ => { let key = import_public_key_pem(&pem).unwrap(); match sha_type { cli::ShaType::Sha1 => sha1_fingerprint_from_public_key(&key).unwrap(), cli::ShaType::Sha256 => { sha256_fingerprint_from_public_key(&key).unwrap() } } } }; write_hex_file_or_stdout(&output_filename, &data) } cli::Commands::DeriveKey { private_key_filename, public_key_filename, output_filename, } => { let private_key_pem = read_file(&private_key_filename); let private_key = import_private_key_pem(&private_key_pem).unwrap(); let public_key_pem = read_file_or_stdin(&public_key_filename); let public_key = import_public_key_pem(&public_key_pem).unwrap(); let key = derive_key(private_key, public_key).unwrap(); write_hex_file_or_stdout(&output_filename, &key) } cli::Commands::DerivePassword { private_key_filename, public_key_filename, length, output_filename, } => { let private_key_pem = read_file(&private_key_filename); let private_key = import_private_key_pem(&private_key_pem).unwrap(); let public_key_pem = read_file_or_stdin(&public_key_filename); let public_key = import_public_key_pem(&public_key_pem).unwrap(); let password = derive_password(private_key, public_key, length).unwrap(); write_hex_file_or_stdout(&output_filename, &password) } cli::Commands::EncryptPrivateKey { filename, passphrase, output_filename, } => { let private_key_pem = 
read_file_or_stdin(&filename); let private_key = import_private_key_pem(&private_key_pem).unwrap(); let pem = export_encrypted_private_key_pem(private_key, passphrase.as_bytes()).unwrap(); write_file_or_stdout(&output_filename, &pem); } cli::Commands::DecryptPrivateKey { filename, passphrase, output_filename, } => { let encrypted_private_key_pem = read_file_or_stdin(&filename); let private_key = import_encrypted_private_key_pem( &encrypted_private_key_pem, passphrase.as_bytes(), ) .unwrap(); let pem = export_private_key_pem(private_key).unwrap(); write_file_or_stdout(&output_filename, &pem); } cli::Commands::EncryptPrivateKeyTo { filename, private_key_filename, public_key_filename, output_filename, } => { let private_key_pem = read_file(&private_key_filename); let private_key = import_private_key_pem(&private_key_pem).unwrap(); let public_key_pem = read_file(&public_key_filename); let public_key = import_public_key_pem(&public_key_pem).unwrap(); let key_pem = read_file_or_stdin(&filename); let key = import_private_key_pem(&key_pem).unwrap(); let pem = export_encrypted_private_key_pem_to(key, private_key, public_key).unwrap(); write_file_or_stdout(&output_filename, &pem); } cli::Commands::DecryptPrivateKeyFrom { private_key_filename, public_key_filename, filename, output_filename, } => { let private_key_pem = read_file(&private_key_filename); let private_key = import_private_key_pem(&private_key_pem).unwrap(); let public_key_pem = read_file(&public_key_filename); let public_key = import_public_key_pem(&public_key_pem).unwrap(); let encrypted_key_pem = read_file_or_stdin(&filename); let key = import_encrypted_private_key_pem_from( &encrypted_key_pem, private_key, public_key, ) .unwrap(); let pem = export_private_key_pem(key).unwrap(); write_file_or_stdout(&output_filename, &pem); } cli::Commands::Encrypt { key, filename, output_filename, base64, } => { let key = read_hex(&key); let data = read_file_or_stdin(&filename); let encrypted_data = encrypt(&key, 
&data).unwrap(); if *base64 { write_base64_file_or_stdout(&output_filename, &encrypted_data) } else { write_file_or_stdout(&output_filename, &encrypted_data) } } cli::Commands::Decrypt { key, filename, output_filename, base64, } => { let key = read_hex(&key); let data = match base64 { true => read_base64_file_or_stdin(&filename), false => read_file_or_stdin(&filename) }; let decrypted_data = decrypt(&key, &data).unwrap(); write_file_or_stdout(&output_filename, &decrypted_data) } cli::Commands::EncryptTo { private_key_filename, public_key_filename, filename, output_filename, base64, } => { let private_key_pem = read_file(&private_key_filename); let private_key = import_private_key_pem(&private_key_pem).unwrap(); let public_key_pem = read_file(&public_key_filename); let public_key = import_public_key_pem(&public_key_pem).unwrap(); let data = read_file_or_stdin(&filename); let encrypted_data = derive_and_encrypt(private_key, public_key, &data).unwrap(); if *base64 { write_base64_file_or_stdout(&output_filename, &encrypted_data) } else { write_file_or_stdout(&output_filename, &encrypted_data) } } cli::Commands::DecryptFrom { private_key_filename, public_key_filename, filename, output_filename, base64, } => { let private_key_pem = read_file(&private_key_filename); let private_key = import_private_key_pem(&private_key_pem).unwrap(); let public_key_pem = read_file(&public_key_filename); let public_key = import_public_key_pem(&public_key_pem).unwrap(); let data = match base64 { true => read_base64_file_or_stdin(&filename), false => read_file_or_stdin(&filename) }; let decrypted_data = derive_and_decrypt(private_key, public_key, &data).unwrap(); write_file_or_stdout(&output_filename, &decrypted_data) } } Ok(()) } }
use crate::cli; use webcryptobox::*; use std::io::{Error, Read, Write}; use std::path::PathBuf; use std::{fs, io}; fn read_file(filename: &PathBuf) -> Vec<u8> { fs::read(&filename).unwrap() } fn read_hex(key: &String) -> Vec<u8> { hex::decode(key).unwrap() } fn read_file_or_stdin(filename: &Option<PathBuf>) -> Vec<u8> { match &filename { Some(path) => read_file(&path), None => { let mut data = Vec::new(); io::stdin().read_to_end(&mut data).unwrap(); data } } } fn read_base64_file_or_stdin(filename: &Option<PathBuf>) -> Vec<u8> { let mut data = read_file_or_stdin(&filename); data.retain(|&x| { x == 43 || (x >= 47 && x <= 57) || x == 61 || (x >= 65 && x <= 90) || (x >= 97 && x <= 122) }); base64::decode(&data).unwrap() } fn write_file_or_stdout(filename: &Option<PathBuf>, data: &Vec<u8>) { match &filename { Some(path) => fs::write(path, data).expect("Unable to write file"), None => io::stdout().write_all(data).expect("Unable to write to stdout") } } fn write_hex_file_or_stdout(filename: &Option<PathBuf>, data: &Vec<u8>) { let data_hex = hex::encode(data); match &filename { Some(path) => fs::write(path, data_hex).expect("Unable to write file"), None => println!("{}", data_hex), } } fn write_base64_file_or_stdout(filename: &Option<PathBuf>, data: &Vec<u8>) { let data_base64 = base64::encode(data); match &filename { Some(path) => fs::write(path, data_base64).expect("Unable to write file"), None => println!("{}", data_base64), } } pub struct Wcb { args: cli::Args, } impl Wcb { pub fn new(args: cli::Args) -> Self { Wcb { args } } pub fn run(&self) -> Result<(), Error> { match &self.args.command { cli::Commands::PrivateKey { output_filename } => { let key = generate_private_key().unwrap(); let pem = export_private_key_pem(key).unwrap(); write_file_or_stdout(&output_filename, &pem); } cli::Commands::Key { output_filename } => { let key = generate_key().unwrap(); write_hex_file_or_stdout(&output_filename, &key) } cli::Commands::PublicKey { filename, output_filename, } => { 
let pem = read_file_or_stdin(&filename); let key = import_private_key_pem(&pem).unwrap(); let public_key = get_public_key(&key).unwrap(); let pem = export_public_key_pem(&public_key).unwrap(); write_file_or_stdout(&output_filename, &pem); } cli::Commands::Fingerprint { filename, sha_type, output_filename, } => { let pem = read_file_or_stdin(&filename); let data = match pem.starts_with(b"-----BEGIN PRIVATE KEY-----") { true => { let key = import_private_key_pem(&pem).unwrap(); match sha_type { cli::ShaType::Sha1 => sha1_fingerprint_from_private_key(&key).unwrap(), cli::ShaType::Sha256 => { sha256_fingerprint_from_private_key(&key).unwrap() } } } _ => { let key = import_public_key_pem(&pem).unwrap(); match sha_type { cli::ShaType::Sha1 => sha1_fingerprint_from_public_key(&key).unwrap(), cli::ShaType::Sha256 => { sha256_fingerprint_from_public_key(&key).unwrap() } } } }; write_hex_file_or_stdout(&output_filename, &data) } cli::Commands::DeriveKey { private_key_filename, public_key_filename, output_filename, } => { let private_key_pem = read_file(&private_key_filename); let private_key = import_private_key_pem(&private_key_pem).unwrap(); let public_key_pem = read_file_or_stdin(&public_key_filename); let public_key = import_public_key_pem(&public_key_pem).unwrap(); let key = derive_key(private_key, public_key).unwrap(); write_hex_file_or_stdout(&output_filename, &key) } cli::Commands::DerivePassword { private_key_filename, public_key_filename, length, output_filename, } => { let private_key_pem = read_file(&private_key_filename); let private_key = import_private_key_pem(&private_key_pem).unwrap(); let public_key_pem = read_file_or_stdin(&public_key_filename); let public_key = import_public_key_pem(&public_key_pem).unwrap(); let password = derive_password(private_key, public_key, length).unwrap(); write_hex_file_or_stdout(&output_filename, &password) } cli::Commands::EncryptPrivateKey { filename, passphrase, output_filename, } => { let private_key_pem = 
read_file_or_stdin(&filename); let private_key = import_private_key_pem(&private_key_pem).unwrap(); let pem = export_encrypted_private_key_pem(private_key, passphrase.as_bytes()).unwrap(); write_file_or_stdout(&output_filename, &pem); } cli::Commands::DecryptPrivateKey { filename, passphrase, output_filename, } => { let encrypted_private_key_pem = read_file_or_stdin(&filename); let private_key =
.unwrap(); let pem = export_private_key_pem(private_key).unwrap(); write_file_or_stdout(&output_filename, &pem); } cli::Commands::EncryptPrivateKeyTo { filename, private_key_filename, public_key_filename, output_filename, } => { let private_key_pem = read_file(&private_key_filename); let private_key = import_private_key_pem(&private_key_pem).unwrap(); let public_key_pem = read_file(&public_key_filename); let public_key = import_public_key_pem(&public_key_pem).unwrap(); let key_pem = read_file_or_stdin(&filename); let key = import_private_key_pem(&key_pem).unwrap(); let pem = export_encrypted_private_key_pem_to(key, private_key, public_key).unwrap(); write_file_or_stdout(&output_filename, &pem); } cli::Commands::DecryptPrivateKeyFrom { private_key_filename, public_key_filename, filename, output_filename, } => { let private_key_pem = read_file(&private_key_filename); let private_key = import_private_key_pem(&private_key_pem).unwrap(); let public_key_pem = read_file(&public_key_filename); let public_key = import_public_key_pem(&public_key_pem).unwrap(); let encrypted_key_pem = read_file_or_stdin(&filename); let key = import_encrypted_private_key_pem_from( &encrypted_key_pem, private_key, public_key, ) .unwrap(); let pem = export_private_key_pem(key).unwrap(); write_file_or_stdout(&output_filename, &pem); } cli::Commands::Encrypt { key, filename, output_filename, base64, } => { let key = read_hex(&key); let data = read_file_or_stdin(&filename); let encrypted_data = encrypt(&key, &data).unwrap(); if *base64 { write_base64_file_or_stdout(&output_filename, &encrypted_data) } else { write_file_or_stdout(&output_filename, &encrypted_data) } } cli::Commands::Decrypt { key, filename, output_filename, base64, } => { let key = read_hex(&key); let data = match base64 { true => read_base64_file_or_stdin(&filename), false => read_file_or_stdin(&filename) }; let decrypted_data = decrypt(&key, &data).unwrap(); write_file_or_stdout(&output_filename, &decrypted_data) } 
cli::Commands::EncryptTo { private_key_filename, public_key_filename, filename, output_filename, base64, } => { let private_key_pem = read_file(&private_key_filename); let private_key = import_private_key_pem(&private_key_pem).unwrap(); let public_key_pem = read_file(&public_key_filename); let public_key = import_public_key_pem(&public_key_pem).unwrap(); let data = read_file_or_stdin(&filename); let encrypted_data = derive_and_encrypt(private_key, public_key, &data).unwrap(); if *base64 { write_base64_file_or_stdout(&output_filename, &encrypted_data) } else { write_file_or_stdout(&output_filename, &encrypted_data) } } cli::Commands::DecryptFrom { private_key_filename, public_key_filename, filename, output_filename, base64, } => { let private_key_pem = read_file(&private_key_filename); let private_key = import_private_key_pem(&private_key_pem).unwrap(); let public_key_pem = read_file(&public_key_filename); let public_key = import_public_key_pem(&public_key_pem).unwrap(); let data = match base64 { true => read_base64_file_or_stdin(&filename), false => read_file_or_stdin(&filename) }; let decrypted_data = derive_and_decrypt(private_key, public_key, &data).unwrap(); write_file_or_stdout(&output_filename, &decrypted_data) } } Ok(()) } }
import_encrypted_private_key_pem( &encrypted_private_key_pem, passphrase.as_bytes(), )
call_expression
[ { "content": "fn main() -> Result<(), Error> {\n\n let outdir = match env::var_os(\"OUT_DIR\") {\n\n None => return Ok(()),\n\n Some(outdir) => outdir,\n\n };\n\n\n\n let mut app = Args::command();\n\n let path = generate_to(Bash, &mut app, \"wcb\", outdir)?;\n\n\n\n println!(\"carg...
Rust
src/fib/stream_ring.rs
rmja/drone-core
01f463ba1fd42655ed8edae79d5ba9dcb308b51b
use crate::{ fib::{self, Fiber}, sync::spsc::ring::{channel, Receiver, SendError, SendErrorKind}, thr::prelude::*, }; use core::{ convert::identity, pin::Pin, task::{Context, Poll}, }; use futures::Stream; #[must_use = "streams do nothing unless you `.await` or poll them"] pub struct FiberStreamRing<T> { rx: Receiver<T, !>, } #[must_use = "streams do nothing unless you `.await` or poll them"] pub struct TryFiberStreamRing<T, E> { rx: Receiver<T, E>, } impl<T> FiberStreamRing<T> { #[inline] pub fn close(&mut self) { self.rx.close() } } impl<T, E> TryFiberStreamRing<T, E> { #[inline] pub fn close(&mut self) { self.rx.close() } } impl<T> Stream for FiberStreamRing<T> { type Item = T; #[inline] fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> { let rx = unsafe { self.map_unchecked_mut(|x| &mut x.rx) }; rx.poll_next(cx).map(|value| { value.map(|value| match value { Ok(value) => value, }) }) } } impl<T, E> Stream for TryFiberStreamRing<T, E> { type Item = Result<T, E>; #[inline] fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> { let rx = unsafe { self.map_unchecked_mut(|x| &mut x.rx) }; rx.poll_next(cx) } } pub trait ThrFiberStreamRing: ThrToken { #[inline] fn add_saturating_stream<F, T>(self, capacity: usize, fib: F) -> FiberStreamRing<T> where F: Fiber<Input = (), Yield = Option<T>, Return = Option<T>>, F: Send + 'static, T: Send + 'static, { FiberStreamRing { rx: add_rx(self, capacity, |_| Ok(()), fib, Ok) } } #[inline] fn add_overwriting_stream<F, T>(self, capacity: usize, fib: F) -> FiberStreamRing<T> where F: Fiber<Input = (), Yield = Option<T>, Return = Option<T>>, F: Send + 'static, T: Send + 'static, { FiberStreamRing { rx: add_rx_overwrite(self, capacity, fib, Ok) } } #[inline] fn add_try_stream<O, F, T, E>( self, capacity: usize, overflow: O, fib: F, ) -> TryFiberStreamRing<T, E> where O: Fn(T) -> Result<(), E>, F: Fiber<Input = (), Yield = Option<T>, Return = Result<Option<T>, E>>, O: 
Send + 'static, F: Send + 'static, T: Send + 'static, E: Send + 'static, { TryFiberStreamRing { rx: add_rx(self, capacity, overflow, fib, identity) } } #[inline] fn add_overwriting_try_stream<F, T, E>( self, capacity: usize, fib: F, ) -> TryFiberStreamRing<T, E> where F: Fiber<Input = (), Yield = Option<T>, Return = Result<Option<T>, E>>, F: Send + 'static, T: Send + 'static, E: Send + 'static, { TryFiberStreamRing { rx: add_rx_overwrite(self, capacity, fib, identity) } } } #[inline] fn add_rx<H, O, F, T, E, C>( thr: H, capacity: usize, overflow: O, mut fib: F, convert: C, ) -> Receiver<T, E> where H: ThrToken, O: Fn(T) -> Result<(), E>, F: Fiber<Input = (), Yield = Option<T>>, C: FnOnce(F::Return) -> Result<Option<T>, E>, O: Send + 'static, F: Send + 'static, T: Send + 'static, E: Send + 'static, C: Send + 'static, { let (mut tx, rx) = channel(capacity); thr.add(move || { loop { if tx.is_canceled() { break; } match unsafe { Pin::new_unchecked(&mut fib) }.resume(()) { fib::Yielded(None) => {} fib::Yielded(Some(value)) => match tx.send(value) { Ok(()) => {} Err(SendError { value, kind }) => match kind { SendErrorKind::Canceled => { break; } SendErrorKind::Overflow => match overflow(value) { Ok(()) => {} Err(err) => { drop(tx.send_err(err)); break; } }, }, }, fib::Complete(value) => { match convert(value) { Ok(None) => {} Ok(Some(value)) => match tx.send(value) { Ok(()) => {} Err(SendError { value, kind }) => match kind { SendErrorKind::Canceled => {} SendErrorKind::Overflow => match overflow(value) { Ok(()) => {} Err(err) => { drop(tx.send_err(err)); } }, }, }, Err(err) => { drop(tx.send_err(err)); } } break; } } yield; } }); rx } #[inline] fn add_rx_overwrite<H, F, T, E, C>( thr: H, capacity: usize, mut fib: F, convert: C, ) -> Receiver<T, E> where H: ThrToken, F: Fiber<Input = (), Yield = Option<T>>, C: FnOnce(F::Return) -> Result<Option<T>, E>, F: Send + 'static, T: Send + 'static, E: Send + 'static, C: Send + 'static, { let (mut tx, rx) = channel(capacity); 
thr.add(move || { loop { if tx.is_canceled() { break; } match unsafe { Pin::new_unchecked(&mut fib) }.resume(()) { fib::Yielded(None) => {} fib::Yielded(Some(value)) => match tx.send_overwrite(value) { Ok(()) => (), Err(_) => break, }, fib::Complete(value) => { match convert(value) { Ok(None) => {} Ok(Some(value)) => { drop(tx.send_overwrite(value)); } Err(err) => { drop(tx.send_err(err)); } } break; } } yield; } }); rx } impl<T: ThrToken> ThrFiberStreamRing for T {}
use crate::{ fib::{self, Fiber}, sync::spsc::ring::{channel, Receiver, SendError, SendErrorKind}, thr::prelude::*, }; use core::{ convert::identity, pin::Pin, task::{Context, Poll}, }; use futures::Stream; #[must_use = "streams do nothing unless you `.await` or poll them"] pub struct FiberStreamRing<T> { rx: Receiver<T, !>, } #[must_use = "streams do nothing unless you `.await` or poll them"] pub struct TryFiberStreamRing<T, E> { rx: Receiver<T, E>, } impl<T> FiberStreamRing<T> { #[inline] pub fn close(&mut self) { self.rx.close() } } impl<T, E> TryFiberStreamRing<T, E> { #[inline] pub fn close(&mut self) { self.rx.close() } } impl<T> Stream for FiberStreamRing<T> { type Item = T; #[inline] fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> { let rx = unsafe { self.map_unchecked_mut(|x| &mut x.rx) }; rx.poll_next(cx).map(|value| { value.map(|value| match value { Ok(value) => value, }) }) } } impl<T, E> Stream for TryFiberStreamRing<T, E> { type Item = Result<T, E>; #[inline] fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> { let rx = unsafe { self.map_unchecked_mut(|x| &mut x.rx) }; rx.poll_next(cx) } } pub trait ThrFiberStreamRing: ThrToken { #[inline] fn add_saturating_stream<F, T>(self, capacity: usize, fib: F) -> FiberStreamRing<T> where F: Fiber<Input = (), Yield = Option<T>, Return = Option<T>>, F: Send + 'static, T: Send + 'static, { FiberStreamRing { rx: add_rx(self
drop(tx.send_err(err)); break; } }, }, }, fib::Complete(value) => { match convert(value) { Ok(None) => {} Ok(Some(value)) => match tx.send(value) { Ok(()) => {} Err(SendError { value, kind }) => match kind { SendErrorKind::Canceled => {} SendErrorKind::Overflow => match overflow(value) { Ok(()) => {} Err(err) => { drop(tx.send_err(err)); } }, }, }, Err(err) => { drop(tx.send_err(err)); } } break; } } yield; } }); rx } #[inline] fn add_rx_overwrite<H, F, T, E, C>( thr: H, capacity: usize, mut fib: F, convert: C, ) -> Receiver<T, E> where H: ThrToken, F: Fiber<Input = (), Yield = Option<T>>, C: FnOnce(F::Return) -> Result<Option<T>, E>, F: Send + 'static, T: Send + 'static, E: Send + 'static, C: Send + 'static, { let (mut tx, rx) = channel(capacity); thr.add(move || { loop { if tx.is_canceled() { break; } match unsafe { Pin::new_unchecked(&mut fib) }.resume(()) { fib::Yielded(None) => {} fib::Yielded(Some(value)) => match tx.send_overwrite(value) { Ok(()) => (), Err(_) => break, }, fib::Complete(value) => { match convert(value) { Ok(None) => {} Ok(Some(value)) => { drop(tx.send_overwrite(value)); } Err(err) => { drop(tx.send_err(err)); } } break; } } yield; } }); rx } impl<T: ThrToken> ThrFiberStreamRing for T {}
, capacity, |_| Ok(()), fib, Ok) } } #[inline] fn add_overwriting_stream<F, T>(self, capacity: usize, fib: F) -> FiberStreamRing<T> where F: Fiber<Input = (), Yield = Option<T>, Return = Option<T>>, F: Send + 'static, T: Send + 'static, { FiberStreamRing { rx: add_rx_overwrite(self, capacity, fib, Ok) } } #[inline] fn add_try_stream<O, F, T, E>( self, capacity: usize, overflow: O, fib: F, ) -> TryFiberStreamRing<T, E> where O: Fn(T) -> Result<(), E>, F: Fiber<Input = (), Yield = Option<T>, Return = Result<Option<T>, E>>, O: Send + 'static, F: Send + 'static, T: Send + 'static, E: Send + 'static, { TryFiberStreamRing { rx: add_rx(self, capacity, overflow, fib, identity) } } #[inline] fn add_overwriting_try_stream<F, T, E>( self, capacity: usize, fib: F, ) -> TryFiberStreamRing<T, E> where F: Fiber<Input = (), Yield = Option<T>, Return = Result<Option<T>, E>>, F: Send + 'static, T: Send + 'static, E: Send + 'static, { TryFiberStreamRing { rx: add_rx_overwrite(self, capacity, fib, identity) } } } #[inline] fn add_rx<H, O, F, T, E, C>( thr: H, capacity: usize, overflow: O, mut fib: F, convert: C, ) -> Receiver<T, E> where H: ThrToken, O: Fn(T) -> Result<(), E>, F: Fiber<Input = (), Yield = Option<T>>, C: FnOnce(F::Return) -> Result<Option<T>, E>, O: Send + 'static, F: Send + 'static, T: Send + 'static, E: Send + 'static, C: Send + 'static, { let (mut tx, rx) = channel(capacity); thr.add(move || { loop { if tx.is_canceled() { break; } match unsafe { Pin::new_unchecked(&mut fib) }.resume(()) { fib::Yielded(None) => {} fib::Yielded(Some(value)) => match tx.send(value) { Ok(()) => {} Err(SendError { value, kind }) => match kind { SendErrorKind::Canceled => { break; } SendErrorKind::Overflow => match overflow(value) { Ok(()) => {} Err(err) => {
random
[ { "content": "#[marker]\n\npub trait YieldNone: Send + 'static {}\n\n\n\nimpl YieldNone for () {}\n\nimpl YieldNone for ! {}\n\n\n\nimpl<T> FiberFuture<T> {\n\n /// Gracefully close this future.\n\n ///\n\n /// The fiber will be removed on a next thread invocation without resuming.\n\n #[inline]\n\n...
Rust
src/lib.rs
neithernut/transiter
e73ad3cdd5aaa154933a061e20e7c96b348f2149
use std::iter::FromIterator; #[derive(Clone, Debug)] pub struct TransIter<F: FnMut(&T) -> I, I: IntoIterator<Item = T>, T> { get_next: F, queue: std::collections::VecDeque<T>, mode: Mode, } impl<F: FnMut(&T) -> I, I: IntoIterator<Item = T>, T> TransIter<F, I, T> { pub fn new(initial: T, recursion: F) -> Self { Self {get_next: recursion, queue: std::iter::once(initial).collect(), mode: Default::default()} } pub fn new_multi(initial: impl IntoIterator<Item = T>, recursion: F) -> Self { Self {get_next: recursion, queue: FromIterator::from_iter(initial), mode: Default::default()} } pub fn breadth_first(self) -> Self { Self {mode: Mode::BreadthFirst, ..self} } pub fn depth_first(self) -> Self { Self {mode: Mode::DepthFirst, ..self} } pub fn depth_first_unordered(self) -> Self { Self {mode: Mode::DepthFirstUnordered, ..self} } pub fn into_trans_prio_queue(self) -> TransPrioQueue<F, I, T> where T: Ord { TransPrioQueue::new_multi(self.queue, self.get_next) } } impl<F: FnMut(&T) -> I, I: IntoIterator<Item = T>, T> Iterator for TransIter<F, I, T> { type Item = T; fn next(&mut self) -> Option<T> { let res = self.queue.pop_front(); res.as_ref().map(&mut self.get_next).map(|items| match self.mode { Mode::BreadthFirst => self.queue.extend(items), Mode::DepthFirst => { let mut items = Vec::from_iter(items); self.queue.reserve(items.len()); while let Some(i) = items.pop() { self.queue.push_front(i); } }, Mode::DepthFirstUnordered => { let items = items.into_iter(); self.queue.reserve(items.size_hint().0); items.for_each(|i| self.queue.push_front(i)) }, }); res } } #[derive(Copy, Clone, Debug)] enum Mode { BreadthFirst, DepthFirst, DepthFirstUnordered, } impl Default for Mode { fn default() -> Self { Self::BreadthFirst } } #[derive(Clone, Debug)] pub struct TransPrioQueue<F: FnMut(&T) -> I, I: IntoIterator<Item = T>, T: Ord> { get_next: F, data: std::collections::BinaryHeap<T>, } impl<F: FnMut(&T) -> I, I: IntoIterator<Item = T>, T: Ord> TransPrioQueue<F, I, T> { pub fn 
new(initial: T, recursion: F) -> Self { Self {get_next: recursion, data: std::iter::once(initial).collect()} } pub fn new_multi(initial: impl IntoIterator<Item = T>, recursion: F) -> Self { Self {get_next: recursion, data: FromIterator::from_iter(initial)} } } impl<F: FnMut(&T) -> I, I: IntoIterator<Item = T>, T: Ord> Iterator for TransPrioQueue<F, I, T> { type Item = T; fn next(&mut self) -> Option<T> { let res = self.data.pop(); res.as_ref().map(&mut self.get_next).map(|items| self.data.extend(items)); res } } pub trait IntoTransIter<T> { fn trans_iter_with<F: FnMut(&T) -> I, I: IntoIterator<Item = T>>( self, recursion: F ) -> TransIter<F, I, T>; fn trans_prio_queue_with<F: FnMut(&T) -> I, I: IntoIterator<Item = T>>( self, recursion: F ) -> TransPrioQueue<F, I, T> where Self: Sized, T: Ord, { self.trans_iter_with(recursion).into_trans_prio_queue() } } impl<T> IntoTransIter<T> for T { fn trans_iter_with<F: FnMut(&T) -> I, I: IntoIterator<Item = T>>( self, recursion: F ) -> TransIter<F, I, T> { TransIter::new(self, recursion) } } pub trait AutoTransIter<T>: IntoTransIter<T> + Sized { type RecIter: IntoIterator<Item = T>; fn recurse(item: &T) -> Self::RecIter; fn trans_iter(self) -> TransIter<fn(&T) -> Self::RecIter, Self::RecIter, T> { self.trans_iter_with(Self::recurse) } fn trans_prio_queue(self) -> TransPrioQueue<fn(&T) -> Self::RecIter, Self::RecIter, T> where T: Ord { self.trans_prio_queue_with(Self::recurse) } } #[cfg(test)] #[macro_use(quickcheck)] extern crate quickcheck_macros; #[cfg(test)] mod tests;
use std::iter::FromIterator; #[derive(Clone, Debug)] pub struct TransIter<F: FnMut(&T) -> I, I: IntoIterator<Item = T>, T> { get_next: F, queue: std::collections::VecDeque<T>, mode: Mode, } impl<F: FnMut(&T) -> I, I: IntoIterator<Item = T>, T> TransIter<F, I, T> { pub fn new(initial: T, recursion: F) -> Self { Self {get_next: recursion, queue: std::iter::once(initial).collect(), mode: Default::default()} } pub fn new_multi(initial: impl IntoIterator<Item = T>, recursion: F) -> Self { Self {get_next: recursion, queue: FromIterator::from_iter(initial), mode: Default::default()} } pub fn breadth_first(self) -> Self { Self {mode: Mode::BreadthFirst, ..self} } pub fn depth_first(self) -> Self { Self {mode: Mode::DepthFirst, ..self} } pub fn depth_first_unordered(self) -> Self { Self {mode: Mode::DepthFirstUnordered, ..self} } pub fn into_trans_prio_queue(self) -> TransPrioQueue<F, I, T> where T: Ord { TransPrioQueue::new_multi(self.queue, self.get_next) } } impl<F: FnMut(&T) -> I, I: IntoIterator<Item = T>, T> Iterator for TransIter<F, I, T> { type Item = T; fn next(&mut self) -> Option<T> { let res = self.queue.pop_front(); res.as_ref().map(&mut self.get_next).map(|items| ma
} #[derive(Copy, Clone, Debug)] enum Mode { BreadthFirst, DepthFirst, DepthFirstUnordered, } impl Default for Mode { fn default() -> Self { Self::BreadthFirst } } #[derive(Clone, Debug)] pub struct TransPrioQueue<F: FnMut(&T) -> I, I: IntoIterator<Item = T>, T: Ord> { get_next: F, data: std::collections::BinaryHeap<T>, } impl<F: FnMut(&T) -> I, I: IntoIterator<Item = T>, T: Ord> TransPrioQueue<F, I, T> { pub fn new(initial: T, recursion: F) -> Self { Self {get_next: recursion, data: std::iter::once(initial).collect()} } pub fn new_multi(initial: impl IntoIterator<Item = T>, recursion: F) -> Self { Self {get_next: recursion, data: FromIterator::from_iter(initial)} } } impl<F: FnMut(&T) -> I, I: IntoIterator<Item = T>, T: Ord> Iterator for TransPrioQueue<F, I, T> { type Item = T; fn next(&mut self) -> Option<T> { let res = self.data.pop(); res.as_ref().map(&mut self.get_next).map(|items| self.data.extend(items)); res } } pub trait IntoTransIter<T> { fn trans_iter_with<F: FnMut(&T) -> I, I: IntoIterator<Item = T>>( self, recursion: F ) -> TransIter<F, I, T>; fn trans_prio_queue_with<F: FnMut(&T) -> I, I: IntoIterator<Item = T>>( self, recursion: F ) -> TransPrioQueue<F, I, T> where Self: Sized, T: Ord, { self.trans_iter_with(recursion).into_trans_prio_queue() } } impl<T> IntoTransIter<T> for T { fn trans_iter_with<F: FnMut(&T) -> I, I: IntoIterator<Item = T>>( self, recursion: F ) -> TransIter<F, I, T> { TransIter::new(self, recursion) } } pub trait AutoTransIter<T>: IntoTransIter<T> + Sized { type RecIter: IntoIterator<Item = T>; fn recurse(item: &T) -> Self::RecIter; fn trans_iter(self) -> TransIter<fn(&T) -> Self::RecIter, Self::RecIter, T> { self.trans_iter_with(Self::recurse) } fn trans_prio_queue(self) -> TransPrioQueue<fn(&T) -> Self::RecIter, Self::RecIter, T> where T: Ord { self.trans_prio_queue_with(Self::recurse) } } #[cfg(test)] #[macro_use(quickcheck)] extern crate quickcheck_macros; #[cfg(test)] mod tests;
tch self.mode { Mode::BreadthFirst => self.queue.extend(items), Mode::DepthFirst => { let mut items = Vec::from_iter(items); self.queue.reserve(items.len()); while let Some(i) = items.pop() { self.queue.push_front(i); } }, Mode::DepthFirstUnordered => { let items = items.into_iter(); self.queue.reserve(items.size_hint().0); items.for_each(|i| self.queue.push_front(i)) }, }); res }
function_block-function_prefixed
[ { "content": "#[quickcheck]\n\nfn node_count_prio_queue(node: Node) -> bool {\n\n let count = node.count();\n\n node.trans_prio_queue().count() == count\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 2, "score": 37565.85385757161 }, { "content": "#[derive(Clone, Debug, PartialEq, Eq...
Rust
src/correct/mod.rs
natir/br
8d83017bb1ad1ec7153fa177c1d0c5127aeed6ed
/* Copyright (c) 2020 Pierre Marijon <pmarijon@mmci.uni-saarland.de> Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ /* local use */ use crate::set; const MASK_LOOKUP: [u64; 32] = { let mut lookup = [0; 32]; let mut k = 1; while k < 32 { lookup[k] = (1 << (2 * k)) - 1; k += 1; } lookup }; #[inline(always)] pub(crate) fn mask(k: u8) -> u64 { MASK_LOOKUP[k as usize] } pub trait Corrector { fn valid_kmer(&self) -> &set::BoxKmerSet; fn correct_error(&self, kmer: u64, seq: &[u8]) -> Option<(Vec<u8>, usize)>; fn k(&self) -> u8 { self.valid_kmer().k() } fn correct(&self, seq: &[u8]) -> Vec<u8> { let mut correct: Vec<u8> = Vec::with_capacity(seq.len()); if seq.len() < self.k() as usize { return seq.to_vec(); } let mut i = self.k() as usize; let mut kmer = cocktail::kmer::seq2bit(&seq[0..i]); for n in &seq[0..i] { correct.push(*n); } let mut previous = self.valid_kmer().get(kmer); while i < seq.len() { let nuc = seq[i]; kmer = add_nuc_to_end(kmer, cocktail::kmer::nuc2bit(nuc), self.k()); if !self.valid_kmer().get(kmer) && previous { if let Some((local_correct, offset)) = self.correct_error(kmer, &seq[i..]) { kmer >>= 2; for nuc in local_correct { kmer = add_nuc_to_end( kmer, cocktail::kmer::nuc2bit(nuc), self.valid_kmer().k(), ); correct.push(nuc); } log::debug!("error at position {} cor", i); previous = true; i += offset; } else { correct.push(nuc); log::debug!("error at position {} not", i); i += 1; previous = false; } } else { previous = self.valid_kmer().get(kmer); correct.push(nuc); i += 1; } } correct } } pub(crate) fn add_nuc_to_end(kmer: u64, nuc: u64, k: u8) -> u64 { ((kmer << 2) & mask(k)) ^ nuc } pub(crate) fn alt_nucs(valid_kmer: &set::BoxKmerSet, ori: u64) -> Vec<u64> { next_nucs(valid_kmer, ori >> 2) } pub(crate) fn next_nucs(valid_kmer: &set::BoxKmerSet, kmer: u64) -> Vec<u64> { let mut correct_nuc: Vec<u64> = Vec::with_capacity(4); for alt_nuc in 0..4 { if valid_kmer.get(add_nuc_to_end(kmer, alt_nuc, valid_kmer.k())) { correct_nuc.push(alt_nuc); } } correct_nuc } pub(crate) fn error_len( subseq: &[u8], mut kmer: u64, valid_kmer: &set::BoxKmerSet, ) -> (usize, u64) { let 
mut j = 0; loop { j += 1; if j >= subseq.len() { break; } kmer = add_nuc_to_end(kmer, cocktail::kmer::nuc2bit(subseq[j]), valid_kmer.k()); if valid_kmer.get(kmer) { break; } } (j, kmer) } pub mod exist; pub mod gap_size; pub mod graph; pub mod greedy; pub use exist::one::One; pub use exist::two::Two; pub use gap_size::GapSize; pub use graph::Graph; pub use greedy::Greedy; #[cfg(test)] mod tests { use super::*; #[test] fn found_alt_kmer() { let mut data = pcon::solid::Solid::new(5); data.set(cocktail::kmer::seq2bit(b"ACTGA"), true); data.set(cocktail::kmer::seq2bit(b"ACTGT"), true); let set: set::BoxKmerSet = Box::new(set::Pcon::new(data)); let kmer = cocktail::kmer::seq2bit(b"ACTGC"); assert_eq!(alt_nucs(&set, kmer), vec![0, 2]); } }
/* Copyright (c) 2020 Pierre Marijon <pmarijon@mmci.uni-saarland.de> Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ /* local use */ use crate::set; const MASK_LOOKUP: [u64; 32] = { let mut lookup = [0; 32]; let mut k = 1; while k < 32 { lookup[k] = (1 << (2 * k)) - 1; k += 1; } lookup }; #[inline(always)] pub(crate) fn mask(k: u8) -> u64 { MASK_LOOKUP[k as usize] } pub trait Corrector { fn valid_kmer(&self) -> &set::BoxKmerSet; fn correct_error(&self, kmer: u64, seq: &[u8]) -> Option<(Vec<u8>, usize)>; fn k(&self) -> u8 { self.valid_kmer().k() }
} pub(crate) fn add_nuc_to_end(kmer: u64, nuc: u64, k: u8) -> u64 { ((kmer << 2) & mask(k)) ^ nuc } pub(crate) fn alt_nucs(valid_kmer: &set::BoxKmerSet, ori: u64) -> Vec<u64> { next_nucs(valid_kmer, ori >> 2) } pub(crate) fn next_nucs(valid_kmer: &set::BoxKmerSet, kmer: u64) -> Vec<u64> { let mut correct_nuc: Vec<u64> = Vec::with_capacity(4); for alt_nuc in 0..4 { if valid_kmer.get(add_nuc_to_end(kmer, alt_nuc, valid_kmer.k())) { correct_nuc.push(alt_nuc); } } correct_nuc } pub(crate) fn error_len( subseq: &[u8], mut kmer: u64, valid_kmer: &set::BoxKmerSet, ) -> (usize, u64) { let mut j = 0; loop { j += 1; if j >= subseq.len() { break; } kmer = add_nuc_to_end(kmer, cocktail::kmer::nuc2bit(subseq[j]), valid_kmer.k()); if valid_kmer.get(kmer) { break; } } (j, kmer) } pub mod exist; pub mod gap_size; pub mod graph; pub mod greedy; pub use exist::one::One; pub use exist::two::Two; pub use gap_size::GapSize; pub use graph::Graph; pub use greedy::Greedy; #[cfg(test)] mod tests { use super::*; #[test] fn found_alt_kmer() { let mut data = pcon::solid::Solid::new(5); data.set(cocktail::kmer::seq2bit(b"ACTGA"), true); data.set(cocktail::kmer::seq2bit(b"ACTGT"), true); let set: set::BoxKmerSet = Box::new(set::Pcon::new(data)); let kmer = cocktail::kmer::seq2bit(b"ACTGC"); assert_eq!(alt_nucs(&set, kmer), vec![0, 2]); } }
fn correct(&self, seq: &[u8]) -> Vec<u8> { let mut correct: Vec<u8> = Vec::with_capacity(seq.len()); if seq.len() < self.k() as usize { return seq.to_vec(); } let mut i = self.k() as usize; let mut kmer = cocktail::kmer::seq2bit(&seq[0..i]); for n in &seq[0..i] { correct.push(*n); } let mut previous = self.valid_kmer().get(kmer); while i < seq.len() { let nuc = seq[i]; kmer = add_nuc_to_end(kmer, cocktail::kmer::nuc2bit(nuc), self.k()); if !self.valid_kmer().get(kmer) && previous { if let Some((local_correct, offset)) = self.correct_error(kmer, &seq[i..]) { kmer >>= 2; for nuc in local_correct { kmer = add_nuc_to_end( kmer, cocktail::kmer::nuc2bit(nuc), self.valid_kmer().k(), ); correct.push(nuc); } log::debug!("error at position {} cor", i); previous = true; i += offset; } else { correct.push(nuc); log::debug!("error at position {} not", i); i += 1; previous = false; } } else { previous = self.valid_kmer().get(kmer); correct.push(nuc); i += 1; } } correct }
function_block-full_function
[ { "content": "pub trait KmerSet: Sync {\n\n fn get(&self, kmer: u64) -> bool;\n\n\n\n fn k(&self) -> u8;\n\n}\n\n\n\npub type BoxKmerSet<'a> = Box<dyn KmerSet + 'a>;\n", "file_path": "src/set/mod.rs", "rank": 1, "score": 77363.71832479363 }, { "content": "/// Set the number of threads ...
Rust
zcash_proofs/src/circuit/pedersen_hash.rs
murisi/masp
502f61121b6acac85a61b4ca594a4110e2d0d643
use super::ecc::{EdwardsPoint, MontgomeryPoint}; use bellman::gadgets::boolean::Boolean; use bellman::gadgets::lookup::*; use bellman::{ConstraintSystem, SynthesisError}; pub use zcash_primitives::pedersen_hash::Personalization; use crate::constants::PEDERSEN_CIRCUIT_GENERATORS; fn get_constant_bools(person: &Personalization) -> Vec<Boolean> { person .get_bits() .into_iter() .map(Boolean::constant) .collect() } pub fn pedersen_hash<CS>( mut cs: CS, personalization: Personalization, bits: &[Boolean], ) -> Result<EdwardsPoint, SynthesisError> where CS: ConstraintSystem<bls12_381::Scalar>, { let personalization = get_constant_bools(&personalization); assert_eq!(personalization.len(), 6); let mut edwards_result = None; let mut bits = personalization.iter().chain(bits.iter()).peekable(); let mut segment_generators = PEDERSEN_CIRCUIT_GENERATORS.iter(); let boolean_false = Boolean::constant(false); let mut segment_i = 0; while bits.peek().is_some() { let mut segment_result = None; let mut segment_windows = &segment_generators.next().expect("enough segments")[..]; let mut window_i = 0; while let Some(a) = bits.next() { let b = bits.next().unwrap_or(&boolean_false); let c = bits.next().unwrap_or(&boolean_false); let tmp = lookup3_xy_with_conditional_negation( cs.namespace(|| format!("segment {}, window {}", segment_i, window_i)), &[a.clone(), b.clone(), c.clone()], &segment_windows[0], )?; let tmp = MontgomeryPoint::interpret_unchecked(tmp.0, tmp.1); match segment_result { None => { segment_result = Some(tmp); } Some(ref mut segment_result) => { *segment_result = tmp.add( cs.namespace(|| { format!("addition of segment {}, window {}", segment_i, window_i) }), segment_result, )?; } } segment_windows = &segment_windows[1..]; if segment_windows.is_empty() { break; } window_i += 1; } let segment_result = segment_result.expect( "bits is not exhausted due to while condition; thus there must be a segment window; thus there must be a segment result", ); let segment_result = 
segment_result.into_edwards( cs.namespace(|| format!("conversion of segment {} into edwards", segment_i)), )?; match edwards_result { Some(ref mut edwards_result) => { *edwards_result = segment_result.add( cs.namespace(|| format!("addition of segment {} to accumulator", segment_i)), edwards_result, )?; } None => { edwards_result = Some(segment_result); } } segment_i += 1; } Ok(edwards_result.unwrap()) } #[cfg(test)] mod test { use super::*; use bellman::gadgets::boolean::{AllocatedBit, Boolean}; use bellman::gadgets::test::*; use ff::PrimeField; use group::Curve; use rand_core::{RngCore, SeedableRng}; use rand_xorshift::XorShiftRng; use zcash_primitives::pedersen_hash; fn ph_num_constraints(input_bits: usize) -> usize { let personalized_bits = 6 + input_bits; let precomputed_booleans = 2 + (personalized_bits % 3 == 1) as usize; let chunks = (personalized_bits + 3 - 1) / 3; let segments = (chunks + 63 - 1) / 63; let all_but_last_segments = segments - 1; let last_chunks = chunks - all_but_last_segments * 63; let lookup_chunk = 2; let add_chunks = 3; let convert_segment = 2; let add_segments = 6; return (chunks) * lookup_chunk - precomputed_booleans + segments * convert_segment + all_but_last_segments * ((63 - 1) * add_chunks + add_segments) + (last_chunks - 1) * add_chunks; } #[test] fn test_pedersen_hash_constraints() { let mut rng = XorShiftRng::from_seed([ 0x59, 0x62, 0xbe, 0x3d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06, 0xbc, 0xe5, ]); let leaves_len = 2 * 255; let note_len = 64 + 256 + 256; for &n_bits in [ 0, 3 * 63 - 6, 3 * 63 - 6 + 1, 3 * 63 - 6 + 2, leaves_len, note_len, ] .iter() { let mut cs = TestConstraintSystem::new(); let input: Vec<bool> = (0..n_bits).map(|_| rng.next_u32() % 2 != 0).collect(); let input_bools: Vec<Boolean> = input .iter() .enumerate() .map(|(i, b)| { Boolean::from( AllocatedBit::alloc(cs.namespace(|| format!("input {}", i)), Some(*b)) .unwrap(), ) }) .collect(); pedersen_hash( cs.namespace(|| "pedersen hash"), 
Personalization::NoteCommitment, &input_bools, ) .unwrap(); assert!(cs.is_satisfied()); let bitness_constraints = n_bits; let ph_constraints = ph_num_constraints(n_bits); assert_eq!(cs.num_constraints(), bitness_constraints + ph_constraints); if n_bits == leaves_len { assert_eq!(cs.num_constraints(), leaves_len + 867) }; if n_bits == note_len { assert_eq!(cs.num_constraints(), note_len + 982) }; } } #[test] fn test_pedersen_hash() { let mut rng = XorShiftRng::from_seed([ 0x59, 0x62, 0xbe, 0x3d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06, 0xbc, 0xe5, ]); for length in 0..751 { for _ in 0..5 { let input: Vec<bool> = (0..length).map(|_| rng.next_u32() % 2 != 0).collect(); let mut cs = TestConstraintSystem::new(); let input_bools: Vec<Boolean> = input .iter() .enumerate() .map(|(i, b)| { Boolean::from( AllocatedBit::alloc(cs.namespace(|| format!("input {}", i)), Some(*b)) .unwrap(), ) }) .collect(); let res = pedersen_hash( cs.namespace(|| "pedersen hash"), Personalization::MerkleTree(1), &input_bools, ) .unwrap(); assert!(cs.is_satisfied()); let expected = jubjub::ExtendedPoint::from(pedersen_hash::pedersen_hash( Personalization::MerkleTree(1), input.clone().into_iter(), )) .to_affine(); assert_eq!(res.get_u().get_value().unwrap(), expected.get_u()); assert_eq!(res.get_v().get_value().unwrap(), expected.get_v()); let unexpected = jubjub::ExtendedPoint::from(pedersen_hash::pedersen_hash( Personalization::MerkleTree(0), input.into_iter(), )) .to_affine(); assert!(res.get_u().get_value().unwrap() != unexpected.get_u()); assert!(res.get_v().get_value().unwrap() != unexpected.get_v()); } } } #[test] fn test_pedersen_hash_external_test_vectors() { let mut rng = XorShiftRng::from_seed([ 0x59, 0x62, 0xbe, 0x3d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06, 0xbc, 0xe5, ]); let expected_us = [ "28161926966428986673895580777285905189725480206811328272001879986576840909576", 
"39669831794597628158501766225645040955899576179071014703006420393381978263045", ]; let expected_vs = [ "26869991781071974894722407757894142583682396277979904369818887810555917099932", "2112827187110048608327330788910224944044097981650120385961435904443901436107", ]; for length in 300..302 { let input: Vec<bool> = (0..length).map(|_| rng.next_u32() % 2 != 0).collect(); let mut cs = TestConstraintSystem::new(); let input_bools: Vec<Boolean> = input .iter() .enumerate() .map(|(i, b)| { Boolean::from( AllocatedBit::alloc(cs.namespace(|| format!("input {}", i)), Some(*b)) .unwrap(), ) }) .collect(); let res = pedersen_hash( cs.namespace(|| "pedersen hash"), Personalization::MerkleTree(1), &input_bools, ) .unwrap(); assert!(cs.is_satisfied()); assert_eq!( res.get_u().get_value().unwrap(), bls12_381::Scalar::from_str_vartime(expected_us[length - 300]).unwrap() ); assert_eq!( res.get_v().get_value().unwrap(), bls12_381::Scalar::from_str_vartime(expected_vs[length - 300]).unwrap() ); } } }
use super::ecc::{EdwardsPoint, MontgomeryPoint}; use bellman::gadgets::boolean::Boolean; use bellman::gadgets::lookup::*; use bellman::{ConstraintSystem, SynthesisError}; pub use zcash_primitives::pedersen_hash::Personalization; use crate::constants::PEDERSEN_CIRCUIT_GENERATORS; fn get_constant_bools(person: &Personalization) -> Vec<Boolean> { person .get_bits() .into_iter() .map(Boolean::constant) .collect() } pub fn pedersen_hash<CS>( mut cs: CS, personalization: Personalization, bits: &[Boolean], ) -> Result<EdwardsPoint, SynthesisError> where CS: ConstraintSystem<bls12_381::Scalar>, { let personalization = get_constant_bools(&personalization); assert_eq!(personalization.len(), 6); let mut edwards_result = None; let mut bits = personalization.iter().chain(bits.iter()).peekable(); let mut segment_generators = PEDERSEN_CIRCUIT_GENERATORS.iter(); let boolean_false = Boolean::constant(false); let mut segment_i = 0; while bits.peek().is_some() { let mut segment_result = None; let mut segment_windows = &segment_generators.next().expect("enough segments")[..]; let mut window_i = 0; while let Some(a) = bits.next() { let b = bits.next().unwrap_or(&boolean_false); let c = bits.next().unwrap_or(&boolean_false); let tmp = lookup3_xy_with_conditional_negation( cs.namespace(|| format!("segment {}, window {}", segment_i, window_i)), &[a.clone(), b.clone(), c.clone()], &segment_windows[0], )?; let tmp = MontgomeryPoint::interpret_unchecked(tmp.0, tmp.1);
segment_windows = &segment_windows[1..]; if segment_windows.is_empty() { break; } window_i += 1; } let segment_result = segment_result.expect( "bits is not exhausted due to while condition; thus there must be a segment window; thus there must be a segment result", ); let segment_result = segment_result.into_edwards( cs.namespace(|| format!("conversion of segment {} into edwards", segment_i)), )?; match edwards_result { Some(ref mut edwards_result) => { *edwards_result = segment_result.add( cs.namespace(|| format!("addition of segment {} to accumulator", segment_i)), edwards_result, )?; } None => { edwards_result = Some(segment_result); } } segment_i += 1; } Ok(edwards_result.unwrap()) } #[cfg(test)] mod test { use super::*; use bellman::gadgets::boolean::{AllocatedBit, Boolean}; use bellman::gadgets::test::*; use ff::PrimeField; use group::Curve; use rand_core::{RngCore, SeedableRng}; use rand_xorshift::XorShiftRng; use zcash_primitives::pedersen_hash; fn ph_num_constraints(input_bits: usize) -> usize { let personalized_bits = 6 + input_bits; let precomputed_booleans = 2 + (personalized_bits % 3 == 1) as usize; let chunks = (personalized_bits + 3 - 1) / 3; let segments = (chunks + 63 - 1) / 63; let all_but_last_segments = segments - 1; let last_chunks = chunks - all_but_last_segments * 63; let lookup_chunk = 2; let add_chunks = 3; let convert_segment = 2; let add_segments = 6; return (chunks) * lookup_chunk - precomputed_booleans + segments * convert_segment + all_but_last_segments * ((63 - 1) * add_chunks + add_segments) + (last_chunks - 1) * add_chunks; } #[test] fn test_pedersen_hash_constraints() { let mut rng = XorShiftRng::from_seed([ 0x59, 0x62, 0xbe, 0x3d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06, 0xbc, 0xe5, ]); let leaves_len = 2 * 255; let note_len = 64 + 256 + 256; for &n_bits in [ 0, 3 * 63 - 6, 3 * 63 - 6 + 1, 3 * 63 - 6 + 2, leaves_len, note_len, ] .iter() { let mut cs = TestConstraintSystem::new(); let input: Vec<bool> = 
(0..n_bits).map(|_| rng.next_u32() % 2 != 0).collect(); let input_bools: Vec<Boolean> = input .iter() .enumerate() .map(|(i, b)| { Boolean::from( AllocatedBit::alloc(cs.namespace(|| format!("input {}", i)), Some(*b)) .unwrap(), ) }) .collect(); pedersen_hash( cs.namespace(|| "pedersen hash"), Personalization::NoteCommitment, &input_bools, ) .unwrap(); assert!(cs.is_satisfied()); let bitness_constraints = n_bits; let ph_constraints = ph_num_constraints(n_bits); assert_eq!(cs.num_constraints(), bitness_constraints + ph_constraints); if n_bits == leaves_len { assert_eq!(cs.num_constraints(), leaves_len + 867) }; if n_bits == note_len { assert_eq!(cs.num_constraints(), note_len + 982) }; } } #[test] fn test_pedersen_hash() { let mut rng = XorShiftRng::from_seed([ 0x59, 0x62, 0xbe, 0x3d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06, 0xbc, 0xe5, ]); for length in 0..751 { for _ in 0..5 { let input: Vec<bool> = (0..length).map(|_| rng.next_u32() % 2 != 0).collect(); let mut cs = TestConstraintSystem::new(); let input_bools: Vec<Boolean> = input .iter() .enumerate() .map(|(i, b)| { Boolean::from( AllocatedBit::alloc(cs.namespace(|| format!("input {}", i)), Some(*b)) .unwrap(), ) }) .collect(); let res = pedersen_hash( cs.namespace(|| "pedersen hash"), Personalization::MerkleTree(1), &input_bools, ) .unwrap(); assert!(cs.is_satisfied()); let expected = jubjub::ExtendedPoint::from(pedersen_hash::pedersen_hash( Personalization::MerkleTree(1), input.clone().into_iter(), )) .to_affine(); assert_eq!(res.get_u().get_value().unwrap(), expected.get_u()); assert_eq!(res.get_v().get_value().unwrap(), expected.get_v()); let unexpected = jubjub::ExtendedPoint::from(pedersen_hash::pedersen_hash( Personalization::MerkleTree(0), input.into_iter(), )) .to_affine(); assert!(res.get_u().get_value().unwrap() != unexpected.get_u()); assert!(res.get_v().get_value().unwrap() != unexpected.get_v()); } } } #[test] fn test_pedersen_hash_external_test_vectors() { let mut rng = 
XorShiftRng::from_seed([ 0x59, 0x62, 0xbe, 0x3d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06, 0xbc, 0xe5, ]); let expected_us = [ "28161926966428986673895580777285905189725480206811328272001879986576840909576", "39669831794597628158501766225645040955899576179071014703006420393381978263045", ]; let expected_vs = [ "26869991781071974894722407757894142583682396277979904369818887810555917099932", "2112827187110048608327330788910224944044097981650120385961435904443901436107", ]; for length in 300..302 { let input: Vec<bool> = (0..length).map(|_| rng.next_u32() % 2 != 0).collect(); let mut cs = TestConstraintSystem::new(); let input_bools: Vec<Boolean> = input .iter() .enumerate() .map(|(i, b)| { Boolean::from( AllocatedBit::alloc(cs.namespace(|| format!("input {}", i)), Some(*b)) .unwrap(), ) }) .collect(); let res = pedersen_hash( cs.namespace(|| "pedersen hash"), Personalization::MerkleTree(1), &input_bools, ) .unwrap(); assert!(cs.is_satisfied()); assert_eq!( res.get_u().get_value().unwrap(), bls12_381::Scalar::from_str_vartime(expected_us[length - 300]).unwrap() ); assert_eq!( res.get_v().get_value().unwrap(), bls12_381::Scalar::from_str_vartime(expected_vs[length - 300]).unwrap() ); } } }
match segment_result { None => { segment_result = Some(tmp); } Some(ref mut segment_result) => { *segment_result = tmp.add( cs.namespace(|| { format!("addition of segment {}, window {}", segment_i, window_i) }), segment_result, )?; } }
if_condition
[ { "content": "pub fn prf_a_pk<Scalar, CS>(cs: CS, a_sk: &[Boolean]) -> Result<Vec<Boolean>, SynthesisError>\n\nwhere\n\n Scalar: PrimeField,\n\n CS: ConstraintSystem<Scalar>,\n\n{\n\n prf(\n\n cs,\n\n true,\n\n true,\n\n false,\n\n false,\n\n a_sk,\n\n &...
Rust
src/stat/fsext.rs
LuoZijun/coreutils
9b6f0b02e82b05cbe94d9b4c036e18bdb87b8042
pub use super::uucore::libc; extern crate time; use self::time::Timespec; pub use libc::{c_int, mode_t, strerror, S_IFBLK, S_IFCHR, S_IFDIR, S_IFIFO, S_IFLNK, S_IFMT, S_IFREG, S_IFSOCK, S_IRGRP, S_IROTH, S_IRUSR, S_ISGID, S_ISUID, S_ISVTX, S_IWGRP, S_IWOTH, S_IWUSR, S_IXGRP, S_IXOTH, S_IXUSR}; pub trait BirthTime { fn pretty_birth(&self) -> String; fn birth(&self) -> String; } use std::fs::Metadata; impl BirthTime for Metadata { #[cfg(feature = "nightly")] fn pretty_birth(&self) -> String { self.created() .map(|t| t.elapsed().unwrap()) .map(|e| pretty_time(e.as_secs() as i64, e.subsec_nanos() as i64)) .unwrap_or("-".to_owned()) } #[cfg(not(feature = "nightly"))] fn pretty_birth(&self) -> String { "-".to_owned() } #[cfg(feature = "nightly")] fn birth(&self) -> String { self.created() .map(|t| t.elapsed().unwrap()) .map(|e| format!("{}", e.as_secs())) .unwrap_or("0".to_owned()) } #[cfg(not(feature = "nightly"))] fn birth(&self) -> String { "0".to_owned() } } #[macro_export] macro_rules! has { ($mode:expr, $perm:expr) => ( $mode & $perm != 0 ) } pub fn pretty_time(sec: i64, nsec: i64) -> String { let tm = time::at(Timespec::new(sec, nsec as i32)); let res = time::strftime("%Y-%m-%d %H:%M:%S.%f %z", &tm).unwrap(); if res.ends_with(" -0000") { res.replace(" -0000", " +0000") } else { res } } pub fn pretty_filetype<'a>(mode: mode_t, size: u64) -> &'a str { match mode & S_IFMT { S_IFREG => { if size != 0 { "regular file" } else { "regular empty file" } } S_IFDIR => "directory", S_IFLNK => "symbolic link", S_IFCHR => "character special file", S_IFBLK => "block special file", S_IFIFO => "fifo", S_IFSOCK => "socket", _ => "weird file", } } pub fn pretty_access(mode: mode_t) -> String { let mut result = String::with_capacity(10); result.push(match mode & S_IFMT { S_IFDIR => 'd', S_IFCHR => 'c', S_IFBLK => 'b', S_IFREG => '-', S_IFIFO => 'p', S_IFLNK => 'l', S_IFSOCK => 's', _ => '?', }); result.push(if has!(mode, S_IRUSR) { 'r' } else { '-' }); result.push(if has!(mode, 
S_IWUSR) { 'w' } else { '-' }); result.push(if has!(mode, S_ISUID as mode_t) { if has!(mode, S_IXUSR) { 's' } else { 'S' } } else if has!(mode, S_IXUSR) { 'x' } else { '-' }); result.push(if has!(mode, S_IRGRP) { 'r' } else { '-' }); result.push(if has!(mode, S_IWGRP) { 'w' } else { '-' }); result.push(if has!(mode, S_ISGID as mode_t) { if has!(mode, S_IXGRP) { 's' } else { 'S' } } else if has!(mode, S_IXGRP) { 'x' } else { '-' }); result.push(if has!(mode, S_IROTH) { 'r' } else { '-' }); result.push(if has!(mode, S_IWOTH) { 'w' } else { '-' }); result.push(if has!(mode, S_ISVTX as mode_t) { if has!(mode, S_IXOTH) { 't' } else { 'T' } } else if has!(mode, S_IXOTH) { 'x' } else { '-' }); result } use std::mem::{self, transmute}; use std::path::Path; use std::borrow::Cow; use std::ffi::CString; use std::convert::{AsRef, From}; use std::error::Error; use std::io::Error as IOError; #[cfg(any(target_os = "linux", target_os = "macos", target_os = "android"))] use libc::statfs as Sstatfs; #[cfg(any(target_os = "linux", target_os = "macos", target_os = "android"))] use libc::statfs as statfs_fn; pub trait FsMeta { fn fs_type(&self) -> i64; fn iosize(&self) -> i64; fn blksize(&self) -> i64; fn total_blocks(&self) -> u64; fn free_blocks(&self) -> u64; fn avail_blocks(&self) -> u64; fn total_fnodes(&self) -> u64; fn free_fnodes(&self) -> u64; fn fsid(&self) -> u64; fn namelen(&self) -> i64; } impl FsMeta for Sstatfs { fn blksize(&self) -> i64 { self.f_bsize as i64 } fn total_blocks(&self) -> u64 { self.f_blocks as u64 } fn free_blocks(&self) -> u64 { self.f_bfree as u64 } fn avail_blocks(&self) -> u64 { self.f_bavail as u64 } fn total_fnodes(&self) -> u64 { self.f_files as u64 } fn free_fnodes(&self) -> u64 { self.f_ffree as u64 } fn fs_type(&self) -> i64 { self.f_type as i64 } #[cfg(target_os = "linux")] fn iosize(&self) -> i64 { self.f_frsize as i64 } #[cfg(target_os = "macos")] fn iosize(&self) -> i64 { self.f_iosize as i64 } #[cfg(not(any(target_os = "macos", target_os = 
"linux")))] fn iosize(&self) -> i64 { 0 } #[cfg(any(target_os = "macos", target_os = "linux"))] fn fsid(&self) -> u64 { let f_fsid: &[u32; 2] = unsafe { transmute(&self.f_fsid) }; (f_fsid[0] as u64) << 32 | f_fsid[1] as u64 } #[cfg(not(any(target_os = "macos", target_os = "linux")))] fn fsid(&self) -> u64 { 0 } #[cfg(target_os = "linux")] fn namelen(&self) -> i64 { self.f_namelen as i64 } #[cfg(target_os = "macos")] fn namelen(&self) -> i64 { 1024 } #[cfg(not(any(target_os = "macos", target_os = "linux")))] fn namelen(&self) -> u64 { 0 } } pub fn statfs<P: AsRef<Path>>(path: P) -> Result<Sstatfs, String> where Vec<u8>: From<P>, { match CString::new(path) { Ok(p) => { let mut buffer: Sstatfs = unsafe { mem::zeroed() }; unsafe { match statfs_fn(p.as_ptr(), &mut buffer) { 0 => Ok(buffer), _ => { let errno = IOError::last_os_error().raw_os_error().unwrap_or(0); Err(CString::from_raw(strerror(errno)) .into_string() .unwrap_or("Unknown Error".to_owned())) } } } } Err(e) => Err(e.description().to_owned()), } } pub fn pretty_fstype<'a>(fstype: i64) -> Cow<'a, str> { match fstype { 0x61636673 => "acfs".into(), 0xADF5 => "adfs".into(), 0xADFF => "affs".into(), 0x5346414F => "afs".into(), 0x09041934 => "anon-inode FS".into(), 0x61756673 => "aufs".into(), 0x0187 => "autofs".into(), 0x42465331 => "befs".into(), 0x62646576 => "bdevfs".into(), 0x1BADFACE => "bfs".into(), 0xCAFE4A11 => "bpf_fs".into(), 0x42494E4D => "binfmt_misc".into(), 0x9123683E => "btrfs".into(), 0x73727279 => "btrfs_test".into(), 0x00C36400 => "ceph".into(), 0x0027E0EB => "cgroupfs".into(), 0xFF534D42 => "cifs".into(), 0x73757245 => "coda".into(), 0x012FF7B7 => "coh".into(), 0x62656570 => "configfs".into(), 0x28CD3D45 => "cramfs".into(), 0x453DCD28 => "cramfs-wend".into(), 0x64626720 => "debugfs".into(), 0x1373 => "devfs".into(), 0x1CD1 => "devpts".into(), 0xF15F => "ecryptfs".into(), 0xDE5E81E4 => "efivarfs".into(), 0x00414A53 => "efs".into(), 0x5DF5 => "exofs".into(), 0x137D => "ext".into(), 0xEF53 => 
"ext2/ext3".into(), 0xEF51 => "ext2".into(), 0xF2F52010 => "f2fs".into(), 0x4006 => "fat".into(), 0x19830326 => "fhgfs".into(), 0x65735546 => "fuseblk".into(), 0x65735543 => "fusectl".into(), 0x0BAD1DEA => "futexfs".into(), 0x01161970 => "gfs/gfs2".into(), 0x47504653 => "gpfs".into(), 0x4244 => "hfs".into(), 0x482B => "hfs+".into(), 0x4858 => "hfsx".into(), 0x00C0FFEE => "hostfs".into(), 0xF995E849 => "hpfs".into(), 0x958458F6 => "hugetlbfs".into(), 0x11307854 => "inodefs".into(), 0x013111A8 => "ibrix".into(), 0x2BAD1DEA => "inotifyfs".into(), 0x9660 => "isofs".into(), 0x4004 => "isofs".into(), 0x4000 => "isofs".into(), 0x07C0 => "jffs".into(), 0x72B6 => "jffs2".into(), 0x3153464A => "jfs".into(), 0x6B414653 => "k-afs".into(), 0xC97E8168 => "logfs".into(), 0x0BD00BD0 => "lustre".into(), 0x5346314D => "m1fs".into(), 0x137F => "minix".into(), 0x138F => "minix (30 char.)".into(), 0x2468 => "minix v2".into(), 0x2478 => "minix v2 (30 char.)".into(), 0x4D5A => "minix3".into(), 0x19800202 => "mqueue".into(), 0x4D44 => "msdos".into(), 0x564C => "novell".into(), 0x6969 => "nfs".into(), 0x6E667364 => "nfsd".into(), 0x3434 => "nilfs".into(), 0x6E736673 => "nsfs".into(), 0x5346544E => "ntfs".into(), 0x9FA1 => "openprom".into(), 0x7461636F => "ocfs2".into(), 0x794C7630 => "overlayfs".into(), 0xAAD7AAEA => "panfs".into(), 0x50495045 => "pipefs".into(), 0x7C7C6673 => "prl_fs".into(), 0x9FA0 => "proc".into(), 0x6165676C => "pstorefs".into(), 0x002F => "qnx4".into(), 0x68191122 => "qnx6".into(), 0x858458F6 => "ramfs".into(), 0x52654973 => "reiserfs".into(), 0x7275 => "romfs".into(), 0x67596969 => "rpc_pipefs".into(), 0x73636673 => "securityfs".into(), 0xF97CFF8C => "selinux".into(), 0x43415D53 => "smackfs".into(), 0x517B => "smb".into(), 0xFE534D42 => "smb2".into(), 0xBEEFDEAD => "snfs".into(), 0x534F434B => "sockfs".into(), 0x73717368 => "squashfs".into(), 0x62656572 => "sysfs".into(), 0x012FF7B6 => "sysv2".into(), 0x012FF7B5 => "sysv4".into(), 0x01021994 => "tmpfs".into(), 
0x74726163 => "tracefs".into(), 0x24051905 => "ubifs".into(), 0x15013346 => "udf".into(), 0x00011954 => "ufs".into(), 0x54190100 => "ufs".into(), 0x9FA2 => "usbdevfs".into(), 0x01021997 => "v9fs".into(), 0xBACBACBC => "vmhgfs".into(), 0xA501FCF5 => "vxfs".into(), 0x565A4653 => "vzfs".into(), 0x53464846 => "wslfs".into(), 0xABBA1974 => "xenfs".into(), 0x012FF7B4 => "xenix".into(), 0x58465342 => "xfs".into(), 0x012FD16D => "xia".into(), 0x2FC12FC1 => "zfs".into(), other => format!("UNKNOWN ({:#x})", other).into(), } }
pub use super::uucore::libc; extern crate time; use self::time::Timespec; pub use libc::{c_int, mode_t, strerror, S_IFBLK, S_IFCHR, S_IFDIR, S_IFIFO, S_IFLNK, S_IFMT, S_IFREG, S_IFSOCK, S_IRGRP, S_IROTH, S_IRUSR, S_ISGID, S_ISUID, S_ISVTX, S_IWGRP, S_IWOTH, S_IWUSR, S_IXGRP, S_IXOTH, S_IXUSR}; pub trait BirthTime { fn pretty_birth(&self) -> String; fn birth(&self) -> String; } use std::fs::Metadata; impl BirthTime for Metadata { #[cfg(feature = "nightly")] fn pretty_birth(&self) -> String { self.created() .map(|t| t.elapsed().unwrap()) .map(|e| pretty_time(e.as_secs() as i64, e.subsec_nanos() as i64)) .unwrap_or("-".to_owned()) } #[cfg(not(feature = "nightly"))] fn pretty_birth(&self) -> String { "-".to_owned() } #[cfg(feature = "nightly")] fn birth(&self) -> String { self.created() .map(|t| t.elapsed().unwrap()) .map(|e| format!("{}", e.as_secs())) .unwrap_or("0".to_owned()) } #[cfg(not(feature = "nightly"))] fn birth(&self) -> String { "0".to_owned() } } #[macro_export] macro_rules! has { ($mode:expr, $perm:expr) => ( $mode & $perm != 0 ) } pub fn pretty_time(sec: i64, nsec: i64) -> String { let tm = time::at(Timespec::new(sec, nsec as i32)); let res = time::strftime("%Y-%m-%d %H:%M:%S.%f %z", &tm).unwrap(); if res.ends_with(" -0000") { res.replace(" -0000", " +0000") } else { res } } pub fn pretty_filetype<'a>(mode: mode_t, size: u64) -> &'a str { match mode & S_IFMT { S_IFREG => { if size != 0 { "regular file" } else { "regular empty file" } } S_IFDIR => "directory", S_IFLNK => "symbolic link", S_IFCHR => "character special file", S_IFBLK => "block special file", S_IFIFO => "fifo", S_IFSOCK => "socket", _ => "weird file", } } pub fn pretty_access(mode: mode_t) -> String { let mut result = String::with_capacity(10); result.push(match mode & S_IFMT { S_IFDIR => 'd', S_IFCHR => 'c', S_IFBLK => 'b', S_IFREG => '-', S_IFIFO => 'p', S_IFLNK => 'l', S_IFSOCK => 's', _ => '?', }); result.push(if has!(mode, S_IRUSR) { 'r' } else { '-' }); result.push(if has!(mode, 
S_IWUSR) { 'w' } else { '-' }); result.push(if has!(mode, S_ISUID as mode_t) { if has!(mode, S_IXUSR) { 's' } else { 'S' } } else if has!(mode, S_IXUSR) { 'x' } else { '-' }); result.push(if has!(mode, S_IRGRP) { 'r' } else { '-' }); result.push(if has!(mode, S_IWGRP) { 'w' } else { '-' }); result.push(if has!(mode, S_ISGID as mode_t) { if has!(mode, S_IXGRP) { 's' } else { 'S' } } else if has!(mode, S_IXGRP) { 'x' } else { '-' }); result.push(if has!(mode, S_IROTH) { 'r' } else { '-' }); result.push(if has!(mode, S_IWOTH) { 'w' } else { '-' }); result.push(if has!(mode, S_ISVTX as mode_t) { if has!(mode, S_IXOTH) { 't' } els
use std::mem::{self, transmute}; use std::path::Path; use std::borrow::Cow; use std::ffi::CString; use std::convert::{AsRef, From}; use std::error::Error; use std::io::Error as IOError; #[cfg(any(target_os = "linux", target_os = "macos", target_os = "android"))] use libc::statfs as Sstatfs; #[cfg(any(target_os = "linux", target_os = "macos", target_os = "android"))] use libc::statfs as statfs_fn; pub trait FsMeta { fn fs_type(&self) -> i64; fn iosize(&self) -> i64; fn blksize(&self) -> i64; fn total_blocks(&self) -> u64; fn free_blocks(&self) -> u64; fn avail_blocks(&self) -> u64; fn total_fnodes(&self) -> u64; fn free_fnodes(&self) -> u64; fn fsid(&self) -> u64; fn namelen(&self) -> i64; } impl FsMeta for Sstatfs { fn blksize(&self) -> i64 { self.f_bsize as i64 } fn total_blocks(&self) -> u64 { self.f_blocks as u64 } fn free_blocks(&self) -> u64 { self.f_bfree as u64 } fn avail_blocks(&self) -> u64 { self.f_bavail as u64 } fn total_fnodes(&self) -> u64 { self.f_files as u64 } fn free_fnodes(&self) -> u64 { self.f_ffree as u64 } fn fs_type(&self) -> i64 { self.f_type as i64 } #[cfg(target_os = "linux")] fn iosize(&self) -> i64 { self.f_frsize as i64 } #[cfg(target_os = "macos")] fn iosize(&self) -> i64 { self.f_iosize as i64 } #[cfg(not(any(target_os = "macos", target_os = "linux")))] fn iosize(&self) -> i64 { 0 } #[cfg(any(target_os = "macos", target_os = "linux"))] fn fsid(&self) -> u64 { let f_fsid: &[u32; 2] = unsafe { transmute(&self.f_fsid) }; (f_fsid[0] as u64) << 32 | f_fsid[1] as u64 } #[cfg(not(any(target_os = "macos", target_os = "linux")))] fn fsid(&self) -> u64 { 0 } #[cfg(target_os = "linux")] fn namelen(&self) -> i64 { self.f_namelen as i64 } #[cfg(target_os = "macos")] fn namelen(&self) -> i64 { 1024 } #[cfg(not(any(target_os = "macos", target_os = "linux")))] fn namelen(&self) -> u64 { 0 } } pub fn statfs<P: AsRef<Path>>(path: P) -> Result<Sstatfs, String> where Vec<u8>: From<P>, { match CString::new(path) { Ok(p) => { let mut buffer: Sstatfs = 
unsafe { mem::zeroed() }; unsafe { match statfs_fn(p.as_ptr(), &mut buffer) { 0 => Ok(buffer), _ => { let errno = IOError::last_os_error().raw_os_error().unwrap_or(0); Err(CString::from_raw(strerror(errno)) .into_string() .unwrap_or("Unknown Error".to_owned())) } } } } Err(e) => Err(e.description().to_owned()), } } pub fn pretty_fstype<'a>(fstype: i64) -> Cow<'a, str> { match fstype { 0x61636673 => "acfs".into(), 0xADF5 => "adfs".into(), 0xADFF => "affs".into(), 0x5346414F => "afs".into(), 0x09041934 => "anon-inode FS".into(), 0x61756673 => "aufs".into(), 0x0187 => "autofs".into(), 0x42465331 => "befs".into(), 0x62646576 => "bdevfs".into(), 0x1BADFACE => "bfs".into(), 0xCAFE4A11 => "bpf_fs".into(), 0x42494E4D => "binfmt_misc".into(), 0x9123683E => "btrfs".into(), 0x73727279 => "btrfs_test".into(), 0x00C36400 => "ceph".into(), 0x0027E0EB => "cgroupfs".into(), 0xFF534D42 => "cifs".into(), 0x73757245 => "coda".into(), 0x012FF7B7 => "coh".into(), 0x62656570 => "configfs".into(), 0x28CD3D45 => "cramfs".into(), 0x453DCD28 => "cramfs-wend".into(), 0x64626720 => "debugfs".into(), 0x1373 => "devfs".into(), 0x1CD1 => "devpts".into(), 0xF15F => "ecryptfs".into(), 0xDE5E81E4 => "efivarfs".into(), 0x00414A53 => "efs".into(), 0x5DF5 => "exofs".into(), 0x137D => "ext".into(), 0xEF53 => "ext2/ext3".into(), 0xEF51 => "ext2".into(), 0xF2F52010 => "f2fs".into(), 0x4006 => "fat".into(), 0x19830326 => "fhgfs".into(), 0x65735546 => "fuseblk".into(), 0x65735543 => "fusectl".into(), 0x0BAD1DEA => "futexfs".into(), 0x01161970 => "gfs/gfs2".into(), 0x47504653 => "gpfs".into(), 0x4244 => "hfs".into(), 0x482B => "hfs+".into(), 0x4858 => "hfsx".into(), 0x00C0FFEE => "hostfs".into(), 0xF995E849 => "hpfs".into(), 0x958458F6 => "hugetlbfs".into(), 0x11307854 => "inodefs".into(), 0x013111A8 => "ibrix".into(), 0x2BAD1DEA => "inotifyfs".into(), 0x9660 => "isofs".into(), 0x4004 => "isofs".into(), 0x4000 => "isofs".into(), 0x07C0 => "jffs".into(), 0x72B6 => "jffs2".into(), 0x3153464A => "jfs".into(), 
0x6B414653 => "k-afs".into(), 0xC97E8168 => "logfs".into(), 0x0BD00BD0 => "lustre".into(), 0x5346314D => "m1fs".into(), 0x137F => "minix".into(), 0x138F => "minix (30 char.)".into(), 0x2468 => "minix v2".into(), 0x2478 => "minix v2 (30 char.)".into(), 0x4D5A => "minix3".into(), 0x19800202 => "mqueue".into(), 0x4D44 => "msdos".into(), 0x564C => "novell".into(), 0x6969 => "nfs".into(), 0x6E667364 => "nfsd".into(), 0x3434 => "nilfs".into(), 0x6E736673 => "nsfs".into(), 0x5346544E => "ntfs".into(), 0x9FA1 => "openprom".into(), 0x7461636F => "ocfs2".into(), 0x794C7630 => "overlayfs".into(), 0xAAD7AAEA => "panfs".into(), 0x50495045 => "pipefs".into(), 0x7C7C6673 => "prl_fs".into(), 0x9FA0 => "proc".into(), 0x6165676C => "pstorefs".into(), 0x002F => "qnx4".into(), 0x68191122 => "qnx6".into(), 0x858458F6 => "ramfs".into(), 0x52654973 => "reiserfs".into(), 0x7275 => "romfs".into(), 0x67596969 => "rpc_pipefs".into(), 0x73636673 => "securityfs".into(), 0xF97CFF8C => "selinux".into(), 0x43415D53 => "smackfs".into(), 0x517B => "smb".into(), 0xFE534D42 => "smb2".into(), 0xBEEFDEAD => "snfs".into(), 0x534F434B => "sockfs".into(), 0x73717368 => "squashfs".into(), 0x62656572 => "sysfs".into(), 0x012FF7B6 => "sysv2".into(), 0x012FF7B5 => "sysv4".into(), 0x01021994 => "tmpfs".into(), 0x74726163 => "tracefs".into(), 0x24051905 => "ubifs".into(), 0x15013346 => "udf".into(), 0x00011954 => "ufs".into(), 0x54190100 => "ufs".into(), 0x9FA2 => "usbdevfs".into(), 0x01021997 => "v9fs".into(), 0xBACBACBC => "vmhgfs".into(), 0xA501FCF5 => "vxfs".into(), 0x565A4653 => "vzfs".into(), 0x53464846 => "wslfs".into(), 0xABBA1974 => "xenfs".into(), 0x012FF7B4 => "xenix".into(), 0x58465342 => "xfs".into(), 0x012FD16D => "xia".into(), 0x2FC12FC1 => "zfs".into(), other => format!("UNKNOWN ({:#x})", other).into(), } }
e { 'T' } } else if has!(mode, S_IXOTH) { 'x' } else { '-' }); result }
function_block-function_prefixed
[ { "content": "pub fn parse_numeric(fperm: u32, mut mode: &str) -> Result<u32, String> {\n\n let (op, pos) = parse_op(mode, Some('='))?;\n\n mode = mode[pos..].trim_left_matches('0');\n\n if mode.len() > 4 {\n\n Err(format!(\"mode is too large ({} > 7777)\", mode))\n\n } else {\n\n matc...
Rust
src/io/ui.rs
casey/paper
2ad01386f7f05c71860127b2367cba4dfc0003ac
mod error; pub use error::{CreateTerminalError, DisplayCmdFailure, UserActionFailure}; use { core::{ cell::{RefCell, RefMut}, convert::{TryFrom, TryInto}, ops::Deref, time::Duration, }, crossterm::{ cursor::{Hide, MoveTo}, event::{self, Event, KeyCode, KeyEvent, KeyModifiers}, execute, style::Print, terminal::{EnterAlternateScreen, LeaveAlternateScreen}, }, error::{DestroyError, InitError, PollFailure, ReachedEnd, ReadFailure, WriteFailure}, fehler::{throw, throws}, log::{trace, warn}, market::{ConsumeError, Consumer, ProduceError, Producer}, parse_display::Display as ParseDisplay, std::io::{self, Stdout, Write}, }; static NO_DURATION: Duration = Duration::from_secs(0); #[throws(PollFailure)] fn is_action_available() -> bool { event::poll(NO_DURATION)? } #[throws(ReadFailure)] fn read_action() -> UserAction { event::read().map(UserAction::from)? } #[derive(Debug, Default)] pub(crate) struct UserActionConsumer; impl UserActionConsumer { pub(crate) fn new() -> Self { Self::default() } } impl Consumer for UserActionConsumer { type Good = UserAction; type Failure = UserActionFailure; #[throws(ConsumeError<Self::Failure>)] fn consume(&self) -> Self::Good { if is_action_available().map_err(|error| ConsumeError::Failure(error.into()))? { read_action().map_err(|error| ConsumeError::Failure(error.into()))? 
} else { throw!(ConsumeError::EmptyStock); } } } #[derive(Debug, Default)] pub(crate) struct Terminal { presenter: Presenter, } impl Terminal { #[throws(CreateTerminalError)] pub(crate) fn new() -> Self { let terminal = Self::default(); terminal.presenter.init()?; terminal } } impl Drop for Terminal { fn drop(&mut self) { if let Err(error) = self.presenter.destroy() { warn!("Error while destroying user interface: {}", error); } } } impl Producer for Terminal { type Good = DisplayCmd; type Failure = DisplayCmdFailure; #[throws(ProduceError<Self::Failure>)] fn produce(&self, good: Self::Good) { match good { DisplayCmd::Rows { rows } => { let mut row = RowId(0); for text in rows { self.presenter .single_line( row.try_into() .map_err(|error: ReachedEnd| ProduceError::Failure(error.into()))?, text.to_string(), ) .map_err(|failure| ProduceError::Failure(failure.into()))?; row.step_forward() .map_err(|failure| ProduceError::Failure(failure.into()))?; } } DisplayCmd::Header { header } => { self.presenter .single_line(Unit(0), header) .map_err(|failure| ProduceError::Failure(failure.into()))?; } } } } #[derive(Debug)] struct Presenter { out: RefCell<Stdout>, } impl Presenter { fn out_mut(&self) -> RefMut<'_, Stdout> { self.out.borrow_mut() } #[throws(InitError)] fn init(&self) { execute!(self.out_mut(), EnterAlternateScreen, Hide)?; } #[throws(DestroyError)] fn destroy(&self) { execute!(self.out_mut(), LeaveAlternateScreen)?; } #[throws(WriteFailure)] fn single_line(&self, row: Unit, text: String) { trace!("Writing to {}: `{}`", row, text); execute!(self.out_mut(), MoveTo(0, *row), Print(text))?; } } impl Default for Presenter { fn default() -> Self { Self { out: RefCell::new(io::stdout()), } } } #[derive(Clone, Copy, Debug)] pub enum UserAction { Resize { dimensions: Dimensions, }, Mouse, Key { code: KeyCode, modifiers: KeyModifiers, }, } impl From<Event> for UserAction { #[inline] fn from(value: Event) -> Self { match value { Event::Resize(columns, rows) => Self::Resize { 
dimensions: Dimensions { height: rows.saturating_sub(1).into(), width: columns.into(), }, }, Event::Mouse(..) => Self::Mouse, Event::Key(key) => key.into(), } } } impl From<KeyEvent> for UserAction { #[inline] fn from(value: KeyEvent) -> Self { Self::Key { code: value.code, modifiers: value.modifiers, } } } #[derive(Debug, ParseDisplay)] #[display("DisplayCmd")] pub(crate) enum DisplayCmd { Rows { rows: Vec<String>, }, Header { header: String, }, } #[derive(Clone, Copy, Debug, Default, Eq, ParseDisplay, PartialEq)] #[display("{height}h x {width}w")] pub struct Dimensions { pub(crate) height: Unit, pub(crate) width: Unit, } #[derive(Clone, Copy, Debug, Default, Eq, ParseDisplay, PartialEq)] #[display("{0}")] pub struct Unit(u16); impl Deref for Unit { type Target = u16; #[inline] fn deref(&self) -> &Self::Target { &self.0 } } impl From<u16> for Unit { #[inline] fn from(value: u16) -> Self { Self(value) } } impl TryFrom<RowId> for Unit { type Error = ReachedEnd; #[throws(Self::Error)] #[inline] fn try_from(value: RowId) -> Self { value.0.checked_add(1).ok_or(ReachedEnd)?.into() } } #[derive(Clone, Copy, Debug, ParseDisplay)] #[display("{0}")] pub(crate) struct RowId(u16); impl RowId { #[throws(ReachedEnd)] fn step_forward(&mut self) { self.0 = self.0.checked_add(1).ok_or(ReachedEnd)?; } } impl Deref for RowId { type Target = u16; fn deref(&self) -> &Self::Target { &self.0 } }
mod error; pub use error::{CreateTerminalError, DisplayCmdFailure, UserActionFailure}; use { core::{ cell::{RefCell, RefMut}, convert::{TryFrom, TryInto}, ops::Deref, time::Duration, }, crossterm::{ cursor::{Hide, MoveTo}, event::{self, Event, KeyCode, KeyEvent, KeyModifiers}, execute, style::Print, terminal::{EnterAlternateScreen, LeaveAlternateScreen}, }, error::{DestroyError, InitError, PollFailure, ReachedEnd, ReadFailure, WriteFailure}, fehler::{throw, throws}, log::{trace, warn}, market::{ConsumeError, Consumer, ProduceError, Producer}, parse_display::Display as ParseDisplay, std::io::{self, Stdout, Write}, }; static NO_DURATION: Duration = Duration::from_secs(0); #[throws(PollFailure)] fn is_action_available() -> bool { event::poll(NO_DURATION)? } #[throws(ReadFailure)] fn read_action() -> UserAction { event::read().map(UserAction::from)? } #[derive(Debug, Default)] pub(crate) struct UserActionConsumer; impl UserActionConsumer { pub(crate) fn new() -> Self { Self::default() } } impl Consumer for UserActionConsumer { type Good = UserAction; type Failure = UserActionFailure; #[throws(ConsumeError<Self::Failure>)] fn consume(&self) -> Self::Good { if is_action_available().map_err(|error| ConsumeError::Failure(error.into()))? { read_action().map_err(|error| ConsumeError::Failure(error.into()))? 
} else { throw!(ConsumeError::EmptyStock); } } } #[derive(Debug, Default)] pub(crate) struct Terminal { presenter: Presenter, } impl Terminal { #[throws(CreateTerminalError)] pub(crate) fn new() -> Self { let terminal = Self::default(); terminal.presenter.init()?; terminal } } impl Drop for Terminal { fn drop(&mut self) { if let Err(error) = self.presenter.destroy() { warn!("Error while destroying user interface: {}", error); } } } impl Producer for Terminal { type Good = DisplayCmd; type Failure = DisplayCmdFailure; #[throws(ProduceError<Self::Failure>)] fn produce(&self, good: Self::Good) { match good { DisplayCmd::Rows { rows } => { let mut row = RowId(0); for text in rows { self.presenter .single_line( row.try_into() .map_err(|error: ReachedEnd| ProduceError::Failure(error.into()))?, text.to_string(), ) .map_err(|failure| ProduceError::Failure(failure.into()))?; row.step_forward() .map_err(|failure| ProduceError::Failure(failure.into()))?; } } DisplayCmd::Header { header } => { self.presenter .single_line(Unit(0), header) .map_err(|failure| ProduceError::Failure(failure.into()))?; } } } } #[derive(Debug)] struct Presenter { out: RefCell<Stdout>, } impl Presenter { fn out_mut(&self) -> RefMut<'_, Stdout> { self.out.borrow_mut() } #[throws(InitError)] fn init(&self) { execute!(self.out_mut(), EnterAlternateScreen, Hide)?; } #[throws(DestroyError)] fn destroy(&self) { execute!(self.out_mut(), LeaveAlternateScreen)?; } #[throws(WriteFailure)] fn single_line(&self, row: Unit, text: String) { trace!("Writing to {}: `{}`", row, text); execute!(self.out_mut(), MoveTo(0, *row), Print(text))?; } } impl Default for Presenter { fn default() -> Self { Self { out: RefCell::new(io::stdout()), } } } #[derive(Clone, Copy, Debug)] pub enum UserAction { Resize { dimensions: Dimensions, }, Mouse, Key { code: KeyCode, modifiers: KeyModifiers, }, } impl From<Event> for UserAction { #[inline]
} impl From<KeyEvent> for UserAction { #[inline] fn from(value: KeyEvent) -> Self { Self::Key { code: value.code, modifiers: value.modifiers, } } } #[derive(Debug, ParseDisplay)] #[display("DisplayCmd")] pub(crate) enum DisplayCmd { Rows { rows: Vec<String>, }, Header { header: String, }, } #[derive(Clone, Copy, Debug, Default, Eq, ParseDisplay, PartialEq)] #[display("{height}h x {width}w")] pub struct Dimensions { pub(crate) height: Unit, pub(crate) width: Unit, } #[derive(Clone, Copy, Debug, Default, Eq, ParseDisplay, PartialEq)] #[display("{0}")] pub struct Unit(u16); impl Deref for Unit { type Target = u16; #[inline] fn deref(&self) -> &Self::Target { &self.0 } } impl From<u16> for Unit { #[inline] fn from(value: u16) -> Self { Self(value) } } impl TryFrom<RowId> for Unit { type Error = ReachedEnd; #[throws(Self::Error)] #[inline] fn try_from(value: RowId) -> Self { value.0.checked_add(1).ok_or(ReachedEnd)?.into() } } #[derive(Clone, Copy, Debug, ParseDisplay)] #[display("{0}")] pub(crate) struct RowId(u16); impl RowId { #[throws(ReachedEnd)] fn step_forward(&mut self) { self.0 = self.0.checked_add(1).ok_or(ReachedEnd)?; } } impl Deref for RowId { type Target = u16; fn deref(&self) -> &Self::Target { &self.0 } }
fn from(value: Event) -> Self { match value { Event::Resize(columns, rows) => Self::Resize { dimensions: Dimensions { height: rows.saturating_sub(1).into(), width: columns.into(), }, }, Event::Mouse(..) => Self::Mouse, Event::Key(key) => key.into(), } }
function_block-full_function
[ { "content": "#[throws(Failure)]\n\nfn main() {\n\n // Forces compiler to rebuild when Cargo.toml file is changed, needed for app_from_crate.\n\n let _ = include_str!(\"../Cargo.toml\");\n\n\n\n Paper::new(\n\n &(&app_from_crate!()\n\n .arg(\n\n Arg::with_name(\"log\")\...
Rust
common/functions/src/aggregates/aggregate_stddev_pop.rs
mrhamburg/databend
9e4c5ae43de9a77d47cd39cc98ef0aa7a5e29337
use std::alloc::Layout; use std::fmt; use std::marker::PhantomData; use std::sync::Arc; use common_datavalues::prelude::*; use common_datavalues::with_match_primitive_type_id; use common_exception::ErrorCode; use common_exception::Result; use common_io::prelude::*; use num::cast::AsPrimitive; use serde::Deserialize; use serde::Serialize; use super::StateAddr; use crate::aggregates::aggregate_function_factory::AggregateFunctionDescription; use crate::aggregates::aggregator_common::assert_unary_arguments; use crate::aggregates::AggregateFunction; use crate::aggregates::AggregateFunctionRef; #[derive(Serialize, Deserialize)] struct AggregateStddevPopState { pub sum: f64, pub count: u64, pub variance: f64, } impl AggregateStddevPopState { #[inline(always)] fn add(&mut self, value: f64) { self.sum += value; self.count += 1; if self.count > 1 { let t = self.count as f64 * value - self.sum; self.variance += (t * t) / (self.count * (self.count - 1)) as f64; } } #[inline(always)] fn merge(&mut self, other: &Self) { if other.count == 0 { return; } if self.count == 0 { self.count = other.count; self.sum = other.sum; self.variance = other.variance; return; } let t = (other.count as f64 / self.count as f64) * self.sum - other.sum; self.variance += other.variance + ((self.count as f64 / other.count as f64) / (self.count as f64 + other.count as f64)) * t * t; self.count += other.count; self.sum += other.sum; } } #[derive(Clone)] pub struct AggregateStddevPopFunction<T> { display_name: String, _arguments: Vec<DataField>, t: PhantomData<T>, } impl<T> AggregateFunction for AggregateStddevPopFunction<T> where T: PrimitiveType + AsPrimitive<f64> { fn name(&self) -> &str { "AggregateStddevPopFunction" } fn return_type(&self) -> Result<DataTypePtr> { Ok(f64::to_data_type()) } fn init_state(&self, place: StateAddr) { place.write(|| AggregateStddevPopState { sum: 0.0, count: 0, variance: 0.0, }); } fn state_layout(&self) -> Layout { Layout::new::<AggregateStddevPopState>() } fn 
accumulate( &self, place: StateAddr, columns: &[ColumnRef], validity: Option<&common_arrow::arrow::bitmap::Bitmap>, _input_rows: usize, ) -> Result<()> { let state = place.get::<AggregateStddevPopState>(); let column: &PrimitiveColumn<T> = unsafe { Series::static_cast(&columns[0]) }; match validity { Some(bitmap) => { for (value, is_valid) in column.iter().zip(bitmap.iter()) { if is_valid { state.add(value.as_()); } } } None => { for value in column.iter() { state.add(value.as_()); } } } Ok(()) } fn accumulate_keys( &self, places: &[StateAddr], offset: usize, columns: &[ColumnRef], _input_rows: usize, ) -> Result<()> { let column: &PrimitiveColumn<T> = unsafe { Series::static_cast(&columns[0]) }; column.iter().zip(places.iter()).for_each(|(value, place)| { let place = place.next(offset); let state = place.get::<AggregateStddevPopState>(); let v: f64 = value.as_(); state.add(v); }); Ok(()) } fn accumulate_row(&self, place: StateAddr, columns: &[ColumnRef], row: usize) -> Result<()> { let column: &PrimitiveColumn<T> = unsafe { Series::static_cast(&columns[0]) }; let state = place.get::<AggregateStddevPopState>(); let v: f64 = unsafe { column.value_unchecked(row).as_() }; state.add(v); Ok(()) } fn serialize(&self, place: StateAddr, writer: &mut BytesMut) -> Result<()> { let state = place.get::<AggregateStddevPopState>(); serialize_into_buf(writer, state) } fn deserialize(&self, place: StateAddr, reader: &mut &[u8]) -> Result<()> { let state = place.get::<AggregateStddevPopState>(); *state = deserialize_from_slice(reader)?; Ok(()) } fn merge(&self, place: StateAddr, rhs: StateAddr) -> Result<()> { let state = place.get::<AggregateStddevPopState>(); let rhs = rhs.get::<AggregateStddevPopState>(); state.merge(rhs); Ok(()) } #[allow(unused_mut)] fn merge_result(&self, place: StateAddr, column: &mut dyn MutableColumn) -> Result<()> { let state = place.get::<AggregateStddevPopState>(); let column: &mut MutablePrimitiveColumn<f64> = Series::check_get_mutable_column(column)?; 
let variance = state.variance / state.count as f64; column.push(variance.sqrt()); Ok(()) } } impl<T> fmt::Display for AggregateStddevPopFunction<T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.display_name) } } impl<T> AggregateStddevPopFunction<T> where T: PrimitiveType + AsPrimitive<f64> { pub fn try_create( display_name: &str, arguments: Vec<DataField>, ) -> Result<AggregateFunctionRef> { Ok(Arc::new(Self { display_name: display_name.to_string(), _arguments: arguments, t: PhantomData, })) } } pub fn try_create_aggregate_stddev_pop_function( display_name: &str, _params: Vec<DataValue>, arguments: Vec<DataField>, ) -> Result<Arc<dyn AggregateFunction>> { assert_unary_arguments(display_name, arguments.len())?; let data_type = arguments[0].data_type(); with_match_primitive_type_id!(data_type.data_type_id(), |$T| { AggregateStddevPopFunction::<$T>::try_create(display_name, arguments) }, { Err(ErrorCode::BadDataValueType(format!( "AggregateStddevPopFunction does not support type '{:?}'", data_type ))) }) } pub fn aggregate_stddev_pop_function_desc() -> AggregateFunctionDescription { AggregateFunctionDescription::creator(Box::new(try_create_aggregate_stddev_pop_function)) }
use std::alloc::Layout; use std::fmt; use std::marker::PhantomData; use std::sync::Arc; use common_datavalues::prelude::*; use common_datavalues::with_match_primitive_type_id; use common_exception::ErrorCode; use common_exception::Result; use common_io::prelude::*; use num::cast::AsPrimitive; use serde::Deserialize; use serde::Serialize; use super::StateAddr; use crate::aggregates::aggregate_function_factory::AggregateFunctionDescription; use crate::aggregates::aggregator_common::assert_unary_arguments; use crate::aggregates::AggregateFunction; use crate::aggregates::AggregateFunctionRef; #[derive(Serialize, Deserialize)] struct AggregateStddevPopState { pub sum: f64, pub count: u64, pub variance: f64, } impl AggregateStddevPopState { #[inline(always)] fn add(&mut self, value: f64) { self.sum += value; self.count += 1; if self.count > 1 { let t = self.count as f64 * value - self.sum; self.variance += (t * t) / (self.count * (self.count - 1)) as f64; } } #[inline(always)] fn merge(&mut self, other: &Self) { if other.count == 0 { return; } if self.count == 0 { self.count = other.count; self.sum = other.sum; self.variance = other.variance; return; } let t = (other.count as f64 / self.count as f64) * self.sum - other.sum; self.variance += other.variance + ((self.count as f64 / other.count as f64) / (self.count as f64 + other.count as f64)) * t * t; self.count += other.count; self.sum += other.sum; } } #[derive(Clone)] pub struct AggregateStddevPopFunction<T> { display_name: String, _arguments: Vec<DataField>, t: PhantomData<T>, } impl<T> AggregateFunction for AggregateStddevPopFunction<T> where T: PrimitiveType + AsPrimitive<f64> { fn name(&self) -> &str { "AggregateStddevPopFunction" } fn return_type(&self) -> Result<DataTypePtr> { Ok(f64::to_data_type()) } fn init_state(&self, place: StateAddr) { place.write(|| AggregateStddevPopState { sum: 0.0, count: 0, variance: 0.0, }); } fn state_layout(&self) -> Layout { Layout::new::<AggregateStddevPopState>() } fn 
accumulate( &self, place: StateAddr, columns: &[ColumnRef], validity: Option<&common_arrow::arrow::bitmap::Bitmap>, _input_rows: usize, ) -> Result<()> { let state = place.get::<AggregateStddevPopState>(); let column: &PrimitiveColumn<T> = unsafe { Series::static_cast(&columns[0]) }; match validity { Some(bitmap) => { for (value, is_valid) in column.iter().zip(bitmap.iter()) { if is_valid { state.add(value.as_()); } } } None => { for value in column.iter() { state.add(value.as_()); } } } Ok(()) } fn accumulate_keys( &self, places: &[StateAddr], offset: usize, columns: &[ColumnRef], _input_rows: usize, ) -> Result<()> { let column: &PrimitiveColumn<T> = unsafe { Series::static_cast(&columns[0]) }; column.iter().zip(places.iter()).for_each(|(value, place)| { let place = place.next(offset); let state = place.get::<AggregateStddevPopState>(); let v: f64 = value.as_(); state.add(v); }); Ok(()) }
fn serialize(&self, place: StateAddr, writer: &mut BytesMut) -> Result<()> { let state = place.get::<AggregateStddevPopState>(); serialize_into_buf(writer, state) } fn deserialize(&self, place: StateAddr, reader: &mut &[u8]) -> Result<()> { let state = place.get::<AggregateStddevPopState>(); *state = deserialize_from_slice(reader)?; Ok(()) } fn merge(&self, place: StateAddr, rhs: StateAddr) -> Result<()> { let state = place.get::<AggregateStddevPopState>(); let rhs = rhs.get::<AggregateStddevPopState>(); state.merge(rhs); Ok(()) } #[allow(unused_mut)] fn merge_result(&self, place: StateAddr, column: &mut dyn MutableColumn) -> Result<()> { let state = place.get::<AggregateStddevPopState>(); let column: &mut MutablePrimitiveColumn<f64> = Series::check_get_mutable_column(column)?; let variance = state.variance / state.count as f64; column.push(variance.sqrt()); Ok(()) } } impl<T> fmt::Display for AggregateStddevPopFunction<T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.display_name) } } impl<T> AggregateStddevPopFunction<T> where T: PrimitiveType + AsPrimitive<f64> { pub fn try_create( display_name: &str, arguments: Vec<DataField>, ) -> Result<AggregateFunctionRef> { Ok(Arc::new(Self { display_name: display_name.to_string(), _arguments: arguments, t: PhantomData, })) } } pub fn try_create_aggregate_stddev_pop_function( display_name: &str, _params: Vec<DataValue>, arguments: Vec<DataField>, ) -> Result<Arc<dyn AggregateFunction>> { assert_unary_arguments(display_name, arguments.len())?; let data_type = arguments[0].data_type(); with_match_primitive_type_id!(data_type.data_type_id(), |$T| { AggregateStddevPopFunction::<$T>::try_create(display_name, arguments) }, { Err(ErrorCode::BadDataValueType(format!( "AggregateStddevPopFunction does not support type '{:?}'", data_type ))) }) } pub fn aggregate_stddev_pop_function_desc() -> AggregateFunctionDescription { 
AggregateFunctionDescription::creator(Box::new(try_create_aggregate_stddev_pop_function)) }
fn accumulate_row(&self, place: StateAddr, columns: &[ColumnRef], row: usize) -> Result<()> { let column: &PrimitiveColumn<T> = unsafe { Series::static_cast(&columns[0]) }; let state = place.get::<AggregateStddevPopState>(); let v: f64 = unsafe { column.value_unchecked(row).as_() }; state.add(v); Ok(()) }
function_block-full_function
[]
Rust
src/oauth/google/mod.rs
saturn-xiv/peony
2161b89624b12bcea77c639a18569f38b8736187
pub mod openid; pub mod photo; pub mod youtube; use std::collections::HashMap; use std::fmt; use std::str::FromStr; use actix_web::http::StatusCode; use rand::Rng; use serde::de::DeserializeOwned; use url::{form_urlencoded, Url}; use super::super::{ errors::{Error, Result}, request::https_client, }; #[derive(Serialize, Deserialize, Debug, Clone)] pub struct ClientSecret { pub web: Web, } impl ClientSecret { pub const KEY: &'static str = "google.client-secret"; } #[derive(Serialize, Deserialize, Debug, Clone)] pub struct Web { pub client_id: String, pub project_id: String, pub auth_uri: String, pub token_uri: String, pub auth_provider_x509_cert_url: String, pub client_secret: String, pub redirect_uris: Vec<String>, pub javascript_origins: Vec<String>, } pub enum Scope { YoutubeReadonly, PhotosLibraryReadonly, Profile, Openid, Email, } impl fmt::Display for Scope { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "{}", match *self { Scope::YoutubeReadonly => "https://www.googleapis.com/auth/youtube.readonly", Scope::PhotosLibraryReadonly => { "https://www.googleapis.com/auth/photoslibrary.readonly" } Scope::Profile => "profile", Scope::Openid => "openid", Scope::Email => "email", } ) } } pub enum AccessType { Online, Offline, } impl Default for AccessType { fn default() -> Self { AccessType::Online } } impl fmt::Display for AccessType { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "{}", match *self { AccessType::Online => "online", AccessType::Offline => "offline", } ) } } impl Web { pub fn oauth2(&self, scope: Vec<Scope>, redirect_uri: &str) -> (String, String, String) { let mut rng = rand::thread_rng(); let nonce = rng.gen::<u32>().to_string(); let state = rng.gen::<u32>().to_string(); let url = form_urlencoded::Serializer::new( "https://accounts.google.com/o/oauth2/v2/auth?".to_string(), ) .append_pair("client_id", &self.client_id) .append_pair("redirect_uri", &redirect_uri) .append_pair( "scope", &scope .iter() .map(|x| 
x.to_string()) .collect::<Vec<_>>() .join(" "), ) .append_pair("access_type", &AccessType::default().to_string()) .append_pair("state", &state) .append_pair("include_granted_scopes", &true.to_string()) .append_pair("response_type", Code::CODE) .append_pair("nonce", &nonce) .finish(); (url, state, nonce) } pub async fn get<Q: DeserializeOwned>(&self, action: &str, token: &str) -> Result<Q> { let mut res = https_client()? .bearer_auth(token) .finish() .get(action) .send() .await?; if res.status().is_success() { return Ok(res.json().await?); } Err(Error::Http(StatusCode::BAD_REQUEST, None)) } } pub struct Code(pub String); impl Code { const CODE: &'static str = "code"; const ERROR: &'static str = "error"; } impl fmt::Display for Code { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.0) } } impl FromStr for Code { type Err = Error; fn from_str(s: &str) -> Result<Self> { let it = Url::parse(s)?; let query: HashMap<_, _> = it.query_pairs().into_owned().collect(); if let Some(v) = query.get(Self::CODE) { return Ok(Self(v.to_string())); } if let Some(v) = query.get(Self::ERROR) { return Err(Error::Http(StatusCode::BAD_REQUEST, Some(v.clone()))); } Err(Error::Http(StatusCode::BAD_REQUEST, None)) } }
pub mod openid; pub mod photo; pub mod youtube; use std::collections::HashMap; use std::fmt; use std::str::FromStr; use actix_web::http::StatusCode; use rand::Rng; use serde::de::DeserializeOwned; use url::{form_urlencoded, Url}; use super::super::{ errors::{Error, Result}, request::https_client, }; #[derive(Serialize, Deserialize, Debug, Clone)] pub struct ClientSecret { pub web: Web, } impl ClientSecret { pub const KEY: &'static str = "google.client-secret"; } #[derive(Serialize, Deserialize, Debug, Clone)] pub struct Web { pub client_id: String, pub project_id: String, pub auth_uri: String, pub token_uri: String, pub auth_provider_x509_cert_url: String, pub client_secret: String, pub redirect_uris: Vec<String>, pub javascript_origins: Vec<String>, } pub enum Scope { YoutubeReadonly, PhotosLibraryReadonly, Profile, Openid, Email, } impl fmt::Display for Scope { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "{}", match *self { Scope::YoutubeReadonly => "https://www.googleapis.com/auth/youtube.readonly", Scope::PhotosLibraryReadonly => { "https://www.googleapis.com/auth/photoslibrary.readonly" } Scope::Profile => "profile", Scope::Openid => "openid", Scope::Email => "email", } ) } } pub enum AccessType { Online, Offline, } impl Default for AccessType { fn default() -> Self { AccessType::Online } } impl fmt::Display for AccessType { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "{}", match *self { AccessType::Online => "online", AccessType::Offline => "offline", } ) } } impl Web {
pub async fn get<Q: DeserializeOwned>(&self, action: &str, token: &str) -> Result<Q> { let mut res = https_client()? .bearer_auth(token) .finish() .get(action) .send() .await?; if res.status().is_success() { return Ok(res.json().await?); } Err(Error::Http(StatusCode::BAD_REQUEST, None)) } } pub struct Code(pub String); impl Code { const CODE: &'static str = "code"; const ERROR: &'static str = "error"; } impl fmt::Display for Code { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.0) } } impl FromStr for Code { type Err = Error; fn from_str(s: &str) -> Result<Self> { let it = Url::parse(s)?; let query: HashMap<_, _> = it.query_pairs().into_owned().collect(); if let Some(v) = query.get(Self::CODE) { return Ok(Self(v.to_string())); } if let Some(v) = query.get(Self::ERROR) { return Err(Error::Http(StatusCode::BAD_REQUEST, Some(v.clone()))); } Err(Error::Http(StatusCode::BAD_REQUEST, None)) } }
pub fn oauth2(&self, scope: Vec<Scope>, redirect_uri: &str) -> (String, String, String) { let mut rng = rand::thread_rng(); let nonce = rng.gen::<u32>().to_string(); let state = rng.gen::<u32>().to_string(); let url = form_urlencoded::Serializer::new( "https://accounts.google.com/o/oauth2/v2/auth?".to_string(), ) .append_pair("client_id", &self.client_id) .append_pair("redirect_uri", &redirect_uri) .append_pair( "scope", &scope .iter() .map(|x| x.to_string()) .collect::<Vec<_>>() .join(" "), ) .append_pair("access_type", &AccessType::default().to_string()) .append_pair("state", &state) .append_pair("include_granted_scopes", &true.to_string()) .append_pair("response_type", Code::CODE) .append_pair("nonce", &nonce) .finish(); (url, state, nonce) }
function_block-full_function
[ { "content": "pub fn hostname() -> Result<String> {\n\n let mut buf = [0u8; 64];\n\n let it = nix::unistd::gethostname(&mut buf)?.to_str()?;\n\n Ok(it.to_string())\n\n}\n\n\n", "file_path": "src/sys/mod.rs", "rank": 0, "score": 276874.06456342764 }, { "content": "// https://en.grava...
Rust
examples/scaling/lib.rs
lykhouzov/rust-wasm-webgl
cfebda351f3c9fa3c3813c317fb40ad59e598a20
extern crate js_sys; extern crate wasm_bindgen; extern crate web_sys; use js_sys::{Float32Array, Uint16Array, WebAssembly}; use wasm_bindgen::prelude::*; use wasm_bindgen::JsCast; use web_sys::WebGlRenderingContext; #[allow(dead_code)] mod utils; use utils::{compile_shader, link_program, set_panic_hook}; #[allow(non_snake_case)] #[wasm_bindgen(start)] pub fn start() -> Result<(), JsValue> { set_panic_hook(); /*============ Creating a canvas =================*/ let document = web_sys::window().unwrap().document().unwrap(); let canvas = document.get_element_by_id("canvas").unwrap(); let canvas: web_sys::HtmlCanvasElement = canvas.dyn_into::<web_sys::HtmlCanvasElement>()?; let gl = canvas .get_context("webgl")? .unwrap() .dyn_into::<WebGlRenderingContext>()?; /*==========Defining and storing the geometry=======*/ let vertices: [f32; 12] = [ -0.5, 0.5, 0.0, -0.5, -0.5, 0.0, 0.5, -0.5, 0.0, 0.5, 0.5, 0.0, ]; let vertices_array = float_32_array!(vertices); let indices: [u16; 6] = [3, 2, 1, 3, 1, 0]; let indices_array = uint_16_array!(indices); let colors: [f32; 12] = [0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0]; let colors_array = float_32_array!(colors); let vertex_buffer = gl.create_buffer().ok_or("failed to create buffer")?; gl.bind_buffer(WebGlRenderingContext::ARRAY_BUFFER, Some(&vertex_buffer)); gl.buffer_data_with_array_buffer_view( WebGlRenderingContext::ARRAY_BUFFER, &vertices_array, WebGlRenderingContext::STATIC_DRAW, ); gl.bind_buffer(WebGlRenderingContext::ARRAY_BUFFER, None); let Index_Buffer = gl.create_buffer().ok_or("failed to create buffer")?; gl.bind_buffer( WebGlRenderingContext::ELEMENT_ARRAY_BUFFER, Some(&Index_Buffer), ); gl.buffer_data_with_array_buffer_view( WebGlRenderingContext::ELEMENT_ARRAY_BUFFER, &indices_array, WebGlRenderingContext::STATIC_DRAW, ); gl.bind_buffer(WebGlRenderingContext::ELEMENT_ARRAY_BUFFER, None); let colors_buffer = gl.create_buffer().ok_or("failed to create buffer")?; 
gl.bind_buffer(WebGlRenderingContext::ARRAY_BUFFER, Some(&colors_buffer)); gl.buffer_data_with_array_buffer_view( WebGlRenderingContext::ARRAY_BUFFER, &colors_array, WebGlRenderingContext::STATIC_DRAW, ); /*=========================Shaders========================*/ let vertCode = r#"attribute vec3 coordinates; attribute vec3 color; varying vec3 vColor; uniform mat4 u_xformMatrix; void main(void) { gl_Position = u_xformMatrix * vec4(coordinates, 1.0); vColor = color; } "#; let vertShader = compile_shader(&gl, WebGlRenderingContext::VERTEX_SHADER, vertCode)?; let fragCode = r#"precision mediump float; varying vec3 vColor; void main(void) { gl_FragColor = vec4(vColor, 1.); }"#; let fragShader = compile_shader(&gl, WebGlRenderingContext::FRAGMENT_SHADER, fragCode)?; let shaderProgram = link_program(&gl, &vertShader, &fragShader)?; gl.use_program(Some(&shaderProgram)); /*======== Associating shaders to buffer objects ========*/ gl.bind_buffer(WebGlRenderingContext::ARRAY_BUFFER, Some(&vertex_buffer)); gl.bind_buffer( WebGlRenderingContext::ELEMENT_ARRAY_BUFFER, Some(&Index_Buffer), ); let coord = gl.get_attrib_location(&shaderProgram, "coordinates") as u32; gl.vertex_attrib_pointer_with_i32(coord, 3, WebGlRenderingContext::FLOAT, false, 0, 0); gl.enable_vertex_attrib_array(coord); gl.bind_buffer(WebGlRenderingContext::ARRAY_BUFFER, Some(&colors_buffer)); let color = gl.get_attrib_location(&shaderProgram, "color") as u32; gl.vertex_attrib_pointer_with_i32(color, 3, WebGlRenderingContext::FLOAT, false, 0, 0); gl.enable_vertex_attrib_array(color); /*===================scaling==========================*/ let Sx = 1.0; let Sy = 1.5;let Sz = 1.0; let xformMatrix = [ Sx, 0.0, 0.0, 0.0, 0.0, Sy, 0.0, 0.0, 0.0, 0.0, Sz, 0.0, 0.0, 0.0, 0.0, 1.0 ]; let u_xformMatrix = gl.get_uniform_location(&shaderProgram, "u_xformMatrix"); gl.uniform_matrix4fv_with_f32_array(u_xformMatrix.as_ref(), false, &xformMatrix); /*============= Drawing the primitive ===============*/ gl.clear_color(0.5, 
0.5, 0.5, 0.9); gl.enable(WebGlRenderingContext::DEPTH_TEST); gl.clear(WebGlRenderingContext::COLOR_BUFFER_BIT); gl.viewport(0, 0, canvas.width() as i32, canvas.height() as i32); gl.draw_elements_with_i32( WebGlRenderingContext::TRIANGLES, indices.len() as i32, WebGlRenderingContext::UNSIGNED_SHORT, 0, ); Ok(()) }
extern crate js_sys; extern crate wasm_bindgen; extern crate web_sys; use js_sys::{Float32Array, Uint16Array, WebAssembly}; use wasm_bindgen::prelude::*; use wasm_bindgen::JsCast; use web_sys::WebGlRenderingContext; #[allow(dead_code)] mod utils; use utils::{compile_shader, link_program, set_panic_hook}; #[allow(non_snake_case)] #[wasm_bindgen(start)] pub fn start() -> Result<(), JsValue> { set_pani
ates; attribute vec3 color; varying vec3 vColor; uniform mat4 u_xformMatrix; void main(void) { gl_Position = u_xformMatrix * vec4(coordinates, 1.0); vColor = color; } "#; let vertShader = compile_shader(&gl, WebGlRenderingContext::VERTEX_SHADER, vertCode)?; let fragCode = r#"precision mediump float; varying vec3 vColor; void main(void) { gl_FragColor = vec4(vColor, 1.); }"#; let fragShader = compile_shader(&gl, WebGlRenderingContext::FRAGMENT_SHADER, fragCode)?; let shaderProgram = link_program(&gl, &vertShader, &fragShader)?; gl.use_program(Some(&shaderProgram)); /*======== Associating shaders to buffer objects ========*/ gl.bind_buffer(WebGlRenderingContext::ARRAY_BUFFER, Some(&vertex_buffer)); gl.bind_buffer( WebGlRenderingContext::ELEMENT_ARRAY_BUFFER, Some(&Index_Buffer), ); let coord = gl.get_attrib_location(&shaderProgram, "coordinates") as u32; gl.vertex_attrib_pointer_with_i32(coord, 3, WebGlRenderingContext::FLOAT, false, 0, 0); gl.enable_vertex_attrib_array(coord); gl.bind_buffer(WebGlRenderingContext::ARRAY_BUFFER, Some(&colors_buffer)); let color = gl.get_attrib_location(&shaderProgram, "color") as u32; gl.vertex_attrib_pointer_with_i32(color, 3, WebGlRenderingContext::FLOAT, false, 0, 0); gl.enable_vertex_attrib_array(color); /*===================scaling==========================*/ let Sx = 1.0; let Sy = 1.5;let Sz = 1.0; let xformMatrix = [ Sx, 0.0, 0.0, 0.0, 0.0, Sy, 0.0, 0.0, 0.0, 0.0, Sz, 0.0, 0.0, 0.0, 0.0, 1.0 ]; let u_xformMatrix = gl.get_uniform_location(&shaderProgram, "u_xformMatrix"); gl.uniform_matrix4fv_with_f32_array(u_xformMatrix.as_ref(), false, &xformMatrix); /*============= Drawing the primitive ===============*/ gl.clear_color(0.5, 0.5, 0.5, 0.9); gl.enable(WebGlRenderingContext::DEPTH_TEST); gl.clear(WebGlRenderingContext::COLOR_BUFFER_BIT); gl.viewport(0, 0, canvas.width() as i32, canvas.height() as i32); gl.draw_elements_with_i32( WebGlRenderingContext::TRIANGLES, indices.len() as i32, WebGlRenderingContext::UNSIGNED_SHORT, 0, ); 
Ok(()) }
c_hook(); /*============ Creating a canvas =================*/ let document = web_sys::window().unwrap().document().unwrap(); let canvas = document.get_element_by_id("canvas").unwrap(); let canvas: web_sys::HtmlCanvasElement = canvas.dyn_into::<web_sys::HtmlCanvasElement>()?; let gl = canvas .get_context("webgl")? .unwrap() .dyn_into::<WebGlRenderingContext>()?; /*==========Defining and storing the geometry=======*/ let vertices: [f32; 12] = [ -0.5, 0.5, 0.0, -0.5, -0.5, 0.0, 0.5, -0.5, 0.0, 0.5, 0.5, 0.0, ]; let vertices_array = float_32_array!(vertices); let indices: [u16; 6] = [3, 2, 1, 3, 1, 0]; let indices_array = uint_16_array!(indices); let colors: [f32; 12] = [0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0]; let colors_array = float_32_array!(colors); let vertex_buffer = gl.create_buffer().ok_or("failed to create buffer")?; gl.bind_buffer(WebGlRenderingContext::ARRAY_BUFFER, Some(&vertex_buffer)); gl.buffer_data_with_array_buffer_view( WebGlRenderingContext::ARRAY_BUFFER, &vertices_array, WebGlRenderingContext::STATIC_DRAW, ); gl.bind_buffer(WebGlRenderingContext::ARRAY_BUFFER, None); let Index_Buffer = gl.create_buffer().ok_or("failed to create buffer")?; gl.bind_buffer( WebGlRenderingContext::ELEMENT_ARRAY_BUFFER, Some(&Index_Buffer), ); gl.buffer_data_with_array_buffer_view( WebGlRenderingContext::ELEMENT_ARRAY_BUFFER, &indices_array, WebGlRenderingContext::STATIC_DRAW, ); gl.bind_buffer(WebGlRenderingContext::ELEMENT_ARRAY_BUFFER, None); let colors_buffer = gl.create_buffer().ok_or("failed to create buffer")?; gl.bind_buffer(WebGlRenderingContext::ARRAY_BUFFER, Some(&colors_buffer)); gl.buffer_data_with_array_buffer_view( WebGlRenderingContext::ARRAY_BUFFER, &colors_array, WebGlRenderingContext::STATIC_DRAW, ); /*=========================Shaders========================*/ let vertCode = r#"attribute vec3 coordin
random
[ { "content": "#[allow(non_snake_case)]\n\n#[wasm_bindgen(start)]\n\npub fn start() -> Result<(), JsValue> {\n\n set_panic_hook();\n\n /*============ Creating a canvas =================*/\n\n let document = web_sys::window().unwrap().document().unwrap();\n\n let canvas = document.get_element_by_id(\"...
Rust
src/utils.rs
nrot/image-png
0069402d348bf3bf7696163b5ec88b85147b35d5
use std::iter::{repeat, StepBy}; use std::ops::Range; #[inline(always)] pub fn unpack_bits<F>(buf: &mut [u8], channels: usize, bit_depth: u8, func: F) where F: Fn(u8, &mut [u8]), { if buf.len() < channels { return; } let bits = buf.len() / channels * bit_depth as usize; let extra_bits = bits % 8; let entries = bits / 8 + match extra_bits { 0 => 0, _ => 1, }; let skip = match extra_bits { 0 => 0, n => (8 - n) / bit_depth as usize, }; let mask = ((1u16 << bit_depth) - 1) as u8; let i = (0..entries) .rev() .flat_map(|idx| (0..8).step_by(bit_depth.into()) .zip(repeat(idx))) .skip(skip); let j = (0..=buf.len() - channels).rev().step_by(channels); for ((shift, i), j) in i.zip(j) { let pixel = (buf[i] & (mask << shift)) >> shift; func(pixel, &mut buf[j..(j + channels)]) } } pub fn expand_trns_line(buf: &mut [u8], trns: &[u8], channels: usize) { if buf.len() < (channels + 1) { return; } let i = (0..=buf.len() / (channels + 1) * channels - channels) .rev() .step_by(channels); let j = (0..=buf.len() - (channels + 1)).rev().step_by(channels + 1); for (i, j) in i.zip(j) { let i_pixel = i; let j_chunk = j; if &buf[i_pixel..i_pixel + channels] == trns { buf[j_chunk + channels] = 0 } else { buf[j_chunk + channels] = 0xFF } for k in (0..channels).rev() { buf[j_chunk + k] = buf[i_pixel + k]; } } } pub fn expand_trns_line16(buf: &mut [u8], trns: &[u8], channels: usize) { let c2 = 2 * channels; if buf.len() < (c2 + 2) { return; } let i = (0..=buf.len() / (c2 + 2) * c2 - c2).rev().step_by(c2); let j = (0..=buf.len() - (c2 + 2)).rev().step_by(c2 + 2); for (i, j) in i.zip(j) { let i_pixel = i; let j_chunk = j; if &buf[i_pixel..i_pixel + c2] == trns { buf[j_chunk + c2] = 0; buf[j_chunk + c2 + 1] = 0 } else { buf[j_chunk + c2] = 0xFF; buf[j_chunk + c2 + 1] = 0xFF } for k in (0..c2).rev() { buf[j_chunk + k] = buf[i_pixel + k]; } } } #[derive(Clone)] pub(crate) struct Adam7Iterator { line: u32, lines: u32, line_width: u32, current_pass: u8, width: u32, height: u32, } impl Adam7Iterator { 
pub fn new(width: u32, height: u32) -> Adam7Iterator { let mut this = Adam7Iterator { line: 0, lines: 0, line_width: 0, current_pass: 1, width, height, }; this.init_pass(); this } fn init_pass(&mut self) { let w = f64::from(self.width); let h = f64::from(self.height); let (line_width, lines) = match self.current_pass { 1 => (w / 8.0, h / 8.0), 2 => ((w - 4.0) / 8.0, h / 8.0), 3 => (w / 4.0, (h - 4.0) / 8.0), 4 => ((w - 2.0) / 4.0, h / 4.0), 5 => (w / 2.0, (h - 2.0) / 4.0), 6 => ((w - 1.0) / 2.0, h / 2.0), 7 => (w, (h - 1.0) / 2.0), _ => unreachable!(), }; self.line_width = line_width.ceil() as u32; self.lines = lines.ceil() as u32; self.line = 0; } pub fn current_pass(&self) -> u8 { self.current_pass } } impl Iterator for Adam7Iterator { type Item = (u8, u32, u32); fn next(&mut self) -> Option<Self::Item> { if self.line < self.lines && self.line_width > 0 { let this_line = self.line; self.line += 1; Some((self.current_pass, this_line, self.line_width)) } else if self.current_pass < 7 { self.current_pass += 1; self.init_pass(); self.next() } else { None } } } fn subbyte_pixels<'a>(scanline: &'a [u8], bits_pp: usize) -> impl Iterator<Item = u8> + 'a { (0..scanline.len() * 8) .step_by(bits_pp) .map(move |bit_idx| { let byte_idx = bit_idx / 8; let rem = 8 - bit_idx % 8 - bits_pp; match bits_pp { 1 => (scanline[byte_idx] >> rem) & 1, 2 => (scanline[byte_idx] >> rem) & 3, 4 => (scanline[byte_idx] >> rem) & 15, _ => unreachable!(), } }) } fn expand_adam7_bits( pass: u8, width: usize, line_no: usize, bits_pp: usize, ) -> StepBy<Range<usize>> { let (line_mul, line_off, samp_mul, samp_off) = match pass { 1 => (8, 0, 8, 0), 2 => (8, 0, 8, 4), 3 => (8, 4, 4, 0), 4 => (4, 0, 4, 2), 5 => (4, 2, 2, 0), 6 => (2, 0, 2, 1), 7 => (2, 1, 1, 0), _ => panic!("Adam7 pass out of range: {}", pass), }; let prog_line = line_mul * line_no + line_off; let line_width = (width * bits_pp + 7) & !7; let line_start = prog_line * line_width; let start = line_start + (samp_off * bits_pp); let stop = 
line_start + (width * bits_pp); (start..stop).step_by(bits_pp * samp_mul) } pub fn expand_pass( img: &mut [u8], width: u32, scanline: &[u8], pass: u8, line_no: u32, bits_pp: u8, ) { let width = width as usize; let line_no = line_no as usize; let bits_pp = bits_pp as usize; if pass == 0 || pass > 7 { return; } let bit_indices = expand_adam7_bits(pass, width, line_no, bits_pp); if bits_pp < 8 { for (pos, px) in bit_indices.zip(subbyte_pixels(scanline, bits_pp)) { let rem = 8 - pos % 8 - bits_pp; img[pos / 8] |= px << rem as u8; } } else { let bytes_pp = bits_pp / 8; for (bitpos, px) in bit_indices.zip(scanline.chunks(bytes_pp)) { for (offset, val) in px.iter().enumerate() { img[bitpos / 8 + offset] = *val; } } } } #[test] fn test_adam7() { /* 1646 7777 5656 7777 */ let it = Adam7Iterator::new(4, 4); let passes: Vec<_> = it.collect(); assert_eq!( &*passes, &[ (1, 0, 1), (4, 0, 1), (5, 0, 2), (6, 0, 2), (6, 1, 2), (7, 0, 4), (7, 1, 4) ] ); } #[test] fn test_subbyte_pixels() { let scanline = &[0b10101010, 0b10101010]; let pixels = subbyte_pixels(scanline, 1).collect::<Vec<_>>(); assert_eq!(pixels.len(), 16); assert_eq!(pixels, [1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0]); } #[test] fn test_expand_adam7_bits() { let width = 32; let bits_pp = 1; let expected = |offset: usize, step: usize, count: usize| { (0..count) .map(move |i| step * i + offset) .collect::<Vec<_>>() }; for line_no in 0..8 { let start = 8 * line_no * width; assert_eq!( expand_adam7_bits(1, width, line_no, bits_pp).collect::<Vec<_>>(), expected(start, 8, 4) ); let start = start + 4; assert_eq!( expand_adam7_bits(2, width, line_no, bits_pp).collect::<Vec<_>>(), expected(start, 8, 4) ); let start = (8 * line_no + 4) as usize * width as usize; assert_eq!( expand_adam7_bits(3, width, line_no, bits_pp).collect::<Vec<_>>(), expected(start, 4, 8) ); } for line_no in 0..16 { let start = 4 * line_no * width + 2; assert_eq!( expand_adam7_bits(4, width, line_no, bits_pp).collect::<Vec<_>>(), expected(start, 4, 
8) ); let start = (4 * line_no + 2) * width; assert_eq!( expand_adam7_bits(5, width, line_no, bits_pp).collect::<Vec<_>>(), expected(start, 2, 16) ) } for line_no in 0..32 { let start = 2 * line_no * width + 1; assert_eq!( expand_adam7_bits(6, width, line_no, bits_pp).collect::<Vec<_>>(), expected(start, 2, 16), "line_no: {}", line_no ); let start = (2 * line_no + 1) * width; assert_eq!( expand_adam7_bits(7, width, line_no, bits_pp).collect::<Vec<_>>(), expected(start, 1, 32) ); } } #[test] fn test_expand_pass_subbyte() { let mut img = [0u8; 8]; let width = 8; let bits_pp = 1; expand_pass(&mut img, width, &[0b10000000], 1, 0, bits_pp); assert_eq!(img, [0b10000000u8, 0, 0, 0, 0, 0, 0, 0]); expand_pass(&mut img, width, &[0b10000000], 2, 0, bits_pp); assert_eq!(img, [0b10001000u8, 0, 0, 0, 0, 0, 0, 0]); expand_pass(&mut img, width, &[0b11000000], 3, 0, bits_pp); assert_eq!(img, [0b10001000u8, 0, 0, 0, 0b10001000, 0, 0, 0]); expand_pass(&mut img, width, &[0b11000000], 4, 0, bits_pp); assert_eq!(img, [0b10101010u8, 0, 0, 0, 0b10001000, 0, 0, 0]); expand_pass(&mut img, width, &[0b11000000], 4, 1, bits_pp); assert_eq!(img, [0b10101010u8, 0, 0, 0, 0b10101010, 0, 0, 0]); expand_pass(&mut img, width, &[0b11110000], 5, 0, bits_pp); assert_eq!(img, [0b10101010u8, 0, 0b10101010, 0, 0b10101010, 0, 0, 0]); expand_pass(&mut img, width, &[0b11110000], 5, 1, bits_pp); assert_eq!( img, [0b10101010u8, 0, 0b10101010, 0, 0b10101010, 0, 0b10101010, 0] ); expand_pass(&mut img, width, &[0b11110000], 6, 0, bits_pp); assert_eq!( img, [0b11111111u8, 0, 0b10101010, 0, 0b10101010, 0, 0b10101010, 0] ); expand_pass(&mut img, width, &[0b11110000], 6, 1, bits_pp); assert_eq!( img, [0b11111111u8, 0, 0b11111111, 0, 0b10101010, 0, 0b10101010, 0] ); expand_pass(&mut img, width, &[0b11110000], 6, 2, bits_pp); assert_eq!( img, [0b11111111u8, 0, 0b11111111, 0, 0b11111111, 0, 0b10101010, 0] ); expand_pass(&mut img, width, &[0b11110000], 6, 3, bits_pp); assert_eq!( [0b11111111u8, 0, 0b11111111, 0, 
0b11111111, 0, 0b11111111, 0], img ); expand_pass(&mut img, width, &[0b11111111], 7, 0, bits_pp); assert_eq!( [ 0b11111111u8, 0b11111111, 0b11111111, 0, 0b11111111, 0, 0b11111111, 0 ], img ); expand_pass(&mut img, width, &[0b11111111], 7, 1, bits_pp); assert_eq!( [ 0b11111111u8, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0, 0b11111111, 0 ], img ); expand_pass(&mut img, width, &[0b11111111], 7, 2, bits_pp); assert_eq!( [ 0b11111111u8, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0 ], img ); expand_pass(&mut img, width, &[0b11111111], 7, 3, bits_pp); assert_eq!( [ 0b11111111u8, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0b11111111 ], img ); }
use std::iter::{repeat, StepBy}; use std::ops::Range; #[inline(always)] pub fn unpack_bits<F>(buf: &mut [u8], channels: usize, bit_depth: u8, func: F) where F: Fn(u8, &mut [u8]), { if buf.len() < channels { return; } let bits = buf.len() / channels * bit_depth as usize; let extra_bits = bits % 8; let entries = bits / 8 + match extra_bits { 0 => 0, _ => 1, }; let skip = match extra_bits { 0 => 0, n => (8 - n) / bit_depth as usize, }; let mask = ((1u16 << bit_depth) - 1) as u8; let i = (0..entries) .rev() .flat_map(|idx| (0..8).step_by(bit_depth.into()) .zip(repeat(idx))) .skip(skip); let j = (0..=buf.len() - channels).rev().step_by(channels); for ((shift, i), j) in i.zip(j) { let pixel = (buf[i] & (mask << shift)) >> shift; func(pixel, &mut buf[j..(j + channels)]) } } pub fn expand_trns_line(buf: &mut [u8], trns: &[u8], channels: usize) { if buf.len() < (channels + 1) { return; } let i = (0..=buf.len() / (channels + 1) * channels - channels) .rev() .step_by(channels); let j = (0..=buf.len() - (channels + 1)).rev().step_by(channels + 1); for (i, j) in i.zip(j) { let i_pixel = i; let j_chunk = j; if &buf[i_pixel..i_pixel + channels] == trns { buf[j_chunk + channels] = 0 } else { buf[j_chunk + channels] = 0xFF } for k in (0..channels).rev() { buf[j_chunk + k] = buf[i_pixel + k]; } } }
#[derive(Clone)] pub(crate) struct Adam7Iterator { line: u32, lines: u32, line_width: u32, current_pass: u8, width: u32, height: u32, } impl Adam7Iterator { pub fn new(width: u32, height: u32) -> Adam7Iterator { let mut this = Adam7Iterator { line: 0, lines: 0, line_width: 0, current_pass: 1, width, height, }; this.init_pass(); this } fn init_pass(&mut self) { let w = f64::from(self.width); let h = f64::from(self.height); let (line_width, lines) = match self.current_pass { 1 => (w / 8.0, h / 8.0), 2 => ((w - 4.0) / 8.0, h / 8.0), 3 => (w / 4.0, (h - 4.0) / 8.0), 4 => ((w - 2.0) / 4.0, h / 4.0), 5 => (w / 2.0, (h - 2.0) / 4.0), 6 => ((w - 1.0) / 2.0, h / 2.0), 7 => (w, (h - 1.0) / 2.0), _ => unreachable!(), }; self.line_width = line_width.ceil() as u32; self.lines = lines.ceil() as u32; self.line = 0; } pub fn current_pass(&self) -> u8 { self.current_pass } } impl Iterator for Adam7Iterator { type Item = (u8, u32, u32); fn next(&mut self) -> Option<Self::Item> { if self.line < self.lines && self.line_width > 0 { let this_line = self.line; self.line += 1; Some((self.current_pass, this_line, self.line_width)) } else if self.current_pass < 7 { self.current_pass += 1; self.init_pass(); self.next() } else { None } } } fn subbyte_pixels<'a>(scanline: &'a [u8], bits_pp: usize) -> impl Iterator<Item = u8> + 'a { (0..scanline.len() * 8) .step_by(bits_pp) .map(move |bit_idx| { let byte_idx = bit_idx / 8; let rem = 8 - bit_idx % 8 - bits_pp; match bits_pp { 1 => (scanline[byte_idx] >> rem) & 1, 2 => (scanline[byte_idx] >> rem) & 3, 4 => (scanline[byte_idx] >> rem) & 15, _ => unreachable!(), } }) } fn expand_adam7_bits( pass: u8, width: usize, line_no: usize, bits_pp: usize, ) -> StepBy<Range<usize>> { let (line_mul, line_off, samp_mul, samp_off) = match pass { 1 => (8, 0, 8, 0), 2 => (8, 0, 8, 4), 3 => (8, 4, 4, 0), 4 => (4, 0, 4, 2), 5 => (4, 2, 2, 0), 6 => (2, 0, 2, 1), 7 => (2, 1, 1, 0), _ => panic!("Adam7 pass out of range: {}", pass), }; let prog_line = line_mul * line_no 
+ line_off; let line_width = (width * bits_pp + 7) & !7; let line_start = prog_line * line_width; let start = line_start + (samp_off * bits_pp); let stop = line_start + (width * bits_pp); (start..stop).step_by(bits_pp * samp_mul) } pub fn expand_pass( img: &mut [u8], width: u32, scanline: &[u8], pass: u8, line_no: u32, bits_pp: u8, ) { let width = width as usize; let line_no = line_no as usize; let bits_pp = bits_pp as usize; if pass == 0 || pass > 7 { return; } let bit_indices = expand_adam7_bits(pass, width, line_no, bits_pp); if bits_pp < 8 { for (pos, px) in bit_indices.zip(subbyte_pixels(scanline, bits_pp)) { let rem = 8 - pos % 8 - bits_pp; img[pos / 8] |= px << rem as u8; } } else { let bytes_pp = bits_pp / 8; for (bitpos, px) in bit_indices.zip(scanline.chunks(bytes_pp)) { for (offset, val) in px.iter().enumerate() { img[bitpos / 8 + offset] = *val; } } } } #[test] fn test_adam7() { /* 1646 7777 5656 7777 */ let it = Adam7Iterator::new(4, 4); let passes: Vec<_> = it.collect(); assert_eq!( &*passes, &[ (1, 0, 1), (4, 0, 1), (5, 0, 2), (6, 0, 2), (6, 1, 2), (7, 0, 4), (7, 1, 4) ] ); } #[test] fn test_subbyte_pixels() { let scanline = &[0b10101010, 0b10101010]; let pixels = subbyte_pixels(scanline, 1).collect::<Vec<_>>(); assert_eq!(pixels.len(), 16); assert_eq!(pixels, [1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0]); } #[test] fn test_expand_adam7_bits() { let width = 32; let bits_pp = 1; let expected = |offset: usize, step: usize, count: usize| { (0..count) .map(move |i| step * i + offset) .collect::<Vec<_>>() }; for line_no in 0..8 { let start = 8 * line_no * width; assert_eq!( expand_adam7_bits(1, width, line_no, bits_pp).collect::<Vec<_>>(), expected(start, 8, 4) ); let start = start + 4; assert_eq!( expand_adam7_bits(2, width, line_no, bits_pp).collect::<Vec<_>>(), expected(start, 8, 4) ); let start = (8 * line_no + 4) as usize * width as usize; assert_eq!( expand_adam7_bits(3, width, line_no, bits_pp).collect::<Vec<_>>(), expected(start, 4, 8) ); } 
for line_no in 0..16 { let start = 4 * line_no * width + 2; assert_eq!( expand_adam7_bits(4, width, line_no, bits_pp).collect::<Vec<_>>(), expected(start, 4, 8) ); let start = (4 * line_no + 2) * width; assert_eq!( expand_adam7_bits(5, width, line_no, bits_pp).collect::<Vec<_>>(), expected(start, 2, 16) ) } for line_no in 0..32 { let start = 2 * line_no * width + 1; assert_eq!( expand_adam7_bits(6, width, line_no, bits_pp).collect::<Vec<_>>(), expected(start, 2, 16), "line_no: {}", line_no ); let start = (2 * line_no + 1) * width; assert_eq!( expand_adam7_bits(7, width, line_no, bits_pp).collect::<Vec<_>>(), expected(start, 1, 32) ); } } #[test] fn test_expand_pass_subbyte() { let mut img = [0u8; 8]; let width = 8; let bits_pp = 1; expand_pass(&mut img, width, &[0b10000000], 1, 0, bits_pp); assert_eq!(img, [0b10000000u8, 0, 0, 0, 0, 0, 0, 0]); expand_pass(&mut img, width, &[0b10000000], 2, 0, bits_pp); assert_eq!(img, [0b10001000u8, 0, 0, 0, 0, 0, 0, 0]); expand_pass(&mut img, width, &[0b11000000], 3, 0, bits_pp); assert_eq!(img, [0b10001000u8, 0, 0, 0, 0b10001000, 0, 0, 0]); expand_pass(&mut img, width, &[0b11000000], 4, 0, bits_pp); assert_eq!(img, [0b10101010u8, 0, 0, 0, 0b10001000, 0, 0, 0]); expand_pass(&mut img, width, &[0b11000000], 4, 1, bits_pp); assert_eq!(img, [0b10101010u8, 0, 0, 0, 0b10101010, 0, 0, 0]); expand_pass(&mut img, width, &[0b11110000], 5, 0, bits_pp); assert_eq!(img, [0b10101010u8, 0, 0b10101010, 0, 0b10101010, 0, 0, 0]); expand_pass(&mut img, width, &[0b11110000], 5, 1, bits_pp); assert_eq!( img, [0b10101010u8, 0, 0b10101010, 0, 0b10101010, 0, 0b10101010, 0] ); expand_pass(&mut img, width, &[0b11110000], 6, 0, bits_pp); assert_eq!( img, [0b11111111u8, 0, 0b10101010, 0, 0b10101010, 0, 0b10101010, 0] ); expand_pass(&mut img, width, &[0b11110000], 6, 1, bits_pp); assert_eq!( img, [0b11111111u8, 0, 0b11111111, 0, 0b10101010, 0, 0b10101010, 0] ); expand_pass(&mut img, width, &[0b11110000], 6, 2, bits_pp); assert_eq!( img, [0b11111111u8, 0, 
0b11111111, 0, 0b11111111, 0, 0b10101010, 0] ); expand_pass(&mut img, width, &[0b11110000], 6, 3, bits_pp); assert_eq!( [0b11111111u8, 0, 0b11111111, 0, 0b11111111, 0, 0b11111111, 0], img ); expand_pass(&mut img, width, &[0b11111111], 7, 0, bits_pp); assert_eq!( [ 0b11111111u8, 0b11111111, 0b11111111, 0, 0b11111111, 0, 0b11111111, 0 ], img ); expand_pass(&mut img, width, &[0b11111111], 7, 1, bits_pp); assert_eq!( [ 0b11111111u8, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0, 0b11111111, 0 ], img ); expand_pass(&mut img, width, &[0b11111111], 7, 2, bits_pp); assert_eq!( [ 0b11111111u8, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0 ], img ); expand_pass(&mut img, width, &[0b11111111], 7, 3, bits_pp); assert_eq!( [ 0b11111111u8, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0b11111111 ], img ); }
pub fn expand_trns_line16(buf: &mut [u8], trns: &[u8], channels: usize) { let c2 = 2 * channels; if buf.len() < (c2 + 2) { return; } let i = (0..=buf.len() / (c2 + 2) * c2 - c2).rev().step_by(c2); let j = (0..=buf.len() - (c2 + 2)).rev().step_by(c2 + 2); for (i, j) in i.zip(j) { let i_pixel = i; let j_chunk = j; if &buf[i_pixel..i_pixel + c2] == trns { buf[j_chunk + c2] = 0; buf[j_chunk + c2 + 1] = 0 } else { buf[j_chunk + c2] = 0xFF; buf[j_chunk + c2 + 1] = 0xFF } for k in (0..c2).rev() { buf[j_chunk + k] = buf[i_pixel + k]; } } }
function_block-full_function
[ { "content": "fn expand_gray_u8(buffer: &mut [u8], info: &Info) {\n\n let rescale = true;\n\n let scaling_factor = if rescale {\n\n (255) / ((1u16 << info.bit_depth as u8) - 1) as u8\n\n } else {\n\n 1\n\n };\n\n if let Some(ref trns) = info.trns {\n\n utils::unpack_bits(buff...