text
stringlengths
8
4.13M
/// `filter_map` keeps only the whitespace-separated words that parse as `f64`.
#[test]
fn iterators_filter_map_test() {
    use std::str::FromStr;

    let mut results = Vec::new();
    let text = "aloha 3.14 notanumber 2t 6.18 15";
    for number in text
        .split_whitespace()
        .filter_map(|w| f64::from_str(w).ok())
    {
        results.push(number);
    }
    assert_eq!(results, [3.14, 6.18, 15.0]);
}

/// `flat_map` flattens the cast lists of the selected shows into one iterator.
// Renamed from `interators_flat_map_test` (typo).
#[test]
fn iterators_flat_map_test() {
    use std::collections::HashMap;

    let mut tv_shows = HashMap::new();
    tv_shows.insert("suits", vec!["Harvy", "Mike", "Donna"]);
    tv_shows.insert("dragon ball", vec!["Goku", "Gohan", "Freezer"]);
    tv_shows.insert("got", vec!["John Snow", "Daenerys", "Tyrion"]);

    let shows = ["suits", "got"];
    let mut results = vec![];
    for &show in shows.iter().flat_map(|sh| &tv_shows[sh]) {
        // Fixed: the original wrote `&results.push(show);`, borrowing the
        // unit value returned by `push` — a pointless no-op borrow.
        results.push(show);
    }
    assert_eq!(
        results,
        ["Harvy", "Mike", "Donna", "John Snow", "Daenerys", "Tyrion"]
    )
}
//! <https://github.com/EOSIO/eosio.cdt/blob/4985359a30da1f883418b7133593f835927b8046/libraries/eosiolib/core/eosio/symbol.hpp#L234-L337> use super::SymbolCode; use crate::bytes::{NumBytes, Read, Write}; use core::fmt; use eosio_numstr::{symbol_from_code, symbol_to_code, symbol_to_precision}; /// Stores information about a symbol, the symbol can be 7 characters long. #[derive( Debug, PartialEq, Eq, Clone, Copy, Default, Read, Write, NumBytes, Hash, PartialOrd, Ord, )] #[eosio(crate_path = "crate::bytes")] pub struct Symbol(u64); impl Symbol { /// Construct a new symbol given a value. #[inline] #[must_use] pub const fn new(value: u64) -> Self { Self(value) } /// Construct a new symbol given a `u8` precision and `SymbolCode`. #[inline] #[must_use] pub const fn new_with_code(precision: u8, code: SymbolCode) -> Self { Self(symbol_from_code(precision, code.as_u64())) } /// This symbol's precision #[inline] #[must_use] pub const fn precision(&self) -> u8 { symbol_to_precision(self.as_u64()) } /// Returns representation of symbol name /// /// # Examples /// /// ``` /// use eosio::{Symbol, s}; /// let symbol: Symbol = s!(4, "EOS").into(); /// let code = symbol.code(); /// assert_eq!(code.to_string(), "EOS"); /// ``` #[inline] #[must_use] pub const fn code(&self) -> SymbolCode { SymbolCode::new(symbol_to_code(self.as_u64())) } /// TODO docs #[inline] #[must_use] pub const fn as_u64(&self) -> u64 { self.0 } /// Is this symbol valid #[inline] #[must_use] pub fn is_valid(&self) -> bool { self.code().is_valid() } } impl fmt::Display for Symbol { #[inline] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { use alloc::string::ToString; write!(f, "{},{}", self.precision(), self.code().to_string()) } } impl From<u64> for Symbol { #[inline] #[must_use] fn from(n: u64) -> Self { Self(n) } } impl From<Symbol> for u64 { #[inline] #[must_use] fn from(n: Symbol) -> Self { n.0 } } impl PartialEq<u64> for Symbol { #[inline] #[must_use] fn eq(&self, other: &u64) -> bool { self.as_u64() 
== *other } } #[cfg(test)] mod tests { use super::*; use alloc::string::ToString; use core::str::FromStr; use proptest::prelude::*; proptest! { #[test] fn from_str_to_code(precision in 1_u8.., code in "[A-Z]{1,7}") { let expected = SymbolCode::from_str(&code).unwrap(); let symbol = Symbol::new_with_code(precision, expected); let result = symbol.code(); prop_assert_eq!(result, expected); } } proptest! { #[test] fn from_str_to_string(precision in 0_u8.., code in "[A-Z]{1,7}") { let expected = format!("{},{}", precision, code); let code = SymbolCode::from_str(&code).unwrap(); let symbol = Symbol::new_with_code(precision, code); let result = symbol.to_string(); prop_assert_eq!(result, expected); } } // #[test] // fn from_int() { // let symbol = Symbol::from(361_956_332_546); // assert_eq!(symbol.precision(), 2); // let name = symbol.code(); // let num: u64 = name.into(); // assert_eq!(num, 1_413_891_924); // } // #[test] // fn is_valid() { // let symbol = Symbol::from(361_956_332_546); // assert_eq!(symbol.is_valid(), true); // } // #[test] // fn to_string() { // fn test(value: u64, expected: &str) { // assert_eq!(Symbol::from(value).to_string(), expected); // } // test(s!(2, "TGFT"), "2,TGFT"); // test(s!(0, "TGFT"), "0,TGFT"); // test(s!(4, "EOS"), "4,EOS"); // } // #[test] // fn code_to_string() { // fn test(value: u64, expected: &str) { // assert_eq!(Symbol::from(value).code().to_string(), expected); // } // test(s!(4, "EOS"), "EOS"); // test(s!(0, "TGFT"), "TGFT"); // test(s!(9, "SYS"), "SYS"); // } }
// Copyright 2022 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use common_exception::Span;
use common_meta_app::principal::FileFormatOptions;
use common_meta_app::principal::PrincipalIdentity;
use common_meta_app::principal::UserIdentity;

use super::walk::walk_cte;
use super::walk::walk_expr;
use super::walk::walk_identifier;
use super::walk::walk_join_condition;
use super::walk::walk_query;
use super::walk::walk_select_target;
use super::walk::walk_set_expr;
use super::walk::walk_statement;
use super::walk::walk_table_reference;
use super::walk_time_travel_point;
use crate::ast::*;

/// Read-only AST visitor with a default implementation for every node kind.
///
/// Expression- and query-shaped visitors recurse into their children via the
/// `walk_*` helpers by default, while most statement visitors default to a
/// no-op — an implementor overrides only the hooks it cares about. The
/// `Sized` bound lets the default method bodies pass `self` to the generic
/// `walk_*` helpers.
pub trait Visitor<'ast>: Sized {
    // ---- expressions ----------------------------------------------------

    fn visit_expr(&mut self, expr: &'ast Expr) {
        walk_expr(self, expr);
    }

    fn visit_identifier(&mut self, _ident: &'ast Identifier) {}

    // Visits the optional catalog qualifier before the database name.
    fn visit_database_ref(
        &mut self,
        catalog: &'ast Option<Identifier>,
        database: &'ast Identifier,
    ) {
        if let Some(catalog) = catalog {
            walk_identifier(self, catalog);
        }
        walk_identifier(self, database);
    }

    // Visits catalog/database qualifiers (when present) before the table name.
    fn visit_table_ref(
        &mut self,
        catalog: &'ast Option<Identifier>,
        database: &'ast Option<Identifier>,
        table: &'ast Identifier,
    ) {
        if let Some(catalog) = catalog {
            walk_identifier(self, catalog);
        }
        if let Some(database) = database {
            walk_identifier(self, database);
        }
        walk_identifier(self, table);
    }

    // Visits database/table qualifiers (when present) before the column name.
    fn visit_column_ref(
        &mut self,
        _span: Span,
        database: &'ast Option<Identifier>,
        table: &'ast Option<Identifier>,
        column: &'ast Identifier,
    ) {
        if let Some(database) = database {
            walk_identifier(self, database);
        }
        if let Some(table) = table {
            walk_identifier(self, table);
        }
        walk_identifier(self, column);
    }

    fn visit_is_null(&mut self, _span: Span, expr: &'ast Expr, _not: bool) {
        walk_expr(self, expr);
    }

    fn visit_is_distinct_from(
        &mut self,
        _span: Span,
        left: &'ast Expr,
        right: &'ast Expr,
        _not: bool,
    ) {
        walk_expr(self, left);
        walk_expr(self, right);
    }

    fn visit_in_list(&mut self, _span: Span, expr: &'ast Expr, list: &'ast [Expr], _not: bool) {
        walk_expr(self, expr);
        for expr in list {
            walk_expr(self, expr);
        }
    }

    fn visit_in_subquery(
        &mut self,
        _span: Span,
        expr: &'ast Expr,
        subquery: &'ast Query,
        _not: bool,
    ) {
        walk_expr(self, expr);
        walk_query(self, subquery);
    }

    fn visit_between(
        &mut self,
        _span: Span,
        expr: &'ast Expr,
        low: &'ast Expr,
        high: &'ast Expr,
        _not: bool,
    ) {
        walk_expr(self, expr);
        walk_expr(self, low);
        walk_expr(self, high);
    }

    fn visit_binary_op(
        &mut self,
        _span: Span,
        _op: &'ast BinaryOperator,
        left: &'ast Expr,
        right: &'ast Expr,
    ) {
        walk_expr(self, left);
        walk_expr(self, right);
    }

    fn visit_unary_op(&mut self, _span: Span, _op: &'ast UnaryOperator, expr: &'ast Expr) {
        walk_expr(self, expr);
    }

    fn visit_cast(
        &mut self,
        _span: Span,
        expr: &'ast Expr,
        _target_type: &'ast TypeName,
        _pg_style: bool,
    ) {
        walk_expr(self, expr);
    }

    fn visit_try_cast(&mut self, _span: Span, expr: &'ast Expr, _target_type: &'ast TypeName) {
        walk_expr(self, expr);
    }

    fn visit_extract(&mut self, _span: Span, _kind: &'ast IntervalKind, expr: &'ast Expr) {
        walk_expr(self, expr);
    }

    fn visit_position(&mut self, _span: Span, substr_expr: &'ast Expr, str_expr: &'ast Expr) {
        walk_expr(self, substr_expr);
        walk_expr(self, str_expr);
    }

    fn visit_substring(
        &mut self,
        _span: Span,
        expr: &'ast Expr,
        substring_from: &'ast Expr,
        substring_for: &'ast Option<Box<Expr>>,
    ) {
        walk_expr(self, expr);
        walk_expr(self, substring_from);
        if let Some(substring_for) = substring_for {
            walk_expr(self, substring_for);
        }
    }

    fn visit_trim(
        &mut self,
        _span: Span,
        expr: &'ast Expr,
        _trim_where: &'ast Option<(TrimWhere, Box<Expr>)>,
    ) {
        walk_expr(self, expr);
    }

    fn visit_literal(&mut self, _span: Span, _lit: &'ast Literal) {}

    fn visit_count_all(&mut self, _span: Span) {}

    fn visit_tuple(&mut self, _span: Span, elements: &'ast [Expr]) {
        for element in elements {
            walk_expr(self, element);
        }
    }

    // Recurses into arguments, then into the window spec (partition-by,
    // order-by, and frame bounds) when one is present.
    fn visit_function_call(
        &mut self,
        _span: Span,
        _distinct: bool,
        _name: &'ast Identifier,
        args: &'ast [Expr],
        _params: &'ast [Literal],
        over: &'ast Option<WindowSpec>,
    ) {
        for arg in args {
            walk_expr(self, arg);
        }
        if let Some(over) = over {
            over.partition_by
                .iter()
                .for_each(|expr| walk_expr(self, expr));
            over.order_by
                .iter()
                .for_each(|expr| walk_expr(self, &expr.expr));
            if let Some(frame) = &over.window_frame {
                self.visit_frame_bound(&frame.start_bound);
                self.visit_frame_bound(&frame.end_bound);
            }
        }
    }

    // Only `Preceding(Some(_))` / `Following(Some(_))` carry an expression;
    // the unbounded/current-row variants have nothing to visit.
    fn visit_frame_bound(&mut self, bound: &'ast WindowFrameBound) {
        match bound {
            WindowFrameBound::Preceding(Some(expr)) => walk_expr(self, expr.as_ref()),
            WindowFrameBound::Following(Some(expr)) => walk_expr(self, expr.as_ref()),
            _ => {}
        }
    }

    fn visit_case_when(
        &mut self,
        _span: Span,
        operand: &'ast Option<Box<Expr>>,
        conditions: &'ast [Expr],
        results: &'ast [Expr],
        else_result: &'ast Option<Box<Expr>>,
    ) {
        if let Some(operand) = operand {
            walk_expr(self, operand);
        }
        for condition in conditions {
            walk_expr(self, condition);
        }
        for result in results {
            walk_expr(self, result);
        }
        if let Some(else_result) = else_result {
            walk_expr(self, else_result);
        }
    }

    fn visit_exists(&mut self, _span: Span, _not: bool, subquery: &'ast Query) {
        walk_query(self, subquery);
    }

    fn visit_subquery(
        &mut self,
        _span: Span,
        _modifier: &'ast Option<SubqueryModifier>,
        subquery: &'ast Query,
    ) {
        walk_query(self, subquery);
    }

    fn visit_map_access(&mut self, _span: Span, expr: &'ast Expr, _accessor: &'ast MapAccessor) {
        walk_expr(self, expr);
    }

    fn visit_array(&mut self, _span: Span, exprs: &'ast [Expr]) {
        for expr in exprs {
            walk_expr(self, expr);
        }
    }

    fn visit_array_sort(&mut self, _span: Span, expr: &'ast Expr, _asc: bool, _null_first: bool) {
        walk_expr(self, expr);
    }

    fn visit_map(&mut self, _span: Span, kvs: &'ast [(Expr, Expr)]) {
        for (key_expr, val_expr) in kvs {
            walk_expr(self, key_expr);
            walk_expr(self, val_expr);
        }
    }

    fn visit_interval(&mut self, _span: Span, expr: &'ast Expr, _unit: &'ast IntervalKind) {
        walk_expr(self, expr);
    }

    fn visit_date_add(
        &mut self,
        _span: Span,
        _unit: &'ast IntervalKind,
        interval: &'ast Expr,
        date: &'ast Expr,
    ) {
        walk_expr(self, date);
        walk_expr(self, interval);
    }

    fn visit_date_sub(
        &mut self,
        _span: Span,
        _unit: &'ast IntervalKind,
        interval: &'ast Expr,
        date: &'ast Expr,
    ) {
        walk_expr(self, date);
        walk_expr(self, interval);
    }

    fn visit_date_trunc(&mut self, _span: Span, _unit: &'ast IntervalKind, date: &'ast Expr) {
        walk_expr(self, date);
    }

    // ---- statements (most default to a no-op) ---------------------------

    fn visit_statement(&mut self, statement: &'ast Statement) {
        walk_statement(self, statement);
    }

    fn visit_query(&mut self, query: &'ast Query) {
        walk_query(self, query);
    }

    fn visit_explain(&mut self, _kind: &'ast ExplainKind, _query: &'ast Statement) {}

    fn visit_copy(&mut self, _copy: &'ast CopyStmt) {}

    fn visit_copy_unit(&mut self, _copy_unit: &'ast CopyUnit) {}

    fn visit_call(&mut self, _call: &'ast CallStmt) {}

    fn visit_show_settings(&mut self, _like: &'ast Option<String>) {}

    fn visit_unset_variable(&mut self, _stmt: &'ast UnSetStmt) {}

    fn visit_show_process_list(&mut self) {}

    fn visit_show_metrics(&mut self) {}

    fn visit_show_engines(&mut self) {}

    fn visit_show_functions(&mut self, _limit: &'ast Option<ShowLimit>) {}

    fn visit_show_table_functions(&mut self, _limit: &'ast Option<ShowLimit>) {}

    fn visit_show_limit(&mut self, _limit: &'ast ShowLimit) {}

    fn visit_kill(&mut self, _kill_target: &'ast KillTarget, _object_id: &'ast str) {}

    fn visit_set_variable(
        &mut self,
        _is_global: bool,
        _variable: &'ast Identifier,
        _value: &'ast Expr,
    ) {
    }

    fn visit_set_role(&mut self, _is_default: bool, _role_name: &'ast str) {}

    fn visit_insert(&mut self, _insert: &'ast InsertStmt) {}

    fn visit_replace(&mut self, _replace: &'ast ReplaceStmt) {}

    fn visit_insert_source(&mut self, _insert_source: &'ast InsertSource) {}

    fn visit_delete(
        &mut self,
        _table_reference: &'ast TableReference,
        _selection: &'ast Option<Expr>,
    ) {
    }

    fn visit_update(&mut self, _update: &'ast UpdateStmt) {}

    // ---- catalog / database DDL ----

    fn visit_show_catalogs(&mut self, _stmt: &'ast ShowCatalogsStmt) {}

    fn visit_show_create_catalog(&mut self, _stmt: &'ast ShowCreateCatalogStmt) {}

    fn visit_create_catalog(&mut self, _stmt: &'ast CreateCatalogStmt) {}

    fn visit_drop_catalog(&mut self, _stmt: &'ast DropCatalogStmt) {}

    fn visit_show_databases(&mut self, _stmt: &'ast ShowDatabasesStmt) {}

    fn visit_show_create_databases(&mut self, _stmt: &'ast ShowCreateDatabaseStmt) {}

    fn visit_create_database(&mut self, _stmt: &'ast CreateDatabaseStmt) {}

    fn visit_drop_database(&mut self, _stmt: &'ast DropDatabaseStmt) {}

    fn visit_undrop_database(&mut self, _stmt: &'ast UndropDatabaseStmt) {}

    fn visit_alter_database(&mut self, _stmt: &'ast AlterDatabaseStmt) {}

    fn visit_use_database(&mut self, _database: &'ast Identifier) {}

    // ---- table DDL ----

    fn visit_show_tables(&mut self, _stmt: &'ast ShowTablesStmt) {}

    fn visit_show_columns(&mut self, _stmt: &'ast ShowColumnsStmt) {}

    fn visit_show_create_table(&mut self, _stmt: &'ast ShowCreateTableStmt) {}

    fn visit_describe_table(&mut self, _stmt: &'ast DescribeTableStmt) {}

    fn visit_show_tables_status(&mut self, _stmt: &'ast ShowTablesStatusStmt) {}

    fn visit_create_table(&mut self, _stmt: &'ast CreateTableStmt) {}

    fn visit_create_table_source(&mut self, _source: &'ast CreateTableSource) {}

    fn visit_column_definition(&mut self, _column_definition: &'ast ColumnDefinition) {}

    fn visit_drop_table(&mut self, _stmt: &'ast DropTableStmt) {}

    fn visit_undrop_table(&mut self, _stmt: &'ast UndropTableStmt) {}

    fn visit_alter_table(&mut self, _stmt: &'ast AlterTableStmt) {}

    fn visit_rename_table(&mut self, _stmt: &'ast RenameTableStmt) {}

    fn visit_truncate_table(&mut self, _stmt: &'ast TruncateTableStmt) {}

    fn visit_optimize_table(&mut self, _stmt: &'ast OptimizeTableStmt) {}

    fn visit_analyze_table(&mut self, _stmt: &'ast AnalyzeTableStmt) {}

    fn visit_exists_table(&mut self, _stmt: &'ast ExistsTableStmt) {}

    // ---- views ----

    fn visit_create_view(&mut self, _stmt: &'ast CreateViewStmt) {}

    fn visit_alter_view(&mut self, _stmt: &'ast AlterViewStmt) {}

    fn visit_drop_view(&mut self, _stmt: &'ast DropViewStmt) {}

    // ---- users, roles, grants ----

    fn visit_show_users(&mut self) {}

    fn visit_create_user(&mut self, _stmt: &'ast CreateUserStmt) {}

    fn visit_alter_user(&mut self, _stmt: &'ast AlterUserStmt) {}

    fn visit_drop_user(&mut self, _if_exists: bool, _user: &'ast UserIdentity) {}

    fn visit_show_roles(&mut self) {}

    fn visit_create_role(&mut self, _if_not_exists: bool, _role_name: &'ast str) {}

    fn visit_drop_role(&mut self, _if_exists: bool, _role_name: &'ast str) {}

    fn visit_grant(&mut self, _grant: &'ast GrantStmt) {}

    fn visit_show_grant(&mut self, _principal: &'ast Option<PrincipalIdentity>) {}

    fn visit_revoke(&mut self, _revoke: &'ast RevokeStmt) {}

    // ---- UDFs ----

    fn visit_create_udf(
        &mut self,
        _if_not_exists: bool,
        _udf_name: &'ast Identifier,
        _parameters: &'ast [Identifier],
        _definition: &'ast Expr,
        _description: &'ast Option<String>,
    ) {
    }

    fn visit_drop_udf(&mut self, _if_exists: bool, _udf_name: &'ast Identifier) {}

    fn visit_alter_udf(
        &mut self,
        _udf_name: &'ast Identifier,
        _parameters: &'ast [Identifier],
        _definition: &'ast Expr,
        _description: &'ast Option<String>,
    ) {
    }

    // ---- stages and file formats ----

    fn visit_create_stage(&mut self, _stmt: &'ast CreateStageStmt) {}

    fn visit_show_stages(&mut self) {}

    fn visit_drop_stage(&mut self, _if_exists: bool, _stage_name: &'ast str) {}

    fn visit_describe_stage(&mut self, _stage_name: &'ast str) {}

    fn visit_remove_stage(&mut self, _location: &'ast str, _pattern: &'ast str) {}

    fn visit_list_stage(&mut self, _location: &'ast str, _pattern: &'ast str) {}

    fn visit_create_file_format(
        &mut self,
        _if_not_exists: bool,
        _name: &'ast str,
        _file_format_options: &'ast FileFormatOptions,
    ) {
    }

    fn visit_drop_file_format(&mut self, _if_exists: bool, _name: &'ast str) {}

    fn visit_show_file_formats(&mut self) {}

    fn visit_presign(&mut self, _presign: &'ast PresignStmt) {}

    // ---- shares ----

    fn visit_create_share(&mut self, _stmt: &'ast CreateShareStmt) {}

    fn visit_drop_share(&mut self, _stmt: &'ast DropShareStmt) {}

    fn visit_grant_share_object(&mut self, _stmt: &'ast GrantShareObjectStmt) {}

    fn visit_revoke_share_object(&mut self, _stmt: &'ast RevokeShareObjectStmt) {}

    fn visit_alter_share_tenants(&mut self, _stmt: &'ast AlterShareTenantsStmt) {}

    fn visit_desc_share(&mut self, _stmt: &'ast DescShareStmt) {}

    fn visit_show_shares(&mut self, _stmt: &'ast ShowSharesStmt) {}

    fn visit_show_object_grant_privileges(&mut self, _stmt: &'ast ShowObjectGrantPrivilegesStmt) {}

    fn visit_show_grants_of_share(&mut self, _stmt: &'ast ShowGrantsOfShareStmt) {}

    // ---- query structure ------------------------------------------------

    fn visit_with(&mut self, with: &'ast With) {
        let With { ctes, .. } = with;
        for cte in ctes.iter() {
            walk_cte(self, cte);
        }
    }

    fn visit_set_expr(&mut self, expr: &'ast SetExpr) {
        walk_set_expr(self, expr);
    }

    fn visit_set_operation(&mut self, op: &'ast SetOperation) {
        let SetOperation { left, right, .. } = op;

        walk_set_expr(self, left);
        walk_set_expr(self, right);
    }

    fn visit_order_by(&mut self, order_by: &'ast OrderByExpr) {
        let OrderByExpr { expr, .. } = order_by;
        walk_expr(self, expr);
    }

    // Visits the clauses of a SELECT in source order: select list, FROM,
    // WHERE, GROUP BY (normal or grouping sets), HAVING.
    fn visit_select_stmt(&mut self, stmt: &'ast SelectStmt) {
        let SelectStmt {
            select_list,
            from,
            selection,
            group_by,
            having,
            ..
        } = stmt;

        for target in select_list.iter() {
            walk_select_target(self, target);
        }

        for table_ref in from.iter() {
            walk_table_reference(self, table_ref);
        }

        if let Some(selection) = selection {
            walk_expr(self, selection);
        }

        match group_by {
            Some(GroupBy::Normal(exprs)) => {
                for expr in exprs {
                    walk_expr(self, expr);
                }
            }
            Some(GroupBy::GroupingSets(sets)) => {
                for set in sets {
                    for expr in set {
                        walk_expr(self, expr);
                    }
                }
            }
            _ => {}
        }

        if let Some(having) = having {
            walk_expr(self, having);
        }
    }

    fn visit_select_target(&mut self, target: &'ast SelectTarget) {
        walk_select_target(self, target);
    }

    fn visit_table_reference(&mut self, table: &'ast TableReference) {
        walk_table_reference(self, table);
    }

    fn visit_time_travel_point(&mut self, time: &'ast TimeTravelPoint) {
        walk_time_travel_point(self, time);
    }

    fn visit_join(&mut self, join: &'ast Join) {
        let Join {
            left,
            right,
            condition,
            ..
        } = join;

        walk_table_reference(self, left);
        walk_table_reference(self, right);

        walk_join_condition(self, condition);
    }
}
#[cfg(test)]
mod tests {
    use lib;

    /// Returns the fixed ten-element sample array shared by the tests below.
    fn get_array() -> Vec<f32> {
        // Tail expression instead of the explicit `return` the original used
        // (clippy: `needless_return`).
        vec![9.0, 3.0, 3.0, 4.0, 5.0, 4.9, 8.0, 3.3, 2.0, 0.1]
    }

    /// Checks `get_percentile` at a high, a middle, and a low percentile.
    #[test]
    fn test_get_percentile() {
        let array = get_array();
        assert_approx_eq!(lib::get_percentile(&array, 70), 4.95);
        assert_approx_eq!(lib::get_percentile(&array, 50), 3.65);
        assert_approx_eq!(lib::get_percentile(&array, 1), 0.1);
    }

    /// Checks `get_average` over the whole fixture.
    #[test]
    fn test_get_average() {
        let array = get_array();
        assert_approx_eq!(lib::get_average(&array), 4.23);
    }
}
// NOTE(review): svd2rust-style generated accessors for the `TEST` register
// (eight single-bit fields). Each write proxy masks its bit into `w.bits`
// at the field's offset; each reader extracts it with a shift-and-mask.
#[doc = "Reader of register TEST"]
pub type R = crate::R<u8, super::TEST>;
#[doc = "Writer for register TEST"]
pub type W = crate::W<u8, super::TEST>;
#[doc = "Register TEST `reset()`'s with value 0"]
impl crate::ResetValue for super::TEST {
    type Type = u8;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `TESTSE0NAK`"]
pub type TESTSE0NAK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TESTSE0NAK`"]
pub struct TESTSE0NAK_W<'a> {
    w: &'a mut W,
}
impl<'a> TESTSE0NAK_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 0: no shift needed.
        self.w.bits = (self.w.bits & !0x01) | ((value as u8) & 0x01);
        self.w
    }
}
#[doc = "Reader of field `TESTJ`"]
pub type TESTJ_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TESTJ`"]
pub struct TESTJ_W<'a> {
    w: &'a mut W,
}
impl<'a> TESTJ_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 1.
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u8) & 0x01) << 1);
        self.w
    }
}
#[doc = "Reader of field `TESTK`"]
pub type TESTK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TESTK`"]
pub struct TESTK_W<'a> {
    w: &'a mut W,
}
impl<'a> TESTK_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 2.
        self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u8) & 0x01) << 2);
        self.w
    }
}
#[doc = "Reader of field `TESTPKT`"]
pub type TESTPKT_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TESTPKT`"]
pub struct TESTPKT_W<'a> {
    w: &'a mut W,
}
impl<'a> TESTPKT_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 3.
        self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u8) & 0x01) << 3);
        self.w
    }
}
#[doc = "Reader of field `FORCEHS`"]
pub type FORCEHS_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `FORCEHS`"]
pub struct FORCEHS_W<'a> {
    w: &'a mut W,
}
impl<'a> FORCEHS_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 4.
        self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u8) & 0x01) << 4);
        self.w
    }
}
#[doc = "Reader of field `FORCEFS`"]
pub type FORCEFS_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `FORCEFS`"]
pub struct FORCEFS_W<'a> {
    w: &'a mut W,
}
impl<'a> FORCEFS_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 5.
        self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u8) & 0x01) << 5);
        self.w
    }
}
#[doc = "Reader of field `FIFOACC`"]
pub type FIFOACC_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `FIFOACC`"]
pub struct FIFOACC_W<'a> {
    w: &'a mut W,
}
impl<'a> FIFOACC_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 6.
        self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u8) & 0x01) << 6);
        self.w
    }
}
#[doc = "Reader of field `FORCEH`"]
pub type FORCEH_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `FORCEH`"]
pub struct FORCEH_W<'a> {
    w: &'a mut W,
}
impl<'a> FORCEH_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 7.
        self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u8) & 0x01) << 7);
        self.w
    }
}
// Field readers: extract each bit of the register value as a bool.
impl R {
    #[doc = "Bit 0 - Test_SE0_NAK Test Mode Enable"]
    #[inline(always)]
    pub fn testse0nak(&self) -> TESTSE0NAK_R {
        TESTSE0NAK_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - Test_J Mode Enable"]
    #[inline(always)]
    pub fn testj(&self) -> TESTJ_R {
        TESTJ_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - Test_K Mode Enable"]
    #[inline(always)]
    pub fn testk(&self) -> TESTK_R {
        TESTK_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 3 - Test Packet Mode Enable"]
    #[inline(always)]
    pub fn testpkt(&self) -> TESTPKT_R {
        TESTPKT_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bit 4 - Force High-Speed Mode"]
    #[inline(always)]
    pub fn forcehs(&self) -> FORCEHS_R {
        FORCEHS_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 5 - Force Full-Speed Mode"]
    #[inline(always)]
    pub fn forcefs(&self) -> FORCEFS_R {
        FORCEFS_R::new(((self.bits >> 5) & 0x01) != 0)
    }
    #[doc = "Bit 6 - FIFO Access"]
    #[inline(always)]
    pub fn fifoacc(&self) -> FIFOACC_R {
        FIFOACC_R::new(((self.bits >> 6) & 0x01) != 0)
    }
    #[doc = "Bit 7 - Force Host Mode"]
    #[inline(always)]
    pub fn forceh(&self) -> FORCEH_R {
        FORCEH_R::new(((self.bits >> 7) & 0x01) != 0)
    }
}
// Field writers: each method hands back the field's write proxy.
impl W {
    #[doc = "Bit 0 - Test_SE0_NAK Test Mode Enable"]
    #[inline(always)]
    pub fn testse0nak(&mut self) -> TESTSE0NAK_W {
        TESTSE0NAK_W { w: self }
    }
    #[doc = "Bit 1 - Test_J Mode Enable"]
    #[inline(always)]
    pub fn testj(&mut self) -> TESTJ_W {
        TESTJ_W { w: self }
    }
    #[doc = "Bit 2 - Test_K Mode Enable"]
    #[inline(always)]
    pub fn testk(&mut self) -> TESTK_W {
        TESTK_W { w: self }
    }
    #[doc = "Bit 3 - Test Packet Mode Enable"]
    #[inline(always)]
    pub fn testpkt(&mut self) -> TESTPKT_W {
        TESTPKT_W { w: self }
    }
    #[doc = "Bit 4 - Force High-Speed Mode"]
    #[inline(always)]
    pub fn forcehs(&mut self) -> FORCEHS_W {
        FORCEHS_W { w: self }
    }
    #[doc = "Bit 5 - Force Full-Speed Mode"]
    #[inline(always)]
    pub fn forcefs(&mut self) -> FORCEFS_W {
        FORCEFS_W { w: self }
    }
    #[doc = "Bit 6 - FIFO Access"]
    #[inline(always)]
    pub fn fifoacc(&mut self) -> FIFOACC_W {
        FIFOACC_W { w: self }
    }
    #[doc = "Bit 7 - Force Host Mode"]
    #[inline(always)]
    pub fn forceh(&mut self) -> FORCEH_W {
        FORCEH_W { w: self }
    }
}
use std::cmp::max; use std::collections::HashMap; use std::ffi::CString; use std::fmt::{ Debug, Formatter, }; use std::hash::{ Hash, Hasher, }; use std::mem::MaybeUninit; use std::sync::{ Arc, Mutex, }; use ash::vk::{ self, BufferDeviceAddressInfo, Handle, SharingMode, }; use smallvec::SmallVec; use sourcerenderer_core::graphics::{ Buffer, BufferInfo, BufferUsage, MappedBuffer, MemoryUsage, MutMappedBuffer, }; use crate::device::memory_usage_to_vma; use crate::raw::*; pub struct VkBuffer { buffer: vk::Buffer, allocation: vma_sys::VmaAllocation, device: Arc<RawVkDevice>, map_ptr: Option<*mut u8>, memory_usage: MemoryUsage, info: BufferInfo, va: Option<vk::DeviceSize>, } unsafe impl Send for VkBuffer {} unsafe impl Sync for VkBuffer {} impl VkBuffer { pub fn new( device: &Arc<RawVkDevice>, memory_usage: MemoryUsage, info: &BufferInfo, allocator: &vma_sys::VmaAllocator, pool: Option<vma_sys::VmaPool>, name: Option<&str>, ) -> Arc<Self> { let mut queue_families = SmallVec::<[u32; 2]>::new(); let mut sharing_mode = vk::SharingMode::EXCLUSIVE; if info.usage.contains(BufferUsage::COPY_SRC) { queue_families.push(device.graphics_queue_info.queue_family_index as u32); if let Some(info) = device.transfer_queue_info { sharing_mode = vk::SharingMode::CONCURRENT; queue_families.push(info.queue_family_index as u32); } } let buffer_info = vk::BufferCreateInfo { size: info.size as u64, usage: buffer_usage_to_vk( info.usage, device.features.contains(VkFeatures::RAY_TRACING), ), sharing_mode, p_queue_family_indices: queue_families.as_ptr(), queue_family_index_count: queue_families.len() as u32, ..Default::default() }; let vk_mem_flags = memory_usage_to_vma(memory_usage); let allocation_create_info = vma_sys::VmaAllocationCreateInfo { flags: if memory_usage != MemoryUsage::VRAM { vma_sys::VmaAllocationCreateFlagBits_VMA_ALLOCATION_CREATE_MAPPED_BIT as u32 } else { 0 }, usage: vma_sys::VmaMemoryUsage_VMA_MEMORY_USAGE_UNKNOWN, preferredFlags: vk_mem_flags.preferred, requiredFlags: 
vk_mem_flags.required, memoryTypeBits: 0, pool: pool.unwrap_or(std::ptr::null_mut()), pUserData: std::ptr::null_mut(), priority: 0f32, }; let mut buffer: vk::Buffer = vk::Buffer::null(); let mut allocation: vma_sys::VmaAllocation = std::ptr::null_mut(); let mut allocation_info_uninit: MaybeUninit<vma_sys::VmaAllocationInfo> = MaybeUninit::uninit(); let allocation_info: vma_sys::VmaAllocationInfo; unsafe { assert_eq!( vma_sys::vmaCreateBuffer( *allocator, &buffer_info, &allocation_create_info, &mut buffer, &mut allocation, allocation_info_uninit.as_mut_ptr() ), vk::Result::SUCCESS ); allocation_info = allocation_info_uninit.assume_init(); }; if let Some(name) = name { if let Some(debug_utils) = device.instance.debug_utils.as_ref() { let name_cstring = CString::new(name).unwrap(); unsafe { debug_utils .debug_utils_loader .set_debug_utils_object_name( device.handle(), &vk::DebugUtilsObjectNameInfoEXT { object_type: vk::ObjectType::BUFFER, object_handle: buffer.as_raw(), p_object_name: name_cstring.as_ptr(), ..Default::default() }, ) .unwrap(); } } } let map_ptr: Option<*mut u8> = unsafe { if memory_usage != MemoryUsage::VRAM && allocation_info.pMappedData != std::ptr::null_mut() { Some(std::mem::transmute(allocation_info.pMappedData)) } else { None } }; let va = if buffer_info .usage .contains(vk::BufferUsageFlags::SHADER_DEVICE_ADDRESS) { device.rt.as_ref().map(|rt| unsafe { rt.bda.get_buffer_device_address(&BufferDeviceAddressInfo { buffer, ..Default::default() }) }) } else { None }; Arc::new(VkBuffer { buffer, allocation, device: device.clone(), map_ptr, memory_usage, info: info.clone(), va, }) } pub fn handle(&self) -> &vk::Buffer { &self.buffer } pub fn va(&self) -> Option<vk::DeviceAddress> { self.va } pub fn info(&self) -> &BufferInfo { &self.info } } impl Drop for VkBuffer { fn drop(&mut self) { unsafe { // VMA_ALLOCATION_CREATE_MAPPED_BIT will get automatically unmapped vma_sys::vmaDestroyBuffer(self.device.allocator, self.buffer, self.allocation); } } } impl 
Hash for VkBuffer { fn hash<H: Hasher>(&self, state: &mut H) { self.buffer.hash(state); } } impl PartialEq for VkBuffer { fn eq(&self, other: &Self) -> bool { self.buffer == other.buffer } } impl Eq for VkBuffer {} impl Buffer for VkBufferSlice { fn map_mut<T>(&self) -> Option<MutMappedBuffer<Self, T>> where T: 'static + Send + Sync + Sized + Clone, { MutMappedBuffer::new(self, true) } fn map<T>(&self) -> Option<MappedBuffer<Self, T>> where T: 'static + Send + Sync + Sized + Clone, { MappedBuffer::new(self, true) } unsafe fn map_unsafe(&self, invalidate: bool) -> Option<*mut u8> { if !invalidate { let allocator = self.buffer.device.allocator; assert_eq!( vma_sys::vmaInvalidateAllocation( allocator, self.buffer.allocation, self.offset as u64, self.info.size as u64 ), vk::Result::SUCCESS ); } self.buffer.map_ptr.map(|ptr| ptr.add(self.offset())) } unsafe fn unmap_unsafe(&self, flush: bool) { if !flush { return; } let allocator = self.buffer.device.allocator; assert_eq!( vma_sys::vmaFlushAllocation( allocator, self.buffer.allocation, self.offset as u64, self.info.size as u64 ), vk::Result::SUCCESS ); } fn length(&self) -> usize { self.info.size } fn info(&self) -> &BufferInfo { &self.info } } pub fn buffer_usage_to_vk(usage: BufferUsage, rt_supported: bool) -> vk::BufferUsageFlags { let mut flags = vk::BufferUsageFlags::empty(); if usage.contains(BufferUsage::STORAGE) { flags |= vk::BufferUsageFlags::STORAGE_BUFFER; } if usage.contains(BufferUsage::CONSTANT) { flags |= vk::BufferUsageFlags::UNIFORM_BUFFER; } if usage.contains(BufferUsage::VERTEX) { flags |= vk::BufferUsageFlags::VERTEX_BUFFER; if rt_supported { flags |= vk::BufferUsageFlags::ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_KHR | vk::BufferUsageFlags::SHADER_DEVICE_ADDRESS_EXT; } } if usage.contains(BufferUsage::INDEX) { flags |= vk::BufferUsageFlags::INDEX_BUFFER; if rt_supported { flags |= vk::BufferUsageFlags::ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_KHR | 
vk::BufferUsageFlags::SHADER_DEVICE_ADDRESS_EXT; } } if usage.contains(BufferUsage::INDIRECT) { flags |= vk::BufferUsageFlags::INDIRECT_BUFFER; } if usage.contains(BufferUsage::COPY_SRC) { flags |= vk::BufferUsageFlags::TRANSFER_SRC; } if usage.contains(BufferUsage::COPY_DST) { flags |= vk::BufferUsageFlags::TRANSFER_DST; } if usage.contains(BufferUsage::ACCELERATION_STRUCTURE) { flags |= vk::BufferUsageFlags::ACCELERATION_STRUCTURE_STORAGE_KHR | vk::BufferUsageFlags::SHADER_DEVICE_ADDRESS_EXT; } if usage.contains(BufferUsage::ACCELERATION_STRUCTURE_BUILD) { flags |= vk::BufferUsageFlags::ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_KHR | vk::BufferUsageFlags::SHADER_DEVICE_ADDRESS_EXT; } if usage.contains(BufferUsage::SHADER_BINDING_TABLE) { flags |= vk::BufferUsageFlags::SHADER_BINDING_TABLE_KHR | vk::BufferUsageFlags::SHADER_DEVICE_ADDRESS_EXT; } flags } pub(crate) fn align_up(value: usize, alignment: usize) -> usize { if alignment == 0 { return value; } if value == 0 { return 0; } (value + alignment - 1) & !(alignment - 1) } pub(crate) fn align_down(value: usize, alignment: usize) -> usize { if alignment == 0 { return value; } (value / alignment) * alignment } pub(crate) fn align_up_32(value: u32, alignment: u32) -> u32 { if alignment == 0 { return value; } if value == 0 { return 0; } (value + alignment - 1) & !(alignment - 1) } pub(crate) fn align_down_32(value: u32, alignment: u32) -> u32 { if alignment == 0 { return value; } (value / alignment) * alignment } pub(crate) fn align_up_64(value: u64, alignment: u64) -> u64 { if alignment == 0 { return value; } (value + alignment - 1) & !(alignment - 1) } pub(crate) fn align_down_64(value: u64, alignment: u64) -> u64 { if alignment == 0 { return value; } (value / alignment) * alignment } pub struct VkBufferSlice { buffer: Arc<VkBuffer>, offset: usize, info: BufferInfo } impl Debug for VkBufferSlice { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { write!( f, "(Buffer Slice: {}-{} (length: {}))", 
self.offset, self.offset + self.info.size, self.info.size ) } } impl Hash for VkBufferSlice { fn hash<H: Hasher>(&self, state: &mut H) { self.buffer.hash(state); self.offset.hash(state); self.info.size.hash(state); } } impl PartialEq for VkBufferSlice { fn eq(&self, other: &Self) -> bool { self.buffer == other.buffer && self.info.size == other.info.size && self.offset == other.offset } } impl Eq for VkBufferSlice {} impl VkBufferSlice { pub fn buffer(&self) -> &Arc<VkBuffer> { &self.buffer } pub fn offset(&self) -> usize { self.offset } pub fn length(&self) -> usize { self.info.size } pub fn va(&self) -> Option<vk::DeviceAddress> { self.buffer .va() .map(|va| va + self.offset as vk::DeviceSize) } pub fn va_offset(&self, offset: usize) -> Option<vk::DeviceAddress> { self.buffer .va() .map(|va| va + (self.offset + offset) as vk::DeviceSize) } } const SLICED_BUFFER_SIZE: usize = 16384; const BIG_BUFFER_SLAB_SIZE: usize = 4096; const BUFFER_SLAB_SIZE: usize = 1024; const SMALL_BUFFER_SLAB_SIZE: usize = 512; const TINY_BUFFER_SLAB_SIZE: usize = 256; const STAGING_BUFFER_POOL_SIZE: usize = 16 << 20; #[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash)] struct BufferKey { memory_usage: MemoryUsage, buffer_usage: BufferUsage, } #[derive(Default)] struct VkBufferSliceCollection { used_slices: Vec<Arc<VkBufferSlice>>, free_slices: Vec<Arc<VkBufferSlice>>, } pub struct BufferAllocator { device: Arc<RawVkDevice>, buffers: Mutex<HashMap<BufferKey, VkBufferSliceCollection>>, device_limits: vk::PhysicalDeviceLimits, reuse_automatically: bool, transfer_pool: Option<vma_sys::VmaPool>, } unsafe impl Send for BufferAllocator {} unsafe impl Sync for BufferAllocator {} impl BufferAllocator { pub fn new(device: &Arc<RawVkDevice>, reuse_automatically: bool) -> Self { let buffers: HashMap<BufferKey, VkBufferSliceCollection> = HashMap::new(); let mut limits2 = vk::PhysicalDeviceProperties2 { ..Default::default() }; unsafe { device .instance 
.get_physical_device_properties2(device.physical_device, &mut limits2) } // Pure copy buffers are expected to be very short lived, so put them into a separate pool to avoid // fragmentation let transfer_pool = if reuse_automatically { let buffer_info = vk::BufferCreateInfo { size: 1024, usage: buffer_usage_to_vk(BufferUsage::COPY_SRC, false), sharing_mode: SharingMode::EXCLUSIVE, queue_family_index_count: 0, p_queue_family_indices: std::ptr::null(), ..Default::default() }; let vk_mem_flags = memory_usage_to_vma(MemoryUsage::UncachedRAM); let allocation_info = vma_sys::VmaAllocationCreateInfo { flags: vma_sys::VmaAllocationCreateFlagBits_VMA_ALLOCATION_CREATE_MAPPED_BIT as u32, usage: vma_sys::VmaMemoryUsage_VMA_MEMORY_USAGE_UNKNOWN, preferredFlags: vk_mem_flags.preferred, requiredFlags: vk_mem_flags.required, memoryTypeBits: 0, pool: std::ptr::null_mut(), pUserData: std::ptr::null_mut(), priority: 0f32, }; let mut memory_type_index: u32 = 0; unsafe { assert_eq!( vma_sys::vmaFindMemoryTypeIndexForBufferInfo( device.allocator, &buffer_info as *const vk::BufferCreateInfo, &allocation_info as *const vma_sys::VmaAllocationCreateInfo, &mut memory_type_index as *mut u32 ), vk::Result::SUCCESS ); } let pool_info = vma_sys::VmaPoolCreateInfo { memoryTypeIndex: memory_type_index, flags: 0, blockSize: STAGING_BUFFER_POOL_SIZE as vk::DeviceSize, minBlockCount: 0, maxBlockCount: 0, priority: 0.1f32, minAllocationAlignment: 0, pMemoryAllocateNext: std::ptr::null_mut(), }; unsafe { let mut pool: vma_sys::VmaPool = std::ptr::null_mut(); let res = vma_sys::vmaCreatePool( device.allocator, &pool_info as *const vma_sys::VmaPoolCreateInfo, &mut pool as *mut vma_sys::VmaPool, ); if res != vk::Result::SUCCESS { None } else { Some(pool) } } } else { None }; BufferAllocator { device: device.clone(), buffers: Mutex::new(buffers), device_limits: limits2.properties.limits, reuse_automatically, transfer_pool, } } pub fn get_slice( &self, info: &BufferInfo, memory_usage: MemoryUsage, name: 
Option<&str>, ) -> Arc<VkBufferSlice> { if info.size > BIG_BUFFER_SLAB_SIZE && self.reuse_automatically { let pool = if memory_usage == MemoryUsage::UncachedRAM && info.usage == BufferUsage::COPY_SRC { self.transfer_pool } else { None }; // Don't do one-off buffers for command lists let buffer = VkBuffer::new( &self.device, memory_usage, info, &self.device.allocator, pool, name, ); return Arc::new(VkBufferSlice { buffer, offset: 0, info: info.clone() }); } let mut info = info.clone(); let mut alignment: usize = 4; if info.usage.contains(BufferUsage::CONSTANT) { // TODO max doesnt guarantee both alignments alignment = max( alignment, self.device_limits.min_uniform_buffer_offset_alignment as usize, ); } if info.usage.contains(BufferUsage::STORAGE) { // TODO max doesnt guarantee both alignments alignment = max( alignment, self.device_limits.min_storage_buffer_offset_alignment as usize, ); } if info.usage.contains(BufferUsage::ACCELERATION_STRUCTURE) { // TODO max doesnt guarantee both alignments alignment = max(alignment, 256); } if info.usage.contains(BufferUsage::SHADER_BINDING_TABLE) { let rt = self.device.rt.as_ref().unwrap(); alignment = max( alignment, rt.rt_pipeline_properties.shader_group_handle_alignment as usize, ); alignment = max( alignment, rt.rt_pipeline_properties.shader_group_base_alignment as usize, ); } let key = BufferKey { memory_usage, buffer_usage: info.usage, }; let mut guard = self.buffers.lock().unwrap(); let matching_buffers = guard.entry(key).or_default(); // TODO: consider a smarter data structure than a simple list of all slices regardless of size. 
let slice_index = matching_buffers .free_slices .iter() .enumerate() .find(|(_, slice)| { slice.offset % alignment == 0 && slice.info.size % alignment == 0 && slice.info.size >= info.size }) .map(|(index, _b)| index); if let Some(index) = slice_index { let slice = matching_buffers.free_slices.remove(index); matching_buffers.used_slices.push(slice.clone()); return slice; } if self.reuse_automatically && !matching_buffers.used_slices.is_empty() { // This is awful. Completely rewrite this with drain_filter once that's stabilized. // Right now cleaner alternatives would likely need to do more copying and allocations. let length = matching_buffers.used_slices.len(); for i in (0..length).rev() { let refcount = { let slice = &matching_buffers.used_slices[i]; Arc::strong_count(slice) }; if refcount == 1 { matching_buffers .free_slices .push(matching_buffers.used_slices.remove(i)); } } let slice_index = matching_buffers .free_slices .iter() .enumerate() .find(|(_, slice)| { slice.offset % alignment == 0 && slice.info.size % alignment == 0 && slice.info.size >= info.size }) .map(|(index, _b)| index); if let Some(index) = slice_index { let slice = matching_buffers.free_slices.remove(index); matching_buffers.used_slices.push(slice.clone()); return slice; } } let mut slice_size = align_up(info.size, alignment); slice_size = if slice_size <= TINY_BUFFER_SLAB_SIZE { TINY_BUFFER_SLAB_SIZE } else if info.size <= SMALL_BUFFER_SLAB_SIZE { SMALL_BUFFER_SLAB_SIZE } else if info.size <= BUFFER_SLAB_SIZE { BUFFER_SLAB_SIZE } else if info.size <= BIG_BUFFER_SLAB_SIZE { BIG_BUFFER_SLAB_SIZE } else { info.size }; let slices = if slice_size <= BIG_BUFFER_SLAB_SIZE { info.size = SLICED_BUFFER_SIZE; SLICED_BUFFER_SIZE / slice_size } else { 1 }; let buffer = VkBuffer::new( &self.device, memory_usage, &info, &self.device.allocator, None, None, ); let slice_info = BufferInfo { usage: info.usage, size: slice_size }; for i in 0..(slices - 1) { let slice = Arc::new(VkBufferSlice { buffer: 
buffer.clone(), offset: i * slice_size, info: slice_info.clone() }); matching_buffers.free_slices.push(slice); } let slice = Arc::new(VkBufferSlice { buffer, offset: (slices - 1) * slice_size, info: slice_info }); matching_buffers.used_slices.push(slice.clone()); slice } pub fn reset(&self) { let mut buffers_types = self.buffers.lock().unwrap(); for (_key, buffers) in buffers_types.iter_mut() { buffers.free_slices.append(buffers.used_slices.as_mut()); buffers.free_slices.sort_unstable_by_key(|a| a.info.size); } } } impl Drop for BufferAllocator { fn drop(&mut self) { if let Some(pool) = self.transfer_pool { unsafe { vma_sys::vmaDestroyPool(self.device.allocator, pool); } } } }
// Copyright 2022 Datafuse Labs. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::future::Future; use std::pin::Pin; use std::task::Context; use std::task::Poll; use common_exception::ErrorCode; use common_exception::Result; use futures::future::BoxFuture; use futures::FutureExt; pub fn catch_unwind<F: FnOnce() -> R, R>(f: F) -> Result<R> { match std::panic::catch_unwind(std::panic::AssertUnwindSafe(f)) { Ok(res) => Ok(res), Err(cause) => match cause.downcast_ref::<&'static str>() { None => match cause.downcast_ref::<String>() { None => Err(ErrorCode::PanicError("Sorry, unknown panic message")), Some(message) => Err(ErrorCode::PanicError(message.to_string())), }, Some(message) => Err(ErrorCode::PanicError(message.to_string())), }, } } pub struct CatchUnwindFuture<F: Future + Send + 'static> { inner: BoxFuture<'static, F::Output>, } impl<F: Future + Send + 'static> CatchUnwindFuture<F> { pub fn create(f: F) -> CatchUnwindFuture<F> { CatchUnwindFuture::<F> { inner: f.boxed() } } } impl<F: Future + Send + 'static> Future for CatchUnwindFuture<F> { type Output = Result<F::Output>; fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> { let inner = &mut self.inner; match catch_unwind(move || inner.poll_unpin(cx)) { Ok(Poll::Pending) => Poll::Pending, Ok(Poll::Ready(value)) => Poll::Ready(Ok(value)), Err(cause) => Poll::Ready(Err(cause)), } } }
use crate::{font::Font, widgets::Color}; use nannou::{geom::rect::Rect, prelude::Vector3, text::Scale, wgpu::Texture, Draw}; use utopia_core::CommonPrimitive; use utopia_decorations::{ primitives::{border::BorderPrimitive, quad::QuadPrimitive}, widgets::scale::ScaledPrimitive, }; use utopia_image::primitive::ImagePrimitive; use utopia_scroll::primitive::ClipPrimitive; use utopia_text::primitives::text::TextPrimitive; #[derive(Debug)] pub enum NannouPrimitive { Common(CommonPrimitive<NannouPrimitive>), Text(TextPrimitive<Font, Color>), Quad(QuadPrimitive<Color>), Border(BorderPrimitive<Color>), Image(ImagePrimitive<Texture>), Clip(ClipPrimitive<NannouPrimitive>), Scaled(ScaledPrimitive<NannouPrimitive>), } impl NannouPrimitive { pub fn draw(self, draw: &Draw, win_height: f32) { match self { NannouPrimitive::Common(common) => match common { CommonPrimitive::Group { children } => children .into_iter() .for_each(|prim| prim.draw(draw, win_height)), _ => {} }, NannouPrimitive::Text(text) => { if text.content == "" { return; } let font = text.font.resolve(); let v_metrics = font.v_metrics(Scale::uniform(text.font_size as f32)); let x = text.origin.x + text.size.width / 2.; let y = text.origin.y + text.size.height / 2. 
+ v_metrics.descent; draw.text(&text.content) .color(text.color) .font_size(text.font_size as u32) .x_y(x, win_height - y); } NannouPrimitive::Quad(quad) => { let x = quad.origin.x + quad.size.width / 2.; let y = quad.origin.y + quad.size.height / 2.; draw.rect() .x_y(x, win_height - y) .w_h(quad.size.width.ceil(), quad.size.height.ceil()) .color(quad.color); } NannouPrimitive::Border(border) => { let x = border.origin.x + border.size.width / 2.; let y = border.origin.y + border.size.height / 2.; draw.rect() .x_y(x, win_height - y) .w_h( (border.size.width - border.border_width as f32 / 2.).ceil(), (border.size.height - border.border_width as f32 / 2.).ceil(), ) .no_fill() .stroke_weight(border.border_width as f32) .stroke(border.border_color); } NannouPrimitive::Image(image) => { let x = image.position.x + image.size.width / 2.; let y = image.position.y + image.size.height / 2.; draw.texture(&image.src) .x_y(x, y) .w_h(image.size.width, image.size.height); } NannouPrimitive::Clip(clip) => { let x = clip.bounds.width / 2.; let x = nannou::geom::Range::new(-x, x); let y = clip.bounds.height / 2.; let y = nannou::geom::Range::new(-y, y); let scissor = draw .scissor(Rect { x, y }) .x_y(-clip.offset.x, clip.offset.y); clip.primitive.draw(&scissor, win_height); } NannouPrimitive::Scaled(scaled) => { // FIXME: Not scaled with proper origin ? 
let scaled_draw = draw.scale_axes(Vector3::new(scaled.scale_x, scaled.scale_y, 1.)); scaled.primitive.draw(&scaled_draw, win_height); } } } } impl From<CommonPrimitive<NannouPrimitive>> for NannouPrimitive { fn from(input: CommonPrimitive<NannouPrimitive>) -> Self { NannouPrimitive::Common(input) } } impl From<TextPrimitive<Font, Color>> for NannouPrimitive { fn from(input: TextPrimitive<Font, Color>) -> Self { NannouPrimitive::Text(input) } } impl From<QuadPrimitive<Color>> for NannouPrimitive { fn from(input: QuadPrimitive<Color>) -> Self { NannouPrimitive::Quad(input) } } impl From<BorderPrimitive<Color>> for NannouPrimitive { fn from(input: BorderPrimitive<Color>) -> Self { NannouPrimitive::Border(input) } } impl From<ImagePrimitive<Texture>> for NannouPrimitive { fn from(input: ImagePrimitive<Texture>) -> Self { NannouPrimitive::Image(input) } } impl From<ClipPrimitive<NannouPrimitive>> for NannouPrimitive { fn from(input: ClipPrimitive<NannouPrimitive>) -> Self { NannouPrimitive::Clip(input) } } impl From<ScaledPrimitive<NannouPrimitive>> for NannouPrimitive { fn from(input: ScaledPrimitive<NannouPrimitive>) -> Self { NannouPrimitive::Scaled(input) } } impl From<()> for NannouPrimitive { fn from(_input: ()) -> Self { NannouPrimitive::Common(CommonPrimitive::None) } } impl<A, B> From<(A, B)> for NannouPrimitive where NannouPrimitive: From<A>, NannouPrimitive: From<B>, { fn from((a, b): (A, B)) -> NannouPrimitive { NannouPrimitive::Common(CommonPrimitive::Group { children: vec![a.into(), b.into()], }) } }
/*
 * Copyright 2020 Fluence Labs Limited
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#![recursion_limit = "512"]
#![warn(rust_2018_idioms)]
#![deny(
    dead_code,
    nonstandard_style,
    unused_imports,
    unused_mut,
    unused_variables,
    unused_unsafe,
    unreachable_patterns
)]

use async_std::task;
use ctrlc_adapter::block_until_ctrlc;
use fluence_ipfs::run_ipfs_multiaddr_service;
use futures::channel::oneshot;
use parity_multiaddr::Multiaddr;
use std::error::Error;
use std::io::Write;

// Binary entry point: parses two positional multiaddr arguments, spawns the
// IPFS multiaddr service, and runs until the process receives Ctrl-C.
fn main() -> Result<(), Box<dyn Error>> {
    // Custom log line format: "[<timestamp> <level> <module> JI]: <message>".
    env_logger::builder()
        .format(|buf, record| {
            writeln!(
                buf,
                "[{} {} {} JI]: {}",
                buf.timestamp_micros(),
                record.level(),
                record.module_path().unwrap_or_default(),
                record.args()
            )
        })
        .init();

    // TODO: use ArgMatches
    // Positional args: 1) bootstrap node multiaddr, 2) IPFS node multiaddr.
    // Panics with a descriptive message when missing or malformed.
    let bootstrap: Multiaddr = std::env::args()
        .nth(1)
        .expect("multiaddr for bootstrap node should be provided by the first argument")
        .parse()
        .expect("provided wrong bootstrap Multiaddr");

    let ipfs: Multiaddr = std::env::args()
        .nth(2)
        .expect("ipfs multiaddr should be provided as a second argument")
        .parse()
        .expect("provided wrong IPFS Multiaddr");

    // One-shot channel used solely to tell the service task to shut down.
    let (exit_sender, exit_receiver) = oneshot::channel::<()>();

    let ipfs_task = task::spawn(async move {
        let result = run_ipfs_multiaddr_service(bootstrap, ipfs, exit_receiver).await;
        if let Err(e) = result {
            log::error!("Error running ipfs_task: {:?}", e)
        }
    });

    log::info!("Waiting for Ctrl-C...");
    // Blocks the main thread until SIGINT is received.
    block_until_ctrlc();

    log::debug!("Got Ctrl-C, stopping");

    // Signal shutdown, then wait for the service task to finish cleanly.
    exit_sender.send(()).unwrap();

    log::debug!("Waiting for ipfs_task to end");
    task::block_on(ipfs_task);
    log::debug!("ipfs_task stopped");

    Ok(())
}
//! Physics crate surface: exposes the pipeline and the spatial-grid helpers.

pub mod physics;
// Internal module; its items are re-exported wholesale below.
mod spatial_grid;

pub use physics::PhysicsPipeline;
pub use spatial_grid::*;
//! Trivial type-map implementation
//!
//! Implementation uses type erased values with type as index.
//! Due to limitation of `TypeId` only types without non-static references are supported. (in future it can be changed)
//!
//! ## Hash implementation
//!
//! The map uses simplified `Hasher` that relies on fact that `TypeId` produces unique values only.
//! In fact there is no hashing under hood, and type's id is returned as it is.
//!
//! ## Usage
//!
//! ```rust
//! use ttmap::TypeMap;
//!
//! let mut map = TypeMap::new();
//!
//! map.insert("string");
//!
//! assert_eq!(*map.get::<&'static str>().unwrap(), "string");
//!
//! map.insert(1u8);
//!
//! assert_eq!(*map.get::<u8>().unwrap(), 1);
//!
//! assert_eq!(map.get_or_default::<String>(), "");
//! ```

#![warn(missing_docs)]

use core::any::TypeId;

mod hash;

type Key = TypeId;

// Cold, never-inlined insert path: a vacant insertion happens at most once per
// type, so it is kept out of the hot lookup path deliberately.
#[cold]
#[inline(never)]
fn unlikely_vacant_insert(this: std::collections::hash_map::VacantEntry<'_, Key, ValueBox>, val: ValueBox) -> &'_ mut ValueBox {
    this.insert(val)
}

// Downcast failures are unreachable by construction: a value stored under
// `TypeId::of::<T>()` is always a `T`. Release builds use the unchecked hint;
// debug builds keep the checked `unreachable!()`.
#[cfg(not(debug_assertions))]
macro_rules! unreach {
    () => ({
        unsafe {
            core::hint::unreachable_unchecked();
        }
    })
}

#[cfg(debug_assertions)]
macro_rules! unreach {
    () => ({
        unreachable!()
    })
}

// UniqueHasherBuilder passes the TypeId through untouched (see module docs).
type HashMap = std::collections::HashMap<Key, ValueBox, hash::UniqueHasherBuilder>;

///Type-safe store, indexed by types.
pub struct TypeMap {
    inner: HashMap,
}

///Valid type for [TypeMap]
pub trait Type: 'static + Send + Sync {}
impl<T: 'static + Send + Sync> Type for T {}

///Shared reference to [Type]
pub type ValueRef<'a> = &'a (dyn core::any::Any + Send + Sync);
///Mutable reference to [Type]
pub type ValueMut<'a> = &'a mut (dyn core::any::Any + Send + Sync);
///Boxed [Type]
pub type ValueBox = Box<dyn core::any::Any + Send + Sync>;

impl TypeMap {
    #[inline]
    ///Creates new instance
    pub fn new() -> Self {
        Self {
            inner: HashMap::with_capacity_and_hasher(0, hash::UniqueHasherBuilder),
        }
    }

    #[inline]
    ///Returns number of key & value pairs inside.
    pub fn len(&self) -> usize {
        self.inner.len()
    }

    #[inline]
    ///Returns number of elements the map can hold without reallocating.
    pub fn capacity(&self) -> usize {
        self.inner.capacity()
    }

    #[inline]
    ///Returns whether map is empty
    pub fn is_empty(&self) -> bool {
        self.inner.is_empty()
    }

    #[inline]
    ///Removes all pairs of key & value from the map.
    pub fn clear(&mut self) {
        self.inner.clear()
    }

    #[inline]
    ///Returns whether element is present in the map.
    pub fn has<T: Type>(&self) -> bool {
        self.inner.contains_key(&TypeId::of::<T>())
    }

    #[inline]
    ///Returns whether element is present in the map.
    pub fn contains_key<T: Type>(&self) -> bool {
        self.inner.contains_key(&TypeId::of::<T>())
    }

    #[inline]
    ///Access element in the map, returning reference to it, if present
    pub fn get<T: Type>(&self) -> Option<&T> {
        match self.inner.get(&TypeId::of::<T>()) {
            Some(ptr) => match ptr.downcast_ref() {
                Some(res) => Some(res),
                // Impossible: values are keyed by their own TypeId.
                None => unreach!(),
            },
            None => None
        }
    }

    #[inline]
    ///Access element in the map with type-id provided at runtime, returning reference to it, if present
    pub fn get_raw(&self, k: TypeId) -> Option<ValueRef> {
        match self.inner.get(&k) {
            Some(ptr) => Some(ptr.as_ref()),
            None => None
        }
    }

    #[inline]
    ///Access element in the map, returning mutable reference to it, if present
    pub fn get_mut<T: Type>(&mut self) -> Option<&mut T> {
        match self.inner.get_mut(&TypeId::of::<T>()) {
            Some(ptr) => match ptr.downcast_mut() {
                Some(res) => Some(res),
                // Impossible: values are keyed by their own TypeId.
                None => unreach!(),
            },
            None => None
        }
    }

    #[inline]
    ///Access element in the map with type-id provided at runtime, returning mutable reference to it, if present
    pub fn get_mut_raw(&mut self, k: TypeId) -> Option<ValueMut> {
        match self.inner.get_mut(&k) {
            Some(ptr) => Some(ptr.as_mut()),
            None => None
        }
    }

    #[inline]
    ///Access element in the map, if not present, constructs it using default value.
    pub fn get_or_default<T: Type + Default>(&mut self) -> &mut T {
        use std::collections::hash_map::Entry;

        match self.inner.entry(TypeId::of::<T>()) {
            Entry::Occupied(occupied) => {
                match occupied.into_mut().downcast_mut() {
                    Some(res) => res,
                    None => unreach!(),
                }
            },
            Entry::Vacant(vacant) => {
                // Cold path: boxing the default happens once per type.
                let ptr = unlikely_vacant_insert(vacant, Box::new(T::default()));
                match ptr.downcast_mut() {
                    Some(res) => res,
                    None => unreach!(),
                }
            }
        }
    }

    ///Insert element inside the map, returning heap-allocated old one if any
    ///
    ///## Note
    ///
    ///Be careful when inserting without explicitly specifying type.
    ///Some special types like function pointers are impossible to infer as non-anonymous type.
    ///You should manually specify type when in doubt.
    pub fn insert<T: Type>(&mut self, value: T) -> Option<Box<T>> {
        use std::collections::hash_map::Entry;

        match self.inner.entry(TypeId::of::<T>()) {
            Entry::Occupied(mut occupied) => {
                let result = occupied.insert(Box::new(value));
                match result.downcast() {
                    Ok(result) => Some(result),
                    Err(_) => unreach!()
                }
            },
            Entry::Vacant(vacant) => {
                vacant.insert(Box::new(value));
                None
            }
        }
    }

    ///Insert boxed element inside the map with dynamic type,
    ///returning heap-allocated old one with the same type-id if any.
    ///
    ///This does not reallocate `value`.
    pub fn insert_raw(&mut self, value: ValueBox) -> Option<ValueBox> {
        use std::collections::hash_map::Entry;

        // `as_ref().type_id()` keys on the *inner* value's type, not on
        // `Box<dyn Any>` itself — calling `value.type_id()` would be wrong here.
        match self.inner.entry(value.as_ref().type_id()) {
            Entry::Occupied(mut occupied) => {
                let result = occupied.insert(value);
                Some(result)
            },
            Entry::Vacant(vacant) => {
                vacant.insert(value);
                None
            }
        }
    }

    ///Attempts to remove element from the map, returning boxed `Some` if it is present.
    pub fn remove<T: Type>(&mut self) -> Option<Box<T>> {
        self.inner.remove(&TypeId::of::<T>()).map(|ptr| {
            match ptr.downcast() {
                Ok(result) => result,
                Err(_) => unreach!()
            }
        })
    }

    #[inline]
    ///Attempts to remove element from the map with type-id provided at runtime, returning boxed `Some` if it is present.
    pub fn remove_raw(&mut self, id: TypeId) -> Option<ValueBox> {
        self.inner.remove(&id)
    }
}

impl core::default::Default for TypeMap {
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}

impl core::fmt::Debug for TypeMap {
    #[inline]
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        writeln!(f, "TypeMap {{ size={}, capacity={} }}", self.len(), self.capacity())
    }
}
#![crate_name = "uu_logname"]

/*
 * This file is part of the uutils coreutils package.
 *
 * (c) Benoit Benedetti <benoit.benedetti@gmail.com>
 *
 * For the full copyright and license information, please view the LICENSE
 * file that was distributed with this source code.
 */

/* last synced with: logname (GNU coreutils) 8.22 */

extern crate getopts;
extern crate libc;
#[macro_use]
extern crate uucore;

use std::ffi::CStr;
use std::io::Write;

extern {
    // POSIX requires using getlogin (or equivalent code)
    pub fn getlogin() -> *const libc::c_char;
}

// Returns the login name of the controlling terminal's user, or `None` when
// there is no associated login (e.g. detached daemons, some containers).
fn get_userlogin() -> Option<String> {
    unsafe {
        // SAFETY: getlogin() returns NULL or a pointer to a NUL-terminated
        // string in a libc-owned buffer; we null-check it and copy the bytes
        // out immediately, so the pointer never outlives this call.
        let login: *const libc::c_char = getlogin();

        if login.is_null() {
            None
        } else {
            // Lossy conversion: invalid UTF-8 bytes become U+FFFD rather than erroring.
            Some(String::from_utf8_lossy(CStr::from_ptr(login).to_bytes()).to_string())
        }
    }
}

static NAME: &'static str = "logname";
static VERSION: &'static str = env!("CARGO_PKG_VERSION");

// Entry point invoked by the uutils multicall binary; returns the exit code.
pub fn uumain(args: Vec<String>) -> i32 {
    //
    // Argument parsing
    //
    let mut opts = getopts::Options::new();

    opts.optflag("h", "help", "display this help and exit");
    opts.optflag("V", "version", "output version information and exit");

    let matches = match opts.parse(&args[1..]) {
        Ok(m) => m,
        // crash! (from uucore) prints the message and exits with code 1.
        Err(f) => crash!(1, "Invalid options\n{}", f)
    };

    if matches.opt_present("help") {
        let msg = format!("{0} {1} Usage: {0} Print user's login name.", NAME, VERSION);

        print!("{}", opts.usage(&msg));
        return 0;
    }
    if matches.opt_present("version") {
        println!("{} {}", NAME, VERSION);
        return 0;
    }

    exec();

    0
}

// NOTE(review): GNU logname writes the failure message to stderr and exits
// non-zero; this version prints to stdout and always succeeds — confirm intended.
fn exec() {
    match get_userlogin() {
        Some(userlogin) => println!("{}", userlogin),
        None => println!("{}: no login name", NAME)
    }
}
#[macro_use]
mod syntax_kind;
mod grammar;
mod lexer;

pub use syntax_kind::SyntaxKind;

#[cfg(test)]
mod tests {
    use super::*;
    use std::fmt;

    /// Checks every `(left, right)` pair for equality, stopping at the first
    /// mismatch with the usual `assert_eq!` diagnostics.
    fn multi_assert<T>(assertions: &[(T, T)])
    where
        T: fmt::Debug + PartialEq,
    {
        for (left, right) in assertions {
            assert_eq!(left, right);
        }
    }

    /// Every `T![…]` shorthand must expand to the matching `SyntaxKind` variant.
    #[test]
    fn it_works() {
        let cases = [
            (T![==], SyntaxKind::EqEq),
            (T![ident], SyntaxKind::Ident),
            (T![fn], SyntaxKind::FnKw),
            (T![if], SyntaxKind::IfKw),
            (T![')'], SyntaxKind::RParen),
        ];
        multi_assert(&cases)
    }
}
/*
 * This code is totally unsafe.
 * It relies on the gc thread getting enough cpu time that it never lags
 * too far behind the app thread. If this assumption fails, undefined behaviour
 * results.
 */
#![feature(alloc)]

use std::boxed;
use std::mem::transmute;
use std::sync::Arc;
use std::sync::atomic::{AtomicIsize, AtomicBool, Ordering};
use std::thread;

// Ring buffer shared between the producer ("app") thread and the collector
// ("gc") thread. `elements` holds raw Box pointers stored as usize.
struct Buffer {
    // Last slot index published by the producer (Release/Acquire ordered).
    counter: AtomicIsize,
    elements: [usize; 65536],
    // Back-pressure flag: set by the collector when the producer runs too far ahead.
    wait: AtomicBool
}

impl Buffer {
    fn new() -> Buffer {
        Buffer{ counter: AtomicIsize::new(0), elements: [0; 65536], wait: AtomicBool::new(false) }
    }
}

fn main() {
    let limit = 10000000;
    let buffer = Arc::new(Buffer::new());
    let buffer_clone = buffer.clone();

    // Producer thread: boxes each value and publishes its raw pointer into the ring.
    thread::spawn(move|| {
        let mut wait_counter = 0;
        unsafe {
            // NOTE(review): transmuting a shared reference to a mutable raw
            // pointer; both threads then access `elements` without
            // synchronization — this is the deliberate UB the header warns about.
            let raw = transmute::<&usize, *mut usize>(&buffer_clone.elements[0]);
            for value in 0..limit + 1 {
                // Spin (yielding) while the collector asks us to back off.
                while buffer_clone.wait.load(Ordering::Acquire) == true {
                    wait_counter += 1;
                    thread::yield_now();
                }
                let d = Box::new(value);
                // boxed::into_raw is the pre-1.4 spelling of Box::into_raw.
                let ptr = boxed::into_raw(d) as usize;
                *raw.offset(value % 65536) = ptr;
                // Publish the slot index after the pointer write (Release pairs
                // with the collector's Acquire load).
                buffer_clone.counter.store(value, Ordering::Release);
            }
        }
        println!("app waitied {} cycles", wait_counter);
    });

    // Collector loop (runs on the main thread): frees published boxes,
    // throttling the producer when it gets more than 1024 slots ahead.
    unsafe {
        let raw = transmute::<&usize, *mut usize>(&buffer.elements[0]);
        let mut counter: isize = 0;
        let mut wait = false;
        let mut wait_counter = 0;
        loop {
            let value = buffer.counter.load(Ordering::Acquire);
            if counter == limit {
                break;
            }
            if value - counter > 1024 {
                buffer.wait.store(true, Ordering::Release);
                wait = true;
                wait_counter += 1;
            }
            while counter < value {
                let ptr = *raw.offset(counter % 65536);
                // NOTE(review): allocated above as Box<usize> but reclaimed here
                // as Box<isize> — same size/align, but the types disagree; confirm.
                let d = Box::from_raw(ptr as *mut isize);
                drop(d);
                counter += 1;
            }
            if wait == true {
                buffer.wait.store(false, Ordering::Release);
                wait = false;
            }
        }
        println!("gc waited {} times", wait_counter);
    }
}
use crate::base::Mat; use crate::base::Vector; use crate::base::Layer; pub struct MaxPooling { pub input: Mat, pub output: Mat, pub input_width: usize, pub input_height: usize, pub pool_size: usize, pub strides: usize, pub padding: usize, pub max_index: Mat, } impl MaxPooling { pub fn new( input_width: usize, input_height: usize, pool_size: usize, strides: usize, padding: usize, ) -> Self { MaxPooling { input: vec![vec![]], output: vec![vec![]], input_width: input_width, input_height: input_height, pool_size: pool_size, strides: strides, padding: padding, max_index: vec![vec![]], } } fn pooling_single_channel( &mut self, input: &Vector, h: usize, w: usize, pooling_size: usize, padding: usize, strides: usize, ) -> (Vector, Vec<usize>) { if input.len() != (h * w) as usize { panic!( "input size error, w={}, h={}, but length is {}", w, h, input.len() ); } let mut rs = vec![]; let mut index_vec:Vec<usize> = vec![]; let h_max = h - kernel_size + 1 + 2 * padding; let w_max = w - kernel_size + 1 + 2 * padding; let new_w = w + 2 * padding; let new_h = h + 2 * padding; for hi in (0..h_max).step_by(strides as usize) { for wi in (0..w_max).step_by(strides as usize) { let base = wi + hi * new_w; let mut maxv = 0.0f32; for kh in 0..kernel_size { for kw in 0..kernel_size { let index = base + kw + (kh * new_w); let py = hi + kh; let px = wi + kw; if py >= padding && px >= padding && px < w + padding && py < new_h - padding { let rindex = index - w * padding - py * 2 * padding - padding let input_v = input[rindex]; if input_v > maxv { maxv = input_v; } index_vec.push(rindex); } } } rs.push(maxv); } } (rs,index_vec) } } impl Layer for MaxPooling { fn forward(&mut self, input: &Mat, _: bool) -> Mat { self.input = input.clone(); let mut result: Mat = vec![vec![]]; for row in self.input.iter() { let (rs_row, indexs_row) = self.pooling_single_channel(row, self.input_height, self.input_width, self.pooling_size, self.padding, self.strides) result.push(rs_row); 
self.max_index.push(indexs_row); } result } fn backward(&mut self, up_grads: &Mat) -> Mat { let channel = self.input.len(); let length = self.input_height*self.input_width; let mut back = vec![vec![0f32; length]; channel]; for (i, ug) in up_grads.iter().enumerate() { for (j, grads) in ug.iter().enumerate() { back[i][self.max_index[i][j]] = grads; } } back.clone() } fn update_weights(&mut self, lamda: f32) { } fn clear(&mut self) { } }
use std::convert::TryInto;

use schemars::JsonSchema;
use serde::{Deserialize, Serialize};

use cosmwasm_std::{Addr, Coin, Order, StdResult, Storage, Uint128};
use cw0::Expiration;
use cw_storage_plus::{Bound, I64Key, Item, Map, U128Key};

use crate::msg::{Params, PlanContent};

/// (plan-id, subscriber-address)
pub type SubscriptionKey<'a> = (U128Key, &'a str);

/// Store contract params
pub const PARAMS: Item<Params> = Item::new("params");
/// Store the self-incremental unique ids for plans
pub const PLAN_ID: Item<Uint128> = Item::new("planid");
/// Store the plans, `plan-id -> Plan`
pub const PLANS: Map<U128Key, Plan> = Map::new("plans");
/// Store the subscriptions, `(plan-id, subscriber) -> Subscription`
pub const SUBSCRIPTIONS: Map<SubscriptionKey, Subscription> = Map::new("plan-subs");

// /// Subscription queue ordered by expiration time
// /// (expiration-time, subscription-id) -> ()
// pub const Q_EXPIRATION: Map<(i64, Uint128), ()> = Map::new("subs-expiration");

/// Subscription queue ordered by next_collection_time
/// (next-collection-time, plan-id, subscriber) -> ()
pub const Q_COLLECTION: Map<(I64Key, SubscriptionKey), ()> = Map::new("q-collection");

const ZERO: Uint128 = Uint128::zero();

/// A subscription plan: who owns it, its user-supplied content, and the
/// deposit held for it.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct Plan {
    pub id: Uint128,
    pub owner: Addr,
    pub content: PlanContent<Addr>,
    pub deposit: Vec<Coin>,
}

/// One subscriber's membership in a plan.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
pub struct Subscription {
    pub expires: Expiration,
    /// Initialized to current block time created
    pub last_collection_time: i64,
    pub next_collection_time: i64,
    pub deposit: Vec<Coin>,
}

/// Allocate the next unused plan id, persisting the counter.
///
/// Wraps around on overflow and linearly probes past ids that are still in
/// use, so ids stay unique even after wrap-around.
pub fn gen_plan_id(store: &mut dyn Storage) -> StdResult<Uint128> {
    let mut plan_id = PLAN_ID.may_load(store)?.unwrap_or(ZERO);
    plan_id = plan_id.wrapping_add(1u64.into());
    // ensure id not used
    while store
        .get(&PLANS.key(U128Key::from(plan_id.u128())))
        .is_some()
    {
        plan_id = plan_id.wrapping_add(1u64.into());
    }
    PLAN_ID.save(store, &plan_id)?;
    Ok(plan_id)
}

/// Iterate all subscriptions of one plan in ascending subscriber order,
/// optionally resuming after `start_after` (exclusive) for pagination.
///
/// PANIC: if deserialization failed caused by corrupted storage
pub fn iter_subscriptions_by_plan(
    store: &dyn Storage,
    plan_id: Uint128,
    start_after: Option<Addr>,
) -> impl Iterator<Item = (Addr, Subscription)> + '_ {
    let start = start_after.map(|addr| Bound::exclusive(addr.as_ref()));
    SUBSCRIPTIONS
        .prefix(plan_id.u128().into())
        .range(store, start, None, Order::Ascending)
        .map(|mpair| {
            let (k, v) = mpair.unwrap();
            // Within this prefix, the remaining key bytes are the subscriber
            // address.
            (Addr::unchecked(String::from_utf8(k).unwrap()), v)
        })
}

/// Iterate queue entries whose collection time is `<= now`, in time order,
/// yielding `(collection_time, plan_id, subscriber)`.
///
/// PANIC: if deserialization failed because of corrupted storage
pub fn iter_collectible_subscriptions(
    store: &dyn Storage,
    now: i64,
) -> impl Iterator<Item = (i64, Uint128, Addr)> + '_ {
    // Raw-range over the queue namespace: [time 0, time now+1) — the +1 makes
    // the scan inclusive of `now` since the upper bound is exclusive.
    let minkey = Q_COLLECTION.key((I64Key::from(0), (U128Key::from(0), "")));
    let maxkey = Q_COLLECTION.key((
        I64Key::from(now.checked_add(1).unwrap()),
        (U128Key::from(0), ""),
    ));
    store
        .range(Some(&minkey), Some(&maxkey), Order::Ascending)
        .map(|(k, _)| {
            // decode key, TODO more elegant way?
            // skip the prefix
            let (_, k) = decode_key_step(&k).unwrap();
            let (s, k) = decode_key_step(k).unwrap();
            let collection_time = i64::from_be_bytes(s.try_into().unwrap());
            let (s, k) = decode_key_step(k).unwrap();
            let plan_id = u128::from_be_bytes(s.try_into().unwrap());
            // the last part is not prefixed with length
            let addr = Addr::unchecked(String::from_utf8(k.to_owned()).unwrap());
            (collection_time, plan_id.into(), addr)
        })
}

/// Split off one length-prefixed key segment: the first two bytes are a
/// big-endian u16 length, followed by that many payload bytes. Returns
/// `(segment, rest)`, or `None` if the buffer is too short.
///
/// decode key, depends on the implemention details in cw-storage-plus
fn decode_key_step(buf: &[u8]) -> Option<(&[u8], &[u8])> {
    if buf.len() < 2 {
        return None;
    }
    let end = u16::from_be_bytes([buf[0], buf[1]]) as usize + 2;
    if buf.len() < end {
        return None;
    }
    Some((&buf[2..end], &buf[end..]))
}
use std::io::{Read, Result as IOResult};

use crate::PrimitiveRead;

/// Fixed-layout file header: identification, LOD vertex counts, and the file
/// offsets of the fixup table, vertex data and tangent data.
pub struct Header {
    pub id: i32,
    pub version: i32,
    pub checksum: i32,
    pub lods_count: i32,
    pub lod_vertexes_count: [i32; 8],
    pub fixups_count: i32,
    pub fixup_table_start: i32,
    pub vertex_data_start: i32,
    pub tangent_data_start: i32
}

impl Header {
    /// Parse a header from `read`, consuming fields in on-disk order.
    ///
    /// # Errors
    /// Propagates any I/O error from the underlying reader.
    pub fn read(read: &mut dyn Read) -> IOResult<Self> {
        let id = read.read_i32()?;
        let version = read.read_i32()?;
        let checksum = read.read_i32()?;
        let lods_count = read.read_i32()?;

        // The file always stores exactly eight per-LOD vertex counts.
        let mut lod_vertexes_count = [0i32; 8];
        for slot in lod_vertexes_count.iter_mut() {
            *slot = read.read_i32()?;
        }

        let fixups_count = read.read_i32()?;
        let fixup_table_start = read.read_i32()?;
        let vertex_data_start = read.read_i32()?;
        let tangent_data_start = read.read_i32()?;

        Ok(Self {
            id,
            version,
            checksum,
            lods_count,
            lod_vertexes_count,
            fixups_count,
            fixup_table_start,
            vertex_data_start,
            tangent_data_start
        })
    }
}
/*
 * Runtime.rs
 *
 * Contains re-implemented parts of Rust's libstd that are needed here, for
 * convenience or because the compiler needs some of them to use some features
 * of the language.
 *
 * Contains some parts copied from Rust's official libstd. See
 * http://github.com/mozilla/rust/
 */

// NOTE(review): this is pre-1.0 Rust (`uint`, bare `*T` pointers, string
// `#[lang]` attributes) targeting a freestanding kernel — it will not build
// with a modern compiler and is documented as-is.

use self::libc::*;

pub mod intrinsics {
    /*
     * Intrinsics are functions implemented in-compiler by directly inserting
     * LLVM code snippets.
     * One just need to declare their signature in order to be able to use
     * them properly.
     */
    extern "rust-intrinsic" {
        pub fn transmute<T,U>(e: T) -> U;
        pub fn copy_memory<T>(dst: *mut T, src: *T, count: uint);
        pub fn uninit<T>() -> T;
        pub fn size_of<T>() -> uint;
        pub fn offset<T>(dst: *T, offset: int) -> *T;
    }
}

/*
 * TODO: implement range in the runtime
 * Iterators and options needs to be implemented first.
 */
/*
pub struct Range<A> { priv state: A, priv stop: A, priv one: A }

#[inline]
pub fn range<A: Add<A, A> + Ord + Clone + One>(start: A, stop: A) -> Range<A> {
    Range{state: start, stop: stop, one: One::one()}
}

impl<A: Add<A, A> + Ord + Clone> Iterator<A> for Range<A> {
    #[inline]
    fn next(&mut self) -> Option<A> {
        if self.state < self.stop {
            let result = self.state.clone();
            self.state = self.state + self.one;
            Some(result)
        } else {
            None
        }
    }

    // FIXME: #8606 Implement size_hint() on Range
    // Blocked on #8605 Need numeric trait for converting to `Option<uint>`
}
*/

/* some libc types that are needed */
pub mod libc {
    pub type c_char = u8;

    #[cfg(target_word_size = "32")]
    pub type size_t = u32;
    #[cfg(target_word_size = "64")]
    pub type size_t = u64;
}

// Lang item invoked by `fail!()`: no unwinding in a kernel, so just hang.
#[lang="fail_"]
#[inline]
pub fn fail_(expr: *c_char, file: *c_char, line: size_t) -> !
{
    loop {}
}

// Lang item invoked on out-of-bounds indexing: report what we can, then hang.
#[lang="fail_bounds_check"]
#[inline]
pub fn fail_bounds_check(file: *c_char, line: size_t, index: size_t, len: size_t) {
    use util::kprint;
    use util::kprintln;

    kprint("Index out of bounds at ");
    // kprint(file);
    kprintln("");
    /*
     * TODO: implement format, or print_int or whatever, to print the line no
     * and the index and size of the array
     */
    kprintln("Index is of size ?? but the array is of size ??");
    /* TODO: implement a panic() function */
    loop {}
}

pub mod cast {
    use super::intrinsics;
    use super::sys;

    // Bitwise-copy `*src` into a freshly "allocated" U — the minimal
    // re-implementation of libstd's transmute_copy.
    #[cfg(target_word_size = "64")]
    #[inline]
    pub unsafe fn transmute_copy<T, U>(src: &T) -> U {
        let mut dest: U = intrinsics::uninit();
        let dest_ptr: *mut u8 = transmute(&mut dest);
        let src_ptr: *u8 = transmute(src);
        intrinsics::copy_memory(dest_ptr, src_ptr, sys::size_of::<U>());
        dest
    }

    // Reinterpret `thing` as type `G` with no conversion at all.
    #[inline]
    pub unsafe fn transmute<L, G>(thing: L) -> G {
        intrinsics::transmute(thing)
    }
}

pub mod sys {
    // Size of `T` in bytes, via the compiler intrinsic.
    #[inline]
    pub fn size_of<T>() -> uint {
        unsafe { super::intrinsics::size_of::<T>() }
    }
}

// Compiler-visible memory layout of a slice (`&[T]`): data pointer + length.
pub struct Slice<T> {
    data: *T,
    len: uint
}

// Expose the raw representation of a type (e.g. view `&[T]` as `Slice<T>`).
pub trait Repr<T> {
    #[inline]
    fn repr(&self) -> T {
        unsafe { cast::transmute_copy(self) }
    }
}
// NOTE(review): looks like svd2rust-generated register-access code — keep in
// sync with the generator rather than editing by hand.
#[doc = "Reader of register TPLOG6"]
pub type R = crate::R<u32, super::TPLOG6>;
#[doc = "Reader of field `TIME`"]
pub type TIME_R = crate::R<u32, u32>;
impl R {
    #[doc = "Bits 0:31 - Tamper Log Calendar Information"]
    #[inline(always)]
    pub fn time(&self) -> TIME_R {
        // The field spans the full 32-bit register, so the mask is a no-op
        // kept for uniformity with other generated field accessors.
        TIME_R::new((self.bits & 0xffff_ffff) as u32)
    }
}
// MPI driver: the root rank renders (or benchmarks) the field while all other
// ranks compute their slice until the root broadcasts a completion flag.
use mpi::collective::Root;
use mpi::traits::Communicator;
use sdl2::event::Event;
use sdl2::pixels::Color;

use crate::calculation::mpi_eng::common::*;
use crate::calculation::mpi_eng::procedure::*;
use crate::global::*;

/// Entry point: the process's MPI rank decides whether it drives or computes.
pub fn main() {
    if WORLD.rank() == ROOT {
        root_routine()
    } else {
        sub_routine()
    }
}

/// Root-rank driver. In benchmark mode it runs a single timed pass and exits;
/// otherwise it opens an SDL window and renders iterations until convergence
/// or a quit event.
fn root_routine() {
    show_global();
    if *BENCHMARK {
        // `flag = true` is broadcast below to tell the workers to stop.
        let mut flag = true;
        let mut buffer = Vec::new();
        buffer.resize(*WIDTH * *HEIGHT, 0.0);
        let mut proc = ProcData::default();
        let (prei, starti, endi, padi) = init_range(&mut proc);
        // SOR needs the even/odd (red/black) index split; Jacobi does not.
        let sor = match &*MATH_METHOD {
            MathMethod::Sor => Some(filter_index(&proc)),
            MathMethod::Jacobi => None
        };
        let start = std::time::SystemTime::now();
        match &*MATH_METHOD {
            MathMethod::Sor => {
                let (even, odd) = sor.as_ref().unwrap();
                do_root_sor(&mut buffer, &mut proc, &prei, &starti, &endi, &padi, even, odd)
            },
            MathMethod::Jacobi => do_root(&mut buffer, &mut proc, &prei, &starti, &endi, &padi)
        };
        // Release the worker ranks from their compute loops.
        ROOT_PROC.broadcast_into(&mut flag);
        let end = std::time::SystemTime::now();
        println!("Duration: {}", (end.duration_since(start).unwrap().as_millis()));
    } else {
        // Interactive mode: render the field with SDL2 while iterating.
        let sdl_context = sdl2::init().unwrap();
        let video_subsystem = sdl_context.video().unwrap();
        let window = video_subsystem.window("HW4", (*WIDTH * *SCALE) as u32, (*HEIGHT * *SCALE) as u32)
            .position_centered()
            .build()
            .unwrap();
        let mut canvas = window.into_canvas().build().unwrap();
        let mut buffer = Vec::new();
        buffer.resize(*WIDTH * *HEIGHT, 0.0);
        canvas.set_draw_color(Color::RGB(0, 255, 255));
        canvas.clear();
        canvas.present();
        let mut event_pump = sdl_context.event_pump().unwrap();
        // `flag` becomes true once the computation has converged (or on quit).
        let mut flag = false;
        let start = std::time::SystemTime::now();
        let mut fps_start = start;
        let mut depth = 0;
        let mut counter = 0;
        let mut proc = ProcData::default();
        let (prei, starti, endi, padi) = init_range(&mut proc);
        let sor = match &*MATH_METHOD {
            MathMethod::Sor => Some(filter_index(&proc)),
            MathMethod::Jacobi => None
        };
        'running: loop {
            //println!("{:?}", pool);
            depth += 1;
            counter += 1;
            canvas.set_scale(*SCALE as f32, *SCALE as f32).unwrap();
            // One more solver pass unless already converged; `checking` is the
            // solver's convergence verdict for this pass.
            let checking = if !flag {
                match &*MATH_METHOD {
                    MathMethod::Sor => {
                        let (even, odd) = sor.as_ref().unwrap();
                        do_root_sor(&mut buffer, &mut proc, &prei, &starti, &endi, &padi, even, odd)
                    },
                    MathMethod::Jacobi => do_root(&mut buffer, &mut proc, &prei, &starti, &endi, &padi)
                }
            } else {
                true
            };
            if !flag && checking {
                flag = true;
                let end = std::time::SystemTime::now();
                println!("Duration: {}, Final Depth: {}", end.duration_since(start).unwrap().as_millis(), depth);
            }
            // Map each cell's value (0..100) to a blue→red gradient.
            for i in 0..*WIDTH * *HEIGHT {
                let color = (buffer[i] / 100.0 * 255.0) as u8;
                canvas.set_draw_color(Color::RGB(color, 0, 255 - color));
                let cords = cords(i);
                let point = sdl2::rect::Point::new(cords.0 as i32, cords.1 as i32);
                canvas.draw_point(point).unwrap();
            }
            for event in event_pump.poll_iter() {
                match event {
                    Event::Quit { .. } => {
                        // Tell the workers to stop before tearing down.
                        flag = true;
                        ROOT_PROC.broadcast_into(&mut flag);
                        break 'running;
                    }
                    _ => {}
                }
            }
            if !flag {
                // Keep the workers looping and update the FPS display.
                ROOT_PROC.broadcast_into(&mut flag);
                show_state(&mut counter, &mut fps_start, &depth);
            }
            canvas.present();
        }
    }
}

/// Worker-rank loop: compute this rank's slice until the root broadcasts a
/// true stop flag.
fn sub_routine() {
    match &*MATH_METHOD {
        MathMethod::Jacobi => {
            let mut proc = ProcData::default();
            let mut flag = false;
            proc.init();
            while !flag {
                do_sub(&mut proc);
                // Receive the stop flag broadcast by the root each pass.
                ROOT_PROC.broadcast_into(&mut flag);
            }
        },
        MathMethod::Sor => {
            let mut proc = ProcData::default();
            let mut flag = false;
            proc.init();
            let (even, odd) = filter_index(&proc);
            while !flag {
                do_sub_sor(&mut proc, &even, &odd);
                ROOT_PROC.broadcast_into(&mut flag);
            }
        }
    }
}
/// Extend `std::io::Write` with wincon styling
///
/// Generally, you will want to use [`Console`][crate::Console] instead
pub trait WinconStream {
    /// Change the foreground/background
    ///
    /// A common pitfall is to forget to flush writes to
    /// stdout before setting new text attributes.
    fn set_colors(
        &mut self,
        fg: Option<anstyle::AnsiColor>,
        bg: Option<anstyle::AnsiColor>,
    ) -> std::io::Result<()>;

    /// Get the current foreground/background colors
    fn get_colors(
        &self,
    ) -> std::io::Result<(Option<anstyle::AnsiColor>, Option<anstyle::AnsiColor>)>;
}

// The std stream impls delegate to the platform backend selected at the
// bottom of this file (`wincon` on Windows, `ansi` elsewhere).
impl WinconStream for std::io::Stdout {
    fn set_colors(
        &mut self,
        fg: Option<anstyle::AnsiColor>,
        bg: Option<anstyle::AnsiColor>,
    ) -> std::io::Result<()> {
        inner::set_colors(self, fg, bg)
    }

    fn get_colors(
        &self,
    ) -> std::io::Result<(Option<anstyle::AnsiColor>, Option<anstyle::AnsiColor>)> {
        inner::get_colors(self)
    }
}

impl<'s> WinconStream for std::io::StdoutLock<'s> {
    fn set_colors(
        &mut self,
        fg: Option<anstyle::AnsiColor>,
        bg: Option<anstyle::AnsiColor>,
    ) -> std::io::Result<()> {
        inner::set_colors(self, fg, bg)
    }

    fn get_colors(
        &self,
    ) -> std::io::Result<(Option<anstyle::AnsiColor>, Option<anstyle::AnsiColor>)> {
        inner::get_colors(self)
    }
}

impl WinconStream for std::io::Stderr {
    fn set_colors(
        &mut self,
        fg: Option<anstyle::AnsiColor>,
        bg: Option<anstyle::AnsiColor>,
    ) -> std::io::Result<()> {
        inner::set_colors(self, fg, bg)
    }

    fn get_colors(
        &self,
    ) -> std::io::Result<(Option<anstyle::AnsiColor>, Option<anstyle::AnsiColor>)> {
        inner::get_colors(self)
    }
}

impl<'s> WinconStream for std::io::StderrLock<'s> {
    fn set_colors(
        &mut self,
        fg: Option<anstyle::AnsiColor>,
        bg: Option<anstyle::AnsiColor>,
    ) -> std::io::Result<()> {
        inner::set_colors(self, fg, bg)
    }

    fn get_colors(
        &self,
    ) -> std::io::Result<(Option<anstyle::AnsiColor>, Option<anstyle::AnsiColor>)> {
        inner::get_colors(self)
    }
}

// Files always get ANSI escapes written into the byte stream rather than the
// cfg-selected backend. NOTE(review): presumably because a file has no console
// screen-buffer state to set or query — confirm against crate history.
impl WinconStream for std::fs::File {
    fn set_colors(
        &mut self,
        fg: Option<anstyle::AnsiColor>,
        bg: Option<anstyle::AnsiColor>,
    ) -> std::io::Result<()> {
        ansi::set_colors(self, fg, bg)
    }

    fn get_colors(
        &self,
    ) -> std::io::Result<(Option<anstyle::AnsiColor>, Option<anstyle::AnsiColor>)> {
        ansi::get_colors(self)
    }
}

#[cfg(windows)]
mod wincon {
    use std::os::windows::io::{AsHandle, AsRawHandle};

    // Set colors via the console attribute API. Only applies when BOTH fg and
    // bg are given; a partial request is silently a no-op.
    pub(super) fn set_colors<S: AsHandle>(
        stream: &mut S,
        fg: Option<anstyle::AnsiColor>,
        bg: Option<anstyle::AnsiColor>,
    ) -> std::io::Result<()> {
        let handle = stream.as_handle();
        let handle = handle.as_raw_handle();
        if let (Some(fg), Some(bg)) = (fg, bg) {
            let attributes = crate::windows::set_colors(fg, bg);
            crate::windows::set_console_text_attributes(handle, attributes)
        } else {
            Ok(())
        }
    }

    // Query the console screen buffer for the current fg/bg attributes.
    pub(super) fn get_colors<S: AsHandle>(
        stream: &S,
    ) -> std::io::Result<(Option<anstyle::AnsiColor>, Option<anstyle::AnsiColor>)> {
        let handle = stream.as_handle();
        let handle = handle.as_raw_handle();
        let info = crate::windows::get_screen_buffer_info(handle)?;
        let (fg, bg) = crate::windows::get_colors(&info);
        Ok((Some(fg), Some(bg)))
    }
}

mod ansi {
    // Emit ANSI escape sequences; `(None, None)` emits a full reset.
    pub(super) fn set_colors<S: std::io::Write>(
        stream: &mut S,
        fg: Option<anstyle::AnsiColor>,
        bg: Option<anstyle::AnsiColor>,
    ) -> std::io::Result<()> {
        if let Some(fg) = fg {
            write!(stream, "{}", fg.render_fg())?;
        }
        if let Some(bg) = bg {
            write!(stream, "{}", bg.render_bg())?;
        }
        if fg.is_none() && bg.is_none() {
            write!(stream, "{}", anstyle::Reset.render())?;
        }
        Ok(())
    }

    // ANSI streams cannot be queried for their current colors.
    pub(super) fn get_colors<S>(
        _stream: &S,
    ) -> std::io::Result<(Option<anstyle::AnsiColor>, Option<anstyle::AnsiColor>)> {
        Ok((None, None))
    }
}

// Select the platform backend for the std stream impls above.
#[cfg(not(windows))]
use ansi as inner;
#[cfg(windows)]
use wincon as inner;
// Discord "collectibles" bot: chat commands backed by Postgres, plus a small
// warp HTTP endpoint exposing user inventories as JSON.
mod database;

use crate::database::{DatabaseQueries, UserModel};
use crate::database::{CollectibleModel, DatabaseHelper, };
use anyhow::{bail, Error};
use anyhow::Result;
use serde_derive::Deserialize;
use serenity::client::{Context, EventHandler};
use serenity::model::channel::{Attachment, Message, Reaction};
use serenity::model::id::UserId;
use serenity::utils::{MessageBuilder};
use serenity::{async_trait, Client};
use std::sync::Arc;
use tokio::fs;
use hex_literal::hex;
use serenity::framework::StandardFramework;
use image::ImageOutputFormat;
use serenity::model::prelude::Ready;
use std::fmt::Write;
use rand::Rng;
use serenity::model::user::User;
use warp::Filter;
use serde_derive::Serialize;

// Unwrap an `Option` or bail out of the surrounding `Result`-returning fn.
macro_rules! none_error {
    ($x:expr) => {{
        match $x {
            Some(x) => x,
            None => bail!("none error"),
        }
    }};
}

/// Guess an image file extension from magic bytes. Recognizes PNG and the
/// common JPEG signatures; anything else reports "unknown".
fn get_image_extension(image: &[u8]) -> &str {
    const PNG_MAGIC: &[u8] = &hex!("89 50 4E 47 0D 0A 1A 0A");
    const JPG_MAGIC1: &[u8] = &hex!("FF D8 FF EE");
    const JPG_MAGIC2: &[u8] = &hex!("FF D8 FF E0 00 10 4A 46 49 46 00 01");
    const JPG_MAGIC3: &[u8] = &hex!("FF D8 FF E1");
    if image.starts_with(PNG_MAGIC) {
        "png"
    } else if image.starts_with(JPG_MAGIC1)
        || image.starts_with(JPG_MAGIC2)
        || image.starts_with(JPG_MAGIC3)
    {
        "jpg"
    } else {
        "unknown"
    }
}

/// Send `message` (truncated to Discord's 2000-character limit) to the
/// channel of `original`, optionally attaching `image` with an extension
/// guessed from its magic bytes.
async fn send_message(message: &str, context: &Context, original: &Message, image: Option<&[u8]>) -> Result<()> {
    let message: String = message.chars().take(2000).collect();
    original
        .channel_id
        .send_message(context, |builder| {
            builder.content(message);
            if let Some(image) = image {
                let image_name = format!("image.{}", get_image_extension(image));
                builder.add_file((image, image_name.as_str()));
            }
            builder
        })
        .await?;
    Ok(())
}

/// Shared bot state: static configuration plus a handle for opening database
/// connections.
struct DiscordBot {
    config: Configuration,
    database_helper: DatabaseHelper,
}

impl DiscordBot {
    /// Build the shared bot state and the serenity client wired to it, and
    /// make sure the database tables exist before serving anything.
    async fn new(config: Configuration) -> Result<(Arc<DiscordBot>, Client)> {
        let database_helper = DatabaseHelper {
            connection_string: config.database_connection_string.clone(),
        };
        let bot = Arc::new(DiscordBot {
            config,
            database_helper,
        });
        let discord = DiscordHandler { bot: bot.clone() };
        let client = Client::new(&bot.config.discord_token).event_handler(discord).framework(StandardFramework::new()).await?;
        // Schema creation up front so later queries can assume tables exist.
        bot.database_helper.simple().await?.run_create_tables().await?;
        Ok((bot, client))
    }
}

/// serenity event handler; all chat commands are dispatched from here.
struct DiscordHandler {
    bot: Arc<DiscordBot>,
}

impl DiscordHandler {
    /// Dispatch one `~command arg…` line, already split into words.
    async fn do_command(&self, string: &[&str], context: &Context, message: &Message) -> Result<()> {
        let command = string[0];
        let args = &string[1..];
        if command == "hello" {
            self.on_hello(context, message).await?;
        } else if command == "ping" {
            let who = none_error!(string.get(1));
            self.ping_someone(context, message, who).await?;
        } else if command == "clear" {
            self.clear_channel(context, message).await?;
        } else if command == "add_collectible" {
            self.on_create_collectible(context, message, args).await?;
        } else if command == "my_collectibles" {
            self.on_my_collectible(context, message).await?;
        } else if command == "add_pack" {
            self.on_add_pack(context, message, args).await?;
        } else if command == "packs" {
            self.on_packs(context, message).await?;
        } else if command == "open" {
            self.on_open(context, message, args).await?;
        } else if command == "userinfo" {
            self.on_user_info(context, message).await?;
        }
        Ok(())
    }

    /// `~add_pack <name> <price> <collectible-id>…` — create a pack of
    /// existing collectibles. Each id is looked up inside the transaction to
    /// validate it before the pack row is inserted.
    async fn on_add_pack(&self, _context: &Context, original: &Message, args: &[&str]) -> Result<()> {
        let name = none_error!(args.get(0));
        let price = none_error!(args.get(1)).parse()?;
        let ids: Result<Vec<i32>, std::num::ParseIntError> = args[2..].iter().map(|x| x.parse()).collect();
        let ids = ids?;
        let photo: Option<Vec<u8>> = DiscordHandler::get_image(&original).await?;
        let mut database = self.bot.database_helper.simple().await?;
        let database = database.transaction().await?;
        let user = database
            .get_user_by_discord_id(original.author.id.0)
            .await?;
        // Validate every referenced collectible; a missing id aborts the
        // transaction via `?`.
        for i in &ids {
            let _ = database.get_collectibles_by_id(*i).await?;
        }
        let guild_id = original.guild_id.unwrap().0 as i64;
        database.add_pack(name, &ids, user.id, guild_id, photo.as_deref(), price).await?;
        database.commit().await?;
        Ok(())
    }

    /// `~open <pack>` — charge the author the pack price and award one
    /// uniformly random collectible from the pack.
    async fn on_open(&self, context: &Context, original: &Message, args: &[&str]) -> Result<()> {
        let name = none_error!(args.get(0));
        let guild_id = none_error!(original.guild_id).0;
        let database = self.bot.database_helper.simple().await?;
        let pack = database.get_pack_by_guild_name(name, guild_id as i64).await?;
        let collectibles = &pack.collectibles;
        if collectibles.is_empty() {
            return Err(Error::msg("no collectibles"));
        }
        let user = database.get_user_by_discord_id(original.author.id.0).await?;
        if user.money < pack.price {
            original.reply(context, "🧐 you don't have enough money 🧐").await?;
            return Ok(());
        }
        // Charge first, then draw the prize.
        self.add_money(&original.author, -(pack.price as i32)).await?;
        let index = rand::thread_rng().gen_range(0, collectibles.len());
        let chosen = &collectibles[index];
        let message = format!("you opened {}! yay! 🙀🙀🙀 you paid {} jerrygold", chosen.name, pack.price);
        send_message(&message, context, original, chosen.photo.as_deref()).await?;
        database.add_collectible_for_user(user.id, chosen.id).await?;
        Ok(())
    }

    /// `~packs` — reply with a numbered list of pack names and sizes for the
    /// current guild.
    async fn on_packs(&self, context: &Context, original: &Message) -> Result<()> {
        let guild: i64 = none_error!(original.guild_id).0 as i64;
        let database = self.bot.database_helper.simple().await?;
        let packs = database.get_packs_for_guild(guild).await?;
        let mut string = String::new();
        for (index, (name, count)) in packs.into_iter().enumerate() {
            writeln!(&mut string, "{}. {}, count={}", index + 1, name, count)?;
        }
        original.reply(context, string).await?;
        Ok(())
    }

    /// `~my_collectibles` — dump everything the author owns, one message per
    /// collectible (with photo when present).
    async fn on_my_collectible(&self, context: &Context, original: &Message) -> Result<()> {
        let mut database = self.bot.database_helper.simple().await?;
        let database = database.transaction().await?;
        let user = database
            .get_user_by_discord_id(original.author.id.0)
            .await?;
        let collectibles: Vec<CollectibleModel> = database.get_collectibles_for_user_id(user.id).await?;
        for i in collectibles {
            let text = format!(
                "name={}, description={}, rarity={}, id={}, created_by=",
                i.name, i.description, i.rarity, i.id
            );
            // The creator mention is appended via MessageBuilder so Discord
            // renders it as a user link.
            let message = MessageBuilder::new()
                .push(text)
                .user(i.created_by.discord_id as u64)
                .build();
            send_message(&message, &context, &original, i.photo.as_deref()).await?;
        }
        Ok(())
    }

    /// Refresh the stored username and pay out message-length money for every
    /// message a user sends.
    async fn update_user(&self, id: u64, message: &Message) -> Result<()> {
        let mut client = self.bot.database_helper.simple().await?;
        client.update_info(id, &message.author.name).await?;
        self.add_money(&message.author, message.content.trim().len() as i32).await?;
        Ok(())
    }

    /// `~add_collectible <name> <rarity>` (image attached) — register a new
    /// collectible created by the author in the current guild.
    async fn on_create_collectible(
        &self,
        _context: &Context,
        original: &Message,
        args: &[&str],
    ) -> Result<()> {
        let name: &str = none_error!(args.get(0));
        let rarity: &str = none_error!(args.get(1));
        let photo: Option<Vec<u8>> = DiscordHandler::get_image(&original).await?;
        let mut database = self.bot.database_helper.simple().await?;
        let database = database.transaction().await?;
        let user = database
            .get_user_by_discord_id(original.author.id.0)
            .await?;
        let collectible = CollectibleModel {
            // id 0 placeholder — presumably assigned by the database on
            // insert; confirm against insert_collectible.
            id: 0,
            name: name.to_string(),
            description: "".to_string(),
            rarity: rarity.to_string(),
            photo,
            // created: 0,
            created_by: user,
            created_on_server: original.guild_id.unwrap().0 as i64,
        };
        database.insert_collectible(collectible).await?;
        database.commit().await?;
        Ok(())
    }

    /// Download the message's first image attachment (if any), thumbnail it
    /// to at most 100x100, and re-encode it as PNG.
    async fn get_image(message: &Message) -> Result<Option<Vec<u8>>> {
        let attachment = match DiscordHandler::get_image_option(message) {
            Some(x) => x,
            None => return Ok(None),
        };
        let mut result = attachment.download().await?;
        let image = image::load_from_memory(&result)?;
        let image = image.thumbnail(100, 100);
        // Reuse the download buffer for the re-encoded thumbnail.
        result.clear();
        image.write_to(&mut result, ImageOutputFormat::Png)?;
        Ok(Some(result))
    }

    /// First attachment that has a width (i.e. is an image), if any.
    fn get_image_option(message: &Message) -> Option<&Attachment> {
        let attachment = message.attachments.get(0)?;
        attachment.width?;
        Some(attachment)
    }

    /// `~hello` — greet the author with a mention.
    async fn on_hello(&self, context: &Context, original: &Message) -> Result<()> {
        let message = MessageBuilder::new()
            .push("hello, ")
            .mention(&original.author)
            .build();
        original.channel_id.say(&context, message).await?;
        Ok(())
    }

    /// `~ping <user-id>` — mention an arbitrary user by numeric id.
    async fn ping_someone(&self, context: &Context, original: &Message, who: &str) -> Result<()> {
        let who = who.parse()?;
        let message = MessageBuilder::new()
            .push("boo, ")
            .mention(&UserId(who))
            .build();
        original.channel_id.say(&context, message).await?;
        Ok(())
    }

    /// `~userinfo` — reply with the author's stored name and balance.
    async fn on_user_info(&self, context: &Context, original: &Message) -> Result<()> {
        let database = self.bot.database_helper.simple().await?;
        let user = database.get_user_by_discord_id(original.author.id.0).await?;
        let message = format!("name={}, money={}", original.author.name, user.money);
        original.reply(context, message).await?;
        Ok(())
    }

    /// `~clear` — delete messages older than the command, ten at a time,
    /// until the channel history is exhausted.
    async fn clear_channel(&self, context: &Context, original: &Message) -> Result<()> {
        loop {
            let messages = original
                .channel_id
                .messages(&context, |retriever| {
                    retriever.before(original.id).limit(10)
                })
                .await?;
            if messages.is_empty() {
                break;
            }
            for i in messages {
                i.delete(context).await?;
            }
        }
        Ok(())
    }

    /// Adjust a user's balance by `money` (may be negative).
    async fn add_money(&self, user: &User, money: i32) -> Result<()> {
        println!("adding {} money to {}", money, user.name);
        let database = self.bot.database_helper.simple().await?;
        database.add_money(user.id.0, money).await?;
        Ok(())
    }
}

/// Split a command line on single spaces (empty words are preserved).
fn split_command(string: &str) -> Vec<&str> {
    string.split(' ').collect()
}

#[async_trait]
impl EventHandler for DiscordHandler {
    /// Main chat hook: pay out activity money, then run `~`-prefixed commands
    /// and react with ✅/❌ depending on success.
    async fn message(&self, context: Context, message: Message) {
        println!("{}: {}", message.author.name, message.content);
        // Ignore other bots (and ourselves).
        if message.author.bot {
            return;
        }
        self
            .update_user(message.author.id.0, &message)
            .await.unwrap();
        let text = message.content.trim().to_string();
        if text.starts_with('~') {
            let command = split_command(&text[1..]);
            let emoticon = match self.do_command(&command, &context, &message).await {
                Ok(_) => '✅',
                Err(e) => {
                    println!("{:?}", e);
                    '❌'
                }
            };
            message.react(context, emoticon).await.unwrap();
        }
    }

    /// Reward any reacting user with a flat 50 money.
    async fn reaction_add(&self, context: Context, add_reaction: Reaction) {
        self.add_money(&add_reaction.user_id.to_user(&context).await.unwrap(), 50).await.unwrap();
    }

    async fn ready(&self, _ctx: Context, _data_about_bot: Ready) {
        println!("Ready!");
    }
}

/// Bot settings, loaded from `config.toml`.
#[derive(Deserialize)]
struct Configuration {
    discord_token: String,
    database_connection_string: String,
}

/// Read and parse `config.toml` from the working directory.
async fn read_config() -> Result<Configuration> {
    let text = fs::read_to_string("config.toml").await?;
    let config = toml::from_str(&text)?;
    Ok(config)
}

// Convert an anyhow error into a warp rejection (logged, surfaced as 404).
macro_rules! anyhow_to_warp {
    ($x:expr) => {{
        match $x {
            Ok(x) => x,
            Err(e) => {
                println!("{:?}", e);
                return Err(warp::reject::not_found());
            }
        }
    }};
}

/// JSON payload for `GET /user/<id>`.
#[derive(Serialize)]
struct UserInfoResponse {
    user: UserModel,
    collectibles: Vec<CollectibleModel>
}

/// HTTP handler: look up a user by Discord id and return their inventory as
/// pretty-printed JSON.
async fn http_on_user_id(bot: Arc<DiscordBot>, id: u64) -> Result<Box<dyn warp::Reply>, warp::Rejection>{
    let database: tokio_postgres::Client = anyhow_to_warp!(bot.database_helper.simple().await);
    let user: UserModel = anyhow_to_warp!(database.get_user_by_discord_id(id).await);
    let collectibles = anyhow_to_warp!(database.get_owned_collectibles_for_user(user.id).await);
    let result = UserInfoResponse { user, collectibles };
    let result = serde_json::to_string_pretty(&result).unwrap();
    Ok(Box::new(result))
}

/// Serve `GET /user/<id>` on 0.0.0.0:3421 forever.
async fn warp_server(bot: Arc<DiscordBot>) -> Result<()> {
    let user = warp::path!("user" / u64)
        .and_then(move |user_id| http_on_user_id(bot.clone(), user_id));
    warp::serve(user)
        .run(([0, 0, 0, 0], 3421))
        .await;
    Ok(())
}

#[tokio::main]
async fn main() -> Result<()> {
    let config = read_config().await?;
    let (bot, mut client) = DiscordBot::new(config).await?;
    // The HTTP API runs alongside the Discord client.
    tokio::spawn(async move {
        let result = warp_server(bot).await;
        if let Err(e) = result {
            println!("{:?}", e);
        }
    });
    client.start().await?;
    Ok(())
}
use bindings::emscripten::*; use bindings::gl; use common::color::*; pub struct WebGLContext {} impl WebGLContext { pub fn new() -> Self { use std::mem::uninitialized; let ems_context_handle = unsafe { let mut attribs = uninitialized(); emscripten_webgl_init_context_attributes(&mut attribs); attribs.alpha = 1; attribs.stencil = 0; attribs.antialias = 1; attribs.preserveDrawingBuffer = 0; attribs.enableExtensionsByDefault = 0; emscripten_webgl_create_context(b"canvas\0".as_ptr() as _, &attribs) }; match ems_context_handle { EMSCRIPTEN_RESULT_NOT_SUPPORTED => { panic!("WebGL not supported"); } EMSCRIPTEN_RESULT_FAILED_NOT_DEFERRED => { panic!("WebGL context creation failed (FAILED_NOT_DEFERRED)"); } EMSCRIPTEN_RESULT_FAILED => { panic!("WebGL context creation failed (FAILED)"); } x if x < 0 => { panic!("WebGL context creation failed ({})", x); } _ => {} } if unsafe {emscripten_webgl_make_context_current(ems_context_handle) != EMSCRIPTEN_RESULT_SUCCESS} { panic!("Failed to make webgl context current"); } unsafe { gl::FrontFace(gl::CCW); } WebGLContext {} } pub fn set_background<C>(&self, col: C) where C: Into<Color> { unsafe { let c = col.into(); gl::ClearColor(c.r, c.g, c.b, c.a); } } }
use crate::Result; use crossterm::{queue, style::*, terminal::*}; use std::{cell::RefCell, fmt::Display, rc::Rc, sync::OnceLock}; static NO_COLOR: OnceLock<bool> = OnceLock::new(); fn no_color() -> bool { *NO_COLOR.get_or_init(|| std::env::var("NO_COLOR").is_ok()) } #[derive(Debug, Clone)] pub struct Raw<W> { pub raw: Rc<RefCell<W>>, } impl<W: std::io::Write> std::io::Write for Raw<W> { fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> { self.raw.borrow_mut().write(buf) } fn flush(&mut self) -> std::io::Result<()> { self.raw.borrow_mut().flush() } } impl<W: std::io::Write> Raw<W> { pub fn scroll_up(&mut self, n: u16) -> Result<()> { queue!(self, ScrollUp(n))?; Ok(()) } pub fn clear(&mut self, clear_type: ClearType) -> Result<()> { queue!(self, Clear(clear_type))?; Ok(()) } pub fn _write<D: Display + Clone>(&mut self, value: D) -> Result<()> { queue!(self, Print(value))?; Ok(()) } pub fn write<D: Display + Clone>(&mut self, value: D) -> Result<()> { self._write(value) } pub fn write_with_color<D: Display + Clone>(&mut self, value: D, color: Color) -> Result<()> { self.set_fg(color)?; self.write(value)?; self.reset_color()?; Ok(()) } pub fn set_title(&mut self, title: &str) -> Result<()> { queue!(self, SetTitle(title))?; Ok(()) } // color commands pub fn reset_color(&mut self) -> Result<()> { if no_color() { return Ok(()); } queue!(self, ResetColor)?; Ok(()) } pub fn set_fg(&mut self, color: Color) -> Result<()> { if no_color() { return Ok(()); } queue!(self, SetForegroundColor(color))?; Ok(()) } pub fn set_bg(&mut self, color: Color) -> Result<()> { if no_color() { return Ok(()); } queue!(self, SetBackgroundColor(color))?; Ok(()) } }
use async_graphql::*;

// Integration tests for the `#[graphql(derived(...))]` attribute: each test
// declares a source field/resolver plus a derived sibling field that converts
// the value, either via `Into` or via a custom `with = "..."` function.

// Derived field on a resolver method of an `#[Object]`: `value2` mirrors
// `value1` converted through `From<i32> for ValueDerived`.
#[tokio::test]
pub async fn test_derived_field_object() {
    use serde::{Deserialize, Serialize};

    struct Query;

    #[derive(Serialize, Deserialize)]
    struct ValueDerived(String);

    scalar!(ValueDerived);

    impl From<i32> for ValueDerived {
        fn from(value: i32) -> Self {
            ValueDerived(format!("{}", value))
        }
    }

    #[Object]
    impl Query {
        #[graphql(derived(name = "value2", into = "ValueDerived"))]
        async fn value1(&self, #[graphql(default = 100)] input: i32) -> i32 {
            input
        }
    }

    // Defaulted argument: both fields use `input = 100`.
    let query = "{ value1 value2 }";
    let schema = Schema::new(Query, EmptyMutation, EmptySubscription);
    assert_eq!(
        schema.execute(query).await.data,
        value!({
            "value1": 100,
            "value2": "100",
        })
    );

    // The derived field keeps its own argument list; each call gets its own
    // `input`.
    let query = "{ value1(input: 1) value2(input: 2) }";
    let schema = Schema::new(Query, EmptyMutation, EmptySubscription);
    assert_eq!(
        schema.execute(query).await.data,
        value!({
            "value1": 1,
            "value2": "2",
        })
    );
}

// Same as above but converting through a custom `with = "..."` function that
// maps `Option<i32>` to `Option<ValueDerived>`.
#[tokio::test]
pub async fn test_derived_field_object_with() {
    use serde::{Deserialize, Serialize};

    struct Query;

    #[derive(Serialize, Deserialize)]
    struct ValueDerived(String);

    scalar!(ValueDerived);

    impl From<i32> for ValueDerived {
        fn from(value: i32) -> Self {
            ValueDerived(format!("{}", value))
        }
    }

    // Lifts `From<T> for U` over `Option`; referenced by name in the
    // `with = "..."` attribute below.
    fn option_to_option<T, U: From<T>>(value: Option<T>) -> Option<U> {
        value.map(|x| x.into())
    }

    #[Object]
    impl Query {
        #[graphql(derived(
            name = "value2",
            into = "Option<ValueDerived>",
            with = "option_to_option"
        ))]
        async fn value1(&self, #[graphql(default = 100)] input: i32) -> Option<i32> {
            Some(input)
        }
    }

    let query = "{ value1 value2 }";
    let schema = Schema::new(Query, EmptyMutation, EmptySubscription);
    assert_eq!(
        schema.execute(query).await.data,
        value!({
            "value1": 100,
            "value2": "100",
        })
    );

    let query = "{ value1(input: 1) value2(input: 2) }";
    let schema = Schema::new(Query, EmptyMutation, EmptySubscription);
    assert_eq!(
        schema.execute(query).await.data,
        value!({
            "value1": 1,
            "value2": "2",
        })
    );
}

// Derived field on a `SimpleObject` struct field (`owned` required because
// the conversion consumes the value).
#[tokio::test]
pub async fn test_derived_field_simple_object() {
    use serde::{Deserialize, Serialize};

    struct Query;

    #[derive(Serialize, Deserialize)]
    struct ValueDerived(String);

    scalar!(ValueDerived);

    impl From<i32> for ValueDerived {
        fn from(value: i32) -> Self {
            ValueDerived(format!("{}", value))
        }
    }

    #[derive(SimpleObject)]
    struct TestObj {
        #[graphql(owned, derived(name = "value2", into = "ValueDerived"))]
        pub value1: i32,
    }

    #[Object]
    impl Query {
        async fn test(&self, #[graphql(default = 100)] input: i32) -> TestObj {
            TestObj { value1: input }
        }
    }

    let query = "{ test { value1 value2 } }";
    let schema = Schema::new(Query, EmptyMutation, EmptySubscription);
    assert_eq!(
        schema.execute(query).await.data,
        value!({
            "test": {
                "value1": 100,
                "value2": "100",
            }
        })
    );

    let query = "{ test(input: 2) { value1 value2 }}";
    let schema = Schema::new(Query, EmptyMutation, EmptySubscription);
    // NOTE(review): leftover debugging artifact — this executes the query an
    // extra time just to print it. Harmless for the assertion but noisy.
    dbg!(schema.execute(query).await);
    assert_eq!(
        schema.execute(query).await.data,
        value!({
            "test": {
                "value1": 2,
                "value2": "2",
            }
        })
    );
}

// Derived fields on `SimpleObject` fields wrapped in Option / Vec /
// Option<Vec> / Vec<Option>, each with its own lifting helper.
#[tokio::test]
pub async fn test_derived_field_simple_object_option() {
    use serde::{Deserialize, Serialize};

    struct Query;

    #[derive(Serialize, Deserialize, Clone)]
    struct ValueDerived(String);

    #[derive(Serialize, Deserialize, Clone)]
    struct ValueDerived2(String);

    scalar!(ValueDerived);
    scalar!(ValueDerived2);

    impl From<ValueDerived> for ValueDerived2 {
        fn from(value: ValueDerived) -> Self {
            ValueDerived2(value.0)
        }
    }

    // Lifting helpers for the container shapes exercised below.
    fn option_to_option<T, U: From<T>>(value: Option<T>) -> Option<U> {
        value.map(|x| x.into())
    }

    fn vec_to_vec<T, U: From<T>>(value: Vec<T>) -> Vec<U> {
        value.into_iter().map(|x| x.into()).collect()
    }

    fn vecopt_to_vecopt<T, U: From<T>>(value: Vec<Option<T>>) -> Vec<Option<U>> {
        value.into_iter().map(|x| x.map(|opt| opt.into())).collect()
    }

    fn optvec_to_optvec<T, U: From<T>>(value: Option<Vec<T>>) -> Option<Vec<U>> {
        value.map(|x| x.into_iter().map(|y| y.into()).collect())
    }

    #[derive(SimpleObject)]
    struct TestObj {
        #[graphql(derived(
            owned,
            name = "value2",
            into = "Option<ValueDerived2>",
            with = "option_to_option"
        ))]
        pub value1: Option<ValueDerived>,
        #[graphql(derived(
            owned,
            name = "value_vec_2",
            into = "Vec<ValueDerived2>",
            with = "vec_to_vec"
        ))]
        pub value_vec_1: Vec<ValueDerived>,
        #[graphql(derived(
            owned,
            name = "value_opt_vec_2",
            into = "Option<Vec<ValueDerived2>>",
            with = "optvec_to_optvec"
        ))]
        pub value_opt_vec_1: Option<Vec<ValueDerived>>,
        #[graphql(derived(
            owned,
            name = "value_vec_opt_2",
            into = "Vec<Option<ValueDerived2>>",
            with = "vecopt_to_vecopt"
        ))]
        pub value_vec_opt_1: Vec<Option<ValueDerived>>,
    }

    #[Object]
    impl Query {
        async fn test(&self) -> TestObj {
            TestObj {
                value1: Some(ValueDerived("Test".to_string())),
                value_vec_1: vec![ValueDerived("Test".to_string())],
                value_opt_vec_1: Some(vec![ValueDerived("Test".to_string())]),
                value_vec_opt_1: vec![Some(ValueDerived("Test".to_string()))],
            }
        }
    }

    let query = "{ test { value1 value2 valueVec1 valueVec2 valueOptVec1 valueOptVec2 } }";
    let schema = Schema::new(Query, EmptyMutation, EmptySubscription);
    assert_eq!(
        schema.execute(query).await.data,
        value!({
            "test": {
                "value1": "Test",
                "value2": "Test",
                "valueVec1": vec!["Test"],
                "valueVec2": vec!["Test"],
                "valueOptVec1": vec!["Test"],
                "valueOptVec2": vec!["Test"],
            }
        })
    );
}

// Derived fields on both a struct field (`f` from `b`) and a
// `#[ComplexObject]` resolver (`e` from `d`).
#[tokio::test]
pub async fn test_derived_field_complex_object() {
    use serde::{Deserialize, Serialize};

    #[derive(SimpleObject)]
    #[graphql(complex)]
    struct MyObj {
        a: i32,
        #[graphql(owned, derived(name = "f", into = "ValueDerived"))]
        b: i32,
    }

    #[derive(Serialize, Deserialize)]
    struct ValueDerived(String);

    scalar!(ValueDerived);

    impl From<i32> for ValueDerived {
        fn from(value: i32) -> Self {
            ValueDerived(format!("{}", value))
        }
    }

    #[ComplexObject]
    impl MyObj {
        async fn c(&self) -> i32 {
            self.a + self.b
        }

        #[graphql(derived(name = "e", into = "ValueDerived"))]
        async fn d(&self, v: i32) -> i32 {
            self.a + self.b + v
        }
    }

    struct Query;

    #[Object]
    impl Query {
        async fn obj(&self) -> MyObj {
            MyObj { a: 10, b: 20 }
        }
    }

    let query = "{ obj { a b c d(v:100) e(v: 200) f } }";
    let schema = Schema::new(Query, EmptyMutation, EmptySubscription);
    // NOTE(review): leftover debugging artifact — executes the query an extra
    // time just to print the response.
    dbg!(schema.execute(query).await);
    assert_eq!(
        schema.execute(query).await.data,
        value!({
            "obj": {
                "a": 10,
                "b": 20,
                "c": 30,
                "d": 130,
                "e": "230",
                "f": "20",
            },
        })
    );
}

// Same as the previous test, but the complex-object derived field goes
// through an `Option` conversion via `with = "option_to_option"`.
#[tokio::test]
pub async fn test_derived_field_complex_object_derived() {
    use serde::{Deserialize, Serialize};

    #[derive(SimpleObject)]
    #[graphql(complex)]
    struct MyObj {
        a: i32,
        #[graphql(owned, derived(name = "f", into = "ValueDerived"))]
        b: i32,
    }

    #[derive(Serialize, Deserialize)]
    struct ValueDerived(String);

    scalar!(ValueDerived);

    impl From<i32> for ValueDerived {
        fn from(value: i32) -> Self {
            ValueDerived(format!("{}", value))
        }
    }

    fn option_to_option<T, U: From<T>>(value: Option<T>) -> Option<U> {
        value.map(|x| x.into())
    }

    #[ComplexObject]
    impl MyObj {
        async fn c(&self) -> i32 {
            self.a + self.b
        }

        #[graphql(derived(name = "e", into = "Option<ValueDerived>", with = "option_to_option"))]
        async fn d(&self, v: i32) -> Option<i32> {
            Some(self.a + self.b + v)
        }
    }

    struct Query;

    #[Object]
    impl Query {
        async fn obj(&self) -> MyObj {
            MyObj { a: 10, b: 20 }
        }
    }

    let query = "{ obj { a b c d(v:100) e(v: 200) f } }";
    let schema = Schema::new(Query, EmptyMutation, EmptySubscription);
    assert_eq!(
        schema.execute(query).await.data,
        value!({
            "obj": {
                "a": 10,
                "b": 20,
                "c": 30,
                "d": 130,
                "e": "230",
                "f": "20",
            },
        })
    );
}
use std::collections::HashMap;

use proc_macro2::{Ident, TokenStream};
use syn::{parse2, DataEnum, GenericParam, Generics, TypeParam, Variant, Visibility};

use crate::token_builder::{
    access_field, extend_ts, generics_arg_by_mutating_type_params, ident, join_ts, pattern_match,
    safe_field_ident, ts, TokenBuilder,
};
use crate::{q, Common};

/// Generates (into `tb`) an `<Enum>SingleVariant` helper enum for the given
/// enum `enu`, plus a `Mutator<Enum>` impl for it.
///
/// The generated enum has one variant per enum variant, each holding a
/// generic mutator parameter named `M<VariantName>` that is bound to the
/// tuple mutator for that variant's fields. Every generated trait method
/// dispatches by pattern-matching `self` (and, where present, `value`,
/// `cache`, `step`, `t`) on the *same* variant, delegating to the inner tuple
/// mutator; mismatched combinations are `unreachable!()` (or `false`/`None`
/// for the validity checks).
pub fn make_single_variant_mutator(
    tb: &mut TokenBuilder,
    ident: &Ident,
    generics: &Generics,
    vis: &Visibility,
    enu: &DataEnum,
) {
    let cm = Common::new(0);
    let EnumSingleVariant = ident!(ident "SingleVariant");
    // let EnumSingleVariantMutator = ident!(enum_ident "SingleVariantMutator");
    let Tuplei = cm.Tuplei.as_ref();

    // item_fields: vector holding the item field types
    // item_mutators: the token stream of the tuple mutator for the item fields
    // item_pattern_match_bindings: the bindings made when pattern matching the item
    let (item_fields, item_mutators, item_pattern_match_bindings): (
        HashMap<Ident, Vec<TokenStream>>,
        HashMap<Ident, TokenStream>,
        HashMap<Ident, Vec<Ident>>,
    ) = {
        let mut item_fields = HashMap::new();
        let mut map = HashMap::new();
        let mut bindings = HashMap::new();
        for variant in &enu.variants {
            let fields = variant.fields.iter().collect::<Vec<_>>();
            if !fields.is_empty() {
                // Non-unit variant: mutator is TupleMutator<(T1, ...,), TupleN<T1, ...>>.
                item_fields.insert(variant.ident.clone(), fields.iter().map(|x| ts!(q!(&x.ty))).collect());
                let field_tys = join_ts!(fields.iter(), field, field.ty, separator: ",");
                map.insert(
                    variant.ident.clone(),
                    ts!(
                        cm.TupleMutator "< (" field_tys ",) ,"
                            Tuplei(fields.len()) "<" field_tys "> >"
                    ),
                );
                bindings.insert(
                    variant.ident.clone(),
                    fields
                        .iter()
                        .enumerate()
                        .map(|(idx, field)| safe_field_ident(field, idx))
                        .collect(),
                );
            } else {
                // Unit variant: degenerate TupleMutator over the empty tuple.
                item_fields.insert(variant.ident.clone(), vec![]);
                map.insert(
                    variant.ident.clone(),
                    ts!(
                        cm.TupleMutator "< () ," Tuplei(0) " >"
                    ),
                );
                bindings.insert(variant.ident.clone(), vec![]);
            }
        }
        (item_fields, map, bindings)
    };

    // Generics like:
    // <MSome, MNone>
    let single_variant_generics = {
        let generic_params: Vec<GenericParam> = enu
            .variants
            .iter()
            .map(|variant| {
                let tp: TypeParam = ident!("M" variant.ident).into();
                let gp: GenericParam = tp.into();
                gp
            })
            .collect();
        let mut g = Generics::default();
        g.params.extend(generic_params);
        g
    };

    let mut generics = generics.clone();
    // add more conditions to the enum's generics
    // enum generics with additional condition for each type parameter
    // where T: Clone + 'static
    for tp in generics.type_params_mut() {
        tp.bounds.push(parse2(cm.Clone.clone()).unwrap());
        tp.bounds.push(parse2(ts!("'static")).unwrap());
    }

    // The generics for the impl of the mutator
    // it contains all the generics of the enum (with additional Clone and 'static bounds)
    // as well as its where clause, PLUS the single-variant-generics created earlier PLUS the requirement that each single-variant generics correspond to the correct mutator
    // e.g.
    // <T: Clone + 'static, U: Clone + 'static, MSome: TupleMutator<u8, Tuple1<(u8,)>>, MNone: TupleMutator<(), Tuple0>> ... where T: Default,
    let impl_mutator_generics = {
        let mut g = Generics::default();
        for param in generics.type_params() {
            let tp: TypeParam = param.clone();
            g.params.push(tp.into());
        }
        for variant in enu.variants.iter() {
            // same ident as the single-variant generics
            let mut param: TypeParam = ident!("M" variant.ident).into();
            // with an additional TupleMutator clause
            param
                .bounds
                .push(parse2(item_mutators[&variant.ident].clone()).unwrap());
            g.params.push(param.into());
        }
        g.where_clause = generics.where_clause.clone();
        g
    };

    // Suffix appended to field idents when pattern-matching, to avoid
    // colliding with user identifiers.
    let pattern_match_binding_append = ident!("__proc_macro__binding__");

    // Renders the pattern-match bindings of a variant as a tuple expression,
    // e.g. `(x__proc_macro__binding__,)` — or `()` for unit variants.
    let variant_pattern_match_bindings_to_tuple = |variant_ident| {
        if item_fields[variant_ident].is_empty() {
            ts!("()")
        } else {
            ts!("("
                join_ts!(item_pattern_match_bindings[variant_ident].iter(), binding,
                    ident!(binding pattern_match_binding_append) ","
                )
            ")")
        }
    };

    // Builds an enum-variant constructor expression from a freshly produced
    // tuple value `v`, e.g. `MyEnum::Some { x: v.0 }`.
    let variant_pattern_match_bindings_to_enum_variant = |variant: &Variant| {
        ts!(
            ident "::" variant.ident "{"
                join_ts!(variant.fields.iter().enumerate(), (i, field),
                    access_field(field, i) ": v." i
                , separator: ",")
            "}"
        )
    };

    let (mutator_gen_impl, _, mutator_gen_where_clause) = impl_mutator_generics.split_for_impl();
    let (_, enum_generics_ty, _) = generics.split_for_impl();
    let selfty = ts!(ident q!(&enum_generics_ty));

    // Emit the single-variant enum and its whole Mutator impl. The quoted
    // fragments below are the generated source; interleaved expressions are
    // spliced token streams.
    extend_ts!(tb,
    "
    #[derive(" {&cm.Clone} ")]
    #[doc(hidden)]"
    q!(vis) "enum " EnumSingleVariant q!(&single_variant_generics) "{"
    join_ts!(&enu.variants, item,
        item.ident "(" ident!("M" item.ident) "),"
    )
    "}
    #[allow(non_shorthand_field_patterns)]
    impl " q!(&mutator_gen_impl) cm.fuzzcheck_traits_Mutator "<" selfty "> for " EnumSingleVariant q!(&single_variant_generics) q!(&mutator_gen_where_clause) "{
        #[doc(hidden)]
        type Cache = " EnumSingleVariant q!(&generics_arg_by_mutating_type_params(&single_variant_generics, |tp| {
            ts!(tp "::Cache")
        })) ";
        #[doc(hidden)]
        type MutationStep = " EnumSingleVariant q!(&generics_arg_by_mutating_type_params(&single_variant_generics, |tp| {
            ts!(tp "::MutationStep")
        })) ";
        #[doc(hidden)]
        type ArbitraryStep = " EnumSingleVariant q!(&generics_arg_by_mutating_type_params(&single_variant_generics, |tp| {
            ts!(tp "::ArbitraryStep")
        })) ";
        #[doc(hidden)]
        type UnmutateToken = " EnumSingleVariant q!(&generics_arg_by_mutating_type_params(&single_variant_generics, |tp| {
            ts!(tp "::UnmutateToken")
        })) ";
        #[doc(hidden)]
        #[no_coverage]
        fn initialize(&self) {
            match self {"
            join_ts!(&enu.variants, variant,
                EnumSingleVariant "::" variant.ident "(m) => { m.initialize() }"
            )
            "}
        }
        #[doc(hidden)]
        #[no_coverage]
        fn default_arbitrary_step(&self) -> Self::ArbitraryStep {
            match self {"
            join_ts!(&enu.variants, variant,
                EnumSingleVariant "::" variant.ident "(m) =>" EnumSingleVariant "::" variant.ident "(m.default_arbitrary_step()),"
            )
            "}
        }
        #[doc(hidden)]
        #[no_coverage]
        fn is_valid(&self, value: &" selfty ") -> bool {"
            "match (self, value) {"
            join_ts!(&enu.variants, variant,
                "(" EnumSingleVariant "::" variant.ident "(m)," pattern_match(variant, ident, Some(pattern_match_binding_append.clone())) ") => {
                    m.is_valid(" variant_pattern_match_bindings_to_tuple(&variant.ident) ")
                }"
            )"
                _ => false,
            }
        }
        #[doc(hidden)]
        #[no_coverage]
        fn validate_value(&self, value: &" selfty ") -> " cm.Option "<Self::Cache> {
            match (self, value) {"
            join_ts!(&enu.variants, variant,
                "(" EnumSingleVariant "::" variant.ident "(m)," pattern_match(variant, ident, Some(pattern_match_binding_append.clone())) ") => {
                    m.validate_value(" variant_pattern_match_bindings_to_tuple(&variant.ident) ").map(" EnumSingleVariant "::" variant.ident ")
                }"
            )"
                _ => " cm.None ",
            }
        }
        #[doc(hidden)]
        #[no_coverage]
        fn default_mutation_step(&self, value: &" selfty ", cache: &Self::Cache) -> Self::MutationStep {
            match (self, value, cache) {"
            join_ts!(&enu.variants, variant,
                "( " EnumSingleVariant ":: " variant.ident " (m) , "
                    pattern_match(variant, ident, Some(pattern_match_binding_append.clone())) ", "
                    EnumSingleVariant ":: " variant.ident " (c) ) => { "
                    EnumSingleVariant "::" variant.ident "(m.default_mutation_step(" variant_pattern_match_bindings_to_tuple(&variant.ident) ", c))
                }"
            )
            "_ => unreachable!()
            }
        }
        #[doc(hidden)]
        #[no_coverage]
        fn global_search_space_complexity(&self) -> f64 {
            match self {"
            join_ts!(&enu.variants, variant,
                EnumSingleVariant "::" variant.ident "(m) => m.global_search_space_complexity() ,"
            )"
            }
        }
        #[doc(hidden)]
        #[no_coverage]
        fn max_complexity(&self) -> f64 {
            match self {"
            join_ts!(&enu.variants, variant,
                EnumSingleVariant "::" variant.ident "(m) => m.max_complexity() ,"
            )"
            }
        }
        #[doc(hidden)]
        #[no_coverage]
        fn min_complexity(&self) -> f64 {
            match self {"
            join_ts!(&enu.variants, variant,
                EnumSingleVariant "::" variant.ident "(m) => m.min_complexity() ,"
            )"
            }
        }
        #[doc(hidden)]
        #[no_coverage]
        fn complexity(&self, value: &" selfty ", cache: &Self::Cache) -> f64 {
            match (self, value, cache) {"
            join_ts!(&enu.variants, variant,
                "( " EnumSingleVariant ":: " variant.ident " (m) , "
                    pattern_match(variant, ident, Some(pattern_match_binding_append.clone())) ", "
                    EnumSingleVariant ":: " variant.ident " (c) ) => {
                    m.complexity(" variant_pattern_match_bindings_to_tuple(&variant.ident) ", c)
                }"
            )
            "_ => unreachable!()
            }
        }
        #[doc(hidden)]
        #[no_coverage]
        fn ordered_arbitrary(&self, step: &mut Self::ArbitraryStep, max_cplx: f64) -> Option<(" selfty ", f64)> {
            match (self, step) {"
            join_ts!(&enu.variants, variant,
                "(" EnumSingleVariant "::" variant.ident "(m)," EnumSingleVariant "::" variant.ident "(s)) => {"
                    "if let" cm.Some "((v, c)) = m.ordered_arbitrary(s, max_cplx) { "
                        cm.Some "((" variant_pattern_match_bindings_to_enum_variant(variant) ", c ))
                    } else {
                        None
                    }
                }"
            )
            "_ => unreachable!()
            }
        }
        #[doc(hidden)]
        #[no_coverage]
        fn random_arbitrary(&self, max_cplx: f64) -> (" selfty ", f64) {
            match self {"
            join_ts!(&enu.variants, variant,
                EnumSingleVariant "::" variant.ident "(m) => {
                    let (v, c) = m.random_arbitrary(max_cplx);
                    (" variant_pattern_match_bindings_to_enum_variant(variant) ", c )
                }"
            )"}
        }
        #[doc(hidden)]
        #[no_coverage]
        fn ordered_mutate(
            &self,
            value: &mut " selfty ",
            cache: &mut Self::Cache,
            step: &mut Self::MutationStep,
            subvalue_provider: &dyn " cm.SubValueProvider ",
            max_cplx: f64,
        ) -> Option<(Self::UnmutateToken, f64)> {
            match (self, value, cache, step) {"
            join_ts!(&enu.variants, variant,
                "( " EnumSingleVariant "::" variant.ident "(m) , "
                    pattern_match(variant, ident, Some(pattern_match_binding_append.clone())) ", "
                    EnumSingleVariant "::" variant.ident "(c) , "
                    EnumSingleVariant "::" variant.ident "(s) ) => {
                    m.ordered_mutate(" variant_pattern_match_bindings_to_tuple(&variant.ident) ", c, s, subvalue_provider, max_cplx)
                        .map(#[no_coverage] |(t, c)| (" EnumSingleVariant "::" variant.ident "(t), c))
                }"
            )"
                _ => unreachable!(),
            }
        }
        #[doc(hidden)]
        #[no_coverage]
        fn random_mutate(&self, value: &mut " selfty ", cache: &mut Self::Cache, max_cplx: f64) -> (Self::UnmutateToken, f64) {
            match (self, value, cache) {"
            join_ts!(&enu.variants, variant,
                "( " EnumSingleVariant "::" variant.ident "(m) , "
                    pattern_match(variant, ident, Some(pattern_match_binding_append.clone())) ", "
                    EnumSingleVariant "::" variant.ident "(c) ) => {
                    let (t, c) = m.random_mutate(" variant_pattern_match_bindings_to_tuple(&variant.ident) ", c, max_cplx" ");
                    (" EnumSingleVariant "::" variant.ident "(t), c)
                }"
            )
            "_ => unreachable!()"
            "}
        }
        #[doc(hidden)]
        #[no_coverage]
        fn unmutate(&self, value: &mut " selfty ", cache: &mut Self::Cache, t: Self::UnmutateToken) {
            match (self, value, cache, t) {"
            join_ts!(&enu.variants, variant,
                "( " EnumSingleVariant "::" variant.ident "(m) , "
                    pattern_match(variant, ident, Some(pattern_match_binding_append.clone())) ", "
                    EnumSingleVariant "::" variant.ident "(c) , "
                    EnumSingleVariant "::" variant.ident "(t) ) => {"
                    "m.unmutate(" variant_pattern_match_bindings_to_tuple(&variant.ident) ", c, t)"
                "}"
            )"
                _ => unreachable!()
            }
        }
        #[doc(hidden)]
        #[no_coverage]
        fn visit_subvalues<'__fuzzcheck_derive_lt>(&self, value: &'__fuzzcheck_derive_lt " selfty ", cache: &'__fuzzcheck_derive_lt Self::Cache, visit: &mut dyn FnMut(&'__fuzzcheck_derive_lt dyn " cm.Any ", f64)) {
            match (self, value, cache) {"
            join_ts!(&enu.variants, variant,
                "( " EnumSingleVariant "::" variant.ident "(m) , "
                    pattern_match(variant, ident, Some(pattern_match_binding_append.clone())) ", "
                    EnumSingleVariant "::" variant.ident "(cache) ) => {
                    m.visit_subvalues(" variant_pattern_match_bindings_to_tuple(&variant.ident) ", cache, visit);
                }"
            )"
                _ => unreachable!()
            }
        }
    }
    ");
}
use std::collections::{HashMap, VecDeque}; use crate::{ error::LuxError, expr::Expr, function::Function, interpreter::Interpreter, stmt::Stmt, token::Token, }; pub struct Resolver<'a> { scopes: VecDeque<HashMap<String, bool>>, interpreter: &'a mut Interpreter, current_function: FunctionType, } type ResolverResult<T> = Result<T, LuxError>; #[derive(Debug, Clone, Copy, PartialEq, Eq)] enum FunctionType { None, Function, } impl<'a> Resolver<'a> { pub fn new(interpreter: &'a mut Interpreter) -> Self { let scopes: VecDeque<HashMap<String, bool>> = VecDeque::new(); let current_function = FunctionType::None; Self { scopes, interpreter, current_function, } } pub fn resolve(&mut self, statements: &[Stmt]) -> ResolverResult<()> { for statement in statements { self.resolve_one(statement)? } Ok(()) } fn resolve_one(&mut self, statement: &Stmt) -> ResolverResult<()> { match statement { Stmt::Expression { expression } => self.resolve_expr(expression), Stmt::Print { expression } => self.resolve_expr(expression.as_ref()), Stmt::Var { name, initializer } => { self.declare(name)?; if **initializer != Expr::Nil { self.resolve_expr(initializer)? 
} self.define(name); Ok(()) } Stmt::Block { statements } => { self.begin_scope(); self.resolve(statements)?; self.end_scope(); Ok(()) } Stmt::If { condition, then_branch, else_branch, } => { self.resolve_expr(condition)?; self.resolve_one(then_branch)?; if let Some(branch) = else_branch { self.resolve_one(branch)?; } Ok(()) } Stmt::Function(func) => { self.declare(&func.name)?; self.define(&func.name); self.resolve_func(func, FunctionType::Function) } Stmt::While { condition, body } => { self.resolve_expr(condition)?; self.resolve_one(body) } Stmt::Return { value, keyword } => { if self.current_function == FunctionType::None { return Err(LuxError::new(keyword, "Can't return from top-level code.")); } self.resolve_expr(value.as_ref()) } } } fn resolve_func( &mut self, function: &Function, function_type: FunctionType, ) -> ResolverResult<()> { let enclosing_function = self.current_function; self.current_function = function_type; self.begin_scope(); for param in &function.param { self.declare(param)?; self.define(param); } self.resolve(&function.body)?; self.end_scope(); self.current_function = enclosing_function; Ok(()) } fn declare(&mut self, name: &Token) -> ResolverResult<()> { if self.scopes.is_empty() { return Ok(()); } let scope = self.scopes.back_mut(); if scope.unwrap().contains_key(&name.lexeme) { return Err(LuxError::new( name, "Already a variable with this name in this scope.", )); } if let Some(map) = self.scopes.back_mut() { map.insert(name.lexeme.clone(), false); } Ok(()) } fn define(&mut self, name: &Token) { if self.scopes.is_empty() { return; } let scope = self.scopes.back_mut(); if let Some(map) = scope { map.insert(name.lexeme.clone(), true); } } fn resolve_local(&mut self, expr: &Expr, name: Token) { let mut i = (self.scopes.len() as i64) - 1; while i >= 0 { let scope = self.scopes.get(i as usize).unwrap(); if scope.contains_key(&name.lexeme) { self.interpreter .resolve(expr, self.scopes.len() - 1 - i as usize) } i -= 1; } } fn resolve_expr(&mut 
self, expression: &Expr) -> ResolverResult<()> { match expression { Expr::Binary { left, right, .. } => { self.resolve_expr(left)?; self.resolve_expr(right) } Expr::Grouping { expression } => self.resolve_expr(expression), Expr::Literal { .. } => Ok(()), Expr::Unary { right, .. } => self.resolve_expr(right), Expr::Variable { name } => { if !self.scopes.is_empty() { let value = self.scopes.back().unwrap().get(&name.lexeme); if let Some(val) = value { if !(*val) { return Err(LuxError::new( name, "Can't read local variable in its own initializer.", )); } } } self.resolve_local(&expression, name.clone()); Ok(()) } Expr::Assign { name, value } => { self.resolve_expr(value)?; self.resolve_local(&expression, name.clone()); Ok(()) } Expr::Logical { left, right, .. } => { self.resolve_expr(left)?; self.resolve_expr(right) } Expr::Call { callee, arguments, .. } => { self.resolve_expr(callee)?; for argument in arguments { self.resolve_expr(argument)?; } Ok(()) } Expr::Nil => Ok(()), } } fn begin_scope(&mut self) { let scope: HashMap<String, bool> = HashMap::new(); self.scopes.push_back(scope) } fn end_scope(&mut self) { self.scopes.pop_back(); } }
use std::collections::HashMap;

/// An `inc`/`dec` operation applied to a register, by a signed amount.
enum Operation {
    Increment(isize),
    Decrement(isize),
}

impl Operation {
    /// Applies the operation in place and returns the register's new value.
    fn apply(&self, val: &mut isize) -> isize {
        use Operation::*;
        match *self {
            Increment(i) => *val += i,
            Decrement(i) => *val -= i,
        }
        *val
    }
}

/// A comparison of a register value against a constant (an instruction's
/// `if` condition).
enum Comparison {
    Equal(isize),            // ==
    NotEqual(isize),         // !=
    LessThan(isize),         // <
    LessThanEqual(isize),    // <=
    GreaterThan(isize),      // >
    GreaterThanEqual(isize), // >= (was misspelled `GreatherThanEqual`)
}

impl Comparison {
    /// Evaluates `lhs <op> constant`.
    fn apply(&self, lhs: isize) -> bool {
        use Comparison::*;
        match *self {
            Equal(i) => lhs == i,
            NotEqual(i) => lhs != i,
            LessThan(i) => lhs < i,
            LessThanEqual(i) => lhs <= i,
            GreaterThan(i) => lhs > i,
            GreaterThanEqual(i) => lhs >= i,
        }
    }
}

/// One parsed input line: `<reg_store> inc|dec <n> if <reg_get> <cmp> <n>`.
struct Instruction<'a> {
    reg_store: &'a str,
    op: Operation,
    compare: Comparison,
    reg_get: &'a str,
}

impl<'a> From<&'a str> for Instruction<'a> {
    /// Parses a single line of well-formed puzzle input.
    /// Panics on malformed lines (acceptable for a puzzle solution).
    fn from(line: &'a str) -> Instruction<'a> {
        let components = line.split_whitespace().collect::<Vec<_>>();

        let change_by = components[2].parse().unwrap();
        let op = if components[1] == "inc" {
            Operation::Increment(change_by)
        } else {
            Operation::Decrement(change_by)
        };

        let compare_to = components[6].parse().unwrap();
        let compare = match components[5] {
            "==" => Comparison::Equal(compare_to),
            "!=" => Comparison::NotEqual(compare_to),
            "<" => Comparison::LessThan(compare_to),
            "<=" => Comparison::LessThanEqual(compare_to),
            ">" => Comparison::GreaterThan(compare_to),
            ">=" => Comparison::GreaterThanEqual(compare_to),
            _ => unreachable!(),
        };

        Instruction {
            // Field-init shorthand; the previous `reg_store.clone()` only
            // copied the `&str` reference anyway.
            reg_store: components[0],
            op,
            compare,
            reg_get: components[4],
        }
    }
}

/// The register machine: an instruction list plus named registers that
/// default to 0 on first touch.
#[derive(Default)]
struct Machine<'a> {
    instructions: Vec<Instruction<'a>>,
    registers: HashMap<&'a str, isize>,
}

impl<'a> Machine<'a> {
    /// Runs every instruction once and returns the greatest value any
    /// register held at any point during execution (0 if no write ever
    /// exceeds 0 — matches the original behavior).
    fn run(&mut self) -> isize {
        let mut largest = 0;
        for instruction in &self.instructions {
            // Value of the register the condition reads (created at 0 if new).
            let reg_get = *self.registers.entry(instruction.reg_get).or_insert(0);

            // Mutable handle on the register the operation writes.
            let reg_store = self.registers.entry(instruction.reg_store).or_insert(0);

            if instruction.compare.apply(reg_get) {
                largest = largest.max(instruction.op.apply(reg_store));
            }
        }
        largest
    }
}

fn main() {
    // Read the input at runtime instead of `include_str!` so the program
    // still *compiles* without the data file present; expects `input.txt`
    // in the working directory.
    let input = std::fs::read_to_string("input.txt").expect("failed to read input.txt");
    let (largest_at_end, largest_any) = day_8(&input);
    println!("largest value at end: {}", largest_at_end);
    println!("largest value at any time: {}", largest_any);
}

/// Performs both parts of day 8 for ease of use.
/// Returns `(largest register value at end, largest value at any time)`;
/// `(0, 0)` for empty input instead of panicking.
fn day_8(input: &str) -> (isize, isize) {
    let mut machine = Machine::default();
    machine
        .instructions
        .extend(input.lines().map(Instruction::from));
    let result = machine.run();
    (
        machine.registers.values().copied().max().unwrap_or(0),
        result,
    )
}

#[test]
fn example_1() {
    let input = "b inc 5 if a > 1\na inc 1 if b < 5\nc dec -10 if a >= 1\nc inc -20 if c == 10";
    assert_eq!(day_8(input), (1, 10));
}
#![allow(unused_variables, non_upper_case_globals, non_snake_case, unused_unsafe, non_camel_case_types, dead_code, clippy::all)]

// Machine-generated windows-rs style bindings for the Rtl*NonVolatile* Win32
// APIs. Do not hand-edit the signatures: each wrapper declares the OS export
// of the same name and forwards its arguments through `transmute`. On
// non-Windows targets every wrapper is `unimplemented!`.

#[derive(:: core :: clone :: Clone, :: core :: marker :: Copy)]
#[repr(C)]
pub struct NV_MEMORY_RANGE {
    pub BaseAddress: *mut ::core::ffi::c_void,
    pub Length: usize,
}
impl NV_MEMORY_RANGE {}
// All-zero bytes are a valid value (null pointer + zero length), so zeroed()
// is a sound Default here.
impl ::core::default::Default for NV_MEMORY_RANGE {
    fn default() -> Self {
        unsafe { ::core::mem::zeroed() }
    }
}
impl ::core::fmt::Debug for NV_MEMORY_RANGE {
    fn fmt(&self, fmt: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result {
        fmt.debug_struct("NV_MEMORY_RANGE").field("BaseAddress", &self.BaseAddress).field("Length", &self.Length).finish()
    }
}
impl ::core::cmp::PartialEq for NV_MEMORY_RANGE {
    fn eq(&self, other: &Self) -> bool {
        self.BaseAddress == other.BaseAddress && self.Length == other.Length
    }
}
impl ::core::cmp::Eq for NV_MEMORY_RANGE {}
unsafe impl ::windows::core::Abi for NV_MEMORY_RANGE {
    type Abi = Self;
}

// Each function below returns the OS call's raw u32 status code; semantics of
// the codes are defined by the underlying Windows API, not by these wrappers.

#[cfg(any(target_arch = "x86_64", target_arch = "aarch64",))]
#[inline]
pub unsafe fn RtlDrainNonVolatileFlush(nvtoken: *const ::core::ffi::c_void) -> u32 {
    #[cfg(windows)]
    {
        #[link(name = "windows")]
        extern "system" {
            fn RtlDrainNonVolatileFlush(nvtoken: *const ::core::ffi::c_void) -> u32;
        }
        ::core::mem::transmute(RtlDrainNonVolatileFlush(::core::mem::transmute(nvtoken)))
    }
    #[cfg(not(windows))]
    unimplemented!("Unsupported target OS");
}
#[cfg(any(target_arch = "x86_64", target_arch = "aarch64",))]
#[inline]
pub unsafe fn RtlFillNonVolatileMemory(nvtoken: *const ::core::ffi::c_void, nvdestination: *mut ::core::ffi::c_void, size: usize, value: u8, flags: u32) -> u32 {
    #[cfg(windows)]
    {
        #[link(name = "windows")]
        extern "system" {
            fn RtlFillNonVolatileMemory(nvtoken: *const ::core::ffi::c_void, nvdestination: *mut ::core::ffi::c_void, size: usize, value: u8, flags: u32) -> u32;
        }
        ::core::mem::transmute(RtlFillNonVolatileMemory(::core::mem::transmute(nvtoken), ::core::mem::transmute(nvdestination), ::core::mem::transmute(size), ::core::mem::transmute(value), ::core::mem::transmute(flags)))
    }
    #[cfg(not(windows))]
    unimplemented!("Unsupported target OS");
}
#[cfg(any(target_arch = "x86_64", target_arch = "aarch64",))]
#[inline]
pub unsafe fn RtlFlushNonVolatileMemory(nvtoken: *const ::core::ffi::c_void, nvbuffer: *const ::core::ffi::c_void, size: usize, flags: u32) -> u32 {
    #[cfg(windows)]
    {
        #[link(name = "windows")]
        extern "system" {
            fn RtlFlushNonVolatileMemory(nvtoken: *const ::core::ffi::c_void, nvbuffer: *const ::core::ffi::c_void, size: usize, flags: u32) -> u32;
        }
        ::core::mem::transmute(RtlFlushNonVolatileMemory(::core::mem::transmute(nvtoken), ::core::mem::transmute(nvbuffer), ::core::mem::transmute(size), ::core::mem::transmute(flags)))
    }
    #[cfg(not(windows))]
    unimplemented!("Unsupported target OS");
}
#[cfg(any(target_arch = "x86_64", target_arch = "aarch64",))]
#[inline]
pub unsafe fn RtlFlushNonVolatileMemoryRanges(nvtoken: *const ::core::ffi::c_void, nvranges: *const NV_MEMORY_RANGE, numranges: usize, flags: u32) -> u32 {
    #[cfg(windows)]
    {
        #[link(name = "windows")]
        extern "system" {
            fn RtlFlushNonVolatileMemoryRanges(nvtoken: *const ::core::ffi::c_void, nvranges: *const NV_MEMORY_RANGE, numranges: usize, flags: u32) -> u32;
        }
        ::core::mem::transmute(RtlFlushNonVolatileMemoryRanges(::core::mem::transmute(nvtoken), ::core::mem::transmute(nvranges), ::core::mem::transmute(numranges), ::core::mem::transmute(flags)))
    }
    #[cfg(not(windows))]
    unimplemented!("Unsupported target OS");
}
#[cfg(any(target_arch = "x86_64", target_arch = "aarch64",))]
#[inline]
pub unsafe fn RtlFreeNonVolatileToken(nvtoken: *const ::core::ffi::c_void) -> u32 {
    #[cfg(windows)]
    {
        #[link(name = "windows")]
        extern "system" {
            fn RtlFreeNonVolatileToken(nvtoken: *const ::core::ffi::c_void) -> u32;
        }
        ::core::mem::transmute(RtlFreeNonVolatileToken(::core::mem::transmute(nvtoken)))
    }
    #[cfg(not(windows))]
    unimplemented!("Unsupported target OS");
}
#[cfg(any(target_arch = "x86_64", target_arch = "aarch64",))]
#[inline]
pub unsafe fn RtlGetNonVolatileToken(nvbuffer: *const ::core::ffi::c_void, size: usize, nvtoken: *mut *mut ::core::ffi::c_void) -> u32 {
    #[cfg(windows)]
    {
        #[link(name = "windows")]
        extern "system" {
            fn RtlGetNonVolatileToken(nvbuffer: *const ::core::ffi::c_void, size: usize, nvtoken: *mut *mut ::core::ffi::c_void) -> u32;
        }
        ::core::mem::transmute(RtlGetNonVolatileToken(::core::mem::transmute(nvbuffer), ::core::mem::transmute(size), ::core::mem::transmute(nvtoken)))
    }
    #[cfg(not(windows))]
    unimplemented!("Unsupported target OS");
}
#[cfg(any(target_arch = "x86_64", target_arch = "aarch64",))]
#[inline]
pub unsafe fn RtlWriteNonVolatileMemory(nvtoken: *const ::core::ffi::c_void, nvdestination: *mut ::core::ffi::c_void, source: *const ::core::ffi::c_void, size: usize, flags: u32) -> u32 {
    #[cfg(windows)]
    {
        #[link(name = "windows")]
        extern "system" {
            fn RtlWriteNonVolatileMemory(nvtoken: *const ::core::ffi::c_void, nvdestination: *mut ::core::ffi::c_void, source: *const ::core::ffi::c_void, size: usize, flags: u32) -> u32;
        }
        ::core::mem::transmute(RtlWriteNonVolatileMemory(::core::mem::transmute(nvtoken), ::core::mem::transmute(nvdestination), ::core::mem::transmute(source), ::core::mem::transmute(size), ::core::mem::transmute(flags)))
    }
    #[cfg(not(windows))]
    unimplemented!("Unsupported target OS");
}
//! //! Trait(s) specific to code generation objects within this crate. //! pub mod attributes; pub mod docs; pub mod fields; pub mod generics; pub mod trait_bounds; pub use attributes::*; pub use docs::*; pub use fields::*; pub use generics::*; pub use trait_bounds::*; /// Trait implemented for elements representing the ability to render as /// raw source code. pub trait SrcCode { /// Given current configuration, give the resulting source code. #[must_use] fn generate(&self) -> String; } /// Trait to help collecting `Vec<impl SrcCode>` into `Vec<String>` via `.generate()` pub trait SrcCodeVec { /// Convert the current `Vec<impl SrcCode>` into `Vec<String>` fn to_src_vec(&self) -> Vec<String>; } impl<T: SrcCode> SrcCodeVec for Vec<T> { fn to_src_vec(&self) -> Vec<String> { self.iter().map(SrcCode::generate).collect() } } impl<'a> SrcCode for &'a str { fn generate(&self) -> String { self.to_string() } } impl SrcCode for String { fn generate(&self) -> String { self.clone() } }
use serde::Deserialize;

/// Payload of a registration request, deserialized from the request body.
#[derive(Deserialize)]
pub struct Register {
    // User-chosen name for the new account.
    pub username: String,
    // Country value as submitted by the client — free-form string here;
    // presumably a country name or code, confirm against the client contract.
    pub country: String,
}
extern crate gl;

use crate::core::{pass, pipeline, tech};
use crate::gl::{shader, uniform};
use std::collections::HashMap;
use std::vec::Vec;

/// Owns every `Technique` in the renderer, keyed by its `Techniques` tag.
pub struct TechniqueContainer {
    pub map: HashMap<Techniques, Technique>,
}

impl TechniqueContainer {
    /// Create an empty container.
    pub fn new() -> TechniqueContainer {
        TechniqueContainer {
            map: HashMap::new(),
        }
    }
}

impl TechniqueContainer {
    /// Bind every render pass of `pipeline` to the techniques it references.
    pub fn bind_pipeline(&mut self, pipeline: &pipeline::Pipeline) {
        for pass in pipeline.passes.iter() {
            self.bind_render_pass(pass);
        }
    }

    /// Undo `bind_pipeline`: detach every pass program from all techniques.
    pub fn unbind_pipeline(&mut self, pipeline: &pipeline::Pipeline) {
        for pass in pipeline.passes.iter() {
            self.unbind_render_pass(pass.program.handle);
        }
    }

    /// Bind one render pass' shader program to each technique it uses.
    ///
    /// Panics if `pass` references a technique that is not in `map`.
    pub fn bind_render_pass(&mut self, pass: &pass::Pass) {
        for tech in &pass.techniques {
            tech::bind_shader_program_to_technique(self.map.get_mut(tech).unwrap(), &pass.program);
        }
    }

    /// Detach the program identified by `pass_program_handle` from every technique.
    pub fn unbind_render_pass(&mut self, pass_program_handle: u32) {
        // `values_mut()` — the keys were unused in the original `iter_mut()` loop.
        for technique in self.map.values_mut() {
            tech::unbind_shader_program_from_technique(technique, pass_program_handle);
        }
    }
}

/// Uniform and texture-sampler state for one rendering technique.
pub struct Technique {
    pub name: String,
    pub per_frame_uniforms: uniform::Uniforms,
    pub per_model_uniforms: uniform::PerModelUniforms,
    pub textures: Vec<uniform::TextureSampler>,
}

impl Technique {
    /// Create an empty technique with the given display name.
    pub fn new(name: &str) -> Technique {
        Technique {
            name: name.to_string(),
            per_frame_uniforms: uniform::Uniforms {
                vec1f: Vec::new(),
                vec1u: Vec::new(),
                vec2f: Vec::new(),
                vec3f: Vec::new(),
                mat4x4f: Vec::new(),
            },
            per_model_uniforms: uniform::PerModelUniforms {
                vec1f: Vec::new(),
                vec1u: Vec::new(),
                vec2f: Vec::new(),
                vec3f: Vec::new(),
                mat4x4f: Vec::new(),
            },
            textures: Vec::new(),
        }
    }
}

/// Tags identifying the techniques known to the renderer.
// `Eq` is derived instead of the original empty manual `impl Eq` — equivalent.
#[derive(Hash, PartialEq, Eq, Clone)]
pub enum Techniques {
    MVP,
    Lighting,
    Skybox,
    IBL,
    ToneMapping,
}

/// Validate that `technique` has well-formed uniform definitions.
///
/// Returns a human-readable message naming the technique on failure.
pub fn is_technique_valid(technique: &Technique) -> Result<(), String> {
    if let Err(msg) = uniform::check_empty_uniforms(&technique.per_frame_uniforms) {
        return Err(format!(
            "Technique '{}' is invalid.\n{}",
            technique.name, msg
        ));
    }
    if let Err(msg) = uniform::check_empty_per_model_uniforms(&technique.per_model_uniforms) {
        return Err(format!(
            "Technique '{}' is invalid.\n{}",
            technique.name, msg
        ));
    }
    if let Err(msg) = uniform::check_per_model_uniforms_consistency(&technique.per_model_uniforms) {
        // Typo fix: previously read "is inlvalid!" — now matches the other branches.
        return Err(format!(
            "Technique '{}' is invalid.\n{}",
            technique.name, msg
        ));
    }
    Ok(())
}

/// Wire `program`'s uniform locations into every uniform and texture sampler
/// declared by `technique`.
pub fn bind_shader_program_to_technique(
    technique: &mut Technique,
    program: &shader::ShaderProgram,
) {
    uniform::bind_shader_program_to_scalar_uniforms(program, &mut technique.per_frame_uniforms.vec1f);
    uniform::bind_shader_program_to_scalar_uniforms(program, &mut technique.per_frame_uniforms.vec1u);
    uniform::bind_shader_program_to_scalar_uniforms(program, &mut technique.per_frame_uniforms.vec2f);
    uniform::bind_shader_program_to_scalar_uniforms(program, &mut technique.per_frame_uniforms.vec3f);
    uniform::bind_shader_program_to_scalar_uniforms(program, &mut technique.per_frame_uniforms.mat4x4f);
    // `program` passed directly — the original's `&program` created a needless `&&`.
    uniform::bind_shader_program_to_scalar_per_model_uniforms(program, &mut technique.per_model_uniforms.vec1f);
    uniform::bind_shader_program_to_scalar_per_model_uniforms(program, &mut technique.per_model_uniforms.vec1u);
    uniform::bind_shader_program_to_scalar_per_model_uniforms(program, &mut technique.per_model_uniforms.vec2f);
    uniform::bind_shader_program_to_scalar_per_model_uniforms(program, &mut technique.per_model_uniforms.vec3f);
    uniform::bind_shader_program_to_scalar_per_model_uniforms(program, &mut technique.per_model_uniforms.mat4x4f);
    uniform::bind_shader_program_to_texture_samplers(program, &mut technique.textures);
}

/// Inverse of [`bind_shader_program_to_technique`]: detach the program
/// identified by `program_handle` from all of `technique`'s uniforms/samplers.
pub fn unbind_shader_program_from_technique(technique: &mut Technique, program_handle: u32) {
    uniform::unbind_shader_program_from_scalar_uniforms(program_handle, &mut technique.per_frame_uniforms.vec1f);
    uniform::unbind_shader_program_from_scalar_uniforms(program_handle, &mut technique.per_frame_uniforms.vec1u);
    uniform::unbind_shader_program_from_scalar_uniforms(program_handle, &mut technique.per_frame_uniforms.vec2f);
    uniform::unbind_shader_program_from_scalar_uniforms(program_handle, &mut technique.per_frame_uniforms.vec3f);
    uniform::unbind_shader_program_from_scalar_uniforms(program_handle, &mut technique.per_frame_uniforms.mat4x4f);
    uniform::unbind_shader_program_from_scalar_per_model_uniforms(program_handle, &mut technique.per_model_uniforms.vec1f);
    uniform::unbind_shader_program_from_scalar_per_model_uniforms(program_handle, &mut technique.per_model_uniforms.vec1u);
    uniform::unbind_shader_program_from_scalar_per_model_uniforms(program_handle, &mut technique.per_model_uniforms.vec2f);
    uniform::unbind_shader_program_from_scalar_per_model_uniforms(program_handle, &mut technique.per_model_uniforms.vec3f);
    uniform::unbind_shader_program_from_scalar_per_model_uniforms(program_handle, &mut technique.per_model_uniforms.mat4x4f);
    uniform::unbind_shader_program_from_texture_samplers(program_handle, &mut technique.textures);
}
use std::io::Read;

/// Read one whitespace-delimited token from stdin and parse it into `T`.
///
/// Panics on read or parse failure (acceptable for a competitive-programming
/// style solution).
fn read<T: std::str::FromStr>() -> T {
    let token: String = std::io::stdin()
        .bytes()
        .map(|c| c.ok().unwrap() as char)
        .skip_while(|c| c.is_whitespace())
        .take_while(|c| !c.is_whitespace())
        .collect();
    token.parse().ok().unwrap()
}

/// Decide the verdict string from whether `x` was found in the first list
/// (`mr`) and in the second list (`mx`).
fn verdict(mr: bool, mx: bool) -> &'static str {
    match (mr, mx) {
        (true, false) => "MrMax",
        (false, true) => "MaxValu",
        (true, true) => "MrMaxValu",
        (false, false) => "-1",
    }
}

fn main() {
    let x: i32 = read();
    let n: usize = read();
    let m: usize = read();
    let a: Vec<i32> = (0..n).map(|_| read()).collect();
    let b: Vec<i32> = (0..m).map(|_| read()).collect();
    // `contains` replaces the verbose `match find(..) { Some(_) => true, _ => false }`.
    println!("{}", verdict(a.contains(&x), b.contains(&x)));
}
use crate::{
    combinators::if_all, loading::systems, modes::ModeExt, AppBuilderExt, GameStage, GlobalMode,
    ModeEvent,
};
use game_lib::bevy::{ecs as bevy_ecs, prelude::*};

/// Plugin that wires the main asset-loading flow into the app's stages.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash, SystemLabel)]
pub struct LoadingPlugin;

impl Plugin for LoadingPlugin {
    fn build(&self, app: &mut AppBuilder) {
        // The loading sub-mode starts disabled (`None`) and is driven by
        // transitions on `GlobalMode::MainLoading`.
        app.add_mode(Option::<MainLoadingMode>::None)
            .add_system_set_to_stage(
                GameStage::GameUpdate,
                SystemSet::new()
                    .label(LoadingPlugin)
                    .in_ambiguity_set(GlobalMode::MainLoading)
                    .with_system(
                        // Request assets after main loading begins
                        Some(MainLoadingMode::RequestAssets)
                            .transition_system()
                            .with_run_criteria(GlobalMode::MainLoading.on(ModeEvent::Enter)),
                    )
                    .with_system(
                        // Wait for assets after requesting assets
                        Some(MainLoadingMode::WaitForAssets)
                            .transition_system()
                            .with_run_criteria(
                                Some(MainLoadingMode::RequestAssets).on(ModeEvent::Enter),
                            ),
                    )
                    .with_system(
                        // Disable loading mode after main loading finishes
                        Option::<MainLoadingMode>::None
                            .transition_system()
                            .with_run_criteria(GlobalMode::MainLoading.on(ModeEvent::Exit)),
                    ),
            )
            .add_system_set_to_stage(
                GameStage::GamePreUpdate,
                // One-shot setup when the request phase is entered.
                SystemSet::new()
                    .label(LoadingPlugin)
                    .label(MainLoadingSystem::Setup)
                    .with_run_criteria(Some(MainLoadingMode::RequestAssets).on(ModeEvent::Enter))
                    .with_system(systems::setup_main_loading.system()),
            )
            .add_system_to_stage(
                GameStage::GamePostUpdate,
                // Enter `InGame` once we are waiting for assets AND every
                // required asset reports loaded.
                GlobalMode::InGame
                    .transition_system()
                    .label(LoadingPlugin)
                    .label(MainLoadingSystem::CheckIfLoaded)
                    .with_run_criteria(if_all(vec![
                        Some(MainLoadingMode::WaitForAssets).on(ModeEvent::Active),
                        Box::new(systems::if_required_assets_loaded.system()),
                    ])),
            )
            .add_system_set_to_stage(
                GameStage::GamePostUpdate,
                // Tear down loading-time state when main loading exits.
                SystemSet::new()
                    .label(LoadingPlugin)
                    .with_run_criteria(GlobalMode::MainLoading.on(ModeEvent::Exit))
                    .with_system(systems::cleanup_main_loading.system()),
            );
    }
}

/// Sub-modes of the main loading flow.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash, AmbiguitySetLabel)]
pub enum MainLoadingMode {
    /// While requesting assets. This state immediately transitions to
    /// [`MainLoadingMode::WaitForAssets`] after a single update tick
    RequestAssets,
    /// While waiting for required assets to load. Once all required assets are
    /// loaded, the global state is transitioned.
    WaitForAssets,
}

/// Labels for the loading systems, usable in ordering constraints.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash, SystemLabel)]
pub enum MainLoadingSystem {
    Setup,
    CheckIfLoaded,
}
extern crate clap; extern crate tar_api; extern crate flate2; use clap::{App, Arg, SubCommand, value_t}; use std::path::{Path, PathBuf}; use std::process::exit; fn main() { let matches = &App::new("tar_api") .subcommand( SubCommand::with_name("tar") ) .subcommand( SubCommand::with_name("untar") .arg(Arg::with_name("file").required(true).takes_value( true, )) .arg(Arg::with_name("dest").required(true).takes_value( true, )) ).get_matches(); if let Some(sub_matches) = matches.subcommand_matches("untar") { let tar_file = sub_matches .value_of("file") .map(PathBuf::from) .unwrap(); let dest = sub_matches .value_of("dest") .map(PathBuf::from) .unwrap(); // println!("{:?} {:?}", tar_file, dest); match tar_api::decompress_tgz( &tar_file, &dest, ) { Ok(_) => {} Err(err) => { eprintln!("{}", err); exit(1) } }; } if let Some(sub_matches) = matches.subcommand_matches("tar") { match tar_api::compress() { Ok(_) => {} Err(err) => { eprintln!("{}", err); exit(1) } }; } }
#![feature(plugin)] #![plugin(phf_macros)] #![feature(test)] extern crate test; #[macro_use] extern crate lazy_static; extern crate phf; // 数量少的话, match/find最遭的情况依然比map好 // // Hash的 O(1), 匹配时间仅取决于当前字符串/bytes的长度(Hash一次). // // match/if-else 的 O(n), 取决于匹配次数(直到匹配成功或全部失败), 另外还有目标字符串/bytes长度. // // 参见b模块, if-else的最糟糕情况 O(n), 大概在 26个元素的集合与 phf 的平均值耗时相当. // // 总的来说, 二三十(应该是个相当保守的值)之内不用担心 match 低效率, 除非目标字符串/bytes非常长. // // 另外自己写个字典树试试? mod b; #[derive(Clone)] pub enum Kw { Loop, Continue, Break, Fn, Extern, } fn match_parse_kw(kw: &str) -> Option<Kw> { match kw { "loop" => Some(Kw::Loop), "continue" => Some(Kw::Continue), "break" => Some(Kw::Break), "fn" => Some(Kw::Fn), "extern" => Some(Kw::Extern), _ => None, } } #[bench] fn match_(b: &mut test::Bencher) { b.iter(|| { vec!["loop", "continue", "break", "fn", "extern"] .into_iter() .for_each(|e| assert!(match_parse_kw(e).is_some())) }) } #[bench] fn match_none(b: &mut test::Bencher) { b.iter(|| { vec!["loop ", "continue,", "breakx", "fnx", "externxx"] .into_iter() .for_each(|e| assert!(match_parse_kw(e).is_none())) }) } fn vec_parse_kw(kw: &str)->Option<&'static Kw> { let svec= &[("loop" ,Kw::Loop),("continue", Kw::Continue), ("break" , Kw::Break), ("fn" , Kw::Fn), ("extern" , Kw::Extern) ][..]; svec.iter().find(|e|e.0 ==kw).map(|ref b|&b.1) } #[bench] fn vec(b: &mut test::Bencher) { b.iter(|| { vec!["loop", "continue", "break", "fn", "extern"] .into_iter() .for_each(|e| assert!(vec_parse_kw(e).is_some())) }) } #[bench] fn vec_none(b: &mut test::Bencher) { b.iter(|| { vec!["loop ", "continue,", "breakx", "fnx", "externxx"] .into_iter() .for_each(|e| assert!(vec_parse_kw(e).is_none())) }) } static KWS: phf::Map<&'static str, &'static Kw> = phf_map! 
{ "loop" => &Kw::Loop, "continue" => &Kw::Continue, "break" => &Kw::Break, "fn" => &Kw::Fn, "extern" => &Kw::Extern, }; pub fn phf_parse_kw(kw: &str) -> Option<&'static Kw> { KWS.get(kw).map(|x|*x ) } #[bench] fn phf_(b: &mut test::Bencher) { b.iter(|| { vec!["loop", "continue", "break", "fn", "extern"] .into_iter() .for_each(|e| assert!(phf_parse_kw(e).is_some())) }) } #[bench] fn phf_none(b: &mut test::Bencher) { b.iter(|| { vec!["loop ", "continue,", "breakx", "fnx", "externxx"] .into_iter() .for_each(|e| assert!(phf_parse_kw(e).is_none())) }) } fn hashmap_parse_kw(kw: &str)-> Option<&'static Kw> { lazy_static!{ static ref MAP: std::collections::HashMap<&'static str, Kw>= vec![("loop" ,Kw::Loop),("continue", Kw::Continue), ("break" , Kw::Break), ("fn" , Kw::Fn), ("extern" , Kw::Extern) ].into_iter().collect(); } MAP.get(kw) } #[bench] fn hashmap(b: &mut test::Bencher) { b.iter(|| { vec!["loop", "continue", "break", "fn", "extern"] .into_iter() .for_each(|e| assert!(hashmap_parse_kw(e).is_some())) }) } #[bench] fn hashmap_none(b: &mut test::Bencher) { b.iter(|| { vec!["loop ", "continue,", "breakx", "fnx", "externxx"] .into_iter() .for_each(|e| assert!(hashmap_parse_kw(e).is_none())) }) } fn main() { println!("Hello, world!"); for idx in 0..100 { print!("V{:02},",idx); } println!( ); for idx in 0..100 { print!("(\"prifix_{}_suffix\",Kw::V{:02}),",idx, idx); } println!( ); for idx in 0..100 { print!("\"prifix_{}_suffix\" => &Kw::V{:02},",idx, idx); } }
use std::sync::Arc;
use actix_web::web;
use crate::graphql::Schema;
use deadpool_postgres::Pool;
use serde::{Deserialize, Serialize};
use tokio_pg_mapper_derive::PostgresMapper;

/// A level row mapped from Postgres.
// NOTE(review): both `Level` and `Score` declare `table = "todo_list"` — this
// looks copy-pasted; confirm the actual table names.
#[derive(Serialize, Deserialize, PostgresMapper)]
#[pg_mapper(table = "todo_list")]
pub struct Level {
    pub id: i32,
    // Username of the level's creator.
    pub creator: String,
    // Start/end code semantics are not visible here — TODO confirm.
    pub startcode: String,
    pub endcode: String,
    pub name: String,
}

/// A score row mapped from Postgres.
#[derive(Serialize, Deserialize, PostgresMapper)]
#[pg_mapper(table = "todo_list")]
pub struct Score {
    pub id: i32,
    // Challenge this score belongs to (by id).
    pub challengeid: i32,
    pub score: i32,
    pub username: String,
}

/// Per-request GraphQL context carrying the shared DB pool.
#[derive(Clone)]
pub struct QContext {
    pub dbpool: Arc<Pool>,
}

/// Shared application state: DB pool plus the GraphQL schema.
pub struct Data {
    pub pool: Arc<Pool>,
    pub schema: std::sync::Arc<Schema>,
}
use wasm_bindgen::prelude::*;

use bincode;
use objects::PacketId;
use objects::Request;
use objects::Answer;
use objects::RequestAction;

/// Convert the id received from JS (as a `Vec<u32>`) into a fixed-size `PacketId`.
///
/// Panics if `buf` has fewer than four elements; the JS side is expected to
/// always pass four.
fn vec_to_id(buf: Vec<u32>) -> PacketId {
    [buf[0], buf[1], buf[2], buf[3]]
}

/// Serialize a request with packet id `id` and a JS-provided action.
///
/// Returns `None` if `msg` cannot be deserialized into a `RequestAction` or
/// bincode serialization fails.
#[wasm_bindgen]
pub fn request_to_buf(id: Vec<u32>, msg: JsValue) -> Option<Vec<u8>> {
    let msg: RequestAction = msg.into_serde().ok()?;
    let req = Request::new(vec_to_id(id), msg);
    bincode::serialize(&req).ok()
}

/// Build and serialize an `UploadTrack` request directly from its parts,
/// avoiding a JSON round-trip for the (potentially large) `data` payload.
#[wasm_bindgen]
pub fn upload_track(id: Vec<u32>, name: String, format: String, data: Vec<u8>) -> Option<Vec<u8>> {
    let msg = RequestAction::UploadTrack { name, format, data };
    let req = Request::new(vec_to_id(id), msg);
    bincode::serialize(&req).ok()
}

/// JS-visible wrapper around a decoded `Answer` (`None` when decoding failed).
#[wasm_bindgen]
pub struct Wrapper(Option<Answer>);

#[wasm_bindgen]
impl Wrapper {
    /// Decode an answer packet; an undecodable buffer yields an empty wrapper.
    #[wasm_bindgen(constructor)]
    pub fn new(buf: Vec<u8>) -> Wrapper {
        Wrapper(bincode::deserialize(&buf).ok())
    }

    /// The packet id of the decoded answer, or `None` if decoding failed.
    pub fn id(&self) -> Option<Vec<u32>> {
        if let Some(ref inner) = self.0 {
            let id = inner.id;
            Some(vec![id[0], id[1], id[2], id[3]])
        } else {
            None
        }
    }

    /// The answer payload as a JS value: the serialized answer on success, the
    /// error string on failure, or `null` when nothing was decoded.
    pub fn action(&self) -> JsValue {
        if let Some(ref inner) = self.0 {
            match inner.msg {
                // `unwrap_or_else` defers constructing the null value to the
                // failure path instead of building it eagerly.
                Ok(ref answer) => JsValue::from_serde(&answer).unwrap_or_else(JsValue::null),
                // `err` already derefs to `&str`; the extra `&` was redundant.
                Err(ref err) => JsValue::from_str(err),
            }
        } else {
            JsValue::null()
        }
    }
}
use std::fmt::{Result, Display, Formatter};

/// A chessboard rank stored in 3 bits: internal value 0 is rank 8 and value 7
/// is rank 1 (see `RANK_SYMBOLS`).
#[derive(Default, Eq, Ord, Copy, Clone, Debug, PartialEq, PartialOrd, Hash)]
pub struct Rank(u8);

impl Rank {
    /// Construct from the raw 3-bit encoding (0 = rank 8 … 7 = rank 1).
    pub fn from_bits(bits: u8) -> Self {
        debug_assert!(bits < 8);
        Rank(bits)
    }

    /// Parse a single rank character (`'1'`..=`'8'`).
    ///
    /// Debug-asserts the input is ASCII; panics (via `unwrap`) on characters
    /// outside `RANK_SYMBOLS`.
    pub fn parse(input: char) -> Self {
        debug_assert!((input as u32) < 128, "it is not even an ASCII character!");
        parse_rank(&[input as u8]).unwrap().1
    }

    /// The display character for this rank (`'8'` for bits 0 … `'1'` for bits 7).
    pub fn char(self) -> char {
        RANK_SYMBOLS[self.0 as usize] as char
    }

    /// The raw 3-bit encoding.
    pub fn bits(self) -> u8 {
        self.0
    }
}

// Symbols indexed by the internal encoding: index 0 → '8', index 7 → '1'.
static RANK_SYMBOLS: &'static [u8; 8] = b"87654321";

// nom parser: accepts one of RANK_SYMBOLS and maps `b'8' - ch` to the encoding.
named!(pub parse_rank(&[u8]) -> Rank,
    map!(is_a!(RANK_SYMBOLS), |c:&[u8]|{ Rank(RANK_SYMBOLS[0] - c[0]) }));

impl Display for Rank {
    fn fmt(&self, f: &mut Formatter) -> Result {
        write!(f, "{}", self.char())
    }
}

// `ALL_RANKS` equals `_8` and doubles as an iterator over every rank (see the
// `Iterator` impl below).
pub const ALL_RANKS: Rank = Rank(0);
pub const _1: Rank = Rank(7);
pub const _2: Rank = Rank(6);
pub const _3: Rank = Rank(5);
pub const _4: Rank = Rank(4);
pub const _5: Rank = Rank(3);
pub const _6: Rank = Rank(2);
pub const _7: Rank = Rank(1);
pub const _8: Rank = Rank(0);

// Iterating a `Rank` yields itself and every following encoding up to 7; the
// internal counter runs to 8 to terminate.
impl Iterator for Rank {
    type Item = Rank;
    fn next(&mut self) -> Option<Self::Item> {
        if self.0 == 8 {
            None
        } else {
            let result = *self;
            self.0 += 1;
            Some(result)
        }
    }
}

#[cfg(test)]
mod test {
    use super::*;
    use itertools::*;

    #[test]
    fn all_ranks() {
        assert_eq!(ALL_RANKS.collect_vec(), [_8, _7, _6, _5, _4, _3, _2, _1]);
    }

    #[test]
    fn rank_char() {
        assert_eq!(ALL_RANKS.map(|f| f.char()).collect::<String>(), "87654321");
    }

    #[test]
    fn rank_display() {
        assert_eq!(ALL_RANKS.map(|f| format!("{}", f)).join(""), "87654321");
    }

    #[test]
    fn rank_debug() {
        assert_eq!(format!("{:?}", _1), "Rank(7)");
        assert_eq!(format!("{:?}", _8), "Rank(0)");
    }

    #[test]
    fn rank_parse() {
        assert_eq!(['8', '7', '6', '5', '4', '3', '2', '1'].into_iter().
            map(|f| Rank::parse(*f)).collect_vec(),
            [_8, _7, _6, _5, _4, _3, _2, _1]);
    }
}
use chrono::prelude::*; use std::error::Error; use std::fs::OpenOptions; use std::io::prelude::*; use std::{thread, time}; pub struct Config { pub duration_in_millis: u32, pub log_path: String, } impl Config { pub fn new(args: &[String]) -> Result<Config, &'static str> { if args.len() < 3 { return Err("not enough argument"); } // TODO: magic number constant let duration_in_millis: u32 = match args[1].trim().parse() { Ok(d) => d, Err(_) => return Err("invalid param: duration"), }; let log_path = args[2].clone(); Ok(Config { duration_in_millis, log_path }) } } #[repr(u128)] pub fn run(conf: Config) { let mut file = match OpenOptions::new().append(true).create(true).open(conf.log_path) { Err(why) => panic!("couldn't open file, reason: {}", why.description()), Ok(file) => file, }; let sleep_duration = time::Duration::from_millis(conf.duration_in_millis as u64); let delta = time::Duration::from_millis(10); loop { let now = time::Instant::now(); thread::sleep(sleep_duration); let elapsed = now.elapsed(); if elapsed >= sleep_duration + delta { let timestamp = Local::now().format("%Y-%m-%d %H:%M:%S").to_string(); let log = format!("{} detected hang: {}ms\n", timestamp, elapsed.as_secs()); println!("{}", log); match file.write_all(log.as_bytes()) { Err(why) => panic!("couldn't write: {}", why.description()), Ok(_) => println!("successfully wrote"), } } } } #[cfg(test)] mod tests { use super::*; #[test] fn test_it() { println!("dummy test!"); } #[test] fn test_config_new() { let cmd = String::from("hang-monitor ..."); let test_duration = 100; let test_file_path = "/test/file_path/file.log"; let test_args: Vec<String> = vec![cmd, String::from(test_duration.to_string()), String::from(test_file_path)]; let conf = Config::new(&test_args).unwrap(); assert_eq!(test_duration, conf.duration_in_millis); assert_eq!(test_file_path, conf.log_path); } }
extern crate sdl2;
extern crate rand;

mod maze;

use sdl2::event::Event;
use sdl2::keyboard::Keycode;
use sdl2::pixels::Color;
use sdl2::rect::Rect;

use maze::Direction;
use maze::GameState;

/// Entry point: creates an SDL window sized to the maze (10 px per cell) and
/// runs the input/update/render loop until the player quits.
pub fn main() {
    let mut map = make_new_game();
    // Each maze cell is drawn as a 10x10 px square.
    let width = 10*map.cols;
    let height = 10*map.rows;
    println!("width: {}, height: {}", width, height);

    let sdl_context = sdl2::init().unwrap();
    let video_subsystem = sdl_context.video().unwrap();

    let window = video_subsystem.window("rust-sdl2 maze", width, height)
        .position_centered()
        .opengl()
        .build()
        .unwrap();

    let mut renderer = window.renderer().build().unwrap();
    renderer.clear();

    let mut event_pump = sdl_context.event_pump().unwrap();

    'running: loop {
        for event in event_pump.poll_iter() {
            match event {
                Event::Quit {..} |
                Event::KeyDown { keycode: Some(Keycode::Escape), .. } => {
                    break 'running
                },
                Event::KeyDown { keycode: Some(kc), .. } => {
                    let dir = match kc {
                        Keycode::Left => Direction::Left,
                        Keycode::Right => Direction::Right,
                        Keycode::Up => Direction::Up,
                        Keycode::Down => Direction::Down,
                        _ => {
                            println!("Invalid key");
                            continue;
                        },
                    };
                    match map.move_player(dir) {
                        GameState::Won => {
                            println!("You win!");
                            map = make_new_game();
                        },
                        GameState::Dead => map = death_screen(),
                        _ => {
                            // Player survived; the trolls take their turn.
                            // (The original's trailing `continue` statements
                            // were unreachable/no-ops and have been removed.)
                            if let GameState::Dead = map.move_trolls() {
                                map = death_screen();
                            }
                        },
                    };
                }
                _ => {}
            }
        }

        renderer.clear();
        renderer.set_draw_color(Color::RGB(255, 255, 255));
        // BUG FIX: `Rect::new(x, y, width, height)` — the original passed
        // (height, width) swapped, clipping the background on non-square mazes.
        renderer.fill_rect(Rect::new(0, 0, width, height)).unwrap();
        map.render(&mut renderer);
        renderer.present();
    }
}

/// Build a fresh maze from the map file and populate it with the player and
/// ten trolls.
fn make_new_game() -> maze::Maze {
    let mut map = maze::Maze::from_file("./res/map.txt").unwrap();
    map.add_player();
    map.add_trolls(10);
    map
}

/// Announce the player's death and start a new game.
fn death_screen() -> maze::Maze {
    println!("You were killed. RIP :(");
    make_new_game()
}
use crate::*;
use std::cmp::Ordering;
use std::fmt;

/// Newtype wrapping a raw integer EX score.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Serialize, Deserialize, Default)]
pub struct ExScore(i32);

impl ExScore {
    /// Wrap a raw score value.
    pub fn from_score(score: i32) -> ExScore {
        ExScore(score)
    }

    /// The raw score value.
    pub fn ex_score(&self) -> i32 {
        self.0
    }
}

impl fmt::Display for ExScore {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.ex_score())
    }
}

impl PartialOrd for ExScore {
    // Canonical form: delegate to `Ord` so the two orderings can never
    // disagree (clippy::non_canonical_partial_ord_impl). Behavior for `i32`
    // is identical to the previous `partial_cmp` on the inner value.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for ExScore {
    fn cmp(&self, other: &Self) -> Ordering {
        self.ex_score().cmp(&other.ex_score())
    }
}
use iron::typemap::Key;

/// Miscellaneous app configuration, stored in Iron's typemap keyed by itself.
#[derive(Debug)]
pub struct ConfigMisc {
    // Secret used to sign/verify JWTs.
    pub jwt_secret: String,
    // Allowed clock skew for JWT validation — units not visible here
    // (presumably seconds; confirm at call sites).
    pub jwt_time_variation: i64
}

impl Key for ConfigMisc {
    type Value = ConfigMisc;
}
extern crate multipart; extern crate nickel; use std::io::{self, Write}; use nickel::{Action, HttpRouter, MiddlewareResult, Nickel, Request, Response}; use nickel::status::StatusCode; use multipart::server::nickel::MultipartBody; use multipart::server::{Entries, SaveResult}; use multipart::mock::StdoutTee; fn handle_multipart<'mw>(req: &mut Request, mut res: Response<'mw>) -> MiddlewareResult<'mw> { match (*req).multipart_body() { Some(mut multipart) => { match multipart.save().temp() { SaveResult::Full(entries) => process_entries(res, entries), SaveResult::Partial(entries, e) => { println!("Partial errors ... {:?}", e); return process_entries(res, entries.keep_partial()); }, SaveResult::Error(e) => { println!("There are errors in multipart POSTing ... {:?}", e); res.set(StatusCode::InternalServerError); return res.send(format!("Server could not handle multipart POST! {:?}", e)); }, } } None => { res.set(StatusCode::BadRequest); return res.send("Request seems not was a multipart request") } } } /// Processes saved entries from multipart request. /// Returns an OK response or an error. fn process_entries<'mw>(res: Response<'mw>, entries: Entries) -> MiddlewareResult<'mw> { let stdout = io::stdout(); let mut res = res.start()?; if let Err(e) = entries.write_debug(StdoutTee::new(&mut res, &stdout)) { writeln!(res, "Error while reading entries: {}", e).expect("writeln"); } Ok(Action::Halt(res)) } fn main() { let mut srv = Nickel::new(); srv.post("/multipart_upload/", handle_multipart); // Start this example via: // // `cargo run --example nickel --features nickel` // // And - if you are in the root of this repository - do an example // upload via: // // `curl -F file=@LICENSE 'http://localhost:6868/multipart_upload/'` srv.listen("127.0.0.1:6868").expect("Failed to bind server"); }
use futures::{channel::mpsc, Future} ;
use std::pin::Pin;
use std::task::Poll;

// Boxed future type driven manually by `Executor`.
pub(crate) type ExecutorFuture =
    Pin<Box<dyn Future<Output = Result<(), Box<dyn std::error::Error>>> + 'static>>;

/// Drives a single handler future by polling it manually with a no-op waker.
///
/// `I` is the handler's input type; `T` is the message type fed to the handler
/// through a bounded (capacity 1) mpsc channel.
pub struct Executor<I, T> {
    // Factory producing the handler future from its input.
    pub handle: fn(I) -> ExecutorFuture,
    // The in-flight future, if `start` has run and it hasn't returned an error.
    pub future: Option<ExecutorFuture>,
    // Sender side of the channel consumed by the handler.
    pub tx: Option<mpsc::Sender<T>>,
}

impl<I, T> Executor<I, T> {
    /// (Re)start the handler: build its input from a fresh receiver, create
    /// the future, and poll it once. The future is kept for later `send`
    /// calls when the first poll is `Pending` or completed with `Ok(())`.
    pub(crate) fn start(
        &mut self,
        get_input: Box<dyn Fn(mpsc::Receiver<T>) -> I>,
    ) -> Result<(), Box<dyn std::error::Error>> {
        if self.future.is_some() {
            // A previous run is still alive; drop its future and sender first.
            eprintln!("MSFS-RS: RESTARTING EXECUTOR");
            self.future.take();
            self.tx.take();
        }
        let (tx, rx) = mpsc::channel(1);
        self.tx = Some(tx);
        let input = get_input(rx);
        let mut f = (self.handle)(input);
        // No-op waker: this executor is advanced explicitly via `send`, never
        // woken asynchronously.
        let mut context = std::task::Context::from_waker(futures::task::noop_waker_ref());
        match match f.as_mut().poll(&mut context) {
            Poll::Pending => Ok(()),
            Poll::Ready(v) => v,
        } {
            Ok(()) => {
                // NOTE(review): a future that already completed with `Ok` is
                // also stored here and may be polled again in `send` — confirm
                // handlers tolerate being polled after completion.
                self.future = Some(f);
                Ok(())
            }
            e => e,
        }
    }

    /// Feed `data` into the handler (or close the channel by dropping the
    /// sender when `data` is `None`) and poll the future one step.
    ///
    /// Panics if called before a successful `start`, or if `try_send` fails
    /// (channel full or disconnected).
    pub(crate) fn send(&mut self, data: Option<T>) -> Result<(), Box<dyn std::error::Error>> {
        if let Some(data) = data {
            self.tx.as_mut().unwrap().try_send(data).unwrap();
        } else {
            self.tx.take();
        }
        let mut context = std::task::Context::from_waker(futures::task::noop_waker_ref());
        match self.future.as_mut().unwrap().as_mut().poll(&mut context) {
            Poll::Pending => Ok(()),
            Poll::Ready(v) => v,
        }
    }
}
/// Set the length of the vec when the `SetLenOnDrop` value goes out of scope.
///
/// Copied from https://github.com/rust-lang/rust/pull/36355
pub struct SetLenOnDrop<'a> {
    len: &'a mut usize,
    local_len: usize,
}

impl<'a> SetLenOnDrop<'a> {
    /// Capture the current value of `len`; the (possibly incremented) local
    /// copy is written back when this guard is dropped.
    #[inline]
    pub fn new(len: &'a mut usize) -> Self {
        let snapshot = *len;
        SetLenOnDrop {
            len,
            local_len: snapshot,
        }
    }

    /// The pending length value.
    #[inline]
    pub fn get(&self) -> usize {
        self.local_len
    }

    /// Grow the pending length by `increment`.
    #[inline]
    pub fn increment_len(&mut self, increment: usize) {
        self.local_len += increment;
    }
}

impl<'a> Drop for SetLenOnDrop<'a> {
    #[inline]
    fn drop(&mut self) {
        // Publish the accumulated length back to the borrowed counter.
        *self.len = self.local_len;
    }
}
use std::fs;

use permutohedron::LexicalPermutation;

mod intcode;
use intcode::IntcodeComputer;

/// Collect `list` and every lexicographically *following* permutation of it.
// NOTE(review): `next_permutation` only steps forward lexicographically, so
// this yields all permutations only when `list` starts sorted ascending —
// confirm at call sites. (Currently unused by `main` below.)
fn generate_permutations(mut list: Vec<i128>) -> Vec<Vec<i128>> {
    let mut permutations = Vec::new();
    loop {
        permutations.push(list.to_vec());
        if !list.next_permutation() {
            break;
        }
    }
    permutations
}

/// Load the intcode program from `input.txt` and run it twice, once with
/// input 1 and once with input 2 (presumably the two puzzle parts).
fn main() {
    let input = fs::read_to_string("input.txt")
        .expect(":(");
    // The program is a single comma-separated line of integers.
    let memory: Vec<i128> = input
        .trim()
        .split(",")
        .map(|x| x.parse().unwrap())
        .collect();

    let mut computer = IntcodeComputer::new(memory.clone(), 0);
    computer.set_input(1);
    computer.run();

    let mut computer = IntcodeComputer::new(memory.clone(), 0);
    computer.set_input(2);
    computer.run();
}
#![doc = include_str!("../README.md")] #![recursion_limit = "1024"] // NOTICE: Unfortunately this macro MUST be defined here, in the crate's root module, because Rust // doesn't allow to export `macro_rules!` macros from a `proc-macro` crate type currently, // and so we cannot move the definition into a sub-module and use the `#[macro_export]` // attribute. /// Attempts to merge an [`Option`]ed `$field` of a `$self` struct with the same `$field` of /// `$another` struct. If both are [`Some`], then throws a duplication error with a [`Span`] related /// to the `$another` struct (a later one). /// /// The type of [`Span`] may be explicitly specified as one of the [`SpanContainer`] methods. /// By default, [`SpanContainer::span_ident`] is used. /// /// [`Span`]: proc_macro2::Span /// [`SpanContainer`]: crate::common::SpanContainer /// [`SpanContainer::span_ident`]: crate::common::SpanContainer::span_ident macro_rules! try_merge_opt { ($field:ident: $self:ident, $another:ident => $span:ident) => {{ if let Some(v) = $self.$field { $another .$field .replace(v) .none_or_else(|dup| crate::common::parse::attr::err::dup_arg(&dup.$span()))?; } $another.$field }}; ($field:ident: $self:ident, $another:ident) => { try_merge_opt!($field: $self, $another => span_ident) }; } // NOTICE: Unfortunately this macro MUST be defined here, in the crate's root module, because Rust // doesn't allow to export `macro_rules!` macros from a `proc-macro` crate type currently, // and so we cannot move the definition into a sub-module and use the `#[macro_export]` // attribute. /// Attempts to merge a [`HashMap`] `$field` of a `$self` struct with the same `$field` of /// `$another` struct. If some [`HashMap`] entries are duplicated, then throws a duplication error /// with a [`Span`] related to the `$another` struct (a later one). /// /// The type of [`Span`] may be explicitly specified as one of the [`SpanContainer`] methods. /// By default, [`SpanContainer::span_ident`] is used. 
/// /// [`HashMap`]: std::collections::HashMap /// [`Span`]: proc_macro2::Span /// [`SpanContainer`]: crate::common::SpanContainer /// [`SpanContainer::span_ident`]: crate::common::SpanContainer::span_ident macro_rules! try_merge_hashmap { ($field:ident: $self:ident, $another:ident => $span:ident) => {{ if !$self.$field.is_empty() { for (ty, rslvr) in $self.$field { $another .$field .insert(ty, rslvr) .none_or_else(|dup| crate::common::parse::attr::err::dup_arg(&dup.$span()))?; } } $another.$field }}; ($field:ident: $self:ident, $another:ident) => { try_merge_hashmap!($field: $self, $another => span_ident) }; } // NOTICE: Unfortunately this macro MUST be defined here, in the crate's root module, because Rust // doesn't allow to export `macro_rules!` macros from a `proc-macro` crate type currently, // and so we cannot move the definition into a sub-module and use the `#[macro_export]` // attribute. /// Attempts to merge a [`HashSet`] `$field` of a `$self` struct with the same `$field` of /// `$another` struct. If some [`HashSet`] entries are duplicated, then throws a duplication error /// with a [`Span`] related to the `$another` struct (a later one). /// /// The type of [`Span`] may be explicitly specified as one of the [`SpanContainer`] methods. /// By default, [`SpanContainer::span_ident`] is used. /// /// [`HashSet`]: std::collections::HashSet /// [`Span`]: proc_macro2::Span /// [`SpanContainer`]: crate::common::SpanContainer /// [`SpanContainer::span_ident`]: crate::common::SpanContainer::span_ident macro_rules! 
try_merge_hashset { ($field:ident: $self:ident, $another:ident => $span:ident) => {{ if !$self.$field.is_empty() { for ty in $self.$field { $another .$field .replace(ty) .none_or_else(|dup| crate::common::parse::attr::err::dup_arg(&dup.$span()))?; } } $another.$field }}; ($field:ident: $self:ident, $another:ident) => { try_merge_hashset!($field: $self, $another => span_ident) }; } mod common; mod graphql_enum; mod graphql_input_object; mod graphql_interface; mod graphql_object; mod graphql_scalar; mod graphql_subscription; mod graphql_union; mod scalar_value; use proc_macro::TokenStream; use proc_macro_error::{proc_macro_error, ResultExt as _}; /// `#[derive(GraphQLInputObject)]` macro for deriving a /// [GraphQL input object][0] implementation for a Rust struct. Each /// non-ignored field type must itself be [GraphQL input object][0] or a /// [GraphQL scalar][2]. /// /// The `#[graphql]` helper attribute is used for configuring the derived /// implementation. Specifying multiple `#[graphql]` attributes on the same /// definition is totally okay. They all will be treated as a single attribute. /// /// ```rust /// use juniper::GraphQLInputObject; /// /// #[derive(GraphQLInputObject)] /// struct Point2D { /// x: f64, /// y: f64, /// } /// ``` /// /// # Custom name and description /// /// The name of a [GraphQL input object][0] or its [fields][1] may be overridden /// with the `name` attribute's argument. By default, a type name or a struct /// field name is used in a `camelCase`. /// /// The description of a [GraphQL input object][0] or its [fields][1] may be /// specified either with the `description`/`desc` attribute's argument, or with /// a regular Rust doc comment. /// /// ```rust /// # use juniper::GraphQLInputObject; /// # /// #[derive(GraphQLInputObject)] /// #[graphql( /// // Rename the type for GraphQL by specifying the name here. /// name = "Point", /// // You may also specify a description here. /// // If present, doc comments will be ignored. 
/// desc = "A point is the simplest two-dimensional primitive.",
/// )]
/// struct Point2D {
///     /// Abscissa value.
///     x: f64,
///
///     #[graphql(name = "y", desc = "Ordinate value")]
///     y_coord: f64,
/// }
/// ```
///
/// # Renaming policy
///
/// By default, all [GraphQL input object fields][1] are renamed in a
/// `camelCase` manner (so a `y_coord` Rust struct field becomes a
/// `yCoord` [value][1] in GraphQL schema, and so on). This complies with
/// default GraphQL naming conventions as [demonstrated in spec][0].
///
/// However, if you need for some reason another naming convention, it's
/// possible to do so by using the `rename_all` attribute's argument. At the
/// moment, it supports the following policies only: `SCREAMING_SNAKE_CASE`,
/// `camelCase`, `none` (disables any renaming).
///
/// ```rust
/// # use juniper::GraphQLInputObject;
/// #
/// #[derive(GraphQLInputObject)]
/// #[graphql(rename_all = "none")] // disables renaming
/// struct Point2D {
///     x: f64,
///     y_coord: f64, // will be `y_coord` instead of `yCoord` in GraphQL schema
/// }
/// ```
///
/// # Ignoring fields
///
/// To omit exposing a Rust field in a GraphQL schema, use the `ignore`
/// attribute's argument directly on that field. Ignored fields must implement
/// [`Default`] or have the `default = <expression>` attribute's argument.
///
/// ```rust
/// # use juniper::GraphQLInputObject;
/// #
/// enum System {
///     Cartesian,
/// }
///
/// #[derive(GraphQLInputObject)]
/// struct Point2D {
///     x: f64,
///     y: f64,
///     #[graphql(ignore)]
///     shift: f64, // `Default::default()` impl is used.
///     #[graphql(skip, default = System::Cartesian)]
///     // ^^^^^^^^^^^^^^^^^^^^^^^^^^^
///     // This attribute is required, as we need to be able to construct `Point2D`
///     // from `{ x: 0.0, y: 0.0 }` GraphQL input.
///     system: System,
/// }
/// ```
///
/// [`ScalarValue`]: juniper::ScalarValue
/// [0]: https://spec.graphql.org/October2021#sec-Input-Objects
/// [1]: https://spec.graphql.org/October2021#InputFieldsDefinition
/// [2]: https://spec.graphql.org/October2021#sec-Scalars
#[proc_macro_error]
#[proc_macro_derive(GraphQLInputObject, attributes(graphql))]
pub fn derive_input_object(input: TokenStream) -> TokenStream {
    // Thin entry point: all expansion logic lives in the
    // `graphql_input_object::derive` module; any expansion error aborts
    // the macro invocation with a spanned compile error.
    graphql_input_object::derive::expand(input.into())
        .unwrap_or_abort()
        .into()
}

/// `#[derive(GraphQLEnum)]` macro for deriving a [GraphQL enum][0]
/// implementation for Rust enums.
///
/// The `#[graphql]` helper attribute is used for configuring the derived
/// implementation. Specifying multiple `#[graphql]` attributes on the same
/// definition is totally okay. They all will be treated as a single attribute.
///
/// ```rust
/// use juniper::GraphQLEnum;
///
/// #[derive(GraphQLEnum)]
/// enum Episode {
///     NewHope,
///     Empire,
///     Jedi,
/// }
/// ```
///
/// # Custom name, description and deprecation
///
/// The name of a [GraphQL enum][0] or its [values][1] may be overridden with
/// the `name` attribute's argument. By default, a type name is used or a
/// variant name in `SCREAMING_SNAKE_CASE`.
///
/// The description of a [GraphQL enum][0] or its [values][1] may be specified
/// either with the `description`/`desc` attribute's argument, or with a regular
/// Rust doc comment.
///
/// [GraphQL enum value][1] may be deprecated by specifying the `deprecated`
/// attribute's argument, or with a regular Rust `#[deprecated]` attribute.
///
/// ```rust
/// # #![allow(deprecated)]
/// #
/// # use juniper::GraphQLEnum;
/// #
/// #[derive(GraphQLEnum)]
/// #[graphql(
///     // Rename the type for GraphQL by specifying the name here.
///     name = "AvailableEpisodes",
///     // You may also specify a description here.
///     // If present, doc comments will be ignored.
/// desc = "Possible episodes.", /// )] /// enum Episode { /// /// Doc comment, also acting as description. /// #[deprecated(note = "Don't use it")] /// NewHope, /// /// #[graphql(name = "Jedi", desc = "Arguably the best one in the trilogy")] /// #[graphql(deprecated = "Don't use it")] /// Jedai, /// /// Empire, /// } /// ``` /// /// # Renaming policy /// /// By default, all [GraphQL enum values][1] are renamed in a /// `SCREAMING_SNAKE_CASE` manner (so a `NewHope` Rust enum variant becomes a /// `NEW_HOPE` [value][1] in GraphQL schema, and so on). This complies with /// default GraphQL naming conventions as [demonstrated in spec][0]. /// /// However, if you need for some reason another naming convention, it's /// possible to do so by using the `rename_all` attribute's argument. At the /// moment, it supports the following policies only: `SCREAMING_SNAKE_CASE`, /// `camelCase`, `none` (disables any renaming). /// /// ```rust /// # use juniper::GraphQLEnum; /// # /// #[derive(GraphQLEnum)] /// #[graphql(rename_all = "none")] // disables renaming /// enum Episode { /// NewHope, /// Empire, /// Jedi, /// } /// ``` /// /// # Ignoring enum variants /// /// To omit exposing a Rust enum variant in a GraphQL schema, use the `ignore` /// attribute's argument directly on that variant. Only ignored Rust enum /// variants are allowed to contain fields. /// /// ```rust /// # use juniper::GraphQLEnum; /// # /// #[derive(GraphQLEnum)] /// enum Episode<T> { /// NewHope, /// Empire, /// Jedi, /// #[graphql(ignore)] /// Legends(T), /// } /// ``` /// /// # Custom `ScalarValue` /// /// By default, `#[derive(GraphQLEnum)]` macro generates code, which is generic /// over a [`ScalarValue`] type. This can be changed with the `scalar` /// attribute's argument. 
///
/// ```rust
/// # use juniper::{DefaultScalarValue, GraphQLEnum};
/// #
/// #[derive(GraphQLEnum)]
/// #[graphql(scalar = DefaultScalarValue)]
/// enum Episode {
///     NewHope,
///     Empire,
///     Jedi,
/// }
/// ```
///
/// [`ScalarValue`]: juniper::ScalarValue
/// [0]: https://spec.graphql.org/October2021#sec-Enums
/// [1]: https://spec.graphql.org/October2021#sec-Enum-Value
#[proc_macro_error]
#[proc_macro_derive(GraphQLEnum, attributes(graphql))]
pub fn derive_enum(input: TokenStream) -> TokenStream {
    // Thin entry point: expansion logic lives in `graphql_enum::derive`;
    // errors abort the macro invocation with a spanned compile error.
    graphql_enum::derive::expand(input.into())
        .unwrap_or_abort()
        .into()
}

/// `#[derive(GraphQLScalar)]` macro for deriving a [GraphQL scalar][0]
/// implementation.
///
/// # Transparent delegation
///
/// Sometimes, you want to create a custom [GraphQL scalar][0] type by just
/// wrapping an existing one, inheriting all its behavior. In Rust, this is
/// often called ["`Newtype` pattern"][1]. This may be achieved by providing
/// a `#[graphql(transparent)]` attribute to the definition:
/// ```rust
/// # use juniper::{GraphQLObject, GraphQLScalar};
/// #
/// #[derive(GraphQLScalar)]
/// #[graphql(transparent)]
/// struct UserId(String);
///
/// #[derive(GraphQLScalar)]
/// #[graphql(transparent)]
/// struct DroidId {
///     value: String,
/// }
///
/// #[derive(GraphQLObject)]
/// struct Pair {
///     user_id: UserId,
///     droid_id: DroidId,
/// }
/// ```
///
/// The inherited behaviour may also be customized:
/// ```rust
/// # use juniper::GraphQLScalar;
/// #
/// /// Doc comments are used for the GraphQL type description.
/// #[derive(GraphQLScalar)]
/// #[graphql(
///     // Custom GraphQL name.
///     name = "MyUserId",
///     // Description can also be specified in the attribute.
///     // This will override the doc comment, if one exists.
///     description = "...",
///     // Optional specification URL.
///     specified_by_url = "https://tools.ietf.org/html/rfc4122",
///     // Explicit generic scalar.
///     scalar = S: juniper::ScalarValue,
///     transparent,
/// )]
/// struct UserId(String);
/// ```
///
/// All of the methods inherited from `Newtype`'s field may also be overridden
/// with the attributes described below.
///
/// # Custom resolving
///
/// Customization of a [GraphQL scalar][0] type resolving is possible via
/// `#[graphql(to_output_with = <fn path>)]` attribute:
/// ```rust
/// # use juniper::{GraphQLScalar, ScalarValue, Value};
/// #
/// #[derive(GraphQLScalar)]
/// #[graphql(to_output_with = to_output, transparent)]
/// struct Incremented(i32);
///
/// /// Increments [`Incremented`] before converting into a [`Value`].
/// fn to_output<S: ScalarValue>(v: &Incremented) -> Value<S> {
///     let inc = v.0 + 1;
///     Value::from(inc)
/// }
/// ```
///
/// # Custom parsing
///
/// Customization of a [GraphQL scalar][0] type parsing is possible via
/// `#[graphql(from_input_with = <fn path>)]` attribute:
/// ```rust
/// # use juniper::{DefaultScalarValue, GraphQLScalar, InputValue, ScalarValue};
/// #
/// #[derive(GraphQLScalar)]
/// #[graphql(from_input_with = Self::from_input, transparent)]
/// struct UserId(String);
///
/// impl UserId {
///     /// Checks whether [`InputValue`] is `String` beginning with `id: ` and
///     /// strips it.
///     fn from_input<S: ScalarValue>(
///         input: &InputValue<S>,
///     ) -> Result<Self, String> {
///     //                ^^^^^^ must implement `IntoFieldError`
///         input.as_string_value()
///             .ok_or_else(|| format!("Expected `String`, found: {input}"))
///             .and_then(|str| {
///                 str.strip_prefix("id: ")
///                     .ok_or_else(|| {
///                         format!(
///                             "Expected `UserId` to begin with `id: `, \
///                              found: {input}",
///                         )
///                     })
///             })
///             .map(|id| Self(id.into()))
///     }
/// }
/// ```
///
/// # Custom token parsing
///
/// Customization of which tokens a [GraphQL scalar][0] type should be parsed is
/// possible via `#[graphql(parse_token_with = <fn path>)]` or
/// `#[graphql(parse_token(<types>))]` attributes:
/// ```rust
/// # use juniper::{
/// #     GraphQLScalar, InputValue, ParseScalarResult, ParseScalarValue,
/// #     ScalarValue, ScalarToken, Value,
/// # };
/// #
/// #[derive(GraphQLScalar)]
/// #[graphql(
///     to_output_with = to_output,
///     from_input_with = from_input,
///     parse_token_with = parse_token,
/// )]
/// //  ^^^^^^^^^^^^^^^^ Can be replaced with `parse_token(String, i32)`, which
/// //                   tries to parse as `String` first, and then as `i32` if
/// //                   prior fails.
/// enum StringOrInt {
///     String(String),
///     Int(i32),
/// }
///
/// fn to_output<S: ScalarValue>(v: &StringOrInt) -> Value<S> {
///     match v {
///         StringOrInt::String(s) => Value::scalar(s.to_owned()),
///         StringOrInt::Int(i) => Value::scalar(*i),
///     }
/// }
///
/// fn from_input<S: ScalarValue>(v: &InputValue<S>) -> Result<StringOrInt, String> {
///     v.as_string_value()
///         .map(|s| StringOrInt::String(s.into()))
///         .or_else(|| v.as_int_value().map(StringOrInt::Int))
///         .ok_or_else(|| format!("Expected `String` or `Int`, found: {v}"))
/// }
///
/// fn parse_token<S: ScalarValue>(value: ScalarToken<'_>) -> ParseScalarResult<S> {
///     <String as ParseScalarValue<S>>::from_str(value)
///         .or_else(|_| <i32 as ParseScalarValue<S>>::from_str(value))
/// }
/// ```
/// > __NOTE:__ Once we provide all 3 custom functions, there is no sense to
/// > follow [`Newtype` pattern][1] anymore.
///
/// # All at once
///
/// Instead of providing all custom functions separately, it's possible to
/// provide a module holding the appropriate `to_output()`, `from_input()` and
/// `parse_token()` functions:
/// ```rust
/// # use juniper::{
/// #     GraphQLScalar, InputValue, ParseScalarResult, ParseScalarValue,
/// #     ScalarValue, ScalarToken, Value,
/// # };
/// #
/// #[derive(GraphQLScalar)]
/// #[graphql(with = string_or_int)]
/// enum StringOrInt {
///     String(String),
///     Int(i32),
/// }
///
/// mod string_or_int {
///     use super::*;
///
///     pub(super) fn to_output<S: ScalarValue>(v: &StringOrInt) -> Value<S> {
///         match v {
///             StringOrInt::String(s) => Value::scalar(s.to_owned()),
///             StringOrInt::Int(i) => Value::scalar(*i),
///         }
///     }
///
///     pub(super) fn from_input<S: ScalarValue>(v: &InputValue<S>) -> Result<StringOrInt, String> {
///         v.as_string_value()
///             .map(|s| StringOrInt::String(s.into()))
///             .or_else(|| v.as_int_value().map(StringOrInt::Int))
///             .ok_or_else(|| format!("Expected `String` or `Int`, found: {v}"))
///     }
///
///     pub(super) fn parse_token<S: ScalarValue>(t: ScalarToken<'_>) -> ParseScalarResult<S> {
///         <String as ParseScalarValue<S>>::from_str(t)
///             .or_else(|_| <i32 as ParseScalarValue<S>>::from_str(t))
///     }
/// }
/// #
/// # fn main() {}
/// ```
///
/// A regular `impl` block is also suitable for that:
/// ```rust
/// # use juniper::{
/// #     GraphQLScalar, InputValue, ParseScalarResult, ParseScalarValue,
/// #     ScalarValue, ScalarToken, Value,
/// # };
/// #
/// #[derive(GraphQLScalar)]
/// // #[graphql(with = Self)] <- default behaviour, so can be omitted
/// enum StringOrInt {
///     String(String),
///     Int(i32),
/// }
///
/// impl StringOrInt {
///     fn to_output<S: ScalarValue>(&self) -> Value<S> {
///         match self {
///             Self::String(s) => Value::scalar(s.to_owned()),
///             Self::Int(i) => Value::scalar(*i),
///         }
///     }
///
///     fn from_input<S>(v: &InputValue<S>) -> Result<Self, String>
///     where
///         S: ScalarValue
///     {
///         v.as_string_value()
///             .map(|s| Self::String(s.into()))
///             .or_else(|| v.as_int_value().map(Self::Int))
///             .ok_or_else(|| format!("Expected `String` or `Int`, found: {v}"))
///     }
///
///     fn parse_token<S>(value: ScalarToken<'_>) -> ParseScalarResult<S>
///     where
///         S: ScalarValue
///     {
///         <String as ParseScalarValue<S>>::from_str(value)
///             .or_else(|_| <i32 as ParseScalarValue<S>>::from_str(value))
///     }
/// }
/// #
/// # fn main() {}
/// ```
///
/// At the same time, any custom function still may be specified separately:
/// ```rust
/// # use juniper::{
/// #     GraphQLScalar, InputValue, ParseScalarResult, ScalarValue,
/// #     ScalarToken, Value
/// # };
/// #
/// #[derive(GraphQLScalar)]
/// #[graphql(
///     with = string_or_int,
///     parse_token(String, i32)
/// )]
/// enum StringOrInt {
///     String(String),
///     Int(i32),
/// }
///
/// mod string_or_int {
///     use super::*;
///
///     pub(super) fn to_output<S>(v: &StringOrInt) -> Value<S>
///     where
///         S: ScalarValue,
///     {
///         match v {
///             StringOrInt::String(s) => Value::scalar(s.to_owned()),
///             StringOrInt::Int(i) => Value::scalar(*i),
///         }
///     }
///
///     pub(super) fn from_input<S>(v: &InputValue<S>) -> Result<StringOrInt, String>
///     where
///         S: ScalarValue,
///     {
///         v.as_string_value()
///             .map(|s| StringOrInt::String(s.into()))
///             .or_else(|| v.as_int_value().map(StringOrInt::Int))
///             .ok_or_else(|| format!("Expected `String` or `Int`, found: {v}"))
///     }
///
///     // No need in `parse_token()` function.
/// }
/// #
/// # fn main() {}
/// ```
///
/// # Custom `ScalarValue`
///
/// By default, this macro generates code, which is generic over a
/// [`ScalarValue`] type. Concrete [`ScalarValue`] type may be specified via
/// `#[graphql(scalar = <type>)]` attribute.
///
/// It also may be used to provide additional bounds to the [`ScalarValue`]
/// generic, like the following: `#[graphql(scalar = S: Trait)]`.
///
/// # Additional arbitrary trait bounds
///
/// [GraphQL scalar][0] type implementation may be bound with any additional
/// trait bounds via `#[graphql(where(<bounds>))]` attribute, like the
/// following: `#[graphql(where(S: Trait, Self: fmt::Debug + fmt::Display))]`.
///
/// [0]: https://spec.graphql.org/October2021#sec-Scalars
/// [1]: https://rust-unofficial.github.io/patterns/patterns/behavioural/newtype.html
/// [`ScalarValue`]: juniper::ScalarValue
#[proc_macro_error]
#[proc_macro_derive(GraphQLScalar, attributes(graphql))]
pub fn derive_scalar(input: TokenStream) -> TokenStream {
    // Thin entry point: expansion logic lives in `graphql_scalar::derive`;
    // errors abort the macro invocation with a spanned compile error.
    graphql_scalar::derive::expand(input.into())
        .unwrap_or_abort()
        .into()
}

/// `#[graphql_scalar]` macro is interchangeable with
/// `#[derive(`[`GraphQLScalar`]`)]` macro, and is used for deriving a
/// [GraphQL scalar][0] implementation.
///
/// ```rust
/// # use juniper::graphql_scalar;
/// #
/// /// Doc comments are used for the GraphQL type description.
/// #[graphql_scalar(
///     // Custom GraphQL name.
///     name = "MyUserId",
///     // Description can also be specified in the attribute.
///     // This will override the doc comment, if one exists.
/// description = "...", /// // Optional specification URL. /// specified_by_url = "https://tools.ietf.org/html/rfc4122", /// // Explicit generic scalar. /// scalar = S: juniper::ScalarValue, /// transparent, /// )] /// struct UserId(String); /// ``` /// /// # Foreign types /// /// Additionally, `#[graphql_scalar]` can be used directly on foreign types via /// type alias, without using [`Newtype` pattern][1]. /// /// > __NOTE:__ To satisfy [orphan rules] you should provide local /// > [`ScalarValue`] implementation. /// /// ```rust /// # mod date { /// # use std::{fmt, str::FromStr}; /// # /// # pub struct Date; /// # /// # impl FromStr for Date { /// # type Err = String; /// # /// # fn from_str(_: &str) -> Result<Self, Self::Err> { /// # unimplemented!() /// # } /// # } /// # /// # impl fmt::Display for Date { /// # fn fmt(&self, _: &mut fmt::Formatter<'_>) -> fmt::Result { /// # unimplemented!() /// # } /// # } /// # } /// # /// # use juniper::DefaultScalarValue as CustomScalarValue; /// use juniper::{graphql_scalar, InputValue, ScalarValue, Value}; /// /// #[graphql_scalar( /// with = date_scalar, /// parse_token(String), /// scalar = CustomScalarValue, /// )] /// // ^^^^^^^^^^^^^^^^^ local `ScalarValue` implementation /// type Date = date::Date; /// // ^^^^^^^^^^ type from another crate /// /// mod date_scalar { /// use super::*; /// /// pub(super) fn to_output(v: &Date) -> Value<CustomScalarValue> { /// Value::scalar(v.to_string()) /// } /// /// pub(super) fn from_input(v: &InputValue<CustomScalarValue>) -> Result<Date, String> { /// v.as_string_value() /// .ok_or_else(|| format!("Expected `String`, found: {v}")) /// .and_then(|s| s.parse().map_err(|e| format!("Failed to parse `Date`: {e}"))) /// } /// } /// # /// # fn main() { } /// ``` /// /// [0]: https://spec.graphql.org/October2021#sec-Scalars /// [1]: https://rust-unofficial.github.io/patterns/patterns/behavioural/newtype.html /// [orphan rules]: https://bit.ly/3glAGC2 /// [`GraphQLScalar`]: 
juniper::GraphQLScalar /// [`ScalarValue`]: juniper::ScalarValue #[proc_macro_error] #[proc_macro_attribute] pub fn graphql_scalar(attr: TokenStream, body: TokenStream) -> TokenStream { graphql_scalar::attr::expand(attr.into(), body.into()) .unwrap_or_abort() .into() } /// `#[derive(ScalarValue)]` macro for deriving a [`ScalarValue`] /// implementation. /// /// To derive a [`ScalarValue`] on enum you should mark the corresponding enum /// variants with `as_int`, `as_float`, `as_string`, `into_string`, `as_str` and /// `as_bool` attribute argumentes (names correspond to [`ScalarValue`] required /// methods). /// /// ```rust /// # use std::fmt; /// # /// # use serde::{de, Deserialize, Deserializer, Serialize}; /// # use juniper::ScalarValue; /// # /// #[derive(Clone, Debug, PartialEq, ScalarValue, Serialize)] /// #[serde(untagged)] /// enum MyScalarValue { /// #[value(as_float, as_int)] /// Int(i32), /// Long(i64), /// #[value(as_float)] /// Float(f64), /// #[value( /// into_string, /// as_str, /// as_string = String::clone, /// )] /// // ^^^^^^^^^^^^^ custom resolvers may be provided /// String(String), /// #[value(as_bool)] /// Boolean(bool), /// } /// /// impl<'de> Deserialize<'de> for MyScalarValue { /// fn deserialize<D: Deserializer<'de>>(de: D) -> Result<Self, D::Error> { /// struct Visitor; /// /// impl<'de> de::Visitor<'de> for Visitor { /// type Value = MyScalarValue; /// /// fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result { /// f.write_str("a valid input value") /// } /// /// fn visit_bool<E: de::Error>(self, b: bool) -> Result<Self::Value, E> { /// Ok(MyScalarValue::Boolean(b)) /// } /// /// fn visit_i32<E: de::Error>(self, n: i32) -> Result<Self::Value, E> { /// Ok(MyScalarValue::Int(n)) /// } /// /// fn visit_i64<E: de::Error>(self, n: i64) -> Result<Self::Value, E> { /// if n <= i64::from(i32::MAX) { /// self.visit_i32(n.try_into().unwrap()) /// } else { /// Ok(MyScalarValue::Long(n)) /// } /// } /// /// fn visit_u32<E: de::Error>(self, n: 
u32) -> Result<Self::Value, E> { /// if n <= i32::MAX as u32 { /// self.visit_i32(n.try_into().unwrap()) /// } else { /// self.visit_u64(n.into()) /// } /// } /// /// fn visit_u64<E: de::Error>(self, n: u64) -> Result<Self::Value, E> { /// if n <= i64::MAX as u64 { /// self.visit_i64(n.try_into().unwrap()) /// } else { /// // Browser's `JSON.stringify()` serialize all numbers /// // having no fractional part as integers (no decimal /// // point), so we must parse large integers as floating /// // point, otherwise we would error on transferring large /// // floating point numbers. /// Ok(MyScalarValue::Float(n as f64)) /// } /// } /// /// fn visit_f64<E: de::Error>(self, f: f64) -> Result<Self::Value, E> { /// Ok(MyScalarValue::Float(f)) /// } /// /// fn visit_str<E: de::Error>(self, s: &str) -> Result<Self::Value, E> { /// self.visit_string(s.into()) /// } /// /// fn visit_string<E: de::Error>(self, s: String) -> Result<Self::Value, E> { /// Ok(MyScalarValue::String(s)) /// } /// } /// /// de.deserialize_any(Visitor) /// } /// } /// ``` /// /// [`ScalarValue`]: juniper::ScalarValue #[proc_macro_error] #[proc_macro_derive(ScalarValue, attributes(value))] pub fn derive_scalar_value(input: TokenStream) -> TokenStream { scalar_value::expand_derive(input.into()) .unwrap_or_abort() .into() } /// `#[graphql_interface]` macro for generating a [GraphQL interface][1] /// implementation for traits and its implementers. /// /// Specifying multiple `#[graphql_interface]` attributes on the same definition /// is totally okay. They all will be treated as a single attribute. /// /// [GraphQL interfaces][1] are more like structurally-typed interfaces, while /// Rust's traits are more like type classes. Using `impl Trait` isn't an /// option, so you have to cover all trait's methods with type's fields or /// impl block. 
///
/// Another difference between [GraphQL interface][1] type and Rust trait is
/// that the former serves both as an _abstraction_ and a _value downcastable to
/// concrete implementers_, while in Rust, a trait is an _abstraction only_ and
/// you need a separate type to downcast into a concrete implementer, like enum
/// or [trait object][3], because trait doesn't represent a type itself.
/// Macro uses Rust enums only to represent a value type of a
/// [GraphQL interface][1].
///
/// [GraphQL interface][1] can be represented with struct in case methods don't
/// have any arguments:
///
/// ```rust
/// use juniper::{graphql_interface, GraphQLObject};
///
/// // NOTICE: By default a `CharacterValue` enum is generated by macro to represent values of this
/// //         GraphQL interface.
/// #[graphql_interface(for = Human)] // enumerating all implementers is mandatory
/// struct Character {
///     id: String,
/// }
///
/// #[derive(GraphQLObject)]
/// #[graphql(impl = CharacterValue)] // notice the enum type name, not trait name
/// struct Human {
///     id: String, // this field is used to resolve Character::id
///     home_planet: String,
/// }
/// ```
///
/// Also [GraphQL interface][1] can be represented with trait:
///
/// ```rust
/// use juniper::{graphql_interface, GraphQLObject};
///
/// // NOTICE: By default a `CharacterValue` enum is generated by macro to represent values of this
/// //         GraphQL interface.
/// #[graphql_interface(for = Human)] // enumerating all implementers is mandatory
/// trait Character {
///     fn id(&self) -> &str;
/// }
///
/// #[derive(GraphQLObject)]
/// #[graphql(impl = CharacterValue)] // notice the enum type name, not trait name
/// struct Human {
///     id: String, // this field is used to resolve Character::id
///     home_planet: String,
/// }
/// ```
///
/// > __NOTE:__ Struct or trait representing interface acts only as a blueprint
/// >           for names of methods, their arguments and return type, so isn't
/// >           actually used at runtime. But no-one is stopping you from
/// >           implementing the trait manually for your own usage.
///
/// # Custom name, description, deprecation and argument defaults
///
/// The name of [GraphQL interface][1], its field, or a field argument may be overridden with a
/// `name` attribute's argument. By default, a type name is used or `camelCased` method/argument
/// name.
///
/// The description of [GraphQL interface][1], its field, or a field argument may be specified
/// either with a `description`/`desc` attribute's argument, or with a regular Rust doc comment.
///
/// A field of [GraphQL interface][1] may be deprecated by specifying a `deprecated` attribute's
/// argument, or with regular Rust `#[deprecated]` attribute.
///
/// The default value of a field argument may be specified with a `default` attribute argument (if
/// no exact value is specified then [`Default::default`] is used).
///
/// ```rust
/// # use juniper::graphql_interface;
/// #
/// #[graphql_interface(name = "Character", desc = "Possible episode characters.")]
/// trait Chrctr {
///     #[graphql(name = "id", desc = "ID of the character.")]
///     #[graphql(deprecated = "Don't use it")]
///     fn some_id(
///         &self,
///         #[graphql(name = "number", desc = "Arbitrary number.")]
///         #[graphql(default = 5)]
///         num: i32,
///     ) -> &str;
/// }
///
/// // NOTICE: Rust docs are used as GraphQL description.
/// /// Possible episode characters.
/// #[graphql_interface]
/// trait CharacterWithDocs {
///     /// ID of the character.
///     #[deprecated]
///     fn id(&self, #[graphql(default)] num: i32) -> &str;
/// }
/// ```
///
/// # Interfaces implementing other interfaces
///
/// GraphQL allows implementing interfaces on other interfaces in addition to
/// objects.
///
/// > __NOTE:__ Every interface has to specify all other interfaces/objects it
/// >           implements or is implemented for. Missing one of `for = ` or
/// >           `impl = ` attributes is an understandable compile-time error.
///
/// ```rust
/// # extern crate juniper;
/// use juniper::{graphql_interface, graphql_object, ID};
///
/// #[graphql_interface(for = [HumanValue, Luke])]
/// struct Node {
///     id: ID,
/// }
///
/// #[graphql_interface(impl = NodeValue, for = Luke)]
/// struct Human {
///     id: ID,
///     home_planet: String,
/// }
///
/// struct Luke {
///     id: ID,
/// }
///
/// #[graphql_object(impl = [HumanValue, NodeValue])]
/// impl Luke {
///     fn id(&self) -> &ID {
///         &self.id
///     }
///
///     // As `String` and `&str` aren't distinguished by
///     // GraphQL spec, you can use them interchangeably.
///     // Same is applied for `Cow<'a, str>`.
///     //                  ⌄⌄⌄⌄⌄⌄⌄⌄⌄⌄⌄⌄
///     fn home_planet() -> &'static str {
///         "Tatooine"
///     }
/// }
/// ```
///
/// # GraphQL subtyping and additional `null`able fields
///
/// GraphQL allows implementers (both objects and other interfaces) to return
/// "subtypes" instead of an original value. Basically, this allows you to
/// impose additional bounds on the implementation.
///
/// Valid "subtypes" are:
/// - interface implementer instead of an interface itself:
///   - `I implements T` in place of a `T`;
///   - `Vec<I implements T>` in place of a `Vec<T>`.
/// - non-`null` value in place of a `null`able:
///   - `T` in place of a `Option<T>`;
///   - `Vec<T>` in place of a `Vec<Option<T>>`.
///
/// These rules are recursively applied, so `Vec<Vec<I implements T>>` is a
/// valid "subtype" of a `Option<Vec<Option<Vec<Option<T>>>>>`.
///
/// Also, GraphQL allows implementers to add `null`able fields, which aren't
/// present on an original interface.
///
/// ```rust
/// # extern crate juniper;
/// use juniper::{graphql_interface, graphql_object, ID};
///
/// #[graphql_interface(for = [HumanValue, Luke])]
/// struct Node {
///     id: ID,
/// }
///
/// #[graphql_interface(for = HumanConnectionValue)]
/// struct Connection {
///     nodes: Vec<NodeValue>,
/// }
///
/// #[graphql_interface(impl = NodeValue, for = Luke)]
/// struct Human {
///     id: ID,
///     home_planet: String,
/// }
///
/// #[graphql_interface(impl = ConnectionValue)]
/// struct HumanConnection {
///     nodes: Vec<HumanValue>,
///     //         ^^^^^^^^^^ notice not `NodeValue`
///     // This can happen, because every `Human` is a `Node` too, so we are
///     // just imposing additional bounds, which still can be resolved with
///     // `... on Connection { nodes }`.
/// }
///
/// struct Luke {
///     id: ID,
/// }
///
/// #[graphql_object(impl = [HumanValue, NodeValue])]
/// impl Luke {
///     fn id(&self) -> &ID {
///         &self.id
///     }
///
///     fn home_planet(language: Option<String>) -> &'static str {
///         //         ^^^^^^^^^^^^^^
///         // Notice additional `null`able field, which is missing on `Human`.
///         // Resolving `...on Human { homePlanet }` will provide `None` for
///         // this argument.
///         match language.as_deref() {
///             None | Some("en") => "Tatooine",
///             Some("ko") => "타투인",
///             _ => todo!(),
///         }
///     }
/// }
/// #
/// # fn main() {}
/// ```
///
/// # Renaming policy
///
/// By default, all [GraphQL interface][1] fields and their arguments are renamed
/// via `camelCase` policy (so `fn my_id(&self) -> String` becomes `myId` field
/// in GraphQL schema, and so on). This complies with default GraphQL naming
/// conventions [demonstrated in spec][0].
///
/// However, if you need for some reason to apply another naming convention,
/// it's possible to do so by using `rename_all` attribute's argument. At the
/// moment it supports the following policies only: `SCREAMING_SNAKE_CASE`,
/// `camelCase`, `none` (disables any renaming).
///
/// ```rust
/// # use juniper::{graphql_interface, graphql_object};
/// #
/// #[graphql_interface(for = Human, rename_all = "none")] // disables renaming
/// trait Character {
///     // NOTICE: In the generated GraphQL schema this field and its argument
///     //         will be `detailed_info` and `info_kind`.
///     fn detailed_info(&self, info_kind: String) -> String;
/// }
///
/// struct Human {
///     id: String,
///     home_planet: String,
/// }
///
/// #[graphql_object(impl = CharacterValue, rename_all = "none")]
/// impl Human {
///     fn id(&self) -> &str {
///         &self.id
///     }
///
///     fn home_planet(&self) -> &str {
///         &self.home_planet
///     }
///
///     // You can return `&str` even if trait definition returns `String`.
///     fn detailed_info(&self, info_kind: String) -> &str {
///         (info_kind == "planet")
///             .then_some(&self.home_planet)
///             .unwrap_or(&self.id)
///     }
/// }
/// ```
///
/// # Ignoring trait methods
///
/// To omit some trait method to be assumed as a [GraphQL interface][1] field
/// and ignore it, use an `ignore` attribute's argument directly on that method.
///
/// ```rust
/// # use juniper::graphql_interface;
/// #
/// #[graphql_interface]
/// trait Character {
///     fn id(&self) -> &str;
///
///     #[graphql(ignore)]
///     fn kaboom(&mut self);
/// }
/// ```
///
/// # Custom context
///
/// By default, the generated implementation tries to infer [`Context`] type from signatures of
/// trait methods, and uses [unit type `()`][4] if signatures contain no [`Context`] arguments.
///
/// If [`Context`] type cannot be inferred or is inferred incorrectly, then specify it explicitly
/// with `context` attribute's argument.
///
/// If trait method represents a [GraphQL interface][1] field and its argument is named as `context`
/// or `ctx` then this argument is assumed as [`Context`] and will be omitted in GraphQL schema.
/// Additionally, any argument may be marked as [`Context`] with a `context` attribute's argument.
///
/// ```rust
/// # use std::collections::HashMap;
/// # use juniper::{graphql_interface, graphql_object};
/// #
/// struct Database {
///     humans: HashMap<String, Human>,
///     droids: HashMap<String, Droid>,
/// }
/// impl juniper::Context for Database {}
///
/// #[graphql_interface(for = [Human, Droid], Context = Database)]
/// trait Character {
///     fn id<'db>(&self, ctx: &'db Database) -> Option<&'db str>;
///     fn info<'db>(&self, #[graphql(context)] db: &'db Database) -> Option<&'db str>;
/// }
///
/// struct Human {
///     id: String,
///     home_planet: String,
/// }
/// #[graphql_object(impl = CharacterValue, Context = Database)]
/// impl Human {
///     fn id<'db>(&self, context: &'db Database) -> Option<&'db str> {
///         context.humans.get(&self.id).map(|h| h.id.as_str())
///     }
///     fn info<'db>(&self, #[graphql(context)] db: &'db Database) -> Option<&'db str> {
///         db.humans.get(&self.id).map(|h| h.home_planet.as_str())
///     }
///     fn home_planet(&self) -> &str {
///         &self.home_planet
///     }
/// }
///
/// struct Droid {
///     id: String,
///     primary_function: String,
/// }
/// #[graphql_object(impl = CharacterValue, Context = Database)]
/// impl Droid {
///     fn id<'db>(&self, ctx: &'db Database) -> Option<&'db str> {
///         ctx.droids.get(&self.id).map(|h| h.id.as_str())
///     }
///     fn info<'db>(&self, #[graphql(context)] db: &'db Database) -> Option<&'db str> {
///         db.droids.get(&self.id).map(|h| h.primary_function.as_str())
///     }
///     fn primary_function(&self) -> &str {
///         &self.primary_function
///     }
/// }
/// ```
///
/// # Using `Executor`
///
/// If an [`Executor`] is required in a trait method to resolve a [GraphQL interface][1] field,
/// specify it as an argument named as `executor` or explicitly marked with an `executor`
/// attribute's argument. Such method argument will be omitted in GraphQL schema.
///
/// However, this requires to explicitly parametrize over [`ScalarValue`], as [`Executor`] does so.
///
/// ```rust
/// # use juniper::{graphql_interface, graphql_object, Executor, LookAheadMethods as _, ScalarValue};
/// #
/// // NOTICE: Specifying `ScalarValue` as existing type parameter.
/// #[graphql_interface(for = Human, scalar = S)]
/// trait Character<S: ScalarValue> {
///     fn id<'a>(&self, executor: &'a Executor<'_, '_, (), S>) -> &'a str;
///
///     fn name<'b>(
///         &'b self,
///         #[graphql(executor)] another: &Executor<'_, '_, (), S>,
///     ) -> &'b str;
/// }
///
/// struct Human {
///     id: String,
///     name: String,
/// }
/// #[graphql_object(scalar = S: ScalarValue, impl = CharacterValue<S>)]
/// impl Human {
///     async fn id<'a, S>(&self, executor: &'a Executor<'_, '_, (), S>) -> &'a str
///     where
///         S: ScalarValue,
///     {
///         executor.look_ahead().field_name()
///     }
///
///     async fn name<'b, S>(&'b self, _executor: &Executor<'_, '_, (), S>) -> &'b str {
///         &self.name
///     }
/// }
/// ```
///
/// # Custom `ScalarValue`
///
/// By default, `#[graphql_interface]` macro generates code, which is generic
/// over a [`ScalarValue`] type. This may introduce a problem when at least one
/// of [GraphQL interface][1] implementers is restricted to a concrete
/// [`ScalarValue`] type in its implementation. To resolve such problem, a
/// concrete [`ScalarValue`] type should be specified with a `scalar`
/// attribute's argument.
///
/// ```rust
/// # use juniper::{graphql_interface, DefaultScalarValue, GraphQLObject};
/// #
/// // NOTICE: Removing `Scalar` argument will fail compilation.
/// #[graphql_interface(for = Human, scalar = DefaultScalarValue)] /// trait Character { /// fn id(&self) -> &str; /// } /// /// #[derive(GraphQLObject)] /// #[graphql(impl = CharacterValue, scalar = DefaultScalarValue)] /// struct Human { /// id: String, /// home_planet: String, /// } /// ``` /// /// [`Context`]: juniper::Context /// [`Executor`]: juniper::Executor /// [`ScalarValue`]: juniper::ScalarValue /// [0]: https://spec.graphql.org/October2021 /// [1]: https://spec.graphql.org/October2021#sec-Interfaces /// [2]: https://doc.rust-lang.org/stable/reference/items/traits.html#object-safety /// [3]: https://doc.rust-lang.org/stable/reference/types/trait-object.html /// [4]: https://doc.rust-lang.org/stable/std/primitive.unit.html #[proc_macro_error] #[proc_macro_attribute] pub fn graphql_interface(attr: TokenStream, body: TokenStream) -> TokenStream { self::graphql_interface::attr::expand(attr.into(), body.into()) .unwrap_or_abort() .into() } /// `#[derive(GraphQLInterface)]` macro for generating a [GraphQL interface][1] /// implementation for traits and its implementers. /// /// This macro is applicable only to structs and useful in case [interface][1] /// fields don't have any arguments: /// /// ```rust /// use juniper::{GraphQLInterface, GraphQLObject}; /// /// // NOTICE: By default a `CharacterValue` enum is generated by macro to represent values of this /// // GraphQL interface. /// #[derive(GraphQLInterface)] /// #[graphql(for = Human)] // enumerating all implementers is mandatory /// struct Character { /// id: String, /// } /// /// #[derive(GraphQLObject)] /// #[graphql(impl = CharacterValue)] // notice the enum type name, not trait name /// struct Human { /// id: String, // this field is used to resolve Character::id /// home_planet: String, /// } /// ``` /// /// For more info and possibilities see [`#[graphql_interface]`] macro. 
/// /// [`#[graphql_interface]`]: crate::graphql_interface /// [1]: https://spec.graphql.org/October2021#sec-Interfaces #[proc_macro_error] #[proc_macro_derive(GraphQLInterface, attributes(graphql))] pub fn derive_interface(body: TokenStream) -> TokenStream { self::graphql_interface::derive::expand(body.into()) .unwrap_or_abort() .into() } /// `#[derive(GraphQLObject)]` macro for deriving a [GraphQL object][1] /// implementation for structs. /// /// The `#[graphql]` helper attribute is used for configuring the derived /// implementation. Specifying multiple `#[graphql]` attributes on the same /// definition is totally okay. They all will be treated as a single attribute. /// /// ``` /// use juniper::GraphQLObject; /// /// #[derive(GraphQLObject)] /// struct Query { /// // NOTICE: By default, field names will be converted to `camelCase`. /// // In the generated GraphQL schema this field will be available /// // as `apiVersion`. /// api_version: &'static str, /// } /// ``` /// /// # Custom name, description and deprecation /// /// The name of [GraphQL object][1] or its field may be overridden with a `name` /// attribute's argument. By default, a type name is used or `camelCased` field /// name. /// /// The description of [GraphQL object][1] or its field may be specified either /// with a `description`/`desc` attribute's argument, or with a regular Rust doc /// comment. /// /// A field of [GraphQL object][1] may be deprecated by specifying a /// `deprecated` attribute's argument, or with regular Rust `#[deprecated]` /// attribute. /// /// ``` /// # use juniper::GraphQLObject; /// # /// #[derive(GraphQLObject)] /// #[graphql( /// // Rename the type for GraphQL by specifying the name here. /// name = "Human", /// // You may also specify a description here. /// // If present, doc comments will be ignored. 
///     desc = "Possible episode human.",
/// )]
/// struct HumanWithAttrs {
///     #[graphql(name = "id", desc = "ID of the human.")]
///     #[graphql(deprecated = "Don't use it")]
///     some_id: String,
/// }
///
/// // Rust docs are used as GraphQL description.
/// /// Possible episode human.
/// #[derive(GraphQLObject)]
/// struct HumanWithDocs {
///     // Doc comments also work on fields.
///     /// ID of the human.
///     #[deprecated]
///     id: String,
/// }
/// ```
///
/// # Renaming policy
///
/// By default, all [GraphQL object][1] fields are renamed via `camelCase`
/// policy (so `api_version: String` becomes `apiVersion` field in GraphQL
/// schema, and so on). This complies with default GraphQL naming conventions
/// [demonstrated in spec][0].
///
/// However, if for some reason you need to apply another naming convention,
/// it's possible to do so by using the `rename_all` attribute's argument. At
/// the moment it supports the following policies only: `SCREAMING_SNAKE_CASE`,
/// `camelCase`, `none` (disables any renaming).
///
/// ```
/// # use juniper::GraphQLObject;
/// #
/// #[derive(GraphQLObject)]
/// #[graphql(rename_all = "none")] // disables renaming
/// struct Query {
///     // NOTICE: In the generated GraphQL schema this field will be available
///     //         as `api_version`.
///     api_version: String,
/// }
/// ```
///
/// # Ignoring struct fields
///
/// To omit exposing a struct field in the GraphQL schema, use an `ignore`
/// attribute's argument directly on that field.
///
/// ```
/// # use juniper::GraphQLObject;
/// #
/// #[derive(GraphQLObject)]
/// struct Human {
///     id: String,
///     #[graphql(ignore)]
///     home_planet: String,
/// }
/// ```
///
/// # Custom `ScalarValue`
///
/// By default, `#[derive(GraphQLObject)]` macro generates code, which is
/// generic over a [`ScalarValue`] type. This may introduce a problem when at
/// least one of its fields is restricted to a concrete [`ScalarValue`] type in
/// its implementation.
To resolve such problem, a concrete [`ScalarValue`] type /// should be specified with a `scalar` attribute's argument. /// /// ``` /// # use juniper::{DefaultScalarValue, GraphQLObject}; /// # /// #[derive(GraphQLObject)] /// // NOTICE: Removing `scalar` argument will fail compilation. /// #[graphql(scalar = DefaultScalarValue)] /// struct Human { /// id: String, /// helper: Droid, /// } /// /// #[derive(GraphQLObject)] /// #[graphql(scalar = DefaultScalarValue)] /// struct Droid { /// id: String, /// } /// ``` /// /// [`ScalarValue`]: juniper::ScalarValue /// [1]: https://spec.graphql.org/October2021#sec-Objects #[proc_macro_error] #[proc_macro_derive(GraphQLObject, attributes(graphql))] pub fn derive_object(body: TokenStream) -> TokenStream { self::graphql_object::derive::expand(body.into()) .unwrap_or_abort() .into() } /// `#[graphql_object]` macro for generating a [GraphQL object][1] /// implementation for structs with computable field resolvers (declared via /// a regular Rust `impl` block). /// /// It enables you to write GraphQL field resolvers for a type by declaring a /// regular Rust `impl` block. Under the hood, the macro implements /// [`GraphQLType`]/[`GraphQLValue`] traits. /// /// Specifying multiple `#[graphql_object]` attributes on the same definition /// is totally okay. They all will be treated as a single attribute. /// /// ``` /// use juniper::graphql_object; /// /// // We can declare the type as a plain struct without any members. /// struct Query; /// /// #[graphql_object] /// impl Query { /// // WARNING: Only GraphQL fields can be specified in this `impl` block. /// // If normal methods are required on the struct, they can be /// // defined either in a separate "normal" `impl` block, or /// // marked with `#[graphql(ignore)]` attribute. /// /// // This defines a simple, static field which does not require any /// // context. /// // Such field can return any value that implements `GraphQLType` and /// // `GraphQLValue` traits. 
/// // /// // NOTICE: By default, field names will be converted to `camelCase`. /// // In the generated GraphQL schema this field will be available /// // as `apiVersion`. /// fn api_version() -> &'static str { /// "0.1" /// } /// /// // This field takes two arguments. /// // GraphQL arguments are just regular function parameters. /// // /// // NOTICE: In `juniper`, arguments are non-nullable by default. For /// // optional arguments, you have to specify them as `Option<_>`. /// async fn add(a: f64, b: f64, c: Option<f64>) -> f64 { /// a + b + c.unwrap_or(0.0) /// } /// } /// ``` /// /// # Accessing self /// /// Fields may also have a `self` receiver. /// /// ``` /// # use juniper::graphql_object; /// # /// struct Person { /// first_name: String, /// last_name: String, /// } /// /// #[graphql_object] /// impl Person { /// fn first_name(&self) -> &str { /// &self.first_name /// } /// /// fn last_name(&self) -> &str { /// &self.last_name /// } /// /// fn full_name(&self) -> String { /// self.build_full_name() /// } /// /// // This method is useful only to define GraphQL fields, but is not /// // a field itself, so we ignore it in schema. /// #[graphql(ignore)] /// fn build_full_name(&self) -> String { /// format!("{} {}", self.first_name, self.last_name) /// } /// } /// ``` /// /// # Custom name, description, deprecation and argument defaults /// /// The name of [GraphQL object][1], its field, or a field argument may be /// overridden with a `name` attribute's argument. By default, a type name is /// used or `camelCased` method/argument name. /// /// The description of [GraphQL object][1], its field, or a field argument may /// be specified either with a `description`/`desc` attribute's argument, or /// with a regular Rust doc comment. /// /// A field of [GraphQL object][1] may be deprecated by specifying a /// `deprecated` attribute's argument, or with regular Rust `#[deprecated]` /// attribute. 
/// /// The default value of a field argument may be specified with a `default` /// attribute argument (if no exact value is specified then [`Default::default`] /// is used). /// /// ``` /// # use juniper::graphql_object; /// # /// struct HumanWithAttrs; /// /// #[graphql_object( /// // Rename the type for GraphQL by specifying the name here. /// name = "Human", /// // You may also specify a description here. /// // If present, doc comments will be ignored. /// desc = "Possible episode human.", /// )] /// impl HumanWithAttrs { /// #[graphql(name = "id", desc = "ID of the human.")] /// #[graphql(deprecated = "Don't use it")] /// fn some_id( /// &self, /// #[graphql(name = "number", desc = "Arbitrary number.")] /// // You may specify default values. /// // A default can be any valid expression that yields the right type. /// #[graphql(default = 5)] /// num: i32, /// ) -> &str { /// "Don't use me!" /// } /// } /// /// struct HumanWithDocs; /// /// // Rust docs are used as GraphQL description. /// /// Possible episode human. /// #[graphql_object] /// impl HumanWithDocs { /// // Doc comments also work on fields. /// /// ID of the human. /// #[deprecated] /// fn id( /// &self, /// // If expression is not specified then `Default::default()` is used. /// #[graphql(default)] num: i32, /// ) -> &str { /// "Deprecated" /// } /// } /// ``` /// /// # Renaming policy /// /// By default, all [GraphQL object][1] fields and their arguments are renamed /// via `camelCase` policy (so `fn api_version() -> String` becomes `apiVersion` /// field in GraphQL schema, and so on). This complies with default GraphQL /// naming conventions [demonstrated in spec][0]. /// /// However, if you need for some reason apply another naming convention, it's /// possible to do by using `rename_all` attribute's argument. At the moment it /// supports the following policies only: `SCREAMING_SNAKE_CASE`, `camelCase`, /// `none` (disables any renaming). 
/// /// ``` /// # use juniper::graphql_object; /// # /// struct Query; /// /// #[graphql_object(rename_all = "none")] // disables renaming /// impl Query { /// // NOTICE: In the generated GraphQL schema this field will be available /// // as `api_version`. /// fn api_version() -> &'static str { /// "0.1" /// } /// /// // NOTICE: In the generated GraphQL schema these field arguments will be /// // available as `arg_a` and `arg_b`. /// async fn add(arg_a: f64, arg_b: f64, c: Option<f64>) -> f64 { /// arg_a + arg_b + c.unwrap_or(0.0) /// } /// } /// ``` /// /// # Ignoring methods /// /// To omit some method to be assumed as a [GraphQL object][1] field and ignore /// it, use an `ignore` attribute's argument directly on that method. /// /// ``` /// # use juniper::graphql_object; /// # /// struct Human(String); /// /// #[graphql_object] /// impl Human { /// fn id(&self) -> &str { /// &self.0 /// } /// /// #[graphql(ignore)] /// fn kaboom(&mut self) {} /// } /// ``` /// /// # Custom context /// /// By default, the generated implementation tries to infer [`Context`] type /// from signatures of `impl` block methods, and uses [unit type `()`][4] if /// signatures contains no [`Context`] arguments. /// /// If [`Context`] type cannot be inferred or is inferred incorrectly, then /// specify it explicitly with `context` attribute's argument. /// /// If method argument is named as `context` or `ctx` then this argument is /// assumed as [`Context`] and will be omitted in GraphQL schema. /// Additionally, any argument may be marked as [`Context`] with a `context` /// attribute's argument. 
/// /// ``` /// # use std::collections::HashMap; /// # use juniper::graphql_object; /// # /// struct Database { /// humans: HashMap<String, Human>, /// } /// impl juniper::Context for Database {} /// /// struct Human { /// id: String, /// home_planet: String, /// } /// /// #[graphql_object(context = Database)] /// impl Human { /// fn id<'db>(&self, context: &'db Database) -> Option<&'db str> { /// context.humans.get(&self.id).map(|h| h.id.as_str()) /// } /// fn info<'db>(&self, context: &'db Database) -> Option<&'db str> { /// context.humans.get(&self.id).map(|h| h.home_planet.as_str()) /// } /// } /// ``` /// /// # Using `Executor` /// /// If an [`Executor`] is required in a method to resolve a [GraphQL object][1] /// field, specify it as an argument named as `executor` or explicitly marked /// with an `executor` attribute's argument. Such method argument will be /// omitted in GraphQL schema. /// /// However, this requires to explicitly parametrize over [`ScalarValue`], as /// [`Executor`] does so. /// /// ``` /// # use juniper::{graphql_object, Executor, GraphQLObject, LookAheadMethods as _, ScalarValue}; /// # /// struct Human { /// name: String, /// } /// /// // NOTICE: Specifying `ScalarValue` as custom named type parameter. /// // Its name should be similar to the one used in methods. /// #[graphql_object(scalar = S: ScalarValue)] /// impl Human { /// async fn id<'a, S: ScalarValue>( /// &self, /// executor: &'a Executor<'_, '_, (), S>, /// ) -> &'a str { /// executor.look_ahead().field_name() /// } /// /// fn name<'b, S: ScalarValue>( /// &'b self, /// #[graphql(executor)] _another: &Executor<'_, '_, (), S>, /// ) -> &'b str { /// &self.name /// } /// } /// ``` /// /// # Custom `ScalarValue` /// /// By default, `#[graphql_object]` macro generates code, which is generic over /// a [`ScalarValue`] type. This may introduce a problem when at least one of /// its fields is restricted to a concrete [`ScalarValue`] type in its /// implementation. 
To resolve such problem, a concrete [`ScalarValue`] type /// should be specified with a `scalar` attribute's argument. /// /// ``` /// # use juniper::{graphql_object, DefaultScalarValue, GraphQLObject}; /// # /// struct Human(String); /// /// // NOTICE: Removing `scalar` argument will fail compilation. /// #[graphql_object(scalar = DefaultScalarValue)] /// impl Human { /// fn id(&self) -> &str { /// &self.0 /// } /// /// fn helper(&self) -> Droid { /// Droid { /// id: self.0.clone(), /// } /// } /// } /// /// #[derive(GraphQLObject)] /// #[graphql(scalar = DefaultScalarValue)] /// struct Droid { /// id: String, /// } /// ``` /// /// [`Context`]: juniper::Context /// [`Executor`]: juniper::Executor /// [`GraphQLType`]: juniper::GraphQLType /// [`GraphQLValue`]: juniper::GraphQLValue /// [`ScalarValue`]: juniper::ScalarValue /// [0]: https://spec.graphql.org/October2021 /// [1]: https://spec.graphql.org/October2021#sec-Objects #[proc_macro_error] #[proc_macro_attribute] pub fn graphql_object(attr: TokenStream, body: TokenStream) -> TokenStream { self::graphql_object::attr::expand(attr.into(), body.into()) .unwrap_or_abort() .into() } /// `#[graphql_subscription]` macro for generating a [GraphQL subscription][1] /// implementation for structs with computable field resolvers (declared via /// a regular Rust `impl` block). /// /// It enables you to write GraphQL field resolvers for a type by declaring a /// regular Rust `impl` block. Under the hood, the macro implements /// [`GraphQLType`]/[`GraphQLSubscriptionValue`] traits. /// /// Specifying multiple `#[graphql_subscription]` attributes on the same /// definition is totally okay. They all will be treated as a single attribute. /// /// This macro is similar to [`#[graphql_object]` macro](macro@graphql_object) /// and has all its properties, but requires methods to be `async` and return /// [`Stream`] of values instead of a value itself. 
/// /// ``` /// # use futures::stream::{self, BoxStream}; /// use juniper::graphql_subscription; /// /// // We can declare the type as a plain struct without any members. /// struct Subscription; /// /// #[graphql_subscription] /// impl Subscription { /// // WARNING: Only GraphQL fields can be specified in this `impl` block. /// // If normal methods are required on the struct, they can be /// // defined either in a separate "normal" `impl` block, or /// // marked with `#[graphql(ignore)]` attribute. /// /// // This defines a simple, static field which does not require any /// // context. /// // Such field can return a `Stream` of any value implementing /// // `GraphQLType` and `GraphQLValue` traits. /// // /// // NOTICE: Method must be `async`. /// async fn api_version() -> BoxStream<'static, &'static str> { /// Box::pin(stream::once(async { "0.1" })) /// } /// } /// ``` /// /// [`GraphQLType`]: juniper::GraphQLType /// [`GraphQLSubscriptionValue`]: juniper::GraphQLSubscriptionValue /// [`Stream`]: futures::Stream /// [1]: https://spec.graphql.org/October2021#sec-Subscription #[proc_macro_error] #[proc_macro_attribute] pub fn graphql_subscription(attr: TokenStream, body: TokenStream) -> TokenStream { self::graphql_subscription::attr::expand(attr.into(), body.into()) .unwrap_or_abort() .into() } /// `#[derive(GraphQLUnion)]` macro for deriving a [GraphQL union][1] implementation for enums and /// structs. /// /// The `#[graphql]` helper attribute is used for configuring the derived implementation. Specifying /// multiple `#[graphql]` attributes on the same definition is totally okay. They all will be /// treated as a single attribute. 
///
/// ```
/// use derive_more::From;
/// use juniper::{GraphQLObject, GraphQLUnion};
///
/// #[derive(GraphQLObject)]
/// struct Human {
///     id: String,
///     home_planet: String,
/// }
///
/// #[derive(GraphQLObject)]
/// struct Droid {
///     id: String,
///     primary_function: String,
/// }
///
/// #[derive(From, GraphQLUnion)]
/// enum CharacterEnum {
///     Human(Human),
///     Droid(Droid),
/// }
/// ```
///
/// # Custom name and description
///
/// The name of [GraphQL union][1] may be overridden with a `name` attribute's argument. By default,
/// a type name is used.
///
/// The description of [GraphQL union][1] may be specified either with a `description`/`desc`
/// attribute's argument, or with a regular Rust doc comment.
///
/// ```
/// # use juniper::{GraphQLObject, GraphQLUnion};
/// #
/// # #[derive(GraphQLObject)]
/// # struct Human {
/// #     id: String,
/// #     home_planet: String,
/// # }
/// #
/// # #[derive(GraphQLObject)]
/// # struct Droid {
/// #     id: String,
/// #     primary_function: String,
/// # }
/// #
/// #[derive(GraphQLUnion)]
/// #[graphql(name = "Character", desc = "Possible episode characters.")]
/// enum Chrctr {
///     Human(Human),
///     Droid(Droid),
/// }
///
/// // NOTICE: Rust docs are used as GraphQL description.
/// /// Possible episode characters.
/// #[derive(GraphQLUnion)]
/// enum CharacterWithDocs {
///     Human(Human),
///     Droid(Droid),
/// }
///
/// // NOTICE: `description` argument takes precedence over Rust docs.
/// /// Not a GraphQL description anymore.
/// #[derive(GraphQLUnion)]
/// #[graphql(description = "Possible episode characters.")]
/// enum CharacterWithDescription {
///     Human(Human),
///     Droid(Droid),
/// }
/// ```
///
/// # Custom context
///
/// By default, the generated implementation uses [unit type `()`][4] as [`Context`]. To use a
/// custom [`Context`] type for [GraphQL union][1] variant types or external resolver functions,
/// specify it with `context` attribute's argument.
/// /// ``` /// # use juniper::{GraphQLObject, GraphQLUnion}; /// # /// #[derive(GraphQLObject)] /// #[graphql(Context = CustomContext)] /// struct Human { /// id: String, /// home_planet: String, /// } /// /// #[derive(GraphQLObject)] /// #[graphql(Context = CustomContext)] /// struct Droid { /// id: String, /// primary_function: String, /// } /// /// pub struct CustomContext; /// impl juniper::Context for CustomContext {} /// /// #[derive(GraphQLUnion)] /// #[graphql(Context = CustomContext)] /// enum Character { /// Human(Human), /// Droid(Droid), /// } /// ``` /// /// # Custom `ScalarValue` /// /// By default, this macro generates code, which is generic over a /// [`ScalarValue`] type. This may introduce a problem when at least one of /// [GraphQL union][1] variants is restricted to a concrete [`ScalarValue`] type /// in its implementation. To resolve such problem, a concrete [`ScalarValue`] /// type should be specified with a `scalar` attribute's argument. /// /// ``` /// # use juniper::{DefaultScalarValue, GraphQLObject, GraphQLUnion}; /// # /// #[derive(GraphQLObject)] /// #[graphql(scalar = DefaultScalarValue)] /// struct Human { /// id: String, /// home_planet: String, /// } /// /// #[derive(GraphQLObject)] /// struct Droid { /// id: String, /// primary_function: String, /// } /// /// // NOTICE: Removing `Scalar` argument will fail compilation. /// #[derive(GraphQLUnion)] /// #[graphql(scalar = DefaultScalarValue)] /// enum Character { /// Human(Human), /// Droid(Droid), /// } /// ``` /// /// # Ignoring enum variants /// /// To omit exposing an enum variant in the GraphQL schema, use an `ignore` /// attribute's argument directly on that variant. /// /// > __WARNING__: /// > It's the _library user's responsibility_ to ensure that ignored enum variant is _never_ /// > returned from resolvers, otherwise resolving the GraphQL query will __panic at runtime__. 
/// /// ``` /// # use std::marker::PhantomData; /// use derive_more::From; /// use juniper::{GraphQLObject, GraphQLUnion}; /// /// #[derive(GraphQLObject)] /// struct Human { /// id: String, /// home_planet: String, /// } /// /// #[derive(GraphQLObject)] /// struct Droid { /// id: String, /// primary_function: String, /// } /// /// #[derive(From, GraphQLUnion)] /// enum Character<S> { /// Human(Human), /// Droid(Droid), /// #[from(ignore)] /// #[graphql(ignore)] /// _State(PhantomData<S>), /// } /// ``` /// /// # External resolver functions /// /// To use a custom logic for resolving a [GraphQL union][1] variant, an external resolver function /// may be specified with: /// - either a `with` attribute's argument on an enum variant; /// - or an `on` attribute's argument on an enum/struct itself. /// /// ``` /// # use juniper::{GraphQLObject, GraphQLUnion}; /// # /// #[derive(GraphQLObject)] /// #[graphql(Context = CustomContext)] /// struct Human { /// id: String, /// home_planet: String, /// } /// /// #[derive(GraphQLObject)] /// #[graphql(Context = CustomContext)] /// struct Droid { /// id: String, /// primary_function: String, /// } /// /// pub struct CustomContext { /// droid: Droid, /// } /// impl juniper::Context for CustomContext {} /// /// #[derive(GraphQLUnion)] /// #[graphql(Context = CustomContext)] /// enum Character { /// Human(Human), /// #[graphql(with = Character::droid_from_context)] /// Droid(Droid), /// } /// /// impl Character { /// // NOTICE: The function signature must contain `&self` and `&Context`, /// // and return `Option<&VariantType>`. 
/// fn droid_from_context<'c>(&self, ctx: &'c CustomContext) -> Option<&'c Droid> { /// Some(&ctx.droid) /// } /// } /// /// #[derive(GraphQLUnion)] /// #[graphql(Context = CustomContext)] /// #[graphql(on Droid = CharacterWithoutDroid::droid_from_context)] /// enum CharacterWithoutDroid { /// Human(Human), /// #[graphql(ignore)] /// Droid, /// } /// /// impl CharacterWithoutDroid { /// fn droid_from_context<'c>(&self, ctx: &'c CustomContext) -> Option<&'c Droid> { /// if let Self::Droid = self { /// Some(&ctx.droid) /// } else { /// None /// } /// } /// } /// ``` /// /// # Deriving structs /// /// Specifying external resolver functions is mandatory for using a struct as a [GraphQL union][1], /// because this is the only way to declare [GraphQL union][1] variants in this case. /// /// ``` /// # use std::collections::HashMap; /// # use juniper::{GraphQLObject, GraphQLUnion}; /// # /// #[derive(GraphQLObject)] /// #[graphql(Context = Database)] /// struct Human { /// id: String, /// home_planet: String, /// } /// /// #[derive(GraphQLObject)] /// #[graphql(Context = Database)] /// struct Droid { /// id: String, /// primary_function: String, /// } /// /// struct Database { /// humans: HashMap<String, Human>, /// droids: HashMap<String, Droid>, /// } /// impl juniper::Context for Database {} /// /// #[derive(GraphQLUnion)] /// #[graphql( /// Context = Database, /// on Human = Character::get_human, /// on Droid = Character::get_droid, /// )] /// struct Character { /// id: String, /// } /// /// impl Character { /// fn get_human<'db>(&self, ctx: &'db Database) -> Option<&'db Human>{ /// ctx.humans.get(&self.id) /// } /// /// fn get_droid<'db>(&self, ctx: &'db Database) -> Option<&'db Droid>{ /// ctx.droids.get(&self.id) /// } /// } /// ``` /// /// [`Context`]: juniper::Context /// [`ScalarValue`]: juniper::ScalarValue /// [1]: https://spec.graphql.org/October2021#sec-Unions /// [4]: https://doc.rust-lang.org/stable/std/primitive.unit.html #[proc_macro_error] 
#[proc_macro_derive(GraphQLUnion, attributes(graphql))]
pub fn derive_union(body: TokenStream) -> TokenStream {
    self::graphql_union::derive::expand(body.into())
        .unwrap_or_abort()
        .into()
}

/// `#[graphql_union]` macro for deriving a [GraphQL union][1] implementation for traits.
///
/// Specifying multiple `#[graphql_union]` attributes on the same definition is totally okay. They
/// all will be treated as a single attribute.
///
/// A __trait has to be [object safe][2]__, because schema resolvers will need to return a
/// [trait object][3] to specify a [GraphQL union][1] behind it. The [trait object][3] has to be
/// [`Send`] and [`Sync`].
///
/// ```
/// use juniper::{graphql_union, GraphQLObject};
///
/// #[derive(GraphQLObject)]
/// struct Human {
///     id: String,
///     home_planet: String,
/// }
///
/// #[derive(GraphQLObject)]
/// struct Droid {
///     id: String,
///     primary_function: String,
/// }
///
/// #[graphql_union]
/// trait Character {
///     // NOTICE: The method signature must contain `&self` and return `Option<&VariantType>`.
///     fn as_human(&self) -> Option<&Human> { None }
///     fn as_droid(&self) -> Option<&Droid> { None }
/// }
///
/// impl Character for Human {
///     fn as_human(&self) -> Option<&Human> { Some(&self) }
/// }
///
/// impl Character for Droid {
///     fn as_droid(&self) -> Option<&Droid> { Some(&self) }
/// }
/// ```
///
/// # Custom name and description
///
/// The name of [GraphQL union][1] may be overridden with a `name` attribute's argument. By default,
/// a type name is used.
///
/// The description of [GraphQL union][1] may be specified either with a `description`/`desc`
/// attribute's argument, or with a regular Rust doc comment.
///
/// ```
/// # use juniper::{graphql_union, GraphQLObject};
/// #
/// # #[derive(GraphQLObject)]
/// # struct Human {
/// #     id: String,
/// #     home_planet: String,
/// # }
/// #
/// # #[derive(GraphQLObject)]
/// # struct Droid {
/// #     id: String,
/// #     primary_function: String,
/// # }
/// #
/// #[graphql_union(name = "Character", desc = "Possible episode characters.")]
/// trait Chrctr {
///     fn as_human(&self) -> Option<&Human> { None }
///     fn as_droid(&self) -> Option<&Droid> { None }
/// }
///
/// // NOTICE: Rust docs are used as GraphQL description.
/// /// Possible episode characters.
/// #[graphql_union]
/// trait CharacterWithDocs {
///     fn as_human(&self) -> Option<&Human> { None }
///     fn as_droid(&self) -> Option<&Droid> { None }
/// }
///
/// // NOTICE: `description` argument takes precedence over Rust docs.
/// /// Not a GraphQL description anymore.
/// #[graphql_union(description = "Possible episode characters.")]
/// trait CharacterWithDescription {
///     fn as_human(&self) -> Option<&Human> { None }
///     fn as_droid(&self) -> Option<&Droid> { None }
/// }
/// #
/// # impl Chrctr for Human {}
/// # impl Chrctr for Droid {}
/// # impl CharacterWithDocs for Human {}
/// # impl CharacterWithDocs for Droid {}
/// # impl CharacterWithDescription for Human {}
/// # impl CharacterWithDescription for Droid {}
/// ```
///
/// # Custom context
///
/// By default, the generated implementation tries to infer [`Context`] type from signatures of
/// trait methods, and uses [unit type `()`][4] if signatures contain no [`Context`] arguments.
///
/// If [`Context`] type cannot be inferred or is inferred incorrectly, then specify it explicitly
/// with `context` attribute's argument.
///
/// ```
/// # use std::collections::HashMap;
/// # use juniper::{graphql_union, GraphQLObject};
/// #
/// #[derive(GraphQLObject)]
/// #[graphql(Context = Database)]
/// struct Human {
///     id: String,
///     home_planet: String,
/// }
///
/// #[derive(GraphQLObject)]
/// #[graphql(Context = Database)]
/// struct Droid {
///     id: String,
///     primary_function: String,
/// }
///
/// struct Database {
///     humans: HashMap<String, Human>,
///     droids: HashMap<String, Droid>,
/// }
/// impl juniper::Context for Database {}
///
/// #[graphql_union(Context = Database)]
/// trait Character {
///     fn as_human<'db>(&self, ctx: &'db Database) -> Option<&'db Human> { None }
///     fn as_droid<'db>(&self, ctx: &'db Database) -> Option<&'db Droid> { None }
/// }
///
/// impl Character for Human {
///     fn as_human<'db>(&self, ctx: &'db Database) -> Option<&'db Human> {
///         ctx.humans.get(&self.id)
///     }
/// }
///
/// impl Character for Droid {
///     fn as_droid<'db>(&self, ctx: &'db Database) -> Option<&'db Droid> {
///         ctx.droids.get(&self.id)
///     }
/// }
/// ```
///
/// # Custom `ScalarValue`
///
/// By default, `#[graphql_union]` macro generates code, which is generic over
/// a [`ScalarValue`] type. This may introduce a problem when at least one of
/// [GraphQL union][1] variants is restricted to a concrete [`ScalarValue`] type
/// in its implementation. To resolve such problem, a concrete [`ScalarValue`]
/// type should be specified with a `scalar` attribute's argument.
///
/// ```
/// # use juniper::{graphql_union, DefaultScalarValue, GraphQLObject};
/// #
/// #[derive(GraphQLObject)]
/// #[graphql(scalar = DefaultScalarValue)]
/// struct Human {
///     id: String,
///     home_planet: String,
/// }
///
/// #[derive(GraphQLObject)]
/// struct Droid {
///     id: String,
///     primary_function: String,
/// }
///
/// // NOTICE: Removing the `scalar` argument will fail compilation.
/// #[graphql_union(scalar = DefaultScalarValue)]
/// trait Character {
///     fn as_human(&self) -> Option<&Human> { None }
///     fn as_droid(&self) -> Option<&Droid> { None }
/// }
/// #
/// # impl Character for Human {}
/// # impl Character for Droid {}
/// ```
///
/// # Ignoring trait methods
///
/// To exclude a trait method from being treated as a [GraphQL union][1]
/// variant, mark it with an `ignore` attribute's argument directly on that
/// method.
///
/// ```
/// # use juniper::{graphql_union, GraphQLObject};
/// #
/// # #[derive(GraphQLObject)]
/// # struct Human {
/// #     id: String,
/// #     home_planet: String,
/// # }
/// #
/// # #[derive(GraphQLObject)]
/// # struct Droid {
/// #     id: String,
/// #     primary_function: String,
/// # }
/// #
/// #[graphql_union]
/// trait Character {
///     fn as_human(&self) -> Option<&Human> { None }
///     fn as_droid(&self) -> Option<&Droid> { None }
///     #[graphql(ignore)]
///     fn id(&self) -> &str;
/// }
/// #
/// # impl Character for Human {
/// #     fn id(&self) -> &str { self.id.as_str() }
/// # }
/// #
/// # impl Character for Droid {
/// #     fn id(&self) -> &str { self.id.as_str() }
/// # }
/// ```
///
/// # External resolver functions
///
/// It's not mandatory to use trait methods as [GraphQL union][1] variant resolvers, and instead
/// custom functions may be specified with an `on` attribute's argument.
///
/// ```
/// # use std::collections::HashMap;
/// # use juniper::{graphql_union, GraphQLObject};
/// #
/// #[derive(GraphQLObject)]
/// #[graphql(Context = Database)]
/// struct Human {
///     id: String,
///     home_planet: String,
/// }
///
/// #[derive(GraphQLObject)]
/// #[graphql(Context = Database)]
/// struct Droid {
///     id: String,
///     primary_function: String,
/// }
///
/// struct Database {
///     humans: HashMap<String, Human>,
///     droids: HashMap<String, Droid>,
/// }
/// impl juniper::Context for Database {}
///
/// #[graphql_union(Context = Database)]
/// #[graphql_union(
///     on Human = DynCharacter::get_human,
///     on Droid = get_droid,
/// )]
/// trait Character {
///     #[graphql(ignore)]
///     fn id(&self) -> &str;
/// }
///
/// impl Character for Human {
///     fn id(&self) -> &str { self.id.as_str() }
/// }
///
/// impl Character for Droid {
///     fn id(&self) -> &str { self.id.as_str() }
/// }
///
/// // NOTICE: The trait object is always `Send` and `Sync`.
/// type DynCharacter<'a> = dyn Character + Send + Sync + 'a;
///
/// impl<'a> DynCharacter<'a> {
///     fn get_human<'db>(&self, ctx: &'db Database) -> Option<&'db Human> {
///         ctx.humans.get(self.id())
///     }
/// }
///
/// // NOTICE: Custom resolver function doesn't have to be a method of a type.
/// // It's only a matter of the function signature to match the requirements.
/// fn get_droid<'db>(ch: &DynCharacter<'_>, ctx: &'db Database) -> Option<&'db Droid> {
///     ctx.droids.get(ch.id())
/// }
/// ```
///
/// [`Context`]: juniper::Context
/// [`ScalarValue`]: juniper::ScalarValue
/// [1]: https://spec.graphql.org/October2021#sec-Unions
/// [2]: https://doc.rust-lang.org/stable/reference/items/traits.html#object-safety
/// [3]: https://doc.rust-lang.org/stable/reference/types/trait-object.html
/// [4]: https://doc.rust-lang.org/stable/std/primitive.unit.html
#[proc_macro_error]
#[proc_macro_attribute]
pub fn graphql_union(attr: TokenStream, body: TokenStream) -> TokenStream {
    // Delegate to the shared expansion logic; `unwrap_or_abort` surfaces any
    // macro error as a compile-time diagnostic instead of panicking.
    self::graphql_union::attr::expand(attr.into(), body.into())
        .unwrap_or_abort()
        .into()
}
use boards::board::Board; use boards::nrf51dk::{Nrf51dk}; use Semaphore; use Kernel; pub fn task1(maybe_sem: Option<Semaphore>) { if let Some(sem) = maybe_sem { //print("semaphore from 1"); let board: Nrf51dk = Nrf51dk::new(); match sem { Semaphore::Button1 => { for _i in 0..10 { board.led_toggle(0); Kernel::os_sleep(200); board.led_toggle(1); } }, _ => () } } //print("now waiting from 1"); return Kernel::os_wait(Semaphore::Button1); }
use crate::parsing::Id;
use crate::query::{BuiltInMacro, QueryConstFn, QueryError, Used};
use runestick::{CompileMeta, Item, Span};
use std::sync::Arc;

/// Query interface for the interpreter.
///
/// Abstracts the compile-time query engine so the IR interpreter can resolve
/// metadata, built-in macros, and constant functions without depending on the
/// concrete query implementation.
pub(crate) trait IrQuery {
    /// Query for the given meta.
    ///
    /// Returns `Ok(None)` when no compile meta is available for `item`;
    /// `used` records how the item was accessed for the query's bookkeeping.
    fn query_meta(
        &mut self,
        span: Span,
        item: &Item,
        used: Used,
    ) -> Result<Option<CompileMeta>, QueryError>;

    /// Get resolved internal macro with the given id.
    ///
    /// The `span` is used for error reporting when the id cannot be resolved.
    fn builtin_macro_for(
        &self,
        span: Span,
        id: Option<Id>,
    ) -> Result<Arc<BuiltInMacro>, QueryError>;

    /// Query for the constant function related to the given id.
    fn const_fn_for(&self, span: Span, id: Option<Id>) -> Result<Arc<QueryConstFn>, QueryError>;
}
#[cfg(all(not(target_arch = "wasm32"), test))] mod test; use anyhow::*; use num_bigint::BigInt; use liblumen_alloc::erts::exception::{self, *}; use liblumen_alloc::erts::process::trace::Trace; use liblumen_alloc::erts::process::Process; use liblumen_alloc::erts::term::prelude::*; /// `-/1` prefix operator. #[native_implemented::function(erlang:-/1)] pub fn result(process: &Process, number: Term) -> exception::Result<Term> { match number.decode().unwrap() { TypedTerm::SmallInteger(small_integer) => { let number_isize: isize = small_integer.into(); let negated_isize = -number_isize; let negated_number: Term = process.integer(negated_isize); Ok(negated_number) } TypedTerm::BigInteger(big_integer) => { let big_int: &BigInt = big_integer.as_ref().into(); let negated_big_int = -big_int; let negated_number = process.integer(negated_big_int); Ok(negated_number) } TypedTerm::Float(float) => { let number_f64: f64 = float.into(); let negated_f64: f64 = -number_f64; let negated_number = process.float(negated_f64); Ok(negated_number) } _ => Err(badarith( Trace::capture(), Some(anyhow!("number ({}) is neither an integer nor a float", number).into()), ) .into()), } }
// svd2rust-style generated accessors for the INI5_READ_QOS register.
// The register holds a single 4-bit field, AR_QOS, in bits [3:0].

#[doc = "Reader of register INI5_READ_QOS"]
pub type R = crate::R<u32, super::INI5_READ_QOS>;
#[doc = "Writer for register INI5_READ_QOS"]
pub type W = crate::W<u32, super::INI5_READ_QOS>;
#[doc = "Register INI5_READ_QOS `reset()`'s with value 0x04"]
impl crate::ResetValue for super::INI5_READ_QOS {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        // Hardware reset value of the register.
        0x04
    }
}
#[doc = "Reader of field `AR_QOS`"]
pub type AR_QOS_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `AR_QOS`"]
pub struct AR_QOS_W<'a> {
    w: &'a mut W,
}
impl<'a> AR_QOS_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Clear bits [3:0], then insert the (masked) 4-bit value.
        self.w.bits = (self.w.bits & !0x0f) | ((value as u32) & 0x0f);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:3 - Read channel QoS setting"]
    #[inline(always)]
    pub fn ar_qos(&self) -> AR_QOS_R {
        AR_QOS_R::new((self.bits & 0x0f) as u8)
    }
}
impl W {
    #[doc = "Bits 0:3 - Read channel QoS setting"]
    #[inline(always)]
    pub fn ar_qos(&mut self) -> AR_QOS_W {
        AR_QOS_W { w: self }
    }
}
//! Some color utilities that are useful for implementing color transforms,
//! anaglyph modes, etc.

use std::ops::Add;

/// Represents an 8-bit-per-channel RGB color.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Color {
    r: u8,
    g: u8,
    b: u8,
}

impl From<(f32, f32, f32)> for Color {
    /// Convert a tuple of RGB in the [0, 1] range into a color.
    ///
    /// Each channel is scaled by 255 and truncated to `u8`.
    fn from((r, g, b): (f32, f32, f32)) -> Color {
        Color {
            r: (r * 255.0) as u8,
            g: (g * 255.0) as u8,
            b: (b * 255.0) as u8,
        }
    }
}

impl From<u32> for Color {
    /// Convert a packed integer into a color, where the components are RGB
    /// from most significant to least significant byte.
    fn from(i: u32) -> Color {
        Color {
            r: ((i >> 16) & 0xFF) as u8,
            g: ((i >> 8) & 0xFF) as u8,
            b: (i & 0xFF) as u8,
        }
    }
}

impl From<Color> for u32 {
    /// Pack a color into `0x00RRGGBB` format.
    ///
    /// Implemented as `From` (rather than the previous `Into`) so the
    /// standard-library blanket impl still provides `Color: Into<u32>`
    /// for all existing callers.
    fn from(c: Color) -> u32 {
        ((c.r as u32) << 16) | ((c.g as u32) << 8) | (c.b as u32)
    }
}

impl From<&Color> for u32 {
    /// Pack a color reference into `0x00RRGGBB` format.
    fn from(c: &Color) -> u32 {
        ((c.r as u32) << 16) | ((c.g as u32) << 8) | (c.b as u32)
    }
}

impl From<(u8, u8, u8)> for Color {
    /// Convert a tuple of u8's (R, G, B) into a color.
    fn from((r, g, b): (u8, u8, u8)) -> Color {
        Color { r, g, b }
    }
}

impl From<Color> for (u8, u8, u8) {
    /// Convert a color into a tuple of u8's (R, G, B).
    fn from(c: Color) -> (u8, u8, u8) {
        (c.r, c.g, c.b)
    }
}

impl From<&Color> for (u8, u8, u8) {
    /// Convert a color reference into a tuple of u8's (R, G, B).
    fn from(c: &Color) -> (u8, u8, u8) {
        (c.r, c.g, c.b)
    }
}

impl Color {
    /// Scale a color by a uniform constant factor: each channel is
    /// multiplied by `u / 255` (so `u == 255` is the identity).
    pub fn scale_by(&self, u: u8) -> Color {
        // Widen to u32 so the multiplication cannot overflow.
        let s = u as u32;
        Color {
            r: (s * self.r as u32 / 255) as u8,
            g: (s * self.g as u32 / 255) as u8,
            b: (s * self.b as u32 / 255) as u8,
        }
    }
}

impl Add for Color {
    type Output = Color;

    /// Component-wise saturating addition (channels clamp at 255).
    fn add(self, other: Color) -> Color {
        Color {
            r: self.r.saturating_add(other.r),
            g: self.g.saturating_add(other.g),
            b: self.b.saturating_add(other.b),
        }
    }
}
// Copyright 2021 Chiral Ltd. // Licensed under the Apache-2.0 license (https://opensource.org/licenses/Apache-2.0) // This file may not be copied, modified, or distributed // except according to those terms. //! //! Generation of canonicalized SMILES for molecules //! Use GIVP orbits directly, assume that GIVP orbits were symmetric orbits //! //! # Examples: //! //! ```rust //! use graph_canonicalization; //! //! assert_eq!(graph_canonicalization::ext::molecule::get_canon_smiles(&String::from("c1ccccc1CN")), "NCc1ccccc1".to_string()); //! ``` //! use crate::core; use super::atom; use super::bond; use super::extendable_hash; use super::molecule; /// Implement the Trait required by smiles_writer from crate chem impl chem::smiles_writer::TraitMoleculeForSMILES for molecule::Molecule { fn get_neighbours_of_atom(&self, atom: &usize) -> Vec<usize> { self.atoms[*atom].bonds.iter() .map(|b| b.tid) .collect() } fn get_bond_symbol(&self, atom_1: &usize, atom_2: &usize) -> String { let b: Vec<bond::Bond> = self.atoms[*atom_1].bonds.clone().into_iter() .filter(|b| b.tid == *atom_2) .collect(); if b.len() > 0 { b[0].bond_char() } else { String::from("No such bond") } } fn get_atom_symbol(&self, atom: &usize) -> String { self.atoms[*atom].kind.to_string() } fn get_atom_ranking(&self, atom: &usize, rankings: &Vec<usize>) -> usize { rankings[*atom] } fn count_of_atoms(&self) -> usize { self.atoms.len() } } /// Break symmetry to get canonical numbering fn get_canon_numbering( vv: &core::graph::VertexVec<atom::Atom>, orbits_givp: &Vec<core::orbit_ops::Orbit>, numbering_givp: &Vec<usize> ) -> Vec<usize> { if orbits_givp.len() == 0 { numbering_givp.clone() } else { let mut orbits = orbits_givp.clone(); let mut numbering = numbering_givp.clone(); while orbits.len() > 0 { orbits.sort_by_key(|ob| numbering[ob[0]]); orbits.reverse(); for i in 1..orbits[0].len() { numbering[orbits[0][i]] = numbering[orbits[0][0]] - 1; } core::givp::run::<extendable_hash::AtomExtendable>(&vv, &mut 
numbering, &mut orbits); } numbering } } /// Generate canonical SMILES from the input SMILES string /// stereochemistry is not taken into consideration at the moment pub fn get_canon_smiles(smiles: &String) -> String { let mol = molecule::Molecule::from_smiles(smiles); let vv = core::graph::VertexVec::init((0..mol.atoms.len()).collect(), mol.atoms.clone()); let mut orbits: Vec<core::orbit_ops::Orbit> = vec![]; let mut numbering: Vec<usize> = vec![]; core::givp::run::<extendable_hash::AtomExtendable>(&vv, &mut numbering, &mut orbits); numbering = get_canon_numbering(&vv, &orbits, &numbering); chem::smiles_writer::write_smiles_for_mol(&mol, &numbering) } #[cfg(test)] mod test_ext_mol_canon_smiles { use super::*; use rand::Rng; #[test] fn test_get_canon_smiles() { type InputType1 = String; type ReturnType = String; let test_data: Vec<(InputType1, ReturnType)> = vec![ ( "c1ccccc1CN", "NCc1ccccc1" ), ( "COc1ccc(C(=O)C[n+]2c(C)n(Cc3c4c(cc5c3OCC5)OCC4)c3ccccc32)cc1", "COc1ccc(cc1)C(=O)C[n+]1c2ccccc2n(Cc2c3CCOc3cc3CCOc23)c1C", ), ( r#"Cc1[nH]c2ccccc2c1/C=C1\SC(=N)N(c2nccs2)C1=O"#, // 926178 "Cc1[nH]c2ccccc2c1C=C1SC(=N)N(C1=O)c1nccs1" ), ( "C(C)(C)CCNCCC(C)(C)", "CC(C)CCNCCC(C)C" ) ].into_iter().map(|td| (td.0.to_string(), td.1.to_string())).collect(); let mut rng = rand::thread_rng(); for td in test_data.iter() { let (smiles, results) = td; assert_eq!(get_canon_smiles(smiles), *results); // random test for _ in 0..5 { let mol = molecule::Molecule::from_smiles(smiles); let times = rng.gen_range(0..5); let mut random_rankings: Vec<usize> = (0..mol.atoms.len()).collect(); for _ in 0..times{ let atom_1 = rng.gen_range(0..mol.atoms.len()); let atom_2 = rng.gen_range(0..mol.atoms.len()); let temp = random_rankings[atom_1]; random_rankings[atom_1] = random_rankings[atom_2]; random_rankings[atom_2] = temp; } let random_smiles = chem::smiles_writer::write_smiles_for_mol(&mol, &random_rankings); assert_eq!(get_canon_smiles(&random_smiles), *results); } } } }
use crate::{DocBase, VarType};

// Long-form description for the Pine `time` *function* entry.
// NOTE: `&'static` is redundant in a `const` item's type (the lifetime is
// implied), so these are declared as plain `&str`.
const TIME_DESC: &str = r#"
Function time returns UNIX time of current bar for the specified resolution and session or NaN if time point is out-of-session.
"#;

// Example script for `time`.
// NOTE(review): this constant is currently not referenced by `gen_doc` below
// (the Function entry has `example: ""`) — confirm whether it was meant to be
// wired in.
const TIME_EXAMPLE: &str = r#"
```pine
study("Time", overlay=true)

// Try this on chart AAPL,1
timeinrange(res, sess) => not na(time(res, sess)) ? 1 : 0
plot(timeinrange("1", "1300-1400"), color=color.red)

// This plots 1.0 at every start of 10 minute bar on a 1 minute chart:
newbar(res) => change(time(res)) == 0 ? 0 : 1
plot(newbar("10"))
```

While setting up a session you can specify not just the hours and minutes but also the days of the week that will be included in that session.
If the days aren't specified, the session is considered to have been set from Monday to Friday (Saturday and Sunday are excluded as the weekend days), i.e. "1100-2000" is the same as "1100-2000:23456".
For example, on a symbol that is traded seven days a week with the 24-hour trading session the following script will not color Saturdays and Sundays.
"#;

// Argument documentation for the `time` function.
const TIME_ARGUMENTS: &str = r#"
**resolution (string)**
Resolution.

**session (string)**
Session specification. Optional argument, session of the symbol used by default.
"#;

/// Build the documentation entries for the Pine `time` built-in: one entry
/// for the `time` variable and one for the `time(resolution, session)`
/// function.
pub fn gen_doc() -> Vec<DocBase> {
    vec![
        DocBase {
            var_type: VarType::Variable,
            name: "time",
            signatures: vec![],
            description: "Current bar time in UNIX format. It is the number of milliseconds that have elapsed since 00:00:00 UTC, 1 January 1970.",
            example: "",
            returns: "",
            arguments: "",
            remarks: "",
            links: "",
        },
        DocBase {
            var_type: VarType::Function,
            name: "time",
            signatures: vec![],
            description: TIME_DESC,
            example: "",
            returns: "UNIX time.",
            arguments: TIME_ARGUMENTS,
            remarks: "UNIX time is the number of milliseconds that have elapsed since 00:00:00 UTC, 1 January 1970.",
            links: "",
        },
    ]
}
#![cfg_attr(feature = "pedantic", warn(clippy::pedantic))]
#![warn(clippy::use_self)]
#![warn(clippy::map_flatten)]
#![warn(clippy::map_unwrap_or)]
#![warn(deprecated_in_future)]
#![warn(future_incompatible)]
#![warn(noop_method_call)]
#![warn(unreachable_pub)]
#![warn(missing_debug_implementations)]
#![warn(rust_2018_compatibility)]
#![warn(rust_2021_compatibility)]
#![warn(rust_2018_idioms)]
#![warn(unused)]
#![deny(warnings)]

use std::time::Duration;

use chrono_humanize::HumanTime;
use clap::{Parser, Subcommand};
use crates_io_api::{CrateResponse, SyncClient, Version};

use crates::CrateResponseExt;

mod crates;

// Entry point: parse the CLI (invoked as `cargo info …`, hence the nested
// `Info` subcommand) and dispatch to the reporter.
fn main() -> anyhow::Result<()> {
    let cli = Cli::parse();

    match cli.info {
        Info::Info { report } => report.report(),
    }
}

/// Top-level CLI wrapper; the actual options live in the `Info` subcommand's
/// flattened `Report`.
#[derive(Debug, Parser)]
struct Cli {
    #[clap(subcommand)]
    info: Info,
}

#[derive(Debug, Subcommand)]
enum Info {
    Info {
        #[clap(flatten)]
        report: Report,
    },
}

/// Selection of which crate fields to report, plus the list of crates.
#[derive(Debug, Parser)]
struct Report {
    #[arg(long, short)]
    /// Report documentation URL
    documentation: bool,

    #[arg(long, short = 'D')]
    /// Report number of crate downloads
    downloads: bool,

    #[arg(long, short = 'H')]
    /// Report crate homepage URL
    homepage: bool,

    #[arg(long, short)]
    /// Report crate repository URL
    repository: bool,

    #[arg(long, short)]
    /// Report more details
    verbose: bool,

    #[arg(long, short = 'V', action = clap::ArgAction::Count)]
    /// Report version history of the crate (5 last versions), twice for full history
    versions: u8,

    #[arg(long, short)]
    /// Report crate features
    features: bool,

    #[arg(id = "crate", required = true)]
    /// crates to report
    crates: Vec<String>,
}

impl Report {
    // Fetch every requested crate from crates.io, failing fast on the first
    // API error, then print a report for each.
    fn report(&self) -> anyhow::Result<()> {
        // NOTE(review): 10 ms between requests is far below the 1-req/s
        // crates.io crawler policy — confirm this rate limit is intentional.
        let client = SyncClient::new(
            "cargo-info (cargo-info@mountall.com)",
            Duration::from_millis(10),
        )?;

        self.crates
            .iter()
            .map(|krate| client.get_crate(krate))
            .collect::<Result<Vec<_>, _>>()?
            .into_iter()
            .for_each(|krate| self.report_crate(krate));

        Ok(())
    }

    // Print only the fields selected by flags; when no field flag was given,
    // fall back to the default summary (`show_crate`).
    fn report_crate(&self, krate: CrateResponse) {
        let mut default = true;

        if self.documentation {
            default = false;
            fmtools::println!({ krate.documentation() });
        }

        if self.downloads {
            default = false;
            fmtools::println!({ krate.downloads() });
        }

        if self.homepage {
            default = false;
            fmtools::println!({ krate.homepage() });
        }

        if self.repository {
            default = false;
            fmtools::println!({ krate.repository() });
        }

        if self.versions > 0 {
            default = false;
            fmtools::println!({
                print_last_versions(krate.versions(), self.version_limit(), None, self.verbose)
            });
        }

        if self.features {
            default = false;
            fmtools::println!({ krate.show_features(self.verbose) });
        }

        if default {
            show_crate(krate, 5, self.verbose);
        }

        println!();
    }

    // Number of versions to list: 0 = off, `-V` = last 5, `-VV` (or more) = all.
    fn version_limit(&self) -> usize {
        match self.versions {
            0 => 0,
            1 => 5,
            _ => usize::MAX,
        }
    }
}

// Default summary. `verbose` swaps the short version history for extra
// metadata (license, features, keywords, creation date).
fn show_crate(krate: CrateResponse, limit: usize, verbose: bool) {
    fmtools::println!(
        if verbose {
            {"Crate:":<16}{krate.name()} "\n"
            {"Version:":<16}{krate.max_version()} "\n"
            {"Description:":<16}{krate.description()} "\n"
            {"Downloads:":<16}{krate.downloads()} "\n"
            {"Homepage:":<16}{krate.homepage()} "\n"
            {"Documentation:":<16}{krate.documentation()} "\n"
            {"Repository:":<16}{krate.repository()} "\n"
            {"License:":<16}{krate.license()} "\n"
            {"Features:":<16}{krate.show_features(false)} "\n"
            {"Keywords:":<16}{krate.show_keywords()} "\n"
            {"Created:":<16}{krate.created_at():#} "\n"
            {"Updated:":<16}{krate.updated_at():#}
        } else {
            {"Crate:":<16}{krate.name()} "\n"
            {"Version:":<16}{krate.max_version()} "\n"
            {"Description:":<16}{krate.description()} "\n"
            {"Downloads:":<16}{krate.downloads()} "\n"
            {"Homepage:":<16}{krate.homepage()} "\n"
            {"Documentation:":<16}{krate.documentation()} "\n"
            {"Repository:":<16}{krate.repository()} "\n"
            {"Updated:":<16}{krate.updated_at():#} "\n"
            {"Version history:":<16} "\n\n"
            {print_last_versions(krate.versions(), limit, " ", false)}
        }
    )
}

// Render up to `limit` versions as an aligned VERSION/RELEASED/DOWNLOADS
// table, each line prefixed by `prefix` (used for indentation).
fn print_last_versions<'a>(
    versions: &[Version],
    limit: usize,
    prefix: impl Into<Option<&'a str>>,
    _verbose: bool,
) -> String {
    let prefix = prefix.into().unwrap_or_default();

    let version_width = versions
        .iter()
        .take(limit)
        .map(|v| v.num.len())
        .max()
        .unwrap_or(0);
    // Make sure the column header is taken into account and add a gutter.
    let version_width = std::cmp::max(version_width, "VERSION".len()) + 2;

    fmtools::format!(
        {prefix}{"VERSION",version_width:<1$}{"RELEASED":<16}{"DOWNLOADS":<11}"\n\n"
        for version in versions.iter().take(limit) {
            let created = HumanTime::from(version.created_at);
            {prefix}{version.num,version_width:<1$}{created:<16}{version.downloads:<11}
            if version.yanked {
                "\t\t(yanked)"
            }
            "\n"
        }
        let length = versions.len();
        if limit < length {
            "\n"
            {prefix} "... use -VV to show all "{length}" versions"
        }
    )
    // Consider adding some more useful information in verbose mode
}
/// Target platform/runtime a build is compiled for. Each variant (except
/// `Unknown`) selects a matching header, runtime assembly, and — for the
/// CKB-VM targets — a linker script in `Config`.
#[derive(Clone, Debug)]
pub enum Platform {
    PosixX8664,
    PosixX8664Spectest,
    PosixX8664Wasi,
    CKBVMAssemblyScript,
    CKBVMSpectest,
    Unknown,
}

// A Config specifies the global config for a build.
#[derive(Clone, Debug)]
pub struct Config {
    // Path of cc, usually the result of "$ which gcc".
    pub binary_cc: String,
    // Path of the wavm binary used to precompile wasm.
    pub binary_wavm: String,
    // Platform flag and their files.
    // The file contents below are embedded at compile time via include_str!
    // so the tool can materialize them into the build directory at runtime.
    pub platform: Platform,
    pub platform_ckb_vm_assemblyscript_h: &'static str,
    pub platform_ckb_vm_assemblyscript_lds: &'static str,
    pub platform_ckb_vm_assemblyscript_runtime_s: &'static str,
    pub platform_ckb_vm_spectest_h: &'static str,
    pub platform_ckb_vm_spectest_lds: &'static str,
    pub platform_ckb_vm_spectest_runtime_s: &'static str,
    pub platform_posix_x86_64_h: &'static str,
    pub platform_posix_x86_64_runtime_s: &'static str,
    pub platform_posix_x86_64_spectest_h: &'static str,
    pub platform_posix_x86_64_spectest_runtime_s: &'static str,
    pub platform_posix_x86_64_wasi_h: &'static str,
    pub platform_posix_x86_64_wasi_runtime_s: &'static str,
    pub platform_common_wavm_h: &'static str,
    pub platform_common_wasi_h: &'static str,
}

impl Default for Config {
    // Defaults: `gcc`/`wavm` resolved from PATH, no platform selected, and
    // all embedded platform support files.
    fn default() -> Self {
        Config {
            binary_cc: String::from("gcc"),
            binary_wavm: String::from("wavm"),
            platform: Platform::Unknown,
            platform_ckb_vm_assemblyscript_h: include_str!("./platform/ckb_vm_assemblyscript.h"),
            platform_ckb_vm_assemblyscript_lds: include_str!("./platform/ckb_vm_assemblyscript.lds"),
            platform_ckb_vm_assemblyscript_runtime_s: include_str!("./platform/ckb_vm_assemblyscript_runtime.S"),
            platform_ckb_vm_spectest_h: include_str!("./platform/ckb_vm_spectest.h"),
            platform_ckb_vm_spectest_lds: include_str!("./platform/ckb_vm_spectest.lds"),
            platform_ckb_vm_spectest_runtime_s: include_str!("./platform/ckb_vm_spectest_runtime.S"),
            platform_posix_x86_64_h: include_str!("./platform/posix_x86_64.h"),
            platform_posix_x86_64_runtime_s: include_str!("./platform/posix_x86_64_runtime.S"),
            platform_posix_x86_64_spectest_h: include_str!("./platform/posix_x86_64_spectest.h"),
            platform_posix_x86_64_spectest_runtime_s: include_str!("./platform/posix_x86_64_spectest_runtime.S"),
            platform_posix_x86_64_wasi_h: include_str!("./platform/posix_x86_64_wasi.h"),
            platform_posix_x86_64_wasi_runtime_s: include_str!("./platform/posix_x86_64_wasi_runtime.S"),
            platform_common_wavm_h: include_str!("./platform/common/wavm.h"),
            platform_common_wasi_h: include_str!("./platform/common/wasi.h"),
        }
    }
}

#[derive(Clone, Debug, Default)]
pub struct Middle {
    // Config is the global config for a build.
    pub config: Config,
    // CurrentDir is the caller's working directory, or the empty string to use
    // the current directory of the running process.
    pub current_dir: std::path::PathBuf,
    // Source wasm/wast file.
    pub file: std::path::PathBuf,
    // File stem is the source wasm/wast file's name without extension.
    // Example: file_stem(helloworld.wasm) => helloworld
    pub file_stem: String,
    // Template path.
    pub path_prog: std::path::PathBuf, // xx_build
    pub path_platform_code_folder: std::path::PathBuf, // xx_build/platform
    pub path_platform_common_code_folder: std::path::PathBuf, // xx_build/platform/common
    pub path_platform_common_wavm_h: std::path::PathBuf, // xx_build/platform/common/wavm.h
    pub path_platform_common_wasi_h: std::path::PathBuf, // xx_build/platform/common/wasi.h
    pub path_platform_header: std::path::PathBuf, // xx_build/platform/xx.h
    pub path_platform_lds: Option<std::path::PathBuf>, // xx_build/platform/xx.lds
    pub path_platform_s: std::path::PathBuf, // xx_build/platform/xx_runtime.s
    pub path_object: std::path::PathBuf, // xx_build/xx.o
    pub path_glue: std::path::PathBuf, // xx_build/xx_glue.h
    pub path_c: std::path::PathBuf, // xx_build/xx.c
    pub path_precompiled: std::path::PathBuf, // xx_build/xx_precompiled.wasm
    pub path_output: std::path::PathBuf, // xx
}

impl Middle {
    // Set global config for middle.
    pub fn init_config(&mut self, config: Config) {
        self.config = config;
    }

    // Initialize the compilation environment.
    //
    // Derives every build path from the input file's location and stem, then
    // selects the platform header / linker script / runtime assembly based on
    // the configured platform. Only the CKB-VM targets use a custom .lds.
    pub fn init_file<P: AsRef<std::path::Path>>(&mut self, p: P) {
        self.current_dir = std::env::current_dir().unwrap();
        self.file = p.as_ref().to_path_buf();
        self.file_stem = self.file.file_stem().unwrap().to_str().unwrap().to_string();
        // Build directory lives next to the input file: <stem>_build/
        self.path_prog = self.file.with_file_name(format!("{}_build", self.file_stem));
        self.path_platform_code_folder = self.path_prog.join("platform");
        self.path_platform_common_code_folder = self.path_platform_code_folder.join("common");
        match self.config.platform {
            Platform::CKBVMAssemblyScript => {
                self.path_platform_header =
                    self.path_platform_code_folder.join("ckb_vm_assemblyscript.h");
                self.path_platform_lds =
                    Some(self.path_platform_code_folder.join("ckb_vm_assemblyscript.lds"));
                self.path_platform_s =
                    self.path_platform_code_folder.join("ckb_vm_assemblyscript_runtime.S");
            }
            Platform::CKBVMSpectest => {
                self.path_platform_header =
                    self.path_platform_code_folder.join("ckb_vm_spectest.h");
                self.path_platform_lds =
                    Some(self.path_platform_code_folder.join("ckb_vm_spectest.lds"));
                self.path_platform_s =
                    self.path_platform_code_folder.join("ckb_vm_spectest_runtime.S");
            }
            Platform::PosixX8664 => {
                self.path_platform_header =
                    self.path_platform_code_folder.join("posix_x86_64.h");
                self.path_platform_s =
                    self.path_platform_code_folder.join("posix_x86_64_runtime.S");
            }
            Platform::PosixX8664Spectest => {
                self.path_platform_header =
                    self.path_platform_code_folder.join("posix_x86_64_spectest.h");
                self.path_platform_s =
                    self.path_platform_code_folder.join("posix_x86_64_spectest_runtime.S");
            }
            Platform::PosixX8664Wasi => {
                self.path_platform_header =
                    self.path_platform_code_folder.join("posix_x86_64_wasi.h");
                self.path_platform_s =
                    self.path_platform_code_folder.join("posix_x86_64_wasi_runtime.S");
            }
            Platform::Unknown => {
                // A platform must be chosen before init_file is called.
                panic!("unreachable");
            }
        }
        self.path_object = self.path_prog.join(self.file_stem.clone() + ".o");
        self.path_glue = self.path_prog.join(self.file_stem.clone() + "_glue.h");
        self.path_c = self.path_prog.join(self.file_stem.clone() + ".c");
        self.path_precompiled = self.path_prog.join(self.file_stem.clone() + "_precompiled.wasm");
        self.path_platform_common_wavm_h = self.path_platform_common_code_folder.join("wavm.h");
        self.path_platform_common_wasi_h = self.path_platform_common_code_folder.join("wasi.h");
        self.path_output = self.path_prog.join(self.file_stem.clone());
    }
}
use crate::BLOCK_224_256_LEN as BLOCK_LEN; use crate::PAD_AND_LENGTH_224_256_LEN as PAD_AND_LENGTH_LEN; use crate::STATE_224_256_LEN as STATE_LEN; use crate::WORD_224_256_LEN as WORD_LEN; use crate::{inner_full_pad, inner_pad, process_block_224_256, zero_block}; use crate::{Error, Hash, Sha2}; /// Digest length in bytes (256-bits) pub const DIGEST_LEN: usize = 32; // Initial state words const INITIAL_STATE: [u32; STATE_LEN] = [ 0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19, ]; /// Implementation of the SHA-256 transform pub struct Sha256 { state: [u32; STATE_LEN], block: [u8; BLOCK_LEN], index: usize, bit_index: usize, total_len: u64, hash: Hash, } impl Sha2 for Sha256 { type Block = [u8; BLOCK_LEN]; type Digest = [u8; DIGEST_LEN]; type State = [u32; STATE_LEN]; /// Create a newly initialized SHA-256 transform fn new() -> Self { Self { state: INITIAL_STATE, block: [0_u8; BLOCK_LEN], index: 0, bit_index: 0, total_len: 0, hash: Hash::Sha256, } } fn encode_state(&self) -> Self::Digest { let mut res = [0_u8; DIGEST_LEN]; for (i, word) in self.state.iter().enumerate() { res[i * WORD_LEN..(i + 1) * WORD_LEN].copy_from_slice(word.to_be_bytes().as_ref()); } res } fn process_block(&mut self) { process_block_224_256(&mut self.state, &mut self.block, &mut self.index); } fn pad(&mut self) -> Result<(), Error> { inner_pad( &mut self.block, self.index, self.bit_index, self.total_len as u128, &self.hash, ) } fn full_pad(&mut self) { inner_full_pad(&mut self.block, self.total_len as u128, &self.hash); } fn index(&self) -> usize { self.index } fn increment_index(&mut self) { self.index += 1; } fn bit_index(&self) -> usize { self.bit_index } fn set_bit_index(&mut self, index: usize) { self.bit_index = index; } fn total_len(&self) -> u128 { self.total_len as u128 } fn increment_total_len(&mut self, len: usize) -> Result<(), Error> { let len = len as u64; if len + self.total_len > u64::MAX { return Err(Error::InvalidLength); } // 
increase the total length of the message self.total_len += len; Ok(()) } fn hash(&self) -> &Hash { &self.hash } fn initial_state(&mut self) { self.state.copy_from_slice(INITIAL_STATE.as_ref()); } fn block_mut(&mut self) -> &mut [u8] { &mut self.block } fn zero_block(&mut self) { zero_block(&mut self.block); } fn reset_counters(&mut self) { self.index = 0; self.bit_index = 0; self.total_len = 0; } } #[cfg(test)] mod tests { use super::*; #[test] fn rfc_vector1() { let input = b"abc"; let expected = [ 0xba, 0x78, 0x16, 0xbf, 0x8f, 0x01, 0xcf, 0xea, 0x41, 0x41, 0x40, 0xde, 0x5d, 0xae, 0x22, 0x23, 0xb0, 0x03, 0x61, 0xa3, 0x96, 0x17, 0x7a, 0x9c, 0xb4, 0x10, 0xff, 0x61, 0xf2, 0x00, 0x15, 0xad, ]; let mut sha = Sha256::new(); sha.input(input.as_ref()).unwrap(); let digest = sha.finalize().unwrap(); assert_eq!(digest, expected); } #[test] fn rfc_vector2() { let input = b"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"; let expected = [ 0x24, 0x8d, 0x6a, 0x61, 0xd2, 0x06, 0x38, 0xb8, 0xe5, 0xc0, 0x26, 0x93, 0x0c, 0x3e, 0x60, 0x39, 0xa3, 0x3c, 0xe4, 0x59, 0x64, 0xff, 0x21, 0x67, 0xf6, 0xec, 0xed, 0xd4, 0x19, 0xdb, 0x06, 0xc1, ]; let mut sha = Sha256::new(); sha.input(input.as_ref()).unwrap(); let digest = sha.finalize().unwrap(); assert_eq!(digest, expected); } #[test] fn rfc_vector3() { let input = b"a"; let expected = [ 0xcd, 0xc7, 0x6e, 0x5c, 0x99, 0x14, 0xfb, 0x92, 0x81, 0xa1, 0xc7, 0xe2, 0x84, 0xd7, 0x3e, 0x67, 0xf1, 0x80, 0x9a, 0x48, 0xa4, 0x97, 0x20, 0x0e, 0x04, 0x6d, 0x39, 0xcc, 0xc7, 0x11, 0x2c, 0xd0, ]; let mut sha = Sha256::new(); for _i in 0..1_000_000 { sha.input(input.as_ref()).unwrap(); } let digest = sha.finalize().unwrap(); assert_eq!(digest, expected); } #[test] fn rfc_vector4() { let input = b"0123456701234567012345670123456701234567012345670123456701234567"; let expected = [ 0x59, 0x48, 0x47, 0x32, 0x84, 0x51, 0xbd, 0xfa, 0x85, 0x05, 0x62, 0x25, 0x46, 0x2c, 0xc1, 0xd8, 0x67, 0xd8, 0x77, 0xfb, 0x38, 0x8d, 0xf0, 0xce, 0x35, 0xf2, 0x5a, 0xb5, 0x56, 
0x2b, 0xfb, 0xb5, ]; let mut sha = Sha256::new(); for _i in 0..10 { sha.input(input.as_ref()).unwrap(); } let digest = sha.finalize().unwrap(); assert_eq!(digest, expected); } // FIXME: skip vector 5, 7, and 9 since the `final_bits` API is unimplemented #[test] fn rfc_vector5() { let input = []; let expected = [ 0xd6, 0xd3, 0xe0, 0x2a, 0x31, 0xa8, 0x4a, 0x8c, 0xaa, 0x97, 0x18, 0xed, 0x6c, 0x20, 0x57, 0xbe, 0x09, 0xdb, 0x45, 0xe7, 0x82, 0x3e, 0xb5, 0x07, 0x9c, 0xe7, 0xa5, 0x73, 0xa3, 0x76, 0x0f, 0x95, ]; let mut sha = Sha256::new(); sha.input(input.as_ref()).unwrap(); let digest = sha.final_bits(0x68, 5).unwrap(); assert_eq!(digest, expected); } #[test] fn rfc_vector6() { let input = b"\x19"; let expected = [ 0x68, 0xaa, 0x2e, 0x2e, 0xe5, 0xdf, 0xf9, 0x6e, 0x33, 0x55, 0xe6, 0xc7, 0xee, 0x37, 0x3e, 0x3d, 0x6a, 0x4e, 0x17, 0xf7, 0x5f, 0x95, 0x18, 0xd8, 0x43, 0x70, 0x9c, 0x0c, 0x9b, 0xc3, 0xe3, 0xd4, ]; let mut sha = Sha256::new(); sha.input(input.as_ref()).unwrap(); let digest = sha.finalize().unwrap(); assert_eq!(digest, expected); } #[test] fn rfc_vector7() { let input = [ 0xbe, 0x27, 0x46, 0xc6, 0xdb, 0x52, 0x76, 0x5f, 0xdb, 0x2f, 0x88, 0x70, 0x0f, 0x9a, 0x73, ]; let expected = [ 0x77, 0xec, 0x1d, 0xc8, 0x9c, 0x82, 0x1f, 0xf2, 0xa1, 0x27, 0x90, 0x89, 0xfa, 0x09, 0x1b, 0x35, 0xb8, 0xcd, 0x96, 0x0b, 0xca, 0xf7, 0xde, 0x01, 0xc6, 0xa7, 0x68, 0x07, 0x56, 0xbe, 0xb9, 0x72, ]; let mut sha = Sha256::new(); sha.input(input.as_ref()).unwrap(); let digest = sha.final_bits(0x60, 3).unwrap(); assert_eq!(digest, expected); } #[test] fn rfc_vector8() { let input = b"\xe3\xd7\x25\x70\xdc\xdd\x78\x7c\xe3\x88\x7a\xb2\xcd\x68\x46\x52"; let expected = [ 0x17, 0x5e, 0xe6, 0x9b, 0x02, 0xba, 0x9b, 0x58, 0xe2, 0xb0, 0xa5, 0xfd, 0x13, 0x81, 0x9c, 0xea, 0x57, 0x3f, 0x39, 0x40, 0xa9, 0x4f, 0x82, 0x51, 0x28, 0xcf, 0x42, 0x09, 0xbe, 0xab, 0xb4, 0xe8, ]; let mut sha = Sha256::new(); sha.input(input.as_ref()).unwrap(); let digest = sha.finalize().unwrap(); assert_eq!(digest, expected); } 
#[test] fn rfc_vector9() { let input = [ 0x3e, 0x74, 0x03, 0x71, 0xc8, 0x10, 0xc2, 0xb9, 0x9f, 0xc0, 0x4e, 0x80, 0x49, 0x07, 0xef, 0x7c, 0xf2, 0x6b, 0xe2, 0x8b, 0x57, 0xcb, 0x58, 0xa3, 0xe2, 0xf3, 0xc0, 0x07, 0x16, 0x6e, 0x49, 0xc1, 0x2e, 0x9b, 0xa3, 0x4c, 0x01, 0x04, 0x06, 0x91, 0x29, 0xea, 0x76, 0x15, 0x64, 0x25, 0x45, 0x70, 0x3a, 0x2b, 0xd9, 0x01, 0xe1, 0x6e, 0xb0, 0xe0, 0x5d, 0xeb, 0xa0, 0x14, 0xeb, 0xff, 0x64, 0x06, 0xa0, 0x7d, 0x54, 0x36, 0x4e, 0xff, 0x74, 0x2d, 0xa7, 0x79, 0xb0, 0xb3, ]; let expected = [ 0x3e, 0x9a, 0xd6, 0x46, 0x8b, 0xbb, 0xad, 0x2a, 0xc3, 0xc2, 0xcd, 0xc2, 0x92, 0xe0, 0x18, 0xba, 0x5f, 0xd7, 0x0b, 0x96, 0x0c, 0xf1, 0x67, 0x97, 0x77, 0xfc, 0xe7, 0x08, 0xfd, 0xb0, 0x66, 0xe9, ]; let mut sha = Sha256::new(); sha.input(input.as_ref()).unwrap(); let digest = sha.final_bits(0xa0, 3).unwrap(); assert_eq!(digest, expected); } #[test] fn rfc_vector10() { let input = [ 0x83, 0x26, 0x75, 0x4e, 0x22, 0x77, 0x37, 0x2f, 0x4f, 0xc1, 0x2b, 0x20, 0x52, 0x7a, 0xfe, 0xf0, 0x4d, 0x8a, 0x05, 0x69, 0x71, 0xb1, 0x1a, 0xd5, 0x71, 0x23, 0xa7, 0xc1, 0x37, 0x76, 0x00, 0x00, 0xd7, 0xbe, 0xf6, 0xf3, 0xc1, 0xf7, 0xa9, 0x08, 0x3a, 0xa3, 0x9d, 0x81, 0x0d, 0xb3, 0x10, 0x77, 0x7d, 0xab, 0x8b, 0x1e, 0x7f, 0x02, 0xb8, 0x4a, 0x26, 0xc7, 0x73, 0x32, 0x5f, 0x8b, 0x23, 0x74, 0xde, 0x7a, 0x4b, 0x5a, 0x58, 0xcb, 0x5c, 0x5c, 0xf3, 0x5b, 0xce, 0xe6, 0xfb, 0x94, 0x6e, 0x5b, 0xd6, 0x94, 0xfa, 0x59, 0x3a, 0x8b, 0xeb, 0x3f, 0x9d, 0x65, 0x92, 0xec, 0xed, 0xaa, 0x66, 0xca, 0x82, 0xa2, 0x9d, 0x0c, 0x51, 0xbc, 0xf9, 0x33, 0x62, 0x30, 0xe5, 0xd7, 0x84, 0xe4, 0xc0, 0xa4, 0x3f, 0x8d, 0x79, 0xa3, 0x0a, 0x16, 0x5c, 0xba, 0xbe, 0x45, 0x2b, 0x77, 0x4b, 0x9c, 0x71, 0x09, 0xa9, 0x7d, 0x13, 0x8f, 0x12, 0x92, 0x28, 0x96, 0x6f, 0x6c, 0x0a, 0xdc, 0x10, 0x6a, 0xad, 0x5a, 0x9f, 0xdd, 0x30, 0x82, 0x57, 0x69, 0xb2, 0xc6, 0x71, 0xaf, 0x67, 0x59, 0xdf, 0x28, 0xeb, 0x39, 0x3d, 0x54, 0xd6, ]; let expected = [ 0x97, 0xdb, 0xca, 0x7d, 0xf4, 0x6d, 0x62, 0xc8, 0xa4, 0x22, 0xc9, 0x41, 0xdd, 0x7e, 0x83, 0x5b, 0x8a, 
0xd3, 0x36, 0x17, 0x63, 0xf7, 0xe9, 0xb2, 0xd9, 0x5f, 0x4f, 0x0d, 0xa6, 0xe1, 0xcc, 0xbc, ]; let mut sha = Sha256::new(); sha.input(input.as_ref()).unwrap(); let digest = sha.finalize().unwrap(); assert_eq!(digest, expected); } }
//! Substrate utils fn main() { substrate_build_script_utils::generate_cargo_keys(); substrate_build_script_utils::rerun_if_git_head_changed(); }
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

use crate::common::{KeyAttributes, KeyRequestType, KeyType, KmsKey};
use crate::crypto_provider::{AsymmetricProviderKey, CryptoProvider};
use fidl_fuchsia_kms::{AsymmetricKeyAlgorithm, AsymmetricPrivateKeyRequest, KeyOrigin, Status};

/// A KMS asymmetric private key backed by a crypto-provider key object.
pub struct KmsAsymmetricKey {
    // The provider-specific key handle that performs the actual crypto.
    provider_key: Box<dyn AsymmetricProviderKey>,
    // User-visible name of the key.
    key_name: String,
    // Set once `delete` succeeds; reported through `is_deleted`.
    deleted: bool,
    // Whether the key was generated locally or imported.
    key_origin: KeyOrigin,
}

impl KmsKey for KmsAsymmetricKey {
    /// Returns the user-visible key name.
    fn get_key_name(&self) -> &str {
        &self.key_name
    }

    /// True once `delete` has succeeded on this key object.
    fn is_deleted(&self) -> bool {
        self.deleted
    }

    /// Dispatches a generic key request to the asymmetric handler.
    fn handle_request(&self, req: KeyRequestType) -> Result<(), fidl::Error> {
        // Irrefutable pattern: KeyRequestType currently has a single variant
        // (otherwise this `let` would not compile).
        let KeyRequestType::AsymmetricPrivateKeyRequest(req) = req;
        self.handle_asym_request(req)?;
        Ok(())
    }

    /// Always `AsymmetricPrivateKey` for this key class.
    fn get_key_type(&self) -> KeyType {
        KeyType::AsymmetricPrivateKey
    }

    /// Name of the crypto provider backing this key.
    fn get_provider_name(&self) -> &str {
        self.provider_key.get_provider_name()
    }

    /// Provider-serialized key material, as stored on disk.
    fn get_key_data(&self) -> Vec<u8> {
        self.provider_key.get_key_data()
    }

    /// Deletes the underlying provider key and marks this object deleted.
    /// Provider failures are mapped to `Status::InternalError`.
    fn delete(&mut self) -> Result<(), Status> {
        // Inform the provider to delete key.
        self.provider_key.delete().map_err(|_| Status::InternalError)?;
        self.deleted = true;
        Ok(())
    }
}

impl KmsAsymmetricKey {
    /// Create a new KmsAsymmetricKey object.
    ///
    /// # Arguments
    ///
    /// * `provider` - The crypto provider to generate the key with.
    /// * `key_name` - The name for the new key.
    /// * `key_algorithm` - The algorithm for the new key.
    pub fn new<C: CryptoProvider + ?Sized>(
        provider: &C,
        key_name: &str,
        key_algorithm: AsymmetricKeyAlgorithm,
    ) -> Result<Self, Status> {
        // Ask the provider to generate a provider key.
        let provider_key = provider.generate_asymmetric_key(key_algorithm, key_name).map_err(
            debug_err_fn!(Status::InternalError, "Failed to generate asymmetric key: {:?}"),
        )?;
        // Create the key object.
        Ok(KmsAsymmetricKey {
            provider_key,
            key_name: key_name.to_string(),
            deleted: false,
            key_origin: KeyOrigin::Generated,
        })
    }

    /// Parse the key data and generate a new KmsAsymmetricKey object.
    ///
    /// # Arguments
    ///
    /// * `key_name` - The name for the new key.
    /// * `key_attributes` - The attributes for the new key.
    pub fn parse_key(key_name: &str, key_attributes: KeyAttributes) -> Result<Self, Status> {
        if key_attributes.key_type != KeyType::AsymmetricPrivateKey {
            // The key is a different type.
            return Err(Status::KeyNotFound);
        }
        // It is safe to unwrap here because KeyType would always be AsymmetricPrivateKey when
        // we called read_key_attributes_from_file in key_manager and asymmetric_key_algorithm
        // would never be None.
        let key_algorithm = key_attributes.asymmetric_key_algorithm.unwrap();
        // Ask the provider to parse the key.
        let provider_key = key_attributes
            .provider
            .parse_asymmetric_key(&key_attributes.key_data, key_algorithm)
            .map_err(debug_err_fn!(
                Status::ParseKeyError,
                "Failed to parse asymmetric key data: {:?}"
            ))?;
        // Create the key object.
        Ok(KmsAsymmetricKey {
            provider_key,
            key_name: key_name.to_string(),
            deleted: false,
            key_origin: key_attributes.key_origin,
        })
    }

    /// Import an asymmetric private key and returns the imported key object.
    ///
    /// # Arguments
    ///
    /// * `provider` - The crypto provider to parse the private key data.
    /// * `data` - The PKCS8 DER encoded private key data.
    /// * `key_name` - The name for the new key.
    /// * `key_algorithm` - The algorithm for the new key.
    pub fn import_key<C: CryptoProvider + ?Sized>(
        provider: &C,
        data: &[u8],
        key_name: &str,
        key_algorithm: AsymmetricKeyAlgorithm,
    ) -> Result<Self, Status> {
        let provider_key = provider.import_asymmetric_key(data, key_algorithm, key_name).map_err(
            debug_err_fn!(Status::ParseKeyError, "Failed to import asymmetric key: {:?}"),
        )?;
        // Create the key object.
        Ok(KmsAsymmetricKey {
            provider_key,
            key_name: key_name.to_string(),
            deleted: false,
            key_origin: KeyOrigin::Imported,
        })
    }

    /// The algorithm the provider key was created with.
    pub fn get_key_algorithm(&self) -> AsymmetricKeyAlgorithm {
        self.provider_key.get_key_algorithm()
    }

    /// Whether this key was generated locally or imported.
    pub fn get_key_origin(&self) -> KeyOrigin {
        self.key_origin
    }

    /// Handles an asymmetric private key FIDL request.
    /// NOTE(review): currently a stub — the request is accepted and dropped.
    pub fn handle_asym_request(
        &self,
        _req: AsymmetricPrivateKeyRequest,
    ) -> Result<(), fidl::Error> {
        Ok(())
    }
}
//! This module contains a set of performance debugging tools, akin to telemetry.
//!
//! To assist in the optimization of the program the following tools are provided.
//! The macros `timeit!` and `DEBUG_timeit!` should be used when timing a block of code.
//! `timeit!` is a macro that will print the elapsed time and cycle counts at the end of the code
//! block.
//! `DEBUG_timeit!` is a macro that should be used in conjunction with a statically initialized
//! `DebugStruct`, `reset_frame_debugging` and `draw_debuginfo`. `draw_debuginfo` will render debug data to the given
//! canvas. `DebugStruct` collects timings across a specified frame such that `draw_debuginfo` can deliver averaged statistics.
//!
//! When utilizing the module paste the following into the file of interest.
//! ```
//!#[macro_use]
//!use crate::{timeit, DEBUG_timeit};
//!use crate::debug_tools::*;
//! ```
#![allow(unused)]

use std::collections::HashMap;
use std::time::{Instant, Duration};

use crate::misc::StopWatch;
use crate::WindowCanvas;
use crate::inputhandler::*;
use crate::rendertools::*;

/// Number of frames over which per-frame timings are averaged.
pub const MAX_AVG_N : usize = 3;

/// A (call count, accumulated duration) pair for one frame slot.
#[derive(Copy, Clone)]
pub struct CountsAndDuration{
    pub count: usize,
    pub duration: Duration,
}

/// Accumulated timing data for one named `DEBUG_timeit!` tag.
pub struct DebugStruct{
    pub instant: StopWatch,              // restarted at the top of each timed block
    pub count: usize,                    // total invocations since init
    pub count_per_frame: usize,          // invocations in the current frame
    pub cpu_counts: u64,                 // NOTE(review): written nowhere in this module
    pub durations: Duration,             // total accumulated time since init
    pub durations_per_frame: [CountsAndDuration; MAX_AVG_N], // ring buffer, indexed by frame % MAX_AVG_N
    pub _cycles: u64,                    // NOTE(review): unused placeholder
}

impl DebugStruct{
    /// Returns a zeroed `DebugStruct` with a fresh stopwatch.
    pub fn new()->DebugStruct{
        DebugStruct{
            instant: StopWatch::new(), //replace with stopwatch
            count: 0,
            count_per_frame: 0,
            cpu_counts: 0,
            durations: Duration::new(0,0),
            durations_per_frame: [CountsAndDuration{ count: 0, duration: Duration::new(0,0) }; MAX_AVG_N],
            _cycles: 0
        }
    }
}

/// Reads the CPU timestamp counter via `rdtsc` (x86).
#[cfg(target_arch = "x86")]
pub fn get_clock_cycle()->u64{unsafe{
    let mut rt = 0;
    rt = core::arch::x86::_rdtsc();
    return rt;
}}

/// Reads the CPU timestamp counter via `rdtsc` (x86_64).
#[cfg(target_arch = "x86_64")]
pub fn get_clock_cycle()->u64{unsafe{
    let mut rt = 0;
    rt = core::arch::x86_64::_rdtsc();
    return rt;
}}

/// Placement and colors for the on-canvas debug overlay.
pub struct DebugRenderStruct{
    bkg_color : [f32; 4],
    font_color: [f32; 4],
    font_size: f32,
    x: i32,
    y: i32,
    width : i32,
    height: i32,
}

// Global, single-threaded debug state. NOTE(review): `static mut` — every
// access in this module is wrapped in `unsafe` and assumes no concurrent use.
pub static mut GLOBAL_DEBUG_TIMEIT : Option<HashMap<String, DebugStruct>> = None;
pub static mut GLOBAL_DEBUG_COUNT : usize = 0;
pub static mut GLOBAL_DEBUG_RENDER : DebugRenderStruct = DebugRenderStruct{
    bkg_color: [0f32, 0f32, 0f32, 0.5],
    font_color: [1f32, 1f32, 1f32, 1f32],
    font_size: 22f32,
    x: 0, y: 0, width: 100, height: 100};

/// This function initializes the debug render.
///
/// If this function is set with `None` the render will be set with defaults.
pub fn init_debugging( rect: Option<[i32; 4]>){unsafe{
    GLOBAL_DEBUG_TIMEIT = Some(HashMap::new());
    match rect {
        Some(_rect)=>{
            // Unpack [x, y, width, height] into the render struct.
            let [x, y, w, h] = _rect;
            GLOBAL_DEBUG_RENDER.x = x;
            GLOBAL_DEBUG_RENDER.y = y;
            GLOBAL_DEBUG_RENDER.width = w;
            GLOBAL_DEBUG_RENDER.height = h;
        },
        None=>{}
    }
}}

/// This function resets the timing and counting information carried by a global struct.
///
/// This function should be set at the frame end, the end of `circuit_sim` as to accurately
/// calculate count and timing averages.
pub fn reset_frame_debugging(){unsafe{
    GLOBAL_DEBUG_COUNT += 1;
    let db = GLOBAL_DEBUG_TIMEIT.as_mut().expect("GLOBAL_DEBUG not init.");
    // Clear only the ring-buffer slot that the new frame will write into.
    for (k, v) in db.iter_mut(){
        v.count_per_frame = 0;
        v.durations_per_frame[GLOBAL_DEBUG_COUNT % MAX_AVG_N].duration = Duration::new(0, 0);
        v.durations_per_frame[GLOBAL_DEBUG_COUNT % MAX_AVG_N].count = 0;
    }
}}

/// This macro will print elapsed time and clock cycles over the course of a code block.
///
/// ## Example
/// ```
/// timeit!{{
///     //Do some work
/// }}
///
/// timeit!{"name of block", {
///     //Do some work
///     //The name of timeit block will be printed before results.
/// }}
/// ```
#[macro_export]
macro_rules! timeit{
    ($x:block) => {
        let DEBUG_time = std::time::Instant::now();
        let DEBUG_cycles = get_clock_cycle();
        $x;
        let DEBUG_cycles = get_clock_cycle() - DEBUG_cycles;
        let DEBUG_duration = DEBUG_time.elapsed();
        println!("Debug timeit : {:?} {}", DEBUG_duration, DEBUG_cycles);
    };
    ($s:expr , $x:block)=>{
        let DEBUG_time = std::time::Instant::now();
        let DEBUG_cycles = get_clock_cycle();
        $x;
        let DEBUG_cycles = get_clock_cycle() - DEBUG_cycles;
        let DEBUG_duration = DEBUG_time.elapsed();
        println!("Debug timeit | {} : {:?} {}", $s, DEBUG_duration, DEBUG_cycles);
    };
}

/// This macro will store elapsed time and clock cycles over the course of a code block.
///
/// ## Example
/// ```
///
/// DEBUG_timeit!{"name of block", {
///     //Do some work
///     //The name of timeit block will be set for results.
/// }}
/// ```
///
#[macro_export]
macro_rules! DEBUG_timeit{
    ($x:tt, $y:block)=> {unsafe{
        match GLOBAL_DEBUG_TIMEIT.as_mut(){
            Some(DEBUG_debug)=> {
                // Ensure an entry exists for this tag before timing.
                if DEBUG_debug.contains_key(&$x.to_string()){
                } else {
                    DEBUG_debug.insert($x.to_string(), DebugStruct::new());//TODO should only be new it key does not exist
                }
                let DEBUG_struct = DEBUG_debug.get_mut(&$x.to_string()).unwrap();
                DEBUG_struct.instant.reset();
                $y;
                // Accumulate both the all-time and the current-frame totals.
                DEBUG_struct.durations += DEBUG_struct.instant.get_time();
                DEBUG_struct.count += 1;
                DEBUG_struct.durations_per_frame[GLOBAL_DEBUG_COUNT % MAX_AVG_N].duration += DEBUG_struct.instant.get_time();
                DEBUG_struct.durations_per_frame[GLOBAL_DEBUG_COUNT % MAX_AVG_N].count += 1;
                DEBUG_struct.count_per_frame += 1;
            },
            // Silently a no-op if init_debugging was never called.
            None=>{}
        }
    }}
}

//pub fn update_debuginfo(keyboardinfo: &KeyboardInfo, textinfo: &TextInfo, mouseinfo: &MouseInfo){
//    //TODO
//    //some form of interaction maybe
//}

/// Draws the debug info to the canvas provided. The results will be rendered across the entire
/// canvas.
pub fn draw_debuginfo(canvas: &mut WindowCanvas){unsafe{
    let _x = GLOBAL_DEBUG_RENDER.x;
    let _y = GLOBAL_DEBUG_RENDER.y;
    let w = GLOBAL_DEBUG_RENDER.width;
    let h = GLOBAL_DEBUG_RENDER.height;
    // Translucent background panel for the overlay.
    draw_rect(canvas, [_x, _y, w, h], GLOBAL_DEBUG_RENDER.bkg_color, true);

    // Render rows top-down, starting from the panel's top edge.
    let mut x = _x + 0;
    let mut y = _y + h;
    y -= GLOBAL_DEBUG_RENDER.font_size as i32;//TODO this is trashy
    draw_string(canvas, "DEBUG:", _x, y, GLOBAL_DEBUG_RENDER.font_color, GLOBAL_DEBUG_RENDER.font_size);
    y -= 2*GLOBAL_DEBUG_RENDER.font_size as i32;//TODO this is trashy
    draw_string(canvas, " tag | counts | counts_per_frame | avg_duration | tot_dur_frame", _x, y, GLOBAL_DEBUG_RENDER.font_color, GLOBAL_DEBUG_RENDER.font_size);
    y -= GLOBAL_DEBUG_RENDER.font_size as i32;//TODO this is trashy
    match GLOBAL_DEBUG_TIMEIT.as_mut(){
        Some(db)=>{
            for (k, v) in db.iter(){
                // Tag column is truncated to 10 chars to fit the layout.
                let mut _k = k.clone();
                _k.truncate(10);
                draw_string(canvas, &format!("{}", _k), x, y, GLOBAL_DEBUG_RENDER.font_color, GLOBAL_DEBUG_RENDER.font_size); //TAG
                draw_string(canvas, &format!("{:8}", v.count), x+75, y, GLOBAL_DEBUG_RENDER.font_color, GLOBAL_DEBUG_RENDER.font_size);//TOTAL Counts
                draw_string(canvas, &format!("{:>8}", v.count_per_frame), x+200, y, GLOBAL_DEBUG_RENDER.font_color, GLOBAL_DEBUG_RENDER.font_size);
                // count + 1 in the divisor guards against division by zero
                // (at the cost of slightly under-reporting the average).
                draw_string(canvas, &format!("{:>8.2?}", v.durations/(v.count + 1) as u32), x+320, y, GLOBAL_DEBUG_RENDER.font_color, GLOBAL_DEBUG_RENDER.font_size);
                draw_string(canvas, &format!("{:>8.2?}", v.durations/(v.count + 1) as u32 * v.count_per_frame as u32), x+420, y, GLOBAL_DEBUG_RENDER.font_color, GLOBAL_DEBUG_RENDER.font_size);
                y -= GLOBAL_DEBUG_RENDER.font_size as i32;
                // Stop once we run out of vertical space in the panel.
                if y < GLOBAL_DEBUG_RENDER.font_size as i32 { break; }
            }
        },
        None=>{
            draw_string(canvas, "Debug struct has not been init.", _x+10, _y, C4_RED, GLOBAL_DEBUG_RENDER.font_size);
        }
    }
}}

#[test]
fn debugging_test1(){
    fn add(x: f32, y: f32)->f32{ return x + y ; }
    let x = 10.0;
    timeit!{{
        add(x, 12.0);
    }};
}

#[test]
fn debugging_test2(){
    fn add(x: f32, y: f32)->f32{ return x + y ; }
    let x = 10.0;
    timeit!{{
        let a = add(x, 12.0);
        //let b = add(x, 12.0);
    }};
}

#[test]
fn debugging_test3(){
    fn add(x: f32, y: f32)->f32{ return x + y ; }
    init_debugging(None);
    let x = 10;
    DEBUG_timeit!( "debug testing",{
        let y = add(x as f32, 12.0);
        match x {
            10 => { let y = add(10.0, 11.0); },
            _=> { }
        }
    });
    unsafe{
        println!("{:?}", GLOBAL_DEBUG_TIMEIT.as_mut().unwrap().len());
    }
}
use incrementalmerkletree::Hashable;
use super::pedersen_hash_root;
use rand::{Rng, SeedableRng, StdRng};

/// A digest stored as four 64-bit limbs ([u64; 4] == 256 bits), produced by
/// the Pedersen hash used for Merkle-tree nodes.
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub struct PedersenDigest(pub [u64; 4]);

impl Hashable for PedersenDigest {
    /// Hashes a pair of sibling digests into their parent node.
    fn combine(left: &Self, right: &Self) -> PedersenDigest {
        let parent = pedersen_hash_root(left.0.clone(), right.0.clone());
        PedersenDigest(parent)
    }

    /// The all-zero digest used for blank (empty) tree positions.
    fn blank() -> PedersenDigest {
        PedersenDigest([0u64; 4])
    }
}

impl PedersenDigest {
    /// Builds a digest whose limbs are drawn, in order, from a `StdRng`
    /// seeded deterministically with `seed`.
    pub fn rand(seed: usize) -> PedersenDigest {
        let seed_words: [usize; 1] = [seed];
        let mut rng = StdRng::from_seed(&seed_words);
        // Fill the limbs front-to-back so the RNG draw order matches the
        // original literal `[rng.gen(), rng.gen(), rng.gen(), rng.gen()]`.
        let mut limbs = [0u64; 4];
        for limb in limbs.iter_mut() {
            *limb = rng.gen();
        }
        PedersenDigest(limbs)
    }
}
use std::path::PathBuf;

use deck_core::OutputId;

use crate::local::dir::{DirFuture, Directory, ReadPath, WritePath};

/// The on-disk directory that stores build outputs, keyed by `OutputId`.
/// NOTE(review): every `Directory` operation below is still a stub
/// (`unimplemented!()`); calling any of them panics.
#[derive(Debug)]
pub struct OutputsDir;

impl Directory for OutputsDir {
    type Id = OutputId;
    type Input = PathBuf;
    type Output = PathBuf;

    // Subdirectory name this store lives under.
    const NAME: &'static str = "outputs";

    /// Derives the id for `_input` without touching the store. Unimplemented.
    fn precompute_id<'a>(&'a self, _input: &'a Self::Input) -> DirFuture<'a, Self::Id> {
        unimplemented!()
    }

    /// Computes the id from data already at `_path`. Unimplemented.
    fn compute_id<'a>(&'a self, _path: &'a ReadPath) -> DirFuture<'a, Self::Id> {
        unimplemented!()
    }

    /// Reads the stored output at `_path`, if present. Unimplemented.
    fn read<'a>(&'a self, _path: &'a ReadPath) -> DirFuture<'a, Option<Self::Output>> {
        unimplemented!()
    }

    /// Writes `_input` into the store at `_path`. Unimplemented.
    fn write<'a>(
        &'a self,
        _path: &'a mut WritePath,
        _input: Self::Input,
    ) -> DirFuture<'a, Self::Output> {
        unimplemented!()
    }
}
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![feature(extended_compare_and_swap)] use std::sync::atomic::{AtomicIsize, ATOMIC_ISIZE_INIT}; use std::sync::atomic::Ordering::*; static ATOMIC: AtomicIsize = ATOMIC_ISIZE_INIT; fn main() { // Make sure codegen can emit all the intrinsics correctly ATOMIC.compare_exchange(0, 1, Relaxed, Relaxed).ok(); ATOMIC.compare_exchange(0, 1, Acquire, Relaxed).ok(); ATOMIC.compare_exchange(0, 1, Release, Relaxed).ok(); ATOMIC.compare_exchange(0, 1, AcqRel, Relaxed).ok(); ATOMIC.compare_exchange(0, 1, SeqCst, Relaxed).ok(); ATOMIC.compare_exchange(0, 1, Acquire, Acquire).ok(); ATOMIC.compare_exchange(0, 1, AcqRel, Acquire).ok(); ATOMIC.compare_exchange(0, 1, SeqCst, Acquire).ok(); ATOMIC.compare_exchange(0, 1, SeqCst, SeqCst).ok(); ATOMIC.compare_exchange_weak(0, 1, Relaxed, Relaxed).ok(); ATOMIC.compare_exchange_weak(0, 1, Acquire, Relaxed).ok(); ATOMIC.compare_exchange_weak(0, 1, Release, Relaxed).ok(); ATOMIC.compare_exchange_weak(0, 1, AcqRel, Relaxed).ok(); ATOMIC.compare_exchange_weak(0, 1, SeqCst, Relaxed).ok(); ATOMIC.compare_exchange_weak(0, 1, Acquire, Acquire).ok(); ATOMIC.compare_exchange_weak(0, 1, AcqRel, Acquire).ok(); ATOMIC.compare_exchange_weak(0, 1, SeqCst, Acquire).ok(); ATOMIC.compare_exchange_weak(0, 1, SeqCst, SeqCst).ok(); }
/*
Copyright ⓒ 2017 contributors.
Licensed under the MIT license (see LICENSE or <http://opensource.org
/licenses/MIT>) or the Apache License, Version 2.0 (see LICENSE of
<http://www.apache.org/licenses/LICENSE-2.0>), at your option. All
files in the project carrying such notice may not be copied, modified,
or distributed except according to those terms.
*/
extern crate build_helper;
use build_helper::*;

// Smoke test for the `build_helper` target-introspection API: simply prove
// both probes can be called and their values read from the build script.
fn main() {
    // For things which have a stable value, test for that value. For everything else, just ensure can read the value at all.
    let _ = target::features();
    let _ = target::has_atomic();
}
// Copyright 2021 rust-ipfs-api Developers // // Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or // http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or // http://opensource.org/licenses/MIT>, at your option. This file may not be // copied, modified, or distributed except according to those terms. // #[cfg(feature = "with-actix")] pub use actix_rt::main; #[cfg(feature = "with-actix")] pub use ipfs_api_backend_actix as ipfs_api; #[cfg(feature = "with-hyper")] pub use tokio::main; #[cfg(feature = "with-hyper")] pub use ipfs_api_backend_hyper as ipfs_api;
//! kbct keyboard remapper: grabs an evdev keyboard device, remaps key events
//! through a `Kbct` configuration, and re-emits them on a virtual uinput
//! device, driven by a small mio-based event loop.
#![allow(unused_imports)]
#![allow(dead_code)]

extern crate chrono;
#[macro_use]
extern crate log;
extern crate pretty_env_logger;
extern crate uinput;
extern crate uinput_sys;
#[macro_use]
extern crate maplit;

use std::{fs::File, io::{self}, process};
use std::collections::HashMap;
use std::env;
use std::fs::OpenOptions;
use std::io::{Error, ErrorKind, Read};
use std::mem;
use std::os::unix::io::AsRawFd;

use inotify::Inotify;
use ioctl_rs;
use mio::{Events, Interest, Poll, Token};
use mio::event::Event;
use mio::unix::SourceFd;
use nix::sys::signal::SigSet;
use nix::sys::signalfd::SignalFd;
use uinput::Device;
use uinput_sys::*;
use uinput_sys::input_event;
use kbct::{Kbct, KbctRootConf, KbctEvent};
use std::sync::atomic::Ordering::Release;
use uinput::event::keyboard::Function::Press;
use kbct::KbctKeyStatus::*;

/// A mio poll loop dispatching readiness events to registered observers.
struct EventLoop {
    events: Events,
    poll: Poll,
    running: bool,
    // One observer per mio token; tokens are fixed constants per source.
    handlers: HashMap<Token, Box<dyn EventObserver>>,
}

/// What an observer wants the loop to do after handling an event.
// NOTE(review): "Unsubcribe" is a typo for "Unsubscribe"; both it and
// SubscribeNew are still unimplemented at the dispatch site.
enum ObserverResult {
    Nothing,
    Unsubcribe,
    Terminate { status: i32 },
    SubscribeNew(Box<dyn EventObserver>),
}

impl EventLoop {
    /// Polls until an observer requests termination, dispatching each
    /// readiness event to the observer registered under its token.
    fn run(&mut self) -> io::Result<()> {
        while self.running {
            self.poll.poll(&mut self.events, None)?;
            for ev in self.events.iter() {
                // Panics if an event arrives for an unregistered token.
                let handler = self.handlers.get_mut(&ev.token()).unwrap();
                match handler.on_event(ev)? {
                    ObserverResult::Nothing => {}
                    ObserverResult::Unsubcribe => { unimplemented!() }
                    ObserverResult::Terminate { status: _status } => { self.running = false; }
                    ObserverResult::SubscribeNew(_x) => { unimplemented!() }
                }
            }
        }
        Ok(())
    }

    /// Registers `fd` with the poller and stores its observer under `token`.
    /// Fails with `AlreadyExists` if the token is taken (note: the fd is
    /// registered with the poller before that check runs).
    fn register_observer(&mut self, fd: i32, token: Token, obs: Box<dyn EventObserver>) -> io::Result<()> {
        self.poll.registry().register(&mut SourceFd(&fd), token, Interest::READABLE)?;
        if self.handlers.contains_key(&token) {
            Err(Error::new(ErrorKind::AlreadyExists, "The handler token already registered"))
        } else {
            self.handlers.insert(token, obs);
            Ok(())
        }
    }
}

/// Callback interface for event-loop participants.
trait EventObserver {
    fn on_event(&mut self, _: &Event) -> io::Result<ObserverResult>;
}

/// Terminates the loop on SIGTERM/SIGINT via a signalfd.
struct SignalReceiver {
    signal_fd: SignalFd,
}

impl SignalReceiver {
    /// Blocks SIGTERM/SIGINT for the thread and routes them through a
    /// non-blocking signalfd registered under token 1.
    fn register(evloop: &mut EventLoop) -> io::Result<()> {
        let mut mask = SigSet::empty();
        mask.add(nix::sys::signal::SIGTERM);
        mask.add(nix::sys::signal::SIGINT);
        // Signals must be blocked for signalfd delivery to work.
        mask.thread_block().unwrap();
        const SIG_EVENT: Token = Token(1);
        let sfd = nix::sys::signalfd::SignalFd::with_flags(
            &mask, nix::sys::signalfd::SfdFlags::SFD_NONBLOCK).unwrap();
        evloop.register_observer(sfd.as_raw_fd(), SIG_EVENT,
            Box::new(SignalReceiver { signal_fd: (sfd) }))?;
        trace!("Registered SIGTERM, SIGINT handlers");
        Ok(())
    }
}

impl EventObserver for SignalReceiver {
    /// Any delivered signal stops the loop with status 0.
    fn on_event(&mut self, _: &Event) -> io::Result<ObserverResult> {
        info!("Received signal, stopping");
        Ok(ObserverResult::Terminate { status: 0 })
    }
}

/// Reads raw evdev events from the grabbed keyboard, remaps them via kbct,
/// and writes the results to the virtual uinput device.
struct KeyboardMapper {
    file: File,          // the grabbed /dev/input/eventN device
    device: Device,      // virtual uinput output device
    raw_buffer: [u8; KeyboardMapper::BUF_SIZE],
    kbct: Kbct,
}

impl KeyboardMapper {
    const MAX_EVS: usize = 1024;
    const BUF_SIZE: usize = mem::size_of::<input_event>() * KeyboardMapper::MAX_EVS;

    /// Loads ./conf.yaml, builds the kbct mapper, opens and grabs the input
    /// device, creates the uinput device, and registers under token 0.
    fn register(evloop: &mut EventLoop, dev_file: String) -> io::Result<()> {
        let kbct_conf_yaml = std::fs::read_to_string("./conf.yaml")
            .expect("Could not open config yaml file");
        let kbct_conf = KbctRootConf::parse(kbct_conf_yaml)
            .expect("Could not parse yaml file");
        // Key names in the config are resolved as KEY_<UPPERCASE_NAME>;
        // name_to_code returns -1 for unknown names.
        let kbct = Kbct::new(
            kbct_conf,
            |x| match keycodes::name_to_code(format!("KEY_{}", x.to_uppercase()).as_str()) {
                -1 => None,
                x => Some(x)
            }).expect("Could not create kbct instance");

        let kb_mapper = Box::new(KeyboardMapper {
            file: OpenOptions::new()
                .read(true)
                .write(false)
                .open(dev_file)?,
            device: KeyboardMapper::open_uinput_device()?,
            raw_buffer: [0; KeyboardMapper::BUF_SIZE],
            kbct,
        });

        // Take exclusive ownership of the device so the original key events
        // are not delivered to other clients.
        kb_mapper.grab_keyboard()?;

        const DEVICE_EVENT: Token = Token(0);
        evloop.register_observer(kb_mapper.file.as_raw_fd(), DEVICE_EVENT, kb_mapper)
    }

    /// Creates the virtual uinput device with keyboard, controller, and
    /// relative (pointer/wheel) capabilities enabled.
    fn open_uinput_device() -> io::Result<uinput::Device> {
        let mut builder = uinput::default().unwrap()
            .name("test").unwrap()
            .event(uinput::event::Keyboard::All).unwrap()
            .event(uinput::event::Controller::All).unwrap();
        for item in uinput::event::relative::Position::iter_variants() {
            builder = builder.event(item).unwrap();
        }
        for item in uinput::event::relative::Wheel::iter_variants() {
            builder = builder.event(item).unwrap();
        }
        Ok(builder.create().unwrap())
    }

    /// Issues EVIOCGRAB(1) on the device for exclusive access.
    fn grab_keyboard(&self) -> Result<(), Error> {
        info!("Trying to grab device {:?}", self.file);
        // 1074021776 == 0x40044590, the evdev EVIOCGRAB ioctl request number.
        const EVIOCGRAB: u32 = 1074021776;
        match unsafe { ioctl_rs::ioctl(self.file.as_raw_fd(), EVIOCGRAB, 1) } {
            0 => Ok(()),
            _ => Err(Error::last_os_error()),
        }
    }
}

impl EventObserver for KeyboardMapper {
    fn on_event(&mut self, _: &Event) -> io::Result<ObserverResult> {
        // trace!("vent")
        // One read may return multiple packed input_event structs.
        let events_count = self.file.read(&mut self.raw_buffer)?
            / mem::size_of::<input_event>();
        // SAFETY-NOTE(review): reinterprets (and copies) the whole byte
        // buffer as an input_event array; assumes the kernel filled it with
        // validly laid-out structs. Only the first `events_count` entries
        // are meaningful.
        let events = unsafe {
            mem::transmute::<[u8; KeyboardMapper::BUF_SIZE],
                [input_event; KeyboardMapper::MAX_EVS]>(self.raw_buffer)
        };
        for i in 0..events_count {
            let x = events[i];
            if events[i].kind == EV_KEY as u16 {
                // evdev values: 0 = release, 1 = key-down, 2 = autorepeat.
                // NOTE(review): kbct naming maps 1 -> Clicked (key-down) and
                // 2 -> Pressed (autorepeat) — confirm against kbct's docs.
                let ev = match events[i].value {
                    0 => Released,
                    2 => Pressed,
                    1 => Clicked,
                    _ => panic!("Unknown event value")
                };
                let result = self.kbct.map_event(
                    KbctEvent { code: events[i].code as i32, ev_type: ev });
                // A single input event may map to several output events.
                for x in result {
                    println!("Mapped {:?}", x);
                    let value = match x.ev_type {
                        Released | ForceReleased => 0,
                        Pressed => 2,
                        Clicked => 1,
                    };
                    self.device.write(EV_KEY, x.code, value).unwrap();
                }
            } else {
                // Pass non-key events (sync, relative motion, ...) through.
                // NOTE(review): the Result of this write is silently ignored,
                // unlike the unwrap on the EV_KEY path above.
                self.device.write(x.kind as i32, x.code as i32, x.value);
            }
        }
        Ok(ObserverResult::Nothing)
    }
}

/// Logs create/delete/modify activity under /dev/input via inotify.
struct DeviceWatcher {
    inotify: Inotify
}

impl DeviceWatcher {
    /// Watches /dev/input for device hotplug and registers under token 2.
    fn register(evloop: &mut EventLoop) -> io::Result<()> {
        //Setup inotify poll reader
        let mut watcher = DeviceWatcher {
            inotify: inotify::Inotify::init()
                .expect("Error while initializing inotify instance")
        };
        watcher.inotify
            .add_watch(
                "/dev/input",
                inotify::WatchMask::CREATE | inotify::WatchMask::DELETE,
            )
            .expect("Failed to add file watch");
        const SIG_INOTIFY: Token = Token(2);
        evloop.register_observer(watcher.inotify.as_raw_fd(), SIG_INOTIFY,
            Box::new(watcher))?;
        Ok(())
    }
}

impl EventObserver for DeviceWatcher {
    /// Drains pending inotify events and logs what changed.
    fn on_event(&mut self, _: &Event) -> io::Result<ObserverResult> {
        let mut buffer = [0; 1024];
        let events = self.inotify.read_events_blocking(&mut buffer)
            .expect("Error while reading events");
        for event in events {
            if event.mask.contains(inotify::EventMask::CREATE) {
                if event.mask.contains(inotify::EventMask::ISDIR) {
                    println!("Directory created: {:?}", event.name);
                } else {
                    println!("File created: {:?}", event.name);
                }
            } else if event.mask.contains(inotify::EventMask::DELETE) {
                if event.mask.contains(inotify::EventMask::ISDIR) {
                    println!("Directory deleted: {:?}", event.name);
                } else {
                    println!("File deleted: {:?}", event.name);
                }
            } else if event.mask.contains(inotify::EventMask::MODIFY) {
                // NOTE(review): MODIFY is handled here but not included in
                // the WatchMask above, so this branch cannot currently fire.
                if event.mask.contains(inotify::EventMask::ISDIR) {
                    println!("Directory modified: {:?}", event.name);
                } else {
                    println!("File modified: {:?}", event.name);
                }
            }
        }
        Ok(ObserverResult::Nothing)
    }
}

/// Wires up logging, the three observers (signals, keyboard, device watcher),
/// and runs the event loop until a signal terminates it.
// NOTE(review): the input device path is hard-coded to /dev/input/event2.
fn main() -> io::Result<()> {
    pretty_env_logger::init_timed();
    let mut evloop = EventLoop {
        poll: Poll::new()?,
        events: Events::with_capacity(1024),
        running: true,
        handlers: HashMap::new(),
    };
    SignalReceiver::register(&mut evloop)?;
    KeyboardMapper::register(&mut evloop, "/dev/input/event2".to_string())?;
    DeviceWatcher::register(&mut evloop)?;
    println!("Starting...");
    info!("Starting laykeymap event loop, pid={}", process::id());
    evloop.run()?;
    Ok(())
}

mod keycodes;
use core::fmt::{self, Debug, Display};

use Fail;

// The `Context` type is defined twice: once for no_std builds (no `Error` or
// `Backtrace` available) and once for std builds. The two definitions share the
// same public API so downstream code is identical either way.
without_std! {
    /// An error with context around it.
    ///
    /// The context is intended to be a human-readable, user-facing explanation for the
    /// error that has occurred. The underlying error is not assumed to be end-user-relevant
    /// information.
    ///
    /// The `Display` impl for `Context` only prints the human-readable context, while the
    /// `Debug` impl also prints the underlying error.
    pub struct Context<D: Display + Send + Sync + 'static> {
        context: D,
    }

    impl<D: Display + Send + Sync + 'static> Context<D> {
        /// Creates a new context without an underlying error message.
        pub fn new(context: D) -> Context<D> {
            Context { context }
        }

        /// Returns a reference to the context provided with this error.
        pub fn get_context(&self) -> &D {
            &self.context
        }

        /// Maps `Context<D>` to `Context<T>` by applying a function to the contained context.
        pub fn map<F, T>(self, op: F) -> Context<T>
            where F: FnOnce(D) -> T,
                  T: Display + Send + Sync + 'static
        {
            Context {
                context: op(self.context),
            }
        }

        // Without std there is nowhere to store the underlying error, so it is
        // intentionally discarded here.
        pub(crate) fn with_err<E: Fail>(context: D, _: E) -> Context<D> {
            Context { context }
        }
    }

    impl<D: Display + Send + Sync + 'static> Fail for Context<D> { }

    // In no_std builds there is no captured error, so Debug and Display both
    // print just the context.
    impl<D: Display + Send + Sync + 'static> Debug for Context<D> {
        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            write!(f, "{}", self.context)
        }
    }

    impl<D: Display + Send + Sync + 'static> Display for Context<D> {
        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            write!(f, "{}", self.context)
        }
    }

    #[test]
    fn test_map() {
        let ctx = Context::new("a string").map(|s| format!("{} with some more stuff", s));
        assert_eq!(ctx.context, String::from("a string with some more stuff"));
    }
}

with_std! {
    use {Error, Backtrace};

    /// An error with context around it.
    ///
    /// The context is intended to be a human-readable, user-facing explanation for the
    /// error that has occurred. The underlying error is not assumed to be end-user-relevant
    /// information.
    ///
    /// The `Display` impl for `Context` only prints the human-readable context, while the
    /// `Debug` impl also prints the underlying error.
    pub struct Context<D: Display + Send + Sync + 'static> {
        context: D,
        // Either a backtrace captured at construction (no underlying error) or
        // the full underlying `Error` (which carries its own backtrace).
        failure: Either<Backtrace, Error>,
    }

    impl<D: Display + Send + Sync + 'static> Context<D> {
        /// Creates a new context without an underlying error message.
        pub fn new(context: D) -> Context<D> {
            let failure = Either::This(Backtrace::new());
            Context { context, failure }
        }

        /// Returns a reference to the context provided with this error.
        pub fn get_context(&self) -> &D {
            &self.context
        }

        /// Maps `Context<D>` to `Context<T>` by applying a function to the contained context.
        pub fn map<F, T>(self, op: F) -> Context<T>
            where F: FnOnce(D) -> T,
                  T: Display + Send + Sync + 'static
        {
            Context {
                context: op(self.context),
                failure: self.failure,
            }
        }

        // Wraps an underlying error; its backtrace is reused instead of
        // capturing a new one.
        pub(crate) fn with_err<E: Into<Error>>(context: D, error: E) -> Context<D> {
            let failure = Either::That(error.into());
            Context { context, failure }
        }
    }

    impl<D: Display + Send + Sync + 'static> Fail for Context<D> {
        fn name(&self) -> Option<&str> {
            // Delegate to the underlying error's name, if there is one.
            self.failure.as_cause().and_then(|x| x.name())
        }

        fn cause(&self) -> Option<&Fail> {
            self.failure.as_cause()
        }

        fn backtrace(&self) -> Option<&Backtrace> {
            Some(self.failure.backtrace())
        }
    }

    // Debug prints the underlying failure (or backtrace) followed by the
    // context; Display prints only the user-facing context.
    impl<D: Display + Send + Sync + 'static> Debug for Context<D> {
        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            write!(f, "{:?}\n\n{}", self.failure, self.context)
        }
    }

    impl<D: Display + Send + Sync + 'static> Display for Context<D> {
        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            write!(f, "{}", self.context)
        }
    }

    // Minimal two-variant sum type; avoids pulling in an external `either`
    // dependency for this one internal field.
    enum Either<A, B> {
        This(A),
        That(B),
    }

    impl Either<Backtrace, Error> {
        /// The backtrace for this failure: either the one captured directly or
        /// the one owned by the underlying error.
        fn backtrace(&self) -> &Backtrace {
            match *self {
                Either::This(ref backtrace) => backtrace,
                Either::That(ref error) => error.backtrace(),
            }
        }

        /// The underlying cause, if this context wraps an error.
        fn as_cause(&self) -> Option<&Fail> {
            match *self {
                Either::This(_) => None,
                Either::That(ref error) => Some(error.as_fail())
            }
        }
    }

    impl Debug for Either<Backtrace, Error> {
        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            match *self {
                Either::This(ref backtrace) => write!(f, "{:?}", backtrace),
                Either::That(ref error) => write!(f, "{:?}", error),
            }
        }
    }

    #[test]
    fn test_map() {
        let ctx = Context::new("a string").map(|s| format!("{} with some more stuff", s));
        assert_eq!(ctx.context, String::from("a string with some more stuff"));
    }
}

// Any displayable value can be lifted directly into a `Context`.
impl<D> From<D> for Context<D>
where
    D: Display + Send + Sync + 'static,
{
    fn from(display: D) -> Context<D> {
        Context::new(display)
    }
}
#![doc = "Basic-timers"]
// NOTE(review): this file looks like svd2rust-generated register-access code
// for the basic timers (TIM6/TIM7 class). Do not hand-edit; regenerate from
// the device SVD instead. Each register gets a wrapper struct plus a module
// containing read (`R`) / write (`W`) proxies with per-field accessors.
#[doc = r" Register block"]
#[repr(C)]
pub struct BTim {
    #[doc = "0x00 - control register 1"]
    pub cr1: Cr1,
    #[doc = "0x04 - control register 2"]
    pub cr2: Cr2,
    _reserved0: [u8; 4usize],
    #[doc = "0x0c - DMA/Interrupt enable register"]
    pub dier: Dier,
    #[doc = "0x10 - status register"]
    pub sr: Sr,
    #[doc = "0x14 - event generation register"]
    pub egr: Egr,
    _reserved1: [u8; 12usize],
    #[doc = "0x24 - counter"]
    pub cnt: Cnt,
    #[doc = "0x28 - prescaler"]
    pub psc: Psc,
    #[doc = "0x2c - auto-reload register"]
    pub arr: Arr,
}
// ----- CR1: control register 1 (ARPE, OPM, URS, UDIS, CEN bits) -----
#[doc = "control register 1"]
#[repr(C)]
pub struct Cr1 {
    register: ::volatile_register::RW<u32>,
}
#[doc = "control register 1"]
pub mod cr1 {
    #[doc = r" Value read from the register"]
    pub struct R {
        bits: u32,
    }
    #[doc = r" Value to write to the register"]
    pub struct W {
        bits: u32,
    }
    impl super::Cr1 {
        #[doc = r" Modifies the contents of the register"]
        pub fn modify<F>(&mut self, f: F)
            where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W
        {
            // Read-modify-write: the closure sees the current bits and edits a copy.
            let bits = self.register.read();
            let r = R { bits: bits };
            let mut w = W { bits: bits };
            f(&r, &mut w);
            self.register.write(w.bits);
        }
        #[doc = r" Reads the contents of the register"]
        pub fn read(&self) -> R {
            R { bits: self.register.read() }
        }
        #[doc = r" Writes to the register"]
        pub fn write<F>(&mut self, f: F)
            where F: FnOnce(&mut W) -> &mut W
        {
            // Write starts from the reset value, not the current contents.
            let mut w = W::reset_value();
            f(&mut w);
            self.register.write(w.bits);
        }
    }
    #[doc = "Value of the field ARPE"]
    pub struct ArpeR {
        bits: u8,
    }
    impl ArpeR {
        #[doc = r" Value of the field as raw bits"]
        pub fn bits(&self) -> u8 {
            self.bits
        }
    }
    #[doc = "Value of the field OPM"]
    pub struct OpmR {
        bits: u8,
    }
    impl OpmR {
        #[doc = r" Value of the field as raw bits"]
        pub fn bits(&self) -> u8 {
            self.bits
        }
    }
    #[doc = "Value of the field URS"]
    pub struct UrsR {
        bits: u8,
    }
    impl UrsR {
        #[doc = r" Value of the field as raw bits"]
        pub fn bits(&self) -> u8 {
            self.bits
        }
    }
    #[doc = "Value of the field UDIS"]
    pub struct UdisR {
        bits: u8,
    }
    impl UdisR {
        #[doc = r" Value of the field as raw bits"]
        pub fn bits(&self) -> u8 {
            self.bits
        }
    }
    #[doc = "Value of the field CEN"]
    pub struct CenR {
        bits: u8,
    }
    impl CenR {
        #[doc = r" Value of the field as raw bits"]
        pub fn bits(&self) -> u8 {
            self.bits
        }
    }
    // Writer proxies: each masks out its field's bits, then ORs in the new value.
    #[doc = r" Proxy"]
    pub struct _ArpeW<'a> {
        register: &'a mut W,
    }
    impl<'a> _ArpeW<'a> {
        #[doc = r" Writes raw `bits` to the field"]
        pub unsafe fn bits(self, bits: u8) -> &'a mut W {
            const MASK: u8 = 1;
            const OFFSET: u8 = 7;
            self.register.bits &= !((MASK as u32) << OFFSET);
            self.register.bits |= ((bits & MASK) as u32) << OFFSET;
            self.register
        }
    }
    #[doc = r" Proxy"]
    pub struct _OpmW<'a> {
        register: &'a mut W,
    }
    impl<'a> _OpmW<'a> {
        #[doc = r" Writes raw `bits` to the field"]
        pub unsafe fn bits(self, bits: u8) -> &'a mut W {
            const MASK: u8 = 1;
            const OFFSET: u8 = 3;
            self.register.bits &= !((MASK as u32) << OFFSET);
            self.register.bits |= ((bits & MASK) as u32) << OFFSET;
            self.register
        }
    }
    #[doc = r" Proxy"]
    pub struct _UrsW<'a> {
        register: &'a mut W,
    }
    impl<'a> _UrsW<'a> {
        #[doc = r" Writes raw `bits` to the field"]
        pub unsafe fn bits(self, bits: u8) -> &'a mut W {
            const MASK: u8 = 1;
            const OFFSET: u8 = 2;
            self.register.bits &= !((MASK as u32) << OFFSET);
            self.register.bits |= ((bits & MASK) as u32) << OFFSET;
            self.register
        }
    }
    #[doc = r" Proxy"]
    pub struct _UdisW<'a> {
        register: &'a mut W,
    }
    impl<'a> _UdisW<'a> {
        #[doc = r" Writes raw `bits` to the field"]
        pub unsafe fn bits(self, bits: u8) -> &'a mut W {
            const MASK: u8 = 1;
            const OFFSET: u8 = 1;
            self.register.bits &= !((MASK as u32) << OFFSET);
            self.register.bits |= ((bits & MASK) as u32) << OFFSET;
            self.register
        }
    }
    #[doc = r" Proxy"]
    pub struct _CenW<'a> {
        register: &'a mut W,
    }
    impl<'a> _CenW<'a> {
        #[doc = r" Writes raw `bits` to the field"]
        pub unsafe fn bits(self, bits: u8) -> &'a mut W {
            const MASK: u8 = 1;
            const OFFSET: u8 = 0;
            self.register.bits &= !((MASK as u32) << OFFSET);
            self.register.bits |= ((bits & MASK) as u32) << OFFSET;
            self.register
        }
    }
    impl R {
        #[doc = r" Value of the register as raw bits"]
        pub fn bits(&self) -> u32 {
            self.bits
        }
        fn _arpe(&self) -> u8 {
            const MASK: u8 = 1;
            const OFFSET: u8 = 7;
            ((self.bits >> OFFSET) & MASK as u32) as u8
        }
        #[doc = "Bit 7 - Auto-reload preload enable"]
        pub fn arpe(&self) -> ArpeR {
            ArpeR { bits: self._arpe() }
        }
        fn _opm(&self) -> u8 {
            const MASK: u8 = 1;
            const OFFSET: u8 = 3;
            ((self.bits >> OFFSET) & MASK as u32) as u8
        }
        #[doc = "Bit 3 - One-pulse mode"]
        pub fn opm(&self) -> OpmR {
            OpmR { bits: self._opm() }
        }
        fn _urs(&self) -> u8 {
            const MASK: u8 = 1;
            const OFFSET: u8 = 2;
            ((self.bits >> OFFSET) & MASK as u32) as u8
        }
        #[doc = "Bit 2 - Update request source"]
        pub fn urs(&self) -> UrsR {
            UrsR { bits: self._urs() }
        }
        fn _udis(&self) -> u8 {
            const MASK: u8 = 1;
            const OFFSET: u8 = 1;
            ((self.bits >> OFFSET) & MASK as u32) as u8
        }
        #[doc = "Bit 1 - Update disable"]
        pub fn udis(&self) -> UdisR {
            UdisR { bits: self._udis() }
        }
        fn _cen(&self) -> u8 {
            const MASK: u8 = 1;
            const OFFSET: u8 = 0;
            ((self.bits >> OFFSET) & MASK as u32) as u8
        }
        #[doc = "Bit 0 - Counter enable"]
        pub fn cen(&self) -> CenR {
            CenR { bits: self._cen() }
        }
    }
    impl W {
        #[doc = r" Reset value of the register"]
        pub fn reset_value() -> W {
            W { bits: 0 }
        }
        #[doc = r" Writes raw `bits` to the register"]
        pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
            self.bits = bits;
            self
        }
        #[doc = "Bit 7 - Auto-reload preload enable"]
        pub fn arpe(&mut self) -> _ArpeW {
            _ArpeW { register: self }
        }
        #[doc = "Bit 3 - One-pulse mode"]
        pub fn opm(&mut self) -> _OpmW {
            _OpmW { register: self }
        }
        #[doc = "Bit 2 - Update request source"]
        pub fn urs(&mut self) -> _UrsW {
            _UrsW { register: self }
        }
        #[doc = "Bit 1 - Update disable"]
        pub fn udis(&mut self) -> _UdisW {
            _UdisW { register: self }
        }
        #[doc = "Bit 0 - Counter enable"]
        pub fn cen(&mut self) -> _CenW {
            _CenW { register: self }
        }
    }
}
// ----- CR2: control register 2 (MMS master mode selection) -----
#[doc = "control register 2"]
#[repr(C)]
pub struct Cr2 {
    register: ::volatile_register::RW<u32>,
}
#[doc = "control register 2"]
pub mod cr2 {
    #[doc = r" Value read from the register"]
    pub struct R {
        bits: u32,
    }
    #[doc = r" Value to write to the register"]
    pub struct W {
        bits: u32,
    }
    impl super::Cr2 {
        #[doc = r" Modifies the contents of the register"]
        pub fn modify<F>(&mut self, f: F)
            where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W
        {
            let bits = self.register.read();
            let r = R { bits: bits };
            let mut w = W { bits: bits };
            f(&r, &mut w);
            self.register.write(w.bits);
        }
        #[doc = r" Reads the contents of the register"]
        pub fn read(&self) -> R {
            R { bits: self.register.read() }
        }
        #[doc = r" Writes to the register"]
        pub fn write<F>(&mut self, f: F)
            where F: FnOnce(&mut W) -> &mut W
        {
            let mut w = W::reset_value();
            f(&mut w);
            self.register.write(w.bits);
        }
    }
    #[doc = "Value of the field MMS"]
    pub struct MmsR {
        bits: u8,
    }
    impl MmsR {
        #[doc = r" Value of the field as raw bits"]
        pub fn bits(&self) -> u8 {
            self.bits
        }
    }
    #[doc = r" Proxy"]
    pub struct _MmsW<'a> {
        register: &'a mut W,
    }
    impl<'a> _MmsW<'a> {
        #[doc = r" Writes raw `bits` to the field"]
        pub unsafe fn bits(self, bits: u8) -> &'a mut W {
            const MASK: u8 = 7;
            const OFFSET: u8 = 4;
            self.register.bits &= !((MASK as u32) << OFFSET);
            self.register.bits |= ((bits & MASK) as u32) << OFFSET;
            self.register
        }
    }
    impl R {
        #[doc = r" Value of the register as raw bits"]
        pub fn bits(&self) -> u32 {
            self.bits
        }
        fn _mms(&self) -> u8 {
            const MASK: u8 = 7;
            const OFFSET: u8 = 4;
            ((self.bits >> OFFSET) & MASK as u32) as u8
        }
        #[doc = "Bits 4:6 - Master mode selection"]
        pub fn mms(&self) -> MmsR {
            MmsR { bits: self._mms() }
        }
    }
    impl W {
        #[doc = r" Reset value of the register"]
        pub fn reset_value() -> W {
            W { bits: 0 }
        }
        #[doc = r" Writes raw `bits` to the register"]
        pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
            self.bits = bits;
            self
        }
        #[doc = "Bits 4:6 - Master mode selection"]
        pub fn mms(&mut self) -> _MmsW {
            _MmsW { register: self }
        }
    }
}
// ----- DIER: DMA/interrupt enable register (UDE, UIE) -----
#[doc = "DMA/Interrupt enable register"]
#[repr(C)]
pub struct Dier {
    register: ::volatile_register::RW<u32>,
}
#[doc = "DMA/Interrupt enable register"]
pub mod dier {
    #[doc = r" Value read from the register"]
    pub struct R {
        bits: u32,
    }
    #[doc = r" Value to write to the register"]
    pub struct W {
        bits: u32,
    }
    impl super::Dier {
        #[doc = r" Modifies the contents of the register"]
        pub fn modify<F>(&mut self, f: F)
            where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W
        {
            let bits = self.register.read();
            let r = R { bits: bits };
            let mut w = W { bits: bits };
            f(&r, &mut w);
            self.register.write(w.bits);
        }
        #[doc = r" Reads the contents of the register"]
        pub fn read(&self) -> R {
            R { bits: self.register.read() }
        }
        #[doc = r" Writes to the register"]
        pub fn write<F>(&mut self, f: F)
            where F: FnOnce(&mut W) -> &mut W
        {
            let mut w = W::reset_value();
            f(&mut w);
            self.register.write(w.bits);
        }
    }
    #[doc = "Value of the field UDE"]
    pub struct UdeR {
        bits: u8,
    }
    impl UdeR {
        #[doc = r" Value of the field as raw bits"]
        pub fn bits(&self) -> u8 {
            self.bits
        }
    }
    #[doc = "Value of the field UIE"]
    pub struct UieR {
        bits: u8,
    }
    impl UieR {
        #[doc = r" Value of the field as raw bits"]
        pub fn bits(&self) -> u8 {
            self.bits
        }
    }
    #[doc = r" Proxy"]
    pub struct _UdeW<'a> {
        register: &'a mut W,
    }
    impl<'a> _UdeW<'a> {
        #[doc = r" Writes raw `bits` to the field"]
        pub unsafe fn bits(self, bits: u8) -> &'a mut W {
            const MASK: u8 = 1;
            const OFFSET: u8 = 8;
            self.register.bits &= !((MASK as u32) << OFFSET);
            self.register.bits |= ((bits & MASK) as u32) << OFFSET;
            self.register
        }
    }
    #[doc = r" Proxy"]
    pub struct _UieW<'a> {
        register: &'a mut W,
    }
    impl<'a> _UieW<'a> {
        #[doc = r" Writes raw `bits` to the field"]
        pub unsafe fn bits(self, bits: u8) -> &'a mut W {
            const MASK: u8 = 1;
            const OFFSET: u8 = 0;
            self.register.bits &= !((MASK as u32) << OFFSET);
            self.register.bits |= ((bits & MASK) as u32) << OFFSET;
            self.register
        }
    }
    impl R {
        #[doc = r" Value of the register as raw bits"]
        pub fn bits(&self) -> u32 {
            self.bits
        }
        fn _ude(&self) -> u8 {
            const MASK: u8 = 1;
            const OFFSET: u8 = 8;
            ((self.bits >> OFFSET) & MASK as u32) as u8
        }
        #[doc = "Bit 8 - Update DMA request enable"]
        pub fn ude(&self) -> UdeR {
            UdeR { bits: self._ude() }
        }
        fn _uie(&self) -> u8 {
            const MASK: u8 = 1;
            const OFFSET: u8 = 0;
            ((self.bits >> OFFSET) & MASK as u32) as u8
        }
        #[doc = "Bit 0 - Update interrupt enable"]
        pub fn uie(&self) -> UieR {
            UieR { bits: self._uie() }
        }
    }
    impl W {
        #[doc = r" Reset value of the register"]
        pub fn reset_value() -> W {
            W { bits: 0 }
        }
        #[doc = r" Writes raw `bits` to the register"]
        pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
            self.bits = bits;
            self
        }
        #[doc = "Bit 8 - Update DMA request enable"]
        pub fn ude(&mut self) -> _UdeW {
            _UdeW { register: self }
        }
        #[doc = "Bit 0 - Update interrupt enable"]
        pub fn uie(&mut self) -> _UieW {
            _UieW { register: self }
        }
    }
}
// ----- SR: status register (UIF update interrupt flag) -----
#[doc = "status register"]
#[repr(C)]
pub struct Sr {
    register: ::volatile_register::RW<u32>,
}
#[doc = "status register"]
pub mod sr {
    #[doc = r" Value read from the register"]
    pub struct R {
        bits: u32,
    }
    #[doc = r" Value to write to the register"]
    pub struct W {
        bits: u32,
    }
    impl super::Sr {
        #[doc = r" Modifies the contents of the register"]
        pub fn modify<F>(&mut self, f: F)
            where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W
        {
            let bits = self.register.read();
            let r = R { bits: bits };
            let mut w = W { bits: bits };
            f(&r, &mut w);
            self.register.write(w.bits);
        }
        #[doc = r" Reads the contents of the register"]
        pub fn read(&self) -> R {
            R { bits: self.register.read() }
        }
        #[doc = r" Writes to the register"]
        pub fn write<F>(&mut self, f: F)
            where F: FnOnce(&mut W) -> &mut W
        {
            let mut w = W::reset_value();
            f(&mut w);
            self.register.write(w.bits);
        }
    }
    #[doc = "Value of the field UIF"]
    pub struct UifR {
        bits: u8,
    }
    impl UifR {
        #[doc = r" Value of the field as raw bits"]
        pub fn bits(&self) -> u8 {
            self.bits
        }
    }
    #[doc = r" Proxy"]
    pub struct _UifW<'a> {
        register: &'a mut W,
    }
    impl<'a> _UifW<'a> {
        #[doc = r" Writes raw `bits` to the field"]
        pub unsafe fn bits(self, bits: u8) -> &'a mut W {
            const MASK: u8 = 1;
            const OFFSET: u8 = 0;
            self.register.bits &= !((MASK as u32) << OFFSET);
            self.register.bits |= ((bits & MASK) as u32) << OFFSET;
            self.register
        }
    }
    impl R {
        #[doc = r" Value of the register as raw bits"]
        pub fn bits(&self) -> u32 {
            self.bits
        }
        fn _uif(&self) -> u8 {
            const MASK: u8 = 1;
            const OFFSET: u8 = 0;
            ((self.bits >> OFFSET) & MASK as u32) as u8
        }
        #[doc = "Bit 0 - Update interrupt flag"]
        pub fn uif(&self) -> UifR {
            UifR { bits: self._uif() }
        }
    }
    impl W {
        #[doc = r" Reset value of the register"]
        pub fn reset_value() -> W {
            W { bits: 0 }
        }
        #[doc = r" Writes raw `bits` to the register"]
        pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
            self.bits = bits;
            self
        }
        #[doc = "Bit 0 - Update interrupt flag"]
        pub fn uif(&mut self) -> _UifW {
            _UifW { register: self }
        }
    }
}
// ----- EGR: event generation register (write-only, so no `R` proxy) -----
#[doc = "event generation register"]
#[repr(C)]
pub struct Egr {
    register: ::volatile_register::WO<u32>,
}
#[doc = "event generation register"]
pub mod egr {
    #[doc = r" Value to write to the register"]
    pub struct W {
        bits: u32,
    }
    impl super::Egr {
        #[doc = r" Writes to the register"]
        pub fn write<F>(&mut self, f: F)
            where F: FnOnce(&mut W) -> &mut W
        {
            let mut w = W::reset_value();
            f(&mut w);
            self.register.write(w.bits);
        }
    }
    #[doc = r" Proxy"]
    pub struct _UgW<'a> {
        register: &'a mut W,
    }
    impl<'a> _UgW<'a> {
        #[doc = r" Writes raw `bits` to the field"]
        pub unsafe fn bits(self, bits: u8) -> &'a mut W {
            const MASK: u8 = 1;
            const OFFSET: u8 = 0;
            self.register.bits &= !((MASK as u32) << OFFSET);
            self.register.bits |= ((bits & MASK) as u32) << OFFSET;
            self.register
        }
    }
    impl W {
        #[doc = r" Reset value of the register"]
        pub fn reset_value() -> W {
            W { bits: 0 }
        }
        #[doc = r" Writes raw `bits` to the register"]
        pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
            self.bits = bits;
            self
        }
        #[doc = "Bit 0 - Update generation"]
        pub fn ug(&mut self) -> _UgW {
            _UgW { register: self }
        }
    }
}
// ----- CNT: 16-bit counter value in the low half of the register -----
#[doc = "counter"]
#[repr(C)]
pub struct Cnt {
    register: ::volatile_register::RW<u32>,
}
#[doc = "counter"]
pub mod cnt {
    #[doc = r" Value read from the register"]
    pub struct R {
        bits: u32,
    }
    #[doc = r" Value to write to the register"]
    pub struct W {
        bits: u32,
    }
    impl super::Cnt {
        #[doc = r" Modifies the contents of the register"]
        pub fn modify<F>(&mut self, f: F)
            where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W
        {
            let bits = self.register.read();
            let r = R { bits: bits };
            let mut w = W { bits: bits };
            f(&r, &mut w);
            self.register.write(w.bits);
        }
        #[doc = r" Reads the contents of the register"]
        pub fn read(&self) -> R {
            R { bits: self.register.read() }
        }
        #[doc = r" Writes to the register"]
        pub fn write<F>(&mut self, f: F)
            where F: FnOnce(&mut W) -> &mut W
        {
            let mut w = W::reset_value();
            f(&mut w);
            self.register.write(w.bits);
        }
    }
    #[doc = "Value of the field CNT"]
    pub struct CntR {
        bits: u16,
    }
    impl CntR {
        #[doc = r" Value of the field as raw bits"]
        pub fn bits(&self) -> u16 {
            self.bits
        }
    }
    #[doc = r" Proxy"]
    pub struct _CntW<'a> {
        register: &'a mut W,
    }
    impl<'a> _CntW<'a> {
        #[doc = r" Writes raw `bits` to the field"]
        pub unsafe fn bits(self, bits: u16) -> &'a mut W {
            const MASK: u16 = 65535;
            const OFFSET: u8 = 0;
            self.register.bits &= !((MASK as u32) << OFFSET);
            self.register.bits |= ((bits & MASK) as u32) << OFFSET;
            self.register
        }
    }
    impl R {
        #[doc = r" Value of the register as raw bits"]
        pub fn bits(&self) -> u32 {
            self.bits
        }
        fn _cnt(&self) -> u16 {
            const MASK: u16 = 65535;
            const OFFSET: u8 = 0;
            ((self.bits >> OFFSET) & MASK as u32) as u16
        }
        #[doc = "Bits 0:15 - Low counter value"]
        pub fn cnt(&self) -> CntR {
            CntR { bits: self._cnt() }
        }
    }
    impl W {
        #[doc = r" Reset value of the register"]
        pub fn reset_value() -> W {
            W { bits: 0 }
        }
        #[doc = r" Writes raw `bits` to the register"]
        pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
            self.bits = bits;
            self
        }
        #[doc = "Bits 0:15 - Low counter value"]
        pub fn cnt(&mut self) -> _CntW {
            _CntW { register: self }
        }
    }
}
// ----- PSC: 16-bit prescaler -----
#[doc = "prescaler"]
#[repr(C)]
pub struct Psc {
    register: ::volatile_register::RW<u32>,
}
#[doc = "prescaler"]
pub mod psc {
    #[doc = r" Value read from the register"]
    pub struct R {
        bits: u32,
    }
    #[doc = r" Value to write to the register"]
    pub struct W {
        bits: u32,
    }
    impl super::Psc {
        #[doc = r" Modifies the contents of the register"]
        pub fn modify<F>(&mut self, f: F)
            where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W
        {
            let bits = self.register.read();
            let r = R { bits: bits };
            let mut w = W { bits: bits };
            f(&r, &mut w);
            self.register.write(w.bits);
        }
        #[doc = r" Reads the contents of the register"]
        pub fn read(&self) -> R {
            R { bits: self.register.read() }
        }
        #[doc = r" Writes to the register"]
        pub fn write<F>(&mut self, f: F)
            where F: FnOnce(&mut W) -> &mut W
        {
            let mut w = W::reset_value();
            f(&mut w);
            self.register.write(w.bits);
        }
    }
    #[doc = "Value of the field PSC"]
    pub struct PscR {
        bits: u16,
    }
    impl PscR {
        #[doc = r" Value of the field as raw bits"]
        pub fn bits(&self) -> u16 {
            self.bits
        }
    }
    #[doc = r" Proxy"]
    pub struct _PscW<'a> {
        register: &'a mut W,
    }
    impl<'a> _PscW<'a> {
        #[doc = r" Writes raw `bits` to the field"]
        pub unsafe fn bits(self, bits: u16) -> &'a mut W {
            const MASK: u16 = 65535;
            const OFFSET: u8 = 0;
            self.register.bits &= !((MASK as u32) << OFFSET);
            self.register.bits |= ((bits & MASK) as u32) << OFFSET;
            self.register
        }
    }
    impl R {
        #[doc = r" Value of the register as raw bits"]
        pub fn bits(&self) -> u32 {
            self.bits
        }
        fn _psc(&self) -> u16 {
            const MASK: u16 = 65535;
            const OFFSET: u8 = 0;
            ((self.bits >> OFFSET) & MASK as u32) as u16
        }
        #[doc = "Bits 0:15 - Prescaler value"]
        pub fn psc(&self) -> PscR {
            PscR { bits: self._psc() }
        }
    }
    impl W {
        #[doc = r" Reset value of the register"]
        pub fn reset_value() -> W {
            W { bits: 0 }
        }
        #[doc = r" Writes raw `bits` to the register"]
        pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
            self.bits = bits;
            self
        }
        #[doc = "Bits 0:15 - Prescaler value"]
        pub fn psc(&mut self) -> _PscW {
            _PscW { register: self }
        }
    }
}
// ----- ARR: 16-bit auto-reload value -----
#[doc = "auto-reload register"]
#[repr(C)]
pub struct Arr {
    register: ::volatile_register::RW<u32>,
}
#[doc = "auto-reload register"]
pub mod arr {
    #[doc = r" Value read from the register"]
    pub struct R {
        bits: u32,
    }
    #[doc = r" Value to write to the register"]
    pub struct W {
        bits: u32,
    }
    impl super::Arr {
        #[doc = r" Modifies the contents of the register"]
        pub fn modify<F>(&mut self, f: F)
            where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W
        {
            let bits = self.register.read();
            let r = R { bits: bits };
            let mut w = W { bits: bits };
            f(&r, &mut w);
            self.register.write(w.bits);
        }
        #[doc = r" Reads the contents of the register"]
        pub fn read(&self) -> R {
            R { bits: self.register.read() }
        }
        #[doc = r" Writes to the register"]
        pub fn write<F>(&mut self, f: F)
            where F: FnOnce(&mut W) -> &mut W
        {
            let mut w = W::reset_value();
            f(&mut w);
            self.register.write(w.bits);
        }
    }
    #[doc = "Value of the field ARR"]
    pub struct ArrR {
        bits: u16,
    }
    impl ArrR {
        #[doc = r" Value of the field as raw bits"]
        pub fn bits(&self) -> u16 {
            self.bits
        }
    }
    #[doc = r" Proxy"]
    pub struct _ArrW<'a> {
        register: &'a mut W,
    }
    impl<'a> _ArrW<'a> {
        #[doc = r" Writes raw `bits` to the field"]
        pub unsafe fn bits(self, bits: u16) -> &'a mut W {
            const MASK: u16 = 65535;
            const OFFSET: u8 = 0;
            self.register.bits &= !((MASK as u32) << OFFSET);
            self.register.bits |= ((bits & MASK) as u32) << OFFSET;
            self.register
        }
    }
    impl R {
        #[doc = r" Value of the register as raw bits"]
        pub fn bits(&self) -> u32 {
            self.bits
        }
        fn _arr(&self) -> u16 {
            const MASK: u16 = 65535;
            const OFFSET: u8 = 0;
            ((self.bits >> OFFSET) & MASK as u32) as u16
        }
        #[doc = "Bits 0:15 - Low Auto-reload value"]
        pub fn arr(&self) -> ArrR {
            ArrR { bits: self._arr() }
        }
    }
    impl W {
        #[doc = r" Reset value of the register"]
        pub fn reset_value() -> W {
            W { bits: 0 }
        }
        #[doc = r" Writes raw `bits` to the register"]
        pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
            self.bits = bits;
            self
        }
        #[doc = "Bits 0:15 - Low Auto-reload value"]
        pub fn arr(&mut self) -> _ArrW {
            _ArrW { register: self }
        }
    }
}
//! Copyright 2017 Robert L Snyder, Ithaca, NY <zoltatech@gmail.com> <robscary@gmail.com> //! //! Licensed under the Apache License, Version 2.0 (the "License"); //! you may not use this file except in compliance with the License. //! You may obtain a copy of the License at //! //! http://www.apache.org/licenses/LICENSE-2.0 //! Unless required by applicable law or agreed to in writing, software //! distributed under the License is distributed on an "AS IS" BASIS, //! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. //! See the License for the specific language governing permissions and //! limitations under the License. mod structures; use structures::Message; use structures::Endpoint; use std::string; fn main() { let sender = Endpoint::new(string::String::from("Banana")); let recipient_list = vec![Endpoint::new(string::String::from("Monkey"))]; let message: Message = Message::new(sender, recipient_list); }
mod config; mod error; #[macro_use] extern crate lazy_static; use anyhow::Result; use config::config; pub use error::Error; use serde_json::{Map, Value}; use std::collections::HashMap; pub async fn verify(code: String) -> Result<GoogleProfile, Error> { let body = make_token_request_body(code); let obj = token_request(body).await?; let payload = get_payload(&obj)?; make_google_profile(&payload) } fn make_google_profile(payload: &Map<String, Value>) -> Result<GoogleProfile, Error> { let user_id = payload .get(&"sub".to_string()) .ok_or_else(|| Error::GoogleResponseIsInvalid("subject is not found in payload".into()))? .to_string() .replace('\"', ""); let email = payload .get(&"email".to_string()) .ok_or_else(|| Error::GoogleResponseIsInvalid("email is not found in payload".into()))? .to_string() .replace('\"', ""); let name = "default_name".to_string(); log::info!("{} {}", user_id, email); Ok(GoogleProfile { user_id, email, name, }) } fn make_token_request_body(code: String) -> HashMap<&'static str, String> { let mut body = HashMap::new(); body.insert("client_id", config().google_oauth_client_id); body.insert("client_secret", config().google_oauth_client_secret); body.insert("redirect_uri", config().google_oauth_redirect_uri); body.insert("code", code); body.insert("grant_type", "authorization_code".to_string()); body } async fn token_request(body: HashMap<&str, String>) -> Result<Map<String, Value>, Error> { let res = reqwest::Client::new() .post("https://accounts.google.com/o/oauth2/token") .json(&body) .send() .await .map_err(Error::ReqwestError)?; let body = res.text().await.map_err(Error::ReqwestError)?; let json: serde_json::Value = serde_json::from_str(&body).map_err(Error::SerdeJsonError)?; Ok(json.as_object().unwrap().clone()) } fn get_payload(obj: &Map<String, Value>) -> Result<Map<String, Value>, Error> { let token = &obj .get(&"id_token".to_string()) .ok_or_else(|| Error::GoogleResponseIsInvalid("id_token is not found".into()))? 
.to_string() .replace('\"', "") .replace(',', ""); let mut segments = token.split('.'); let _encoded_header = segments .next() .ok_or_else(|| Error::GoogleResponseIsInvalid("could not get first segment".into()))?; let encoded_payload = segments .next() .ok_or_else(|| Error::GoogleResponseIsInvalid("could not get second segment".into()))?; let payload_string = String::from_utf8( base64::decode_config(&encoded_payload, base64::URL_SAFE_NO_PAD).map_err(|_| { Error::GoogleResponseIsInvalid("payload is not encoded in base64 ".into()) })?, ) .map_err(Error::FromUtf8Error)?; let payload_json: serde_json::Value = serde_json::from_str::<serde_json::Value>(&payload_string) .map_err(Error::SerdeJsonError)?; Ok(payload_json .as_object() .ok_or_else(|| Error::GoogleResponseIsInvalid("second segment is invalid".into()))? .clone()) } #[derive(Clone, Debug)] pub struct GoogleProfile { pub user_id: String, pub email: String, pub name: String, } pub trait RegisterUser { fn register(&mut self, profile: &GoogleProfile) -> Result<()>; }
//! Rewards module.
use std::convert::{Infallible, TryFrom, TryInto};

use num_traits::Zero;
use once_cell::sync::Lazy;
use thiserror::Error;

use crate::{
    context::Context,
    core::consensus::beacon,
    error,
    module::{self, Module as _, Parameters as _},
    modules, storage,
    types::address::{Address, SignatureAddressSpec},
};

#[cfg(test)]
mod test;
pub mod types;

/// Unique module name.
const MODULE_NAME: &str = "rewards";

/// Errors emitted by the rewards module.
#[derive(Error, Debug, oasis_runtime_sdk_macros::Error)]
pub enum Error {
    #[error("invalid argument")]
    #[sdk_error(code = 1)]
    InvalidArgument,
}

/// Parameters for the rewards module.
#[derive(Clone, Debug, Default, cbor::Encode, cbor::Decode)]
pub struct Parameters {
    // Per-epoch reward schedule.
    pub schedule: types::RewardSchedule,
    // An entity must participate in at least numerator/denominator of an
    // epoch's rounds to receive a reward (see `for_disbursement` below).
    pub participation_threshold_numerator: u64,
    pub participation_threshold_denominator: u64,
}

/// Errors emitted during rewards parameter validation.
#[derive(Error, Debug)]
pub enum ParameterValidationError {
    #[error("invalid participation threshold (numerator > denominator)")]
    InvalidParticipationThreshold,
    #[error("invalid schedule")]
    InvalidSchedule(#[from] types::RewardScheduleError),
}

impl module::Parameters for Parameters {
    type Error = ParameterValidationError;

    fn validate_basic(&self) -> Result<(), Self::Error> {
        self.schedule.validate_basic()?;
        // The threshold must be a valid fraction: numerator <= denominator
        // and denominator non-zero.
        if self.participation_threshold_numerator > self.participation_threshold_denominator {
            return Err(ParameterValidationError::InvalidParticipationThreshold);
        }
        if self.participation_threshold_denominator.is_zero() {
            return Err(ParameterValidationError::InvalidParticipationThreshold);
        }
        Ok(())
    }
}

/// Genesis state for the rewards module.
#[derive(Clone, Debug, Default, cbor::Encode, cbor::Decode)]
pub struct Genesis {
    pub parameters: Parameters,
}

/// State schema constants.
pub mod state {
    /// Last epoch that we processed.
    pub const LAST_EPOCH: &[u8] = &[0x01];
    /// Map of epochs to rewards pending distribution.
    pub const REWARDS: &[u8] = &[0x02];
}

pub struct Module<Accounts: modules::accounts::API> {
    // Zero-sized marker tying the module to its accounts implementation.
    _accounts: std::marker::PhantomData<Accounts>,
}

/// Module's address that has the reward pool.
pub static ADDRESS_REWARD_POOL: Lazy<Address> =
    Lazy::new(|| Address::from_module(MODULE_NAME, "reward-pool"));

impl<Accounts: modules::accounts::API> Module<Accounts> {
    /// Query handler returning the module's current parameters.
    fn query_parameters<C: Context>(ctx: &mut C, _args: ()) -> Result<Parameters, Error> {
        Ok(Self::params(ctx.runtime_state()))
    }
}

impl<Accounts: modules::accounts::API> module::Module for Module<Accounts> {
    const NAME: &'static str = MODULE_NAME;
    type Error = Infallible;
    type Event = ();
    type Parameters = Parameters;
}

impl<Accounts: modules::accounts::API> module::MethodHandler for Module<Accounts> {
    fn dispatch_query<C: Context>(
        ctx: &mut C,
        method: &str,
        args: cbor::Value,
    ) -> module::DispatchResult<cbor::Value, Result<cbor::Value, error::RuntimeError>> {
        match method {
            "rewards.Parameters" => module::dispatch_query(ctx, args, Self::query_parameters),
            _ => module::DispatchResult::Unhandled(args),
        }
    }
}

impl<Accounts: modules::accounts::API> Module<Accounts> {
    /// Initialize state from genesis.
    fn init<C: Context>(ctx: &mut C, genesis: Genesis) {
        genesis
            .parameters
            .validate_basic()
            .expect("invalid genesis parameters");

        // Set genesis parameters.
        Self::set_params(ctx.runtime_state(), genesis.parameters);
    }

    /// Migrate state from a previous version.
    fn migrate<C: Context>(_ctx: &mut C, _from: u32) -> bool {
        // No migrations currently supported.
        false
    }
}

impl<Accounts: modules::accounts::API> module::MigrationHandler for Module<Accounts> {
    type Genesis = Genesis;

    fn init_or_migrate<C: Context>(
        ctx: &mut C,
        meta: &mut modules::core::types::Metadata,
        genesis: Self::Genesis,
    ) -> bool {
        let version = meta.versions.get(Self::NAME).copied().unwrap_or_default();
        if version == 0 {
            // Initialize state from genesis.
            Self::init(ctx, genesis);
            meta.versions.insert(Self::NAME.to_owned(), Self::VERSION);
            return true;
        }

        // Perform migration.
        Self::migrate(ctx, version)
    }
}

impl<Accounts: modules::accounts::API> module::AuthHandler for Module<Accounts> {}

impl<Accounts: modules::accounts::API> module::BlockHandler for Module<Accounts> {
    fn end_block<C: Context>(ctx: &mut C) {
        let epoch = ctx.epoch();

        // Load previous epoch.
        let mut store = storage::PrefixStore::new(ctx.runtime_state(), &MODULE_NAME);
        let mut tstore = storage::TypedStore::new(&mut store);
        let previous_epoch: beacon::EpochTime = tstore.get(&state::LAST_EPOCH).unwrap_or_default();
        tstore.insert(&state::LAST_EPOCH, &epoch);

        // Load rewards accumulator for the current epoch.
        let epochs = storage::TypedStore::new(storage::PrefixStore::new(store, &state::REWARDS));
        let mut rewards: types::EpochRewards =
            epochs.get(epoch.to_storage_key()).unwrap_or_default();

        // Reward each good entity.
        for entity_id in &ctx.runtime_round_results().good_compute_entities {
            let address = Address::from_sigspec(&SignatureAddressSpec::Ed25519(entity_id.into()));
            rewards.pending.entry(address).or_default().increment();
        }

        // Punish each bad entity by forbidding rewards for this epoch.
        for entity_id in &ctx.runtime_round_results().bad_compute_entities {
            let address = Address::from_sigspec(&SignatureAddressSpec::Ed25519(entity_id.into()));
            rewards.pending.entry(address).or_default().forbid();
        }

        // Disburse any rewards for previous epochs when the epoch changes.
        if epoch != previous_epoch {
            let params = Self::params(ctx.runtime_state());

            let store = storage::PrefixStore::new(ctx.runtime_state(), &MODULE_NAME);
            let mut epochs =
                storage::TypedStore::new(storage::PrefixStore::new(store, &state::REWARDS));
            // Collect first: we mutate the store (remove) while processing.
            let epoch_rewards: Vec<(DecodableEpochTime, types::EpochRewards)> =
                epochs.iter().collect();

            // Remove all epochs that we will process.
            for (epoch, _) in &epoch_rewards {
                epochs.remove(epoch.0.to_storage_key());
            }

            // Process accumulated rewards for previous epochs.
            'epochs: for (epoch, rewards) in epoch_rewards {
                let epoch = epoch.0;

                // Fetch reward schedule for the given epoch.
                let reward = params.schedule.for_epoch(epoch);
                if reward.amount().is_zero() {
                    continue;
                }

                // Disburse rewards.
                for address in rewards.for_disbursement(
                    params.participation_threshold_numerator,
                    params.participation_threshold_denominator,
                ) {
                    match Accounts::transfer(ctx, *ADDRESS_REWARD_POOL, address, &reward) {
                        Ok(_) => {}
                        Err(modules::accounts::Error::InsufficientBalance) => {
                            // Since rewards are the same for the whole epoch, if there is not
                            // enough in the pool, just continue with the next epoch which may
                            // specify a lower amount or a different denomination.
                            continue 'epochs;
                        }
                        Err(err) => panic!("failed to disburse rewards: {:?}", err),
                    }
                }
            }
        }

        // Update rewards for current epoch.
        let store = storage::PrefixStore::new(ctx.runtime_state(), &MODULE_NAME);
        let mut epochs =
            storage::TypedStore::new(storage::PrefixStore::new(store, &state::REWARDS));
        epochs.insert(epoch.to_storage_key(), rewards);
    }
}

impl<Accounts: modules::accounts::API> module::InvariantHandler for Module<Accounts> {}

/// A trait that exists solely to convert `beacon::EpochTime` to bytes for use as a storage key.
trait ToStorageKey {
    fn to_storage_key(&self) -> [u8; 8];
}

impl ToStorageKey for beacon::EpochTime {
    fn to_storage_key(&self) -> [u8; 8] {
        // Big-endian keeps storage keys ordered by epoch.
        self.to_be_bytes()
    }
}

/// A struct that exists solely to decode `beacon::EpochTime` previously encoded via `ToStorageKey`.
struct DecodableEpochTime(beacon::EpochTime);

impl TryFrom<&[u8]> for DecodableEpochTime {
    type Error = std::array::TryFromSliceError;

    fn try_from(value: &[u8]) -> Result<Self, Self::Error> {
        Ok(DecodableEpochTime(beacon::EpochTime::from_be_bytes(
            value.try_into()?,
        )))
    }
}
//! Structure management components and systems use bevy::prelude::*; use crate::core::Position; #[derive(Default, Debug, Clone)] pub struct Structure { max_hp: u16, hp: u16, attackable: bool, ar: u8, } impl Structure { pub fn player_base() -> Structure { Structure { max_hp: 1000, hp: 1000, attackable: true, ar: 125, } } } #[derive(Default, Debug, Clone, Bundle)] pub struct PlayerBaseBundle { pub structure: Structure, pub position: Position, }
use std::mem;

/// A singly-linked list with owned (boxed) nodes.
pub struct List<T> {
    head: Link<T>,
}

/// A link is either empty or an owning pointer to the next node.
type Link<T> = Option<Box<Node<T>>>;

struct Node<T> {
    elem: T,
    next: Link<T>,
}

// The three standard ways to create an iterator from a collection:
//   iter()      -> yields &T
//   iter_mut()  -> yields &mut T
//   into_iter() -> yields T (consumes the collection)

/// Owning iterator: a trivial tuple-struct wrapper around the list itself.
pub struct IntoIter<T>(List<T>);

impl<T> List<T> {
    /// Consumes the list, returning an iterator over owned elements
    /// (front first, i.e. most-recently-pushed first).
    pub fn into_iter(self) -> IntoIter<T> {
        IntoIter(self)
    }
}

impl<T> Iterator for IntoIter<T> {
    type Item = T;

    fn next(&mut self) -> Option<Self::Item> {
        // Popping from the front yields elements in push (LIFO) order.
        self.0.pop()
    }
}

/// Shared-reference iterator: holds a pointer to the node to yield next,
/// `None` once the list is exhausted.
pub struct Iter<'a, T> {
    next: Option<&'a Node<T>>,
}

impl<T> List<T> {
    /// Returns an iterator over shared references to the elements.
    ///
    /// `&**node` converts `&Box<Node<T>>` to `&Node<T>` explicitly. Deref
    /// coercion cannot kick in on its own here because `Option::map`
    /// (`fn map<U, F>(self, f: F) -> Option<U>`) is generic in its return
    /// type; the turbofish form `map::<&Node<T>, _>(|node| &node)` is an
    /// equivalent alternative. All lifetimes are elided (Rust 2018 `'_`).
    pub fn iter(&self) -> Iter<'_, T> {
        Iter {
            next: self.head.as_ref().map(|node| &**node),
        }
    }
}

impl<'a, T> Iterator for Iter<'a, T> {
    type Item = &'a T;

    // Note there is no constraint between the `&mut self` borrow and the
    // `&'a T` output, so `next` can be called repeatedly while earlier
    // returned references are still alive — fine for shared references.
    fn next(&mut self) -> Option<Self::Item> {
        self.next.map(|node| {
            self.next = node.next.as_ref().map(|node| &**node);
            &node.elem
        })
    }
}

/// Mutable-reference iterator.
pub struct IterMut<'a, T> {
    next: Option<&'a mut Node<T>>,
}

impl<T> List<T> {
    /// Returns an iterator over mutable references to the elements.
    pub fn iter_mut(&mut self) -> IterMut<'_, T> {
        IterMut {
            next: self.head.as_mut().map(|node| &mut **node),
        }
    }
}

impl<'a, T> Iterator for IterMut<'a, T> {
    type Item = &'a mut T;

    // `&mut` is not `Copy` (two copies would alias the same location), so we
    // must `take()` the option to get exclusive ownership of the reference.
    // Handing out `&mut` to disjoint subfields of the pointee is sound:
    // there is no way to "go back up" the structure, so they never overlap.
    fn next(&mut self) -> Option<Self::Item> {
        self.next.take().map(|node| {
            self.next = node.next.as_mut().map(|node| &mut **node);
            &mut node.elem
        })
    }
}

impl<T> List<T> {
    /// Creates an empty list. `T` is inferred from the return type.
    pub fn new() -> Self {
        List { head: None }
    }

    /// Pushes an element onto the front of the list.
    pub fn push(&mut self, elem: T) {
        let new_node = Box::new(Node {
            elem,
            // `take()` is the Option-specific form of the common
            // `mem::replace(&mut self.head, None)` idiom.
            next: self.head.take(),
        });
        self.head = Some(new_node);
    }

    /// Removes and returns the front element, or `None` if the list is empty.
    /// (`Option::map` replaces the common `match option { None => None,
    /// Some(x) => Some(y) }` idiom, with a closure supplying `y`.)
    pub fn pop(&mut self) -> Option<T> {
        self.head.take().map(|node| {
            self.head = node.next;
            node.elem
        })
    }

    /// Returns a shared reference to the front element, if any.
    pub fn peek(&self) -> Option<&T> {
        self.head.as_ref().map(|node| &node.elem)
    }

    /// Returns a mutable reference to the front element, if any.
    pub fn peek_mut(&mut self) -> Option<&mut T> {
        self.head.as_mut().map(|node| &mut node.elem)
    }
}

impl<T> Drop for List<T> {
    fn drop(&mut self) {
        // Unlink iteratively so dropping a long list cannot blow the stack
        // with recursive `Box` drops.
        let mut cur_link = self.head.take();
        // `while let` == "do this thing until this pattern doesn't match".
        while let Some(mut boxed_node) = cur_link {
            cur_link = boxed_node.next.take();
        }
    }
}

#[cfg(test)]
mod tests {
    use super::List;

    #[test]
    fn it_works() {
        assert_eq!(2 + 2, 4);
    }

    #[test]
    fn basics() {
        let mut list = List::new();

        // Check empty list behaves right
        assert_eq!(list.pop(), None);

        // Populate list
        list.push(1);
        list.push(2);
        list.push(3);

        // Check normal removal
        assert_eq!(list.pop(), Some(3));
        assert_eq!(list.pop(), Some(2));

        // Push some more just to make sure nothing's corrupted
        list.push(4);
        list.push(5);

        // Check normal removal
        assert_eq!(list.pop(), Some(5));
        assert_eq!(list.pop(), Some(4));

        // Check exhaustion
        assert_eq!(list.pop(), Some(1));
        assert_eq!(list.pop(), None);
    }

    #[test]
    fn peek() {
        let mut list = List::new();
        assert_eq!(list.peek(), None);
        assert_eq!(list.peek_mut(), None);
        list.push(1);
        list.push(2);
        list.push(3);

        assert_eq!(list.peek(), Some(&3));
        assert_eq!(list.peek_mut(), Some(&mut 3));

        // `|value|` binds `value: &mut i32`, so we can mutate through it.
        // (A `|&mut value|` pattern would instead *copy* the pointee out,
        // making `value = 42` useless.)
        list.peek_mut().map(|value| *value = 42);

        assert_eq!(list.peek(), Some(&42));
        assert_eq!(list.pop(), Some(42));
    }

    #[test]
    fn into_iter() {
        let mut list = List::new();
        list.push(1);
        list.push(2);
        list.push(3);

        let mut iter = list.into_iter();
        assert_eq!(iter.next(), Some(3));
        assert_eq!(iter.next(), Some(2));
        assert_eq!(iter.next(), Some(1));
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn iter() {
        let mut list = List::new();
        list.push(1);
        list.push(2);
        list.push(3);

        let mut iter = list.iter();
        assert_eq!(iter.next(), Some(&3));
        assert_eq!(iter.next(), Some(&2));
        assert_eq!(iter.next(), Some(&1));
    }

    #[test]
    fn iter_mut() {
        let mut list = List::new();
        list.push(1);
        list.push(2);
        list.push(3);

        let mut iter = list.iter_mut();
        assert_eq!(iter.next(), Some(&mut 3));
        assert_eq!(iter.next(), Some(&mut 2));
        assert_eq!(iter.next(), Some(&mut 1));
    }
}
#![cfg_attr(not(feature = "std"), no_std)]

use ink_lang as ink;

pub use self::nfticket::NfticketTrait;
pub use nftmart_contract::*;

#[ink::contract(env = CustomEnvironment)]
mod nfticket {
    use super::*;
    use primitives::{Meeting, NFTicketError};
    use ink_prelude::string::String;

    /// Contract storage. All message bodies below are stubs (`unimplemented!()`).
    #[ink(storage)]
    pub struct NfticketTrait {
        min_create_fee: Balance, // Minimum fee to create a meeting (class deposit plus service fee).
        min_ticket_fee: Balance, // Minimum fee to create a ticket (NFT deposit plus service fee).
    }

    impl NfticketTrait {
        /// Constructor (stub).
        #[ink(constructor)]
        pub fn new() -> Self {
            unimplemented!()
        }

        /// Registers a meeting. Only callable by the template contract; the
        /// meeting contract's address must be supplied.
        #[ink(message, payable)]
        pub fn add_meeting(
            &mut self,
            meeting_addr: AccountId,
            name: String,
            desc: String,
            poster: String,
            uri: String,
            start_time: u64,
            end_time: u64,
            start_sale_time: u64,
            end_sale_time: u64,
        ) -> Result<(), NFTicketError >{
            unimplemented!()
        }

        // Creates a ticket; returns the ticket NFT's (class_id, token_id).
        #[ink(message, payable)]
        pub fn create_ticket(&mut self, buyer: AccountId, metadata: String) -> Result<(u32, u64), NFTicketError> {
            unimplemented!()
        }

        /// Looks up the meeting registered under `meeting_addr`.
        #[ink(message)]
        pub fn get_meeting(&self, meeting_addr: AccountId) -> Meeting{
            unimplemented!()
        }

        /// Returns the minimum fee required to create a meeting.
        #[ink(message)]
        pub fn get_min_create_meeting_fee(&self) -> Balance{
            unimplemented!()
        }

        /// Returns the minimum fee required to create a ticket.
        #[ink(message,)]
        pub fn get_min_create_ticket_fee(&self) -> Balance{
            unimplemented!()
        }

        // NOTE(review): presumably fetches on-chain randomness from the custom
        // environment — confirm against CustomEnvironment's chain extension.
        #[ink(message)]
        pub fn fetch_random( &self ) -> [u8; 32] {
            unimplemented!()
        }

        /// Creates an NFT class and returns its identifier.
        /// (`creater` [sic] is the account the class is created for.)
        #[ink(message)]
        pub fn create_class(
            &mut self,
            creater: AccountId,
            metadata: Metadata,
            name: Chars,
            description: Chars,
            properties: u8,
        ) -> u128 {
            unimplemented!()
        }
    }
}
extern crate dns_parser;
#[macro_use]
extern crate log;
extern crate net2;
extern crate mio;
extern crate rotor;
extern crate libc;
extern crate rand;
extern crate multimap;
extern crate nix;
extern crate byteorder;

mod fsm;
use fsm::{AddressFamily, FSM, Command, DEFAULT_TTL};

mod services;
use services::{ServicesInner, Services, ServiceData};

mod net;

use std::sync::mpsc::Sender;
use std::io;
use std::thread;
use dns_parser::Name;
use std::sync::{Arc, Mutex, RwLock};

/// An mDNS responder running on a dedicated background thread.
///
/// One FSM per address family (IPv4 and IPv6) runs inside a rotor event loop;
/// commands are delivered over per-FSM channels paired with loop notifiers.
pub struct Responder {
    handle: Option<thread::JoinHandle<()>>, // join handle of the event-loop thread
    services: Services,                     // shared registry of advertised services
    txs_notifiers: Vec<(Sender<Command>, rotor::Notifier)>, // one (tx, wakeup) pair per FSM
}

/// Handle for a registered service; unregisters itself (and announces a
/// TTL-0 "goodbye") on drop.
pub struct Service<'a> {
    responder: &'a Responder,
    id: usize,
}

impl Responder {
    /// Builds the event loop with one FSM per address family and spawns the
    /// responder thread. (`try!` is the pre-`?` error-propagation macro.)
    pub fn new() -> io::Result<Responder> {
        // Collected from inside the loop-machine closures; unwrapped into a
        // plain Vec once both machines have been added.
        let txs_notifiers = Arc::new(Mutex::new(Vec::with_capacity(2)));

        let mut hostname = try!(net::gethostname());
        if !hostname.ends_with(".local") {
            hostname.push_str(".local");
        }

        let services = Arc::new(RwLock::new(ServicesInner::new(hostname)));

        let mut config = rotor::Config::new();
        config.slab_capacity(32);
        config.mio().notify_capacity(32);
        let mut loop_ = rotor::Loop::new(&config).unwrap();
        {
            // IPv4 state machine.
            let (fsm, tx) = try!(FSM::new(AddressFamily::Inet, &services));
            let txs_notifiers = txs_notifiers.clone();
            loop_.add_machine_with(move |scope| {
                fsm.register(scope).unwrap();
                txs_notifiers.lock().unwrap()
                    .push((tx, scope.notifier()));
                rotor::Response::ok(fsm)
            }).unwrap();
        }

        {
            // IPv6 state machine.
            let (fsm, tx) = try!(FSM::new(AddressFamily::Inet6, &services));
            let txs_notifiers = txs_notifiers.clone();
            loop_.add_machine_with(move |scope| {
                fsm.register(scope).unwrap();
                txs_notifiers.lock().unwrap()
                    .push((tx, scope.notifier()));
                rotor::Response::ok(fsm)
            }).unwrap();
        }

        let handle = try!(thread::Builder::new().name("mdns-responder".to_owned()).spawn(move || {
            loop_.run(()).unwrap();
        }));

        Ok(Responder {
            handle: Some(handle),
            services: services,
            // Both closures above have run by now, so this Arc is unique again.
            txs_notifiers: Arc::try_unwrap(txs_notifiers).unwrap()
                .into_inner().unwrap()
        })
    }

    /// Registers a service and immediately announces it.
    ///
    /// # Panics
    ///
    /// Panics if any TXT entry is longer than 255 bytes.
    pub fn register(&self, svc_type: String, svc_name: String, port: u16, txt: &[&str]) -> Service {
        // Encode TXT entries as length-prefixed byte strings.
        // NOTE(review): an empty list is encoded as a single zero byte —
        // presumably to satisfy the TXT record format; confirm against RFC 6763.
        let txt = if txt.is_empty() {
            vec![0]
        } else {
            txt.into_iter().flat_map(|entry| {
                let entry = entry.as_bytes();
                if entry.len() > 255 {
                    panic!("{:?} is too long for a TXT record", entry);
                }
                std::iter::once(entry.len() as u8).chain(entry.into_iter().cloned())
            }).collect()
        };

        let svc = ServiceData {
            typ: Name::from_str(format!("{}.local", svc_type)).unwrap(),
            name: Name::from_str(format!("{}.{}.local", svc_name, svc_type)).unwrap(),
            port: port,
            txt: txt,
        };

        // Announce before inserting into the registry.
        self.send_unsolicited(svc.clone(), DEFAULT_TTL, true);

        let id = self.services
            .write().unwrap()
            .register(svc);

        Service {
            responder: self,
            id: id,
        }
    }

    // Broadcasts an unsolicited announcement for `svc` with the given TTL.
    fn send_unsolicited(&self, svc: ServiceData, ttl: u32, include_ip: bool) {
        self.send(Command::SendUnsolicited {
            svc: svc,
            ttl: ttl,
            include_ip: include_ip,
        });
    }

    // Sends `cmd` to every FSM and wakes its event loop so it is processed.
    fn send(&self, cmd: Command) {
        for &(ref tx, ref notifier) in self.txs_notifiers.iter() {
            tx.send(cmd.clone()).expect("responder died");
            notifier.wakeup().unwrap();
        }
    }
}

impl Drop for Responder {
    fn drop(&mut self) {
        self.send(Command::Shutdown);
        // Best-effort join; a panic on the responder thread is ignored here.
        self.handle.take().map(|h| h.join());
    }
}

impl <'a> Drop for Service<'a> {
    fn drop(&mut self) {
        let svc = self.responder.services
            .write().unwrap()
            .unregister(self.id);
        // TTL 0 tells peers the service is gone ("goodbye" announcement).
        self.responder.send_unsolicited(svc, 0, false);
    }
}
// Submodule declarations plus flattening re-exports: consumers of this crate
// can use all items from a single namespace instead of per-module paths.

mod body;
mod errors;
mod request;
mod response;
mod relay;
mod utils;

pub use body::*;
pub use errors::*;
pub use request::*;
pub use response::*;
pub use relay::*;
pub use utils::*;
/// LeetCode 1137 — N-th Tribonacci Number.
///
/// The Tribonacci sequence is defined by `T0 = 0`, `T1 = 1`, `T2 = 1` and
/// `T(n+3) = T(n) + T(n+1) + T(n+2)` for `n >= 0`; this returns `T(n)`.
///
/// Constraints: `0 <= n <= 37`, so the answer always fits in an `i32`.
///
/// Examples: `tribonacci(4) == 4`, `tribonacci(25) == 1389537`.
///
/// Source: <https://leetcode-cn.com/problems/n-th-tribonacci-number>
pub fn tribonacci(n: i32) -> i32 {
    // Slide a three-element window (T(k), T(k+1), T(k+2)) forward n times;
    // the window's first component is then T(n).
    (0..n).fold((0, 1, 1), |(a, b, c), _| (b, c, a + b + c)).0
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn test_tribonacci() {
        assert_eq!(tribonacci(4), 4);
        assert_eq!(tribonacci(25), 1389537);
    }
}
//! Provides functionality for extracting data from CSV files, //! along with additional functionality for manipulation and cleansing of extracted data. //! //! This module's functions takes, processes and returns Vectors of Strings or f64(floats). //! This means there is no restriction on manipulation and visability of data for the user. extern crate csv; extern crate regex; fn vec_str_to_float(string_vec: Vec<String>) -> Vec<f64> { // Convert values in vector from strings to floats let num_vec_result: Result<Vec<f64>, _> = string_vec.iter().map(|x| x.parse()).collect(); let num_vec = num_vec_result.expect("There was an error converting row values from string to float"); // Returns vector of numbers num_vec } /// Returns a row of string data from a specified CSV file as a ```Vec<String>```. /// /// ```row_num``` specifies the the row of data to take from the csv file. /// /// ```start``` specifies the position of the first element in the row to be read. /// /// ```end``` specifies the position of the last element in the row to be read. /// /// ```file_name``` specifies the CSV file to read data from. pub fn get_str_row(row_num: usize, start: usize, end: usize, file_name: &str) -> Vec<String> { // Creates a reader for the CSV file specified let mut rdr = csv::Reader::from_file(file_name).unwrap(); // Get Nth line from CSV file let result = rdr.records().nth(row_num); let row = match result.expect("There was an error reading specified row") { Ok(r) => r, Err(error) => panic!("The specified row does not exist: {:?}", error), }; // Takes only elements of rows specified let row = row[start..end].to_vec(); // Returns row of strings row } /// Returns a row of numerical data from a specified CSV file as a ```Vec<f64>```. /// /// ```row_num``` specifies the the row of data to take from the csv file. /// /// ```start``` specifies the position of the first element in the row to be read. /// /// ```end``` specifies the position of the last element in the row to be read. 
/// /// ```file_name``` specifies the CSV file to read data from. pub fn get_num_row(row_num: usize, start: usize, end: usize, file_name: &str) -> Vec<f64> { // Fetch specified row as Vector of strings let string_row = get_str_row(row_num, start, end, file_name); let num_row = vec_str_to_float(string_row); // Returns row of numbers num_row } /// Returns a column of string data from a specified CSV file as a ```Vec<String>```. /// /// ```col_num``` specifies the the column of data to take from the csv file. /// /// ```start``` specifies the position of the first element in the column to be read. /// /// ```end``` specifies the position of the last element in the column to be read. /// /// ```file_name``` specifies the CSV file to read data from. pub fn get_str_col(col_num: usize, start: usize, end: usize, file_name: &str) -> Vec<String> { // Creates a reader for the CSV file specified let mut rdr = csv::Reader::from_file(file_name).unwrap(); // Creates an iterator for all records let full_result_iter = rdr.records(); // Trims iterator leaving records within specified bounds let spec_result_iter = full_result_iter.take(end).skip(start); // Takes the element from each record in iterator and adds them // to a Vector one by one producing column of values specified let mut col = Vec::new(); for result in spec_result_iter { let record = result.expect("There was an error reading a specified row"); let val = record[col_num].clone(); col.push(val); } // Return collmn of values col } /// Returns a column of numerical data from a specified CSV file as a ```Vec<f64>```. /// /// ```col_num``` specifies the the column of data to take from the csv file. /// /// ```start``` specifies the position of the first element in the column to be read. /// /// ```end``` specifies the position of the last element in the column to be read. /// /// ```file_name``` specifies the CSV file to read data from. 
pub fn get_num_col(row_num: usize, start: usize, end: usize, file_name: &str) -> Vec<f64> { // Fetch specified column as Vector of strings let string_col = get_str_col(row_num, start, end, file_name); let num_col = vec_str_to_float(string_col); // Returns column of numbers num_col } /// Returns the headers (top row) of string data from a specified CSV file as a ```Vec<String>```. /// /// ```start``` specifies the position of the first element in the headers to be read. /// /// ```end``` specifies the position of the last element in the headers to be read. /// /// ```file_name``` specifies the CSV file to read data from. pub fn get_headers(start: usize, end: usize, file_name: &str) -> Vec<String> { // Creates a reader for the CSV file specified let mut rdr = csv::Reader::from_file(file_name).unwrap(); // Get header line from CSV file let result = rdr.headers(); let headers = match result { Ok(r) => r, Err(error) => panic!("Headers do not exist: {:?}", error), }; // Takes only elements of rows specified let headers = headers[start..end].to_vec(); // Returns header of strings headers } /// Performs a specified numerical transformation on each element of a ```Vec<f64>```. /// /// ```vec1``` is the vector to be transformed. /// /// ```trans_func``` is the transformation function applied to each element of the vector. /// /// # Examples /// /// ```should_panic /// use rustplot::data_parser; /// /// let num_row = data_parser::get_num_row(0, 0, 10, "./test.csv"); /// /// fn transform(num1: f64) -> f64 { /// num1 + 10.0 /// } /// let f: fn(f64) -> f64 = transform; /// /// let new_vec = data_parser::vec_num_transform(&num_row, f); /// ``` pub fn vec_num_transform(vec1: &Vec<f64>, trans_func: fn(f64) -> f64) -> Vec<f64> { vec1.iter().map(|x| trans_func(*x)).collect() } /// Transforms all vectors elements to their log base 10 values. /// /// ```vec1``` is the vector to be transformed. 
pub fn vec_log(vec1: &Vec<f64>) -> Vec<f64> { fn log_f(num1: f64) -> f64 { num1.log(10.0) } let f: fn(f64) -> f64 = log_f; vec_num_transform(vec1, f) } /// Transforms all vectors elements to the natural log of their values. /// /// ```vec1``` is the vector to be transformed. pub fn vec_ln(vec1: &Vec<f64>) -> Vec<f64> { fn ln_f(num1: f64) -> f64 { num1.ln() } let f: fn(f64) -> f64 = ln_f; vec_num_transform(vec1, f) } /// Performs a specified numerical transformation on corresponding elements of two ```Vec<f64>```s. /// /// ```vec1``` is the vector to be transformed. /// /// ```trans_func``` is the transformation function applied to each element of both vectors. /// /// # Examples /// /// ```should_panic /// use rustplot::data_parser; /// /// let num_row_1 = data_parser::get_num_row(0, 0, 10, "./test.csv"); /// let num_row_2 = data_parser::get_num_row(1, 0, 10, "./test.csv"); /// /// fn transform(num1: f64, num2: f64) -> f64 { /// (num1 + num2) / 2.0 /// } /// let f: fn(f64, f64) -> f64 = transform; /// /// let new_vec = data_parser::vecs_num_transform(&num_row_1, &num_row_2, f); /// ``` pub fn vecs_num_transform( vec1: &Vec<f64>, vec2: &Vec<f64>, trans_func: fn(f64, f64) -> f64, ) -> Vec<f64> { if vec1.len() != vec2.len() { panic!("Vectors must have the same number of elements"); } let mut res_vec = Vec::new(); for i in 0..vec1.len() { let ele1 = vec1[i]; let ele2 = vec2[i]; let res = trans_func(ele1, ele2); res_vec.push(res); } res_vec } /// Returns a vector with elements with values of the sum of the corresponding elements of the two provided vectors. pub fn vec_add(vec1: &Vec<f64>, vec2: &Vec<f64>) -> Vec<f64> { fn add(num1: f64, num2: f64) -> f64 { num1 + num2 } let f: fn(f64, f64) -> f64 = add; vecs_num_transform(vec1, vec2, f) } /// Returns a vector with elements with values of the difference between the corresponding elements of the two provided vectors. /// /// ```vec1``` is the vector whos elements will be the minuend. 
/// /// ```vec2``` is the vector whos elements will be the subtrahend. pub fn vec_sub(vec1: &Vec<f64>, vec2: &Vec<f64>) -> Vec<f64> { fn sub(num1: f64, num2: f64) -> f64 { num1 - num2 } let f: fn(f64, f64) -> f64 = sub; vecs_num_transform(vec1, vec2, f) } /// Returns a vector with elements with values of the product of the corresponding elements of the two provided vectors. pub fn vec_mul(vec1: &Vec<f64>, vec2: &Vec<f64>) -> Vec<f64> { fn mul(num1: f64, num2: f64) -> f64 { num1 * num2 } let f: fn(f64, f64) -> f64 = mul; vecs_num_transform(vec1, vec2, f) } /// Returns a vector with elements with values of the quotient of the corresponding elements of the two provided vectors. /// /// ```vec1``` is the vector whos elements will be the dividend. /// /// ```vec2``` is the vector whos elements will be the divisor. pub fn vec_div(vec1: &Vec<f64>, vec2: &Vec<f64>) -> Vec<f64> { fn div(num1: f64, num2: f64) -> f64 { num1 / num2 } let f: fn(f64, f64) -> f64 = div; vecs_num_transform(vec1, vec2, f) } /// Returns a vector with elements with values of the sum of the corresponding elements of the two provided vectors. /// /// ```vec1``` is the vector whos elements will be the base. /// /// ```vec2``` is the vector whos elements will be the exponent. pub fn vec_pow(vec1: &Vec<f64>, vec2: &Vec<f64>) -> Vec<f64> { fn pow(num1: f64, num2: f64) -> f64 { num1.powf(num2) } let f: fn(f64, f64) -> f64 = pow; vecs_num_transform(vec1, vec2, f) } /// Returns a Vec<usize> (integer vector) with the index of each element matching the predicate function provided. /// /// ```vec1``` is a numerical vector for which each element will be checked with the predicate function. /// /// ```pred_func``` is the predicate function applied to each element of the vector. /// /// This function is designed for use with ```vec_remove_where()``` and ```vec_keep_where()```. 
/// /// # Examples /// /// ```should_panic /// use rustplot::data_parser; /// /// let num_row = data_parser::get_num_row(0, 0, 10, "./test.csv"); /// /// fn pred(num1: f64) -> bool { /// num1 <= 7.0 /// } /// let f: fn(f64) -> bool = pred; /// /// let matching_indexes = data_parser::num_pred(&num_row, f); /// ``` pub fn num_pred(vec1: &Vec<f64>, pred_func: fn(f64) -> bool) -> Vec<usize> { let mut match_vec = Vec::new(); for i in 0..vec1.len() { let ele1 = vec1[i]; let pred = pred_func(ele1); if pred { match_vec.push(i); } } match_vec } /// Returns a Vec<usize> (string vector) with the index of each element matching the regular expression provided. /// /// ```vec1``` is a string vector for which each element will be checked against the regular expression. /// /// ```regex_string``` is the regular expression applied to each element of the vector. /// /// This function is designed for use with ```vec_remove_where()``` and ```vec_keep_where()```. pub fn reg_match(vec1: &Vec<String>, regex_string: &str) -> Vec<usize> { let mut match_vec = Vec::new(); for i in 0..vec1.len() { use self::regex::Regex; let re = Regex::new(regex_string).unwrap(); let ele1: &str = &vec1[i]; let pred = re.is_match(ele1); if pred { match_vec.push(i); } } match_vec } /// Returns a copy of the provided vector with elements specified by a list of indexes removed. /// /// ```vec1``` can be any vector implementing the Clone Trait, which will have elements removed from it. /// /// ```index_vec``` is a list of integers specifying the elements that should be removed. /// /// This function is designed for use with ```num_pred()``` and ```reg_match()```. 
// Removes the listed indices from a copy of `vec1`.
// NOTE(review): removal walks the index list in reverse so that earlier
// removals do not shift indices still to be removed — this assumes
// `index_vec` is sorted ascending (as produced by `num_pred()` /
// `reg_match()`); confirm for any other callers.
pub fn vec_remove_where<T: Clone>(vec1: &Vec<T>, index_vec: &Vec<usize>) -> Vec<T> {
    // Work on a private copy so the caller's index list is untouched.
    let mut mut_index_vec = index_vec.clone();
    mut_index_vec.reverse();

    let mut keep_vec = vec1.to_vec();
    for i in 0..mut_index_vec.len() {
        keep_vec.remove(mut_index_vec[i]);
    }
    keep_vec
}

/// Returns a copy of the provided vector only keeping elements specified by a list of indexes.
///
/// ```vec1``` can be any vector implementing the Clone Trait, which will have specified elements kept.
///
/// ```index_vec``` is a list of integers specifying the elements that should be kept.
///
/// This function is designed for use with ```num_pred()``` and ```reg_match()```.
// Panics if any index in `index_vec` is out of bounds for `vec1`.
pub fn vec_keep_where<T: Clone>(vec1: &Vec<T>, index_vec: &Vec<usize>) -> Vec<T> {
    let mut keep_vec = Vec::new();
    for i in 0..index_vec.len() {
        keep_vec.push(vec1[index_vec[i]].clone());
    }
    keep_vec
}

#[cfg(test)]
mod tests {
    use super::*;

    // Expected float values below are exact IEEE-754 results and are compared
    // with `assert_eq!` on purpose (no tolerance).

    #[test]
    fn add_tests() {
        let vec_1: Vec<f64> = vec![
            3.000000000000000,
            5.000000000000000,
            4.000000000000000,
            0.010000000000000,
            0.050000000000000,
            2000.000000000000000,
            10000.000000000000000,
        ];
        let vec_2: Vec<f64> = vec![
            8.000000000000000,
            2.000000000000000,
            4.000000000000000,
            0.060000000000000,
            1000.000000000000000,
            0.100000000000000,
            10000.000000000000000,
        ];
        let add_result = vec![
            11.000000000000000,
            7.000000000000000,
            8.000000000000000,
            0.06999999999999999,
            1000.050000000000000,
            2000.100000000000000,
            20000.000000000000000,
        ];
        assert_eq!(vec_add(&vec_1, &vec_2), add_result);
    }

    #[test]
    fn sub_tests() {
        let vec_1: Vec<f64> = vec![
            3.000000000000000,
            5.000000000000000,
            4.000000000000000,
            0.010000000000000,
            0.050000000000000,
            2000.000000000000000,
            10000.000000000000000,
        ];
        let vec_2: Vec<f64> = vec![
            8.000000000000000,
            2.000000000000000,
            4.000000000000000,
            0.060000000000000,
            1000.000000000000000,
            0.100000000000000,
            10000.000000000000000,
        ];
        let sub_result = vec![
            -5.000000000000000,
            3.000000000000000,
            0.000000000000000,
            -0.049999999999999996,
            -999.950000000000000,
            1999.900000000000000,
            0.000000000000000,
        ];
        assert_eq!(vec_sub(&vec_1, &vec_2), sub_result);
    }

    #[test]
    fn mul_tests() {
        let vec_1: Vec<f64> = vec![
            3.000000000000000,
            5.000000000000000,
            4.000000000000000,
            0.010000000000000,
            0.050000000000000,
            2000.000000000000000,
            10000.000000000000000,
        ];
        let vec_2: Vec<f64> = vec![
            8.000000000000000,
            2.000000000000000,
            4.000000000000000,
            0.060000000000000,
            1000.000000000000000,
            0.100000000000000,
            10000.000000000000000,
        ];
        let mul_result = vec![
            24.000000000000000,
            10.000000000000000,
            16.000000000000000,
            0.000600000000000,
            50.000000000000000,
            200.000000000000000,
            100000000.000000000000000,
        ];
        assert_eq!(vec_mul(&vec_1, &vec_2), mul_result);
    }

    #[test]
    fn div_tests() {
        let vec_1: Vec<f64> = vec![
            3.000000000000000,
            5.000000000000000,
            4.000000000000000,
            0.010000000000000,
            0.050000000000000,
            2000.000000000000000,
            10000.000000000000000,
        ];
        let vec_2: Vec<f64> = vec![
            8.000000000000000,
            2.000000000000000,
            4.000000000000000,
            0.060000000000000,
            1000.000000000000000,
            0.100000000000000,
            10000.000000000000000,
        ];
        let div_result = vec![
            0.375000000000000,
            2.500000000000000,
            1.000000000000000,
            0.16666666666666669,
            0.000050000000000,
            20000.000000000000000,
            1.000000000000000,
        ];
        assert_eq!(vec_div(&vec_1, &vec_2), div_result);
    }

    #[test]
    fn pow_tests() {
        let vec_1: Vec<f64> = vec![
            3.000000000000000,
            5.000000000000000,
            4.000000000000000,
            0.010000000000000,
            0.050000000000000,
            2000.000000000000000,
            10000.000000000000000,
        ];
        let vec_2: Vec<f64> = vec![
            8.000000000000000,
            2.000000000000000,
            4.000000000000000,
            0.060000000000000,
            1000.000000000000000,
            0.100000000000000,
            10000.000000000000000,
        ];
        use std::f64;
        // 0.05^1000 underflows to 0.0; 10000^10000 overflows to infinity.
        let pow_result = vec![
            6561.000000000000000,
            25.000000000000000,
            256.000000000000000,
            0.7585775750291838,
            0.000000000000000,
            2.138469199982376,
            f64::INFINITY,
        ];
        assert_eq!(vec_pow(&vec_1, &vec_2), pow_result);
    }

    #[test]
    fn log_tests() {
        let vec_1: Vec<f64> = vec![
            3.000000000000000,
            5.000000000000000,
            4.000000000000000,
            0.010000000000000,
            0.050000000000000,
            2000.000000000000000,
            10000.000000000000000,
        ];
        let log_result = vec![
            0.47712125471966244,
            0.6989700043360187,
            0.6020599913279623,
            -1.9999999999999996,
            -1.301029995663981,
            3.301029995663981,
            4.000000000000000,
        ];
        assert_eq!(vec_log(&vec_1), log_result);
    }

    #[test]
    fn ln_tests() {
        let vec_1: Vec<f64> = vec![
            3.000000000000000,
            5.000000000000000,
            4.000000000000000,
            0.010000000000000,
            0.050000000000000,
            2000.000000000000000,
            10000.000000000000000,
        ];
        let ln_result = vec![
            1.0986122886681098,
            1.6094379124341003,
            1.3862943611198906,
            -4.605170185988091,
            -2.995732273553991,
            7.600902459542082,
            9.210340371976184,
        ];
        assert_eq!(vec_ln(&vec_1), ln_result);
    }

    #[test]
    fn trans_vec_tests() {
        let vec_1: Vec<f64> = vec![16.0, 5.0, 4.0, 0.01, 0.5, 2000.0, 10000.0];
        let result = vec![26.0, 15.0, 14.0, 10.01, 10.5, 2010.0, 10010.0];

        fn test_trans(num1: f64) -> f64 {
            num1 + 10.0
        }
        let f: fn(f64) -> f64 = test_trans;

        let trans_res = vec_num_transform(&vec_1, f);
        assert_eq!(trans_res, result);
    }

    #[test]
    fn trans_vecs_tests() {
        let vec_1: Vec<f64> = vec![16.0, 5.0, 4.0, 0.01, 0.5, 2000.0, 10000.0];
        let vec_2: Vec<f64> = vec![8.0, 3.0, 4.0, 0.06, 1000.0, 0.1, 10000.0];
        let result = vec![
            12.0, 4.0, 4.0, 0.034999999999999996, 500.25, 1000.05, 10000.0,
        ];

        fn test_trans(num1: f64, num2: f64) -> f64 {
            (num1 + num2) / 2.0
        }
        let f: fn(f64, f64) -> f64 = test_trans;

        let trans_res = vecs_num_transform(&vec_1, &vec_2, f);
        assert_eq!(trans_res, result);
    }

    #[test]
    fn num_pred_tests() {
        let vec_1: Vec<f64> = vec![2.0, 10.0, 7.0, 8.0, 4.0];
        let result = vec![0, 2, 4];

        fn test_pred(num1: f64) -> bool {
            num1 <= 7.0
        }
        let f: fn(f64) -> bool = test_pred;

        let pred_res = num_pred(&vec_1, f);
        assert_eq!(pred_res, result);
    }

    #[test]
    fn reg_match_tests() {
        let vec_1: Vec<String> = vec![
            String::from("123456789"),
            String::from("12345"),
            String::from("123567"),
            String::from("6830498"),
            String::from("5937468"),
        ];
        let result = vec![3, 4];

        let reg_res = reg_match(&vec_1, r"^.*(68)+.*$");
        assert_eq!(reg_res, result);
    }

    #[test]
    fn remove_where_tests() {
        let vec_1: Vec<f64> = vec![2.0, 7.0, 6.0, 4.0, 10.0];
        let index_1 = vec![0, 2, 3];
        let result_1 = vec![7.0, 10.0];

        let rm_res_1 = vec_remove_where(&vec_1, &index_1);
        assert_eq!(rm_res_1, result_1);

        let vec_2: Vec<String> = vec![
            String::from("123456789"),
            String::from("12345"),
            String::from("123567"),
            String::from("6830498"),
            String::from("5937468"),
        ];
        let index_2 = vec![0, 2, 3];
        let result_2 = vec![String::from("12345"), String::from("5937468")];

        let rm_res_2 = vec_remove_where(&vec_2, &index_2);
        assert_eq!(rm_res_2, result_2);
    }

    #[test]
    fn keep_where_tests() {
        let vec_1: Vec<f64> = vec![2.0, 7.0, 6.0, 4.0, 10.0];
        let index_1 = vec![0, 2, 3];
        let result_1 = vec![2.0, 6.0, 4.0];

        let kp_res_1 = vec_keep_where(&vec_1, &index_1);
        assert_eq!(kp_res_1, result_1);

        let vec_2: Vec<String> = vec![
            String::from("123456789"),
            String::from("12345"),
            String::from("123567"),
            String::from("6830498"),
            String::from("5937468"),
        ];
        let index_2 = vec![0, 2, 3];
        let result_2 = vec![
            String::from("123456789"),
            String::from("123567"),
            String::from("6830498"),
        ];

        let kp_res_2 = vec_keep_where(&vec_2, &index_2);
        assert_eq!(kp_res_2, result_2);
    }

    // The tests below read from a fixture CSV checked into ./resources/.

    #[test]
    fn get_str_row_tests() {
        let result = vec![
            String::from("75"),
            String::from("16"),
            String::from("15"),
            String::from("96"),
        ];
        let num_row_res = get_str_row(2, 1, 5, "./resources/data_parser_tests.csv");
        assert_eq!(num_row_res, result);
    }

    #[test]
    fn get_str_col_tests() {
        let result = vec![
            String::from("6"),
            String::from("33"),
            String::from("15"),
            String::from("40"),
            String::from("48"),
        ];
        let num_col_res = get_str_col(3, 0, 5, "./resources/data_parser_tests.csv");
        assert_eq!(num_col_res, result);
    }

    #[test]
    fn get_num_row_tests() {
        let result = vec![75.0, 16.0, 15.0, 96.0];
        let num_row_res = get_num_row(2, 1, 5, "./resources/data_parser_tests.csv");
        assert_eq!(num_row_res, result);
    }

    #[test]
    fn get_num_col_tests() {
        let result = vec![6.0, 33.0, 15.0, 40.0, 48.0];
        let num_col_res = get_num_col(3, 0, 5, "./resources/data_parser_tests.csv");
        assert_eq!(num_col_res, result);
    }

    #[test]
    fn get_headers_tests() {
        let result = vec![String::from("h2"), String::from("h3"), String::from("h4")];
        let headers_res = get_headers(1, 4, "./resources/data_parser_tests.csv");
        assert_eq!(headers_res, result);
    }
}
use futures_core::future::BoxFuture; use crate::cursor::HasCursor; use crate::database::Database; use crate::describe::Describe; /// A type that contains or can provide a database connection to use for executing queries /// against the database. /// /// No guarantees are provided that successive queries run on the same physical database /// connection. A [`Connection`](trait.Connection.html) is an `Executor` that guarantees that successive /// queries are run on the same physical database connection. /// /// Implementations are provided for [`&Pool`](struct.Pool.html), /// [`&mut PoolConnection`](struct.PoolConnection.html), /// and [`&mut Connection`](trait.Connection.html). pub trait Executor where Self: Send, { /// The specific database that this type is implemented for. type Database: Database; /// Executes the query for its side-effects and /// discarding any potential result rows. /// /// Returns the number of rows affected, or 0 if not applicable. fn execute<'e, 'q: 'e, 'c: 'e, E: 'e>( &'c mut self, query: E, ) -> BoxFuture<'e, crate::Result<u64>> where E: Execute<'q, Self::Database>; /// Executes a query for its result. /// /// Returns a [`Cursor`] that can be used to iterate through the [`Row`]s /// of the result. /// /// [`Cursor`]: crate::cursor::Cursor /// [`Row`]: crate::row::Row fn fetch<'e, 'q, E>(&'e mut self, query: E) -> <Self::Database as HasCursor<'e, 'q>>::Cursor where E: Execute<'q, Self::Database>; /// Prepare the SQL query and return type information about its parameters /// and results. /// /// This is used by the query macros during compilation to /// power their type inference. #[doc(hidden)] fn describe<'e, 'q, E: 'e>( &'e mut self, query: E, ) -> BoxFuture<'e, crate::Result<Describe<Self::Database>>> where E: Execute<'q, Self::Database>; } // HACK: Generic Associated Types (GATs) will enable us to rework how the Executor bound is done // in Query to remove the need for this. 
pub trait RefExecutor<'e> { type Database: Database; fn fetch_by_ref<'q, E>(self, query: E) -> <Self::Database as HasCursor<'e, 'q>>::Cursor where E: Execute<'q, Self::Database>; } /// A type that may be executed against a database connection. pub trait Execute<'q, DB> where Self: Send, DB: Database, { /// Returns the query to be executed and the arguments to bind against the query, if any. /// /// Returning `None` for `Arguments` indicates to use a "simple" query protocol and to not /// prepare the query. Returning `Some(Default::default())` is an empty arguments object that /// will be prepared (and cached) before execution. fn into_parts(self) -> (&'q str, Option<DB::Arguments>); } impl<'q, DB> Execute<'q, DB> for &'q str where DB: Database, { #[inline] fn into_parts(self) -> (&'q str, Option<DB::Arguments>) { (self, None) } } impl<T> Executor for &'_ mut T where T: Executor, { type Database = T::Database; fn execute<'e, 'q: 'e, 'c: 'e, E: 'e>( &'c mut self, query: E, ) -> BoxFuture<'e, crate::Result<u64>> where E: Execute<'q, Self::Database>, { (**self).execute(query) } fn fetch<'e, 'q, E>(&'e mut self, query: E) -> <Self::Database as HasCursor<'_, 'q>>::Cursor where E: Execute<'q, Self::Database>, { (**self).fetch(query) } #[doc(hidden)] fn describe<'e, 'q, E: 'e>( &'e mut self, query: E, ) -> BoxFuture<'e, crate::Result<Describe<Self::Database>>> where E: Execute<'q, Self::Database>, { (**self).describe(query) } } // The following impl lets `&mut &Pool` continue to work // This pattern was required in rbatis_core < 0.3 // Going forward users will likely naturally use `&Pool` instead impl<'c, T> RefExecutor<'c> for &'c mut T where T: Copy + RefExecutor<'c>, { type Database = T::Database; #[inline] fn fetch_by_ref<'q, E>(self, query: E) -> <Self::Database as HasCursor<'c, 'q>>::Cursor where E: Execute<'q, Self::Database>, { (*self).fetch_by_ref(query) } }
#[path = "lib/erlang.rs"] pub mod erlang; #[path = "lib/maps.rs"] pub mod maps; test_stderr_substrings!( backtrace, vec![ "native_implemented/otp/tests/internal/lib/backtrace/init.erl:9, in init:bad_reverse/1", "native_implemented/otp/tests/internal/lib/backtrace/init.erl:11, in init:bad_reverse/1", "native_implemented/otp/src/erlang/tl_1.rs:4, in erlang:tl/1", "Process (#PID<0.2.0>) exited abnormally.", "badarg" ] );
use crate::DlmError;
use async_channel::{Receiver, Sender};
use chrono::Local;
use indicatif::{MultiProgress, ProgressBar, ProgressDrawTarget, ProgressStyle};
use std::cmp::{min, Ordering};

// Placeholder message shown on a per-file bar that has no active download.
const PENDING: &str = "pending";

/// Owns the terminal progress display: one global bar tracking overall
/// completion plus a fixed pool of per-file bars that downloader tasks
/// check out of / return to via the `tx`/`rx` channel pair.
pub struct ProgressBarManager {
    // Global bar counting completed downloads out of the total.
    main_pb: ProgressBar,
    // Size of the per-file bar pool (also the channel capacity).
    file_pb_count: usize,
    // Workers return finished bars through `tx` and borrow idle ones from `rx`.
    pub tx: Sender<ProgressBar>,
    pub rx: Receiver<ProgressBar>,
}

impl ProgressBarManager {
    /// Builds the multi-bar display and pre-fills the channel with
    /// `min(max_concurrent_downloads, main_pb_len)` per-file bars.
    ///
    /// `main_pb_len` is the total number of downloads the global bar counts to.
    //
    // NOTE(review): if `main_pb_len` is 0 then `file_pb_count` is 0 and
    // `async_channel::bounded(0)` panics (capacity must be > 0) — confirm
    // callers never pass an empty download list.
    pub async fn init(max_concurrent_downloads: usize, main_pb_len: u64) -> ProgressBarManager {
        let mp = MultiProgress::new();
        // Refresh the terminal 5 times per second
        let draw_target = ProgressDrawTarget::stdout_with_hz(5);
        mp.set_draw_target(draw_target);
        // main progress bar
        let main_style = ProgressStyle::default_bar()
            .template("{bar:133} {pos}/{len}")
            .expect("templating should not fail");
        let main_pb = mp.add(ProgressBar::new(0));
        main_pb.set_style(main_style);
        main_pb.set_length(main_pb_len);
        // `file_pb_count` progress bars are shared between the threads at any time;
        // never more bars than there are downloads to show.
        let file_pb_count = min(max_concurrent_downloads, main_pb_len as usize);
        // If you need a multi-producer multi-consumer channel where only one consumer sees each message, you can use the async-channel crate.
        // There are also channels for use outside of asynchronous Rust, such as std::sync::mpsc and crossbeam::channel.
        // These channels wait for messages by blocking the thread, which is not allowed in asynchronous code.
        // ref: https://tokio.rs/tokio/tutorial/channels
        let (tx, rx): (Sender<ProgressBar>, Receiver<ProgressBar>) =
            async_channel::bounded(file_pb_count);
        let dl_style = ProgressStyle::default_bar()
            .template("{msg} [{elapsed_precise}] [{bar:40.cyan/blue}] {bytes}/{total_bytes} (speed:{bytes_per_sec}) (eta:{eta})")
            .expect("templating should not fail")
            .progress_chars("#>-");
        // Seed the pool: every bar starts idle, showing the "pending" message.
        for _ in 0..file_pb_count {
            let file_pb = mp.add(ProgressBar::new(0));
            file_pb.set_style(dl_style.clone());
            file_pb.set_message(ProgressBarManager::message_progress_bar(PENDING));
            tx.send(file_pb).await.expect("channel should not fail");
        }
        ProgressBarManager {
            main_pb,
            file_pb_count,
            rx,
            tx,
        }
    }

    /// Drains every per-file bar back out of the channel, clears each, then
    /// finishes the global bar. Waits until all workers have returned their
    /// bars, so it doubles as a "all downloads done" barrier.
    pub async fn finish_all(&self) -> Result<(), DlmError> {
        for _ in 0..self.file_pb_count {
            let pb = self.rx.recv().await?;
            pb.finish_and_clear();
        }
        self.main_pb.finish();
        Ok(())
    }

    /// Bumps the global completed-downloads counter by one.
    pub fn increment_global_progress(&self) {
        self.main_pb.inc(1)
    }

    /// Normalizes a message to exactly 35 characters (truncating or
    /// space-padding) so the bar columns stay aligned across updates.
    pub fn message_progress_bar(s: &str) -> String {
        let max = 35; // arbitrary limit
        let count = s.chars().count();
        match count.cmp(&max) {
            Ordering::Greater => s.chars().take(max).collect(),
            Ordering::Equal => s.to_string(),
            Ordering::Less => format!("{}{}", s, " ".repeat(max - count)),
        }
    }

    /// Prints a timestamped log line above the progress display without
    /// corrupting the bars.
    pub fn log_above_progress_bars(&self, msg: String) {
        ProgressBarManager::log_above_progress_bar(&self.main_pb, msg)
    }

    fn log_above_progress_bar(pb: &ProgressBar, msg: String) {
        pb.println(format!(
            "[{}] {}",
            Local::now().naive_local().format("%Y-%m-%d %H:%M:%S"),
            msg
        ));
    }

    /// Returns a per-file bar to its idle state ("pending") so it can be
    /// reused for the next download.
    pub fn reset_progress_bar(pb: &ProgressBar) {
        pb.reset();
        pb.set_message(ProgressBarManager::message_progress_bar(PENDING));
    }
}
pub mod lib {
    use std::fs::OpenOptions;
    use std::io::{ErrorKind, Read, Write};
    use std::process::exit;

    // The Read trait implements read_to_end and read_to_string functions which
    // read from a buffer until EOF is reached, but for applications like output
    // piped to stdout there is no EOF, so the functions hangs until timeout. This
    // function allows a buffer to be fully read to the end, but instead of
    // expecting EOF the read completes when there are 0 bytes left to read.
    //
    // Returns every byte read so far. A `WouldBlock` error ends the read
    // gracefully (non-blocking sockets); a `BrokenPipe` terminates the
    // process; any other I/O error panics.
    pub fn read_til_empty<T: Read>(buffer: &mut T) -> Vec<u8> {
        let mut total: Vec<u8> = Vec::new();
        let mut temp = [0u8; 1024];
        loop {
            match buffer.read(&mut temp) {
                Ok(n_bytes) => {
                    println!("{} bytes read...", n_bytes);
                    // BUGFIX: only the first `n_bytes` of `temp` are valid;
                    // the previous code appended the entire 1024-byte scratch
                    // buffer, padding the result with stale/zero bytes.
                    total.extend_from_slice(&temp[..n_bytes]);
                    if n_bytes == 0 {
                        break;
                    }
                }
                Err(e) => match e.kind() {
                    ErrorKind::WouldBlock => break,
                    ErrorKind::BrokenPipe => {
                        println!("Connection closed.");
                        exit(1);
                    }
                    // `panic!(e)` with a non-string payload is deprecated
                    // (a hard error in edition 2021) — format the error.
                    _ => panic!("read failed: {}", e),
                },
            }
        }
        total
    }

    /// Appends a classic hexdump of `chars` to `filename`.
    ///
    /// `received` selects the "Received"/"Sent" header prefix; `n_bytes` is
    /// the byte count reported in that header. Each output line shows the
    /// offset, 16 hex bytes (grouped by 4), and the printable-ASCII view.
    /// The final partial line (len % 16 bytes) is padded so the ASCII column
    /// stays aligned — previously those trailing bytes were silently dropped.
    pub fn hexdump(received: bool, n_bytes: usize, chars: &[u8], filename: &str) {
        let mut hexdump = String::new();
        let mode = if received { "Received" } else { "Sent" };
        hexdump.push_str(&format!("{} {} bytes to the socket\n", mode, n_bytes));
        for (line_num, chunk) in chars.chunks(16).enumerate() {
            // Offset column.
            hexdump.push_str(&format!("{:08X} ", line_num * 16));
            // Hex column, an extra space after every group of 4 bytes.
            for (char_offset, byte) in chunk.iter().enumerate() {
                hexdump.push_str(&format!("{:02X} ", byte));
                if (char_offset + 1) % 4 == 0 {
                    hexdump.push(' ');
                }
            }
            // Pad a short final chunk to full width so the ASCII column aligns.
            for char_offset in chunk.len()..16 {
                hexdump.push_str("   ");
                if (char_offset + 1) % 4 == 0 {
                    hexdump.push(' ');
                }
            }
            hexdump.push('\t');
            // ASCII column: printable range 32..=126, '.' otherwise.
            for &byte in chunk {
                if byte <= 126 && byte >= 32 {
                    hexdump.push(byte as char);
                } else {
                    hexdump.push('.');
                }
            }
            hexdump.push('\n');
        }
        let mut file = OpenOptions::new()
            .create(true)
            .append(true)
            .open(filename)
            .unwrap();
        // write_all instead of write: a single `write` call may accept only
        // part of the buffer, silently truncating the dump.
        file.write_all(hexdump.as_bytes())
            .expect("Error writing hexdump to file.");
        file.flush().unwrap();
    }

    /// Parsed command-line arguments for the tool.
    pub struct CliArgs {
        pub port: String,
        pub host: String,
        // Optional command to execute; None means interactive mode.
        pub command: Option<String>,
        // Optional hexdump output file path.
        pub output: Option<String>,
    }
}
use chrono::NaiveDate;
use chrono::NaiveDateTime;
use diesel;
use diesel::expression::dsl;
use diesel::prelude::*;
use diesel::sql_types;
use models::*;
use schema::{artists, event_artists, events, organization_users, organizations, venues};
use utils::errors::DatabaseError;
use utils::errors::ErrorCode;
use utils::errors::*;
use uuid::Uuid;
use validator::{Validate, ValidationError, ValidationErrors};

/// A row of the `events` table: an event belonging to an organization,
/// optionally hosted at a venue.
#[derive(Associations, Identifiable, Queryable, AsChangeset)]
#[belongs_to(Organization)]
#[derive(Clone, QueryableByName, Serialize, Deserialize, PartialEq, Debug)]
#[belongs_to(Venue)]
#[table_name = "events"]
pub struct Event {
    pub id: Uuid,
    pub name: String,
    pub organization_id: Uuid,
    pub venue_id: Option<Uuid>,
    pub created_at: NaiveDateTime,
    pub event_start: Option<NaiveDateTime>,
    pub door_time: Option<NaiveDateTime>,
    // Stored as text; parsed into EventStatus via `Event::status()`.
    pub status: String,
    pub publish_date: Option<NaiveDateTime>,
    pub redeem_date: Option<NaiveDateTime>,
    pub fee_in_cents: Option<i64>,
    pub promo_image_url: Option<String>,
    pub additional_info: Option<String>,
    pub age_limit: Option<i32>,
    pub top_line_info: Option<String>,
    pub cancelled_at: Option<NaiveDateTime>,
    pub updated_at: NaiveDateTime,
}

/// Insertable payload for a new event. Deserialized from client input;
/// `status` is never taken from the client (skip_deserializing) and defaults
/// to Draft.
#[derive(Default, Insertable, Serialize, Deserialize, Validate)]
#[table_name = "events"]
pub struct NewEvent {
    pub name: String,
    pub organization_id: Uuid,
    pub venue_id: Option<Uuid>,
    pub event_start: Option<NaiveDateTime>,
    pub door_time: Option<NaiveDateTime>,
    #[serde(default = "NewEvent::default_status", skip_deserializing)]
    pub status: String,
    pub publish_date: Option<NaiveDateTime>,
    pub redeem_date: Option<NaiveDateTime>,
    pub fee_in_cents: Option<i64>,
    #[validate(url)]
    pub promo_image_url: Option<String>,
    pub additional_info: Option<String>,
    pub age_limit: Option<i32>,
    #[validate(length(max = "100"))]
    pub top_line_info: Option<String>,
}

impl NewEvent {
    /// Validates the payload and inserts it, returning the stored row.
    pub fn commit(&self, conn: &PgConnection) -> Result<Event, DatabaseError> {
        self.validate()?;
        diesel::insert_into(events::table)
            .values(self)
            .get_result(conn)
            .to_db_error(ErrorCode::InsertError, "Could not create new event")
    }

    /// serde default for `status`: new events start as drafts.
    pub fn default_status() -> String {
        EventStatus::Draft.to_string()
    }
}

/// Partial-update payload: every field optional, only present fields are
/// written (diesel AsChangeset semantics).
#[derive(AsChangeset, Default, Deserialize, Validate)]
#[table_name = "events"]
pub struct EventEditableAttributes {
    pub name: Option<String>,
    pub venue_id: Option<Uuid>,
    pub event_start: Option<NaiveDateTime>,
    pub door_time: Option<NaiveDateTime>,
    pub publish_date: Option<NaiveDateTime>,
    pub redeem_date: Option<NaiveDateTime>,
    pub fee_in_cents: Option<i64>,
    #[validate(url)]
    pub promo_image_url: Option<String>,
    pub additional_info: Option<String>,
    pub age_limit: Option<i32>,
    pub cancelled_at: Option<NaiveDateTime>,
    #[validate(length(max = "100"))]
    pub top_line_info: Option<String>,
}

impl Event {
    /// Builds (but does not persist) a NewEvent; call `.commit()` to insert.
    pub fn create(
        name: &str,
        status: EventStatus,
        organization_id: Uuid,
        venue_id: Option<Uuid>,
        event_start: Option<NaiveDateTime>,
        door_time: Option<NaiveDateTime>,
        publish_date: Option<NaiveDateTime>,
    ) -> NewEvent {
        NewEvent {
            name: name.into(),
            status: status.to_string(),
            organization_id,
            venue_id,
            event_start,
            door_time,
            publish_date,
            ..Default::default()
        }
    }

    /// Parses the stored status string.
    ///
    /// NOTE(review): panics if the DB holds a status string EventStatus
    /// can't parse — relies on writes always going through this model.
    pub fn status(&self) -> EventStatus {
        self.status.parse::<EventStatus>().unwrap()
    }

    /// Validates and applies a partial update, refreshing `updated_at`.
    pub fn update(
        &self,
        attributes: EventEditableAttributes,
        conn: &PgConnection,
    ) -> Result<Event, DatabaseError> {
        attributes.validate()?;
        DatabaseError::wrap(
            ErrorCode::UpdateError,
            "Could not update event",
            diesel::update(self)
                .set((attributes, events::updated_at.eq(dsl::now)))
                .get_result(conn),
        )
    }

    /// Transitions the event to Published (idempotent: re-publishing a
    /// published event just reloads it). Requires a venue that passes
    /// its own publish validation.
    pub fn publish(self, conn: &PgConnection) -> Result<Event, DatabaseError> {
        if self.status() == EventStatus::Published {
            return Event::find(self.id, conn);
        }
        let mut errors = ValidationErrors::new();
        if self.venue_id.is_none() {
            errors.add(
                "venue_id",
                ValidationError::new("Event can't be published without a venue"),
            );
        } else {
            // unwrap is safe here: venue_id is Some, so venue() returns Some.
            let venue = self.venue(conn)?.unwrap();
            venue.validate_for_publish()?;
        }
        if !errors.is_empty() {
            return Err(errors.into());
        }
        diesel::update(&self)
            .set((
                events::status.eq(EventStatus::Published.to_string()),
                events::publish_date.eq(dsl::now.nullable()),
                events::updated_at.eq(dsl::now),
            )).execute(conn)
            .to_db_error(ErrorCode::UpdateError, "Could not publish record")?;
        Event::find(self.id, conn)
    }

    /// Loads an event by primary key.
    pub fn find(id: Uuid, conn: &PgConnection) -> Result<Event, DatabaseError> {
        DatabaseError::wrap(
            ErrorCode::QueryError,
            "Error loading event",
            events::table.find(id).first::<Event>(conn),
        )
    }

    /// Soft-cancels the event by stamping `cancelled_at` (row is kept).
    pub fn cancel(self, conn: &PgConnection) -> Result<Event, DatabaseError> {
        diesel::update(&self)
            .set(events::cancelled_at.eq(dsl::now.nullable()))
            .get_result(conn)
            .to_db_error(ErrorCode::UpdateError, "Could not update event")
    }

    /// All events at a venue, ordered by name.
    pub fn find_all_events_from_venue(
        venue_id: &Uuid,
        conn: &PgConnection,
    ) -> Result<Vec<Event>, DatabaseError> {
        DatabaseError::wrap(
            ErrorCode::QueryError,
            "Error loading event via venue",
            events::table
                .filter(events::venue_id.eq(venue_id))
                .order_by(events::name)
                .load(conn),
        )
    }

    /// All events owned by an organization, ordered by name.
    pub fn find_all_events_from_organization(
        organization_id: &Uuid,
        conn: &PgConnection,
    ) -> Result<Vec<Event>, DatabaseError> {
        DatabaseError::wrap(
            ErrorCode::QueryError,
            "Error loading events via organization",
            events::table
                .filter(events::organization_id.eq(organization_id))
                .order_by(events::name)
                .load(conn),
        )
    }

    /// Runs the raw guest-list SQL (see ../queries/retrieve_guest_list.sql)
    /// bound to this event's id and a free-text search term.
    pub fn guest_list(
        &self,
        query: &str,
        conn: &PgConnection,
    ) -> Result<Vec<RedeemableTicket>, DatabaseError> {
        let q = include_str!("../queries/retrieve_guest_list.sql");
        diesel::sql_query(q)
            .bind::<sql_types::Uuid, _>(self.id)
            .bind::<sql_types::Text, _>(query)
            .load::<RedeemableTicket>(conn)
            .to_db_error(ErrorCode::QueryError, "Could not load guest list")
    }

    /// Full-text-ish event search. The `query_filter` term is matched
    /// (ILIKE) against event, venue, and artist names; results are further
    /// constrained by time window, region, status, and the caller's
    /// visibility (drafts are hidden from anonymous users and from users
    /// outside the owning organization, unless they are org admins).
    pub fn search(
        query_filter: Option<String>,
        region_id: Option<Uuid>,
        start_time: Option<NaiveDateTime>,
        end_time: Option<NaiveDateTime>,
        status_filter: Option<Vec<EventStatus>>,
        user: Option<User>,
        conn: &PgConnection,
    ) -> Result<Vec<Event>, DatabaseError> {
        // No filter → "%", which ILIKE-matches everything.
        let query_like = match query_filter {
            Some(n) => format!("%{}%", n),
            None => "%".to_string(),
        };
        let mut query = events::table
            .left_join(venues::table.on(events::venue_id.eq(venues::id.nullable())))
            .inner_join(organizations::table.on(organizations::id.eq(events::organization_id)))
            .left_join(
                organization_users::table
                    .on(organization_users::organization_id.eq(organizations::id)),
            ).left_join(
                // Artist join carries the name match in its ON clause so a
                // non-matching artist leaves artists::id NULL instead of
                // dropping the event row.
                event_artists::table
                    .inner_join(
                        artists::table.on(event_artists::artist_id
                            .eq(artists::id)
                            .and(artists::name.ilike(query_like.clone()))),
                    ).on(events::id.eq(event_artists::event_id)),
            ).filter(
                events::name
                    .ilike(query_like.clone())
                    .or(venues::id
                        .is_not_null()
                        .and(venues::name.ilike(query_like.clone()))).or(artists::id.is_not_null()),
            ).filter(
                // Default window bounds: epoch .. year 3970 stand in for
                // "unbounded" when no time filter was supplied.
                events::event_start
                    .gt(start_time
                        .unwrap_or_else(|| NaiveDate::from_ymd(1970, 1, 1).and_hms(0, 0, 0))),
            ).filter(
                events::event_start
                    .lt(end_time
                        .unwrap_or_else(|| NaiveDate::from_ymd(3970, 1, 1).and_hms(0, 0, 0))),
            ).select(events::all_columns)
            .distinct()
            .order_by(events::event_start.asc())
            .then_order_by(events::name.asc())
            .into_boxed();
        match user {
            Some(user) => {
                // Admin results include all drafts across organizations
                if !user
                    .get_global_scopes()
                    .contains(&Scopes::OrgAdmin.to_string())
                {
                    query = query.filter(
                        events::status
                            .ne(EventStatus::Draft.to_string())
                            .or(organizations::owner_user_id.eq(user.id))
                            .or(organization_users::user_id.eq(user.id)),
                    );
                }
            }
            None => {
                // Anonymous callers never see drafts.
                query = query.filter(events::status.ne(EventStatus::Draft.to_string()));
            }
        }
        if let Some(statuses) = status_filter {
            let statuses: Vec<String> = statuses
                .into_iter()
                .map(|status| status.to_string())
                .collect();
            query = query.filter(events::status.eq_any(statuses));
        }
        if let Some(region_id) = region_id {
            query = query.filter(venues::region_id.eq(region_id));
        }
        let result = query.load(conn);
        DatabaseError::wrap(ErrorCode::QueryError, "Unable to load all events", result)
    }

    /// Links an artist to this event (rank 0, no set time).
    pub fn add_artist(&self, artist_id: Uuid, conn: &PgConnection) -> Result<(), DatabaseError> {
        EventArtist::create(self.id, artist_id, 0, None)
            .commit(conn)
            .map(|_| ())
    }

    /// The owning organization.
    pub fn organization(&self, conn: &PgConnection) -> Result<Organization, DatabaseError> {
        Organization::find(self.organization_id, conn)
    }

    /// The hosting venue, if one is set.
    pub fn venue(&self, conn: &PgConnection) -> Result<Option<Venue>, DatabaseError> {
        match self.venue_id {
            Some(venue_id) => {
                let venue = Venue::find(venue_id, conn);
                match venue {
                    Ok(venue) => Ok(Some(venue)),
                    Err(e) => Err(e),
                }
            }
            None => Ok(None),
        }
    }

    /// Creates a ticket type plus its backing asset and `quantity` ticket
    /// instances assigned to `wallet_id`. The asset is named
    /// "<event name>.<ticket type name>".
    pub fn add_ticket_type(
        &self,
        name: String,
        quantity: u32,
        start_date: NaiveDateTime,
        end_date: NaiveDateTime,
        wallet_id: Uuid,
        increment: Option<i32>,
        conn: &PgConnection,
    ) -> Result<TicketType, DatabaseError> {
        let asset_name = format!("{}.{}", self.name, &name);
        let ticket_type =
            TicketType::create(self.id, name, start_date, end_date, increment).commit(conn)?;
        let asset = Asset::create(ticket_type.id, asset_name).commit(conn)?;
        TicketInstance::create_multiple(asset.id, 0, quantity, wallet_id, conn)?;
        Ok(ticket_type)
    }

    /// All ticket types defined for this event.
    pub fn ticket_types(&self, conn: &PgConnection) -> Result<Vec<TicketType>, DatabaseError> {
        TicketType::find_by_event_id(self.id, conn)
    }

    /// The organization's default wallet, used to issue tickets.
    pub fn issuer_wallet(&self, conn: &PgConnection) -> Result<Wallet, DatabaseError> {
        Wallet::find_default_for_organization(self.organization_id, conn)
    }

    /// Projects the row into the public display shape (consumes self).
    pub fn for_display(self, conn: &PgConnection) -> Result<DisplayEvent, DatabaseError> {
        let venue: Option<DisplayVenue> = self.venue(conn)?.and_then(|venue| Some(venue.into()));
        Ok(DisplayEvent {
            id: self.id,
            name: self.name,
            event_start: self.event_start,
            door_time: self.door_time,
            promo_image_url: self.promo_image_url,
            additional_info: self.additional_info,
            top_line_info: self.top_line_info,
            venue,
        })
    }
}

/// Client-facing projection of an Event (subset of fields plus the
/// display form of its venue).
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
pub struct DisplayEvent {
    pub id: Uuid,
    pub name: String,
    pub event_start: Option<NaiveDateTime>,
    pub door_time: Option<NaiveDateTime>,
    pub promo_image_url: Option<String>,
    pub additional_info: Option<String>,
    pub top_line_info: Option<String>,
    pub venue: Option<DisplayVenue>,
}
use crate::io::Buf; use crate::postgres::database::Postgres; use byteorder::NetworkEndian; #[derive(Debug)] pub struct BackendKeyData { /// The process ID of this database. pub process_id: u32, /// The secret key of this database. pub secret_key: u32, } impl BackendKeyData { pub(crate) fn read(mut buf: &[u8]) -> crate::Result<Self> { let process_id = buf.get_u32::<NetworkEndian>()?; let secret_key = buf.get_u32::<NetworkEndian>()?; Ok(Self { process_id, secret_key, }) } } #[cfg(test)] mod tests { use super::BackendKeyData; const BACKEND_KEY_DATA: &[u8] = b"\0\0'\xc6\x89R\xc5+"; #[test] fn it_decodes_backend_key_data() { let message = BackendKeyData::read(BACKEND_KEY_DATA).unwrap(); assert_eq!(message.process_id, 10182); assert_eq!(message.secret_key, 2303903019); } }
// Copyright 2022 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use std::sync::Arc;

use async_recursion::async_recursion;
use common_ast::ast::split_conjunctions_expr;
use common_ast::ast::split_equivalent_predicate_expr;
use common_ast::ast::Expr;
use common_ast::ast::JoinCondition;
use common_ast::ast::JoinOperator;
use common_catalog::table_context::TableContext;
use common_exception::ErrorCode;
use common_exception::Result;
use common_exception::Span;
use common_expression::type_check::common_super_type;
use common_functions::BUILTIN_FUNCTIONS;

use crate::binder::JoinPredicate;
use crate::binder::Visibility;
use crate::normalize_identifier;
use crate::optimizer::ColumnSet;
use crate::optimizer::RelExpr;
use crate::optimizer::SExpr;
use crate::planner::binder::scalar::ScalarBinder;
use crate::planner::binder::wrap_cast;
use crate::planner::binder::Binder;
use crate::planner::semantic::NameResolutionContext;
use crate::plans::BoundColumnRef;
use crate::plans::Filter;
use crate::plans::Join;
use crate::plans::JoinType;
use crate::plans::ScalarExpr;
use crate::BindContext;
use crate::MetadataRef;

/// Join predicates partitioned by how the executor can use them:
/// equi-condition pairs (left/right, index-aligned), residual non-equi
/// predicates, and "other" predicates that may be pushed below the join.
pub struct JoinConditions {
    pub(crate) left_conditions: Vec<ScalarExpr>,
    pub(crate) right_conditions: Vec<ScalarExpr>,
    pub(crate) non_equi_conditions: Vec<ScalarExpr>,
    pub(crate) other_conditions: Vec<ScalarExpr>,
}

impl Binder {
    /// Binds a JOIN clause: validates the operator/condition combination,
    /// resolves the join condition into `JoinConditions`, and builds the
    /// join SExpr. For semi/anti joins only one side's columns are visible
    /// downstream, so `bind_context` is replaced by that side's context.
    #[async_recursion]
    pub(super) async fn bind_join(
        &mut self,
        bind_context: &BindContext,
        left_context: BindContext,
        right_context: BindContext,
        left_child: SExpr,
        right_child: SExpr,
        join: &common_ast::ast::Join,
    ) -> Result<(SExpr, BindContext)> {
        check_duplicate_join_tables(&left_context, &right_context)?;
        let mut bind_context = bind_context.replace();
        // Reject invalid operator/condition pairings up front.
        match &join.op {
            JoinOperator::LeftOuter | JoinOperator::RightOuter | JoinOperator::FullOuter
                if join.condition == JoinCondition::None =>
            {
                return Err(ErrorCode::SemanticError(
                    "outer join should contain join conditions".to_string(),
                ));
            }
            JoinOperator::CrossJoin if join.condition != JoinCondition::None => {
                return Err(ErrorCode::SemanticError(
                    "cross join should not contain join conditions".to_string(),
                ));
            }
            _ => (),
        };
        let mut left_join_conditions: Vec<ScalarExpr> = vec![];
        let mut right_join_conditions: Vec<ScalarExpr> = vec![];
        let mut non_equi_conditions: Vec<ScalarExpr> = vec![];
        let mut other_conditions: Vec<ScalarExpr> = vec![];
        let mut join_condition_resolver = JoinConditionResolver::new(
            self.ctx.clone(),
            &self.name_resolution_ctx,
            self.metadata.clone(),
            join.op.clone(),
            &left_context,
            &right_context,
            &mut bind_context,
            &join.condition,
        );
        join_condition_resolver
            .resolve(
                &mut left_join_conditions,
                &mut right_join_conditions,
                &mut non_equi_conditions,
                &mut other_conditions,
                &join.op,
            )
            .await?;
        let join_conditions = JoinConditions {
            left_conditions: left_join_conditions,
            right_conditions: right_join_conditions,
            non_equi_conditions,
            other_conditions,
        };
        let s_expr = match &join.op {
            JoinOperator::Inner => {
                self.bind_join_with_type(JoinType::Inner, join_conditions, left_child, right_child)
            }
            JoinOperator::LeftOuter => {
                self.bind_join_with_type(JoinType::Left, join_conditions, left_child, right_child)
            }
            JoinOperator::RightOuter => {
                self.bind_join_with_type(JoinType::Right, join_conditions, left_child, right_child)
            }
            JoinOperator::FullOuter => {
                self.bind_join_with_type(JoinType::Full, join_conditions, left_child, right_child)
            }
            JoinOperator::CrossJoin => {
                self.bind_join_with_type(JoinType::Cross, join_conditions, left_child, right_child)
            }
            // Semi/anti joins output only one side's columns, so the output
            // bind context is that side's context rather than the merged one.
            JoinOperator::LeftSemi => {
                bind_context = left_context;
                self.bind_join_with_type(
                    JoinType::LeftSemi,
                    join_conditions,
                    left_child,
                    right_child,
                )
            }
            JoinOperator::RightSemi => {
                bind_context = right_context;
                self.bind_join_with_type(
                    JoinType::RightSemi,
                    join_conditions,
                    left_child,
                    right_child,
                )
            }
            JoinOperator::LeftAnti => {
                bind_context = left_context;
                self.bind_join_with_type(
                    JoinType::LeftAnti,
                    join_conditions,
                    left_child,
                    right_child,
                )
            }
            JoinOperator::RightAnti => {
                bind_context = right_context;
                self.bind_join_with_type(
                    JoinType::RightAnti,
                    join_conditions,
                    left_child,
                    right_child,
                )
            }
        }?;
        Ok((s_expr, bind_context))
    }

    /// Builds the logical `Join` SExpr for an already-resolved condition set,
    /// first pushing pushable "other" predicates below the join as Filters.
    pub fn bind_join_with_type(
        &mut self,
        join_type: JoinType,
        join_conditions: JoinConditions,
        mut left_child: SExpr,
        mut right_child: SExpr,
    ) -> Result<SExpr> {
        let left_conditions = join_conditions.left_conditions;
        let right_conditions = join_conditions.right_conditions;
        let mut non_equi_conditions = join_conditions.non_equi_conditions;
        let other_conditions = join_conditions.other_conditions;
        if join_type == JoinType::Cross
            && (!left_conditions.is_empty() || !right_conditions.is_empty())
        {
            return Err(ErrorCode::SemanticError(
                "Join conditions should be empty in cross join",
            ));
        }
        self.push_down_other_conditions(
            &mut left_child,
            &mut right_child,
            other_conditions,
            &mut non_equi_conditions,
        )?;
        let logical_join = Join {
            left_conditions,
            right_conditions,
            non_equi_conditions,
            join_type,
            marker_index: None,
            from_correlated_subquery: false,
            contain_runtime_filter: false,
        };
        Ok(SExpr::create_binary(
            logical_join.into(),
            left_child,
            right_child,
        ))
    }

    /// Classifies each "other" predicate against the children's relational
    /// properties: single-side predicates are pushed below the join as a
    /// Filter on that side; the rest fall back to non-equi conditions.
    fn push_down_other_conditions(
        &self,
        left_child: &mut SExpr,
        right_child: &mut SExpr,
        other_conditions: Vec<ScalarExpr>,
        non_equi_conditions: &mut Vec<ScalarExpr>,
    ) -> Result<()> {
        if other_conditions.is_empty() {
            return Ok(());
        }
        let left_prop = RelExpr::with_s_expr(left_child).derive_relational_prop()?;
        let right_prop = RelExpr::with_s_expr(right_child).derive_relational_prop()?;
        let mut left_push_down = vec![];
        let mut right_push_down = vec![];
        let mut need_push_down = false;
        for predicate in other_conditions.iter() {
            let pred = JoinPredicate::new(predicate, &left_prop, &right_prop);
            match pred {
                JoinPredicate::Left(_) => {
                    need_push_down = true;
                    left_push_down.push(predicate.clone());
                }
                JoinPredicate::Right(_) => {
                    need_push_down = true;
                    right_push_down.push(predicate.clone());
                }
                _ => {
                    non_equi_conditions.push(predicate.clone());
                }
            }
        }
        if !need_push_down {
            return Ok(());
        }
        if !left_push_down.is_empty() {
            *left_child = SExpr::create_unary(
                Filter {
                    predicates: left_push_down,
                    is_having: false,
                }
                .into(),
                left_child.clone(),
            );
        }
        if !right_push_down.is_empty() {
            *right_child = SExpr::create_unary(
                Filter {
                    predicates: right_push_down,
                    is_having: false,
                }
                .into(),
                right_child.clone(),
            );
        }
        Ok(())
    }
}

// Wrap nullable for column binding depending on join type.
// Outer joins can produce NULLs on the non-preserved side(s), so those
// columns' types are wrapped nullable before being added to `bind_context`.
fn wrap_nullable_for_column(
    join_type: &JoinOperator,
    left_context: &BindContext,
    right_context: &BindContext,
    bind_context: &mut BindContext,
) {
    match join_type {
        JoinOperator::LeftOuter => {
            for column in left_context.all_column_bindings() {
                bind_context.add_column_binding(column.clone());
            }
            for column in right_context.all_column_bindings().iter() {
                let mut nullable_column = column.clone();
                nullable_column.data_type = Box::new(column.data_type.wrap_nullable());
                bind_context.add_column_binding(nullable_column);
            }
        }
        JoinOperator::RightOuter => {
            for column in left_context.all_column_bindings() {
                let mut nullable_column = column.clone();
                nullable_column.data_type = Box::new(column.data_type.wrap_nullable());
                bind_context.add_column_binding(nullable_column);
            }
            for column in right_context.all_column_bindings().iter() {
                bind_context.add_column_binding(column.clone());
            }
        }
        JoinOperator::FullOuter => {
            // Full outer: both sides may be NULL-padded.
            for column in left_context.all_column_bindings() {
                let mut nullable_column = column.clone();
                nullable_column.data_type = Box::new(column.data_type.wrap_nullable());
                bind_context.add_column_binding(nullable_column);
            }
            for column in right_context.all_column_bindings().iter() {
                let mut nullable_column = column.clone();
                nullable_column.data_type = Box::new(column.data_type.wrap_nullable());
                bind_context.add_column_binding(nullable_column);
            }
        }
        _ => {
            for column in left_context.all_column_bindings() {
                bind_context.add_column_binding(column.clone());
            }
            for column in right_context.all_column_bindings() {
                bind_context.add_column_binding(column.clone());
            }
        }
    }
}

/// Rejects `t JOIN t` style self-joins without aliases: compares the table
/// name of the first column binding on each side.
pub fn check_duplicate_join_tables(
    left_context: &BindContext,
    right_context: &BindContext,
) -> Result<()> {
    let left_column_bindings = left_context.all_column_bindings();
    let left_table_name = if left_column_bindings.is_empty() {
        None
    } else {
        left_column_bindings[0].table_name.as_ref()
    };
    let right_column_bindings = right_context.all_column_bindings();
    let right_table_name = if right_column_bindings.is_empty() {
        None
    } else {
        right_column_bindings[0].table_name.as_ref()
    };
    if let Some(left) = left_table_name {
        if let Some(right) = right_table_name {
            if left.eq(right) {
                return Err(ErrorCode::SemanticError(format!(
                    "Duplicated table name {} in the same FROM clause",
                    left
                )));
            }
        }
    }
    Ok(())
}

/// Resolves one join's condition (ON / USING / NATURAL / none) into the
/// partitioned condition vectors, mutating `join_context` with the joined
/// output columns as a side effect.
struct JoinConditionResolver<'a> {
    ctx: Arc<dyn TableContext>,
    name_resolution_ctx: &'a NameResolutionContext,
    metadata: MetadataRef,
    join_op: JoinOperator,
    left_context: &'a BindContext,
    right_context: &'a BindContext,
    join_context: &'a mut BindContext,
    join_condition: &'a JoinCondition,
}

impl<'a> JoinConditionResolver<'a> {
    #[allow(clippy::too_many_arguments)]
    pub fn new(
        ctx: Arc<dyn TableContext>,
        name_resolution_ctx: &'a NameResolutionContext,
        metadata: MetadataRef,
        join_op: JoinOperator,
        left_context: &'a BindContext,
        right_context: &'a BindContext,
        join_context: &'a mut BindContext,
        join_condition: &'a JoinCondition,
    ) -> Self {
        Self {
            ctx,
            name_resolution_ctx,
            metadata,
            join_op,
            left_context,
            right_context,
            join_context,
            join_condition,
        }
    }

    /// Dispatches on the condition form. Every branch ends up calling
    /// `wrap_nullable_for_column` (directly for `None`, inside the helpers
    /// otherwise) so `join_context` always gains the joined columns.
    pub async fn resolve(
        &mut self,
        left_join_conditions: &mut Vec<ScalarExpr>,
        right_join_conditions: &mut Vec<ScalarExpr>,
        non_equi_conditions: &mut Vec<ScalarExpr>,
        other_join_conditions: &mut Vec<ScalarExpr>,
        join_op: &JoinOperator,
    ) -> Result<()> {
        match &self.join_condition {
            JoinCondition::On(cond) => {
                self.resolve_on(
                    cond,
                    left_join_conditions,
                    right_join_conditions,
                    non_equi_conditions,
                    other_join_conditions,
                )
                .await?;
            }
            JoinCondition::Using(identifiers) => {
                let using_columns = identifiers
                    .iter()
                    .map(|ident| {
                        (
                            ident.span,
                            normalize_identifier(ident, self.name_resolution_ctx).name,
                        )
                    })
                    .collect();
                self.resolve_using(
                    using_columns,
                    left_join_conditions,
                    right_join_conditions,
                    join_op,
                )
                .await?;
            }
            JoinCondition::Natural => {
                // NATURAL is a shorthand form of USING: it forms a USING list consisting of all column names that appear in both input tables
                // As with USING, these columns appear only once in the output table
                // Todo(xudong963) If there are no common column names, NATURAL JOIN behaves like JOIN ... ON TRUE, producing a cross-product join.
                let mut using_columns = vec![];
                // Find common columns in both input tables
                self.find_using_columns(&mut using_columns)?;
                self.resolve_using(
                    using_columns,
                    left_join_conditions,
                    right_join_conditions,
                    join_op,
                )
                .await?
            }
            JoinCondition::None => {
                wrap_nullable_for_column(
                    &self.join_op,
                    self.left_context,
                    self.right_context,
                    self.join_context,
                );
            }
        }
        Ok(())
    }

    /// Splits an ON expression into AND-conjuncts and classifies each one.
    async fn resolve_on(
        &mut self,
        condition: &Expr,
        left_join_conditions: &mut Vec<ScalarExpr>,
        right_join_conditions: &mut Vec<ScalarExpr>,
        non_equi_conditions: &mut Vec<ScalarExpr>,
        other_join_conditions: &mut Vec<ScalarExpr>,
    ) -> Result<()> {
        let conjunctions = split_conjunctions_expr(condition);
        for expr in conjunctions.iter() {
            self.resolve_predicate(
                expr,
                left_join_conditions,
                right_join_conditions,
                non_equi_conditions,
                other_join_conditions,
            )
            .await?;
        }
        wrap_nullable_for_column(
            &self.join_op,
            self.left_context,
            self.right_context,
            self.join_context,
        );
        Ok(())
    }

    /// Classifies one conjunct: equi-condition if it splits into `a = b`
    /// with each side on one table; otherwise tries the "other"
    /// (pushable) bucket; otherwise it is a non-equi condition.
    async fn resolve_predicate(
        &self,
        predicate: &Expr,
        left_join_conditions: &mut Vec<ScalarExpr>,
        right_join_conditions: &mut Vec<ScalarExpr>,
        non_equi_conditions: &mut Vec<ScalarExpr>,
        other_join_conditions: &mut Vec<ScalarExpr>,
    ) -> Result<()> {
        // Bind against a throwaway clone so `self.join_context` isn't
        // mutated here (resolve_on wraps it afterwards).
        let mut join_context = (*self.join_context).clone();
        wrap_nullable_for_column(
            &self.join_op,
            self.left_context,
            self.right_context,
            &mut join_context,
        );
        let mut scalar_binder = ScalarBinder::new(
            &mut join_context,
            self.ctx.clone(),
            self.name_resolution_ctx,
            self.metadata.clone(),
            &[],
        );
        // Given two tables: t1(a, b), t2(a, b)
        // A predicate can be regarded as an equi-predicate iff:
        //
        // - The predicate is literally an equivalence expression, e.g. `t1.a = t2.a`
        // - Each side of `=` only contains columns from one table and the both sides are disjoint.
        //   For example, `t1.a + t1.b = t2.a` is a valid one while `t1.a + t2.a = t2.b` isn't.
        //
        // Only equi-predicate can be exploited by common join algorithms(e.g. sort-merge join, hash join).
        let mut added = if let Some((left, right)) = split_equivalent_predicate_expr(predicate) {
            let (left, _) = scalar_binder.bind(&left).await?;
            let (right, _) = scalar_binder.bind(&right).await?;
            self.add_equi_conditions(left, right, left_join_conditions, right_join_conditions)?
        } else {
            false
        };
        if !added {
            added = self
                .add_other_conditions(predicate, other_join_conditions)
                .await?;
            if !added {
                let (predicate, _) = scalar_binder.bind(predicate).await?;
                non_equi_conditions.push(predicate);
            }
        }
        Ok(())
    }

    /// Resolves USING (and NATURAL) columns: for each named column, finds
    /// its binding on each side (left side occupies the first
    /// `left_columns_len` entries of the merged context), emits the
    /// equi-condition pair, and hides the duplicate output column from
    /// unqualified wildcards.
    async fn resolve_using(
        &mut self,
        using_columns: Vec<(Span, String)>,
        left_join_conditions: &mut Vec<ScalarExpr>,
        right_join_conditions: &mut Vec<ScalarExpr>,
        join_op: &JoinOperator,
    ) -> Result<()> {
        wrap_nullable_for_column(
            &self.join_op,
            self.left_context,
            self.right_context,
            self.join_context,
        );
        let left_columns_len = self.left_context.columns.len();
        for (span, join_key) in using_columns.iter() {
            let join_key_name = join_key.as_str();
            let left_scalar = if let Some(col_binding) = self.join_context.columns
                [0..left_columns_len]
                .iter()
                .find(|col_binding| col_binding.column_name == join_key_name)
            {
                ScalarExpr::BoundColumnRef(BoundColumnRef {
                    span: *span,
                    column: col_binding.clone(),
                })
            } else {
                return Err(ErrorCode::SemanticError(format!(
                    "column {} specified in USING clause does not exist in left table",
                    join_key_name
                ))
                .set_span(*span));
            };
            let right_scalar = if let Some(col_binding) = self.join_context.columns
                [left_columns_len..]
                .iter()
                .find(|col_binding| col_binding.column_name == join_key_name)
            {
                ScalarExpr::BoundColumnRef(BoundColumnRef {
                    span: *span,
                    column: col_binding.clone(),
                })
            } else {
                return Err(ErrorCode::SemanticError(format!(
                    "column {} specified in USING clause does not exist in right table",
                    join_key_name
                ))
                .set_span(*span));
            };
            // RIGHT OUTER keeps the right-side column visible (idx 0 hides
            // the left occurrence); every other join hides the right one.
            let idx = !matches!(join_op, JoinOperator::RightOuter) as usize;
            if let Some(col_binding) = self
                .join_context
                .columns
                .iter_mut()
                .filter(|col_binding| col_binding.column_name == join_key_name)
                .nth(idx)
            {
                // Always make the second using column in the join_context invisible in unqualified wildcard.
                col_binding.visibility = Visibility::UnqualifiedWildcardInVisible;
            }
            self.add_equi_conditions(
                left_scalar,
                right_scalar,
                left_join_conditions,
                right_join_conditions,
            )?;
        }
        Ok(())
    }

    /// Records `left = right` as an equi-condition pair, first unifying both
    /// sides to their least common super type. Sides are swapped if the
    /// expressions reference the opposite tables; returns false when the
    /// predicate straddles both tables on one side (not an equi-condition).
    fn add_equi_conditions(
        &self,
        mut left: ScalarExpr,
        mut right: ScalarExpr,
        left_join_conditions: &mut Vec<ScalarExpr>,
        right_join_conditions: &mut Vec<ScalarExpr>,
    ) -> Result<bool> {
        let left_used_columns = left.used_columns();
        let right_used_columns = right.used_columns();
        let (left_columns, right_columns) = self.left_right_columns()?;
        // Bump types of left conditions and right conditions
        let left_type = left.data_type()?;
        let right_type = right.data_type()?;
        if left_type.ne(&right_type) {
            let least_super_type = common_super_type(
                left_type.clone(),
                right_type.clone(),
                &BUILTIN_FUNCTIONS.default_cast_rules,
            )
            .ok_or_else(|| {
                ErrorCode::Internal(format!(
                    "Left type {left_type} and right type {right_type} cannot be matched"
                ))
            })?;
            // Wrap cast for both left and right, `cast` can change the physical type of the data block
            // Related issue: https://github.com/datafuselabs/databend/issues/7650
            left = wrap_cast(&left, &least_super_type);
            right = wrap_cast(&right, &least_super_type);
        }
        if left_used_columns.is_subset(&left_columns)
            && right_used_columns.is_subset(&right_columns)
        {
            left_join_conditions.push(left);
            right_join_conditions.push(right);
            return Ok(true);
        } else if left_used_columns.is_subset(&right_columns)
            && right_used_columns.is_subset(&left_columns)
        {
            left_join_conditions.push(right);
            right_join_conditions.push(left);
            return Ok(true);
        }
        Ok(false)
    }

    /// Attempts to classify `predicate` as an "other" (join-pushable)
    /// condition; binds it against an Inner-join view of the contexts.
    /// (Definition continues beyond this excerpt.)
    async fn add_other_conditions(
        &self,
        predicate: &Expr,
        other_join_conditions: &mut Vec<ScalarExpr>,
    ) -> Result<bool> {
        let mut join_context = (*self.join_context).clone();
        wrap_nullable_for_column(
            &JoinOperator::Inner,
            self.left_context,
            self.right_context,
            &mut join_context,
        );
        let mut scalar_binder = ScalarBinder::new(
            &mut join_context,
            self.ctx.clone(),
            self.name_resolution_ctx,
            self.metadata.clone(),
            &[],
        );
        let (predicate,
_) = scalar_binder.bind(predicate).await?; let predicate_used_columns = predicate.used_columns(); let (left_columns, right_columns) = self.left_right_columns()?; match self.join_op { JoinOperator::LeftOuter => { if predicate_used_columns.is_subset(&right_columns) { other_join_conditions.push(predicate); return Ok(true); } } JoinOperator::RightOuter => { if predicate_used_columns.is_subset(&left_columns) { other_join_conditions.push(predicate); return Ok(true); } } JoinOperator::Inner => { if predicate_used_columns.is_subset(&left_columns) || predicate_used_columns.is_subset(&right_columns) { other_join_conditions.push(predicate); return Ok(true); } } _ => { return Ok(false); } } Ok(false) } fn left_right_columns(&self) -> Result<(ColumnSet, ColumnSet)> { let left_columns: ColumnSet = self.left_context .all_column_bindings() .iter() .fold(ColumnSet::new(), |mut acc, v| { acc.insert(v.index); acc }); let right_columns: ColumnSet = self.right_context .all_column_bindings() .iter() .fold(ColumnSet::new(), |mut acc, v| { acc.insert(v.index); acc }); Ok((left_columns, right_columns)) } fn find_using_columns(&self, using_columns: &mut Vec<(Span, String)>) -> Result<()> { for left_column in self.left_context.all_column_bindings().iter() { for right_column in self.right_context.all_column_bindings().iter() { if left_column.column_name == right_column.column_name { using_columns.push((None, left_column.column_name.clone())); } } } Ok(()) } }
use anchor_lang::prelude::*; #[program] pub mod yta_token { use super::*; pub fn initialize(ctx: Context<Initialize>) -> ProgramResult { let yta_token = &mut ctx.accounts.yta_account; yta_token.amount=0; yta_token.authority = *ctx.accounts.authority.key; Ok(()) } pub fn increase(ctx: Context<Increase>,amount:u64)->ProgramResult{ let yta_token = &mut ctx.accounts.yta_account; yta_token.amount+=amount; Ok(()) } } #[derive(Accounts)] pub struct Initialize<'info> { #[account(init)] pub yta_account:ProgramAccount<'info,YTAToken>, #[account(signer)] pub authority:AccountInfo<'info>, pub rent:Sysvar<'info,Rent> } #[derive(Accounts)] pub struct Increase<'info>{ #[account(mut, has_one = authority)] pub yta_account:ProgramAccount<'info,YTAToken>, #[account(signer)] pub authority:AccountInfo<'info>, } #[account] pub struct YTAToken{ pub amount:u64, pub authority:Pubkey }
use crate::interface::RedeemStakeBatchReceipt; use crate::{ domain, interface::{BatchId, TimestampedStakeBalance, YoctoNear}, }; use near_sdk::serde::{Deserialize, Serialize}; #[derive(Serialize, Deserialize, Debug, Clone)] #[serde(crate = "near_sdk::serde")] pub struct RedeemStakeBatch { pub id: BatchId, pub balance: TimestampedStakeBalance, /// if receipt is present it means the STAKE has been redeemed and the unstaked NEAR is still locked /// by the staking pool for withdrawal pub receipt: Option<RedeemStakeBatchReceipt>, /// the NEAR value of the redeemed STAKE computed from [stake_token_value](RedeemStakeBatchReceipt::stake_token_value) pub redeemed_stake_value: Option<YoctoNear>, } impl RedeemStakeBatch { pub fn from(batch: domain::RedeemStakeBatch, receipt: Option<RedeemStakeBatchReceipt>) -> Self { let redeemed_stake_value = receipt.as_ref().map(|receipt| { domain::StakeTokenValue::from(receipt.stake_token_value.clone()) .stake_to_near(batch.balance().amount().into()) .into() }); Self { id: BatchId(batch.id().0.into()), balance: batch.balance().into(), receipt, redeemed_stake_value, } } }
use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::{self, Read};
use std::process;

use regex::{Captures, Regex};

const PROG_NAME: &str = "ser";

/// Print the command-line usage string.
fn usage() {
    println!("USAGE: {} [-h, --help] [FILE]", PROG_NAME);
}

/// Print `output` to stderr and exit with a non-zero status.
fn exit_err(output: &str) -> ! {
    eprintln!("{}", output);
    process::exit(1)
}

fn main() {
    let mut args = env::args_os();
    args.next(); // skip argv[0]

    let mut buffer = String::new();
    let get_from_stdin = |buffer: &mut String| {
        // Idiom fix: `.is_err()` instead of `if let Err(_) = ...`.
        if io::stdin().read_to_string(buffer).is_err() {
            exit_err("unable to read from stdin!");
        }
    };
    if let Some(arg_os) = args.next() {
        if let Ok(arg) = arg_os.into_string() {
            if arg == "-h" || arg == "--help" {
                usage();
                return;
            } else if arg == "-" {
                get_from_stdin(&mut buffer);
            } else if let Ok(mut handle) = File::open(&arg) {
                // Flattened from the original `else { if let ... }` nesting.
                if handle.read_to_string(&mut buffer).is_err() {
                    exit_err("unable to read from file!");
                }
            } else {
                exit_err("unable to open file!");
            }
        } else {
            exit_err("unable to parse input!");
        }
    } else {
        get_from_stdin(&mut buffer);
    }
    let buffer = buffer; // freeze: no further mutation

    // Snapshot of the process environment for variable lookups.
    // Idiom fix: collect the iterator directly instead of a manual loop.
    let environment: HashMap<String, String> = env::vars().collect();

    // `~~~#NAME#~~~` placeholders: odd tilde count substitutes $NAME,
    // even tilde count escapes the placeholder (tildes are halved).
    let r = Regex::new(r"(~+)#([a-zA-Z-_]+)#(~+)").unwrap();
    // Idiom fix: `str::repeat` instead of iter::repeat().take().collect().
    let repeat = |c: char, n: usize| -> String { c.to_string().repeat(n) };

    // replace_all's closure cannot return an error, so smuggle the first
    // failure out via this captured Option.
    let mut error = None;
    let output = r.replace_all(&buffer, |cap: &Captures| -> String {
        if cap[1].len() == cap[3].len() {
            if cap[1].len() % 2 == 0 {
                // Escaped placeholder: halve the tildes, keep #NAME# literal.
                let n = cap[1].len() / 2;
                format!("{}#{}#{}", repeat('~', n), &cap[2], repeat('~', n))
            } else {
                let n = (cap[1].len() - 1) / 2;
                if let Some(value) = environment.get(&cap[2]) {
                    format!("{}{}{}", repeat('~', n), value, repeat('~', n))
                } else {
                    error = Some(format!(
                        "environment variable '{}' does not exist!",
                        &cap[2]
                    ));
                    String::new()
                }
            }
        } else {
            // Unequal tilde runs on the two sides of the placeholder.
            error = Some("mismatched formatting!".to_string());
            String::new()
        }
    });
    if let Some(err) = error {
        exit_err(&err);
    }
    print!("{}", output);
}
// NOTE(review): svd2rust-generated register accessors for PCI2C
// (I2C module power control, bits P0..P9). Do not hand-edit the logic —
// regenerate from the SVD instead; only comments were added here.
#[doc = "Reader of register PCI2C"]
pub type R = crate::R<u32, super::PCI2C>;
#[doc = "Writer for register PCI2C"]
pub type W = crate::W<u32, super::PCI2C>;
#[doc = "Register PCI2C `reset()`'s with value 0"]
impl crate::ResetValue for super::PCI2C {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `P0`"]
pub type P0_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `P0`"]
pub struct P0_W<'a> {
    w: &'a mut W,
}
impl<'a> P0_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 0, then OR in the new value.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
#[doc = "Reader of field `P1`"]
pub type P1_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `P1`"]
pub struct P1_W<'a> {
    w: &'a mut W,
}
impl<'a> P1_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
#[doc = "Reader of field `P2`"]
pub type P2_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `P2`"]
pub struct P2_W<'a> {
    w: &'a mut W,
}
impl<'a> P2_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
        self.w
    }
}
#[doc = "Reader of field `P3`"]
pub type P3_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `P3`"]
pub struct P3_W<'a> {
    w: &'a mut W,
}
impl<'a> P3_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
        self.w
    }
}
#[doc = "Reader of field `P4`"]
pub type P4_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `P4`"]
pub struct P4_W<'a> {
    w: &'a mut W,
}
impl<'a> P4_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
        self.w
    }
}
#[doc = "Reader of field `P5`"]
pub type P5_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `P5`"]
pub struct P5_W<'a> {
    w: &'a mut W,
}
impl<'a> P5_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
        self.w
    }
}
#[doc = "Reader of field `P6`"]
pub type P6_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `P6`"]
pub struct P6_W<'a> {
    w: &'a mut W,
}
impl<'a> P6_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);
        self.w
    }
}
#[doc = "Reader of field `P7`"]
pub type P7_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `P7`"]
pub struct P7_W<'a> {
    w: &'a mut W,
}
impl<'a> P7_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);
        self.w
    }
}
#[doc = "Reader of field `P8`"]
pub type P8_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `P8`"]
pub struct P8_W<'a> {
    w: &'a mut W,
}
impl<'a> P8_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);
        self.w
    }
}
#[doc = "Reader of field `P9`"]
pub type P9_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `P9`"]
pub struct P9_W<'a> {
    w: &'a mut W,
}
impl<'a> P9_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);
        self.w
    }
}
// Read accessors: each pn() extracts bit n of the register value.
impl R {
    #[doc = "Bit 0 - I2C Module 0 Power Control"]
    #[inline(always)]
    pub fn p0(&self) -> P0_R {
        P0_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - I2C Module 1 Power Control"]
    #[inline(always)]
    pub fn p1(&self) -> P1_R {
        P1_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - I2C Module 2 Power Control"]
    #[inline(always)]
    pub fn p2(&self) -> P2_R {
        P2_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 3 - I2C Module 3 Power Control"]
    #[inline(always)]
    pub fn p3(&self) -> P3_R {
        P3_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bit 4 - I2C Module 4 Power Control"]
    #[inline(always)]
    pub fn p4(&self) -> P4_R {
        P4_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 5 - I2C Module 5 Power Control"]
    #[inline(always)]
    pub fn p5(&self) -> P5_R {
        P5_R::new(((self.bits >> 5) & 0x01) != 0)
    }
    #[doc = "Bit 6 - I2C Module 6 Power Control"]
    #[inline(always)]
    pub fn p6(&self) -> P6_R {
        P6_R::new(((self.bits >> 6) & 0x01) != 0)
    }
    #[doc = "Bit 7 - I2C Module 7 Power Control"]
    #[inline(always)]
    pub fn p7(&self) -> P7_R {
        P7_R::new(((self.bits >> 7) & 0x01) != 0)
    }
    #[doc = "Bit 8 - I2C Module 8 Power Control"]
    #[inline(always)]
    pub fn p8(&self) -> P8_R {
        P8_R::new(((self.bits >> 8) & 0x01) != 0)
    }
    #[doc = "Bit 9 - I2C Module 9 Power Control"]
    #[inline(always)]
    pub fn p9(&self) -> P9_R {
        P9_R::new(((self.bits >> 9) & 0x01) != 0)
    }
}
// Write accessors: each pn() returns a proxy that mutates bit n in place.
impl W {
    #[doc = "Bit 0 - I2C Module 0 Power Control"]
    #[inline(always)]
    pub fn p0(&mut self) -> P0_W {
        P0_W { w: self }
    }
    #[doc = "Bit 1 - I2C Module 1 Power Control"]
    #[inline(always)]
    pub fn p1(&mut self) -> P1_W {
        P1_W { w: self }
    }
    #[doc = "Bit 2 - I2C Module 2 Power Control"]
    #[inline(always)]
    pub fn p2(&mut self) -> P2_W {
        P2_W { w: self }
    }
    #[doc = "Bit 3 - I2C Module 3 Power Control"]
    #[inline(always)]
    pub fn p3(&mut self) -> P3_W {
        P3_W { w: self }
    }
    #[doc = "Bit 4 - I2C Module 4 Power Control"]
    #[inline(always)]
    pub fn p4(&mut self) -> P4_W {
        P4_W { w: self }
    }
    #[doc = "Bit 5 - I2C Module 5 Power Control"]
    #[inline(always)]
    pub fn p5(&mut self) -> P5_W {
        P5_W { w: self }
    }
    #[doc = "Bit 6 - I2C Module 6 Power Control"]
    #[inline(always)]
    pub fn p6(&mut self) -> P6_W {
        P6_W { w: self }
    }
    #[doc = "Bit 7 - I2C Module 7 Power Control"]
    #[inline(always)]
    pub fn p7(&mut self) -> P7_W {
        P7_W { w: self }
    }
    #[doc = "Bit 8 - I2C Module 8 Power Control"]
    #[inline(always)]
    pub fn p8(&mut self) -> P8_W {
        P8_W { w: self }
    }
    #[doc = "Bit 9 - I2C Module 9 Power Control"]
    #[inline(always)]
    pub fn p9(&mut self) -> P9_W {
        P9_W { w: self }
    }
}
use crate::error::Error; use std::{io::Read, str, u16}; use tokio::io::AsyncReadExt; /* * By "atom", I mean an individual part of a minecraft packet, such as an int, varint, or string. */ /* * The other methods here are non-async since we'll read a full packet at a time * before parsing it, but we need to async read the number at the start of each packet that tells * us how long it is before we can read it into a byte buffer */ pub async fn read_varint_async<S: AsyncReadExt + Unpin>(source: &mut S) -> Result<i32, Error> { let mut num_read: u64 = 0; let mut result: i32 = 0; let mut buf = [0; 1]; // 1 byte at a time loop { let _bytes_read = source.read_exact(&mut buf).await?; let byte = buf[0]; let value = (byte & 0b01111111) as i32; result |= value << (7 * num_read); num_read += 1; if num_read > 5 { return Err("VarInt is too big".into()); } if byte & 0b10000000 == 0 { break; } } Ok(result) } pub fn read_varint(source: &mut impl Read) -> Result<i32, Error> { let mut num_read: u64 = 0; let mut result: i32 = 0; let mut buf = [0; 1]; // 1 byte at a time loop { let _bytes_read = source.read_exact(&mut buf)?; let byte = buf[0]; let value = (byte & 0b01111111) as i32; result |= value << (7 * num_read); num_read += 1; if num_read > 5 { return Err("VarInt is too big".into()); } if byte & 0b10000000 == 0 { break; } } Ok(result) } #[cfg(test)] struct VarIntTestCase(i32, Vec<u8>); #[cfg(test)] fn cases() -> Vec<VarIntTestCase> { vec![ VarIntTestCase(0, vec![0x00]), VarIntTestCase(1, vec![0x01]), VarIntTestCase(255, vec![0xff, 0x01]), VarIntTestCase(2147483647, vec![0xff, 0xff, 0xff, 0xff, 0x07]), ] } #[tokio::test] async fn test_read_varint_async() -> Result<(), Error> { use std::io::Cursor; for case in cases() { let mut buf = Cursor::new(case.1); assert_eq!(case.0, read_varint_async(&mut buf).await?); } Ok(()) } #[test] fn test_read_varint() -> Result<(), Error> { use std::io::Cursor; for case in cases() { let mut buf = Cursor::new(case.1); assert_eq!(case.0, read_varint(&mut 
buf)?); } Ok(()) } pub fn read_string(source: &mut impl Read) -> Result<String, Error> { let size = read_varint(source)? as usize; let mut buf: Vec<u8> = vec![0; size]; source.read_exact(&mut buf)?; Ok(str::from_utf8(&buf)?.to_owned()) } #[test] fn test_read_string() -> Result<(), Error> { let mut buf: &[u8] = &[0x02, 0x48, 0x49]; // Varint<2>, Utf8<H>, Utf8<I> assert_eq!("HI", read_string(&mut buf)?); Ok(()) } pub fn read_u16(source: &mut impl Read) -> Result<u16, Error> { let mut buf = [0; 2]; source.read(&mut buf)?; Ok(u16::from_be_bytes(buf)) } #[test] fn test_read_u16() -> Result<(), Error> { let mut buf: &[u8] = &(1 as u16).to_be_bytes(); assert_eq!(1, read_u16(&mut buf)?); Ok(()) } pub fn read_i64(source: &mut impl Read) -> Result<i64, Error> { let mut buf = [0; 8]; source.read(&mut buf)?; Ok(i64::from_be_bytes(buf)) } #[test] fn test_read_i64() -> Result<(), Error> { let mut buf: &[u8] = &(-1 as i64).to_be_bytes(); assert_eq!(-1, read_i64(&mut buf)?); Ok(()) }
#![allow(non_snake_case)]
// C-ABI (`#[repr(C)]`) mirror structs for ETH chain/token data crossing the
// FFI boundary. Field names intentionally keep camelCase to match the
// consuming side, hence the allow above.
// NOTE(review): the DlStruct/DlDefault/DlCR derives are project macros —
// presumably they generate the C-struct/default/conversion plumbing; the
// unused-looking imports (EthChain*, to_str, to_c_char, Assignment, CR,
// CStruct) are likely consumed by those macro expansions — confirm before
// pruning them.
use wallets_macro::{DlCR, DlDefault, DlStruct};
use wallets_types::{EthChain, EthChainToken, EthChainTokenAuth, EthChainTokenDefault, EthChainTokenNonAuth,EthChainTokenShared};
use crate::kits::{CArray, CBool,CR, CStruct,to_str,to_c_char,Assignment};
use crate::types::{CChainShared, CTokenShared};
use std::os::raw::c_char;

/// FFI view of a token attached to a chain instance.
#[repr(C)]
#[derive(Debug, DlStruct, DlDefault, DlCR)]
pub struct CEthChainToken {
    // id linking back to the shared token record
    pub chainTokenSharedId: *mut c_char,
    // whether the token is shown in the UI
    pub show: CBool,
    pub contractAddress: *mut c_char,
    // owned pointer to the shared token data
    pub ethChainTokenShared: *mut CEthChainTokenShared,
}

/// FFI view of data shared by all instances of a token.
#[repr(C)]
#[derive(Debug, DlStruct, DlDefault, DlCR)]
pub struct CEthChainTokenShared {
    pub tokenShared: *mut CTokenShared,
    pub tokenType: *mut c_char,
    pub gasLimit: i64,
    // gas price kept as a string (large integer)
    pub gasPrice: *mut c_char,
    pub decimal: i32,
}

/// FFI view of a default (pre-provisioned) token entry.
#[repr(C)]
#[derive(Debug, DlStruct, DlDefault, DlCR)]
pub struct CEthChainTokenDefault {
    pub chainTokenSharedId: *mut c_char,
    pub netType: *mut c_char,
    // display ordering
    pub position: i64,
    pub contractAddress: *mut c_char,
    pub ethChainTokenShared: *mut CEthChainTokenShared,
}

/// FFI view of an authenticated token entry (same layout as Default/NonAuth).
#[repr(C)]
#[derive(Debug, DlStruct, DlDefault, DlCR)]
pub struct CEthChainTokenAuth {
    pub chainTokenSharedId: *mut c_char,
    pub netType: *mut c_char,
    pub position: i64,
    pub contractAddress: *mut c_char,
    pub ethChainTokenShared: *mut CEthChainTokenShared,
}

/// FFI view of a non-authenticated token entry.
#[repr(C)]
#[derive(Debug, DlStruct, DlDefault, DlCR)]
pub struct CEthChainTokenNonAuth {
    pub chainTokenSharedId: *mut c_char,
    pub netType: *mut c_char,
    pub position: i64,
    pub contractAddress: *mut c_char,
    pub ethChainTokenShared: *mut CEthChainTokenShared,
}

/// FFI view of a chain together with its token list.
#[repr(C)]
#[derive(Debug, DlStruct, DlDefault, DlCR)]
pub struct CEthChain {
    pub chainShared: *mut CChainShared,
    pub tokens: *mut CArray<CEthChainToken>,
}
use futures::stream::Fuse;
use futures::{Async, Poll, Stream};

/// A simple extension to the futures::Stream allowing to take elements in
/// batches of a given maximum size.
pub trait BatchStream {
    /// Create a stream of batches of stream items. Each batch will contain
    /// at most max_batch_size elements.
    fn batch_stream(self, max_batch_size: usize) -> Batch<Self>
    where
        Self: Stream + Sized,
    {
        Batch::new(self, max_batch_size)
    }
}

impl<T> BatchStream for T where T: Stream {}

/// Batch combinator.
pub struct Batch<S> {
    // Fused so polling again after the inner stream finished is safe.
    stream: Fuse<S>,
    // Upper bound on the number of items per emitted batch.
    max_size: usize,
}

impl<S> Batch<S> {
    /// Create a new batch combinator.
    fn new(stream: S, max_batch_size: usize) -> Batch<S>
    where
        S: Stream,
    {
        Batch {
            stream: stream.fuse(),
            max_size: max_batch_size,
        }
    }
}

impl<S> Stream for Batch<S>
where
    S: Stream,
{
    type Item = Vec<S::Item>;
    type Error = S::Error;

    fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> {
        let mut res = Vec::new();
        // Drain items that are immediately ready until the batch is full,
        // the inner stream terminates, or it reports NotReady.
        // NOTE(review): if max_size == 0 this loop never polls the inner
        // stream, so poll() returns NotReady without a wakeup registered —
        // presumably callers always pass max_batch_size > 0; confirm.
        while res.len() < self.max_size {
            if let Async::Ready(item) = self.stream.poll()? {
                if let Some(item) = item {
                    res.push(item);
                } else if res.is_empty() {
                    // Inner stream ended with nothing buffered: end this stream.
                    return Ok(Async::Ready(None));
                } else {
                    // Inner stream ended: flush the final, possibly partial batch.
                    return Ok(Async::Ready(Some(res)));
                }
            } else {
                // NotReady: stop collecting. The inner poll() has registered
                // the current task, so returning NotReady below is sound.
                break;
            }
        }
        if res.is_empty() {
            Ok(Async::NotReady)
        } else {
            // Full batch, or partial batch cut short by NotReady.
            Ok(Async::Ready(Some(res)))
        }
    }
}
use super::*; pub use std::collections::BTreeSet; #[derive(Clone, PartialEq, PartialOrd, Eq, Ord)] pub struct TypeDef { pub row: Row, pub generics: Vec<ElementType>, } impl From<Row> for TypeDef { fn from(row: Row) -> Self { Self { row, generics: Vec::new() } } } impl TypeDef { pub fn with_generics(mut self) -> Self { self.generics = self.generic_params().map(|generic| ElementType::GenericParam(generic.name().to_string())).collect(); self } pub fn is_callback(&self) -> bool { !self.is_winrt() && self.kind() == TypeKind::Delegate } pub fn has_default_constructor(&self) -> bool { for attribute in self.attributes() { if attribute.name() == "ActivatableAttribute" { if attribute.args().iter().any(|arg| matches!(arg.1, ConstantValue::TypeDef(_))) { continue; } else { return true; } } } false } pub fn invoke_method(&self) -> MethodDef { self.methods().find(|m| m.name() == "Invoke").expect("`Invoke` method not found") } pub fn default_interface(&self) -> Option<Self> { for interface in self.interface_impls() { if interface.is_default() { if let ElementType::TypeDef(def) = interface.generic_interface(&self.generics) { return Some(def); } } } None } pub fn interfaces(&self) -> impl Iterator<Item = Self> + '_ { self.interface_impls().filter_map(move |i| if let ElementType::TypeDef(def) = i.generic_interface(&self.generics) { Some(def) } else { None }) } pub fn is_packed(&self) -> bool { if self.kind() != TypeKind::Struct { return false; } if self.class_layout().is_some() { return true; } self.fields().any(|field| field.signature(Some(self)).is_packed()) } pub fn size(&self) -> usize { if self.kind() == TypeKind::Struct { self.fields().fold(0, |sum, field| sum + field.signature(Some(self)).size()) } else { 1 } } pub fn is_handle(&self) -> bool { self.has_attribute("NativeTypedefAttribute") } pub fn include_dependencies(&self, include: TypeInclude) { match self.kind() { TypeKind::Interface => { if include == TypeInclude::Minimal { return; } self.interfaces().for_each(|i| 
i.include_definition(include)); self.methods().for_each(|m| m.include_dependencies()); } TypeKind::Class => { if include == TypeInclude::Minimal { if let Some(default_interface) = self.default_interface() { default_interface.include_definition(TypeInclude::Minimal); } return; } // TODO: test for this? self.generics.iter().for_each(|g| g.include_definition(TypeInclude::Minimal)); self.interfaces().for_each(|i| i.include_definition(TypeInclude::Full)); self.bases().for_each(|b| b.include_definition(TypeInclude::Full)); self.attributes().for_each(|attribute| match attribute.name() { "StaticAttribute" | "ActivatableAttribute" | "ComposableAttribute" => { for (_, arg) in attribute.args() { if let ConstantValue::TypeDef(def) = arg { def.include_definition(TypeInclude::Full); } } } _ => {} }); } TypeKind::Struct => match self.type_name() { TypeName::BSTR => { let reader = TypeReader::get_mut(); reader.include_type_name(TypeName::SysStringLen, include); reader.include_type_name(TypeName::SysAllocStringLen, include); reader.include_type_name(TypeName::SysFreeString, include); } _ => { self.fields().for_each(|f| f.include_definition(Some(self), TypeInclude::Minimal)); if let Some(dependency) = self.is_convertible_to() { dependency.include_definition(TypeInclude::Minimal); } } }, TypeKind::Delegate => self.invoke_method().include_dependencies(), TypeKind::Enum => {} } } pub fn include_definition(&self, include: TypeInclude) { let type_name = self.type_name(); if type_name.namespace().is_empty() { self.include_dependencies(TypeInclude::Minimal); } else { TypeReader::get_mut().include_type_name(type_name, include); for generic in &self.generics { generic.include_definition(include); } } } // TODO: for sys definitions the features are less demanding since interfaces won't have dependencies pub fn features(&self, features: &mut BTreeSet<&'static str>, keys: &mut std::collections::HashSet<Row>) { if !keys.insert(self.row.clone()) { return; } let namespace = self.namespace(); if 
!namespace.is_empty() { features.insert(self.namespace()); } for generic in &self.generics { generic.features(features, keys); } match self.kind() { TypeKind::Class => { if let Some(def) = self.default_interface() { features.insert(def.namespace()); } } TypeKind::Struct => { self.fields().for_each(|def| def.features(Some(self), features, keys)); if let Some(def) = self.is_convertible_to() { // TODO: wonky features.insert(def.type_name().namespace); } } TypeKind::Delegate => self.invoke_method().signature(&[]).features(features, keys), _ => {} } if let Some(entry) = TypeReader::get().get_type_entry(self.type_name()) { for def in &entry.def { if let ElementType::TypeDef(def) = def { def.features(features, keys); } } } } pub fn is_udt(&self) -> bool { // TODO: should this just check whether the struct has > 1 fields rather than is_handle? self.kind() == TypeKind::Struct && !self.is_handle() } pub fn is_convertible(&self) -> bool { match self.kind() { TypeKind::Interface | TypeKind::Class | TypeKind::Struct => true, TypeKind::Delegate => self.is_winrt(), _ => false, } } pub fn is_primitive(&self) -> bool { self.kind() == TypeKind::Enum } pub fn is_explicit(&self) -> bool { self.row.u32(0) & 0b1_0000 != 0 } pub fn has_explicit(&self) -> bool { if self.kind() != TypeKind::Struct { return false; } if self.is_explicit() { true } else { self.fields().any(|f| f.signature(Some(self)).has_explicit()) } } pub fn type_signature(&self) -> String { match self.kind() { TypeKind::Interface => self.interface_signature(), TypeKind::Class => format!("rc({};{})", self.type_name(), self.default_interface().unwrap_or_else(|| panic!("`{}` does not have a default interface.", self.type_name())).interface_signature()), TypeKind::Enum => format!("enum({};{})", self.type_name(), self.underlying_type().type_signature()), TypeKind::Struct => { let mut result = format!("struct({}", self.type_name()); for field in self.fields() { result.push(';'); 
// NOTE(review): this first fragment is the tail of a method whose opening lines
// are outside this chunk (it appears to finish a signature-string builder — the
// struct-field arm and the Delegate arm of a match on `self.kind()`). Do not
// review it in isolation; the enclosing method's header is not visible here.
                result.push_str(&field.signature(Some(self)).kind.type_signature());
            }
            result.push(')');
            result
        }
        TypeKind::Delegate => {
            // Non-generic delegates are wrapped in `delegate(...)`; generic ones
            // use the bare pinterface signature form.
            if self.generics.is_empty() {
                format!("delegate({})", self.interface_signature())
            } else {
                self.interface_signature()
            }
        }
    }
}

/// Returns the type of the first field, used as the underlying type of an enum
/// (constant-typed field wins over the field's declared signature).
/// Panics (`unimplemented!`) when the type has no fields.
pub fn underlying_type(&self) -> ElementType {
    if let Some(field) = self.fields().next() {
        if let Some(constant) = field.constant() {
            return constant.value_type();
        } else {
            return field.signature(Some(self)).kind;
        }
    }
    unimplemented!();
}

/// Builds the WinRT signature string for an interface: `{guid}` for
/// non-generic interfaces, `pinterface({guid};arg1;arg2...)` for generic ones.
fn interface_signature(&self) -> String {
    let guid = self.guid();
    if self.generics.is_empty() {
        // `{{` / `}}` escape literal braces around the debug-formatted GUID.
        format!("{{{:#?}}}", guid)
    } else {
        let mut result = format!("pinterface({{{:#?}}}", guid);
        for generic in &self.generics {
            result.push(';');
            result.push_str(&generic.type_signature());
        }
        result.push(')');
        result
    }
}

/// True when the WindowsRuntime bit is set in the TypeDef flags column.
pub fn is_winrt(&self) -> bool {
    self.row.u32(0) & 0b100_0000_0000_0000 != 0
}

/// True when the Interface semantics bit is set in the TypeDef flags column.
pub fn is_interface(&self) -> bool {
    self.row.u32(0) & 0b10_0000 != 0
}

/// The type's name (TypeDef table, column 1).
pub fn name(&self) -> &'static str {
    self.row.str(1)
}

/// The type's namespace (TypeDef table, column 2).
pub fn namespace(&self) -> &'static str {
    self.row.str(2)
}

/// Namespace + name combined into a `TypeName`.
pub fn type_name(&self) -> TypeName {
    TypeName::new(self.namespace(), self.name())
}

/// Resolves the `extends` column (column 3) to the base type's name;
/// a zero coded index means there is no base type.
pub fn extends(&self) -> TypeName {
    let extends = self.row.u32(3);
    if extends == 0 {
        TypeName::None
    } else {
        TypeDefOrRef::decode(self.row.file, extends).type_name()
    }
}

// TODO: rename base_classes
/// Iterator over the chain of base classes, nearest first (stops at Object).
pub fn bases(&self) -> impl Iterator<Item = TypeDef> {
    Bases(self.clone())
}

/// Walks the chain of *required* base interfaces (first interface-impl of each
/// step). Returns the chain plus whether `IInspectable` was encountered along
/// the way. `IUnknown` terminates the walk without setting the flag.
pub fn base_interfaces(&self) -> (Vec<Self>, bool) {
    let mut result = Vec::new();
    let mut next = self.clone();
    let mut inspectable = false;
    while let Some(base) = next
        .interface_impls()
        .filter_map(|i| match i.generic_interface(&[]) {
            ElementType::TypeDef(def) => Some(def),
            ElementType::IUnknown => None,
            ElementType::IInspectable => {
                inspectable = true;
                None
            }
            _ => unimplemented!(),
        })
        .next()
    {
        next = base.clone();
        result.push(base);
    }
    (result, inspectable)
}

/// Iterator over this type's fields (TypeDef column 4 → Field table).
pub fn fields(&self) -> impl Iterator<Item = Field> {
    self.row.list(4, TableIndex::Field).map(Field)
}

/// Iterator over this type's methods (TypeDef column 5 → MethodDef table).
pub fn methods(&self) -> impl Iterator<Item = MethodDef> {
    self.row.list(5, TableIndex::MethodDef).map(MethodDef)
}

/// Generic parameters declared on this type (GenericParam owner column).
pub fn generic_params(&self) -> impl Iterator<Item = GenericParam> {
    self.row.file.equal_range(TableIndex::GenericParam, 2, TypeOrMethodDef::TypeDef(self.clone()).encode()).map(GenericParam)
}

/// Interfaces implemented by this type. NOTE: metadata row indices are
/// 1-based, hence `self.row.row + 1`.
pub fn interface_impls(&self) -> impl Iterator<Item = InterfaceImpl> {
    self.row.file.equal_range(TableIndex::InterfaceImpl, 0, self.row.row + 1).map(InterfaceImpl)
}

/// Nested types of this type, if any, keyed by name.
pub fn nested_types(&self) -> Option<&BTreeMap<&'static str, TypeDef>> {
    TypeReader::get().nested_types(self)
}

/// Custom attributes attached to this TypeDef.
pub fn attributes(&self) -> impl Iterator<Item = Attribute> {
    self.row.file.attributes(HasAttribute::TypeDef(self.clone()))
}

/// True when an attribute with the given (unqualified) name is present.
fn has_attribute(&self, name: &str) -> bool {
    self.attributes().any(|attribute| attribute.name() == name)
}

/// Enum marked `[Flags]`?
pub fn has_flags(&self) -> bool {
    self.has_attribute("FlagsAttribute")
}

/// Interface exclusive to a single runtime class?
pub fn is_exclusive(&self) -> bool {
    self.has_attribute("ExclusiveToAttribute")
}

/// WinRT enums are always scoped; Win32 enums only with `[ScopedEnum]`.
pub fn is_scoped(&self) -> bool {
    self.is_winrt() || self.has_attribute("ScopedEnumAttribute")
}

/// API-contract marker type?
pub fn is_api_contract(&self) -> bool {
    self.has_attribute("ApiContractAttribute")
}

/// True when `[MarshalingBehavior(2)]` (Agile) is present.
pub fn is_agile(&self) -> bool {
    self.attributes().any(|attribute| {
        if attribute.name() == "MarshalingBehaviorAttribute" {
            if let Some((_, ConstantValue::I32(2))) = attribute.args().get(0) {
                return true;
            }
        }
        false
    })
}

/// If `[AlsoUsableFor("Name")]` is present, resolves the named sibling type
/// (same namespace) this type may be converted to.
pub fn is_convertible_to(&self) -> Option<&ElementType> {
    self.attributes().find_map(|attribute| {
        if attribute.name() == "AlsoUsableForAttribute" {
            if let Some((_, ConstantValue::String(name))) = attribute.args().get(0) {
                return TypeReader::get().get_type((self.namespace(), name.as_str()));
            }
        }
        None
    })
}

/// True when a `[Composable]` attribute with a `2` (Public) argument exists.
pub fn is_public_composable(&self) -> bool {
    self.attributes().any(|attribute| attribute.name() == "ComposableAttribute" && attribute.args().iter().any(|arg| matches!(arg, (_, ConstantValue::I32(2)))))
}

/// Can values of this type be copied bit-for-bit across the ABI?
/// Structs are blittable when all fields are (BSTR special-cased as not);
/// enums always are; everything else is not.
pub fn is_blittable(&self) -> bool {
    match self.kind() {
        TypeKind::Struct => {
            // TODO: should be "if self.can_drop().is_some() {" once win32metadata bugs are fixed (423, 422, 421, 389)
            if self.type_name() == TypeName::BSTR {
                false
            } else {
                self.fields().all(|f| f.is_blittable(Some(self)))
            }
        }
        TypeKind::Enum => true,
        _ => false,
    }
}

/// Classifies the TypeDef by its flags and base type.
pub fn kind(&self) -> TypeKind {
    if self.is_interface() {
        TypeKind::Interface
    } else {
        match self.extends() {
            TypeName::Enum => TypeKind::Enum,
            TypeName::Delegate => TypeKind::Delegate,
            TypeName::Struct => TypeKind::Struct,
            _ => TypeKind::Class,
        }
    }
}

/// Extracts (major, minor) from the first Contract/Version attribute found;
/// the u32 argument packs major in the high 16 bits, minor in the low 16.
/// Defaults to (0, 0) when no such attribute exists.
pub fn version(&self) -> (u16, u16) {
    for attribute in self.attributes() {
        match attribute.name() {
            "ContractVersionAttribute" | "VersionAttribute" => {
                for (_, value) in attribute.args() {
                    if let ConstantValue::U32(value) = value {
                        return ((value >> 16) as u16, (value & 0xFFFF) as u16);
                    }
                }
            }
            _ => {}
        }
    }
    (0, 0)
}

/// The type's GUID; panics when no GUID attribute is present.
pub fn guid(&self) -> GUID {
    GUID::from_attributes(self.attributes()).expect("TypeDef::guid")
}

/// Reference types (interfaces, classes, delegates) may be null at the ABI.
pub fn is_nullable(&self) -> bool {
    matches!(self.kind(), TypeKind::Interface | TypeKind::Class | TypeKind::Delegate)
}

/// For a nested type, the type it is nested inside (1-based row index again).
pub fn enclosing_type(&self) -> Option<Self> {
    self.row.file.equal_range(TableIndex::NestedClass, 0, self.row.row + 1).map(NestedClass).next().map(|nested| nested.enclosing_type())
}

/// Explicit layout information, if the type has a ClassLayout row.
pub fn class_layout(&self) -> Option<ClassLayout> {
    self.row.file.equal_range(TableIndex::ClassLayout, 2, self.row.row + 1).map(ClassLayout).next()
}

/// Overridable interfaces of this class plus (recursively) of its nearest base.
pub fn overridable_interfaces(&self) -> Vec<TypeDef> {
    self.interface_impls().filter(|interface| interface.is_overridable()).map(|interface| interface.interface().resolve(None)).chain(self.bases().next().iter().flat_map(|base| base.overridable_interfaces())).collect()
}

/// Names of all methods on all overridable interfaces (deduplicated via BTreeSet).
pub fn overridable_methods(&self) -> BTreeSet<&'static str> {
    self.overridable_interfaces().iter().flat_map(|interface| interface.methods().map(|method| method.name())).collect()
}

/// Which WinRT async-interface family (if any) this type belongs to.
pub fn async_kind(&self) -> AsyncKind {
    match self.type_name() {
        TypeName::IAsyncAction => AsyncKind::Action,
        TypeName::IAsyncActionWithProgress => AsyncKind::ActionWithProgress,
        TypeName::IAsyncOperation => AsyncKind::Operation,
        TypeName::IAsyncOperationWithProgress => AsyncKind::OperationWithProgress,
        _ => AsyncKind::None,
    }
}
}

/// Iterator state for `TypeDef::bases`: holds the class whose base is yielded next.
struct Bases(TypeDef);

impl Iterator for Bases {
    type Item = TypeDef;

    fn next(&mut self) -> Option<Self::Item> {
        let extends = self.0.extends();
        // `Object` is the root of every class hierarchy — stop there.
        if extends == TypeName::Object {
            None
        } else {
            self.0 = TypeReader::get().expect_type_def(extends);
            Some(self.0.clone())
        }
    }
}
use druid::kurbo::{Affine, Shape, TranslateScale}; use druid::widget::prelude::*; use druid::{Color, Data, KeyOrValue}; use crate::data::GlyphDetail; use crate::theme; /// A widget that draws a glyph. pub struct GlyphPainter { color: KeyOrValue<Color>, placeholder_color: KeyOrValue<Color>, draw_frame: bool, } impl GlyphPainter { pub fn new() -> Self { GlyphPainter { color: theme::PRIMARY_TEXT_COLOR.into(), placeholder_color: theme::PLACEHOLDER_GLYPH_COLOR.into(), draw_frame: false, } } pub fn color(mut self, color: impl Into<KeyOrValue<Color>>) -> Self { self.color = color.into(); self } pub fn draw_layout_frame(mut self, draw_frame: bool) -> Self { self.draw_frame = draw_frame; self } } impl Widget<GlyphDetail> for GlyphPainter { fn event(&mut self, _ctx: &mut EventCtx, _event: &Event, _data: &mut GlyphDetail, _env: &Env) {} fn lifecycle( &mut self, _ctx: &mut LifeCycleCtx, _event: &LifeCycle, _data: &GlyphDetail, _env: &Env, ) { } fn update(&mut self, ctx: &mut UpdateCtx, old: &GlyphDetail, data: &GlyphDetail, _env: &Env) { if !old.outline.same(&data.outline) || old.glyph.advance != data.glyph.advance || (ctx.env_changed() && (ctx.env_key_changed(&self.color) || ctx.env_key_changed(&self.placeholder_color))) { ctx.request_layout(); } } fn layout( &mut self, ctx: &mut LayoutCtx, bc: &BoxConstraints, data: &GlyphDetail, _env: &Env, ) -> Size { let glyph_layout_bounds = data.layout_bounds(); let aspect_ratio = glyph_layout_bounds.aspect_ratio(); let width = bc.max().width; let size = bc.constrain_aspect_ratio(aspect_ratio, width); let scale = size.width / glyph_layout_bounds.width(); let inking_rect = TranslateScale::scale(scale) * data.outline.bounding_box(); let paint_insets = inking_rect - glyph_layout_bounds; let baseline = glyph_layout_bounds.min_y().abs() * scale; ctx.set_paint_insets(paint_insets); ctx.set_baseline_offset(baseline); size } fn paint(&mut self, ctx: &mut PaintCtx, data: &GlyphDetail, env: &Env) { let glyph_bounds = data.layout_bounds(); let 
paint_rect = ctx.size().to_rect(); let scale = paint_rect.height() as f64 / glyph_bounds.height(); let baseline = glyph_bounds.max_y().abs() * scale; let affine = Affine::new([scale as f64, 0.0, 0.0, -scale as f64, 0.0, baseline]); let glyph_color = if data.is_placeholder_glyph() { self.placeholder_color.resolve(env) } else { self.color.resolve(env) }; if self.draw_frame { let frame_rect = (glyph_bounds.size() * scale).to_rect(); ctx.stroke(frame_rect, &glyph_color, 0.5); } ctx.fill(affine * &*data.outline, &glyph_color); } } impl Default for GlyphPainter { fn default() -> Self { GlyphPainter::new() } }
// Place (lvalue) evaluation for the const-evaluator/miri interpreter:
// maps MIR `Place`s to either a raw pointer into interpreter `Memory`
// or a (frame, local) slot on the virtual stack.
use rustc::mir;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::layout::{self, Align, LayoutOf, TyLayout};
use rustc_data_structures::indexed_vec::Idx;
use rustc::mir::interpret::{GlobalId, Value, Scalar, EvalResult, Pointer, ScalarMaybeUndef};

use super::{EvalContext, Machine, ValTy};
use interpret::memory::HasMemory;

#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
pub enum Place {
    /// A place referring to a value allocated in the `Memory` system.
    Ptr {
        /// A place may have an invalid (integral or undef) pointer,
        /// since it might be turned back into a reference
        /// before ever being dereferenced.
        ptr: ScalarMaybeUndef,
        align: Align,
        extra: PlaceExtra,
    },

    /// A place referring to a value on the stack. Represented by a stack frame index paired with
    /// a Mir local index.
    Local { frame: usize, local: mir::Local },
}

/// Extra data carried by a fat place: none for thin pointers, the length for
/// slices, the vtable pointer for trait objects, or the active enum variant
/// after a downcast projection.
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
pub enum PlaceExtra {
    None,
    Length(u64),
    Vtable(Pointer),
    DowncastVariant(usize),
}

impl<'tcx> Place {
    /// Produces a Place that will error if attempted to be read from
    pub fn undef() -> Self {
        Self::from_scalar_ptr(ScalarMaybeUndef::Undef, Align::from_bytes(1, 1).unwrap())
    }

    /// Wraps a (possibly undef) scalar pointer as a thin `Place::Ptr`.
    pub fn from_scalar_ptr(ptr: ScalarMaybeUndef, align: Align) -> Self {
        Place::Ptr {
            ptr,
            align,
            extra: PlaceExtra::None,
        }
    }

    /// Wraps a definitely-valid pointer as a thin `Place::Ptr`.
    pub fn from_ptr(ptr: Pointer, align: Align) -> Self {
        Self::from_scalar_ptr(ScalarMaybeUndef::Scalar(ptr.into()), align)
    }

    /// Destructures a `Place::Ptr`; bugs out on `Place::Local`.
    pub fn to_ptr_align_extra(self) -> (ScalarMaybeUndef, Align, PlaceExtra) {
        match self {
            Place::Ptr { ptr, align, extra } => (ptr, align, extra),
            _ => bug!("to_ptr_and_extra: expected Place::Ptr, got {:?}", self),
        }
    }

    /// Like `to_ptr_align_extra` but discards the extra.
    pub fn to_ptr_align(self) -> (ScalarMaybeUndef, Align) {
        let (ptr, align, _extra) = self.to_ptr_align_extra();
        (ptr, align)
    }

    /// Extracts the raw pointer, erroring on undef.
    pub fn to_ptr(self) -> EvalResult<'tcx, Pointer> {
        // At this point, we forget about the alignment information -- the place has been turned into a reference,
        // and no matter where it came from, it now must be aligned.
        self.to_ptr_align().0.unwrap_or_err()?.to_ptr()
    }

    /// Element type and length for an array/slice place. Arrays read the
    /// length from the type; slices read it from the place's `Length` extra.
    pub(super) fn elem_ty_and_len(
        self,
        ty: Ty<'tcx>,
        tcx: TyCtxt<'_, 'tcx, '_>
    ) -> (Ty<'tcx>, u64) {
        match ty.sty {
            ty::TyArray(elem, n) => (elem, n.unwrap_usize(tcx)),

            ty::TySlice(elem) => {
                match self {
                    Place::Ptr { extra: PlaceExtra::Length(len), .. } => (elem, len),
                    _ => {
                        bug!(
                            "elem_ty_and_len of a TySlice given non-slice place: {:?}",
                            self
                        )
                    }
                }
            }

            _ => bug!("elem_ty_and_len expected array or slice, got {:?}", ty),
        }
    }
}

impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
    /// Reads a value from the place without going through the intermediate step of obtaining
    /// a `miri::Place`
    pub fn try_read_place(
        &self,
        place: &mir::Place<'tcx>,
    ) -> EvalResult<'tcx, Option<Value>> {
        use rustc::mir::Place::*;
        match *place {
            // Might allow this in the future, right now there's no way to do this from Rust code anyway
            Local(mir::RETURN_PLACE) => err!(ReadFromReturnPointer),
            // Directly reading a local will always succeed
            Local(local) => self.frame().locals[local].access().map(Some),
            // No fast path for statics. Reading from statics is rare and would require another
            // Machine function to handle differently in miri.
            Promoted(_) | Static(_) => Ok(None),
            Projection(ref proj) => self.try_read_place_projection(proj),
        }
    }

    /// Projects `field` (optionally within `variant`) out of `base` without
    /// forcing the base into memory. Returns the field value and its layout.
    pub fn read_field(
        &self,
        base: Value,
        variant: Option<usize>,
        field: mir::Field,
        mut base_layout: TyLayout<'tcx>,
    ) -> EvalResult<'tcx, (Value, TyLayout<'tcx>)> {
        if let Some(variant_index) = variant {
            base_layout = base_layout.for_variant(self, variant_index);
        }
        let field_index = field.index();
        let field = base_layout.field(self, field_index)?;
        // Zero-sized fields can be synthesized out of thin air.
        if field.size.bytes() == 0 {
            return Ok((
                Value::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits { bits: 0, size: 0 })),
                field,
            ));
        }
        let offset = base_layout.fields.offset(field_index);
        let value = match base {
            // the field covers the entire type
            Value::ScalarPair(..) |
            Value::Scalar(_) if offset.bytes() == 0 && field.size == base_layout.size => base,
            // extract fields from types with `ScalarPair` ABI
            Value::ScalarPair(a, b) => {
                let val = if offset.bytes() == 0 { a } else { b };
                Value::Scalar(val)
            },
            Value::ByRef(base_ptr, align) => {
                let offset = base_layout.fields.offset(field_index);
                let ptr = base_ptr.ptr_offset(offset, self)?;
                // Alignment of the field access is the weakest of all three.
                let align = align.min(base_layout.align).min(field.align);
                assert!(!field.is_unsized());
                Value::ByRef(ptr, align)
            },
            Value::Scalar(val) => bug!("field access on non aggregate {:#?}, {:#?}", val, base_layout),
        };
        Ok((value, field))
    }

    /// Fast-path read for a projection place; `Ok(None)` means "no fast path,
    /// fall back to full place evaluation".
    fn try_read_place_projection(
        &self,
        proj: &mir::PlaceProjection<'tcx>,
    ) -> EvalResult<'tcx, Option<Value>> {
        use rustc::mir::ProjectionElem::*;
        let base = match self.try_read_place(&proj.base)? {
            Some(base) => base,
            None => return Ok(None),
        };
        let base_ty = self.place_ty(&proj.base);
        let base_layout = self.layout_of(base_ty)?;
        match proj.elem {
            Field(field, _) => Ok(Some(self.read_field(base, None, field, base_layout)?.0)),
            // The NullablePointer cases should work fine, need to take care for normal enums
            Downcast(..) |
            Subslice { .. } |
            // reading index 0 or index 1 from a ByVal or ByVal pair could be optimized
            ConstantIndex { .. } | Index(_) |
            // No way to optimize this projection any better than the normal place path
            Deref => Ok(None),
        }
    }

    /// Returns a value and (in case of a ByRef) if we are supposed to use aligned accesses.
    pub(super) fn eval_and_read_place(
        &mut self,
        place: &mir::Place<'tcx>,
    ) -> EvalResult<'tcx, Value> {
        // Shortcut for things like accessing a fat pointer's field,
        // which would otherwise (in the `eval_place` path) require moving a `ScalarPair` to memory
        // and returning an `Place::Ptr` to it
        if let Some(val) = self.try_read_place(place)? {
            return Ok(val);
        }
        let place = self.eval_place(place)?;
        self.read_place(place)
    }

    /// Reads the value a `Place` refers to; thin pointers only (no extra).
    pub fn read_place(&self, place: Place) -> EvalResult<'tcx, Value> {
        match place {
            Place::Ptr { ptr, align, extra } => {
                assert_eq!(extra, PlaceExtra::None);
                Ok(Value::ByRef(ptr.unwrap_or_err()?, align))
            }
            Place::Local { frame, local } => self.stack[frame].locals[local].access(),
        }
    }

    /// Evaluates a MIR place to an interpreter `Place`.
    pub fn eval_place(&mut self, mir_place: &mir::Place<'tcx>) -> EvalResult<'tcx, Place> {
        use rustc::mir::Place::*;
        let place = match *mir_place {
            Local(mir::RETURN_PLACE) => self.frame().return_place,
            Local(local) => Place::Local {
                frame: self.cur_frame(),
                local,
            },

            Promoted(ref promoted) => {
                let instance = self.frame().instance;
                let val = self.read_global_as_value(GlobalId {
                    instance,
                    promoted: Some(promoted.0),
                })?;
                if let Value::ByRef(ptr, align) = val {
                    Place::Ptr {
                        ptr: ptr.into(),
                        align,
                        extra: PlaceExtra::None,
                    }
                } else {
                    bug!("evaluated promoted and got {:#?}", val);
                }
            }

            Static(ref static_) => {
                let layout = self.layout_of(self.place_ty(mir_place))?;
                let instance = ty::Instance::mono(*self.tcx, static_.def_id);
                let cid = GlobalId { instance, promoted: None };
                // Let the Machine initialize (and cache) the static's allocation.
                let alloc = Machine::init_static(self, cid)?;
                Place::Ptr {
                    ptr: ScalarMaybeUndef::Scalar(Scalar::Ptr(alloc.into())),
                    align: layout.align,
                    extra: PlaceExtra::None,
                }
            }

            Projection(ref proj) => {
                let ty = self.place_ty(&proj.base);
                let place = self.eval_place(&proj.base)?;
                return self.eval_place_projection(place, ty, &proj.elem);
            }
        };

        self.dump_local(place);

        Ok(place)
    }

    /// Field projection on a place: computes the field's place and layout,
    /// staying in a local (non-allocated) form when the field covers the
    /// entire base value.
    pub fn place_field(
        &mut self,
        base: Place,
        field: mir::Field,
        mut base_layout: TyLayout<'tcx>,
    ) -> EvalResult<'tcx, (Place, TyLayout<'tcx>)> {
        // A prior downcast tells us which variant's layout to use.
        match base {
            Place::Ptr { extra: PlaceExtra::DowncastVariant(variant_index), .. } => {
                base_layout = base_layout.for_variant(&self, variant_index);
            }
            _ => {}
        }

        let field_index = field.index();
        let field = base_layout.field(&self, field_index)?;
        let offset = base_layout.fields.offset(field_index);

        // Do not allocate in trivial cases
        let (base_ptr, base_align, base_extra) = match base {
            Place::Ptr { ptr, align, extra } => (ptr, align, extra),
            Place::Local { frame, local } => {
                match (self.stack[frame].locals[local].access()?, &base_layout.abi) {
                    // in case the field covers the entire type, just return the value
                    (Value::Scalar(_), &layout::Abi::Scalar(_)) |
                    (Value::ScalarPair(..), &layout::Abi::ScalarPair(..))
                        if offset.bytes() == 0 && field.size == base_layout.size =>
                    {
                        return Ok((base, field))
                    },
                    _ => self.force_allocation(base)?.to_ptr_align_extra(),
                }
            }
        };

        // For trait objects the field offset must be re-aligned against the
        // dynamic alignment read from the vtable.
        let offset = match base_extra {
            PlaceExtra::Vtable(tab) => {
                let (_, align) = self.size_and_align_of_dst(
                    base_layout.ty,
                    base_ptr.to_value_with_vtable(tab),
                )?;
                offset.abi_align(align)
            }
            _ => offset,
        };

        let ptr = base_ptr.ptr_offset(offset, &self)?;
        let align = base_align.min(base_layout.align).min(field.align);
        // An unsized last field inherits the base place's fat-pointer extra.
        let extra = if !field.is_unsized() {
            PlaceExtra::None
        } else {
            match base_extra {
                PlaceExtra::None => bug!("expected fat pointer"),
                PlaceExtra::DowncastVariant(..) => {
                    bug!("Rust doesn't support unsized fields in enum variants")
                }
                PlaceExtra::Vtable(_) |
                PlaceExtra::Length(_) => {}
            }
            base_extra
        };

        Ok((Place::Ptr { ptr, align, extra }, field))
    }

    /// Turns a (possibly fat) pointer value into a `Place`, attaching vtable
    /// or length extra based on the pointee's unsized tail.
    pub fn val_to_place(&self, val: Value, ty: Ty<'tcx>) -> EvalResult<'tcx, Place> {
        let layout = self.layout_of(ty)?;
        Ok(match self.tcx.struct_tail(ty).sty {
            ty::TyDynamic(..) => {
                let (ptr, vtable) = self.into_ptr_vtable_pair(val)?;
                Place::Ptr {
                    ptr,
                    align: layout.align,
                    extra: PlaceExtra::Vtable(vtable),
                }
            }
            ty::TyStr | ty::TySlice(_) => {
                let (ptr, len) = self.into_slice(val)?;
                Place::Ptr {
                    ptr,
                    align: layout.align,
                    extra: PlaceExtra::Length(len),
                }
            }
            _ => Place::from_scalar_ptr(self.into_ptr(val)?, layout.align),
        })
    }

    /// Indexes into an array/slice place; panics on out-of-bounds.
    pub fn place_index(
        &mut self,
        base: Place,
        outer_ty: Ty<'tcx>,
        n: u64,
    ) -> EvalResult<'tcx, Place> {
        // Taking the outer type here may seem odd; it's needed because for array types, the outer type gives away the length.
        let base = self.force_allocation(base)?;
        let (base_ptr, align) = base.to_ptr_align();

        let (elem_ty, len) = base.elem_ty_and_len(outer_ty, self.tcx.tcx);
        let elem_size = self.layout_of(elem_ty)?.size;
        assert!(
            n < len,
            "Tried to access element {} of array/slice with length {}",
            n,
            len
        );
        let ptr = base_ptr.ptr_offset(elem_size * n, &*self)?;
        Ok(Place::Ptr {
            ptr,
            align,
            extra: PlaceExtra::None,
        })
    }

    /// Records an enum downcast on the place so later field projections use
    /// the variant's layout.
    pub(super) fn place_downcast(
        &mut self,
        base: Place,
        variant: usize,
    ) -> EvalResult<'tcx, Place> {
        // FIXME(solson)
        let base = self.force_allocation(base)?;
        let (ptr, align) = base.to_ptr_align();
        let extra = PlaceExtra::DowncastVariant(variant);
        Ok(Place::Ptr { ptr, align, extra })
    }

    /// Applies a single projection element to an already-evaluated place.
    pub fn eval_place_projection(
        &mut self,
        base: Place,
        base_ty: Ty<'tcx>,
        proj_elem: &mir::ProjectionElem<'tcx, mir::Local, Ty<'tcx>>,
    ) -> EvalResult<'tcx, Place> {
        use rustc::mir::ProjectionElem::*;
        match *proj_elem {
            Field(field, _) => {
                let layout = self.layout_of(base_ty)?;
                Ok(self.place_field(base, field, layout)?.0)
            }

            Downcast(_, variant) => {
                self.place_downcast(base, variant)
            }

            Deref => {
                let val = self.read_place(base)?;

                let pointee_type = match base_ty.sty {
                    ty::TyRawPtr(ref tam) => tam.ty,
                    ty::TyRef(_, ty, _) => ty,
                    ty::TyAdt(def, _) if def.is_box() => base_ty.boxed_ty(),
                    _ => bug!("can only deref pointer types"),
                };

                trace!("deref to {} on {:?}", pointee_type, val);

                self.val_to_place(val, pointee_type)
            }

            Index(local) => {
                let value = self.frame().locals[local].access()?;
                let ty = self.tcx.types.usize;
                let n = self
                    .value_to_scalar(ValTy { value, ty })?
                    .to_bits(self.tcx.data_layout.pointer_size)?;
                self.place_index(base, base_ty, n as u64)
            }

            ConstantIndex {
                offset,
                min_length,
                from_end,
            } => {
                // FIXME(solson)
                let base = self.force_allocation(base)?;
                let (base_ptr, align) = base.to_ptr_align();

                let (elem_ty, n) = base.elem_ty_and_len(base_ty, self.tcx.tcx);
                let elem_size = self.layout_of(elem_ty)?.size;
                assert!(n >= min_length as u64);

                let index = if from_end {
                    n - u64::from(offset)
                } else {
                    u64::from(offset)
                };

                let ptr = base_ptr.ptr_offset(elem_size * index, &self)?;
                Ok(Place::Ptr { ptr, align, extra: PlaceExtra::None })
            }

            Subslice { from, to } => {
                // FIXME(solson)
                let base = self.force_allocation(base)?;
                let (base_ptr, align) = base.to_ptr_align();

                let (elem_ty, n) = base.elem_ty_and_len(base_ty, self.tcx.tcx);
                let elem_size = self.layout_of(elem_ty)?.size;
                assert!(u64::from(from) <= n - u64::from(to));
                let ptr = base_ptr.ptr_offset(elem_size * u64::from(from), &self)?;
                // sublicing arrays produces arrays
                let extra = if self.type_is_sized(base_ty) {
                    PlaceExtra::None
                } else {
                    PlaceExtra::Length(n - u64::from(to) - u64::from(from))
                };
                Ok(Place::Ptr { ptr, align, extra })
            }
        }
    }

    /// The (monomorphized) type of a MIR place in the current frame.
    pub fn place_ty(&self, place: &mir::Place<'tcx>) -> Ty<'tcx> {
        self.monomorphize(
            place.ty(self.mir(), *self.tcx).to_ty(*self.tcx),
            self.substs(),
        )
    }
}
// Monte-Carlo tree search (UCT + RAVE) move generator for the Go engine.
extern crate log;
extern crate rand;

use go::Vertex;
use go::PASS;
use go::GoGame;
use go::Stone;
use go::stone;
use go::VIRT_LEN;
use rand::Rng;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::cmp;
use std::collections;
use std::cell;
use std::ops::Index;

mod zobrist;
use self::zobrist::BoardHasher;
use self::zobrist::PosHash;

#[cfg(test)]
mod test;

// Virtual prior visits/wins given to every new node (first-play urgency).
const NODE_PRIOR: u32 = 10;
// A leaf is expanded once its visit count exceeds this.
const EXPANSION_THRESHOLD: u32 = 8 + NODE_PRIOR;
const UCT_C: f64 = 1.4;
const RAVE_C: f64 = 0.0;
const RAVE_EQUIV: f64 = 3500.0;

/// One search-tree node: statistics for the position reached after `player`
/// moved, plus hashes of child and parent positions (the tree is really a DAG
/// keyed by Zobrist hash, hence multiple parents).
#[derive(Clone)]
pub struct Node {
  player: Stone,
  pub children: Vec<(Vertex, PosHash)>,
  parents: Vec<PosHash>,
  num_plays: u32,
  num_wins: u32,
  num_rave_plays: u32,
  num_rave_wins: u32,
}

/// Fixed-capacity open-addressing hash table from position hash to node.
/// Interior mutability via `UnsafeCell` lets it hand out `&mut Node` from
/// `&self`.
/// NOTE(review): `get_mut` can produce multiple live `&mut Node` aliases from
/// a shared reference (and `run_rollout`/`expand_node` do exactly that) — this
/// relies on never touching the same entry twice at once and is not enforced
/// anywhere; treat with extreme care.
struct NodeTable {
  nodes: Vec<(cell::Cell<PosHash>, cell::UnsafeCell<Node>)>,
  size: AtomicUsize,
}

impl NodeTable {
  /// Creates a table of `c` empty slots (hash `PosHash::None`, dummy node).
  fn with_capacity(c: usize) -> NodeTable {
    let mut table = NodeTable {
      nodes: vec![],
      size: AtomicUsize::new(0),
    };
    for _ in 0 .. c {
      table.nodes.push((cell::Cell::new(PosHash::None),
                        cell::UnsafeCell::new(Node::new(stone::EMPTY))));
    }
    return table;
  }

  /// Returns a mutable reference to the node for `hash`; panics if absent.
  /// NOTE(review): see the aliasing caveat on `NodeTable` above.
  fn get_mut(&self, hash: &PosHash) -> &mut Node {
    match self.find(hash) {
      Ok(i) => unsafe {
        let p_mut: *mut Node = self.nodes[i].1.get();
        &mut *p_mut
      },
      Err(_) => panic!("no entry for {:?}", hash),
    }
  }

  fn contains_key(&self, hash: &PosHash) -> bool {
    return self.find(hash).is_ok();
  }

  /// Inserts a node; panics on duplicate keys or when only the guard slot is left.
  fn insert(&self, hash: PosHash, node: Node) {
    if self.size.load(Ordering::SeqCst) + 1 == self.nodes.len() {
      // Always leave at least one empty guard value.
      panic!("NodeTable is already full!");
    }
    match self.find(&hash) {
      Ok(_) => panic!("{:?} is already in the table", hash),
      Err(i) => unsafe {
        let p_mut:*mut Node = self.nodes[i].1.get();
        *p_mut = node;
        self.nodes[i].0.set(hash);
      },
    }
    self.size.fetch_add(1, Ordering::SeqCst);
  }

  // Returns either the position of the value for the hash, or the position
  // where it should be inserted.
  fn find(&self, hash: &PosHash) -> Result<usize, usize> {
    // This hash table uses linear probing.
    let start = hash.as_index() % self.nodes.len();
    match self.nodes[start].0.get() {
      PosHash::None => return Err(start),
      h if h == *hash => return Ok(start),
      _ => {},
    }
    let mut i = (start + 1) % self.nodes.len();
    while i != start {
      match self.nodes[i].0.get() {
        PosHash::None => return Err(i),
        h if h == *hash => return Ok(i),
        _ => i = (i + 1) % self.nodes.len(),
      }
    }
    // Unreachable in practice: `insert` keeps one guard slot empty.
    panic!("table is completely full");
  }
}

impl Index<PosHash> for NodeTable {
  type Output = Node;

  fn index<'a>(&'a self, _index: PosHash) -> &'a Node {
    return self.get_mut(&_index);
  }
}

/// Top-level search driver owning the transposition table and hasher.
pub struct Controller {
  pub root: Node,
  nodes: NodeTable,
  hasher: BoardHasher,
}

/// Plays a uniformly random rollout to two consecutive passes (or a 700-move
/// cap, scored as a white win) and returns whether Black wins under Chinese
/// scoring with komi of `double_komi / 2` = 6.5.
/// `amaf_color_map` records the first color to play each vertex (for RAVE).
fn black_wins(game: &mut GoGame, last_move: Stone, rng: &mut rand::StdRng,
              amaf_color_map: &mut Vec<Stone>) -> bool {
  let double_komi = 13; // komi stored doubled to stay integral (6.5 * 2)
  let mut color_to_play = last_move;
  let mut num_consecutive_passes = 0;
  let mut num_moves = 0;
  while num_consecutive_passes < 2 {
    // println!("{:?}", game);
    color_to_play = color_to_play.opponent();
    num_moves += 1;
    let v = game.random_move(color_to_play, rng);
    if v == PASS {
      num_consecutive_passes += 1;
    } else {
      if amaf_color_map[v.as_index()] == stone::EMPTY {
        amaf_color_map[v.as_index()] = color_to_play;
      }
      game.play(color_to_play, v);
      num_consecutive_passes = 0;
    }
    if num_moves > 700 {
      warn!("too many moves!");
      return false;
    }
  }
  return game.chinese_score() * 2 > double_komi;
}

impl Controller {
  pub fn new() -> Controller {
    Controller {
      root: Node::new(stone::WHITE),
      nodes: NodeTable::with_capacity(100000),
      hasher: BoardHasher::new(),
    }
  }

  /// Runs `num_rollouts` simulations from `game`'s position and returns the
  /// most-visited child move. Reuses tree statistics across calls via the
  /// position hash.
  pub fn gen_move(&mut self, game: &GoGame, num_rollouts: u32,
                  rng: &mut rand::StdRng) -> Vertex {
    let mut rollout_game = game.clone();
    if rollout_game.possible_moves(game.to_play).is_empty() {
      return PASS;
    }
    let root_hash = self.hasher.hash(game);
    if self.nodes.contains_key(&root_hash) {
      info!("reusing root with {:?} visits", self.nodes[root_hash].num_plays)
    } else {
      info!("creating a new root");
      self.nodes.insert(root_hash, Node::new(game.to_play));
    }
    {
      // NOTE(review): `root` is a `&mut Node` aliasing `self.nodes` while
      // `expand_node` also accesses the table — sound only because expansion
      // never touches the root's own slot.
      let mut root = self.nodes.get_mut(&root_hash);
      if root.children.is_empty() {
        self.expand_node(root_hash, &mut root, &mut rollout_game);
      }
    }

    for i in 1 .. num_rollouts + 1 {
      // Replay the real game history to restore the root position.
      rollout_game.reset();
      for v in game.history.iter() {
        rollout_game.play(v.0, v.1);
      }
      self.run_rollout(i, root_hash, &mut rollout_game, rng);
    }
    self.print_statistics(root_hash);
    let (best_v, best_h) = self.nodes[root_hash].best_move(&self.nodes);
    info!("selected move {:}", best_v);
    self.print_statistics(best_h);
    return best_v;
  }

  /// One MCTS iteration: select down the tree, expand when warranted, play a
  /// random rollout, and back up the result (UCT + RAVE statistics).
  fn run_rollout(&mut self, num_sims: u32, root_hash: PosHash,
                 game: &mut GoGame, rng: &mut rand::StdRng) {
    // Map to store who played at which vertex first to update node values by AMAF.
    let mut amaf_color_map = vec![stone::EMPTY; VIRT_LEN];
    let mut hash = root_hash;
    let mut node = self.nodes.get_mut(&hash);

    // Run the simulation down the tree until we reach a leaf node.
    while !node.children.is_empty() {
      // Shuffle to break ties, todo(swj): find a faster way to break ties.
      rng.shuffle(&mut node.children);
      let (vertex, best_hash) = node.best_child(num_sims, &self.nodes);
      game.play(node.player, vertex);
      if vertex != PASS && amaf_color_map[vertex.as_index()] == stone::EMPTY {
        amaf_color_map[vertex.as_index()] = node.player;
      }
      hash = best_hash;
      node = self.nodes.get_mut(&hash);

      // Expand nodes with no children that are above the threshold.
      if node.children.is_empty() && node.num_plays > EXPANSION_THRESHOLD {
        self.expand_node(hash, node, game);
      }
    }

    // Run a random rollout till the end of the game.
    let black_wins = black_wins(game, node.player, rng, &mut amaf_color_map);

    // Propagate the new value up the tree, following all possible parent paths.
    // NOTE(review): transpositions can enqueue the same ancestor more than
    // once (no visited-set), so a node may be updated multiple times per
    // rollout; also `node` and `child` below may alias other live references.
    let mut update_nodes = vec![hash];
    while !update_nodes.is_empty() {
      node = self.nodes.get_mut(&update_nodes.pop().unwrap());
      update_nodes.extend(node.parents.clone());

      let wins = if black_wins && node.player == stone::BLACK ||
          !black_wins && node.player == stone::WHITE {
        1
      } else {
        0
      };
      node.num_plays += 1;
      node.num_wins += wins;

      // Update the rave visits of all child nodes.
      for &(vertex, hash) in node.children.iter() {
        let ref mut child = self.nodes.get_mut(&hash);
        if amaf_color_map[vertex.as_index()] == child.player {
          child.num_rave_plays += 1;
          child.num_rave_wins += 1 - wins; // Children are from the other perspective.
        }
      }
    }
  }

  /// Creates (or links) a child node for every legal reply and registers
  /// `hash` as their parent.
  fn expand_node(&self, hash: PosHash, node: &mut Node, game: &mut GoGame) {
    let opponent = node.player.opponent();
    for v in game.possible_moves(opponent) {
      // Play-hash-undo to obtain the child position's Zobrist hash.
      game.play(opponent, v);
      let child_hash = self.hasher.hash(game);
      game.undo(1);

      if !self.nodes.contains_key(&child_hash) {
        self.nodes.insert(child_hash, Node::new(opponent));
      }
      // Add this node as parent to its new children.
      self.nodes.get_mut(&child_hash).parents.push(hash);
      node.children.push((v, child_hash));
    }
  }

  /// Logs the ten most-visited children of `root_hash` and the PV.
  fn print_statistics(&self, root_hash: PosHash) {
    let ref root = self.nodes[root_hash];
    info!("node hash: {:?}", root_hash);
    let mut children = root.children.clone();
    children.sort_by(|a, b| self.nodes[b.1].num_plays.cmp(
        &self.nodes[a.1].num_plays));
    for i in 0 .. cmp::min(10, children.len()) {
      let (vertex, hash) = children[i];
      let ref child = self.nodes[hash];
      info!("{:?}: {:} visits {:?}", vertex, child.num_plays, hash);
    }
    self.print_pv(root_hash);
  }

  /// Logs the principal variation (most-visited path) from `root_hash`.
  fn print_pv(&self, root_hash: PosHash) {
    let mut hash = root_hash;
    let mut node = self.nodes.get_mut(&hash);
    let mut pv = vec![];
    while !node.children.is_empty() {
      let (vertex, hash) = node.best_move(&self.nodes);
      node = self.nodes.get_mut(&hash);
      pv.push((vertex, node.num_plays));
    }
    info!("PV: {:?}", pv);
  }
}

impl Node {
  /// New node seeded with the virtual prior (plays = NODE_PRIOR, 50% wins).
  fn new(player: Stone) -> Node {
    Node {
      player: player,
      children: vec![],
      parents: vec![],
      num_plays: NODE_PRIOR,
      num_wins: NODE_PRIOR / 2,
      num_rave_plays: 0,
      num_rave_wins: 0,
    }
  }

  /// Final move choice: the child with the most visits (robust child).
  fn best_move(&self, nodes: &NodeTable) -> (Vertex, PosHash) {
    let mut max_visits = 0;
    let mut best_child = 0;
    for i in 0 .. self.children.len() {
      let num_plays = nodes[self.children[i].1].num_plays;
      if num_plays > max_visits {
        best_child = i;
        max_visits = num_plays;
      }
    }
    return self.children[best_child];
  }

  /// In-tree selection: child with the highest RAVE urgency.
  /// NOTE(review): `num_sims` is currently unused (it would feed `uct`);
  /// selection is pure RAVE here.
  fn best_child(&self, num_sims: u32, nodes: &NodeTable) -> (Vertex, PosHash) {
    let mut best_value = -1f64;
    let mut best_child = 0;
    for i in 0 .. self.children.len() {
      let value = nodes[self.children[i].1].rave_urgency();
      if value > best_value {
        best_value = value;
        best_child = i;
      }
    }
    return self.children[best_child];
  }

  /// Classic UCT score (exploitation + exploration + optional RAVE term).
  /// NOTE(review): not referenced by the selection code above.
  pub fn uct(&self, num_sims: u32) -> f64 {
    self.num_wins as f64 / self.num_plays as f64 +
      UCT_C * ((num_sims as f64).ln() / self.num_plays as f64).sqrt() +
      RAVE_C * (self.num_rave_wins as f64 / self.num_rave_plays as f64)
  }

  /// RAVE-blended value: beta-weighted mix of the AMAF estimate and the
  /// regular win rate, with beta decaying as real visits accumulate.
  fn rave_urgency(&self) -> f64 {
    let value = self.num_wins as f64 / self.num_plays as f64;
    if self.num_rave_plays == 0 {
      return value;
    }

    let rave_value = self.num_rave_wins as f64 / self.num_rave_plays as f64;
    let beta = self.num_rave_plays as f64 / (
        self.num_rave_plays as f64 + self.num_plays as f64 +
        (self.num_rave_plays + self.num_plays) as f64 / RAVE_EQUIV);
    return beta * rave_value + (1.0 - beta) * value
  }
}
use std::path::PathBuf;
use std::env;
use std::fs::DirBuilder;
use std::error::Error;
use std::fmt;
use std::io;
use std::io::prelude::*;

/// Errors that can occur while validating a target file path.
#[derive(Debug)]
pub enum ValidationError {
    /// The user declined to create a missing directory.
    UserAbort,
    /// The target exists but is a directory, not a file.
    TargetIsADir,
    /// Reading the environment (current working directory) failed.
    EnvError(io::Error),
    /// Creating the missing parent directory failed.
    BuildDirErr(io::Error),
}

impl Error for ValidationError {
    fn description(&self) -> &str {
        match *self {
            ValidationError::UserAbort => "Abort by user.",
            ValidationError::TargetIsADir => "The selected target is a directory, not a file.",
            // `description()` already yields `&str`; the previous `.clone()`
            // calls were no-ops and have been dropped.
            ValidationError::EnvError(ref err) => err.description(),
            ValidationError::BuildDirErr(ref err) => err.description(),
        }
    }
}

impl fmt::Display for ValidationError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.description())
    }
}

/// Prints `question` and reads stdin until the user answers yes (`y`/`yes`,
/// returns `true`) or no (`n`/`no`, returns `false`).
fn ask_user(question: &str) -> bool {
    let mut answer = String::new();
    loop {
        print!("{} (y/n) ", question);
        io::stdout().flush().expect("Could not flush stdout");
        // Bug fix: `read_line` *appends* to the buffer, so without clearing it
        // first, one invalid answer ("maybe") made every later trimmed answer
        // ("maybe\ny") unmatchable and the loop could never terminate.
        answer.clear();
        io::stdin().read_line(&mut answer).unwrap();
        match answer.trim() {
            "y" | "yes" => return true,
            "n" | "no" => return false,
            _ => continue,
        };
    }
}

/// Validates `target_path` as a file target, returning the absolute path.
///
/// - A relative path is resolved against the current working directory
///   (`EnvError` if that cannot be read).
/// - An existing file is accepted; an existing directory is `TargetIsADir`.
/// - A non-existent file whose parent directory exists is accepted.
/// - A non-existent parent directory is created recursively after asking the
///   user (`UserAbort` on refusal, `BuildDirErr` on I/O failure).
pub fn validate_target_file(target_path: &str) -> Result<PathBuf, ValidationError> {
    let mut path = PathBuf::from(target_path);

    // Make sure the target path is absolute.
    if path.is_relative() {
        let cwd = env::current_dir().map_err(ValidationError::EnvError)?;
        path = cwd.join(path);
    }

    if path.exists() {
        return if path.is_file() {
            // The path exists and is a file: nothing left to check.
            Ok(path)
        } else {
            Err(ValidationError::TargetIsADir)
        };
    }

    // The file does not exist yet. An absolute non-root path always has a
    // parent component (the root itself would have hit `exists()` above).
    let parent_dir = path.parent().unwrap();
    if parent_dir.exists() {
        Ok(path)
    } else if ask_user("The path to the target directory does not exist. Create it?") {
        DirBuilder::new()
            .recursive(true)
            .create(parent_dir)
            .map_err(ValidationError::BuildDirErr)?;
        Ok(path)
    } else {
        Err(ValidationError::UserAbort)
    }
}
use crate::reader::Arg;
use crate::reader::Operation;
use crate::autodiff::autodiff::{OUTPUT_NAMES, AutoDiff};
use quote::{quote, format_ident};
use proc_macro2::TokenStream;
use syn::Ident;
use std::collections::HashMap;

/// Walks an `Arg` expression graph and emits the token stream that computes
/// gradients for the requested variables (reverse-mode autodiff codegen).
pub struct Solver {
    /// Lookup table of per-operation backward expressions.
    autodiff: AutoDiff,
    /// Counter used to mint unique intermediate identifiers (`x1`, `x2`, …).
    curr_var: u32,
}

impl Solver {
    pub fn new() -> Solver {
        Solver {
            autodiff: AutoDiff::new(), // We should have a static instance of this
            curr_var: 1,
        }
    }

    /// Generates the full backward pass for `arg_graph`, seeded with `grad`,
    /// solving for each variable named in `solve_for`.
    ///
    /// Returns a token stream containing first the gradient calculations and
    /// then the code that stores the accumulated results on each variable
    /// (summed along broadcast dimensions). Solutions for the special
    /// `"input"` variable are emitted inline rather than assigned.
    pub fn solve(&mut self, arg_graph: Arg, grad: TokenStream, solve_for: Vec<String>) -> TokenStream {
        let mut solution_map: HashMap<String, Vec<TokenStream>> = HashMap::new();
        for needed_grad in solve_for {
            solution_map.insert(needed_grad, Vec::new());
        }
        let calculations = self.solve_operation(arg_graph, grad, &mut solution_map);

        // Create results of the gradient calculation.
        let mut results = TokenStream::new();
        for (variable, solution) in solution_map {
            // Comparing `&str` directly; the old `!= "input".to_string()`
            // allocated a String on every iteration for no reason.
            if variable != "input" {
                let ident: TokenStream = variable.parse().unwrap();
                results = quote! {
                    {
                        // Sum gradient over any dimension the target variable
                        // was broadcast along before storing it.
                        let mut res = #(#solution)+*;
                        for i in 0..res.shape.len() {
                            if #ident.shape[i] < res.shape[i] {
                                res.sum(i);
                            }
                        }
                        #ident.gradient = Some(Box::new(res));
                    }
                    #results
                }
            } else {
                results = quote! {
                    #results
                    #(#solution)+*
                }
            }
        }

        quote! {
            #calculations
            #results
        }
    }

    /// Recursively dispatches on one graph node: leaf items record the
    /// incoming `grad` in `solution_map`; operations are differentiated.
    fn solve_operation(&mut self, arg_graph: Arg, grad: TokenStream, solution_map: &mut HashMap<String, Vec<TokenStream>>) -> TokenStream {
        match arg_graph {
            Arg::None => panic!("None argument in graph!"),
            Arg::Item(mut item) => {
                // Normalise the item's textual form so it matches the keys in
                // `solution_map`. This is not very nice at all...
                item = item.replace("&", "").replace(".", " . ");
                if let Some(vec) = solution_map.get_mut(&item) {
                    vec.push(grad);
                }
                TokenStream::new()
            }
            Arg::Operation(op) => self.diff_operation(*op, grad, solution_map)
        }
    }

    /// Differentiates a single operation node and recurses into its operands.
    fn diff_operation(&mut self, operation: Operation, grad: TokenStream, solution_map: &mut HashMap<String, Vec<TokenStream>>) -> TokenStream {
        // Get expressions needed to solve for input grad and other needed grads.
        let needed_exprs = Solver::get_needed_expressions(&operation, solution_map);

        // Construct expression inputs (grad + a & b & ...).
        let inputs = self.define_inputs(&operation, &grad, &needed_exprs);

        // Solve every expression at this level, collecting the results.
        let (expressions, next_level, idents) = self.define_expressions(operation, needed_exprs);

        // Solve every expression at the sublevel with results of this level.
        let next_level_solved: Vec<TokenStream> = next_level.into_iter().map(|(arg, grad)| {
            self.solve_operation(arg, grad, solution_map)
        }).collect();

        // Create output block: declare intermediates, compute them in an
        // inner scope, then emit the recursive sub-solutions.
        quote! {
            #(let #idents;)*
            {
                #inputs
                #(
                    #expressions
                )*
            }
            #(
                #next_level_solved
            )*
        }
    }

    /// Returns the indices of the operation's operands (0 = receiver,
    /// 1.. = args) whose gradient expressions must be generated because their
    /// textual form mentions one of the variables being solved for.
    fn get_needed_expressions(operation: &Operation, solution_map: &HashMap<String, Vec<TokenStream>>) -> Vec<u8> {
        let mut calc_expression: Vec<u8> = Vec::new();
        let mut op_args = vec![&operation.receiver];
        // The old `append(&mut …iter().map(|f| f).collect())` built a
        // temporary Vec through an identity map; `extend` does the same
        // without the detour.
        op_args.extend(operation.args.iter());
        for i in 0..op_args.len() {
            let input_n = op_args[i].to_tokenstream();
            for to_grad_element in solution_map.keys() {
                if input_n.to_string().contains(to_grad_element) {
                    calc_expression.push(i as u8);
                    // Stop after the first match: the old code pushed the same
                    // index once per matching key, and callers only ever test
                    // membership with `contains`, so duplicates were waste.
                    break;
                }
            }
        }
        calc_expression
    }

    /// Emits `let` bindings for the seed gradient and for every operand value
    /// referenced by the backward expressions selected in `needed_exprs`.
    fn define_inputs(&self, operation: &Operation, grad: &TokenStream, needed_exprs: &[u8]) -> TokenStream {
        let mut inputs: Vec<TokenStream> = Vec::new();
        let mut input_names = Vec::new();
        let exprs = self.autodiff.get_expressions(&operation.method);
        let mut op_args = vec![&operation.receiver];
        op_args.extend(operation.args.iter());
        // We should save and use the forward pass if needed.
        for i in 0..op_args.len() {
            let input_n = op_args[i].to_tokenstream();
            // Bind operand `i` only if some selected backward expression
            // actually reads it.
            let mut calc = false;
            for needed_exp in needed_exprs {
                let (_, needed_args) = &exprs[*needed_exp as usize];
                if needed_args.contains(&(i as u8)) {
                    calc = true;
                    break;
                }
            }
            if calc {
                inputs.push(input_n);
                input_names.push(format_ident!("{}", OUTPUT_NAMES[i]));
            }
        }
        // Since we get input as reference in the forwards pass and as owned
        // value in the backwards pass we should replace to avoid issues.
        // However, it would be even better to keep track of usage and use an
        // owned value where the operation can be done inplace.
        for input in &mut inputs {
            let expr_str = input.to_string().replace("input", "(&input)");
            *input = expr_str.parse().unwrap();
        }
        quote! {
            let grad = #grad;
            #(let #input_names = #inputs;)*
        }
    }

    /// Builds the assignment statements for each needed backward expression,
    /// minting a fresh `x{n}` identifier per expression, and returns
    /// (statements, operand/grad pairs to recurse into, minted identifiers).
    fn define_expressions(&mut self, mut operation: Operation, needed_exprs: Vec<u8>) -> (Vec<TokenStream>, Vec<(Arg, TokenStream)>, Vec<Ident>) {
        let mut output = Vec::new();
        let mut next_level: Vec<(Arg, TokenStream)> = Vec::new();
        let mut idents = Vec::new();
        let exprs = self.autodiff.get_expressions(&operation.method).clone();
        for i in 0..exprs.len() {
            if !needed_exprs.contains(&(i as u8)) {
                continue;
            }
            let (expr, _) = &exprs[i];
            let ident = format_ident!("x{}", self.curr_var);
            self.curr_var += 1;
            idents.push(ident.clone());
            if i == 0 {
                next_level.push((operation.receiver.clone(), ident.to_string().parse().unwrap()));
            } else {
                // NOTE(review): `remove(0)` assumes expressions 1.. are needed
                // in order; if an intermediate expression were skipped the
                // remaining args would shift — TODO confirm this invariant.
                next_level.push((operation.args.remove(0), ident.to_string().parse().unwrap()));
            }
            output.push(quote! {
                #ident = #expr;
            });
        }
        (output, next_level, idents)
    }
}
use super::{Deserialize, Serialize}; use std::fmt; #[derive(Serialize, Deserialize)] pub struct Checksum256 { pub value: [u8; 32], } impl Checksum256 { pub fn to_string(&self) -> String { let mut hex_string = String::from(""); for v in &self.value { let hex = format!("{:02x}", v); hex_string += hex.as_str(); } hex_string } } impl fmt::Display for Checksum256 { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.to_string()) } } impl fmt::Debug for Checksum256 { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.to_string()) } }