text
stringlengths
8
4.13M
// Copyright 2021 Datafuse Labs. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::sync::Arc; use common_base::base::escape_for_key; use common_exception::ErrorCode; use common_exception::Result; use common_meta_api::reply::txn_reply_to_api_result; use common_meta_api::txn_cond_seq; use common_meta_api::txn_op_del; use common_meta_api::txn_op_put; use common_meta_app::app_error::TxnRetryMaxTimes; use common_meta_app::principal::StageFile; use common_meta_app::principal::StageInfo; use common_meta_kvapi::kvapi; use common_meta_kvapi::kvapi::UpsertKVReq; use common_meta_types::ConditionResult::Eq; use common_meta_types::MatchSeq; use common_meta_types::MatchSeqExt; use common_meta_types::MetaError; use common_meta_types::Operation; use common_meta_types::SeqV; use common_meta_types::TxnOp; use common_meta_types::TxnRequest; use crate::serde::deserialize_struct; use crate::serde::serialize_struct; use crate::stage::StageApi; static USER_STAGE_API_KEY_PREFIX: &str = "__fd_stages"; static STAGE_FILE_API_KEY_PREFIX: &str = "__fd_stage_files"; const TXN_MAX_RETRY_TIMES: u32 = 10; pub struct StageMgr { kv_api: Arc<dyn kvapi::KVApi<Error = MetaError>>, stage_prefix: String, stage_file_prefix: String, } impl StageMgr { pub fn create(kv_api: Arc<dyn kvapi::KVApi<Error = MetaError>>, tenant: &str) -> Result<Self> { if tenant.is_empty() { return Err(ErrorCode::TenantIsEmpty( "Tenant can not empty(while role mgr create)", )); } Ok(StageMgr { kv_api, stage_prefix: 
format!("{}/{}", USER_STAGE_API_KEY_PREFIX, escape_for_key(tenant)?), stage_file_prefix: format!("{}/{}", STAGE_FILE_API_KEY_PREFIX, escape_for_key(tenant)?), }) } } #[async_trait::async_trait] impl StageApi for StageMgr { async fn add_stage(&self, info: StageInfo) -> Result<u64> { let seq = MatchSeq::Exact(0); let val = Operation::Update(serialize_struct( &info, ErrorCode::IllegalUserStageFormat, || "", )?); let key = format!( "{}/{}", self.stage_prefix, escape_for_key(&info.stage_name)? ); let upsert_info = self .kv_api .upsert_kv(UpsertKVReq::new(&key, seq, val, None)); let res = upsert_info.await?.added_or_else(|v| { ErrorCode::StageAlreadyExists(format!("Stage already exists, seq [{}]", v.seq)) })?; Ok(res.seq) } async fn get_stage(&self, name: &str, seq: MatchSeq) -> Result<SeqV<StageInfo>> { let key = format!("{}/{}", self.stage_prefix, escape_for_key(name)?); let kv_api = self.kv_api.clone(); let get_kv = async move { kv_api.get_kv(&key).await }; let res = get_kv.await?; let seq_value = res.ok_or_else(|| ErrorCode::UnknownStage(format!("Unknown stage {}", name)))?; match seq.match_seq(&seq_value) { Ok(_) => Ok(SeqV::new( seq_value.seq, deserialize_struct(&seq_value.data, ErrorCode::IllegalUserStageFormat, || "")?, )), Err(_) => Err(ErrorCode::UnknownStage(format!("Unknown stage {}", name))), } } async fn get_stages(&self) -> Result<Vec<StageInfo>> { let values = self.kv_api.prefix_list_kv(&self.stage_prefix).await?; let mut stage_infos = Vec::with_capacity(values.len()); for (_, value) in values { let stage_info = deserialize_struct(&value.data, ErrorCode::IllegalUserStageFormat, || "")?; stage_infos.push(stage_info); } Ok(stage_infos) } async fn drop_stage(&self, name: &str) -> Result<()> { let stage_key = format!("{}/{}", self.stage_prefix, escape_for_key(name)?); let file_key_prefix = format!("{}/{}/", self.stage_file_prefix, escape_for_key(name)?); let mut retry = 0; while retry < TXN_MAX_RETRY_TIMES { retry += 1; let stage_seq = match 
self.kv_api.get_kv(&stage_key).await? { Some(seq_v) => seq_v.seq, None => return Err(ErrorCode::UnknownStage(format!("Unknown stage {}", name))), }; // list all stage file keys, and delete them let file_keys = self.kv_api.prefix_list_kv(&file_key_prefix).await?; let mut dels: Vec<TxnOp> = file_keys.iter().map(|(key, _)| txn_op_del(key)).collect(); dels.push(txn_op_del(&stage_key)); let txn_req = TxnRequest { condition: vec![ // stage is not change, prevent add file to stage txn_cond_seq(&stage_key, Eq, stage_seq), ], if_then: dels, else_then: vec![], }; let tx_reply = self.kv_api.transaction(txn_req).await?; let (succ, _) = txn_reply_to_api_result(tx_reply)?; if succ { return Ok(()); } } Err(ErrorCode::TxnRetryMaxTimes( TxnRetryMaxTimes::new("drop_stage", TXN_MAX_RETRY_TIMES).to_string(), )) } async fn add_file(&self, name: &str, file: StageFile) -> Result<u64> { let stage_key = format!("{}/{}", self.stage_prefix, escape_for_key(name)?); let file_key = format!( "{}/{}/{}", self.stage_file_prefix, escape_for_key(name)?, escape_for_key(&file.path)? ); let mut retry = 0; while retry < TXN_MAX_RETRY_TIMES { retry += 1; if let Some(seq_v) = self.kv_api.get_kv(&file_key).await? { return Err(ErrorCode::StageFileAlreadyExists(format!( "Stage file already exists, seq [{}]", seq_v.seq ))); } let (stage_seq, mut old_stage): (_, StageInfo) = if let Some(seq_v) = self.kv_api.get_kv(&stage_key).await? 
{ ( seq_v.seq, deserialize_struct(&seq_v.data, ErrorCode::IllegalUserStageFormat, || "")?, ) } else { return Err(ErrorCode::UnknownStage(format!("Unknown stage {}", name))); }; old_stage.number_of_files += 1; let txn_req = TxnRequest { condition: vec![ // file does not exist txn_cond_seq(&file_key, Eq, 0), // stage is not changed txn_cond_seq(&stage_key, Eq, stage_seq), ], if_then: vec![ txn_op_put( &file_key, serialize_struct(&file, ErrorCode::IllegalStageFileFormat, || "")?, ), txn_op_put( &stage_key, serialize_struct(&old_stage, ErrorCode::IllegalUserStageFormat, || "")?, ), ], else_then: vec![], }; let tx_reply = self.kv_api.transaction(txn_req).await?; let (succ, _) = txn_reply_to_api_result(tx_reply)?; if succ { return Ok(0); } } Err(ErrorCode::TxnRetryMaxTimes( TxnRetryMaxTimes::new("add_file", TXN_MAX_RETRY_TIMES).to_string(), )) } async fn list_files(&self, name: &str) -> Result<Vec<StageFile>> { let list_prefix = format!("{}/{}/", self.stage_file_prefix, escape_for_key(name)?); let values = self.kv_api.prefix_list_kv(&list_prefix).await?; let mut files = Vec::with_capacity(values.len()); for (_, value) in values { let file = deserialize_struct(&value.data, ErrorCode::IllegalStageFileFormat, || "")?; files.push(file) } Ok(files) } async fn remove_files(&self, name: &str, paths: Vec<String>) -> Result<()> { let stage_key = format!("{}/{}", self.stage_prefix, escape_for_key(name)?); let mut retry = 0; while retry < TXN_MAX_RETRY_TIMES { retry += 1; let (stage_seq, mut old_stage): (_, StageInfo) = if let Some(seq_v) = self.kv_api.get_kv(&stage_key).await? { ( seq_v.seq, deserialize_struct(&seq_v.data, ErrorCode::IllegalUserStageFormat, || "")?, ) } else { return Err(ErrorCode::UnknownStage(format!("Unknown stage {}", name))); }; let mut if_then = Vec::with_capacity(paths.len()); for path in &paths { let key = format!( "{}/{}/{}", self.stage_file_prefix, escape_for_key(name)?, escape_for_key(path)? 
); if_then.push(txn_op_del(&key)); } old_stage.number_of_files -= paths.len() as u64; if_then.push(txn_op_put( &stage_key, serialize_struct(&old_stage, ErrorCode::IllegalUserStageFormat, || "")?, )); let txn_req = TxnRequest { condition: vec![ // stage is not change txn_cond_seq(&stage_key, Eq, stage_seq), ], if_then, else_then: vec![], }; let tx_reply = self.kv_api.transaction(txn_req).await?; let (succ, _) = txn_reply_to_api_result(tx_reply)?; if succ { return Ok(()); } } Err(ErrorCode::TxnRetryMaxTimes( TxnRetryMaxTimes::new("remove_files", TXN_MAX_RETRY_TIMES).to_string(), )) } }
/// A single object in the simulated world.
///
/// Serialized with serde; field names are carried as-is except where a
/// `#[serde(rename = ...)]` attribute says otherwise.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Entity {
    /// Unique identifier of the entity.
    pub id: u64,
    /// Optional display name. // presumably a player nickname — TODO confirm
    pub name: Option<String>,
    /// Render color. // format not shown here (likely a CSS-style string) — verify against consumer
    pub color: String,
    /// Optional sprite identifier used for rendering.
    pub sprite: Option<String>,
    /// X position in world coordinates.
    pub x: f64,
    /// Y position in world coordinates.
    pub y: f64,
    /// Entity radius. // presumably used for collision and/or rendering size
    pub radius: f64,
    /// Numeric kind discriminant; serialized on the wire as `entityType`.
    #[serde(rename="entityType")]
    pub entity_type: u64,
}

/// Snapshot of the whole world at one tick.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct WorldState {
    /// Tick counter / timestamp of this snapshot. // units not shown — confirm with producer
    pub time: u64,
    /// All entities present in the world at this time.
    pub entities: Vec<Entity>,
}

/// Request payload for one simulation tick: the world plus the entity
/// controlled by the caller.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct TickRequest {
    pub world_state: WorldState,
    /// The caller's own entity within `world_state`.
    pub player: Entity,
}

/// Response payload for one tick: the movement delta chosen for the player.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct TickResult {
    /// Horizontal movement delta.
    pub dx: f64,
    /// Vertical movement delta.
    pub dy: f64,
}
//! Essentially, all the engines here are based on the regexp approach.
//! The difference is that `regex` engine is the poor man's way where we
//! use our own regex pattern rule with the ripgrep executable together,
//! while `ctags` and `gtags` maintain theirs which are well polished.

mod ctags;
mod gtags;
mod regex;

use super::AddressableUsage;

pub use self::ctags::CtagsSearcher;
pub use self::gtags::GtagsSearcher;
pub use self::regex::RegexSearcher;

/// When spawning the ctags/gtags request, we can specify the searching strategy.
#[derive(Clone, Debug, PartialEq, Eq, Default)]
#[allow(unused)]
pub enum QueryType {
    /// Prefix match.
    StartWith,
    /// Exact match.
    #[default]
    Exact,
    /// Substring match.
    Contain,
    ///
    Inherit,
}

/// Unified tag info.
///
/// Parsed from `ctags` and `gtags` output.
#[derive(Default, Debug)]
pub struct Symbol {
    /// None for `gtags`.
    pub name: Option<String>,
    /// File the symbol is defined in.
    pub path: String,
    /// Search pattern (the `/^...$/` body) or address field of the tag line.
    pub pattern: String,
    /// 1-based line number; stays 0 unless a `line:` field is present.
    pub line_number: usize,
    /// ctags only.
    pub kind: Option<String>,
    /// ctags only.
    pub scope: Option<String>,
}

impl Symbol {
    /// Parse from the output of `readtags`.
    ///
    /// Expected shape (tab-separated): `name\tpath\taddress;"\tfield:value...`.
    /// Returns `None` when the line does not carry a recognizable search
    /// pattern in its address field.
    ///
    /// TODO: add more tests
    pub fn from_readtags(s: &str) -> Option<Self> {
        let mut items = s.split('\t');

        // First two columns are always the tag name and the file path.
        let mut l = Self {
            name: Some(items.next()?.into()),
            path: items.next()?.into(),
            ..Default::default()
        };

        // https://docs.ctags.io/en/latest/man/ctags-client-tools.7.html#parse-readtags-output
        // Peek (without consuming) at the third column and strip the `;"`
        // terminator that ctags appends to the address field.
        if let Some(p) = items
            .clone()
            .peekable()
            .peek()
            .and_then(|p| p.strip_suffix(";\""))
        {
            // A search pattern looks like `/.../` (or `$...$` per the check
            // below — NOTE(review): the `$...$` form looks unusual; confirm
            // against real readtags output).
            let search_pattern_used = (p.starts_with('/') && p.ends_with('/'))
                || (p.len() > 1 && p.starts_with('$') && p.ends_with('$'));
            if search_pattern_used {
                // Now actually consume the address column (still carrying `;"`,
                // hence the `- 4` below: strips `$/` plus `;"`).
                let pattern = items.next()?;
                let pattern_len = pattern.len();
                // forward search: `/^foo$/`
                // backward search: `?^foo$?`
                if p.starts_with("/^") || p.starts_with("?^") {
                    if p.ends_with("$/") || p.ends_with("$?") {
                        // Anchored at both ends: drop `/^` and `$/;"`.
                        l.pattern = String::from(&pattern[2..pattern_len - 4]);
                    } else {
                        // Only start-anchored: drop `/^` and `;"`.
                        l.pattern = String::from(&pattern[2..pattern_len - 2]);
                    }
                } else {
                    // Unanchored pattern: drop the two leading delimiter chars.
                    l.pattern = String::from(&pattern[2..pattern_len]);
                }
            } else {
                return None;
            }
        } else {
            return None;
        }

        // File extension, used to shorten the `kind` field per language.
        let maybe_extension = l.path.rsplit_once('.').map(|(_, extension)| extension);

        // Remaining columns are `key:value` extension fields.
        for item in items {
            if let Some((k, v)) = item.split_once(':') {
                if v.is_empty() {
                    continue;
                }
                match k {
                    "kind" => l.kind = Some(ctags::kinds::compact_kind(maybe_extension, v)),
                    "scope" => l.scope = Some(v.into()),
                    "line" => l.line_number = v.parse().expect("line is an integer"),
                    // Unused for now.
                    "language" | "roles" | "access" | "signature" => {}
                    unknown => {
                        tracing::debug!(line = %s, "Unknown field: {}", unknown);
                    }
                }
            }
        }

        Some(l)
    }

    /// Parse one line of `gtags` output; name/kind/scope are left unset.
    pub fn from_gtags(s: &str) -> Option<Self> {
        pattern::parse_gtags(s).map(|(line_number, path, pattern)| Self {
            path: path.into(),
            pattern: pattern.into(),
            line_number,
            ..Default::default()
        })
    }

    /// Constructs a grep like line with highlighting indices.
    ///
    /// The produced line is `[kind]path:line:1:pattern`; the returned indices
    /// (when the query occurs in the pattern) point at the matched substring
    /// within that formatted line.
    fn grep_format_inner(
        &self,
        kind: &str,
        query: &str,
        ignorecase: bool,
    ) -> (String, Option<Vec<usize>>) {
        let mut formatted = format!("[{}]{}:{}:1:", kind, self.path, self.line_number);

        // Locate the query inside the pattern, case-folded if requested.
        let found = if ignorecase {
            self.pattern.to_lowercase().find(&query.to_lowercase())
        } else {
            self.pattern.find(query)
        };

        // Offsets are relative to the full formatted line, so add the prefix
        // length before the match position.
        let indices = if let Some(idx) = found {
            let start = formatted.len() + idx;
            let end = start + query.len();
            Some((start..end).collect())
        } else {
            None
        };

        formatted.push_str(&self.pattern);

        (formatted, indices)
    }

    /// ctags variant: uses the tag's own `kind` (or `"tags"` as fallback).
    pub fn grep_format_ctags(&self, query: &str, ignorecase: bool) -> (String, Option<Vec<usize>>) {
        let kind = self.kind.as_ref().map(|s| s.as_ref()).unwrap_or("tags");
        self.grep_format_inner(kind, query, ignorecase)
    }

    /// gtags variant: the caller supplies the `kind` label.
    pub fn grep_format_gtags(
        &self,
        kind: &str,
        query: &str,
        ignorecase: bool,
    ) -> (String, Option<Vec<usize>>) {
        self.grep_format_inner(kind, query, ignorecase)
    }

    /// Converts this symbol plus an already-formatted line into the
    /// `AddressableUsage` consumed by the rest of the pipeline.
    pub fn into_addressable_usage(self, line: String, indices: Vec<usize>) -> AddressableUsage {
        AddressableUsage {
            line,
            indices,
            path: self.path,
            line_number: self.line_number,
        }
    }
}
//! Out-of-line module declarations for the built-in functions (BIFs) of the
//! `erlang` module. Each `name_arity` module lives in
//! `erlang/<name>_<arity>.rs`; the `#[path]` attribute maps the flat module
//! names onto that subdirectory layout.

#[path = "erlang/abs_1.rs"]
pub mod abs_1;
#[path = "erlang/add_2.rs"]
pub mod add_2;
#[path = "erlang/and_2.rs"]
pub mod and_2;
#[path = "erlang/andalso_2.rs"]
pub mod andalso_2;
#[path = "erlang/append_element_2.rs"]
pub mod append_element_2;
#[path = "erlang/apply_2.rs"]
pub mod apply_2;
#[path = "erlang/atom_to_binary_2.rs"]
pub mod atom_to_binary_2;
#[path = "erlang/atom_to_list_1.rs"]
pub mod atom_to_list_1;
#[path = "erlang/band_2.rs"]
pub mod band_2;
#[path = "erlang/binary_part_2.rs"]
pub mod binary_part_2;
#[path = "erlang/binary_part_3.rs"]
pub mod binary_part_3;
#[path = "erlang/binary_to_integer_1.rs"]
pub mod binary_to_integer_1;
#[path = "erlang/binary_to_integer_2.rs"]
pub mod binary_to_integer_2;
#[path = "erlang/binary_to_list_1.rs"]
pub mod binary_to_list_1;
#[path = "erlang/binary_to_list_3.rs"]
pub mod binary_to_list_3;
#[path = "erlang/binary_to_term_1.rs"]
pub mod binary_to_term_1;
#[path = "erlang/binary_to_term_2.rs"]
pub mod binary_to_term_2;
#[path = "erlang/bnot_1.rs"]
pub mod bnot_1;
#[path = "erlang/bor_2.rs"]
pub mod bor_2;
#[path = "erlang/bsl_2.rs"]
pub mod bsl_2;
#[path = "erlang/bsr_2.rs"]
pub mod bsr_2;
#[path = "erlang/bxor_2.rs"]
pub mod bxor_2;
#[path = "erlang/cancel_timer_1.rs"]
pub mod cancel_timer_1;
#[path = "erlang/cancel_timer_2.rs"]
pub mod cancel_timer_2;
#[path = "erlang/ceil_1.rs"]
pub mod ceil_1;
#[path = "erlang/concatenate_2.rs"]
pub mod concatenate_2;
#[path = "erlang/convert_time_unit_3.rs"]
pub mod convert_time_unit_3;
#[path = "erlang/date_0.rs"]
pub mod date_0;
#[path = "erlang/delete_element_2.rs"]
pub mod delete_element_2;
#[path = "erlang/demonitor_1.rs"]
pub mod demonitor_1;
#[path = "erlang/demonitor_2.rs"]
pub mod demonitor_2;
#[path = "erlang/display_1.rs"]
pub mod display_1;
#[path = "erlang/div_2.rs"]
pub mod div_2;
#[path = "erlang/divide_2.rs"]
pub mod divide_2;
#[path = "erlang/element_2.rs"]
pub mod element_2;
#[path = "erlang/erase_0.rs"]
pub mod erase_0;
#[path = "erlang/erase_1.rs"]
pub mod erase_1;
#[path = "erlang/error_1.rs"]
pub mod error_1;
#[path = "erlang/error_2.rs"]
pub mod error_2;
#[path = "erlang/exit_1.rs"]
pub mod exit_1;
#[path = "erlang/float_1.rs"]
pub mod float_1;
#[path = "erlang/float_to_binary_1.rs"]
pub mod float_to_binary_1;
#[path = "erlang/float_to_binary_2.rs"]
pub mod float_to_binary_2;
#[path = "erlang/float_to_list_1.rs"]
pub mod float_to_list_1;
#[path = "erlang/float_to_list_2.rs"]
pub mod float_to_list_2;
#[path = "erlang/floor_1.rs"]
pub mod floor_1;
#[path = "erlang/function_exported_3.rs"]
pub mod function_exported_3;
#[path = "erlang/get_0.rs"]
pub mod get_0;
#[path = "erlang/get_1.rs"]
pub mod get_1;
#[path = "erlang/get_keys_0.rs"]
pub mod get_keys_0;
#[path = "erlang/get_keys_1.rs"]
pub mod get_keys_1;
#[path = "erlang/hd_1.rs"]
pub mod hd_1;
#[path = "erlang/insert_element_3.rs"]
pub mod insert_element_3;
#[path = "erlang/integer_to_binary_1.rs"]
pub mod integer_to_binary_1;
#[path = "erlang/integer_to_list_1.rs"]
pub mod integer_to_list_1;
#[path = "erlang/is_alive_0.rs"]
pub mod is_alive_0;
#[path = "erlang/is_atom_1.rs"]
pub mod is_atom_1;
#[path = "erlang/is_binary_1.rs"]
pub mod is_binary_1;
#[path = "erlang/is_boolean_1.rs"]
pub mod is_boolean_1;
#[path = "erlang/is_float_1.rs"]
pub mod is_float_1;
#[path = "erlang/is_integer_1.rs"]
pub mod is_integer_1;
#[path = "erlang/is_list_1.rs"]
pub mod is_list_1;
#[path = "erlang/is_map_1.rs"]
pub mod is_map_1;
#[path = "erlang/is_number_1.rs"]
pub mod is_number_1;
#[path = "erlang/is_pid_1.rs"]
pub mod is_pid_1;
#[path = "erlang/is_process_alive_1.rs"]
pub mod is_process_alive_1;
#[path = "erlang/link_1.rs"]
pub mod link_1;
#[path = "erlang/load_nif_2.rs"]
pub mod load_nif_2;
#[path = "erlang/module_loaded_1.rs"]
pub mod module_loaded_1;
#[path = "erlang/nif_error_1.rs"]
pub mod nif_error_1;
#[path = "erlang/or_2.rs"]
pub mod or_2;
#[path = "erlang/process_flag_2.rs"]
pub mod process_flag_2;
#[path = "erlang/seq_trace_2.rs"]
pub mod seq_trace_2;
#[path = "erlang/seq_trace_info_1.rs"]
pub mod seq_trace_info_1;
#[path = "erlang/seq_trace_print_2.rs"]
pub mod seq_trace_print_2;
#[path = "erlang/spawn_1.rs"]
pub mod spawn_1;
#[path = "erlang/spawn_3.rs"]
pub mod spawn_3;
#[path = "erlang/spawn_link_1.rs"]
pub mod spawn_link_1;
#[path = "erlang/spawn_link_3.rs"]
pub mod spawn_link_3;
#[path = "erlang/spawn_monitor_1.rs"]
pub mod spawn_monitor_1;
#[path = "erlang/spawn_monitor_3.rs"]
pub mod spawn_monitor_3;
#[path = "erlang/spawn_opt_2.rs"]
pub mod spawn_opt_2;
#[path = "erlang/spawn_opt_4.rs"]
pub mod spawn_opt_4;
#[path = "erlang/system_flag_2.rs"]
pub mod system_flag_2;
#[path = "erlang/tl_1.rs"]
pub mod tl_1;
use msfs::{
    self,
    sim_connect::{data_definition, Period, SimConnectRecv, SIMCONNECT_OBJECT_ID_USER},
};

/// Positions of the three primary flight-control surfaces, requested from
/// SimConnect via the simvar names/units given in the attributes.
#[data_definition]
#[derive(Debug)]
struct ControlSurfaces {
    #[name = "ELEVATOR POSITION"]
    #[unit = "Position"]
    elevator: f64,
    #[name = "AILERON POSITION"]
    #[unit = "Position"]
    ailerons: f64,
    #[name = "RUDDER POSITION"]
    #[unit = "Position"]
    rudder: f64,
}

/// Standalone WASM module entry point: subscribes to `ControlSurfaces` on the
/// user aircraft once per sim frame and logs every received sample.
#[msfs::standalone_module]
async fn module(mut module: msfs::StandaloneModule) -> Result<(), Box<dyn std::error::Error>> {
    let mut sim = module.open_simconnect("LOG")?;

    // Request id 0; deliver a fresh sample every simulation frame.
    sim.request_data_on_sim_object::<ControlSurfaces>(
        0,
        SIMCONNECT_OBJECT_ID_USER,
        Period::SimFrame,
    )?;

    println!("WASM: LOG INSTALLED");

    while let Some(event) = module.next_event().await {
        // Only data deliveries are of interest; every other event is ignored.
        if let SimConnectRecv::SimObjectData(data_event) = event {
            let data = data_event.into::<ControlSurfaces>(&sim).unwrap();
            println!("WASM: SimObjectData {:?}", data);
        }
    }

    Ok(())
}
pub struct Solution;

impl Solution {
    /// LeetCode 354 — Russian Doll Envelopes.
    ///
    /// Sorts envelopes by width ascending and, within equal widths, by
    /// height *descending*, so that at most one envelope of a given width
    /// can ever join a strictly increasing chain of heights. The answer is
    /// then the length of the longest strictly increasing subsequence of
    /// the heights, found with the O(n log n) "tails" technique.
    ///
    /// Returns 0 for an empty input.
    pub fn max_envelopes(envelopes: Vec<Vec<i32>>) -> i32 {
        let mut envelopes = envelopes;
        // BUGFIX: the previous key `(v[0], -v[1])` negates the height, which
        // overflows (panics in debug builds) when a height is `i32::MIN`.
        // `Reverse` inverts the ordering without arithmetic.
        envelopes.sort_unstable_by_key(|v| (v[0], std::cmp::Reverse(v[1])));

        // tails[i] = smallest possible chain-tail height for a chain of
        // length i + 1; always kept sorted.
        let mut tails: Vec<i32> = Vec::with_capacity(envelopes.len());
        for v in envelopes {
            match tails.binary_search(&v[1]) {
                // Equal height already present: cannot extend strictly, skip.
                Ok(_) => {}
                // Larger than every tail: the longest chain grows by one.
                Err(i) if i == tails.len() => tails.push(v[1]),
                // Otherwise tighten the tail of the chain of length i + 1.
                Err(i) => tails[i] = v[1],
            }
        }
        tails.len() as i32
    }
}

#[test]
fn test0354() {
    fn case(envelopes: &[[i32; 2]], want: i32) {
        let envelopes = envelopes.iter().map(|a| a.to_vec()).collect();
        let got = Solution::max_envelopes(envelopes);
        assert_eq!(got, want);
    }

    case(&[[5, 4], [6, 4], [6, 7], [2, 3]], 3);
}
use std::collections::HashMap;

/// Advent of Code 2019 day 23 (part 2): runs 50 Intcode "network computers"
/// round-robin, routing 3-value packets (dest, x, y) between their input
/// queues. Packets addressed to 255 go to the NAT; when every machine is
/// idle the NAT re-injects its last packet into machine 0.
///
/// Protocol with `run_program`: `Ok(v)` = one output value produced,
/// `Err(3)` = blocked waiting for input, `Err(99)` = halted.
fn main() {
    let mut source : Vec<&str> = include_str!("./input_a.txt").lines().collect();

    let mut amp = 0;
    let amps = 50;
    // Per-machine state, keyed by machine index.
    let mut programs: HashMap<usize, Program> = HashMap::new();
    let mut p_outputs: HashMap<usize, Vec<isize>> = HashMap::new();
    let mut p_inputs: HashMap<usize, Vec<isize>> = HashMap::new();
    let mut p_exit_codes: HashMap<usize, Result<isize, isize>> = HashMap::new();
    // Last (x, y) packet sent to address 255; (-1, -1) until one arrives.
    let mut nat_outputs: Vec<isize> = vec![-1; 2];
    // True for machines that asked for input while their queue was empty.
    let mut nat_sleeping: Vec<bool> = vec![false; amps];

    for i in 0..amps {
        programs.insert(i, Program::new(source[0], 100));
        p_outputs.insert(i, Vec::new());
        // Each machine's first input is its own network address.
        p_inputs.insert(i, vec![i as isize]);
        p_exit_codes.insert(i, Ok(0));
        //println!("Amplifier {}: Created!", i);
    }

    loop {
        let mut a_program = programs.get_mut(&amp).unwrap();
        let mut outputs = p_outputs.get_mut(&amp).unwrap();
        let mut exit_code = p_exit_codes.get_mut(&amp).unwrap();

        // Run the current machine until it blocks (Err(3)) or halts (Err(99)).
        while exit_code.is_ok() {
            //println!("Amplifier {}: Running", amp);
            {
                let input = p_inputs.get(&amp).unwrap();
                *exit_code = run_program(&mut a_program, &input);
            }
            if exit_code.is_err() {
                break;
            }
            outputs.push(exit_code.unwrap());

            // Every three outputs form one packet: dest, x, y (pushed in
            // order, so popped back-to-front here).
            if outputs.len() % 3 == 0 {
                let y = outputs.pop().unwrap();
                let x = outputs.pop().unwrap();
                let n = outputs.pop().unwrap() as usize;

                println!("Amplifier {}: Target Amp: {} ({}, {})", amp, n, x, y);
                if n == 255 {
                    // NAT address: remember only the most recent packet.
                    nat_outputs[0] = x;
                    nat_outputs[1] = y;
                    println!("Amplifier {}: Target Amp: {} ({}, {})", amp, n, x, y);
                } else {
                    // Deliver to machine n's input queue and wake it.
                    let mut n_input = p_inputs.get_mut(&n).unwrap();
                    n_input.push(x);
                    n_input.push(y);
                    println!("Amplifier {}: Target Amp: {} ({}, {})", amp, n, x, y);
                    nat_sleeping[n] = false;
                }
            }
        }

        // Asking for input
        if *exit_code == Err(3) {
            *exit_code = Ok(0);
            let mut input = p_inputs.get_mut(&amp).unwrap();
            // Queue drained: feed -1 ("no packet") and mark idle.
            if a_program.input_ctr == input.len() {
                input.push(-1);
                nat_sleeping[amp] = true;
            }
        }

        // Halting
        if *exit_code == Err(99) {
            println!("Amplifier {}: Halt!", amp);
            break;
        }

        amp = (amp + 1) % amps;

        // After a full round, if the whole network is idle the NAT kicks
        // machine 0 with the last stored packet.
        if amp == 0 {
            let all_sleeping = nat_sleeping.iter().fold(true, |acc, &x| acc && x);
            if all_sleeping {
                println!("NAT 255: All Sleeping - Sending output ({}, {})", nat_outputs[0], nat_outputs[1]);
                let mut input = p_inputs.get_mut(&0).unwrap();
                input.push(nat_outputs[0]);
                input.push(nat_outputs[1]);
                nat_sleeping[0] = false;
            }
        }
    }
}

/// One Intcode machine: memory, instruction pointer, how many inputs it has
/// consumed, the relative-base register, and a tick counter for debugging.
struct Program {
    positions: Vec<isize>,
    instruction_ptr: usize,
    input_ctr: usize,
    relative_base: usize,
    ticks: usize
}

impl Program {
    /// Parses a comma-separated Intcode source and appends `extra` zeroed
    /// memory cells beyond the program text.
    fn new(source: &str, extra: usize) -> Program {
        let mut positions: Vec<isize> = source.split(",").map(|x| x.parse().unwrap()).collect();
        let mut memory = vec![0; extra];
        positions.append(&mut memory);
        let mut instruction_ptr = 0;
        let mut input_ctr = 0;
        let mut relative_base = 0;
        let mut ticks = 0;
        Program { positions, instruction_ptr, input_ctr, relative_base, ticks }
    }
}

/// Steps the machine until it emits one output (`Ok(value)`), needs input it
/// does not have (`Err(3)`), or halts (`Err(99)`). State is preserved across
/// calls so execution resumes where it left off.
fn run_program(program: &mut Program, inputs: &[isize]) -> Result<isize, isize> {
    let mut output = Ok(0);
    while program.instruction_ptr < program.positions.len() {
        let instruction = program.positions[program.instruction_ptr];
        let opcode = parse_opcode(instruction);
        program.ticks += 1;
        match opcode.op {
            Op::Add => {
                let param1 = get_param_value(&opcode.param1, program, 1);
                let param2 = get_param_value(&opcode.param2, program, 2);
                // Write target: raw address, never dereferenced.
                let param3 = get_param(&opcode.param3, program, 3);
                //debug_op("Add", &program, 3, (param1, param2, get_param_value(&opcode.param3, program, 3)));
                program.positions[param3 as usize] = param1 + param2;
                program.instruction_ptr += 4;
            },
            Op::Multiply => {
                let param1 = get_param_value(&opcode.param1, program, 1);
                let param2 = get_param_value(&opcode.param2, program, 2);
                let param3 = get_param(&opcode.param3, program, 3);
                //debug_op("Multiply", &program, 3, (param1, param2, get_param_value(&opcode.param3, program, 3)));
                program.positions[param3 as usize] = param1 * param2;
                program.instruction_ptr += 4;
            },
            Op::Input => {
                // No input available: yield with the sentinel Err(3) so the
                // scheduler can supply more and resume later.
                if program.input_ctr >= inputs.len() {
                    output = Err(3);
                    break;
                }
                let param1 = get_param(&opcode.param1, program, 1);
                let input = inputs[program.input_ctr];
                //debug_op("Input", &program, 1, (get_param_value(&opcode.param1, program, 1)));
                program.input_ctr += 1;
                program.positions[param1 as usize] = input;
                program.instruction_ptr += 2;
            },
            Op::Print => {
                let param1 = get_param_value(&opcode.param1, program, 1);
                //debug_op("Output", &program, 1, (param1));
                // Return one output per call; caller collects them.
                output = Ok(param1);
                program.instruction_ptr += 2;
                break;
            },
            Op::JumpTrue => {
                let param1 = get_param_value(&opcode.param1, program, 1);
                let param2 = get_param_value(&opcode.param2, program, 2);
                //debug_op("Jump True", &program, 1, (param1, param2));
                // NOTE(review): spec says "non-zero"; `> 0` ignores negative
                // values — appears to work for this puzzle input.
                if param1 > 0 {
                    program.instruction_ptr = param2 as usize;
                } else {
                    program.instruction_ptr += 3;
                }
            },
            Op::JumpFalse => {
                let param1 = get_param_value(&opcode.param1, program, 1);
                let param2 = get_param_value(&opcode.param2, program, 2);
                //debug_op("Jump If False", &program, 2, (param1, param2));
                if param1 == 0 {
                    program.instruction_ptr = param2 as usize;
                } else {
                    program.instruction_ptr += 3;
                }
            },
            Op::LessThan => {
                let param1 = get_param_value(&opcode.param1, program, 1);
                let param2 = get_param_value(&opcode.param2, program, 2);
                let param3 = get_param(&opcode.param3, program, 3);
                //debug_op("Less Than", &program, 3, (param1, param2, get_param_value(&opcode.param3, program, 3)));
                if param1 < param2 {
                    program.positions[param3 as usize] = 1;
                } else {
                    program.positions[param3 as usize] = 0;
                }
                program.instruction_ptr += 4;
            },
            Op::EqualTo=> {
                let param1 = get_param_value(&opcode.param1, program, 1);
                let param2 = get_param_value(&opcode.param2, program, 2);
                let param3 = get_param(&opcode.param3, program, 3);
                //debug_op("Equal To", &program, 3, (param1, param2, get_param_value(&opcode.param3, program, 3)));
                if param1 == param2 {
                    program.positions[param3 as usize] = 1;
                } else {
                    program.positions[param3 as usize] = 0;
                }
                program.instruction_ptr += 4;
            },
            Op::RelativeBase => {
                let param1 = get_param_value(&opcode.param1, program, 1);
                //debug_op("Adjust RB", &program, 3, (param1));
                program.relative_base = (program.relative_base as isize + param1) as usize;
                program.instruction_ptr += 2;
            },
            // NOTE(review): `Op::Halt | _` — the `_` arm is unreachable since
            // every Op variant is covered; kept as-is.
            Op::Halt | _ => {
                //debug_op("Halt", &program, 3, ());
                output = Err(99);
                break;
            }
        }
    }
    output
}

/// Debug helper: prints the raw instruction slice plus decoded params.
fn debug_op(op: &str, program: &Program, size: usize, param2: impl std::fmt::Debug) {
    let params = &program.positions[program.instruction_ptr..=program.instruction_ptr+size];
    println!("{}: {} {:?} {:?} RB: {}", program.instruction_ptr, op, params, param2, program.relative_base);
}

/// Intcode opcodes (discriminants match the numeric opcode values).
#[derive(Debug)]
enum Op {
    Add = 1,
    Multiply = 2,
    Input = 3,
    Print = 4,
    JumpTrue = 5,
    JumpFalse = 6,
    LessThan = 7,
    EqualTo = 8,
    RelativeBase = 9,
    Halt = 99
}

/// Parameter addressing modes, per the day-5/day-9 spec.
#[derive(Debug)]
enum ParamMode {
    Position = 0,
    Immediate = 1,
    Relative = 2
}

/// A decoded instruction: operation plus the mode of each parameter slot.
#[derive(Debug)]
struct Opcode {
    op: Op,
    param1: ParamMode,
    param2: ParamMode,
    param3: ParamMode,
}

/// Decodes an instruction of the form `ABCDE` where `DE` is the opcode and
/// `C`, `B`, `A` are the modes of params 1-3. The value is zero-padded to
/// five digits, then read back-to-front.
fn parse_opcode(opcode: isize) -> Opcode {
    let mut operation = Opcode { op: Op::Halt, param1: ParamMode::Position, param2: ParamMode::Position, param3: ParamMode::Position};
    let digits: Vec<u32> = opcode.to_string().chars().map(|x| x.to_digit(10).unwrap()).collect();
    // assumes instructions never exceed 5 digits — would underflow otherwise
    let fill_amount = 5 - digits.len();
    let filler = vec![0;fill_amount];
    // Reversed so codes[0] is the ones digit, codes[1] the tens, etc.
    let codes: Vec<u32> = [filler, digits].concat().into_iter().rev().collect();

    operation.op = parse_op(codes[1], codes[0]);
    operation.param1 = parse_param(codes[2]);
    operation.param2 = parse_param(codes[3]);
    operation.param3 = parse_param(codes[4]);

    operation
}

/// Combines the tens (`d1`) and ones (`d2`) digits into an `Op`.
fn parse_op(d1: u32, d2: u32) -> Op {
    let code = (d1 * 10) + d2;
    return match code {
        1 => Op::Add,
        2 => Op::Multiply,
        3 => Op::Input,
        4 => Op::Print,
        5 => Op::JumpTrue,
        6 => Op::JumpFalse,
        7 => Op::LessThan,
        8 => Op::EqualTo,
        9 => Op::RelativeBase,
        99 => Op::Halt,
        _ => panic!("Uknown")
    }
}

/// Maps a mode digit to its `ParamMode`.
fn parse_param(param: u32) -> ParamMode {
    return match param {
        0 => ParamMode::Position,
        1 => ParamMode::Immediate,
        2 => ParamMode::Relative,
        _ => panic!("Uknown")
    }
}

/// Resolves a parameter to an *address/raw value* (no memory read for
/// Position/Immediate; Relative adds the relative base). Used for write
/// targets.
fn get_param(param: &ParamMode, program: &Program, offset: usize) -> isize {
    let value = program.positions[program.instruction_ptr + offset];
    return match param {
        ParamMode::Position => value,
        ParamMode::Immediate => value,
        ParamMode::Relative => program.relative_base as isize + value
    }
}

/// Resolves a parameter to the *value read* according to its mode
/// (dereferences memory for Position/Relative). Used for read operands.
fn get_param_value(param: &ParamMode, program: &Program, offset: usize) -> isize {
    let value = get_param(param, program, offset);
    return match param {
        ParamMode::Position => program.positions[value as usize],
        ParamMode::Immediate => value,
        ParamMode::Relative => program.positions[value as usize]
    }
}
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// NOTE(review): legacy (2016-era) rustc macro resolution. `macro_rules!`
// visibility is textual, modelled as a linked chain of `LegacyScope` nodes
// threaded through invocations and bindings.

use {Module, Resolver};
use build_reduced_graph::BuildReducedGraphVisitor;
use rustc::hir::def_id::{DefId, BUILTIN_MACROS_CRATE, CRATE_DEF_INDEX, DefIndex};
use rustc::hir::def::{Def, Export};
use rustc::hir::map::{self, DefCollector};
use std::cell::Cell;
use std::rc::Rc;
use syntax::ast;
use syntax::errors::DiagnosticBuilder;
use syntax::ext::base::{self, Determinacy, MultiModifier, MultiDecorator};
use syntax::ext::base::{NormalTT, SyntaxExtension};
use syntax::ext::expand::Expansion;
use syntax::ext::hygiene::Mark;
use syntax::ext::tt::macro_rules;
use syntax::parse::token::intern;
use syntax::util::lev_distance::find_best_match_for_name;
use syntax::visit::Visitor;
use syntax_pos::Span;

/// Resolution state attached to one macro invocation (`Mark`).
#[derive(Clone)]
pub struct InvocationData<'a> {
    /// Module the invocation appears in (updated as expansion proceeds).
    pub module: Cell<Module<'a>>,
    /// Parent def-index under which this invocation's items are collected.
    pub def_index: DefIndex,
    // True if this expansion is in a `const_integer` position, for example `[u32; m!()]`.
    // c.f. `DefCollector::visit_ast_const_integer`.
    pub const_integer: bool,
    // The scope in which the invocation path is resolved.
    pub legacy_scope: Cell<LegacyScope<'a>>,
    // The smallest scope that includes this invocation's expansion,
    // or `Empty` if this invocation has not been expanded yet.
    pub expansion: Cell<LegacyScope<'a>>,
}

impl<'a> InvocationData<'a> {
    /// Root invocation data for the crate: anchored at the crate module with
    /// empty legacy scopes.
    pub fn root(graph_root: Module<'a>) -> Self {
        InvocationData {
            module: Cell::new(graph_root),
            def_index: CRATE_DEF_INDEX,
            const_integer: false,
            legacy_scope: Cell::new(LegacyScope::Empty),
            expansion: Cell::new(LegacyScope::Empty),
        }
    }
}

/// One link in the textual `macro_rules!` scope chain.
#[derive(Copy, Clone)]
pub enum LegacyScope<'a> {
    Empty,
    Invocation(&'a InvocationData<'a>), // The scope of the invocation, not including its expansion
    Expansion(&'a InvocationData<'a>), // The scope of the invocation, including its expansion
    Binding(&'a LegacyBinding<'a>),
}

impl<'a> LegacyScope<'a> {
    /// Collapses chains of already-expanded invocations so lookups walk
    /// fewer links; stops at bindings or unexpanded invocations.
    fn simplify_expansion(mut invoc: &'a InvocationData<'a>) -> Self {
        while let LegacyScope::Invocation(_) = invoc.expansion.get() {
            match invoc.legacy_scope.get() {
                LegacyScope::Expansion(new_invoc) => invoc = new_invoc,
                LegacyScope::Binding(_) => break,
                scope @ _ => return scope,
            }
        }
        LegacyScope::Expansion(invoc)
    }
}

/// A single `macro_rules!` definition in the legacy scope chain.
pub struct LegacyBinding<'a> {
    /// Scope in effect just before this definition.
    pub parent: LegacyScope<'a>,
    pub name: ast::Name,
    /// Compiled expander for the macro.
    ext: Rc<SyntaxExtension>,
    pub span: Span,
}

impl<'a> base::Resolver for Resolver<'a> {
    fn next_node_id(&mut self) -> ast::NodeId {
        self.session.next_node_id()
    }

    /// Allocates a fresh expansion `Mark` rooted at module `id` and records
    /// its invocation data.
    fn get_module_scope(&mut self, id: ast::NodeId) -> Mark {
        let mark = Mark::fresh();
        let module = self.module_map[&id];
        self.invocations.insert(mark, self.arenas.alloc_invocation_data(InvocationData {
            module: Cell::new(module),
            def_index: module.def_id().unwrap().index,
            const_integer: false,
            legacy_scope: Cell::new(LegacyScope::Empty),
            expansion: Cell::new(LegacyScope::Empty),
        }));
        mark
    }

    /// Integrates a finished expansion into the resolver: collects its
    /// def-ids, builds the reduced graph for it, and records the legacy
    /// scope reached at the end of the expansion.
    fn visit_expansion(&mut self, mark: Mark, expansion: &Expansion) {
        let invocation = self.invocations[&mark];
        self.collect_def_ids(invocation, expansion);

        self.current_module = invocation.module.get();
        let mut visitor = BuildReducedGraphVisitor {
            resolver: self,
            legacy_scope: LegacyScope::Invocation(invocation),
            expansion: mark,
        };
        expansion.visit_with(&mut visitor);
        invocation.expansion.set(visitor.legacy_scope);
    }

    /// Registers a `macro_rules!` definition: compiles it, links it into the
    /// legacy scope chain, and (for `#[macro_export]`) records the export.
    fn add_macro(&mut self, scope: Mark, mut def: ast::MacroDef, export: bool) {
        if &def.ident.name.as_str() == "macro_rules" {
            self.session.span_err(def.span, "user-defined macros may not be named `macro_rules`");
        }

        let invocation = self.invocations[&scope];
        let binding = self.arenas.alloc_legacy_binding(LegacyBinding {
            parent: invocation.legacy_scope.get(),
            name: def.ident.name,
            ext: Rc::new(macro_rules::compile(&self.session.parse_sess, &def)),
            span: def.span,
        });
        invocation.legacy_scope.set(LegacyScope::Binding(binding));
        self.macro_names.insert(def.ident.name);

        if export {
            def.id = self.next_node_id();
            DefCollector::new(&mut self.definitions).with_parent(CRATE_DEF_INDEX, |collector| {
                collector.visit_macro_def(&def)
            });
            self.macro_exports.push(Export {
                name: def.ident.name,
                def: Def::Macro(self.definitions.local_def_id(def.id)),
            });
            self.exported_macros.push(def);
        }
    }

    /// Registers a built-in (compiler-provided) syntax extension under a
    /// synthetic `DefId` in the `BUILTIN_MACROS_CRATE`.
    fn add_ext(&mut self, ident: ast::Ident, ext: Rc<SyntaxExtension>) {
        if let NormalTT(..) = *ext {
            self.macro_names.insert(ident.name);
        }
        let def_id = DefId {
            krate: BUILTIN_MACROS_CRATE,
            index: DefIndex::new(self.macro_map.len()),
        };
        self.macro_map.insert(def_id, ext);
        self.builtin_macros.insert(ident.name, def_id);
    }

    fn add_expansions_at_stmt(&mut self, id: ast::NodeId, macros: Vec<Mark>) {
        self.macros_at_scope.insert(id, macros);
    }

    /// Finds and removes the first attribute that names a built-in
    /// attribute-like extension (modifier/decorator/attr proc-macro).
    fn find_attr_invoc(&mut self, attrs: &mut Vec<ast::Attribute>) -> Option<ast::Attribute> {
        for i in 0..attrs.len() {
            let name = intern(&attrs[i].name());
            match self.builtin_macros.get(&name) {
                Some(&def_id) => match *self.get_macro(Def::Macro(def_id)) {
                    MultiModifier(..) | MultiDecorator(..) | SyntaxExtension::AttrProcMacro(..) => {
                        return Some(attrs.remove(i))
                    }
                    _ => {}
                },
                None => {}
            }
        }
        None
    }

    /// Resolves a macro path at an invocation site. Only single-segment,
    /// non-global paths are legal here; failure is reported (and
    /// `Determined`) only when `force` is set.
    fn resolve_macro(&mut self, scope: Mark, path: &ast::Path, force: bool)
                     -> Result<Rc<SyntaxExtension>, Determinacy> {
        if path.segments.len() > 1 || path.global || !path.segments[0].parameters.is_empty() {
            self.session.span_err(path.span, "expected macro name without module separators");
            return Err(Determinacy::Determined);
        }
        let name = path.segments[0].identifier.name;

        let invocation = self.invocations[&scope];
        // Opportunistically compress the scope chain before walking it.
        if let LegacyScope::Expansion(parent) = invocation.legacy_scope.get() {
            invocation.legacy_scope.set(LegacyScope::simplify_expansion(parent));
        }
        self.resolve_macro_name(invocation.legacy_scope.get(), name).ok_or_else(|| {
            if force {
                let msg = format!("macro undefined: '{}!'", name);
                let mut err = self.session.struct_span_err(path.span, &msg);
                self.suggest_macro_name(&name.as_str(), &mut err);
                err.emit();
                Determinacy::Determined
            } else {
                Determinacy::Undetermined
            }
        })
    }
}

impl<'a> Resolver<'a> {
    /// Walks the legacy scope chain looking for `name`.
    ///
    /// Tracks two hazards while walking: `possible_time_travel` (the lookup
    /// passed through a not-yet-expanded invocation, so the answer could
    /// change later — rechecked via `lexical_macro_resolutions`) and
    /// `relative_depth` (binding found inside an expansion deeper than the
    /// use site — recorded as disallowed shadowing). Falls back to built-in
    /// macros when the chain is exhausted.
    pub fn resolve_macro_name(&mut self, mut scope: LegacyScope<'a>, name: ast::Name)
                              -> Option<Rc<SyntaxExtension>> {
        let mut possible_time_travel = None;
        let mut relative_depth: u32 = 0;
        loop {
            scope = match scope {
                LegacyScope::Empty => break,
                LegacyScope::Expansion(invocation) => {
                    if let LegacyScope::Empty = invocation.expansion.get() {
                        // Not expanded yet: remember we might be looking at
                        // a stale view of this scope.
                        if possible_time_travel.is_none() {
                            possible_time_travel = Some(scope);
                        }
                        invocation.legacy_scope.get()
                    } else {
                        relative_depth += 1;
                        invocation.expansion.get()
                    }
                }
                LegacyScope::Invocation(invocation) => {
                    relative_depth = relative_depth.saturating_sub(1);
                    invocation.legacy_scope.get()
                }
                LegacyScope::Binding(binding) => {
                    if binding.name == name {
                        if let Some(scope) = possible_time_travel {
                            // Check for disallowed shadowing later
                            self.lexical_macro_resolutions.push((name, scope));
                        } else if relative_depth > 0 {
                            self.disallowed_shadowing.push(binding);
                        }
                        return Some(binding.ext.clone());
                    }
                    binding.parent
                }
            };
        }

        if let Some(scope) = possible_time_travel {
            self.lexical_macro_resolutions.push((name, scope));
        }
        self.builtin_macros.get(&name).cloned().map(|def_id| self.get_macro(Def::Macro(def_id)))
    }

    /// Adds a "did you mean" / `#[macro_use]` hint to an undefined-macro
    /// diagnostic.
    fn suggest_macro_name(&mut self, name: &str, err: &mut DiagnosticBuilder<'a>) {
        if let Some(suggestion) = find_best_match_for_name(self.macro_names.iter(), name, None) {
            if suggestion != name {
                err.help(&format!("did you mean `{}!`?", suggestion));
            } else {
                err.help(&format!("have you added the `#[macro_use]` on the module/import?"));
            }
        }
    }

    /// Walks an expansion with the `DefCollector`, creating `InvocationData`
    /// entries for every nested macro invocation encountered.
    fn collect_def_ids(&mut self, invocation: &'a InvocationData<'a>, expansion: &Expansion) {
        let Resolver { ref mut invocations, arenas, graph_root, .. } = *self;
        let InvocationData { def_index, const_integer, .. } = *invocation;

        let visit_macro_invoc = &mut |invoc: map::MacroInvocationData| {
            invocations.entry(invoc.mark).or_insert_with(|| {
                arenas.alloc_invocation_data(InvocationData {
                    def_index: invoc.def_index,
                    const_integer: invoc.const_integer,
                    module: Cell::new(graph_root),
                    expansion: Cell::new(LegacyScope::Empty),
                    legacy_scope: Cell::new(LegacyScope::Empty),
                })
            });
        };

        let mut def_collector = DefCollector::new(&mut self.definitions);
        def_collector.visit_macro_invoc = Some(visit_macro_invoc);
        def_collector.with_parent(def_index, |def_collector| {
            if const_integer {
                if let Expansion::Expr(ref expr) = *expansion {
                    def_collector.visit_ast_const_integer(expr);
                }
            }
            expansion.visit_with(def_collector)
        });
    }
}
use super::Part;
use crate::codec::{Decode, Encode};
use crate::{remote_type, RemoteEnum, RemoteObject};

remote_type!(
/// A solar panel. Obtained by calling `Part::solar_panel()`.
object SpaceCenter.SolarPanel {
    properties: {
        {
            Part {
                /// Returns the part object for this solar panel.
                ///
                /// **Game Scenes**: All
                get: part -> Part
            }
        }
        {
            Deployable {
                /// Returns whether the solar panel is deployable.
                ///
                /// **Game Scenes**: All
                get: is_deployable -> bool
            }
        }
        {
            Deployed {
                /// Returns whether the solar panel is extended.
                ///
                /// **Game Scenes**: All
                get: is_deployed -> bool,
                /// Sets whether the solar panel is extended.
                ///
                /// **Game Scenes**: All
                set: set_deployed(bool)
            }
        }
        {
            State {
                /// Returns the current state of the solar panel.
                ///
                /// **Game Scenes**: All
                get: state -> SolarPanelState
            }
        }
        {
            EnergyFlow {
                /// Returns the current amount of energy being generated by the solar panel,
                /// in units of charge per second.
                ///
                /// **Game Scenes**: All
                get: energy_flow -> f32
            }
        }
        {
            SunExposure {
                /// Returns the current amount of sunlight that is incident on the solar panel, as
                /// a percentage. A value between 0 and 1.
                ///
                /// **Game Scenes**: All
                get: sun_exposure -> f32
            }
        }
    }
});

remote_type!(
/// The state of a solar panel.
enum SolarPanelState {
    /// Solar panel is fully extended.
    Extended = 0,
    /// Solar panel is fully retracted.
    Retracted = 1,
    /// Solar panel is being extended.
    Extending = 2,
    /// Solar panel is being retracted.
    Retracting = 3,
    /// Solar panel is broken.
    Broken = 4,
});
// `without_binary_errors_badarg` in unit tests // `with_binary_without_integer_start_errors_badarg` in unit tests // `with_binary_with_positive_integer_start_without_integer_stop_errors_badarg` in unit tests test_stdout!( with_binary_with_start_less_than_or_equal_to_stop_returns_list_of_bytes, "[0]\n[0, 1]\n[0, 1, 2]\n[1]\n[1, 2]\n[2]\n" ); // `with_binary_with_start_greater_than_stop_errors_badarg` in unit tests
/*!
*xmlparser* is a low-level, pull-based, zero-allocation
[XML 1.0](https://www.w3.org/TR/xml/) parser.

## Example

```rust
for token in xmlparser::Tokenizer::from("<tagname name='value'/>") {
    println!("{:?}", token);
}
```

## Why a new library

The main idea of this library is to provide a fast, low-level and complete XML parser.

Unlike other XML parsers, this one can return tokens not with `&str`/`&[u8]` data, but
with `StrSpan` objects, which contain a position of the data in the original document.
This can be very useful if you want to post-process tokens even further and want to
return errors with a meaningful position.

So, this is basically an XML parser framework that can be used to write parsers for
XML-based formats, like SVG, and to construct a DOM.

At the time of writing the only option was `quick-xml` (v0.10), which does not support
DTD and token positions.

If you are looking for a more high-level solution - check out
[roxmltree](https://github.com/RazrFalcon/roxmltree).

## Benefits

- All tokens contain `StrSpan` objects which contain a position of the data in the original document.
- Good error processing. All error types contain the position (line:column) where the error occurred.
- No heap allocations.
- No dependencies.
- Tiny. ~1500 LOC and ~35KiB in the release build according to `cargo-bloat`.

## Limitations

- Currently, only ENTITY objects are parsed from the DOCTYPE. Others are ignored.
- No tree structure validation. So an XML like `<root><child></root></child>`
  will be parsed without errors. You should check for this manually.
  On the other hand `<a/><a/>` will lead to an error.
- Duplicated attributes are not an error. So an XML like `<item a="v1" a="v2"/>`
  will be parsed without errors. You should check for this manually.
- UTF-8 only.

## Safety

- The library must not panic. Any panic is considered a critical bug and should be reported.
- The library forbids unsafe code.
*/ #![cfg_attr(feature = "cargo-clippy", allow(unreadable_literal))] #![doc(html_root_url = "https://docs.rs/xmlparser/0.6.1")] #![forbid(unsafe_code)] #![warn(missing_docs)] mod error; mod stream; mod strspan; mod token; mod xml; mod xmlchar; pub use error::*; pub use stream::*; pub use strspan::*; pub use token::*; pub use xml::*; pub use xmlchar::*;
pub mod genesis_block_util;

/// Expands to the `(name, id)` tuple that identifies the storage controller
/// program: the controller's string name paired with
/// `morgan_storage_api::id()`.
#[macro_export]
macro_rules! morgan_storage_controller {
    () => {
        (
            "morgan_storage_controller".to_string(),
            morgan_storage_api::id(),
        )
    };
}

use morgan_storage_api::storage_processor::process_instruction;

// Register `process_instruction` as this program's entrypoint.
morgan_interface::morgan_entrypoint!(process_instruction);
use embedded_graphics::{
    draw_target::DrawTarget,
    prelude::{PixelColor, Point, Primitive},
    primitives::{Circle, PrimitiveStyle, PrimitiveStyleBuilder, StrokeAlignment},
    Drawable,
};
use embedded_gui::{
    geometry::{measurement::MeasureSpec, BoundingBox, MeasuredSize},
    widgets::{
        graphical::radio::{RadioButton, RadioButtonProperties},
        Widget,
    },
    WidgetRenderer,
};

use crate::{themes::Theme, EgCanvas, ToPoint};

/// Visual properties used to render a radio button with `embedded-graphics`.
pub struct RadioButtonStyle<C>
where
    C: PixelColor,
{
    /// Fill color of the outer circle.
    pub background_color: C,
    /// Stroke color of the outer circle's border.
    pub border_color: C,
    /// Fill color of the inner "selected" dot.
    pub checkmark_color: C,
    /// Border stroke width, in pixels.
    pub line_width: u32,
    /// Preferred diameter of the widget, in pixels.
    pub box_size: u32,
    /// Whether the inner dot is drawn.
    pub is_selected: bool,
}

impl<C> RadioButtonStyle<C>
where
    C: PixelColor,
{
    /// Style for the outer circle: filled background with an inside-aligned
    /// border stroke.
    fn build_box_style(&self) -> PrimitiveStyle<C> {
        PrimitiveStyleBuilder::new()
            .stroke_alignment(StrokeAlignment::Inside)
            .stroke_color(self.border_color)
            .fill_color(self.background_color)
            .stroke_width(self.line_width)
            .build()
    }

    /// Style for the inner dot: solid fill, no stroke.
    fn build_check_style(&self) -> PrimitiveStyle<C> {
        PrimitiveStyleBuilder::new()
            .fill_color(self.checkmark_color)
            .stroke_width(0)
            .build()
    }
}

impl<C> Default for RadioButtonStyle<C>
where
    C: Theme,
{
    // Defaults come from the active color `Theme`; the dot reuses the border
    // color.
    fn default() -> Self {
        Self {
            background_color: C::BACKGROUND_COLOR,
            border_color: C::BORDER_COLOR,
            checkmark_color: C::BORDER_COLOR,
            line_width: 1,
            box_size: 9,
            is_selected: false,
        }
    }
}

impl<C> RadioButtonProperties for RadioButtonStyle<C>
where
    C: PixelColor,
{
    type Color = C;

    /// The measured size is the preferred `box_size` clamped by the incoming
    /// measure spec in both dimensions.
    fn measure(&self, spec: MeasureSpec) -> MeasuredSize {
        MeasuredSize {
            width: spec.width.apply_to_measured(self.box_size),
            height: spec.height.apply_to_measured(self.box_size),
        }
    }

    fn set_border_color(&mut self, color: Self::Color) {
        self.border_color = color;
    }

    fn set_background_color(&mut self, color: Self::Color) {
        self.background_color = color;
    }

    fn set_check_mark_color(&mut self, color: Self::Color) {
        self.checkmark_color = color;
    }

    fn set_selected(&mut self, selected: bool) {
        self.is_selected = selected;
    }
}

impl<C, DT> WidgetRenderer<EgCanvas<DT>> for RadioButton<RadioButtonStyle<C>>
where
    C: PixelColor,
    DT: DrawTarget<Color = C>,
    RadioButtonStyle<C>: RadioButtonProperties,
{
    // Draws the outer circle, then the inner dot when selected.
    fn draw(&mut self, canvas: &mut EgCanvas<DT>) -> Result<(), DT::Error> {
        // TODO: this overdraws. Instead, draw inside first and border last with transparent fill.
        Circle::new(
            self.bounding_box().position.to_point(),
            self.bounding_box().size.width,
        )
        .into_styled(self.radio_properties.build_box_style())
        .draw(&mut canvas.target)?;

        if self.radio_properties.is_selected {
            let BoundingBox { position, size } = self.bounding_box();

            // Inset the dot by twice the border width on each side.
            let space = 2 * self.radio_properties.line_width;

            // NOTE(review): `size.width - 2 * space` underflows (u32) when the
            // measured width is smaller than 4 * line_width — presumably the
            // layout never produces such sizes; confirm.
            let check_bounds = Circle::new(
                Point::new(position.x + space as i32, position.y + space as i32),
                size.width - 2 * space,
            );
            check_bounds
                .into_styled(self.radio_properties.build_check_style())
                .draw(&mut canvas.target)?;
        }

        Ok(())
    }
}
use std::fmt::Debug;
use proc_monadde_macro::*;

/// Marker trait: the implementor is a type constructor applied to `T`
/// (e.g. `Option<T>`, `Vec<T>`).
pub trait Parametrized<T>{}

/// A functor over element type `T`, producing a container of `O` via `map`.
pub trait Functor<T: Sized+Copy, O: Sized+Copy+Default> {
    /// The container type holding the mapped output values.
    type UnderlyingO: Parametrized<O>;
    /// Applies `f` to every contained value, collecting the results.
    fn map<F: Fn(T) -> O>(&self, f: F) -> Self::UnderlyingO;
}

/// A monad: a functor whose mapping function may itself return a container,
/// which is then flattened.
pub trait Monad<T: Sized+Copy, O: Sized+Copy+Default> : Functor<T, O> {
    fn flat_map<F: Fn(T) -> Self::UnderlyingO>(&self, f: F) -> Self::UnderlyingO;
}

/// A functor that can drop elements while mapping: only `Some` results of `f`
/// are kept.
pub trait FilteredFunctor<T: Sized+Copy, O: Sized+Copy+Default> : Functor<T, O> {
    fn filter_map<F: Fn(T) -> Option<O>>(&self, f: F) -> Self::UnderlyingO;
}

// Generates the `monadde!` macro that `compre!` and `hx_do!` desugar into.
define_monadde_macro!();

/// Comprehension-style front end for `monadde!`:
/// `compre! { expr; x <- m1, y <- m2 }`, with an optional trailing guard
/// `compre! { expr; x <- m1, y <- m2; cond }`.
#[macro_export]
macro_rules! compre {
    ($ex:expr; $($id:ident <- $monad:expr),+) => {
        monadde! {
            $($monad => $id |>)+
            $ex
        }
    };
    ($ex:expr; $($id:ident <- $monad:expr),+; $cond: expr) => {
        monadde! {
            $($monad => $id |>)+
            when $cond =>
            $ex
        }
    }
}

/// Haskell-do-notation-style front end for `monadde!`:
/// `hx_do! { x <- m1, y <- m2; expr }`, with an optional
/// `barrier: cond;` guard before the final expression.
#[macro_export]
macro_rules! hx_do {
    ($($id:ident <- $monad:expr),+; $ex:expr) => {
        monadde! {
            $($monad => $id |>)+
            $ex
        }
    };
    ($($id:ident <- $monad:expr),+; barrier: $cond: expr; $ex:expr) => {
        monadde! {
            $($monad => $id |>)+
            when $cond =>
            $ex
        }
    }
}

#[cfg(test)]
mod test{
    use crate::{Monad, Functor, FilteredFunctor, Parametrized};

    // Reference implementations of the traits for `Option` and `Vec`, used
    // to exercise the macros below.
    impl<T: Sized+Copy> Parametrized<T> for Option<T>{}

    impl<T: Sized+Copy, O: Sized+Copy+Default> Functor<T, O> for Option<T> {
        type UnderlyingO = Option<O>;

        fn map<F: Fn(T) -> O>(&self, f: F) -> Self::UnderlyingO {
            match self {
                None => None,
                Some(x) => Some(f(*x)),
            }
        }
    }

    impl<T: Sized+Copy, O: Sized+Copy+Default> Monad<T, O> for Option<T> {
        fn flat_map<F: Fn(T) -> Self::UnderlyingO>(&self, f: F) -> Self::UnderlyingO {
            match self {
                None => None,
                Some(x) => f(*x),
            }
        }
    }

    impl<T: Sized+Copy, O: Sized+Copy+Default> FilteredFunctor<T, O> for Option<T> {
        fn filter_map<F: Fn(T) -> Option<O>>(&self, f: F) -> Self::UnderlyingO {
            match self {
                None => None,
                Some(x) => f(*x)
            }
        }
    }

    impl<T: Sized+Copy> Parametrized<T> for Vec<T> {}

    impl<T: Sized+Copy, O: Sized+Copy+Default> Functor<T, O> for Vec<T> {
        type UnderlyingO = Vec<O>;

        fn map<F: Fn(T) -> O>(&self, f: F) -> Self::UnderlyingO {
            let mut out_vec = Vec::with_capacity(self.capacity());
            for &item in self.iter() {
                out_vec.push(f(item));
            }
            out_vec
        }
    }

    impl<T: Sized+Copy, O: Sized+Copy+Default> Monad<T, O> for Vec<T> {
        fn flat_map<F: Fn(T) -> Self::UnderlyingO>(&self, f: F) -> Self::UnderlyingO {
            let mut out_vec = Vec::with_capacity(self.capacity());
            for &item in self.iter() {
                let mut foo_result = f(item);
                out_vec.append(&mut foo_result);
            }
            out_vec
        }
    }

    impl<T: Sized+Copy, O: Sized+Copy+Default> FilteredFunctor<T, O> for Vec<T> {
        fn filter_map<F: Fn(T) -> Option<O>>(&self, f: F) -> Self::UnderlyingO {
            let mut out_vec = Vec::with_capacity(self.capacity());
            for &item in self.iter() {
                match f(item) {
                    None => {},
                    Some(x) => out_vec.push(x),
                }
            }
            out_vec
        }
    }

    // All three surface syntaxes must desugar to the same `monadde!`
    // expansion; compare them on a Pythagorean-triple comprehension.
    #[test]
    fn test_simple() {
        let expected = vec![
            ( 3, 4, 5),
            ( 4, 3, 5),
            ( 5, 12, 13),
            ( 6, 8, 10),
            ( 8, 6, 10),
            ( 8, 15, 17),
            ( 9, 12, 15),
            (12, 5, 13),
            (12, 9, 15),
            (15, 8, 17)];

        let tripples = compre! {
            (x, y, z);
            x <- 1..=17, y <- 1..=17, z <- 1..=17;
            x*x + y*y == z*z
        }.collect::<Vec<_>>();
        assert_eq!(&expected, &tripples);

        let tripples = hx_do! {
            x <- 1..=17, y <- 1..=17, z <- 1..=17;
            barrier: x*x + y*y == z*z;
            (x, y, z)
        }.collect::<Vec<_>>();
        assert_eq!(&expected, &tripples);

        let tripples = monadde! {
            1..=17 => x |>
            1..=17 => y |>
            1..=17 => z |>
            when x*x + y*y == z*z =>
            (x, y, z)
        }.collect::<Vec<_>>();
        assert_eq!(&expected, &tripples);
    }

    #[test]
    fn test_options() {
        let res = monadde! {
            Some(2) => a |>
            Some(10) => b |>
            Some(1) => c |>
            a * b + c
        };
        assert_eq!(Some(21), res);

        let res = hx_do! {
            a <- Some(2), b <- None::<i32>, c <- Some(1);
            a * b + c
        };
        assert_eq!(None, res);
    }
}
//! Unit tests for the Go game engine.

use super::GoGame;
use super::stone;
use super::NEIGHBOURS;
use super::DIAG_NEIGHBOURS;
use super::VIRT_LEN;
use super::MAX_SIZE;
use super::Vertex;
use super::PASS;

extern crate rand;
use rand::SeedableRng;

#[test]
fn stone_opponent() {
    assert_eq!(stone::WHITE, stone::BLACK.opponent());
    assert_eq!(stone::BLACK, stone::WHITE.opponent());
}

#[test]
fn vertex_neighbours() {
    for col in 0..MAX_SIZE as i16 {
        for row in 0..MAX_SIZE as i16 {
            // Orthogonal neighbours. (The redundant `.clone()` on the freshly
            // built `vec![]` literals was removed — it only copied a temporary.)
            let mut expected = vec![
                GoGame::vertex(col - 1, row),
                GoGame::vertex(col + 1, row),
                GoGame::vertex(col, row - 1),
                GoGame::vertex(col, row + 1),
            ];
            expected.sort();
            let mut got = NEIGHBOURS[GoGame::vertex(col, row).as_index()].clone();
            got.sort();
            assert_eq!(expected, got);

            // Diagonal neighbours.
            let mut expected = vec![
                GoGame::vertex(col - 1, row - 1),
                GoGame::vertex(col + 1, row - 1),
                GoGame::vertex(col - 1, row + 1),
                GoGame::vertex(col + 1, row + 1),
            ];
            expected.sort();
            let mut got = DIAG_NEIGHBOURS[GoGame::vertex(col, row).as_index()].clone();
            got.sort();
            assert_eq!(expected, got);
        }
    }
}

#[test]
fn can_play_single_stone() {
    let mut game = GoGame::new(9);
    let v = GoGame::vertex(2, 2);
    game.play(stone::BLACK, v);
    assert_eq!(4, game.num_pseudo_liberties(v));
    assert_eq!(false, game.can_play(stone::BLACK, v));
}

#[test]
fn can_remove_liberties() {
    let mut game = GoGame::new(9);
    game.play(stone::BLACK, GoGame::vertex(2, 2));
    game.play(stone::WHITE, GoGame::vertex(3, 2));
    assert_eq!(3, game.num_pseudo_liberties(GoGame::vertex(2, 2)));
    assert_eq!(3, game.num_pseudo_liberties(GoGame::vertex(3, 2)));
}

#[test]
fn can_join_strings() {
    let mut game = GoGame::new(9);
    let v = GoGame::vertex(2, 2);
    game.play(stone::BLACK, GoGame::vertex(2, 2));
    game.play(stone::BLACK, GoGame::vertex(3, 2));
    assert_eq!(6, game.num_pseudo_liberties(v));
}

#[test]
fn can_capture_single_stone() {
    let mut game = GoGame::new(9);
    game.play(stone::WHITE, GoGame::vertex(2, 2));
    game.play(stone::BLACK, GoGame::vertex(1, 2));
    game.play(stone::BLACK, GoGame::vertex(3, 2));
    game.play(stone::BLACK, GoGame::vertex(2, 1));
    game.play(stone::BLACK, GoGame::vertex(2, 3));
    assert_eq!(stone::EMPTY, game.stone_at(GoGame::vertex(2, 2)));
}

#[test]
fn freedoms_after_capture() {
    let mut game = GoGame::new(9);
    game.play(stone::WHITE, GoGame::vertex(0, 0));
    game.play(stone::BLACK, GoGame::vertex(1, 0));
    game.play(stone::BLACK, GoGame::vertex(1, 1));
    game.play(stone::BLACK, GoGame::vertex(0, 1));
    assert_eq!(stone::EMPTY, game.stone_at(GoGame::vertex(0, 0)));
    assert_eq!(6, game.num_pseudo_liberties(GoGame::vertex(0, 1)));
}

#[test]
fn initially_all_moves_possible() {
    let game = GoGame::new(9);
    assert_eq!(game.possible_moves(stone::BLACK).len(), 81);
}

#[test]
fn forbid_filling_real_eye() {
    let mut game = GoGame::new(9);
    game.play(stone::BLACK, GoGame::vertex(0, 0));
    game.play(stone::BLACK, GoGame::vertex(0, 1));
    game.play(stone::BLACK, GoGame::vertex(0, 2));
    game.play(stone::BLACK, GoGame::vertex(1, 0));
    game.play(stone::BLACK, GoGame::vertex(1, 2));
    game.play(stone::BLACK, GoGame::vertex(2, 0));
    game.play(stone::BLACK, GoGame::vertex(2, 1));
    assert_eq!(false, game.can_play(stone::BLACK, GoGame::vertex(1, 1)));
}

#[test]
fn forbid_filling_real_eyes_of_split_group() {
    let mut game = GoGame::new(9);
    game.play(stone::BLACK, GoGame::vertex(0, 0));
    game.play(stone::BLACK, GoGame::vertex(0, 2));
    game.play(stone::BLACK, GoGame::vertex(1, 1));
    game.play(stone::BLACK, GoGame::vertex(1, 2));
    game.play(stone::BLACK, GoGame::vertex(2, 0));
    game.play(stone::BLACK, GoGame::vertex(2, 1));
    assert_eq!(false, game.can_play(stone::BLACK, GoGame::vertex(0, 1)));
    assert_eq!(false, game.can_play(stone::BLACK, GoGame::vertex(1, 0)));
}

#[test]
fn uniform_move_distribution() {
    let mut rng = rand::StdRng::from_seed(&[42]);
    let game = GoGame::new(9);
    let num_valid_moves = game.possible_moves(stone::BLACK).len() as f64;
    let num_samples = 100000;
    let mut count = vec![0; VIRT_LEN];
    for _ in 0..num_samples {
        count[game.random_move(stone::BLACK, &mut rng).as_index()] += 1;
    }
    for v in game.possible_moves(stone::BLACK) {
        let frac = count[v.as_index()] as f64 / num_samples as f64 * num_valid_moves;
        // Pass the format arguments directly instead of a pre-built
        // `format!` value; the message is only rendered on failure.
        assert!(frac > 0.9 && frac < 1.1, "{}", frac);
    }
}

#[test]
fn chinese_score_full_board() {
    let mut game = GoGame::new(9);
    for v in game.possible_moves(stone::BLACK) {
        game.play(stone::BLACK, v);
    }
    assert_eq!(9 * 9, game.chinese_score());
}

#[test]
fn chinese_score_also_count_eyes() {
    let mut game = GoGame::new(9);
    for col in 0..MAX_SIZE as i16 {
        for row in 0..MAX_SIZE as i16 {
            // BUG FIX: the condition was `col + row % 2 == 0`, which parses
            // as `col + (row % 2) == 0` (`%` binds tighter than `+`) and only
            // played stones on part of the first column. The intended
            // checkerboard pattern — so half the points are one-point eyes —
            // requires the explicit parentheses below.
            if (col + row) % 2 == 0 {
                game.play(stone::BLACK, GoGame::vertex(col, row));
            }
        }
    }
    assert_eq!(9 * 9, game.chinese_score());
}

#[test]
fn reset_game() {
    let mut game = GoGame::new(9);
    assert_eq!(81, game.possible_moves(stone::BLACK).len());
    game.play(stone::BLACK, GoGame::vertex(0, 0));
    assert_eq!(80, game.possible_moves(stone::BLACK).len());
    game.reset();
    assert_eq!(81, game.possible_moves(stone::BLACK).len());
}

#[test]
fn parse_vertex() {
    // Every vertex must round-trip through its Display/FromStr forms.
    for col in 0..MAX_SIZE as i16 {
        for row in 0..MAX_SIZE as i16 {
            assert_eq!(
                GoGame::vertex(col, row),
                format!("{}", GoGame::vertex(col, row)).parse::<Vertex>().unwrap()
            );
        }
    }
    assert_eq!(PASS, format!("{}", PASS).parse::<Vertex>().unwrap());
}

#[test]
fn can_undo() {
    let mut game = GoGame::new(9);
    game.play(stone::BLACK, GoGame::vertex(0, 0));
    game.play(stone::WHITE, GoGame::vertex(1, 0));
    game.play(stone::BLACK, GoGame::vertex(4, 4));
    let mut second_game = game.clone();
    game.play(stone::WHITE, GoGame::vertex(0, 1));
    game.undo(1);
    assert_eq!(format!("{:?}", second_game), format!("{:?}", game));
    // Replaying the undone move must lead to the identical position.
    game.play(stone::WHITE, GoGame::vertex(0, 1));
    second_game.play(stone::WHITE, GoGame::vertex(0, 1));
    assert_eq!(format!("{:?}", second_game), format!("{:?}", game));
}
use std::cmp; use std::fmt; use std::iter; use std::ops::{Index, Range}; use unicode_width::UnicodeWidthStr; use crate::buffer::units::{ByteIndex, BytePosition, CharPosition}; /// Underlying storage for the buffer contents. /// /// The storage contains at least one (empty) line. #[derive(Debug, PartialEq, Eq)] pub struct Storage { /// The contents of the storage. /// /// Unix-style newlines ("\n") are implicitly inserted between each line. Lines themselves /// cannot contain `\n`. lines: Vec<String>, } impl Storage { /// Returns a new `Storage` with a single empty line. pub fn new() -> Self { Self { lines: vec![String::new()], } } /// Returns the number of lines. pub fn lines(&self) -> usize { self.lines.len() } /// Returns the total byte length of the buffer. pub fn len(&self) -> usize { let mut len = 0; for line in &self.lines { len += line.len() + 1 } len } /// Returns width of a given line in columns. pub fn line_width(&self, line: usize) -> usize { self.lines[line].width() } /// Returns an iterator over the lines of the storage. pub fn iter_lines(&self) -> impl Iterator<Item = &str> { self.lines.iter().map(|line| &**line) } /// Return a slice of the underlying text starting at the given position. /// /// The slice returned may be of any length. pub fn slice_at(&self, pos: BytePosition) -> impl AsRef<[u8]> + '_ { if pos.y == self.lines() { return ""; } let line = &self.lines[pos.y]; if pos.x == line.len() { "\n" } else { &line[pos.x..] } } /// Returns the character position of a given byte. The byte must lie on a character boundary. 
pub fn byte_to_char_position(&self, byte: ByteIndex) -> CharPosition { let byte_position = self.position_of_byte(byte); let line = &self.lines[byte_position.y]; assert!(line.is_char_boundary(byte_position.x)); let char_index = line .char_indices() .chain(iter::once((line.len(), '\n'))) .enumerate() .find_map(|(char_count, (char_index, _))| { if byte_position.x == char_index { Some(char_count) } else { None } }) .expect("byte position not on character boundary"); CharPosition::new(char_index, byte_position.y) } /// Returns the row and column of a byte index. pub fn position_of_byte(&self, byte: ByteIndex) -> BytePosition { let byte = byte.0; assert!(byte < self.len()); let mut remaining = byte; for (row, line) in self.iter_lines().enumerate() { if remaining <= line.len() { return BytePosition::new(remaining, row); } remaining -= line.len() + 1; } unreachable!(); } /// Replace a byte range in the buffer with a replacement string, like /// [`String::replace_range`]. pub fn replace_range(&mut self, range: Range<usize>, replacement: &str) { // Find the line containing the start of the byte range, and the byte offset from the // start of the line. let mut line_no = 0; let mut byte_offset = range.start; while byte_offset > self.lines[line_no].len() { byte_offset -= self.lines[line_no].len() + 1; line_no += 1; } // Delete any text that is inside the range. let mut bytes_to_consume = range.len(); while bytes_to_consume > 0 { let bytes_to_remove = cmp::min(self.lines[line_no][byte_offset..].len(), bytes_to_consume); self.lines[line_no].replace_range(byte_offset..(byte_offset + bytes_to_remove), ""); bytes_to_consume -= bytes_to_remove; if bytes_to_consume > 0 { // Remove the newline. let next_line = self.lines.remove(line_no + 1); self.lines[line_no].insert_str(byte_offset, &next_line); bytes_to_consume -= 1; } } // Insert the new text. if !replacement.contains('\n') { // Fast path. Just insert the new text into the current line. 
self.lines[line_no].insert_str(byte_offset, replacement); } else { // We're going to add at least one new line into the underlying lines array. Start by // splitting the current line into two at the insertion point. let end = self.lines[line_no].split_off(byte_offset); self.lines.insert(line_no + 1, end); let mut new_lines = replacement.lines().peekable(); // The first new line is appended at the insertion point. let first_new_line = new_lines .next() .expect("checked replacement text contains newline above"); self.lines[line_no].push_str(first_new_line); while let Some(new_line) = new_lines.next() { line_no += 1; if new_lines.peek().is_some() { // Middle new lines, if any, are inserted as their own lines. self.lines.insert(line_no, new_line.to_owned()); } else { // The last new line is prepended to line split after the insertion point. self.lines[line_no].insert_str(0, new_line); } } } } } impl From<Vec<String>> for Storage { fn from(lines: Vec<String>) -> Self { Self { lines: if lines.is_empty() { vec![String::new()] } else { lines }, } } } impl<'a> From<&'a str> for Storage { fn from(s: &str) -> Self { Self { lines: s.lines().map(|line| line.to_owned()).collect(), } } } impl fmt::Display for Storage { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { for line in &self.lines { writeln!(f, "{}", line)?; } Ok(()) } } impl Index<Range<BytePosition>> for Storage { type Output = str; fn index(&self, Range { start, end }: Range<BytePosition>) -> &Self::Output { assert!( start.y == end.y, "cannot index across rows: {:?}", start..end ); &self.lines[start.y][start.x..end.x] } } #[cfg(test)] mod tests { use indoc::indoc; use super::{ByteIndex, BytePosition, Storage}; #[test] fn from_empty_lines() { let storage = Storage::from(vec![]); assert_eq!(storage.lines, vec![String::new()]); } #[test] fn line_width() { let storage = Storage::from(indoc! 
{" aeioucsz áéíóúčšž 台北1234 QRS12 アイウ12345 "}); for line in 0..storage.lines() { assert_eq!(storage.line_width(line), 8); } } #[test] fn position_of_byte_zero() { let storage = Storage::new(); assert_eq!( storage.position_of_byte(ByteIndex::new(0)), BytePosition::zero() ); } #[test] fn position_of_byte_after_empty_line() { let storage = Storage::from("\na\n"); assert_eq!( storage.position_of_byte(ByteIndex::new(1)), BytePosition::new(0, 1) ); } #[test] fn position_of_byte_beginning_of_line() { let storage = Storage::from("a\nb\nc"); assert_eq!( storage.position_of_byte(ByteIndex::new(2)), BytePosition::new(0, 1) ); } #[test] fn replace_range_deletion() { let mut storage = Storage::from("Goodbye, cruel world!"); storage.replace_range(8..14, ""); assert_eq!(storage.to_string(), "Goodbye, world!\n"); } #[test] fn replace_range_middle() { let mut storage = Storage::from(indoc! {" a b c one three "}); storage.replace_range(10..10, "two "); assert_eq!(storage.to_string(), "a b c\none two three\n"); } #[test] fn replace_range_delete_newline() { let mut storage = Storage::from("this is not \none line"); storage.replace_range(8..13, ""); assert_eq!(storage.to_string(), "this is one line\n"); } #[test] fn replace_range_replacement_contains_newlines() { let mut storage = Storage::from("ae"); storage.replace_range(1..1, "b\nc\nd"); assert_eq!(storage.to_string(), "ab\nc\nde\n"); } #[test] fn replace_range_at_end_of_line() { let mut storage = Storage::from("a\n"); storage.replace_range(1..1, "b"); assert_eq!(storage.to_string(), "ab\n"); } }
extern crate zbx_sender;

use zbx_sender::{Response, Result, Sender};

use std::env;

/// Sends a single hard-coded (host, key, value) item to the Zabbix trapper
/// at `command`:10051 and returns the server's response.
fn send_one_value(command: &str) -> Result<Response> {
    let sender = Sender::new(command.to_owned(), 10051);
    sender.send(("host1", "key1", "value"))
}

fn main() {
    // The first CLI argument is the Zabbix server address. A missing
    // argument is a usage error, not a bug: print the usage line to stderr
    // and exit with a conventional non-zero status instead of panicking.
    let command = match env::args().nth(1) {
        Some(cmd) => cmd,
        None => {
            let name = env::args()
                .next()
                .unwrap_or_else(|| "zbx_sender".to_owned());
            eprintln!("Usage: {} [command]", name);
            std::process::exit(1);
        }
    };

    match send_one_value(&command) {
        Ok(response) => println!("{:?} is success {} ", response, response.success()),
        Err(e) => println!("Error {}", e),
    }
}
use std::io::Read;

// Advent-of-Code "Combat" card game: two players repeatedly reveal their top
// card; the higher card's owner keeps both (own card first). Reads the two
// decks from stdin (blank-line-separated blocks, first line of each block is
// the player header) and prints the winner's score, where the bottom card
// counts once, the next one twice, and so on.
fn main() {
    let mut raw = String::new();
    std::io::stdin().read_to_string(&mut raw).unwrap();

    // One queue of cards per player.
    let mut hands: Vec<std::collections::VecDeque<u8>> = raw
        .split("\n\n")
        .map(|block| {
            block
                .lines()
                .skip(1)
                .map(|line| line.parse().unwrap())
                .collect()
        })
        .collect();

    // Play rounds until one player runs out of cards.
    loop {
        if hands.iter().any(|hand| hand.is_empty()) {
            break;
        }
        let drawn: Vec<_> = hands
            .iter_mut()
            .map(|hand| hand.pop_front().unwrap())
            .collect();
        let winner = drawn
            .iter()
            .enumerate()
            .max_by_key(|&(_, card)| card)
            .unwrap()
            .0;
        hands[winner].push_back(drawn[winner]);
        hands[winner].push_back(drawn[1 - winner]);
    }

    let score = hands
        .iter()
        .map(|hand| {
            hand.iter()
                .rev()
                .zip(1..)
                .map(|(&card, weight)| weight * card as usize)
                .sum::<usize>()
        })
        .max()
        .unwrap();
    println!("{}", score);
}
use std::fmt;

use super::super::geometry::Geometry;
use super::Item;

/// A wall item: a placement geometry plus a width and a height.
pub struct Wall {
    pub geo: Geometry,
    pub width: i32,
    pub height: i32,
}

impl Item for Wall {
    /// Returns the wall's placement geometry.
    fn geometry(&self) -> &Geometry {
        &self.geo
    }
}

impl fmt::Display for Wall {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // BUG FIX: the format string ended with an unmatched ')'
        // ("... height:{})"); the stray parenthesis is removed so the
        // rendered text is balanced.
        write!(
            f,
            "Wall: {} width:{}, height:{}",
            self.geo, self.width, self.height
        )
    }
}
//! An efficient data structure to sample from a discrete, fixed distribution.
//!
//! See: <https://www.keithschwarz.com/darts-dice-coins/> for an explanation.

use std::fmt::Debug;

use fastrand::Rng;

/// An efficient data structure to sample from a discrete, fixed distribution.
///
/// ```
/// use fuzzcheck::mutators::vose_alias::VoseAlias;
///
/// // the discrete distribution is a vector of floats which must add up to 1.0
/// let probabilities = vec![0.5, 0.1, 0.2, 0.2];
/// // create the Vose alias. The `probabilities` vector is moved to `alias.original_probabilities`.
/// let alias = VoseAlias::new(probabilities);
///
/// // index has a 50% chance of being 0, 10% chance of being 1, 20% chance of being 2, and 20% chance of being 3
/// let index = alias.sample();
///
/// assert!((0 .. 4).contains(&index));
/// ```
#[derive(Debug, Clone)]
pub struct VoseAlias {
    /// The (unnormalized) distribution this alias was built from.
    pub original_probabilities: Vec<f64>,
    // Alias table: fallback bucket index for each slot.
    alias: Vec<usize>,
    // Acceptance threshold for each slot, scaled to [0, 1].
    prob: Vec<f64>,
    rng: Rng,
}

impl PartialEq for VoseAlias {
    // Note: equality is defined on the derived tables only; the RNG state
    // and `original_probabilities` are deliberately not compared.
    #[no_coverage]
    fn eq(&self, other: &Self) -> bool {
        self.alias.eq(&other.alias) && self.prob.eq(&other.prob)
    }
}

// implementation from https://www.keithschwarz.com/darts-dice-coins/
impl VoseAlias {
    /// Create a new Vose alias with the given discrete probability distribution.
    ///
    /// Important: the probabilities must sum up to ~ 1.0
    #[no_coverage]
    pub fn new(mut probabilities: Vec<f64>) -> VoseAlias {
        let original_probabilities = probabilities.clone();
        // Step 0: ensure sum of probabilities is equal to 1
        // (renormalize when it is not, then sanity-check the result).
        assert!(!probabilities.is_empty());
        let sum = probabilities.iter().fold(
            0.0,
            #[no_coverage]
            |sum, p| sum + p,
        );
        #[allow(clippy::float_cmp)]
        if sum != 1.0 {
            for p in &mut probabilities {
                *p /= sum;
            }
        }
        let sum = probabilities.iter().fold(
            0.0,
            #[no_coverage]
            |sum, p| sum + p,
        );
        assert!((sum - 1.0).abs() < 0.1);

        // Step 1 and 2
        let size = probabilities.len();
        let mut small = Vec::with_capacity(size);
        let mut large = Vec::with_capacity(size);
        let mut alias: Vec<usize> = vec![0; size];
        let mut prob: Vec<f64> = vec![0.0; size];

        // Step 3 and 4: scale by `size` and partition into under-/over-full.
        for (i, p) in probabilities.iter_mut().enumerate() {
            *p *= size as f64;
            if *p < 1.0 {
                small.push(i);
            } else {
                large.push(i);
            }
        }

        // Step 5, 6, 7: pair each under-full slot with an over-full donor.
        loop {
            match (small.pop(), large.pop()) {
                // Step 5
                (Some(l), Some(g)) => {
                    let p_l = probabilities[l];
                    prob[l] = p_l; // 5.3
                    alias[l] = g; // 5.4
                    let p_g = probabilities[g];
                    let p_g = (p_g + p_l) - 1.0;
                    probabilities[g] = p_g; // 5.5
                    if p_g < 1.0 {
                        small.push(g); // 5.6
                    } else {
                        large.push(g); // 5.7
                    }
                }
                // Step 7
                (Some(l), None) => {
                    prob[l] = 1.0;
                }
                // Step 6
                (None, Some(g)) => {
                    prob[g] = 1.0;
                }
                (None, None) => break,
            }
        }

        VoseAlias {
            original_probabilities,
            alias,
            prob,
            rng: Rng::default(),
        }
    }

    /// Sample the Vose alias.
    ///
    /// It returns an index within `0` .. `original_probabilities.len()`.
    #[no_coverage]
    pub fn sample(&self) -> usize {
        // Step 1
        let i = self.rng.usize(..self.prob.len());
        // Step 2
        // SAFETY: `i` was drawn from `0..self.prob.len()`, so it is in bounds.
        if self.rng.f64() <= unsafe { *self.prob.get_unchecked(i) } {
            // Step 3
            i
        } else {
            // Step 4
            // SAFETY: `alias` and `prob` have the same length (both built
            // with `vec![_; size]` in `new`), so `i` is in bounds here too.
            unsafe { *self.alias.get_unchecked(i) }
        }
    }
}

#[cfg(test)]
mod tests {
    use super::VoseAlias;

    // NOTE(review): the two tests below are identical and only print the
    // observed counts without asserting on them — consider deduplicating or
    // adding tolerance-based assertions.
    #[test]
    #[no_coverage]
    fn test_probabilities_1() {
        let alias = VoseAlias::new(vec![0.1, 0.4, 0.2, 0.3]);
        let mut choices = vec![0, 0, 0, 0];
        for _ in 0..100_000 {
            let i = alias.sample();
            choices[i] += 1;
        }
        println!("{:?}", choices);
    }

    #[test]
    #[no_coverage]
    fn test_probabilities_2() {
        let alias = VoseAlias::new(vec![0.1, 0.4, 0.2, 0.3]);
        let mut choices = vec![0, 0, 0, 0];
        for _ in 0..100_000 {
            let i = alias.sample();
            choices[i] += 1;
        }
        println!("{:?}", choices);
    }
}
use crate::prelude::*; use std::hash::{Hash, Hasher}; use std::os::raw::c_void; use std::ptr; #[repr(C)] #[derive(Debug, PartialEq)] pub struct VkSamplerCreateInfo { pub sType: VkStructureType, pub pNext: *const c_void, pub flags: VkSamplerCreateFlagBits, pub magFilter: VkFilter, pub minFilter: VkFilter, pub mipmapMode: VkSamplerMipmapMode, pub addressModeU: VkSamplerAddressMode, pub addressModeV: VkSamplerAddressMode, pub addressModeW: VkSamplerAddressMode, pub mipLodBias: f32, pub anisotropyEnable: VkBool32, pub maxAnisotropy: f32, pub compareEnable: VkBool32, pub compareOp: VkCompareOp, pub minLod: f32, pub maxLod: f32, pub borderColor: VkBorderColor, pub unnormalizedCoordinates: VkBool32, } impl VkSamplerCreateInfo { pub fn new<T>( flags: T, mag_filter: VkFilter, min_filter: VkFilter, mipmap_mode: VkSamplerMipmapMode, address_mode_u: VkSamplerAddressMode, address_mode_v: VkSamplerAddressMode, address_mode_w: VkSamplerAddressMode, mip_lod_bias: f32, anisotropy_enable: bool, max_anisotropy: f32, compare_enable: bool, compare_op: VkCompareOp, min_lod: f32, max_lod: f32, border_color: VkBorderColor, unnormalized_coordinates: bool, ) -> Self where T: Into<VkSamplerCreateFlagBits>, { VkSamplerCreateInfo { sType: VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, pNext: ptr::null(), flags: flags.into(), magFilter: mag_filter, minFilter: min_filter, mipmapMode: mipmap_mode, addressModeU: address_mode_u, addressModeV: address_mode_v, addressModeW: address_mode_w, mipLodBias: mip_lod_bias, anisotropyEnable: anisotropy_enable.into(), maxAnisotropy: max_anisotropy, compareEnable: compare_enable.into(), compareOp: compare_op, minLod: min_lod, maxLod: max_lod, borderColor: border_color, unnormalizedCoordinates: unnormalized_coordinates.into(), } } } impl Eq for VkSamplerCreateInfo {} impl Hash for VkSamplerCreateInfo { fn hash<H: Hasher>(&self, state: &mut H) { // ignore pNext and structure type self.flags.hash(state); self.magFilter.hash(state); self.minFilter.hash(state); 
self.mipmapMode.hash(state); self.addressModeU.hash(state); self.addressModeV.hash(state); self.addressModeW.hash(state); self.anisotropyEnable.hash(state); self.compareEnable.hash(state); self.compareOp.hash(state); self.borderColor.hash(state); self.unnormalizedCoordinates.hash(state); // cast f32 into native endian byte slice self.mipLodBias.to_ne_bytes().hash(state); self.maxAnisotropy.to_ne_bytes().hash(state); self.minLod.to_ne_bytes().hash(state); self.maxLod.to_ne_bytes().hash(state); } }
/// An identifier; currently a plain alias for `String`.
pub type Id = String;
use super::VarResult; use crate::ast::stat_expr_types::VarIndex; use crate::ast::syntax_type::{FunctionType, FunctionTypes, SimpleSyntaxType, SyntaxType}; use crate::helper::err_msgs::*; use crate::helper::str_replace; use crate::helper::{ ge1_param_i64, move_element, pine_ref_to_bool, pine_ref_to_f64, pine_ref_to_f64_series, pine_ref_to_i64, }; use crate::runtime::context::{downcast_ctx, Ctx}; use crate::runtime::InputSrc; use crate::types::{ downcast_pf_ref, int2float, Arithmetic, Callable, CallableFactory, Evaluate, EvaluateVal, Float, Int, PineRef, RefData, RuntimeErr, Series, SeriesCall, NA, }; use std::mem; use std::rc::Rc; pub fn series_rma<'a>( src: Float, length: i64, rma: &mut Series<Float>, ) -> Result<Float, RuntimeErr> { let val = rma_func(src, length, rma.at(1))?; rma.update(val); Ok(val) } pub fn series_ema<'a>( src: Float, length: i64, ema: &mut Series<Float>, ) -> Result<Float, RuntimeErr> { let val = ema_func(src, length, ema.at(1))?; ema.update(val); Ok(val) } pub fn ema_func<'a>(source: Float, length: i64, prev_val: Float) -> Result<Float, RuntimeErr> { let mut sum = 0f64; let alpha = 2f64 / (length + 1) as f64; match source { Some(val) => { sum = alpha * val + (1f64 - alpha) * prev_val.unwrap_or(0f64); } None => { return Ok(None); } } Ok(Some(sum)) } pub fn rma_func<'a>(source: Float, length: i64, prev_val: Float) -> Result<Float, RuntimeErr> { let mut sum = 0f64; let alpha = length as f64; println!("with rma {:?} {:?} {:?}", source, prev_val, length); match source { Some(val) => { sum = val + (alpha - 1f64) * prev_val.unwrap_or(0f64); sum /= alpha; } None => { return Ok(None); } } Ok(Some(sum)) } #[derive(Debug, Clone, PartialEq)] struct EmaVal { prev_val: Float, ma_func: *mut (), } impl EmaVal { pub fn new(ma_func: *mut ()) -> EmaVal { EmaVal { prev_val: None, ma_func, } } } impl<'a> SeriesCall<'a> for EmaVal { fn step( &mut self, _ctx: &mut dyn Ctx<'a>, mut param: Vec<Option<PineRef<'a>>>, _func_type: FunctionType<'a>, ) -> 
Result<PineRef<'a>, RuntimeErr> { move_tuplet!((source, length) = param); let source = pine_ref_to_f64(source); let length = ge1_param_i64("length", pine_ref_to_i64(length))?; let func = unsafe { mem::transmute::<_, fn(Float, i64, Float) -> Result<Float, RuntimeErr>>(self.ma_func) }; let val = func(source, length, mem::replace(&mut self.prev_val, None))?; self.prev_val = val; Ok(PineRef::new(Series::from(val))) } fn copy(&self) -> Box<dyn SeriesCall<'a> + 'a> { Box::new(self.clone()) } } pub const VAR_NAME: &'static str = "ema"; fn declare_ma_var<'a>(name: &'static str, factory: fn() -> Callable<'a>) -> VarResult<'a> { let value = PineRef::new(CallableFactory::new(factory)); let func_type = FunctionTypes(vec![FunctionType::new(( vec![ ("source", SyntaxType::float_series()), ("length", SyntaxType::int()), ], SyntaxType::float_series(), ))]); let syntax_type = SyntaxType::Function(Rc::new(func_type)); VarResult::new(value, syntax_type, name) } pub fn declare_ema_var<'a>() -> VarResult<'a> { declare_ma_var("ema", || { Callable::new(None, Some(Box::new(EmaVal::new(ema_func as *mut ())))) }) } pub fn declare_rma_var<'a>() -> VarResult<'a> { declare_ma_var("rma", || { Callable::new(None, Some(Box::new(EmaVal::new(rma_func as *mut ())))) }) } #[cfg(test)] mod tests { use super::*; use crate::ast::syntax_type::SyntaxType; use crate::runtime::VarOperate; use crate::runtime::{AnySeries, NoneCallback}; use crate::types::Series; use crate::{LibInfo, PineParser, PineRunner}; // use crate::libs::{floor, exp, }; #[test] fn alma_test() { let lib_info = LibInfo::new( vec![declare_ema_var(), declare_rma_var()], vec![("close", SyntaxType::float_series())], ); let src = "m1 = ema(close, 3)\nm2 = rma(close, 2)\n"; let blk = PineParser::new(src, &lib_info).parse_blk().unwrap(); let mut runner = PineRunner::new(&lib_info, &blk, &NoneCallback()); runner .run( &vec![( "close", AnySeries::from_float_vec(vec![None, Some(10f64), Some(20f64)]), )], None, ) .unwrap(); assert_eq!( 
runner.get_context().move_var(VarIndex::new(0, 0)), Some(PineRef::new(Series::from_vec(vec![ None, Some(5f64), Some(12.5f64) ]))) ); assert_eq!( runner.get_context().move_var(VarIndex::new(1, 0)), Some(PineRef::new(Series::from_vec(vec![ None, Some(5f64), Some(12.5f64) ]))) ); } }
mod builder_test;
mod macro_test;

/// Forces the `colored` crate to always emit ANSI escape codes, overriding
/// its TTY auto-detection, so test output is deterministic under any runner.
fn setup_test_env() {
    colored::control::set_override(true);
}
use std::os::raw::c_double;

/// FFI mirror of the MoltenVK `VkMVKSwapchainPerformance` structure
/// (`#[repr(C)]` to match the C layout); fields carry frame-timing
/// statistics as C doubles.
#[repr(C)]
#[derive(Debug)]
pub struct VkMVKSwapchainPerformance {
    pub lastFrameInterval: c_double,
    pub averageFrameInterval: c_double,
    pub averageFramesPerSecond: c_double,
}
use {
    crate::switchboard::base::*,
    crate::switchboard::hanging_get_handler::{HangingGetHandler, Sender},
    crate::switchboard::switchboard_impl::SwitchboardImpl,
    fidl_fuchsia_settings::{
        DeviceRequest, DeviceRequestStream, DeviceSettings, DeviceWatchResponder,
    },
    fuchsia_async as fasync,
    fuchsia_syslog::fx_log_err,
    futures::lock::Mutex,
    futures::TryStreamExt,
    parking_lot::RwLock,
    std::sync::Arc,
};

/// Lets the hanging-get machinery push `DeviceSettings` out through a FIDL
/// watch responder; a send failure is logged, not propagated.
impl Sender<DeviceSettings> for DeviceWatchResponder {
    fn send_response(self, data: DeviceSettings) {
        match self.send(data) {
            Ok(_) => {}
            Err(e) => fx_log_err!("failed to send device info, {:#?}", e),
        }
    }
}

/// Converts the switchboard's device response into the FIDL settings table.
/// Only `build_tag` is populated. Panics on any variant other than
/// `SettingResponse::Device` — callers must route only device responses here.
impl From<SettingResponse> for DeviceSettings {
    fn from(response: SettingResponse) -> Self {
        if let SettingResponse::Device(info) = response {
            let mut device_settings = fidl_fuchsia_settings::DeviceSettings::empty();
            device_settings.build_tag = Some(info.build_tag);
            device_settings
        } else {
            panic!("incorrect value sent to device handler");
        }
    }
}

/// Spawns an async task that services `DeviceRequest`s from `stream`,
/// delegating `Watch` calls to a hanging-get handler bound to
/// `SettingType::Device`. The task ends when the stream closes or errors.
pub fn spawn_device_fidl_handler(
    switchboard_handle: Arc<RwLock<SwitchboardImpl>>,
    mut stream: DeviceRequestStream,
) {
    type DeviceHangingGetHandler =
        Arc<Mutex<HangingGetHandler<DeviceSettings, DeviceWatchResponder>>>;
    let hanging_get_handler: DeviceHangingGetHandler =
        HangingGetHandler::create(switchboard_handle, SettingType::Device);

    fasync::spawn(async move {
        while let Ok(Some(req)) = stream.try_next().await {
            // Support future expansion of FIDL
            #[allow(unreachable_patterns)]
            match req {
                DeviceRequest::Watch { responder } => {
                    let mut hanging_get_lock = hanging_get_handler.lock().await;
                    hanging_get_lock.watch(responder).await;
                }
                _ => {
                    fx_log_err!("Unsupported DeviceRequest type");
                }
            }
        }
    });
}
use serde::{Serialize, Deserialize}; #[derive(Clone, Copy, Serialize, Deserialize, Debug)] pub struct Point { pub x: f64, pub y: f64, } #[derive(Clone, Serialize, Deserialize, Debug)] pub struct Polygon{ pub points: Vec<Point>, pub edges: Vec<(usize,usize)>, } pub fn modular (val: i64, n: i64) -> usize { match val { | x if x < 0 && x.abs() > n => modular(val+n, n), | x if x < 0 => (n - val.abs()) as usize, | _ => (val % n) as usize } } fn det(p: Point, q: Point) -> f64 { p.x*q.y-p.y*q.x } pub fn plane(a: Point, b: Point, c: Point) -> bool { det(a, b) + det(b, c) + det(c, a) <= 0.0 } pub fn is_right_of(a: Point, b: Point, c: Point) -> bool { det(a, b) + det(b, c) + det(c, a) <= 0.0 } pub fn is_left_of(a: Point, b: Point, c: Point) -> bool { det(a, b) + det(b, c) + det(c, a) >= 0.0 } pub fn right_colinear_left(a: Point, b: Point, c: Point) -> i8 { match det(a, b) + det(b, c) + det(c, a) { x if x < 0.0 => 1, x if x > 0.0 => -1, _ => 0, } } pub fn det_star(a: Point, b: Point, c: Point) -> f64 { det(a, b) + det(b, c) + det(c, a) } /// function returns true if the x point is in the triangle ABC /// /// /// /// pub fn in_triangle(x: Point, a: Point, b: Point, c: Point) -> bool { ((det_star(a, b, x)>= 0.0) && (det_star(b, c, x)>= 0.0)) && (det_star(c, a, x)>=0.0) } pub fn find_left_n_right_xtreme (p: &Vec<Point>) -> (usize, usize) { let mut h = 0; let mut l = 0; for i in 1..p.len()-1 { if p[h].x < p[i].x { h = i; } if p[l].x > p[i].x { l = i; } } return (l, h) } pub fn linecross (p1: Point, p2: Point, q1: Point, q2: Point) -> bool { if (det_star(p1, p2, q2) > 0.0) == (det_star(p2, p1, q1) > 0.0) && // (det_star(q1, q2, p1) > 0.0) == (det_star(q2, q1, p2) > 0.0) { true } else { false } } pub fn angle_between_3_points(a: Point, middel: Point, c: Point) -> f64 { let result = (c.y-middel.y).atan2(c.x - middel.x) - (a.y-middel.y).atan2(a.x - middel.x); if result <= 0f64 { return 2.0*std::f64::consts::PI + result } else { return result } } pub fn test() { let pi = 
std::f64::consts::PI; let l = 10; for i in 0..l+1 { for j in 0..l+1 { let a = Point{x: ((pi*2.0*i as f64)/l as f64).cos(), y: ((pi*2.0*i as f64)/l as f64).sin()}; let origo = Point{x: 0.0, y: 0.0}; let c = Point{x: ((pi*2.0*j as f64)/l as f64).cos(), y: ((pi*2.0*j as f64)/l as f64).sin()}; let angle = angle_between_3_points(a, origo, c); println! ("{} {} = {}", i, j, angle); } } }
use crate::cli;
use crate::intcode;

/// Advent of Code 2019 day 5: loads the intcode program from
/// `aoc_2019_05.txt`, then prints the program output for input 1 (part one)
/// and input 5 (part two).
pub fn run() {
    let filename = cli::aoc_filename("aoc_2019_05.txt");
    let prog = intcode::read_from_file(filename);
    println!("{:?}", intcode::one_off_output(&prog, Some(vec![1])));
    println!("{:?}", intcode::one_off_output(&prog, Some(vec![5])));
}
use tracing::error;
use warp::http::StatusCode;

use super::VmInput;
use crate::state;
use crate::state::StatePtr;
use crate::vm;

/// Terminates the VM named in `body`.
///
/// Replies 404 when no PID is tracked for that VM name, 400 carrying the
/// error text when `vm::terminate` fails, and 200 on success. Never rejects.
pub async fn handler(
    body: VmInput,
    state_ptr: StatePtr,
) -> Result<Box<dyn warp::Reply>, warp::Rejection> {
    // Unknown VM: nothing to terminate.
    if state::get_vm_pid(state_ptr.clone(), &body.vm_name)
        .await
        .is_none()
    {
        return Ok(Box::new(StatusCode::NOT_FOUND));
    };

    if let Err(e) = vm::terminate(&body.vm_name).await {
        error!("{}", e);
        return Ok(Box::new(warp::reply::with_status(
            e.to_string(),
            StatusCode::BAD_REQUEST,
        )));
    };

    Ok(Box::new(StatusCode::OK))
}
use spin::Mutex; use core::mem; extern "C" { fn gdt_flush(ptr: u32); } const MAX_ENTRIES: usize = 256; pub const KRNL_CODE_SEL: u16 = 0x8; pub const KRNL_DATA_SEL: u16 = 0x10; #[repr(C, packed)] #[derive(Copy, Clone, Default)] struct GdtEntry { limit: u16, base_low: u16, base_middle: u8, access: u8, granularity: u8, base_high: u8, } #[repr(C, packed)] #[derive(Default)] struct GdtDescriptior { limit: u16, base: u32, } struct Gdt { entries: [GdtEntry; MAX_ENTRIES], descriptor: GdtDescriptior, } lazy_static! { static ref GDT: Mutex<Gdt> = Mutex::new(Gdt { entries: [GdtEntry::default(); MAX_ENTRIES], descriptor: GdtDescriptior::default(), }); } impl Gdt { fn set_gate( self: &mut Gdt, index: usize, base: u32, limit: u16, access: u8, granularity: u8, ) { let entry = &mut self.entries[index]; entry.base_low = (base & 0xffff) as u16; entry.base_middle = ((base >> 16) & 0xff) as u8; entry.base_high = ((base >> 24) & 0xff) as u8; entry.limit = limit; entry.access = access; entry.granularity = granularity; } fn flush(self: &mut Gdt) { self.descriptor.limit = (mem::size_of::<GdtEntry>() * MAX_ENTRIES - 1) as u16; self.descriptor.base = &self.entries as *const _ as u32; unsafe { gdt_flush(&self.descriptor as *const _ as u32); } } } pub fn initialize() { let mut gdt = GDT.lock(); // setup gdt gates. gdt.set_gate(0, 0, 0x0, 0x0, 0x0); // null gdt entry gdt.set_gate(1, 0, 0xffff, 0x9a, 0xcf); // code segment gdt.set_gate(2, 0, 0xffff, 0x92, 0xcf); // data segment // flush gdt. gdt.flush(); }
use proc_macro2::TokenStream as TokenStream2;
use quote::quote;
use syn::{Ident, LitStr};

/// Emits code that binds `id` to a sorted record iterator over the BED file
/// at `path`; `size` selects the `BedN` record type and `compressed` wraps
/// the file in a gzip decoder.
pub(super) fn generate_bed_open_code(
    id: &Ident,
    path: &LitStr,
    size: usize,
    compressed: bool,
) -> TokenStream2 {
    // TODO : At this point we just assume everything is sorted, but this
    // is not actually the case
    let rec_type_id = Ident::new(format!("Bed{}", size).as_str(), path.span());
    if !compressed {
        quote! {
            let #id = grass::high_level_api::get_global_chrom_list().with(|gcl| {
                use grass::LineRecordStreamExt;
                use grass::algorithm::AssumeSorted;
                std::fs::File::open(#path).unwrap().into_record_iter::<grass::records::#rec_type_id, _>(gcl).assume_sorted()
            });
        }
    } else {
        quote! {
            let #id = grass::high_level_api::get_global_chrom_list().with(|gcl| {
                use grass::LineRecordStreamExt;
                use grass::algorithm::AssumeSorted;
                libflate::gzip::Decoder::new(std::fs::File::open(#path).unwrap()).unwrap().into_record_iter::<grass::records::#rec_type_id, _>(gcl).assume_sorted()
            });
        }
    }
}

/// Emits code that opens the SAM/BAM file at `path` (kept alive in a
/// synthetic `<id>_owned_hts_instance` binding) and binds `id` to a sorted
/// BAM record iterator over it.
pub(super) fn generate_xam_open_code(id: &Ident, path: &LitStr) -> TokenStream2 {
    // TODO : At this point we just assume everything is sorted, but this
    // is not actually the case
    let bam_file_id = Ident::new(format!("{}_owned_hts_instance", id).as_str(), id.span());
    quote! {
        let #bam_file_id = grass::records::BamFile::open(#path).unwrap();
        let #id = grass::high_level_api::get_global_chrom_list().with(|gcl| {
            use grass::ChromSet;
            use grass::algorithm::AssumeSorted;
            grass::records::BAMRecord::iter_of::<grass::chromset::LexicalChromSet>(&#bam_file_id, gcl.get_handle()).assume_sorted()
        });
    }
}

/// Emits code that opens the VCF file at `path` and binds `id` to a sorted
/// VCF record iterator over it.
/// NOTE(review): the local is still called `bam_file_id` although it holds
/// a `VcfFile` — cosmetic only, the generated identifier is derived from `id`.
pub(super) fn generate_vcf_open_code(id: &Ident, path: &LitStr) -> TokenStream2 {
    // TODO : At this point we just assume everything is sorted, but this
    // is not actually the case
    let bam_file_id = Ident::new(format!("{}_owned_hts_instance", id).as_str(), id.span());
    quote! {
        let #bam_file_id = grass::records::VcfFile::open(#path).unwrap();
        let #id = grass::high_level_api::get_global_chrom_list().with(|gcl| {
            use grass::ChromSet;
            use grass::algorithm::AssumeSorted;
            grass::records::VcfRecord::iter_of::<grass::chromset::LexicalChromSet>(&#bam_file_id, gcl.get_handle()).assume_sorted()
        });
    }
}
// Copyright 2022 Datafuse Labs. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::collections::VecDeque; use std::iter::TrustedLen; use std::mem::MaybeUninit; use std::slice::IterMut; use std::vec::IntoIter; use crate::FastHash; use crate::HashSet; use crate::HashtableKeyable; use crate::HashtableLike; use crate::PartitionedHashSet; pub struct PartitionedHashtable<Impl, const BUCKETS_LG2: u32, const HIGH_BIT: bool = true> { tables: Vec<Impl>, } impl<Impl, const BUCKETS_LG2: u32, const HIGH_BIT: bool> PartitionedHashtable<Impl, BUCKETS_LG2, HIGH_BIT> { pub fn create(tables: Vec<Impl>) -> Self { assert_eq!(tables.len(), 1 << BUCKETS_LG2); PartitionedHashtable::<Impl, BUCKETS_LG2, HIGH_BIT> { tables } } } impl<Impl: HashtableLike, const BUCKETS_LG2: u32, const HIGH_BIT: bool> PartitionedHashtable<Impl, BUCKETS_LG2, HIGH_BIT> { pub fn iter_tables_mut(&mut self) -> IterMut<'_, Impl> { self.tables.iter_mut() } pub fn into_iter_tables(self) -> IntoIter<Impl> { self.tables.into_iter() } // #Unsafe the caller must ensure that the hashtable is not used after take_inner_tables pub unsafe fn pop_first_inner_table(&mut self) -> Option<Impl> { match self.tables.is_empty() { true => None, false => Some(self.tables.remove(0)), } } } /// crc32c hash will return a 32-bit hash value even it's type is u64. /// So we just need the low-32 bit to get the bucket index. 
#[inline(always)] pub fn hash2bucket<const BUCKETS_LG2: u32, const HIGH_BIT: bool>(hash: usize) -> usize { if HIGH_BIT { (hash >> (32 - BUCKETS_LG2)) & ((1 << BUCKETS_LG2) - 1) } else { hash & ((1 << BUCKETS_LG2) - 1) } } impl<K: HashtableKeyable + FastHash, const BUCKETS_LG2: u32, const HIGH_BIT: bool> PartitionedHashSet<K, BUCKETS_LG2, HIGH_BIT> { pub fn inner_sets_mut(&mut self) -> &mut Vec<HashSet<K>> { &mut self.tables } pub fn inner_sets(&self) -> &Vec<HashSet<K>> { &self.tables } pub fn set_merge(&mut self, other: &Self) { self.tables .iter_mut() .zip(other.tables.iter()) .for_each(|(l, r)| { l.set_merge(r); }); } pub fn set_insert(&mut self, key: &K) { let hash = key.fast_hash(); let index = hash2bucket::<BUCKETS_LG2, HIGH_BIT>(hash as usize); let _ = unsafe { self.tables[index].insert_and_entry_with_hash(key, hash) }; } } impl< K: ?Sized + FastHash, V, Impl: HashtableLike<Key = K, Value = V>, const BUCKETS_LG2: u32, const HIGH_BIT: bool, > HashtableLike for PartitionedHashtable<Impl, BUCKETS_LG2, HIGH_BIT> { type Key = Impl::Key; type Value = Impl::Value; type EntryRef<'a> = Impl::EntryRef<'a> where Self: 'a, Self::Key: 'a, Self::Value: 'a; type EntryMutRef<'a> = Impl::EntryMutRef<'a> where Self: 'a, Self::Key: 'a, Self::Value: 'a; type Iterator<'a> = PartitionedHashtableIter<Impl::Iterator<'a>> where Self: 'a, Self::Key: 'a, Self::Value: 'a; type IteratorMut<'a> = PartitionedHashtableIter<Impl::IteratorMut<'a>> where Self: 'a, Self::Key: 'a, Self::Value: 'a; fn len(&self) -> usize { self.tables.iter().map(|x| x.len()).sum::<usize>() } fn bytes_len(&self) -> usize { let mut impl_bytes = 0; for table in &self.tables { impl_bytes += table.bytes_len(); } std::mem::size_of::<Self>() + impl_bytes } fn unsize_key_size(&self) -> Option<usize> { let mut key_len = 0; for table in &self.tables { key_len += table.unsize_key_size()?; } Some(key_len) } fn entry(&self, key: &Self::Key) -> Option<Self::EntryRef<'_>> { let hash = key.fast_hash(); let index = 
hash2bucket::<BUCKETS_LG2, HIGH_BIT>(hash as usize); self.tables[index].entry(key) } fn entry_mut(&mut self, key: &Self::Key) -> Option<Self::EntryMutRef<'_>> { let hash = key.fast_hash(); let index = hash2bucket::<BUCKETS_LG2, HIGH_BIT>(hash as usize); self.tables[index].entry_mut(key) } fn get(&self, key: &Self::Key) -> Option<&Self::Value> { let hash = key.fast_hash(); let index = hash2bucket::<BUCKETS_LG2, HIGH_BIT>(hash as usize); self.tables[index].get(key) } fn get_mut(&mut self, key: &Self::Key) -> Option<&mut Self::Value> { let hash = key.fast_hash(); let index = hash2bucket::<BUCKETS_LG2, HIGH_BIT>(hash as usize); self.tables[index].get_mut(key) } unsafe fn insert( &mut self, key: &Self::Key, ) -> Result<&mut MaybeUninit<Self::Value>, &mut Self::Value> { let hash = key.fast_hash(); let index = hash2bucket::<BUCKETS_LG2, HIGH_BIT>(hash as usize); self.tables[index].insert(key) } #[inline(always)] unsafe fn insert_and_entry( &mut self, key: &Self::Key, ) -> Result<Self::EntryMutRef<'_>, Self::EntryMutRef<'_>> { let hash = key.fast_hash(); let index = hash2bucket::<BUCKETS_LG2, HIGH_BIT>(hash as usize); self.tables[index].insert_and_entry_with_hash(key, hash) } #[inline(always)] unsafe fn insert_and_entry_with_hash( &mut self, key: &Self::Key, hash: u64, ) -> Result<Self::EntryMutRef<'_>, Self::EntryMutRef<'_>> { let index = hash2bucket::<BUCKETS_LG2, HIGH_BIT>(hash as usize); self.tables[index].insert_and_entry_with_hash(key, hash) } fn iter(&self) -> Self::Iterator<'_> { let mut inner = VecDeque::with_capacity(self.tables.len()); for table in &self.tables { inner.push_back(table.iter()); } PartitionedHashtableIter::create(inner) } fn clear(&mut self) { for inner_table in &mut self.tables { inner_table.clear(); } } } pub struct PartitionedHashtableIter<Impl> { inner: VecDeque<Impl>, } impl<Impl> PartitionedHashtableIter<Impl> { pub fn create(inner: VecDeque<Impl>) -> Self { PartitionedHashtableIter::<Impl> { inner } } } impl<Impl: Iterator> Iterator for 
PartitionedHashtableIter<Impl> { type Item = Impl::Item; fn next(&mut self) -> Option<Self::Item> { loop { match self.inner.front_mut() { None => { return None; } Some(front) => { if let Some(next) = front.next() { return Some(next); } } } self.inner.pop_front(); } } fn size_hint(&self) -> (usize, Option<usize>) { let mut lower = 0; let mut upper = 0; for inner in &self.inner { lower += inner.size_hint().0; upper += inner.size_hint().1.unwrap(); } (lower, Some(upper)) } } unsafe impl<Impl: TrustedLen + Iterator> TrustedLen for PartitionedHashtableIter<Impl> {}
#![allow(dead_code)]
#![allow(non_upper_case_globals)]

use super::{acpi_table::*, phys_to_virt};
use alloc::boxed::Box;
use alloc::vec::Vec;
use apic::IoApic;
use ps2_mouse::{Mouse, MouseState};
use spin::Mutex;
use trapframe::TrapFrame;

const IO_APIC_NUM_REDIRECTIONS: u8 = 120;
const TABLE_SIZE: usize = 256;

/// A registered interrupt handler callback.
pub type InterruptHandle = Box<dyn Fn() + Send + Sync>;

lazy_static! {
    // Indexed by interrupt vector; `None` means "no handler registered".
    static ref IRQ_TABLE: Mutex<Vec<Option<InterruptHandle>>> = Default::default();
}

lazy_static! {
    static ref MOUSE: Mutex<Mouse> = Mutex::new(Mouse::new());
    // Subscribers that receive the 3-byte PS/2 mouse packet.
    static ref MOUSE_CALLBACK: Mutex<Vec<Box<dyn Fn([u8; 3]) + Send + Sync>>> =
        Mutex::new(Vec::new());
}

/// Registers a callback invoked for every completed mouse packet.
#[export_name = "hal_mice_set_callback"]
pub fn mice_set_callback(callback: Box<dyn Fn([u8; 3]) + Send + Sync>) {
    MOUSE_CALLBACK.lock().push(callback);
}

/// Fans a decoded mouse state out to all registered callbacks as
/// [flags, x, y] bytes.
fn mouse_on_complete(mouse_state: MouseState) {
    debug!("mouse state: {:?}", mouse_state);
    MOUSE_CALLBACK.lock().iter().for_each(|callback| {
        callback([
            mouse_state.get_flags().bits(),
            mouse_state.get_x() as u8,
            mouse_state.get_y() as u8,
        ]);
    });
}

/// Mouse IRQ handler: reads one byte from the PS/2 data port (0x60) and
/// feeds it to the packet decoder.
fn mouse() {
    use x86_64::instructions::port::PortReadOnly;
    let mut port = PortReadOnly::new(0x60);
    // SAFETY-relevant: raw port I/O; 0x60 is the standard PS/2 data port.
    let packet = unsafe { port.read() };
    MOUSE.lock().process_packet(packet);
}

/// Initializes the mouse, the I/O APICs and the IRQ table, then wires up
/// and unmasks the timer, keyboard, mouse and COM1 legacy IRQs.
pub fn init() {
    MOUSE.lock().init().unwrap();
    MOUSE.lock().set_on_complete(mouse_on_complete);
    unsafe {
        init_ioapic();
    }
    init_irq_table();
    irq_add_handle(Timer + IRQ0, Box::new(timer));
    irq_add_handle(Keyboard + IRQ0, Box::new(keyboard));
    irq_add_handle(Mouse + IRQ0, Box::new(mouse));
    irq_add_handle(COM1 + IRQ0, Box::new(com1));
    irq_enable_raw(Keyboard, Keyboard + IRQ0);
    irq_enable_raw(Mouse, Mouse + IRQ0);
    irq_enable_raw(COM1, COM1 + IRQ0);
}

/// Fills the IRQ table with `TABLE_SIZE` empty slots.
fn init_irq_table() {
    let mut table = IRQ_TABLE.lock();
    for _ in 0..TABLE_SIZE {
        table.push(None);
    }
}

/// Masks every redirection entry on all ACPI-reported I/O APICs, plus the
/// one at the default `IOAPIC_ADDR`.
unsafe fn init_ioapic() {
    for ioapic in AcpiTable::get_ioapic() {
        info!("Ioapic found: {:#x?}", ioapic);
        let mut ip = IoApic::new(phys_to_virt(ioapic.address as usize));
        ip.disable_all();
    }
    let mut ip = IoApic::new(phys_to_virt(super::IOAPIC_ADDR));
    ip.disable_all();
}

/// Finds the I/O APIC whose global-system-interrupt range covers `irq`.
fn get_ioapic(irq: u32) -> Option<acpi::interrupt::IoApic> {
    for i in AcpiTable::get_ioapic() {
        let num_instr = core::cmp::min(
            ioapic_maxinstr(i.address).unwrap(),
            IO_APIC_NUM_REDIRECTIONS - 1,
        );
        if i.global_system_interrupt_base <= irq
            && irq <= i.global_system_interrupt_base + num_instr as u32
        {
            return Some(i);
        }
    }
    None
}

/// Maps an ACPI I/O APIC record to a driver handle at its MMIO address.
fn ioapic_controller(i: &acpi::interrupt::IoApic) -> IoApic {
    unsafe { IoApic::new(phys_to_virt(i.address as usize)) }
}

/// Top-level trap dispatcher: routes exceptions to their dedicated
/// handlers and vectors 32..=63 to the IRQ table.
#[no_mangle]
pub extern "C" fn trap_handler(tf: &mut TrapFrame) {
    trace!("Interrupt: {:#x} @ CPU{}", tf.trap_num, 0); // TODO 0 should replace in multi-core case
    match tf.trap_num as u8 {
        Breakpoint => breakpoint(),
        DoubleFault => double_fault(tf),
        PageFault => page_fault(tf),
        IRQ0..=63 => irq_handle(tf.trap_num as u8),
        _ => panic!("Unhandled interrupt {:x} {:#x?}", tf.trap_num, tf),
    }
}

/// Acknowledges the interrupt at the local APIC (EOI) and invokes the
/// registered handler; panics if none is registered for `irq`.
#[export_name = "hal_irq_handle"]
pub fn irq_handle(irq: u8) {
    use super::{LocalApic, XApic, LAPIC_ADDR};
    let mut lapic = unsafe { XApic::new(phys_to_virt(LAPIC_ADDR)) };
    lapic.eoi();
    let table = IRQ_TABLE.lock();
    match &table[irq as usize] {
        Some(f) => f(),
        None => panic!("unhandled external IRQ number: {}", irq),
    }
}

/// Registers `handle` for `global_irq`, programming the owning I/O APIC's
/// redirection entry with the allocated vector. For global IRQ 1 the
/// handler is chained with the legacy keyboard and mouse handlers.
/// Returns the vector used, or `None` if registration failed.
#[export_name = "hal_ioapic_set_handle"]
pub fn set_handle(global_irq: u32, handle: InterruptHandle) -> Option<u8> {
    info!("set_handle irq={:#x?}", global_irq);
    // if global_irq == 1 {
    //     irq_add_handle(global_irq as u8 + IRQ0, handle);
    //     return Some(global_irq as u8 + IRQ0);
    // }
    let ioapic_info = get_ioapic(global_irq)?;
    let mut ioapic = ioapic_controller(&ioapic_info);
    let offset = (global_irq - ioapic_info.global_system_interrupt_base) as u8;
    let irq = ioapic.irq_vector(offset);
    let new_handle = if global_irq == 0x1 {
        Box::new(move || {
            handle();
            keyboard();
            mouse();
        })
    } else {
        handle
    };
    irq_add_handle(irq, new_handle).map(|x| {
        info!(
            "irq_set_handle: mapping from {:#x?} to {:#x?}",
            global_irq, x
        );
        ioapic.set_irq_vector(offset, x);
        x
    })
}

/// Unregisters the handler for `global_irq` and clears the redirection
/// entry. Returns true when a handler was actually removed.
#[export_name = "hal_ioapic_reset_handle"]
pub fn reset_handle(global_irq: u32) -> bool {
    info!("reset_handle");
    let ioapic_info = if let Some(x) = get_ioapic(global_irq) {
        x
    } else {
        return false;
    };
    let mut ioapic = ioapic_controller(&ioapic_info);
    let offset = (global_irq - ioapic_info.global_system_interrupt_base) as u8;
    let irq = ioapic.irq_vector(offset);
    // irq_remove_handle returns false on successful removal (see below).
    if !irq_remove_handle(irq) {
        ioapic.set_irq_vector(offset, 0);
        true
    } else {
        false
    }
}

/// Add a handle to IRQ table. Return the specified irq or an allocated irq on success
#[export_name = "hal_irq_add_handle"]
pub fn irq_add_handle(irq: u8, handle: InterruptHandle) -> Option<u8> {
    info!("IRQ add handle {:#x?}", irq);
    let mut table = IRQ_TABLE.lock();
    // allocate a valid irq number
    // (irq == 0 means "pick any free vector >= 0x20 for me")
    if irq == 0 {
        let mut id = 0x20;
        while id < table.len() {
            if table[id].is_none() {
                table[id] = Some(handle);
                return Some(id as u8);
            }
            id += 1;
        }
        return None;
    }
    match table[irq as usize] {
        Some(_) => None,
        None => {
            table[irq as usize] = Some(handle);
            Some(irq)
        }
    }
}

/// Removes the handler for vector `irq`.
/// NOTE(review): the return value is inverted relative to what the name
/// suggests — `false` when a handler was removed, `true` when none was
/// registered. Callers in this file (see `reset_handle`) rely on this.
#[export_name = "hal_irq_remove_handle"]
pub fn irq_remove_handle(irq: u8) -> bool {
    // TODO: ioapic redirection entries associated with this should be reset.
    info!("IRQ remove handle {:#x?}", irq);
    let irq = irq as usize;
    let mut table = IRQ_TABLE.lock();
    match table[irq] {
        Some(_) => {
            table[irq] = None;
            false
        }
        None => true,
    }
}

/// Reserves a naturally-aligned block of `irq_num` (rounded up to a power
/// of two) consecutive vectors starting at or after 0x20, filling each with
/// a no-op handler. Returns `(start, count)`.
/// NOTE(review): if the scan falls off the end of the table, the fill loop
/// below indexes `irq_start..irq_start + irq_num` unconditionally and can
/// panic out-of-bounds instead of returning `None` — confirm callers bound
/// `irq_num`, or add an explicit range check here.
#[export_name = "hal_irq_allocate_block"]
pub fn allocate_block(irq_num: u32) -> Option<(usize, usize)> {
    info!("hal_irq_allocate_block: count={:#x?}", irq_num);
    let irq_num = u32::next_power_of_two(irq_num) as usize;
    let mut irq_start = 0x20;
    let mut irq_cur = irq_start;
    let mut table = IRQ_TABLE.lock();
    while irq_cur < TABLE_SIZE && irq_cur < irq_start + irq_num {
        if table[irq_cur].is_none() {
            irq_cur += 1;
        } else {
            // Collision: restart the scan at the next aligned block boundary.
            irq_start = (irq_cur - irq_cur % irq_num) + irq_num;
            irq_cur = irq_start;
        }
    }
    for i in irq_start..irq_start + irq_num {
        table[i] = Some(Box::new(|| {}));
    }
    info!(
        "hal_irq_allocate_block: start={:#x?} num={:#x?}",
        irq_start, irq_num
    );
    Some((irq_start, irq_num))
}

/// Releases a vector block previously handed out by `allocate_block`.
#[export_name = "hal_irq_free_block"]
pub fn free_block(irq_start: u32, irq_num: u32) {
    let mut table = IRQ_TABLE.lock();
    for i in irq_start..irq_start + irq_num {
        table[i as usize] = None;
    }
}

/// Unconditionally installs `handle` for an MSI vector, replacing any
/// existing entry. Returns true when the slot was previously empty.
#[export_name = "hal_irq_overwrite_handler"]
pub fn overwrite_handler(msi_id: u32, handle: Box<dyn Fn() + Send + Sync>) -> bool {
    info!("IRQ overwrite handle {:#x?}", msi_id);
    let mut table = IRQ_TABLE.lock();
    let set = table[msi_id as usize].is_none();
    table[msi_id as usize] = Some(handle);
    set
}

/// Unmasks `irq` on the I/O APIC that owns it (routed to CPU 0).
#[export_name = "hal_irq_enable"]
pub fn irq_enable(irq: u32) {
    info!("irq_enable irq={:#x?}", irq);
    // if irq == 1 {
    //     irq_enable_raw(irq as u8, irq as u8 + IRQ0);
    //     return;
    // }
    if let Some(x) = get_ioapic(irq) {
        let mut ioapic = ioapic_controller(&x);
        ioapic.enable((irq - x.global_system_interrupt_base) as u8, 0);
    }
}

/// Programs `vector` for redirection-entry `irq` on the default I/O APIC
/// and unmasks it (routed to CPU 0). Used for the fixed legacy IRQs.
fn irq_enable_raw(irq: u8, vector: u8) {
    info!("irq_enable_raw: irq={:#x?}, vector={:#x?}", irq, vector);
    let mut ioapic = unsafe { IoApic::new(phys_to_virt(super::IOAPIC_ADDR)) };
    ioapic.set_irq_vector(irq, vector);
    ioapic.enable(irq, 0)
}

/// Masks `irq` on the I/O APIC that owns it.
#[export_name = "hal_irq_disable"]
pub fn irq_disable(irq: u32) {
    info!("irq_disable");
    if let Some(x) = get_ioapic(irq) {
        let mut ioapic = ioapic_controller(&x);
        ioapic.disable((irq - x.global_system_interrupt_base) as u8);
    }
}

/// Fully configures the redirection entry for `global_irq` (vector,
/// destination, trigger mode, polarity); the entry is left masked.
/// Returns true when an owning I/O APIC was found.
#[export_name = "hal_irq_configure"]
pub fn irq_configure(
    global_irq: u32,
    vector: u8,
    dest: u8,
    level_trig: bool,
    active_high: bool,
) -> bool {
    info!(
        "irq_configure: irq={:#x?}, vector={:#x?}, dest={:#x?}, level_trig={:#x?}, active_high={:#x?}",
        global_irq, vector, dest, level_trig, active_high
    );
    get_ioapic(global_irq)
        .map(|x| {
            let mut ioapic = ioapic_controller(&x);
            ioapic.config(
                (global_irq - x.global_system_interrupt_base) as u8,
                vector,
                dest,
                level_trig,
                active_high,
                false, /* physical */
                true,  /* mask */
            );
        })
        .is_some()
}

/// Returns the max redirection-entry index of the I/O APIC at
/// `ioapic_addr`, memoized in `MAX_INSTR_TABLE` to avoid repeated MMIO reads.
#[export_name = "hal_irq_maxinstr"]
pub fn ioapic_maxinstr(ioapic_addr: u32) -> Option<u8> {
    let mut table = MAX_INSTR_TABLE.lock();
    for (addr, v) in table.iter() {
        if *addr == ioapic_addr as usize {
            return Some(*v);
        }
    }
    let mut ioapic = unsafe { IoApic::new(phys_to_virt(ioapic_addr as usize)) };
    let v = ioapic.maxintr();
    table.push((ioapic_addr as usize, v));
    Some(v)
}

lazy_static! {
    // Cache of (ioapic MMIO address, max redirection index) pairs.
    static ref MAX_INSTR_TABLE: Mutex<Vec<(usize, u8)>> = Mutex::default();
}

/// True when some I/O APIC owns `irq`.
#[export_name = "hal_irq_isvalid"]
pub fn irq_is_valid(irq: u32) -> bool {
    trace!("irq_is_valid: irq={:#x?}", irq);
    get_ioapic(irq).is_some()
}

fn breakpoint() {
    panic!("\nEXCEPTION: Breakpoint");
}

fn double_fault(tf: &TrapFrame) {
    panic!("\nEXCEPTION: Double Fault\n{:#x?}", tf);
}

fn page_fault(tf: &mut TrapFrame) {
    panic!("\nEXCEPTION: Page Fault\n{:#x?}", tf);
}

fn timer() {
    super::timer_tick();
}

/// COM1 IRQ handler: drains one received byte into the serial buffer.
fn com1() {
    let c = super::COM1.lock().receive();
    super::serial_put(c);
}

/// Keyboard IRQ handler: translates decoded keys (including arrow keys as
/// ANSI escape sequences) into serial input bytes.
fn keyboard() {
    use pc_keyboard::{DecodedKey, KeyCode};
    if let Some(key) = super::keyboard::receive() {
        match key {
            DecodedKey::Unicode(c) => super::serial_put(c as u8),
            DecodedKey::RawKey(code) => {
                let s = match code {
                    KeyCode::ArrowUp => "\u{1b}[A",
                    KeyCode::ArrowDown => "\u{1b}[B",
                    KeyCode::ArrowRight => "\u{1b}[C",
                    KeyCode::ArrowLeft => "\u{1b}[D",
                    _ => "",
                };
                for c in s.bytes() {
                    super::serial_put(c);
                }
            }
        }
    }
}

// Reference: https://wiki.osdev.org/Exceptions
const DivideError: u8 = 0;
const Debug: u8 = 1;
const NonMaskableInterrupt: u8 = 2;
const Breakpoint: u8 = 3;
const Overflow: u8 = 4;
const BoundRangeExceeded: u8 = 5;
const InvalidOpcode: u8 = 6;
const DeviceNotAvailable: u8 = 7;
const DoubleFault: u8 = 8;
const CoprocessorSegmentOverrun: u8 = 9;
const InvalidTSS: u8 = 10;
const SegmentNotPresent: u8 = 11;
const StackSegmentFault: u8 = 12;
const GeneralProtectionFault: u8 = 13;
const PageFault: u8 = 14;
const FloatingPointException: u8 = 16;
const AlignmentCheck: u8 = 17;
const MachineCheck: u8 = 18;
const SIMDFloatingPointException: u8 = 19;
const VirtualizationException: u8 = 20;
const SecurityException: u8 = 30;

// Vector base for external interrupts: legacy IRQ n maps to vector IRQ0 + n.
const IRQ0: u8 = 32;

// IRQ
const Timer: u8 = 0;
const Keyboard: u8 = 1;
const COM2: u8 = 3;
const COM1: u8 = 4;
const Mouse: u8 = 12;
const IDE: u8 = 14;
const Error: u8 = 19;
const Spurious: u8 = 31;
//!
//! An example of the time_calc crate in action.
//!

extern crate time_calc;

use time_calc::{
    Bars, Beats, Bpm, Division, DivType, Measure, Ms, Ppqn, SampleHz, Samples, Ticks, TimeSig,
};

// "Samples per second" is used to convert between samples and milliseconds.
const SAMPLE_HZ: SampleHz = 44_100.0;

// "Parts per quarter note" is used to calculate Ticks; a high resolution musical time measurement.
const PPQN: Ppqn = 19_200;

/// Walks through the crate's conversions between musical time (bars, beats,
/// ticks) and wall-clock time (milliseconds, samples).
fn main() {
    println!("time_calc demonstration!");

    // Our `Bars` type is a simplified version of a Measure.
    assert!(Bars(1).measure() == Measure(1, Division::Bar, DivType::Whole));
    // The same goes for our `Beats` type.
    assert!(Beats(1).measure() == Measure(1, Division::Beat, DivType::Whole));

    // We can use "parts per quarter note" to convert to ticks.
    println!("Parts per quarter note: {}", PPQN);
    println!("Duration of a beat in ticks: {}", Beats(1).ticks(PPQN));
    println!("Duration of 38_400 ticks in beats: {}", Ticks(38_400).beats(PPQN));

    // We can use "beats per minute" to convert from musical time to milliseconds.
    let bpm: Bpm = 120.0;
    println!("Duration of a beat at 120 beats per minute: {} milliseconds.", Beats(1).ms(bpm));

    // And we can use "samples per second" to convert our duration to samples.
    println!("Samples per second: {}", SAMPLE_HZ);
    println!("Duration of a beat at 120bpm in samples: {}", Beats(1).samples(bpm, SAMPLE_HZ));

    // We also need to know the "time signature" if we are to convert from "Bars".
    // This is because different time signatures can have a different duration in "Beats".
    let beats_per_bar = TimeSig { top: 4, bottom: 4 }.beats_per_bar();
    println!("Duration of a bar in `Beats` with a 4/4 Time Signature: {}", beats_per_bar);
    let beats_per_bar = TimeSig { top: 3, bottom: 4 }.beats_per_bar();
    println!("Duration of a bar in `Beats` with a 3/4 Time Signature: {}", beats_per_bar);
    let beats_per_bar = TimeSig { top: 7, bottom: 8 }.beats_per_bar();
    println!("Duration of a bar in `Beats` with a 7/8 Time Signature: {}", beats_per_bar);
    let time_sig = TimeSig { top: 4, bottom: 4 };
    println!("Duration of a bar at 120bpm, 44_100 sample_hz and 4/4 Time Sig in samples: {}",
             Bars(1).samples(bpm, time_sig, SAMPLE_HZ));

    // We can also convert back the other way! Here's an example from Ms -> Beats.
    println!("1 minute as a duration in beats: {}", Ms(60_000.0).beats(bpm));

    // Here's an example from Samples -> Bars.
    println!("176_400 samples as a duration in bars: {}",
             Samples(176_400).bars(bpm, time_sig, SAMPLE_HZ));

    println!("Great Success!");
}
// // wallpaper.rs // Copyright (C) 2019 Malcolm Ramsay <malramsay64@gmail.com> // Distributed under terms of the MIT license. // use anyhow::{anyhow, Error}; use serde::{Deserialize, Serialize}; use std::convert::TryFrom; use crate::{CrystalFamily, Transform2}; #[derive(Clone, Serialize, Deserialize)] pub struct WallpaperGroup<'a> { pub name: &'a str, pub family: CrystalFamily, pub wyckoff_str: Vec<&'a str>, } /// Defining one of the Crystallographic wallpaper groups. /// /// This is the highest level description of the symmetry operations of a crystal structure. /// #[derive(Debug, Clone, Serialize, Deserialize)] pub struct Wallpaper { pub name: String, pub family: CrystalFamily, } impl Wallpaper { pub fn new(group: &WallpaperGroup) -> Wallpaper { Wallpaper { name: String::from(group.name), family: group.family, } } } impl<'a> From<WallpaperGroup<'a>> for Wallpaper { fn from(group: WallpaperGroup) -> Wallpaper { Wallpaper { name: String::from(group.name), family: group.family, } } } #[derive(Debug, Clone, Serialize, Deserialize)] pub struct WyckoffSite { pub letter: char, pub symmetries: Vec<Transform2>, pub num_rotations: u64, pub mirror_primary: bool, pub mirror_secondary: bool, } impl WyckoffSite { pub fn new(group: &WallpaperGroup) -> Result<WyckoffSite, Error> { let symmetries = group .wyckoff_str .iter() .map(|&a| Transform2::from_operations(a)) .collect::<Result<Vec<_>, _>>()?; Ok(WyckoffSite { letter: 'a', symmetries, num_rotations: 1, mirror_primary: false, mirror_secondary: false, }) } pub fn multiplicity(&self) -> usize { self.symmetries.len() } pub fn degrees_of_freedom(&self) -> &[bool] { // TODO implement -> This is only required for the non-general Wyckoff sites since all the // general sites have 3 degrees-of-freedom. // // This will be checked as a method of the Transform struct. 
&[true, true, true] } } #[allow(non_camel_case_types)] #[derive(Debug, Serialize, Deserialize)] pub enum WallpaperGroups { p1, p2, p1m1, p1g1, p2mm, p2mg, p2gg, } impl std::str::FromStr for WallpaperGroups { type Err = anyhow::Error; fn from_str(s: &str) -> Result<Self, Self::Err> { match s { "p1" => Ok(WallpaperGroups::p1), "p2" => Ok(WallpaperGroups::p2), "p1m1" => Ok(WallpaperGroups::p1m1), "p1g1" => Ok(WallpaperGroups::p1g1), "pg" => Ok(WallpaperGroups::p1g1), "p2mm" => Ok(WallpaperGroups::p2mm), "p2mg" => Ok(WallpaperGroups::p2mg), "p2gg" => Ok(WallpaperGroups::p2gg), _ => Err(anyhow!("Invalid Value")), } } } impl std::fmt::Display for WallpaperGroups { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { WallpaperGroups::p1 => write!(f, "p1"), WallpaperGroups::p2 => write!(f, "p2"), WallpaperGroups::p1m1 => write!(f, "p1m1"), WallpaperGroups::p1g1 => write!(f, "p1g1"), WallpaperGroups::p2mm => write!(f, "p2mm"), WallpaperGroups::p2mg => write!(f, "p2mg"), WallpaperGroups::p2gg => write!(f, "p2gg"), } } } impl WallpaperGroups { pub fn variants() -> Vec<&'static str> { vec!["p1", "p2", "p2m1", "p1g1", "p2mm", "p2mg", "p2gg"] } } impl<'a> TryFrom<WallpaperGroups> for WallpaperGroup<'a> { type Error = Error; fn try_from(name: WallpaperGroups) -> Result<Self, Self::Error> { match name { WallpaperGroups::p1 => Ok(WallpaperGroup { name: "p1", family: CrystalFamily::Monoclinic, wyckoff_str: vec!["x,y"], }), WallpaperGroups::p2 => Ok(WallpaperGroup { name: "p2", family: CrystalFamily::Monoclinic, wyckoff_str: vec!["x,y", "-x,-y"], }), WallpaperGroups::p1m1 => Ok(WallpaperGroup { name: "p1m1", family: CrystalFamily::Orthorhombic, wyckoff_str: vec!["x,y", "-x,y"], }), WallpaperGroups::p1g1 => Ok(WallpaperGroup { name: "p1m1", family: CrystalFamily::Orthorhombic, wyckoff_str: vec!["x,y", "-x,y+1/2"], }), WallpaperGroups::p2mm => Ok(WallpaperGroup { name: "p2mm", family: CrystalFamily::Orthorhombic, wyckoff_str: vec!["x,y", "-x,-y", "-x,y", 
"x,-y"], }), WallpaperGroups::p2mg => Ok(WallpaperGroup { name: "p2mg", family: CrystalFamily::Orthorhombic, wyckoff_str: vec!["x,y", "-x, -y", "-x+1/2, y", "x+1/2, -y"], }), WallpaperGroups::p2gg => Ok(WallpaperGroup { name: "p2gg", family: CrystalFamily::Orthorhombic, wyckoff_str: vec!["x,y", "-x, -y", "-x+1/2, y+1/2", "x+1/2, -y+1/2"], }), } } } #[cfg(test)] mod wyckoff_site_tests { use super::*; pub fn create_wyckoff() -> WyckoffSite { WyckoffSite { letter: 'a', symmetries: vec![Transform2::identity()], num_rotations: 1, mirror_primary: false, mirror_secondary: false, } } #[test] fn multiplicity() { let wyckoff = create_wyckoff(); assert_eq!(wyckoff.multiplicity(), 1); } }
use serenity::prelude::Context; use serenity::model::channel::Message; use serenity::framework::standard::{ CommandResult, macros::command, }; #[command] pub fn ping(context: &mut Context, message: &Message) -> CommandResult { message.reply(context, "Pong!")?; Ok(()) }
//给定一个正整数,返回它在 Excel 表中相对应的列名称。 // // 例如, // // 1 -> A // 2 -> B // 3 -> C // ... // 26 -> Z // 27 -> AA // 28 -> AB // ... // // // 示例 1: // // 输入: 1 //输出: "A" // // // 示例 2: // // 输入: 28 //输出: "AB" // // // 示例 3: // // 输入: 701 //输出: "ZY" // // impl Solution { pub fn convert_to_title(n: i32) -> String { use core::fmt::Write; if n <= 26 { let mut s = String::new(); if n > 0 { s.write_char(char::from(b'A' - 1 + n as u8)).unwrap(); } s.shrink_to_fit(); return s; } else { let mut s = String::new(); let i = n % 26; if i == 0 { s.write_str(Self::convert_to_title(n / 26 - 1).as_ref()) .unwrap(); s.write_str(Self::convert_to_title(26).as_ref()).unwrap(); } else { s.write_str(Self::convert_to_title(n / 26).as_ref()) .unwrap(); s.write_str(Self::convert_to_title(i).as_ref()).unwrap(); } return s; } } } struct Solution {} fn main() { let r = Solution::convert_to_title(26 * 26 * 26); println!("{:?}", r); }
// Copyright (c) 2016, Ben Boeckel // All rights reserved. // // Redistribution and use in source and binary forms, with or without modification, // are permitted provided that the following conditions are met: // // * Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // * Neither the name of this project nor the names of its contributors // may be used to endorse or promote products derived from this software // without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND // ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED // WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE // DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR // ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES // (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; // LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON // ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. extern crate serde_codegen; use std::env; use std::fs; use std::path::Path; pub fn main() { let out_dir = env::var_os("OUT_DIR").unwrap(); let paths = [ "types", ]; for path in &paths { let src = format!("src/{}.rs.in", path); let dst = format!("{}.rs", path); let src_path = Path::new(&src); let dst_path = Path::new(&out_dir).join(&dst); fs::create_dir_all(dst_path.parent().unwrap()).unwrap(); serde_codegen::expand(&src_path, &dst_path).unwrap(); } }
use super::v2; impl From<v2::License> for openapiv3::License { fn from(v2: v2::License) -> Self { openapiv3::License { name: v2.name.unwrap_or_default(), url: v2.url, extensions: indexmap::IndexMap::new(), } } }
use crate::{
    beam::Beam, beam_iter::BeamIter, grid::Grid, position::TilePosition, ray::Ray,
    ray_iter::RayIter, rays::rays_from, AngleRad, BeamIntersect,
};

/// Result of `TileRaycaster::crossing`: the last tile position that passed the
/// validity predicate and the first one that failed it, if any.
#[derive(Debug, Default, PartialEq)]
pub struct Crossing {
    pub valid: Option<TilePosition>,
    pub invalid: Option<TilePosition>,
}

/// Casts rays and beams across a tile `Grid`.
pub struct TileRaycaster {
    grid: Grid,
}

impl TileRaycaster {
    #[must_use]
    pub const fn new(grid: Grid) -> Self {
        Self { grid }
    }

    /// Borrow the underlying grid.
    pub const fn grid(&self) -> &Grid {
        &self.grid
    }

    /// Iterate over the tile positions a single ray intersects, starting from
    /// `tp` in direction `angle`.
    #[must_use]
    pub fn cast_ray<T: Into<AngleRad>>(&self, tp: &TilePosition, angle: T) -> RayIter {
        let intersections = Ray::new(self.grid.clone(), tp.clone(), angle);
        intersections.into_iter()
    }

    /// Iterate over the intersections of a beam of width `beam_width`,
    /// centered on `beam_center`, cast in direction `angle`.
    pub fn cast_beam<T: Into<AngleRad>>(
        &self,
        beam_center: &TilePosition,
        beam_width: f32,
        angle: T,
    ) -> BeamIter {
        let rays = rays_from(beam_center, &self.grid, beam_width, &angle.into());
        Beam::new(self.grid.tile_size, rays).into_iter()
    }

    /// Last position along the ray for which `is_valid` still holds
    /// (positions are consumed until the first failure).
    pub fn last_valid<P, T: Into<AngleRad>>(
        &self,
        tp: &TilePosition,
        angle: T,
        is_valid: P,
    ) -> Option<TilePosition>
    where
        P: FnMut(&TilePosition) -> bool,
    {
        self.cast_ray(tp, angle).take_while(is_valid).last()
    }

    /// First position along the ray for which `is_valid` fails; `None` if the
    /// whole ray is valid.
    pub fn first_invalid<P, T: Into<AngleRad>>(
        &self,
        tp: &TilePosition,
        angle: T,
        is_valid: P,
    ) -> Option<TilePosition>
    where
        P: FnMut(&TilePosition) -> bool,
    {
        let mut iter = self.cast_ray(tp, angle).skip_while(is_valid);
        iter.next()
    }

    /// Beam analogue of `last_valid`: the last `BeamIntersect` for which
    /// `is_valid` still holds.
    pub fn beam_last_valid<P, T: Into<AngleRad>>(
        &self,
        beam_center: &TilePosition,
        beam_width: f32,
        angle: T,
        is_valid: P,
    ) -> Option<BeamIntersect>
    where
        P: FnMut(&BeamIntersect) -> bool,
    {
        self.cast_beam(beam_center, beam_width, angle)
            .take_while(is_valid)
            .last()
    }

    /// Walk the ray and return both sides of the valid→invalid transition in a
    /// single pass: the last valid position and the first invalid one.
    ///
    /// If the ray is empty, or its very first position is already invalid,
    /// both fields are `None`. If the ray ends while still valid, `invalid` is
    /// `None`; if the final position itself is invalid (checked via the
    /// `(Some(prev), None)` arm), both fields are `None`.
    pub fn crossing<P, T: Into<AngleRad>>(
        &self,
        tp: &TilePosition,
        angle: T,
        mut is_valid: P,
    ) -> Crossing
    where
        P: FnMut(&TilePosition) -> bool,
    {
        let mut iter = self.cast_ray(tp, angle).peekable();
        let mut previous = iter.next();
        match previous {
            None => Crossing::default(),
            Some(prev) if !is_valid(&prev) => Crossing::default(),
            Some(_) => {
                // Invariant inside the loop: `previous` holds the most recent
                // valid position; the loop breaks with the (valid, invalid)
                // pair at the transition.
                let (valid, invalid) = loop {
                    let next = iter.next();
                    match (previous, next) {
                        (Some(prev), Some(next)) => {
                            if is_valid(&next) {
                                // Still valid — advance the window.
                                previous = Some(next);
                                continue;
                            }
                            break (Some(prev), Some(next));
                        }
                        (Some(prev), None) => {
                            // Ray exhausted; re-check the final position.
                            if is_valid(&prev) {
                                break (Some(prev), None);
                            }
                            break (None, None);
                        }
                        (None, None) => break (None, None),
                        #[allow(clippy::panic)]
                        (None, Some(_)) => {
                            panic!("(prev: None, next: Some(_)) should be impossible")
                        }
                    }
                };
                Crossing { valid, invalid }
            }
        }
    }
}
// Copyright 2022 Datafuse Labs. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::alloc::Layout; use std::fmt; use std::marker::PhantomData; use std::sync::Arc; use common_arrow::arrow::bitmap::Bitmap; use common_exception::ErrorCode; use common_exception::Result; use common_expression::types::number::*; use common_expression::types::*; use common_expression::with_number_mapped_type; use common_expression::Column; use common_expression::ColumnBuilder; use common_expression::Scalar; use common_io::prelude::*; use serde::de::DeserializeOwned; use serde::Deserialize; use serde::Serialize; use super::aggregate_function_factory::AggregateFunctionDescription; use super::aggregate_scalar_state::ChangeIf; use super::aggregate_scalar_state::CmpAny; use super::aggregate_scalar_state::CmpMax; use super::aggregate_scalar_state::CmpMin; use super::aggregate_scalar_state::TYPE_ANY; use super::aggregate_scalar_state::TYPE_MAX; use super::aggregate_scalar_state::TYPE_MIN; use super::AggregateFunctionRef; use super::StateAddr; use crate::aggregates::assert_binary_arguments; use crate::aggregates::AggregateFunction; use crate::with_compare_mapped_type; use crate::with_simple_no_number_mapped_type; // State for arg_min(arg, val) and arg_max(arg, val) // A: ValueType for arg. // V: ValueType for val. 
pub trait AggregateArgMinMaxState<A: ValueType, V: ValueType>: Send + Sync + 'static { fn new() -> Self; fn add(&mut self, value: V::ScalarRef<'_>, data: Scalar); fn add_batch( &mut self, data_column: &A::Column, column: &V::Column, validity: Option<&Bitmap>, ) -> Result<()>; fn merge(&mut self, rhs: &Self) -> Result<()>; fn serialize(&self, writer: &mut Vec<u8>) -> Result<()>; fn deserialize(&mut self, reader: &mut &[u8]) -> Result<()>; fn merge_result(&mut self, column: &mut ColumnBuilder) -> Result<()>; } #[derive(Serialize, Deserialize)] struct ArgMinMaxState<A, V, C> where V: ValueType, V::Scalar: Serialize + DeserializeOwned, { #[serde(bound(deserialize = "V::Scalar: DeserializeOwned"))] pub value: Option<V::Scalar>, pub data: Scalar, #[serde(skip)] _a: PhantomData<A>, #[serde(skip)] _c: PhantomData<C>, } impl<A, V, C> AggregateArgMinMaxState<A, V> for ArgMinMaxState<A, V, C> where A: ValueType + Send + Sync, V: ValueType, V::Scalar: Send + Sync + Serialize + DeserializeOwned, C: ChangeIf<V> + Default, { fn new() -> Self { Self { value: None, data: Scalar::Null, _a: PhantomData, _c: PhantomData, } } fn add(&mut self, other: V::ScalarRef<'_>, data: Scalar) { match &self.value { Some(v) => { if C::change_if(V::to_scalar_ref(v), other.clone()) { self.value = Some(V::to_owned_scalar(other)); self.data = data; } } None => { self.value = Some(V::to_owned_scalar(other)); self.data = data; } } } fn add_batch( &mut self, arg_col: &A::Column, val_col: &V::Column, validity: Option<&Bitmap>, ) -> Result<()> { let column_len = V::column_len(val_col); if column_len == 0 { return Ok(()); } let val_col_iter = V::iter_column(val_col); if let Some(bit) = validity { if bit.unset_bits() == column_len { return Ok(()); } // V::ScalarRef doesn't derive Default, so take the first value as default. 
let mut v = unsafe { V::index_column_unchecked(val_col, 0) }; let mut has_v = bit.get_bit(0); let mut data_value = if has_v { let arg = unsafe { A::index_column_unchecked(arg_col, 0) }; A::upcast_scalar(A::to_owned_scalar(arg)) } else { Scalar::Null }; for ((row, val), valid) in val_col_iter.enumerate().skip(1).zip(bit.iter().skip(1)) { if !valid { continue; } if !has_v { has_v = true; v = val.clone(); let arg = unsafe { A::index_column_unchecked(arg_col, row) }; data_value = A::upcast_scalar(A::to_owned_scalar(arg)); } else if C::change_if(v.clone(), val.clone()) { v = val.clone(); let arg = unsafe { A::index_column_unchecked(arg_col, row) }; data_value = A::upcast_scalar(A::to_owned_scalar(arg)); } } if has_v { self.add(v, data_value); } } else { let v = val_col_iter.enumerate().reduce(|acc, (row, val)| { if C::change_if(acc.1.clone(), val.clone()) { (row, val) } else { acc } }); if let Some((row, val)) = v { let arg = unsafe { A::index_column_unchecked(arg_col, row) }; self.add(val, A::upcast_scalar(A::to_owned_scalar(arg))); } }; Ok(()) } fn merge(&mut self, rhs: &Self) -> Result<()> { if let Some(v) = &rhs.value { self.add(V::to_scalar_ref(v), rhs.data.clone()); } Ok(()) } fn serialize(&self, writer: &mut Vec<u8>) -> Result<()> { serialize_into_buf(writer, self) } fn deserialize(&mut self, reader: &mut &[u8]) -> Result<()> { *self = deserialize_from_slice(reader)?; Ok(()) } fn merge_result(&mut self, builder: &mut ColumnBuilder) -> Result<()> { if self.value.is_some() { if let Some(inner) = A::try_downcast_builder(builder) { A::push_item(inner, A::try_downcast_scalar(&self.data.as_ref()).unwrap()); } else { builder.push(self.data.as_ref()); } } else if let Some(inner) = A::try_downcast_builder(builder) { A::push_default(inner); } else { builder.push_default(); } Ok(()) } } #[derive(Clone)] pub struct AggregateArgMinMaxFunction<A, V, C, State> { display_name: String, return_data_type: DataType, _a: PhantomData<A>, _v: PhantomData<V>, _c: PhantomData<C>, _state: 
PhantomData<State>, } impl<A, V, C, State> AggregateFunction for AggregateArgMinMaxFunction<A, V, C, State> where A: ValueType + Send + Sync, V: ValueType + Send + Sync, C: ChangeIf<V> + Default, State: AggregateArgMinMaxState<A, V>, { fn name(&self) -> &str { "AggregateArgMinMaxFunction" } fn return_type(&self) -> Result<DataType> { Ok(self.return_data_type.clone()) } fn init_state(&self, place: StateAddr) { place.write(|| State::new()); } fn state_layout(&self) -> Layout { Layout::new::<State>() } fn accumulate( &self, place: StateAddr, columns: &[Column], validity: Option<&Bitmap>, _input_rows: usize, ) -> Result<()> { let state: &mut State = place.get(); let arg_col = A::try_downcast_column(&columns[0]).unwrap(); let val_col = V::try_downcast_column(&columns[1]).unwrap(); state.add_batch(&arg_col, &val_col, validity) } fn accumulate_keys( &self, places: &[StateAddr], offset: usize, columns: &[Column], _input_rows: usize, ) -> Result<()> { let arg_col = A::try_downcast_column(&columns[0]).unwrap(); let val_col = V::try_downcast_column(&columns[1]).unwrap(); let arg_col_iter = A::iter_column(&arg_col); let val_col_iter = V::iter_column(&val_col); val_col_iter .zip(arg_col_iter) .zip(places.iter()) .for_each(|((val, arg), place)| { let addr = place.next(offset); let state = addr.get::<State>(); state.add( val.clone(), A::upcast_scalar(A::to_owned_scalar(arg.clone())), ); }); Ok(()) } fn accumulate_row(&self, place: StateAddr, columns: &[Column], row: usize) -> Result<()> { let arg_col = A::try_downcast_column(&columns[0]).unwrap(); let val_col = V::try_downcast_column(&columns[1]).unwrap(); let state = place.get::<State>(); let arg = unsafe { A::index_column_unchecked(&arg_col, row) }; let val = unsafe { V::index_column_unchecked(&val_col, row) }; state.add(val, A::upcast_scalar(A::to_owned_scalar(arg.clone()))); Ok(()) } fn serialize(&self, place: StateAddr, writer: &mut Vec<u8>) -> Result<()> { let state = place.get::<State>(); state.serialize(writer) } fn 
deserialize(&self, place: StateAddr, reader: &mut &[u8]) -> Result<()> { let state = place.get::<State>(); state.deserialize(reader) } fn merge(&self, place: StateAddr, rhs: StateAddr) -> Result<()> { let rhs = rhs.get::<State>(); let state = place.get::<State>(); state.merge(rhs) } fn merge_result(&self, place: StateAddr, builder: &mut ColumnBuilder) -> Result<()> { let state = place.get::<State>(); state.merge_result(builder) } fn need_manual_drop_state(&self) -> bool { true } unsafe fn drop_state(&self, place: StateAddr) { let state = place.get::<State>(); std::ptr::drop_in_place(state); } } impl<A, V, C, State> fmt::Display for AggregateArgMinMaxFunction<A, V, C, State> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.display_name) } } impl<A, V, C, State> AggregateArgMinMaxFunction<A, V, C, State> where A: ValueType + Send + Sync, V: ValueType + Send + Sync, C: ChangeIf<V> + Default, State: AggregateArgMinMaxState<A, V>, { pub fn try_create( display_name: &str, return_data_type: DataType, ) -> Result<AggregateFunctionRef> { Ok(Arc::new(AggregateArgMinMaxFunction::<A, V, C, State> { display_name: display_name.to_owned(), return_data_type, _a: PhantomData, _v: PhantomData, _c: PhantomData, _state: PhantomData, })) } } pub fn try_create_aggregate_arg_minmax_function<const CMP_TYPE: u8>( display_name: &str, _params: Vec<Scalar>, arguments: Vec<DataType>, ) -> Result<AggregateFunctionRef> { assert_binary_arguments(display_name, arguments.len())?; let arg_type = arguments[0].clone(); let val_type = arguments[1].clone(); with_compare_mapped_type!(|CMP| match CMP_TYPE { CMP => { with_simple_no_number_mapped_type!(|ARG_TYPE| match arg_type { DataType::ARG_TYPE => { with_simple_no_number_mapped_type!(|VAL_TYPE| match val_type { DataType::VAL_TYPE => { type State = ArgMinMaxState<ARG_TYPE, VAL_TYPE, CMP>; AggregateArgMinMaxFunction::<ARG_TYPE, VAL_TYPE, CMP, State>::try_create( display_name, arg_type, ) } DataType::Number(num_type) => { 
with_number_mapped_type!(|NUM| match num_type { NumberDataType::NUM => { type State = ArgMinMaxState<ARG_TYPE, NumberType<NUM>, CMP>; AggregateArgMinMaxFunction::< ARG_TYPE, NumberType<NUM>, CMP, State, >::try_create( display_name, arg_type ) } }) } _ => { type State = ArgMinMaxState<ARG_TYPE, AnyType, CMP>; AggregateArgMinMaxFunction::<ARG_TYPE, AnyType, CMP, State>::try_create( display_name, arg_type, ) } }) } DataType::Number(arg_num) => { with_number_mapped_type!(|ARG_NUM| match arg_num { NumberDataType::ARG_NUM => { with_simple_no_number_mapped_type!(|VAL_TYPE| match val_type { DataType::VAL_TYPE => { type State = ArgMinMaxState<NumberType<ARG_NUM>, VAL_TYPE, CMP>; AggregateArgMinMaxFunction::< NumberType<ARG_NUM>, VAL_TYPE, CMP, State, >::try_create( display_name, arg_type ) } DataType::Number(val_num) => { with_number_mapped_type!(|VAL_NUM| match val_num { NumberDataType::VAL_NUM => { type State = ArgMinMaxState< NumberType<ARG_NUM>, NumberType<VAL_NUM>, CMP, >; AggregateArgMinMaxFunction::< NumberType<ARG_NUM>, NumberType<VAL_NUM>, CMP, State, >::try_create( display_name, arg_type ) } }) } _ => { type State = ArgMinMaxState<NumberType<ARG_NUM>, AnyType, CMP>; AggregateArgMinMaxFunction::< NumberType<ARG_NUM>, AnyType, CMP, State, >::try_create( display_name, arg_type ) } }) } }) } _ => { with_simple_no_number_mapped_type!(|VAL_TYPE| match val_type { DataType::VAL_TYPE => { type State = ArgMinMaxState<AnyType, VAL_TYPE, CMP>; AggregateArgMinMaxFunction::<AnyType, VAL_TYPE, CMP, State>::try_create( display_name, arg_type, ) } DataType::Number(num_type) => { with_number_mapped_type!(|NUM| match num_type { NumberDataType::NUM => { type State = ArgMinMaxState<AnyType, NumberType<NUM>, CMP>; AggregateArgMinMaxFunction::< AnyType, NumberType<NUM>, CMP, State, >::try_create( display_name, arg_type ) } }) } _ => { type State = ArgMinMaxState<AnyType, AnyType, CMP>; AggregateArgMinMaxFunction::<AnyType, AnyType, CMP, State>::try_create( display_name, arg_type, ) } }) 
} }) } _ => Err(ErrorCode::BadDataValueType(format!( "Unsupported compare type for aggregate function {} (type number: {})", display_name, CMP_TYPE ))), }) } pub fn aggregate_arg_min_function_desc() -> AggregateFunctionDescription { AggregateFunctionDescription::creator(Box::new( try_create_aggregate_arg_minmax_function::<TYPE_MIN>, )) } pub fn aggregate_arg_max_function_desc() -> AggregateFunctionDescription { AggregateFunctionDescription::creator(Box::new( try_create_aggregate_arg_minmax_function::<TYPE_MAX>, )) }
use crate::helpers::ID;
use crate::ui::UI;
use ezgui::{Color, EventCtx, GfxCtx, Key, ModalMenu};
use geom::{Duration, PolyLine};
use map_model::LANE_THICKNESS;
use sim::{AgentID, TripID, TripResult};

/// UI state machine for visualizing an agent's route.
///
/// - `Inactive`: nothing shown.
/// - `Hovering`: the currently-selected agent's route traced at a sim time.
/// - `Active`: a pinned trip's route, re-traced whenever the sim time changes.
pub enum RouteViewer {
    Inactive,
    Hovering(Duration, AgentID, PolyLine),
    Active(Duration, TripID, Option<PolyLine>),
}

impl RouteViewer {
    // Recompute hover state from the current selection; falls back to
    // Inactive when nothing agent-like is selected or no trace exists.
    fn recalc(ui: &UI) -> RouteViewer {
        if let Some(agent) = ui
            .primary
            .current_selection
            .as_ref()
            .and_then(|id| id.agent_id())
        {
            if let Some(trace) = ui.primary.sim.trace_route(agent, &ui.primary.map, None) {
                return RouteViewer::Hovering(ui.primary.sim.time(), agent, trace);
            }
        }
        RouteViewer::Inactive
    }

    /// Advance the state machine for one event loop tick. Stale traces (old
    /// sim time or changed selection) are recomputed before handling input.
    pub fn event(&mut self, ctx: &mut EventCtx, ui: &UI, menu: &mut ModalMenu) {
        match self {
            RouteViewer::Inactive => {
                *self = RouteViewer::recalc(ui);
            }
            RouteViewer::Hovering(time, agent, _) => {
                // Refresh when time advances or the hovered agent changed.
                if *time != ui.primary.sim.time()
                    || ui.primary.current_selection != Some(ID::from_agent(*agent))
                {
                    *self = RouteViewer::recalc(ui);
                }

                // Re-match because recalc above may have changed the variant.
                if let RouteViewer::Hovering(_, agent, _) = self {
                    // If there's a current route, then there must be a trip.
                    let trip = ui.primary.sim.agent_to_trip(*agent).unwrap();
                    if ctx
                        .input
                        .contextual_action(Key::R, format!("show {}'s route", agent))
                    {
                        *self = show_route(trip, ui);
                    }
                }
            }
            RouteViewer::Active(time, trip, _) => {
                // TODO Using the modal menu from parent is weird...
                if menu.action("stop showing agent's route") {
                    *self = RouteViewer::Inactive;
                } else if *time != ui.primary.sim.time() {
                    // Sim advanced; re-trace the pinned trip's route.
                    *self = show_route(*trip, ui);
                }
            }
        }
    }

    /// Render the traced route, if any: translucent while hovering, opaque
    /// red (colorscheme default) while pinned.
    pub fn draw(&self, g: &mut GfxCtx, ui: &UI) {
        match self {
            RouteViewer::Hovering(_, _, ref trace) => {
                g.draw_polygon(
                    ui.cs.get("route").alpha(0.5),
                    &trace.make_polygons(LANE_THICKNESS),
                );
            }
            RouteViewer::Active(_, _, Some(ref trace)) => {
                g.draw_polygon(
                    ui.cs.get_def("route", Color::RED.alpha(0.8)),
                    &trace.make_polygons(LANE_THICKNESS),
                );
            }
            _ => {}
        }
    }
}

// Pin a trip's route: resolve the trip to its current agent and trace it.
// Trips mid-mode-change stay Active with no trace; finished or not-yet-started
// trips drop back to Inactive.
fn show_route(trip: TripID, ui: &UI) -> RouteViewer {
    let time = ui.primary.sim.time();
    match ui.primary.sim.trip_to_agent(trip) {
        TripResult::Ok(agent) => RouteViewer::Active(
            time,
            trip,
            ui.primary.sim.trace_route(agent, &ui.primary.map, None),
        ),
        TripResult::ModeChange => {
            println!("{} is doing a mode change", trip);
            RouteViewer::Active(time, trip, None)
        }
        TripResult::TripDone => {
            println!("{} is done or aborted, so no more showing route", trip);
            RouteViewer::Inactive
        }
        TripResult::TripDoesntExist => {
            println!("{} doesn't exist yet, so not showing route", trip);
            RouteViewer::Inactive
        }
    }
}
// Copyright 2022 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use std::fmt::Display;
use std::fmt::Formatter;

use crate::ast::write_comma_separated_list;
use crate::ast::write_period_separated_list;
use crate::ast::Identifier;
use crate::ast::Query;

/// AST node for `CREATE VIEW [IF NOT EXISTS] [catalog.][db.]view [(cols)] AS query`.
#[derive(Debug, Clone, PartialEq)]
pub struct CreateViewStmt {
    pub if_not_exists: bool,
    // Optional qualifiers; only present components are printed.
    pub catalog: Option<Identifier>,
    pub database: Option<Identifier>,
    pub view: Identifier,
    // Explicit column list; empty means the view inherits the query's columns.
    pub columns: Vec<Identifier>,
    pub query: Box<Query>,
}

impl Display for CreateViewStmt {
    fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
        write!(f, "CREATE VIEW ")?;
        if self.if_not_exists {
            write!(f, "IF NOT EXISTS ")?;
        }
        // Prints catalog.database.view, skipping absent qualifiers.
        write_period_separated_list(
            f,
            self.catalog
                .iter()
                .chain(&self.database)
                .chain(Some(&self.view)),
        )?;
        if !self.columns.is_empty() {
            write!(f, " (")?;
            write_comma_separated_list(f, &self.columns)?;
            write!(f, ")")?;
        }
        write!(f, " AS {}", self.query)
    }
}

/// AST node for `ALTER VIEW [catalog.][db.]view [(cols)] AS query`.
#[derive(Debug, Clone, PartialEq)]
pub struct AlterViewStmt {
    pub catalog: Option<Identifier>,
    pub database: Option<Identifier>,
    pub view: Identifier,
    pub columns: Vec<Identifier>,
    pub query: Box<Query>,
}

impl Display for AlterViewStmt {
    fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
        write!(f, "ALTER VIEW ")?;
        write_period_separated_list(
            f,
            self.catalog
                .iter()
                .chain(&self.database)
                .chain(Some(&self.view)),
        )?;
        if !self.columns.is_empty() {
            write!(f, " (")?;
            write_comma_separated_list(f, &self.columns)?;
            write!(f, ")")?;
        }
        write!(f, " AS {}", self.query)
    }
}

/// AST node for `DROP VIEW [IF EXISTS] [catalog.][db.]view`.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct DropViewStmt {
    pub if_exists: bool,
    pub catalog: Option<Identifier>,
    pub database: Option<Identifier>,
    pub view: Identifier,
}

impl Display for DropViewStmt {
    fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
        write!(f, "DROP VIEW ")?;
        if self.if_exists {
            write!(f, "IF EXISTS ")?;
        }
        write_period_separated_list(
            f,
            self.catalog
                .iter()
                .chain(&self.database)
                .chain(Some(&self.view)),
        )
    }
}
// Copyright 2022 Datafuse Labs. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::collections::BTreeMap; use std::collections::BTreeSet; use std::fmt::Debug; use std::fmt::Display; use std::fmt::Formatter; use chrono::DateTime; use chrono::Utc; use enumflags2::bitflags; use enumflags2::BitFlags; use crate::app_error::AppError; use crate::app_error::WrongShareObject; use crate::schema::DatabaseMeta; use crate::schema::TableInfo; use crate::schema::TableMeta; #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, Default, Eq, PartialEq)] pub struct ShareNameIdent { pub tenant: String, pub share_name: String, } impl Display for ShareNameIdent { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { write!(f, "'{}'/'{}'", self.tenant, self.share_name) } } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, Default, Eq, PartialEq)] pub struct ShareAccountNameIdent { pub account: String, pub share_id: u64, } impl Display for ShareAccountNameIdent { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { write!(f, "'{}'/'{}'", self.account, self.share_id) } } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)] pub struct ShowSharesReq { pub tenant: String, } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)] pub struct ShareAccountReply { pub share_name: ShareNameIdent, pub database_name: Option<String>, // for outbound share account, it is the time share has been created. 
// for inbound share account, it is the time accounts has been added to the share. pub create_on: DateTime<Utc>, // if is inbound share, then accounts is None pub accounts: Option<Vec<String>>, pub comment: Option<String>, } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)] pub struct ShowSharesReply { // sharing to other accounts(outbound shares) pub outbound_accounts: Vec<ShareAccountReply>, // be shared by other accounts(inbound shares) pub inbound_accounts: Vec<ShareAccountReply>, } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)] pub struct CreateShareReq { pub if_not_exists: bool, pub share_name: ShareNameIdent, pub comment: Option<String>, pub create_on: DateTime<Utc>, } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)] pub struct CreateShareReply { pub share_id: u64, pub spec_vec: Option<Vec<ShareSpec>>, } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)] pub struct DropShareReq { pub share_name: ShareNameIdent, pub if_exists: bool, } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)] pub struct DropShareReply { pub share_id: Option<u64>, pub spec_vec: Option<Vec<ShareSpec>>, } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)] pub struct AddShareAccountsReq { pub share_name: ShareNameIdent, pub if_exists: bool, pub accounts: Vec<String>, pub share_on: DateTime<Utc>, } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)] pub struct AddShareAccountsReply { pub share_id: Option<u64>, pub spec_vec: Option<Vec<ShareSpec>>, } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)] pub struct RemoveShareAccountsReq { pub share_name: ShareNameIdent, pub if_exists: bool, pub accounts: Vec<String>, } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)] pub struct RemoveShareAccountsReply { pub share_id: Option<u64>, pub spec_vec: 
Option<Vec<ShareSpec>>, } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)] pub struct ShowShareOfReq { pub share_name: ShareNameIdent, } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)] pub struct ShowShareOfReply {} #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)] pub enum ShareGrantObjectName { // database name Database(String), // database name, table name Table(String, String), } impl Display for ShareGrantObjectName { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match self { ShareGrantObjectName::Database(db) => { write!(f, "DATABASE {}", db) } ShareGrantObjectName::Table(db, table) => { write!(f, "TABLE {}.{}", db, table) } } } } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)] pub enum ShareGrantObjectSeqAndId { // db_meta_seq, db_id, DatabaseMeta Database(u64, u64, DatabaseMeta), // db_id, table_meta_seq, table_id, table_meta Table(u64, u64, u64, TableMeta), } // share name and shared (table name, table info) map pub type TableInfoMap = BTreeMap<String, TableInfo>; pub type ShareTableInfoMap = (String, Option<TableInfoMap>); #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)] pub struct GrantShareObjectReq { pub share_name: ShareNameIdent, pub object: ShareGrantObjectName, pub grant_on: DateTime<Utc>, pub privilege: ShareGrantObjectPrivilege, } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)] pub struct GrantShareObjectReply { pub share_id: u64, pub spec_vec: Option<Vec<ShareSpec>>, pub share_table_info: ShareTableInfoMap, } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)] pub struct RevokeShareObjectReq { pub share_name: ShareNameIdent, pub object: ShareGrantObjectName, pub privilege: ShareGrantObjectPrivilege, pub update_on: DateTime<Utc>, } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)] pub struct RevokeShareObjectReply { 
pub share_id: u64, pub spec_vec: Option<Vec<ShareSpec>>, pub share_table_info: ShareTableInfoMap, } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)] pub struct GetShareGrantObjectReq { pub share_name: ShareNameIdent, } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)] pub struct ShareGrantReplyObject { pub object: ShareGrantObjectName, pub privileges: BitFlags<ShareGrantObjectPrivilege>, pub grant_on: DateTime<Utc>, } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)] pub struct GetShareGrantObjectReply { pub share_name: ShareNameIdent, pub objects: Vec<ShareGrantReplyObject>, } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)] pub struct GetShareGrantTenantsReq { pub share_name: ShareNameIdent, } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)] pub struct GetShareGrantTenants { pub account: String, pub grant_on: DateTime<Utc>, } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)] pub struct GetShareGrantTenantsReply { pub accounts: Vec<GetShareGrantTenants>, } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)] pub struct GetObjectGrantPrivilegesReq { pub tenant: String, pub object: ShareGrantObjectName, } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)] pub struct ObjectGrantPrivilege { pub share_name: String, pub privileges: BitFlags<ShareGrantObjectPrivilege>, pub grant_on: DateTime<Utc>, } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)] pub struct GetObjectGrantPrivilegesReply { pub privileges: Vec<ObjectGrantPrivilege>, } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)] pub struct ShareAccountMeta { pub account: String, pub share_id: u64, pub share_on: DateTime<Utc>, pub accept_on: Option<DateTime<Utc>>, } impl ShareAccountMeta { pub fn new(account: String, share_id: u64, share_on: 
DateTime<Utc>) -> Self { Self { account, share_id, share_on, accept_on: None, } } } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, Default, Eq, PartialEq)] pub struct ShareId { pub share_id: u64, } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, Default, Eq, PartialEq)] pub struct ShareIdToName { pub share_id: u64, } impl Display for ShareIdToName { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.share_id) } } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)] pub enum ShareGrantObject { Database(u64), Table(u64), } impl ShareGrantObject { pub fn new(seq_and_id: &ShareGrantObjectSeqAndId) -> ShareGrantObject { match seq_and_id { ShareGrantObjectSeqAndId::Database(_seq, db_id, _meta) => { ShareGrantObject::Database(*db_id) } ShareGrantObjectSeqAndId::Table(_db_id, _seq, table_id, _meta) => { ShareGrantObject::Table(*table_id) } } } } impl Display for ShareGrantObject { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match self { ShareGrantObject::Database(db_id) => { write!(f, "db/{}", *db_id) } ShareGrantObject::Table(table_id) => { write!(f, "table/{}", *table_id) } } } } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)] pub struct ObjectSharedByShareIds { // save share ids which shares this object pub share_ids: BTreeSet<u64>, } impl Default for ObjectSharedByShareIds { fn default() -> Self { Self::new() } } impl ObjectSharedByShareIds { pub fn new() -> ObjectSharedByShareIds { ObjectSharedByShareIds { share_ids: BTreeSet::new(), } } pub fn add(&mut self, share_id: u64) { self.share_ids.insert(share_id); } pub fn remove(&mut self, share_id: u64) { self.share_ids.remove(&share_id); } } // see: https://docs.snowflake.com/en/sql-reference/sql/revoke-privilege-share.html #[bitflags] #[repr(u64)] #[derive( serde::Serialize, serde::Deserialize, Clone, Copy, Debug, Eq, PartialEq, num_derive::FromPrimitive, )] pub enum ShareGrantObjectPrivilege { // 
// For DATABASE or SCHEMA
    Usage = 1 << 0,
    // For DATABASE
    ReferenceUsage = 1 << 1,
    // For TABLE or VIEW
    Select = 1 << 2,
}

impl Display for ShareGrantObjectPrivilege {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        match *self {
            ShareGrantObjectPrivilege::Usage => write!(f, "USAGE"),
            ShareGrantObjectPrivilege::ReferenceUsage => write!(f, "REFERENCE_USAGE"),
            ShareGrantObjectPrivilege::Select => write!(f, "SELECT"),
        }
    }
}

/// One grant on a single shared object (database or table): the object,
/// its current privilege set, and when it was granted / last updated.
#[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)]
pub struct ShareGrantEntry {
    pub object: ShareGrantObject,
    pub privileges: BitFlags<ShareGrantObjectPrivilege>,
    pub grant_on: DateTime<Utc>,
    // Set on any re-grant or revoke after the initial grant; None until then.
    pub update_on: Option<DateTime<Utc>>,
}

impl ShareGrantEntry {
    /// Creates an entry holding exactly the given privilege, with no update time.
    pub fn new(
        object: ShareGrantObject,
        privileges: ShareGrantObjectPrivilege,
        grant_on: DateTime<Utc>,
    ) -> Self {
        Self {
            object,
            privileges: BitFlags::from(privileges),
            grant_on,
            update_on: None,
        }
    }

    // NOTE(review): this REPLACES the existing privilege set rather than
    // union-ing the new privilege into it — TODO confirm that is intended.
    pub fn grant_privileges(
        &mut self,
        privileges: ShareGrantObjectPrivilege,
        grant_on: DateTime<Utc>,
    ) {
        self.update_on = Some(grant_on);
        self.privileges = BitFlags::from(privileges);
    }

    // return true if all privileges are empty.
pub fn revoke_privileges( &mut self, privileges: ShareGrantObjectPrivilege, update_on: DateTime<Utc>, ) -> bool { self.update_on = Some(update_on); self.privileges.remove(BitFlags::from(privileges)); self.privileges.is_empty() } pub fn object(&self) -> &ShareGrantObject { &self.object } pub fn privileges(&self) -> &BitFlags<ShareGrantObjectPrivilege> { &self.privileges } pub fn has_granted_privileges(&self, privileges: ShareGrantObjectPrivilege) -> bool { self.privileges.contains(privileges) } } impl Display for ShareGrantEntry { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.object) } } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq, Default)] pub struct ShareMeta { pub database: Option<ShareGrantEntry>, pub entries: BTreeMap<String, ShareGrantEntry>, // save accounts which has been granted access to this share. pub accounts: BTreeSet<String>, pub comment: Option<String>, pub share_on: DateTime<Utc>, pub update_on: Option<DateTime<Utc>>, // save db ids which created from this share pub share_from_db_ids: BTreeSet<u64>, } impl ShareMeta { pub fn new(share_on: DateTime<Utc>, comment: Option<String>) -> Self { ShareMeta { share_on, comment, ..Default::default() } } pub fn get_accounts(&self) -> Vec<String> { Vec::<String>::from_iter(self.accounts.clone().into_iter()) } pub fn has_account(&self, account: &String) -> bool { self.accounts.contains(account) } pub fn add_account(&mut self, account: String) { self.accounts.insert(account); } pub fn del_account(&mut self, account: &String) { self.accounts.remove(account); } pub fn has_share_from_db_id(&self, db_id: u64) -> bool { self.share_from_db_ids.contains(&db_id) } pub fn add_share_from_db_id(&mut self, db_id: u64) { self.share_from_db_ids.insert(db_id); } pub fn remove_share_from_db_id(&mut self, db_id: u64) { self.share_from_db_ids.remove(&db_id); } pub fn get_grant_entry(&self, object: ShareGrantObject) -> Option<ShareGrantEntry> { let database = 
self.database.as_ref()?; if database.object == object { return Some(database.clone()); } match object { ShareGrantObject::Database(_db_id) => None, ShareGrantObject::Table(_table_id) => self.entries.get(&object.to_string()).cloned(), } } pub fn grant_object_privileges( &mut self, object: ShareGrantObject, privileges: ShareGrantObjectPrivilege, grant_on: DateTime<Utc>, ) { let key = object.to_string(); match object { ShareGrantObject::Database(_db_id) => { if let Some(db) = &mut self.database { db.grant_privileges(privileges, grant_on); } else { self.database = Some(ShareGrantEntry::new(object, privileges, grant_on)); } } ShareGrantObject::Table(_table_id) => { match self.entries.get_mut(&key) { Some(entry) => { entry.grant_privileges(privileges, grant_on); } None => { let entry = ShareGrantEntry::new(object, privileges, grant_on); self.entries.insert(key, entry); } }; } } } pub fn revoke_object_privileges( &mut self, object: ShareGrantObject, privileges: ShareGrantObjectPrivilege, update_on: DateTime<Utc>, ) -> Result<(), AppError> { let key = object.to_string(); match object { ShareGrantObject::Database(_db_id) => { if let Some(entry) = &mut self.database { if object == entry.object { if entry.revoke_privileges(privileges, update_on) { // all database privileges have been revoked, clear database and entries. 
self.database = None; self.entries.clear(); self.update_on = Some(update_on); } } else { return Err(AppError::WrongShareObject(WrongShareObject::new(&key))); } } else { return Err(AppError::WrongShareObject(WrongShareObject::new( object.to_string(), ))); } } ShareGrantObject::Table(table_id) => match self.entries.get_mut(&key) { Some(entry) => { if let ShareGrantObject::Table(self_table_id) = entry.object { if self_table_id == table_id { if entry.revoke_privileges(privileges, update_on) { self.entries.remove(&key); } } else { return Err(AppError::WrongShareObject(WrongShareObject::new( object.to_string(), ))); } } else { unreachable!("ShareMeta.entries MUST be Table Object"); } } None => return Ok(()), }, } Ok(()) } pub fn has_granted_privileges( &self, obj_name: &ShareGrantObjectName, object: &ShareGrantObjectSeqAndId, privileges: ShareGrantObjectPrivilege, ) -> Result<bool, AppError> { match object { ShareGrantObjectSeqAndId::Database(_seq, db_id, _meta) => match &self.database { Some(db) => match db.object { ShareGrantObject::Database(self_db_id) => { if self_db_id != *db_id { Err(AppError::WrongShareObject(WrongShareObject::new( obj_name.to_string(), ))) } else { Ok(db.has_granted_privileges(privileges)) } } ShareGrantObject::Table(_) => { unreachable!("grant database CANNOT be a table"); } }, None => Ok(false), }, ShareGrantObjectSeqAndId::Table(_db_id, _table_seq, table_id, _meta) => { let key = ShareGrantObject::Table(*table_id).to_string(); match self.entries.get(&key) { Some(entry) => Ok(entry.has_granted_privileges(privileges)), None => Ok(false), } } } } } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, Default, Eq, PartialEq)] pub struct ShareIdent { pub share_id: u64, pub seq: u64, } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, Default, Eq, PartialEq)] pub struct ShareInfo { pub ident: ShareIdent, pub name_ident: ShareNameIdent, pub meta: ShareMeta, } #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, Default, Eq, 
PartialEq)]
pub struct ShareTableSpec {
    pub name: String,
    pub database_id: u64,
    pub table_id: u64,
    pub presigned_url_timeout: String,
}

impl ShareTableSpec {
    /// Builds a table spec with the default presigned-URL timeout of 120s.
    pub fn new(name: &str, database_id: u64, table_id: u64) -> Self {
        ShareTableSpec {
            name: name.to_owned(),
            database_id,
            table_id,
            presigned_url_timeout: "120s".to_string(),
        }
    }
}

#[derive(serde::Serialize, serde::Deserialize, Clone, Debug, Default, Eq, PartialEq)]
pub struct ShareDatabaseSpec {
    pub name: String,
    pub id: u64,
}

/// Full description of one share: its database, tables and granted tenants.
#[derive(serde::Serialize, serde::Deserialize, Clone, Debug, Default, Eq, PartialEq)]
pub struct ShareSpec {
    pub name: String,
    pub share_id: u64,
    pub version: u64,
    pub database: Option<ShareDatabaseSpec>,
    pub tables: Vec<ShareTableSpec>,
    pub tenants: Vec<String>,
}

// Implements `kvapi::Key` (string-key serialization for the meta KV store)
// for every share-related identifier type.
mod kvapi_key_impl {
    use common_meta_kvapi::kvapi;

    use crate::share::ShareAccountNameIdent;
    use crate::share::ShareGrantObject;
    use crate::share::ShareId;
    use crate::share::ShareIdToName;
    use crate::share::ShareNameIdent;

    const PREFIX_SHARE: &str = "__fd_share";
    const PREFIX_SHARE_BY: &str = "__fd_share_by";
    const PREFIX_SHARE_ID: &str = "__fd_share_id";
    const PREFIX_SHARE_ID_TO_NAME: &str = "__fd_share_id_to_name";
    const PREFIX_SHARE_ACCOUNT_ID: &str = "__fd_share_account_id";

    /// __fd_share_by/{db|table}/<object_id> -> ObjectSharedByShareIds
    impl kvapi::Key for ShareGrantObject {
        const PREFIX: &'static str = PREFIX_SHARE_BY;

        fn to_string_key(&self) -> String {
            match *self {
                ShareGrantObject::Database(db_id) => kvapi::KeyBuilder::new_prefixed(Self::PREFIX)
                    .push_raw("db")
                    .push_u64(db_id)
                    .done(),
                ShareGrantObject::Table(table_id) => kvapi::KeyBuilder::new_prefixed(Self::PREFIX)
                    .push_raw("table")
                    .push_u64(table_id)
                    .done(),
            }
        }

        fn from_str_key(s: &str) -> Result<Self, kvapi::KeyError> {
            let mut p = kvapi::KeyParser::new_prefixed(s, Self::PREFIX)?;
            // Key layout is <prefix>/<kind>/<id>; anything but "db"/"table"
            // in the kind segment is rejected.
            let kind = p.next_raw()?;
            let id = p.next_u64()?;
            p.done()?;
            if kind == "db" {
                Ok(ShareGrantObject::Database(id))
            } else if kind == "table" {
                Ok(ShareGrantObject::Table(id))
            } else {
                return Err(kvapi::KeyError::InvalidSegment {
                    i: 1,
                    expect: "db or table".to_string(),
                    got: kind.to_string(),
                });
            }
        }
    }

    /// __fd_share/<tenant>/<share_name> -> <share_id>
    impl kvapi::Key for ShareNameIdent {
        const PREFIX: &'static str = PREFIX_SHARE;

        fn to_string_key(&self) -> String {
            kvapi::KeyBuilder::new_prefixed(Self::PREFIX)
                .push_str(&self.tenant)
                .push_str(&self.share_name)
                .done()
        }

        fn from_str_key(s: &str) -> Result<Self, kvapi::KeyError> {
            let mut p = kvapi::KeyParser::new_prefixed(s, Self::PREFIX)?;
            let tenant = p.next_str()?;
            let share_name = p.next_str()?;
            p.done()?;
            Ok(ShareNameIdent { tenant, share_name })
        }
    }

    /// __fd_share_id/<share_id> -> <share_meta>
    impl kvapi::Key for ShareId {
        const PREFIX: &'static str = PREFIX_SHARE_ID;

        fn to_string_key(&self) -> String {
            kvapi::KeyBuilder::new_prefixed(Self::PREFIX)
                .push_u64(self.share_id)
                .done()
        }

        fn from_str_key(s: &str) -> Result<Self, kvapi::KeyError> {
            let mut p = kvapi::KeyParser::new_prefixed(s, Self::PREFIX)?;
            let share_id = p.next_u64()?;
            p.done()?;
            Ok(ShareId { share_id })
        }
    }

    /// __fd_share_account_id/<account>/<share_id> -> ShareAccountMeta
    impl kvapi::Key for ShareAccountNameIdent {
        const PREFIX: &'static str = PREFIX_SHARE_ACCOUNT_ID;

        fn to_string_key(&self) -> String {
            // A zero share_id produces the shorter key `<prefix>/<account>` —
            // presumably used as a prefix for range-listing all shares of an
            // account; TODO confirm against callers.
            if self.share_id != 0 {
                kvapi::KeyBuilder::new_prefixed(Self::PREFIX)
                    .push_str(&self.account)
                    .push_u64(self.share_id)
                    .done()
            } else {
                kvapi::KeyBuilder::new_prefixed(Self::PREFIX)
                    .push_str(&self.account)
                    .done()
            }
        }

        fn from_str_key(s: &str) -> Result<Self, kvapi::KeyError> {
            let mut p = kvapi::KeyParser::new_prefixed(s, Self::PREFIX)?;
            // Note: this always expects both segments, so the short
            // (share_id == 0) key form produced above will not parse back.
            let account = p.next_str()?;
            let share_id = p.next_u64()?;
            p.done()?;
            Ok(ShareAccountNameIdent { account, share_id })
        }
    }

    /// __fd_share_id_to_name/<share_id> -> ShareNameIdent
    impl kvapi::Key for ShareIdToName {
        const PREFIX: &'static str = PREFIX_SHARE_ID_TO_NAME;

        fn to_string_key(&self) -> String {
            kvapi::KeyBuilder::new_prefixed(Self::PREFIX)
                .push_u64(self.share_id)
                .done()
        }

        fn from_str_key(s: &str) -> Result<Self, kvapi::KeyError> {
            let mut p = kvapi::KeyParser::new_prefixed(s, Self::PREFIX)?;
            let share_id = p.next_u64()?;
            p.done()?;
            Ok(ShareIdToName { share_id })
        }
    }
}
use std::{num::NonZeroU64, time::Duration}; use pathfinder_common::Chain; use pathfinder_ethereum::{EthereumApi, EthereumStateUpdate}; use pathfinder_retry::Retry; use primitive_types::H160; use tokio::sync::mpsc; use crate::state::sync::SyncEvent; #[derive(Clone)] pub struct L1SyncContext<EthereumClient> { pub ethereum: EthereumClient, pub chain: Chain, /// The Starknet core contract address on Ethereum pub core_address: H160, pub poll_interval: Duration, } /// Syncs L1 state update logs. Emits [Ethereum state update](EthereumStateUpdate) /// which should be handled to update storage and respond to queries. pub async fn sync<T>( tx_event: mpsc::Sender<SyncEvent>, context: L1SyncContext<T>, ) -> anyhow::Result<()> where T: EthereumApi + Clone, { let L1SyncContext { ethereum, chain: _, core_address, poll_interval, } = context; let mut previous = EthereumStateUpdate::default(); loop { let state_update = Retry::exponential( || async { ethereum.get_starknet_state(&core_address).await }, NonZeroU64::new(1).unwrap(), ) .factor(NonZeroU64::new(2).unwrap()) .max_delay(poll_interval / 2) .when(|_| true) .await?; if previous != state_update { previous = state_update.clone(); tx_event.send(SyncEvent::L1Update(state_update)).await?; } tokio::time::sleep(poll_interval).await; } }
// Convenience macros that forward a variable to the corresponding
// `runtime::context` checked-conversion helper, passing the variable's own
// source name (via `stringify!`) so failures can cite which argument was bad.

macro_rules! term_is_not_number {
    ($name:ident) => {
        crate::runtime::context::term_is_not_number(stringify!($name), $name)
    };
}

// Exported for use outside this crate, unlike the crate-local macros here.
#[macro_export]
macro_rules! term_try_into_atom {
    ($name:ident) => {
        crate::runtime::context::term_try_into_atom(stringify!($name), $name)
    };
}

macro_rules! term_try_into_bool {
    ($name:ident) => {
        crate::runtime::context::term_try_into_bool(stringify!($name), $name)
    };
}

macro_rules! term_try_into_isize {
    ($name:ident) => {
        crate::runtime::context::term_try_into_isize(stringify!($name), $name)
    };
}

macro_rules! term_try_into_local_pid {
    ($name:ident) => {
        crate::runtime::context::term_try_into_local_pid(stringify!($name), $name)
    };
}

macro_rules! term_try_into_local_reference {
    ($name:ident) => {
        crate::runtime::context::term_try_into_local_reference(stringify!($name), $name)
    };
}

// Also takes the owning process, which the helper needs to build a `badmap` error.
macro_rules! term_try_into_map_or_badmap {
    ($process:expr, $name:ident) => {
        crate::runtime::context::term_try_into_map_or_badmap($process, stringify!($name), $name)
    };
}

macro_rules! term_try_into_non_empty_list {
    ($name:ident) => {
        crate::runtime::context::term_try_into_non_empty_list(stringify!($name), $name)
    };
}

macro_rules! term_try_into_time_unit {
    ($name:ident) => {
        crate::runtime::context::term_try_into_time_unit(stringify!($name), $name)
    };
}

macro_rules! term_try_into_tuple {
    ($name:ident) => {
        crate::runtime::context::term_try_into_tuple(stringify!($name), $name)
    };
}
extern crate bindgen;

use std::env;
use std::path::{Path, PathBuf};

/// Build script: emits cargo linker directives for the PicoQuant HydraHarp
/// library and generates Rust bindings for `wrapper.h` with bindgen.
fn main() {
    let target = env::var("TARGET").unwrap();
    let is_windows = target.contains("windows");

    // Windows ships the vendor SDK in a fixed install directory; other
    // platforms are expected to provide the library on the default search path.
    let win_lib_dir = Path::new("C:\\Program Files\\PicoQuant\\HydraHarp-HHLibv30");
    if is_windows {
        println!("cargo:rustc-link-search={}", win_lib_dir.display());
        println!("cargo:rustc-link-lib=hhlib64");
    } else {
        println!("cargo:rustc-link-lib=hh400");
    }

    let mut builder = bindgen::Builder::default().header("wrapper.h");

    if let Ok(nix_cflags) = env::var("NIX_CFLAGS_COMPILE") {
        // Nix passes include flags through the environment. Using
        // split_whitespace() (instead of split(" ")) avoids handing clang
        // empty arguments when flags are separated by runs of whitespace.
        builder = builder.clang_args(nix_cflags.split_whitespace());
    } else if is_windows {
        // Outside Nix, point clang at the vendor SDK headers on Windows.
        builder = builder.clang_args(&[format!("-I{}", win_lib_dir.display())]);
    }

    // Single generate/write path replaces the three duplicated branches.
    let bindings = builder.generate().expect("Unable to generate bindings");
    let out_path = PathBuf::from(env::var("OUT_DIR").unwrap());
    bindings
        .write_to_file(out_path.join("bindings.rs"))
        .expect("Couldn't write bindings!");
}
// Copyright (c) 2017 Nikita Pekin and the xkcd_rs contributors // See the README.md file at the top-level directory of this distribution. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use std::fs::File; use std::io::Read; use std::path::Path; pub fn read_sample_data_from_path<P>(path: P) -> String where P: AsRef<Path>, { let mut file = File::open(path).unwrap(); let mut body = String::new(); file.read_to_string(&mut body).unwrap(); body }
#![cfg_attr(not(feature = "std"), no_std)] use ink_lang as ink; /// The Delegator Contract /// /// Instantiates all the other contracts, and acts as a facade to interact with them. #[ink::contract] mod newomegadelegator { use newomega::NewOmega; use newomega::FightResult; use newomega::Move; use newomega::MAX_SHIPS; use newomegagame::NewOmegaGame; use newomegaranked::NewOmegaRanked; use newomegaranked::PlayerDefence; use newomegastorage::NewOmegaStorage; use newomegastorage::CommanderData; use newomegastorage::PlayerData; use newomegarewarder::NewOmegaRewarder; use ink_prelude::vec::Vec; use ink_prelude::string::String; use ink_storage::{ Lazy, }; use ink_lang::ToAccountId; /// Withdrawal error reasons definition #[derive(Debug, PartialEq, Eq, scale::Encode, scale::Decode)] #[cfg_attr(feature = "std", derive(scale_info::TypeInfo))] pub enum RewardWithdrawError { TransferFailed, InsufficientFunds, BelowSubsistenceThreshold, } #[ink(storage)] pub struct NewOmegaDelegator { owner: AccountId, new_omega: Lazy<NewOmega>, new_omega_storage: Lazy<NewOmegaStorage>, new_omega_game: Lazy<NewOmegaGame>, new_omega_ranked: Lazy<NewOmegaRanked>, new_omega_rewarder: Lazy<NewOmegaRewarder>, } #[ink(event)] pub struct RankedFightComplete { #[ink(topic)] attacker: AccountId, #[ink(topic)] defender: AccountId, result: FightResult, } const LOOT_CRATE_PRICE: u128 = 1; impl NewOmegaDelegator { /// Instantiates the Delegator. 
/// /// # Arguments /// /// * `version` - Contract version /// * `newomega_code_hash` - Contract code hash: NewOmega /// * `newomega_storage_code_hash` - Contract code hash: NewOmegaStorage /// * `newomega_game_code_hash` - Contract code hash: NewOmegaGame /// * `newomega_ranked_code_hash` - Contract code hash: NewOmegaRanked /// * `newomega_rewarder_code_hash` - Contract code hash: NewOmegaRewarder #[ink(constructor)] pub fn new( version: u32, newomega_code_hash: Hash, newomega_storage_code_hash: Hash, newomega_game_code_hash: Hash, newomega_ranked_code_hash: Hash, newomega_rewarder_code_hash: Hash, ) -> Self { let total_balance = Self::env().balance(); let salt = version.to_le_bytes(); let new_omega = NewOmega::new() .endowment(total_balance / 8) .code_hash(newomega_code_hash) .salt_bytes(salt) .instantiate() .expect("Failed instantiating NewOmega"); let new_omega_game = NewOmegaGame::new(new_omega.clone()) .endowment(total_balance / 8) .code_hash(newomega_game_code_hash) .salt_bytes(salt) .instantiate() .expect("Failed instantiating NewOmegaGame"); let mut new_omega_storage = NewOmegaStorage::new() .endowment(total_balance / 8) .code_hash(newomega_storage_code_hash) .salt_bytes(salt) .instantiate() .expect("Failed instantiating NewOmegaStorage"); let new_omega_ranked = NewOmegaRanked::new(new_omega_game.clone(), new_omega_storage.clone()) .endowment(total_balance / 8) .code_hash(newomega_ranked_code_hash) .salt_bytes(salt) .instantiate() .expect("Failed instantiating NewOmegaRanked"); let new_omega_rewarder = NewOmegaRewarder::new(new_omega_storage.clone()) .endowment(total_balance / 8) .code_hash(newomega_rewarder_code_hash) .salt_bytes(salt) .instantiate() .expect("Failed instantiating NewOmegaRewarder"); // Authorise the Ranked and Rewarder contracts to use the Storage contract new_omega_storage.authorise_contract(new_omega_ranked.to_account_id()); new_omega_storage.authorise_contract(new_omega_rewarder.to_account_id()); Self { owner: Self::env().caller(), 
new_omega: Lazy::new(new_omega), new_omega_storage: Lazy::new(new_omega_storage), new_omega_game: Lazy::new(new_omega_game), new_omega_ranked: Lazy::new(new_omega_ranked), new_omega_rewarder: Lazy::new(new_omega_rewarder), } } /// Returns a fight replay (detailed fight description). /// /// # Arguments /// /// * `seed` - Seed used to generate randomness /// * `selection_lhs` - Attacker fleet composition (array with ship quantities) /// * `selection_rhs` - Defender fleet composition (array with ship quantities) /// * `variants_lhs` - An array that holds variants of the attacker fleet /// * `variants_rhs` - An array that holds variants of the defender fleet /// * `commander_lhs` - The attacker commander /// * `commander_rhs` - The defender commander /// /// # Returns /// /// * `result` - A FightResult structure containing the result /// * `moves_lhs` - Logged moves of the attacker /// * `moves_rhs` - Logged moves of the defender #[ink(message)] pub fn replay(&self, seed: u64, selection_lhs: [u8; MAX_SHIPS], selection_rhs: [u8; MAX_SHIPS], variants_lhs: [u8; MAX_SHIPS], variants_rhs: [u8; MAX_SHIPS], commander_lhs: u8, commander_rhs: u8) -> (FightResult, Option<Vec<Move>>, Option<Vec<Move>>) { self.new_omega_game.fight(seed, true, selection_lhs, selection_rhs, variants_lhs, variants_rhs, commander_lhs, commander_rhs) } /// Returns a fight result (without detailed fight description). 
/// /// # Arguments /// /// * `seed` - Seed used to generate randomness /// * `selection_lhs` - Attacker fleet composition (array with ship quantities) /// * `selection_rhs` - Defender fleet composition (array with ship quantities) /// * `variants_lhs` - An array that holds variants of the attacker fleet /// * `variants_rhs` - An array that holds variants of the defender fleet /// * `commander_lhs` - The attacker commander /// * `commander_rhs` - The defender commander /// /// # Returns /// /// * `result` - A FightResult structure containing the result /// * `moves_lhs` - Always returning None /// * `moves_rhs` - Always returning None #[ink(message)] pub fn replay_result(&self, seed: u64, selection_lhs: [u8; MAX_SHIPS], selection_rhs: [u8; MAX_SHIPS], variants_lhs: [u8; MAX_SHIPS], variants_rhs: [u8; MAX_SHIPS], commander_lhs: u8, commander_rhs: u8) -> (FightResult, Option<Vec<Move>>, Option<Vec<Move>>) { self.new_omega_game.fight(seed, false, selection_lhs, selection_rhs, variants_lhs, variants_rhs, commander_lhs, commander_rhs) } /// Adds ship to the ship definitions /// /// # Arguments /// /// * `cp` - Ship Command Power /// * `hp` - Ship Health Points /// * `attack_base` - Base attack /// * `attack_variable` - Variable attack (subject to random) /// * `defence` - Ship Defence /// * `speed` - Ship Speed /// * `range` - Ship Range #[ink(message)] pub fn add_ship(&mut self, cp: u16, hp: u16, attack_base: u16, attack_variable: u16, defence: u16, speed: u8, range: u8) { assert_eq!(self.env().caller(), self.owner); self.new_omega_game.add_ship(cp, hp, attack_base, attack_variable, defence, speed, range); } /// Registers a fleet for Ranked Defence. 
/// /// # Arguments /// /// * `selection` - The fleet composition of the defence /// * `variants` - The variants (fittings) of the defence /// * `commander` - Index of the commander leading the defence /// * `name` - The defender name #[ink(message)] pub fn register_defence(&mut self, selection: [u8; MAX_SHIPS], variants: [u8; MAX_SHIPS], commander: u8, name: String) { let caller: AccountId = self.env().caller(); self.new_omega_ranked.register_defence(caller, selection, variants, commander, name); } /// Gets the registered defence of a player. /// Will panic if defence has not been registered for the player. /// /// # Returns /// /// * `defence` - The registered defence #[ink(message)] pub fn get_own_defence(&self) -> PlayerDefence { self.new_omega_ranked.get_own_defence(self.env().caller()) } /// Gets all the registered defenders (all players). /// /// # Returns /// /// * `defenders` - The registered defenders #[ink(message)] pub fn get_all_defenders(&self) -> Vec<(AccountId, PlayerDefence)> { self.new_omega_ranked.get_all_defenders() } /// Calculates a ranked fight between caller and another player. /// /// # Arguments /// /// * `target` - account id of the defender /// * `selection` - Attacker fleet composition (array with ship quantities) /// * `variants` - An array that holds variants of the attacker fleet /// * `commander` - The attacker commander /// /// # Events /// /// * RankedFightComplete - when fight is complete #[ink(message)] pub fn attack(&mut self, target: AccountId, selection: [u8; MAX_SHIPS], variants: [u8; MAX_SHIPS], commander: u8) { let caller: AccountId = self.env().caller(); let result: FightResult = self.new_omega_ranked.attack( caller, target, selection, variants, commander); self.env().emit_event(RankedFightComplete { attacker: caller, defender: target, result, }); } /// Gets the current ranked leaderboard. 
        ///
        /// # Returns
        ///
        /// * `leaderboard` - A Vec containing a tuple of (player account id, player data)
        #[ink(message)]
        pub fn get_leaderboard(&self) -> Vec<(AccountId, PlayerData)> {
            self.new_omega_storage.get_leaderboard()
        }

        /// Gets all the owned commanders for the caller.
        ///
        /// # Returns
        ///
        /// * `commanders` - A Vec containing a tuple of (commander id, commander data)
        #[ink(message)]
        pub fn get_commanders(&self) -> Vec<(u8, CommanderData)> {
            self.new_omega_storage.get_commanders(self.env().caller())
        }

        /// Generates a loot crate for the caller.
        ///
        /// Payable: the caller must attach at least `LOOT_CRATE_PRICE`.
        ///
        /// # Returns
        ///
        /// * `commander` - Id of the commander received from the loot crate
        #[ink(message, payable)]
        pub fn buy_loot_crate(&mut self) -> u8 {
            // Reject underpaid calls outright.
            assert!(self.env().transferred_balance() >= LOOT_CRATE_PRICE);
            let caller: AccountId = self.env().caller();
            self.new_omega_rewarder.buy_loot_crate(caller)
        }

        /// Withdraws funds from the Rewarder contract to the Delegator contract owner
        ///
        /// Owner-only: panics if called by any other account.
        ///
        /// # Arguments
        ///
        /// * `value` - Balance to withdraw. Panic if greater than available balance.
        #[ink(message)]
        pub fn admin_withdraw_funds(&mut self, value: Balance) -> Result<(), RewardWithdrawError> {
            assert_eq!(self.env().caller(), self.owner);
            if value > self.env().balance() {
                return Err(RewardWithdrawError::InsufficientFunds)
            }
            self.env()
                .transfer(self.owner, value)
                .map_err(|err| {
                    // Map the ink transfer error to this contract's error enum.
                    match err {
                        ink_env::Error::BelowSubsistenceThreshold => {
                            RewardWithdrawError::BelowSubsistenceThreshold
                        }
                        _ => RewardWithdrawError::TransferFailed,
                    }
                })
        }
    }
}
//! Provides types and functionality for the Discord [Overlay](https://discord.com/developers/docs/game-sdk/overlay)

pub mod events;

use crate::{Command, CommandKind, Error};
use serde::Serialize;

/// RPC payload for toggling the overlay: identifies our process and the
/// desired visibility state.
#[derive(Serialize)]
struct OverlayToggle {
    /// Our process id, this lets Discord know what process it should try
    /// to show the overlay in
    pid: u32,
    // Discord's wire field is named `locked`; `Visibility` serializes to the
    // matching boolean (see `Serialize for Visibility` below).
    #[serde(rename = "locked")]
    visibility: Visibility,
}

impl OverlayToggle {
    fn new(visibility: Visibility) -> Self {
        Self {
            pid: std::process::id(),
            visibility,
        }
    }
}

/// Whether the overlay is shown or hidden.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum Visibility {
    Visible,
    Hidden,
}

impl Serialize for Visibility {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        // On the wire this is the `locked` boolean: `true` == hidden (locked),
        // `false` == visible.
        serializer.serialize_bool(!(*self == Self::Visible))
    }
}

impl<'de> serde::Deserialize<'de> for Visibility {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        use serde::de;

        struct Visitor;

        impl<'de> de::Visitor<'de> for Visitor {
            type Value = Visibility;

            fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                formatter.write_str("a boolean")
            }

            fn visit_bool<E>(self, value: bool) -> Result<Self::Value, E>
            where
                E: de::Error,
            {
                // Mirror of `Serialize` above: `true` (locked) => Hidden.
                Ok(if value {
                    Visibility::Hidden
                } else {
                    Visibility::Visible
                })
            }
        }

        deserializer.deserialize_bool(Visitor)
    }
}

/// The action an activity invite asks the recipient to take.
#[derive(Copy, Clone, PartialEq, serde_repr::Serialize_repr)]
#[repr(u8)]
pub enum InviteAction {
    Join = 1,
    Spectate = 2,
}

/// Minimal RPC payload that carries only our process id.
#[derive(Serialize)]
pub(crate) struct OverlayPidArgs {
    pid: u32,
}

impl OverlayPidArgs {
    pub(crate) fn new() -> Self {
        Self {
            pid: std::process::id(),
        }
    }
}

impl crate::Discord {
    /// Opens or closes the overlay. If the overlay is not enabled this will
    /// instead focus the Discord app itself.
    ///
    /// [API docs](https://discord.com/developers/docs/game-sdk/overlay#setlocked)
    pub async fn set_overlay_visibility(&self, visibility: Visibility) -> Result<(), Error> {
        let rx = self.send_rpc(
            CommandKind::SetOverlayVisibility,
            OverlayToggle::new(visibility),
        )?;

        handle_response!(rx, Command::SetOverlayVisibility => {
            Ok(())
        })
    }

    /// Opens the overlay modal for sending game invitations to users, channels,
    /// and servers.
    ///
    /// # Errors
    /// If you do not have a valid activity with all the required fields, this
    /// call will error. See
    /// [Activity Action Field Requirements](https://discord.com/developers/docs/game-sdk/activities#activity-action-field-requirements)
    /// for the fields required to have join and spectate invites function properly.
    ///
    /// [API docs](https://discord.com/developers/docs/game-sdk/overlay#openactivityinvite)
    pub async fn open_activity_invite(&self, action: InviteAction) -> Result<(), Error> {
        // Local payload type: `type` is a reserved word in Rust, hence the rename.
        #[derive(Serialize)]
        struct OpenInviteModal {
            /// Our process id, this lets Discord know what process it should try
            /// to show the overlay in
            pid: u32,
            #[serde(rename = "type")]
            kind: InviteAction,
        }

        let rx = self.send_rpc(
            CommandKind::OpenOverlayActivityInvite,
            OpenInviteModal {
                pid: std::process::id(),
                kind: action,
            },
        )?;

        handle_response!(rx, Command::OpenOverlayActivityInvite => {
            Ok(())
        })
    }

    /// Opens the overlay modal for joining a Discord guild, given its invite code.
    /// Unlike the normal SDK, this method automatically parses the code from
    /// the provided string so you don't need to do it yourself.
    ///
    /// Note that just because the result might be [`Result::Ok`] doesn't
    /// necessarily mean the user accepted the invite.
    ///
    /// [API docs](https://discord.com/developers/docs/game-sdk/overlay#openguildinvite)
    pub async fn open_guild_invite(&self, code: impl AsRef<str>) -> Result<(), Error> {
        let mut code = code.as_ref();

        // Strip a leading scheme and the known invite hosts so only the bare
        // invite code is sent to Discord.
        // NOTE(review): `http://` and the newer `discord.com/invite/` host are
        // not stripped here — confirm whether they should also be handled.
        if let Some(rest) = code.strip_prefix("https://") {
            code = rest;
        }

        if let Some(rest) = code.strip_prefix("discord.gg/") {
            code = rest;
        } else if let Some(rest) = code.strip_prefix("discordapp.com/invite/") {
            code = rest;
        }

        #[derive(Serialize)]
        struct OpenGuildInviteModal<'stack> {
            pid: u32,
            code: &'stack str,
        }

        let rx = self.send_rpc(
            CommandKind::OpenOverlayGuildInvite,
            OpenGuildInviteModal {
                pid: std::process::id(),
                code,
            },
        )?;

        handle_response!(rx, Command::OpenOverlayGuildInvite => {
            Ok(())
        })
    }

    /// Opens the overlay widget for voice settings for the currently connected application.
    ///
    /// [API docs](https://discord.com/developers/docs/game-sdk/overlay#openvoicesettings)
    pub async fn open_voice_settings(&self) -> Result<(), Error> {
        let rx = self.send_rpc(CommandKind::OpenOverlayVoiceSettings, OverlayPidArgs::new())?;

        handle_response!(rx, Command::OpenOverlayVoiceSettings => {
            Ok(())
        })
    }
}
// Read-only accessor for the RCC clock-interrupt flag register (CIFR),
// in svd2rust-generated style: `R` wraps the raw 32-bit register value and
// each `*_R` type is a single-bit flag reader.
#[doc = "Reader of register CIFR"]
pub type R = crate::R<u32, super::CIFR>;
#[doc = "Reader of field `LSIRDYF`"]
pub type LSIRDYF_R = crate::R<bool, bool>;
#[doc = "Reader of field `LSERDYF`"]
pub type LSERDYF_R = crate::R<bool, bool>;
#[doc = "Reader of field `MSIRDYF`"]
pub type MSIRDYF_R = crate::R<bool, bool>;
#[doc = "Reader of field `HSIRDYF`"]
pub type HSIRDYF_R = crate::R<bool, bool>;
#[doc = "Reader of field `HSERDYF`"]
pub type HSERDYF_R = crate::R<bool, bool>;
#[doc = "Reader of field `PLLRDYF`"]
pub type PLLRDYF_R = crate::R<bool, bool>;
#[doc = "Reader of field `PLLSAI1RDYF`"]
pub type PLLSAI1RDYF_R = crate::R<bool, bool>;
#[doc = "Reader of field `PLLSAI2RDYF`"]
pub type PLLSAI2RDYF_R = crate::R<bool, bool>;
#[doc = "Reader of field `CSSF`"]
pub type CSSF_R = crate::R<bool, bool>;
#[doc = "Reader of field `LSECSSF`"]
pub type LSECSSF_R = crate::R<bool, bool>;
#[doc = "Reader of field `HSI48RDYF`"]
pub type HSI48RDYF_R = crate::R<bool, bool>;
// Each accessor extracts its flag by shifting the documented bit position
// down and masking the low bit.
impl R {
    #[doc = "Bit 0 - LSI ready interrupt flag"]
    #[inline(always)]
    pub fn lsirdyf(&self) -> LSIRDYF_R {
        LSIRDYF_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - LSE ready interrupt flag"]
    #[inline(always)]
    pub fn lserdyf(&self) -> LSERDYF_R {
        LSERDYF_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - MSI ready interrupt flag"]
    #[inline(always)]
    pub fn msirdyf(&self) -> MSIRDYF_R {
        MSIRDYF_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 3 - HSI ready interrupt flag"]
    #[inline(always)]
    pub fn hsirdyf(&self) -> HSIRDYF_R {
        HSIRDYF_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bit 4 - HSE ready interrupt flag"]
    #[inline(always)]
    pub fn hserdyf(&self) -> HSERDYF_R {
        HSERDYF_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 5 - PLL ready interrupt flag"]
    #[inline(always)]
    pub fn pllrdyf(&self) -> PLLRDYF_R {
        PLLRDYF_R::new(((self.bits >> 5) & 0x01) != 0)
    }
    #[doc = "Bit 6 - PLLSAI1 ready interrupt flag"]
    #[inline(always)]
    pub fn pllsai1rdyf(&self) -> PLLSAI1RDYF_R {
        PLLSAI1RDYF_R::new(((self.bits >> 6) & 0x01) != 0)
    }
    #[doc = "Bit 7 - PLLSAI2 ready interrupt flag"]
    #[inline(always)]
    pub fn pllsai2rdyf(&self) -> PLLSAI2RDYF_R {
        PLLSAI2RDYF_R::new(((self.bits >> 7) & 0x01) != 0)
    }
    #[doc = "Bit 8 - Clock security system interrupt flag"]
    #[inline(always)]
    pub fn cssf(&self) -> CSSF_R {
        CSSF_R::new(((self.bits >> 8) & 0x01) != 0)
    }
    #[doc = "Bit 9 - LSE Clock security system interrupt flag"]
    #[inline(always)]
    pub fn lsecssf(&self) -> LSECSSF_R {
        LSECSSF_R::new(((self.bits >> 9) & 0x01) != 0)
    }
    #[doc = "Bit 10 - HSI48 ready interrupt flag"]
    #[inline(always)]
    pub fn hsi48rdyf(&self) -> HSI48RDYF_R {
        HSI48RDYF_R::new(((self.bits >> 10) & 0x01) != 0)
    }
}
const MOD: u64 = 998_244_353;

/// For positions `1..=n`, with some positions fixed to `'L'` or `'R'` by
/// `cp` (pairs of `(letter, position)`), every unfixed position contributes
/// a factor equal to (number of fixed `'L'` strictly to its left) +
/// (number of fixed `'R'` strictly to its right); fixed positions contribute
/// a factor of 1. Returns the product of all factors modulo 998244353, or 0
/// when `cp` has more entries than positions.
///
/// Uses prefix/suffix counts, so it runs in O(n + k) instead of the
/// original O(n * k) double loop.
fn solve(n: usize, cp: &[(char, usize)]) -> u64 {
    if cp.len() > n {
        return 0;
    }
    // lr[i] is the fixed letter at position i, or '?' when unconstrained.
    let mut lr = vec!['?'; n + 1];
    for &(c, p) in cp {
        lr[p] = c;
    }
    // l_before[j] = number of 'L' at positions strictly below j.
    let mut l_before = vec![0u64; n + 2];
    for i in 1..=n {
        l_before[i + 1] = l_before[i] + u64::from(lr[i] == 'L');
    }
    // r_after[j] = number of 'R' at positions >= j (so strictly-after is r_after[j + 1]).
    let mut r_after = vec![0u64; n + 2];
    for i in (1..=n).rev() {
        r_after[i] = r_after[i + 1] + u64::from(lr[i] == 'R');
    }
    let mut ans: u64 = 1;
    for j in 1..=n {
        let ways = if lr[j] == '?' {
            l_before[j] + r_after[j + 1]
        } else {
            // A fixed position has exactly one way, as in the original.
            1
        };
        ans = ans * ways % MOD;
    }
    ans
}

/// Reads `n`, `k` and then `k` pairs `(c, p)` of whitespace-separated
/// tokens from stdin and prints `solve(n, cp)`. Input format is identical
/// to the original ProconReader-based version, but uses only std.
fn main() {
    let mut input = String::new();
    std::io::Read::read_to_string(&mut std::io::stdin(), &mut input).unwrap();
    let mut it = input.split_whitespace();
    let n: usize = it.next().unwrap().parse().unwrap();
    let k: usize = it.next().unwrap().parse().unwrap();
    let cp: Vec<(char, usize)> = (0..k)
        .map(|_| {
            let c: char = it.next().unwrap().chars().next().unwrap();
            let p: usize = it.next().unwrap().parse().unwrap();
            (c, p)
        })
        .collect();
    println!("{}", solve(n, &cp));
}
use std::time::SystemTime;

use serde::{Deserialize, Serialize};

use crate::land::MeshData;

/// Uniquely identifies a single unidirectional stream of data within a single network connection
#[derive(Debug, Clone, Copy, Eq, Hash, PartialEq)]
pub enum StreamType {
    // Stream carrying chat messages.
    TextChat,
    // Stream carrying Ping/Pong keep-alive packets.
    PingPong,
    // Stream carrying world tile requests and responses.
    WorldTileData
}

/// Enum of all packets in the network protocol
#[derive(Serialize, Deserialize, Debug, Clone)]
pub enum Packet {
    AuthRequest(AuthRequest),
    AuthResponse(AuthResponse),
    TextChat(TextChat),
    Ping(Ping),
    Pong(Pong),
    WorldTileDataRequest(WorldTileDataRequest),
    WorldTileData(WorldTileData)
}

/// Ping packet, expects a returned "Pong" response
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Ping {
    // Milliseconds since the Unix epoch at send time (see Default below).
    pub timestamp: u64,
}

impl Default for Ping {
    fn default() -> Self {
        Ping {
            // Stamp with the current wall-clock time in Unix-epoch millis;
            // panics only if the system clock is set before 1970.
            timestamp: SystemTime::now()
                .duration_since(SystemTime::UNIX_EPOCH)
                .expect("System time is before unix epoch")
                .as_millis() as u64
        }
    }
}

/// Pong packet, contains the timestamp of the Ping packet it is responding to
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Pong {
    pub timestamp: u64,
}

/// Authentication information sent from the client to the server
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct AuthRequest {
    pub username: String,
    pub password: String,
}

/// Server's reply to an `AuthRequest`.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub enum AuthResponse {
    Ok,
    IncorrectUsername,
    IncorrectPassword,
}

/// A text chat message
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct TextChat {
    // Sequence number of the message within the chat stream.
    pub index: u64,
    pub message: String,
}

/// World tile data request packet, sent by the client to the server
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct WorldTileDataRequest {
    // Tile coordinates and requested level of detail.
    pub x: u32,
    pub y: u32,
    pub lod: u8,
}

/// World tile data packet, requested by the client
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct WorldTileData {
    pub mesh_data: MeshData
}
use std::result::Result;

use quicli::prelude::*;
use clap_port_flag::Port;
use futures::prelude::*;
use hyper::{self, service::service_fn, Body, Response, Server, StatusCode};
use mime_guess;
use tokio;

use Site;

/// Serves the in-memory `site` over HTTP on the address provided by `port`,
/// blocking the current thread until the tokio runtime shuts down.
///
/// Every request path (minus the leading `/`) is looked up in the site map:
/// hits are returned as 200 with a guessed content type, misses as 404.
pub fn serve(site: Site, port: &Port) -> Result<(), Error> {
    // Leak the site to get a &'static reference: hyper's service closures
    // must be 'static, and the server runs for the process lifetime anyway.
    let site = Box::new(site);
    let site: &'static Site = &*Box::leak(site);

    let listener = port.bind()?;
    let handle = tokio::reactor::Handle::current();
    let listener = tokio::net::TcpListener::from_std(listener, &handle)?;
    let addr = listener.local_addr()?;

    let service = move || {
        service_fn(move |req| {
            // Strip the leading '/' so paths match the site's keys.
            let path = &req.uri().path()[1..];
            let page = site.get(path);
            if let Some(page) = page {
                trace!("[200] {} {}", req.method(), req.uri());
                // NOTE(review): Content-Encoding: gzip is set unconditionally,
                // which assumes `site.get` returns pre-gzipped bytes — confirm.
                Response::builder()
                    .status(StatusCode::OK)
                    .header(hyper::header::CONTENT_ENCODING, "gzip")
                    .header(hyper::header::CONTENT_DISPOSITION, "inline")
                    .header(
                        hyper::header::CONTENT_TYPE,
                        // Guess the MIME type from the extension, defaulting
                        // to HTML for extension-less paths.
                        mime_guess::guess_mime_type_opt(path)
                            .map(|m| m.to_string())
                            .unwrap_or_else(|| "text/html".to_string()),
                    )
                    .body(Body::from(page))
            } else {
                debug!("[404] {} {}", req.method(), req.uri());
                Response::builder().status(StatusCode::NOT_FOUND).body(Body::from("Not found"))
            }
        })
    };

    // Errors are logged rather than propagated: the server future must
    // resolve to () for tokio::run.
    let server = Server::builder(listener.incoming())
        .serve(service)
        .map_err(|e| eprintln!("server error: {}", e));

    println!("Server listening on http://{}", addr);
    // Blocks until the runtime is shut down.
    tokio::run(server);
    Ok(())
}
extern crate cmake; use cmake::Config; fn main() { let dst = Config::new("external/CascLib") .define("CASC_BUILD_STATIC_LIB", "ON") .profile("Release") .build(); println!("cargo:rustc-link-search=native={}/lib", dst.display()); println!("cargo:rustc-link-lib=static=casc"); println!("cargo:rustc-link-lib=dylib=stdc++"); // stdc++ println!("cargo:rustc-link-lib=dylib=z"); // zlib }
#![cfg_attr(not(feature = "std"), no_std)] pub use self::meeting::Meeting; use ink_lang as ink; /** 活动合约 1. 由活动模板合约创建,每个模板匹配一个活动合约 2. 每个活动会独立部署一个合约(实例); 3. 所有合约的操作都是通过活动合约实现; */ #[ink::contract] pub mod meeting { use ink_prelude::vec::Vec; use ink_storage::{ collections::HashMap as StorageMap, traits::{PackedLayout, SpreadLayout}, }; // 检票历史记录 #[derive( Debug, PartialEq, Eq, Clone, scale::Encode, scale::Decode, SpreadLayout, PackedLayout, )] #[cfg_attr( feature = "std", derive(scale_info::TypeInfo, ink_storage::traits::StorageLayout) )] struct CheckRecord{ inspectors: AccountId, // 检票人 timestamp: u128, // 检票时间戳 block: u128, // 检票记录区块 } impl Default for CheckRecord { fn default() -> CheckRecord { CheckRecord { inspector: Default::default(), timestamp: Default::default(), block: Default::default(), } } } #[ink(storage)] pub struct Meeting { // 这个是关于活动控制部分,不属于活动跟本身的信息 controller: AccountId, // 主合约地址 template: AccountId, // 主合约地址 owner: AccountId, // 活动管理员 // 活动基础信息 // 这部分信息的修改,通过主合约来修改 // name: Vec<u8>, // 活动名称 // desc: Vec<u8>, // 活动描述 // uri: Vec<u8>, // 活动网址 // poster: Vec<u8>, // 活动海报地址 // start_time: u64, // 活动开始时间 // end_time: u64, // 活动结束时间 // start_sale_time: u64, // 开始售卖时间 // end_sale_time: u64, // 开始售卖时间 // status: MeetingStatus, // 会议状态 // 活动配置参数 price: Balance, // 收费方式=Uniform 时候生效 max_tickets: u64, // 总共可以出手多少车票 inspectors: StorageMap<AccountId, bool>, // 检票员 // 用户参与后会产生的数据 tickets: StorageMap<(u128,u128),(u8,u8,u8)>, // 已经售出门票,由元组组成key,元组元素为 分区序号,排号,座号,值是门票NFT(包括集合ID和NFT ID) check_records: StorageMap<(u128, u128), Vec<CheckRecord> >, // 检票记录: } impl Meeting { #[ink(constructor)] pub fn new( controller: AccountId, template: AccountId) -> Self { let caller = Self::env().caller(); Self { controller: controller, template: template, owner: caller, price: 0, max_tickets: 0, inspectors: ink_storage::collections::StorageMap::new(), tickets: ink_storage::collections::StorageMap::new(), check_records: ink_storage::collections::StorageMap::new(), } } /** 转移 owner 
1. 必须 owner 才可以调用 */ #[ink(message)] pub fn transfer_owner(&mut self, new_owner:AccountId){ let caller = Self::env().caller(); if caller == self.owner { self.owner = new_owner } } #[ink(message)] pub fn get_owner(&self)-> AccountId{ self.owner } #[ink(message)] pub fn get_self(&self)-> AccountId{ Self::env().account_id() } /** 更新活动信息,包括:活动基础信息、活动配置参数 1. 只有 owner 可以调用修改,如果活动处于 active 状态 或者 活动已经有售卖门票,暂时不允许修改; 2. 如果涉及到基础信息部分的更新,需要调用主合约更新; 3. 修改成功后,触发事件 meeting_modified */ #[ink(message)] pub fn modify_meeting(&mut self, max_tickets: u64, price: Balance ){ } /** 购买门票 2. 需要确认转过来的钱是否大于等于票价(如果大于需要退回一部分) 3. 调用主合约创建 NFT 门票,需要支付服务费:服务费按票价比例(ratio),但是不得低于 min_ticket_price 4. 更新 tickets 5. 返回 */ #[ink(message)] pub fn buy_ticket() -> (u128,u128) { (0,0) } /** 添加验票员 1. 只能由 owner 调用 2. 需要检查是否已经存在了 3. 触发时间 inspector_added */ #[ink(message)] pub fn add_inspector(&mut self, inspector: AccountId){ } /** 移除验票员 1. 只能由 owner 调用 2. 需要检查是否存在 3. 触发事件 inspector_removed */ #[ink(message)] pub fn remove_inspector(&mut self, inspector: AccountId){ } /** 检票 1. 只能由 owner 或者 inspector 调用 2. 检查 NFT门票 是否有效; 3. 检查时间戳和当前区块时间戳间隔是否在 N 分钟以内 4. 获取 NFT门票当前的拥有账号 5. 检测 NFT门票的class_ID、nft_id, 和 timestamp 的 hash值与 提供的hash是否匹配 6. 添加检票记录 check_records ,返回 true 7. 触发事件 ticket_checked */ #[ink(message)] pub fn check_ticket(&mut self, ticket: (u128, u128), timestamp: u128, hash: Vec<u8> ) -> bool { } /** 返回所有的门票检票记录 */ #[ink(message)] pub fn get_check_records(&self, ticket: (u128, u128) ){ } /** 提取门票收入 1. 只能由 owner 调用 2. */ #[ink(message)] pub fn withdraw(&mut self, to:AccountId, amount:Balance){ } } }
//! # Simple module to simulate a database layer. //! //! The module has a single method currently to provide a Vec<> of all cards. use rustc_serialize::json; // the include_str! macro embeds the file in the executable as a static string const CARDS_JSON: &'static str = include_str!("../res/cards.json"); #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable)] pub struct Card { pub name: String, pub apr: u16, pub transfer_duration: u16, pub purchase_duration: u16, pub credit_limit: u32, pub filters: Filters, } #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable)] pub struct Filters { pub employment: Option<String>, pub income_min: Option<u32>, } /// Returns a Vec<> of all cards. /// Method will panic if decoding the embedded cards.json file fails. pub fn all_cards() -> Vec<Card> { json::decode(&CARDS_JSON).expect("decoding cards.json") }
// Accessor types for the Ethernet MTL Tx-queue operating-mode register
// (MTLTxQOMR), in svd2rust-generated style: `R`/`W` wrap the raw 32-bit
// value, `*_R` types read fields, `*_W` proxies write them.
#[doc = "Reader of register MTLTxQOMR"]
pub type R = crate::R<u32, super::MTLTXQOMR>;
#[doc = "Writer for register MTLTxQOMR"]
pub type W = crate::W<u32, super::MTLTXQOMR>;
#[doc = "Register MTLTxQOMR `reset()`'s with value 0x0007_0008"]
impl crate::ResetValue for super::MTLTXQOMR {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0x0007_0008
    }
}
#[doc = "Reader of field `FTQ`"]
pub type FTQ_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `FTQ`"]
pub struct FTQ_W<'a> {
    w: &'a mut W,
}
impl<'a> FTQ_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 0: clear the field, then OR in the new value.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
#[doc = "Reader of field `TSF`"]
pub type TSF_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TSF`"]
pub struct TSF_W<'a> {
    w: &'a mut W,
}
impl<'a> TSF_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 1.
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
#[doc = "Reader of field `TXQEN`"]
pub type TXQEN_R = crate::R<u8, u8>;
#[doc = "Reader of field `TTC`"]
pub type TTC_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `TTC`"]
pub struct TTC_W<'a> {
    w: &'a mut W,
}
impl<'a> TTC_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 3-bit field at bits 4:6.
        self.w.bits = (self.w.bits & !(0x07 << 4)) | (((value as u32) & 0x07) << 4);
        self.w
    }
}
#[doc = "Reader of field `TQS`"]
pub type TQS_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `TQS`"]
pub struct TQS_W<'a> {
    w: &'a mut W,
}
impl<'a> TQS_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 3-bit field at bits 16:18.
        self.w.bits = (self.w.bits & !(0x07 << 16)) | (((value as u32) & 0x07) << 16);
        self.w
    }
}
impl R {
    #[doc = "Bit 0 - Flush Transmit Queue"]
    #[inline(always)]
    pub fn ftq(&self) -> FTQ_R {
        FTQ_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - Transmit Store and Forward"]
    #[inline(always)]
    pub fn tsf(&self) -> TSF_R {
        TSF_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bits 2:3 - Transmit Queue Enable"]
    #[inline(always)]
    pub fn txqen(&self) -> TXQEN_R {
        TXQEN_R::new(((self.bits >> 2) & 0x03) as u8)
    }
    #[doc = "Bits 4:6 - Transmit Threshold Control"]
    #[inline(always)]
    pub fn ttc(&self) -> TTC_R {
        TTC_R::new(((self.bits >> 4) & 0x07) as u8)
    }
    #[doc = "Bits 16:18 - Transmit Queue Size"]
    #[inline(always)]
    pub fn tqs(&self) -> TQS_R {
        TQS_R::new(((self.bits >> 16) & 0x07) as u8)
    }
}
// NOTE(review): TXQEN (bits 2:3) has a reader but no write proxy here —
// presumably read-only in this SVD; confirm against the device SVD.
impl W {
    #[doc = "Bit 0 - Flush Transmit Queue"]
    #[inline(always)]
    pub fn ftq(&mut self) -> FTQ_W {
        FTQ_W { w: self }
    }
    #[doc = "Bit 1 - Transmit Store and Forward"]
    #[inline(always)]
    pub fn tsf(&mut self) -> TSF_W {
        TSF_W { w: self }
    }
    #[doc = "Bits 4:6 - Transmit Threshold Control"]
    #[inline(always)]
    pub fn ttc(&mut self) -> TTC_W {
        TTC_W { w: self }
    }
    #[doc = "Bits 16:18 - Transmit Queue Size"]
    #[inline(always)]
    pub fn tqs(&mut self) -> TQS_W {
        TQS_W { w: self }
    }
}
use proconio::input; #[allow(unused_imports)] use proconio::marker::*; #[allow(unused_imports)] use std::cmp::*; #[allow(unused_imports)] use std::collections::*; #[allow(unused_imports)] use std::f64::consts::*; #[allow(unused)] const INF: usize = std::usize::MAX / 4; #[allow(unused)] const M: usize = 998244353; fn main() { input! { n: usize, s: usize, a: [usize; n], } let mut dp = vec![0usize; s + 1]; dp[0] = 1; for i in 0..n { let ai = a[i]; for j in (0..=s).rev() { if j >= ai { dp[j] = (dp[j - ai] + 2 * dp[j]) % M; } else { dp[j] = (2 * dp[j]) % M; } } } // for i in 1..=n { // eprintln!("{:?}", dp[i]); // } println!("{}", dp[s]); }
use clippy_utils::consts::constant_simple;
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::{indent_of, reindent_multiline, snippet_opt};
use clippy_utils::ty::is_type_diagnostic_item;
use clippy_utils::usage::contains_return_break_continue_macro;
use clippy_utils::{in_constant, is_lang_ctor, path_to_local_id, sugg};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::LangItem::{OptionNone, OptionSome, ResultErr, ResultOk};
use rustc_hir::{Arm, Expr, ExprKind, PatKind};
use rustc_lint::LintContext;
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::lint::in_external_macro;
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::sym;

declare_clippy_lint! {
    /// ### What it does
    /// Finds patterns that reimplement `Option::unwrap_or` or `Result::unwrap_or`.
    ///
    /// ### Why is this bad?
    /// Concise code helps focusing on behavior instead of boilerplate.
    ///
    /// ### Example
    /// ```rust
    /// let foo: Option<i32> = None;
    /// match foo {
    ///     Some(v) => v,
    ///     None => 1,
    /// };
    /// ```
    ///
    /// Use instead:
    /// ```rust
    /// let foo: Option<i32> = None;
    /// foo.unwrap_or(1);
    /// ```
    #[clippy::version = "1.49.0"]
    pub MANUAL_UNWRAP_OR,
    complexity,
    "finds patterns that can be encoded more concisely with `Option::unwrap_or` or `Result::unwrap_or`"
}

declare_lint_pass!(ManualUnwrapOr => [MANUAL_UNWRAP_OR]);

impl LateLintPass<'_> for ManualUnwrapOr {
    // NOTE(review): `'tcx` is not declared on this impl; this relies on the
    // crate enabling in-band lifetimes — confirm at the crate root.
    fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) {
        // Skip expansions of external macros and const contexts, where the
        // suggestion would be wrong or unhelpful.
        if in_external_macro(cx.sess(), expr.span) || in_constant(cx, expr.hir_id) {
            return;
        }
        lint_manual_unwrap_or(cx, expr);
    }
}

/// Emits the MANUAL_UNWRAP_OR suggestion when `expr` is a two-arm match
/// equivalent to `.unwrap_or(<const>)`.
fn lint_manual_unwrap_or<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) {
    // Returns the `None` / `Err(_)` arm iff the two-arm, guard-free match is
    // of the shape `Some(x) => x, None => ...` (or the Result equivalent).
    fn applicable_or_arm<'a>(cx: &LateContext<'_>, arms: &'a [Arm<'a>]) -> Option<&'a Arm<'a>> {
        if_chain! {
            if arms.len() == 2;
            if arms.iter().all(|arm| arm.guard.is_none());
            // Find the fallback arm: `None` or `Err(_)`.
            if let Some((idx, or_arm)) = arms.iter().enumerate().find(|(_, arm)| {
                match arm.pat.kind {
                    PatKind::Path(ref qpath) => is_lang_ctor(cx, qpath, OptionNone),
                    PatKind::TupleStruct(ref qpath, [pat], _) =>
                        matches!(pat.kind, PatKind::Wild) && is_lang_ctor(cx, qpath, ResultErr),
                    _ => false,
                }
            });
            // The other arm must be `Some(binding)`/`Ok(binding)` whose body
            // is exactly that binding — i.e. an identity unwrap.
            let unwrap_arm = &arms[1 - idx];
            if let PatKind::TupleStruct(ref qpath, [unwrap_pat], _) = unwrap_arm.pat.kind;
            if is_lang_ctor(cx, qpath, OptionSome) || is_lang_ctor(cx, qpath, ResultOk);
            if let PatKind::Binding(_, binding_hir_id, ..) = unwrap_pat.kind;
            if path_to_local_id(unwrap_arm.body, binding_hir_id);
            // No implicit adjustments may apply (they would change types),
            // and the fallback must not alter control flow.
            if cx.typeck_results().expr_adjustments(unwrap_arm.body).is_empty();
            if !contains_return_break_continue_macro(or_arm.body);
            then {
                Some(or_arm)
            } else {
                None
            }
        }
    }

    if_chain! {
        if let ExprKind::Match(scrutinee, match_arms, _) = expr.kind;
        let ty = cx.typeck_results().expr_ty(scrutinee);
        if let Some(ty_name) = if is_type_diagnostic_item(cx, ty, sym::Option) {
            Some("Option")
        } else if is_type_diagnostic_item(cx, ty, sym::Result) {
            Some("Result")
        } else {
            None
        };
        if let Some(or_arm) = applicable_or_arm(cx, match_arms);
        if let Some(or_body_snippet) = snippet_opt(cx, or_arm.body.span);
        if let Some(indent) = indent_of(cx, expr.span);
        // Only lint when the fallback is a simple constant, matching
        // `unwrap_or`'s eagerly-evaluated argument.
        if constant_simple(cx, cx.typeck_results(), or_arm.body).is_some();
        then {
            let reindented_or_body =
                reindent_multiline(or_body_snippet.into(), true, Some(indent));

            let suggestion = if scrutinee.span.from_expansion() {
                // we don't want parentheses around macro, e.g. `(some_macro!()).unwrap_or(0)`
                sugg::Sugg::hir_with_macro_callsite(cx, scrutinee, "..")
            } else {
                sugg::Sugg::hir(cx, scrutinee, "..").maybe_par()
            };

            span_lint_and_sugg(
                cx,
                MANUAL_UNWRAP_OR,
                expr.span,
                &format!("this pattern reimplements `{}::unwrap_or`", ty_name),
                "replace with",
                format!(
                    "{}.unwrap_or({})",
                    suggestion,
                    reindented_or_body,
                ),
                Applicability::MachineApplicable,
            );
        }
    }
}
use super::CGFloat; /// A point in a two-dimensional coordinate system. /// /// See [documentation](https://developer.apple.com/documentation/coregraphics/cgpoint). #[repr(C)] #[derive(Copy, Clone, Debug, Default, PartialOrd, PartialEq)] pub struct CGPoint { /// The x-coordinate of the point. pub x: CGFloat, /// The y-coordinate of the point. pub y: CGFloat, } impl From<(CGFloat, CGFloat)> for CGPoint { #[inline] fn from((x, y): (CGFloat, CGFloat)) -> Self { Self::new(x, y) } } impl CGPoint { /// A point at location `(0, 0)`. pub const ZERO: Self = Self::new(0.0, 0.0); /// Returns a point with the specified coordinates. /// /// This is equivalent to /// [`CGPointMake`](https://developer.apple.com/documentation/coregraphics/1455746-cgpointmake). #[inline] pub const fn new(x: CGFloat, y: CGFloat) -> Self { Self { x, y } } /// Returns a point with the given components losslessly converted to /// [`CGFloat`](type.CGFloat.html)s. #[inline] pub const fn from_i16s(x: i16, y: i16) -> Self { Self::new(x as _, y as _) } }
use super::evaluate::Evaluate;
use super::{
    Callable, Category, ComplexType, DataType, PineClass, PineFrom, PineRef, PineStaticType,
    PineType, Runnable, RuntimeErr, SecondType,
};
use crate::runtime::Ctx;

/// A Pine value that bundles a class-like object with factories producing
/// an `Evaluate` wrapper and a `Callable` wrapper for it.
#[derive(Debug)]
pub struct CallObjEval<'a> {
    obj: Box<dyn PineClass<'a> + 'a>,
    // Factory producing the evaluate wrapper for this object.
    create_val: fn() -> Evaluate<'a>,
    // Factory producing the callable wrapper for this object.
    create_func: fn() -> Callable<'a>,
}

impl<'a> PartialEq for CallObjEval<'a> {
    fn eq(&self, other: &CallObjEval<'a>) -> bool {
        // Function pointers compare by address; the object uses its own
        // `PartialEq` impl.
        self.create_val == other.create_val
            && PartialEq::eq(&*self.obj, &*other.obj)
            && self.create_func == other.create_func
    }
}

impl<'a> PineStaticType for CallObjEval<'a> {
    fn static_type() -> (DataType, SecondType) {
        (DataType::CallableObjectEvaluate, SecondType::Simple)
    }
}

impl<'a> PineType<'a> for CallObjEval<'a> {
    fn get_type(&self) -> (DataType, SecondType) {
        <Self as PineStaticType>::static_type()
    }

    fn category(&self) -> Category {
        Category::Complex
    }

    fn copy(&self) -> PineRef<'a> {
        // Deep-copies the object; the factory pointers are plain fn pointers
        // and are copied as-is.
        PineRef::new_rc(CallObjEval::new(
            self.obj.copy(),
            self.create_val,
            self.create_func,
        ))
    }
}

impl<'a> PineFrom<'a, CallObjEval<'a>> for CallObjEval<'a> {}

impl<'a> ComplexType for CallObjEval<'a> {}

impl<'a> CallObjEval<'a> {
    /// Creates a new instance from the object and its two factories.
    pub fn new(
        obj: Box<dyn PineClass<'a> + 'a>,
        create_val: fn() -> Evaluate<'a>,
        create_func: fn() -> Callable<'a>,
    ) -> CallObjEval<'a> {
        CallObjEval {
            create_val,
            obj,
            create_func,
        }
    }

    /// Reads property `name` from the underlying object.
    pub fn get(&self, context: &mut dyn Ctx<'a>, name: &str) -> Result<PineRef<'a>, RuntimeErr> {
        self.obj.get(context, name)
    }

    /// Writes property `name` on the underlying object.
    pub fn set(&self, name: &str, property: PineRef<'a>) -> Result<(), RuntimeErr> {
        self.obj.set(name, property)
    }

    /// Instantiates a fresh `Evaluate` via the stored factory.
    pub fn create_eval(&self) -> Evaluate<'a> {
        (self.create_val)()
    }

    /// Instantiates a fresh `Callable` via the stored factory.
    pub fn create(&self) -> Callable<'a> {
        (self.create_func)()
    }
}

impl<'a> Clone for CallObjEval<'a> {
    fn clone(&self) -> CallObjEval<'a> {
        // `obj` is cloned through the trait's own `copy`; the factory fn
        // pointers are Copy.
        CallObjEval {
            obj: self.obj.copy(),
            create_val: self.create_val,
            create_func: self.create_func,
        }
    }
}

#[cfg(test)]
mod tests {
    use super::super::{downcast_pf, Callable, EvaluateVal, Int, Object, RefData};
    use super::*;
    use crate::ast::stat_expr_types::VarIndex;
    use crate::ast::syntax_type::FunctionType;
    use crate::ast::syntax_type::{SimpleSyntaxType, SyntaxType};
    use crate::runtime::context::{Context, ContextType, Ctx, VarOperate};
    use crate::types::{RuntimeErr, Series};
    use std::mem;

    // Evaluate stub that resolves and returns the current `close` variable.
    #[derive(Debug, Clone, PartialEq)]
    struct MyVal();

    impl<'a> EvaluateVal<'a> for MyVal {
        fn custom_name(&self) -> &str {
            "test"
        }

        fn call(&mut self, ctx: &mut dyn Ctx<'a>) -> Result<PineRef<'a>, RuntimeErr> {
            let close_index = VarIndex::new(*ctx.get_varname_index("close").unwrap(), 0);
            match ctx.get_var(close_index) {
                Some(close_val) => Ok(close_val.copy()),
                _ => Err(RuntimeErr::VarNotFound),
            }
        }

        fn copy(&self) -> Box<dyn EvaluateVal<'a>> {
            Box::new(self.clone())
        }
    }

    // PineClass stub exposing a few constant properties; writes are rejected.
    struct A;

    impl<'a> PineClass<'a> for A {
        fn custom_type(&self) -> &str {
            "Custom A"
        }

        fn get(&self, _ctx: &mut dyn Ctx<'a>, name: &str) -> Result<PineRef<'a>, RuntimeErr> {
            match name {
                "int1" => Ok(PineRef::new_box(Some(1i64))),
                "int2" => Ok(PineRef::new_box(Some(2i64))),
                "float1" => Ok(PineRef::new_box(Some(1f64))),
                "float2" => Ok(PineRef::new_box(Some(2f64))),
                _ => Err(RuntimeErr::NotSupportOperator),
            }
        }

        fn set(&self, _n: &str, _p: PineRef<'a>) -> Result<(), RuntimeErr> {
            Err(RuntimeErr::NotSupportOperator)
        }

        fn copy(&self) -> Box<dyn PineClass<'a> + 'a> {
            Box::new(A)
        }
    }

    // Identity function used as the Callable implementation: returns its
    // first argument.
    fn test_func<'a>(
        _context: &mut dyn Ctx<'a>,
        mut args: Vec<Option<PineRef<'a>>>,
        _func_type: FunctionType<'a>,
    ) -> Result<PineRef<'a>, RuntimeErr> {
        Ok(mem::replace(&mut args[0], None).unwrap())
    }

    const INT_TYPE: SyntaxType = SyntaxType::Simple(SimpleSyntaxType::Int);

    #[test]
    fn object_test() {
        let mut obj = CallObjEval::new(
            Box::new(A),
            || Evaluate::new(Box::new(MyVal())),
            || Callable::new(Some(test_func), None),
        );

        // Context with a single "close" series variable at index 0.
        let mut context = Context::new(None, ContextType::Normal);
        context.init(2, 0, 0);
        context.set_varname_index("close", 0);
        context.create_var(0, PineRef::new(Series::from(Some(1f64))));

        // Static type is reported correctly.
        assert_eq!(
            obj.get_type(),
            (DataType::CallableObjectEvaluate, SecondType::Simple)
        );
        // Property reads delegate to the wrapped object.
        assert_eq!(
            downcast_pf::<Int>(obj.get(&mut context, "int1").unwrap()).unwrap(),
            RefData::new_box(Some(1))
        );
        // The evaluate factory yields a value that resolves `close`.
        assert_eq!(
            obj.create_eval().call(&mut context),
            Ok(PineRef::new_rc(Series::from(Some(1f64))))
        );

        // The callable factory yields the identity function.
        let mut callable = obj.create();
        assert_eq!(
            downcast_pf::<Int>(
                callable
                    .call(
                        &mut context,
                        vec![PineRef::new_box(Some(1))],
                        vec![],
                        FunctionType::new((vec![("arg1", INT_TYPE)], INT_TYPE))
                    )
                    .unwrap()
            )
            .unwrap(),
            RefData::new_box(Some(1))
        );
    }
}
// Read-only accessor for the PLL status register (PLLSTAT), svd2rust style:
// `R` wraps the raw value, `LOCK_R` reads the single lock flag.
#[doc = "Reader of register PLLSTAT"]
pub type R = crate::R<u32, super::PLLSTAT>;
#[doc = "Reader of field `LOCK`"]
pub type LOCK_R = crate::R<bool, bool>;
impl R {
    #[doc = "Bit 0 - PLL Lock"]
    #[inline(always)]
    pub fn lock(&self) -> LOCK_R {
        // Bit 0 set means the PLL has achieved lock.
        LOCK_R::new((self.bits & 0x01) != 0)
    }
}
mod common;

use common::exe;
use duct::cmd;
use std::io::Write;

/// Pipes `stdin` into `<exe> bc` and asserts the exact stdout.
fn simple_test(stdin: &str, expected_stdout: &str) {
    let stdout = cmd!(exe(), "bc").stdin_bytes(stdin).read().unwrap();
    assert_eq!(stdout, expected_stdout);
}

/// Writes `file` to a temporary file, runs `<exe> bc <file>` with `stdin`
/// piped in, and asserts the exact stdout.
fn file_test(file: &str, stdin: &str, expected_stdout: &str) {
    let mut input_file = tempfile::NamedTempFile::new().unwrap();
    writeln!(input_file, "{}", file).unwrap();
    let input_file_path = input_file
        .path()
        .canonicalize()
        .unwrap()
        .to_str()
        .unwrap()
        .to_string();
    let stdout = cmd!(exe(), "bc", input_file_path)
        .stdin_bytes(stdin)
        .read()
        .unwrap();
    assert_eq!(stdout, expected_stdout);
}

#[test]
fn comment() {
    simple_test("1 /* comment */ + 2", "3")
}

#[test]
fn slash_star_slash() {
    simple_test("1 /*/ + 2 */ + 3", "4")
}

#[test]
fn comment_with_double_quote() {
    simple_test("1 /* \" */ + 2", "3")
}

#[test]
fn weird_string_comment() {
    simple_test("\"string/*\";9", "string/*9")
}

#[test]
fn unterminated_hash_comment() {
    simple_test("#foo", "")
}

#[test]
fn backslash() {
    simple_test("1 \\\n + 2", "3")
}

#[test]
fn string() {
    simple_test("\"STR\n\"", "STR")
}

#[test]
fn read_four() {
    file_test("read();halt", "4", "4")
}

#[test]
fn read_squared() {
    file_test("read()^2;halt", "4", "16")
}

#[test]
fn read_multiply_read() {
    file_test("read()*read();halt", "4\n5", "20")
}

#[test]
fn if_0_else() {
    simple_test("if (0) 1 else 2; 9", "2\n9")
}

#[test]
fn if_1_else() {
    simple_test("if (1) 1 else 2; 9", "1\n9")
}

#[test]
fn if_1_if_1_else_else() {
    simple_test("if (1) if (1) 1 else 2 else 3; 9", "1\n9")
}

#[test]
fn if_0_else_if_1() {
    simple_test("if (0) 1 else if (1) 2; 9", "2\n9")
}

#[test]
fn for_double_semicolon() {
    simple_test("i=2; for (;;) { 2; if(--i==0) break; 3; }; 9", "2\n3\n2\n9")
}

#[test]
fn for_semicolon_cond_semicolon() {
    simple_test("i=0; for(;i<3;)++i; 9", "1\n2\n3\n9")
}

#[test]
fn for_no_init() {
    simple_test("i=1; for(;i<4;i++)i; 9", "1\n2\n3\n9")
}

#[test]
fn for_full() {
    simple_test("for(i=1;i<4;i++)i; 9", "1\n2\n3\n9")
}

#[test]
fn for_double_semicolon2() {
    simple_test("for (;;) {2;break}; 9", "2\n9")
}

#[test]
fn define_return() {
    simple_test("define w() {return}\nw();9", "0\n9")
}

#[test]
fn define_auto() {
    simple_test("define w() { auto z; return 8; }; w(); 9", "8\n9")
}

#[test]
fn define_auto_array_same_name() {
    simple_test("define w(x) { auto x[]; return x; }; w(8); 9", "8\n9")
}

#[test]
fn define_with_body_on_next_line() {
    simple_test("define w()\n{ auto z; return 8; }\nw()\n9", "8\n9")
}

#[test]
fn void_function() {
    simple_test("define void w() {print \"void\"}\nw()\n9", "void9")
}

// Extra POSIX compat - GNU bc does not allow this
#[test]
fn function_named_void() {
    simple_test("define void() {print \"void\"}\nvoid()\n9", "void0\n9")
}

// Extra POSIX compat - GNU bc does not allow this
#[test]
fn variable_named_void() {
    simple_test("void=6\nvoid\n9", "6\n9")
}

#[test]
fn if_cond_newline() {
    simple_test("if(0)\n3\n9", "9")
}

#[test]
fn if_cond_stmt_else_newline() {
    simple_test("if(0)3 else\n4\n9", "4\n9")
}

#[test]
fn while_cond_newline() {
    simple_test("i=9;while(--i)\ni\n9", "8\n7\n6\n5\n4\n3\n2\n1\n9")
}

#[test]
fn ifz_does_not_match_if_keyword() {
    simple_test("ifz=1;ifz\n++ifz;ifz++\nifz", "1\n2\n2\n3")
}

// had parse error on "f()-N"
#[test]
fn long_print() {
    let stdout = cmd!(exe(), "bc", "-l")
        .stdin_bytes("e(0)-2")
        .read()
        .unwrap();
    assert_eq!(stdout, "-1.00000000000000000000");
}

#[test]
fn not_a_and_b() {
    simple_test("(!a&&b)", "0")
}

// check that dc code is not messing this up (no NUL printing!)
#[test] fn print_empty_string() { simple_test("print \"\"", "") } #[test] fn print_123() { simple_test("print 1,2,3", "123") } #[test] fn print_1_bracketed() { simple_test("{ print 1 }", "1") } #[test] fn nested_loops_and_breaks() { simple_test( "\ if(1) { 11 while(1) { 21 while(1) { 31 break 32 } 22 break 23 } 12 } else { 88 } 99 ", "\ 11 21 31 22 12 99", ) } #[test] fn continue_in_if() { simple_test( "\ i=2 while(i--) { 11 if(i) { 21 continue 22 } else { 31 continue 32 } 12 } 99 ", "\ 11 21 11 31 99", ) } #[test] fn continue_in_for() { simple_test( "\ for(i=1; i<3; i++) { i if(i==2) continue 77 } 99 ", "\ 1 77 2 99", ) } #[test] fn ibase() { simple_test("a=ZZ;a;ibase=36;a=ZZ;a;ibase=Z;a=ZZ;a", "99\n1295\n1224") } #[test] fn parsing_of_numbers() { let code = "\ for (b = 2; b <= 16; ++b) { if (b == 10) continue obase = 10 print \"ibase = A; ibase = \", b, \"\n\" obase = b for (i = 0; i <= 65536; ++i) { i print \"0.\", i, \"\n\" print \"1.\", i, \"\n\" print i, \".\", i, \"\n\" } } "; let stdout = cmd!(exe(), "bc") .stdin_bytes(code) .stderr_to_stdout() .pipe(cmd!(exe(), "bc").stderr_to_stdout()) .pipe(cmd!(exe(), "md5sum").stderr_to_stdout()) .read() .unwrap(); assert_eq!(stdout, "465d8c01308d0863b6f5669e8a1c69fb -"); } #[test] fn printing_of_numbers() { let code = "\ for (b = 2; b <= 101; ++b) { if (b == 10) continue s = b * b print \"obase = \", b, \"\n\" for (i = 0; i <= s; ++i) { i print \"0.\", i, \"\n\" print \"1.\", i, \"\n\" print i, \".\", i, \"\n\" } 2189432174861923048671023498128347619023487610234689172304.192748960128745108927461089237469018723460 } "; let stdout = cmd!(exe(), "bc") .stdin_bytes(code) .stderr_to_stdout() .pipe(cmd!(exe(), "bc").stderr_to_stdout()) .pipe(cmd!(exe(), "md5sum").stderr_to_stdout()) .read() .unwrap(); assert_eq!(stdout, "d884b35d251ca096410712743aeafb9e -"); }
pub struct Configuration { pub player_count: i8 } pub fn parse_parameters(params : Vec<String>) -> Configuration { if params.len() <= 1 { panic!("error: Cantidad insuficiente de parametros"); } let player_count: i8 = match params[1].parse() { Ok(n) => { n }, Err(_) => { panic!("error: El numero de jugadores invalido"); }, }; // Valido que los jugadores sean pares y >= 4 if player_count < 4 || player_count % 2 == 1 { panic!("error: El numero de jugadores invalido"); } Configuration { player_count: player_count } } #[test] #[should_panic] fn incorrect_parameters_panics_1() { let params = Vec::new(); parse_parameters(params); } #[test] #[should_panic] fn incorrect_parameters_panics_2() { let mut params = Vec::new(); params.push("progName".to_string()); params.push("3".to_string()); parse_parameters(params); } #[test] fn correct_parameter(){ let mut params = Vec::new(); params.push("progName".to_string()); params.push("4".to_string()); assert_eq!(parse_parameters(params).player_count, 4); }
use super::traits::VPath; use globset::{Glob, GlobMatcher, GlobSet, GlobSetBuilder}; #[derive(Clone)] pub enum Globber { Single(GlobMatcher), Set(GlobSet), } impl Globber { pub fn new<S: AsRef<str>>(pattern: S) -> Globber { Globber::Single(Glob::new(pattern.as_ref()).unwrap().compile_matcher()) } pub fn new_set<S: AsRef<str>>(patterns: &[S]) -> Globber { let mut builder = GlobSetBuilder::new(); for p in patterns { let glob = Glob::new(p.as_ref()).unwrap(); builder.add(glob); } let glob = builder.build().unwrap(); Globber::Set(glob) } pub fn is_match<P: VPath>(&self, path: &P) -> bool { let pa = path.to_string().into_owned(); match self { Globber::Set(p) => p.is_match(pa), Globber::Single(p) => p.is_match(pa), } } }
use std::io::Write as _; use criterion::{black_box, Criterion}; fn stream(c: &mut Criterion) { for (name, content) in [ ("demo.vte", &include_bytes!("../tests/demo.vte")[..]), ("rg_help.vte", &include_bytes!("../tests/rg_help.vte")[..]), ("rg_linus.vte", &include_bytes!("../tests/rg_linus.vte")[..]), ( "state_changes", &b"\x1b]2;X\x1b\\ \x1b[0m \x1bP0@\x1b\\"[..], ), ] { let mut group = c.benchmark_group(name); group.bench_function("nop", |b| { b.iter(|| { let buffer = anstream::Buffer::with_capacity(content.len()); let mut stream = buffer; stream.write_all(content).unwrap(); black_box(stream) }) }); group.bench_function("StripStream", |b| { b.iter(|| { let buffer = anstream::Buffer::with_capacity(content.len()); let mut stream = anstream::StripStream::new(buffer); stream.write_all(content).unwrap(); black_box(stream) }) }); #[cfg(all(windows, feature = "wincon"))] group.bench_function("WinconStream", |b| { b.iter(|| { let buffer = anstream::Buffer::with_capacity(content.len()); let mut stream = anstream::WinconStream::new(anstyle_wincon::Console::new(buffer).unwrap()); stream.write_all(content).unwrap(); black_box(stream) }) }); group.bench_function("AutoStream::always_ansi", |b| { b.iter(|| { let buffer = anstream::Buffer::with_capacity(content.len()); let mut stream = anstream::AutoStream::always_ansi(buffer); stream.write_all(content).unwrap(); black_box(stream) }) }); group.bench_function("AutoStream::always", |b| { b.iter(|| { let buffer = anstream::Buffer::with_capacity(content.len()); let mut stream = anstream::AutoStream::always(buffer); stream.write_all(content).unwrap(); black_box(stream) }) }); group.bench_function("AutoStream::never", |b| { b.iter(|| { let buffer = anstream::Buffer::with_capacity(content.len()); let mut stream = anstream::AutoStream::never(buffer); stream.write_all(content).unwrap(); black_box(stream) }) }); } } criterion::criterion_group!(benches, stream); criterion::criterion_main!(benches);
// Test that Cell is considered invariant with respect to its // type. // revisions: base nll // ignore-compare-mode-nll //[nll] compile-flags: -Z borrowck=mir use std::cell::Cell; struct Foo<'a> { x: Cell<Option<&'a isize>>, } fn use_<'short,'long>(c: Foo<'short>, s: &'short isize, l: &'long isize, _where:Option<&'short &'long ()>) { let _: Foo<'long> = c; //[base]~^ ERROR E0623 //[nll]~^^ ERROR lifetime may not live long enough } fn main() { }
/* This module provides an NFA compiler using Thompson's construction algorithm. The compiler takes a regex-syntax::Hir as input and emits an NFA graph as output. The NFA graph is structured in a way that permits it to be executed by a virtual machine and also used to efficiently build a DFA. The compiler deals with a slightly expanded set of NFA states that notably includes an empty node that has exactly one epsilon transition to the next state. In other words, it's a "goto" instruction if one views Thompson's NFA as a set of bytecode instructions. These goto instructions are removed in a subsequent phase before returning the NFA to the caller. The purpose of these empty nodes is that they make the construction algorithm substantially simpler to implement. We remove them before returning to the caller because they can represent substantial overhead when traversing the NFA graph (either while searching using the NFA directly or while building a DFA). In the future, it would be nice to provide a Glushkov compiler as well, as it would work well as a bit-parallel NFA for smaller regexes. But the Thompson construction is one I'm more familiar with and seems more straight-forward to deal with when it comes to large Unicode character classes. Internally, the compiler uses interior mutability to improve composition in the face of the borrow checker. In particular, we'd really like to be able to write things like this: self.c_concat(exprs.iter().map(|e| self.c(e))) Which elegantly uses iterators to build up a sequence of compiled regex sub-expressions and then hands it off to the concatenating compiler routine. Without interior mutability, the borrow checker won't let us borrow `self` mutably both inside and outside the closure at the same time. 
*/ use core::{ borrow::Borrow, cell::{Cell, RefCell}, mem, }; use alloc::{sync::Arc, vec, vec::Vec}; use regex_syntax::{ hir::{self, Anchor, Class, Hir, HirKind, Literal, WordBoundary}, utf8::{Utf8Range, Utf8Sequences}, ParserBuilder, }; use crate::{ nfa::thompson::{ error::Error, map::{Utf8BoundedMap, Utf8SuffixKey, Utf8SuffixMap}, range_trie::RangeTrie, Look, SparseTransitions, State, Transition, NFA, }, util::{ alphabet::ByteClassSet, id::{IteratorIDExt, PatternID, StateID}, }, }; /// The configuration used for compiling a Thompson NFA from a regex pattern. #[derive(Clone, Copy, Debug, Default)] pub struct Config { reverse: Option<bool>, utf8: Option<bool>, nfa_size_limit: Option<Option<usize>>, shrink: Option<bool>, captures: Option<bool>, #[cfg(test)] unanchored_prefix: Option<bool>, } impl Config { /// Return a new default Thompson NFA compiler configuration. pub fn new() -> Config { Config::default() } /// Reverse the NFA. /// /// A NFA reversal is performed by reversing all of the concatenated /// sub-expressions in the original pattern, recursively. The resulting /// NFA can be used to match the pattern starting from the end of a string /// instead of the beginning of a string. /// /// Reversing the NFA is useful for building a reverse DFA, which is most /// useful for finding the start of a match after its ending position has /// been found. /// /// This is disabled by default. pub fn reverse(mut self, yes: bool) -> Config { self.reverse = Some(yes); self } /// Whether to enable UTF-8 mode or not. /// /// When UTF-8 mode is enabled (which is the default), unanchored searches /// will only match through valid UTF-8. If invalid UTF-8 is seen, then /// an unanchored search will stop at that point. This is equivalent to /// putting a `(?s:.)*?` at the start of the regex. /// /// When UTF-8 mode is disabled, then unanchored searches will match /// through any arbitrary byte. This is equivalent to putting a /// `(?s-u:.)*?` at the start of the regex. 
/// /// Generally speaking, UTF-8 mode should only be used when you know you /// are searching valid UTF-8, such as a Rust `&str`. If UTF-8 mode is used /// on input that is not valid UTF-8, then the regex is not likely to work /// as expected. /// /// This is enabled by default. pub fn utf8(mut self, yes: bool) -> Config { self.utf8 = Some(yes); self } /// Sets an approximate size limit on the total heap used by the NFA being /// compiled. /// /// This permits imposing constraints on the size of a compiled NFA. This /// may be useful in contexts where the regex pattern is untrusted and one /// wants to avoid using too much memory. /// /// This size limit does not apply to auxiliary heap used during /// compilation that is not part of the built NFA. /// /// Note that this size limit is applied during compilation in order for /// the limit to prevent too much heap from being used. However, the /// implementation may use an intermediate NFA representation that is /// otherwise slightly bigger than the final public form. Since the size /// limit may be applied to an intermediate representation, there is not /// necessarily a precise correspondence between the configured size limit /// and the heap usage of the final NFA. /// /// There is no size limit by default. /// /// # Example /// /// This example demonstrates how Unicode mode can greatly increase the /// size of the NFA. /// /// ``` /// use regex_automata::nfa::thompson::NFA; /// /// // 300KB isn't enough! /// NFA::builder() /// .configure(NFA::config().nfa_size_limit(Some(300_000))) /// .build(r"\w{20}") /// .unwrap_err(); /// /// // ... but 400KB probably is. 
/// let nfa = NFA::builder() /// .configure(NFA::config().nfa_size_limit(Some(400_000))) /// .build(r"\w{20}")?; /// /// assert_eq!(nfa.pattern_len(), 1); /// /// # Ok::<(), Box<dyn std::error::Error>>(()) /// ``` pub fn nfa_size_limit(mut self, bytes: Option<usize>) -> Config { self.nfa_size_limit = Some(bytes); self } /// Apply best effort heuristics to shrink the NFA at the expense of more /// time/memory. /// /// This is enabled by default. Generally speaking, if one is using an NFA /// to compile a DFA, then the extra time used to shrink the NFA will be /// more than made up for during DFA construction (potentially by a lot). /// In other words, enabling this can substantially decrease the overall /// amount of time it takes to build a DFA. /// /// The only reason to disable this if you want to compile an NFA and start /// using it as quickly as possible without needing to build a DFA. e.g., /// for an NFA simulation or for a lazy DFA. /// /// This is enabled by default. pub fn shrink(mut self, yes: bool) -> Config { self.shrink = Some(yes); self } /// Whether to include 'Capture' states in the NFA. /// /// This can only be enabled when compiling a forward NFA. This is /// always disabled---with no way to override it---when the `reverse` /// configuration is enabled. /// /// This is enabled by default. pub fn captures(mut self, yes: bool) -> Config { self.captures = Some(yes); self } /// Whether to compile an unanchored prefix into this NFA. /// /// This is enabled by default. It is made available for tests only to make /// it easier to unit test the output of the compiler. 
#[cfg(test)] fn unanchored_prefix(mut self, yes: bool) -> Config { self.unanchored_prefix = Some(yes); self } pub fn get_reverse(&self) -> bool { self.reverse.unwrap_or(false) } pub fn get_utf8(&self) -> bool { self.utf8.unwrap_or(true) } pub fn get_nfa_size_limit(&self) -> Option<usize> { self.nfa_size_limit.unwrap_or(None) } pub fn get_shrink(&self) -> bool { self.shrink.unwrap_or(true) } pub fn get_captures(&self) -> bool { !self.get_reverse() && self.captures.unwrap_or(true) } fn get_unanchored_prefix(&self) -> bool { #[cfg(test)] { self.unanchored_prefix.unwrap_or(true) } #[cfg(not(test))] { true } } pub(crate) fn overwrite(self, o: Config) -> Config { Config { reverse: o.reverse.or(self.reverse), utf8: o.utf8.or(self.utf8), nfa_size_limit: o.nfa_size_limit.or(self.nfa_size_limit), shrink: o.shrink.or(self.shrink), captures: o.captures.or(self.captures), #[cfg(test)] unanchored_prefix: o.unanchored_prefix.or(self.unanchored_prefix), } } } /// A builder for compiling an NFA. #[derive(Clone, Debug)] pub struct Builder { config: Config, parser: ParserBuilder, } impl Builder { /// Create a new NFA builder with its default configuration. pub fn new() -> Builder { Builder { config: Config::default(), parser: ParserBuilder::new() } } /// Compile the given regular expression into an NFA. /// /// If there was a problem parsing the regex, then that error is returned. /// /// Otherwise, if there was a problem building the NFA, then an error is /// returned. The only error that can occur is if the compiled regex would /// exceed the size limits configured on this builder. 
pub fn build(&self, pattern: &str) -> Result<NFA, Error> { self.build_many(&[pattern]) } pub fn build_many<P: AsRef<str>>( &self, patterns: &[P], ) -> Result<NFA, Error> { let mut hirs = vec![]; for p in patterns { hirs.push( self.parser .build() .parse(p.as_ref()) .map_err(Error::syntax)?, ); log!(log::trace!("parsed: {:?}", p.as_ref())); } self.build_many_from_hir(&hirs) } /// Compile the given high level intermediate representation of a regular /// expression into an NFA. /// /// If there was a problem building the NFA, then an error is returned. The /// only error that can occur is if the compiled regex would exceed the /// size limits configured on this builder. pub fn build_from_hir(&self, expr: &Hir) -> Result<NFA, Error> { self.build_from_hir_with(&mut Compiler::new(), expr) } pub fn build_many_from_hir<H: Borrow<Hir>>( &self, exprs: &[H], ) -> Result<NFA, Error> { self.build_many_from_hir_with(&mut Compiler::new(), exprs) } /// Compile the given high level intermediate representation of a regular /// expression into the NFA given using the given compiler. Callers may /// prefer this over `build` if they would like to reuse allocations while /// compiling many regular expressions. /// /// On success, the given NFA is completely overwritten with the NFA /// produced by the compiler. /// /// If there was a problem building the NFA, then an error is returned. /// The only error that can occur is if the compiled regex would exceed /// the size limits configured on this builder. When an error is returned, /// the contents of `nfa` are unspecified and should not be relied upon. /// However, it can still be reused in subsequent calls to this method. 
fn build_from_hir_with( &self, compiler: &mut Compiler, expr: &Hir, ) -> Result<NFA, Error> { self.build_many_from_hir_with(compiler, &[expr]) } fn build_many_from_hir_with<H: Borrow<Hir>>( &self, compiler: &mut Compiler, exprs: &[H], ) -> Result<NFA, Error> { compiler.configure(self.config); compiler.compile(exprs) } /// Apply the given NFA configuration options to this builder. pub fn configure(&mut self, config: Config) -> &mut Builder { self.config = self.config.overwrite(config); self } /// Set the syntax configuration for this builder using /// [`SyntaxConfig`](../../struct.SyntaxConfig.html). /// /// This permits setting things like case insensitivity, Unicode and multi /// line mode. /// /// This syntax configuration generally only applies when an NFA is built /// directly from a pattern string. If an NFA is built from an HIR, then /// all syntax settings are ignored. pub fn syntax( &mut self, config: crate::util::syntax::SyntaxConfig, ) -> &mut Builder { config.apply(&mut self.parser); self } } /// A compiler that converts a regex abstract syntax to an NFA via Thompson's /// construction. Namely, this compiler permits epsilon transitions between /// states. #[derive(Clone, Debug)] pub struct Compiler { /// The configuration from the builder. config: Config, /// The final NFA that is built. /// /// Parts of this NFA are constructed during compilation, but the actual /// states aren't added until a final "finish" step. This is because the /// states constructed during compilation have unconditional epsilon /// transitions, which makes the logic of compilation much simpler. The /// "finish" step removes these unconditional epsilon transitions and must /// therefore remap all of the transition state IDs. nfa: RefCell<NFA>, /// The set of compiled NFA states. Once a state is compiled, it is /// assigned a state ID equivalent to its index in this list. Subsequent /// compilation can modify previous states by adding new transitions. 
states: RefCell<Vec<CState>>, /// State used for compiling character classes to UTF-8 byte automata. /// State is not retained between character class compilations. This just /// serves to amortize allocation to the extent possible. utf8_state: RefCell<Utf8State>, /// State used for arranging character classes in reverse into a trie. trie_state: RefCell<RangeTrie>, /// State used for caching common suffixes when compiling reverse UTF-8 /// automata (for Unicode character classes). utf8_suffix: RefCell<Utf8SuffixMap>, /// A map used to re-map state IDs when translating the compiler's internal /// NFA state representation to the external NFA representation. remap: RefCell<Vec<StateID>>, /// A set of compiler internal state IDs that correspond to states that are /// exclusively epsilon transitions, i.e., goto instructions, combined with /// the state that they point to. This is used to record said states while /// transforming the compiler's internal NFA representation to the external /// form. empties: RefCell<Vec<(StateID, StateID)>>, /// The total memory used by each of the 'CState's in 'states'. This only /// includes heap usage by each state, and not the size of the state /// itself. memory_cstates: Cell<usize>, } /// A compiler intermediate state representation for an NFA that is only used /// during compilation. Once compilation is done, `CState`s are converted /// to `State`s (defined in the parent module), which have a much simpler /// representation. #[derive(Clone, Debug, Eq, PartialEq)] enum CState { /// An empty state whose only purpose is to forward the automaton to /// another state via en epsilon transition. These are useful during /// compilation but are otherwise removed at the end. Empty { next: StateID, }, /// An empty state that records a capture location. 
/// /// From the perspective of finite automata, this is precisely equivalent /// to 'Empty', but serves the purpose of instructing NFA simulations to /// record additional state when the finite state machine passes through /// this epsilon transition. /// /// These transitions are treated as epsilon transitions with no additional /// effects in DFAs. /// /// 'slot' in this context refers to the specific capture group offset that /// is being recorded. Each capturing group has two slots corresponding to /// the start and end of the matching portion of that group. CaptureStart { next: StateID, capture_index: u32, name: Option<Arc<str>>, }, CaptureEnd { next: StateID, capture_index: u32, }, /// A state that only transitions to `next` if the current input byte is /// in the range `[start, end]` (inclusive on both ends). Range { range: Transition, }, /// A state with possibly many transitions, represented in a sparse /// fashion. Transitions are ordered lexicographically by input range. /// As such, this may only be used when every transition has equal /// priority. (In practice, this is only used for encoding large UTF-8 /// automata.) In contrast, a `Union` state has each alternate in order /// of priority. Priority is used to implement greedy matching and also /// alternations themselves, e.g., `abc|a` where `abc` has priority over /// `a`. /// /// To clarify, it is possible to remove `Sparse` and represent all things /// that `Sparse` is used for via `Union`. But this creates a more bloated /// NFA with more epsilon transitions than is necessary in the special case /// of character classes. Sparse { ranges: Vec<Transition>, }, /// A conditional epsilon transition satisfied via some sort of /// look-around. Look { look: Look, next: StateID, }, /// An alternation such that there exists an epsilon transition to all /// states in `alternates`, where matches found via earlier transitions /// are preferred over later transitions. 
Union { alternates: Vec<StateID>, }, /// An alternation such that there exists an epsilon transition to all /// states in `alternates`, where matches found via later transitions are /// preferred over earlier transitions. /// /// This "reverse" state exists for convenience during compilation that /// permits easy construction of non-greedy combinations of NFA states. At /// the end of compilation, Union and UnionReverse states are merged into /// one Union type of state, where the latter has its epsilon transitions /// reversed to reflect the priority inversion. /// /// The "convenience" here arises from the fact that as new states are /// added to the list of `alternates`, we would like that add operation /// to be amortized constant time. But if we used a `Union`, we'd need to /// prepend the state, which takes O(n) time. There are other approaches we /// could use to solve this, but this seems simple enough. UnionReverse { alternates: Vec<StateID>, }, /// A match state. There is at most one such occurrence of this state in /// an NFA for each pattern compiled into the NFA. At time of writing, a /// match state is always produced for every pattern given, but in theory, /// if a pattern can never lead to a match, then the match state could be /// omitted. /// /// `id` refers to the ID of the pattern itself, which corresponds to the /// pattern's index (starting at 0). `start_id` refers to the anchored /// NFA starting state corresponding to this pattern. Match { pattern_id: PatternID, start_id: StateID, }, } /// A value that represents the result of compiling a sub-expression of a /// regex's HIR. Specifically, this represents a sub-graph of the NFA that /// has an initial state at `start` and a final state at `end`. #[derive(Clone, Copy, Debug)] pub struct ThompsonRef { start: StateID, end: StateID, } impl Compiler { /// Create a new compiler. 
pub fn new() -> Compiler { Compiler { config: Config::default(), nfa: RefCell::new(NFA::empty()), states: RefCell::new(vec![]), utf8_state: RefCell::new(Utf8State::new()), trie_state: RefCell::new(RangeTrie::new()), utf8_suffix: RefCell::new(Utf8SuffixMap::new(1000)), remap: RefCell::new(vec![]), empties: RefCell::new(vec![]), memory_cstates: Cell::new(0), } } /// Configure and prepare this compiler from the builder's knobs. /// /// The compiler is must always reconfigured by the builder before using it /// to build an NFA. Namely, this will also clear any latent state in the /// compiler used during previous compilations. fn configure(&mut self, config: Config) { self.config = config; self.nfa.borrow_mut().clear(); self.states.borrow_mut().clear(); self.memory_cstates.set(0); // We don't need to clear anything else since they are cleared on // their own and only when they are used. } /// Convert the current intermediate NFA to its final compiled form. fn compile<H: Borrow<Hir>>(&self, exprs: &[H]) -> Result<NFA, Error> { if exprs.is_empty() { return Ok(NFA::never_match()); } if exprs.len() > PatternID::LIMIT { return Err(Error::too_many_patterns(exprs.len())); } // We always add an unanchored prefix unless we were specifically told // not to (for tests only), or if we know that the regex is anchored // for all matches. When an unanchored prefix is not added, then the // NFA's anchored and unanchored start states are equivalent. let all_anchored = exprs.iter().all(|e| e.borrow().is_anchored_start()); let anchored = !self.config.get_unanchored_prefix() || all_anchored; let unanchored_prefix = if anchored { self.c_empty()? } else { if self.config.get_utf8() { self.c_unanchored_prefix_valid_utf8()? } else { self.c_unanchored_prefix_invalid_utf8()? 
} }; let compiled = self.c_alternation( exprs.iter().with_pattern_ids().map(|(pid, e)| { let group_kind = hir::GroupKind::CaptureIndex(0); let one = self.c_group(&group_kind, e.borrow())?; let match_state_id = self.add_match(pid, one.start)?; self.patch(one.end, match_state_id)?; Ok(ThompsonRef { start: one.start, end: match_state_id }) }), )?; self.patch(unanchored_prefix.end, compiled.start)?; self.finish(compiled.start, unanchored_prefix.start)?; Ok(self.nfa.replace(NFA::empty())) } /// Finishes the compilation process and populates the NFA attached to this /// compiler with the final graph. fn finish( &self, start_anchored: StateID, start_unanchored: StateID, ) -> Result<(), Error> { trace!( "intermediate NFA compilation complete, \ intermediate NFA size: {} states, {} bytes on heap", self.states.borrow().len(), self.nfa_memory_usage(), ); let mut nfa = self.nfa.borrow_mut(); let mut bstates = self.states.borrow_mut(); let mut remap = self.remap.borrow_mut(); let mut empties = self.empties.borrow_mut(); remap.resize(bstates.len(), StateID::ZERO); empties.clear(); // The idea here is to convert our intermediate states to their final // form. The only real complexity here is the process of converting // transitions, which are expressed in terms of state IDs. The new // set of states will be smaller because of partial epsilon removal, // so the state IDs will not be the same. for (sid, bstate) in bstates.iter_mut().with_state_ids() { match *bstate { CState::Empty { next } => { // Since we're removing empty states, we need to handle // them later since we don't yet know which new state this // empty state will be mapped to. empties.push((sid, next)); } CState::CaptureStart { next, capture_index, ref name } => { // We can't remove this empty state because of the side // effect of capturing an offset for this capture slot. 
remap[sid] = nfa.add_capture_start( next, capture_index, name.clone(), )?; } CState::CaptureEnd { next, capture_index } => { // We can't remove this empty state because of the side // effect of capturing an offset for this capture slot. remap[sid] = nfa.add_capture_end(next, capture_index)?; } CState::Range { range } => { remap[sid] = nfa.add_range(range)?; } CState::Sparse { ref mut ranges } => { let ranges = mem::replace(ranges, vec![]).into_boxed_slice(); remap[sid] = nfa.add_sparse(SparseTransitions { ranges })?; } CState::Look { look, next } => { remap[sid] = nfa.add_look(next, look)?; } CState::Union { ref mut alternates } => { let alternates = mem::replace(alternates, vec![]).into_boxed_slice(); remap[sid] = nfa.add_union(alternates)?; } CState::UnionReverse { ref mut alternates } => { let mut alternates = mem::replace(alternates, vec![]).into_boxed_slice(); alternates.reverse(); remap[sid] = nfa.add_union(alternates)?; } CState::Match { start_id, .. } => { remap[sid] = nfa.add_match()?; nfa.finish_pattern(start_id)?; } } } for &(empty_id, mut empty_next) in empties.iter() { // empty states can point to other empty states, forming a chain. // So we must follow the chain until the end, which must end at // a non-empty state, and therefore, a state that is correctly // remapped. We are guaranteed to terminate because our compiler // never builds a loop among only empty states. while let CState::Empty { next } = bstates[empty_next] { empty_next = next; } remap[empty_id] = remap[empty_next]; } nfa.set_start_anchored(start_anchored); nfa.set_start_unanchored(start_unanchored); nfa.remap(&remap); trace!( "final NFA (reverse? 
{:?}) compilation complete, \ final NFA size: {} states, {} bytes on heap", self.config.get_reverse(), nfa.states().len(), nfa.memory_usage(), ); Ok(()) } fn c(&self, expr: &Hir) -> Result<ThompsonRef, Error> { match *expr.kind() { HirKind::Empty => self.c_empty(), HirKind::Literal(Literal::Unicode(ch)) => self.c_char(ch), HirKind::Literal(Literal::Byte(b)) => self.c_range(b, b), HirKind::Class(Class::Bytes(ref c)) => self.c_byte_class(c), HirKind::Class(Class::Unicode(ref c)) => self.c_unicode_class(c), HirKind::Anchor(ref anchor) => self.c_anchor(anchor), HirKind::WordBoundary(ref wb) => self.c_word_boundary(wb), HirKind::Repetition(ref rep) => self.c_repetition(rep), HirKind::Group(ref group) => self.c_group(&group.kind, &group.hir), HirKind::Concat(ref es) => { self.c_concat(es.iter().map(|e| self.c(e))) } HirKind::Alternation(ref es) => { self.c_alternation(es.iter().map(|e| self.c(e))) } } } fn c_concat<I>(&self, mut it: I) -> Result<ThompsonRef, Error> where I: DoubleEndedIterator<Item = Result<ThompsonRef, Error>>, { let first = if self.is_reverse() { it.next_back() } else { it.next() }; let ThompsonRef { start, mut end } = match first { Some(result) => result?, None => return self.c_empty(), }; loop { let next = if self.is_reverse() { it.next_back() } else { it.next() }; let compiled = match next { Some(result) => result?, None => break, }; self.patch(end, compiled.start)?; end = compiled.end; } Ok(ThompsonRef { start, end }) } fn c_alternation<I>(&self, mut it: I) -> Result<ThompsonRef, Error> where I: Iterator<Item = Result<ThompsonRef, Error>>, { let first = it.next().expect("alternations must be non-empty")?; let second = match it.next() { None => return Ok(first), Some(result) => result?, }; let union = self.add_union()?; let end = self.add_empty()?; self.patch(union, first.start)?; self.patch(first.end, end)?; self.patch(union, second.start)?; self.patch(second.end, end)?; for result in it { let compiled = result?; self.patch(union, compiled.start)?; 
self.patch(compiled.end, end)?; } Ok(ThompsonRef { start: union, end }) } fn c_group( &self, kind: &hir::GroupKind, expr: &Hir, ) -> Result<ThompsonRef, Error> { if !self.config.get_captures() { return self.c(expr); } let (capi, name) = match *kind { hir::GroupKind::NonCapturing => return self.c(expr), hir::GroupKind::CaptureIndex(index) => (index, None), hir::GroupKind::CaptureName { ref name, index } => { (index, Some(Arc::from(&**name))) } }; let start = self.add_capture_start(capi, name)?; let inner = self.c(expr)?; let end = self.add_capture_end(capi)?; self.patch(start, inner.start)?; self.patch(inner.end, end)?; Ok(ThompsonRef { start, end }) } fn c_repetition( &self, rep: &hir::Repetition, ) -> Result<ThompsonRef, Error> { match rep.kind { hir::RepetitionKind::ZeroOrOne => { self.c_zero_or_one(&rep.hir, rep.greedy) } hir::RepetitionKind::ZeroOrMore => { self.c_at_least(&rep.hir, rep.greedy, 0) } hir::RepetitionKind::OneOrMore => { self.c_at_least(&rep.hir, rep.greedy, 1) } hir::RepetitionKind::Range(ref rng) => match *rng { hir::RepetitionRange::Exactly(count) => { self.c_exactly(&rep.hir, count) } hir::RepetitionRange::AtLeast(m) => { self.c_at_least(&rep.hir, rep.greedy, m) } hir::RepetitionRange::Bounded(min, max) => { self.c_bounded(&rep.hir, rep.greedy, min, max) } }, } } fn c_bounded( &self, expr: &Hir, greedy: bool, min: u32, max: u32, ) -> Result<ThompsonRef, Error> { let prefix = self.c_exactly(expr, min)?; if min == max { return Ok(prefix); } // It is tempting here to compile the rest here as a concatenation // of zero-or-one matches. i.e., for `a{2,5}`, compile it as if it // were `aaa?a?a?`. 
The problem here is that it leads to this program: // // >000000: 61 => 01 // 000001: 61 => 02 // 000002: union(03, 04) // 000003: 61 => 04 // 000004: union(05, 06) // 000005: 61 => 06 // 000006: union(07, 08) // 000007: 61 => 08 // 000008: MATCH // // And effectively, once you hit state 2, the epsilon closure will // include states 3, 5, 6, 7 and 8, which is quite a bit. It is better // to instead compile it like so: // // >000000: 61 => 01 // 000001: 61 => 02 // 000002: union(03, 08) // 000003: 61 => 04 // 000004: union(05, 08) // 000005: 61 => 06 // 000006: union(07, 08) // 000007: 61 => 08 // 000008: MATCH // // So that the epsilon closure of state 2 is now just 3 and 8. let empty = self.add_empty()?; let mut prev_end = prefix.end; for _ in min..max { let union = if greedy { self.add_union() } else { self.add_reverse_union() }?; let compiled = self.c(expr)?; self.patch(prev_end, union)?; self.patch(union, compiled.start)?; self.patch(union, empty)?; prev_end = compiled.end; } self.patch(prev_end, empty)?; Ok(ThompsonRef { start: prefix.start, end: empty }) } fn c_at_least( &self, expr: &Hir, greedy: bool, n: u32, ) -> Result<ThompsonRef, Error> { if n == 0 { // When the expression cannot match the empty string, then we // can get away with something much simpler: just one 'alt' // instruction that optionally repeats itself. But if the expr // can match the empty string... see below. if !expr.is_match_empty() { let union = if greedy { self.add_union() } else { self.add_reverse_union() }?; let compiled = self.c(expr)?; self.patch(union, compiled.start)?; self.patch(compiled.end, union)?; return Ok(ThompsonRef { start: union, end: union }); } // What's going on here? Shouldn't x* be simpler than this? It // turns out that when implementing leftmost-first (Perl-like) // match semantics, x* results in an incorrect preference order // when computing the transitive closure of states if and only if // 'x' can match the empty string. 
So instead, we compile x* as // (x+)?, which preserves the correct preference order. // // See: https://github.com/rust-lang/regex/issues/779 let compiled = self.c(expr)?; let plus = if greedy { self.add_union() } else { self.add_reverse_union() }?; self.patch(compiled.end, plus)?; self.patch(plus, compiled.start)?; let question = if greedy { self.add_union() } else { self.add_reverse_union() }?; let empty = self.add_empty()?; self.patch(question, compiled.start)?; self.patch(question, empty)?; self.patch(plus, empty)?; Ok(ThompsonRef { start: question, end: empty }) } else if n == 1 { let compiled = self.c(expr)?; let union = if greedy { self.add_union() } else { self.add_reverse_union() }?; self.patch(compiled.end, union)?; self.patch(union, compiled.start)?; Ok(ThompsonRef { start: compiled.start, end: union }) } else { let prefix = self.c_exactly(expr, n - 1)?; let last = self.c(expr)?; let union = if greedy { self.add_union() } else { self.add_reverse_union() }?; self.patch(prefix.end, last.start)?; self.patch(last.end, union)?; self.patch(union, last.start)?; Ok(ThompsonRef { start: prefix.start, end: union }) } } fn c_zero_or_one( &self, expr: &Hir, greedy: bool, ) -> Result<ThompsonRef, Error> { let union = if greedy { self.add_union() } else { self.add_reverse_union() }?; let compiled = self.c(expr)?; let empty = self.add_empty()?; self.patch(union, compiled.start)?; self.patch(union, empty)?; self.patch(compiled.end, empty)?; Ok(ThompsonRef { start: union, end: empty }) } fn c_exactly(&self, expr: &Hir, n: u32) -> Result<ThompsonRef, Error> { let it = (0..n).map(|_| self.c(expr)); self.c_concat(it) } fn c_byte_class( &self, cls: &hir::ClassBytes, ) -> Result<ThompsonRef, Error> { let end = self.add_empty()?; let mut trans = Vec::with_capacity(cls.ranges().len()); for r in cls.iter() { trans.push(Transition { start: r.start(), end: r.end(), next: end, }); } Ok(ThompsonRef { start: self.add_sparse(trans)?, end }) } fn c_unicode_class( &self, cls: 
&hir::ClassUnicode, ) -> Result<ThompsonRef, Error> { // If all we have are ASCII ranges wrapped in a Unicode package, then // there is zero reason to bring out the big guns. We can fit all ASCII // ranges within a single sparse state. if cls.is_all_ascii() { let end = self.add_empty()?; let mut trans = Vec::with_capacity(cls.ranges().len()); for r in cls.iter() { assert!(r.start() <= '\x7F'); assert!(r.end() <= '\x7F'); trans.push(Transition { start: r.start() as u8, end: r.end() as u8, next: end, }); } Ok(ThompsonRef { start: self.add_sparse(trans)?, end }) } else if self.is_reverse() { if !self.config.get_shrink() { // When we don't want to spend the extra time shrinking, we // compile the UTF-8 automaton in reverse using something like // the "naive" approach, but will attempt to re-use common // suffixes. self.c_unicode_class_reverse_with_suffix(cls) } else { // When we want to shrink our NFA for reverse UTF-8 automata, // we cannot feed UTF-8 sequences directly to the UTF-8 // compiler, since the UTF-8 compiler requires all sequences // to be lexicographically sorted. Instead, we organize our // sequences into a range trie, which can then output our // sequences in the correct order. Unfortunately, building the // range trie is fairly expensive (but not nearly as expensive // as building a DFA). Hence the reason why the 'shrink' option // exists, so that this path can be toggled off. For example, // we might want to turn this off if we know we won't be // compiling a DFA. 
let mut trie = self.trie_state.borrow_mut(); trie.clear(); for rng in cls.iter() { for mut seq in Utf8Sequences::new(rng.start(), rng.end()) { seq.reverse(); trie.insert(seq.as_slice()); } } let mut utf8_state = self.utf8_state.borrow_mut(); let mut utf8c = Utf8Compiler::new(self, &mut *utf8_state)?; trie.iter(|seq| { utf8c.add(&seq)?; Ok(()) })?; utf8c.finish() } } else { // In the forward direction, we always shrink our UTF-8 automata // because we can stream it right into the UTF-8 compiler. There // is almost no downside (in either memory or time) to using this // approach. let mut utf8_state = self.utf8_state.borrow_mut(); let mut utf8c = Utf8Compiler::new(self, &mut *utf8_state)?; for rng in cls.iter() { for seq in Utf8Sequences::new(rng.start(), rng.end()) { utf8c.add(seq.as_slice())?; } } utf8c.finish() } // For reference, the code below is the "naive" version of compiling a // UTF-8 automaton. It is deliciously simple (and works for both the // forward and reverse cases), but will unfortunately produce very // large NFAs. When compiling a forward automaton, the size difference // can sometimes be an order of magnitude. For example, the '\w' regex // will generate about ~3000 NFA states using the naive approach below, // but only 283 states when using the approach above. This is because // the approach above actually compiles a *minimal* (or near minimal, // because of the bounded hashmap for reusing equivalent states) UTF-8 // automaton. // // The code below is kept as a reference point in order to make it // easier to understand the higher level goal here. Although, it will // almost certainly bit-rot, so keep that in mind. 
/* let it = cls .iter() .flat_map(|rng| Utf8Sequences::new(rng.start(), rng.end())) .map(|seq| { let it = seq .as_slice() .iter() .map(|rng| self.c_range(rng.start, rng.end)); self.c_concat(it) }); self.c_alternation(it) */ } fn c_unicode_class_reverse_with_suffix( &self, cls: &hir::ClassUnicode, ) -> Result<ThompsonRef, Error> { // N.B. It would likely be better to cache common *prefixes* in the // reverse direction, but it's not quite clear how to do that. The // advantage of caching suffixes is that it does give us a win, and // has a very small additional overhead. let mut cache = self.utf8_suffix.borrow_mut(); cache.clear(); let union = self.add_union()?; let alt_end = self.add_empty()?; for urng in cls.iter() { for seq in Utf8Sequences::new(urng.start(), urng.end()) { let mut end = alt_end; for brng in seq.as_slice() { let key = Utf8SuffixKey { from: end, start: brng.start, end: brng.end, }; let hash = cache.hash(&key); if let Some(id) = cache.get(&key, hash) { end = id; continue; } let compiled = self.c_range(brng.start, brng.end)?; self.patch(compiled.end, end)?; end = compiled.start; cache.set(key, hash, end); } self.patch(union, end)?; } } Ok(ThompsonRef { start: union, end: alt_end }) } fn c_anchor(&self, anchor: &Anchor) -> Result<ThompsonRef, Error> { let look = match *anchor { Anchor::StartLine => Look::StartLine, Anchor::EndLine => Look::EndLine, Anchor::StartText => Look::StartText, Anchor::EndText => Look::EndText, }; let id = self.add_look(look)?; Ok(ThompsonRef { start: id, end: id }) } fn c_word_boundary( &self, wb: &WordBoundary, ) -> Result<ThompsonRef, Error> { let look = match *wb { WordBoundary::Unicode => Look::WordBoundaryUnicode, WordBoundary::UnicodeNegate => Look::WordBoundaryUnicodeNegate, WordBoundary::Ascii => Look::WordBoundaryAscii, WordBoundary::AsciiNegate => Look::WordBoundaryAsciiNegate, }; let id = self.add_look(look)?; Ok(ThompsonRef { start: id, end: id }) } fn c_char(&self, ch: char) -> Result<ThompsonRef, Error> { let mut 
buf = [0; 4]; let it = ch .encode_utf8(&mut buf) .as_bytes() .iter() .map(|&b| self.c_range(b, b)); self.c_concat(it) } fn c_range(&self, start: u8, end: u8) -> Result<ThompsonRef, Error> { let id = self.add_range(start, end)?; Ok(ThompsonRef { start: id, end: id }) } fn c_empty(&self) -> Result<ThompsonRef, Error> { let id = self.add_empty()?; Ok(ThompsonRef { start: id, end: id }) } fn c_unanchored_prefix_valid_utf8(&self) -> Result<ThompsonRef, Error> { self.c_at_least(&Hir::any(false), false, 0) } fn c_unanchored_prefix_invalid_utf8(&self) -> Result<ThompsonRef, Error> { self.c_at_least(&Hir::any(true), false, 0) } fn patch(&self, from: StateID, to: StateID) -> Result<(), Error> { let old_memory_cstates = self.memory_cstates.get(); match self.states.borrow_mut()[from] { CState::Empty { ref mut next } => { *next = to; } CState::Range { ref mut range } => { range.next = to; } CState::Sparse { .. } => { panic!("cannot patch from a sparse NFA state") } CState::Look { ref mut next, .. } => { *next = to; } CState::Union { ref mut alternates } => { alternates.push(to); self.memory_cstates .set(old_memory_cstates + mem::size_of::<StateID>()); } CState::UnionReverse { ref mut alternates } => { alternates.push(to); self.memory_cstates .set(old_memory_cstates + mem::size_of::<StateID>()); } CState::CaptureStart { ref mut next, .. } => { *next = to; } CState::CaptureEnd { ref mut next, .. } => { *next = to; } CState::Match { .. 
} => {} } if old_memory_cstates != self.memory_cstates.get() { self.check_nfa_size_limit()?; } Ok(()) } fn add_empty(&self) -> Result<StateID, Error> { self.add_state(CState::Empty { next: StateID::ZERO }) } fn add_capture_start( &self, capture_index: u32, name: Option<Arc<str>>, ) -> Result<StateID, Error> { self.add_state(CState::CaptureStart { next: StateID::ZERO, capture_index, name, }) } fn add_capture_end(&self, capture_index: u32) -> Result<StateID, Error> { self.add_state(CState::CaptureEnd { next: StateID::ZERO, capture_index, }) } fn add_range(&self, start: u8, end: u8) -> Result<StateID, Error> { let trans = Transition { start, end, next: StateID::ZERO }; self.add_state(CState::Range { range: trans }) } fn add_sparse(&self, ranges: Vec<Transition>) -> Result<StateID, Error> { if ranges.len() == 1 { self.add_state(CState::Range { range: ranges[0] }) } else { self.add_state(CState::Sparse { ranges }) } } fn add_look(&self, mut look: Look) -> Result<StateID, Error> { if self.is_reverse() { look = look.reversed(); } self.add_state(CState::Look { look, next: StateID::ZERO }) } fn add_union(&self) -> Result<StateID, Error> { self.add_state(CState::Union { alternates: vec![] }) } fn add_reverse_union(&self) -> Result<StateID, Error> { self.add_state(CState::UnionReverse { alternates: vec![] }) } fn add_match( &self, pattern_id: PatternID, start_id: StateID, ) -> Result<StateID, Error> { self.add_state(CState::Match { pattern_id, start_id }) } fn add_state(&self, state: CState) -> Result<StateID, Error> { let mut states = self.states.borrow_mut(); let id = StateID::new(states.len()) .map_err(|_| Error::too_many_states(states.len()))?; self.memory_cstates .set(self.memory_cstates.get() + state.memory_usage()); states.push(state); // If we don't explicitly drop this, then 'nfa_memory_usage' will also // try to borrow it when we check the size limit and hit an error. 
drop(states); self.check_nfa_size_limit()?; Ok(id) } fn is_reverse(&self) -> bool { self.config.get_reverse() } /// If an NFA size limit was set, this checks that the NFA compiled so far /// fits within that limit. If so, then nothing is returned. Otherwise, an /// error is returned. /// /// This should be called after increasing the heap usage of the /// intermediate NFA. /// /// Note that this borrows 'self.states', so callers should ensure there is /// no mutable borrow of it outstanding. fn check_nfa_size_limit(&self) -> Result<(), Error> { if let Some(limit) = self.config.get_nfa_size_limit() { if self.nfa_memory_usage() > limit { return Err(Error::exceeded_size_limit(limit)); } } Ok(()) } /// Returns the heap memory usage, in bytes, of the NFA compiled so far. /// /// Note that this is an approximation of how big the final NFA will be. /// In practice, the final NFA will likely be a bit smaller since it uses /// things like `Box<[T]>` instead of `Vec<T>`. fn nfa_memory_usage(&self) -> usize { self.states.borrow().len() * mem::size_of::<CState>() + self.memory_cstates.get() } } impl CState { fn memory_usage(&self) -> usize { match *self { CState::Empty { .. } | CState::Range { .. } | CState::Look { .. } | CState::CaptureStart { .. } | CState::CaptureEnd { .. } | CState::Match { .. 
} => 0, CState::Sparse { ref ranges } => { ranges.len() * mem::size_of::<Transition>() } CState::Union { ref alternates } => { alternates.len() * mem::size_of::<StateID>() } CState::UnionReverse { ref alternates } => { alternates.len() * mem::size_of::<StateID>() } } } } #[derive(Debug)] struct Utf8Compiler<'a> { nfac: &'a Compiler, state: &'a mut Utf8State, target: StateID, } #[derive(Clone, Debug)] struct Utf8State { compiled: Utf8BoundedMap, uncompiled: Vec<Utf8Node>, } #[derive(Clone, Debug)] struct Utf8Node { trans: Vec<Transition>, last: Option<Utf8LastTransition>, } #[derive(Clone, Debug)] struct Utf8LastTransition { start: u8, end: u8, } impl Utf8State { fn new() -> Utf8State { Utf8State { compiled: Utf8BoundedMap::new(10_000), uncompiled: vec![] } } fn clear(&mut self) { self.compiled.clear(); self.uncompiled.clear(); } } impl<'a> Utf8Compiler<'a> { fn new( nfac: &'a Compiler, state: &'a mut Utf8State, ) -> Result<Utf8Compiler<'a>, Error> { let target = nfac.add_empty()?; state.clear(); let mut utf8c = Utf8Compiler { nfac, state, target }; utf8c.add_empty(); Ok(utf8c) } fn finish(&mut self) -> Result<ThompsonRef, Error> { self.compile_from(0)?; let node = self.pop_root(); let start = self.compile(node)?; Ok(ThompsonRef { start, end: self.target }) } fn add(&mut self, ranges: &[Utf8Range]) -> Result<(), Error> { let prefix_len = ranges .iter() .zip(&self.state.uncompiled) .take_while(|&(range, node)| { node.last.as_ref().map_or(false, |t| { (t.start, t.end) == (range.start, range.end) }) }) .count(); assert!(prefix_len < ranges.len()); self.compile_from(prefix_len)?; self.add_suffix(&ranges[prefix_len..]); Ok(()) } fn compile_from(&mut self, from: usize) -> Result<(), Error> { let mut next = self.target; while from + 1 < self.state.uncompiled.len() { let node = self.pop_freeze(next); next = self.compile(node)?; } self.top_last_freeze(next); Ok(()) } fn compile(&mut self, node: Vec<Transition>) -> Result<StateID, Error> { let hash = 
self.state.compiled.hash(&node); if let Some(id) = self.state.compiled.get(&node, hash) { return Ok(id); } let id = self.nfac.add_sparse(node.clone())?; self.state.compiled.set(node, hash, id); Ok(id) } fn add_suffix(&mut self, ranges: &[Utf8Range]) { assert!(!ranges.is_empty()); let last = self .state .uncompiled .len() .checked_sub(1) .expect("non-empty nodes"); assert!(self.state.uncompiled[last].last.is_none()); self.state.uncompiled[last].last = Some(Utf8LastTransition { start: ranges[0].start, end: ranges[0].end, }); for r in &ranges[1..] { self.state.uncompiled.push(Utf8Node { trans: vec![], last: Some(Utf8LastTransition { start: r.start, end: r.end }), }); } } fn add_empty(&mut self) { self.state.uncompiled.push(Utf8Node { trans: vec![], last: None }); } fn pop_freeze(&mut self, next: StateID) -> Vec<Transition> { let mut uncompiled = self.state.uncompiled.pop().unwrap(); uncompiled.set_last_transition(next); uncompiled.trans } fn pop_root(&mut self) -> Vec<Transition> { assert_eq!(self.state.uncompiled.len(), 1); assert!(self.state.uncompiled[0].last.is_none()); self.state.uncompiled.pop().expect("non-empty nodes").trans } fn top_last_freeze(&mut self, next: StateID) { let last = self .state .uncompiled .len() .checked_sub(1) .expect("non-empty nodes"); self.state.uncompiled[last].set_last_transition(next); } } impl Utf8Node { fn set_last_transition(&mut self, next: StateID) { if let Some(last) = self.last.take() { self.trans.push(Transition { start: last.start, end: last.end, next, }); } } } #[cfg(test)] mod tests { use alloc::vec::Vec; use super::{ Builder, Config, PatternID, SparseTransitions, State, StateID, Transition, NFA, }; fn build(pattern: &str) -> NFA { Builder::new() .configure(Config::new().captures(false).unanchored_prefix(false)) .build(pattern) .unwrap() } fn pid(id: usize) -> PatternID { PatternID::new(id).unwrap() } fn sid(id: usize) -> StateID { StateID::new(id).unwrap() } fn s_byte(byte: u8, next: usize) -> State { let next = sid(next); 
let trans = Transition { start: byte, end: byte, next }; State::Range { range: trans } } fn s_range(start: u8, end: u8, next: usize) -> State { let next = sid(next); let trans = Transition { start, end, next }; State::Range { range: trans } } fn s_sparse(ranges: &[(u8, u8, usize)]) -> State { let ranges = ranges .iter() .map(|&(start, end, next)| Transition { start, end, next: sid(next), }) .collect(); State::Sparse(SparseTransitions { ranges }) } fn s_union(alts: &[usize]) -> State { State::Union { alternates: alts .iter() .map(|&id| sid(id)) .collect::<Vec<StateID>>() .into_boxed_slice(), } } fn s_match(id: usize) -> State { State::Match { id: pid(id) } } // Test that building an unanchored NFA has an appropriate `(?s:.)*?` // prefix. #[test] fn compile_unanchored_prefix() { // When the machine can only match valid UTF-8. let nfa = Builder::new() .configure(Config::new().captures(false)) .build(r"a") .unwrap(); // There should be many states since the `.` in `(?s:.)*?` matches any // Unicode scalar value. assert_eq!(11, nfa.len()); assert_eq!(nfa.states[10], s_match(0)); assert_eq!(nfa.states[9], s_byte(b'a', 10)); // When the machine can match through invalid UTF-8. let nfa = Builder::new() .configure(Config::new().captures(false).utf8(false)) .build(r"a") .unwrap(); assert_eq!( nfa.states, &[ s_union(&[2, 1]), s_range(0, 255, 0), s_byte(b'a', 3), s_match(0), ] ); } #[test] fn compile_empty() { assert_eq!(build("").states, &[s_match(0),]); } #[test] fn compile_literal() { assert_eq!(build("a").states, &[s_byte(b'a', 1), s_match(0),]); assert_eq!( build("ab").states, &[s_byte(b'a', 1), s_byte(b'b', 2), s_match(0),] ); assert_eq!( build("☃").states, &[s_byte(0xE2, 1), s_byte(0x98, 2), s_byte(0x83, 3), s_match(0)] ); // Check that non-UTF-8 literals work. 
let nfa = Builder::new() .configure( Config::new() .captures(false) .utf8(false) .unanchored_prefix(false), ) .syntax(crate::SyntaxConfig::new().utf8(false)) .build(r"(?-u)\xFF") .unwrap(); assert_eq!(nfa.states, &[s_byte(b'\xFF', 1), s_match(0),]); } #[test] fn compile_class() { assert_eq!( build(r"[a-z]").states, &[s_range(b'a', b'z', 1), s_match(0),] ); assert_eq!( build(r"[x-za-c]").states, &[s_sparse(&[(b'a', b'c', 1), (b'x', b'z', 1)]), s_match(0)] ); assert_eq!( build(r"[\u03B1-\u03B4]").states, &[s_range(0xB1, 0xB4, 2), s_byte(0xCE, 0), s_match(0)] ); assert_eq!( build(r"[\u03B1-\u03B4\u{1F919}-\u{1F91E}]").states, &[ s_range(0xB1, 0xB4, 5), s_range(0x99, 0x9E, 5), s_byte(0xA4, 1), s_byte(0x9F, 2), s_sparse(&[(0xCE, 0xCE, 0), (0xF0, 0xF0, 3)]), s_match(0), ] ); assert_eq!( build(r"[a-z☃]").states, &[ s_byte(0x83, 3), s_byte(0x98, 0), s_sparse(&[(b'a', b'z', 3), (0xE2, 0xE2, 1)]), s_match(0), ] ); } #[test] fn compile_repetition() { assert_eq!( build(r"a?").states, &[s_union(&[1, 2]), s_byte(b'a', 2), s_match(0),] ); assert_eq!( build(r"a??").states, &[s_union(&[2, 1]), s_byte(b'a', 2), s_match(0),] ); } #[test] fn compile_group() { assert_eq!( build(r"ab+").states, &[s_byte(b'a', 1), s_byte(b'b', 2), s_union(&[1, 3]), s_match(0)] ); assert_eq!( build(r"(ab)").states, &[s_byte(b'a', 1), s_byte(b'b', 2), s_match(0)] ); assert_eq!( build(r"(ab)+").states, &[s_byte(b'a', 1), s_byte(b'b', 2), s_union(&[0, 3]), s_match(0)] ); } #[test] fn compile_alternation() { assert_eq!( build(r"a|b").states, &[s_byte(b'a', 3), s_byte(b'b', 3), s_union(&[0, 1]), s_match(0)] ); assert_eq!( build(r"|b").states, &[s_byte(b'b', 2), s_union(&[2, 0]), s_match(0)] ); assert_eq!( build(r"a|").states, &[s_byte(b'a', 2), s_union(&[0, 2]), s_match(0)] ); } #[test] fn many_start_pattern() { let nfa = Builder::new() .configure(Config::new().captures(false).unanchored_prefix(false)) .build_many(&["a", "b"]) .unwrap(); assert_eq!( nfa.states, &[ s_byte(b'a', 1), s_match(0), s_byte(b'b', 3), 
s_match(1), s_union(&[0, 2]), ] ); assert_eq!(nfa.start_anchored().as_usize(), 4); assert_eq!(nfa.start_unanchored().as_usize(), 4); // Test that the start states for each individual pattern are correct. assert_eq!(nfa.start_pattern(pid(0)), sid(0)); assert_eq!(nfa.start_pattern(pid(1)), sid(2)); } }
use input_i_scanner::InputIScanner;
use std::collections::VecDeque;

/// Reads `n` tasks — each with a time cost and a list of prerequisite task
/// ids (1-based) — then prints the total cost of every task reachable from
/// the last task via its (transitive) prerequisites.
fn main() {
    let stdin = std::io::stdin();
    let mut _i_i = InputIScanner::from(stdin.lock());
    macro_rules! scan {
        (($($t: ty),+)) => { ($(scan!($t)),+) };
        ($t: ty) => { _i_i.scan::<$t>() as $t };
        (($($t: ty),+); $n: expr) => { std::iter::repeat_with(|| scan!(($($t),+))).take($n).collect::<Vec<_>>() };
        ($t: ty; $n: expr) => { std::iter::repeat_with(|| scan!($t)).take($n).collect::<Vec<_>>() };
    }

    let n = scan!(usize);
    // Per-task cost and adjacency (prerequisite) lists.
    let mut costs = vec![0_u64; n];
    let mut deps = vec![Vec::new(); n];
    for i in 0..n {
        costs[i] = scan!(u64);
        let k = scan!(usize);
        deps[i] = scan!(usize; k);
    }

    // Breadth-first search starting from the final task.
    let mut visited = vec![false; n];
    visited[n - 1] = true;
    let mut queue = VecDeque::from(vec![n - 1]);
    while let Some(u) = queue.pop_front() {
        for &v in &deps[u] {
            let v = v - 1; // input ids are 1-based; indices are 0-based
            if !visited[v] {
                visited[v] = true;
                queue.push_back(v);
            }
        }
    }

    // Sum the cost of every reachable task.
    let total: u64 = costs
        .iter()
        .zip(&visited)
        .filter(|&(_, &seen)| seen)
        .map(|(&cost, _)| cost)
        .sum();
    println!("{}", total);
}
// Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use common_base::base::tokio::io::AsyncReadExt;
use common_base::base::tokio::io::AsyncWriteExt;
use common_base::base::tokio::net::TcpStream;
use common_exception::Result;
use opensrv_mysql::ErrorKind;

/// Rejects an incoming MySQL client connection after completing just enough
/// of the MySQL handshake for the client to accept and display an error.
pub struct RejectConnection;

impl RejectConnection {
    /// Performs a minimal handshake (server greeting, then reads the client
    /// response) and sends a MySQL ERR packet built from `code` and
    /// `error_message`, so the client sees a proper error instead of a
    /// silently dropped socket.
    pub async fn reject_mysql_connection(
        mut stream: TcpStream,
        code: ErrorKind,
        error_message: impl Into<String>,
    ) -> Result<()> {
        RejectConnection::send_handshake(&mut stream).await?;
        RejectConnection::receive_handshake_response(&mut stream).await?;

        // Send error. Packet[seq = 2]
        // ERR packet payload: 0xFF marker, 2-byte little-endian error code,
        // '#' followed by the SQLSTATE, then the human-readable message.
        let mut buffer = vec![0xFF_u8];
        buffer.extend((code as u16).to_le_bytes());
        buffer.extend(&vec![b'#']);
        buffer.extend(code.sqlstate());
        buffer.extend(error_message.into().as_bytes());

        // Prepend the 4-byte MySQL packet header: 3-byte little-endian
        // payload length plus the sequence id (2).
        let size = buffer.len().to_le_bytes();
        buffer.splice(0..0, [size[0], size[1], size[2], 2].iter().cloned());

        stream.write_all(&buffer).await?;
        stream.flush().await?;

        Ok(())
    }

    /// Writes a canned server greeting (Packet[seq = 0]); the bytes are a
    /// fixed handshake captured from opensrv-mysql whose version field
    /// decodes to "5.1.10-alpha-mysql-proxy".
    async fn send_handshake(stream: &mut TcpStream) -> Result<()> {
        // Send handshake, packet from opensrv-mysql. Packet[seq = 0]
        stream
            .write_all(&[
                69, 00, 00, 00, 10, 53, 46, 49, 46, 49, 48, 45, 97, 108, 112,
                104, 97, 45, 109, 115, 113, 108, 45, 112, 114, 111, 120, 121,
                0, 8, 0, 0, 0, 59, 88, 44, 112, 111, 95, 107, 125, 0, 0, 66,
                33, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 62, 111, 54,
                94, 87, 122, 33, 47, 107, 77, 125, 78, 0,
            ])
            .await?;
        stream.flush().await?;

        Ok(())
    }

    /// Reads and discards the client's handshake response (Packet[seq = 1]).
    async fn receive_handshake_response(stream: &mut TcpStream) -> Result<()> {
        // Read the 4-byte packet header first...
        let mut buffer = vec![0; 4];
        stream.read_exact(&mut buffer).await?;

        // Ignore handshake response. Packet[seq = 1]
        // ...then read exactly the advertised payload length and drop it.
        let len = u32::from_le_bytes([buffer[0], buffer[1], buffer[2], 0]);
        buffer.resize(len as usize, 0);
        stream.read_exact(&mut buffer).await?;

        Ok(())
    }
}
#![feature(test)]

// Pulls in the unstable `test` crate (presumably for `#[bench]` support,
// given the feature gate above — confirm against the module contents).
extern crate test;

// Each module below is an independent experiment/benchmark variant.
mod option;
mod trait_object;
mod stack_alloc;
mod sneaky_stack_alloc;
mod proper_alloc;
mod invalidate;
mod alias;
use winit::{
    dpi::{PhysicalPosition, PhysicalSize},
    event::{Event, MouseScrollDelta, WindowEvent::*},
    event_loop::{ControlFlow, EventLoop},
};

use crate::{
    rendering::{Display, GUIRenderer, SimRenderer},
    simulation::Simulation,
};

/// Owns the display plus the two renderers (simulation and GUI) and fans
/// window events and draw calls out to them.
pub struct RenderDriver {
    pub display: Display,
    pub sim_renderer: SimRenderer,
    pub gui_renderer: GUIRenderer,
}

impl RenderDriver {
    /// Creates the display and both renderers for the given simulation.
    pub fn new(simulation: &mut Simulation, event_loop: &EventLoop<()>) -> Self {
        let display = Display::new(event_loop);
        let sim_renderer = SimRenderer::new(&display, simulation);
        let gui_renderer = GUIRenderer::new(&display, simulation);
        Self {
            display,
            sim_renderer,
            gui_renderer,
        }
    }

    /// Forwards an event to both renderers, then lets the display react.
    /// NOTE(review): renderers see the event before the display updates its
    /// own state — confirm this ordering is intentional.
    pub fn handle_event(&mut self, simulation: &mut Simulation, event: &Event<()>) {
        self.sim_renderer.handle_event(&mut self.display, simulation, event);
        self.gui_renderer.handle_event(&mut self.display, simulation, event);
        self.display.handle_event(event);
    }

    /// Renders one frame: the simulation layer first, then the GUI layer
    /// onto the same output view.
    /// NOTE(review): `get_frame().unwrap()` panics if frame acquisition
    /// fails — confirm a panic is acceptable here.
    pub fn render(&mut self, simulation: &Simulation) {
        let (_output_frame, output_view) = self.display.get_frame().unwrap();
        self.sim_renderer.render(&self.display, simulation, &output_view);
        self.gui_renderer.render(&self.display, simulation, &output_view);
    }

    /// Asks winit to schedule a redraw of the window.
    pub fn request_render(&mut self) {
        self.display.window.request_redraw();
    }

    /// Handles window-level events: exits on close, resizes the display on
    /// size or scale-factor changes.
    pub fn handle_window_event(&mut self, event: &winit::event::WindowEvent, control_flow: &mut ControlFlow) {
        match event {
            CloseRequested => *control_flow = ControlFlow::Exit,
            ScaleFactorChanged { new_inner_size, .. } => {
                self.display.resize(new_inner_size.width, new_inner_size.height);
            }
            Resized(new_inner_size) => {
                self.display.resize(new_inner_size.width, new_inner_size.height);
            }
            _ => {}
        }
    }
}

/// Event-routing helper: `handle_event` first forwards the raw event, then
/// dispatches window events to the specific `handle_*` hooks, all of which
/// default to no-ops so implementors override only what they need.
pub trait PetriEventHandler {
    fn handle_event<T>(&mut self, display: &mut Display, simulation: &mut Simulation, event: &Event<T>) {
        // Give the implementor a look at every event before dispatch.
        self.forward_event(display, simulation, event);
        if let Event::WindowEvent {
            ref event, ..
        } = event
        {
            match event {
                Resized(size) => {
                    self.handle_resize(display, simulation, size);
                }
                MouseWheel {
                    delta: MouseScrollDelta::LineDelta(_, y),
                    ..
                } => {
                    self.handle_scroll(display, simulation, y);
                }
                CursorMoved { position, .. } => {
                    self.handle_mouse_move(display, simulation, position);
                }
                KeyboardInput { input, .. } => {
                    self.handle_keyboard_input(display, simulation, input);
                }
                _ => {}
            }
        }
    }

    // Default hooks below are no-ops unless overridden.
    fn forward_event<T>(&mut self, _display: &mut Display, _simulation: &mut Simulation, _event: &Event<T>) {}
    fn handle_resize(&mut self, _display: &mut Display, _simulation: &mut Simulation, _size: &PhysicalSize<u32>) {}
    fn handle_scroll(&mut self, _display: &mut Display, _simulation: &mut Simulation, _delta: &f32) {}
    fn handle_mouse_move(
        &mut self,
        _display: &mut Display,
        _simulation: &mut Simulation,
        _pos: &PhysicalPosition<f64>,
    ) {
    }
    fn handle_keyboard_input(
        &mut self,
        _display: &mut Display,
        _simulation: &mut Simulation,
        _input: &winit::event::KeyboardInput,
    ) {
    }
}
/// A distance constraint between two points, identified by their indices
/// into the owning point collection, with a rest length.
pub struct Edge {
    // Index of the first endpoint.
    pub point_1: usize,
    // Index of the second endpoint.
    pub point_2: usize,
    // Rest length of the edge.
    pub length: f64,
}

use point::Point;

/// Relaxes one edge constraint: moves both endpoints along the edge
/// direction by half of the current length error each, so the distance
/// between them returns toward `orig_length`.
///
/// NOTE(review): if the two points coincide, `vec_diff.norm()` is zero and
/// `normalized()` presumably divides by it, which would yield NaN
/// positions — confirm how `Point::normalized` handles the zero vector.
/// NOTE(review): `vec_diff` points from `p2` toward `p1`, yet the
/// correction is *added* to `p1.cur_pos`; whether this converges depends on
/// the relationship between `current_position()` and `cur_pos` — verify the
/// intended sign.
pub fn update_points_for_edge(orig_length: f64, p1: &mut Point, p2: &mut Point) {
    // Vector between the endpoints and the deviation from rest length
    // (positive = stretched, negative = compressed).
    let vec_diff = p1.current_position() - p2.current_position();
    let edge_length_diff = vec_diff.norm() - orig_length;
    let vec_diff_unit = vec_diff.normalized();
    // Split the correction evenly between the two endpoints.
    p1.cur_pos = p1.cur_pos + vec_diff_unit*edge_length_diff*0.5;
    p2.cur_pos = p2.cur_pos - vec_diff_unit*edge_length_diff*0.5;
}
#![deny(warnings, missing_docs, missing_debug_implementations)] //! The core library that allows you to match a regex against buffers and collect //! the results. It also provides the ability to display this in a colorful way //! to a terminal. extern crate glob; extern crate regex; extern crate colored; mod matcher; mod display; mod files; /// The core module for finding matches within files. pub mod grusp { pub use matcher::{Matcher, Stats as StatCollector}; pub use display::{MatchesDisplay as Display}; pub use files::{Collecter as FileCollector}; }
use actix_web::Error;
use actix_web::HttpRequest;
use actix_web::HttpResponse;
use actix_web::Responder;
use auth::{claims::AccessToken, claims::RefreshToken};
use bigneon_db::models::User;
use crypto::sha2::Sha256;
use jwt::{Component, Header, Token};
use serde_json;
use uuid::Uuid;

/// JSON body returned by the auth endpoints: a signed access token plus a
/// signed refresh token.
#[derive(Serialize, Deserialize)]
pub struct TokenResponse {
    pub access_token: String,
    pub refresh_token: String,
}

impl Responder for TokenResponse {
    type Item = HttpResponse;
    type Error = Error;

    // Serializes the token pair as an `application/json` 200 response.
    fn respond_to<S>(self, _req: &HttpRequest<S>) -> Result<HttpResponse, Error> {
        let body = serde_json::to_string(&self)?;
        Ok(HttpResponse::Ok()
            .content_type("application/json")
            .body(body))
    }
}

impl TokenResponse {
    /// Wraps two already-signed token strings.
    pub fn new(access_token: &str, refresh_token: &str) -> Self {
        TokenResponse {
            access_token: String::from(access_token),
            refresh_token: String::from(refresh_token),
        }
    }

    /// Issues a fresh access/refresh token pair for `user`, signing both
    /// with `token_secret` and stamping `token_issuer` into the claims.
    pub fn create_from_user(token_secret: &str, token_issuer: &str, user: &User) -> Self {
        let access_token_claims = AccessToken::new(&user.id, token_issuer.to_string());
        let access_token = Token::new(Default::default(), access_token_claims);
        let refresh_token_claims = RefreshToken::new(&user.id, token_issuer.to_string());
        let refresh_token = Token::new(Default::default(), refresh_token_claims);

        TokenResponse {
            access_token: sign_token(token_secret, &access_token),
            refresh_token: sign_token(token_secret, &refresh_token),
        }
    }

    /// Issues a new access token for `user_id` while reusing the caller's
    /// existing signed refresh token verbatim (the refresh-token flow).
    pub fn create_from_refresh_token(
        token_secret: &str,
        token_issuer: &str,
        user_id: &Uuid,
        signed_refresh_token: &str,
    ) -> Self {
        let access_token_claims = AccessToken::new(&user_id, token_issuer.to_string());
        let access_token = Token::new(Default::default(), access_token_claims);

        TokenResponse {
            access_token: sign_token(token_secret, &access_token),
            refresh_token: String::from(signed_refresh_token),
        }
    }
}

// Signs a token with `token_secret` using a SHA-256 digest.
// NOTE(review): `unwrap()` panics if signing fails — confirm that is
// acceptable here rather than propagating an error to the caller.
fn sign_token<T: Component>(token_secret: &str, token: &Token<Header, T>) -> String {
    token
        .signed(token_secret.as_bytes(), Sha256::new())
        .unwrap()
}
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Test that we DO NOT warn when lifetime name is used multiple // argments, or more than once in a single argument. // // compile-pass #![deny(single_use_lifetimes)] #![allow(dead_code)] #![allow(unused_variables)] fn c<'a>(x: &'a u32, y: &'a u32) { // OK: used twice } fn d<'a>(x: (&'a u32, &'a u32)) { // OK: used twice } fn main() { }
use std::net::SocketAddrV4;
use std::path::PathBuf;
use std::str::FromStr;
use std::sync::mpsc;

mod command;

use nardol::error::NetCommsError;
use shared::ImplementedMessage;
use shared::user::UserLite;
use utils::input;

mod client;
use client::*;

use crate::client::open_database;

// ERROR HANDLING
/// Client entry point: loads the configuration, opens the local message
/// database, logs the user in, starts the background fetcher that polls the
/// server for waiting messages, then runs the interactive input loop until
/// the user quits.
fn main() -> Result<(), NetCommsError> {
    // Example config locations:
    // D:\\stepa\\Documents\\Rust\\net_comms\\src\bin\\client\\client_config.ron
    // C:\Documents\Rust\net_comms\src\bin\client\client_config.ron
    let config_location = get_config_location();
    let config = ClientConfig::new(&config_location).unwrap();

    // The local database file lives inside the configured save location.
    let mut db_path = config.save_location.clone();
    db_path.push("database.db");

    // `output_t`/`output_r`: channel for user-visible output, consumed by
    // `output`. `waiting_messages_t`: carries messages fetched in the
    // background (its receiver is currently unused).
    let (output_t, output_r) = mpsc::channel();
    let (waiting_messages_t, _waiting_messages_r) = mpsc::channel::<ImplementedMessage>();

    open_database(&db_path, output_t.clone()).unwrap();

    output(output_r);

    let socket = SocketAddrV4::new(ip(&config), config.port);
    let user = get_user(socket, UserLite::default_user(), output_t.clone()).unwrap();

    // Handle to the background task polling the server for messages
    // addressed to this user.
    let handle = get_waiting_messages(user.clone(), socket, waiting_messages_t, config.request_incoming_messages_timer, &config.save_location, &db_path, output_t.clone());

    // Blocks until the user exits the interactive loop.
    process_user_input(socket, user, output_t);

    handle.join().unwrap();

    Ok(())
}

/// Prompts on stdin until the user supplies a path to an existing file,
/// then returns it.
/// NOTE(review): `PathBuf::from_str` is infallible, so the `Err` arm (and
/// its error message) is unreachable; a path that is not a file silently
/// re-prompts with no feedback — confirm that is intended.
fn get_config_location() -> PathBuf {
    loop {
        let location = input("Enter client config location: \n>>> ").unwrap();
        match PathBuf::from_str(&location) {
            Ok(path) => {
                if path.is_file() {
                    return path
                }
            },
            Err(_) => println!("Please enter valid client config location."),
        }
    }
}
use crate::datastructure::DataStructure;
use crate::generator::Generator;
use crate::postprocessors::identity::IdentityPostProcessor;
use crate::postprocessors::PostProcessor;
use crate::raytracer::RayTracer;
use crate::renderer::Renderer;
use crate::shader::Shader;

/// Typestate builder for `Renderer`: each `with_*` call consumes the current
/// stage and returns the next one, so a renderer can only be constructed once
/// a generator, raytracer, shader and datastructure have all been supplied.
/// A postprocessor is the only optional component.
pub struct RendererBuilder<'a> {
    pub(self) generator: &'a dyn Generator,
}

/// Builder stage: generator and raytracer chosen.
pub struct RendererBuilderRaytracer<'a> {
    pub(self) generator: &'a dyn Generator,
    pub(self) raytracer: &'a dyn RayTracer,
}

/// Builder stage: generator, raytracer and shader chosen.
pub struct RendererBuilderShader<'a> {
    pub(self) generator: &'a dyn Generator,
    pub(self) raytracer: &'a dyn RayTracer,
    pub(self) shader: &'a dyn Shader,
}

/// Final builder stage: all required components chosen; only the (optional)
/// postprocessor remains to be picked.
pub struct RendererBuilderDatastructure<'a> {
    pub(self) generator: &'a dyn Generator,
    pub(self) raytracer: &'a dyn RayTracer,
    pub(self) shader: &'a dyn Shader,
    pub(self) datastructure: &'a dyn DataStructure,
}

impl<'a> RendererBuilder<'a> {
    /// Starts building a renderer from a ray generator.
    pub fn new(generator: &'a dyn Generator) -> Self {
        Self { generator }
    }

    /// Chooses the raytracer and advances to the next builder stage.
    pub fn with_raytracer(self, raytracer: &'a dyn RayTracer) -> RendererBuilderRaytracer<'a> {
        RendererBuilderRaytracer {
            generator: self.generator,
            raytracer,
        }
    }
}

impl<'a> RendererBuilderRaytracer<'a> {
    /// Chooses the shader and advances to the next builder stage.
    pub fn with_shader(self, shader: &'a dyn Shader) -> RendererBuilderShader<'a> {
        RendererBuilderShader {
            generator: self.generator,
            raytracer: self.raytracer,
            shader,
        }
    }
}

impl<'a> RendererBuilderShader<'a> {
    /// Chooses the acceleration datastructure and advances to the final stage.
    pub fn with_datastructure(
        self,
        datastructure: &'a dyn DataStructure,
    ) -> RendererBuilderDatastructure<'a> {
        RendererBuilderDatastructure {
            generator: self.generator,
            raytracer: self.raytracer,
            shader: self.shader,
            datastructure,
        }
    }
}

impl<'a> RendererBuilderDatastructure<'a> {
    /// Finishes the build with a no-op postprocessor.
    pub fn without_postprocessor(self) -> Renderer<'a> {
        Renderer::new(
            self.generator,
            self.raytracer,
            self.shader,
            self.datastructure,
            &IdentityPostProcessor,
        )
    }

    /// Finishes the build with the given postprocessor.
    pub fn with_postprocessor(self, postprocessor: &'a dyn PostProcessor) -> Renderer<'a> {
        Renderer::new(
            self.generator,
            self.raytracer,
            self.shader,
            self.datastructure,
            postprocessor,
        )
    }
}
//! Compiles the language build configurations in configs/ into two files (one for the tokenizer, one for the rules) //! so they can be inlined. These configs are included at compile time because they define the neccessary parameters to //! run the rules for a language correctly. They are NOT user configuration. use fs_err as fs; use fs::File; use std::{collections::HashMap, io::BufWriter, path::Path}; fn main() { let path = env!("CARGO_MANIFEST_DIR"); let path = Path::new(path).join("configs"); let mut tokenizer_config_map: HashMap<String, serde_json::Value> = HashMap::new(); let mut rules_config_map: HashMap<String, serde_json::Value> = HashMap::new(); println!("cargo:rerun-if-changed={}", path.display()); for entry in fs::read_dir(path).expect("must be able to read config dir") { let entry = entry.expect("must be able to read config dir entry"); println!("cargo:rerun-if-changed={}", entry.path().display()); if entry.path().is_dir() { let lang_code = entry .path() .file_name() .expect("directory must have name") .to_str() .expect("directory name must be unicode") .to_string(); let tokenizer_path = entry.path().join("tokenizer.json"); let rules_path = entry.path().join("rules.json"); println!("cargo:rerun-if-changed={}", tokenizer_path.display()); println!("cargo:rerun-if-changed={}", rules_path.display()); let tokenizer_json = fs::read_to_string(tokenizer_path).expect("tokenizer.json for language must exist"); let rules_json = fs::read_to_string(rules_path).expect("rules.json for language must exist"); tokenizer_config_map.insert( lang_code.clone(), serde_json::from_str(&tokenizer_json).expect("tokenizer.json must be valid json"), ); rules_config_map.insert( lang_code, serde_json::from_str(&rules_json).expect("tokenizer.json must be valid json"), ); } } let out_dir = std::env::var("OUT_DIR").expect("OUT_DIR env var must be set when build.rs is run"); let out_dir = Path::new(&out_dir); let tokenizer_config_writer = BufWriter::new( 
File::create(out_dir.join("tokenizer_configs.json")) .expect("must be able to create file in out dir"), ); serde_json::to_writer_pretty(tokenizer_config_writer, &tokenizer_config_map) .expect("must be able to write JSON to file"); let rules_config_writer = BufWriter::new( File::create(out_dir.join("rules_configs.json")) .expect("must be able to create file in out dir"), ); serde_json::to_writer_pretty(rules_config_writer, &rules_config_map) .expect("must be able to write JSON to file"); }
// Copyright (c) 2020 Sam Blenny
// SPDX-License-Identifier: Apache-2.0 OR MIT
//
#![forbid(unsafe_code)]

use crate::framebuffer::{LINES, WIDTH};
use crate::pt::Pt;

/// ClipRect specifies a region of pixels. X and y pixel ranges are inclusive of
/// min and exclusive of max (i.e. it's min.x..max.x rather than min.x..=max.x)
/// Coordinate System Notes:
/// - (0,0) is top left
/// - Increasing Y moves downward on the screen, increasing X moves right
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct ClipRect {
    pub min: Pt,
    pub max: Pt,
}

impl ClipRect {
    /// Initialize a rectangle using automatic min/max fixup for corner points.
    /// Callers may pass the corners in either order on each axis; the axes are
    /// swapped independently so the stored rect always satisfies min <= max.
    pub fn new(min_x: usize, min_y: usize, max_x: usize, max_y: usize) -> ClipRect {
        // Make sure min_x <= max_x && min_y <= max_y
        let mut min = Pt { x: min_x, y: min_y };
        let mut max = Pt { x: max_x, y: max_y };
        if min_x > max_x {
            // Swap the x coordinates so the x range is well ordered
            min.x = max_x;
            max.x = min_x;
        }
        if min_y > max_y {
            // Swap the y coordinates so the y range is well ordered
            min.y = max_y;
            max.y = min_y;
        }
        ClipRect { min, max }
    }

    /// Make a rectangle of the full screen size (0,0)..(WIDTH,LINES)
    pub fn full_screen() -> ClipRect {
        ClipRect::new(0, 0, WIDTH, LINES)
    }

    /// Make a rectangle of the screen size minus padding (6,6)..(WIDTH-6,LINES-6)
    pub fn padded_screen() -> ClipRect {
        let pad = 6;
        ClipRect::new(pad, pad, WIDTH - pad, LINES - pad)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    // All four corner orderings must normalize to the same rect.
    #[test]
    fn test_cliprect_equivalence() {
        let cr1 = ClipRect {
            min: Pt { x: 1, y: 2 },
            max: Pt { x: 8, y: 9 },
        };
        // Called properly:
        let cr2 = ClipRect::new(1, 2, 8, 9);
        // Called with mixed up corners that should get auto-corrected
        let cr3 = ClipRect::new(8, 2, 1, 9);
        let cr4 = ClipRect::new(1, 9, 8, 2);
        assert_eq!(cr1, cr2);
        assert_eq!(cr2, cr3);
        assert_eq!(cr3, cr4);
    }

    #[test]
    fn test_cliprect_full_screen() {
        let clip = ClipRect::full_screen();
        assert_eq!(clip.min, Pt::new(0, 0));
        assert_eq!(clip.max, Pt::new(WIDTH, LINES));
    }

    // Padded screen must be strictly inside the full screen.
    // NOTE(review): relies on Pt implementing PartialOrd — confirm in pt.rs.
    #[test]
    fn test_cliprect_padded_screen() {
        let c1 = ClipRect::full_screen();
        let c2 = ClipRect::padded_screen();
        assert!(c2.min > c1.min);
        assert!(c2.max < c1.max);
    }
}
// Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use std::fs::File;
use std::io::Write;
use std::path::Path;

use common_expression::types::NumberDataType;

/// Arithmetic operator kinds used to pick the coerced result type.
pub enum OP {
    Plus,
    Minus,
    Mul,
    Div,
    IntDiv,
    Modulo,
    // `Super` = least common supertype of the two operand types.
    Super,
}

/// Generates `src/query/expression/src/utils/arithmetics_type.rs`:
/// a `ResultTypeOfBinary` impl for every ordered pair of numeric types and
/// a `ResultTypeOfUnary` impl for every numeric type. The emitted file is
/// committed generated code — run this to regenerate, never hand-edit it.
pub fn codegen_arithmetic_type() {
    let dest = Path::new("src/query/expression/src/utils");
    let path = dest.join("arithmetics_type.rs");
    let mut file = File::create(path).expect("open");
    // Write the head (license banner + trait declarations).
    let codegen_src_path = file!();
    writeln!(
        file,
        "// Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the \"License\");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an \"AS IS\" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// This code is generated by {codegen_src_path}. DO NOT EDIT.

use crate::types::number::{{Number, F64, F32}};

pub trait ResultTypeOfBinary: Sized {{
    type AddMul: Number;
    type Minus: Number;
    type IntDiv: Number;
    type Modulo: Number;
    type LeastSuper: Number;
}}

pub trait ResultTypeOfUnary: Sized {{
    type Negate: Number;
    type Sum: Number;

    fn checked_add(self, _rhs: Self) -> Option<Self>;
    fn checked_sub(self, _rhs: Self) -> Option<Self>;
    fn checked_mul(self, _rhs: Self) -> Option<Self>;
    fn checked_div(self, _rhs: Self) -> Option<Self>;
    fn checked_rem(self, _rhs: Self) -> Option<Self>;
}}"
    )
    .unwrap();

    let number_types = vec![
        NumberDataType::UInt8,
        NumberDataType::UInt16,
        NumberDataType::UInt32,
        NumberDataType::UInt64,
        NumberDataType::Int8,
        NumberDataType::Int16,
        NumberDataType::Int32,
        NumberDataType::Int64,
        NumberDataType::Float32,
        NumberDataType::Float64,
    ];

    // One ResultTypeOfBinary impl per ordered (lhs, rhs) pair.
    for lhs in &number_types {
        for rhs in &number_types {
            let add_mul = arithmetic_coercion(*lhs, *rhs, OP::Plus);
            let minus = arithmetic_coercion(*lhs, *rhs, OP::Minus);
            let intdiv = arithmetic_coercion(*lhs, *rhs, OP::IntDiv);
            let modulo = arithmetic_coercion(*lhs, *rhs, OP::Modulo);
            let least_super = arithmetic_coercion(*lhs, *rhs, OP::Super);
            writeln!(
                file,
                "
impl ResultTypeOfBinary for ({}, {}) {{
    type AddMul = {};
    type Minus = {};
    type IntDiv = {};
    type Modulo = {};
    type LeastSuper = {};
}}",
                to_primitive_str(*lhs),
                to_primitive_str(*rhs),
                to_primitive_str(add_mul),
                to_primitive_str(minus),
                to_primitive_str(intdiv),
                to_primitive_str(modulo),
                to_primitive_str(least_super),
            )
            .unwrap();
        }
    }

    // One ResultTypeOfUnary impl per type. Floats never overflow (IEEE
    // semantics), so their checked_* ops always return Some; integers
    // delegate to the std checked_* methods.
    for arg in &number_types {
        let negate = neg_coercion(*arg);
        let sum = sum_coercion(*arg);

        match negate {
            NumberDataType::Float32 | NumberDataType::Float64 => {
                writeln!(
                    file,
                    "
impl ResultTypeOfUnary for {} {{
    type Negate = {};
    type Sum = {};

    fn checked_add(self, rhs: Self) -> Option<Self> {{
        Some(self + rhs)
    }}
    fn checked_sub(self, rhs: Self) -> Option<Self> {{
        Some(self - rhs)
    }}
    fn checked_mul(self, rhs: Self) -> Option<Self> {{
        Some(self * rhs)
    }}
    fn checked_div(self, rhs: Self) -> Option<Self> {{
        Some(self / rhs)
    }}
    fn checked_rem(self, rhs: Self) -> Option<Self> {{
        Some(self % rhs)
    }}
}}",
                    to_primitive_str(*arg),
                    to_primitive_str(negate),
                    to_primitive_str(sum),
                )
                .unwrap();
            }
            _ => {
                writeln!(
                    file,
                    "
impl ResultTypeOfUnary for {} {{
    type Negate = {};
    type Sum = {};

    fn checked_add(self, rhs: Self) -> Option<Self> {{
        self.checked_add(rhs)
    }}
    fn checked_sub(self, rhs: Self) -> Option<Self> {{
        self.checked_sub(rhs)
    }}
    fn checked_mul(self, rhs: Self) -> Option<Self> {{
        self.checked_mul(rhs)
    }}
    fn checked_div(self, rhs: Self) -> Option<Self> {{
        self.checked_div(rhs)
    }}
    fn checked_rem(self, rhs: Self) -> Option<Self> {{
        self.checked_rem(rhs)
    }}
}}",
                    to_primitive_str(*arg),
                    to_primitive_str(negate),
                    to_primitive_str(sum),
                )
                .unwrap();
            }
        }
    }
    file.flush().unwrap();
}

/// Maps a NumberDataType to the Rust primitive (or wrapper) spelled into the
/// generated code. Floats use the ordered wrappers F32/F64, not f32/f64.
fn to_primitive_str(dt: NumberDataType) -> &'static str {
    match dt {
        NumberDataType::UInt8 => "u8",
        NumberDataType::UInt16 => "u16",
        NumberDataType::UInt32 => "u32",
        NumberDataType::UInt64 => "u64",
        NumberDataType::Int8 => "i8",
        NumberDataType::Int16 => "i16",
        NumberDataType::Int32 => "i32",
        NumberDataType::Int64 => "i64",
        NumberDataType::Float32 => "F32",
        NumberDataType::Float64 => "F64",
    }
}

/// Result-type coercion for a binary arithmetic op on (a, b).
fn arithmetic_coercion(a: NumberDataType, b: NumberDataType, op: OP) -> NumberDataType {
    let is_signed = a.is_signed() || b.is_signed();
    let is_float = a.is_float() || b.is_float();
    let bit_width = a.bit_width().max(b.bit_width());

    match op {
        // Widen one step so e.g. u8 + u8 cannot overflow the result type.
        OP::Plus | OP::Mul => NumberDataType::new(next_bit_width(bit_width), is_signed, is_float),
        OP::Modulo => {
            if is_float {
                return NumberDataType::Float64;
            }
            // Result sign follows the dividend; width follows the divisor
            // (widened once when signed so the sign bit fits).
            let result_is_signed = a.is_signed();
            let right_size = b.bit_width();
            let size_of_result = if result_is_signed {
                next_bit_width(right_size)
            } else {
                right_size
            };
            NumberDataType::new(size_of_result, result_is_signed, false)
        }
        // Subtraction is always signed, widened one step.
        OP::Minus => NumberDataType::new(next_bit_width(bit_width), true, is_float),
        OP::Div => NumberDataType::Float64,
        OP::IntDiv => NumberDataType::new(bit_width, is_signed, false),
        OP::Super => NumberDataType::new(bit_width, is_signed, is_float),
    }
}

/// Result type of unary negation: always signed; unsigned inputs widen one
/// step so the full unsigned range survives negation.
fn neg_coercion(a: NumberDataType) -> NumberDataType {
    let bit_width = if a.is_signed() {
        a.bit_width()
    } else {
        next_bit_width(a.bit_width())
    };
    NumberDataType::new(bit_width, true, a.is_float())
}

/// Accumulator type for summation: widest type of the same kind.
fn sum_coercion(a: NumberDataType) -> NumberDataType {
    if a.is_float() {
        NumberDataType::Float64
    } else if a.is_signed() {
        NumberDataType::Int64
    } else {
        NumberDataType::UInt64
    }
}

/// Doubles the bit width, saturating at 64 (the widest supported type).
const fn next_bit_width(width: u8) -> u8 {
    if width < 64 { width * 2 } else { 64 }
}
use rand::Rng; use rand_distr::{Normal, Distribution}; use std::cmp; use std::collections::HashMap; use actix::prelude::*; use serde::{Deserialize, Serialize}; const MAX_ASTEROID:i32 = 180; const MIN_ASTEROID:i32 = 80; #[derive(Copy, Clone, Message, Default, Serialize, Deserialize)] #[rtype(result = "()")] pub struct Rocket { pub x:f32, pub y:f32, } impl Rocket { pub fn update(&mut self, _x:f32, _y:f32){ self.x = _x; self.y = _y; } pub fn reset(&mut self){ self.x = 0.5; self.y = 0.75; } } #[derive(Copy, Clone, Message, Default, Serialize, Deserialize)] #[rtype(result = "()")] pub struct Shot { pub x:f32, pub y:f32 } impl Shot { fn update(&mut self) { self.y -= 0.015; } } #[derive(Copy, Clone, Message, Default, Serialize, Deserialize)] #[rtype(result = "()")] pub struct Asteroid { pub health:u8, //Health refers to how many hits it can take. Asteroids start at 1, planets at 2, suns at 3 x:f32, y:f32, diameter:f32, speed:f32 } impl Asteroid { fn update(&mut self) { self.y += self.speed; if self.y > 1.0 { self.initialize(); } } fn initialize(&mut self){ let mut rng = rand::thread_rng(); self.x = rng.gen_range(0.0..1.0); self.y = rng.gen_range(-0.5..0.0); self.speed = rng.gen_range(0.006..0.013); self.set_diameter(); } //health => number of hits before exploding fn assign_health(&mut self) { if self.diameter > 1.0/7.0 { self.health = 3; }else if self.diameter > 1.0/9.0 { self.health = 2; }else { self.health = 1; } } fn set_diameter(&mut self) { //(mean, standard_deviation) let normal = Normal::new(0.075, 0.033).unwrap(); self.diameter = normal.sample(&mut rand::thread_rng()) as f32; let temp:i32 = (self.diameter*1000.0).round() as i32; //Keep within a range self.diameter = (cmp::max(MIN_ASTEROID, temp) as f32)/1000.0; self.diameter = (cmp::min(MAX_ASTEROID, temp) as f32)/1000.0; self.assign_health(); } } pub const TITLE:u8 = 0; pub const PLAY:u8 = 1; pub const END:u8 = 2; #[derive(Copy, Clone, Message, Default, Serialize, Deserialize)] #[rtype(result = "()")] pub struct 
Screen { pub width:f32, pub height:f32, } //Controls the entire game, will be sent to each of the clients, for them to display #[derive(Clone, Message, Default, Serialize, Deserialize)] #[rtype(result = "()")] pub struct GameState { pub score:u32, pub user_count:u8, pub rockets:HashMap<usize, Rocket>, pub _screens:HashMap<usize, Screen>, pub shots:Vec<Shot>, pub asteroids:Vec<Asteroid>, pub screen:u8, pub losing_rocket_id:usize } fn is_collision(rocket:Rocket, asteroid:Asteroid) -> bool { let rocket_width:f32 = 1.0/17.5; return (asteroid.x - rocket.x).abs() < asteroid.diameter/2.0 && (asteroid.y - rocket.y).abs() < asteroid.diameter/2.0 || (asteroid.x - (rocket.x + rocket_width)).abs() < asteroid.diameter/2.0 && (asteroid.y - rocket.y).abs() < asteroid.diameter/2.0; } //collisions impl GameState { pub fn update(&mut self){ if self.screen == PLAY { self.score += 1; for i in 0..self.asteroids.len() { self.asteroids[i].update(); } for i in 0..self.shots.len() { self.shots[i].update(); } if self.score % 500 == 0 { let mut asteroid:Asteroid = Asteroid::default(); asteroid.initialize(); self.asteroids.push(asteroid); } // if self.score % 3 == 0 { // self.collisions(); // } self.collisions(); } } //Finds the distance between two points. 
(For object collision) fn distance(&mut self, x1:f32,y1:f32,x2:f32,y2:f32) -> f32{ return (((x2-x1)*(x2-x1) + (y2-y1)*(y2-y1)) as f64).sqrt() as f32; } fn is_shot_collision(&mut self, i:usize, j:usize) -> bool { return self.distance(self.shots[j].x, self.shots[j].y, self.asteroids[i].x, self.asteroids[i].y) <= self.asteroids[i].diameter/2.0 } fn asteroid_hit(&mut self, i:usize) { if self.asteroids[i].health > 1 { self.asteroids[i].health -= 1; }else { self.asteroids[i].initialize(); } self.score += 20; } fn collisions(&mut self){ for i in 0..self.asteroids.len() { self.check_bullet_asteroid_collisions(i); self.check_rocket_asteroid_collisions(i); if self.screen == END { return; } } } fn check_bullet_asteroid_collisions(&mut self, i:usize) { let mut shot_count = self.shots.len(); let mut j = 0; while j < shot_count { if self.is_shot_collision(i, j) { // did a bullet hit the asteroid self.asteroid_hit(i); // decrease life of the asteroid or reset it self.shots.remove(j); shot_count -= 1; }else if self.shots[j].y < -0.05 { // if the shot is 5 percent out of range, then remove it self.shots.remove(j); shot_count -= 1; }else { j += 1; } } } fn check_rocket_asteroid_collisions(&mut self, i:usize) { for (id, rocket) in self.rockets.iter() { //Collisions if is_collision(*rocket, self.asteroids[i]) { self.screen = END; self.losing_rocket_id = *id; self.clear_game(); return } } } fn clear_game(&mut self){ self.asteroids = Vec::new(); self.shots = Vec::new(); // self.score = 0; } pub fn build(&mut self) { //creates 7 asteroids above the map to begin with self.asteroids = Vec::new(); self.shots = Vec::new(); self.score = 0; for _ in 0..7 { let mut asteroid:Asteroid = Asteroid::default(); asteroid.initialize(); self.asteroids.push(asteroid); } for (_id, rocket) in self.rockets.iter_mut() { rocket.reset(); } self.screen = PLAY; } pub fn num_players(&self) -> usize{ self.rockets.len() } fn _shoot(&mut self, id:usize){ let from_rocket:Rocket = match self.rockets.get(&id) { 
Some(&rocket) => rocket, _ => return }; let shot = Shot { x:from_rocket.x, y:from_rocket.y }; self.shots.push(shot); } pub fn _print_state(&self) { println!("Rockets: {}, Shots count: {}, Asteroids count: {}", self.rockets.len(), self.shots.len(), self.asteroids.len()); } pub fn _is_playing(&self) -> bool { return self.screen == 1 && self.rockets.len() > 0; } pub fn to_json_string(&self) -> String { match serde_json::to_string(self) { Ok(json_str) => json_str, Err(e) => { println!("Error while serializing game state to json: {}", e); "".to_string() } } } }
pub mod utilities;
use crate::utilities::Verify;
use proffer::*;

// Basic struct generation: attributes, pub-ness, field docs and field
// attributes must all appear in the expected order.
#[test]
fn basic_gen() {
    let struct_ = Struct::new("Basic")
        .set_is_pub(true)
        .add_attribute("#[derive(Clone)]")
        .add_field(
            Field::new("field1", "String")
                .set_is_pub(true)
                .add_attribute("#[serde = w]")
                .add_doc("/// Some example documentation")
                .add_docs(vec!["/// Another line", "/// and another"])
                .to_owned(),
        )
        .add_field(Field::new("field2", "usize"))
        .to_owned();

    // Comparison is whitespace-normalized, so only token order matters here.
    let expected = r#"
        #[derive(Clone)]
        pub struct Basic {
            /// Some example documentation
            /// Another line
            /// and another
            #[serde = w]
            pub field1: String,
            field2: usize,
        }
    "#;

    let src_code = struct_.generate_and_verify();
    println!("{}", &src_code);
    assert_eq!(norm_whitespace(expected), norm_whitespace(&src_code));
}

// Generic parameters should be emitted in insertion order with their trait
// bounds collected into a `where` clause.
#[test]
fn generic_gen() {
    let s = Struct::new("Generic")
        .set_is_pub(true)
        .add_generic(
            Generic::new("T")
                .add_trait_bounds(vec!["ToString"])
                .to_owned(),
        )
        .add_generic(
            Generic::new("S")
                .add_trait_bounds(vec!["ToString", "Number"])
                .to_owned(),
        )
        .add_fields(&[Field::new("field1", "S"), Field::new("field2", "T")])
        .to_owned();

    let src_code = s.generate_and_verify();
    println!("{}", &src_code);

    let expected = r#"
        pub struct Generic<T, S>
        where
            T: ToString,
            S: ToString + Number,
        {
            field1: S,
            field2: T,
        }
    "#;
    assert_eq!(norm_whitespace(expected), norm_whitespace(&src_code));
}

// Struct-level doc lines should precede the struct definition itself.
#[test]
fn gen_with_doc() {
    let struct_ = Struct::new("Basic")
        .set_is_pub(true)
        .add_doc("/// Some example documentation")
        .add_docs(vec!["/// Another line", "/// and another"])
        .to_owned();

    let expected = r#"
        /// Some example documentation
        /// Another line
        /// and another
        pub struct Basic {
        }
    "#;

    let src_code = struct_.generate_and_verify();
    println!("{}", &src_code);
    assert_eq!(norm_whitespace(expected), norm_whitespace(&src_code));
}
//! Stops xidlehook completely at a specific index of the chain or at //! the end. This is used to implement `--once` in the xidlehook //! example application. use crate::{Module, Progress, Result, TimerInfo}; use std::fmt; use log::trace; /// See the module-level documentation #[derive(Clone, Copy)] pub struct StopAt { stop_after: Option<usize>, } impl StopAt { /// Returns a module which will stop execution after a chain of /// timers have reached a certain timer index. pub fn index(i: usize) -> Self { Self { stop_after: Some(i), } } /// Returns a module which will stop execution after a chain of /// timers have executed entirely once. pub fn completion() -> Self { Self { stop_after: None } } } impl Module for StopAt { fn post_timer(&mut self, timer: TimerInfo) -> Result<Progress> { #[allow(clippy::integer_arithmetic)] // timer list is never empty let stop_after = self.stop_after.unwrap_or(timer.length - 1); trace!("{}/{}", timer.index, stop_after); if timer.index >= stop_after { Ok(Progress::Stop) } else { Ok(Progress::Continue) } } } impl fmt::Debug for StopAt { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "StopAt") } }
extern crate capstone;

use capstone::prelude::*;

// Raw x86-64 machine code to disassemble.
// NOTE: `&'static` on a `const` is redundant (consts are implicitly
// 'static) and triggers clippy::redundant_static_lifetimes, so it was
// dropped.
const X86_CODE: &[u8] = b"\x55\x48\x8b\x05\xb8\x13\x00\x00\xe9\x14\x9e\x08\x00\x45\x31\xe4";

/// Disassembles X86_CODE at base address 0x1000 and prints one line per
/// instruction (address, mnemonic, operands).
fn main() {
    let mut cs = Capstone::new()
        .x86()
        .mode(arch::x86::ArchMode::Mode64)
        .detail(true)
        .build()
        .expect("Failed to create Capstone object");

    let insns = cs.disasm_all(X86_CODE, 0x1000)
        .expect("Failed to disassemble");
    println!("count = {}", insns.len());
    for insn in insns.iter() {
        println!("0x{:x}: {:6} {}",
            insn.address(),
            insn.mnemonic().unwrap_or(""),
            insn.op_str().unwrap_or("")
        );
    }
}
use bevy::prelude::*; use multimap::MultiMap; use std::borrow::Cow; use std::collections::HashMap; use std::hash::Hash; // IDEA: Can we instead implicitly declare indexes by passing in a ComponentIndex<T> to our systems? // We don't actually want the full resource structure, since these should never be manually updated #[derive(Debug, PartialEq, Eq)] pub struct ComponentIndex<T: Hash + Eq> { // TODO: we can speed this up by changing reverse to be a Hashmap<Entity, Hash<T>>, then feeding those directly back into forward // This prevents us from ever having to store the unhashed T, which can be significantly sized (requires unstable functionality) // TODO: How can we improve memory locality on this data structure forward: MultiMap<T, Entity>, reverse: HashMap<Entity, T>, } impl<T: Hash + Eq> ComponentIndex<T> { pub fn get(&self, component_val: &T) -> Cow<'_, [Entity]> { match self.forward.get_vec(component_val) { Some(e) => Cow::from(e), None => Cow::from(Vec::new()), } } pub fn new() -> Self { ComponentIndex::<T>::default() } fn remove(&mut self, entity: &Entity) { let old_component = &self.reverse.get(&entity); if old_component.is_some() { self.forward .retain(|k, v| (k == old_component.unwrap()) && (v != entity)); self.reverse.remove(entity); } } // TODO: add manual_update function for multi-stage flow // TODO: add clean function to remove unused keys and fix memory locality } impl<T: Hash + Eq> Default for ComponentIndex<T> { fn default() -> Self { ComponentIndex::<T> { forward: MultiMap::new(), reverse: HashMap::new(), } } } pub trait IndexKey: Component + Eq + Hash + Clone {} impl<T: Component + Eq + Hash + Clone> IndexKey for T {} pub trait ComponentIndexes { fn init_index<T: IndexKey>(&mut self) -> &mut Self; fn update_component_index<T: IndexKey>( index: ResMut<ComponentIndex<T>>, query: Query<(&T, Entity)>, changed_query: Query<(&T, Entity), Changed<T>>, ); } impl ComponentIndexes for AppBuilder { fn init_index<T: IndexKey>(&mut self) -> &mut Self { 
self.init_resource::<ComponentIndex<T>>(); // FIXME: this should instead be run automatically whenever an index is used // Otherwise there's no guarantee it's fresh // Will also need to add a copy to LAST self.add_startup_system_to_stage( "post_startup", Self::update_component_index::<T>.system(), ); self.add_system_to_stage( stage::POST_UPDATE, Self::update_component_index::<T>.system(), ); self } fn update_component_index<T: IndexKey>( mut index: ResMut<ComponentIndex<T>>, query: Query<(&T, Entity)>, changed_query: Query<(&T, Entity), Changed<T>>, ) { // First, clean up any entities who had this component removed for entity in query.removed::<T>().iter() { index.remove(entity); } for (component, entity) in changed_query.iter() { index.remove(&entity); // Add in new values for the changed records to the forward and reverse entries index.forward.insert(component.clone(), entity); index.reverse.insert(entity, component.clone()); } } } #[allow(dead_code)] mod test { use super::*; #[derive(Debug, Clone, Hash, PartialEq, Eq)] struct MyStruct { val: i8, } #[derive(Debug, Clone, Hash, PartialEq, Eq)] struct MyTupleStruct(i8); #[derive(Debug, Clone, Hash, PartialEq, Eq)] struct MyCompoundStruct { val: i8, name: String, } #[allow(dead_code)] #[derive(Debug, Clone, Hash, PartialEq, Eq)] enum MyEnum { Red, Blue, } const GOOD_NUMBER: i8 = 42; const BAD_NUMBER: i8 = 0; #[derive(Debug, PartialEq, Eq)] enum Goodness { Good, Bad, Confused, } fn spawn_bad_entity(commands: &mut Commands) { commands.spawn((MyStruct { val: BAD_NUMBER }, Goodness::Bad)); } fn spawn_good_entity(commands: &mut Commands) { commands.spawn((MyStruct { val: GOOD_NUMBER }, Goodness::Good)); } fn spawn_deficient_entity(commands: &mut Commands) { commands.spawn((Goodness::Good,)); } fn augment_entities( commands: &mut Commands, query: Query<Entity, (With<Goodness>, Without<MyStruct>)>, ) { for e in query.iter() { commands.insert(e, (MyStruct { val: GOOD_NUMBER },)); } } fn reform_entities( mut query: 
Query<(&mut Goodness, &mut MyStruct)>, index: Res<ComponentIndex<MyStruct>>, ) { let entities = index.get(&MyStruct { val: BAD_NUMBER }); for e in entities.iter() { let (mut goodness, mut val) = query.get_mut(*e).unwrap(); *goodness = Goodness::Good; *val = MyStruct { val: GOOD_NUMBER }; } } fn purge_badness(commands: &mut Commands, index: Res<ComponentIndex<MyStruct>>) { let entities = index.get(&MyStruct { val: BAD_NUMBER }); for e in entities.iter() { commands.despawn(*e); } } fn ensure_goodness(query: Query<&Goodness>, index: Res<ComponentIndex<MyStruct>>) { let entities = index.get(&MyStruct { val: GOOD_NUMBER }); // Each test must have at least one matching example when checked assert!(entities.len() >= 1); // Each entity with MyStruct.val = GOOD_NUMBER is Good for e in entities.iter() { assert_eq!( query .get_component::<Goodness>(*e) .unwrap_or(&Goodness::Confused), &Goodness::Good ); } } fn ensure_absence_of_bad(query: Query<&Goodness>, index: Res<ComponentIndex<MyStruct>>) { let entities = index.get(&MyStruct { val: BAD_NUMBER }); assert!(entities.len() == 0); for goodness in query.iter() { assert!(*goodness != Goodness::Bad); } } fn debug_index(index: Res<ComponentIndex<MyStruct>>) { dbg!(index); } #[test] fn struct_test() { let mut app_builder = App::build(); app_builder.init_index::<MyStruct>(); app_builder.run(); } #[test] fn tuple_struct_test() { App::build().init_index::<MyTupleStruct>().run() } #[test] fn compound_struct_test() { App::build().init_index::<MyCompoundStruct>().run() } #[test] fn enum_test() { App::build().init_index::<MyEnum>().run() } #[test] fn startup_spawn_test() { App::build() .init_index::<MyStruct>() .add_startup_system(spawn_good_entity.system()) .add_startup_system(spawn_bad_entity.system()) .add_system_to_stage(stage::FIRST, ensure_goodness.system()) .run() } #[test] fn update_spawn_test() { App::build() .init_index::<MyStruct>() .add_system(spawn_good_entity.system()) .add_system(spawn_bad_entity.system()) 
.add_system_to_stage(stage::LAST, ensure_goodness.system()) .run() } #[test] fn duplicate_spawn_test() { App::build() .init_index::<MyStruct>() .add_system(spawn_good_entity.system()) .add_system(spawn_good_entity.system()) .add_system(spawn_bad_entity.system()) .add_system_to_stage(stage::LAST, ensure_goodness.system()) .run() } #[test] fn component_added_test() { App::build() .init_index::<MyStruct>() .add_startup_system(spawn_deficient_entity.system()) .add_startup_system(spawn_bad_entity.system()) .add_system(augment_entities.system()) .add_system_to_stage(stage::LAST, ensure_goodness.system()) .run() } #[test] fn component_modified_test() { App::build() .init_index::<MyStruct>() .add_startup_system(spawn_bad_entity.system()) .add_startup_system(spawn_bad_entity.system()) .add_system(reform_entities.system()) .add_system_to_stage(stage::LAST, ensure_goodness.system()) .run() } #[test] fn entity_removal_test() { App::build() .init_index::<MyStruct>() .add_startup_system(spawn_bad_entity.system()) .add_system(purge_badness.system()) .add_system_to_stage(stage::LAST, ensure_absence_of_bad.system()) .run() } #[test] fn duplicate_removal_test() { App::build() .init_index::<MyStruct>() .add_startup_system(spawn_bad_entity.system()) .add_startup_system(spawn_bad_entity.system()) .add_system(purge_badness.system()) .add_system_to_stage(stage::LAST, ensure_absence_of_bad.system()) .run() } #[test] #[should_panic] // Commands don't get processed until the end of the current Stage fn same_stage_addition_test() { App::build() .init_index::<MyStruct>() .add_system(spawn_deficient_entity.system()) .add_system(augment_entities.system()) .add_system_to_stage(stage::LAST, ensure_goodness.system()) .run() } #[test] #[should_panic] // Commands don't get processed until the end of the current Stage fn same_stage_modification_test() { App::build() .init_index::<MyStruct>() .add_system(spawn_bad_entity.system()) .add_system(reform_entities.system()) .add_system_to_stage(stage::LAST, 
ensure_goodness.system()) .run() } #[test] #[should_panic] // Commands don't get processed until the end of the current Stage fn same_stage_removal_test() { App::build() .init_index::<MyStruct>() .add_system(spawn_bad_entity.system()) .add_system(purge_badness.system()) .add_system_to_stage(stage::LAST, ensure_absence_of_bad.system()) .run() } #[test] fn earlier_stage_addition_test() { App::build() .init_index::<MyStruct>() .add_system_to_stage(stage::PRE_UPDATE, spawn_deficient_entity.system()) .add_system(augment_entities.system()) .add_system_to_stage(stage::LAST, ensure_goodness.system()) .run() } // FIXME: add test to catch delayed index updating with naive approach }
/// Prints an incrementing greeting once per second, forever.
fn main() {
    let mut n = 1;
    loop {
        println!("Hello! {}", n);
        n += 1;
        // `std::thread::sleep_ms` has been deprecated since Rust 1.6;
        // use the Duration-based `thread::sleep` instead.
        std::thread::sleep(std::time::Duration::from_millis(1000));
    }
}
use std::collections::HashMap;
use std::fs::File;
use std::path::{Path, PathBuf};

use clap::{app_from_crate, Arg};
use image::{ImageBuffer, RgbaImage, Rgba, ImageFormat};
use serde::{Deserialize, Deserializer};
use serde::de::Error;

// Recolors an input image: each input pixel's RGB value is looked up in a
// YAML color map; the matching rule decides the output pixel (a fill color or
// the corresponding pixel of a referenced image), optionally run through
// filters. Panics with a message on any unrecoverable input problem.
fn main() {
    let matches = app_from_crate!()
        .arg(Arg::with_name("image-src").short('I').value_name("DIR").multiple_occurrences(true))
        .arg(Arg::with_name("colormap").short('m').value_name("FILE").required(true))
        .arg(Arg::with_name("output").short('o').value_name("FILE").default_value("a.png"))
        .arg(Arg::with_name("image").value_name("IMAGE").required(true))
        .get_matches();
    // Directories searched, in order, for images referenced by the map.
    let image_src = matches.values_of_os("image-src").map(|v| v.map(Path::new).collect()).unwrap_or_else(|| Vec::new());
    let colormap = Path::new(matches.value_of_os("colormap").unwrap());
    let output = Path::new(matches.value_of_os("output").unwrap());
    let image = Path::new(matches.value_of_os("image").unwrap());
    let map: ColorMap = serde_yaml::from_reader(File::open(colormap).expect("Failed to open color map")).expect("Failed to read color map");
    // First entry flagged `fallback: true` (if any) handles unmatched colors.
    let fallback = map.0.iter().filter_map(|a| if a.fallback.unwrap_or(false) { Some(a) } else { None }).next();
    let image = image::open(image).expect("Failed to open input image").into_rgb();
    // Pre-load every image referenced by the map, trying each source dir in order.
    let mut images: HashMap<&Path, RgbaImage> = HashMap::new();
    for entry in map.0.iter() {
        if let ColorSource::Image(p) = &entry.source {
            if !images.contains_key(&**p) {
                let mut i = None;
                for &dir in image_src.iter() {
                    if let Ok(img) = image::open(dir.join(p)) {
                        i = Some(img.into_rgba());
                        break;
                    }
                }
                match i {
                    None => {
                        panic!("Failed to open referenced image {}", p.to_string_lossy());
                    }
                    Some(i) => {
                        // Referenced images are sampled pixel-for-pixel, so
                        // their dimensions must match the input exactly.
                        if i.width() != image.width() || i.height() != image.height() {
                            panic!("Included images must be same size");
                        }
                        images.insert(p, i);
                    }
                }
            }
        }
    }
    let mut output_image: RgbaImage = ImageBuffer::new(image.width(), image.height());
    for i in 0..image.width() {
        for j in 0..image.height() {
            let pixel = image.get_pixel(i, j);
            let [r, g, b] = pixel.0;
            // Input is RGB; compare against map entries as fully opaque RGBA.
            let color = Color { a: 0xff, r, g, b };
            // Linear scan over the map; first exact color match wins.
            let mut entry = None;
            for e in map.0.iter() {
                if let Some(c) = e.color {
                    if c == color {
                        entry = Some(e);
                        break;
                    }
                }
            }
            entry = entry.or(fallback);
            match entry {
                None => panic!("No rule found for image color #{:02x}{:02x}{:02x}", r, g, b),
                Some(entry) => {
                    let mut color = match &entry.source {
                        ColorSource::Fill(c) => *c,
                        ColorSource::Image(p) => {
                            let [r, g, b, a] = images.get(&**p).unwrap().get_pixel(i, j).0;
                            Color { r, g, b, a }
                        }
                    };
                    // Apply the rule's filters in declaration order.
                    for filter in entry.filters.iter() {
                        match filter {
                            ColorFilter::Multiply { color: c } => {
                                color = color.multiply(*c);
                            }
                        }
                    }
                    output_image.put_pixel(i, j, Rgba([color.r, color.g, color.b, color.a]));
                }
            }
        }
    }
    output_image.save_with_format(output, ImageFormat::Png).unwrap();
}

/// The parsed YAML color map: an ordered list of rules.
#[derive(Debug, Deserialize)]
struct ColorMap(Vec<ColorMapEntry>);

/// One rule: the input `color` it matches (or `fallback`), where the output
/// color comes from (`source`, flattened), and any `filters` applied after.
#[derive(Debug, Deserialize)]
struct ColorMapEntry {
    color: Option<Color>,
    fallback: Option<bool>,
    #[serde(default)]
    filters: Vec<ColorFilter>,
    #[serde(flatten)]
    source: ColorSource,
}

/// An RGBA color; deserialized from "#hex" strings or "transparent".
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
struct Color { r: u8, g: u8, b: u8, a: u8 }

#[derive(Debug, Deserialize)]
#[serde(rename_all = "lowercase")]
enum ColorSource {
    Fill(Color),
    Image(PathBuf),
}

#[derive(Debug, Deserialize)]
#[serde(rename_all = "lowercase", tag = "type")]
enum ColorFilter {
    Multiply {
        color: Color,
    }
}

impl Color {
    // Unpack 0xAARRGGBB (big-endian byte order: [a, r, g, b]).
    fn from_rgba(color: u32) -> Color {
        let [a, r, g, b] = color.to_be_bytes();
        Color { r, g, b, a }
    }

    // Unpack 0xRRGGBB as fully opaque.
    fn from_rgb(color: u32) -> Color {
        Color::from_rgba(0xff000000 | color)
    }

    // Parse a 6- or 8-digit hex string (without the leading '#').
    // NOTE(review): the 8-digit form is interpreted as AARRGGBB by
    // `from_rgba` (alpha first), not RRGGBBAA — confirm this matches the
    // documented color-map format.
    fn from_hex_string(s: &str) -> Result<Color, ()> {
        if s.len() == 8 {
            u32::from_str_radix(s, 16).map(Color::from_rgba).map_err(|_| ())
        } else if s.len() == 6 {
            u32::from_str_radix(s, 16).map(Color::from_rgb).map_err(|_| ())
        } else {
            Err(())
        }
    }

    // Component-wise multiply blend of two colors (including alpha).
    fn multiply(self, other: Color) -> Color {
        Color {
            r: dmul255(self.r, other.r),
            g: dmul255(self.g, other.g),
            b: dmul255(self.b, other.b),
            a: dmul255(self.a, other.a),
        }
    }
}

// Multiply two 0..=255 channel values as if each were a fraction of 1.0.
fn dmul255(a: u8, b: u8) -> u8 {
    (((a as f32 / 255.0) * (b as f32 / 255.0)) * 255.0) as u8
}

impl<'de> Deserialize<'de> for Color {
    // Accepts "#rrggbb", "#aarrggbb" (see from_hex_string) or "transparent".
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where D: Deserializer<'de> {
        let s = String::deserialize(deserializer)?;
        if s.starts_with("#") {
            Color::from_hex_string(&s[1..]).map_err(|_| D::Error::custom("invalid hex syntax"))
        } else {
            match &*s {
                "transparent" => Ok(Color::from_rgba(0x00000000)),
                _ => Err(D::Error::custom("invalid color spec"))
            }
        }
    }
}
use std::cell::Cell;

use hardware::pin::Pin;
use hardware::peripherals::digital_io::DigitalOutput;
use hardware::peripherals::digital_io::DigitalValue;
use hardware::peripherals::time::Time;

use wasp::motor::Direction;
use wasp::motor::StepperDriverConfig;
use wasp::motor::StepperDriver;

// Does not compile yet. Lifetimes are hard.
/*
fn make_stepper<'a>(step_pin: &'a mut Pin<u8>, dir_pin: &'a mut Pin<u8>, time: &'a Time, config: StepperDriverConfig) -> (SimulatedStepper, StepperDriver<'a>, StepOutput<'a>, DirectionOutput<'a>) {
    let simulated_stepper = SimulatedStepper::new();
    let step_output = StepOutput::new(step_pin, &simulated_stepper);
    let direction_output = DirectionOutput::new(dir_pin, &simulated_stepper);
    let stepper_driver = StepperDriver::new(&mut step_output, &mut direction_output, time, config);
    (simulated_stepper, stepper_driver, step_output, direction_output)
}
*/

/// Software model of a stepper motor: tracks a signed step count and the
/// current direction. Uses `Cell` interior mutability so the two output
/// adapters below can share an immutable reference to the same stepper.
pub struct SimulatedStepper {
    step: Cell<i32>,       // net step count (direction-signed)
    direction: Cell<Direction>,
}

impl SimulatedStepper {
    pub fn new() -> SimulatedStepper {
        SimulatedStepper {
            step: Cell::default(),
            direction: Cell::default(),
        }
    }

    /// Advance one step in the current direction; `Direction as i32`
    /// supplies the +/- increment added to the step counter.
    pub fn step(&self) {
        //println!("Simulated Stepper Stepping!");
        let step = self.step.get();
        let direction = self.direction.get();
        self.step.set(step + direction as i32);
    }

    /// Current net step count.
    pub fn get_step(&self) -> i32 {
        self.step.get()
    }

    pub fn set_direction(&self, direction: Direction) {
        self.direction.set(direction);
    }
}

/// Step-signal output adapter: steps the simulated motor on each
/// Low -> High (rising) edge written to it.
pub struct StepOutput<'a> {
    stepper: &'a SimulatedStepper,
    val: DigitalValue,      // last value written, for edge detection
    pin: &'a mut Pin<u8>,
}

impl<'a> StepOutput<'a> {
    pub fn new(pin: &'a mut Pin<u8>, stepper: &'a SimulatedStepper) -> StepOutput<'a> {
        StepOutput {
            stepper: stepper,
            val: DigitalValue::Low,
            pin: pin,
        }
    }
}

impl<'a> DigitalOutput for StepOutput<'a> {
    fn write(&mut self, val: DigitalValue) {
        //println!("StepOutput Write: {:?}", val);
        // Only a rising edge triggers a step.
        if self.val == DigitalValue::Low && val == DigitalValue::High {
            self.stepper.step();
        }
        self.val = val;
    }

    fn read(&self) -> DigitalValue {
        self.val
    }
}

/// Direction output adapter: maps High -> Forward, Low -> Backward.
pub struct DirectionOutput<'a> {
    stepper: &'a SimulatedStepper,
    val: DigitalValue,
    pin: &'a mut Pin<u8>,
}

impl<'a> DirectionOutput<'a> {
    pub fn new(pin: &'a mut Pin<u8>, stepper: &'a SimulatedStepper) -> DirectionOutput<'a> {
        DirectionOutput {
            stepper: stepper,
            val: DigitalValue::Low,
            pin: pin,
        }
    }
}

impl<'a> DigitalOutput for DirectionOutput<'a> {
    fn write(&mut self, val: DigitalValue) {
        match val {
            DigitalValue::High => self.stepper.set_direction(Direction::Forward),
            DigitalValue::Low => self.stepper.set_direction(Direction::Backward),
        }
        self.val = val;
    }

    fn read(&self) -> DigitalValue {
        self.val
    }
}
/// Returns the square of the sum of the numbers 1..=n.
///
/// `square_of_sum(0)` is 0 (empty sum).
pub fn square_of_sum(n: u32) -> u32 {
    // Iterator sum replaces the manual accumulation loop.
    let sum: u32 = (1..=n).sum();
    sum.pow(2)
}

/// Returns the sum of the squares of the numbers 1..=n.
///
/// `sum_of_squares(0)` is 0 (empty sum).
pub fn sum_of_squares(n: u32) -> u32 {
    (1..=n).map(|i| i.pow(2)).sum()
}

/// Returns the absolute difference between `square_of_sum(n)` and
/// `sum_of_squares(n)`.
///
/// `square_of_sum(n) >= sum_of_squares(n)` holds for every n, but the
/// symmetric form of the original is kept so unsigned subtraction can
/// never underflow. Each term is computed once instead of twice.
pub fn difference(n: u32) -> u32 {
    let square = square_of_sum(n);
    let squares = sum_of_squares(n);
    if square > squares { square - squares } else { squares - square }
}
// Copyright 2022 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at https://mozilla.org/MPL/2.0/.

use crate::mem_allocator::JEAllocator;

/// mmap allocator.
/// For better performance, we use jemalloc as the inner allocator.
///
/// On Linux, allocations at or above a 64 MiB threshold (and with alignment
/// not exceeding the page size) are served directly by mmap/mremap; smaller
/// or over-aligned requests are delegated to the inner jemalloc allocator.
/// On every other OS this type is a transparent wrapper around jemalloc.
#[derive(Debug, Clone, Copy, Default)]
pub struct MmapAllocator {
    allocator: JEAllocator,
}

impl MmapAllocator {
    pub fn new() -> Self {
        Self {
            allocator: JEAllocator::default(),
        }
    }
}

#[cfg(target_os = "linux")]
pub mod linux {
    use std::alloc::AllocError;
    use std::alloc::Allocator;
    use std::alloc::Layout;
    use std::ptr::null_mut;
    use std::ptr::NonNull;

    use super::MmapAllocator;
    use crate::runtime::ThreadTracker;

    // MADV_POPULATE_WRITE is supported since Linux 5.14.
    const MADV_POPULATE_WRITE: i32 = 23;

    // Allocations of at least this size (64 MiB) go straight to mmap.
    const THRESHOLD: usize = 64 << 20;

    impl MmapAllocator {
        /// Allocate `layout.size()` bytes via anonymous, pre-populated mmap.
        /// Caller must guarantee `layout.align() <= page_size()` (mmap only
        /// returns page-aligned addresses).
        #[inline(always)]
        fn mmap_alloc(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
            debug_assert!(layout.align() <= page_size());
            // Account the allocation against the current thread's tracker;
            // a tracker limit failure aborts the allocation.
            ThreadTracker::alloc(layout.size() as i64)?;
            const PROT: i32 = libc::PROT_READ | libc::PROT_WRITE;
            // MAP_POPULATE pre-faults the mapping to avoid page faults later.
            const FLAGS: i32 = libc::MAP_PRIVATE | libc::MAP_ANONYMOUS | libc::MAP_POPULATE;
            let addr = unsafe { libc::mmap(null_mut(), layout.size(), PROT, FLAGS, -1, 0) };
            if addr == libc::MAP_FAILED {
                return Err(AllocError);
            }
            let addr = NonNull::new(addr as *mut ()).ok_or(AllocError)?;
            Ok(NonNull::<[u8]>::from_raw_parts(addr, layout.size()))
        }

        /// Unmap a region previously obtained from `mmap_alloc`/`mmap_grow`.
        #[inline(always)]
        unsafe fn mmap_dealloc(&self, ptr: NonNull<u8>, layout: Layout) {
            debug_assert!(layout.align() <= page_size());
            ThreadTracker::dealloc(layout.size() as i64);
            let result = libc::munmap(ptr.cast().as_ptr(), layout.size());
            assert_eq!(result, 0, "Failed to deallocate.");
        }

        /// Grow a mapping in place if possible, otherwise let the kernel move
        /// it (MREMAP_MAYMOVE). On kernels >= 5.14 the new tail is pre-faulted
        /// with MADV_POPULATE_WRITE.
        #[inline(always)]
        unsafe fn mmap_grow(
            &self,
            ptr: NonNull<u8>,
            old_layout: Layout,
            new_layout: Layout,
        ) -> Result<NonNull<[u8]>, AllocError> {
            debug_assert!(old_layout.align() <= page_size());
            debug_assert!(old_layout.align() == new_layout.align());
            // Re-account: release the old size, charge the new one.
            ThreadTracker::dealloc(old_layout.size() as i64);
            ThreadTracker::alloc(new_layout.size() as i64)?;
            const REMAP_FLAGS: i32 = libc::MREMAP_MAYMOVE;
            let addr = libc::mremap(
                ptr.cast().as_ptr(),
                old_layout.size(),
                new_layout.size(),
                REMAP_FLAGS,
            );
            if addr == libc::MAP_FAILED {
                return Err(AllocError);
            }
            let addr = NonNull::new(addr as *mut ()).ok_or(AllocError)?;
            if linux_kernel_version() >= (5, 14, 0) {
                libc::madvise(addr.cast().as_ptr(), new_layout.size(), MADV_POPULATE_WRITE);
            }
            Ok(NonNull::<[u8]>::from_raw_parts(addr, new_layout.size()))
        }

        /// Shrink a mapping via mremap; no populate step is needed since the
        /// retained pages are already resident.
        #[inline(always)]
        unsafe fn mmap_shrink(
            &self,
            ptr: NonNull<u8>,
            old_layout: Layout,
            new_layout: Layout,
        ) -> Result<NonNull<[u8]>, AllocError> {
            debug_assert!(old_layout.align() <= page_size());
            debug_assert!(old_layout.align() == new_layout.align());
            ThreadTracker::dealloc(old_layout.size() as i64);
            ThreadTracker::alloc(new_layout.size() as i64)?;
            const REMAP_FLAGS: i32 = libc::MREMAP_MAYMOVE;
            let addr = libc::mremap(
                ptr.cast().as_ptr(),
                old_layout.size(),
                new_layout.size(),
                REMAP_FLAGS,
            );
            if addr == libc::MAP_FAILED {
                return Err(AllocError);
            }
            let addr = NonNull::new(addr as *mut ()).ok_or(AllocError)?;
            Ok(NonNull::<[u8]>::from_raw_parts(addr, new_layout.size()))
        }
    }

    // Routing policy: over-aligned requests always go to jemalloc; otherwise
    // size >= THRESHOLD selects the mmap path. Grow/shrink additionally copy
    // between the two backends when a resize crosses the threshold.
    unsafe impl Allocator for MmapAllocator {
        #[inline(always)]
        fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
            if layout.align() > page_size() {
                return self.allocator.allocate(layout);
            }
            if layout.size() >= THRESHOLD {
                self.mmap_alloc(layout)
            } else {
                self.allocator.allocate(layout)
            }
        }

        #[inline(always)]
        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
            if layout.align() > page_size() {
                return self.allocator.deallocate(ptr, layout);
            }
            if layout.size() >= THRESHOLD {
                self.mmap_dealloc(ptr, layout);
            } else {
                self.allocator.deallocate(ptr, layout);
            }
        }

        #[inline(always)]
        fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
            if layout.align() > page_size() {
                return self.allocator.allocate_zeroed(layout);
            }
            if layout.size() >= THRESHOLD {
                // Fresh anonymous mappings are zero-filled by the kernel, so
                // the plain mmap path already satisfies allocate_zeroed.
                self.mmap_alloc(layout)
            } else {
                self.allocator.allocate_zeroed(layout)
            }
        }

        unsafe fn grow(
            &self,
            ptr: NonNull<u8>,
            old_layout: Layout,
            new_layout: Layout,
        ) -> Result<NonNull<[u8]>, AllocError> {
            if old_layout.align() > page_size() {
                return self.allocator.grow(ptr, old_layout, new_layout);
            }
            if old_layout.size() >= THRESHOLD {
                // Already mmap-backed: resize in place / via mremap.
                self.mmap_grow(ptr, old_layout, new_layout)
            } else if new_layout.size() >= THRESHOLD {
                // Crossing the threshold upward: mmap new, copy, free old.
                let addr = self.mmap_alloc(new_layout)?;
                std::ptr::copy_nonoverlapping(
                    ptr.as_ptr(),
                    addr.cast().as_ptr(),
                    old_layout.size(),
                );
                self.allocator.deallocate(ptr, old_layout);
                Ok(addr)
            } else {
                self.allocator.grow(ptr, old_layout, new_layout)
            }
        }

        unsafe fn grow_zeroed(
            &self,
            ptr: NonNull<u8>,
            old_layout: Layout,
            new_layout: Layout,
        ) -> Result<NonNull<[u8]>, AllocError> {
            if old_layout.align() > page_size() {
                return self.allocator.grow_zeroed(ptr, old_layout, new_layout);
            }
            if old_layout.size() >= THRESHOLD {
                // mmap-backed growth: newly mapped pages come back zeroed.
                self.mmap_grow(ptr, old_layout, new_layout)
            } else if new_layout.size() >= THRESHOLD {
                let addr = self.mmap_alloc(new_layout)?;
                std::ptr::copy_nonoverlapping(
                    ptr.as_ptr(),
                    addr.cast().as_ptr(),
                    old_layout.size(),
                );
                self.allocator.deallocate(ptr, old_layout);
                Ok(addr)
            } else {
                self.allocator.grow_zeroed(ptr, old_layout, new_layout)
            }
        }

        unsafe fn shrink(
            &self,
            ptr: NonNull<u8>,
            old_layout: Layout,
            new_layout: Layout,
        ) -> Result<NonNull<[u8]>, AllocError> {
            if old_layout.align() > page_size() {
                return self.allocator.shrink(ptr, old_layout, new_layout);
            }
            if new_layout.size() >= THRESHOLD {
                // Both sizes above threshold: stay on the mmap path.
                self.mmap_shrink(ptr, old_layout, new_layout)
            } else if old_layout.size() >= THRESHOLD {
                // Crossing the threshold downward: jemalloc new, copy, unmap.
                let addr = self.allocator.allocate(new_layout)?;
                std::ptr::copy_nonoverlapping(
                    ptr.as_ptr(),
                    addr.cast().as_ptr(),
                    old_layout.size(),
                );
                self.mmap_dealloc(ptr, old_layout);
                Ok(addr)
            } else {
                self.allocator.shrink(ptr, old_layout, new_layout)
            }
        }
    }

    /// System page size, fetched once via sysconf and cached in an atomic
    /// (0 doubles as the "not yet fetched" sentinel).
    #[inline(always)]
    fn page_size() -> usize {
        use std::sync::atomic::AtomicUsize;
        use std::sync::atomic::Ordering;

        const INVAILED: usize = 0;
        static CACHE: AtomicUsize = AtomicUsize::new(INVAILED);
        let fetch = CACHE.load(Ordering::Relaxed);
        if fetch == INVAILED {
            let result = unsafe { libc::sysconf(libc::_SC_PAGE_SIZE) as usize };
            // Page size must be a power of two.
            debug_assert_eq!(result.count_ones(), 1);
            CACHE.store(result, Ordering::Relaxed);
            result
        } else {
            fetch
        }
    }

    /// Running kernel version as (major, minor, patch), parsed from
    /// `uname().release` with semver and cached packed into one u32.
    /// Falls back to (5, 13, 0) when the release string does not parse,
    /// i.e. "MADV_POPULATE_WRITE unavailable".
    #[inline(always)]
    fn linux_kernel_version() -> (u16, u8, u8) {
        use std::sync::atomic::AtomicU32;
        use std::sync::atomic::Ordering;

        const INVAILED: u32 = 0;
        static CACHE: AtomicU32 = AtomicU32::new(INVAILED);
        let fetch = CACHE.load(Ordering::Relaxed);
        let code = if fetch == INVAILED {
            let mut uname = unsafe { std::mem::zeroed::<libc::utsname>() };
            assert_ne!(-1, unsafe { libc::uname(&mut uname) });
            let mut length = 0usize;
            // refer: https://semver.org/, here we stop at \0 and _
            while length < uname.release.len()
                && uname.release[length] != 0
                && uname.release[length] != 95
            {
                length += 1;
            }
            // fallback to (5.13.0)
            let fallback_version = 5u32 << 16 | 13u32 << 8;
            let slice = unsafe { &*(&uname.release[..length] as *const _ as *const [u8]) };
            let result = match std::str::from_utf8(slice) {
                Ok(ver) => match semver::Version::parse(ver) {
                    Ok(semver) => {
                        // Pack major/minor/patch into 16/8/8 bits, saturating.
                        (semver.major.min(65535) as u32) << 16
                            | (semver.minor.min(255) as u32) << 8
                            | (semver.patch.min(255) as u32)
                    }
                    Err(_) => fallback_version,
                },
                Err(_) => fallback_version,
            };
            CACHE.store(result, Ordering::Relaxed);
            result
        } else {
            fetch
        };
        ((code >> 16) as u16, (code >> 8) as u8, code as u8)
    }
}

// Non-Linux fallback: forward every Allocator method to jemalloc unchanged.
#[cfg(not(target_os = "linux"))]
pub mod not_linux {
    use std::alloc::AllocError;
    use std::alloc::Allocator;
    use std::alloc::Layout;
    use std::ptr::NonNull;

    use super::MmapAllocator;

    unsafe impl Allocator for MmapAllocator {
        #[inline(always)]
        fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
            self.allocator.allocate(layout)
        }

        #[inline(always)]
        fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
            self.allocator.allocate_zeroed(layout)
        }

        #[inline(always)]
        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
            self.allocator.deallocate(ptr, layout)
        }

        unsafe fn grow(
            &self,
            ptr: NonNull<u8>,
            old_layout: Layout,
            new_layout: Layout,
        ) -> Result<NonNull<[u8]>, AllocError> {
            self.allocator.grow(ptr, old_layout, new_layout)
        }

        unsafe fn grow_zeroed(
            &self,
            ptr: NonNull<u8>,
            old_layout: Layout,
            new_layout: Layout,
        ) -> Result<NonNull<[u8]>, AllocError> {
            self.allocator.grow_zeroed(ptr, old_layout, new_layout)
        }

        unsafe fn shrink(
            &self,
            ptr: NonNull<u8>,
            old_layout: Layout,
            new_layout: Layout,
        ) -> Result<NonNull<[u8]>, AllocError> {
            self.allocator.shrink(ptr, old_layout, new_layout)
        }
    }
}

#[cfg(test)]
mod test {
    // Mirrors the release-string trimming done in linux_kernel_version():
    // stop at NUL or '_' so a "4.18.0-…" uname release parses as semver.
    #[test]
    fn test_semver() {
        let uname_release: Vec<u8> =
            "4.18.0-2.4.3.xyz.x86_64.fdsf.fdsfsdfsdf.fdsafdsf\0\0\0cxzcxzcxzc"
                .as_bytes()
                .to_vec();
        let mut length = 0;
        while length < uname_release.len()
            && uname_release[length] != 0
            && uname_release[length] != 95
        {
            length += 1;
        }
        let slice = unsafe { &*(&uname_release[..length] as *const _ as *const [u8]) };
        let ver = std::str::from_utf8(slice).unwrap();
        let version = semver::Version::parse(ver);
        assert!(version.is_ok());
        let version = version.unwrap();
        assert_eq!(version.major, 4);
        assert_eq!(version.minor, 18);
        assert_eq!(version.patch, 0);
    }
}
#![warn(missing_docs)]
//! Convert between pinyin forms or zhuyin.

extern crate phf;

use std::str;
use std::string::String;
use std::vec::Vec;

// MAP_P2Z and MAP_Z2P static maps
include!(concat!(env!("OUT_DIR"), "/codegen.rs"));

// Column 0 is the bare vowel; columns 1-4 carry tone marks 1-4.
static PINYIN_TONES: [[char; 5]; 6] =
    [['a', 'ā', 'á', 'ǎ', 'à'],
     ['o', 'ō', 'ó', 'ǒ', 'ò'],
     ['e', 'ē', 'é', 'ě', 'è'],
     ['i', 'ī', 'í', 'ǐ', 'ì'],
     ['u', 'ū', 'ú', 'ǔ', 'ù'],
     ['ü', 'ǖ', 'ǘ', 'ǚ', 'ǜ']];

// Index 0 is the neutral-tone dot; index 1 (first tone) is unmarked.
static ZHUYIN_TONES: [&str; 5] = ["˙", "", "ˊ", "ˇ", "ˋ"];

/// Returns the toned char for `c` of `tone` in pinyin
fn get_tonal_mark<C>(c: C, tone: u8) -> char
    where C: Into<char>
{
    let mut c = c.into();
    // 'v' is the ASCII stand-in for 'ü' throughout this crate.
    if c == 'v' {
        c = 'ü';
    }
    for t in PINYIN_TONES.iter() {
        if c == t[0] {
            return t[tone as usize]
        }
    }
    // Callers only pass vowels, which always match a PINYIN_TONES row.
    unreachable!();
}

// Apply `tone` (0 = neutral, 1-4) to the correct vowel of the ASCII rhyme `s`.
fn tone_rhyme(s: &[u8], tone: u8) -> String {
    let mut ret = String::with_capacity(3);
    // If only one character, tone it and done
    if s.len() == 1 {
        ret.push(get_tonal_mark(s[0], tone));
        return ret
    }
    let (c1, c2) = (s[0], s[1]);
    // Tone the 1st character if:
    // * the 1st character is 'a'
    // * the 1st character is 'o' or 'e' and there's no 'a'
    // * the 2nd character is not a rhyme
    if c1 == b'a' || ((c1 == b'o' || c1 == b'e') && c2 != b'a') || !is_rhyme(c2) {
        ret.push(get_tonal_mark(c1, tone));
        ret.push_str(unsafe { str::from_utf8_unchecked(&s[1..]) });
        return ret
    }
    // Tone the 2nd character otherwise
    ret.push(c1 as char);
    ret.push(get_tonal_mark(c2, tone));
    ret.push_str(unsafe { str::from_utf8_unchecked(&s[2..]) });
    ret
}

// Vowel test; 'v' stands in for 'ü'.
fn is_rhyme(c: u8) -> bool {
    c == b'a' || c == b'e' || c == b'i' || c == b'o' || c == b'u' || c == b'v'
}

// Any lowercase ASCII letter that is not a vowel.
fn is_consonant(c: u8) -> bool {
    c >= b'a' && c <= b'z' && !is_rhyme(c)
}

/// Decode a toned rhyme to (rhyme, tone)
fn decode_rhyme(s: &str) -> Option<(String, u8)> {
    let mut rhyme = String::with_capacity(4);
    let mut tone = 0;
    // TODO possibly iterate over bytes
    // Push each char to the return string, un-accenting chars along the way
    for mut c in s.chars() {
        for t in PINYIN_TONES.iter() {
            for i in 1..t.len() {
                if c == t[i] {
                    // If there's two toned vowels, it's malformed
                    c = t[0];
                    if tone != 0 {
                        return None
                    }
                    tone = i as u8;
                }
            }
        }
        if c == 'ü' {
            c = 'v';
        }
        rhyme.push(c);
    }
    // No tone mark seen means neutral tone (5).
    if tone == 0 {
        tone = 5;
    }
    Some((rhyme, tone))
}

/// Split numbered pinyin to (consonant, rhyme, tone)
///
/// Returns None on a missing tone or invalid input.
///
/// # Examples
/// ```
/// # use pinyin_zhuyin::*;
/// assert_eq!(split("shuang1"), Some(("sh", "uang", 1)));
///
/// assert_eq!(split("zh9"), None);
/// ```
pub fn split(s: &str) -> Option<(&str, &str, u8)> {
    // Erhua "r5" has no rhyme letters of its own; special-case it.
    if s == "r5" {
        return Some(("", "r", 5))
    }
    _split(s)
}

fn _split(s: &str) -> Option<(&str, &str, u8)> {
    let s = s.as_bytes();
    let mut pos = 0;
    // Leading run of consonant letters.
    for &b in s.iter() {
        if !is_consonant(b) {
            break
        }
        pos += 1;
    }
    let consonant = unsafe { str::from_utf8_unchecked(&s[0..pos]) };
    // Following run of letters is the rhyme.
    for &b in s.iter().skip(pos) {
        if b < b'a' || b > b'z' {
            break;
        }
        pos += 1;
    }
    let rhyme = unsafe { str::from_utf8_unchecked(&s[consonant.len()..pos]) };
    // Rhyme could not be empty, and the length of tone is at most 1
    // NOTE(review): `> 2` admits one stray byte after the tone digit
    // (e.g. "ma35" splits as ("m", "a", 3)); the comment above suggests
    // this was meant to be `> 1` — confirm intended behavior.
    if rhyme.is_empty() || s.len() - pos > 2 {
        return None
    }
    let tone;
    if pos < s.len() {
        tone = s[pos] - b'0';
        if tone > 5 || tone < 1 {
            return None
        }
    } else {
        // No tone
        return None
    }
    Some((consonant, rhyme, tone))
}

/// Encode pinyin
///
/// Returns None on a missing tone or invalid input.
///
/// # Examples
/// ```
/// # use pinyin_zhuyin::*;
/// assert_eq!(encode_pinyin("ma3"), Some("mǎ".to_owned()));
/// assert_eq!(encode_pinyin("er2"), Some("ér".to_owned()));
/// assert_eq!(encode_pinyin("r5"), Some("r".to_owned()));
///
/// assert_eq!(encode_pinyin("ma"), None);
/// ```
pub fn encode_pinyin<S>(s: S) -> Option<String>
    where S: AsRef<str>
{
    let s = s.as_ref();
    // Syllables with no consonant/rhyme split: 'ê' and erhua 'r'.
    if s == "e5" {
        return Some("ê".to_owned())
    } else if s == "r" || s == "r5" {
        return Some("r".to_owned())
    }
    let (consonant, rhyme, tone) = _split(s)?;
    encode_pinyin_from_parts(consonant, rhyme, tone)
}

fn encode_pinyin_from_parts(consonant: &str, rhyme: &str, tone: u8) -> Option<String> {
    let consonant = consonant.as_bytes();
    let mut rhyme = rhyme.as_bytes().to_vec();
    if !consonant.is_empty() {
        // Is it a valid consonant? (presence in the P2Z table is the check)
        if MAP_P2Z.get(unsafe { str::from_utf8_unchecked(consonant) }) == None {
            return None
        }
        // Convert 'ü' to 'u' if consonant is 'j', 'q', 'x' or 'y'
        if rhyme[0] == b'v' {
            let c = consonant[0];
            if c == b'j' || c == b'q' || c == b'x' || c == b'y' {
                rhyme[0] = b'u';
            }
        }
    }
    // Is it a valid rhyme?
    if MAP_P2Z.get(unsafe { str::from_utf8_unchecked(&rhyme) }) == None {
        return None
    }
    // Tone the rhyme and convert 'v' to 'ü'
    let mut tone = tone;
    if tone == 5 {
        // Neutral tone is written without a mark: slot 0 of PINYIN_TONES.
        tone = 0;
    }
    let mut rhyme: String = tone_rhyme(&rhyme, tone);
    if rhyme.as_bytes()[0] == b'v' {
        rhyme = "ü".to_owned() + &rhyme[1..];
    }
    Some(unsafe { str::from_utf8_unchecked(consonant).to_owned() } + &rhyme)
}

/// Decode pinyin
///
/// Returns None if invalid input.
///
/// # Example
/// ```
/// # use pinyin_zhuyin::*;
/// assert_eq!(decode_pinyin("mǎ"), Some("ma3".to_owned()));
/// assert_eq!(decode_pinyin("ér"), Some("er2".to_owned()));
/// assert_eq!(decode_pinyin("r"), Some("r5".to_owned()));
/// ```
pub fn decode_pinyin<S>(s: S) -> Option<String>
    where S: AsRef<str>
{
    let s = s.as_ref();
    // Special forms with no consonant/rhyme split.
    if s == "ê" {
        return Some("e5".to_owned())
    } else if s == "r" {
        return Some("r5".to_owned())
    }
    let (consonant, rhyme, tone) = decode_pinyin_to_parts(s)?;
    // Assemble "<consonant><rhyme><tone digit>".
    let mut ret = Vec::with_capacity(7);
    ret.extend_from_slice(consonant.as_bytes());
    ret.extend_from_slice(rhyme.as_bytes());
    ret.push(tone + b'0');
    Some(unsafe { String::from_utf8_unchecked(ret) })
}

fn decode_pinyin_to_parts(s: &str) -> Option<(&str, String, u8)> {
    let s = s.as_bytes();
    let mut consonant: &[u8] = &[];
    let mut rhyme: &[u8] = &[];
    // Split at the first non-consonant byte. If the input is all consonants,
    // both slices stay empty and the rhyme lookup below rejects it.
    for (i, &b) in s.iter().enumerate() {
        if !is_consonant(b) {
            consonant = &s[..i];
            rhyme = &s[i..];
            break;
        }
    }
    // Is it a valid consonant?
    if !consonant.is_empty() {
        if MAP_P2Z.get(unsafe { str::from_utf8_unchecked(consonant) }) == None {
            return None
        }
    }
    // Strip the tone mark and normalize 'ü' to 'v'.
    let (mut untoned_rhyme, tone) = decode_rhyme(unsafe { str::from_utf8_unchecked(rhyme) })?;
    {
        let rhyme_bytes = unsafe { untoned_rhyme.as_bytes_mut() };
        // convert 'u' to 'v' if consonant is 'j', 'q', 'x' or 'y'
        if !consonant.is_empty() && rhyme_bytes[0] == b'u' {
            let c = consonant[0];
            if c == b'j' || c == b'q' || c == b'x' || c == b'y' {
                rhyme_bytes[0] = b'v';
            }
        }
    }
    // Is it a valid rhyme?
    if MAP_P2Z.get(&*untoned_rhyme) == None {
        return None
    }
    let consonant = unsafe { str::from_utf8_unchecked(consonant) };
    Some((consonant, untoned_rhyme, tone))
}

/// Encode zhuyin
///
/// Returns None on a missing tone or invalid input.
///
/// # Example
/// ```
/// # use pinyin_zhuyin::*;
/// assert_eq!(encode_zhuyin("ma3"), Some("ㄇㄚˇ".to_owned()));
/// ```
pub fn encode_zhuyin<S>(s: S) -> Option<String>
    where S: AsRef<str>
{
    let s = s.as_ref();
    if s == "e5" {
        return Some("ㄝ".to_owned())
    } else if s == "r5" {
        return Some("ㄦ˙".to_owned())
    }
    let (consonant, rhyme, mut tone) = _split(s)?;
    if tone == 5 {
        // Neutral tone maps to slot 0 ("˙") of ZHUYIN_TONES.
        tone = 0;
    }
    encode_zhuyin_from_parts(consonant, rhyme, tone)
}

fn encode_zhuyin_from_parts(consonant: &str, rhyme: &str, tone: u8) -> Option<String> {
    // Let's give this capacity to append rhyme and tone later
    let mut ret_vec = Vec::with_capacity(11);
    ret_vec.extend_from_slice(consonant.as_bytes());
    let mut consonant = ret_vec;
    let mut rhyme = rhyme.as_bytes().to_vec();
    // Convert 'u' to 'v' since it's enforced in Zhuyin and our table
    if rhyme[0] == b'u' && !consonant.is_empty() {
        let c = consonant[0];
        if c == b'j' || c == b'q' || c == b'x' || c == b'y' {
            rhyme[0] = b'v';
        }
    }
    // Handle 整體認讀 (whole-syllable readings written with one symbol)
    if rhyme == b"i" {
        if consonant == b"zh" || consonant == b"ch" || consonant == b"sh" ||
           consonant == b"r" || consonant == b"z" || consonant == b"c" ||
           consonant == b"s" || consonant == b"y" {
            rhyme.clear();
        }
    } else if consonant == b"w" && rhyme == b"u" {
        consonant.clear();
    } else if consonant == b"y" {
        if rhyme == b"v" || rhyme == b"e" || rhyme == b"ve" || rhyme == b"in" ||
           rhyme == b"van" || rhyme == b"ing" || rhyme == b"vn" {
            consonant.clear();
        }
    }
    // Map each remaining part through the pinyin-to-zhuyin table.
    if !consonant.is_empty() {
        if let Some(zhuyin) = MAP_P2Z.get(unsafe { str::from_utf8_unchecked(&consonant) }) {
            consonant.clear();
            consonant.extend_from_slice(zhuyin.as_bytes());
        } else {
            return None
        }
    }
    if !rhyme.is_empty() {
        if let Some(zhuyin) = MAP_P2Z.get(unsafe { str::from_utf8_unchecked(&rhyme) }) {
            rhyme.clear();
            rhyme.extend_from_slice(zhuyin.as_bytes());
        } else {
            return None
        }
    }
    consonant.extend_from_slice(&rhyme);
    let mut ret = unsafe { String::from_utf8_unchecked(consonant) };
    ret.push_str(ZHUYIN_TONES[tone as usize]);
    Some(ret)
}

/// Decode zhuyin
///
/// Returns None if invalid input.
///
/// # Example
/// ```
/// # use pinyin_zhuyin::*;
/// assert_eq!(decode_zhuyin("ㄇㄚˇ"), Some("ma3".to_owned()));
/// ```
pub fn decode_zhuyin<S>(s: S) -> Option<String>
    where S: AsRef<str>
{
    let s = s.as_ref();
    if s == "ㄝ" {
        return Some("e5".to_owned())
    } else if s == "ㄦ˙" {
        return Some("r5".to_owned())
    }
    let (consonant, rhyme, tone) = decode_zhuyin_to_parts(s)?;
    // A consonant alone (e.g. "ㄐˇ") is not a valid syllable.
    if rhyme.is_empty() {
        return None
    }
    // Assemble "<consonant><rhyme><tone digit>".
    let mut ret = Vec::with_capacity(7);
    ret.extend_from_slice(consonant.as_bytes());
    ret.extend_from_slice(rhyme.as_bytes());
    ret.push(tone + b'0');
    Some(unsafe { String::from_utf8_unchecked(ret) })
}

fn decode_zhuyin_to_parts(s: &str) -> Option<(String, String, u8)> {
    let mut consonant = Vec::with_capacity(2);
    let mut rhyme = Vec::with_capacity(4);
    // Default to first tone; zhuyin writes tone 1 with no mark at all.
    let mut tone: u8 = 1;
    let mut useless_char_buf = [0; 4];
    // TODO is iterating over bytes instead worth it?
    'split_input: for (i, c) in s.char_indices() {
        let c = c.encode_utf8(&mut useless_char_buf);
        if let Some(decoded) = MAP_Z2P.get(c) {
            // Add char as consonant or rhyme accordingly
            let decoded = decoded.as_bytes();
            match i == 0 && is_consonant(decoded[0]) {
                true => consonant.extend_from_slice(decoded),
                false => rhyme.extend_from_slice(c.as_bytes()),
            }
            continue
        }
        // The last char should be a tone
        let tone_slice = &s[i..];
        for (j, t) in ZHUYIN_TONES.into_iter().enumerate() {
            if &tone_slice == t {
                if j == 0 {
                    // "˙" is the neutral tone, stored as 5.
                    tone = 5;
                } else {
                    tone = j as u8;
                }
                break 'split_input;
            }
        }
        return None
    }
    if rhyme.is_empty() {
        // If 整體認讀, the rhyme should be 'i'
        match consonant.as_slice() {
            b"zh" | b"ch" | b"sh" | b"r" | b"z" | b"c" | b"s" => {
                rhyme.push(b'i');
                return Some(unsafe {
                    (String::from_utf8_unchecked(consonant),
                     String::from_utf8_unchecked(rhyme),
                     tone)
                })
            },
            _ => return None
        }
    }
    // Is it an valid rhyme?
    match MAP_Z2P.get(unsafe { str::from_utf8_unchecked(&rhyme) }) {
        Some(decoded) if is_rhyme(decoded.as_bytes()[0]) => {
            rhyme.clear();
            rhyme.extend_from_slice(decoded.as_bytes());
        },
        _ => return None,
    };
    if consonant.is_empty() {
        // Handle yi, wu, yv 整體認讀, and the special case "ong" to "weng"
        if rhyme == b"i" || rhyme == b"v" || rhyme == b"e" || rhyme == b"ve" ||
           rhyme == b"in" || rhyme == b"van" || rhyme == b"ing" || rhyme == b"vn" {
            consonant.clear();
            consonant.push(b'y');
        } else if rhyme == b"u" {
            consonant.clear();
            consonant.push(b'w');
        } else if rhyme[0] == b'u' {
            consonant.clear();
            consonant.push(b'w');
            rhyme.drain(0..1);
        } else if rhyme[0] == b'i' {
            consonant.clear();
            consonant.push(b'y');
            rhyme.drain(0..1);
        } else if rhyme == b"ong" {
            consonant.clear();
            consonant.push(b'w');
            rhyme.clear();
            rhyme.extend_from_slice(b"eng");
        }
    }
    Some(unsafe {
        (String::from_utf8_unchecked(consonant),
         String::from_utf8_unchecked(rhyme),
         tone)
    })
}

/// Convert pinyin to zhuyin
///
/// # Example
/// ```
/// # use pinyin_zhuyin::*;
/// assert_eq!(pinyin_to_zhuyin("mǎ"), Some("ㄇㄚˇ".to_owned()));
/// ```
pub fn pinyin_to_zhuyin<S>(s: S) -> Option<String>
    where S: AsRef<str>
{
    let s = s.as_ref();
    if s == "ê" {
        return Some("ㄝ".to_owned())
    }
    // Round-trip through the numbered form.
    encode_zhuyin(decode_pinyin(s)?)
}

/// Convert zhuyin to pinyin
///
/// # Example
/// ```
/// # use pinyin_zhuyin::*;
/// assert_eq!(zhuyin_to_pinyin("ㄇㄚˇ"), Some("mǎ".to_owned()));
/// ```
pub fn zhuyin_to_pinyin<S>(s: S) -> Option<String>
    where S: AsRef<str>
{
    let s = s.as_ref();
    if s == "ㄝ" {
        return Some("ê".to_owned())
    }
    // Round-trip through the numbered form.
    encode_pinyin(decode_zhuyin(s)?)
}

// NOTE(review): this module is not gated with #[cfg(test)], so it is compiled
// into the library build as well — confirm whether that is intentional.
mod tests {
    #[allow(unused_imports)]
    use super::*;

    // Shorthand: s!(x) == Some(x.to_owned()).
    #[allow(unused_macros)]
    macro_rules! s(
        ($i:expr) => (Some($i.to_owned()));
    );

    #[test]
    fn encode_pinyin_test() {
        assert_eq!(encode_pinyin("e5"), s!("ê"));
        assert_eq!(encode_pinyin("ju3"), s!("jǔ"));
        assert_eq!(encode_pinyin("jv3"), s!("jǔ"));
        assert_eq!(encode_pinyin("lvan4"), s!("lüàn")); // not valid, for test only
        assert_eq!(encode_pinyin("zhuan4"), s!("zhuàn"));
        assert_eq!(encode_pinyin("zhao2"), s!("zháo"));
        assert_eq!(encode_pinyin("leng1"), s!("lēng"));
        assert_eq!(encode_pinyin("shui3"), s!("shuǐ"));
        assert_eq!(encode_pinyin("liu2"), s!("liú"));
        assert_eq!(encode_pinyin("an3"), s!("ǎn"));
        assert_eq!(encode_pinyin("yi2"), s!("yí"));
        assert_eq!(encode_pinyin("yuan2"), s!("yuán"));
        assert_eq!(encode_pinyin("yvan2"), s!("yuán"));
        assert_eq!(encode_pinyin("min2"), s!("mín"));
        assert_eq!(encode_pinyin("er2"), s!("ér"));
        assert_eq!(encode_pinyin("r5"), s!("r"));

        assert_eq!(encode_pinyin("a"), None);
        assert_eq!(encode_pinyin("a0"), None);
        assert_eq!(encode_pinyin("zh3"), None);
        assert_eq!(encode_pinyin("zhaang4"), None);
        assert_eq!(encode_pinyin("啊"), None);
        assert_eq!(encode_pinyin("a5啊"), None);
        assert_eq!(encode_pinyin("啊a5"), None);
        assert_eq!(encode_pinyin(""), None);
    }

    #[test]
    fn encode_zhuyin_test() {
        assert_eq!(encode_zhuyin("e5"), s!("ㄝ"));
        assert_eq!(encode_zhuyin("ju3"), s!("ㄐㄩˇ"));
        assert_eq!(encode_zhuyin("jv3"), s!("ㄐㄩˇ"));
        assert_eq!(encode_zhuyin("lvan4"), s!("ㄌㄩㄢˋ")); // not valid, for test only
        assert_eq!(encode_zhuyin("zhuan4"), s!("ㄓㄨㄢˋ"));
        assert_eq!(encode_zhuyin("zhao2"), s!("ㄓㄠˊ"));
        assert_eq!(encode_zhuyin("leng1"), s!("ㄌㄥ"));
        assert_eq!(encode_zhuyin("shui3"), s!("ㄕㄨㄟˇ"));
        assert_eq!(encode_zhuyin("liu2"), s!("ㄌㄧㄡˊ"));
        assert_eq!(encode_zhuyin("an3"), s!("ㄢˇ"));
        assert_eq!(encode_zhuyin("yi2"), s!("ㄧˊ"));
        assert_eq!(encode_zhuyin("yuan2"), s!("ㄩㄢˊ"));
        assert_eq!(encode_zhuyin("yvan2"), s!("ㄩㄢˊ"));
        assert_eq!(encode_zhuyin("min2"), s!("ㄇㄧㄣˊ"));
        assert_eq!(encode_zhuyin("er2"), s!("ㄦˊ"));
        assert_eq!(encode_zhuyin("r5"), s!("ㄦ˙"));
        // Zhuyin-specific
        assert_eq!(encode_zhuyin("yu1"), s!("ㄩ"));
        assert_eq!(encode_zhuyin("wu2"), s!("ㄨˊ"));
        assert_eq!(encode_zhuyin("yve3"), s!("ㄩㄝˇ"));
        assert_eq!(encode_zhuyin("yue4"), s!("ㄩㄝˋ"));
        assert_eq!(encode_zhuyin("zhi4"), s!("ㄓˋ"));

        assert_eq!(encode_zhuyin("a"), None);
        assert_eq!(encode_zhuyin("a0"), None);
        assert_eq!(encode_zhuyin("zh3"), None);
        assert_eq!(encode_zhuyin("zhaang4"), None);
        assert_eq!(encode_zhuyin("啊"), None);
        assert_eq!(encode_zhuyin("a5啊"), None);
        assert_eq!(encode_zhuyin("啊a5"), None);
        assert_eq!(encode_zhuyin(""), None);
    }

    #[test]
    fn decode_pinyin_test() {
        assert_eq!(decode_pinyin("ê"), s!("e5"));
        assert_eq!(decode_pinyin("ju"), s!("jv5"));
        assert_eq!(decode_pinyin("lǚ"), s!("lv3"));
        assert_eq!(decode_pinyin("lüàn"), s!("lvan4")); // not valid, for test only
        assert_eq!(decode_pinyin("zhuàn"), s!("zhuan4"));
        assert_eq!(decode_pinyin("zháo"), s!("zhao2"));
        assert_eq!(decode_pinyin("lēng"), s!("leng1"));
        assert_eq!(decode_pinyin("shuǐ"), s!("shui3"));
        assert_eq!(decode_pinyin("liú"), s!("liu2"));
        assert_eq!(decode_pinyin("ǎn"), s!("an3"));
        assert_eq!(decode_pinyin("yí"), s!("yi2"));
        assert_eq!(decode_pinyin("yuán"), s!("yvan2"));
        assert_eq!(decode_pinyin("mín"), s!("min2"));
        assert_eq!(decode_pinyin("ér"), s!("er2"));
        assert_eq!(decode_pinyin("r"), s!("r5"));

        assert_eq!(decode_pinyin("a5"), None);
        assert_eq!(decode_pinyin("zhāāng"), None);
        assert_eq!(decode_pinyin("啊"), None);
        assert_eq!(decode_pinyin("a啊"), None);
        assert_eq!(decode_pinyin("啊a"), None);
        assert_eq!(decode_pinyin(""), None);
    }

    #[test]
    fn decode_zhuyin_test() {
        assert_eq!(decode_zhuyin("ㄝ"), s!("e5"));
        assert_eq!(decode_zhuyin("ㄐㄩ˙"), s!("jv5"));
        assert_eq!(decode_zhuyin("ㄌㄩˇ"), s!("lv3"));
        assert_eq!(decode_zhuyin("ㄌㄩㄢˋ"), s!("lvan4")); // not valid, for test only
        assert_eq!(decode_zhuyin("ㄓㄨㄢˋ"), s!("zhuan4"));
        assert_eq!(decode_zhuyin("ㄓㄠˊ"), s!("zhao2"));
        assert_eq!(decode_zhuyin("ㄓˋ"), s!("zhi4"));
        assert_eq!(decode_zhuyin("ㄌㄥ"), s!("leng1"));
        assert_eq!(decode_zhuyin("ㄕㄨㄟˇ"), s!("shui3"));
        assert_eq!(decode_zhuyin("ㄌㄧㄡˊ"), s!("liu2"));
        assert_eq!(decode_zhuyin("ㄢˇ"), s!("an3"));
        assert_eq!(decode_zhuyin("ㄩ"), s!("yv1"));
        assert_eq!(decode_zhuyin("ㄨˊ"), s!("wu2"));
        assert_eq!(decode_zhuyin("ㄩㄝˇ"), s!("yve3"));
        assert_eq!(decode_zhuyin("ㄩㄝˋ"), s!("yve4"));
        assert_eq!(decode_zhuyin("ㄧˊ"), s!("yi2"));
        assert_eq!(decode_zhuyin("ㄩㄢˊ"), s!("yvan2"));
        assert_eq!(decode_zhuyin("ㄇㄧㄣˊ"), s!("min2"));
        assert_eq!(decode_zhuyin("ㄦˊ"), s!("er2"));
        assert_eq!(decode_zhuyin("ㄦ˙"), s!("r5"));
        // Zhuyin-specific
        assert_eq!(decode_zhuyin("ㄨㄥˊ"), s!("weng2"));

        assert_eq!(decode_zhuyin("a5"), None);
        assert_eq!(decode_zhuyin("ㄩㄝㄝ"), None);
        assert_eq!(decode_zhuyin("ㄐˇ"), None);
        assert_eq!(decode_zhuyin("ㄨㄕ"), None);
        assert_eq!(decode_zhuyin("ㄕㄨㄕㄨ"), None);
        assert_eq!(decode_zhuyin("啊"), None);
        assert_eq!(decode_zhuyin("ㄚ啊"), None);
        assert_eq!(decode_zhuyin("啊ㄚ"), None);
        assert_eq!(decode_zhuyin(""), None);
    }

    #[test]
    fn pinyin_to_zhuyin_test() {
        assert_eq!(pinyin_to_zhuyin("mín"), s!("ㄇㄧㄣˊ"));
        assert_eq!(pinyin_to_zhuyin("zhāng"), s!("ㄓㄤ"));
        assert_eq!(pinyin_to_zhuyin("wéng"), s!("ㄨㄥˊ"));
        assert_eq!(pinyin_to_zhuyin("ér"), s!("ㄦˊ"));
        assert_eq!(pinyin_to_zhuyin("r"), s!("ㄦ˙"));

        assert_eq!(pinyin_to_zhuyin("wengg"), None);
        assert_eq!(pinyin_to_zhuyin("啊"), None);
        assert_eq!(pinyin_to_zhuyin(""), None);
    }

    #[test]
    fn zhuyin_to_pinyin_test() {
        assert_eq!(zhuyin_to_pinyin("ㄇㄧㄣˊ"), s!("mín"));
        assert_eq!(zhuyin_to_pinyin("ㄓㄤ"), s!("zhāng"));
        assert_eq!(zhuyin_to_pinyin("ㄨㄥˊ"), s!("wéng"));
        assert_eq!(zhuyin_to_pinyin("ㄦˊ"), s!("ér"));
        assert_eq!(zhuyin_to_pinyin("ㄦ˙"), s!("r"));

        assert_eq!(zhuyin_to_pinyin("ㄥㄥ"), None);
        assert_eq!(zhuyin_to_pinyin("啊"), None);
        assert_eq!(zhuyin_to_pinyin(""), None);
    }
}
// Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use std::sync::Arc;

use chrono::DateTime;
use chrono::Utc;
use common_exception::ErrorCode;
use common_exception::Result;
use common_meta_app::schema::TableStatistics;
use futures::TryStreamExt;
use storages_common_table_meta::meta::TableSnapshot;
use storages_common_table_meta::table::OPT_KEY_SNAPSHOT_LOCATION;

use crate::io::MetaReaders;
use crate::io::SnapshotHistoryReader;
use crate::FuseTable;

impl FuseTable {
    /// Time-travel: returns a table instance as of the most recent snapshot
    /// whose timestamp is `<= time_point`.
    ///
    /// Snapshots without a timestamp are skipped. Errors with
    /// `TableHistoricalDataNotFound` if no snapshot qualifies.
    pub async fn navigate_to_time_point(
        &self,
        time_point: DateTime<Utc>,
    ) -> Result<Arc<FuseTable>> {
        self.find(|snapshot| {
            if let Some(ts) = snapshot.timestamp {
                ts <= time_point
            } else {
                false
            }
        })
        .await
    }

    /// Returns a table instance at the first (newest) snapshot whose simple
    /// (dash-less) UUID string starts with `snapshot_id` — i.e. a prefix match,
    /// so abbreviated snapshot ids are accepted.
    pub async fn navigate_to_snapshot(&self, snapshot_id: &str) -> Result<Arc<FuseTable>> {
        self.find(|snapshot| {
            snapshot
                .snapshot_id
                .simple()
                .to_string()
                .as_str()
                .starts_with(snapshot_id)
        })
        .await
    }

    /// Walks the snapshot history (newest first) and builds a read-only table
    /// instance from the first snapshot accepted by `pred`.
    ///
    /// Errors with `TableHistoricalDataNotFound` when the table has no snapshot
    /// at all, or when no snapshot satisfies `pred`.
    pub async fn find<P>(&self, mut pred: P) -> Result<Arc<FuseTable>>
    where P: FnMut(&TableSnapshot) -> bool {
        let snapshot_location = if let Some(loc) = self.snapshot_loc().await? {
            loc
        } else {
            // NOTE(review): an empty table is reported as an error here —
            // arguably not an error condition; confirm with callers.
            return Err(ErrorCode::TableHistoricalDataNotFound(
                "Empty Table has no historical data",
            ));
        };
        let snapshot_version = self.snapshot_format_version().await?;
        let reader = MetaReaders::table_snapshot_reader(self.get_operator());
        // Grab the table history as a stream;
        // snapshots are ordered by timestamp DESC (newest first).
        let mut snapshot_stream = reader.snapshot_history(
            snapshot_location,
            snapshot_version,
            self.meta_location_generator().clone(),
        );

        // Find the first snapshot which matches the predicate.
        let mut instant = None;
        while let Some(snapshot) = snapshot_stream.try_next().await? {
            if pred(snapshot.as_ref()) {
                instant = Some(snapshot);
                break;
            }
        }

        if let Some(snapshot) = instant {
            // Load the table instance by the snapshot.
            // The `seq` of ident that we cloned here is JUST a placeholder;
            // we should NOT use it other than as a pure placeholder.
            let mut table_info = self.table_info.clone();

            // There is more that could be kept in the snapshot, like
            // engine_options, ordering keys... or we could just keep a clone of
            // TableMeta in the snapshot.
            //
            // Currently, here is what we can recover from the snapshot:

            // 1. the table schema
            table_info.meta.schema = Arc::new(snapshot.schema.clone());

            // 2. the table option `snapshot_location`
            let ver = snapshot.format_version();
            let loc = self
                .meta_location_generator
                .snapshot_location_from_uuid(&snapshot.snapshot_id, ver)?;
            table_info
                .meta
                .options
                .insert(OPT_KEY_SNAPSHOT_LOCATION.to_owned(), loc);

            // 3. the statistics
            let summary = &snapshot.summary;
            table_info.meta.statistics = TableStatistics {
                number_of_rows: summary.row_count,
                data_bytes: summary.uncompressed_byte_size,
                compressed_data_bytes: summary.compressed_byte_size,
                index_data_bytes: summary.index_size,
            };

            // let's instantiate it
            let table = FuseTable::do_create(table_info)?;
            Ok(table.into())
        } else {
            Err(ErrorCode::TableHistoricalDataNotFound(
                "No historical data found at given point",
            ))
        }
    }
}
use input_i_scanner::{scan_with, InputIScanner};

/// Reads a tree with `n` vertices (as `n - 1` edges, 1-indexed endpoints) and
/// prints "Yes" when the tree is a star — i.e. some vertex is adjacent to all
/// `n - 1` others — and "No" otherwise.
fn main() {
    let stdin = std::io::stdin();
    let mut scanner = InputIScanner::from(stdin.lock());

    let n = scan_with!(scanner, usize);

    // degree[v] counts the edges incident to vertex v + 1.
    let mut degree = vec![0; n];
    for _ in 0..(n - 1) {
        let (u, v) = scan_with!(scanner, (usize, usize));
        degree[u - 1] += 1;
        degree[v - 1] += 1;
    }

    // A star center touches every other vertex exactly once.
    let is_star = degree.iter().any(|&d| d == n - 1);
    println!("{}", if is_star { "Yes" } else { "No" });
}
/// Minimal smoke-test binary: prints a greeting so a run under QEMU can be
/// verified end to end.
fn main() {
    println!("Hello, qemu!");
}
//! PWM example for the STM32L0 HAL: drives TIM2 channel 2 on pin PA1 and
//! steps the duty cycle down (100% -> 50% -> 25% -> 12.5%) once per second.

#![deny(warnings)]
#![deny(unsafe_code)]
#![no_main]
#![no_std]

// Panic handler for no_std: halt the core on panic.
extern crate panic_halt;

use cortex_m::asm;
use cortex_m_rt::entry;
use stm32l0xx_hal::{pac, prelude::*, rcc::Config};

#[entry]
fn main() -> ! {
    // Singleton access to device and core peripherals; `take` returns Some
    // exactly once, so unwrap can only fail if called twice.
    let dp = pac::Peripherals::take().unwrap();
    let cp = cortex_m::Peripherals::take().unwrap();

    // Configure the clock (16 MHz internal oscillator).
    let mut rcc = dp.RCC.freeze(Config::hsi16());

    // Get the delay provider (SysTick-based, tied to the configured clocks).
    let mut delay = cp.SYST.delay(rcc.clocks);

    // Acquire the GPIOA peripheral. This also enables the clock for GPIOA in
    // the RCC register.
    let gpioa = dp.GPIOA.split(&mut rcc);

    // Configure TIM2 as PWM on PA1 at 10 kHz.
    let c2 = gpioa.pa1;
    let mut pwm = dp.TIM2.pwm(c2, 10.khz(), &mut rcc);

    // Duty is expressed in timer ticks; `max` corresponds to 100%.
    let max = pwm.get_max_duty();

    pwm.enable();

    // Step the duty cycle down, holding each level for one second.
    pwm.set_duty(max);
    delay.delay_ms(1000_u16);

    pwm.set_duty(max / 2);
    delay.delay_ms(1000_u16);

    pwm.set_duty(max / 4);
    delay.delay_ms(1000_u16);

    pwm.set_duty(max / 8);

    // Embedded entry points never return; spin forever at the final duty.
    loop {
        asm::nop();
    }
}
use std::ptr; use std::f32; use std::u32; use std::ffi::CString; use crate::Battery; use crate::technology::Technology; use crate::state::State; /// Returns battery percentage. /// /// # Panics /// /// This function will panic if passed pointer is `NULL` #[no_mangle] pub unsafe extern fn battery_get_percentage(ptr: *const Battery) -> libc::c_float { assert!(!ptr.is_null()); let battery = &*ptr; battery.percentage() } /// Returns battery energy (in `mWh`). /// /// # Panics /// /// This function will panic if passed pointer is `NULL` #[no_mangle] pub unsafe extern fn battery_get_energy(ptr: *const Battery) -> libc::uint32_t { assert!(!ptr.is_null()); let battery = &*ptr; battery.energy() } /// Returns battery energy (in `mWh`) when it is considered full. /// /// # Panics /// /// This function will panic if passed pointer is `NULL` #[no_mangle] pub unsafe extern fn battery_get_energy_full(ptr: *const Battery) -> libc::uint32_t { assert!(!ptr.is_null()); let battery = &*ptr; battery.energy_full() } /// Returns battery energy (in `mWh`) designed to hold when it is considered full. /// /// # Panics /// /// This function will panic if passed pointer is `NULL` #[no_mangle] pub unsafe extern fn battery_get_energy_full_design(ptr: *const Battery) -> libc::uint32_t { assert!(!ptr.is_null()); let battery = &*ptr; battery.energy_full_design() } /// Returns battery energy rate (in `mW`). /// /// # Panics /// /// This function will panic if passed pointer is `NULL` #[no_mangle] pub unsafe extern fn battery_get_energy_rate(ptr: *const Battery) -> libc::uint32_t { assert!(!ptr.is_null()); let battery = &*ptr; battery.energy_rate() } /// Returns battery voltage (in `mV`) /// /// # Panics /// /// This function will panic if passed pointer is `NULL` #[no_mangle] pub unsafe extern fn battery_get_voltage(ptr: *const Battery) -> libc::uint32_t { assert!(!ptr.is_null()); let battery = &*ptr; battery.voltage() } /// Returns battery capacity in `0.0`..`100.0` percents range. 
/// /// # Panics /// /// This function will panic if passed pointer is `NULL` #[no_mangle] pub unsafe extern fn battery_get_capacity(ptr: *const Battery) -> libc::c_float { assert!(!ptr.is_null()); let battery = &*ptr; battery.capacity() } /// Returns battery state. /// /// # Panics /// /// This function will panic if passed pointer is `NULL` #[no_mangle] pub unsafe extern fn battery_get_state(ptr: *const Battery) -> State { assert!(!ptr.is_null()); let battery = &*ptr; battery.state().into() } /// Returns battery technology. /// /// # Panics /// /// This function will panic if passed pointer is `NULL` #[no_mangle] pub unsafe extern fn battery_get_technology(ptr: *const Battery) -> Technology { assert!(!ptr.is_null()); let battery = &*ptr; battery.technology().into() } /// Returns battery temperature. /// /// # Returns /// /// If value is not available, function returns max possible value for `float` type (`1E+37`). /// /// # Panics /// /// This function will panic if passed pointer is `NULL` #[no_mangle] pub unsafe extern fn battery_get_temperature(ptr: *const Battery) -> libc::c_float { assert!(!ptr.is_null()); let battery = &*ptr; match battery.temperature() { None => f32::MAX, Some(temp) => temp, } } /// Returns battery cycles count. /// /// # Returns /// /// If value is not available, function returns max possible value for `uint32` type (`4294967295`). /// /// # Panics /// /// This function will panic if passed pointer is `NULL` #[no_mangle] pub unsafe extern fn battery_get_cycle_count(ptr: *const Battery) -> libc::uint32_t { assert!(!ptr.is_null()); let battery = &*ptr; match battery.cycle_count() { None => u32::MAX, Some(value) => value, } } /// Returns battery vendor. /// /// Caller is required to free returned value with [battery_str_free](fn.battery_str_free.html) /// function after using it. /// /// # Returns /// /// This function might return `NULL` if vendor data is not available. 
/// Calling [battery_str_free](fn.battery_str_free.html) is not required in that case, /// yet it will not lead to any error. /// /// # Panics /// /// This function will panic if passed pointer is `NULL` #[no_mangle] pub unsafe extern fn battery_get_vendor(ptr: *const Battery) -> *mut libc::c_char { assert!(!ptr.is_null()); let battery = &*ptr; match battery.vendor() { Some(vendor) => { let c_str = CString::new(vendor).unwrap(); c_str.into_raw() }, None => ptr::null_mut(), } } /// Returns battery model. /// /// Caller is required to free returned value with [battery_str_free](fn.battery_str_free.html) /// function after using it. /// /// # Returns /// /// This function might return `NULL` if model data is not available. /// Calling [battery_str_free](fn.battery_str_free.html) is not required in that case, /// yet it will not lead to any error. /// /// # Panics /// /// This function will panic if passed pointer is `NULL` #[no_mangle] pub unsafe extern fn battery_get_model(ptr: *const Battery) -> *mut libc::c_char { assert!(!ptr.is_null()); let battery = &*ptr; match battery.model() { Some(model) => { let c_str = CString::new(model).unwrap(); c_str.into_raw() }, None => ptr::null_mut(), } } /// Returns battery serial number. /// /// Caller is required to free returned value with [battery_str_free](fn.battery_str_free.html) /// function after using it. /// /// # Returns /// /// This function might return `NULL` if serial number data is not available. /// Calling [battery_str_free](fn.battery_str_free.html) is not required in that case, /// yet it will not lead to any error. /// /// # Panics /// /// This function will panic if passed pointer is `NULL` #[no_mangle] pub unsafe extern fn battery_get_serial_number(ptr: *const Battery) -> *mut libc::c_char { assert!(!ptr.is_null()); let battery = &*ptr; match battery.serial_number() { Some(sn) => { let c_str = CString::new(sn).unwrap(); c_str.into_raw() }, None => ptr::null_mut(), } } /// Returns battery time to full. 
/// /// # Returns /// /// If battery is not charging at the moment, this function will return `0`, /// otherwise it will return seconds amount. /// /// # Panics /// /// This function will panic if passed pointer is `NULL` #[no_mangle] pub unsafe extern fn battery_get_time_to_full(ptr: *const Battery) -> libc::uint64_t { assert!(!ptr.is_null()); let battery = &*ptr; match battery.time_to_full() { None => 0, Some(duration) => duration.as_secs(), } } /// Returns battery time to empty. /// /// # Returns /// /// If battery is not discharging at the moment, this function will return `0`, /// otherwise it will return seconds amount. /// /// # Panics /// /// This function will panic if passed pointer is `NULL` #[no_mangle] pub unsafe extern fn battery_get_time_to_empty(ptr: *const Battery) -> libc::uint64_t { assert!(!ptr.is_null()); let battery = &*ptr; match battery.time_to_empty() { None => 0, Some(duration) => duration.as_secs(), } } /// Frees battery instance. /// /// Caller is required to call this function when battery pointer is not needed anymore /// in order to properly free memory. #[no_mangle] pub unsafe extern fn battery_free(ptr: *mut Battery) { if ptr.is_null() { return; } Box::from_raw(ptr); } /// Frees battery information string value. /// /// Caller is required to call this function for return values for the following functions: /// * [battery_vendor](fn.battery_vendor.html) /// * [battery_model](fn.battery_model.html) /// * [battery_serial_number](fn.battery_serial_number.html) #[no_mangle] pub unsafe extern fn battery_str_free(ptr: *mut libc::c_char) { if ptr.is_null() { return; } CString::from_raw(ptr); }
// NOTE(review): this module is written in pre-1.0 Rust (`~` owned boxes,
// `~str`, `fail!`, `std::hashmap`) and will not build on a modern toolchain;
// comments below document the existing logic without altering it.

use ast::*;
use env::*;
use parse::parse;
use transform::*;
use util::resolve;
use std::vec::append;
use std::hashmap::HashMap;

// Builds the initial compile-time environment, binding the core forms to
// their transforms.
fn new_env() -> CompileTimeEnv {
    let env0 = ~[];
    let env1 = extend(&env0, "lambda", TFun);
    let env2 = extend(&env1, "quote", TQuote);
    let env3 = extend(&env2, "syntax", TQuote);
    extend(&env3, "let-syntax", TLetSyntax)
}

// Looks up the transform bound to identifier `v` in the compile-time
// environment; non-identifiers map to TNone. Fails if the identifier cannot
// be resolved or has no binding.
fn lookup(e: &CompileTimeEnv, v: &Value, dcs: &DefinitionContextStore) -> Transform {
    if !v.is_id() {
        return TNone;
    }
    let resolved = &resolve(v, dcs).expect("Failed to resolve " + v.to_str());
    let looked_up = e.iter().find(|&nt| {
        nt.clone().n0() == *resolved
    }).expect("Found no binding in CTE for " + resolved.to_str());
    looked_up.clone().n1().clone()
}

// Returns a copy of the environment with (k, v) prepended; newest bindings
// shadow older ones because lookup scans front to back.
fn extend(e: &CompileTimeEnv, k: &str, v: Transform) -> CompileTimeEnv {
    let mut new_env = e.clone();
    new_env.insert(0, (k.to_owned(), v));
    new_env
}

// Generates a fresh name for the symbol wrapped in `v` by appending the
// generation path (`:g1:g0...`) so names from different expansion steps
// cannot collide.
fn fresh(v: &Value, gen: &Gen) -> GenName {
    let name = match v.clone() {
        Stx(~Atom(~Sym(n)), _) => n,
        _ => fail!("Invalid value given to fresh()")
    };
    let mut end_iter = gen.rev_iter().map(|n| format!(":{:s}", n.to_str()));
    let end : ~[~str] = end_iter.collect();
    format!("{:s}{:s}", name, end.concat())
}

// Fresh name for a hygiene mark.
fn fresh_mark(gen: &Gen) -> GenName {
    fresh(&Stx(~Atom(~Sym(~"mark")), ~CtxNone), gen)
}

// Fresh name for a definition context.
fn fresh_def(gen: &Gen) -> GenName {
    fresh(&Stx(~Atom(~Sym(~"def")), ~CtxNone), gen)
}

// Recursive macro expander: dispatches on the head identifier's transform
// (lambda / quote / let-syntax / stop / macro value), threading the
// definition-context store through each step. Returns the expanded syntax
// together with the (possibly extended) store.
pub fn expand_rec(v: Value, dcs: &DefinitionContextStore,
                  env: &CompileTimeEnv, gen: &Gen)
    -> (Value, DefinitionContextStore) {
    match v.clone() {
        Stx(~List(largs), ctx) => {
            match largs {
                // lambda expansion: rename the bound variable to a fresh name
                // in both binder and body (hygiene), then expand the body.
                [ref id_lam, ref id_arg, ref body]
                        if lookup(env, id_lam, dcs) == TFun => {
                    let name_new = fresh(id_arg, gen);
                    let id_new = rename(id_arg, id_arg, name_new);
                    let newbody = rename(body, id_arg, name_new);
                    let new_env = extend(env, name_new, TVar(~id_new.clone()));
                    let (stx_expbody, dcs1) =
                        expand_rec(newbody, dcs, &new_env, &new_gen(0, gen));
                    (Stx(~List(~[id_lam.clone(), id_new, stx_expbody]), ctx), dcs1)
                },
                // Quote: leave the quoted syntax untouched.
                [ref id_quote, ref stx] if
                        lookup(env, id_quote, dcs) == TQuote => {
                    (Stx(~List(~[id_quote.clone(), stx.clone()]), ctx), dcs.clone())
                },
                // LetSyntax: evaluate the right-hand side at expansion time and
                // bind it as a macro transform in the body's environment.
                [ref id_ls, ref id_mac, ref rhs, ref body]
                        if lookup(env, id_ls, dcs) == TLetSyntax => {
                    let name_new = fresh(id_mac, gen);
                    let parsed = parse(Val(~rhs.clone()), dcs);
                    let (val, _, dcs1) = eval_sub(parsed, dcs, env, &new_gen(0, gen), &~"");
                    let transform = TVal(~val);
                    let new_env = extend(env, name_new, transform);
                    expand_rec(rename(body, id_mac, name_new),
                               &dcs1, &new_env, &new_gen(1, gen))
                },
                // Stop expanding: head is registered as a stop form.
                [ref id_stop, ..] if lookup(env, id_stop, dcs) == TStop =>
                    (v, dcs.clone()),
                // macro application: mark the input, run the macro, mark the
                // output again (marks cancel on untouched syntax), then keep
                // expanding the result.
                [ref id_mac, ..] if {
                    match lookup(env, id_mac, dcs) {
                        TVal(_) => true,
                        _ => false
                    }
                } => {
                    match lookup(env, id_mac, dcs) {
                        TVal(val) => {
                            let new_mark = fresh_mark(gen);
                            let marked = ~[Val(~mark(&v, &new_mark))];
                            let (evald, _, dcs1)= eval_sub(App(~Val(val), marked),
                                                           dcs, env, &new_gen(0, gen), &~"");
                            let eval_marked = mark(&evald, &new_mark);
                            expand_rec(eval_marked, &dcs1, env, &new_gen(1, gen))
                        },
                        _ => fail!("Impossible case.")
                    }
                },
                // No special form at the head: expand each element in turn,
                // threading the store left to right.
                all => {
                    let mut m_dcs = dcs.clone();
                    let mut valz = ~[];
                    for v in all.iter() {
                        let (ev, edcs) = expand_rec(v.clone(), &m_dcs, env, &new_gen(0, gen));
                        valz.push(ev);
                        m_dcs = edcs;
                    };
                    (Stx(~List(valz), ctx), m_dcs)
                }
            }
        },
        // Bare identifier: replace it with the renamed variable it resolves to.
        id => match lookup(env, &id, dcs) {
            TVar(new_id) => (*new_id, dcs.clone()),
            x => fail!("Could not find transformation: " + id.to_str()
                       + " == " + x.to_str())
        }
    }
}

// Entry point: expand with an empty store, the core environment and an empty
// generation path.
pub fn expand(v: Value) -> (Value, DefinitionContextStore){
    expand_rec(v, &HashMap::new(), &new_env(), &~[])
}

// Evaluates a primitive by name. List/syntax/arithmetic primitives only;
// anything else fails.
fn eval_prim(name: &str, args: ~[Value]) -> Value {
    match (name, args) {
        ("list", largs) => List(largs),
        ("car", [List(largs)]) => largs[0],
        ("cdr", [List(largs)]) => List(largs.slice_from(1).to_owned()),
        ("cons", [e, List(largs)]) => List(append(~[e], largs).clone()),
        //syntax
        ("stx-e", [Stx(v, _)]) => *v,
        ("mk-stx", [Atom(a), Stx(_, ctx)]) => Stx(~Atom(a), ctx),
        ("mk-stx", [List(lstx), Stx(_, ctx)]) => Stx(~List(lstx),
                                                     ctx),
        //math
        // Variadic addition folded over numeric atoms.
        ("+", largs) => largs.iter().fold(Atom(~Num(0)), |b, a| {
            match (b, a) {
                (Atom(~Num(x)), &Atom(~Num(y))) => Atom(~Num(x + y)),
                (x, y) => fail!("Invalid operation " + x.to_str() + " + " + y.to_str())
            }
        }),
        bad => fail!("not implemented: " + bad.to_str())
    }
}

// TODO: fixme
//fn eval_env(e: Expression, env: RuntimeEnv) -> Value {
    //match e {
        //Var(s) => env.get(&s).clone(),
        //App(~Val(~Fun(~Var(name), body)), [arg]) => {
            //let mut mut_env = env.clone();
            //mut_env.insert(name, eval_env(arg, env));
            //eval_env(*body, mut_env)
        //},
        //App(~Val(~Atom(~Prim(name))), args) => {
            //let val_args = args.map(|e| eval_env(e.clone(), env.clone()));
            //eval_prim(name, val_args)
        //},
        //App(e0, es) => eval_env(App(~Val(~eval_env(*e0, env.clone())), es), env),
        //Val(val) => *val,
    //}
//}

// True when `name` occurs (free or bound) somewhere inside the value.
fn var_in_val(v: &Value, name: &str) -> bool {
    match v.clone() {
        Fun(earg, ebody) => var_in(earg, name) || var_in(ebody, name),
        List(vs) => vs.iter().any(|v| var_in_val(v, name)),
        Stx(v, _) => var_in_val(v, name),
        _ => false
    }
}

// True when `name` occurs somewhere inside the expression.
fn var_in(e: &Expression, name: &str) -> bool {
    match e.clone() {
        Var(ref n) if n == &name.to_owned() => true,
        Var(_) => false,
        App(e0, es) => var_in(e0, name) || es.iter().any(|e| var_in(e, name)),
        Val(v) => var_in_val(v, name)
    }
}

// Produces a name based on `root` that does not occur in `e`, by appending
// "#" when the root is taken (used to avoid capture during substitution).
fn var_not_in(e: &Expression, root: &str) -> ~str {
    if var_in(e, root) {
        root + "#"
    } else {
        root.to_owned()
    }
}

// Capture-avoiding substitution inside a value: a Fun binding the same name
// shadows it (no substitution under the binder); otherwise the binder is
// alpha-renamed first so `to_sub`'s free variables cannot be captured.
fn substitute_val(v: Value, name: &str, to_sub: Expression) -> Value {
    match v {
        Fun(~Var(ref f), ref e1) if f == &name.to_owned() =>
            Fun(~Var(f.to_owned()), e1.clone()),
        Fun(~Var(name_2), ast) => {
            let name_3 = var_not_in(ast, name_2);
            let v_with_name = substitute(*ast, name_2, Var(name_3.clone()));
            Fun(~Var(name_3), ~substitute(v_with_name, name, to_sub))
        },
        List(vs) => List(vs.map(|e| substitute_val(e.clone(), name, to_sub.clone()))),
        x => x
    }
}

// Capture-avoiding substitution of `to_sub` for `name` in an expression.
fn substitute(e: Expression, name: &str, to_sub: Expression) -> Expression {
    match e {
        Var(ref n) if n == &name.to_owned() => to_sub,
        Var(n) => Var(n),
        App(e0, es) =>
            App(~substitute(*e0, name, to_sub.clone()),
                es.map(|e| substitute(e.clone(), name, to_sub.clone()))),
        Val(~v) => Val(~substitute_val(v, name, to_sub)),
    }
}

// Returns a copy of the compile-time environment with every TStop binding
// removed.
fn no_stops(cte: &CompileTimeEnv) -> CompileTimeEnv {
    let mut new_cte = ~[];
    for kv in cte.iter() {
        let (k, v) = kv.clone();
        if v != TStop {
            new_cte.push((k, v));
        }
    };
    new_cte
}

// Evaluates the expansion-time ("t") primitives: lvalue lookup, local
// expansion, definition-context creation and def-bind. Each step threads
// the compile-time environment and definition-context store.
fn eval_tprim(name: ~str, args: ~[Expression], dcs: &DefinitionContextStore,
              cte: &CompileTimeEnv, gen: &Gen, mrk: &GenName)
    -> (Value, CompileTimeEnv, DefinitionContextStore) {
    match (name, args) {
        // lvalue: evaluate to an identifier and return the value bound to it.
        (~"lvalue", [ast]) => {
            let (id_result, cte1, dcs1) = eval_sub(ast, dcs, cte, gen, mrk);
            match lookup(&cte1, &id_result, &dcs1) {
                TVal(~v) => (v, cte1, dcs1),
                _ => fail!("Invalid lvalue")
            }
        },
        //(~"lexpand", [ast_expr, ast_stops]) => {
            //let stx = eval_sub(ast_expr, dcs, cte, mrk).n0();
            //let stops = eval_sub(ast_stops, dcs, cte, mrk).n0();
            //match stops {
                //List(stops) => {
                    //let mut env_stops = no_stops(cte);
                    //for stop in stops.iter() {
                        //env_stops.insert(resolve(stop, dcs).unwrap(), TStop);
                    //}
                    //let expanded = expand_rec(mark(&stx, mrk), dcs, &env_stops).n0();
                    //let marked = mark(&expanded, mrk);
                    //(marked, cte.clone(), dcs.clone())
                //},
                //_ => fail!("Invalid lexpand")
            //}
        //},
        // lexpand: locally expand a piece of syntax with the given stop list
        // inside the given definition context, marking before and after.
        (~"lexpand", [ast_expr, ast_stops, ast_defs]) => {
            let (stx_expr, cte1, dcs1)= eval_sub(ast_expr, dcs, cte, &new_gen(0, gen), mrk);
            let (stops, cte2, dcs2) = eval_sub(ast_stops, &dcs1, &cte1, &new_gen(1, gen), mrk);
            match stops {
                List(stops) => {
                    let (d, cte3, dcs3) = eval_sub(ast_defs, &dcs2, &cte2, &new_gen(2, gen), mrk);
                    let defs_ = match d {
                        Defs(d) => d,
                        _ => fail!(~"failed to lexpend, invalid definitions")
                    };
                    // Register each requested stop identifier in an
                    // environment with previous stops cleared.
                    let mut env_stops = no_stops(&cte3);
                    for stop in stops.iter() {
                        env_stops = extend(&env_stops, resolve(stop, &dcs3).unwrap(), TStop);
                    }
                    let stx_new = defs(&mark(&stx_expr, mrk), &defs_);
                    let (expanded, dcs4) =
                        expand_rec(stx_new, &dcs3, &env_stops, &new_gen(3, gen));
                    let marked = mark(&defs(&expanded, &defs_), mrk);
                    (marked, cte3, dcs4)
                },
                _ =>
                    fail!("Invalid lexpand")
            }
        },
        // new-defs: allocate a fresh definition context.
        (~"new-defs", []) => {
            let def = Some(fresh_def(gen));
            (Defs(def.clone()), cte.clone(), extend_new_def(dcs, def))
        },
        //runtime binding
        // def-bind/2: bind an identifier in a definition context to a fresh
        // runtime variable.
        (~"def-bind", [ast_defs, ast_id]) => {
            let (def, cte1, dcs1) = eval_sub(ast_defs, dcs, cte, &new_gen(0, gen), mrk);
            let defs_ = match def {
                Defs(d) => d,
                _ => fail!("failed to def-bind, invalid definitions")
            };
            let (id, cte2, dcs2) = eval_sub(ast_id, &dcs1, &cte1, &new_gen(1, gen), mrk);
            let name_new = fresh(&id, gen);
            let id_new = rename(&id, &id, name_new.clone());
            let dcs3 = extend_def_bind(&dcs2, &defs_, &id, mrk, name_new);
            let cte3 = extend(&cte2, name_new, TVar(~id_new));
            // Dummy value: def-bind is used for its environment effects only.
            (Atom(~Num(0)), cte3, dcs3)
        },
        //compile time binding
        // def-bind/3: evaluate the given syntax at expansion time and bind the
        // identifier to the resulting value (a macro / compile-time binding).
        (~"def-bind", [ast_defs, ast_id, ast_stx]) => {
            let (def, cte1, dcs1) = eval_sub(ast_defs, dcs, cte, &new_gen(0, gen), mrk);
            let defs_ = match def {
                Defs(d) => d,
                _ => fail!("failed to def-bind, invalid definitions")
            };
            let (id, cte2, dcs2) = eval_sub(ast_id, &dcs1, &cte1, &new_gen(1, gen), mrk);
            let (stx, cte3, dcs3) = eval_sub(ast_stx, &dcs2, &cte2, &new_gen(2, gen), mrk);
            let parsed = parse(Val(~defs(&mark(&stx, mrk), &defs_)), &dcs3);
            let (val, cte4, dcs4) = eval_sub(parsed, &dcs3, &cte3, &new_gen(3, gen), mrk);
            let name_new = fresh(&id, gen);
            //let id_new = rename(&id, &id, name_new.clone());
            let dcs5 = extend_def_bind(&dcs4, &defs_, &id, mrk, name_new);
            let cte5 = extend(&cte4, name_new, TVal(~val));
            // Dummy value: def-bind is used for its environment effects only.
            (Atom(~Num(0)), cte5, dcs5)
        },
        (bad, _) => fail!("Unimplemented tprim: " + bad.to_str())
    }
}

// TODO: Refactor new, more complex, primitives to each be contained in
// a separate function.
// Core substitution-based evaluator. Threads the compile-time environment
// (cte) and definition-context store (dcs) through every step and returns the
// value together with both updated environments.
fn eval_sub(e: Expression, dcs: &DefinitionContextStore, cte: &CompileTimeEnv,
            gen: &Gen, mrk: &GenName)
    -> (Value, CompileTimeEnv, DefinitionContextStore) {
    match e {
        Var(s) => fail!("unbound variable: " + s),
        // Beta reduction: evaluate the argument, substitute it into the body.
        App(~Val(~Fun(~Var(name), body)), [arg]) => {
            let (args, cte1, dcs1) = eval_sub(arg, dcs, cte, &new_gen(0, gen), mrk);
            let subbed = substitute(*body, name, Val(~args));
            eval_sub(subbed, &dcs1, &cte1, &new_gen(1, gen), mrk)
        },
        // TODO: move these into separate sub-eval that takes environments
        // begin: evaluate in order, result is the last expression's value.
        App(~Val(~Atom(~Prim(~"begin"))), args) => {
            args.iter().fold((Atom(~Num(0)), cte.clone(), dcs.clone()), |(_, cte, dcs), e| {
                let (v, new_cte, new_dcs) =
                    eval_sub(e.clone(), &dcs, &cte, &new_gen(2, gen), mrk);
                (v, new_cte, new_dcs)
            })
        },
        // begin0: evaluate in order, result is the FIRST expression's value,
        // but the later expressions' environment effects are kept.
        App(~Val(~Atom(~Prim(~"begin0"))), [ref arg0, .. args]) => {
            let gen = &new_gen(0, gen);
            let (v0, cte0, dcs0) = eval_sub(arg0.clone(), dcs, cte, gen, mrk);
            let (_, cteN, dcsN) =
                args.iter().fold((v0.clone(), cte0, dcs0), |(_, cte, dcs), e| {
                    let (v, new_cte, new_dcs) =
                        eval_sub(e.clone(), &dcs, &cte, &new_gen(3, gen), mrk);
                    (v, new_cte, new_dcs)
                });
            (v0, cteN, dcsN)
        },
        // Ordinary primitive: evaluate all arguments, keep the environments
        // produced by the last argument, then apply the primitive.
        App(~Val(~Atom(~Prim(name))), args) => {
            let val_args_n_envs = args.map(|e| {
                eval_sub(e.clone(), dcs, cte, gen, mrk)
            });
            let val_args = val_args_n_envs.map(|x| x.clone().n0());
            let mut cte1 = cte.clone();
            let mut dcs1 = dcs.clone();
            if (val_args.len() > 0) {
                cte1 = val_args_n_envs.last().clone().n1();
                dcs1 = val_args_n_envs.last().clone().n2();
            };
            (eval_prim(name, val_args), cte1, dcs1)
        },
        // Expansion-time primitive: arguments are passed unevaluated.
        App(~Val(~Atom(~TPrim(name))), args) =>
            eval_tprim(name, args, dcs, cte, gen, mrk),
        // General application: evaluate the operator first, then re-dispatch.
        App(e0, es) => {
            let (val1, cte1, dcs1) = eval_sub(*e0, dcs, cte, gen, mrk);
            eval_sub(App(~Val(~val1), es), &dcs1, &cte1, gen, mrk)
        },
        // A literal list; a (quote n) form evaluates to the quoted datum.
        Val(~List(vs)) => {
            (match vs {
                [Atom(~Sym(~"quote")), n] => n,
                _ => List(vs)
            }, cte.clone(), dcs.clone())
        },
        Val(val) => (*val, cte.clone(), dcs.clone()),
    }
}

// todo: work with environments instead of substitution
// Public entry point: evaluate with empty compile-time environment,
// generation path and mark.
pub fn eval(e: Expression, dcs: &DefinitionContextStore)
    -> (Value, CompileTimeEnv, DefinitionContextStore) {
    eval_sub(e, dcs, &~[], &~[], &~"")
}

#[cfg(test)]
mod test {
    use ast::*;
    use super::*;
    use std::hashmap::HashMap;

    // Self-evaluating values come back unchanged.
    #[test]
    fn test_val() {
        let dcs = &HashMap::new();
        assert!(eval(make_prim("car"), dcs).n0() == make_prim("car"));
        assert!(eval(make_sym("symbol"), dcs).n0() == make_sym("symbol"));
        assert!(eval(Val(~List(~[make_sym("s"), make_prim("x")])), dcs).n0()
            == List(~[make_sym("s"), make_prim("x")]));
        assert!(eval(Val(~Fun(~Var(~"x"), ~Var(~"x"))), dcs).n0()
            == Fun(~Var(~"x"), ~Var(~"x")));
    }

    // An unbound variable must fail.
    #[test]
    #[should_fail]
    fn test_fail_var() {
        eval(Var(~"x"), &HashMap::new());
    }

    // Identity function application returns its argument.
    #[test]
    fn test_fun_app() {
        assert!(eval(App(~Val(~Fun(~Var(~"x"), ~Var(~"x"))), ~[make_sym("name")]),
                     &HashMap::new()).n0() == make_sym("name"));
    }
}
#![allow(unused)] use azsys; use std::fmt; #[derive(PartialEq, Debug)] pub enum AzReturnCode { AzResultCoreOk = azsys::az_result_core_AZ_OK as isize, AzResultCoreErrorCanceled = azsys::az_result_core_AZ_ERROR_CANCELED as isize, AzResultCoreErrorArg = azsys::az_result_core_AZ_ERROR_ARG as isize, AzResultCoreErrorNotEnoughSpace = azsys::az_result_core_AZ_ERROR_NOT_ENOUGH_SPACE as isize, AzResultCoreErrorNotImplemented = azsys::az_result_core_AZ_ERROR_NOT_IMPLEMENTED as isize, AzResultCoreErrorItemNotFound = azsys::az_result_core_AZ_ERROR_ITEM_NOT_FOUND as isize, AzResultCoreErrorUnexpectedChar = azsys::az_result_core_AZ_ERROR_UNEXPECTED_CHAR as isize, AzResultCoreErrorUnexpectedEnd = azsys::az_result_core_AZ_ERROR_UNEXPECTED_END as isize, AzResultCoreErrorNotSupported = azsys::az_result_core_AZ_ERROR_NOT_SUPPORTED as isize, AzResultCoreErrorDependencyNotProvided = azsys::az_result_core_AZ_ERROR_DEPENDENCY_NOT_PROVIDED as isize, AzResultCoreErrorOutOfMemory = azsys::az_result_core_AZ_ERROR_OUT_OF_MEMORY as isize, AzResultCoreErrorJsonInvalidState = azsys::az_result_core_AZ_ERROR_JSON_INVALID_STATE as isize, AzResultCoreErrorJsonNestingOverflow = azsys::az_result_core_AZ_ERROR_JSON_NESTING_OVERFLOW as isize, AzResultCoreErrorJsonReaderDone = azsys::az_result_core_AZ_ERROR_JSON_READER_DONE as isize, AzResultCoreErrorHttpInvalidState = azsys::az_result_core_AZ_ERROR_HTTP_INVALID_STATE as isize, AzResultCoreErrorHttpPipelineInvalidPolicy = azsys::az_result_core_AZ_ERROR_HTTP_PIPELINE_INVALID_POLICY as isize, AzResultCoreErrorHttpInvalidMethodVerb = azsys::az_result_core_AZ_ERROR_HTTP_INVALID_METHOD_VERB as isize, AzResultCoreErrorHttpAuthenticationFailed = azsys::az_result_core_AZ_ERROR_HTTP_AUTHENTICATION_FAILED as isize, AzResultCoreErrorHttpResponseOverflow = azsys::az_result_core_AZ_ERROR_HTTP_RESPONSE_OVERFLOW as isize, AzResultCoreErrorHttpResponseCouldntResolveHost = azsys::az_result_core_AZ_ERROR_HTTP_RESPONSE_COULDNT_RESOLVE_HOST as isize, 
AzResultCoreErrorHttpCorruptResponseHeader = azsys::az_result_core_AZ_ERROR_HTTP_CORRUPT_RESPONSE_HEADER as isize, AzResultCoreErrorHttpEndOfHeaders = azsys::az_result_core_AZ_ERROR_HTTP_END_OF_HEADERS as isize, AzResultCoreErrorHttpAdapter = azsys::az_result_core_AZ_ERROR_HTTP_ADAPTER as isize, AzResultIoTErrorTopicNoMatch = azsys::az_result_iot_AZ_ERROR_IOT_TOPIC_NO_MATCH as isize, AzResultIoTErrorEndOfProperties = azsys::az_result_iot_AZ_ERROR_IOT_END_OF_PROPERTIES as isize, } impl AzReturnCode { pub fn from_i32(value: i32) -> AzReturnCode { unsafe { std::mem::transmute(value) } } } impl fmt::Display for AzReturnCode { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { AzReturnCode::AzResultCoreOk => write!(f, "AzResultCoreOk"), AzReturnCode::AzResultCoreErrorCanceled => write!(f, "AzResultCoreErrorCanceled"), AzReturnCode::AzResultCoreErrorArg => write!(f, "AzResultCoreErrorArg"), AzReturnCode::AzResultCoreErrorNotEnoughSpace => { write!(f, "AzResultCoreErrorNotEnoughSpace") } AzReturnCode::AzResultCoreErrorNotImplemented => { write!(f, "AzResultCoreErrorNotImplemented") } AzReturnCode::AzResultCoreErrorItemNotFound => { write!(f, "AzResultCoreErrorItemNotFound") } AzReturnCode::AzResultCoreErrorUnexpectedChar => { write!(f, "AzResultCoreErrorUnexpectedChar") } AzReturnCode::AzResultCoreErrorUnexpectedEnd => { write!(f, "AzResultCoreErrorUnexpectedEnd") } AzReturnCode::AzResultCoreErrorNotSupported => { write!(f, "AzResultCoreErrorNotSupported") } AzReturnCode::AzResultCoreErrorDependencyNotProvided => { write!(f, "AzResultCoreErrorDependencyNotProvided") } AzReturnCode::AzResultCoreErrorOutOfMemory => write!(f, "AzResultCoreErrorOutOfMemory"), AzReturnCode::AzResultCoreErrorJsonInvalidState => { write!(f, "AzResultCoreErrorJsonInvalidState") } AzReturnCode::AzResultCoreErrorJsonNestingOverflow => { write!(f, "AzResultCoreErrorJsonNestingOverflow") } AzReturnCode::AzResultCoreErrorJsonReaderDone => { write!(f, 
"AzResultCoreErrorJsonReaderDone") } AzReturnCode::AzResultCoreErrorHttpInvalidState => { write!(f, "AzResultCoreErrorHttpInvalidState") } AzReturnCode::AzResultCoreErrorHttpPipelineInvalidPolicy => { write!(f, "AzResultCoreErrorHttpPipelineInvalidPolicy") } AzReturnCode::AzResultCoreErrorHttpInvalidMethodVerb => { write!(f, "AzResultCoreErrorHttpInvalidMethodVerb") } AzReturnCode::AzResultCoreErrorHttpAuthenticationFailed => { write!(f, "AzResultCoreErrorHttpAuthenticationFailed") } AzReturnCode::AzResultCoreErrorHttpResponseOverflow => { write!(f, "AzResultCoreErrorHttpResponseOverflow") } AzReturnCode::AzResultCoreErrorHttpResponseCouldntResolveHost => { write!(f, "AzResultCoreErrorHttpResponseCouldntResolveHost") } AzReturnCode::AzResultCoreErrorHttpCorruptResponseHeader => { write!(f, "AzResultCoreErrorHttpCorruptResponseHeader") } AzReturnCode::AzResultCoreErrorHttpEndOfHeaders => { write!(f, "AzResultCoreErrorHttpEndOfHeaders") } AzReturnCode::AzResultCoreErrorHttpAdapter => write!(f, "AzResultCoreErrorHttpAdapter"), AzReturnCode::AzResultIoTErrorTopicNoMatch => write!(f, "AzResultIoTErrorTopicNoMatch"), AzReturnCode::AzResultIoTErrorEndOfProperties => { write!(f, "AzResultIoTErrorEndOfProperties") } _ => { let work: *const i32 = unsafe { std::mem::transmute::<&AzReturnCode, *const i32>(self) }; write!( f, "Unrecognized return code failue {}", format!("{}", unsafe { *work as i32 }) ) } } } }