text
stringlengths
8
4.13M
use crate::{ BalanceOf, ChannelId, Channels, Config, Error, Event, FeeModel, InboxResponses, Nonce, Outbox, OutboxMessageResult, Pallet, RelayerMessages, };
use frame_support::ensure;
use sp_messenger::messages::{ ChainId, Message, MessageWeightTag, Payload, ProtocolMessageRequest, ProtocolMessageResponse, RequestResponse, VersionedPayload, };
use sp_runtime::traits::Get;
use sp_runtime::{ArithmeticError, DispatchError, DispatchResult};

impl<T: Config> Pallet<T> {
    /// Takes a new message destined for dst_chain and adds the message to the outbox.
    ///
    /// Assigns the channel's next outbox nonce to the message, stores it in
    /// `Outbox`, queues it on the next relayer's message list, and emits an
    /// `OutboxMessage` event. Fails if the destination is the source chain,
    /// the channel is missing, or the outbox is full.
    pub(crate) fn new_outbox_message(
        src_chain_id: ChainId,
        dst_chain_id: ChainId,
        channel_id: ChannelId,
        payload: VersionedPayload<BalanceOf<T>>,
    ) -> Result<Nonce, DispatchError> {
        // ensure message is not meant to self.
        ensure!(
            src_chain_id != dst_chain_id,
            Error::<T>::InvalidMessageDestination
        );
        Channels::<T>::try_mutate(
            dst_chain_id,
            channel_id,
            |maybe_channel| -> Result<Nonce, DispatchError> {
                let channel = maybe_channel.as_mut().ok_or(Error::<T>::MissingChannel)?;
                // check if the outbox is full
                // NOTE(review): `Outbox::count()` counts entries across ALL
                // chains/channels, yet it is compared to this channel's
                // `max_outgoing_messages` — confirm this is intended and not
                // meant to be a per-channel count.
                let count = Outbox::<T>::count();
                ensure!(
                    count < channel.max_outgoing_messages,
                    Error::<T>::OutboxFull
                );
                let weight_tag = MessageWeightTag::outbox(&payload);
                let next_outbox_nonce = channel.next_outbox_nonce;
                // add message to outbox
                let msg = Message {
                    src_chain_id,
                    dst_chain_id,
                    channel_id,
                    nonce: next_outbox_nonce,
                    payload,
                    // piggy-back the latest response nonce we have seen so the
                    // dst chain can prune delivered responses.
                    last_delivered_message_response_nonce: channel
                        .latest_response_received_message_nonce,
                };
                Outbox::<T>::insert((dst_chain_id, channel_id, next_outbox_nonce), msg);
                // update channel state
                channel.next_outbox_nonce = next_outbox_nonce
                    .checked_add(Nonce::one())
                    .ok_or(DispatchError::Arithmetic(ArithmeticError::Overflow))?;
                // get next relayer
                let relayer_id = Self::next_relayer()?;
                // append this message to the chosen relayer's pending outbox list.
                RelayerMessages::<T>::mutate(relayer_id.clone(), |maybe_messages| {
                    let mut messages = maybe_messages.as_mut().cloned().unwrap_or_default();
                    messages.outbox.push((
                        dst_chain_id,
                        (channel_id, next_outbox_nonce),
                        weight_tag,
                    ));
                    *maybe_messages = Some(messages)
                });
                // emit event to notify relayer
                Self::deposit_event(Event::OutboxMessage {
                    chain_id: dst_chain_id,
                    channel_id,
                    nonce: next_outbox_nonce,
                    relayer_id,
                });
                Ok(next_outbox_nonce)
            },
        )
    }

    /// Removes messages responses from Inbox responses as the src_chain signalled that responses are delivered.
    /// all the messages with nonce <= latest_confirmed_nonce are deleted.
    ///
    /// Walks nonces downward from `latest_confirmed_nonce`, taking each stored
    /// inbox response and paying the relayer pool fee for it; stops at the
    /// first missing entry (already pruned) or when the nonce underflows.
    fn distribute_rewards_for_delivered_message_responses(
        dst_chain_id: ChainId,
        channel_id: ChannelId,
        latest_confirmed_nonce: Option<Nonce>,
        fee_model: &FeeModel<BalanceOf<T>>,
    ) -> DispatchResult {
        let mut current_nonce = latest_confirmed_nonce;
        while let Some(nonce) = current_nonce {
            // for every inbox response we take, distribute the reward to the relayers.
            if InboxResponses::<T>::take((dst_chain_id, channel_id, nonce)).is_none() {
                // already cleaned up to this nonce in a previous call.
                return Ok(());
            }
            Self::distribute_reward_to_relayers(fee_model.inbox_fee.relayer_pool_fee)?;
            // checked_sub returns None at nonce zero, terminating the loop.
            current_nonce = nonce.checked_sub(Nonce::one())
        }
        Ok(())
    }

    /// Process the incoming messages from given chain_id and channel_id.
    ///
    /// Dispatches the payload to the protocol or endpoint handler, stores the
    /// produced response in `InboxResponses`, assigns it to the next relayer,
    /// bumps the channel's next inbox nonce, prunes delivered responses, and
    /// emits `InboxMessageResponse`.
    pub(crate) fn process_inbox_messages(
        msg: Message<BalanceOf<T>>,
        msg_weight_tag: MessageWeightTag,
    ) -> DispatchResult {
        // from this chain's perspective, the sender is the "dst" of our reply.
        let (dst_chain_id, channel_id, nonce) = (msg.src_chain_id, msg.channel_id, msg.nonce);
        let channel =
            Channels::<T>::get(dst_chain_id, channel_id).ok_or(Error::<T>::MissingChannel)?;
        // invariant established by pre_dispatch; violation is a bug.
        assert_eq!(
            nonce,
            channel.next_inbox_nonce,
            "The message nonce and the channel next inbox nonce must be the same as checked in pre_dispatch; qed"
        );
        let response = match msg.payload {
            // process incoming protocol message.
            VersionedPayload::V0(Payload::Protocol(RequestResponse::Request(req))) => {
                Payload::Protocol(RequestResponse::Response(
                    Self::process_incoming_protocol_message_req(
                        dst_chain_id,
                        channel_id,
                        req,
                        &msg_weight_tag,
                    ),
                ))
            }
            // process incoming endpoint message.
            VersionedPayload::V0(Payload::Endpoint(RequestResponse::Request(req))) => {
                let response = if let Some(endpoint_handler) =
                    T::get_endpoint_response_handler(&req.dst_endpoint)
                {
                    // the declared weight tag must match the actual endpoint.
                    if msg_weight_tag
                        != MessageWeightTag::EndpointRequest(req.dst_endpoint.clone())
                    {
                        return Err(Error::<T>::WeightTagNotMatch.into());
                    }
                    endpoint_handler.message(dst_chain_id, (channel_id, nonce), req)
                } else {
                    Err(Error::<T>::NoMessageHandler.into())
                };
                Payload::Endpoint(RequestResponse::Response(response))
            }
            // return error for all the remaining branches
            VersionedPayload::V0(payload) => match payload {
                Payload::Protocol(_) => Payload::Protocol(RequestResponse::Response(Err(
                    Error::<T>::InvalidMessagePayload.into(),
                ))),
                Payload::Endpoint(_) => Payload::Endpoint(RequestResponse::Response(Err(
                    Error::<T>::InvalidMessagePayload.into(),
                ))),
            },
        };
        let resp_payload = VersionedPayload::V0(response);
        let weight_tag = MessageWeightTag::inbox_response(msg_weight_tag, &resp_payload);
        InboxResponses::<T>::insert(
            (dst_chain_id, channel_id, nonce),
            Message {
                src_chain_id: T::SelfChainId::get(),
                dst_chain_id,
                channel_id,
                nonce,
                payload: resp_payload,
                // this nonce is not considered in response context.
                last_delivered_message_response_nonce: None,
            },
        );
        // get the next relayer
        let relayer_id = Self::next_relayer()?;
        RelayerMessages::<T>::mutate(relayer_id.clone(), |maybe_messages| {
            let mut messages = maybe_messages.as_mut().cloned().unwrap_or_default();
            messages
                .inbox_responses
                .push((dst_chain_id, (channel_id, nonce), weight_tag));
            *maybe_messages = Some(messages)
        });
        // advance the channel's expected inbox nonce.
        Channels::<T>::mutate(
            dst_chain_id,
            channel_id,
            |maybe_channel| -> DispatchResult {
                let channel = maybe_channel.as_mut().ok_or(Error::<T>::MissingChannel)?;
                channel.next_inbox_nonce = nonce
                    .checked_add(Nonce::one())
                    .ok_or(DispatchError::Arithmetic(ArithmeticError::Overflow))?;
                Ok(())
            },
        )?;
        // reward relayers for relaying message responses to src_chain.
        // clean any delivered inbox responses
        Self::distribute_rewards_for_delivered_message_responses(
            dst_chain_id,
            channel_id,
            msg.last_delivered_message_response_nonce,
            &channel.fee,
        )?;
        Self::deposit_event(Event::InboxMessageResponse {
            chain_id: dst_chain_id,
            channel_id,
            nonce,
            relayer_id,
        });
        Ok(())
    }

    /// Handles an incoming protocol request (channel open/close) after
    /// verifying its weight tag matches the request kind.
    fn process_incoming_protocol_message_req(
        chain_id: ChainId,
        channel_id: ChannelId,
        req: ProtocolMessageRequest<BalanceOf<T>>,
        weight_tag: &MessageWeightTag,
    ) -> Result<(), DispatchError> {
        match req {
            ProtocolMessageRequest::ChannelOpen(_) => {
                if weight_tag != &MessageWeightTag::ProtocolChannelOpen {
                    return Err(Error::<T>::WeightTagNotMatch.into());
                }
                Self::do_open_channel(chain_id, channel_id)
            }
            ProtocolMessageRequest::ChannelClose => {
                if weight_tag != &MessageWeightTag::ProtocolChannelClose {
                    return Err(Error::<T>::WeightTagNotMatch.into());
                }
                Self::do_close_channel(chain_id, channel_id)
            }
        }
    }

    /// Handles the dst chain's response to one of our protocol requests.
    /// Only an accepted ChannelOpen requires action (opening our side).
    fn process_incoming_protocol_message_response(
        chain_id: ChainId,
        channel_id: ChannelId,
        req: ProtocolMessageRequest<BalanceOf<T>>,
        resp: ProtocolMessageResponse,
        weight_tag: &MessageWeightTag,
    ) -> DispatchResult {
        match (req, resp) {
            // channel open request is accepted by dst_chain.
            // open channel on our end.
            (ProtocolMessageRequest::ChannelOpen(_), Ok(_)) => {
                if weight_tag != &MessageWeightTag::ProtocolChannelOpen {
                    return Err(Error::<T>::WeightTagNotMatch.into());
                }
                Self::do_open_channel(chain_id, channel_id)
            }
            // for rest of the branches we dont care about the outcome and return Ok
            // for channel close request, we do not care about the response as channel is already closed.
            // for channel open request and request is rejected, channel is left in init state and no new messages are accepted.
            _ => Ok(()),
        }
    }

    /// Processes a response for a message we previously sent: matches it
    /// against the original outbox entry, dispatches to the protocol or
    /// endpoint handler, pays the relayer pool, records the delivered nonce,
    /// and emits `OutboxMessageResult`.
    pub(crate) fn process_outbox_message_responses(
        resp_msg: Message<BalanceOf<T>>,
        resp_msg_weight_tag: MessageWeightTag,
    ) -> DispatchResult {
        let (dst_chain_id, channel_id, nonce) =
            (resp_msg.src_chain_id, resp_msg.channel_id, resp_msg.nonce);
        let channel =
            Channels::<T>::get(dst_chain_id, channel_id).ok_or(Error::<T>::MissingChannel)?;
        // responses must arrive strictly in nonce order; checked in pre_dispatch.
        assert_eq!(
            nonce,
            channel
                .latest_response_received_message_nonce
                .and_then(|nonce| nonce.checked_add(Nonce::one()))
                .unwrap_or(Nonce::zero()),
            "The message nonce and the channel last msg response nonce must be the same as checked in pre_dispatch; qed"
        );
        // fetch original request
        let req_msg = Outbox::<T>::take((dst_chain_id, channel_id, nonce))
            .ok_or(Error::<T>::MissingMessage)?;
        let resp = match (req_msg.payload, resp_msg.payload) {
            // process incoming protocol outbox message response.
            (
                VersionedPayload::V0(Payload::Protocol(RequestResponse::Request(req))),
                VersionedPayload::V0(Payload::Protocol(RequestResponse::Response(resp))),
            ) => Self::process_incoming_protocol_message_response(
                dst_chain_id,
                channel_id,
                req,
                resp,
                &resp_msg_weight_tag,
            ),
            // process incoming endpoint outbox message response.
            (
                VersionedPayload::V0(Payload::Endpoint(RequestResponse::Request(req))),
                VersionedPayload::V0(Payload::Endpoint(RequestResponse::Response(resp))),
            ) => {
                if let Some(endpoint_handler) =
                    T::get_endpoint_response_handler(&req.dst_endpoint)
                {
                    if resp_msg_weight_tag
                        != MessageWeightTag::EndpointResponse(req.dst_endpoint.clone())
                    {
                        return Err(Error::<T>::WeightTagNotMatch.into());
                    }
                    endpoint_handler.message_response(dst_chain_id, (channel_id, nonce), req, resp)
                } else {
                    Err(Error::<T>::NoMessageHandler.into())
                }
            }
            // request/response payload kinds do not line up.
            (_, _) => Err(Error::<T>::InvalidMessagePayload.into()),
        };
        // distribute rewards to relayers for relaying the outbox messages.
        Self::distribute_reward_to_relayers(channel.fee.outbox_fee.relayer_pool_fee)?;
        Channels::<T>::mutate(
            dst_chain_id,
            channel_id,
            |maybe_channel| -> DispatchResult {
                let channel = maybe_channel.as_mut().ok_or(Error::<T>::MissingChannel)?;
                channel.latest_response_received_message_nonce = Some(nonce);
                Ok(())
            },
        )?;
        // deposit event notifying the message status.
        match resp {
            Ok(_) => Self::deposit_event(Event::OutboxMessageResult {
                chain_id: dst_chain_id,
                channel_id,
                nonce,
                result: OutboxMessageResult::Ok,
            }),
            Err(err) => Self::deposit_event(Event::OutboxMessageResult {
                chain_id: dst_chain_id,
                channel_id,
                nonce,
                result: OutboxMessageResult::Err(err),
            }),
        }
        Ok(())
    }
}
/* chapter 4 syntax and semantics */

/// Demonstrates `String + &str` concatenation: the owned `String` is
/// consumed and the slice is appended to it.
fn main() {
    let greeting = String::from("hello");
    let suffix = ", world!";
    let combined = greeting + suffix;
    println!("{}", combined);
}
// output should be:
/* */
use string_length; //use std::io;

/// Demo: compute the length of a string via the external
/// `string_length::mystring::len_calculator` helper and print it.
fn main() {
    let name = String::from("Haseeb");
    let length = string_length::mystring::len_calculator(name);
    println!("Returned length is {}", length);
}
/// Rust-orange in 0xRRGGBBAA form.
const RUST: u32 = 0xB7410E00;

fn main() {
    println!(
        "r: {}, g: {}, b: {}, a: {}",
        red(RUST),
        green(RUST),
        blue(RUST),
        alpha(RUST)
    );
}

/// Red channel: bits 31..24.
fn red(color: u32) -> u32 {
    (color >> 24) & 0xff
}

/// Green channel: bits 23..16.
fn green(color: u32) -> u32 {
    (color >> 16) & 0xff
}

/// Blue channel: bits 15..8.
fn blue(color: u32) -> u32 {
    (color >> 8) & 0xff
}

/// Alpha channel: low byte, no shift needed.
fn alpha(color: u32) -> u32 {
    color & 0xff
}
/// Interface to libthread_db.so
///
/// See /usr/include/thread_db.h
use dlopen_derive::WrapperApi;
use dlopen::wrapper::{Container, WrapperApi};

use crate::proc_service::{ProcHandle, PsAddr};

/// Error codes of libthread_db (`td_err_e`). Layout must match the C enum,
/// hence `#[repr(C)]` and the exact variant order.
#[derive(Debug)]
#[repr(C)]
pub enum TdErr {
    /// No error.
    Ok,
    /// No further specified error.
    Err,
    /// No matching thread found.
    NoThr,
    /// No matching synchronization handle found.
    NoSv,
    /// No matching light-weighted process found.
    NoLWP,
    /// Invalid process handle.
    BadPH,
    /// Invalid thread handle.
    BadTH,
    /// Invalid synchronization handle.
    BadSH,
    /// Invalid thread agent.
    BadTA,
    /// Invalid key.
    BadKEY,
    /// No event available.
    NoMsg,
    /// No floating-point register content available.
    NoFPRegs,
    /// Application not linked with thread library.
    NoLibthread,
    /// Requested event is not supported.
    NoEvent,
    /// Capability not available.
    NoCapab,
    /// Internal debug library error.
    DbErr,
    /// Operation is not applicable.
    NoAplic,
    /// No thread-specific data available.
    NoTSD,
    /// Out of memory.
    Malloc,
    /// Not entire register set was read or written.
    PartialReg,
    /// X register set not available for given thread.
    NoXregs,
    /// Thread has not yet allocated TLS for given module.
    TLSDefer,
    NoTalloc,
    /// Version if libpthread and libthread_db do not match.
    Version,
    /// There is no TLS segment in the given module.
    NoTLS,
}

/// Handle for a process. Opaque type.
pub type TdThrAgent = libc::c_void;

/// The actual thread handle type `td_thrhandle_t`. Opaque (but copyable) type.
#[derive(Copy, Clone)]
pub struct TdThrHandle {
    _th_ta_p: *mut TdThrAgent,
    _th_unique: *mut PsAddr,
}

/// Possible thread states. AnyState is a pseudo-state used to
/// select threads regardless of state in td_ta_thr_iter().
#[allow(dead_code)]
#[derive(Debug)]
#[repr(C)]
pub enum TdThrState {
    AnyState,
    Unknown,
    Stopped,
    Run,
    Active,
    Zombie,
    Sleep,
    StoppedAsleep,
}

/// Thread type: user or system. TD_THR_ANY_TYPE is a pseudo-type used
/// to select threads regardless of type in td_ta_thr_iter().
#[allow(dead_code)]
#[derive(Debug)]
#[repr(C)]
pub enum TdThrType { AnyType, User, System }

///Bitmask of enabled events.
#[derive(Debug)]
#[repr(C)]
pub struct TdThrEvents {
    event_bits: [u32; 2],
}

/// Gathered statistics about the process.
#[derive(Default,Debug)]
#[repr(C)]
pub struct TdTaStats {
    /// Total number of threads in use.
    pub nthreads: i32,
    /// Concurrency level requested by user.
    pub r_concurrency: i32,
    /// Average runnable threads, numerator.
    pub nrunnable_num: i32,
    /// Average runnable threads, denominator.
    pub nrunnable_den: i32,
    /// Achieved concurrency level, numerator.
    pub a_concurrency_num: i32,
    /// Achieved concurrency level, denominator.
    pub a_concurrency_den: i32,
    /// Average number of processes in use, numerator.
    pub nlwps_num: i32,
    /// Average number of processes in use, denominator.
    pub nlwps_den: i32,
    /// Average number of idling processes, numerator.
    pub nidle_num: i32,
    /// Average number of idling processes, denominator.
    pub nidle_den: i32,
}

/// Information about the thread. C layout must mirror `td_thrinfo_t`.
#[repr(C)]
pub struct TdThrInfo {
    /// Process handle.
    ti_ta_p: *mut TdThrAgent,
    /// Unused.
    ti_user_flags: libc::c_uint,
    /// Thread ID returned by pthread_create().
    pub ti_tid: libc::pthread_t,
    /// Pointer to thread-local data.
    pub ti_tls: *mut u8,
    /// Start function passed to pthread_create().
    pub ti_startfunc: *mut PsAddr,
    /// Base of thread's stack.
    pub ti_stkbase: *mut PsAddr,
    /// Size of thread's stack.
    pub ti_stksize: libc::c_long,
    /// Unused.
    ti_ro_area: *mut PsAddr,
    /// Unused.
    ti_ro_size: libc::c_int,
    /// Thread state.
    pub ti_state: TdThrState,
    /// Nonzero if suspended by debugger
    pub ti_db_suspended: libc::c_uchar,
    /// Type of the thread (system vs user thread).
    pub ti_type: TdThrType,
    /// Unused.
    ti_pc: libc::intptr_t,
    /// Unused.
    ti_sp: libc::intptr_t,
    /// Unused.
    ti_flags: libc::c_ushort,
    /// Thread priority.
    pub ti_pri: libc::c_int,
    /// Kernel PID for this thread.
    pub ti_lid: libc::pid_t,
    /// Signal mask.
    pub ti_sigmask: libc::sigset_t,
    /// Nonzero if event reporting enabled.
    pub ti_traceme: libc::c_uchar,
    /// Unused.
    ti_preemptflag: libc::c_uchar,
    /// Unused.
    ti_pirecflag: libc::c_uchar,
    /// Set of pending signals.
    pub ti_pending: libc::sigset_t,
    /// Set of enabled events.
    pub ti_events: TdThrEvents,
}

// Manual Debug impl: prints only the commonly useful identifying fields
// rather than all raw pointers.
impl std::fmt::Debug for TdThrInfo {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "TdThrInfo {{ ti_tid: {}, ti_type: {:?}, ti_pri: {}, ti_lid: {} }}",
            self.ti_tid, self.ti_type, self.ti_pri, self.ti_lid)
    }
}

/// Dynamically-loaded API surface of libthread_db, resolved at runtime by
/// `dlopen`'s `WrapperApi` derive (field name = exported symbol name).
#[derive(WrapperApi)]
pub struct ThreadDb {
    /// Initialize the thread debug support library.
    td_init: unsafe extern "C" fn() -> TdErr,
    /// Generate new thread debug library handle for process PS.
    td_ta_new: unsafe extern "C" fn(ps: *mut ProcHandle, ta: *mut *mut TdThrAgent) -> TdErr,
    /// Free resources allocated for TA.
    td_ta_delete: unsafe extern "C" fn(ta: *mut TdThrAgent) -> TdErr,
    /// Get number of currently running threads in process associated with TA.
    td_ta_get_nthreads: unsafe extern "C" fn(ta: *const TdThrAgent, np: *mut i32) -> TdErr,
    /// Enable collecting statistics for process associated with TA.
    td_ta_enable_stats: unsafe extern "C" fn(ta: *mut TdThrAgent, enable: i32) -> TdErr,
    /// Reset statistics.
    td_ta_reset_stats: unsafe extern "C" fn(ta: *mut TdThrAgent) -> TdErr,
    /// Retrieve statistics from process associated with TA.
    td_ta_get_stats: unsafe extern "C" fn(ta: *const TdThrAgent, stats: *mut TdTaStats) -> TdErr,
    /// Call for each thread in a process associated with TA the callback function CALLBACK.
    /// From looking at the glibc implementation:
    /// - Return value of `callback`: 0 => ok, _ => error
    /// - `state`: must be `TdThrState::AnyState`
    /// - `ti_prio`: minimum priority (probably 0 for all)
    /// - `ti_sigmask` and `ti_user_flags` are unused
    td_ta_thr_iter: unsafe extern "C" fn(ta: *mut TdThrAgent,
        callback: unsafe extern "C" fn(handle: *const TdThrHandle, cbdata: *mut libc::c_void) -> i32,
        cbdata: *mut libc::c_void,
        state: TdThrState,
        pri: i32,
        ti_sigmask: *mut libc::sigset_t,
        ti_user_flags: u32) -> TdErr,
    /// Validate that TH is a thread handle.
    td_thr_validate: unsafe extern "C" fn(handle: *const TdThrHandle) -> TdErr,
    /// Return information about thread TH.
    td_thr_get_info: unsafe extern "C" fn(handle: *const TdThrHandle, info: *mut TdThrInfo) -> TdErr,
}

/// Loads `libthread_db.so`, runs `td_init`, and returns the resolved API.
/// Panics if the library cannot be loaded or a symbol is missing.
pub fn open_lib() -> Container<ThreadDb> {
    dummy();
    eprintln!("open_lib");
    let container: Container<ThreadDb> = unsafe { Container::load("libthread_db.so") }.unwrap();
    let res = unsafe { container.td_init() };
    eprintln!("td_init -> {:?}", res);
    container
}

/// Dummy function to fool dead code elimination.
// libthread_db calls back into our exported ps_* symbols; referencing one
// here keeps the linker from discarding them.
fn dummy() {
    unsafe {
        use crate::proc_service::*;
        let mut handle = ProcHandle { pid: 0, symbols: std::collections::HashMap::new() };
        ps_getpid(&mut handle);
    }
}
use lr1_parser::{rule, symbol, Lr1ParserBuilder};
use structopt::StructOpt;

/// Command-line options: an optional expression to parse.
#[derive(StructOpt, Debug)]
#[structopt(
    name = env!("CARGO_PKG_NAME"),
    author = env!("CARGO_PKG_AUTHORS"),
    about = env!("CARGO_PKG_DESCRIPTION"),
)]
struct Opt {
    #[structopt(help = "Expression to be parsed")]
    expr: Option<String>,
}

fn main() {
    let opt = Opt::from_args();

    // Classic LR(1) arithmetic grammar: E handles +/-, T handles */和/,
    // F handles parentheses and numbers.
    let parser = Lr1ParserBuilder::new()
        .add_rule(rule!(E -> E "+" T))
        .add_rule(rule!(E -> E "-" T))
        .add_rule(rule!(E -> T))
        .add_rule(rule!(T -> T "*" F))
        .add_rule(rule!(T -> T "/" F))
        .add_rule(rule!(T -> F))
        .add_rule(rule!(F -> "(" E ")"))
        .add_rule(rule!(F -> "num"))
        .starting_symbol(symbol!(E))
        .build();

    let expr = match opt.expr {
        Some(given) => given,
        None => {
            let fallback = "(12+(34/56)-78)";
            println!("No expression specified, defaulting to {}", fallback);
            fallback.into()
        }
    };

    // Hand-rolled lexer: runs of digits become one number token; operators
    // and parentheses are single-char tokens; whitespace only separates.
    let mut tokens: Vec<String> = Vec::new();
    let mut digit_run = String::new();
    for ch in expr.chars() {
        if ch.is_ascii_digit() {
            digit_run.push(ch);
            continue;
        }
        match ch {
            '+' | '-' | '*' | '/' | '(' | ')' => {
                if !digit_run.is_empty() {
                    tokens.push(digit_run.clone());
                    digit_run.clear();
                }
                tokens.push(ch.to_string());
            }
            c if c.is_whitespace() => {
                if !digit_run.is_empty() {
                    tokens.push(digit_run.clone());
                    digit_run.clear();
                }
            }
            _ => panic!("Lexer error: Unexpected character {:?}", ch),
        }
    }
    // Flush a trailing number token.
    if !digit_run.is_empty() {
        tokens.push(digit_run.clone());
        digit_run.clear();
    }

    println!("Now start parsing {:?}", tokens);
    parser.parse(tokens);
}
// NOTE(review): the original read `use figlet::figlet` (missing `;`) and then
// `println!("{}", figlet)`, which names a path, not a value — neither line
// compiles. The figlet call is stubbed out until the crate's actual API
// (e.g. a `Figlet` renderer type) is confirmed.

/// Prints a greeting; a figlet banner is intended to follow (TODO).
fn main() {
    println!("Hello, world!");
    // TODO: render a figlet banner here once the figlet crate API is known.
}
use app::{ get_immutable_store, get_locales, get_mutable_store, get_templates_vec, get_translations_manager, }; use futures::executor::block_on; use perseus::{build_app, SsrNode}; fn main() { let exit_code = real_main(); std::process::exit(exit_code) } fn real_main() -> i32 { let immutable_store = get_immutable_store(); let mutable_store = get_mutable_store(); let translations_manager = block_on(get_translations_manager()); let locales = get_locales(); // Build the site for all the common locales (done in parallel) let fut = build_app( get_templates_vec::<SsrNode>(), &locales, (&immutable_store, &mutable_store), &translations_manager, // We use another binary to handle exporting false, ); let res = block_on(fut); if let Err(err) = res { eprintln!("Static generation failed: '{}'.", err); 1 } else { println!("Static generation successfully completed!"); 0 } }
#[macro_use] extern crate cached; #[macro_use] extern crate lazy_static; trait Monad<A> { fn return_(t: A) -> Self; //:: A -> Monad<A> fn bind<MB,B>(m: Self, f: Fn(A) -> MB) -> MB where MB: Monad<B>; //:: Monad<A> -> (A -> Monad<B>)) -> Monad<B> } fn not_curried(p1: u32, p2: u32) -> u32 { p1 + p2 } fn curried(p1: u32) -> Box<Fn(u32) -> u32> { Box::new(move |p2: u32| { p1 + p2 }) } cached!{ FIB; fn fib(n: u64) -> u64 = { if n == 0 || n == 1 { return n } fib(n-1) + fib(n-2) } } fn main() { let fsin = |x: f64| x.sin(); let fabs = |x: f64| x.abs(); let transform = |x: f64| fabs(fsin(x)); not_curried(1, 2); curried(1)(2); let immutable_v1 = 1; //immutable_v1 = 2; //invalid let mut mutable_v2 = 1; mutable_v2 = 2; let x = { println!("side effect"); 1 + 2 }; let y = ||{ println!("side effect"); 1 + 2 }; fib(30); }
use exonum::storage::{Fork, MapIndex, Snapshot}; use currency::assets::{AssetId, AssetInfo}; use currency::SERVICE_NAME; /// Schema for accessing global asset information. pub struct Schema<S>(pub S) where S: AsRef<Snapshot>; impl<S> Schema<S> where S: AsRef<Snapshot>, { /// Internal `MapIndex` for this `Schema`. pub fn index(self) -> MapIndex<S, AssetId, AssetInfo> { let key = SERVICE_NAME.to_string() + ".assets"; MapIndex::new(key, self.0) } /// Fetch asset info from the database. pub fn fetch(self, id: &AssetId) -> Option<AssetInfo> { self.index().get(id) } } impl<'a> Schema<&'a mut Fork> { /// Internal `MapIndex` for this `Schema`, with mutable access. pub fn index_mut(&mut self) -> MapIndex<&mut Fork, AssetId, AssetInfo> { let key = SERVICE_NAME.to_string() + ".assets"; MapIndex::new(key, self.0) } /// Store asset info in the database. pub fn store(&mut self, id: &AssetId, asset: AssetInfo) { match asset.amount() { 0 => self.remove(id), _ => self.index_mut().put(&*id, asset), }; } /// Remove asset info from the database. pub fn remove(&mut self, id: &AssetId) { self.index_mut().remove(id) } }
use std::io::{stdin, Read, StdinLock};
use std::str::FromStr;

/// Whitespace-separated token scanner over locked stdin.
#[allow(dead_code)]
struct Scanner<'a> {
    cin: StdinLock<'a>,
}

#[allow(dead_code)]
impl<'a> Scanner<'a> {
    fn new(cin: StdinLock<'a>) -> Scanner<'a> {
        Scanner { cin: cin }
    }

    /// Reads the next whitespace-delimited token and parses it; None on failure.
    fn read<T: FromStr>(&mut self) -> Option<T> {
        let token = self
            .cin
            .by_ref()
            .bytes()
            .map(|c| c.unwrap() as char)
            .skip_while(|c| c.is_whitespace())
            .take_while(|c| !c.is_whitespace())
            .collect::<String>();
        token.parse::<T>().ok()
    }

    /// Reads one value, panicking on EOF or parse failure.
    fn input<T: FromStr>(&mut self) -> T {
        self.read().unwrap()
    }

    /// Reads `len` values into a vector.
    fn vec<T: FromStr>(&mut self, len: usize) -> Vec<T> {
        (0..len).map(|_| self.input()).collect()
    }

    /// Reads a `row` x `col` matrix.
    fn mat<T: FromStr>(&mut self, row: usize, col: usize) -> Vec<Vec<T>> {
        (0..row).map(|_| self.vec(col)).collect()
    }
}

/// Adjacency-list graph; callers add both directions for undirected edges.
#[derive(Clone, Debug)]
struct Graph {
    n: usize,
    adj_list: Vec<Vec<usize>>,
}

impl Graph {
    fn new(n: usize) -> Self {
        let adj_list = vec![vec![]; n];
        Graph { n, adj_list }
    }

    fn add_edge(&mut self, u: usize, v: usize) {
        self.adj_list[u].push(v);
    }
}

fn main() {
    let cin = stdin();
    let cin = cin.lock();
    let mut sc = Scanner::new(cin);
    // n vertices, m undirected edges (1-indexed in the input).
    let n: usize = sc.input();
    let m: usize = sc.input();
    let mut graph = Graph::new(n);
    for _ in 0..m {
        let a: usize = sc.input();
        let b: usize = sc.input();
        graph.add_edge(a - 1, b - 1);
        graph.add_edge(b - 1, a - 1);
    }
    use std::collections::BinaryHeap;
    // Dijkstra with unit weights from vertex 0; `prev` records the
    // shortest-path-tree parent of each vertex (-1 = none yet).
    let mut dist: Vec<_> = (0..graph.n).map(|_| std::i64::MAX).collect();
    let mut prev = vec![-1i64; n];
    let mut heap = BinaryHeap::new();
    dist[0] = 0i64;
    heap.push(std::cmp::Reverse((0i64, 0)));
    while let Some(std::cmp::Reverse((cost, cur))) = heap.pop() {
        // stale heap entry: a shorter path was already settled.
        if cost > dist[cur] {
            continue;
        }
        for &next in graph.adj_list[cur].iter() {
            if cost + 1 < dist[next] {
                dist[next] = cost + 1;
                prev[next] = cur as i64;
                heap.push(std::cmp::Reverse((cost + 1, next)));
            }
        }
    }
    // for d in &dist {
    //     println!("d {}", d);
    // }
    // NOTE(review): `ans` is zero-initialized, so vertices whose prev stayed
    // -1 (the root, and any unreachable vertex) keep parent 0; the later
    // `x != -1` test below can therefore never be false — confirm intended.
    let mut ans = vec![0; n];
    for (i, &p) in prev.iter().enumerate() {
        if p != -1 {
            ans[i] = p;
        }
    }
    // Rebuild a graph from the chosen parent pointers.
    let mut g = Graph::new(n);
    for (i, &x) in ans.iter().enumerate() {
        if x != -1 {
            g.add_edge(i, x as usize);
            g.add_edge(x as usize, i);
        }
    }
    // Validate: the parent tree must reproduce the original distances.
    let dist2 = shortest_path(&g, 0);
    for i in 0..n {
        if dist2[i] != dist[i] {
            println!("No");
            return;
        }
    }
    // Print the 1-indexed parent of every vertex except the root.
    println!("Yes");
    for x in ans[1..].iter() {
        println!("{}", x + 1);
    }
}

/// Unit-weight Dijkstra from `start`; returns per-vertex distances
/// (i64::MAX = unreachable).
fn shortest_path(graph: &Graph, start: usize) -> Vec<i64> {
    use std::collections::BinaryHeap;
    let mut dist: Vec<_> = (0..graph.n).map(|_| std::i64::MAX).collect();
    let mut heap = BinaryHeap::new();
    dist[start] = 0i64;
    heap.push(std::cmp::Reverse((0i64, start)));
    while let Some(std::cmp::Reverse((cost, cur))) = heap.pop() {
        if cost > dist[cur] {
            continue;
        }
        for &next in graph.adj_list[cur].iter() {
            if cost + 1 < dist[next] {
                heap.push(std::cmp::Reverse((cost + 1, next)));
                dist[next] = cost + 1;
            }
        }
    }
    dist
}
use regex::Regex;
use std::collections::HashMap;
use std::io::{self};

/// Hex-grid coordinate using a doubled-width axial encoding
/// (east = (2,0), so diagonal steps are (±1, ±1)).
#[derive(PartialEq, Eq, Hash, Copy, Clone, Debug)]
struct Point {
    x: i64,
    y: i64,
}

/// Tile color; tiles start White and flip when addressed.
#[derive(PartialEq, Eq, Hash, Copy, Clone, Debug)]
enum Color {
    White,
    Black,
}

impl Point {
    fn new(x: i64, y: i64) -> Point {
        Point { x, y }
    }

    /// Component-wise addition (one hex step).
    fn add(&self, other: &Point) -> Point {
        Point {
            x: self.x + other.x,
            y: self.y + other.y,
        }
    }
}

/// Counts how many of `point`'s six neighbours are Black in `previous_tiles`;
/// absent tiles count as White.
fn get_neighbouring_black_tiles(
    point: &Point,
    previous_tiles: &HashMap<Point, Color>,
    neighbours: &Vec<Point>,
) -> usize {
    let mut black_count = 0;
    for n in neighbours.iter() {
        match previous_tiles.get(&point.add(n)) {
            Some(c) => match *c {
                Color::Black => {
                    black_count += 1;
                }
                _ => (),
            },
            None => (),
        }
    }
    black_count
}

/// Applies the day-24 cellular-automaton rule: white flips to black with
/// exactly 2 black neighbours; black flips to white with 0 or >2.
fn get_new_color(black_count: usize, color: Color) -> Color {
    match color {
        Color::White => {
            if black_count == 2 {
                Color::Black
            } else {
                Color::White
            }
        }
        Color::Black => {
            if black_count == 0 || black_count > 2 {
                Color::White
            } else {
                Color::Black
            }
        }
    }
}

/// Runs both puzzle parts for each input file and asserts the known answers
/// (part 1 after flipping, part 2 after 100 automaton generations).
fn main() -> io::Result<()> {
    // (filename, expected part-1 count, expected part-2 count)
    let files_results = vec![
        ("test.txt", 10, 2208),
        ("test2.txt", 1, 0),
        ("test3.txt", 1, 0),
        ("input.txt", 330, 3711),
    ];
    for (f, result_1, result_2) in files_results.into_iter() {
        // Direction tokens mapped to doubled-coordinate offsets.
        let direction_map: HashMap<&str, Point> = vec![
            ("e", Point::new(2, 0)),
            ("se", Point::new(1, -1)),
            ("sw", Point::new(-1, -1)),
            ("w", Point::new(-2, 0)),
            ("nw", Point::new(-1, 1)),
            ("ne", Point::new(1, 1)),
        ]
        .into_iter()
        .collect();
        let file_content: Vec<String> = std::fs::read_to_string(f)?
            .lines()
            .map(|x| x.to_string())
            .collect();
        // Non-greedy repetition splits each line into single direction tokens.
        let re = Regex::new(r"(e|se|sw|w|nw|ne)+?").unwrap();
        let mut tiles: HashMap<Point, Color> =
            vec![(Point::new(0, 0), Color::White)].into_iter().collect();
        for line in file_content.iter() {
            let instructions: Vec<Point> = re
                .captures_iter(line)
                .map(|cap| direction_map.get(&cap[1]).unwrap().clone())
                .collect();
            // Follow the path from the origin to the addressed tile.
            let address = instructions
                .into_iter()
                .fold(Point::new(0, 0), |pt, instr| pt.add(&instr));
            // Flip the addressed tile (new tiles start Black: first flip).
            tiles
                .entry(address)
                .and_modify(|x| {
                    *x = match x {
                        Color::White => Color::Black,
                        Color::Black => Color::White,
                    }
                })
                .or_insert(Color::Black);
        }
        assert_eq!(
            tiles.iter().filter(|(_, v)| **v == Color::Black).count(),
            result_1
        );
        // Part 2: 100 generations; only tracked tiles and white neighbours of
        // black tiles can change, so iterate over those.
        let mut previous_tiles = tiles.clone();
        tiles.clear();
        let neighbours: Vec<Point> = direction_map.values().cloned().collect();
        for _ in 0..100 {
            for (point, color) in previous_tiles.iter() {
                let black_count =
                    get_neighbouring_black_tiles(&point, &previous_tiles, &neighbours);
                let new_color = get_new_color(black_count, *color);
                tiles.insert(point.clone(), new_color);
                if *color == Color::Black {
                    // Untracked white neighbours of a black tile may flip too.
                    for n in neighbours.iter() {
                        let tmp_point: Point = point.add(n);
                        if previous_tiles.contains_key(&tmp_point) {
                            continue;
                        }
                        let black_count =
                            get_neighbouring_black_tiles(&tmp_point, &previous_tiles, &neighbours);
                        let new_color = get_new_color(black_count, Color::White);
                        if new_color == Color::Black {
                            tiles.insert(tmp_point, new_color);
                        }
                    }
                }
            }
            previous_tiles = tiles.clone();
        }
        assert_eq!(
            tiles.iter().filter(|(_, v)| **v == Color::Black).count(),
            result_2
        );
    }
    Ok(())
}
#![cfg_attr(rustfmt, rustfmt_skip)]
//trace_macros!(true);

// Parser-combinator macros. Each macro expands to a closure of type
// `|&mut parser::Parser, &mut stack::Stack| -> bool` that returns whether
// the rule matched; matched expressions are pushed on the stack.

/// Tries each rule in order; first one that accepts wins.
#[macro_export]
macro_rules! or {
    [$($parse_funcs: expr),+] => {
        |parser: &mut parser::Parser, stack: &mut stack::Stack| -> bool {
            $(
                let result = $parse_funcs(parser, stack);
                if result {
                    debug_parser!("Or statement rule {} accepted expression {:?}. Parser state {:?}", stringify!($parse_funcs), stack.peek(), parser);
                    return true
                } else {
                    debug_parser!("Or statement rule {} didn't accept parser input {:?}", stringify!($parse_funcs), parser);
                }
            )+;
            debug_parser!("Or statement fails");
            false
        }
    }
}

/// Requires every rule in sequence; on success runs the handler to fold the
/// pushed expressions. Once the first rule has accepted (`entered`), a later
/// failure panics instead of backtracking.
#[macro_export]
macro_rules! and {
    [($($parse_funcs: expr),+) => $nandler_func: expr] => {
        |parser: &mut parser::Parser, stack: &mut stack::Stack| -> bool {
            let mut entered = false; // If we already accepted first token
            ($(if $parse_funcs(parser, stack) {
                entered = true;
                debug_parser!("And statement rule {} accepted expression {:?}. Parser state {:?}", stringify!($parse_funcs), stack.peek(), parser);
            } else {
                if entered {
                    panic!("TODO: Custom error message. Unexpected token: {:?}. Rule: {:?}", parser.peek(), stringify!($parse_funcs));
                }
                debug_parser!("And statement rule {} didn't accept parser input {:?}", stringify!($parse_funcs), parser);
                return false
            }), +);
            let _ = entered; // To suppress unused boolean
            $nandler_func(stack);
            debug_parser!("And handling function {:?} successfully handled expression and returned {:?}", stringify!($nandler_func), stack.peek());
            true
        }
    };
}

/// Macro has two modifications:
/// - optional!(rule) If we don't care about optional value (i.e. when optional expression handles it's head);
/// - optional!(rule, nil) If we want to know if optional rule pushed expression on stack (we handle set of expressions manually)
#[macro_export]
macro_rules! optional {
    // `nil` form: always succeeds, pushing Some(expr) or None on the stack.
    ($parse_func:expr, nil) => {
        |parser: &mut parser::Parser, stack: &mut stack::Stack| -> bool {
            if $parse_func(parser, stack) {
                let expression = Some(stack.pop_single());
                stack.push_optional(expression);
                debug_parser!("Optional rule {} parsed parser input {:?}", stringify!($parse_func), stack.peek());
            } else {
                stack.push_optional(None);
                debug_parser!("Optional rule {} didn't parse parser input {:?}", stringify!($parse_func), parser);
            }
            true
        }
    };
    // Plain form: always succeeds, leaves the stack as the rule left it.
    ($parse_func:expr) => {
        |parser: &mut parser::Parser, stack: &mut stack::Stack| -> bool {
            if $parse_func(parser, stack) {
                debug_parser!("Optional rule {} parsed parser input {:?}", stringify!($parse_func), stack.peek());
            } else {
                debug_parser!("Optional rule {} didn't parse parser input {:?}", stringify!($parse_func), parser);
            }
            true
        }
    }
}

/// Applies the rule zero or more times, collecting results into a VecDeque
/// pushed as a repetition; always succeeds.
#[macro_export]
macro_rules! repetition {
    ($parse_func:expr) => {
        |parser: &mut parser::Parser, stack: &mut stack::Stack| -> bool {
            let mut result = VecDeque::new();
            while $parse_func(parser, stack) {
                debug_parser!("Repeating rule {}", stringify!($parse_func));
                let single = stack.pop_single();
                result.push_back(single)
            }
            debug_parser!("Finished repetition {}", stringify!($parse_func));
            stack.push_repetition(result);
            true
        }
    }
}

/// Defines a named parse function wrapping a combinator expression.
#[macro_export]
macro_rules! rule {
    ($name: ident, $parse_func:expr) => {
        pub fn $name(parser: &mut parser::Parser, stack: &mut stack::Stack) -> bool {
            debug_parser!("Executing rule {}", stringify!($name));
            $parse_func(parser, stack)
        }
    };
}

/// Matches a single keyword token, consuming it and pushing a Terminal node.
#[macro_export]
macro_rules! terminal {
    ($keyword: expr) => {
        |parser: &mut parser::Parser, stack: &mut stack::Stack| -> bool {
            if let Some(token) = parser.peek().cloned() {
                match token.keyword() {
                    Some(keyword) => {
                        if keyword == $keyword {
                            parser.shift();
                            debug_parser!("Accepted keyword {:?}", keyword);
                            stack.push_single(Box::new(crate::ast::expressions::Terminal(keyword)));
                            true
                        } else {
                            false
                        }
                    }
                    _ => false
                }
            } else {
                false
            }
        }
    }
}
use nom::multispace;
use nom::{IResult, Err, ErrorKind, Needed};
use std::collections::{HashSet, VecDeque};
use std::str;

use column::Column;
use common::{binary_comparison_operator, column_identifier, integer_literal, string_literal,
             Literal, Operator};
use select::{SelectStatement, nested_selection};

/// A leaf of a condition tree: a column reference, a literal value, a `?`
/// placeholder, or a parenthesized nested `SELECT`.
#[derive(Clone, Debug, Hash, PartialEq, Serialize, Deserialize)]
pub enum ConditionBase {
    Field(Column),
    Literal(Literal),
    Placeholder,
    NestedSelect(Box<SelectStatement>),
}

/// A binary node of a condition tree: an operator applied to a left and a
/// right sub-expression.
#[derive(Clone, Debug, Hash, PartialEq, Serialize, Deserialize)]
pub struct ConditionTree {
    pub operator: Operator,
    pub left: Box<ConditionExpression>,
    pub right: Box<ConditionExpression>,
}

impl<'a> ConditionTree {
    /// Returns the set of all columns referenced anywhere in this condition
    /// tree, found by breadth-first traversal of comparison/logical nodes.
    /// Negation and non-field leaves are skipped.
    pub fn contained_columns(&'a self) -> HashSet<&'a Column> {
        let mut s = HashSet::new();
        // Work queue of subtrees still to visit.
        let mut q = VecDeque::<&'a ConditionTree>::new();
        q.push_back(self);
        while let Some(ref ct) = q.pop_front() {
            match *ct.left.as_ref() {
                ConditionExpression::Base(ConditionBase::Field(ref c)) => {
                    s.insert(c);
                }
                ConditionExpression::LogicalOp(ref ct) |
                ConditionExpression::ComparisonOp(ref ct) => q.push_back(ct),
                _ => (),
            }
            match *ct.right.as_ref() {
                ConditionExpression::Base(ConditionBase::Field(ref c)) => {
                    s.insert(c);
                }
                ConditionExpression::LogicalOp(ref ct) |
                ConditionExpression::ComparisonOp(ref ct) => q.push_back(ct),
                _ => (),
            }
        }
        s
    }
}

/// A parsed `WHERE`-style condition expression.
#[derive(Clone, Debug, Hash, PartialEq, Serialize, Deserialize)]
pub enum ConditionExpression {
    ComparisonOp(ConditionTree),
    LogicalOp(ConditionTree),
    NegationOp(Box<ConditionExpression>),
    Base(ConditionBase),
}

/// Parse a conditional expression into a condition tree structure
// Grammar (precedence encoded by the rule chain, loosest first):
//   condition_expr    := and_expr "or" condition_expr | and_expr
//   and_expr          := parenthetical "and" and_expr | parenthetical
//   parenthetical     := "(" condition_expr ")" | not_expr
//   not_expr          := "not" parenthetical | boolean_primary
//   boolean_primary   := predicate <cmp-op> predicate
// "or" binds more loosely than "and"; both are right-associated by recursion.
named!(pub condition_expr<&[u8], ConditionExpression>,
    alt_complete!(
        chain!(
            left: and_expr ~
            multispace? ~
            caseless_tag!("or") ~
            multispace ~
            right: condition_expr,
            || {
                ConditionExpression::LogicalOp(
                    ConditionTree {
                        operator: Operator::Or,
                        left: Box::new(left),
                        right: Box::new(right),
                    }
                )
            }
        )
    |   and_expr)
);

// "and" level: binds tighter than "or", looser than comparisons.
named!(pub and_expr<&[u8], ConditionExpression>,
    alt_complete!(
        chain!(
            left: parenthetical_expr ~
            multispace? ~
            caseless_tag!("and") ~
            multispace ~
            right: and_expr,
            || {
                ConditionExpression::LogicalOp(
                    ConditionTree {
                        operator: Operator::And,
                        left: Box::new(left),
                        right: Box::new(right),
                    }
                )
            }
        )
    |   parenthetical_expr)
);

// Parenthesized sub-expression; trailing whitespace after ")" is consumed.
named!(pub parenthetical_expr<&[u8], ConditionExpression>,
    alt_complete!(
        delimited!(tag!("("), condition_expr, chain!(tag!(")") ~ multispace?, ||{}))
    |   not_expr)
);

// Prefix "not"; applies to a single parenthetical expression.
named!(pub not_expr<&[u8], ConditionExpression>,
    alt_complete!(
        chain!(
            caseless_tag!("not") ~
            multispace ~
            right: parenthetical_expr,
            || {
                ConditionExpression::NegationOp(Box::new(right))
            }
        )
    |   boolean_primary)
);

// A single binary comparison: predicate <op> predicate.
named!(boolean_primary<&[u8], ConditionExpression>,
    chain!(
        left: predicate ~
        multispace? ~
        op: binary_comparison_operator ~
        multispace? ~
        right: predicate,
        || {
            ConditionExpression::ComparisonOp(
                ConditionTree {
                    operator: op,
                    left: Box::new(left),
                    right: Box::new(right),
                }
            )
        }
    )
);

// Atoms: "?" placeholder, integer/string literal, column reference, or a
// parenthesized nested SELECT. Order matters: literals must be tried before
// column identifiers so e.g. `42` is not read as a column name.
named!(predicate<&[u8], ConditionExpression>,
    alt_complete!(
        chain!(
            delimited!(opt!(multispace), tag!("?"), opt!(multispace)),
            || {
                ConditionExpression::Base(
                    ConditionBase::Placeholder
                )
            }
        )
    |   chain!(
            field: integer_literal,
            || {
                ConditionExpression::Base(ConditionBase::Literal(field))
            }
        )
    |   chain!(
            field: string_literal,
            || {
                ConditionExpression::Base(ConditionBase::Literal(field))
            }
        )
    |   chain!(
            field: delimited!(opt!(multispace), column_identifier, opt!(multispace)),
            || {
                ConditionExpression::Base(
                    ConditionBase::Field(field)
                )
            }
        )
    |   chain!(
            select: delimited!(tag!("("), nested_selection, tag!(")")),
            || {
                ConditionExpression::Base(
                    ConditionBase::NestedSelect(Box::new(select))
                )
            }
        )
    )
);

#[cfg(test)]
mod tests {
    use super::*;
    use column::Column;
    use common::{Literal, Operator, FieldExpression};

    // Helper: turn column names into FieldExpression values for SELECT fields.
    fn columns(cols: &[&str]) -> Vec<FieldExpression> {
        cols.iter()
            .map(|c| FieldExpression::Col(Column::from(*c)))
            .collect()
    }

    // Helper: build a single comparison node from two leaves.
    fn flat_condition_tree(
        op: Operator,
        l: ConditionBase,
        r: ConditionBase,
    ) -> ConditionExpression {
        ConditionExpression::ComparisonOp(ConditionTree {
            operator: op,
            left: Box::new(ConditionExpression::Base(l)),
            right: Box::new(ConditionExpression::Base(r)),
        })
    }

    #[test]
    fn ct_contained_columns() {
        use std::collections::HashSet;

        let cond = "a.foo = ? and b.bar = 42";
        let res = condition_expr(cond.as_bytes());
        let c1 = Column::from("a.foo");
        let c2 = Column::from("b.bar");
        let mut expected_cols = HashSet::new();
        expected_cols.insert(&c1);
        expected_cols.insert(&c2);
        match res.unwrap().1 {
            ConditionExpression::LogicalOp(ct) => {
                assert_eq!(ct.contained_columns(), expected_cols);
            }
            _ => panic!(),
        }
    }

    #[test]
    fn equality_placeholder() {
        let cond = "foo = ?";

        let res = condition_expr(cond.as_bytes());
        assert_eq!(
            res.unwrap().1,
            flat_condition_tree(
                Operator::Equal,
                ConditionBase::Field(Column::from("foo")),
                ConditionBase::Placeholder
            )
        );
    }

    #[test]
    fn equality_literals() {
        let cond1 = "foo = 42";
        let cond2 = "foo = \"hello\"";

        let res1 = condition_expr(cond1.as_bytes());
        assert_eq!(
            res1.unwrap().1,
            flat_condition_tree(
                Operator::Equal,
                ConditionBase::Field(Column::from("foo")),
                ConditionBase::Literal(Literal::Integer(42 as i64))
            )
        );

        let res2 = condition_expr(cond2.as_bytes());
        assert_eq!(
            res2.unwrap().1,
            flat_condition_tree(
                Operator::Equal,
                ConditionBase::Field(Column::from("foo")),
                ConditionBase::Literal(Literal::String(String::from("hello")))
            )
        );
    }

    #[test]
    fn inequality_literals() {
        let cond1 = "foo >= 42";
        let cond2 = "foo <= 5";

        let res1 = condition_expr(cond1.as_bytes());
        assert_eq!(
            res1.unwrap().1,
            flat_condition_tree(
                Operator::GreaterOrEqual,
                ConditionBase::Field(Column::from("foo")),
                ConditionBase::Literal(Literal::Integer(42 as i64))
            )
        );

        let res2 = condition_expr(cond2.as_bytes());
        assert_eq!(
            res2.unwrap().1,
            flat_condition_tree(
                Operator::LessOrEqual,
                ConditionBase::Field(Column::from("foo")),
                ConditionBase::Literal(Literal::Integer(5 as i64))
            )
        );
    }

    #[test]
    fn empty_string_literal() {
        let cond = "foo = ''";

        let res = condition_expr(cond.as_bytes());
        assert_eq!(
            res.unwrap().1,
            flat_condition_tree(
                Operator::Equal,
                ConditionBase::Field(Column::from("foo")),
                ConditionBase::Literal(Literal::String(String::from("")))
            )
        );
    }

    #[test]
    fn parenthesis() {
        // Parentheses override the default precedence (and > or).
        let cond = "(foo = ? or bar = 12) and foobar = 'a'";

        use ConditionExpression::*;
        use ConditionBase::*;
        use common::Literal;

        let a = ComparisonOp(ConditionTree {
            operator: Operator::Equal,
            left: Box::new(Base(Field("foo".into()))),
            right: Box::new(Base(Placeholder)),
        });

        let b = ComparisonOp(ConditionTree {
            operator: Operator::Equal,
            left: Box::new(Base(Field("bar".into()))),
            right: Box::new(Base(Literal(Literal::Integer(12.into())))),
        });

        let left = LogicalOp(ConditionTree {
            operator: Operator::Or,
            left: Box::new(a),
            right: Box::new(b),
        });

        let right = ComparisonOp(ConditionTree {
            operator: Operator::Equal,
            left: Box::new(Base(Field("foobar".into()))),
            right: Box::new(Base(Literal(Literal::String("a".into())))),
        });

        let complete = LogicalOp(ConditionTree {
            operator: Operator::And,
            left: Box::new(left),
            right: Box::new(right),
        });

        let res = condition_expr(cond.as_bytes());
        assert_eq!(res.unwrap().1, complete);
    }

    #[test]
    fn order_of_operations() {
        // Without parentheses, "and" binds tighter than "or".
        let cond = "foo = ? and bar = 12 or foobar = 'a'";

        use ConditionExpression::*;
        use ConditionBase::*;
        use common::Literal;

        let a = ComparisonOp(ConditionTree {
            operator: Operator::Equal,
            left: Box::new(Base(Field("foo".into()))),
            right: Box::new(Base(Placeholder)),
        });

        let b = ComparisonOp(ConditionTree {
            operator: Operator::Equal,
            left: Box::new(Base(Field("bar".into()))),
            right: Box::new(Base(Literal(Literal::Integer(12.into())))),
        });

        let left = LogicalOp(ConditionTree {
            operator: Operator::And,
            left: Box::new(a),
            right: Box::new(b),
        });

        let right = ComparisonOp(ConditionTree {
            operator: Operator::Equal,
            left: Box::new(Base(Field("foobar".into()))),
            right: Box::new(Base(Literal(Literal::String("a".into())))),
        });

        let complete = LogicalOp(ConditionTree {
            operator: Operator::Or,
            left: Box::new(left),
            right: Box::new(right),
        });

        let res = condition_expr(cond.as_bytes());
        assert_eq!(res.unwrap().1, complete);
    }

    #[test]
    fn negation() {
        // "not" applies only to the immediately following comparison.
        let cond = "not bar = 12 or foobar = 'a'";

        use ConditionExpression::*;
        use ConditionBase::*;
        use common::Literal::*;

        let left = NegationOp(Box::new(ComparisonOp(ConditionTree {
            operator: Operator::Equal,
            left: Box::new(Base(Field("bar".into()))),
            right: Box::new(Base(Literal(Integer(12.into())))),
        })));

        let right = ComparisonOp(ConditionTree {
            operator: Operator::Equal,
            left: Box::new(Base(Field("foobar".into()))),
            right: Box::new(Base(Literal(String("a".into())))),
        });

        let complete = LogicalOp(ConditionTree {
            operator: Operator::Or,
            left: Box::new(left),
            right: Box::new(right),
        });

        let res = condition_expr(cond.as_bytes());
        assert_eq!(res.unwrap().1, complete);
    }

    #[test]
    fn nested_select() {
        use select::SelectStatement;
        use table::Table;
        use ConditionBase::*;
        use std::default::Default;

        let cond = "bar in (select col from foo)";

        let res = condition_expr(cond.as_bytes());

        let nested_select = Box::new(SelectStatement {
            tables: vec![Table::from("foo")],
            fields: columns(&["col"]),
            ..Default::default()
        });

        let expected = flat_condition_tree(
            Operator::In,
            Field("bar".into()),
            NestedSelect(nested_select),
        );

        assert_eq!(res.unwrap().1, expected);
    }

    #[test]
    fn and_with_nested_select() {
        use select::SelectStatement;
        use table::Table;
        use ConditionBase::*;
        use std::default::Default;

        let cond = "paperId in (select paperId from PaperConflict) and size > 0";

        let res = condition_expr(cond.as_bytes());

        let nested_select = Box::new(SelectStatement {
            tables: vec![Table::from("PaperConflict")],
            fields: columns(&["paperId"]),
            ..Default::default()
        });

        let left = flat_condition_tree(
            Operator::In,
            Field("paperId".into()),
            NestedSelect(nested_select)
        );

        let right = flat_condition_tree(
            Operator::Greater,
            Field("size".into()),
            Literal(0.into()),
        );

        let expected = ConditionExpression::LogicalOp(ConditionTree {
            left: Box::new(left),
            right: Box::new(right),
            operator: Operator::And
        });

        assert_eq!(res.unwrap().1, expected);
    }
}
// Adapted from https://github.com/actix/examples/tree/master/websocket
use std::time::Duration;

use crate::server::{self, ServerConfig};
use crate::utils::{self, CliEnv};
use actix::io::SinkWrite;
use actix::*;
use actix_codec::{AsyncRead, AsyncWrite, Framed};
use awc::{
    error::WsProtocolError,
    ws::{Codec, Frame, Message},
    Client,
};
use futures::{
    lazy,
    stream::{SplitSink, Stream},
    Future,
};
use serde::{Deserialize, Serialize};

// Todo: (Seriously) consider another protocol, possibly ssh
// Although multi-site setup might gain advantages of something else,
// some multi-user setup might be a solid solution

/// Used to pass a list of commands
/// This is serialized and passed to command
/// actor.
#[derive(Serialize)]
pub struct CmdMsg {
    pub cmd: String,
    pub args: Vec<String>,
}

/// Client-to-server websocket message: either a command to run, or a
/// "done" signal requesting shutdown.
#[derive(Serialize, Message)]
pub enum WsMsg {
    CmdMsg(CmdMsg),
    DoneMsg,
}

use std::sync::mpsc::{Receiver, SyncSender};

/// Synchronous facade over the websocket actor system. Commands are sent
/// through `send_cmd` and results come back over `recv_from_cmd`; both are
/// rendezvous (zero-capacity) channels, so each send blocks until received.
pub struct CliConnection {
    send_cmd: SyncSender<WsMsg>,
    recv_from_cmd: Receiver<FromCmdMsg>,
}

// Attempt graceful shutdown
impl Drop for CliConnection {
    fn drop(&mut self) {
        // Ask the actor side to finish, then block until it confirms with
        // AllDone (or report whatever happened instead).
        match self.send_cmd.send(WsMsg::DoneMsg) {
            Ok(_) => match self.recv_from_cmd.recv() {
                Ok(from_cmd) => match from_cmd {
                    FromCmdMsg::AllDone => {
                        println!("Got AllDone in drop");
                    }
                    _ => println!("Got other than AllDone in drop: {:?}", from_cmd),
                },
                Err(e) => println!("Error in drop: {:?}", e),
            },
            Err(e) => println!("Send failed in drop: {:?}", e),
        }
    }
}

// todo: The whole protocol is complex and brittle. Has helped
// learning, but a simpler solution would be nice
impl CliConnection {
    /// Establishes the websocket connection to the cli server, optionally
    /// through a single-request tunnel when a remote `ServerConfig` is given,
    /// and spawns a background thread running the actix system.
    ///
    /// # Panics
    ///
    /// Panics if the tunnel cannot be created or if the local cli server
    /// cannot be reached.
    pub fn new(env: &CliEnv, server: Option<&ServerConfig>) -> Self {
        // Rendezvous channels: capacity 0 couples caller and actor in lockstep.
        let (send_cmd, recv_cmd) = std::sync::mpsc::sync_channel(0);
        let (send_from_cmd, recv_from_cmd) = std::sync::mpsc::sync_channel(0);
        // todo: Detect/start docker
        // Also would be better to loop on ws() I think
        let (ws_url, tunnel_handle) = match server {
            Some(server_config) => {
                // Remote server: talk to a local tunnel endpoint instead.
                let ws_url = "127.0.0.1:5712".to_string();
                // Todo: Wait for, currently tunnel will only
                // process one request
                let handle = match server::tunnel_single_thread(&env, server_config, 5712, 5711) {
                    Ok(handle) => handle,
                    Err(e) => panic!("Tunnel failed: {:?}", e),
                };
                (ws_url, Some(handle))
            }
            None => {
                // Local server: wait until the port accepts connections.
                let ws_url = "127.0.0.1:5711".to_string();
                if !utils::wait_for(&ws_url) {
                    panic!("Failed to connect to cli server");
                }
                (ws_url, None)
            }
        };
        // The actix System must own its own thread; the caller thread stays
        // synchronous and communicates over the mpsc channels above.
        let _handle = std::thread::spawn(move || {
            ::std::env::set_var("RUST_LOG", "actix_web=info");
            match env_logger::try_init() {
                Ok(_) => {
                    println!("Logger initialized");
                }
                Err(e) => {
                    // try_init fails when a logger is already installed; not fatal.
                    println!("Logger might be initialized: {:?}", e);
                }
            }
            let sys = actix::System::new("wp-cli-client");
            Arbiter::spawn(lazy(move || {
                Client::new()
                    .ws(format!("http://{}/ws/", ws_url))
                    .connect()
                    .map_err(|e| {
                        println!("Ws connect error: {}", e);
                        ()
                    })
                    .map(move |(_response, framed)| {
                        let (sink, stream) = framed.split();
                        let addr = ChatClient::create(|ctx| {
                            ChatClient::add_stream(stream, ctx);
                            ChatClient {
                                sink: SinkWrite::new(sink, ctx),
                                send_from_cmd,
                            }
                        });
                        // Thread looping to wait for messages
                        std::thread::spawn(move || {
                            loop {
                                match recv_cmd.recv() {
                                    Ok(ws_msg) => {
                                        println!("Received mpsc");
                                        match &ws_msg {
                                            WsMsg::CmdMsg(_cmd_msg) => addr.do_send(ws_msg),
                                            WsMsg::DoneMsg => {
                                                addr.do_send(ws_msg);
                                                // Break this loop, system should
                                                // go on until commands are processed
                                                // and done message processed
                                                break;
                                            }
                                        }
                                    }
                                    Err(e) => eprintln!("Receive error: {:?}", e),
                                }
                            }
                        });
                    })
            }));
            // Blocks until the actor stops the System (on disconnect/AllDone).
            let _ = sys.run();
            match tunnel_handle {
                Some(handle) => match handle.join() {
                    Ok(res) => {
                        println!("Joined tunnel handle: {:?}", res);
                    }
                    Err(e) => {
                        eprintln!("Error joining tunnel handle: {:?}", e);
                    }
                },
                None => (),
            }
            println!("System done");
        });
        CliConnection {
            send_cmd,
            recv_from_cmd,
        }
    }

    /// Convenience wrapper: builds a `CmdMsg` from anything string-like and
    /// delegates to [`cmd_msg`].
    pub fn cmd<S, S2>(&self, cmd: S, args: Vec<S2>) -> Result<String, String>
    where
        S: Into<String>,
        S2: Into<String>,
    {
        self.cmd_msg(CmdMsg {
            cmd: cmd.into(),
            args: args.into_iter().map(|i| i.into()).collect(),
        })
    }

    /// Sends one command over the websocket and blocks until `CmdDone`,
    /// returning all received output lines joined by newlines.
    pub fn cmd_msg(&self, cmd: CmdMsg) -> Result<String, String> {
        match self.send_cmd.send(WsMsg::CmdMsg(cmd)) {
            Ok(_) => (),
            Err(e) => return Err(format!("Send error: {:?}", e)),
        }
        let mut out_buffer = String::new();
        loop {
            match self.recv_from_cmd.recv() {
                Ok(from_cmd) => {
                    // Handle FromCmdMsg
                    match from_cmd {
                        FromCmdMsg::Info(info) => println!("Info: {}", info),
                        FromCmdMsg::Line(line) => {
                            // I think we need stderr in here too (different msg)
                            // as this doesn't capture all apparently
                            println!("{}", line);
                            out_buffer.push('\n');
                            out_buffer.push_str(&line);
                        }
                        FromCmdMsg::CmdDone => {
                            println!("Command done");
                            break;
                        }
                        FromCmdMsg::AllDone => (),
                    }
                }
                Err(e) => eprintln!("Error receiving from_cmd: {:?}", e),
            }
        }
        Ok(out_buffer)
    }
}

/// Message mirroring those coming from command actor
/// Line is output,
/// Info is additional info around command execution,
/// in our case, waiting for services to come online
/// When a process is finished, CmdDone is sent
#[derive(Deserialize, Debug)]
pub enum FromCmdMsg {
    Line(String),
    CmdDone,
    Info(String),
    AllDone,
}

/// Websocket actor: writes outgoing frames through `sink` and forwards
/// decoded server messages to the synchronous side via `send_from_cmd`.
struct ChatClient<T: AsyncRead + AsyncWrite> {
    sink: SinkWrite<SplitSink<Framed<T, Codec>>>,
    send_from_cmd: SyncSender<FromCmdMsg>,
}

impl<T: 'static> Actor for ChatClient<T>
where
    T: AsyncRead + AsyncWrite,
{
    type Context = Context<Self>;

    fn started(&mut self, ctx: &mut Context<Self>) {
        // start heartbeats otherwise server will disconnect after 10 seconds
        self.hb(ctx)
    }

    fn stopped(&mut self, _: &mut Context<Self>) {
        println!("Disconnected");
        // Stop application on disconnect
        System::current().stop();
    }
}

impl<T: 'static> ChatClient<T>
where
    T: AsyncRead + AsyncWrite,
{
    // Heartbeats might be less relevant for our use
    // case. I'm not sure we want the process to
    // terminate without heartbeat.
    // Interrupts on the other hand would be nice
    // todo: Is heartbeats part of the ws protocol, though?
    fn hb(&self, ctx: &mut Context<Self>) {
        // Re-schedules itself every second to keep the connection alive.
        ctx.run_later(Duration::new(1, 0), |act, ctx| {
            act.sink.write(Message::Ping(String::new())).unwrap();
            act.hb(ctx);
            // client should also check for a timeout here, similar to the
            // server code
        });
    }
}

/// Handle command and stop messages
impl<T: 'static> Handler<WsMsg> for ChatClient<T>
where
    T: AsyncRead + AsyncWrite,
{
    type Result = ();

    // Serialize and write to socket sink
    fn handle(&mut self, msg: WsMsg, _ctx: &mut Context<Self>) {
        println!("In handle");
        match serde_json::to_string(&msg) {
            Ok(json) => {
                println!("Ready to write: {}", json);
                match self.sink.write(Message::Text(json)) {
                    Ok(ok) => {
                        // Possible AsyncSink::NotReady, AsyncSink::Ready
                        println!("Wrote: {:?}", ok);
                    }
                    Err(e) => {
                        println!("Error writing: {:?}", e);
                    }
                }
            }
            Err(e) => {
                println!("Could not serialize: {:?}", e);
            }
        }
    }
}

/// Handle server websocket messages
impl<T: 'static> StreamHandler<Frame, WsProtocolError> for ChatClient<T>
where
    T: AsyncRead + AsyncWrite,
{
    fn handle(&mut self, msg: Frame, ctx: &mut Context<Self>) {
        match msg {
            Frame::Text(txt) => {
                // Attempt to extract FromCmdMsg
                let from_cmd = match txt {
                    Some(bytes) => match serde_json::from_slice::<FromCmdMsg>(&bytes) {
                        Ok(from_cmd) => from_cmd,
                        Err(e) => {
                            println!("Deserialize error: {:?}", e);
                            return;
                        }
                    },
                    None => {
                        println!("Received none");
                        return;
                    }
                };
                let stop_ctx = match &from_cmd {
                    FromCmdMsg::AllDone => {
                        // All commands in list processed, shutting down
                        println!("Got AllDone, shuttings down system and closing connection");
                        true
                    }
                    _ => false,
                };
                // Send message to mpsc channel
                match self.send_from_cmd.send(from_cmd) {
                    Ok(_) => (),
                    Err(e) => eprintln!("Send error, {:?}", e),
                }
                if stop_ctx {
                    ctx.stop();
                }
            }
            _ => (),
        }
    }

    fn started(&mut self, _ctx: &mut Context<Self>) {
        println!("Connected");
    }

    fn finished(&mut self, ctx: &mut Context<Self>) {
        println!("Server disconnected");
        ctx.stop()
    }
}

impl<T: 'static> actix::io::WriteHandler<WsProtocolError> for ChatClient<T> where
    T: AsyncRead + AsyncWrite
{
}
use std::sync::Arc;

use bonsaidb_core::networking::{Payload, Request, Response};
use fabruic::{self, Certificate, Endpoint};
use flume::Receiver;
use futures::StreamExt;
use serde::{Deserialize, Serialize};
use url::Url;

use super::{CustomApiCallback, PendingRequest};
#[cfg(feature = "pubsub")]
use crate::client::SubscriberMap;
use crate::{client::OutstandingRequestMapHandle, Error};

/// This function will establish a connection and try to keep it active. If an
/// error occurs, any queries that come in while reconnecting will have the
/// error replayed to them.
pub async fn reconnecting_client_loop<
    R: Send + Sync + Serialize + for<'de> Deserialize<'de> + 'static,
    O: Send + Sync + Serialize + for<'de> Deserialize<'de> + 'static,
>(
    mut url: Url,
    certificate: Option<Certificate>,
    request_receiver: Receiver<PendingRequest<R, O>>,
    custom_api_callback: Option<Arc<dyn CustomApiCallback<O>>>,
    #[cfg(feature = "pubsub")] subscribers: SubscriberMap,
) -> Result<(), Error> {
    // Default port for the bonsaidb scheme when none was given.
    if url.port().is_none() && url.scheme() == "bonsaidb" {
        let _ = url.set_port(Some(5645));
    }

    // Lazily (re)connect: a connection is only attempted once a request
    // arrives, and the failed request gets the connection error replayed.
    while let Ok(request) = request_receiver.recv_async().await {
        if let Err((failed_request, err)) = connect_and_process(
            &url,
            certificate.as_ref(),
            request,
            &request_receiver,
            custom_api_callback.clone(),
            #[cfg(feature = "pubsub")]
            &subscribers,
        )
        .await
        {
            if let Some(failed_request) = failed_request {
                // Responder may already be gone; dropping the send result is fine.
                drop(failed_request.responder.send(Err(err)));
            }
            continue;
        }
    }

    Ok(())
}

// Establishes one connection, sends `initial_request`, then pumps requests and
// responses concurrently until either side disconnects. On failure before the
// initial request was handed off, that request is returned so its error can be
// replayed to the caller.
async fn connect_and_process<
    R: Send + Sync + Serialize + for<'de> Deserialize<'de> + 'static,
    O: Send + Sync + Serialize + for<'de> Deserialize<'de> + 'static,
>(
    url: &Url,
    certificate: Option<&Certificate>,
    initial_request: PendingRequest<R, O>,
    request_receiver: &Receiver<PendingRequest<R, O>>,
    custom_api_callback: Option<Arc<dyn CustomApiCallback<O>>>,
    #[cfg(feature = "pubsub")] subscribers: &SubscriberMap,
) -> Result<(), (Option<PendingRequest<R, O>>, Error)> {
    let (_connection, payload_sender, payload_receiver) = match connect(url, certificate).await {
        Ok(result) => result,
        Err(err) => return Err((Some(initial_request), err)),
    };

    let outstanding_requests = OutstandingRequestMapHandle::default();
    // Response-processing runs as its own task so sends and receives overlap.
    let request_processor = tokio::spawn(process(
        outstanding_requests.clone(),
        payload_receiver,
        custom_api_callback,
        #[cfg(feature = "pubsub")]
        subscribers.clone(),
    ));

    if let Err(err) = payload_sender.send(&initial_request.request) {
        return Err((Some(initial_request), Error::from(err)));
    }

    {
        // Register the in-flight request so its response can be routed back.
        let mut outstanding_requests = outstanding_requests.lock().await;
        outstanding_requests.insert(
            initial_request
                .request
                .id
                .expect("all requests require ids"),
            initial_request,
        );
    }

    // Both halves run until one errors/disconnects; try_join propagates it.
    futures::try_join!(
        process_requests(outstanding_requests, request_receiver, payload_sender),
        async { request_processor.await.map_err(|_| Error::Disconnected)? }
    )
    .map_err(|err| (None, err))?;

    Ok(())
}

// Forwards queued client requests onto the wire, recording each under its id
// so the response task can match replies.
async fn process_requests<
    R: Send + Sync + Serialize + for<'de> Deserialize<'de> + 'static,
    O: Send + Sync + Serialize + for<'de> Deserialize<'de> + 'static,
>(
    outstanding_requests: OutstandingRequestMapHandle<R, O>,
    request_receiver: &Receiver<PendingRequest<R, O>>,
    payload_sender: fabruic::Sender<Payload<Request<R>>>,
) -> Result<(), Error> {
    while let Ok(client_request) = request_receiver.recv_async().await {
        let mut outstanding_requests = outstanding_requests.lock().await;
        payload_sender.send(&client_request.request)?;
        outstanding_requests.insert(
            client_request.request.id.expect("all requests require ids"),
            client_request,
        );
    }

    // Return an error to make sure try_join returns.
    Err(Error::Disconnected)
}

/// Receives payloads from the server and dispatches each response to its
/// matching outstanding request (and to the custom-api/pubsub hooks). Returns
/// `Err(Disconnected)` when the stream ends so `try_join!` unwinds the peer task.
pub async fn process<R: Send + Sync + 'static, O: Send + Sync + 'static>(
    outstanding_requests: OutstandingRequestMapHandle<R, O>,
    mut payload_receiver: fabruic::Receiver<Payload<Response<O>>>,
    custom_api_callback: Option<Arc<dyn CustomApiCallback<O>>>,
    #[cfg(feature = "pubsub")] subscribers: SubscriberMap,
) -> Result<(), Error> {
    while let Some(payload) = payload_receiver.next().await {
        let payload = payload?;
        super::process_response_payload(
            payload,
            &outstanding_requests,
            custom_api_callback.as_ref(),
            #[cfg(feature = "pubsub")]
            &subscribers,
        )
        .await;
    }

    Err(Error::Disconnected)
}

// Opens a QUIC connection (certificate-pinned when a certificate is supplied)
// and a single bidirectional payload stream on it.
async fn connect<
    R: Send + Sync + Serialize + for<'de> Deserialize<'de> + 'static,
    O: Send + Sync + Serialize + for<'de> Deserialize<'de> + 'static,
>(
    url: &Url,
    certificate: Option<&Certificate>,
) -> Result<
    (
        fabruic::Connection<()>,
        fabruic::Sender<Payload<Request<R>>>,
        fabruic::Receiver<Payload<Response<O>>>,
    ),
    Error,
> {
    let endpoint = Endpoint::new_client()
        .map_err(|err| Error::Core(bonsaidb_core::Error::Transport(err.to_string())))?;
    let connecting = if let Some(certificate) = certificate {
        endpoint.connect_pinned(url, certificate, None).await?
    } else {
        endpoint.connect(url).await?
    };

    let connection = connecting.accept::<()>().await?;
    let (sender, receiver) = connection.open_stream(&()).await?;

    Ok((connection, sender, receiver))
}
use std::error::Error;

mod email;
mod print;

pub use print::PrintSender;
pub use email::EmailSender;

/// Common interface for message delivery backends (see [`PrintSender`] and
/// [`EmailSender`]).
pub trait Sender {
    /// Prepare the backend for sending (may mutate internal state).
    fn init(&mut self);
    /// Deliver `msg`, returning any backend-specific error boxed as `dyn Error`.
    fn send(&self, msg: &str) -> Result<(), Box<dyn Error>>;
    /// Short, static identifier for this backend.
    fn name(&self) -> &'static str;
}
// One submodule per puzzle day.
pub mod d01;
pub mod d02;
pub mod d03;
pub mod d04;
pub mod d05;
use std::{convert::TryInto, str::FromStr}; use crate::headers::{Header, HeaderName, HeaderValue, Headers, CONTENT_TYPE}; use crate::mime::Mime; /// Indicate the media type of a resource's content. /// /// [MDN Documentation](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Type) /// /// # Specifications /// /// - [RFC 7231, section 3.1.1.5: Content-Type](https://tools.ietf.org/html/rfc7231#section-3.1.1.5) /// - [RFC 7233, section 4.1: Content-Type in multipart](https://tools.ietf.org/html/rfc7233#section-4.1) /// /// # Examples /// /// ``` /// # fn main() -> http_types::Result<()> { /// # /// use http_types::content::ContentType; /// use http_types::{headers::Header, Response}; /// use http_types::mime::Mime; /// use std::str::FromStr; /// /// let content_type = ContentType::new("text/*"); /// /// let mut res = Response::new(200); /// res.insert_header(&content_type, &content_type); /// /// let content_type = ContentType::from_headers(res)?.unwrap(); /// assert_eq!(content_type.header_value(), format!("{}", Mime::from_str("text/*")?).as_str()); /// # /// # Ok(()) } /// ``` #[derive(Debug)] pub struct ContentType { media_type: Mime, } impl ContentType { /// Create a new instance. pub fn new<U>(media_type: U) -> Self where U: TryInto<Mime>, U::Error: std::fmt::Debug, { Self { media_type: media_type .try_into() .expect("could not convert into a valid Mime type"), } } /// Create a new instance from headers. /// /// `Content-Type` headers can provide both full and partial URLs. In /// order to always return fully qualified URLs, a base URL must be passed to /// reference the current environment. In HTTP/1.1 and above this value can /// always be determined from the request. pub fn from_headers(headers: impl AsRef<Headers>) -> crate::Result<Option<Self>> { let headers = match headers.as_ref().get(CONTENT_TYPE) { Some(headers) => headers, None => return Ok(None), }; // If we successfully parsed the header then there's always at least one // entry. 
We want the last entry. let ctation = headers.iter().last().unwrap(); let media_type = Mime::from_str(ctation.as_str()).map_err(|mut e| { e.set_status(400); e })?; Ok(Some(Self { media_type })) } } impl Header for ContentType { fn header_name(&self) -> HeaderName { CONTENT_TYPE } fn header_value(&self) -> HeaderValue { let output = format!("{}", self.media_type); // SAFETY: the internal string is validated to be ASCII. unsafe { HeaderValue::from_bytes_unchecked(output.into()) } } } impl PartialEq<Mime> for ContentType { fn eq(&self, other: &Mime) -> bool { &self.media_type == other } } impl PartialEq<&Mime> for ContentType { fn eq(&self, other: &&Mime) -> bool { &&self.media_type == other } } impl From<Mime> for ContentType { fn from(media_type: Mime) -> Self { Self { media_type } } } #[cfg(test)] mod test { use super::*; use crate::headers::Headers; #[test] fn smoke() -> crate::Result<()> { let ct = ContentType::new(Mime::from_str("text/*")?); let mut headers = Headers::new(); ct.apply_header(&mut headers); let ct = ContentType::from_headers(headers)?.unwrap(); assert_eq!( ct.header_value(), format!("{}", Mime::from_str("text/*")?).as_str() ); Ok(()) } #[test] fn bad_request_on_parse_error() { let mut headers = Headers::new(); headers.insert(CONTENT_TYPE, "<nori ate the tag. yum.>"); let err = ContentType::from_headers(headers).unwrap_err(); assert_eq!(err.status(), 400); } }
// Submodule declarations, each re-exported wholesale so callers can use this
// module as a flat namespace.
pub mod annotations;
pub mod file;
pub mod filters;
pub mod graph_details;
pub mod graph_picker;
pub mod overlays;
pub mod path_position;
pub mod paths;
pub mod settings;
pub mod util;

pub use annotations::*;
pub use file::*;
pub use filters::*;
pub use graph_details::*;
pub use graph_picker::*;
pub use overlays::*;
pub use path_position::*;
pub use paths::*;
pub use settings::*;
pub use util::*;
use evmap;
use itertools::Itertools;
use itertools::FoldWhile::{Continue, Done};
use std::collections::{HashMap, HashSet};
use std::error;
use std::io;
use std::io::BufRead;
use crate::day;

// Convenience alias: helpers box their error type so any error source can
// be propagated with `?`.
pub type BoxResult<T> = Result<T, Box<dyn error::Error>>;

// Advent of Code 2019, day 20: shortest path through a "donut" maze whose
// two-letter labels act as teleporting portals.
pub struct Day20 {}

impl day::Day for Day20 {
    fn tag(&self) -> &str { "20" }

    fn part1(&self, input: &dyn Fn() -> Box<dyn io::Read>) {
        println!("{:?}", self.part1_impl(&mut *input()));
    }

    fn part2(&self, input: &dyn Fn() -> Box<dyn io::Read>) {
        println!("{:?}", self.part2_impl(&mut *input()));
    }
}

impl Day20 {
    // Part 1: portals connect flat (same-level) positions; BFS from AA to ZZ
    // counting steps, where stepping onto a portal tile teleports to its twin.
    fn part1_impl(self: &Self, input: &mut dyn io::Read) -> BoxResult<usize> {
        let reader = io::BufReader::new(input);
        // evmap read/write handle pairs: `grid` maps (x, y) -> char of the raw
        // input; `portals` maps a two-letter label -> interior coordinate.
        let (grid_r, mut grid_w) = evmap::new();
        let (portals_r, mut portals_w) = evmap::new();
        // w: interior width; h: interior height; b: breadth of the donut wall.
        // All three are discovered incrementally while scanning the input.
        let mut w = 0;
        let mut h = 0;
        let mut b = 0;
        reader.lines().enumerate().for_each(|(y, l)| {
            let l = l.unwrap();
            // width
            if y == 0 { w = l.len() - 4; }
            l.chars().enumerate().for_each(|(x, c)| {
                // eprintln!("x {} y {} w {} h {} b {}", x, y, w, h, b);
                let uc = c.is_ascii_uppercase();
                // Each branch below recognizes the *second* letter of a portal
                // label on one of the eight label strips (4 outer, 4 inner),
                // reads the first letter back out of the grid, and records the
                // interior coordinate the portal attaches to.
                // upper outer portal
                if y == 1 && uc {
                    let mut s = String::new();
                    grid_r.get_and(&(x, 0), |c| s.push(c[0]));
                    s.push(c);
                    portals_w.insert(s, (x - 2, y - 1));
                    portals_w.refresh();
                }
                // left outer portal
                else if x == 1 && uc {
                    let mut s = String::new();
                    grid_r.get_and(&(0, y), |c| s.push(c[0]));
                    s.push(c);
                    portals_w.insert(s, (x - 1, y - 2));
                    portals_w.refresh();
                }
                // right outer portal
                else if x == 3 + w && uc {
                    let mut s = String::new();
                    grid_r.get_and(&(x - 1, y), |c| s.push(c[0]));
                    s.push(c);
                    portals_w.insert(s, (x - 4, y - 2));
                    portals_w.refresh();
                }
                // breadth
                else if b == 0 && y > 1 && x > 1 && x < 2 + w && c == ' ' { b = y - 2; }
                // left inner portal
                else if b > 0 && y > 1 + b && x == 3 + b && uc {
                    let mut s = String::new();
                    grid_r.get_and(&(x - 1, y), |c| s.push(c[0]));
                    s.push(c);
                    portals_w.insert(s, (x - 4, y - 2));
                    portals_w.refresh();
                }
                // right inner portal
                else if b > 0 && y > 3 + b && y < w - b && x == 1 + w - b && uc {
                    let mut s = String::new();
                    grid_r.get_and(&(x - 1, y), |c| s.push(c[0]));
                    s.push(c);
                    portals_w.insert(s, (x - 1, y - 2));
                    portals_w.refresh();
                }
                // upper inner portal
                else if b > 0 && y == 3 + b && x > 1 && uc {
                    let mut s = String::new();
                    grid_r.get_and(&(x, y - 1), |c| s.push(c[0]));
                    s.push(c);
                    portals_w.insert(s, (x - 2, y - 4));
                    portals_w.refresh();
                }
                // height
                else if h == 0 && b > 0 && y > 3 + b && x > 4 + b && x < w - b && uc { h = y + b; }
                // lower inner portal
                else if h > 0 && y == 1 + h - b && x > 1 && x < 2 + w && uc {
                    let mut s = String::new();
                    grid_r.get_and(&(x, y - 1), |c| s.push(c[0]));
                    s.push(c);
                    portals_w.insert(s, (x - 2, y - 1));
                    portals_w.refresh();
                }
                // lower outer portal
                else if h > 0 && y == 3 + h && uc {
                    let mut s = String::new();
                    grid_r.get_and(&(x, y - 1), |c| s.push(c[0]));
                    s.push(c);
                    portals_w.insert(s, (x - 2, y - 4));
                    portals_w.refresh();
                }
                grid_w.update((x, y), c);
                grid_w.refresh();
            })
        });
        // eprintln!("w {} h {} b {}", w, h, b);
        // Pair up the two endpoints of every label so stepping onto one end
        // teleports to the other (AA and ZZ have one endpoint and are skipped).
        let mut jump = HashMap::new();
        portals_r.for_each(|_x, y| {
            // eprintln!("portals {:?} {:?}", _x, y);
            if y.len() == 2 {
                jump.insert(y[0], y[1]);
                jump.insert(y[1], y[0]);
            }
        });
        let entry = portals_r.get_and("AA", |p| p[0]).unwrap();
        let exit = portals_r.get_and("ZZ", |p| p[0]).unwrap();
        // eprintln!("entry {:?} exit {:?}", entry, exit);
        let dirs = vec![(0, -1), (1, 0), (0, 1), (-1, 0)];
        // Successor function: the orthogonal '.' neighbours not already on the
        // path, plus the portal jump (if any) from the current tile. Interior
        // coordinates are offset by (2, 2) from raw grid coordinates.
        let next = |(x, y): (usize, usize), path: &Vec<(usize, usize)>| {
            let mut next = dirs.iter().flat_map(|&(dx, dy)| {
                let nx = x as i32 + dx;
                let ny = y as i32 + dy;
                if nx < 0 || ny < 0 { None } else {
                    let (nx, ny) = (nx as usize, ny as usize);
                    if path.contains(&(nx, ny)) { None } else {
                        grid_r.get_and(
                            &(nx + 2, ny + 2),
                            |c| if c[0] == '.' { Some((nx, ny)) } else { None })
                            .unwrap_or(None)
                    }
                }
            }).collect::<Vec<_>>();
            if let Some(&p) = jump.get(&(x, y)) { next.push(p); };
            next
        };
        // Prune dead ends: repeatedly wall off any open tile with exactly one
        // successor (except the entry/exit) until a fixed point is reached;
        // this shrinks the search space for the BFS below.
        loop {
            let mut plugged = false;
            grid_r.for_each(|(x, y), c| {
                if c[0] == '.' {
                    let p = (x - 2, y - 2);
                    if next(p, &vec![]).len() == 1 && p != entry && p != exit {
                        // eprintln!("plugging {:?}", p);
                        grid_w.update((*x, *y), '#');
                        plugged = true;
                    }
                }
            });
            if !plugged { break; }
            grid_w.refresh();
        };
        // for y in 0..h {
        //     let mut l = String::new();
        //     for x in 0..w {
        //         grid_r.get_and(&(x + 2, y + 2), |c| l.push(c[0]));
        //     }
        //     eprintln!("{}", l);
        // }
        // Breadth-first search: each fold_while iteration advances the whole
        // frontier by one step; `i` counts steps taken so far.
        let mut seen = HashSet::new();
        let r = (1..).fold_while((vec![(entry, vec![])], 0), |(states, i), _| {
            // eprintln!("i {} states len {}", i, states.len());
            // eprintln!("states {:?}", states);
            states.iter().for_each(|&(p, _)| { seen.insert(p); });
            let new_states: Vec<(_, Vec<_>)> = states.iter().flat_map(|(pos, path)| {
                let candidates: Vec<_> =
                    next(*pos, path).into_iter().filter(|p| !seen.contains(p)).collect();
                // eprintln!("candidates {:?}", candidates);
                candidates.into_iter().map(move |p| {
                    let mut path = path.clone();
                    path.push(*pos);
                    (p, path.clone())
                })
            }).collect();
            if new_states.is_empty() || new_states.iter().any(|&(p, _)| p == exit) {
                Done((new_states, i + 1))
            } else {
                Continue((new_states, i + 1))
            }
        });
        let r = r.into_inner().1;
        // eprintln!("{:?}", r);
        Ok(r)
    }

    // Part 2: same maze, but portals are recursive — inner portals descend one
    // level (z + 1), outer portals ascend (z - 1, only when z > 0). States and
    // portal records therefore carry a third component: the recursion level /
    // an "is outer" flag respectively.
    fn part2_impl(self: &Self, input: &mut dyn io::Read) -> BoxResult<usize> {
        let reader = io::BufReader::new(input);
        let (grid_r, mut grid_w) = evmap::new();
        let (portals_r, mut portals_w) = evmap::new();
        let mut w = 0;
        let mut h = 0;
        let mut b = 0;
        reader.lines().enumerate().for_each(|(y, l)| {
            let l = l.unwrap();
            // width
            if y == 0 { w = l.len() - 4; }
            l.chars().enumerate().for_each(|(x, c)| {
                // eprintln!("x {} y {} w {} h {} b {}", x, y, w, h, b);
                let uc = c.is_ascii_uppercase();
                // Same label scan as part 1, but each portal record carries a
                // bool: true = outer-edge portal, false = inner-edge portal.
                // upper outer portal
                if y == 1 && uc {
                    let mut s = String::new();
                    grid_r.get_and(&(x, 0), |c| s.push(c[0]));
                    s.push(c);
                    portals_w.insert(s, (x - 2, y - 1, true));
                    portals_w.refresh();
                }
                // left outer portal
                else if x == 1 && uc {
                    let mut s = String::new();
                    grid_r.get_and(&(0, y), |c| s.push(c[0]));
                    s.push(c);
                    portals_w.insert(s, (x - 1, y - 2, true));
                    portals_w.refresh();
                }
                // right outer portal
                else if x == 3 + w && uc {
                    let mut s = String::new();
                    grid_r.get_and(&(x - 1, y), |c| s.push(c[0]));
                    s.push(c);
                    portals_w.insert(s, (x - 4, y - 2, true));
                    portals_w.refresh();
                }
                // breadth
                else if b == 0 && y > 1 && x > 1 && x < 2 + w && c == ' ' { b = y - 2; }
                // left inner portal
                else if b > 0 && y > 1 + b && x == 3 + b && uc {
                    let mut s = String::new();
                    grid_r.get_and(&(x - 1, y), |c| s.push(c[0]));
                    s.push(c);
                    portals_w.insert(s, (x - 4, y - 2, false));
                    portals_w.refresh();
                }
                // right inner portal
                else if b > 0 && y > 3 + b && y < w - b && x == 1 + w - b && uc {
                    let mut s = String::new();
                    grid_r.get_and(&(x - 1, y), |c| s.push(c[0]));
                    s.push(c);
                    portals_w.insert(s, (x - 1, y - 2, false));
                    portals_w.refresh();
                }
                // upper inner portal
                else if b > 0 && y == 3 + b && x > 1 && uc {
                    let mut s = String::new();
                    grid_r.get_and(&(x, y - 1), |c| s.push(c[0]));
                    s.push(c);
                    portals_w.insert(s, (x - 2, y - 4, false));
                    portals_w.refresh();
                }
                // height
                else if h == 0 && b > 0 && y > 3 + b && x > 4 + b && x < w - b && uc { h = y + b; }
                // lower inner portal
                else if h > 0 && y == 1 + h - b && x > 1 && x < 2 + w && uc {
                    let mut s = String::new();
                    grid_r.get_and(&(x, y - 1), |c| s.push(c[0]));
                    s.push(c);
                    portals_w.insert(s, (x - 2, y - 1, false));
                    portals_w.refresh();
                }
                // lower outer portal
                else if h > 0 && y == 3 + h && uc {
                    let mut s = String::new();
                    grid_r.get_and(&(x, y - 1), |c| s.push(c[0]));
                    s.push(c);
                    portals_w.insert(s, (x - 2, y - 4, true));
                    portals_w.refresh();
                }
                grid_w.update((x, y), c);
                grid_w.refresh();
            })
        });
        // eprintln!("w {} h {} b {}", w, h, b);
        let mut jump = HashMap::new();
        portals_r.for_each(|_x, y| {
            // eprintln!("portals {:?} {:?}", _x, y);
            if y.len() == 2 {
                jump.insert(y[0], y[1]);
                jump.insert(y[1], y[0]);
            }
        });
        // Entry/exit live on recursion level 0.
        let entry = portals_r.get_and("AA", |p| (p[0].0, p[0].1, 0)).unwrap();
        let exit = portals_r.get_and("ZZ", |p| (p[0].0, p[0].1, 0)).unwrap();
        // eprintln!("entry {:?} exit {:?}", entry, exit);
        let dirs = vec![(0, -1), (1, 0), (0, 1), (-1, 0)];
        // Successor function over (x, y, level): grid moves stay on the same
        // level; inner portals (false) go one level deeper, outer portals
        // (true) go one level up and only exist when level > 0.
        let next = |(x, y, z): (usize, usize, usize), path: &Vec<(usize, usize, usize)>| {
            let mut next = dirs.iter().flat_map(|&(dx, dy)| {
                let nx = x as i32 + dx;
                let ny = y as i32 + dy;
                if nx < 0 || ny < 0 { None } else {
                    let (nx, ny) = (nx as usize, ny as usize);
                    if path.contains(&(nx, ny, z)) { None } else {
                        grid_r.get_and(
                            &(nx + 2, ny + 2),
                            |c| if c[0] == '.' { Some((nx, ny, z)) } else { None })
                            .unwrap_or(None)
                    }
                }
            }).collect::<Vec<_>>();
            if let Some(&(nx, ny, _)) = jump.get(&(x, y, false)) {
                next.push((nx, ny, z + 1));
            } else if let Some(&(nx, ny, _)) = jump.get(&(x, y, true)) {
                if z > 0 { next.push((nx, ny, z - 1)); }
            };
            // eprintln!("{} {} {} -> {:?}", x, y, z, next);
            next
        };
        // Dead-end pruning; probed at level 1 (where both portal directions
        // are available) but matched against the level-0 entry/exit.
        loop {
            let mut plugged = false;
            grid_r.for_each(|(x, y), c| {
                if c[0] == '.' {
                    let p0 = (x - 2, y - 2, 0);
                    let p1 = (x - 2, y - 2, 1);
                    if next(p1, &vec![]).len() == 1 && p0 != entry && p0 != exit {
                        // eprintln!("plugging ({}, {})", p0.0, p0.1);
                        grid_w.update((*x, *y), '#');
                        plugged = true;
                    }
                }
            });
            if !plugged { break; }
            grid_w.refresh();
        };
        // for y in 0..h {
        //     let mut l = String::new();
        //     for x in 0..w {
        //         grid_r.get_and(&(x + 2, y + 2), |c| l.push(c[0]));
        //     }
        //     eprintln!("{}", l);
        // }
        // BFS over (x, y, level) states, same frontier expansion as part 1.
        let mut seen = HashSet::new();
        let r = (1..).fold_while((vec![(entry, vec![])], 0), |(states, i), _| {
            // eprintln!("i {} states len {}", i, states.len());
            // eprintln!("states {:?}", states);
            states.iter().for_each(|&(p, _)| { seen.insert(p); });
            let new_states: Vec<(_, Vec<_>)> = states.iter().flat_map(|(pos, path)| {
                let candidates: Vec<_> =
                    next(*pos, path).into_iter().filter(|p| !seen.contains(p)).collect();
                candidates.into_iter().map(move |p| {
                    let mut path = path.clone();
                    path.push(*pos);
                    (p, path.clone())
                })
            }).collect();
            if new_states.is_empty() || new_states.iter().any(|&(p, _)| p == exit) {
                Done((new_states, i + 1))
            } else {
                Continue((new_states, i + 1))
            }
        });
        let r = r.into_inner().1;
        // eprintln!("{:?}", r);
        Ok(r)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    // NOTE(review): the fixture strings below appear to have had their
    // original newlines collapsed to spaces at some point (the ':' sentinels
    // that `replace(":", "")` strips used to mark line ends). As written the
    // mazes are single lines and these tests likely no longer pass as
    // intended — verify against the original Advent of Code inputs.

    // Asserts part 1's step count for a maze (':' protects trailing spaces).
    fn test1(s: &str, n: usize) {
        assert_eq!(
            Day20 {}.part1_impl(&mut s.replace(":", "").as_bytes()).unwrap(),
            n);
    }

    #[test]
    fn part1() {
        test1(" A : A : #######.######### : #######.........# : #######.#######.# : #######.#######.# : #######.#######.# : ##### B ###.# : BC...## C ###.# : ##.## ###.# : ##...DE F ###.# : ##### G ###.# : #########.#####.# : DE..#######...###.# : #.#########.###.# : FG..#########.....# : ###########.##### : Z : Z :", 23);
        test1(" A : A : #################.############# : #.#...#...................#.#.# : #.#.#.###.###.###.#########.#.# : #.#.#.......#...#.....#.#.#...# : #.#########.###.#####.#.#.###.# : #.............#.#.....#.......# : ###.###########.###.#####.#.#.# : #.....# A C #.#.#.# : ####### S P #####.# : #.#...# #......VT: #.#.#.# #.##### : #...#.# YN....#.# : #.###.# #####.# : DI....#.# #.....# : #####.# #.###.# : ZZ......# QG....#..AS: ###.### ####### : JO..#.#.# #.....# : #.#.#.# ###.#.# : #...#..DI BU....#..LF: #####.# #.##### : YN......# VT..#....QG: #.###.# #.###.# : #.#...# #.....# : ###.### J L J #.#.### : #.....# O F P #.#...# : #.###.#####.#.#####.#####.###.# : #...#.#.#...#.....#.....#.#...# : #.#####.###.###.#.#.#########.# : #...#.#.....#...#.#.#.#.....#.# : #.###.#####.###.###.#.#.####### : #.#.........#...#.............# : #########.###.###.############# : B J C : U P P :", 58);
    }

    // Asserts part 2's (recursive-maze) step count.
    fn test2(s: &str, n: usize) {
        assert_eq!(Day20 {}.part2_impl(&mut s.replace(":", "").as_bytes()).unwrap(), n);
    }

    #[test]
    fn part2() {
        test2(" Z L X W C : Z P Q B K : ###########.#.#.#.#######.############### : #...#.......#.#.......#.#.......#.#.#...# : ###.#.#.#.#.#.#.#.###.#.#.#######.#.#.### : #.#...#.#.#...#.#.#...#...#...#.#.......# : #.###.#######.###.###.#.###.###.#.####### : #...#.......#.#...#...#.............#...# : #.#########.#######.#.#######.#######.### : #...#.# F R I Z #.#.#.# : #.###.# D E C H #.#.#.# : #.#...# #...#.# : #.###.# #.###.# : #.#....OA WB..#.#..ZH: #.###.# #.#.#.# : CJ......# #.....# : ####### ####### : #.#....CK #......IC: #.###.# #.###.# : #.....# #...#.# : ###.### #.#.#.# : XF....#.# RF..#.#.# : #####.# ####### : #......CJ NM..#...# : ###.#.# #.###.# : RE....#.# #......RF: ###.### X X L #.#.#.# : #.....# F Q P #.#.#.# : ###.###########.###.#######.#########.### : #.....#...#.....#.......#...#.....#.#...# : #####.#.###.#######.#######.###.###.#.#.# : #.......#.......#.#.#.#.#...#...#...#.#.# : #####.###.#####.#.#.#.#.###.###.#.###.### : #.......#.....#.#...#...............#...# : #############.#.#.###.################### : A O F N : A A D M :", 396);
    }
}
//! Model connection to textures, animations, etc. //! //! There is no data in the actual Nitro files (that I know of) that tells us //! which animation applies to which model or what texture to use when a model //! says to use texture with name such-and-such. Presumably game code would just //! call supply the right files to the right calls. That leaves us to figure it //! out for ourselves. This modules contains the heuristics for that. use cli::Args; use db::{Database, AnimationId, TextureId, PaletteId, ModelId, PatternId, MatAnimId}; use errors::Result; /// A Connection records interrelationships between Nitro resources, namely how /// all the other resources relate to the models. pub struct Connection { pub models: Vec<ModelConnection>, } /// Records for a model (1) what texture/palette each material should use, and /// (2) which animations can be applied to it. pub struct ModelConnection { pub materials: Vec<MaterialConnection>, /// List of animations that can be applied to the model. pub animations: Vec<AnimationId>, /// List of patterns that can be applied to the model (and how to apply /// them). pub patterns: Vec<PatternConnection>, /// List of material animations that can be applied to the model. pub mat_anims: Vec<MatAnimConnection>, } /// Result of resolving which texture/palette a material should use. pub enum MaterialConnection { NoTexture, TextureMissing, TextureOkNoPalette { texture: Match<TextureId>, }, TextureOkPaletteMissing { texture: Match<TextureId>, }, TextureOkPaletteOk { texture: Match<TextureId>, palette: Match<PaletteId>, }, } /// Result of resolving a texture/palette name to a matching texture/palette ID. #[derive(Copy, Clone)] pub struct Match<T: Copy> { pub id: T, /// True if the match was the best possible amoung all our candidates /// (high confidence we picked the correct one). 
pub best: bool, } impl MaterialConnection { pub fn texture(&self) -> Option<Match<TextureId>> { match *self { MaterialConnection::NoTexture | MaterialConnection::TextureMissing => None, MaterialConnection::TextureOkNoPalette { texture } | MaterialConnection::TextureOkPaletteMissing { texture } | MaterialConnection::TextureOkPaletteOk { texture, .. } => Some(texture) } } pub fn texture_id(&self) -> Option<TextureId> { match *self { MaterialConnection::NoTexture | MaterialConnection::TextureMissing => None, MaterialConnection::TextureOkNoPalette { texture } | MaterialConnection::TextureOkPaletteMissing { texture } | MaterialConnection::TextureOkPaletteOk { texture, .. } => Some(texture.id), } } pub fn palette_id(&self) -> Option<TextureId> { match *self { MaterialConnection::NoTexture | MaterialConnection::TextureMissing | MaterialConnection::TextureOkNoPalette { .. } | MaterialConnection::TextureOkPaletteMissing { .. } => None, MaterialConnection::TextureOkPaletteOk { palette, .. } => Some(palette.id) } } /// Produces None if there was no texture, the texture/palette if there was /// and everything resolved sucessfully, or an Err if there was any /// resolving error. pub fn image_id(&self) -> Result<Option<(TextureId, Option<PaletteId>)>> { match *self { MaterialConnection::NoTexture => Ok(None), MaterialConnection::TextureMissing => bail!("texture missing"), MaterialConnection::TextureOkNoPalette { texture } => Ok(Some((texture.id, None))), MaterialConnection::TextureOkPaletteMissing { .. } => bail!("palette missing"), MaterialConnection::TextureOkPaletteOk { texture, palette } => Ok(Some((texture.id, Some(palette.id)))), } } } #[derive(Copy, Clone)] pub struct ConnectionOptions { /// Apply all animations to every model. pub all_animations: bool, } impl ConnectionOptions { /// Creates a ConnectionOptions from the CLI arguments. 
pub fn from_cli_args(args: &Args) -> ConnectionOptions { ConnectionOptions { all_animations: args.flags.contains(&"all-animations"), } } } impl Connection { pub fn build(db: &Database, options: ConnectionOptions) -> Connection { // Record whether we failed to resolve any materials so we can warn let mut missing_textures = false; let models = db.models.iter().enumerate().map(|(model_id, model)| { let materials = (0..model.materials.len()) .map(|material_id| { let mat_conn = resolve_material(db, model_id, material_id); if mat_conn.image_id().is_err() { missing_textures = true; } mat_conn }).collect(); let animations = find_applicable_animations(db, model_id, options); let patterns = find_applicable_patterns(db, model_id); let mat_anims = find_applicable_mat_anims(db, model_id); ModelConnection { materials, animations, patterns, mat_anims } }).collect(); if missing_textures { warn!("A matching texture/palette couldn't be found for some materials!"); info!("Hint: textures are sometimes stored in a separate .nsbtx file."); } Connection { models } } } // HEURISTICS: /// TO RESOLVE A MATERIAL TEXTURE: The model file stores the texture name, so /// our initial set of candidates is all the texture in the DB with that name. /// If the material specifies a palette, we won't match a texture that doesn't /// require a palette, and the other way around too. If there are multiple /// candidates, we prefer one from the same file as the model (this is a good /// heuristic for models that store their textures/palettes in the same NSBMD /// file, which most do, but it doesn't help the textures are in a separate /// NSBTX file). If there are still multiple candidates, we prefer the first one /// (but we record a note about the match being tentative). /// /// Palettes are subsequently resolved similarly, prefering palettes from the /// same file as the texture. 
// Resolves one material of one model to a texture (and, when required, a
// palette), following the heuristic documented above. Returns the most
// specific MaterialConnection variant that applies.
fn resolve_material(db: &Database, model_id: ModelId, material_idx: usize)
-> MaterialConnection {
    let material = &db.models[model_id].materials[material_idx];

    let texture_name = match material.texture_name {
        None => return MaterialConnection::NoTexture,
        Some(ref name) => name,
    };
    let has_palette = material.palette_name.is_some();

    // Resolve the texture name. Start with all textures with the right name.
    let mut candidates = db.textures_by_name.get(texture_name)
        .cloned().unwrap_or(vec![]);

    // If the material specifies a palette, discard candidates that don't use
    // one, and conversely.
    candidates.retain(|&tex_id| {
        let requires_palette = db.textures[tex_id].params
            .format().desc().requires_palette;
        requires_palette == has_palette
    });

    // If there are candidates in the same file as the model we prefer them;
    // discard the others.
    let is_in_model_file = |&tex_id: &TextureId| {
        db.textures_found_in[tex_id] == db.models_found_in[model_id]
    };
    if candidates.iter().any(is_in_model_file) {
        candidates.retain(is_in_model_file)
    }

    // `best` means the candidate set collapsed to a single choice; otherwise
    // we tentatively take the first one.
    let texture_match = match candidates.len() {
        0 => return MaterialConnection::TextureMissing,
        n => Match { id: candidates[0], best: n == 1 },
    };

    // If there was no palette, we're done!
    if !has_palette {
        return MaterialConnection::TextureOkNoPalette {
            texture: texture_match,
        };
    }

    // Otherwise, resolve the palette. Start with candidates that have the right
    // name.
    let palette_name = material.palette_name.as_ref().unwrap();
    let mut candidates = db.palettes_by_name.get(palette_name)
        .cloned().unwrap_or(vec![]);

    // If there are candidates in the same file as the texture we prefer them;
    // discard the others.
    let texture_file = db.textures_found_in[texture_match.id];
    let is_in_tex_file = |&pal_id: &PaletteId| {
        db.palettes_found_in[pal_id] == texture_file
    };
    if candidates.iter().any(is_in_tex_file) {
        candidates.retain(is_in_tex_file)
    }

    let palette_match = match candidates.len() {
        0 => return MaterialConnection::TextureOkPaletteMissing { texture: texture_match },
        n => Match { id: candidates[0], best: n == 1 },
    };

    MaterialConnection::TextureOkPaletteOk {
        texture: texture_match,
        palette: palette_match,
    }
}

/// TO DETERMINE WHICH ANIMATIONS APPLY: An animation varies the values of the
/// model's object matrices, so the obvious heuristic is that an animation
/// applies to a model if it animates as many objects as the model has. This
/// obviously gives false-positives since any two models with the same number of
/// objects have the same set of animations applied to them. Surprisingly it
/// also gives false-negatives: some animations that certainly go with a certain
/// model have a different number of objects (maybe so it can be re-used among
/// multiple models??).
///
/// To solve the second issue the user is given the option of disabling this
/// heuristic and applying all the animations to every model. This,
/// together with the first issue, is the main impediment to batch-converting
/// whole games.
fn find_applicable_animations(db: &Database, model_id: ModelId, options: ConnectionOptions)
-> Vec<AnimationId> {
    if options.all_animations {
        // Let's try not to worry about how big this is :o
        return (0..db.animations.len()).collect();
    }

    // Only animations with the same number of objects apply.
    let num_model_objs = db.models[model_id].objects.len();
    (0..db.animations.len())
        .filter(|&anim_id| {
            let num_anim_objs = db.animations[anim_id].objects_curves.len();
            num_anim_objs == num_model_objs
        })
        .collect()
}

/// Indicates that a model can have the specified pattern applied to it, and
/// tells what the texture/palette names in that pattern should resolve to for
/// that model.
pub struct PatternConnection {
    pub pattern_id: PatternId,
    // Indexed in step with the pattern's texture/palette name lists; None
    // means the name did not resolve to any resource in the DB.
    pub texture_ids: Vec<Option<TextureId>>,
    pub palette_ids: Vec<Option<PaletteId>>,
}

/// TO DETERMINE WHICH PATTERNS APPLY: A pattern track targets a material by
/// name, so apply a pattern to a model when every track in the pattern targets
/// a valid material in that model.
fn find_applicable_patterns(db: &Database, model_id: ModelId) -> Vec<PatternConnection> {
    let model = &db.models[model_id];
    db.patterns.iter().enumerate().filter_map(|(pattern_id, pattern)| {
        // Check if all the tracks target valid materials
        let valid = pattern.material_tracks.iter().all(|track| {
            model.materials.iter().any(|mat| mat.name == track.name)
        });
        if !valid { return None; }

        // TODO: give priority to textures/palettes in the same file as the
        // model.
        let texture_ids = pattern.texture_names.iter().map(|name| {
            let ids = db.textures_by_name.get(name)?;
            Some(ids[0])
        }).collect();
        let palette_ids = pattern.palette_names.iter().map(|name| {
            let ids = db.palettes_by_name.get(name)?;
            Some(ids[0])
        }).collect();

        Some(PatternConnection {
            pattern_id,
            texture_ids,
            palette_ids,
        })
    }).collect()
}

/// Indicates that a model can have the specified material animations applied to
/// it.
pub struct MatAnimConnection { pub mat_anim_id: MatAnimId, } fn find_applicable_mat_anims(db: &Database, model_id: ModelId) -> Vec<MatAnimConnection> { let model = &db.models[model_id]; db.mat_anims.iter().enumerate().filter_map(|(mat_anim_id, mat_anim)| { // Check if all the tracks target valid materials let valid = mat_anim.tracks.iter().all(|track| { model.materials.iter().any(|mat| mat.name == track.name) }); if !valid { None } else { Some(MatAnimConnection { mat_anim_id }) } }).collect() }
use cranelift_codegen::binemit; use cranelift_codegen::ir; pub struct RelocSink { pub relocs: Vec<( binemit::Reloc, ir::ExternalName, binemit::CodeOffset, binemit::Addend, )>, } impl RelocSink { pub fn new() -> Self { Self { relocs: Vec::new() } } } impl<'func> binemit::RelocSink for RelocSink { fn reloc_ebb( &mut self, _offset: binemit::CodeOffset, _reloc: binemit::Reloc, _ebb_offset: binemit::CodeOffset, ) { panic!("ebb header addresses not yet implemented"); } fn reloc_external( &mut self, offset: binemit::CodeOffset, reloc: binemit::Reloc, name: &ir::ExternalName, addend: binemit::Addend, ) { // TODO: How should addend be handled? Should it be added to // offset and stored in self.relocs or carried through beside // offset? self.relocs.push((reloc, name.clone(), offset, addend)); } fn reloc_constant(&mut self, _: binemit::CodeOffset, _: binemit::Reloc, _: ir::ConstantOffset) { panic!("reloc_constant not yet implemented"); } fn reloc_jt( &mut self, _offset: binemit::CodeOffset, _reloc: binemit::Reloc, _jt: ir::JumpTable, ) { panic!("jump table addresses not yet implemented"); } }
use core::{convert::TryInto, ops::Deref, ptr::NonNull};

use alloc::{rc::Rc, sync::Arc};

use fermium::SDL_Renderer;

use crate::{
  sdl_get_error, Initialization, PixelFormatEnum, SdlError, Surface, Texture,
  Window, WindowCreationFlags,
};

// Owning wrapper around a raw `SDL_Renderer`; destroys it on drop.
pub(crate) struct Renderer {
  nn: NonNull<SDL_Renderer>,
  // Note(Lokathor): As long as the renderer lives, we have to also keep the
  // window that created it alive.
  #[allow(dead_code)]
  win: Rc<Window>,
}

impl Drop for Renderer {
  // Note(Lokathor): The drop for the Rc runs *after* this drop code.
  fn drop(&mut self) {
    unsafe { fermium::SDL_DestroyRenderer(self.nn.as_ptr()); }
  }
}

/// A window with an attached renderer.
///
/// Derefs to [`Window`], so all window methods are available on it directly.
pub struct RendererWindow {
  rend: Rc<Renderer>,
  win: Rc<Window>,
}

impl Deref for RendererWindow {
  type Target = Window;
  #[inline]
  #[must_use]
  fn deref(&self) -> &Self::Target {
    &self.win
  }
}

impl RendererWindow {
  // Creates the window, then attaches a renderer to it.
  pub(crate) fn new(
    init: Arc<Initialization>, title: &str, pos: Option<[i32; 2]>,
    size: [u32; 2], flags: WindowCreationFlags,
  ) -> Result<Self, SdlError> {
    let win = Rc::new(Window::new(init, title, pos, size, flags)?);
    // Driver index -1 asks SDL for the first driver that supports the
    // requested flags (hardware accelerated + vsynced presentation).
    let nn = NonNull::new(unsafe {
      fermium::SDL_CreateRenderer(
        win.as_ptr(),
        -1,
        (fermium::SDL_RENDERER_ACCELERATED | fermium::SDL_RENDERER_PRESENTVSYNC)
          as u32,
      )
    })
    .ok_or_else(sdl_get_error)?;
    // The renderer keeps its own Rc to the window so the window cannot be
    // dropped before the renderer is.
    let rend = Rc::new(Renderer { nn, win: win.clone() });
    Ok(RendererWindow { win, rend })
  }

  /// Clears the current render target. SDL signals failure with a negative
  /// return code, which is turned into the current SDL error.
  pub fn clear(&self) -> Result<(), SdlError> {
    let ret = unsafe { fermium::SDL_RenderClear(self.rend.nn.as_ptr()) };
    if ret >= 0 {
      Ok(())
    } else {
      Err(sdl_get_error())
    }
  }

  /// Presents the backbuffer to the screen.
  pub fn present(&self) {
    unsafe { fermium::SDL_RenderPresent(self.rend.nn.as_ptr()) }
  }

  /// Creates a blank texture on this renderer.
  ///
  /// Note: `w`/`h` are converted with `try_into().unwrap()`, so values that
  /// don't fit the C `int` SDL expects will panic rather than truncate.
  pub fn create_texture(
    &self, pixel_format: PixelFormatEnum, access: TextureAccess, w: u32, h: u32,
  ) -> Result<Texture, SdlError> {
    NonNull::new(unsafe {
      fermium::SDL_CreateTexture(
        self.rend.nn.as_ptr(),
        pixel_format.0,
        access as _,
        w.try_into().unwrap(),
        h.try_into().unwrap(),
      )
    })
    .ok_or_else(sdl_get_error)
    // The texture holds an Rc to the renderer, keeping it (and transitively
    // the window) alive for the texture's lifetime.
    .map(|nn| Texture { nn, rend: self.rend.clone() })
  }

  /// Creates a texture with the contents of an existing surface.
  pub fn create_texture_from_surface(
    &self, surface: &Surface,
  ) -> Result<Texture, SdlError> {
    NonNull::new(unsafe {
      fermium::SDL_CreateTextureFromSurface(
        self.rend.nn.as_ptr(),
        surface.nn.as_ptr(),
      )
    })
    .ok_or_else(sdl_get_error)
    .map(|nn| Texture { nn, rend: self.rend.clone() })
  }
}

/// How a texture will be accessed; discriminants mirror SDL's
/// `SDL_TextureAccess` constants so the enum can be cast straight through
/// to the C API.
pub enum TextureAccess {
  /// Changes rarely, not lockable.
  Static = fermium::SDL_TEXTUREACCESS_STATIC as _,
  /// Changes frequently, lockable.
  Streaming = fermium::SDL_TEXTUREACCESS_STREAMING as _,
  /// Can be used as a render target.
  Target = fermium::SDL_TEXTUREACCESS_TARGET as _,
}
/* Copyright 2015 Tyler Neely

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License. */

// NOTE(review): this file targets a pre-1.0 Rust toolchain and will not build
// on modern Rust as written. Constructs that no longer exist or are now
// rejected include: `std::fs::PathExt`, `str::split_str`,
// `num::from_str_radix::<u64>(...)`, `pub fn` inside a trait, the anonymous
// `u64` trait-method parameter, and the `try!` macro style. Several latent
// bugs are flagged inline below. Kept verbatim as historical code; a port
// would be a full rework.

use std::collections::BTreeMap;
use std::collections::Bound::{Included, Unbounded};
use std::{self, num};
use std::io::{self, Read, Write, Seek, SeekFrom, BufWriter};
use std::fs::{self, File, OpenOptions, read_dir, PathExt};
//use std::old_io::fs::PathExtensions;

use coding::{decode_u32, decode_u64};
use whence::Whence;
use logfile::LogFile;
use sync_policy::SyncPolicy;
use producer::ProducerOptions;
use message_and_offset::MessageAndOffset;

// Common interface over the write- and read-side log stores.
// NOTE(review): `pub` on trait methods is not permitted; `log_for_index`'s
// `u64` parameter has no name (anonymous trait params were a pre-2018 relic).
pub trait Store {
    pub fn active_log(&self) -> Option<LogFile>;
    pub fn log_for_index(&self, u64) -> Option<LogFile>;
}

// Append-side store: owns the set of log files and tracks the highest
// message offset written so far.
pub struct WriteStore<'a> {
    active_log: Option<&'a LogFile<'a>>,
    max_offset: u64,
    stores: BTreeMap<u64, LogFile<'a>>,
    options: ProducerOptions,
}

// Read-side store: a cursor over the same set of log files.
pub struct ReadStore<'a> {
    active_log: Option<LogFile<'a>>,
    stores: BTreeMap<u64, LogFile<'a>>,
}

impl<'a> WriteStore<'a> {
    // Opens (or initializes) the log directory for appending. The files are
    // keyed in `stores` by the base offset encoded in their hex file name.
    // NOTE(review): `stores` is not declared `mut` but is inserted into below,
    // and `Path::new(format!(...))` passes an owned String where a &str-like
    // is expected; `path` is also unused.
    pub fn new<'b>(directory: &'b str, options: ProducerOptions)
        -> Result<WriteStore<'b>, io::Error> {
        let mut opts = OpenOptions::new();
        opts.write(true).append(true).create(true);
        let path = Path::new(directory);
        let stores = try!(stores_from_dir(directory, opts));
        if stores.len() == 0 {
            println!("found no suitable log files, initializing new one.");
            let zero_path = Path::new(format!("{:016x}.log", 0));
            let initial_log_file = try!(LogFile::new(&zero_path, opts));
            stores.insert(0, initial_log_file);
        }
        // The "leading" file is the one with the highest base offset.
        let leading_file = stores.range(Unbounded, Unbounded)
            .next_back().unwrap().1;
        Ok(WriteStore {
            active_log: Some(leading_file),
            max_offset: leading_file.max_offset(),
            stores: stores,
            options: options,
        })
    }

    // Appends one message under the next offset, rolling to a new file first
    // if the active one has grown past the configured roll size.
    pub fn append(&'a mut self, msg: &[u8]) -> Result<(), io::Error> {
        self.max_offset += 1;
        if self.should_roll() {
            self.roll_active_file();
        }
        self.active_log.unwrap().append(self.max_offset, msg)
    }

    //TODO do this check before writing, so that the limit is only crossed
    // when a single massive message exceeds the threshold and gets its
    // own file.
    fn should_roll(&'a self) -> bool {
        let af = self.active_log_file().unwrap();
        let should = af.len() > self.options.file_roll_size;
        println!("current len: {} max len: {} should_roll: {}",
                 af.len(), self.options.file_roll_size, should);
        should
    }

    // The log file with the highest base offset, i.e. the one being written.
    fn active_log_file(&'a self) -> Option<&'a LogFile> {
        self.stores.range(Unbounded, Unbounded)
            .next_back()
            .map(move |index_log_file| { index_log_file.1 })
    }

    fn roll_active_file(&self) {
        //TODO get max index, create new file, add to stores map
    }
}

impl<'a> ReadStore<'a> {
    //TODO initialize correct file and offset using whence
    // NOTE(review): `whence` and `path` are accepted but never used here.
    pub fn new<'b>(directory: &'b str, whence: Whence)
        -> Result<ReadStore<'b>, io::Error> {
        let mut opts = OpenOptions::new();
        opts.read(true);
        let path = Path::new(directory);
        Ok(ReadStore {
            active_log: None,
            stores: try!(stores_from_dir(directory, opts)),
        })
    }

    // Reads the next framed message: 8-byte offset, 4-byte length, payload.
    // On any short read the cursor is rewound to where it started and None is
    // returned.
    // NOTE(review): `set_len` on a fresh Vec exposes uninitialized memory
    // before the subsequent read fills it — undefined behavior by today's
    // rules; a zero-filled buffer or `read_exact` would be the safe form.
    pub fn read<'b>(&mut self) -> Option<MessageAndOffset<'b>> {
        let original_pos = self.active_log.unwrap().f.seek(SeekFrom::Current(0)).unwrap();
        let offset_buf = &mut[0u8; 8];
        let size_buf = &mut[0u8; 4];
        // loop acts as "poor man's goto" for streamlined error handling
        //TODO traverse files if we hit the end
        loop {
            if self.active_log.unwrap().f.read(offset_buf).unwrap() < 8 {
                break;
            }
            let msg_offset = decode_u64(*offset_buf);
            if self.active_log.unwrap().f.read(size_buf).unwrap() < 4 {
                break;
            }
            let msg_size = decode_u32(*size_buf);
            let mut msg = Vec::with_capacity(msg_size as usize);
            unsafe { msg.set_len(msg_size as usize); }
            let mut s = msg.as_mut_slice();
            let n = self.active_log.unwrap().f.read(s).unwrap();
            if n < msg_size as usize {
                break;
            }
            return Some(MessageAndOffset { message: msg, offset: msg_offset });
        }
        // if we couldn't read a complete message, seek back
        self.active_log.unwrap().f.seek(SeekFrom::Start(original_pos)).unwrap();
        None
    }

    //TODO handle 0 case where we need to pick the first log available
    // Picks the log file whose base offset is the greatest one <= `index`.
    fn log_for_index(&'a self, index: u64) -> Option<LogFile<'a>> {
        self.stores.range(Unbounded, Included(&index))
            .next_back()
            .map(move |index_log_file| { *index_log_file.1 })
    }

    //TODO test the hell out of this
    // Scans message frames forward from the current position to find the last
    // complete message offset at or before `whence`, then restores the cursor.
    // NOTE(review): `file_size` is computed but never consulted, and `stop_pos`
    // is only used to select the file — the scan itself never compares frame
    // offsets against it.
    pub fn seek(&'a mut self, whence: Whence) -> Result<u64, io::Error> {
        let stop_pos = match whence {
            Whence::Oldest => 0,
            Whence::Latest => std::u64::MAX,
            Whence::Position(p) => p,
        };
        self.active_log = self.log_for_index(stop_pos);
        // this will be optimized when we have indexes
        let original_pos = self.active_log.unwrap().f.seek(SeekFrom::Current(0)).unwrap();
        let file_size = self.active_log.unwrap().f.metadata().unwrap().len();
        let mut msg_offset = None;
        let offset_buf = &mut[0u8; 8];
        let size_buf = &mut[0u8; 4];
        loop {
            if self.active_log.unwrap().f.read(offset_buf).unwrap() < 8 {
                break;
            }
            if self.active_log.unwrap().f.read(size_buf).unwrap() < 4 {
                break;
            }
            let msg_size = decode_u32(*size_buf);
            let old_pos = self.active_log.unwrap().f.seek(SeekFrom::Current(0)).unwrap();
            println!("max_offset: skipping ahead {} bytes", msg_size);
            let new_pos = self.active_log.unwrap().f.seek(SeekFrom::Current(msg_size as i64)).unwrap();
            if (new_pos - old_pos < msg_size as u64) {
                break;
            }
            msg_offset = Some(decode_u64(*offset_buf));
        }
        self.active_log.unwrap().f.seek(SeekFrom::Start(original_pos)).unwrap();
        Ok(msg_offset.unwrap_or(0))
    }
}

// Scans `directory` for "<hex base offset>.log" files and opens each one,
// creating the directory first when it doesn't exist.
// NOTE(review): the `try!(LogFile::new(...))` sits inside a closure passed to
// `map`, so a failure there cannot propagate out of this function — the
// Result produced by the closure is silently discarded.
fn stores_from_dir<'a>(directory: &str, opts: OpenOptions)
    -> Result<BTreeMap<u64, LogFile<'a>>, io::Error> {
    let dir_path = Path::new(directory);
    if !dir_path.is_dir() {
        println!("attempting to create new log directory: {}", directory);
        try!(fs::create_dir_all(&directory));
    }
    let mut stores: BTreeMap<u64, LogFile<'a>> = BTreeMap::new();
    for possible_file in try!(read_dir(directory)) {
        let f = try!(possible_file);
        let fpath = f.path();
        let fname = fpath.file_name().unwrap().to_str().unwrap();
        let splits = fname.split_str(".").collect::<Vec<&str>>();
        if splits.len() == 2 && splits[1] == "log" {
            num::from_str_radix::<u64>(splits[0], 16).map(|number| {
                println!("found log file: {}", number);
                let log_file = try!(LogFile::new(&fpath, opts));
                Ok(stores.insert(number, log_file))
            });
        }
    }
    Ok(stores)
}
/// Reports whether `ch` should be skipped as a separator: any Unicode
/// whitespace character, or a comma (commas act as element separators
/// alongside spaces and newlines).
#[inline(always)]
pub fn is_whitespace(ch: char) -> bool {
    ch == ',' || ch.is_whitespace()
}

/// Reports whether `ch` is the line-feed character.
#[inline(always)]
pub fn is_newline(ch: char) -> bool {
    matches!(ch, '\n')
}
use near_sdk::borsh::{self, BorshDeserialize, BorshSerialize};
use near_sdk::json_types::{Base64VecU8, U128};
use near_sdk::serde::{Deserialize, Serialize};
use near_sdk::AccountId;

/// Spec string that every valid `SharesMetadata` must carry
/// (checked by `assert_valid`).
pub const SHARES_FT_METADATA_SPEC: &str = "shares-ft-1.0.0";

/// Identifier of the underlying NFT token.
pub type TokenId = String;

/// Fungible-token metadata for shares that fractionalize a single NFT.
#[derive(BorshDeserialize, BorshSerialize, Clone, Deserialize, Serialize)]
#[serde(crate = "near_sdk::serde")]
pub struct SharesMetadata {
    /// Must equal `SHARES_FT_METADATA_SPEC`.
    pub spec: String,
    pub name: String,
    pub symbol: String,
    pub icon: Option<String>,
    /// Off-chain reference; must be present iff `reference_hash` is present.
    pub reference: Option<String>,
    /// 32-byte hash of the referenced content (length enforced by `assert_valid`).
    pub reference_hash: Option<Base64VecU8>,
    pub decimals: u8,
    /// Contract holding the underlying NFT.
    pub nft_contract_address: AccountId,
    /// Token id of the underlying NFT on that contract.
    pub nft_token_id: TokenId,
    // Price per share — units are not stated here; presumably yoctoNEAR.
    // TODO(review): confirm against the contract using this metadata.
    pub share_price: U128,
    /// Whether the shares/NFT have been released (one-way flag, see
    /// `set_as_released`).
    pub released: bool
}

/// Implemented by contracts that expose this shares metadata.
pub trait SharesMetadataProvider {
    fn ft_metadata(&self) -> SharesMetadata;
}

impl SharesMetadata {
    /// Validates internal consistency of the metadata.
    ///
    /// # Panics
    /// Panics when the spec string is wrong, when exactly one of
    /// `reference` / `reference_hash` is set, or when the hash is not
    /// exactly 32 bytes.
    pub fn assert_valid(&self) {
        assert_eq!(&self.spec, SHARES_FT_METADATA_SPEC);
        assert_eq!(self.reference.is_some(), self.reference_hash.is_some());
        if let Some(reference_hash) = &self.reference_hash {
            assert_eq!(reference_hash.0.len(), 32, "Hash has to be 32 bytes");
        }
    }

    /// Marks the shares as released; there is no way to un-release.
    pub fn set_as_released(&mut self) {
        self.released = true;
    }
}
/// (min, max, policy character, password) tuples as parsed from the input.
type Passwords = Vec<(usize, usize, char, String)>;

/// Part 1: a password is valid when its policy character occurs between
/// `min` and `max` times, inclusive.
fn find_valid(pass: Passwords) -> usize {
    let mut valid = 0;
    for (min, max, c, password) in pass {
        let hits = n_occurrences(password, c);
        if (min..=max).contains(&hits) {
            valid += 1;
        }
    }
    valid
}

/// Counts how many times `c` occurs in `password`.
fn n_occurrences(password: String, c: char) -> usize {
    password.chars().filter(|&ch| ch == c).count()
}

/// Part 2: a password is valid when exactly one of the two 1-based
/// positions holds the policy character (an exclusive-or).
fn find_valid_part_2(pass: Passwords) -> usize {
    let mut valid = 0;
    for (first_pos, second_pos, c, password) in pass {
        let first = password.chars().nth(first_pos - 1).unwrap();
        let second = password.chars().nth(second_pos - 1).unwrap();
        if (first == c) != (second == c) {
            valid += 1;
        }
    }
    valid
}

/// Parses lines of the form "1-3 a: abcde" into (min, max, char, password).
fn prepare_data(data: Vec<String>) -> Passwords {
    let mut parsed = Vec::with_capacity(data.len());
    for line in data {
        let tokens = line
            .split_whitespace()
            .flat_map(|s| s.split('-'))
            .flat_map(|s| s.split(':'))
            .filter(|s| !s.is_empty())
            .collect::<Vec<&str>>();
        parsed.push((
            tokens[0].parse().unwrap(),
            tokens[1].parse().unwrap(),
            tokens[2].chars().next().unwrap(),
            tokens[3].into(),
        ));
    }
    parsed
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::data_parser::parse_lines;

    #[test]
    fn should_find_valid_pass() {
        let data = prepare_data(vec![
            "1-3 a: abcde".into(),
            "1-3 b: cdefg".into(),
            "2-9 c: ccccccccc".into(),
        ]);
        assert_eq!(2, find_valid(data));
    }

    #[test]
    fn should_find_valid_pass_part_2() {
        let data = prepare_data(vec![
            "1-3 a: abcde".into(),
            "1-3 b: cdefg".into(),
            "2-9 c: ccccccccc".into(),
        ]);
        assert_eq!(1, find_valid_part_2(data));
    }

    #[test]
    fn should_find_valid_pass_day_2_data() {
        let data = prepare_data(parse_lines("input/day_2_data.txt"));
        assert_eq!(660, find_valid(data));
    }

    #[test]
    fn should_find_valid_pass_day_2_data_part_2() {
        let data = prepare_data(parse_lines("input/day_2_data.txt"));
        assert_eq!(530, find_valid_part_2(data));
    }

    #[test]
    fn should_print_prepared_data() {
        println!(
            "{:?}",
            prepare_data(vec![
                "1-3 a: abcde".into(),
                "1-3 b: cdefg".into(),
                "2-9 c: ccccccccc".into()
            ])
        )
    }
}
extern crate clap;
extern crate byteorder;
#[macro_use]
extern crate lazy_static;

use std::fs::File;

mod args;
mod conv;
//mod charmap;

/// Entry point for the `conv` character-encoding converter: parses CLI
/// arguments, opens the input/output files, and streams the conversion.
/// Panics (via `unwrap`) on missing required arguments or I/O failures.
fn main() {
    let app = args::build_app("conv");
    // NOTE(review): "/dev/stdin" / "/dev/stdout" defaults make this Unix-only.
    let input = app.value_of("SOURCE").unwrap_or_else(|| {"/dev/stdin"});
    let output = app.value_of("output").unwrap_or_else(|| {"/dev/stdout"});
    let safely = app.is_present("safely");
    // `from`/`to` are presumably declared required in args::build_app, hence
    // the bare unwraps — TODO confirm.
    let from_code = app.value_of("from").unwrap();
    let to_code = app.value_of("to").unwrap();
    let converter = conv::Converter::new(from_code, to_code);
    let input_stream = File::open(input).unwrap();
    let mut output_stream = File::create(output).unwrap();
    // Replacement byte for unmappable characters; defaults to '?'.
    // NOTE(review): only the first byte of the user-supplied value is used.
    let mut replace = b'?';
    if let Some(r) = app.value_of("replace") {
        replace = r.as_bytes()[0];
    }
    converter.convert(input_stream, &mut output_stream, safely, replace);
}
mod ast;
mod compile;
mod parser;

use crate::compile::compile;

/// Compiler driver: compiles the bundled prelude source file.
/// Panics (via `unwrap`) if compilation fails; the path is relative to the
/// working directory the binary is run from.
fn main() {
    compile("sr/prelude.sr").unwrap();
}
use std::collections::{HashMap, HashSet};
use std::io::{self, BufRead as _};

type BoxError = Box<dyn std::error::Error>;

/// A rule deciding whether a field's raw text is acceptable.
trait Validation: std::fmt::Debug {
    fn validate(&self, input: &str) -> bool;
}

/// Accepts inputs whose byte length is exactly `len`.
#[derive(Debug)]
struct ValidLength {
    len: usize,
}

impl ValidLength {
    fn new(len: usize) -> Self {
        Self { len }
    }
}

impl Validation for ValidLength {
    fn validate(&self, input: &str) -> bool {
        self.len == input.len()
    }
}

/// Accepts decimal integers within `min..=max`.
#[derive(Debug)]
struct ValidNumber {
    min: usize,
    max: usize,
}

impl ValidNumber {
    fn new(min: usize, max: usize) -> Self {
        Self { min, max }
    }
}

impl Validation for ValidNumber {
    fn validate(&self, input: &str) -> bool {
        match input.parse::<usize>() {
            Ok(n) => (self.min..=self.max).contains(&n),
            Err(_) => false,
        }
    }
}

/// Accepts an in-range number followed by a fixed unit suffix, e.g. "183cm".
#[derive(Debug)]
struct ValidNumberWithUnit {
    valid_number: ValidNumber,
    unit: &'static str,
}

impl ValidNumberWithUnit {
    fn new(unit: &'static str, min: usize, max: usize) -> Self {
        Self {
            valid_number: ValidNumber::new(min, max),
            unit,
        }
    }
}

impl Validation for ValidNumberWithUnit {
    fn validate(&self, input: &str) -> bool {
        input
            .strip_suffix(self.unit)
            .map_or(false, |number| self.valid_number.validate(number))
    }
}

/// Accepts only strings drawn from a fixed whitelist.
#[derive(Debug)]
struct ValidString {
    options: HashSet<&'static str>,
}

impl ValidString {
    fn new(options: &[&'static str]) -> Self {
        Self {
            options: options.iter().copied().collect(),
        }
    }
}

impl Validation for ValidString {
    fn validate(&self, input: &str) -> bool {
        self.options.contains(input)
    }
}

/// Accepts "#" followed by exactly six hexadecimal digits.
#[derive(Debug)]
struct ValidHexColor;

impl Validation for ValidHexColor {
    fn validate(&self, input: &str) -> bool {
        match input.strip_prefix('#') {
            Some(hex) => hex.len() == 6 && hex.chars().all(|c| c.is_ascii_hexdigit()),
            None => false,
        }
    }
}

/// Combinator: both sub-rules must accept the input.
#[derive(Debug)]
struct And {
    a: Box<dyn Validation>,
    b: Box<dyn Validation>,
}

impl And {
    fn new(a: Box<dyn Validation>, b: Box<dyn Validation>) -> Self {
        Self { a, b }
    }
}

impl Validation for And {
    fn validate(&self, input: &str) -> bool {
        self.a.validate(input) && self.b.validate(input)
    }
}
/// Combinator: at least one sub-rule must accept the input.
#[derive(Debug)]
struct Or {
    a: Box<dyn Validation>,
    b: Box<dyn Validation>,
}

impl Or {
    fn new(a: Box<dyn Validation>, b: Box<dyn Validation>) -> Self {
        Self { a, b }
    }
}

impl Validation for Or {
    fn validate(&self, input: &str) -> bool {
        self.a.validate(input) || self.b.validate(input)
    }
}

/// Reads passport records from stdin ("name:value" fields separated by
/// spaces, records separated by blank lines) and prints how many records
/// carry a valid value for every required field.
///
/// # Errors
/// Returns any I/O error raised while reading stdin.
fn main() -> Result<(), BoxError> {
    // Required field name -> validation rule. Extra fields (e.g. "cid") are
    // simply ignored by the counting below.
    let mut required_fields: HashMap<&'static str, Box<dyn Validation>> = HashMap::new();
    required_fields.insert("byr", Box::new(ValidNumber::new(1920, 2002)));
    required_fields.insert("iyr", Box::new(ValidNumber::new(2010, 2020)));
    required_fields.insert("eyr", Box::new(ValidNumber::new(2020, 2030)));
    required_fields.insert(
        "hgt",
        Box::new(Or::new(
            Box::new(ValidNumberWithUnit::new("cm", 150, 193)),
            Box::new(ValidNumberWithUnit::new("in", 59, 76)),
        )),
    );
    required_fields.insert("hcl", Box::new(ValidHexColor));
    required_fields.insert(
        "ecl",
        Box::new(ValidString::new(&[
            "amb", "blu", "brn", "gry", "grn", "hzl", "oth",
        ])),
    );
    // pid: a nine-character zero-padded number.
    required_fields.insert(
        "pid",
        Box::new(And::new(
            Box::new(ValidNumber::new(0, 999999999)),
            Box::new(ValidLength::new(9)),
        )),
    );

    let mut valid = 0;
    // Count of required fields that validated in the current record.
    // NOTE(review): assumes a record never repeats the same field name.
    let mut current_passport_fields = 0;
    for line in io::stdin().lock().lines() {
        let line = line?;
        if line.is_empty() {
            // A blank line terminates the current record.
            if current_passport_fields >= required_fields.len() {
                valid += 1;
            }
            current_passport_fields = 0;
        }
        current_passport_fields += line
            .split(' ')
            .filter_map(|field| {
                let mut parts = field.splitn(2, ':');
                if let (Some(name), Some(value)) = (parts.next(), parts.next()) {
                    Some((name, value))
                } else {
                    None
                }
            })
            .filter_map(|(name, value)| {
                required_fields
                    .get(name)
                    .map(|validation| (validation, value))
            })
            .filter(|(validation, value)| validation.validate(value))
            .count();
    }
    // Bug fix: the final record is terminated by EOF rather than a blank
    // line, so it must be checked after the loop as well (previously it was
    // silently dropped when the input had no trailing blank line).
    if current_passport_fields >= required_fields.len() {
        valid += 1;
    }

    println!("{}", valid);
    Ok(())
}
extern crate pulldown_cmark;
extern crate ramhorns;
extern crate fs_extra;
extern crate regex;
extern crate toml;
#[macro_use]
extern crate die;

use pulldown_cmark::{Parser, html};
use ramhorns::{Template, Ramhorns};
use regex::{Captures, Regex};
use fs_extra::dir;
use std::fs::{read_dir, read_to_string, create_dir_all, write};
use std::path::{Path, PathBuf};
use std::collections::HashMap;
use std::error::Error;

// Fallback template: emits the rendered page body unescaped.
static TEMPLATE: &str = "{{{body}}}";

/// Static-site build entry point: renders the theme templates into `build/`,
/// copies static assets, then post-processes every built file by expanding
/// `[[content-path]]` / `[[content-path template-path]]` include directives
/// with Markdown rendered through a (cached) template.
/// Most failure paths abort via the `die!` macro.
fn main() -> Result<(), Box<dyn Error>> {
    // Matches [[path]] or [[path template]]; `content` is the file/dir to
    // include and the optional `template` overrides the "base" template.
    let content_regex = Regex::new(
        r#"\[\[(?P<content>(((\.\.?/)|([.a-zA-Z0-9_/\-\\]))+(\.[a-zA-Z0-9]+)?))(?P<template> +(((\.\.?/)|([.a-zA-Z0-9_/\-\\]))+(\.[a-zA-Z0-9]+)?))?\]\]"#,
    )?;
    // Template cache keyed by template path; "base" is the built-in fallback.
    let mut template_cache = HashMap::new();
    template_cache.insert("base".to_string(), Template::new(TEMPLATE)?);
    // Top-level files in theme/ are page templates to render.
    let template_files = read_dir("theme")?
        .into_iter()
        .filter_map(|x| x.ok())
        .map(|x| x.path())
        .filter(|x| x.is_file())
        .collect::<Vec<PathBuf>>();
    let mut templates = Ramhorns::from_folder("theme")?;
    create_dir_all("build")?;
    // Copy static assets, overwriting leftovers from previous builds.
    dir::copy("media", "build/", &{
        let mut c = dir::CopyOptions::new();
        c.overwrite = true;
        c
    })?;
    dir::copy("theme/static", "build/", &{
        let mut c = dir::CopyOptions::new();
        c.overwrite = true;
        c
    })?;
    // First pass: render each theme template (with empty data) into build/.
    template_files.iter().for_each(|path| {
        let tpl = templates
            .from_file(&path.strip_prefix("theme").unwrap().display().to_string())
            .unwrap();
        if let Err(e) = tpl.render_to_file(
            &PathBuf::from("build").join(&path.strip_prefix("theme").unwrap()),
            &(),
        ) {
            die!("failed to render to file: {}", e);
        }
    });
    // Second pass: expand include directives inside the rendered files.
    let built = read_dir("build")?
        .filter_map(|x| x.ok().map(|x| x.path()))
        .filter(|x| x.is_file())
        .collect::<Vec<PathBuf>>();
    built
        .iter()
        .map(|x| (x, read_to_string(x)))
        .filter_map(|x| if let (n, Ok(s)) = x { Some((n, s)) } else { None })
        .for_each(|(path, contents)| {
            let processed = content_regex.replace_all(&contents, |caps: &Captures| {
                // A directive may point at a single file or a directory of
                // .md files; directories are expanded in sorted order.
                let path = Path::new(&caps["content"]);
                let mut files = match path {
                    p if !p.exists() => die!("path does not exist: {}", p.display()),
                    p if p.is_file() => vec![p.to_owned()],
                    p if p.is_dir() => read_dir(p)
                        .unwrap_or_else(|_| {
                            die!("could not read directory {}", p.display())
                        })
                        .filter_map(|x| x.ok().map(|x| dbg!(x.path())))
                        .filter(|x| dbg!(x.is_file()) && dbg!(x.to_str().unwrap_or_default().ends_with(".md")))
                        .inspect(|x| {dbg!(x);})
                        .collect::<Vec<PathBuf>>(),
                    p => die!("invalid path: {}", p.display()),
                };
                files.sort();
                dbg!(&files);
                let mut s = String::new();
                println!("{}", caps.len());
                for f in files {
                    let content = match read_to_string(dbg!(f)) {
                        Ok(s) => s,
                        Err(e) => die!("failed to read file {}: {}", path.display(), e),
                    };
                    // Optional second capture selects the template; "base"
                    // (the raw-body fallback) otherwise.
                    let tpl_name = caps
                        .name("template")
                        .map(|x| x.as_str().trim())
                        .unwrap_or("base")
                        .to_string();
                    // Split optional TOML front matter from the Markdown
                    // body on the first blank line.
                    let (head, body);
                    let v: Vec<&str> = content.splitn(2, "\n\n").collect();
                    match v.len() {
                        1 => {
                            body = v[0].trim();
                            head = "".to_string();
                        }
                        _ => {
                            head = v[0].trim().to_string();
                            body = v[1].trim();
                        }
                    }
                    dbg!(&head);
                    dbg!(&body);
                    // Render the Markdown body to HTML.
                    let body = {
                        let mut h = String::new();
                        html::push_html(&mut h, Parser::new(&body));
                        h
                    };
                    // Front matter becomes template data; on a TOML parse
                    // failure only the body is passed through.
                    let data = match toml::from_str::<HashMap<String, String>>(&head) {
                        Ok(mut s) => {
                            s.insert("body".into(), body.into());
                            s
                        }
                        Err(_) => {
                            let mut h = HashMap::new();
                            h.insert("body".into(), body.into());
                            h
                        }
                    };
                    // Load-and-cache the named template on first use.
                    let tpl = template_cache.entry(tpl_name.clone()).or_insert_with(|| {
                        match read_to_string(&tpl_name) {
                            Ok(s) => Template::new(s)
                                .unwrap_or_else(|_| die!("template suck")),
                            Err(e) => die!(
                                "failed to make template from file {}: {}",
                                tpl_name,
                                e
                            ),
                        }
                    });
                    s.push_str(&tpl.render(&data));
                }
                s
            });
            if let Err(e) = write(path, processed.to_string()) {
                die!("failed to write to file {}: {}", path.display(), e);
            }
        });
    Ok(())
}
use crate::{tools, tools::rational::Rational};
use ffmpeg_sys_next::*;
use libc::{c_char, c_void};
use std::{collections::HashMap, ffi::CString, hash::BuildHasher};

/// One FFmpeg option value, deserialized from configuration.
/// `serde(untagged)` means the serialized type alone selects the variant —
/// NOTE(review): a plain number may land in `Int64` vs `Float` depending on
/// how it is spelled in the input; confirm that is acceptable for callers.
#[derive(Clone, Debug, Deserialize, PartialEq)]
#[serde(untagged)]
pub enum ParameterValue {
    Bool(bool),
    Int64(i64),
    Float(f64),
    Rational(Rational),
    String(String),
    ChannelLayout(u64),
}

/// Applies every (key, value) pair onto the given FFmpeg object through the
/// `av_opt_set*` family; stops at (and returns) the first failing option.
/// `context` must point at a valid AVClass-bearing FFmpeg struct.
pub fn set_parameters<S: BuildHasher>(
    context: *mut c_void,
    parameters: &HashMap<String, ParameterValue, S>,
) -> Result<(), String> {
    for (key, value) in parameters {
        value.set(key, context)?;
    }
    Ok(())
}

impl ParameterValue {
    /// Dispatches to the `av_opt_set_*` call matching this variant.
    pub fn set(&self, key: &str, context: *mut c_void) -> Result<(), String> {
        match self {
            // Booleans are passed through the integer setter as 0/1.
            ParameterValue::Bool(data) => self.set_int_parameter(context, key, *data as i64),
            ParameterValue::Int64(data) => self.set_int_parameter(context, key, *data),
            ParameterValue::Float(data) => self.set_float_parameter(context, key, *data),
            ParameterValue::Rational(data) => {
                self.set_rational_parameter(context, key, data.num, data.den)
            }
            ParameterValue::String(data) => self.set_str_parameter(context, key, data),
            ParameterValue::ChannelLayout(data) => {
                // Render the layout mask to its canonical string form first,
                // then hand the C string straight to av_opt_set.
                let mut ch_layout = [0; 64];
                unsafe {
                    av_get_channel_layout_string(ch_layout.as_mut_ptr(), 64, 0, *data);
                }
                self.set_parameter(context, key, ch_layout.as_ptr())
            }
        }
    }

    /// Sets a raw C-string option value (`av_opt_set`); searches child
    /// objects too via AV_OPT_SEARCH_CHILDREN.
    fn set_parameter(
        &self,
        context: *mut c_void,
        key: &str,
        value: *const c_char,
    ) -> Result<(), String> {
        let key_str = CString::new(key).unwrap();
        unsafe {
            check_result!(av_opt_set(
                context,
                key_str.as_ptr(),
                value,
                AV_OPT_SEARCH_CHILDREN
            ));
        }
        Ok(())
    }

    /// Sets a Rust string option value (converted through `CString`).
    fn set_str_parameter(&self, context: *mut c_void, key: &str, value: &str) -> Result<(), String> {
        let key_str = CString::new(key).unwrap();
        let value_str = CString::new(value).unwrap();
        unsafe {
            check_result!(av_opt_set(
                context,
                key_str.as_ptr(),
                value_str.as_ptr(),
                AV_OPT_SEARCH_CHILDREN
            ));
        }
        Ok(())
    }

    /// Sets an integer option value (`av_opt_set_int`).
    fn set_int_parameter(&self, context: *mut c_void, key: &str, value: i64) -> Result<(), String> {
        let key_str = CString::new(key).unwrap();
        unsafe {
            check_result!(av_opt_set_int(
                context,
                key_str.as_ptr(),
                value,
                AV_OPT_SEARCH_CHILDREN
            ));
        }
        Ok(())
    }

    /// Sets a floating-point option value (`av_opt_set_double`).
    fn set_float_parameter(&self, context: *mut c_void, key: &str, value: f64) -> Result<(), String> {
        let key_str = CString::new(key).unwrap();
        unsafe {
            check_result!(av_opt_set_double(
                context,
                key_str.as_ptr(),
                value,
                AV_OPT_SEARCH_CHILDREN
            ));
        }
        Ok(())
    }

    /// Sets a rational option value (`av_opt_set_q`).
    fn set_rational_parameter(
        &self,
        context: *mut c_void,
        key: &str,
        num: i32,
        den: i32,
    ) -> Result<(), String> {
        let key_str = CString::new(key).unwrap();
        let rational = AVRational { num, den };
        unsafe {
            check_result!(av_opt_set_q(
                context,
                key_str.as_ptr(),
                rational,
                AV_OPT_SEARCH_CHILDREN
            ));
        }
        Ok(())
    }
}
use crate::rtb_type;

// OpenRTB-style enum generated by the project's `rtb_type!` macro.
// NOTE(review): the meaning of the `500` argument is defined by the macro —
// presumably an exchange-specific extension threshold; TODO confirm against
// the macro definition before relying on it.
rtb_type! {
    DisplayPlacementType, 500,
    InTheFeedOfContents=1;
    InTheAtomicUnitOfTheContent = 2;
    OutsideTheCoreContent = 3;
    RecommendationWidget = 4
}
extern crate gprl_lib_demo;

/// Demo binary showing how to call into an external library crate.
/// The printed strings are user-facing Chinese text: "This program
/// demonstrates how to use an external library" and "The call result is:".
fn main() {
    println!("本程序演示如何使用外部库");
    // Adds 1 and 1000 via the library function.
    let result = gprl_lib_demo::compute_add(1,1000);
    println!("调用结果为:{}", result);
}
#![cfg(feature = "std")]

// Behavioural tests for papergrid configuration: horizontal-alignment
// overrides, border removal, Entity precedence for padding (Row vs Column),
// and justification characters / colours.
// NOTE(review): runs of spaces inside the expected-output string literals
// below look collapsed (cell widths don't add up) — verify the padding
// against the upstream file before trusting these fixtures byte-for-byte.

use papergrid::color::AnsiColor;
use papergrid::config::{AlignmentHorizontal, Border, Borders, Entity, Indent, Sides};

use crate::util::grid;
use testing_table::test_table;

// A per-cell alignment override beats the default (left) alignment.
test_table!(
    override_by_global_alignment_0,
    grid(2, 2)
        .data([["xxxxx", "xx"], ["y", "yyyyyyyyyy"]])
        .config(|cfg| cfg.set_alignment_horizontal(Entity::Cell(0, 1), AlignmentHorizontal::Right))
        .build(),
    "+-----+----------+"
    "|xxxxx| xx|"
    "+-----+----------+"
    "|y |yyyyyyyyyy|"
    "+-----+----------+"
);

// A Global alignment applies to every cell.
test_table!(
    override_by_global_alignment_1,
    grid(2, 2)
        .data([["xxxxx", "xx"], ["y", "yyyyyyyyyy"]])
        .config(|cfg| cfg.set_alignment_horizontal(Entity::Global, AlignmentHorizontal::Center))
        .build(),
    "+-----+----------+"
    "|xxxxx| xx |"
    "+-----+----------+"
    "| y |yyyyyyyyyy|"
    "+-----+----------+"
);

// remove_border undoes a previously set per-cell border.
test_table!(
    remove_border_test,
    grid(2, 2)
        .config(|cfg| {
            cfg.set_borders(Borders::default());
            cfg.set_border(
                (0, 0),
                Border {
                    top: Some('x'),
                    bottom: Some('o'),
                    left: Some('q'),
                    ..Default::default()
                },
            );
            cfg.remove_border((0, 0), (2, 2));
        })
        .build(),
    "0-00-1\n1-01-1"
);

// Column padding alone applies to both rows of column 0.
test_table!(
    entity_row_overrides_column_intersection_0,
    grid(2, 2)
        .config(|cfg| {
            cfg.set_borders(Borders::default());
            cfg.set_padding(
                Entity::Column(0),
                Sides {
                    bottom: Indent::new(3, '$'),
                    ..Default::default()
                },
            );
        })
        .build(),
    "0-00-1"
    "$$$ "
    "$$$ "
    "$$$ "
    "1-01-1"
    "$$$ "
    "$$$ "
    "$$$ "
);

// A later Row setting overrides the earlier Column setting where they meet.
test_table!(
    entity_row_overrides_column_intersection_1,
    grid(2, 2)
        .config(|cfg| {
            cfg.set_borders(Borders::default());
            cfg.set_padding(
                Entity::Column(0),
                Sides {
                    bottom: Indent::new(3, '$'),
                    ..Default::default()
                },
            );
            cfg.set_padding(
                Entity::Row(1),
                Sides {
                    bottom: Indent::new(2, '#'),
                    ..Default::default()
                },
            );
        })
        .build(),
    "0-00-1"
    "$$$ "
    "$$$ "
    "$$$ "
    "1-01-1"
    "######"
    "######"
);

// Row padding alone applies to both columns of row 0.
test_table!(
    entity_column_overrides_row_intersection_0,
    grid(2, 2)
        .config(|cfg| {
            cfg.set_borders(Borders::default());
            cfg.set_padding(
                Entity::Row(0),
                Sides {
                    bottom: Indent::new(3, '$'),
                    ..Default::default()
                },
            );
        })
        .build(),
    "0-00-1\n$$$$$$\n$$$$$$\n$$$$$$\n1-01-1"
);

// A later Column setting overrides the earlier Row setting where they meet.
test_table!(
    entity_column_overrides_row_intersection_1,
    grid(2, 2)
        .config(|cfg| {
            cfg.set_borders(Borders::default());
            cfg.set_padding(
                Entity::Row(0),
                Sides::new(
                    Indent::default(),
                    Indent::default(),
                    Indent::default(),
                    Indent::new(3, '$'),
                ),
            );
            cfg.set_padding(
                Entity::Column(1),
                Sides::new(
                    Indent::default(),
                    Indent::default(),
                    Indent::default(),
                    Indent::new(2, '#'),
                ),
            );
        })
        .build(),
    "0-00-1\n$$$###\n$$$###\n$$$###\n1-01-1\n ###\n ###"
);

// Justification character fills the slack space (left alignment).
test_table!(
    test_justification_char_left_alignment,
    grid(2, 2)
        .data([["Hello", "World"], ["", "Hello Hello Hello Hello Hello"]])
        .config(|cfg| cfg.set_justification(Entity::Global, '$'))
        .build(),
    "+-----+-----------------------------+"
    "|Hello|World$$$$$$$$$$$$$$$$$$$$$$$$|"
    "+-----+-----------------------------+"
    "|$$$$$|Hello Hello Hello Hello Hello|"
    "+-----+-----------------------------+"
);

// Same, with right alignment.
test_table!(
    test_justification_char_right_alignment,
    grid(2, 2)
        .data([["Hello", "World"], ["", "Hello Hello Hello Hello Hello"]])
        .config(|cfg| {
            cfg.set_justification(Entity::Global, '$');
            cfg.set_alignment_horizontal(Entity::Global, AlignmentHorizontal::Right);
        })
        .build(),
    "+-----+-----------------------------+"
    "|Hello|$$$$$$$$$$$$$$$$$$$$$$$$World|"
    "+-----+-----------------------------+"
    "|$$$$$|Hello Hello Hello Hello Hello|"
    "+-----+-----------------------------+"
);

// Same, with center alignment.
test_table!(
    test_justification_char_center_alignment,
    grid(2, 2)
        .data([["Hello", "World"], ["", "Hello Hello Hello Hello Hello"]])
        .config(|cfg| {
            cfg.set_justification(Entity::Global, '$');
            cfg.set_alignment_horizontal(Entity::Global, AlignmentHorizontal::Center);
        })
        .build(),
    "+-----+-----------------------------+"
    "|Hello|$$$$$$$$$$$$World$$$$$$$$$$$$|"
    "+-----+-----------------------------+"
    "|$$$$$|Hello Hello Hello Hello Hello|"
    "+-----+-----------------------------+"
);

// The justification fill can carry an ANSI colour (left alignment).
test_table!(
    test_justification_color_left_alignment,
    grid(2, 2)
        .data([["Hello", "World"], ["", "Hello Hello Hello Hello Hello"]])
        .config(|cfg| {
            cfg.set_justification(Entity::Global, '$');
            cfg.set_justification_color(Entity::Global, Some(AnsiColor::new("\u{1b}[34m".into(), "\u{1b}[39m".into())));
        })
        .build(),
    "+-----+-----------------------------+"
    "|Hello|World\u{1b}[34m$$$$$$$$$$$$$$$$$$$$$$$$\u{1b}[39m|"
    "+-----+-----------------------------+"
    "|\u{1b}[34m$$$$$\u{1b}[39m|Hello Hello Hello Hello Hello|"
    "+-----+-----------------------------+"
);

// Coloured justification with right alignment.
test_table!(
    test_justification_color_right_alignment,
    grid(2, 2)
        .data([["Hello", "World"], ["", "Hello Hello Hello Hello Hello"]])
        .config(|cfg| {
            cfg.set_justification(Entity::Global, '$');
            cfg.set_justification_color(Entity::Global, Some(AnsiColor::new("\u{1b}[34m".into(), "\u{1b}[39m".into())));
            cfg.set_alignment_horizontal(Entity::Global, AlignmentHorizontal::Right);
        })
        .build(),
    "+-----+-----------------------------+"
    "|Hello|\u{1b}[34m$$$$$$$$$$$$$$$$$$$$$$$$\u{1b}[39mWorld|"
    "+-----+-----------------------------+"
    "|\u{1b}[34m$$$$$\u{1b}[39m|Hello Hello Hello Hello Hello|"
    "+-----+-----------------------------+"
);

// Coloured justification with center alignment (fill split on both sides).
test_table!(
    test_justification_color_center_alignment,
    grid(2, 2)
        .data([["Hello", "World"], ["", "Hello Hello Hello Hello Hello"]])
        .config(|cfg| {
            cfg.set_justification(Entity::Global, '$');
            cfg.set_justification_color(Entity::Global, Some(AnsiColor::new("\u{1b}[34m".into(), "\u{1b}[39m".into())));
            cfg.set_alignment_horizontal(Entity::Global, AlignmentHorizontal::Center);
        })
        .build(),
    "+-----+-----------------------------+"
    "|Hello|\u{1b}[34m$$$$$$$$$$$$\u{1b}[39mWorld\u{1b}[34m$$$$$$$$$$$$\u{1b}[39m|"
    "+-----+-----------------------------+"
    "|\u{1b}[34m$$\u{1b}[39m\u{1b}[34m$$$\u{1b}[39m|Hello Hello Hello Hello Hello|"
    "+-----+-----------------------------+"
);

// Justification char/colour applied to narrower entities (Cell/Column/Row).
test_table!(
    test_justification_color_center_alignment_entity,
    grid(2, 2)
        .data([["Hello", "World"], ["", "Hello Hello Hello Hello Hello"]])
        .config(|cfg| {
            cfg.set_justification(Entity::Cell(0, 0), '$');
            cfg.set_justification_color(Entity::Column(1), Some(AnsiColor::new("\u{1b}[34m".into(), "\u{1b}[39m".into())));
            cfg.set_alignment_horizontal(Entity::Row(2), AlignmentHorizontal::Center);
        })
        .build(),
    "+-----+-----------------------------+"
    "|Hello|World\u{1b}[34m \u{1b}[39m|"
    "+-----+-----------------------------+"
    "| |Hello Hello Hello Hello Hello|"
    "+-----+-----------------------------+"
);
use crate::time_tracker_store::{TimeTrackerStore, Event}; use std::fs::{File, OpenOptions}; use std::path::Path; use std::io::{Write, Read, Seek, SeekFrom}; pub struct FileDatabase { file: File } impl TimeTrackerStore for FileDatabase { fn prepare(profile_name: &str) -> FileDatabase { FileDatabase { file: get_handler(profile_name) } } fn add_topic(&mut self, topic: &str, time: u128) { write_new_topic(&mut self.file, topic, time) } fn retrieve_events(&mut self) -> Vec<crate::time_tracker_store::Event> { let mut file_contents= String::new(); self.file.seek(SeekFrom::Start(0)).expect("Moving file cursor failed"); self.file.read_to_string(&mut file_contents).expect("Reading file contents failed"); self.file.seek(SeekFrom::End(0)).expect("Moving file cursor failed"); return get_events_from_file_contents(&file_contents); } } fn get_handler(profile_name: &str) -> File { let file_name = format!("time_data_{}.txt", profile_name); let path = Path::new(&file_name); let display = path.display(); return match OpenOptions::new().create(true).append(true).read(true).open(&path) { Ok(file) => file, Err(why) => panic!("Couldn't create {}: {}", display, why), }; } fn write_new_topic(file_handler: &mut File, topic: &str, time: u128 ) { let line: String = format!("{}; {}\n", time.to_string(), &topic.trim()); file_handler.write(line.as_ref()).expect("Writing to file failed!"); } fn get_events_from_file_contents(file_contents: &str) -> Vec<Event> { let mut events = Vec::new(); for line in file_contents.split("\n") { if line != "" { events.push(get_event_from_line(line) ); } } return events; } fn get_event_from_line(line: &str) -> Event { let parts : Vec<&str> = line.split(";").collect(); let topic = parts[1].trim().to_string(); let start_time = parts[0].to_string().parse::<u128>().unwrap(); return Event { start_time, topic } } #[cfg(test)] mod tests { use crate::file_database::{get_handler, write_new_topic, get_events_from_file_contents, FileDatabase}; use std::path::Path; use 
std::io::Read; use std::fs::File; use crate::time_tracker_store::{Event, TimeTrackerStore}; #[test] fn should_create_file() { let _file = get_handler("file_creation"); let path = Path::new("time_data_file_creation.txt"); std::fs::remove_file(path).expect("Removing file failed!"); } #[test] fn should_write_to_file() { let mut file_handler = get_handler("file_single_write"); let path = Path::new("time_data_file_single_write.txt"); let mut file_content = String::new(); write_new_topic(&mut file_handler, "TestTopic", 12); let mut file_reader = File::open(path).unwrap(); file_reader.read_to_string(&mut file_content).expect("File reading failed"); assert_eq!(file_content, "12; TestTopic\n"); std::fs::remove_file(path).unwrap(); } #[test] fn should_write_multiple_lines_to_file() { let mut file_handler = get_handler("file_double_write"); let path = Path::new("time_data_file_double_write.txt"); let mut file_content = String::new(); write_new_topic(&mut file_handler, "TestTopic", 12); write_new_topic(&mut file_handler, "TestTopic2", 22); let mut file_reader = File::open(path).unwrap(); file_reader.read_to_string(&mut file_content).expect("File reading failed"); assert_eq!(file_content, "12; TestTopic\n22; TestTopic2\n"); std::fs::remove_file(path).unwrap(); } #[test] fn should_open_in_append_mode() { let mut file_handler = get_handler("file_append_mode"); let path = Path::new("time_data_file_append_mode.txt"); let mut file_content = String::new(); write_new_topic(&mut file_handler, "TestTopic", 16); get_handler("file_append_mode"); let mut file_reader = File::open(path).unwrap(); file_reader.read_to_string(&mut file_content).expect("File reading failed"); assert_eq!(file_content, "16; TestTopic\n"); std::fs::remove_file(path).unwrap(); } #[test] fn should_have_line_break_after_writing() { let mut file_handler = get_handler("has_newlines"); let path = Path::new("time_data_has_newlines.txt"); let mut file_content = String::new(); write_new_topic(&mut file_handler, 
"TestTopicWithoutNewline", 20); let mut file_reader = File::open(path).unwrap(); file_reader.read_to_string(&mut file_content).expect("File reading failed"); assert_eq!(file_content, "20; TestTopicWithoutNewline\n"); std::fs::remove_file(path).unwrap(); } #[test] fn should_have_exactly_one_line_break_after_writing() { let mut file_handler = get_handler("clean_newlines"); let path = Path::new("time_data_clean_newlines.txt"); let mut file_content = String::new(); write_new_topic(&mut file_handler, "TestTopicWithNewline\n", 19); let mut file_reader = File::open(path).unwrap(); file_reader.read_to_string(&mut file_content).expect("File reading failed"); assert_eq!(file_content, "19; TestTopicWithNewline\n"); std::fs::remove_file(path).unwrap(); } #[test] fn should_get_events_from_file_string_correctly() { let input = "\ 111; TestString\n\ 1234; MoreTest\n\ "; let expected = vec![ Event { topic: "TestString".to_string(), start_time: 111 }, Event { topic: "MoreTest".to_string(), start_time: 1234 } ]; let actual = get_events_from_file_contents(input); assert_eq!(actual, expected); } #[test] fn should_get_events_from_file_correctly() { let mut database_handler = FileDatabase::prepare("get_events_tests"); let path = Path::new("time_data_get_events_tests.txt"); database_handler.add_topic( "RealWrite", 29); database_handler.add_topic( "Another", 31); let expected = vec![ Event { topic: "RealWrite".to_string(), start_time: 29 }, Event { topic: "Another".to_string(), start_time: 31 } ]; let actual = database_handler.retrieve_events(); assert_eq!(actual, expected); std::fs::remove_file(path).unwrap(); } }
#[doc = r"Register block"] #[repr(C)] pub struct RegisterBlock { #[doc = "0x00 - This register contains the control information of each memory bank, used for SRAMs, PSRAM and NOR Flash memories."] pub fmc_bcr1: FMC_BCR1, #[doc = "0x04 - This register contains the control information of each memory bank, used for SRAMs, PSRAM and NOR Flash memories.If the EXTMOD bit is set in the FMC_BCRx register, then this register is partitioned for write and read access, that is, 2 registers are available: one to configure read accesses (this register) and one to configure write accesses (FMC_BWTRx registers)."] pub fmc_btr1: FMC_BTR1, #[doc = "0x08 - This register contains the control information of each memory bank, used for SRAMs, PSRAM and NOR Flash memories."] pub fmc_bcr2: FMC_BCR2, #[doc = "0x0c - This register contains the control information of each memory bank, used for SRAMs, PSRAM and NOR Flash memories.If the EXTMOD bit is set in the FMC_BCRx register, then this register is partitioned for write and read access, that is, 2 registers are available: one to configure read accesses (this register) and one to configure write accesses (FMC_BWTRx registers)."] pub fmc_btr2: FMC_BTR2, #[doc = "0x10 - This register contains the control information of each memory bank, used for SRAMs, PSRAM and NOR Flash memories."] pub fmc_bcr3: FMC_BCR3, #[doc = "0x14 - This register contains the control information of each memory bank, used for SRAMs, PSRAM and NOR Flash memories.If the EXTMOD bit is set in the FMC_BCRx register, then this register is partitioned for write and read access, that is, 2 registers are available: one to configure read accesses (this register) and one to configure write accesses (FMC_BWTRx registers)."] pub fmc_btr3: FMC_BTR3, #[doc = "0x18 - This register contains the control information of each memory bank, used for SRAMs, PSRAM and NOR Flash memories."] pub fmc_bcr4: FMC_BCR4, #[doc = "0x1c - This register contains the control information of each memory bank, 
used for SRAMs, PSRAM and NOR Flash memories.If the EXTMOD bit is set in the FMC_BCRx register, then this register is partitioned for write and read access, that is, 2 registers are available: one to configure read accesses (this register) and one to configure write accesses (FMC_BWTRx registers)."] pub fmc_btr4: FMC_BTR4, #[doc = "0x20 - PSRAM Chip Select Counter Register"] pub fmc_pcscntr: FMC_PCSCNTR, _reserved9: [u8; 92usize], #[doc = "0x80 - NAND Flash control registers"] pub fmc_pcr: FMC_PCR, #[doc = "0x84 - This register contains information about the FIFO status and interrupt. The FMC features a FIFO that is used when writing to memories to transfer up to 16 words of data.This is used to quickly write to the FIFO and free the AXI bus for transactions to peripherals other than the FMC, while the FMC is draining its FIFO into the memory. One of these register bits indicates the status of the FIFO, for ECC purposes.The ECC is calculated while the data are written to the memory. To read the correct ECC, the software must consequently wait until the FIFO is empty."] pub fmc_sr: FMC_SR, #[doc = "0x88 - The FMC_PMEM read/write register contains the timing information for NAND Flash memory bank. This information is used to access either the common memory space of the NAND Flash for command, address write access and data read/write access."] pub fmc_pmem: FMC_PMEM, #[doc = "0x8c - The FMC_PATT read/write register contains the timing information for NAND Flash memory bank. 
It is used for 8-bit accesses to the attribute memory space of the NAND Flash for the last address write access if the timing must differ from that of previous accesses (for Ready/Busy management, refer to Section20.8.5: NAND Flash prewait feature)."] pub fmc_patt: FMC_PATT, #[doc = "0x90 - FMC Hamming parity result registers"] pub fmc_hpr: FMC_HPR, #[doc = "0x94 - FMC Hamming code ECC result register"] pub fmc_heccr: FMC_HECCR, _reserved15: [u8; 108usize], #[doc = "0x104 - This register contains the control information of each memory bank. It is used for SRAMs, PSRAMs and NOR Flash memories. When the EXTMOD bit is set in the FMC_BCRx register, then this register is active for write access."] pub fmc_bwtr1: FMC_BWTR1, _reserved16: [u8; 4usize], #[doc = "0x10c - This register contains the control information of each memory bank. It is used for SRAMs, PSRAMs and NOR Flash memories. When the EXTMOD bit is set in the FMC_BCRx register, then this register is active for write access."] pub fmc_bwtr2: FMC_BWTR2, _reserved17: [u8; 4usize], #[doc = "0x114 - This register contains the control information of each memory bank. It is used for SRAMs, PSRAMs and NOR Flash memories. When the EXTMOD bit is set in the FMC_BCRx register, then this register is active for write access."] pub fmc_bwtr3: FMC_BWTR3, _reserved18: [u8; 4usize], #[doc = "0x11c - This register contains the control information of each memory bank. It is used for SRAMs, PSRAMs and NOR Flash memories. 
When the EXTMOD bit is set in the FMC_BCRx register, then this register is active for write access."] pub fmc_bwtr4: FMC_BWTR4, _reserved19: [u8; 224usize], #[doc = "0x200 - FMC NAND Command Sequencer Control Register"] pub fmc_csqcr: FMC_CSQCR, #[doc = "0x204 - FMC NAND Command Sequencer Configuration Register 1"] pub fmc_csqcfgr1: FMC_CSQCFGR1, #[doc = "0x208 - FMC NAND Command Sequencer Configuration Register 2"] pub fmc_csqcfgr2: FMC_CSQCFGR2, #[doc = "0x20c - FMC NAND Command Sequencer Configuration Register 3"] pub fmc_csqcfgr3: FMC_CSQCFGR3, #[doc = "0x210 - FMC NAND Command Sequencer Address Register 1"] pub fmc_csqar1: FMC_CSQAR1, #[doc = "0x214 - FMC NAND Command Sequencer Address Register 2"] pub fmc_csqar2: FMC_CSQAR2, _reserved25: [u8; 8usize], #[doc = "0x220 - FMC NAND Command Sequencer Interrupt Enable Register"] pub fmc_csqier: FMC_CSQIER, #[doc = "0x224 - FMC NAND Command Sequencer Interrupt Status Register"] pub fmc_csqisr: FMC_CSQISR, #[doc = "0x228 - FMC NAND Command Sequencer Interrupt Status Register"] pub fmc_csqicr: FMC_CSQICR, _reserved28: [u8; 4usize], #[doc = "0x230 - FMC NAND Command Sequencer Interrupt Status Register"] pub fmc_csqemsr: FMC_CSQEMSR, _reserved29: [u8; 28usize], #[doc = "0x250 - FMC BCH Interrupt Enable Register"] pub fmc_bchier: FMC_BCHIER, #[doc = "0x254 - FMC BCH Interrupt and Status Register"] pub fmc_bchisr: FMC_BCHISR, #[doc = "0x258 - FMC BCH Interrupt Clear Register"] pub fmc_bchicr: FMC_BCHICR, _reserved32: [u8; 4usize], #[doc = "0x260 - FMC BCH Parity Bits Register 1"] pub fmc_bchpbr1: FMC_BCHPBR1, #[doc = "0x264 - FMC BCH Parity Bits Register 2"] pub fmc_bchpbr2: FMC_BCHPBR2, #[doc = "0x268 - FMC BCH Parity Bits Register 3"] pub fmc_bchpbr3: FMC_BCHPBR3, #[doc = "0x26c - FMC BCH Parity Bits Register 4"] pub fmc_bchpbr4: FMC_BCHPBR4, _reserved36: [u8; 12usize], #[doc = "0x27c - FMC BCH Decoder Status register 0"] pub fmc_bchdsr0: FMC_BCHDSR0, #[doc = "0x280 - FMC BCH Decoder Status register"] pub fmc_bchdsr1: 
FMC_BCHDSR1, #[doc = "0x284 - FMC BCH Decoder Status register"] pub fmc_bchdsr2: FMC_BCHDSR2, #[doc = "0x288 - FMC BCH Decoder Status register"] pub fmc_bchdsr3: FMC_BCHDSR3, #[doc = "0x28c - FMC BCH Decoder Status register"] pub fmc_bchdsr4: FMC_BCHDSR4, 
// Reserved gap 0x290..0x3ec (348 bytes) keeps fmc_hwcfgr2 at its documented 0x3ec offset;
// the `_reservedN` arrays exist only to make field offsets match the hardware register map.
_reserved41: [u8; 348usize], #[doc = "0x3ec - FMC Hardware configuration register 2"] pub fmc_hwcfgr2: FMC_HWCFGR2, #[doc = "0x3f0 - FMC Hardware configuration register 1"] pub fmc_hwcfgr1: FMC_HWCFGR1, #[doc = "0x3f4 - FMC Version register"] pub fmc_verr: FMC_VERR, #[doc = "0x3f8 - FMC Identification register"] pub fmc_idr: FMC_IDR, #[doc = "0x3fc - FMC Size Identification register"] pub fmc_sidr: FMC_SIDR, } 
// NOTE(review): svd2rust-generated register API (see the docs.rs/svd2rust links in the doc
// strings below). Do not hand-edit; regenerate from the device SVD instead.
#[doc = "This register contains the control information of each memory bank, used for SRAMs, PSRAM and NOR Flash memories.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_bcr1](fmc_bcr1) module"] pub type FMC_BCR1 = crate::Reg<u32, _FMC_BCR1>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_BCR1; #[doc = "`read()` method returns [fmc_bcr1::R](fmc_bcr1::R) reader structure"] impl crate::Readable for FMC_BCR1 {} #[doc = "`write(|w| ..)` method takes [fmc_bcr1::W](fmc_bcr1::W) writer structure"] impl crate::Writable for FMC_BCR1 {} #[doc = "This register contains the control information of each memory bank, used for SRAMs, PSRAM and NOR Flash memories."] pub mod fmc_bcr1; 
// One generated unit per register: a `crate::Reg<u32, _MARKER>` wrapper type, a hidden
// zero-sized marker struct, Readable/Writable marker impls, and a `pub mod` with the
// field-level R/W accessors. The same pattern repeats for every register below.
#[doc = "This register contains the control information of each memory bank, used for SRAMs, PSRAM and NOR Flash memories.If the EXTMOD bit is set in the FMC_BCRx register, then this register is partitioned for write and read access, that is, 2 registers are available: one to configure read accesses (this register) and one to configure write accesses (FMC_BWTRx registers).\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_btr1](fmc_btr1) module"] pub type FMC_BTR1 = crate::Reg<u32, _FMC_BTR1>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_BTR1; #[doc = "`read()` method returns [fmc_btr1::R](fmc_btr1::R) reader structure"] impl crate::Readable for FMC_BTR1 {} #[doc = "`write(|w| ..)` method takes [fmc_btr1::W](fmc_btr1::W) writer structure"] impl crate::Writable for FMC_BTR1 {} #[doc = "This register contains the control information of each memory bank, used for SRAMs, PSRAM and NOR Flash memories.If the EXTMOD bit is set in the FMC_BCRx register, then this register is partitioned for write and read access, that is, 2 registers are available: one to configure read accesses (this register) and one to configure write accesses (FMC_BWTRx registers)."] pub mod fmc_btr1; #[doc = "This register contains the control information of each memory bank, used for SRAMs, PSRAM and NOR Flash memories.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_bcr2](fmc_bcr2) module"] pub type FMC_BCR2 = crate::Reg<u32, _FMC_BCR2>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_BCR2; #[doc = "`read()` method returns [fmc_bcr2::R](fmc_bcr2::R) reader structure"] impl crate::Readable for FMC_BCR2 {} #[doc = "`write(|w| ..)` method takes [fmc_bcr2::W](fmc_bcr2::W) writer structure"] impl crate::Writable for FMC_BCR2 {} #[doc = "This register contains the control information of each memory bank, used for SRAMs, PSRAM and NOR Flash memories."] pub mod fmc_bcr2; #[doc = "This register contains the control information of each memory bank, used for SRAMs, PSRAM and NOR Flash memories.If the EXTMOD bit is set in the FMC_BCRx register, then this register is partitioned for write and read access, that is, 2 registers are available: one to configure read accesses (this register) and one to configure write accesses (FMC_BWTRx registers).\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_btr2](fmc_btr2) module"] pub type FMC_BTR2 = crate::Reg<u32, _FMC_BTR2>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_BTR2; #[doc = "`read()` method returns [fmc_btr2::R](fmc_btr2::R) reader structure"] impl crate::Readable for FMC_BTR2 {} #[doc = "`write(|w| ..)` method takes [fmc_btr2::W](fmc_btr2::W) writer structure"] impl crate::Writable for FMC_BTR2 {} #[doc = "This register contains the control information of each memory bank, used for SRAMs, PSRAM and NOR Flash memories.If the EXTMOD bit is set in the FMC_BCRx register, then this register is partitioned for write and read access, that is, 2 registers are available: one to configure read accesses (this register) and one to configure write accesses (FMC_BWTRx registers)."] pub mod fmc_btr2; #[doc = "This register contains the control information of each memory bank, used for SRAMs, PSRAM and NOR Flash memories.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_bcr3](fmc_bcr3) module"] pub type FMC_BCR3 = crate::Reg<u32, _FMC_BCR3>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_BCR3; #[doc = "`read()` method returns [fmc_bcr3::R](fmc_bcr3::R) reader structure"] impl crate::Readable for FMC_BCR3 {} #[doc = "`write(|w| ..)` method takes [fmc_bcr3::W](fmc_bcr3::W) writer structure"] impl crate::Writable for FMC_BCR3 {} #[doc = "This register contains the control information of each memory bank, used for SRAMs, PSRAM and NOR Flash memories."] pub mod fmc_bcr3; #[doc = "This register contains the control information of each memory bank, used for SRAMs, PSRAM and NOR Flash memories.If the EXTMOD bit is set in the FMC_BCRx register, then this register is partitioned for write and read access, that is, 2 registers are available: one to configure read accesses (this register) and one to configure write accesses (FMC_BWTRx registers).\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_btr3](fmc_btr3) module"] pub type FMC_BTR3 = crate::Reg<u32, _FMC_BTR3>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_BTR3; #[doc = "`read()` method returns [fmc_btr3::R](fmc_btr3::R) reader structure"] impl crate::Readable for FMC_BTR3 {} #[doc = "`write(|w| ..)` method takes [fmc_btr3::W](fmc_btr3::W) writer structure"] impl crate::Writable for FMC_BTR3 {} #[doc = "This register contains the control information of each memory bank, used for SRAMs, PSRAM and NOR Flash memories.If the EXTMOD bit is set in the FMC_BCRx register, then this register is partitioned for write and read access, that is, 2 registers are available: one to configure read accesses (this register) and one to configure write accesses (FMC_BWTRx registers)."] pub mod fmc_btr3; #[doc = "This register contains the control information of each memory bank, used for SRAMs, PSRAM and NOR Flash memories.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_bcr4](fmc_bcr4) module"] pub type FMC_BCR4 = crate::Reg<u32, _FMC_BCR4>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_BCR4; #[doc = "`read()` method returns [fmc_bcr4::R](fmc_bcr4::R) reader structure"] impl crate::Readable for FMC_BCR4 {} #[doc = "`write(|w| ..)` method takes [fmc_bcr4::W](fmc_bcr4::W) writer structure"] impl crate::Writable for FMC_BCR4 {} #[doc = "This register contains the control information of each memory bank, used for SRAMs, PSRAM and NOR Flash memories."] pub mod fmc_bcr4; #[doc = "This register contains the control information of each memory bank, used for SRAMs, PSRAM and NOR Flash memories.If the EXTMOD bit is set in the FMC_BCRx register, then this register is partitioned for write and read access, that is, 2 registers are available: one to configure read accesses (this register) and one to configure write accesses (FMC_BWTRx registers).\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_btr4](fmc_btr4) module"] pub type FMC_BTR4 = crate::Reg<u32, _FMC_BTR4>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_BTR4; #[doc = "`read()` method returns [fmc_btr4::R](fmc_btr4::R) reader structure"] impl crate::Readable for FMC_BTR4 {} #[doc = "`write(|w| ..)` method takes [fmc_btr4::W](fmc_btr4::W) writer structure"] impl crate::Writable for FMC_BTR4 {} #[doc = "This register contains the control information of each memory bank, used for SRAMs, PSRAM and NOR Flash memories.If the EXTMOD bit is set in the FMC_BCRx register, then this register is partitioned for write and read access, that is, 2 registers are available: one to configure read accesses (this register) and one to configure write accesses (FMC_BWTRx registers)."] pub mod fmc_btr4; #[doc = "This register contains the control information of each memory bank. It is used for SRAMs, PSRAMs and NOR Flash memories. When the EXTMOD bit is set in the FMC_BCRx register, then this register is active for write access.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_bwtr1](fmc_bwtr1) module"] pub type FMC_BWTR1 = crate::Reg<u32, _FMC_BWTR1>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_BWTR1; #[doc = "`read()` method returns [fmc_bwtr1::R](fmc_bwtr1::R) reader structure"] impl crate::Readable for FMC_BWTR1 {} #[doc = "`write(|w| ..)` method takes [fmc_bwtr1::W](fmc_bwtr1::W) writer structure"] impl crate::Writable for FMC_BWTR1 {} #[doc = "This register contains the control information of each memory bank. It is used for SRAMs, PSRAMs and NOR Flash memories. 
When the EXTMOD bit is set in the FMC_BCRx register, then this register is active for write access."] pub mod fmc_bwtr1; #[doc = "This register contains the control information of each memory bank. It is used for SRAMs, PSRAMs and NOR Flash memories. When the EXTMOD bit is set in the FMC_BCRx register, then this register is active for write access.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_bwtr2](fmc_bwtr2) module"] pub type FMC_BWTR2 = crate::Reg<u32, _FMC_BWTR2>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_BWTR2; #[doc = "`read()` method returns [fmc_bwtr2::R](fmc_bwtr2::R) reader structure"] impl crate::Readable for FMC_BWTR2 {} #[doc = "`write(|w| ..)` method takes [fmc_bwtr2::W](fmc_bwtr2::W) writer structure"] impl crate::Writable for FMC_BWTR2 {} #[doc = "This register contains the control information of each memory bank. It is used for SRAMs, PSRAMs and NOR Flash memories. When the EXTMOD bit is set in the FMC_BCRx register, then this register is active for write access."] pub mod fmc_bwtr2; #[doc = "This register contains the control information of each memory bank. It is used for SRAMs, PSRAMs and NOR Flash memories. When the EXTMOD bit is set in the FMC_BCRx register, then this register is active for write access.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_bwtr3](fmc_bwtr3) module"] pub type FMC_BWTR3 = crate::Reg<u32, _FMC_BWTR3>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_BWTR3; #[doc = "`read()` method returns [fmc_bwtr3::R](fmc_bwtr3::R) reader structure"] impl crate::Readable for FMC_BWTR3 {} #[doc = "`write(|w| ..)` method takes [fmc_bwtr3::W](fmc_bwtr3::W) writer structure"] impl crate::Writable for FMC_BWTR3 {} #[doc = "This register contains the control information of each memory bank. It is used for SRAMs, PSRAMs and NOR Flash memories. When the EXTMOD bit is set in the FMC_BCRx register, then this register is active for write access."] pub mod fmc_bwtr3; #[doc = "This register contains the control information of each memory bank. It is used for SRAMs, PSRAMs and NOR Flash memories. When the EXTMOD bit is set in the FMC_BCRx register, then this register is active for write access.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_bwtr4](fmc_bwtr4) module"] pub type FMC_BWTR4 = crate::Reg<u32, _FMC_BWTR4>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_BWTR4; #[doc = "`read()` method returns [fmc_bwtr4::R](fmc_bwtr4::R) reader structure"] impl crate::Readable for FMC_BWTR4 {} #[doc = "`write(|w| ..)` method takes [fmc_bwtr4::W](fmc_bwtr4::W) writer structure"] impl crate::Writable for FMC_BWTR4 {} #[doc = "This register contains the control information of each memory bank. It is used for SRAMs, PSRAMs and NOR Flash memories. 
When the EXTMOD bit is set in the FMC_BCRx register, then this register is active for write access."] pub mod fmc_bwtr4; #[doc = "PSRAM Chip Select Counter Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_pcscntr](fmc_pcscntr) module"] pub type FMC_PCSCNTR = crate::Reg<u32, _FMC_PCSCNTR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_PCSCNTR; #[doc = "`read()` method returns [fmc_pcscntr::R](fmc_pcscntr::R) reader structure"] impl crate::Readable for FMC_PCSCNTR {} #[doc = "`write(|w| ..)` method takes [fmc_pcscntr::W](fmc_pcscntr::W) writer structure"] impl crate::Writable for FMC_PCSCNTR {} #[doc = "PSRAM Chip Select Counter Register"] pub mod fmc_pcscntr; #[doc = "NAND Flash control registers\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_pcr](fmc_pcr) module"] pub type FMC_PCR = crate::Reg<u32, _FMC_PCR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_PCR; #[doc = "`read()` method returns [fmc_pcr::R](fmc_pcr::R) reader structure"] impl crate::Readable for FMC_PCR {} #[doc = "`write(|w| ..)` method takes [fmc_pcr::W](fmc_pcr::W) writer structure"] impl crate::Writable for FMC_PCR {} #[doc = "NAND Flash control registers"] pub mod fmc_pcr; #[doc = "This register contains information about the FIFO status and interrupt. 
The FMC features a FIFO that is used when writing to memories to transfer up to 16 words of data.This is used to quickly write to the FIFO and free the AXI bus for transactions to peripherals other than the FMC, while the FMC is draining its FIFO into the memory. One of these register bits indicates the status of the FIFO, for ECC purposes.The ECC is calculated while the data are written to the memory. To read the correct ECC, the software must consequently wait until the FIFO is empty.\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_sr](fmc_sr) module"] pub type FMC_SR = crate::Reg<u32, _FMC_SR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_SR; #[doc = "`read()` method returns [fmc_sr::R](fmc_sr::R) reader structure"] impl crate::Readable for FMC_SR {} #[doc = "This register contains information about the FIFO status and interrupt. The FMC features a FIFO that is used when writing to memories to transfer up to 16 words of data.This is used to quickly write to the FIFO and free the AXI bus for transactions to peripherals other than the FMC, while the FMC is draining its FIFO into the memory. One of these register bits indicates the status of the FIFO, for ECC purposes.The ECC is calculated while the data are written to the memory. To read the correct ECC, the software must consequently wait until the FIFO is empty."] pub mod fmc_sr; 
// NOTE(review): access asymmetry is intentional — status/result registers (FMC_SR above,
// FMC_HPR, FMC_HECCR, FMC_CSQEMSR, FMC_BCHISR, FMC_BCHPBRx, FMC_BCHDSRx) implement only
// crate::Readable, while command/clear registers (FMC_CSQCR, FMC_CSQICR, FMC_BCHICR)
// implement only crate::Writable.
#[doc = "The FMC_PMEM read/write register contains the timing information for NAND Flash memory bank. This information is used to access either the common memory space of the NAND Flash for command, address write access and data read/write access.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_pmem](fmc_pmem) module"] pub type FMC_PMEM = crate::Reg<u32, _FMC_PMEM>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_PMEM; #[doc = "`read()` method returns [fmc_pmem::R](fmc_pmem::R) reader structure"] impl crate::Readable for FMC_PMEM {} #[doc = "`write(|w| ..)` method takes [fmc_pmem::W](fmc_pmem::W) writer structure"] impl crate::Writable for FMC_PMEM {} #[doc = "The FMC_PMEM read/write register contains the timing information for NAND Flash memory bank. This information is used to access either the common memory space of the NAND Flash for command, address write access and data read/write access."] pub mod fmc_pmem; #[doc = "The FMC_PATT read/write register contains the timing information for NAND Flash memory bank. It is used for 8-bit accesses to the attribute memory space of the NAND Flash for the last address write access if the timing must differ from that of previous accesses (for Ready/Busy management, refer to Section20.8.5: NAND Flash prewait feature).\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_patt](fmc_patt) module"] pub type FMC_PATT = crate::Reg<u32, _FMC_PATT>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_PATT; #[doc = "`read()` method returns [fmc_patt::R](fmc_patt::R) reader structure"] impl crate::Readable for FMC_PATT {} #[doc = "`write(|w| ..)` method takes [fmc_patt::W](fmc_patt::W) writer structure"] impl crate::Writable for FMC_PATT {} #[doc = "The FMC_PATT read/write register contains the timing information for NAND Flash memory bank. 
It is used for 8-bit accesses to the attribute memory space of the NAND Flash for the last address write access if the timing must differ from that of previous accesses (for Ready/Busy management, refer to Section20.8.5: NAND Flash prewait feature)."] pub mod fmc_patt; #[doc = "FMC Hamming parity result registers\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_hpr](fmc_hpr) module"] pub type FMC_HPR = crate::Reg<u32, _FMC_HPR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_HPR; #[doc = "`read()` method returns [fmc_hpr::R](fmc_hpr::R) reader structure"] impl crate::Readable for FMC_HPR {} #[doc = "FMC Hamming parity result registers"] pub mod fmc_hpr; #[doc = "FMC Hamming code ECC result register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_heccr](fmc_heccr) module"] pub type FMC_HECCR = crate::Reg<u32, _FMC_HECCR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_HECCR; #[doc = "`read()` method returns [fmc_heccr::R](fmc_heccr::R) reader structure"] impl crate::Readable for FMC_HECCR {} #[doc = "FMC Hamming code ECC result register"] pub mod fmc_heccr; #[doc = "FMC NAND Command Sequencer Control Register\n\nThis register you can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_csqcr](fmc_csqcr) module"] pub type FMC_CSQCR = crate::Reg<u32, _FMC_CSQCR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_CSQCR; #[doc = "`write(|w| ..)` method takes [fmc_csqcr::W](fmc_csqcr::W) writer structure"] impl crate::Writable for FMC_CSQCR {} #[doc = "FMC NAND Command Sequencer Control Register"] pub mod fmc_csqcr; #[doc = "FMC NAND Command Sequencer Configuration Register 1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_csqcfgr1](fmc_csqcfgr1) module"] pub type FMC_CSQCFGR1 = crate::Reg<u32, _FMC_CSQCFGR1>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_CSQCFGR1; #[doc = "`read()` method returns [fmc_csqcfgr1::R](fmc_csqcfgr1::R) reader structure"] impl crate::Readable for FMC_CSQCFGR1 {} #[doc = "`write(|w| ..)` method takes [fmc_csqcfgr1::W](fmc_csqcfgr1::W) writer structure"] impl crate::Writable for FMC_CSQCFGR1 {} #[doc = "FMC NAND Command Sequencer Configuration Register 1"] pub mod fmc_csqcfgr1; #[doc = "FMC NAND Command Sequencer Configuration Register 2\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_csqcfgr2](fmc_csqcfgr2) module"] pub type FMC_CSQCFGR2 = crate::Reg<u32, _FMC_CSQCFGR2>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_CSQCFGR2; #[doc = "`read()` method returns [fmc_csqcfgr2::R](fmc_csqcfgr2::R) reader structure"] impl crate::Readable for FMC_CSQCFGR2 {} #[doc = "`write(|w| ..)` method takes [fmc_csqcfgr2::W](fmc_csqcfgr2::W) writer structure"] impl crate::Writable for FMC_CSQCFGR2 {} #[doc = "FMC NAND Command Sequencer Configuration Register 2"] pub mod fmc_csqcfgr2; #[doc = "FMC NAND Command Sequencer Configuration Register 3\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_csqcfgr3](fmc_csqcfgr3) module"] pub type FMC_CSQCFGR3 = crate::Reg<u32, _FMC_CSQCFGR3>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_CSQCFGR3; #[doc = "`read()` method returns [fmc_csqcfgr3::R](fmc_csqcfgr3::R) reader structure"] impl crate::Readable for FMC_CSQCFGR3 {} #[doc = "`write(|w| ..)` method takes [fmc_csqcfgr3::W](fmc_csqcfgr3::W) writer structure"] impl crate::Writable for FMC_CSQCFGR3 {} #[doc = "FMC NAND Command Sequencer Configuration Register 3"] pub mod fmc_csqcfgr3; #[doc = "FMC NAND Command Sequencer Address Register 1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_csqar1](fmc_csqar1) module"] pub type FMC_CSQAR1 = crate::Reg<u32, _FMC_CSQAR1>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_CSQAR1; #[doc = "`read()` method returns [fmc_csqar1::R](fmc_csqar1::R) reader structure"] impl crate::Readable for FMC_CSQAR1 {} #[doc = "`write(|w| ..)` method takes [fmc_csqar1::W](fmc_csqar1::W) writer structure"] impl crate::Writable for FMC_CSQAR1 {} #[doc = "FMC NAND Command Sequencer Address Register 1"] pub mod fmc_csqar1; #[doc = "FMC NAND Command Sequencer Address Register 2\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_csqar2](fmc_csqar2) module"] pub type FMC_CSQAR2 = crate::Reg<u32, _FMC_CSQAR2>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_CSQAR2; #[doc = "`read()` method returns [fmc_csqar2::R](fmc_csqar2::R) reader structure"] impl crate::Readable for FMC_CSQAR2 {} #[doc = "`write(|w| ..)` method takes [fmc_csqar2::W](fmc_csqar2::W) writer structure"] impl crate::Writable for FMC_CSQAR2 {} #[doc = "FMC NAND Command Sequencer Address Register 2"] pub mod fmc_csqar2; #[doc = "FMC NAND Command Sequencer Interrupt Enable Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_csqier](fmc_csqier) module"] pub type FMC_CSQIER = crate::Reg<u32, _FMC_CSQIER>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_CSQIER; #[doc = "`read()` method returns [fmc_csqier::R](fmc_csqier::R) reader structure"] impl crate::Readable for FMC_CSQIER {} #[doc = "`write(|w| ..)` method takes [fmc_csqier::W](fmc_csqier::W) writer structure"] impl crate::Writable for FMC_CSQIER {} #[doc = "FMC NAND Command Sequencer Interrupt Enable Register"] pub mod fmc_csqier; #[doc = "FMC NAND Command Sequencer Interrupt Status Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_csqisr](fmc_csqisr) module"] pub type FMC_CSQISR = crate::Reg<u32, _FMC_CSQISR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_CSQISR; #[doc = "`read()` method returns [fmc_csqisr::R](fmc_csqisr::R) reader structure"] impl crate::Readable for FMC_CSQISR {} #[doc = "`write(|w| ..)` method takes [fmc_csqisr::W](fmc_csqisr::W) writer structure"] impl crate::Writable for FMC_CSQISR {} #[doc = "FMC NAND Command Sequencer Interrupt Status Register"] pub mod fmc_csqisr; #[doc = "FMC NAND Command Sequencer Interrupt Status Register\n\nThis register you can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_csqicr](fmc_csqicr) module"] pub type FMC_CSQICR = crate::Reg<u32, _FMC_CSQICR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_CSQICR; #[doc = "`write(|w| ..)` method takes [fmc_csqicr::W](fmc_csqicr::W) writer structure"] impl crate::Writable for FMC_CSQICR {} #[doc = "FMC NAND Command Sequencer Interrupt Status Register"] pub mod fmc_csqicr; #[doc = "FMC NAND Command Sequencer Interrupt Status Register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_csqemsr](fmc_csqemsr) module"] pub type FMC_CSQEMSR = crate::Reg<u32, _FMC_CSQEMSR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_CSQEMSR; #[doc = "`read()` method returns [fmc_csqemsr::R](fmc_csqemsr::R) reader structure"] impl crate::Readable for FMC_CSQEMSR {} #[doc = "FMC NAND Command Sequencer Interrupt Status Register"] pub mod fmc_csqemsr; #[doc = "FMC BCH Interrupt Enable Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_bchier](fmc_bchier) module"] pub type FMC_BCHIER = crate::Reg<u32, _FMC_BCHIER>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_BCHIER; #[doc = "`read()` method returns [fmc_bchier::R](fmc_bchier::R) reader structure"] impl crate::Readable for FMC_BCHIER {} #[doc = "`write(|w| ..)` method takes [fmc_bchier::W](fmc_bchier::W) writer structure"] impl crate::Writable for FMC_BCHIER {} #[doc = "FMC BCH Interrupt Enable Register"] pub mod fmc_bchier; #[doc = "FMC BCH Interrupt and Status Register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_bchisr](fmc_bchisr) module"] pub type FMC_BCHISR = crate::Reg<u32, _FMC_BCHISR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_BCHISR; #[doc = "`read()` method returns [fmc_bchisr::R](fmc_bchisr::R) reader structure"] impl crate::Readable for FMC_BCHISR {} #[doc = "FMC BCH Interrupt and Status Register"] pub mod fmc_bchisr; #[doc = "FMC BCH Interrupt Clear Register\n\nThis register you can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_bchicr](fmc_bchicr) module"] pub type FMC_BCHICR = crate::Reg<u32, _FMC_BCHICR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_BCHICR; #[doc = "`write(|w| ..)` method takes [fmc_bchicr::W](fmc_bchicr::W) writer structure"] impl crate::Writable for FMC_BCHICR {} #[doc = "FMC BCH Interrupt Clear Register"] pub mod fmc_bchicr; #[doc = "FMC BCH Parity Bits Register 1\n\nThis register you can [`read`](crate::generic::Reg::read). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_bchpbr1](fmc_bchpbr1) module"] pub type FMC_BCHPBR1 = crate::Reg<u32, _FMC_BCHPBR1>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_BCHPBR1; #[doc = "`read()` method returns [fmc_bchpbr1::R](fmc_bchpbr1::R) reader structure"] impl crate::Readable for FMC_BCHPBR1 {} #[doc = "FMC BCH Parity Bits Register 1"] pub mod fmc_bchpbr1; #[doc = "FMC BCH Parity Bits Register 2\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_bchpbr2](fmc_bchpbr2) module"] pub type FMC_BCHPBR2 = crate::Reg<u32, _FMC_BCHPBR2>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_BCHPBR2; #[doc = "`read()` method returns [fmc_bchpbr2::R](fmc_bchpbr2::R) reader structure"] impl crate::Readable for FMC_BCHPBR2 {} #[doc = "FMC BCH Parity Bits Register 2"] pub mod fmc_bchpbr2; #[doc = "FMC BCH Parity Bits Register 3\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_bchpbr3](fmc_bchpbr3) module"] pub type FMC_BCHPBR3 = crate::Reg<u32, _FMC_BCHPBR3>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_BCHPBR3; #[doc = "`read()` method returns [fmc_bchpbr3::R](fmc_bchpbr3::R) reader structure"] impl crate::Readable for FMC_BCHPBR3 {} #[doc = "FMC BCH Parity Bits Register 3"] pub mod fmc_bchpbr3; #[doc = "FMC BCH Parity Bits Register 4\n\nThis register you can [`read`](crate::generic::Reg::read). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_bchpbr4](fmc_bchpbr4) module"] pub type FMC_BCHPBR4 = crate::Reg<u32, _FMC_BCHPBR4>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_BCHPBR4; #[doc = "`read()` method returns [fmc_bchpbr4::R](fmc_bchpbr4::R) reader structure"] impl crate::Readable for FMC_BCHPBR4 {} #[doc = "FMC BCH Parity Bits Register 4"] pub mod fmc_bchpbr4; #[doc = "FMC BCH Decoder Status register 0\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_bchdsr0](fmc_bchdsr0) module"] pub type FMC_BCHDSR0 = crate::Reg<u32, _FMC_BCHDSR0>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_BCHDSR0; #[doc = "`read()` method returns [fmc_bchdsr0::R](fmc_bchdsr0::R) reader structure"] impl crate::Readable for FMC_BCHDSR0 {} #[doc = "FMC BCH Decoder Status register 0"] pub mod fmc_bchdsr0; #[doc = "FMC BCH Decoder Status register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_bchdsr1](fmc_bchdsr1) module"] pub type FMC_BCHDSR1 = crate::Reg<u32, _FMC_BCHDSR1>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_BCHDSR1; #[doc = "`read()` method returns [fmc_bchdsr1::R](fmc_bchdsr1::R) reader structure"] impl crate::Readable for FMC_BCHDSR1 {} #[doc = "FMC BCH Decoder Status register"] pub mod fmc_bchdsr1; #[doc = "FMC BCH Decoder Status register\n\nThis register you can [`read`](crate::generic::Reg::read). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_bchdsr2](fmc_bchdsr2) module"] pub type FMC_BCHDSR2 = crate::Reg<u32, _FMC_BCHDSR2>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_BCHDSR2; #[doc = "`read()` method returns [fmc_bchdsr2::R](fmc_bchdsr2::R) reader structure"] impl crate::Readable for FMC_BCHDSR2 {} #[doc = "FMC BCH Decoder Status register"] pub mod fmc_bchdsr2; #[doc = "FMC BCH Decoder Status register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_bchdsr3](fmc_bchdsr3) module"] pub type FMC_BCHDSR3 = crate::Reg<u32, _FMC_BCHDSR3>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_BCHDSR3; #[doc = "`read()` method returns [fmc_bchdsr3::R](fmc_bchdsr3::R) reader structure"] impl crate::Readable for FMC_BCHDSR3 {} #[doc = "FMC BCH Decoder Status register"] pub mod fmc_bchdsr3; #[doc = "FMC BCH Decoder Status register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_bchdsr4](fmc_bchdsr4) module"] pub type FMC_BCHDSR4 = crate::Reg<u32, _FMC_BCHDSR4>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_BCHDSR4; #[doc = "`read()` method returns [fmc_bchdsr4::R](fmc_bchdsr4::R) reader structure"] impl crate::Readable for FMC_BCHDSR4 {} #[doc = "FMC BCH Decoder Status register"] pub mod fmc_bchdsr4; #[doc = "FMC Size Identification register\n\nThis register you can [`read`](crate::generic::Reg::read). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_sidr](fmc_sidr) module"] pub type FMC_SIDR = crate::Reg<u32, _FMC_SIDR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_SIDR; #[doc = "`read()` method returns [fmc_sidr::R](fmc_sidr::R) reader structure"] impl crate::Readable for FMC_SIDR {} #[doc = "FMC Size Identification register"] pub mod fmc_sidr; #[doc = "FMC Identification register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_idr](fmc_idr) module"] pub type FMC_IDR = crate::Reg<u32, _FMC_IDR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_IDR; #[doc = "`read()` method returns [fmc_idr::R](fmc_idr::R) reader structure"] impl crate::Readable for FMC_IDR {} #[doc = "FMC Identification register"] pub mod fmc_idr; #[doc = "FMC Version register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_verr](fmc_verr) module"] pub type FMC_VERR = crate::Reg<u32, _FMC_VERR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_VERR; #[doc = "`read()` method returns [fmc_verr::R](fmc_verr::R) reader structure"] impl crate::Readable for FMC_VERR {} #[doc = "FMC Version register"] pub mod fmc_verr; #[doc = "FMC Hardware configuration register 1\n\nThis register you can [`read`](crate::generic::Reg::read). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_hwcfgr1](fmc_hwcfgr1) module"] pub type FMC_HWCFGR1 = crate::Reg<u32, _FMC_HWCFGR1>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_HWCFGR1; #[doc = "`read()` method returns [fmc_hwcfgr1::R](fmc_hwcfgr1::R) reader structure"] impl crate::Readable for FMC_HWCFGR1 {} #[doc = "FMC Hardware configuration register 1"] pub mod fmc_hwcfgr1; #[doc = "FMC Hardware configuration register 2\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fmc_hwcfgr2](fmc_hwcfgr2) module"] pub type FMC_HWCFGR2 = crate::Reg<u32, _FMC_HWCFGR2>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FMC_HWCFGR2; #[doc = "`read()` method returns [fmc_hwcfgr2::R](fmc_hwcfgr2::R) reader structure"] impl crate::Readable for FMC_HWCFGR2 {} #[doc = "FMC Hardware configuration register 2"] pub mod fmc_hwcfgr2;
//! Retrieve statistics from a Varnish Shared Memory (VSM) space.

extern crate libc;

/// The only module of this crate (for now); it allows you to retrieve
/// stats from a running, or dead, Varnish Shared Memory space.
pub mod vapi;
use std::collections::HashMap;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::iter::FromIterator;

/// Sentinel "time" ceiling used when scanning for the next worker to finish.
const MAX_TIME: i32 = 2147483647;
/// Marker for a worker slot with no step assigned.
const NO_WORK: char = '.';

/// A single step in the dependency graph.
#[derive(Debug)]
struct Step {
    id: char,
    // steps that must complete before this one may start
    dependencies: Vec<char>,
    // seconds required to complete this step
    time: i32,
}

/// Duration of a step: its alphabet position (A = 1) plus a fixed 60 seconds.
fn step_time(id: char) -> i32 {
    (id as i32 - 64) + 60
}

/// Parses lines of the form
/// "Step G must be finished before step L can begin."
/// into a map of step id -> `Step`.
///
/// The format is fixed, so the two step ids are always the 2nd and 8th
/// whitespace-separated tokens; this replaces the former `regex` dependency.
fn parse_dependencies(filename: &str) -> HashMap<char, Step> {
    let mut steps: HashMap<char, Step> = HashMap::new();
    let file = File::open(filename).expect("file not found");
    for line in BufReader::new(file).lines() {
        let data = line.expect("failed to read line");
        let words: Vec<&str> = data.split_whitespace().collect();
        let step_before = words
            .get(1)
            .and_then(|w| w.chars().next())
            .expect("line missing predecessor step id");
        let step = words
            .get(7)
            .and_then(|w| w.chars().next())
            .expect("line missing successor step id");
        // ensure the prerequisite step exists even if it never appears
        // as a successor anywhere in the input
        steps.entry(step_before).or_insert_with(|| Step {
            id: step_before,
            dependencies: Vec::new(),
            time: step_time(step_before),
        });
        // record the dependency edge on the successor step
        steps
            .entry(step)
            .or_insert_with(|| Step {
                id: step,
                dependencies: Vec::new(),
                time: step_time(step),
            })
            .dependencies
            .push(step_before);
    }
    steps
}

/// Removes `remove` from every step's dependency list (called once a step
/// has completed).
fn remove_step(steps: &mut HashMap<char, Step>, remove: char) {
    for step in steps.values_mut() {
        // retain is a single in-place O(n) pass (and also clears any
        // duplicate edges, should the input repeat one)
        step.dependencies.retain(|&c| c != remove);
    }
}

/// Part 1: the order in which a single worker executes all steps, always
/// taking the alphabetically-first step whose dependencies are satisfied.
pub fn part1(filename: &str) -> String {
    let mut steps = parse_dependencies(filename);
    let mut order: Vec<char> = Vec::new();
    while !steps.is_empty() {
        let mut ready: Vec<char> = steps
            .values()
            .filter(|s| s.dependencies.is_empty())
            .map(|s| s.id)
            .collect();
        ready.sort_unstable();
        let next = ready[0];
        order.push(next);
        remove_step(&mut steps, next);
        steps.remove(&next);
    }
    String::from_iter(order)
}

/// Part 2: total seconds for five parallel workers to finish every step.
pub fn part2(filename: &str) -> i32 {
    const WORKERS: usize = 5;
    let mut steps = parse_dependencies(filename);
    let mut worker_complete_time = [0; WORKERS];
    let mut worker_step = [NO_WORK; WORKERS];
    let mut now = 0;
    while !steps.is_empty() {
        // steps whose dependencies are done and that nobody is working on yet
        let mut ready: Vec<char> = steps
            .values()
            .filter(|s| s.dependencies.is_empty() && !worker_step.contains(&s.id))
            .map(|s| s.id)
            .collect();
        ready.sort_unstable();
        // hand out ready steps (alphabetically first) to idle workers
        for i in 0..WORKERS {
            if worker_step[i] == NO_WORK && !ready.is_empty() {
                let c = ready.remove(0);
                worker_complete_time[i] = now + steps[&c].time;
                worker_step[i] = c;
            }
        }
        // advance time to the busy worker that finishes soonest
        let mut next_time = MAX_TIME;
        let mut next_worker = 0;
        for i in 0..WORKERS {
            if worker_step[i] != NO_WORK && worker_complete_time[i] < next_time {
                next_time = worker_complete_time[i];
                next_worker = i;
            }
        }
        now = worker_complete_time[next_worker];
        // mark that worker's step complete and free the worker
        remove_step(&mut steps, worker_step[next_worker]);
        steps.remove(&worker_step[next_worker]);
        worker_step[next_worker] = NO_WORK;
        worker_complete_time[next_worker] = 0;
    }
    now
}
use std::collections::HashMap;
use std::sync::Mutex;

/// A thread-safe set of named monotonically-increasing counters.
#[derive(Default)]
pub struct Counter {
    // name -> number of times `inc` was called for it
    counts: Mutex<HashMap<String, u32>>,
}

impl Counter {
    /// Creates an empty counter set.
    pub fn new() -> Self {
        Counter::default()
    }

    /// Increments the counter named `name`, creating it at 1 on first use.
    pub fn inc<S: Into<String>>(&self, name: S) {
        let mut counts = self.counts.lock().unwrap();
        // entry API: a single hash lookup instead of the previous
        // get_mut-then-insert double lookup
        *counts.entry(name.into()).or_insert(0) += 1;
    }

    /// Returns the current value of `name`, or 0 if it was never incremented.
    pub fn count(&self, name: &str) -> u32 {
        self.counts.lock().unwrap().get(name).copied().unwrap_or(0)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test() {
        let c = Counter::new();
        c.inc("c1");
        c.inc("c2");
        c.inc("c1");
        c.inc("c1");
        c.inc("c3");
        c.inc("c1");
        c.inc("c3");
        assert_eq!(c.count("c1"), 4);
        assert_eq!(c.count("c2"), 1);
        assert_eq!(c.count("c3"), 2);
        assert_eq!(c.count("c4"), 0);
    }
}
use crypto::curve25519::curve25519;
use crypto::chacha20poly1305::ChaCha20Poly1305;
use crypto::aead::AeadDecryptor;
use std::io::BufRead;
use std::error::Error;
use std::fmt::{Display, Formatter};
use std::convert::TryInto;

/// Ways a sealed message can fail to decrypt.
#[derive(Debug)]
enum DecryptError {
    // authentication failed: wrong key or tampered ciphertext
    Invalid,
    // input is not shaped like a sealed message (too short, bad key size, ...)
    Malformed,
}

impl Display for DecryptError {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "{:?}", self)
    }
}

impl Error for DecryptError {}

/// Decrypts a sealed `message` with our static `secret_key`.
///
/// Wire layout: bytes [0..32) sender's ephemeral public key,
/// [32..48) authentication tag, [48..) ciphertext.
pub fn decrypt(secret_key: &[u8; 32], message: &[u8]) -> Result<Vec<u8>, Box<dyn Error>> {
    // must at least hold the ephemeral key (32) and the tag (16)
    if message.len() < 48 {
        return Err(Box::new(DecryptError::Malformed));
    }
    let ephemeral_public_key = &message[0..32];
    let tag = &message[32..48];
    let ciphertext = &message[48..];
    let mut plaintext = vec![0; ciphertext.len()];
    // shared secret between our static secret key and the sender's ephemeral key
    let symmetric_key = curve25519(secret_key, ephemeral_public_key);
    // NOTE(review): all-zero nonce — presumably safe only because each message
    // uses a fresh ephemeral key; confirm against the sealing side.
    let mut decrypter = ChaCha20Poly1305::new(&symmetric_key[..], &[0u8; 8][..], &[]);
    if !decrypter.decrypt(ciphertext, &mut plaintext[..], tag) {
        return Err(Box::new(DecryptError::Invalid));
    }
    Ok(plaintext)
}

/// Runs one prompt/decrypt round trip on stdin.
///
/// Returns `Ok(false)` when stdin is exhausted (EOF) so the caller can stop;
/// the previous version called `unwrap()` on the line iterator and panicked
/// as soon as stdin closed.
fn iteration() -> Result<bool, Box<dyn Error>> {
    let stdin = std::io::stdin();
    let mut lines = stdin.lock().lines();
    println!("Enter message to decrypt:");
    let msg = match lines.next() {
        Some(line) => line?.trim().to_owned(),
        None => return Ok(false),
    };
    let decoded = base64::decode(msg)?;
    println!("Enter decode key:");
    let key = match lines.next() {
        Some(line) => line?.trim().to_owned(),
        None => return Ok(false),
    };
    let key_decoded = base64::decode(key)?;
    // a decoded key that is not exactly 32 bytes is user error, not a panic
    // (was `try_into().unwrap()`)
    let key_arr: [u8; 32] = key_decoded
        .try_into()
        .map_err(|_| DecryptError::Malformed)?;
    let dec = decrypt(&key_arr, &decoded)?;
    println!("---------------------");
    println!(" Decoded message ");
    println!("---------------------");
    let msg = String::from_utf8_lossy(&dec);
    // presumably meant to avoid a doubled trailing newline; note this tests
    // for *any* newline in the message, not just a trailing one
    if msg.contains('\n') {
        print!("{}", msg)
    } else {
        println!("{}", msg);
    }
    println!("---------------------");
    Ok(true)
}

fn main() {
    loop {
        match iteration() {
            Ok(true) => {}
            // stdin closed: exit cleanly instead of looping/panicking
            Ok(false) => return,
            Err(e) => println!("Error while processing: {:?}", e),
        }
    }
}
#[cfg(feature = "async")] use reqwest::Method; use serde::Deserialize; use url::Url; const BASE_URL: &str = "https://newsapi.org/v2"; #[derive(thiserror::Error, Debug)] pub enum NewsApiError { #[error("Failed fetching articles")] RequestFailed(#[from] ureq::Error), #[error("Failed converting response to string")] FailedResponseToString(#[from] std::io::Error), #[error("Article Parsing failed")] ArticleParseFailed(#[from] serde_json::Error), #[error("Url parsing failed")] UrlParsing(#[from] url::ParseError), #[error("Request failed: {0}")] BadRequest(&'static str), #[error("Async request failed")] #[cfg(feature = "async")] AsyncRequestFailed(#[from] reqwest::Error), } #[derive(Deserialize, Debug)] pub struct NewsAPIResponse { status: String, articles: Vec<Article>, code: Option<String>, } impl NewsAPIResponse { pub fn articles(&self) -> &Vec<Article> { &self.articles } } #[derive(Deserialize, Debug)] pub struct Article { title: String, url: String, description: Option<String>, } impl Article { pub fn title(&self) -> &str { &self.title } pub fn url(&self) -> &str { &self.url } pub fn desc(&self) -> Option<&String> { self.description.as_ref() } } pub enum Endpoint { TopHeadlines, } impl ToString for Endpoint { fn to_string(&self) -> String { match self { Self::TopHeadlines => "top-headlines".to_string(), } } } pub enum Country { Us, } impl ToString for Country { fn to_string(&self) -> String { match self { Self::Us => "us".to_string(), } } } pub struct NewsAPI { api_key: String, endpoint: Endpoint, country: Country, } impl NewsAPI { pub fn new(api_key: &str) -> NewsAPI { NewsAPI { api_key: api_key.to_string(), endpoint: Endpoint::TopHeadlines, country: Country::Us, } } pub fn endpoint(&mut self, endpoint: Endpoint) -> &mut NewsAPI { self.endpoint = endpoint; self } pub fn country(&mut self, country: Country) -> &mut NewsAPI { self.country = country; self } fn prepare_url(&self) -> Result<String, NewsApiError> { let mut url = Url::parse(BASE_URL)?; 
url.path_segments_mut() .unwrap() .push(&self.endpoint.to_string()); let country = format!("country={}", self.country.to_string()); url.set_query(Some(&country)); Ok(url.to_string()) } pub fn fetch(&self) -> Result<NewsAPIResponse, NewsApiError> { let url = self.prepare_url()?; let req = ureq::get(&url).set("Authorization", &self.api_key); let response: NewsAPIResponse = req.call()?.into_json()?; match response.status.as_str() { "ok" => return Ok(response), _ => return Err(map_response_err(response.code)), } } #[cfg(feature = "async")] pub async fn fetch_async(&self) -> Result<NewsAPIResponse, NewsApiError> { let url = self.prepare_url()?; let client = reqwest::Client::new(); let request = client .request(Method::GET, url) .header("Authorization", &self.api_key) .build() .map_err(|e| NewsApiError::AsyncRequestFailed(e))?; let response: NewsAPIResponse = client .execute(request) .await? .json() .await .map_err(|e| NewsApiError::AsyncRequestFailed(e))?; match response.status.as_str() { "ok" => return Ok(response), _ => return Err(map_response_err(response.code)), } } } fn map_response_err(code: Option<String>) -> NewsApiError { if let Some(code) = code { match code.as_str() { "apiKeyDisabled" => NewsApiError::BadRequest("Your API key has been disabled"), _ => NewsApiError::BadRequest("Unknown error"), } } else { NewsApiError::BadRequest("Unknown error") } }
// Copyright (c) 2018-2020 Jeron Aldaron Lau // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0>, the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, or the ZLib // license <LICENSE-ZLIB or https://www.zlib.net/zlib_license.html> at // your option. This file may not be copied, modified, or distributed // except according to those terms. //! Component channels use crate::private::Sealed; use core::{ cmp::Ordering, fmt::Debug, ops::{Add, Div, Mul, Neg, Sub}, }; /// Component of a speaker configuration, such as *front left*, *lfe*, *etc*. pub trait Channel: Copy + Debug + Default + From<f64> + Ord + Add<Output = Self> + Div<Output = Self> + Mul<Output = Self> + Sub<Output = Self> + Sealed { /// Minimum value (*negative one*) const MIN: Self; /// Mid value (*zero/silence*) const MID: Self; /// Maximum value (*one*) const MAX: Self; /// Convert to `f64` fn to_f64(self) -> f64; /// Linear interpolation fn lerp(self, rhs: Self, t: Self) -> Self; } /// 8-bit sample [Channel](trait.Channel.html). #[derive(Clone, Copy, Debug, Default, PartialEq, PartialOrd, Ord, Eq)] pub struct Ch8(i8); /// 16-bit sample [Channel](trait.Channel.html). #[derive(Clone, Copy, Debug, Default, PartialEq, PartialOrd, Ord, Eq)] pub struct Ch16(i16); /// 32-bit sample [Channel](trait.Channel.html). #[derive(Clone, Copy, Debug, Default, PartialEq, PartialOrd)] pub struct Ch32(f32); /// 64-bit sample [Channel](trait.Channel.html). #[derive(Clone, Copy, Debug, Default, PartialEq, PartialOrd)] pub struct Ch64(f64); impl Eq for Ch32 {} impl Eq for Ch64 {} impl Ord for Ch32 { fn cmp(&self, other: &Ch32) -> Ordering { self.partial_cmp(other).unwrap() } } impl Ord for Ch64 { fn cmp(&self, other: &Ch64) -> Ordering { self.partial_cmp(other).unwrap() } } impl Ch8 { /// Create a new 8-bit `Channel` value. pub const fn new(value: i8) -> Self { Ch8(value) } } impl Ch16 { /// Create a new 16-bit `Channel` value. 
pub const fn new(value: i16) -> Self { Ch16(value) } } impl Ch32 { /// Create a new 32-bit `Channel` value. pub const fn new(value: f32) -> Self { Ch32(value) } } impl Ch64 { /// Create a new 64-bit `Channel` value. pub const fn new(value: f64) -> Self { Ch64(value) } } impl From<i8> for Ch8 { fn from(value: i8) -> Self { Ch8(value) } } impl From<Ch8> for i8 { fn from(c: Ch8) -> i8 { c.0 } } impl From<i16> for Ch16 { fn from(value: i16) -> Self { Ch16(value) } } impl From<Ch16> for i16 { fn from(c: Ch16) -> i16 { c.0 } } impl From<f32> for Ch32 { fn from(value: f32) -> Self { Ch32(value) } } impl From<Ch32> for f32 { fn from(c: Ch32) -> f32 { c.0 } } impl From<Ch64> for f64 { fn from(c: Ch64) -> f64 { c.0 } } impl<R> Add<R> for Ch8 where Self: From<R>, { type Output = Self; fn add(self, rhs: R) -> Self { let rhs = Self::from(rhs); Ch8(self.0.saturating_add(rhs.0)) } } impl<R> Sub<R> for Ch8 where Self: From<R>, { type Output = Self; fn sub(self, rhs: R) -> Self { let rhs = Self::from(rhs); Ch8(self.0.saturating_sub(rhs.0)) } } impl<R> Mul<R> for Ch8 where Self: From<R>, { type Output = Self; fn mul(self, rhs: R) -> Self { let rhs = Self::from(rhs); let l = i32::from(self.0); let l = (l * 16) + (l / 16); let r = i32::from(rhs.0); let r = (r * 16) + (r / 16); let value = ((l * r) / i16::MAX as i32) as i8; Ch8(value) } } impl<R> Div<R> for Ch8 where Self: From<R>, { type Output = Self; #[allow(clippy::suspicious_arithmetic_impl)] fn div(self, rhs: R) -> Self { let rhs = Self::from(rhs); if rhs.0 > 0 { let ss = i32::from(self.0) * 256; let rr = i32::from(rhs.0); let value = (ss / rr).min(255) as i8; Ch8(value) } else { Ch8(0) } } } impl<R> Add<R> for Ch16 where Self: From<R>, { type Output = Self; fn add(self, rhs: R) -> Self { let rhs = Self::from(rhs); Ch16(self.0.saturating_add(rhs.0)) } } impl<R> Sub<R> for Ch16 where Self: From<R>, { type Output = Self; fn sub(self, rhs: R) -> Self { let rhs = Self::from(rhs); Ch16(self.0.saturating_sub(rhs.0)) } } impl<R> Mul<R> 
for Ch16 where Self: From<R>, { type Output = Self; fn mul(self, rhs: R) -> Self { let rhs = Self::from(rhs); let l = i64::from(self.0); let l = (l * 256) + (l / 256); let r = i64::from(rhs.0); let r = (r * 256) + (r / 256); let value = ((l * r) / u32::MAX as i64) as i16; Ch16(value) } } impl<R> Div<R> for Ch16 where Self: From<R>, { type Output = Self; fn div(self, rhs: R) -> Self { #![allow(clippy::single_match, clippy::suspicious_arithmetic_impl)] let rhs = Self::from(rhs); if rhs.0 > 0 { let ss = i64::from(self.0) << 16; let rr = i64::from(rhs.0); let value = (ss / rr).min(i16::MAX.into()) as i16; Ch16(value) } else { Ch16(0) } } } impl<R> Add<R> for Ch32 where Self: From<R>, { type Output = Self; fn add(self, rhs: R) -> Self { let value = self.0 + Self::from(rhs).0; Ch32(value.min(1.0)) } } impl<R> Sub<R> for Ch32 where Self: From<R>, { type Output = Self; fn sub(self, rhs: R) -> Self { let value = self.0 - Self::from(rhs).0; Ch32(value.max(0.0)) } } impl<R> Mul<R> for Ch32 where Self: From<R>, { type Output = Self; fn mul(self, rhs: R) -> Self { Ch32(self.0 * Self::from(rhs).0) } } impl<R> Div<R> for Ch32 where Self: From<R>, { type Output = Self; fn div(self, rhs: R) -> Self { let v = Self::from(rhs).0; if v > 0.0 { Ch32((self.0 / v).min(1.0)) } else { Ch32(0.0) } } } impl<R> Add<R> for Ch64 where Self: From<R>, { type Output = Self; fn add(self, rhs: R) -> Self { let value = self.0 + Self::from(rhs).0; Ch64(value.min(1.0)) } } impl<R> Sub<R> for Ch64 where Self: From<R>, { type Output = Self; fn sub(self, rhs: R) -> Self { let value = self.0 - Self::from(rhs).0; Ch64(value.max(0.0)) } } impl<R> Mul<R> for Ch64 where Self: From<R>, { type Output = Self; fn mul(self, rhs: R) -> Self { Ch64(self.0 * Self::from(rhs).0) } } impl<R> Div<R> for Ch64 where Self: From<R>, { type Output = Self; fn div(self, rhs: R) -> Self { let v = Self::from(rhs).0; if v > 0.0 { Ch64((self.0 / v).min(1.0)) } else { Ch64(0.0) } } } impl Channel for Ch8 { const MIN: Ch8 = 
Ch8(i8::MIN); const MID: Ch8 = Ch8(0); const MAX: Ch8 = Ch8(i8::MAX); fn to_f64(self) -> f64 { Ch64::from(self).0 } /// Linear interpolation #[inline] fn lerp(self, rhs: Self, t: Self) -> Self { let v0: i32 = i8::from(self).into(); let v1: i32 = i8::from(rhs).into(); let r = v0 + scale_i32(i8::from(t), v1 - v0); Self::new(r as i8) } } impl Channel for Ch16 { const MIN: Ch16 = Ch16(i16::MIN); const MID: Ch16 = Ch16(0); const MAX: Ch16 = Ch16(i16::MAX); fn to_f64(self) -> f64 { Ch64::from(self).0 } /// Linear interpolation #[inline] fn lerp(self, rhs: Self, t: Self) -> Self { let v0: i64 = i16::from(self).into(); let v1: i64 = i16::from(rhs).into(); let r = v0 + scale_i64(i16::from(t), v1 - v0); Self::new(r as i16) } } impl Channel for Ch32 { const MIN: Ch32 = Ch32(-1.0); const MID: Ch32 = Ch32(0.0); const MAX: Ch32 = Ch32(1.0); fn to_f64(self) -> f64 { Ch64::from(self).0 } /// Linear interpolation #[inline] fn lerp(self, rhs: Self, t: Self) -> Self { let v0 = f32::from(self); let v1 = f32::from(rhs); let r = v0 + f32::from(t) * (v1 - v0); Self::new(r) } } impl Channel for Ch64 { const MIN: Ch64 = Ch64(-1.0); const MID: Ch64 = Ch64(0.0); const MAX: Ch64 = Ch64(1.0); fn to_f64(self) -> f64 { self.0 } /// Linear interpolation #[inline] fn lerp(self, rhs: Self, t: Self) -> Self { let v0 = f64::from(self); let v1 = f64::from(rhs); let r = v0 + f64::from(t) * (v1 - v0); Self::new(r) } } /// Scale an i32 value by a i8 (for lerp) #[inline] fn scale_i32(t: i8, v: i32) -> i32 { let c = v * i32::from(t); ((c + 1) + (c / 255)) / 255 } /// Scale an i64 value by a i16 (for lerp) #[inline] fn scale_i64(t: i16, v: i64) -> i64 { let c = v * i64::from(t); ((c + 1) + (c / 65535)) / 65535 } impl From<f64> for Ch8 { fn from(value: f64) -> Self { Ch64::new(value).into() } } impl From<f64> for Ch16 { fn from(value: f64) -> Self { println!("he"); Ch64::new(value).into() } } impl From<f64> for Ch32 { fn from(value: f64) -> Self { Ch64::new(value).into() } } impl From<f64> for Ch64 { fn 
from(value: f64) -> Self { Ch64::new(value) } } impl From<Ch64> for Ch8 { fn from(value: Ch64) -> Self { let v: f64 = value.into(); Ch8::new((v * i8::MAX as f64) as i8) } } impl From<Ch64> for Ch16 { fn from(value: Ch64) -> Self { let v: f64 = value.into(); Ch16::new((v * i16::MAX as f64) as i16) } } impl From<Ch64> for Ch32 { fn from(value: Ch64) -> Self { let v: f64 = value.into(); Ch32::new(v as f32) } } impl From<Ch32> for Ch8 { fn from(value: Ch32) -> Self { let value = value.0; debug_assert!(value >= -1.0 && value <= 1.0); // this cast is not UB since the value is guaranteed // to be between -1.0 and 1.0 (see bug #10184) Ch8::new((value * i8::MAX as f32).round() as i8) } } impl From<Ch32> for Ch16 { fn from(value: Ch32) -> Self { let value = value.0; debug_assert!(value >= -1.0 && value <= 1.0); // this cast is not UB since the value is guaranteed // to be between -1.0 and 1.0 (see bug #10184) Ch16::new((value * i16::MAX as f32).round() as i16) } } impl From<Ch32> for Ch64 { fn from(value: Ch32) -> Self { let v: f32 = value.into(); Ch64::new(v.into()) } } impl From<Ch16> for Ch8 { fn from(c: Ch16) -> Self { Ch8::new((c.0 / 256) as i8) } } impl From<Ch16> for Ch32 { fn from(c: Ch16) -> Self { Ch32(f32::from(c.0) / 65535.0) } } impl From<Ch16> for Ch64 { fn from(c: Ch16) -> Self { Ch64(f64::from(c.0) / 65535.0) } } impl From<Ch8> for Ch16 { fn from(c: Ch8) -> Self { let value = i16::from(c.0); Ch16::new(value * 256 + value) } } impl From<Ch8> for Ch32 { fn from(c: Ch8) -> Self { Ch32(f32::from(c.0) / 65535.0) } } impl From<Ch8> for Ch64 { fn from(c: Ch8) -> Self { Ch64(f64::from(c.0) / 65535.0) } } impl Neg for Ch8 { type Output = Ch8; /// Invert sound wave (-x). fn neg(self) -> Self { Ch8(-self.0) } } impl Neg for Ch16 { type Output = Ch16; /// Invert sound wave (-x). fn neg(self) -> Self { Ch16(-self.0) } } impl Neg for Ch32 { type Output = Ch32; /// Invert sound wave (-x). 
fn neg(self) -> Self { Ch32(-self.0) } } impl Neg for Ch64 { type Output = Ch64; /// Invert sound wave (-x). fn neg(self) -> Self { Ch64(-self.0) } }
// NOTE(review): svd2rust-generated, read-only SYSCFG interrupt-line-10 status
// register (two DMA1 channel pending bits). Do not hand-edit; regenerate from
// the SVD file instead.
#[doc = "Register `SYSCFG_ITLINE10` reader"] pub type R = crate::R<SYSCFG_ITLINE10_SPEC>; #[doc = "Field `DMA1_CH2` reader - DMA1 channel 2 interrupt request pending"] pub type DMA1_CH2_R = crate::BitReader; #[doc = "Field `DMA1_CH3` reader - DMA1 channel 3 interrupt request pending"] pub type DMA1_CH3_R = crate::BitReader; impl R { #[doc = "Bit 0 - DMA1 channel 2 interrupt request pending"] #[inline(always)] pub fn dma1_ch2(&self) -> DMA1_CH2_R { DMA1_CH2_R::new((self.bits & 1) != 0) } #[doc = "Bit 1 - DMA1 channel 3 interrupt request pending"] #[inline(always)] pub fn dma1_ch3(&self) -> DMA1_CH3_R { DMA1_CH3_R::new(((self.bits >> 1) & 1) != 0) } } #[doc = "SYSCFG interrupt line 10 status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`syscfg_itline10::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."] pub struct SYSCFG_ITLINE10_SPEC; impl crate::RegisterSpec for SYSCFG_ITLINE10_SPEC { type Ux = u32; } #[doc = "`read()` method returns [`syscfg_itline10::R`](R) reader structure"] impl crate::Readable for SYSCFG_ITLINE10_SPEC {} #[doc = "`reset()` method sets SYSCFG_ITLINE10 to value 0"] impl crate::Resettable for SYSCFG_ITLINE10_SPEC { const RESET_VALUE: Self::Ux = 0; }
pub mod serial {
    //! Polled UART console: byte-level I/O plus `print!`/`println!` macros.
    extern crate spin;

    use crate::mmio::{self, transmit_fifo_full, receive_fifo_empty, UART_DR};
    use spin::Mutex;
    use numtoa::NumToA;
    use core::fmt;

    /// Writes one byte to the UART, busy-waiting while the transmit FIFO is full.
    pub fn writec(c: u8) {
        while transmit_fifo_full() {}
        mmio::write(UART_DR, c as u32);
    }

    /// Blocks until a byte is available in the receive FIFO, then returns it.
    pub fn getc() -> u8 {
        while receive_fifo_empty() {}
        mmio::read(UART_DR) as u8
    }

    /// Writes a string to the UART.
    pub fn write(s: &str) {
        // bytes() emits the string's UTF-8 encoding; the previous
        // `c as u8` over chars() truncated non-ASCII characters to
        // their low byte.
        for b in s.bytes() {
            writec(b);
        }
    }

    /// Writes a string followed by a newline.
    pub fn writeln(s: &str) {
        write(s);
        writec(b'\n');
    }

    /// Writes a `u64` as `0x`-prefixed hexadecimal followed by a newline.
    pub fn write_hex(s: u64) {
        let mut buf: [u8; 24] = [0; 24];
        write("0x");
        writeln(s.numtoa_str(16, &mut buf));
    }

    /// Zero-sized `fmt::Write` adapter over the UART.
    pub struct Writer;

    impl fmt::Write for Writer {
        fn write_str(&mut self, s: &str) -> fmt::Result {
            write(s);
            Ok(())
        }
    }

    // Was `pub static mut ... ` — `static mut` is unsound to access and
    // unnecessary here: the Mutex already provides interior mutability,
    // so a plain static works and needs no `unsafe`.
    pub static SERIAL_WRITER: Mutex<Writer> = Mutex::new(Writer {});

    /// Prints to the serial console.
    #[macro_export]
    macro_rules! print {
        ($($arg:tt)*) => ( $crate::io::serial::_print(format_args!($($arg)*)) );
    }

    /// Prints to the serial console, with a trailing newline.
    #[macro_export]
    macro_rules! println {
        () => ({$crate::print!("\n")});
        ($($arg:tt)*) => ({
            $crate::print!("{}\n", format_args!($($arg)*))
        });
    }

    /// Implementation detail of `print!`/`println!`.
    pub fn _print(args: fmt::Arguments) {
        use core::fmt::Write;
        SERIAL_WRITER.lock().write_fmt(args).unwrap();
    }

    #[test_case]
    fn test_println() {
        // the unused local `Writer` the old test created has been removed
        println!("Testing println!() macro");
    }
}
use std::ops::Range;

use arrayvec::ArrayVec;
use bitflags::bitflags;

use fal::{read_u16, read_u32, read_u64};

use crate::{BlockAddr, ObjPhys, ObjectIdentifier, TransactionIdentifier};

/// Per-device space-manager counters: total blocks/chunks, the number of
/// chunk-info blocks (cib) and cib-address blocks (cab), and the free count.
#[derive(Debug)]
pub struct SpacemanagerDevice {
    pub block_count: u64,
    pub chunk_count: u64,
    pub cib_count: u32,
    pub cab_count: u32,
    pub free_count: u64,
    pub addr_offset: u32,
    pub reserved: u32,
    pub reserved2: u64,
}

impl SpacemanagerDevice {
    /// Serialized size in bytes of one device record.
    pub const LEN: usize = 48;
    /// Index of the main device within `SpacemanagerPhys::devices`.
    pub const MAIN_IDX: usize = 0;
    /// Index of the tier-2 device within `SpacemanagerPhys::devices`.
    pub const TIER2_IDX: usize = 1;

    /// Parses one device record from `bytes` at fixed field offsets
    /// (requires at least `Self::LEN` bytes).
    pub fn parse(bytes: &[u8]) -> Self {
        Self {
            block_count: read_u64(bytes, 0),
            chunk_count: read_u64(bytes, 8),
            cib_count: read_u32(bytes, 16),
            cab_count: read_u32(bytes, 20),
            free_count: read_u64(bytes, 24),
            addr_offset: read_u32(bytes, 32),
            reserved: read_u32(bytes, 36),
            reserved2: read_u64(bytes, 40),
        }
    }
}

/// One of the space manager's free queues: entry count, the B-tree that
/// stores the queue, and the oldest transaction id still referenced.
#[derive(Debug)]
pub struct SpacemanagerFreeQueue {
    pub count: u64,
    pub tree_oid: ObjectIdentifier,
    pub oldest_xid: TransactionIdentifier,
    pub tree_node_limit: u16,
    pub pad16: u16,
    pub pad32: u32,
    pub reserved: u64,
}

impl SpacemanagerFreeQueue {
    /// Index of the internal-pool queue within `SpacemanagerPhys::free_queues`.
    pub const INTERNAL_POOL_IDX: usize = 0;
    /// Index of the main-device queue.
    pub const MAIN_IDX: usize = 1;
    /// Index of the tier-2-device queue.
    pub const TIER2_IDX: usize = 2;
    /// Serialized size in bytes of one free-queue record.
    pub const LEN: usize = 40;

    /// Parses one free-queue record from `bytes` at fixed field offsets.
    pub fn parse(bytes: &[u8]) -> Self {
        Self {
            count: read_u64(bytes, 0),
            tree_oid: read_u64(bytes, 8).into(),
            oldest_xid: read_u64(bytes, 16),
            tree_node_limit: read_u16(bytes, 24),
            pad16: read_u16(bytes, 26),
            pad32: read_u32(bytes, 28),
            reserved: read_u64(bytes, 32),
        }
    }
}

/// Half-open range of block addresses delimiting an allocation zone.
pub type SpacemanagerAllocZoneBoundaries = Range<u64>;

/// A single allocation zone: its current boundaries plus a ring of
/// previously used boundaries.
#[derive(Debug)]
pub struct SpacemanagerAllocZoneInfoPhys {
    pub current_boundaries: SpacemanagerAllocZoneBoundaries,
    pub previous_boundaries: Vec<SpacemanagerAllocZoneBoundaries>,
    pub zone_id: u16,
    pub previous_boundary_idx: u16,
    pub reserved: u32,
}

impl SpacemanagerAllocZoneInfoPhys {
    /// Serialized size in bytes of one zone record.
    pub const LEN: usize = 136;

    /// Parses one allocation-zone record: the current boundary pair at
    /// offset 0, then seven historical boundary pairs, then the trailer.
    pub fn parse(bytes: &[u8]) -> Self {
        const PREVIOUS_BOUNDARIES_COUNT: usize = 7;

        // This collection starts at 16, and ends at 16 + 7 * 16 = 128.
        let previous_boundaries = (0..PREVIOUS_BOUNDARIES_COUNT)
            .map(|i| read_u64(bytes, 16 + i * 16 + 0)..read_u64(bytes, 16 + i * 16 + 8))
            .collect();

        Self {
            current_boundaries: read_u64(bytes, 0)..read_u64(bytes, 8),
            previous_boundaries,
            zone_id: read_u16(bytes, 128),
            previous_boundary_idx: read_u16(bytes, 130),
            reserved: read_u32(bytes, 132),
        }
    }
}

/// The data zone: the flat list of allocation zones for all devices.
#[derive(Debug)]
pub struct SpacemanagerDatazoneInfoPhys {
    allocation_zones: Vec<SpacemanagerAllocZoneInfoPhys>,
}

impl SpacemanagerDatazoneInfoPhys {
    /// Allocation zones per device.
    pub const ALLOCATION_ZONE_COUNT: usize = 8;
    // NOTE(review): `parse` below reads DEVICE_COUNT * ALLOCATION_ZONE_COUNT
    // zone records, but LEN only accounts for ALLOCATION_ZONE_COUNT of them.
    // Confirm whether LEN should include the DEVICE_COUNT factor before
    // relying on it for sizing.
    pub const LEN: usize = Self::ALLOCATION_ZONE_COUNT * SpacemanagerAllocZoneInfoPhys::LEN;

    /// Parses every allocation zone for every device, back to back.
    pub fn parse(bytes: &[u8]) -> Self {
        Self {
            allocation_zones: (0..SpacemanagerPhys::DEVICE_COUNT * Self::ALLOCATION_ZONE_COUNT)
                .map(|i| {
                    SpacemanagerAllocZoneInfoPhys::parse(
                        &bytes[i * SpacemanagerAllocZoneInfoPhys::LEN
                            ..(i + 1) * SpacemanagerAllocZoneInfoPhys::LEN],
                    )
                })
                .collect(),
        }
    }
}

bitflags! {
    /// Space-manager feature flags (only the "versioned" bit is known here).
    pub struct SpacemanagerFlags: u32 {
        const VERSIONED = 0x1;
    }
}

/// The top-level on-disk space-manager object.
#[derive(Debug)]
pub struct SpacemanagerPhys {
    pub header: ObjPhys,
    pub block_size: u32,
    pub blocks_per_chunk: u32,
    pub chunks_per_cib: u32,
    pub cibs_per_cab: u32,
    pub devices: ArrayVec<[SpacemanagerDevice; Self::DEVICE_COUNT]>,
    pub flags: SpacemanagerFlags,
    pub ip_bm_tx_multiplier: u32,
    pub ip_block_count: u64,
    pub ip_bm_size_in_blocks: u32,
    pub ip_bm_block_count: u32,
    pub ip_bm_base: BlockAddr,
    pub ip_base: BlockAddr,
    pub fs_reserve_block_count: u64,
    pub fs_reserve_alloc_count: u64,
    pub free_queues: ArrayVec<[SpacemanagerFreeQueue; Self::FREE_QUEUE_COUNT]>,
    pub ip_bm_free_head: u16,
    pub ip_bm_free_tail: u16,
    pub ip_bm_xid_offset: u32,
    pub ip_bitmap_offset: u32,
    pub ip_bm_free_next_offset: u32,
    pub version: u32,
    pub struct_size: u32,
    pub datazone: SpacemanagerDatazoneInfoPhys,
}

impl SpacemanagerPhys {
    /// Number of devices (main + tier 2).
    pub const DEVICE_COUNT: usize = 2;
    /// Number of free queues (internal pool, main, tier 2).
    pub const FREE_QUEUE_COUNT: usize = 3;

    /// Parses the whole space-manager object from `bytes`, starting with the
    /// common object header at offset 0.
    pub fn parse(bytes: &[u8]) -> Self {
        let header = ObjPhys::parse(bytes);

        // The devices field starts at 48, and since SD_COUNT = 2 and
        // LEN = 48, it stops at 144.
        let devices = (0..Self::DEVICE_COUNT)
            .map(|i| SpacemanagerDevice::parse(&bytes[48 + i * 48..48 + (i + 1) * 48]))
            .collect();

        // The free queues field starts at 200, and since SFQ_COUNT = 3 and
        // LEN = 40, it stops at 320.
        let free_queues = (0..Self::FREE_QUEUE_COUNT)
            .map(|i| SpacemanagerFreeQueue::parse(&bytes[200 + i * 40..200 + (i + 1) * 40]))
            .collect();

        Self {
            header,
            block_size: read_u32(bytes, 32),
            blocks_per_chunk: read_u32(bytes, 36),
            chunks_per_cib: read_u32(bytes, 40),
            cibs_per_cab: read_u32(bytes, 44),
            devices,
            // Panics on unknown flag bits; presumably deliberate strictness.
            flags: SpacemanagerFlags::from_bits(read_u32(bytes, 144)).unwrap(),
            ip_bm_tx_multiplier: read_u32(bytes, 148),
            ip_block_count: read_u64(bytes, 152),
            ip_bm_size_in_blocks: read_u32(bytes, 160),
            ip_bm_block_count: read_u32(bytes, 164),
            ip_bm_base: read_u64(bytes, 168) as i64,
            ip_base: read_u64(bytes, 176) as i64,
            fs_reserve_block_count: read_u64(bytes, 184),
            fs_reserve_alloc_count: read_u64(bytes, 192),
            free_queues,
            ip_bm_free_head: read_u16(bytes, 320),
            ip_bm_free_tail: read_u16(bytes, 322),
            ip_bm_xid_offset: read_u32(bytes, 324),
            ip_bitmap_offset: read_u32(bytes, 328),
            ip_bm_free_next_offset: read_u32(bytes, 332),
            version: read_u32(bytes, 336),
            struct_size: read_u32(bytes, 340),
            datazone: SpacemanagerDatazoneInfoPhys::parse(&bytes[344..]),
        }
    }

    /// The main device's record.
    pub fn main_device(&self) -> &SpacemanagerDevice {
        &self.devices[SpacemanagerDevice::MAIN_IDX]
    }
    /// The tier-2 device's record.
    pub fn tier2_device(&self) -> &SpacemanagerDevice {
        &self.devices[SpacemanagerDevice::TIER2_IDX]
    }
    /// The internal-pool free queue.
    pub fn internal_pool_fq(&self) -> &SpacemanagerFreeQueue {
        &self.free_queues[SpacemanagerFreeQueue::INTERNAL_POOL_IDX]
    }
    /// The main-device free queue.
    pub fn main_fq(&self) -> &SpacemanagerFreeQueue {
        &self.free_queues[SpacemanagerFreeQueue::MAIN_IDX]
    }
    /// The tier-2-device free queue.
    pub fn tier2_fq(&self) -> &SpacemanagerFreeQueue {
        &self.free_queues[SpacemanagerFreeQueue::TIER2_IDX]
    }
}

/// Key of a free-queue B-tree entry: the transaction that freed the block,
/// plus the block's physical address.
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct SpacemanagerFreeQueueKey {
    pub xid: TransactionIdentifier,
    pub physaddr: BlockAddr,
}

impl SpacemanagerFreeQueueKey {
    /// Parses a key from 16 bytes: xid at 0, physical address at 8.
    pub fn parse(bytes: &[u8]) -> Self {
        Self {
            xid: read_u64(bytes, 0),
            physaddr: read_u64(bytes, 8) as i64,
        }
    }
}
//! This module lets you load an XInput DLL and use it. //! //! ## How To Use This //! //! 1) Call `dynamic_load_xinput()`. This will attempt to load in a DLL that //! supports XInput. Note that the user might not have XInput installed, so //! be prepared to fall back to a keyboard/mouse if that happens. //! 2) Call `xinput_get_state(controller)` to get your data. Usually you do this //! once at the start of each frame of the game. You can poll for controllers //! 0, 1, 2, or 3. If a controller is connected you'll get `Ok(data)`. //! Otherwise you'll get some sort of `Err` info. //! 3) Call `xinput_set_state(controller, left_speed, right_speed)` to set a //! rumble effect on the controller. As with `xinput_get_state`, you can //! select slots 0, 1, 2 or 3, and missing controllers or out of bounds //! selections will give an `Err` of some kind. Devices other than literal //! XBox 360 controllers have XInput drivers, but not all of them actually //! have rumble support, so this should be an extra not an essential. //! //! If xinput isn't fully loaded, a call to get_state or set_state is still //! entirely safe to perform, you'll just get an `Err`. //! //! Note that there are theoretically other XInput extras you might care about, //! but they're only available in Windows 8+ and I use Windows 7, so oh well. 
#![no_std]
#![allow(non_upper_case_globals)]
#![warn(missing_docs)]
#![forbid(missing_debug_implementations)]

#[macro_use]
extern crate log;
extern crate winapi;

use winapi::shared::minwindef::{DWORD, HMODULE};
use winapi::shared::winerror::{ERROR_DEVICE_NOT_CONNECTED, ERROR_SUCCESS};
use winapi::um::libloaderapi::{FreeLibrary, GetProcAddress, LoadLibraryW};
use winapi::um::xinput::*;

/// Signature of `XInputGetState` as exported by the system DLL.
type XInputGetStateFunc = unsafe extern "system" fn(DWORD, *mut XINPUT_STATE) -> DWORD;
/// Signature of `XInputSetState` as exported by the system DLL.
type XInputSetStateFunc = unsafe extern "system" fn(DWORD, *mut XINPUT_VIBRATION) -> DWORD;

// Handle of the loaded XInput DLL plus the two function pointers we need.
// These are only written while `xinput_status` holds the LOADING value,
// which the compare_exchange in `dynamic_load_xinput` hands to exactly one
// thread at a time.
static mut global_xinput_handle: HMODULE = ::core::ptr::null_mut();
static mut opt_xinput_get_state: Option<XInputGetStateFunc> = None;
static mut opt_xinput_set_state: Option<XInputSetStateFunc> = None;

// State machine guarding the statics above. `AtomicUsize::new(0)` replaces
// the long-deprecated `ATOMIC_USIZE_INIT` constant (deprecated since Rust
// 1.34); it is a `const fn`, so this is still a valid static initializer.
static xinput_status: ::core::sync::atomic::AtomicUsize =
    ::core::sync::atomic::AtomicUsize::new(0);

const ordering: ::core::sync::atomic::Ordering = ::core::sync::atomic::Ordering::SeqCst;
const xinput_UNINITIALIZED: usize = 0;
const xinput_LOADING: usize = 1;
const xinput_ACTIVE: usize = 2;

/// Quick and dirty wrapper to let us format log messages easier.
pub(crate) struct WideNullU16<'a>(&'a [u16; ::winapi::shared::minwindef::MAX_PATH]);

impl<'a> ::core::fmt::Debug for WideNullU16<'a> {
    /// Prints the buffer as narrow characters up to (not including) the
    /// first NUL. Lossy for non-ASCII, but fine for DLL names.
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        for &u in self.0.iter() {
            if u == 0 {
                break;
            } else {
                write!(f, "{}", u as u8 as char)?
            }
        }
        Ok(())
    }
}

/// Converts a rusty string into a win32 string.
///
/// The input is truncated if necessary so that the output buffer always
/// ends with a NUL terminator.
pub(crate) fn wide_null<S: AsRef<str>>(s: S) -> [u16; ::winapi::shared::minwindef::MAX_PATH] {
    let mut output: [u16; ::winapi::shared::minwindef::MAX_PATH] =
        [0; ::winapi::shared::minwindef::MAX_PATH];
    let mut i = 0;
    for u in s.as_ref().encode_utf16() {
        if i == output.len() - 1 {
            break;
        } else {
            output[i] = u;
        }
        i += 1;
    }
    output[i] = 0;
    output
}

/// The ways that a dynamic load of XInput can fail.
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum XInputLoadingFailure {
    /// The xinput system was already in the process of loading in some other
    /// thread. This attempt failed because of that, but that other attempt might
    /// still succeed.
    AlreadyLoading,
    /// The xinput system was already active. A failure of this kind leaves the
    /// system active.
    AlreadyActive,
    /// The system was not loading or active, but was in some unknown state. If
    /// you get this, it's probably a bug that you should report.
    UnknownState,
    /// No DLL for XInput could be found. This places the system back into an
    /// "uninitialized" status, and you could potentially try again later if the
    /// user fiddles with the program's DLL path or whatever.
    NoDLL,
    /// A DLL was found that matches one of the expected XInput DLL names, but it
    /// didn't contain both of the expected functions. This is probably a weird
    /// situation to find. Either way, the xinput status is set to "uninitialized"
    /// and as with the NoDLL error you could potentially try again.
    NoPointers,
}

/// Attempts to dynamically load an XInput DLL and get the function pointers.
///
/// This operation is thread-safe and can be performed at any time. If xinput
/// hasn't been loaded yet, or if there was a failed load attempt, then
/// `xinput_get_state` and `xinput_set_state` will safely return an `Err` value
/// to that effect.
///
/// There's no way provided to unload XInput once it's been loaded, because that
/// makes the normal operation a little faster. Why would you want to unload it
/// anyway? Don't be silly.
///
/// # Failure
///
/// This can fail in a few ways, as explained in the `XInputLoadingFailure`
/// type. The most likely failure case is that the user's system won't have the
/// required DLL, in which case you should probably allow them to play with just
/// a keyboard/mouse instead.
///
/// # Current DLL Names
///
/// Currently the following DLL names are searched for in this order:
///
/// * `xinput9_1_0.dll`
/// * `xinput1_4.dll`
/// * `xinput1_3.dll`
/// * `xinput1_2.dll`
/// * `xinput1_1.dll`
pub fn dynamic_load_xinput() -> Result<(), XInputLoadingFailure> {
    // The result status is if the value was what we expected, and the value
    // inside is actual value seen. Winning the UNINITIALIZED -> LOADING
    // exchange makes this thread the sole loader; every Err arm below means
    // some other thread got there first (or the counter is corrupt).
    match xinput_status.compare_exchange(xinput_UNINITIALIZED, xinput_LOADING, ordering, ordering) {
        // `xinput_LOADING` / `xinput_ACTIVE` are consts, so these arms match
        // those exact observed values.
        Err(xinput_LOADING) => {
            debug!("A call to 'dynamic_load_xinput' was made while XInput was already loading.");
            Err(XInputLoadingFailure::AlreadyLoading)
        }
        Err(xinput_ACTIVE) => {
            debug!("A call to 'dynamic_load_xinput' was made while XInput was already active.");
            Err(XInputLoadingFailure::AlreadyActive)
        }
        Err(_) => {
            warn!("A call to 'dynamic_load_xinput' was made while XInput was in an unknown state.");
            Err(XInputLoadingFailure::UnknownState)
        }
        Ok(_) => {
            // Candidate DLL names, tried in order of preference.
            let xinput91 = wide_null("xinput9_1_0.dll");
            let xinput14 = wide_null("xinput1_4.dll");
            let xinput13 = wide_null("xinput1_3.dll");
            let xinput12 = wide_null("xinput1_2.dll");
            let xinput11 = wide_null("xinput1_1.dll");
            let mut xinput_handle: HMODULE = ::core::ptr::null_mut();
            for lib_name in [xinput91, xinput14, xinput13, xinput12, xinput11].into_iter() {
                trace!("Attempting to load XInput DLL: {:?}", WideNullU16(lib_name));
                // It's always safe to call `LoadLibraryW`, the worst that can happen is
                // that we get a null pointer back.
                xinput_handle = unsafe { LoadLibraryW(lib_name.as_ptr()) };
                if !xinput_handle.is_null() {
                    debug!("Success: XInput Loaded: {:?}", WideNullU16(lib_name));
                    break;
                }
            }
            if xinput_handle.is_null() {
                debug!("Failure: XInput could not be loaded.");
                // Roll the state machine back so a later attempt can retry.
                xinput_status
                    .compare_exchange(xinput_LOADING, xinput_UNINITIALIZED, ordering, ordering)
                    .ok();
                Err(XInputLoadingFailure::NoDLL)
            } else {
                let get_state_name = b"XInputGetState\0";
                let set_state_name = b"XInputSetState\0";
                // using transmute is so dodgy we'll put that in its own unsafe block.
                unsafe {
                    let get_state_ptr =
                        GetProcAddress(xinput_handle, get_state_name.as_ptr() as *mut i8);
                    if !get_state_ptr.is_null() {
                        trace!("Found XInputGetState.");
                        opt_xinput_get_state = Some(::core::mem::transmute(get_state_ptr));
                    } else {
                        trace!("Could not find XInputGetState.");
                    }
                }
                // using transmute is so dodgy we'll put that in its own unsafe block.
                unsafe {
                    let set_state_ptr =
                        GetProcAddress(xinput_handle, set_state_name.as_ptr() as *mut i8);
                    if !set_state_ptr.is_null() {
                        trace!("Found XInputSetState.");
                        opt_xinput_set_state = Some(::core::mem::transmute(set_state_ptr));
                    } else {
                        trace!("Could not find XInputSetState.");
                    }
                }
                // this is safe because no other code can be loading xinput at the same time as us.
                unsafe {
                    if opt_xinput_get_state.is_some() && opt_xinput_set_state.is_some() {
                        global_xinput_handle = xinput_handle;
                        debug!("All function pointers loaded successfully.");
                        xinput_status
                            .compare_exchange(xinput_LOADING, xinput_ACTIVE, ordering, ordering)
                            .ok();
                        Ok(())
                    } else {
                        // Partial success is treated as failure: clear both
                        // pointers, release the DLL, and reset the state.
                        opt_xinput_get_state = None;
                        opt_xinput_set_state = None;
                        FreeLibrary(xinput_handle);
                        debug!("Could not load the function pointers.");
                        xinput_status
                            .compare_exchange(xinput_LOADING, xinput_UNINITIALIZED, ordering, ordering)
                            .ok();
                        Err(XInputLoadingFailure::NoPointers)
                    }
                }
            }
        }
    }
}

/// This wraps an `XINPUT_STATE` value and provides a more rusty (read-only)
/// interface to the data it contains.
/// /// All three major game companies use different names for most of the buttons, /// so the docs for each button method list out what each of the major companies /// call that button. To the driver it's all the same, it's just however you /// want to think of them. /// /// If sequential calls to `xinput_get_state` for a given controller slot have /// the same packet number then the controller state has not changed since the /// last call. The `PartialEq` and `Eq` implementations for this wrapper type /// reflect that. The exact value of the packet number is unimportant. /// /// If you want to do something that the rust wrapper doesn't support, just use /// the raw field to get at the inner value. pub struct XInputState { /// The raw value we're wrapping. pub raw: XINPUT_STATE, } impl ::core::cmp::PartialEq for XInputState { /// Equality for `XInputState` values is based _only_ on the /// `dwPacketNumber` of the wrapped `XINPUT_STATE` value. This is entirely /// correct for values obtained from the xinput system, but if you make your /// own `XInputState` values for some reason you can confuse it. fn eq(&self, other: &XInputState) -> bool { self.raw.dwPacketNumber == other.raw.dwPacketNumber } } impl ::core::cmp::Eq for XInputState {} impl ::core::fmt::Debug for XInputState { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "XInputState (_)") } } impl XInputState { /// The north button of the action button group. /// /// * Nintendo: X /// * Playstation: Triangle /// * XBox: Y #[inline] pub fn north_button(&self) -> bool { self.raw.Gamepad.wButtons & XINPUT_GAMEPAD_Y != 0 } /// The south button of the action button group. /// /// * Nintendo: B /// * Playstation: X /// * XBox: A #[inline] pub fn south_button(&self) -> bool { self.raw.Gamepad.wButtons & XINPUT_GAMEPAD_A != 0 } /// The east button of the action button group. 
/// /// * Nintendo: A /// * Playstation: Circle /// * XBox: B #[inline] pub fn east_button(&self) -> bool { self.raw.Gamepad.wButtons & XINPUT_GAMEPAD_B != 0 } /// The west button of the action button group. /// /// * Nintendo: Y /// * Playstation: Square /// * XBox: X #[inline] pub fn west_button(&self) -> bool { self.raw.Gamepad.wButtons & XINPUT_GAMEPAD_X != 0 } /// The up button on the directional pad. #[inline] pub fn arrow_up(&self) -> bool { self.raw.Gamepad.wButtons & XINPUT_GAMEPAD_DPAD_UP != 0 } /// The down button on the directional pad. #[inline] pub fn arrow_down(&self) -> bool { self.raw.Gamepad.wButtons & XINPUT_GAMEPAD_DPAD_DOWN != 0 } /// The left button on the directional pad. #[inline] pub fn arrow_left(&self) -> bool { self.raw.Gamepad.wButtons & XINPUT_GAMEPAD_DPAD_LEFT != 0 } /// The right button on the directional pad. #[inline] pub fn arrow_right(&self) -> bool { self.raw.Gamepad.wButtons & XINPUT_GAMEPAD_DPAD_RIGHT != 0 } /// The "start" button. /// /// * Nintendo: Start (NES / SNES), '+' (Pro Controller) /// * Playstation: Start /// * XBox: Start #[inline] pub fn start_button(&self) -> bool { self.raw.Gamepad.wButtons & XINPUT_GAMEPAD_START != 0 } /// The "not start" button. /// /// * Nintendo: Select (NES / NES), '-' (Pro Controller) /// * Playstation: Select /// * XBox: Back #[inline] pub fn select_button(&self) -> bool { self.raw.Gamepad.wButtons & XINPUT_GAMEPAD_BACK != 0 } /// The upper left shoulder button. /// /// * Nintendo: L /// * Playstation: L1 /// * XBox: LB #[inline] pub fn left_shoulder(&self) -> bool { self.raw.Gamepad.wButtons & XINPUT_GAMEPAD_RIGHT_SHOULDER != 0 } /// The upper right shoulder button. /// /// * Nintendo: R /// * Playstation: R1 /// * XBox: RB #[inline] pub fn right_shoulder(&self) -> bool { self.raw.Gamepad.wButtons & XINPUT_GAMEPAD_RIGHT_SHOULDER != 0 } /// The default threshold to count a trigger as being "pressed". 
pub const TRIGGER_THRESHOLD: u8 = XINPUT_GAMEPAD_TRIGGER_THRESHOLD; /// The lower left shoulder trigger. If you want to use this as a simple /// boolean it is suggested that you compare it to the `TRIGGER_THRESHOLD` /// constant. /// /// * Nintendo: ZL /// * Playstation: L2 /// * XBox: LT #[inline] pub fn left_trigger(&self) -> u8 { self.raw.Gamepad.bLeftTrigger } /// The lower right shoulder trigger. If you want to use this as a simple /// boolean it is suggested that you compare it to the `TRIGGER_THRESHOLD` /// constant. /// /// * Nintendo: ZR /// * Playstation: R2 /// * XBox: RT #[inline] pub fn right_trigger(&self) -> u8 { self.raw.Gamepad.bRightTrigger } /// The lower left shoulder trigger as a bool using the default threshold. /// /// * Nintendo: ZL /// * Playstation: L2 /// * XBox: LT #[inline] pub fn left_trigger_bool(&self) -> bool { self.left_trigger() >= XInputState::TRIGGER_THRESHOLD } /// The lower right shoulder trigger as a bool using the default threshold. /// /// * Nintendo: ZR /// * Playstation: R2 /// * XBox: RT #[inline] pub fn right_trigger_bool(&self) -> bool { self.right_trigger() >= XInputState::TRIGGER_THRESHOLD } /// The left thumb stick being pressed inward. /// /// * Nintendo: (L) /// * Playstation: L3 /// * XBox: (L) #[inline] pub fn left_thumb_button(&self) -> bool { self.raw.Gamepad.wButtons & XINPUT_GAMEPAD_LEFT_THUMB != 0 } /// The right thumb stick being pressed inward. /// /// * Nintendo: (R) /// * Playstation: R3 /// * XBox: (R) #[inline] pub fn right_thumb_button(&self) -> bool { self.raw.Gamepad.wButtons & XINPUT_GAMEPAD_RIGHT_THUMB != 0 } /// The suggested default deadzone for use with the left thumb stick. pub const LEFT_STICK_DEADZONE: i16 = XINPUT_GAMEPAD_LEFT_THUMB_DEADZONE; /// The suggested default deadzone for use with the right thumb stick. pub const RIGHT_STICK_DEADZONE: i16 = XINPUT_GAMEPAD_RIGHT_THUMB_DEADZONE; /// The left stick raw value. /// /// Positive values are to the right (X-axis) or up (Y-axis). 
#[inline] pub fn left_stick_raw(&self) -> (i16, i16) { (self.raw.Gamepad.sThumbLX, self.raw.Gamepad.sThumbLY) } /// The right stick raw value. /// /// Positive values are to the right (X-axis) or up (Y-axis). #[inline] pub fn right_stick_raw(&self) -> (i16, i16) { (self.raw.Gamepad.sThumbRX, self.raw.Gamepad.sThumbRY) } /// The left stick value normalized with the default dead-zone. /// /// See `normalize_raw_stick_value` for more. #[inline] pub fn left_stick_normalized(&self) -> (f32, f32) { XInputState::normalize_raw_stick_value(self.left_stick_raw(), XInputState::LEFT_STICK_DEADZONE) } /// The right stick value normalized with the default dead-zone. /// /// See `normalize_raw_stick_value` for more. #[inline] pub fn right_stick_normalized(&self) -> (f32, f32) { XInputState::normalize_raw_stick_value( self.right_stick_raw(), XInputState::RIGHT_STICK_DEADZONE, ) } /// This helper normalizes a raw stick value using the given deadzone. /// /// If the raw value's 2d length is less than the deadzone the result will be /// `(0.0,0.0)`, otherwise the result is normalized across the range from the /// deadzone point to the maximum value. /// /// The `deadzone` value is clamped to the range 0 to 32,766 (inclusive) /// before use. Negative inputs or maximum value inputs make the normalization /// just work improperly. #[inline] pub fn normalize_raw_stick_value(raw_stick: (i16, i16), deadzone: i16) -> (f32, f32) { let deadzone_float = deadzone.max(0).min(i16::max_value() - 1) as f32; let raw_float = (raw_stick.0 as f32, raw_stick.1 as f32); let length = (raw_float.0 * raw_float.0 + raw_float.1 * raw_float.1).sqrt(); let normalized = (raw_float.0 / length, raw_float.1 / length); if length > deadzone_float { // clip our value to the expected maximum length. 
let length = length.min(32_767.0); let scale = (length - deadzone_float) / (32_767.0 - deadzone_float); (normalized.0 * scale, normalized.1 * scale) } else { (0.0, 0.0) } } } #[test] fn normalize_raw_stick_value_test() { for &x in [i16::min_value(), i16::max_value()].into_iter() { for &y in [i16::min_value(), i16::max_value()].into_iter() { #[cfg_attr(rustfmt, rustfmt_skip)] for &deadzone in [i16::min_value(), 0, i16::max_value() / 2, i16::max_value() - 1, i16::max_value()].into_iter() { let f = XInputState::normalize_raw_stick_value((x, y), deadzone); #[cfg_attr(rustfmt, rustfmt_skip)] assert!(f.0.abs() <= 1.0, "XFail: x {}, y {}, dz {} f {:?}", x, y, deadzone, f); #[cfg_attr(rustfmt, rustfmt_skip)] assert!(f.1.abs() <= 1.0, "YFail: x {}, y {}, dz {} f {:?}", x, y, deadzone, f); } } } } /// These are all the sorts of problems that can come up when you're using the /// xinput system. #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] pub enum XInputUsageError { /// XInput isn't currently loaded. XInputNotLoaded, /// The controller ID you gave was 4 or more. InvalidControllerID, /// Not really an error, this controller is just missing. DeviceNotConnected, /// There was some sort of unexpected error happened, this is the error code /// windows returned. UnknownError(u32), } /// Polls the controller port given for the current controller state. /// /// # Notes /// /// It is a persistent problem (since ~2007?) with xinput that polling for the /// data of a controller that isn't connected will cause a long delay. In the /// area of 500_000 cpu cycles. That's like 2_000 cache misses in a row. /// /// Once a controller is detected as not being plugged in you are strongly /// advised to not poll for its data again next frame. Instead, you should /// probably only poll for one known-missing controller per frame at most. 
/// /// Alternately, you can register for your app to get plug and play events and /// then wait for one of them to come in before you ever poll for a missing /// controller a second time. That's up to you. /// /// # Errors /// /// A few things can cause an `Err` value to come back, as explained by the /// `XInputUsageError` type. /// /// Most commonly, a controller will simply not be connected. Most people don't /// have all four slots plugged in all the time. pub fn xinput_get_state(user_index: u32) -> Result<XInputState, XInputUsageError> { if xinput_status.load(ordering) != xinput_ACTIVE { Err(XInputUsageError::XInputNotLoaded) } else if user_index >= 4 { Err(XInputUsageError::InvalidControllerID) } else { let mut output: XINPUT_STATE = unsafe { ::core::mem::zeroed() }; let return_status = unsafe { // This unwrap is safe only because we don't currently support unloading // the system once it's active. Otherwise we'd have to use a full mutex // and all that. let func = opt_xinput_get_state.unwrap(); func(user_index, &mut output) }; match return_status { ERROR_SUCCESS => return Ok(XInputState { raw: output }), ERROR_DEVICE_NOT_CONNECTED => Err(XInputUsageError::DeviceNotConnected), s => { trace!("Unexpected error code: {}", s); Err(XInputUsageError::UnknownError(s)) } } } } /// Allows you to set the rumble speeds of the left and right motors. /// /// Valid motor speeds are across the whole `u16` range, and the number is the /// scale of the motor intensity. In other words, 0 is 0%, and 65,535 is 100%. /// /// On a 360 controller the left motor is low-frequency and the right motor is /// high-frequency. On other controllers running through xinput this might be /// the case, or the controller might not even have rumble ability at all. If /// rumble is missing from the device you'll still get `Ok` return values, so /// treat rumble as an extra, not an essential. 
/// /// # Errors /// /// A few things can cause an `Err` value to come back, as explained by the /// `XInputUsageError` type. /// /// Most commonly, a controller will simply not be connected. Most people don't /// have all four slots plugged in all the time. pub fn xinput_set_state( user_index: u32, left_motor_speed: u16, right_motor_speed: u16, ) -> Result<(), XInputUsageError> { if xinput_status.load(ordering) != xinput_ACTIVE { Err(XInputUsageError::XInputNotLoaded) } else if user_index >= 4 { Err(XInputUsageError::InvalidControllerID) } else { let mut input = XINPUT_VIBRATION { wLeftMotorSpeed: left_motor_speed, wRightMotorSpeed: right_motor_speed, }; let return_status = unsafe { // This unwrap is safe only because we don't currently support unloading // the system once it's active. Otherwise we'd have to use a full mutex // and all that. let func = opt_xinput_set_state.unwrap(); func(user_index, &mut input) }; match return_status { ERROR_SUCCESS => Ok(()), ERROR_DEVICE_NOT_CONNECTED => Err(XInputUsageError::DeviceNotConnected), s => { trace!("Unexpected error code: {}", s); Err(XInputUsageError::UnknownError(s)) } } } }
use fnv::{FnvHashMap, FnvHashSet}; use std::collections::HashSet; use std::fs; use std::time::Instant; extern crate fnv; fn mov((x, y): (i32, i32), direction: char) -> (i32, i32) { match direction { 'U' => (x, y + 1), 'D' => (x, y - 1), 'R' => (x + 1, y), 'L' => (x - 1, y), _ => unreachable!(), } } struct Point { // marker: char, distance: u32, } type Grid = FnvHashMap<(i32, i32), Point>; type Wire = Vec<String>; fn fill_grid(wire: &Wire) -> Grid { let mut grid = Grid::default(); let mut current_location = (0, 0); let mut total_distance = 0; for instruction in wire { let direction = instruction.as_bytes()[0] as char; let distance_str = &instruction[1..]; let distance: u32 = distance_str.parse().unwrap(); for _i in 0..distance { total_distance += 1; current_location = mov(current_location, direction); if !grid.contains_key(&current_location) { if direction == 'L' || direction == 'R' { let point = Point { /*marker: '-', */ distance: distance, }; grid.insert(current_location, point); } if direction == 'U' || direction == 'D' { let point = Point { /* marker: '|', */ distance: distance, }; grid.insert(current_location, point); } } grid.insert( current_location, Point { /* marker: '+', */ distance: total_distance, }, ); } } println!("total len: {}", grid.len()); return grid; } fn manhattan_distance((x, y): (i32, i32), _grids: &Vec<Grid>) -> u32 { return (x.abs() + y.abs()) as u32; } fn total_distance((x, y): (i32, i32), grids: &Vec<Grid>) -> u32 { return grids .iter() .map(|grid: &Grid| grid.get(&(x, y)).unwrap().distance) .sum(); } type MetricFunction = fn(location: (i32, i32), grids: &Vec<Grid>) -> u32; // type Intersections = Intersection<'_, (i32, i32), std::collections::hash_map::RandomState>; fn find_intersections(wires: Vec<Wire>) -> (Vec<Grid>, HashSet<(i32, i32)>) { let mut now = Instant::now(); let grids: Vec<_> = wires.iter().map(|x| fill_grid(x)).collect(); println!("Created grids {}", now.elapsed().as_millis()); now = Instant::now(); let grid_1_keys: 
HashSet<_> = grids[0].keys().cloned().collect(); let grid_2_keys: HashSet<_> = grids[1].keys().cloned().collect(); println!("Created hashsets {}", now.elapsed().as_millis()); let mut now = Instant::now(); let intersections: HashSet<_> = grid_1_keys.intersection(&grid_2_keys).cloned().collect(); println!("Found Intersections {}", now.elapsed().as_millis()); return (grids, intersections); } fn find_closest_intersection( grids: &Vec<Grid>, metric: MetricFunction, intersections: &HashSet<(i32, i32)>, ) -> u32 { let mut now = Instant::now(); let mut distances: Vec<_> = intersections .iter() .map(|&pos| metric(pos, &grids)) .collect(); distances.sort(); let closest = distances[0]; println!("Found closest distance {}", now.elapsed().as_millis()); return closest; } fn _array_to_wire(wire: &[&str]) -> Wire { return wire.iter().map(|st| st.to_string()).collect(); } fn main() { let filename = "input.txt"; let data = fs::read_to_string(filename).unwrap(); let lines: Vec<String> = data.split("\n").map(|st| st.to_string()).collect(); let wires: Vec<Wire> = lines .iter() .map(|line| line.split(",").map(|x| x.to_string()).collect()) .collect(); let (grids, intersections) = find_intersections(wires); println!( "{}", find_closest_intersection(&grids, manhattan_distance, &intersections) ); println!( "{}", find_closest_intersection(&grids, total_distance, &intersections) ); // let wire1a = array_to_wire(&["R8", "U5", "L5", "D3"]); // let wire1b = array_to_wire(&["U7", "R6", "D4", "L4"]); // println!("{}", find_intersection(vec![&wire1a, &wire1b], manhattan_distance)); // let wire2a = array_to_wire(&["R75", "D30", "R83", "U83", "L12", "D49", "R71", "U7", "L72"]); // let wire2b = array_to_wire(&["U62", "R66", "U55", "R34", "D71", "R55", "D58", "R83"]); // println!("{}", find_intersection(vec![&wire2a, &wire2b], manhattan_distance)); // let wire3a = array_to_wire(&["R98", "U47", "R26", "D63", "R33", "U87", "L62", "D20", "R33", "U53", "R51"]); // let wire3b = array_to_wire(&["U98", "R91", 
"D20", "R16", "D67", "R40", "U7", "R15", "U6", "R7"]); // println!("{}", find_intersection(vec![&wire3a, &wire3b], manhattan_distance)); // println!("{}", find_intersection(vec![&wires[0], &wires[1]], manhattan_distance)); // println!("{}", find_intersection(vec![&wire1a, &wire1b], total_distance)); // println!("{}", find_intersection(vec![&wire2a, &wire2b], total_distance)); // println!("{}", find_intersection(vec![&wire3a, &wire3b], total_distance)); // println!("{}", find_intersection(vec![&wires[0], &wires[1]], total_distance)); }
// Smoke-test fixture for the `parameterized` attribute macro: the attribute
// should expand `my_test` into one test case per combination of the `v` and
// `w` argument lists.
use parameterized_macro::parameterized;

#[parameterized(v = {"a", "b"}, w = {1, 2})]
fn my_test(v: &str, w: i32) {}

// Present because this fixture is compiled as a standalone binary.
fn main() {}
use std::collections::HashMap;
use std::fmt::{Display, Debug};
use std::cmp::Eq;
use std::hash::Hash;

/// A search request described by free-form string options.
#[derive(Debug, Clone, PartialEq)]
pub struct Search {
    pub options: HashMap<String, String>,
}

impl Search {
    /// Builds a `Search` by stringifying every key and value of `attrs`.
    ///
    /// Only `Display` is needed for the conversion; the previous
    /// `Debug + Eq + Hash` bounds were unused and have been relaxed
    /// (backward compatible: any type satisfying the old bounds also
    /// satisfies this one).
    pub fn from_attributes<T: Display>(attrs: HashMap<T, T>) -> Search {
        let options = attrs
            .iter()
            .map(|(k, v)| (k.to_string(), v.to_string()))
            .collect();
        Search { options }
    }
}
use super::{repo::Repo, repo_operations::RepoOperations}; use anyhow::{Context, Result}; use log::debug; use termion::color; /// Struct describing all repositories `rgit` is working on pub struct Repositories { pub repos: Vec<Repo>, } impl Repositories { /// Creates new instance of `Repositories` pub fn new() -> Repositories { Repositories { repos: Vec::new() } } /// Pretty prints title of executed command fn print_title(&self, title: &str) { print!( "{}\n{}{}{}\n", color::Fg(color::Red), title, color::Reset.fg_str(), color::Reset.bg_str() ); } } impl RepoOperations for Repositories { /// Executes custom git command on all repos /// /// # Arguments /// /// * `cmd` - git command to execute fn custom_cmd(&self, cmd: String) -> Result<()> { debug!("Executing command: {} on all repositories", cmd); self.print_title(&format!("git {}", cmd)); for repo in &self.repos { repo.custom_cmd(String::from(&cmd)) .context("Failed to execute command on repo")?; } Ok(()) } /// Executes `git status --porcelain` on all repositories fn porcelain(&self) -> Result<()> { debug!("Executing git status --porcelain on all repositories"); self.print_title("git status --porcelain"); for repo in &self.repos { repo.porcelain() .context("Failed to execute porcelain command")?; } Ok(()) } /// Finds repositories which have cherry-picks in history fn find_cherry_picks(&self) -> Result<Option<String>> { debug!("Trying to find all repositories which have cherry-picks in history"); self.print_title("repositories with cherry-picks in git reflog"); for repo in &self.repos { repo.find_cherry_picks() .context("Failed to find cherry picks")?; } Ok(None) } /// Prints all cherry picks found in history fn print_cherry_picks(&self) -> Result<()> { debug!("Prints all cherry picks found in history"); self.print_title("repositories with cherry-picks in git reflog"); for repo in &self.repos { repo.print_cherry_picks() .context("Failed to print cherry picks")?; } Ok(()) } /// Print repositories for which there is an 
author in last `number` of commits /// /// # Arguments /// /// * `number` - last number of commits to look into /// * `author` - author to look for fn print_commits_with_author(&self, number: u32, author: &str) -> Result<()> { debug!( "Prints all repositories which have author in last {} commits", number ); self.print_title(&format!( "repositories with author {} in last {} commits", author, number )); for repo in &self.repos { repo.print_commits_with_author(number, author) .context("Failed to print commits with author")?; } Ok(()) } }
#![allow(non_snake_case)]
// NOTE(review): the proof/verify code below is deliberately a verbatim copy of
// ../tests/r1cs.rs (see the author's comment) — keep it byte-for-byte in sync
// with that file rather than editing it here.
#[macro_use]
extern crate criterion;
use criterion::Criterion;

// Code below copied from ../tests/r1cs.rs
//
// Ideally we wouldn't duplicate it, but AFAIK criterion requires a
// separate benchmark harness, while the test code uses a different
// test harness, so I (hdevalence) just copied the code over. It
// should not be edited here. In the future it would be good if
// someone wants to figure a way to use #[path] attributes or
// something to avoid the duplication.
extern crate bulletproofs;
extern crate curve25519_dalek;
extern crate merlin;
extern crate rand;

use bulletproofs::r1cs::*;
use bulletproofs::{BulletproofGens, PedersenGens};
use curve25519_dalek::ristretto::CompressedRistretto;
use curve25519_dalek::scalar::Scalar;
use merlin::Transcript;
use rand::seq::SliceRandom;
use rand::Rng;

// Shuffle gadget (documented in markdown file)

/// A proof-of-shuffle.
struct ShuffleProof(R1CSProof);

impl ShuffleProof {
    // Encodes the permutation check: the products of (x_i - z) and (y_i - z)
    // over a random challenge z must agree.
    fn gadget<CS: RandomizableConstraintSystem>(
        cs: &mut CS,
        x: Vec<Variable>,
        y: Vec<Variable>,
    ) -> Result<(), R1CSError> {
        assert_eq!(x.len(), y.len());
        let k = x.len();
        if k == 1 {
            cs.constrain(y[0] - x[0]);
            return Ok(());
        }
        cs.specify_randomized_constraints(move |cs| {
            let z = cs.challenge_scalar(b"shuffle challenge");
            // Make last x multiplier for i = k-1 and k-2
            let (_, _, last_mulx_out) = cs.multiply(x[k - 1] - z, x[k - 2] - z);
            // Make multipliers for x from i == [0, k-3]
            let first_mulx_out = (0..k - 2).rev().fold(last_mulx_out, |prev_out, i| {
                let (_, _, o) = cs.multiply(prev_out.into(), x[i] - z);
                o
            });
            // Make last y multiplier for i = k-1 and k-2
            let (_, _, last_muly_out) = cs.multiply(y[k - 1] - z, y[k - 2] - z);
            // Make multipliers for y from i == [0, k-3]
            let first_muly_out = (0..k - 2).rev().fold(last_muly_out, |prev_out, i| {
                let (_, _, o) = cs.multiply(prev_out.into(), y[i] - z);
                o
            });
            // Constrain last x mul output and last y mul output to be equal
            cs.constrain(first_mulx_out - first_muly_out);
            Ok(())
        })
    }
}

impl ShuffleProof {
    /// Attempt to construct a proof that `output` is a permutation of `input`.
    ///
    /// Returns a tuple `(proof, input_commitments || output_commitments)`.
    pub fn prove<'a, 'b>(
        pc_gens: &'b PedersenGens,
        bp_gens: &'b BulletproofGens,
        transcript: &'a mut Transcript,
        input: &[Scalar],
        output: &[Scalar],
    ) -> Result<
        (
            ShuffleProof,
            Vec<CompressedRistretto>,
            Vec<CompressedRistretto>,
        ),
        R1CSError,
    > {
        // Apply a domain separator with the shuffle parameters to the transcript
        // XXX should this be part of the gadget?
        let k = input.len();
        transcript.append_message(b"dom-sep", b"ShuffleProof");
        transcript.append_u64(b"k", k as u64);
        let mut prover = Prover::new(&pc_gens, transcript);
        // Construct blinding factors using an RNG.
        // Note: a non-example implementation would want to operate on existing commitments.
        let mut blinding_rng = rand::thread_rng();
        let (input_commitments, input_vars): (Vec<_>, Vec<_>) = input
            .into_iter()
            .map(|v| prover.commit(*v, Scalar::random(&mut blinding_rng)))
            .unzip();
        let (output_commitments, output_vars): (Vec<_>, Vec<_>) = output
            .into_iter()
            .map(|v| prover.commit(*v, Scalar::random(&mut blinding_rng)))
            .unzip();
        ShuffleProof::gadget(&mut prover, input_vars, output_vars)?;
        let proof = prover.prove(&bp_gens)?;
        Ok((ShuffleProof(proof), input_commitments, output_commitments))
    }
}

impl ShuffleProof {
    /// Attempt to verify a `ShuffleProof`.
    pub fn verify<'a, 'b>(
        &self,
        pc_gens: &'b PedersenGens,
        bp_gens: &'b BulletproofGens,
        transcript: &'a mut Transcript,
        input_commitments: &Vec<CompressedRistretto>,
        output_commitments: &Vec<CompressedRistretto>,
    ) -> Result<(), R1CSError> {
        // Apply a domain separator with the shuffle parameters to the transcript
        // XXX should this be part of the gadget?
        let k = input_commitments.len();
        transcript.append_message(b"dom-sep", b"ShuffleProof");
        transcript.append_u64(b"k", k as u64);
        let mut verifier = Verifier::new(transcript);
        let input_vars: Vec<_> = input_commitments
            .iter()
            .map(|V| verifier.commit(*V))
            .collect();
        let output_vars: Vec<_> = output_commitments
            .iter()
            .map(|V| verifier.commit(*V))
            .collect();
        ShuffleProof::gadget(&mut verifier, input_vars, output_vars)?;
        verifier.verify(&self.0, &pc_gens, &bp_gens)
    }
}
// End of copied code.

/// Binary logarithm of maximum shuffle size.
const LG_MAX_SHUFFLE_SIZE: usize = 10;
/// Maximum shuffle size to benchmark.
const MAX_SHUFFLE_SIZE: usize = 1 << LG_MAX_SHUFFLE_SIZE;

// Benchmarks proof creation for shuffle sizes 2, 4, ..., MAX_SHUFFLE_SIZE.
fn bench_kshuffle_prove(c: &mut Criterion) {
    // Construct Bulletproof generators externally
    let pc_gens = PedersenGens::default();
    let bp_gens = BulletproofGens::new(2 * MAX_SHUFFLE_SIZE, 1);
    c.bench_function_over_inputs(
        "k-shuffle proof creation",
        move |b, k| {
            // Generate inputs and outputs to kshuffle
            let mut rng = rand::thread_rng();
            let (min, max) = (0u64, std::u64::MAX);
            let input: Vec<Scalar> = (0..*k)
                .map(|_| Scalar::from(rng.gen_range(min, max)))
                .collect();
            let mut output = input.clone();
            output.shuffle(&mut rand::thread_rng());
            // Make kshuffle proof
            b.iter(|| {
                let mut prover_transcript = Transcript::new(b"ShuffleBenchmark");
                ShuffleProof::prove(&pc_gens, &bp_gens, &mut prover_transcript, &input, &output)
                    .unwrap();
            })
        },
        (1..=LG_MAX_SHUFFLE_SIZE)
            .map(|i| 1 << i)
            .collect::<Vec<_>>(),
    );
}

criterion_group! {
    name = kshuffle_prove;
    // Lower the sample size to run faster; larger shuffle sizes are
    // long so we're not microbenchmarking anyways.
    config = Criterion::default().sample_size(10);
    targets = bench_kshuffle_prove,
}

// Benchmarks verification; the proof is built once per input size, outside b.iter.
fn bench_kshuffle_verify(c: &mut Criterion) {
    // Construct Bulletproof generators externally
    let pc_gens = PedersenGens::default();
    let bp_gens = BulletproofGens::new(2 * MAX_SHUFFLE_SIZE, 1);
    c.bench_function_over_inputs(
        "k-shuffle proof verification",
        move |b, k| {
            // Generate the proof in its own scope to prevent reuse of
            // prover variables by the verifier
            let (proof, input_commitments, output_commitments) = {
                // Generate inputs and outputs to kshuffle
                let mut rng = rand::thread_rng();
                let (min, max) = (0u64, std::u64::MAX);
                let input: Vec<Scalar> = (0..*k)
                    .map(|_| Scalar::from(rng.gen_range(min, max)))
                    .collect();
                let mut output = input.clone();
                output.shuffle(&mut rand::thread_rng());
                let mut prover_transcript = Transcript::new(b"ShuffleBenchmark");
                ShuffleProof::prove(&pc_gens, &bp_gens, &mut prover_transcript, &input, &output)
                    .unwrap()
            };
            // Verify kshuffle proof
            b.iter(|| {
                let mut verifier_transcript = Transcript::new(b"ShuffleBenchmark");
                proof
                    .verify(
                        &pc_gens,
                        &bp_gens,
                        &mut verifier_transcript,
                        &input_commitments,
                        &output_commitments,
                    )
                    .unwrap();
            })
        },
        (1..=LG_MAX_SHUFFLE_SIZE)
            .map(|i| 1 << i)
            .collect::<Vec<_>>(),
    );
}

criterion_group! {
    name = kshuffle_verify;
    // Lower the sample size to run faster; larger shuffle sizes are
    // long so we're not microbenchmarking anyways.
    config = Criterion::default().sample_size(10);
    targets = bench_kshuffle_verify,
}

criterion_main!(kshuffle_prove, kshuffle_verify);
use crate::errors::*;
use crate::extraction::extract_dir;
#[allow(unused_imports)]
use crate::PERSEUS_VERSION;
use cargo_toml::Manifest;
use include_dir::{include_dir, Dir};
use std::env;
use std::fs;
use std::fs::OpenOptions;
use std::io::Write;
use std::path::PathBuf;
use std::process::Command;

// This literally includes the entire subcrate in the program, allowing more efficient development.
// This MUST be copied in from `../../examples/cli/.perseus/` every time the CLI is tested (use the Bonnie script).
const SUBCRATES: Dir = include_dir!("./.perseus");

/// Prepares the user's project by copying in the `.perseus/` subcrates. We use these subcrates to do all the building/serving, we just
/// have to execute the right commands in the CLI. We can essentially treat the subcrates themselves as a blackbox of just a folder.
///
/// Steps (all skipped if `dir/.perseus` already exists): extract the embedded
/// subcrates, rewrite their manifests for the user's crate name (and, in
/// release builds, for the published crate versions), then ensure `.perseus/`
/// is gitignored.
///
/// NOTE(review): `./Cargo.toml` and `.gitignore` below are resolved against the
/// process's current working directory, not against `dir` — confirm callers
/// always run this from the project root.
pub fn prepare(dir: PathBuf) -> Result<(), PrepError> {
    // The location in the target directory at which we'll put the subcrates
    let mut target = dir;
    target.extend([".perseus"]);
    if target.exists() {
        // We don't care if it's corrupted etc., it just has to exist
        // If the user wants to clean it, they can do that
        // Besides, we want them to be able to customize stuff
        Ok(())
    } else {
        // Write the stored directory to that location, creating the directory first
        if let Err(err) = fs::create_dir(&target) {
            return Err(PrepError::ExtractionFailed {
                target_dir: target.to_str().map(|s| s.to_string()),
                source: err,
            });
        }
        // Notably, this function will not do anything or tell us if the directory already exists...
        if let Err(err) = extract_dir(SUBCRATES, &target) {
            return Err(PrepError::ExtractionFailed {
                target_dir: target.to_str().map(|s| s.to_string()),
                source: err,
            });
        }
        // Use the current version of this crate (and thus all Perseus crates) to replace the relative imports
        // That way everything works in dev and in prod on another system!
        // We have to store `Cargo.toml` as `Cargo.toml.old` for packaging
        let mut root_manifest_pkg = target.clone();
        root_manifest_pkg.extend(["Cargo.toml.old"]);
        let mut root_manifest = target.clone();
        root_manifest.extend(["Cargo.toml"]);
        let mut server_manifest_pkg = target.clone();
        server_manifest_pkg.extend(["server", "Cargo.toml.old"]);
        let mut server_manifest = target.clone();
        server_manifest.extend(["server", "Cargo.toml"]);
        // The `.old` manifests ship inside the embedded directory, so they must
        // exist after a successful extraction.
        let root_manifest_contents = fs::read_to_string(&root_manifest_pkg).map_err(|err| {
            PrepError::ManifestUpdateFailed {
                target_dir: root_manifest_pkg.to_str().map(|s| s.to_string()),
                source: err,
            }
        })?;
        let server_manifest_contents = fs::read_to_string(&server_manifest_pkg).map_err(|err| {
            PrepError::ManifestUpdateFailed {
                target_dir: server_manifest_pkg.to_str().map(|s| s.to_string()),
                source: err,
            }
        })?;
        // Get the name of the user's crate (which the subcrates depend on)
        // We assume they're running this in a folder with a Cargo.toml...
        let user_manifest = Manifest::from_path("./Cargo.toml")
            .map_err(|err| PrepError::GetUserManifestFailed { source: err })?;
        let user_crate_name = user_manifest.package;
        let user_crate_name = match user_crate_name {
            Some(package) => package.name,
            None => return Err(PrepError::MalformedUserManifest),
        };
        // Update the name of the user's crate (Cargo needs more than just a path and an alias)
        // Also create a workspace so the subcrates share a `target/` directory (speeds up builds)
        let updated_root_manifest = root_manifest_contents
            .replace("perseus-example-basic", &user_crate_name)
            + "\n[workspace]\nmembers = [ \"server\" ]";
        let updated_server_manifest =
            server_manifest_contents.replace("perseus-example-basic", &user_crate_name);
        // If we're not in development, also update relative path references
        // (the `cfg` rebindings shadow the dev versions above in release builds only)
        #[cfg(not(debug_assertions))]
        let updated_root_manifest = updated_root_manifest.replace(
            "{ path = \"../../../packages/perseus\" }",
            &format!("\"{}\"", PERSEUS_VERSION),
        );
        #[cfg(not(debug_assertions))]
        let updated_server_manifest = updated_server_manifest.replace(
            "{ path = \"../../../../packages/perseus-actix-web\" }",
            &format!("\"{}\"", PERSEUS_VERSION),
        );
        // Write the updated manifests back
        if let Err(err) = fs::write(&root_manifest, updated_root_manifest) {
            return Err(PrepError::ManifestUpdateFailed {
                target_dir: root_manifest.to_str().map(|s| s.to_string()),
                source: err,
            });
        }
        if let Err(err) = fs::write(&server_manifest, updated_server_manifest) {
            return Err(PrepError::ManifestUpdateFailed {
                target_dir: server_manifest.to_str().map(|s| s.to_string()),
                source: err,
            });
        }
        // If we aren't already gitignoring the subcrates, update .gitignore to do so
        if let Ok(contents) = fs::read_to_string(".gitignore") {
            if contents.contains(".perseus/") {
                return Ok(());
            }
        }
        let file = OpenOptions::new()
            .append(true)
            .create(true) // If it doesn't exist, create it
            .open(".gitignore");
        let mut file = match file {
            Ok(file) => file,
            Err(err) => return Err(PrepError::GitignoreUpdateFailed { source: err }),
        };
        // Check for errors with appending to the file
        if let Err(err) = file.write_all(b"\n.perseus/") {
            return Err(PrepError::GitignoreUpdateFailed { source: err });
        }
        Ok(())
    }
}

/// Checks if the user has the necessary prerequisites on their system (i.e. `cargo` and `wasm-pack`). These can all be checked
/// by just trying to run their binaries and looking for errors. If the user has other paths for these, they can define them under the
/// environment variables `PERSEUS_CARGO_PATH` and `PERSEUS_WASM_PACK_PATH`.
pub fn check_env() -> Result<(), PrepError> { // We'll loop through each prerequisite executable to check their existence // If the spawn returns an error, it's considered not present, success means presence let prereq_execs = vec![ ( env::var("PERSEUS_CARGO_PATH").unwrap_or_else(|_| "cargo".to_string()), "cargo", "PERSEUS_CARGO_PATH", ), ( env::var("PERSEUS_WASM_PACK_PATH").unwrap_or_else(|_| "wasm-pack".to_string()), "wasm-pack", "PERSEUS_WASM_PACK_PATH", ), ]; for exec in prereq_execs { let res = Command::new(&exec.0).output(); // Any errors are interpreted as meaning that the user doesn't have the prerequisite installed properly. if let Err(err) = res { return Err(PrepError::PrereqNotPresent { cmd: exec.1.to_string(), env_var: exec.2.to_string(), source: err, }); } } Ok(()) }
use super::expression::Expression; use super::import::Import; use crate::ast_transform::Transformer; use crate::scm::Scm; use crate::source::SourceLocation; use crate::syntax::Reify; #[derive(Debug, Clone)] pub struct Program { pub imports: Import, pub body: Expression, span: SourceLocation, } impl_sourced!(Program); impl Program { pub fn new(imports: Import, body: Expression, span: SourceLocation) -> Self { Program { imports, body, span, } } pub fn transform(mut self, visitor: &mut impl Transformer) -> Self { //self.imports = self.imports.into_iter().map(|import| import.transform(visitor)).collect(); self.body = self.body.transform(visitor); self } } impl Reify for Program { fn reify(&self) -> Scm { let imports = self.imports.reify(); let body = self.body.reify(); Scm::vector(vec![imports, body]) } }
// use crate::keypair::{PublicKey, SecretKey}; use crate::lbvrf::LBVRF; use crate::param::Param; use crate::serde::Serdes; use crate::VRF; #[test] fn test_keygen() { let seed = [0u8; 32]; // let mut rng = rand::thread_rng(); // let param = Param::init(&mut rng); let param: Param = <LBVRF as VRF>::paramgen(seed).unwrap(); let (_pk, _sk) = <LBVRF as VRF>::keygen(seed, param).unwrap(); } #[test] fn test_serdes_keygen() { let seed = [0u8; 32]; // let mut rng = rand::thread_rng(); // let param = Param::init(&mut rng); let param: Param = <LBVRF as VRF>::paramgen(seed).unwrap(); let (pk, sk) = <LBVRF as VRF>::keygen(seed, param).unwrap(); let mut buf: Vec<u8> = vec![]; assert!(pk.serialize(&mut buf).is_ok()); println!("{:?}", buf); let pk2 = <LBVRF as VRF>::PublicKey::deserialize(&mut buf[..].as_ref()).unwrap(); assert_eq!(pk, pk2); let mut buf: Vec<u8> = vec![]; assert!(sk.serialize(&mut buf).is_ok()); println!("{:02x?}", buf); let sk2 = <LBVRF as VRF>::SecretKey::deserialize(&mut buf[..].as_ref()).unwrap(); assert_eq!(sk, sk2); }
#[doc = "Register `WWDG_SR` reader"] pub type R = crate::R<WWDG_SR_SPEC>; #[doc = "Register `WWDG_SR` writer"] pub type W = crate::W<WWDG_SR_SPEC>; #[doc = "Field `EWIF` reader - EWIF"] pub type EWIF_R = crate::BitReader; #[doc = "Field `EWIF` writer - EWIF"] pub type EWIF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; impl R { #[doc = "Bit 0 - EWIF"] #[inline(always)] pub fn ewif(&self) -> EWIF_R { EWIF_R::new((self.bits & 1) != 0) } } impl W { #[doc = "Bit 0 - EWIF"] #[inline(always)] #[must_use] pub fn ewif(&mut self) -> EWIF_W<WWDG_SR_SPEC, 0> { EWIF_W::new(self) } #[doc = "Writes raw bits to the register."] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } } #[doc = "Status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`wwdg_sr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`wwdg_sr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."] pub struct WWDG_SR_SPEC; impl crate::RegisterSpec for WWDG_SR_SPEC { type Ux = u32; } #[doc = "`read()` method returns [`wwdg_sr::R`](R) reader structure"] impl crate::Readable for WWDG_SR_SPEC {} #[doc = "`write(|w| ..)` method takes [`wwdg_sr::W`](W) writer structure"] impl crate::Writable for WWDG_SR_SPEC { const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0; const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0; } #[doc = "`reset()` method sets WWDG_SR to value 0"] impl crate::Resettable for WWDG_SR_SPEC { const RESET_VALUE: Self::Ux = 0; }
// Test tuple litterals fn foo() { let a = (a, a, a, a, a); let aaaaaaaaaaaaaaaa = (aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaaaa, aaaaaaaaaaaaaa); let aaaaaaaaaaaaaaaaaaaaaa = (aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaaaaaaaaaaaaaaa, aaaa); let a = (a,); }
use common::BinarySerializable; use std::io; use std::io::Read; use std::io::Write; /// `Field` is actually a `u8` identifying a `Field` /// The schema is in charge of holding mapping between field names /// to `Field` objects. /// /// Because the field id is a `u8`, tantivy can only have at most `255` fields. /// Value 255 is reserved. #[derive(Copy, Clone, Debug, PartialEq, PartialOrd, Eq, Ord, Hash, Serialize, Deserialize)] pub struct Field(pub u32); impl BinarySerializable for Field { fn serialize<W: Write>(&self, writer: &mut W) -> io::Result<()> { self.0.serialize(writer) } fn deserialize<R: Read>(reader: &mut R) -> io::Result<Field> { u32::deserialize(reader).map(Field) } }
use crate::Bin;

/// The function table to be implemented for `Bin` types. This is only required if you
/// implement your own binary type.
///
/// NOTE(review): this acts as a manually-written vtable over `Bin`; each entry's
/// contract (including sync/un-sync obligations) is documented on the field.
pub struct FnTable {
    /// Drop function. It's `None` if dropping is not required.
    pub drop: Option<fn(bin: &mut Bin)>,
    /// Returns a slice of this binary.
    ///
    /// It's allowed to be `None` if this binary is always empty (constant).
    pub as_slice: Option<fn(bin: &Bin) -> &[u8]>,
    /// True if this binary has a length of 0.
    ///
    /// It's allowed to be `None` if this binary is always empty (constant).
    pub is_empty: Option<fn(bin: &Bin) -> bool>,
    /// Clones this type.
    ///
    /// IMPORTANT: It's required to return a sync binary if self is also a
    /// sync binary (this is not checked by the compiler, it's in the responsibility of the
    /// implementer).
    pub clone: fn(bin: &Bin) -> Bin,
    /// Converts this binary into a vector; Tries to avoid allocation/memory-copy whenever possible.
    pub into_vec: fn(bin: Bin) -> Vec<u8>,
    /// Returns a slice of the given binary. Returns `None` if the given range is out of bounds.
    ///
    /// Important: If `bin` is synchronized, the returned `Bin` MUST be synchronized too.
    pub slice: fn(bin: &Bin, start: usize, end_excluded: usize) -> Option<Bin>,
    /// Returns an un-synchronized version (not just a view - if there's an un-synchronized version).
    ///
    /// This is allowed to be `None` if:
    ///
    /// * This is already the un-synchronized version.
    /// * There's no un-synchronized version.
    pub convert_into_un_sync: Option<fn(bin: Bin) -> Bin>,
    /// Returns a synchronized version.
    ///
    /// This is allowed to be `None` if: This is already the synchronized version. IMPORTANT:
    /// IT IS NEVER allowed to return `None` here if this is not the synchronized version (this
    /// can't be checked by the compiler; it's in the responsibility of the implementer).
    pub convert_into_sync: Option<fn(bin: Bin) -> Bin>,
    /// Tries to re-integrate the given slice into the given binary.
    ///
    /// Details: If the given binary is a slice of the given binary, it returns a re-integrated
    /// version. Example: Say `bin` is a reference-counted binary from address 150 to 220
    /// (length 70) and the given slice points to memory address 170 and has a length of 30,
    /// this function returns a slice of the reference-counted binary (start 20, length 30).
    ///
    /// This is `None` if the binary type does not support re-integration altogether. This
    /// function returns `None` if the given slice cannot be re-integrated. This method usually
    /// makes only sense for reference-counted binaries or static binaries. This is purely an
    /// optimization - it's valid to always return `None` here.
    ///
    /// IMPORTANT: If `bin` is a synchronized binary, the returned binary has to be
    /// synchronized too.
    pub try_re_integrate: Option<TryReIntegrateFn>,
}

/// Re-integrate function; see `FnTable`. This is only required if you implement your
/// own binary type.
pub type TryReIntegrateFn = fn(bin: &Bin, slice: &[u8]) -> Option<Bin>;
// NOTE(review): svd2rust-generated register accessors for RCC APB1LPENR
// (APB1 peripheral clock enable in low-power mode) — generated code, do not
// hand-edit; regenerate from the SVD instead.
#[doc = "Register `APB1LPENR` reader"] pub type R = crate::R<APB1LPENR_SPEC>; #[doc = "Register `APB1LPENR` writer"] pub type W = crate::W<APB1LPENR_SPEC>; #[doc = "Field `TIM2LPEN` reader - Timer 2 clock enable during Sleep mode"] pub type TIM2LPEN_R = crate::BitReader<TIM2LPEN_A>; #[doc = "Timer 2 clock enable during Sleep mode\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum TIM2LPEN_A { #[doc = "0: Clock disabled"] Disabled = 0, #[doc = "1: Clock enabled"] Enabled = 1, } impl From<TIM2LPEN_A> for bool { #[inline(always)] fn from(variant: TIM2LPEN_A) -> Self { variant as u8 != 0 } } impl TIM2LPEN_R { #[doc = "Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> TIM2LPEN_A { match self.bits { false => TIM2LPEN_A::Disabled, true => TIM2LPEN_A::Enabled, } } #[doc = "Clock disabled"] #[inline(always)] pub fn is_disabled(&self) -> bool { *self == TIM2LPEN_A::Disabled } #[doc = "Clock enabled"] #[inline(always)] pub fn is_enabled(&self) -> bool { *self == TIM2LPEN_A::Enabled } } #[doc = "Field `TIM2LPEN` writer - Timer 2 clock enable during Sleep mode"] pub type TIM2LPEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, TIM2LPEN_A>; impl<'a, REG, const O: u8> TIM2LPEN_W<'a, REG, O> where REG: crate::Writable + crate::RegisterSpec, { #[doc = "Clock disabled"] #[inline(always)] pub fn disabled(self) -> &'a mut crate::W<REG> { self.variant(TIM2LPEN_A::Disabled) } #[doc = "Clock enabled"] #[inline(always)] pub fn enabled(self) -> &'a mut crate::W<REG> { self.variant(TIM2LPEN_A::Enabled) } } #[doc = "Field `TIM3LPEN` reader - Timer 3 clock enable during Sleep mode"] pub use TIM2LPEN_R as TIM3LPEN_R; #[doc = "Field `TIM4LPEN` reader - Timer 4 clock enable during Sleep mode"] pub use TIM2LPEN_R as TIM4LPEN_R; #[doc = "Field `TIM5LPEN` reader - Timer 5 clock enable during Sleep mode"] pub use TIM2LPEN_R as TIM5LPEN_R; #[doc = "Field `TIM6LPEN` reader - Timer 6 clock enable during Sleep mode"] pub use TIM2LPEN_R as
TIM6LPEN_R; #[doc = "Field `TIM7LPEN` reader - Timer 7 clock enable during Sleep mode"] pub use TIM2LPEN_R as TIM7LPEN_R; #[doc = "Field `LCDLPEN` reader - LCD clock enable during Sleep mode"] pub use TIM2LPEN_R as LCDLPEN_R; #[doc = "Field `WWDGLPEN` reader - Window watchdog clock enable during Sleep mode"] pub use TIM2LPEN_R as WWDGLPEN_R; #[doc = "Field `SPI2LPEN` reader - SPI 2 clock enable during Sleep mode"] pub use TIM2LPEN_R as SPI2LPEN_R; #[doc = "Field `SPI3LPEN` reader - SPI 3 clock enable during Sleep mode"] pub use TIM2LPEN_R as SPI3LPEN_R; #[doc = "Field `USART2LPEN` reader - USART 2 clock enable during Sleep mode"] pub use TIM2LPEN_R as USART2LPEN_R; #[doc = "Field `USART3LPEN` reader - USART 3 clock enable during Sleep mode"] pub use TIM2LPEN_R as USART3LPEN_R; #[doc = "Field `UART4LPEN` reader - USART 4 clock enable during Sleep mode"] pub use TIM2LPEN_R as UART4LPEN_R; #[doc = "Field `UART5LPEN` reader - USART 5 clock enable during Sleep mode"] pub use TIM2LPEN_R as UART5LPEN_R; #[doc = "Field `I2C1LPEN` reader - I2C 1 clock enable during Sleep mode"] pub use TIM2LPEN_R as I2C1LPEN_R; #[doc = "Field `I2C2LPEN` reader - I2C 2 clock enable during Sleep mode"] pub use TIM2LPEN_R as I2C2LPEN_R; #[doc = "Field `USBLPEN` reader - USB clock enable during Sleep mode"] pub use TIM2LPEN_R as USBLPEN_R; #[doc = "Field `PWRLPEN` reader - Power interface clock enable during Sleep mode"] pub use TIM2LPEN_R as PWRLPEN_R; #[doc = "Field `DACLPEN` reader - DAC interface clock enable during Sleep mode"] pub use TIM2LPEN_R as DACLPEN_R; #[doc = "Field `COMPLPEN` reader - COMP interface clock enable during Sleep mode"] pub use TIM2LPEN_R as COMPLPEN_R; #[doc = "Field `TIM3LPEN` writer - Timer 3 clock enable during Sleep mode"] pub use TIM2LPEN_W as TIM3LPEN_W; #[doc = "Field `TIM4LPEN` writer - Timer 4 clock enable during Sleep mode"] pub use TIM2LPEN_W as TIM4LPEN_W; #[doc = "Field `TIM5LPEN` writer - Timer 5 clock enable during Sleep mode"] pub use TIM2LPEN_W as
TIM5LPEN_W; #[doc = "Field `TIM6LPEN` writer - Timer 6 clock enable during Sleep mode"] pub use TIM2LPEN_W as TIM6LPEN_W; #[doc = "Field `TIM7LPEN` writer - Timer 7 clock enable during Sleep mode"] pub use TIM2LPEN_W as TIM7LPEN_W; #[doc = "Field `LCDLPEN` writer - LCD clock enable during Sleep mode"] pub use TIM2LPEN_W as LCDLPEN_W; #[doc = "Field `WWDGLPEN` writer - Window watchdog clock enable during Sleep mode"] pub use TIM2LPEN_W as WWDGLPEN_W; #[doc = "Field `SPI2LPEN` writer - SPI 2 clock enable during Sleep mode"] pub use TIM2LPEN_W as SPI2LPEN_W; #[doc = "Field `SPI3LPEN` writer - SPI 3 clock enable during Sleep mode"] pub use TIM2LPEN_W as SPI3LPEN_W; #[doc = "Field `USART2LPEN` writer - USART 2 clock enable during Sleep mode"] pub use TIM2LPEN_W as USART2LPEN_W; #[doc = "Field `USART3LPEN` writer - USART 3 clock enable during Sleep mode"] pub use TIM2LPEN_W as USART3LPEN_W; #[doc = "Field `UART4LPEN` writer - USART 4 clock enable during Sleep mode"] pub use TIM2LPEN_W as UART4LPEN_W; #[doc = "Field `UART5LPEN` writer - USART 5 clock enable during Sleep mode"] pub use TIM2LPEN_W as UART5LPEN_W; #[doc = "Field `I2C1LPEN` writer - I2C 1 clock enable during Sleep mode"] pub use TIM2LPEN_W as I2C1LPEN_W; #[doc = "Field `I2C2LPEN` writer - I2C 2 clock enable during Sleep mode"] pub use TIM2LPEN_W as I2C2LPEN_W; #[doc = "Field `USBLPEN` writer - USB clock enable during Sleep mode"] pub use TIM2LPEN_W as USBLPEN_W; #[doc = "Field `PWRLPEN` writer - Power interface clock enable during Sleep mode"] pub use TIM2LPEN_W as PWRLPEN_W; #[doc = "Field `DACLPEN` writer - DAC interface clock enable during Sleep mode"] pub use TIM2LPEN_W as DACLPEN_W; #[doc = "Field `COMPLPEN` writer - COMP interface clock enable during Sleep mode"] pub use TIM2LPEN_W as COMPLPEN_W; impl R { #[doc = "Bit 0 - Timer 2 clock enable during Sleep mode"] #[inline(always)] pub fn tim2lpen(&self) -> TIM2LPEN_R { TIM2LPEN_R::new((self.bits & 1) != 0) } #[doc = "Bit 1 - Timer 3 clock enable during Sleep mode"] #[inline(always)] pub fn tim3lpen(&self) -> TIM3LPEN_R { TIM3LPEN_R::new(((self.bits >> 1) & 1) != 0) } #[doc = "Bit 2 - Timer 4 clock enable during Sleep mode"] #[inline(always)] pub fn tim4lpen(&self) -> TIM4LPEN_R { TIM4LPEN_R::new(((self.bits >> 2) & 1) != 0) } #[doc = "Bit 3 - Timer 5 clock enable during Sleep mode"] #[inline(always)] pub fn tim5lpen(&self) -> TIM5LPEN_R { TIM5LPEN_R::new(((self.bits >> 3) & 1) != 0) } #[doc = "Bit 4 - Timer 6 clock enable during Sleep mode"] #[inline(always)] pub fn tim6lpen(&self) -> TIM6LPEN_R { TIM6LPEN_R::new(((self.bits >> 4) & 1) != 0) } #[doc = "Bit 5 - Timer 7 clock enable during Sleep mode"] #[inline(always)] pub fn tim7lpen(&self) -> TIM7LPEN_R { TIM7LPEN_R::new(((self.bits >> 5) & 1) != 0) } #[doc = "Bit 9 - LCD clock enable during Sleep mode"] #[inline(always)] pub fn lcdlpen(&self) -> LCDLPEN_R { LCDLPEN_R::new(((self.bits >> 9) & 1) != 0) } #[doc = "Bit 11 - Window watchdog clock enable during Sleep mode"] #[inline(always)] pub fn wwdglpen(&self) -> WWDGLPEN_R { WWDGLPEN_R::new(((self.bits >> 11) & 1) != 0) } #[doc = "Bit 14 - SPI 2 clock enable during Sleep mode"] #[inline(always)] pub fn spi2lpen(&self) -> SPI2LPEN_R { SPI2LPEN_R::new(((self.bits >> 14) & 1) != 0) } #[doc = "Bit 15 - SPI 3 clock enable during Sleep mode"] #[inline(always)] pub fn spi3lpen(&self) -> SPI3LPEN_R { SPI3LPEN_R::new(((self.bits >> 15) & 1) != 0) } #[doc = "Bit 17 - USART 2 clock enable during Sleep mode"] #[inline(always)] pub fn usart2lpen(&self) -> USART2LPEN_R { USART2LPEN_R::new(((self.bits >> 17) & 1) != 0) } #[doc = "Bit 18 - USART 3 clock enable during Sleep mode"] #[inline(always)] pub fn usart3lpen(&self) -> USART3LPEN_R { USART3LPEN_R::new(((self.bits >> 18) & 1) != 0) } #[doc = "Bit 19 - USART 4 clock enable during Sleep mode"] #[inline(always)] pub fn uart4lpen(&self) -> UART4LPEN_R { UART4LPEN_R::new(((self.bits >> 19) & 1) != 0) } #[doc = "Bit 20 - USART 5 clock enable during Sleep mode"] #[inline(always)]
pub fn uart5lpen(&self) -> UART5LPEN_R { UART5LPEN_R::new(((self.bits >> 20) & 1) != 0) } #[doc = "Bit 21 - I2C 1 clock enable during Sleep mode"] #[inline(always)] pub fn i2c1lpen(&self) -> I2C1LPEN_R { I2C1LPEN_R::new(((self.bits >> 21) & 1) != 0) } #[doc = "Bit 22 - I2C 2 clock enable during Sleep mode"] #[inline(always)] pub fn i2c2lpen(&self) -> I2C2LPEN_R { I2C2LPEN_R::new(((self.bits >> 22) & 1) != 0) } #[doc = "Bit 23 - USB clock enable during Sleep mode"] #[inline(always)] pub fn usblpen(&self) -> USBLPEN_R { USBLPEN_R::new(((self.bits >> 23) & 1) != 0) } #[doc = "Bit 28 - Power interface clock enable during Sleep mode"] #[inline(always)] pub fn pwrlpen(&self) -> PWRLPEN_R { PWRLPEN_R::new(((self.bits >> 28) & 1) != 0) } #[doc = "Bit 29 - DAC interface clock enable during Sleep mode"] #[inline(always)] pub fn daclpen(&self) -> DACLPEN_R { DACLPEN_R::new(((self.bits >> 29) & 1) != 0) } #[doc = "Bit 31 - COMP interface clock enable during Sleep mode"] #[inline(always)] pub fn complpen(&self) -> COMPLPEN_R { COMPLPEN_R::new(((self.bits >> 31) & 1) != 0) } } impl W { #[doc = "Bit 0 - Timer 2 clock enable during Sleep mode"] #[inline(always)] #[must_use] pub fn tim2lpen(&mut self) -> TIM2LPEN_W<APB1LPENR_SPEC, 0> { TIM2LPEN_W::new(self) } #[doc = "Bit 1 - Timer 3 clock enable during Sleep mode"] #[inline(always)] #[must_use] pub fn tim3lpen(&mut self) -> TIM3LPEN_W<APB1LPENR_SPEC, 1> { TIM3LPEN_W::new(self) } #[doc = "Bit 2 - Timer 4 clock enable during Sleep mode"] #[inline(always)] #[must_use] pub fn tim4lpen(&mut self) -> TIM4LPEN_W<APB1LPENR_SPEC, 2> { TIM4LPEN_W::new(self) } #[doc = "Bit 3 - Timer 5 clock enable during Sleep mode"] #[inline(always)] #[must_use] pub fn tim5lpen(&mut self) -> TIM5LPEN_W<APB1LPENR_SPEC, 3> { TIM5LPEN_W::new(self) } #[doc = "Bit 4 - Timer 6 clock enable during Sleep mode"] #[inline(always)] #[must_use] pub fn tim6lpen(&mut self) -> TIM6LPEN_W<APB1LPENR_SPEC, 4> { TIM6LPEN_W::new(self) } #[doc = "Bit 5 - Timer 7 clock enable during Sleep mode"] #[inline(always)] #[must_use] pub fn tim7lpen(&mut self) -> TIM7LPEN_W<APB1LPENR_SPEC, 5> { TIM7LPEN_W::new(self) } #[doc = "Bit 9 - LCD clock enable during Sleep mode"] #[inline(always)] #[must_use] pub fn lcdlpen(&mut self) -> LCDLPEN_W<APB1LPENR_SPEC, 9> { LCDLPEN_W::new(self) } #[doc = "Bit 11 - Window watchdog clock enable during Sleep mode"] #[inline(always)] #[must_use] pub fn wwdglpen(&mut self) -> WWDGLPEN_W<APB1LPENR_SPEC, 11> { WWDGLPEN_W::new(self) } #[doc = "Bit 14 - SPI 2 clock enable during Sleep mode"] #[inline(always)] #[must_use] pub fn spi2lpen(&mut self) -> SPI2LPEN_W<APB1LPENR_SPEC, 14> { SPI2LPEN_W::new(self) } #[doc = "Bit 15 - SPI 3 clock enable during Sleep mode"] #[inline(always)] #[must_use] pub fn spi3lpen(&mut self) -> SPI3LPEN_W<APB1LPENR_SPEC, 15> { SPI3LPEN_W::new(self) } #[doc = "Bit 17 - USART 2 clock enable during Sleep mode"] #[inline(always)] #[must_use] pub fn usart2lpen(&mut self) -> USART2LPEN_W<APB1LPENR_SPEC, 17> { USART2LPEN_W::new(self) } #[doc = "Bit 18 - USART 3 clock enable during Sleep mode"] #[inline(always)] #[must_use] pub fn usart3lpen(&mut self) -> USART3LPEN_W<APB1LPENR_SPEC, 18> { USART3LPEN_W::new(self) } #[doc = "Bit 19 - USART 4 clock enable during Sleep mode"] #[inline(always)] #[must_use] pub fn uart4lpen(&mut self) -> UART4LPEN_W<APB1LPENR_SPEC, 19> { UART4LPEN_W::new(self) } #[doc = "Bit 20 - USART 5 clock enable during Sleep mode"] #[inline(always)] #[must_use] pub fn uart5lpen(&mut self) -> UART5LPEN_W<APB1LPENR_SPEC, 20> { UART5LPEN_W::new(self) } #[doc = "Bit 21 - I2C 1 clock enable during Sleep mode"] #[inline(always)] #[must_use] pub fn i2c1lpen(&mut self) -> I2C1LPEN_W<APB1LPENR_SPEC, 21> { I2C1LPEN_W::new(self) } #[doc = "Bit 22 - I2C 2 clock enable during Sleep mode"] #[inline(always)] #[must_use] pub fn i2c2lpen(&mut self) -> I2C2LPEN_W<APB1LPENR_SPEC, 22> { I2C2LPEN_W::new(self) } #[doc = "Bit 23 - USB clock enable during Sleep mode"] #[inline(always)] #[must_use] pub fn
usblpen(&mut self) -> USBLPEN_W<APB1LPENR_SPEC, 23> { USBLPEN_W::new(self) } #[doc = "Bit 28 - Power interface clock enable during Sleep mode"] #[inline(always)] #[must_use] pub fn pwrlpen(&mut self) -> PWRLPEN_W<APB1LPENR_SPEC, 28> { PWRLPEN_W::new(self) } #[doc = "Bit 29 - DAC interface clock enable during Sleep mode"] #[inline(always)] #[must_use] pub fn daclpen(&mut self) -> DACLPEN_W<APB1LPENR_SPEC, 29> { DACLPEN_W::new(self) } #[doc = "Bit 31 - COMP interface clock enable during Sleep mode"] #[inline(always)] #[must_use] pub fn complpen(&mut self) -> COMPLPEN_W<APB1LPENR_SPEC, 31> { COMPLPEN_W::new(self) } #[doc = "Writes raw bits to the register."] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } } #[doc = "APB1 peripheral clock enable in low power mode register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`apb1lpenr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`apb1lpenr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."] pub struct APB1LPENR_SPEC; impl crate::RegisterSpec for APB1LPENR_SPEC { type Ux = u32; } #[doc = "`read()` method returns [`apb1lpenr::R`](R) reader structure"] impl crate::Readable for APB1LPENR_SPEC {} #[doc = "`write(|w| ..)` method takes [`apb1lpenr::W`](W) writer structure"] impl crate::Writable for APB1LPENR_SPEC { const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0; const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0; } #[doc = "`reset()` method sets APB1LPENR to value 0"] impl crate::Resettable for APB1LPENR_SPEC { const RESET_VALUE: Self::Ux = 0; }
// Public module tree of this crate, listed alphabetically.
// (Module contents live in their own files; declaration order is irrelevant.)
pub mod backend;
pub mod domain;
pub mod math;
use { crate::{ env::Env, handler::{handler, healthz, metrics, statusz}, state::State, }, actix_web::{ middleware::Logger, web::{to, Data}, App, HttpServer, }, futures::future::{join, FutureExt}, std::io::Result, }; pub async fn start() -> Result<()> { let (service_state, metric_state) = State::new(); let (service_data, metric_data) = (Data::new(service_state), Data::new(metric_state)); let metric_server = HttpServer::new(move || { App::new() .app_data(metric_data.clone()) .route("/healthz", to(healthz)) .route("/metrics", to(metrics)) .route("/statusz", to(statusz)) }) .bind(Env::parse_metric_address())? .run(); let service_server = HttpServer::new(move || { App::new() .app_data(service_data.clone()) .route("*", to(handler)) .wrap(Logger::default()) }) .bind(Env::parse_service_address())? .run(); join(metric_server, service_server) .map(|(_, _)| Ok(())) .await }
/// A rectangle defined by its top-left corner and dimensions
#[derive(Copy, Clone, Debug)]
pub struct Rectangle {
    /// horizontal position of the top-left corner of the rectangle, in surface coordinates
    pub x: i32,
    /// vertical position of the top-left corner of the rectangle, in surface coordinates
    pub y: i32,
    /// width of the rectangle
    pub width: i32,
    /// height of the rectangle
    pub height: i32,
}

impl Rectangle {
    /// Checks whether given point is inside a rectangle
    ///
    /// The rectangle covers the half-open ranges `[x, x + width)` and
    /// `[y, y + height)`: left/top edges are inclusive, right/bottom edges are
    /// exclusive. A zero- or negative-sized rectangle contains no points.
    ///
    /// Arithmetic is widened to `i64` so that `x + width` / `y + height`
    /// cannot overflow `i32` for rectangles near the coordinate limits
    /// (the previous `i32` addition panicked in debug builds and wrapped in
    /// release builds for such rectangles).
    pub fn contains(&self, point: (i32, i32)) -> bool {
        let (px, py) = (i64::from(point.0), i64::from(point.1));
        let (left, top) = (i64::from(self.x), i64::from(self.y));
        px >= left
            && px < left + i64::from(self.width)
            && py >= top
            && py < top + i64::from(self.height)
    }
}
use log::{debug, trace};
use num_bigint::{BigInt, ToBigInt};
use num_traits::{Pow, ToPrimitive};
use py27_marshal::bstr::BString;
use py27_marshal::*;
use pydis::opcode::py27::{self, Mnemonic};
use pydis::prelude::*;
use std::collections::{BTreeMap, HashMap, VecDeque};
use std::convert::{TryFrom, TryInto};
use std::io::{Cursor, Read};
use std::sync::{Arc, Mutex};

/// Directive returned by a walker callback telling the bytecode walker how to
/// proceed after the current instruction.
pub enum WalkerState {
    /// Continue parsing normally
    Continue,
    /// Continue parsing and parse the next instruction even if it's already
    /// been parsed before
    ContinueIgnoreAnalyzedInstructions,
    /// Stop parsing
    Break,
    /// Immediately start parsing at the given offset and continue parsing
    JumpTo(u64),
    /// Assume the result of the previous comparison evaluated to the given bool
    /// and continue parsing
    AssumeComparison(bool),
}

impl WalkerState {
    /// Returns whether we need to force queue the next instruction
    /// (i.e. the states that re-visit or redirect control flow; plain
    /// `Continue`/`Break` do not force-queue).
    fn force_queue_next(&self) -> bool {
        matches!(
            self,
            Self::ContinueIgnoreAnalyzedInstructions | Self::JumpTo(_) | Self::AssumeComparison(_)
        )
    }
}

/// Represents a VM variable. The value is either `Some` (something we can
/// statically resolve) or `None` (something that cannot be resolved statically)
pub type VmVar = Option<Obj>;
/// A VM variable and the data it tracks. Typically this will be a VmVarWithTracking<()>,
/// or VmVarWithTracking<usize> where the usize represents an instruction index. But,
/// this can be anything you'd like it to be within the context of how you'll be executing
/// the instruction, and what data you'd like to track across instructions that share data.
pub type VmVarWithTracking<T> = (VmVar, InstructionTracker<T>);
/// The VM's stack state.
pub type VmStack<T> = Vec<VmVarWithTracking<T>>;
/// The VM's variable table (keyed by the fast-local slot index)
pub type VmVars<T> = HashMap<u16, VmVarWithTracking<T>>;
/// The VM's name table (keyed by interned name string)
pub type VmNames<T> = HashMap<Arc<BString>, VmVarWithTracking<T>>;
/// Names that get loaded while executing the VM.
These are identifiers such as
/// module names and names *from* modules.
pub type LoadedNames = Arc<Mutex<Vec<Arc<BString>>>>;

/// Implements high-level routines that are useful when performing taint tracking
/// operations
// The inner Vec is shared: cloning an InstructionTracker (see Clone below)
// aliases the same tracked data rather than copying it.
#[derive(Debug)]
pub struct InstructionTracker<T>(pub Arc<Mutex<Vec<T>>>);

/// We implement a custom Clone routine since, in some scenarios, we want to share
/// the taint tracking across multiple objects in different locations. e.g. we may
/// want to share taint tracking state between our saved objects in our tables (vm vars, names, etc.)
/// and variables on the stack.
impl<T> Clone for InstructionTracker<T>
where
    T: Clone,
{
    fn clone(&self) -> Self {
        // Shallow clone: bump the Arc refcount so both handles observe the
        // same tracked data. Use `deep_clone` for an independent copy.
        InstructionTracker(Arc::clone(&self.0))
    }
}

impl<T> InstructionTracker<T>
where
    T: Clone,
{
    /// Creates a new instruction tracker with no tracked data.
    pub fn new() -> InstructionTracker<T> {
        InstructionTracker(Arc::new(Mutex::new(vec![])))
    }

    /// Performs a deep clone of this instruction tracking state
    /// (new Arc, new Vec — detached from the original handle).
    pub fn deep_clone(&self) -> InstructionTracker<T> {
        InstructionTracker(Arc::new(Mutex::new(self.0.lock().unwrap().clone())))
    }

    /// Pushes new data into the instruction tracking vector
    pub fn push(&self, data: T) {
        self.0.lock().unwrap().push(data)
    }

    /// Extends the state of this instruction tracker by copying all items from `other`'s
    /// tracked state into this.
pub fn extend(&self, other: &InstructionTracker<T>) {
        // Locks are taken one at a time (self first, then other inside the
        // call), so two trackers sharing the same Arc would deadlock here —
        // callers pass distinct trackers.
        self.0
            .lock()
            .unwrap()
            .extend_from_slice(other.0.lock().unwrap().as_slice());
    }
}

/// SAFETY: The data in an `InstructionTracker` is wrapped in an Arc<Mutex<T>>
unsafe impl<T: Sync + Send> Send for InstructionTracker<T> {}

/// SAFETY: The data in an `InstructionTracker` is wrapped in an Arc<Mutex<T>>
unsafe impl<T: Sync + Send> Sync for InstructionTracker<T> {}

use py27_marshal::ObjHashable;

use crate::error::Error;

// Comparison operator strings indexed by the COMPARE_OP instruction argument,
// in CPython 2.7's cmp_op order.
pub(crate) const PYTHON27_COMPARE_OPS: [&str; 12] = [
    "<",
    "<=",
    "==",
    "!=",
    ">",
    ">=",
    "in",
    "not in",
    "is",
    "is not",
    "exception match",
    "BAD",
];

/// Executes an instruction, altering the input state and returning an error
/// when the instruction cannot be correctly emulated. For example, some complex
/// instructions are not currently supported at this time.
///
/// `stack`, `vars`, `names`, and `globals` are mutated in place; `access_tracking`
/// is recorded into the tracker of every value the instruction touches.
// NOTE(review): `function_callback` presumably resolves function calls to a
// (possibly unknown) value — its call site is below this header; confirm there.
pub fn execute_instruction<O: Opcode<Mnemonic = py27::Mnemonic>, F, T>(
    instr: &Instruction<O>,
    code: Arc<Code>,
    stack: &mut VmStack<T>,
    vars: &mut VmVars<T>,
    names: &mut VmNames<T>,
    globals: &mut VmNames<T>,
    names_loaded: LoadedNames,
    mut function_callback: F,
    access_tracking: T,
) -> Result<(), Error<O>>
where
    F: FnMut(VmVar, Vec<VmVar>, std::collections::HashMap<Option<ObjHashable>, VmVar>) -> VmVar,
    T: Clone + Copy,
{
    macro_rules!
apply_operator { ($operator_str:expr) => { let (tos, tos_accesses) = stack.pop().expect("no top of stack?"); let (tos1, tos1_accesses) = stack.pop().expect("no operand"); tos_accesses.push(access_tracking); let tos_accesses = tos_accesses.deep_clone(); tos_accesses.extend(&tos1_accesses); let operator_str = $operator_str; match &tos1 { Some(Obj::Long(left)) => { match &tos { Some(Obj::Long(right)) => { let value = match operator_str { "^" => { left.as_ref() ^ right.as_ref() } "|" => { left.as_ref() | right.as_ref() } "&" => { left.as_ref() & right.as_ref() } "%" => { left.as_ref() % right.as_ref() } "-" => { left.as_ref() - right.as_ref() } "+" => { left.as_ref() + right.as_ref() } "*" => { left.as_ref() * right.as_ref() } "/" => { left.as_ref() / right.as_ref() } "//" => { left.as_ref() / right.as_ref() } "**" => { // Check if our exponent is negative if let num_bigint::Sign::Minus = right.sign() { let positive_exponent = (-right.as_ref()).to_u32().unwrap(); let value = left.as_ref().pow(positive_exponent); stack.push(( Some(Obj::Float(1.0 / value.to_f64().unwrap())), tos_accesses, )); return Ok(()); } else { left.as_ref().pow(right.as_ref().to_u32().unwrap_or_else(|| panic!("could not convert {:?} to u32", right))) } } "///" => { // triple division is true divide -- convert to floats let value = left.as_ref().to_f64().unwrap() / right.as_ref().to_f64().unwrap(); stack.push(( Some(Obj::Float(value)), tos_accesses, )); return Ok(()); } other => { panic!("operator {:?} not handled for Long operands", other); } }; stack.push(( Some(Obj::Long(Arc::new( value ))), tos_accesses, )); } Some(Obj::Float(right)) => { match operator_str { "*" => { // For longs we can just use the operator outright let value = left.as_ref().to_f64().unwrap() * right; stack.push(( Some(Obj::Float( value )), tos_accesses, )); } "/" => { // For longs we can just use the operator outright let value = left.as_ref().to_f64().unwrap() / right; stack.push(( Some(Obj::Float( value )), tos_accesses, 
)); } "+" => { // For longs we can just use the operator outright let value = left.as_ref().to_f64().unwrap() / right; stack.push(( Some(Obj::Float( value )), tos_accesses, )); } "-" => { // For longs we can just use the operator outright let value = left.as_ref().to_f64().unwrap() / right; stack.push(( Some(Obj::Float( value )), tos_accesses, )); } _other => panic!("unsupported RHS. left: {:?}, right: {:?}. operator: {}", tos1.unwrap().typ(), "Float", operator_str), } } Some(right)=> panic!("unsupported RHS. left: {:?}, right: {:?}. operator: {}", tos1.unwrap().typ(), right.typ(), operator_str), None => stack.push((None, tos_accesses)), } } Some(Obj::Float(left)) => { match &tos { Some(Obj::Float(right)) => { match operator_str { "*" => { // For longs we can just use the operator outright let value = left * right; stack.push(( Some(Obj::Float( value )), tos_accesses, )); } "-" => { // For longs we can just use the operator outright let value = left - right; stack.push(( Some(Obj::Float( value )), tos_accesses, )); } "+" => { // For longs we can just use the operator outright let value = left + right; stack.push(( Some(Obj::Float( value )), tos_accesses, )); } "/" => { // For longs we can just use the operator outright let value = left + right; stack.push(( Some(Obj::Float( value )), tos_accesses, )); } _ => panic!("operator {:?} not handled for float", operator_str), } } Some(Obj::String(right)) => { panic!("{:?}", right); return Err(crate::error::ExecutionError::ComplexExpression(instr.clone(), Some(tos1.unwrap().typ())).into()); } Some(right)=> panic!("unsupported RHS. left: {:?}, right: {:?}. 
operator: {}", tos1.unwrap().typ(), right.typ(), operator_str), None => stack.push((None, tos_accesses)), } } Some(Obj::Set(left)) => { match &tos { Some(Obj::Set(right)) => { match operator_str { "&" => { let left_set = left.read().unwrap(); let right_set = right.read().unwrap(); let intersection = left_set.intersection(&right_set); stack.push(( Some(Obj::Set(Arc::new( std::sync::RwLock::new( intersection.cloned().collect::<std::collections::HashSet<_>>() ) ))), tos_accesses, )); } "|" => { let left_set = left.read().unwrap(); let right_set = right.read().unwrap(); let union = left_set.union(&right_set); stack.push(( Some(Obj::Set(Arc::new( std::sync::RwLock::new( union.cloned().collect::<std::collections::HashSet<_>>() ) ))), tos_accesses, )); } other => panic!("unsupported operator `{}` for {:?}", other, "set") } } Some(right)=> panic!("unsupported RHS. left: {:?}, right: {:?}. operator: {}", tos1.unwrap().typ(), right.typ(), operator_str), None => stack.push((None, tos_accesses)), } } Some(Obj::String(left)) => { // special case -- this is string formatting if operator_str == "%" { stack.push(( Some(Obj::String(Arc::new( left.as_ref().clone() ))), tos_accesses, )); return Ok(()); } match &tos{ Some(Obj::Long(right)) => { match operator_str { "*" => { let value = left.repeat(right.to_usize().unwrap()); stack.push(( Some(Obj::String(Arc::new( BString::from(value) ))), tos_accesses, )); } "+" => { let mut value = left.clone(); unsafe { Arc::get_mut_unchecked(&mut value) }.extend_from_slice(right.to_string().as_bytes()); stack.push(( Some(Obj::String(value)), tos_accesses, )); } _other => panic!("unsupported operator {:?} for LHS {:?} RHS {:?}", operator_str, tos1.unwrap().typ(), tos.unwrap().typ()) } } Some(Obj::String(right)) => { match operator_str { "+" => { let mut value = left.clone(); unsafe { Arc::get_mut_unchecked(&mut value) }.extend_from_slice(right.as_slice()); stack.push(( Some(Obj::String(value)), tos_accesses, )); } _other => { //return 
Err(crate::error::ExecutionError::ComplexExpression(instr.clone(), Some(tos1.unwrap().typ())).into()); panic!("unsupported operator {:?} for LHS {:?} RHS {:?}", operator_str, tos1.unwrap().typ(), tos.unwrap().typ()) } } } Some(right)=> panic!("unsupported RHS. left: {:?}, right: {:?}. operator: {}", tos1.unwrap().typ(), right.typ(), operator_str), None => stack.push((None, tos_accesses)), } } Some(left)=> match &tos { Some(right) => { panic!("unsupported LHS {:?} for operator {:?}. right was {:?}", left.typ(), operator_str, right.typ()) } None => { panic!("unsupported LHS {:?} for operator {:?}. right was None", left.typ(), operator_str) } } None => { stack.push((None, tos_accesses)); } } }; } use num_traits::Signed; macro_rules! apply_unary_operator { ($operator:tt) => { let (tos, tos_accesses) = stack.pop().expect("no top of stack?"); tos_accesses.push(access_tracking); let operator_str = stringify!($operator); match tos { Some(Obj::Bool(result)) => { let val = match operator_str { "!" => !result, other => panic!("unexpected unary operator {:?} for bool", other), }; stack.push((Some(Obj::Bool(val)), tos_accesses)); } Some(Obj::Long(result)) => { let val = match operator_str { "!" => { let truthy_value = *result != 0.to_bigint().unwrap(); stack.push((Some(Obj::Bool(!truthy_value)), tos_accesses)); return Ok(()); } "-" => -&*result, "+" => result.abs(), "~" => !&*result, other => panic!("unexpected unary operator {:?} for bool", other), }; stack.push((Some(Obj::Long(Arc::new(val))), tos_accesses)); } Some(Obj::None) => { let val = match operator_str { "!" 
=> true, other => panic!("unexpected unary operator {:?} for None", other), }; stack.push((Some(Obj::Bool(val)), tos_accesses)); } Some(other) => { panic!("unexpected TOS type for condition: {:?}", other.typ()); } None => { stack.push((None, tos_accesses)); } } }; } match instr.opcode.mnemonic() { Mnemonic::ROT_TWO => { let (tos, tos_accesses) = stack.pop().unwrap(); let (tos1, tos1_accesses) = stack.pop().unwrap(); tos_accesses.push(access_tracking); tos1_accesses.push(access_tracking); stack.push((tos1, tos1_accesses)); stack.push((tos, tos_accesses)); } Mnemonic::ROT_THREE => { let (tos, tos_accesses) = stack.pop().unwrap(); let (tos1, tos1_accesses) = stack.pop().unwrap(); let (tos2, tos2_accesses) = stack.pop().unwrap(); tos_accesses.push(access_tracking); tos1_accesses.push(access_tracking); tos2_accesses.push(access_tracking); stack.push((tos2, tos2_accesses)); stack.push((tos1, tos1_accesses)); stack.push((tos, tos_accesses)); } Mnemonic::DUP_TOP => { let (var, accesses) = stack.last().unwrap(); accesses.push(access_tracking); let new_var = (var.clone(), accesses.deep_clone()); stack.push(new_var); } Mnemonic::COMPARE_OP => { let (right, right_modifying_instrs) = stack.pop().unwrap(); let (left, left_modifying_instrs) = stack.pop().unwrap(); left_modifying_instrs.push(access_tracking); let left_modifying_instrs = left_modifying_instrs.deep_clone(); left_modifying_instrs.extend(&right_modifying_instrs); if right.is_none() || left.is_none() { stack.push((None, left_modifying_instrs)); return Ok(()); } let left = left.unwrap(); let right = right.unwrap(); let op = PYTHON27_COMPARE_OPS[instr.arg.unwrap() as usize]; match op { "<" => match left { Obj::Long(l) => match right { Obj::Long(r) => stack.push((Some(Obj::Bool(l < r)), left_modifying_instrs)), other => panic!("unsupported right-hand operand: {:?}", other.typ()), }, Obj::String(left) => match right { Obj::String(right) => { for idx in 0..std::cmp::min(left.len(), right.len()) { if left[idx] != right[idx] 
{ stack.push(( Some(Obj::Bool(left[idx] < right[idx])), left_modifying_instrs, )); return Ok(()); } } stack.push(( Some(Obj::Bool(left.len() < right.len())), left_modifying_instrs, )) } _other => { stack.push((Some(Obj::Bool(false)), left_modifying_instrs)); // panic!( // "unsupported right-hand operand for string >: {:?}", // other.typ() // ) } }, other => panic!( "unsupported left-hand operand: {:?} for op {}", other.typ(), op ), }, "<=" => match left { Obj::Long(l) => match right { Obj::Long(r) => { stack.push((Some(Obj::Bool(l <= r)), left_modifying_instrs)) } Obj::Float(r) => stack.push(( Some(Obj::Bool(l.to_f64().unwrap() <= r)), left_modifying_instrs, )), other => panic!( "unsupported right-hand operand for Long <=: {:?}", other.typ() ), }, Obj::Bool(l) => match right { Obj::Long(r) => stack.push(( Some(Obj::Bool((l as u32).to_bigint().unwrap() <= *r)), left_modifying_instrs, )), Obj::Float(r) => stack.push(( Some(Obj::Bool((l as u64) as f64 <= r)), left_modifying_instrs, )), Obj::Bool(r) => stack .push((Some(Obj::Bool(l as u32 <= r as u32)), left_modifying_instrs)), other => panic!( "unsupported right-hand operand for Long <=: {:?}", other.typ() ), }, other => panic!( "unsupported left-hand operand: {:?} for op {}", other.typ(), op ), }, "==" => match left { Obj::Long(l) => match right { Obj::Long(r) => { stack.push((Some(Obj::Bool(l == r)), left_modifying_instrs)) } other => panic!( "unsupported right-hand operand for Long ==: {:?}", other.typ() ), }, Obj::Set(left_set) => match right { Obj::Set(right_set) => { let left_set_lock = left_set.read().unwrap(); let right_set_lock = right_set.read().unwrap(); stack.push(( Some(Obj::Bool(&*left_set_lock == &*right_set_lock)), left_modifying_instrs, )) } other => panic!( "unsupported right-hand operand for Set == : {:?}", other.typ() ), }, other => panic!( "unsupported left-hand operand: {:?} for op {}", other.typ(), op ), }, "!=" => match left { Obj::Long(l) => match right { Obj::Long(r) => { 
stack.push((Some(Obj::Bool(l != r)), left_modifying_instrs)) } other => panic!( "unsupported right-hand operand for Long !=: {:?}", other.typ() ), }, Obj::Set(left_set) => match right { Obj::Set(right_set) => { let left_set_lock = left_set.read().unwrap(); let right_set_lock = right_set.read().unwrap(); stack.push(( Some(Obj::Bool(&*left_set_lock != &*right_set_lock)), left_modifying_instrs, )) } other => panic!("unsupported right-hand operand for !=: {:?}", other.typ()), }, other => panic!( "unsupported left-hand operand: {:?} for op {}", other.typ(), op ), }, ">" => match left { Obj::Long(l) => match right { Obj::Long(r) => stack.push((Some(Obj::Bool(l > r)), left_modifying_instrs)), Obj::Float(r) => stack.push(( Some(Obj::Bool(l.to_f64().unwrap() > r)), left_modifying_instrs, )), other => panic!( "unsupported right-hand operand for Long >: {:?}", other.typ() ), }, Obj::String(left) => match right { Obj::String(right) => { for idx in 0..std::cmp::min(left.len(), right.len()) { if left[idx] != right[idx] { stack.push(( Some(Obj::Bool(left[idx] > right[idx])), left_modifying_instrs, )); return Ok(()); } } stack.push(( Some(Obj::Bool(left.len() > right.len())), left_modifying_instrs, )) } _other => { stack.push((Some(Obj::Bool(true)), left_modifying_instrs)); // panic!( // "unsupported right-hand operand for string >: {:?}", // other.typ() // ) } }, other => panic!( "unsupported left-hand operand: {:?} for op {}", other.typ(), op ), }, ">=" => match left { Obj::Long(l) => match right { Obj::Long(r) => { stack.push((Some(Obj::Bool(l >= r)), left_modifying_instrs)) } Obj::Float(r) => stack.push(( Some(Obj::Bool(l.to_f64().unwrap() >= r)), left_modifying_instrs, )), other => { panic!("unsupported right-hand operand for Long: {:?}", other.typ()) } }, other => panic!( "unsupported left-hand operand: {:?} for op {}", other.typ(), op ), }, "is not" => match left { Obj::String(_left) => match right { Obj::None => stack.push((Some(Obj::Bool(true)), left_modifying_instrs)), 
other => panic!( "unsupported right-hand operand for string {:?}: {:?}", op, other.typ() ), }, Obj::None => match right { Obj::None => stack.push((Some(Obj::Bool(false)), left_modifying_instrs)), other => panic!( "unsupported right-hand operand for None, operator {:?}: {:?}", op, other.typ() ), }, other => panic!( "unsupported left-hand operand: {:?} for op {}. RHS is {:?}", other.typ(), op, right.typ(), ), }, "is" => match left { Obj::String(_left) => match right { // all => { // return Err(crate::error::ExecutionError::ComplexExpression( // instr.clone(), // Some(all.typ()), // ) // .into()) // } // Obj::None => stack.push((Some(Obj::Bool(true)), left_modifying_instrs)), other => panic!( "unsupported right-hand operand for string {:?}: {:?}", op, other.typ() ), }, Obj::None => match right { Obj::None => { stack.push((Some(Obj::Bool(true)), left_modifying_instrs)); } other => panic!( "unsupported right-hand operand for None {:?}: {:?}", op, other.typ() ), }, other => panic!( "unsupported left-hand operand: {:?} for op {}", other.typ(), op ), }, other => panic!("unsupported comparison operator: {:?}", other), } } Mnemonic::IMPORT_NAME => { let (_fromlist, fromlist_modifying_instrs) = stack.pop().unwrap(); let (_level, level_modifying_instrs) = stack.pop().unwrap(); level_modifying_instrs.extend(&fromlist_modifying_instrs); level_modifying_instrs.push(access_tracking); let _name = &code.names[instr.arg.unwrap() as usize]; // println!("importing: {}", name); stack.push((None, level_modifying_instrs)); } Mnemonic::IMPORT_FROM => { let (_module, accessing_instrs) = stack.last().unwrap(); accessing_instrs.push(access_tracking); let accessing_instrs = accessing_instrs.clone(); stack.push((None, accessing_instrs)); } Mnemonic::LOAD_ATTR => { // we don't support attributes let (_obj, obj_modifying_instrs) = stack.pop().unwrap(); let _name = &code.names[instr.arg.unwrap() as usize]; obj_modifying_instrs.push(access_tracking); stack.push((None, obj_modifying_instrs)); } 
Mnemonic::STORE_ATTR => { // we don't support attributes let (_obj, _obj_modifying_instrs) = stack.pop().unwrap(); let (_obj, _obj_modifying_instrs) = stack.pop().unwrap(); } Mnemonic::FOR_ITER => { // Top of stack needs to be something we can iterate over // get the next item from our iterator let top_of_stack_index = stack.len() - 1; let (tos, _modifying_instrs) = &mut stack[top_of_stack_index]; let new_tos = match tos { Some(Obj::String(s)) => { if let Some(byte) = unsafe { Arc::get_mut_unchecked(s) }.pop() { Some(Obj::Long(Arc::new(byte.to_bigint().unwrap()))) } else { // iterator is empty -- return return Ok(()); } } Some(other) => panic!("stack object `{:?}` is not iterable", other), None => None, }; // let modifying_instrs = Rc::new(RefCell::new(modifying_instrs.borrow().clone())); // modifying_instrs.borrow_mut().push(access_tracking); stack.push((new_tos, InstructionTracker::new())) } Mnemonic::STORE_FAST => { let (tos, accessing_instrs) = stack.pop().unwrap(); accessing_instrs.push(access_tracking); // Store TOS in a var slot vars.insert(instr.arg.unwrap(), (tos, accessing_instrs)); } Mnemonic::STORE_NAME => { let (tos, accessing_instrs) = stack.pop().unwrap(); let name = &code.names[instr.arg.unwrap() as usize]; accessing_instrs.push(access_tracking); // Store TOS in a var slot names.insert(Arc::clone(name), (tos, accessing_instrs)); } Mnemonic::LOAD_NAME => { let name = &code.names[instr.arg.unwrap() as usize]; names_loaded.lock().unwrap().push(Arc::clone(name)); if let Some((val, accesses)) = names.get(name) { accesses.push(access_tracking); stack.push((val.clone(), accesses.clone())); } else { let tracking = InstructionTracker::new(); tracking.push(access_tracking); stack.push((None, tracking)); } } Mnemonic::LOAD_FAST => { if let Some((var, accesses)) = vars.get(&instr.arg.unwrap()) { accesses.push(access_tracking); stack.push((var.clone(), accesses.clone())); } else { let tracking = InstructionTracker::new(); tracking.push(access_tracking); 
stack.push((None, tracking)); } } Mnemonic::LOAD_CONST => { let tracking = InstructionTracker::new(); tracking.push(access_tracking); stack.push(( Some(code.consts[instr.arg.unwrap() as usize].clone()), tracking, )); } Mnemonic::BINARY_FLOOR_DIVIDE => { apply_operator!("//"); } Mnemonic::BINARY_TRUE_DIVIDE => { apply_operator!("///"); } Mnemonic::BINARY_POWER => { apply_operator!("**"); } Mnemonic::BINARY_MODULO => { apply_operator!("%"); } Mnemonic::INPLACE_ADD | Mnemonic::BINARY_ADD => { apply_operator!("+"); } Mnemonic::INPLACE_MULTIPLY | Mnemonic::BINARY_MULTIPLY => { apply_operator!("*"); } Mnemonic::INPLACE_SUBTRACT | Mnemonic::BINARY_SUBTRACT => { apply_operator!("-"); } Mnemonic::STORE_SUBSCR => { let (key, key_accessing_instrs) = stack.pop().unwrap(); let (collection, collection_accessing_instrs) = stack.pop().unwrap(); let (value, value_accessing_instrs) = stack.pop().unwrap(); collection_accessing_instrs.extend(&key_accessing_instrs); collection_accessing_instrs.extend(&value_accessing_instrs); collection_accessing_instrs.push(access_tracking); // If key, value, or the collection are `None`, we destroy the entire collection // TODO: allow more granular failure of taint tracking at a per-index level match (collection, key, value) { (Some(collection), Some(key), Some(value)) => match collection { Obj::Dict(list_lock) => { let mut dict = list_lock.write().unwrap(); let key = ObjHashable::try_from(&key).expect("key is not hashable"); dict.insert(key, value); } Obj::List(list_lock) => { let mut list = list_lock.write().unwrap(); let index = key.extract_long().expect("key is not a long"); let index = index .to_usize() .expect("index cannot be converted to usize"); if index > list.len() { panic!("index {} is greater than list length {}", index, list.len()); } list[index] = value; } other => { panic!("need to implement STORE_SUBSCR for {:?}", other.typ()); } }, _ => { // we do nothing } } } Mnemonic::BINARY_SUBSC => { let (tos, accessing_instrs) = 
stack.pop().unwrap(); let (tos1, tos1_accessing_instrs) = stack.pop().unwrap(); accessing_instrs.extend(&tos1_accessing_instrs); accessing_instrs.push(access_tracking); if tos.is_none() { stack.push((None, accessing_instrs)); return Ok(()); } match tos1 { Some(Obj::List(list_lock)) => { let list = list_lock.read().unwrap(); if let Obj::Long(long) = tos.unwrap() { if long.to_usize().unwrap() >= list.len() { stack.push((None, accessing_instrs)); } else { stack.push(( Some(list[long.to_usize().unwrap()].clone()), accessing_instrs, )); } } else { panic!("TOS must be a long"); } } Some(other) => { return Err(crate::error::ExecutionError::ComplexExpression( instr.clone(), Some(other.typ()), ) .into()); } None => { stack.push((None, accessing_instrs)); } } } Mnemonic::BINARY_DIVIDE => { apply_operator!("/"); } Mnemonic::BINARY_XOR => { apply_operator!("^"); } Mnemonic::BINARY_AND => { apply_operator!("&"); } Mnemonic::BINARY_OR | Mnemonic::INPLACE_OR => { apply_operator!("|"); } Mnemonic::UNARY_NOT => { apply_unary_operator!(!); } Mnemonic::UNARY_NEGATIVE => { apply_unary_operator!(-); } Mnemonic::BINARY_RSHIFT => { let (tos, tos_accesses) = stack.pop().unwrap(); let tos_value = tos.map(|tos| match tos { Obj::Long(l) => Arc::clone(&l), other => panic!("did not expect type: {:?}", other.typ()), }); let (tos1, tos1_accesses) = stack.pop().unwrap(); let tos1_value = tos1.map(|tos| match tos { Obj::Long(l) => Arc::clone(&l), other => panic!("did not expect type: {:?}", other.typ()), }); tos_accesses.extend(&tos1_accesses); tos_accesses.push(access_tracking); if tos_value.is_some() && tos1_value.is_some() { stack.push(( Some(Obj::Long(Arc::new( &*tos1_value.unwrap() >> tos_value.unwrap().to_usize().unwrap(), ))), tos_accesses, )); } else { stack.push((None, tos_accesses)); } } Mnemonic::BINARY_LSHIFT => { let (tos, tos_accesses) = stack.pop().unwrap(); let tos_value = tos.map(|tos| match tos { Obj::Long(l) => Arc::clone(&l), other => panic!("did not expect type: {:?}", 
other.typ()), }); let (tos1, tos1_accesses) = stack.pop().unwrap(); let tos1_value = tos1.map(|tos| match tos { Obj::Long(l) => Arc::clone(&l), other => panic!("did not expect type: {:?}", other.typ()), }); let tos_accesses = tos_accesses.deep_clone(); tos_accesses.extend(&tos1_accesses); tos_accesses.push(access_tracking); if tos_value.is_some() && tos1_value.is_some() { stack.push(( Some(Obj::Long(Arc::new( &*tos1_value.unwrap() << tos_value.unwrap().to_usize().unwrap(), ))), tos_accesses, )); } else { stack.push((None, tos_accesses)); } } Mnemonic::LIST_APPEND => { let (tos, tos_modifiers) = stack.pop().unwrap(); let tos_value = tos.map(|tos| { match tos { Obj::Long(l) => Arc::clone(&l), other => panic!("did not expect type: {:?}", other.typ()), } .to_u8() .unwrap() }); let stack_len = stack.len(); let (output, output_modifiers) = &mut stack[stack_len - instr.arg.unwrap() as usize]; output_modifiers.extend(&tos_modifiers); output_modifiers.push(access_tracking); match output { Some(Obj::String(s)) => { unsafe { Arc::get_mut_unchecked(s) }.push(tos_value.unwrap()); } Some(other) => { return Err(crate::error::ExecutionError::ComplexExpression( instr.clone(), Some(other.typ()), ) .into()); } None => { // do nothing here } } } Mnemonic::UNPACK_SEQUENCE => { let (tos, tos_modifiers) = stack.pop().unwrap(); tos_modifiers.push(access_tracking); match tos { Some(Obj::Tuple(t)) => { for item in t.iter().rev().take(instr.arg.unwrap() as usize) { stack.push((Some(item.clone()), tos_modifiers.deep_clone())); } } Some(other) => { panic!("need to add UNPACK_SEQUENCE support for {:?}", other.typ()); } None => { for _i in 0..instr.arg.unwrap() { stack.push((None, tos_modifiers.deep_clone())); } } } } Mnemonic::BUILD_SET => { let mut set = std::collections::HashSet::new(); let mut push_none = false; let set_accessors = InstructionTracker::new(); for _i in 0..instr.arg.unwrap() { let (tos, tos_modifiers) = stack.pop().unwrap(); set_accessors.extend(&tos_modifiers); // we don't 
build the set if we can't resolve the args if tos.is_none() || push_none { push_none = true; continue; } tos_modifiers.push(access_tracking); set.insert(py27_marshal::ObjHashable::try_from(&tos.unwrap()).unwrap()); } set_accessors.push(access_tracking); if push_none { stack.push((None, set_accessors)); } else { stack.push(( Some(Obj::Set(Arc::new(std::sync::RwLock::new(set)))), set_accessors, )); } } Mnemonic::BUILD_TUPLE => { let mut tuple = Vec::new(); let mut push_none = false; let tuple_accessors = InstructionTracker::new(); for _i in 0..instr.arg.unwrap() { let (tos, tos_modifiers) = stack.pop().unwrap(); tuple_accessors.extend(&tos_modifiers); // we don't build the set if we can't resolve the args if tos.is_none() || push_none { push_none = true; continue; } tos_modifiers.push(access_tracking); tuple.push(tos.unwrap()); } tuple_accessors.push(access_tracking); if push_none { stack.push((None, tuple_accessors)); } else { stack.push((Some(Obj::Tuple(Arc::new(tuple))), tuple_accessors)); } } Mnemonic::BUILD_MAP => { let tracking = InstructionTracker::new(); tracking.push(access_tracking); let map = Some(Obj::Dict(Arc::new(std::sync::RwLock::new( std::collections::HashMap::with_capacity(instr.arg.unwrap() as usize), )))); stack.push((map, tracking)); } Mnemonic::LOAD_GLOBAL => { let tracking = InstructionTracker::new(); tracking.push(access_tracking); let name = &code.names[instr.arg.unwrap() as usize]; names_loaded.lock().unwrap().push(Arc::clone(name)); stack.push((None, tracking)); } Mnemonic::STORE_GLOBAL => { let (tos, accessing_instrs) = stack.pop().unwrap(); let name = &code.names[instr.arg.unwrap() as usize]; accessing_instrs.push(access_tracking); // Store TOS in a var slot globals.insert(Arc::clone(name), (tos, accessing_instrs)); } Mnemonic::LOAD_DEREF => { let tracking = InstructionTracker::new(); tracking.push(access_tracking); stack.push((None, tracking)); } Mnemonic::BUILD_LIST => { let mut list = Vec::new(); // TODO: this is always true right now 
to avoid // testing empty sets that are added to as truthy values let mut push_none = true; let tuple_accessors = InstructionTracker::new(); for _i in 0..instr.arg.unwrap() { let (tos, tos_modifiers) = stack.pop().unwrap(); tuple_accessors.extend(&tos_modifiers); // we don't build the set if we can't resolve the args if tos.is_none() { push_none = true; break; } tos_modifiers.push(access_tracking); list.push(tos.unwrap()); } tuple_accessors.push(access_tracking); if push_none { stack.push((None, tuple_accessors)); } else { stack.push(( Some(Obj::List(Arc::new(std::sync::RwLock::new(list)))), tuple_accessors, )); } } Mnemonic::BUILD_CLASS => { let (_tos, tos_accesses) = stack.pop().unwrap(); let (_tos1, tos1_accesses) = stack.pop().unwrap(); let (_tos2, tos2_accesses) = stack.pop().unwrap(); tos_accesses.extend(&tos1_accesses); tos_accesses.extend(&tos2_accesses); tos_accesses.push(access_tracking); stack.push((None, tos_accesses)); } Mnemonic::MAKE_FUNCTION => { let (_tos, tos_modifiers) = stack.pop().unwrap(); let tos_modifiers = tos_modifiers.deep_clone(); tos_modifiers.push(access_tracking); stack.push((None, tos_modifiers)); } Mnemonic::POP_TOP => { let (_tos, tos_modifiers) = stack.pop().unwrap(); tos_modifiers.push(access_tracking); } Mnemonic::GET_ITER => { // nop } Mnemonic::CALL_FUNCTION => { let accessed_instrs = InstructionTracker::new(); let kwarg_count = (instr.arg.unwrap() >> 8) & 0xFF; let mut kwargs = std::collections::HashMap::with_capacity(kwarg_count as usize); for _ in 0..kwarg_count { let (value, value_accesses) = stack.pop().unwrap(); accessed_instrs.extend(&value_accesses); let (key, key_accesses) = stack.pop().unwrap(); accessed_instrs.extend(&key_accesses); let key = key.map(|key| ObjHashable::try_from(&key).unwrap()); kwargs.insert(key, value); } let positional_args_count = instr.arg.unwrap() & 0xFF; let mut args = Vec::with_capacity(positional_args_count as usize); for _ in 0..positional_args_count { let (arg, arg_accesses) = 
stack.pop().unwrap(); accessed_instrs.extend(&arg_accesses); args.push(arg); } // Function code reference // NOTE: we skip the function accesses here since we don't really // want to be tracking across functions let function = stack.pop().unwrap(); let result = function_callback(function.0, args, kwargs); accessed_instrs.push(access_tracking); stack.push((result, accessed_instrs)); // No name resolution for now -- let's assume this is ord(). // This function is a nop since it returns its input // panic!( // "we're calling a function with {} args: {:#?}", // instr.arg.unwrap(), // stack[stack.len() - (1 + instr.arg.unwrap()) as usize] // ); } Mnemonic::CALL_FUNCTION_VAR => { let (_additional_positional_args, arg_accesses) = stack.pop().unwrap(); let accessed_instrs = arg_accesses.deep_clone(); let kwarg_count = (instr.arg.unwrap() >> 8) & 0xFF; let mut kwargs = std::collections::HashMap::with_capacity(kwarg_count as usize); for _ in 0..kwarg_count { let (value, value_accesses) = stack.pop().unwrap(); accessed_instrs.extend(&value_accesses); let (key, key_accesses) = stack.pop().unwrap(); accessed_instrs.extend(&key_accesses); let key = key.map(|key| ObjHashable::try_from(&key).unwrap()); kwargs.insert(key, value); } let positional_args_count = instr.arg.unwrap() & 0xFF; let mut args = Vec::with_capacity(positional_args_count as usize); for _ in 0..positional_args_count { let (arg, arg_accesses) = stack.pop().unwrap(); accessed_instrs.extend(&arg_accesses); args.push(arg); } // Function code reference // NOTE: we skip the function accesses here since we don't really // want to be tracking across functions let function = stack.pop().unwrap(); let result = function_callback(function.0, args, kwargs); accessed_instrs.push(access_tracking); stack.push((result, accessed_instrs)); } Mnemonic::CALL_FUNCTION_KW => { let (_additional_kw_args, arg_accesses) = stack.pop().unwrap(); let accessed_instrs = arg_accesses.deep_clone(); let kwarg_count = (instr.arg.unwrap() >> 8) & 
0xFF; let mut kwargs = std::collections::HashMap::with_capacity(kwarg_count as usize); for _ in 0..kwarg_count { let (value, value_accesses) = stack.pop().unwrap(); accessed_instrs.extend(&value_accesses); let (key, key_accesses) = stack.pop().unwrap(); accessed_instrs.extend(&key_accesses); let key = key.map(|key| ObjHashable::try_from(&key).unwrap()); kwargs.insert(key, value); } let positional_args_count = instr.arg.unwrap() & 0xFF; let mut args = Vec::with_capacity(positional_args_count as usize); for _ in 0..positional_args_count { let (arg, arg_accesses) = stack.pop().unwrap(); accessed_instrs.extend(&arg_accesses); args.push(arg); } // Function code reference // NOTE: we skip the function accesses here since we don't really // want to be tracking across functions let function = stack.pop().unwrap(); let result = function_callback(function.0, args, kwargs); accessed_instrs.push(access_tracking); stack.push((result, accessed_instrs)); } Mnemonic::CALL_FUNCTION_VAR_KW => { let (_additional_kw_args, arg_accesses) = stack.pop().unwrap(); let accessed_instrs = arg_accesses.deep_clone(); let (_additional_positional_args, arg_accesses) = stack.pop().unwrap(); accessed_instrs.extend(&arg_accesses); let kwarg_count = (instr.arg.unwrap() >> 8) & 0xFF; let mut kwargs = std::collections::HashMap::with_capacity(kwarg_count as usize); for _ in 0..kwarg_count { let (value, value_accesses) = stack.pop().unwrap(); accessed_instrs.extend(&value_accesses); let (key, key_accesses) = stack.pop().unwrap(); accessed_instrs.extend(&key_accesses); let key = key.map(|key| ObjHashable::try_from(&key).unwrap()); kwargs.insert(key, value); } let positional_args_count = instr.arg.unwrap() & 0xFF; let mut args = Vec::with_capacity(positional_args_count as usize); for _ in 0..positional_args_count { let (arg, arg_accesses) = stack.pop().unwrap(); accessed_instrs.extend(&arg_accesses); args.push(arg); } // Function code reference // NOTE: we skip the function accesses here since we don't 
really // want to be tracking across functions let function = stack.pop().unwrap(); let result = function_callback(function.0, args, kwargs); accessed_instrs.push(access_tracking); stack.push((result, accessed_instrs)); } Mnemonic::POP_BLOCK | Mnemonic::JUMP_ABSOLUTE => { // nops } Mnemonic::PRINT_ITEM => { stack.pop(); } Mnemonic::PRINT_ITEM_TO => { stack.pop(); stack.pop(); } Mnemonic::PRINT_NEWLINE => { // nop } Mnemonic::PRINT_NEWLINE_TO => { stack.pop(); } Mnemonic::STORE_MAP => { let (key, key_accesses) = stack.pop().unwrap(); let (value, value_accesses) = stack.pop().unwrap(); let (dict, dict_accesses) = stack.pop().unwrap(); let mut new_accesses = dict_accesses; new_accesses.extend(&value_accesses); new_accesses.extend(&key_accesses); new_accesses.push(access_tracking); if dict.is_none() || key.is_none() || value.is_none() { // We cannot track the state of at least one of these variables. Corrupt // the entire state. // TODO: this is a bit aggressive. In the future when we develop a new map type // we should be able to track individual keys stack.push((None, new_accesses)); return Ok(()); } let dict_lock = dict.unwrap().extract_dict().unwrap(); let mut dict = dict_lock.write().unwrap(); let hashable_key: ObjHashable = key .as_ref() .unwrap() .try_into() .expect("key is not hashable"); dict.insert(hashable_key, value.unwrap()); drop(dict); stack.push((Some(Obj::Dict(dict_lock)), new_accesses)); } Mnemonic::MAP_ADD => { return Err( crate::error::ExecutionError::UnsupportedOpcode(Mnemonic::MAP_ADD.into()).into(), ); // let (value, value_accesses) = stack.pop().unwrap(); // let (dict, dict_accesses) = stack.pop().unwrap(); // let mut new_accesses = dict_accesses; // new_accesses.extend(&value_accesses); // new_accesses.push(access_tracking); // if dict.is_none() || key.is_none() || value.is_none() { // // We cannot track the state of at least one of these variables. Corrupt // // the entire state. // // TODO: this is a bit aggressive. 
In the future when we develop a new map type // // we should be able to track individual keys // stack.push((None, new_accesses)); // return Ok(()); // } // let arc_dict = dict.unwrap().extract_dict().unwrap(); // let mut dict = arc_dict.write().unwrap(); // let hashable_key: ObjHashable = key.as_ref().unwrap().try_into().expect("key is not hashable"); // dict.insert(hashable_key, value.unwrap()); // drop(dict); // stack.push((Some(Obj::Dict(arc_dict)), new_accesses)); } Mnemonic::YIELD_VALUE => { // todo: add to generator let (_tos, _accesses) = stack.pop().unwrap(); } other => { return Err(crate::error::ExecutionError::UnsupportedOpcode(other.into()).into()); } } Ok(()) } /// Represents an instruction that was parsed from its raw bytecode. #[derive(Debug, Clone, Eq, PartialEq)] pub enum ParsedInstr<O: Opcode<Mnemonic = py27::Mnemonic>> { Good(Arc<Instruction<O>>), Bad, } impl<O: Opcode<Mnemonic = py27::Mnemonic>> ParsedInstr<O> { #[track_caller] pub fn unwrap(&self) -> Arc<Instruction<O>> { if let ParsedInstr::Good(ins) = self { Arc::clone(ins) } else { panic!("unwrap called on bad instruction") } } } /// Walks the bytecode in a manner that only follows what "looks like" valid /// codepaths. This will only decode instructions that are either proven statically /// to be taken (with `JUMP_ABSOLUTE`, `JUMP_IF_TRUE` with a const value that evaluates /// to true, etc.) pub fn const_jmp_instruction_walker<F, O: Opcode<Mnemonic = py27::Mnemonic>>( bytecode: &[u8], consts: Arc<Vec<Obj>>, mut callback: F, ) -> Result<BTreeMap<u64, ParsedInstr<O>>, Error<O>> where F: FnMut(&Instruction<O>, u64) -> WalkerState, { let debug = !true; let mut rdr = Cursor::new(bytecode); let mut instruction_sequence = Vec::new(); let mut analyzed_instructions = BTreeMap::<u64, ParsedInstr<O>>::new(); // Offset of instructions that need to be read let mut instruction_queue = VecDeque::<u64>::new(); instruction_queue.push_front(0); macro_rules! 
queue { ($offset:expr) => { queue!($offset, false) }; ($offset:expr, $force_queue:expr) => { if $offset as usize > bytecode.len() { panic!( "bad offset queued: 0x{:X} (bufsize is 0x{:X}). Analyzed instructions: {:#?}", $offset, bytecode.len(), analyzed_instructions ); } if $force_queue { if debug { trace!("adding instruction at {} to front queue", $offset); } instruction_queue.push_front($offset); } else if (!analyzed_instructions.contains_key(&$offset) && !instruction_queue.contains(&$offset)) { if debug { trace!("adding instruction at {} to queue", $offset); } instruction_queue.push_back($offset); } }; }; if debug { trace!("{:#?}", consts); } 'decode_loop: while let Some(offset) = instruction_queue.pop_front() { if debug { trace!("offset: {}", offset); } if offset as usize == bytecode.len() { continue; } rdr.set_position(offset); // Ignore invalid instructions let instr = match decode_py27(&mut rdr) { Ok(instr) => Arc::new(instr), Err(e @ pydis::error::DecodeError::UnknownOpcode(_)) => { trace!(""); debug!( "Error decoding queued instruction at position: {}: {}", offset, e ); trace!( "previous: {:?}", instruction_sequence[instruction_sequence.len() - 1] ); //remove_bad_instructions_behind_offset(offset, &mut analyzed_instructions); // rdr.set_position(offset); // let instr_size = rdr.position() - offset; // let mut data = vec![0u8; instr_size as usize]; // rdr.read_exact(data.as_mut_slice())?; // let data_rc = Rc::new(data); analyzed_instructions.insert(offset, ParsedInstr::Bad); instruction_sequence.push(ParsedInstr::Bad); //queue!(rdr.position()); continue; } Err(e) => { if cfg!(debug_assertions) { panic!("{:?}", e); } return Err(e.into()); } }; trace!("{}", bytecode[offset as usize]); trace!("{:?}", instr); let next_instr_offset = rdr.position(); let state = callback(&instr, offset); // We should stop decoding now if matches!(state, WalkerState::Break) { break; } if let WalkerState::JumpTo(offset) = &state { queue!(*offset, true); continue; } 
//println!("Instruction: {:X?}", instr); instruction_sequence.push(ParsedInstr::Good(Arc::clone(&instr))); analyzed_instructions.insert(offset, ParsedInstr::Good(Arc::clone(&instr))); let mut ignore_jump_target = false; if instr.opcode.is_jump() { if matches!( instr.opcode.mnemonic(), Mnemonic::JUMP_ABSOLUTE | Mnemonic::JUMP_FORWARD ) { // We've reached an unconditional jump. We need to decode the target let target = if instr.opcode.is_relative_jump() { next_instr_offset + instr.arg.unwrap() as u64 } else { instr.arg.unwrap() as u64 }; if target as usize >= bytecode.len() { // This is a bad instruction. Replace it with bad instr analyzed_instructions.insert(offset, ParsedInstr::Bad); instruction_sequence.push(ParsedInstr::Bad); continue; } rdr.set_position(target); match decode_py27::<O, _>(&mut rdr) { Ok(_instr) => { // Queue the target queue!(target, state.force_queue_next()); continue; } Err(e @ pydis::error::DecodeError::UnknownOpcode(_)) => { // Definitely do not queue this target ignore_jump_target = true; debug!( "Error while parsing target opcode: {} at position {}", e, offset ); } Err(e) => { return Err(e.into()); } } } } let ignore_jump_target = false; if !ignore_jump_target && instr.opcode.is_absolute_jump() { if instr.arg.unwrap() as usize > bytecode.len() { debug!("instruction {:?} at {} has a bad target", instr, offset); //remove_bad_instructions_behind_offset(offset, &mut analyzed_instructions); } else { queue!(instr.arg.unwrap() as u64, state.force_queue_next()); } } if !ignore_jump_target && instr.opcode.is_relative_jump() { let target = next_instr_offset + instr.arg.unwrap() as u64; if target as usize > bytecode.len() { debug!("instruction {:?} at {} has a bad target", instr, offset); //remove_bad_instructions_behind_offset(offset, &mut analyzed_instructions); } else { queue!(target as u64); } } if instr.opcode.mnemonic() != Mnemonic::RETURN_VALUE && instr.opcode.mnemonic() != Mnemonic::RAISE_VARARGS { queue!(next_instr_offset, 
state.force_queue_next()); } } if true || debug { trace!("analyzed\n{:#?}", analyzed_instructions); } Ok(analyzed_instructions) } fn remove_bad_instructions_behind_offset<O: Opcode<Mnemonic = py27::Mnemonic>>( offset: u64, analyzed_instructions: &mut BTreeMap<u64, Arc<Instruction<O>>>, ) { // We need to remove all instructions parsed between the last // conditional jump and this instruction if let Some(last_jump_offset) = analyzed_instructions .iter() .rev() .find_map(|(addr, instr)| { if *addr < offset && instr.opcode.is_jump() { Some(*addr) } else { None } }) { let bad_offsets: Vec<u64> = analyzed_instructions .keys() .into_iter() .filter(|addr| **addr > last_jump_offset && **addr < offset) .copied() .collect(); for offset in bad_offsets { trace!("removing {:?}", analyzed_instructions.get(&offset)); analyzed_instructions.remove(&offset); } } } #[macro_export] macro_rules! Instr { ($opcode:expr) => { Instruction { opcode: $opcode, arg: None, } }; ($opcode:expr, $arg:expr) => { Instruction { opcode: $opcode, arg: Some($arg), } }; } #[cfg(test)] pub(crate) mod tests { use super::*; use num_bigint::BigInt; use py27_marshal::bstr::BString; use std::sync::{Arc, RwLock}; type TargetOpcode = pydis::opcode::py27::Standard; #[macro_export] macro_rules! Long { ($value:expr) => { py27_marshal::Obj::Long(Arc::new(BigInt::from($value))) }; } #[macro_export] macro_rules! 
String { ($value:expr) => { py27_marshal::Obj::String(Arc::new(bstr::BString::from($value))) }; } #[test] fn binary_xor() { let (mut stack, mut vars, mut names, mut globals, names_loaded) = setup_vm_vars(); let mut code = default_code_obj(); let left = 0b10101010_11111111; let right = 0b01010101_11111111; let expected = left ^ right; let consts = vec![Long!(left), Long!(right)]; Arc::get_mut(&mut code).unwrap().consts = Arc::new(consts); let instrs = [ Instr!(TargetOpcode::LOAD_CONST, 0), Instr!(TargetOpcode::LOAD_CONST, 1), Instr!(TargetOpcode::BINARY_XOR), ]; for instr in &instrs { execute_instruction( instr, Arc::clone(&code), &mut stack, &mut vars, &mut names, &mut globals, Arc::clone(&names_loaded), |_f, _args, _kwargs| { panic!("functions should not be invoked"); }, (), ) .expect("unexpected error") } assert_eq!(stack.len(), 1); match &stack[0].0 { Some(Obj::Long(l)) => { assert_eq!(*l.as_ref(), expected.to_bigint().unwrap()); } Some(other) => panic!("unexpected type: {:?}", other.typ()), _ => panic!("unexpected None value for TOS"), } } #[test] fn binary_lshift() { let (mut stack, mut vars, mut names, mut globals, names_loaded) = setup_vm_vars(); let mut code = default_code_obj(); let left = 0b10101010_11111111; let right = 3; let expected = left << right; let consts = vec![Long!(left), Long!(right)]; Arc::get_mut(&mut code).unwrap().consts = Arc::new(consts); let instrs = [ Instr!(TargetOpcode::LOAD_CONST, 0), Instr!(TargetOpcode::LOAD_CONST, 1), Instr!(TargetOpcode::BINARY_LSHIFT), ]; for instr in &instrs { execute_instruction( instr, Arc::clone(&code), &mut stack, &mut vars, &mut names, &mut globals, Arc::clone(&names_loaded), |_f, _args, _kwargs| { panic!("functions should not be invoked"); }, (), ) .expect("unexpected error") } assert_eq!(stack.len(), 1); match &stack[0].0 { Some(Obj::Long(l)) => { assert_eq!(*l.as_ref(), expected.to_bigint().unwrap()); } Some(other) => panic!("unexpected type: {:?}", other.typ()), _ => panic!("unexpected None value for 
TOS"), } } #[test] fn binary_rshift() { let (mut stack, mut vars, mut names, mut globals, names_loaded) = setup_vm_vars(); let mut code = default_code_obj(); let left = 0b10101010_11111111; let right = 3; let expected = left >> right; let consts = vec![Long!(left), Long!(right)]; Arc::get_mut(&mut code).unwrap().consts = Arc::new(consts); let instrs = [ Instr!(TargetOpcode::LOAD_CONST, 0), Instr!(TargetOpcode::LOAD_CONST, 1), Instr!(TargetOpcode::BINARY_RSHIFT), ]; for instr in &instrs { execute_instruction( instr, Arc::clone(&code), &mut stack, &mut vars, &mut names, &mut globals, Arc::clone(&names_loaded), |_f, _args, _kwargs| { panic!("functions should not be invoked"); }, (), ) .expect("unexpected error") } assert_eq!(stack.len(), 1); match &stack[0].0 { Some(Obj::Long(l)) => { assert_eq!(*l.as_ref(), expected.to_bigint().unwrap()); } Some(other) => panic!("unexpected type: {:?}", other.typ()), _ => panic!("unexpected None value for TOS"), } } #[test] fn binary_modulo() { let (mut stack, mut vars, mut names, mut globals, names_loaded) = setup_vm_vars(); let mut code = default_code_obj(); let left = 5; let right = 3; let expected = left % right; let consts = vec![Long!(left), Long!(right)]; Arc::get_mut(&mut code).unwrap().consts = Arc::new(consts); let instrs = [ Instr!(TargetOpcode::LOAD_CONST, 0), Instr!(TargetOpcode::LOAD_CONST, 1), Instr!(TargetOpcode::BINARY_MODULO), ]; for instr in &instrs { execute_instruction( instr, Arc::clone(&code), &mut stack, &mut vars, &mut names, &mut globals, Arc::clone(&names_loaded), |_f, _args, _kwargs| { panic!("functions should not be invoked"); }, (), ) .expect("unexpected error") } assert_eq!(stack.len(), 1); match &stack[0].0 { Some(Obj::Long(l)) => { assert_eq!(*l.as_ref(), expected.to_bigint().unwrap()); } Some(other) => panic!("unexpected type: {:?}", other.typ()), _ => panic!("unexpected None value for TOS"), } } #[test] fn binary_divide_longs() { let (mut stack, mut vars, mut names, mut globals, names_loaded) = 
setup_vm_vars(); let mut code = default_code_obj(); let left = 5; let right = 3; let expected = left / right; let consts = vec![Long!(left), Long!(right)]; Arc::get_mut(&mut code).unwrap().consts = Arc::new(consts); let instrs = [ Instr!(TargetOpcode::LOAD_CONST, 0), Instr!(TargetOpcode::LOAD_CONST, 1), Instr!(TargetOpcode::BINARY_DIVIDE), ]; for instr in &instrs { execute_instruction( instr, Arc::clone(&code), &mut stack, &mut vars, &mut names, &mut globals, Arc::clone(&names_loaded), |_f, _args, _kwargs| { panic!("functions should not be invoked"); }, (), ) .expect("unexpected error") } assert_eq!(stack.len(), 1); match &stack[0].0 { Some(Obj::Long(l)) => { assert_eq!(*l.as_ref(), expected.to_bigint().unwrap()); } Some(other) => panic!("unexpected type: {:?}", other.typ()), _ => panic!("unexpected None value for TOS"), } } #[test] fn binary_floor_divide_longs() { let (mut stack, mut vars, mut names, mut globals, names_loaded) = setup_vm_vars(); let mut code = default_code_obj(); let left = 5; let right = 3; let expected = left / right; let consts = vec![Long!(left), Long!(right)]; Arc::get_mut(&mut code).unwrap().consts = Arc::new(consts); let instrs = [ Instr!(TargetOpcode::LOAD_CONST, 0), Instr!(TargetOpcode::LOAD_CONST, 1), Instr!(TargetOpcode::BINARY_FLOOR_DIVIDE), ]; for instr in &instrs { execute_instruction( instr, Arc::clone(&code), &mut stack, &mut vars, &mut names, &mut globals, Arc::clone(&names_loaded), |_f, _args, _kwargs| { panic!("functions should not be invoked"); }, (), ) .expect("unexpected error") } assert_eq!(stack.len(), 1); match &stack[0].0 { Some(Obj::Long(l)) => { assert_eq!(*l.as_ref(), expected.to_bigint().unwrap()); } Some(other) => panic!("unexpected type: {:?}", other.typ()), _ => panic!("unexpected None value for TOS"), } } #[test] fn binary_positive_pow_longs() { let (mut stack, mut vars, mut names, mut globals, names_loaded) = setup_vm_vars(); let mut code = default_code_obj(); let left = 5u32; let right = 3; let expected = 
left.pow(right); let consts = vec![Long!(left), Long!(right)]; Arc::get_mut(&mut code).unwrap().consts = Arc::new(consts); let instrs = [ Instr!(TargetOpcode::LOAD_CONST, 0), Instr!(TargetOpcode::LOAD_CONST, 1), Instr!(TargetOpcode::BINARY_POWER), ]; for instr in &instrs { execute_instruction( instr, Arc::clone(&code), &mut stack, &mut vars, &mut names, &mut globals, Arc::clone(&names_loaded), |_f, _args, _kwargs| { panic!("functions should not be invoked"); }, (), ) .expect("unexpected error") } assert_eq!(stack.len(), 1); match &stack[0].0 { Some(Obj::Long(l)) => { assert_eq!(*l.as_ref(), expected.to_bigint().unwrap()); } Some(other) => panic!("unexpected type: {:?}", other.typ()), _ => panic!("unexpected None value for TOS"), } } #[test] fn binary_negative_pow_longs() { let (mut stack, mut vars, mut names, mut globals, names_loaded) = setup_vm_vars(); let mut code = default_code_obj(); let left = 5u32; let right = -3i32; let expected = 1.0 / left.pow((-right) as u32) as f64; let consts = vec![Long!(left), Long!(right)]; Arc::get_mut(&mut code).unwrap().consts = Arc::new(consts); let instrs = [ Instr!(TargetOpcode::LOAD_CONST, 0), Instr!(TargetOpcode::LOAD_CONST, 1), Instr!(TargetOpcode::BINARY_POWER), ]; for instr in &instrs { execute_instruction( instr, Arc::clone(&code), &mut stack, &mut vars, &mut names, &mut globals, Arc::clone(&names_loaded), |_f, _args, _kwargs| { panic!("functions should not be invoked"); }, (), ) .expect("unexpected error") } assert_eq!(stack.len(), 1); match &stack[0].0 { Some(Obj::Float(f)) => { assert_eq!(*f, expected); } Some(other) => panic!("unexpected type: {:?}", other.typ()), _ => panic!("unexpected None value for TOS"), } } #[test] fn binary_true_divide_longs() { let (mut stack, mut vars, mut names, mut globals, names_loaded) = setup_vm_vars(); let mut code = default_code_obj(); let left = 5; let right = 3; let expected = left as f64 / right as f64; let consts = vec![Long!(left), Long!(right)]; Arc::get_mut(&mut 
code).unwrap().consts = Arc::new(consts); let instrs = [ Instr!(TargetOpcode::LOAD_CONST, 0), Instr!(TargetOpcode::LOAD_CONST, 1), Instr!(TargetOpcode::BINARY_TRUE_DIVIDE), ]; for instr in &instrs { execute_instruction( instr, Arc::clone(&code), &mut stack, &mut vars, &mut names, &mut globals, Arc::clone(&names_loaded), |_f, _args, _kwargs| { panic!("functions should not be invoked"); }, (), ) .expect("unexpected error") } assert_eq!(stack.len(), 1); match &stack[0].0 { Some(Obj::Float(f)) => { assert_eq!(*f, expected); } Some(other) => panic!("unexpected type: {:?}", other.typ()), _ => panic!("unexpected None value for TOS"), } } #[test] fn unary_not_long() { let (mut stack, mut vars, mut names, mut globals, names_loaded) = setup_vm_vars(); let mut code = default_code_obj(); let num = 5u32; let expected = false; let consts = vec![Long!(num)]; Arc::get_mut(&mut code).unwrap().consts = Arc::new(consts); let instrs = [ Instr!(TargetOpcode::LOAD_CONST, 0), Instr!(TargetOpcode::UNARY_NOT), ]; for instr in &instrs { execute_instruction( instr, Arc::clone(&code), &mut stack, &mut vars, &mut names, &mut globals, Arc::clone(&names_loaded), |_f, _args, _kwargs| { panic!("functions should not be invoked"); }, (), ) .expect("unexpected error") } assert_eq!(stack.len(), 1); match &stack[0].0 { Some(Obj::Bool(result)) => { assert_eq!(*result, expected); } Some(other) => panic!("unexpected type: {:?}", other.typ()), _ => panic!("unexpected None value for TOS"), } } #[test] fn unary_negative_long() { let (mut stack, mut vars, mut names, mut globals, names_loaded) = setup_vm_vars(); let mut code = default_code_obj(); let num = 5u32; let expected = -5i32; let consts = vec![Long!(num)]; Arc::get_mut(&mut code).unwrap().consts = Arc::new(consts); let instrs = [ Instr!(TargetOpcode::LOAD_CONST, 0), Instr!(TargetOpcode::UNARY_NEGATIVE), ]; for instr in &instrs { execute_instruction( instr, Arc::clone(&code), &mut stack, &mut vars, &mut names, &mut globals, Arc::clone(&names_loaded), 
|_f, _args, _kwargs| { panic!("functions should not be invoked"); }, (), ) .expect("unexpected error") } assert_eq!(stack.len(), 1); match &stack[0].0 { Some(Obj::Long(l)) => { assert_eq!(*l.as_ref(), expected.to_bigint().unwrap()); } Some(other) => panic!("unexpected type: {:?}", other.typ()), _ => panic!("unexpected None value for TOS"), } } #[test] fn store_subscr_list() { let (mut stack, mut vars, mut names, mut globals, names_loaded) = setup_vm_vars(); let mut code = default_code_obj(); let key = Long!(0); let value = Long!(0x41); let mut expected_list = vec![0x41]; let actual_list = Obj::List(Arc::new(RwLock::new(vec![Long!(0)]))); let consts = vec![actual_list.clone(), key, value]; Arc::get_mut(&mut code).unwrap().consts = Arc::new(consts); let instrs = [ // Load value on to stack Instr!(TargetOpcode::LOAD_CONST, 2), // Load list on to stack Instr!(TargetOpcode::LOAD_CONST, 0), // Load key on to stack Instr!(TargetOpcode::LOAD_CONST, 1), Instr!(TargetOpcode::STORE_SUBSCR), ]; for instr in &instrs { execute_instruction( instr, Arc::clone(&code), &mut stack, &mut vars, &mut names, &mut globals, Arc::clone(&names_loaded), |_f, _args, _kwargs| { panic!("functions should not be invoked"); }, (), ) .expect("unexpected error") } assert!(stack.is_empty()); match &actual_list { Obj::List(list_lock) => { let list = list_lock.read().unwrap(); assert_eq!(list.len(), 1); assert_eq!(*list[0].clone().extract_long().unwrap(), BigInt::from(0x41)); } other => panic!("unexpected type: {:?}", other.typ()), } } #[test] fn store_subscr_dict() { let (mut stack, mut vars, mut names, mut globals, names_loaded) = setup_vm_vars(); let mut code = default_code_obj(); let key = String!("key"); let value = Long!(0x41); let mut expected_hashmap = HashMap::new(); expected_hashmap.insert(ObjHashable::try_from(&key).unwrap(), value.clone()); let actual_dict = Obj::Dict(Default::default()); let consts = vec![actual_dict.clone(), key, value]; Arc::get_mut(&mut code).unwrap().consts = 
Arc::new(consts); let instrs = [ // Load value on to stack Instr!(TargetOpcode::LOAD_CONST, 2), // Load dict on to stack Instr!(TargetOpcode::LOAD_CONST, 0), // Load key on to stack Instr!(TargetOpcode::LOAD_CONST, 1), Instr!(TargetOpcode::STORE_SUBSCR), ]; for instr in &instrs { execute_instruction( instr, Arc::clone(&code), &mut stack, &mut vars, &mut names, &mut globals, Arc::clone(&names_loaded), |_f, _args, _kwargs| { panic!("functions should not be invoked"); }, (), ) .expect("unexpected error") } assert!(stack.is_empty()); match &actual_dict { Obj::Dict(dict_lock) => { let actual_dict = dict_lock.read().unwrap(); for (key, expected_value) in &expected_hashmap { let actual_value = actual_dict.get(key); assert!(actual_value.is_some()); let actual_value = actual_value.unwrap().clone().extract_long(); let expected_value = expected_value.clone().extract_long().unwrap(); assert_eq!(expected_value, actual_value.unwrap()); } } other => panic!("unexpected type: {:?}", other.typ()), } } #[test] fn store_map() { let (mut stack, mut vars, mut names, mut globals, names_loaded) = setup_vm_vars(); let mut code = default_code_obj(); let key = String!("key"); let value = Long!(0x41); let mut expected_hashmap = HashMap::new(); expected_hashmap.insert(ObjHashable::try_from(&key).unwrap(), value.clone()); let consts = vec![Obj::Dict(Default::default()), key, value]; Arc::get_mut(&mut code).unwrap().consts = Arc::new(consts); let instrs = [ // Load dict on to stack Instr!(TargetOpcode::LOAD_CONST, 0), // Load value on to stack Instr!(TargetOpcode::LOAD_CONST, 2), // Load key on to stack Instr!(TargetOpcode::LOAD_CONST, 1), Instr!(TargetOpcode::STORE_MAP), ]; for instr in &instrs { execute_instruction( instr, Arc::clone(&code), &mut stack, &mut vars, &mut names, &mut globals, Arc::clone(&names_loaded), |_f, _args, _kwargs| { panic!("functions should not be invoked"); }, (), ) .expect("unexpected error") } // The dict should still be on the stack assert_eq!(stack.len(), 1, "stack 
size is not 1"); match &stack[0].0 { Some(Obj::Dict(dict)) => { let actual_dict = dict.read().unwrap(); for (key, expected_value) in &expected_hashmap { let actual_value = actual_dict.get(key); assert!(actual_value.is_some()); let actual_value = actual_value.unwrap().clone().extract_long(); let expected_value = expected_value.clone().extract_long().unwrap(); assert_eq!(expected_value, actual_value.unwrap()); } } Some(other) => panic!("unexpected type: {:?}", other.typ()), _ => panic!("unexpected None value for TOS"), } } pub(crate) fn setup_vm_vars() -> ( VmStack<()>, VmVars<()>, VmNames<()>, VmNames<()>, LoadedNames, ) { ( VmStack::new(), VmVars::new(), VmNames::new(), VmNames::new(), LoadedNames::default(), ) } pub(crate) fn default_code_obj() -> Arc<Code> { Arc::new(py27_marshal::Code { argcount: 0, nlocals: 0, stacksize: 0, flags: CodeFlags::OPTIMIZED, code: Arc::new(vec![]), consts: Arc::new(vec![]), names: vec![], varnames: vec![], freevars: vec![], cellvars: vec![], filename: Arc::new(BString::from("filename")), name: Arc::new(BString::from("name")), firstlineno: 0, lnotab: Arc::new(vec![]), }) } }
//! Tests auto-converted from "sass-spec/spec/libsass-closed-issues/issue_2295" #[allow(unused)] use super::rsass; // From "sass-spec/spec/libsass-closed-issues/issue_2295/basic.hrx" #[test] #[ignore] // wrong result fn basic() { assert_eq!( rsass( ".my-scope {\r\ \n @import \'include.scss\';\r\ \n}" ) .unwrap(), ".my-scope .foo {\ \n display: none;\ \n}\ \n" ); } // From "sass-spec/spec/libsass-closed-issues/issue_2295/original.hrx" #[test] #[ignore] // wrong result fn original() { assert_eq!( rsass( "$include-foo: true !default;\r\ \n.my-scope {\r\ \n .bar { display: none; }\r\ \n @if ($include-foo) {\r\ \n .foo { display: none; }\r\ \n }\r\ \n @import \'input-bug\';\r\ \n}" ) .unwrap(), ".my-scope .bar {\ \n display: none;\ \n}\ \n.my-scope .foo {\ \n display: none;\ \n}\ \n.my-scope .bar {\ \n display: none;\ \n}\ \n.my-scope .foo {\ \n display: none;\ \n}\ \n" ); }
use std::{error::Error, fs, str::from_utf8};

use framework::{app, App, Canvas, Color, KeyCode, KeyState, KeyboardInput, Rect};
use roxmltree::{Document, Node};
use serde::{Deserialize, Serialize};

// Application state: the instruction sections extracted from the ARM XML
// reference, plus the current scroll offset into that list.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct AsmData {
    // One entry per <instructionsection> found in the XML file.
    file_info: Vec<FileInfo>,
    // Index of the first entry currently displayed (scroll position).
    offset: usize,
}

// Raw XML snippets for a single instruction section; the XML is re-parsed
// on every draw, so we only store the source text here.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct FileInfo {
    // Lower-cased section id, e.g. "add_addsub_imm".
    name: String,
    // <asmtemplate> fragments (XML source text).
    asm: Vec<String>,
    // Brief description paragraph text.
    desc: String,
    // <box> fragments from the <regdiagram> (XML source text).
    regdiagram: Vec<String>,
}

use std::fmt::Write;

// Returns the leading-whitespace width (in chars) of the first non-empty line.
// Returns 0 if every line is empty/whitespace.
fn find_indent(s: &str) -> usize {
    let lines = s.lines();
    // Find the indent of the first non-empty line
    lines
        .clone()
        .find(|line| !line.trim().is_empty())
        .map(|line| line.chars().take_while(|c| c.is_whitespace()).count())
        .unwrap_or(0)
}

// Strips the first non-empty line's indent from every line and re-joins with '\n'.
// NOTE(review): the indent is counted in *chars* (find_indent) but removed via a
// *byte* slice below — this panics or mis-trims if the leading whitespace contains
// multi-byte characters. Presumably inputs are ASCII; confirm before relying on it.
fn remove_common_indent(s: &str) -> String {
    // Find the indent of the first non-empty line
    let first_non_empty_indent = find_indent(s);
    let lines = s.lines();
    // Remove that indent from all lines
    lines
        .map(|line| {
            if line.len() >= first_non_empty_indent {
                &line[first_non_empty_indent..]
            } else {
                line
            }
        })
        .collect::<Vec<&str>>()
        .join("\n")
}

impl App for AsmData {
    type State = AsmData;

    // Start with no data loaded; the XML file is read lazily on first click
    // (see on_click -> get_xml_stuff).
    fn init() -> Self {
        // local directory from cargo
        // let cargo_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap();
        // let xml_file_path = format!("{}/{}", cargo_dir, "resources/onebigfile.xml");
        Self {
            file_info: vec![],
            offset: 0,
        }
    }

    // Render up to 5 instruction sections starting at `self.offset`, generating
    // a Rust struct skeleton for each from its register diagram.
    fn draw(&mut self) {
        let mut canvas = Canvas::new();
        let foreground = Color::parse_hex("#dc9941");
        let background = Color::parse_hex("#353f38");
        let bounding_rect = Rect::new(0.0, 0.0, 2500.0, 1500.0);
        canvas.save();
        canvas.set_color(&background);
        canvas.clip_rect(bounding_rect);
        canvas.draw_rrect(bounding_rect, 20.0);
        canvas.set_color(&foreground);
        canvas.translate(50.0, 50.0);
        if self.file_info.is_empty() {
            canvas.draw_str("Click to load file", 0.0, 0.0);
        }
        for file_info in self.file_info.iter().skip(self.offset).take(5) {
            let name = file_info.name.to_ascii_uppercase();
            canvas.draw_str(&format!("# {}", &name), 0.0, 0.0);
            canvas.translate(0.0, 40.0);
canvas.draw_str(&format!("# {}", &file_info.desc), 0.0, 0.0); canvas.translate(0.0, 40.0); for asm in file_info.asm.iter() { let asm = Document::parse(asm).unwrap(); let texts = asm .descendants() .filter(|x| x.has_tag_name("a")) .map(|x| x.text().unwrap_or("")); canvas.save(); canvas.draw_str("#", 0.0, 0.0); canvas.translate(30.0, 0.0); for text in texts { canvas.draw_str(text, 0.0, 0.0); canvas.translate(80.0, 0.0); } canvas.restore(); canvas.translate(0.0, 40.0); } canvas.translate(0.0, 40.0); struct Field { name: String, shift: u32, bits: String, width: u32, required: bool, } let mut fields = vec![]; canvas.save(); for regdiagram in file_info.regdiagram.iter() { let document = roxmltree::Document::parse(regdiagram).unwrap(); let root = document.root_element(); let name = root.attribute("name").unwrap_or(""); let bits: Vec<String> = root .descendants() .filter(|child| !child.is_text()) .filter_map(|child| { let text = child.text().unwrap_or_default().trim(); if !text.is_empty() { Some(text.replace(['(', ')'], "")) } else { None } }) .collect(); let hibit = root.attribute("hibit").unwrap().parse::<u32>().unwrap() + 1; let width = root .attribute("width") .unwrap_or("1") .parse::<u32>() .unwrap(); let shift = hibit - width; fields.push(Field { name: name.to_string(), shift, bits: if bits.is_empty() { "0".repeat(width as usize).to_string() } else { bits.join("") }, width, required: bits.is_empty(), }) } let params = fields .iter() .filter(|x| x.required) .map(|x| format!("{}: u32", x.name)) .collect::<Vec<String>>() .join(", "); let indent = " "; let mut define_params = String::new(); for field in fields.iter().filter(|x| x.required) { let name = field.name.to_ascii_lowercase(); define_params .write_str(&format!("{}{}: u32,\n", indent, name)) .unwrap(); } if !define_params.is_empty() { define_params.pop(); define_params = define_params[indent.len()..].to_string(); } let indent = " "; // rd: check_mask(rd, 0x1f), let mut init_params = String::new(); for field in 
fields.iter().filter(|x| x.required) { let mask = (1 << field.width) - 1; let name = field.name.to_ascii_lowercase(); init_params .write_str(&format!( "{}{}: check_mask({}, {:#02x}),\n", indent, name, name, mask )) .unwrap(); } if !init_params.is_empty() { init_params.pop(); init_params = init_params[indent.len()..].to_string(); } let mut encode = String::new(); let all_bits = fields .iter() .map(|x| x.bits.clone()) .collect::<Vec<String>>() .join("_"); encode.write_str(&format!("0b{}\n", all_bits)).unwrap(); // for lack of a better thing to do I copied from template let indent = " "; for field in fields.iter().filter(|x| x.required) { let name = field.name.to_ascii_lowercase(); encode .write_str(&format!("{}| (self.{} << {})\n", indent, name, field.shift)) .unwrap(); } encode.pop(); let template = remove_common_indent(&format!( " pub struct {name} {{ {define_params} }} impl {name} {{ pub fn new({params}) -> Self {{ {name} {{ {init_params} }} }} pub fn encode(&self) -> u32 {{ {encode} }} }} " )); for line in template.lines() { canvas.draw_str(line, 0.0, 0.0); canvas.translate(0.0, 40.0); } canvas.translate(0.0, file_info.regdiagram.len() as f32 * 5.0 + 30.0); } canvas.restore(); } fn on_click(&mut self, _x: f32, _y: f32) { // grab the xml file // self.xml_file_text = current_dir().unwrap().to_str().unwrap().to_string(); match self.get_xml_stuff() { Ok(_) => (), Err(_e) => {} } } fn on_key(&mut self, input: KeyboardInput) { match input { KeyboardInput { state: KeyState::Pressed, key_code, .. 
} => match key_code { KeyCode::R => { self.offset = 0; } KeyCode::DownArrow => { self.offset += 1; if self.offset >= self.file_info.len() { self.offset = 0; } } KeyCode::UpArrow => { if self.offset == 0 { self.offset = self.file_info.len() - 1; } else { self.offset -= 1; } } _ => {} }, _ => {} } } fn on_scroll(&mut self, _x: f64, _y: f64) {} fn get_state(&self) -> Self::State { self.clone() } fn set_state(&mut self, state: Self::State) { *self = state; } fn on_size_change(&mut self, _width: f32, _height: f32) {} } impl AsmData { fn get_xml_stuff(&mut self) -> Result<(), Box<dyn Error>> { if !self.file_info.is_empty() { // let name : String = self.file_info.iter().map(|x| format!("{:#?} \n", x)).collect(); // self.xml_file_text = name; return Ok(()); } let before_read = std::time::Instant::now(); let xml_file_bytes = fs::read("onebigfile.xml")?; let xml_file_text = from_utf8(&xml_file_bytes)?; println!("Read file in {}ms", before_read.elapsed().as_millis()); let before_parse = std::time::Instant::now(); let xml = roxmltree::Document::parse(xml_file_text.clone())?; println!("Parsed file in {}ms", before_parse.elapsed().as_millis()); let before_find = std::time::Instant::now(); let file_names = xml .descendants() .filter(|x| x.has_tag_name("iforms")) .find(|x| { x.attribute("title") .unwrap_or("") .contains("Base Instructions") }) .unwrap() .descendants() .filter(|x| x.has_tag_name("iform")) .filter_map(|x| x.attribute("iformfile")); let mut found_file_nodes = vec![]; for file_name in file_names { let file_ndoe = xml .descendants() .find(|x| x.attribute("file") == Some(file_name)); if let Some(file_node) = file_ndoe { found_file_nodes.push(file_node); } } let file_info: Vec<FileInfo> = found_file_nodes .iter() .flat_map(|x| { x.descendants() .filter(|x| x.has_tag_name("instructionsection")) }) .map(|x| { let name = x .attribute("id") .unwrap_or("No file found") .to_ascii_lowercase(); let asm = x .descendants() .filter(|x| x.has_tag_name("asmtemplate")) .map(|x| 
xml_file_text[x.range()].to_string()) .collect(); let desc = x .descendants() .find(|x| x.has_tag_name("desc")) .and_then(|x| x.descendants().find(|x| x.has_tag_name("brief"))) .and_then(|x| x.descendants().find(|x| x.has_tag_name("para"))) .map(|x| x.text().unwrap_or("")) .unwrap_or("") .to_string(); let regdiagram = x .descendants() .find(|x| x.has_tag_name("regdiagram")) .map(|x| { let boxes: Vec<Node> = x.descendants().filter(|x| x.has_tag_name("box")).collect(); boxes }) .unwrap_or_default() .iter() .map(|x| xml_file_text[x.range()].to_string()) .collect(); FileInfo { name, asm, desc, regdiagram, } }) .collect(); self.file_info = file_info.clone(); let _name: String = file_info.iter().map(|x| format!("{:#?} \n", x)).collect(); println!("Found file in {}ms", before_find.elapsed().as_millis()); // self.xml_file_text = format!("Files: {}", name); Ok(()) } } app!(AsmData);
use ::comfy_table::*; use ::comfy_table::presets::UTF8_HORIZONTAL_BORDERS_ONLY; use ::crossterm::style::style; use ::std::string::ToString; use ::pueue::message::*; use ::pueue::state::State; use ::pueue::task::TaskStatus; pub fn print_success(message: String) { println!("{}", message); } pub fn print_error(message: String) { let styled = style(message).with(Color::Red); println!("{}", styled); } /// Print the current state of the daemon in a nicely formatted table pub fn print_state(message: Message, json: bool) { let state = match message { Message::StatusResponse(state) => state, _ => return, }; if json { println!("{}", serde_json::to_string(&state).unwrap()); return; } if state.tasks.len() == 0 { println!("Task list is empty. Add tasks with `pueue add -- [cmd]`"); return; } let mut daemon_status = if state.running { style("Daemon status: running") } else { style("Daemon status: ") }; if state.running { daemon_status = daemon_status.with(Color::Green); } else { daemon_status = daemon_status.with(Color::Yellow); } println!("{}", daemon_status); let mut table = Table::new(); table .set_content_arrangement(ContentArrangement::Dynamic) .load_preset(UTF8_HORIZONTAL_BORDERS_ONLY) .set_header(vec![ Cell::new("Index"), Cell::new("Status"), Cell::new("Exitcode"), Cell::new("Command"), Cell::new("Path"), Cell::new("Start"), Cell::new("End"), ]); for (id, task) in state.tasks { let mut row = Row::new(); // Add a row per time row.add_cell(Cell::new(&id.to_string())); // Add status cell and color depending on state let status_cell = Cell::new(&task.status.to_string()); let status_cell = match task.status { TaskStatus::Running | TaskStatus::Done => status_cell.fg(Color::Green), TaskStatus::Failed | TaskStatus::Killed => status_cell.fg(Color::Red), TaskStatus::Paused => status_cell.fg(Color::White), _ => status_cell.fg(Color::Yellow), }; row.add_cell(status_cell); // Match the color of the exit code // If the exit_code is none, it has been killed by the task handler. 
match task.exit_code { Some(code) => { // Everything that's not 0, is failed task if code == 0 { row.add_cell(Cell::new(&code.to_string()).fg(Color::Green)); } else { row.add_cell(Cell::new(&code.to_string()).fg(Color::Red)); } } None => { row.add_cell(Cell::new("")); } } // Add command and path row.add_cell(Cell::new(&task.command)); row.add_cell(Cell::new(&task.path)); // Add start time, if already set if let Some(start) = task.start { let formatted = start.format("%H:%M").to_string(); row.add_cell(Cell::new(&formatted)); } else { row.add_cell(Cell::new("")); } // Add finish time, if already set if let Some(end) = task.end { let formatted = end.format("%H:%M").to_string(); row.add_cell(Cell::new(&formatted)); } else { row.add_cell(Cell::new("")); } table.add_row(row); } // Print the table println!("{}", table); } /// Print the log ouput of finished tasks. /// Either print the logs of every task /// or only print the logs of the specified tasks. pub fn print_logs(message: Message, task_ids: Vec<usize>, json: bool) { let state = match message { Message::StatusResponse(state) => state, _ => return, }; if json { println!("{}", serde_json::to_string(&state).unwrap()); return; } if !task_ids.is_empty() { for task_id in task_ids { print_log(task_id, &state); } } else { for task_id in state.tasks.keys() { print_log(*task_id, &state); } } } /// Print the log of a single task. 
pub fn print_log(task_id: usize, state: &State) { let task = match state.tasks.get(&task_id) { Some(task) => task, None => return, }; // We only show logs of finished tasks if !vec![TaskStatus::Done, TaskStatus::Failed].contains(&task.status) { return; } let exit_status = match task.exit_code { Some(code) => match code { 0 => style(format!("with exit code {}", code)).with(Color::Green), _ => style(format!("with exit code {}", code)).with(Color::Red), }, None => style("failed to Spawn".to_string()).with(Color::Red), }; // Print task id and exit code println!("\n"); print!( "{}", style(format!("Task {} ", task.id)).attribute(Attribute::Bold) ); println!("{}", exit_status); // Print command and path println!("Command: {}", task.command); println!("Path: {}", task.path); if let Some(start) = task.start { println!("Start: {}", start.to_rfc2822()); } if let Some(end) = task.end { println!("End: {}", end.to_rfc2822()); } if let Some(stdout) = &task.stdout { if !stdout.is_empty() { println!( "{}", style("Std_out: ") .with(Color::Green) .attribute(Attribute::Bold) ); println!("{}", stdout); } } if let Some(stderr) = &task.stderr { if !stderr.is_empty() { println!( "{}", style("Std_err: ") .with(Color::Red) .attribute(Attribute::Bold) ); println!("{}", stderr); } } }
#![allow(clippy::cast_sign_loss)] #![allow(clippy::cast_precision_loss)] #![allow(clippy::cast_possible_truncation)] #![allow(clippy::cast_possible_wrap)] use std::cmp::{max, min}; use std::collections::{HashMap, HashSet, VecDeque}; use std::sync::mpsc::{self, Receiver, Sender}; use intcode; fn main() { let start_time = std::time::Instant::now(); let program = intcode::load_program("day15/input.txt").unwrap_or_else(|err| { println!("Could not load input file!\n{:?}", err); std::process::exit(1); }); let (in_send, in_recv) = mpsc::channel(); let (out_send, out_recv) = mpsc::channel(); let mut computer = intcode::ChannelIOComputer::new(&program, in_recv, out_send); std::thread::spawn(move || { computer.run(); }); // Firstly, use the droid (via the Intcode computer) to construct a map of the entire ship. // Part 1 doesn't require that, but part 2 does, and having it up front makes part 1 simpler. let mut droid = Droid::new(in_send, out_recv); let ship = droid.explore_ship(); // Now we can use the completed map to solve both parts via breadth-first search. Part 1 is // shortest path from origin to oxygen system; part 2 is the "longest shortest path" from the // oxygen system, i.e. the shortest path to the furthest-away point. 
let distance_to_oxygen_system = search_maze(&ship.grid, ship.start, Some(ship.oxygen_system)); let oxygen_distance = search_maze(&ship.grid, ship.oxygen_system, None); println!( "Part 1: {}\nPart 2: {}\nTime: {}ms", distance_to_oxygen_system, oxygen_distance, start_time.elapsed().as_millis() ); } fn _print_ship(ship: &Ship) { for row in 0..ship.grid.len() { for col in 0..ship.grid[row].len() { if row == ship.start.row as usize && col == ship.start.col as usize { print!("D"); } else if row == ship.oxygen_system.row as usize && col == ship.oxygen_system.col as usize { print!("O"); } else { print!("{}", ship.grid[row][col]); } } println!(); } } struct BFSState { pos: Position, distance: usize, } impl PartialEq for BFSState { fn eq(&self, other: &Self) -> bool { self.pos == other.pos } } impl Eq for BFSState {} impl std::hash::Hash for BFSState { fn hash<H>(&self, state: &mut H) where H: std::hash::Hasher, { self.pos.hash(state); } } // Perform a breadth-first search of a supplied maze from a given starting point. // If `stop_at` is provided, returns the length of the shortest path to that point. // Otherwise, returns the length of the longest shortest path to any point. fn search_maze(grid: &[Vec<char>], from: Position, stop_at: Option<Position>) -> usize { let mut queue = VecDeque::new(); let mut seen_states = HashSet::new(); let mut max_distance = 0; queue.push_back(BFSState { pos: from, distance: 0, }); while !queue.is_empty() { let state = queue.pop_front().unwrap(); // Backtrack if we've been here before. if seen_states.contains(&state) { continue; } // Backtrack if we've walked into a wall. if grid[state.pos.row as usize][state.pos.col as usize] == '#' { continue; } if state.distance > max_distance { max_distance = state.distance; } // Stop if we've reached the target. if stop_at.is_some() && state.pos == stop_at.unwrap() { break; } // We're good, so add all adjacent positions to the queue. 
let adjacent_positions = [ state.pos + &Direction::North, state.pos + &Direction::South, state.pos + &Direction::West, state.pos + &Direction::East, ]; for pos in &adjacent_positions { queue.push_back(BFSState { pos: *pos, distance: state.distance + 1, }); } // Remember that we've been here. seen_states.insert(state); } max_distance } #[derive(Clone, Copy)] enum Direction { North, East, South, West, } impl Direction { fn turn_right(self) -> Self { match self { Self::North => Self::East, Self::East => Self::South, Self::South => Self::West, Self::West => Self::North, } } fn turn_left(self) -> Self { match self { Self::North => Self::West, Self::West => Self::South, Self::South => Self::East, Self::East => Self::North, } } fn input(self) -> i64 { match self { Self::North => 1, Self::South => 2, Self::West => 3, Self::East => 4, } } } #[derive(Clone, Copy, PartialEq, Eq, Hash)] struct Position { row: isize, col: isize, } impl Position { fn origin() -> Self { Self { row: 0, col: 0 } } } impl std::ops::Add for Position { type Output = Self; fn add(self, other: Self) -> Self { Self { row: self.row + other.row, col: self.col + other.col, } } } impl std::ops::Sub for Position { type Output = Self; fn sub(self, other: Self) -> Self { Self { row: self.row - other.row, col: self.col - other.col, } } } impl std::ops::Add<&Direction> for Position { type Output = Self; #[allow(clippy::suspicious_arithmetic_impl)] fn add(self, other: &Direction) -> Self { match other { Direction::North => Self { row: self.row - 1, col: self.col, }, Direction::South => Self { row: self.row + 1, col: self.col, }, Direction::West => Self { row: self.row, col: self.col - 1, }, Direction::East => Self { row: self.row, col: self.col + 1, }, } } } enum MoveResult { Wall, Corridor, OxygenSystem, } impl MoveResult { fn from(output: i64) -> Self { match output { 0 => Self::Wall, 1 => Self::Corridor, 2 => Self::OxygenSystem, _ => panic!("Unexpected output from computer: {}", output), } } } // A repository 
of information about a ship layout that we've learned so far. struct LearnedShipInfo { // All the grid locations we've learned about. map: HashMap<Position, char>, // The location of the oxygen system. oxygen_system: Position, // Whether, in exploring this ship, we've moved away from the starting point. // We store this because we're finished when we return to the starting point, // but don't necessarily move off it immediately. moved_off_starting_pos: bool, } impl LearnedShipInfo { fn new() -> Self { let mut info = Self { map: HashMap::new(), oxygen_system: Position::origin(), moved_off_starting_pos: false, }; info.map.insert(Position::origin(), '.'); info } } // Complete information about the ship layout. struct Ship { // A visual representation of the ship layout, as a 2-D vector (row, then column). grid: Vec<Vec<char>>, // The droid's starting location in the ship. start: Position, // The location of the oxygen system. oxygen_system: Position, } impl Ship { fn construct(info: LearnedShipInfo) -> Self { // Calculate boundaries let (min_row, min_col, max_row, max_col) = info.map.keys().fold((0,0,0,0), |(min_row, min_col, max_row, max_col), pos| { let min_row = min(min_row, pos.row); let min_col = min(min_col, pos.col); let max_row = max(max_row, pos.row); let max_col = max(max_col, pos.col); (min_row, min_col, max_row, max_col) }); let width = (max_col - min_col + 1) as usize; let height = (max_row - min_row + 1) as usize; let delta = Position::origin() - Position { row: min_row, col: min_col }; let mut grid: Vec<Vec<char>> = std::iter::repeat(std::iter::repeat(' ').take(height).collect()) .take(width) .collect(); for (pos, c) in info.map { let adjusted_pos = pos + delta; grid[adjusted_pos.row as usize][adjusted_pos.col as usize] = c; } Self { grid, start: delta, oxygen_system: info.oxygen_system + delta, } } } // Our representation of the droid that's exploring the ship, with which we // indirectly communicate via the Intcode computer. 
struct Droid {
    // The droid's current location in the ship.
    pos: Position,
    // A channel to send instructions to the computer for moving the droid.
    tx: Sender<i64>,
    // A channel to receive movement results from the computer.
    rx: Receiver<i64>,
}

impl Droid {
    // The droid always starts at the origin; positions are relative to it.
    fn new(tx: Sender<i64>, rx: Receiver<i64>) -> Self {
        Self {
            pos: Position { row: 0, col: 0 },
            tx,
            rx,
        }
    }

    // Fully explore the ship.
    // Implements a basic "wall follower" algorithm, right-hand rule.
    // Terminates when the droid has left the origin at least once and then
    // returned to it — for a maze whose walls are all connected this means
    // every reachable cell has been visited.
    fn explore_ship(&mut self) -> Ship {
        let mut learned_info = LearnedShipInfo::new();
        let mut dir = Direction::North;
        let origin = Position::origin();
        while !learned_info.moved_off_starting_pos || self.pos != origin {
            dir = self.attempt_move(&mut learned_info, dir);
        }
        Ship::construct(learned_info)
    }

    // Try to move in the specified direction, record what we learn by so doing,
    // and decide which direction to move in next. We simulate the droid keeping
    // its right hand on a wall at all times, so it tries to turn right if it
    // successfully moves forwards, and left if it hits a wall.
    fn attempt_move(&mut self, info: &mut LearnedShipInfo, dir: Direction) -> Direction {
        let target_pos = self.pos + &dir;
        // One move instruction in, one result out; recv blocks until the
        // Intcode computer (running on its own thread) replies.
        self.tx.send(dir.input()).unwrap();
        let (map_char, new_dir, moved) = match MoveResult::from(self.rx.recv().unwrap()) {
            MoveResult::Wall => ('#', dir.turn_left(), false),
            MoveResult::Corridor => ('.', dir.turn_right(), true),
            MoveResult::OxygenSystem => {
                // The oxygen system cell is also walkable corridor.
                info.oxygen_system = target_pos;
                ('.', dir.turn_right(), true)
            }
        };
        if moved {
            self.pos = target_pos;
            info.moved_off_starting_pos = true;
        }
        // Record what occupies the cell we probed, whether or not we moved.
        info.map.insert(target_pos, map_char);
        new_dir
    }
}
use std::{ ffi::c_int, io::{self, Read, Write}, os::unix::{net::UnixStream, process::CommandExt}, process::Command, }; use crate::exec::{ event::{EventRegistry, Process}, io_util::{retry_while_interrupted, was_interrupted}, use_pty::backchannel::{MonitorBackchannel, MonitorMessage, ParentMessage}, }; use crate::exec::{opt_fmt, signal_fmt}; use crate::system::signal::{ consts::*, register_handlers, SignalHandler, SignalHandlerBehavior, SignalNumber, SignalSet, SignalStream, }; use crate::{ exec::{ event::{PollEvent, StopReason}, use_pty::{SIGCONT_BG, SIGCONT_FG}, ProcessOutput, }, log::{dev_error, dev_info, dev_warn}, system::FileCloser, }; use crate::{ exec::{handle_sigchld, terminate_process, HandleSigchld}, system::{ fork, getpgid, getpgrp, interface::ProcessId, kill, setpgid, setsid, term::{PtyFollower, Terminal}, wait::{Wait, WaitError, WaitOptions}, ForkResult, }, }; use super::CommandStatus; // FIXME: This should return `io::Result<!>` but `!` is not stable yet. pub(super) fn exec_monitor( pty_follower: PtyFollower, command: Command, foreground: bool, backchannel: &mut MonitorBackchannel, mut file_closer: FileCloser, original_set: Option<SignalSet>, ) -> io::Result<ProcessOutput> { // SIGTTIN and SIGTTOU are ignored here but the docs state that it shouldn't // be possible to receive them in the first place. Investigate match SignalHandler::register(SIGTTIN, SignalHandlerBehavior::Ignore) { Ok(handler) => handler.forget(), Err(err) => dev_warn!("cannot set handler for SIGTTIN: {err}"), } match SignalHandler::register(SIGTTOU, SignalHandlerBehavior::Ignore) { Ok(handler) => handler.forget(), Err(err) => dev_warn!("cannot set handler for SIGTTOU: {err}"), } // Start a new terminal session with the monitor as the leader. setsid().map_err(|err| { dev_warn!("cannot start a new session: {err}"); err })?; // Set the follower side of the pty as the controlling terminal for the session. 
pty_follower.make_controlling_terminal().map_err(|err| { dev_warn!("cannot set the controlling terminal: {err}"); err })?; // Use a pipe to get the IO error if `exec_command` fails. let (mut errpipe_tx, errpipe_rx) = UnixStream::pair()?; // Don't close the error pipe as we need it to retrieve the error code if the command execution // fails. file_closer.except(&errpipe_tx); // Wait for the parent to give us green light before spawning the command. This avoids race // conditions when the command exits quickly. let event = retry_while_interrupted(|| backchannel.recv()).map_err(|err| { dev_warn!("cannot receive green light from parent: {err}"); err })?; // Given that `UnixStream` delivers messages in order it shouldn't be possible to // receive an event different to `ExecCommand` at the beginning. debug_assert_eq!(event, MonitorMessage::ExecCommand); // FIXME (ogsudo): Some extra config happens here if selinux is available. let ForkResult::Parent(command_pid) = fork().map_err(|err| { dev_warn!("unable to fork command process: {err}"); err })? else { drop(errpipe_rx); let err = exec_command(command, foreground, pty_follower, file_closer, original_set); dev_warn!("failed to execute command: {err}"); // If `exec_command` returns, it means that executing the command failed. Send the error to // the monitor using the pipe. if let Some(error_code) = err.raw_os_error() { errpipe_tx.write_all(&error_code.to_ne_bytes()).ok(); } return Ok(ProcessOutput::ChildExit); }; // Send the command's PID to the parent. if let Err(err) = backchannel.send(&ParentMessage::CommandPid(command_pid)) { dev_warn!("cannot send command PID to parent: {err}"); } let mut registry = EventRegistry::new(); let mut closure = MonitorClosure::new( command_pid, pty_follower, errpipe_rx, backchannel, &mut registry, )?; // Restore the signal mask now that the handlers have been setup. 
if let Some(set) = original_set { if let Err(err) = set.set_mask() { dev_warn!("cannot restore signal mask: {err}"); } } // Set the foreground group for the pty follower. if foreground { if let Err(err) = closure.pty_follower.tcsetpgrp(closure.command_pgrp) { dev_error!( "cannot set foreground progess group to {} (command): {err}", closure.command_pgrp ); } } // FIXME (ogsudo): Here's where the signal mask is removed because the handlers for the signals // have been setup after initializing the closure. // Start the event loop. let reason = registry.event_loop(&mut closure); // Terminate the command if it's not terminated. if let Some(command_pid) = closure.command_pid { terminate_process(command_pid, true); loop { match command_pid.wait(WaitOptions::new()) { Err(WaitError::Io(err)) if was_interrupted(&err) => {} _ => break, } } } // Take the controlling tty so the command's children don't receive SIGHUP when we exit. if let Err(err) = closure.pty_follower.tcsetpgrp(closure.monitor_pgrp) { dev_error!( "cannot set foreground process group to {} (monitor): {err}", closure.monitor_pgrp ); } // Disable nonblocking assetions as we will not poll the backchannel anymore. closure.backchannel.set_nonblocking_assertions(false); match reason { StopReason::Break(err) => match err.try_into() { Ok(msg) => { if let Err(err) = closure.backchannel.send(&msg) { dev_warn!("cannot send message over backchannel: {err}") } } Err(err) => { dev_warn!("socket error `{err:?}` cannot be converted to a message") } }, StopReason::Exit(command_status) => { if let Err(err) = closure.backchannel.send(&command_status.into()) { dev_warn!("cannot send message over backchannel: {err}") } } } // FIXME (ogsudo): The tty is restored here if selinux is available. Ok(ProcessOutput::ChildExit) } // FIXME: This should return `io::Result<!>` but `!` is not stable yet. 
/// Final setup of the command process, ending in `exec`.
///
/// Joins the command's process group, waits (if `foreground`) until the monitor has made
/// that group the foreground group of the pty, closes inherited descriptors, restores the
/// signal mask, and finally execs `command`. Returns the `io::Error` only if something
/// fails — a successful `exec` never returns.
fn exec_command(
    mut command: Command,
    foreground: bool,
    pty_follower: PtyFollower,
    file_closer: FileCloser,
    original_set: Option<SignalSet>,
) -> io::Error {
    // FIXME (ogsudo): Do any additional configuration that needs to be run after `fork` but before `exec`
    let command_pid = std::process::id() as ProcessId;
    setpgid(0, command_pid).ok();
    // Wait for the monitor to set us as the foreground group for the pty if we are in the
    // foreground.
    if foreground {
        while !pty_follower.tcgetpgrp().is_ok_and(|pid| pid == command_pid) {
            // Busy-wait with a tiny sleep until the monitor flips the foreground group.
            std::thread::sleep(std::time::Duration::from_micros(1));
        }
    }
    // Done with the pty follower.
    drop(pty_follower);
    // Close every descriptor the command must not inherit; abort the exec on failure.
    if let Err(err) = file_closer.close_the_universe() {
        return err;
    }
    // Restore the signal mask now that the handlers have been setup.
    if let Some(set) = original_set {
        if let Err(err) = set.set_mask() {
            dev_warn!("cannot restore signal mask: {err}");
        }
    }
    command.exec()
}

/// Event-loop state for the monitor process which supervises the command.
struct MonitorClosure<'a> {
    /// The command PID.
    ///
    /// This is `Some` iff the process is still running.
    command_pid: Option<ProcessId>,
    // Process group of the command (set to the command's PID in `new`).
    command_pgrp: ProcessId,
    // Process group of the monitor itself.
    monitor_pgrp: ProcessId,
    pty_follower: PtyFollower,
    // Read end of the pipe carrying the command's exec error code, if any.
    errpipe_rx: UnixStream,
    backchannel: &'a mut MonitorBackchannel,
    signal_stream: &'static SignalStream,
    // Kept alive so the handlers stay registered for the closure's lifetime.
    _signal_handlers: [SignalHandler; MonitorClosure::SIGNALS.len()],
}

impl<'a> MonitorClosure<'a> {
    /// Signals the monitor listens for and (mostly) forwards to the command.
    const SIGNALS: [SignalNumber; 8] = [
        SIGINT, SIGQUIT, SIGTSTP, SIGTERM, SIGHUP, SIGUSR1, SIGUSR2, SIGCHLD,
    ];

    /// Registers all event sources (error pipe, backchannel, signal stream) with the
    /// registry and puts the command into its own process group.
    fn new(
        command_pid: ProcessId,
        pty_follower: PtyFollower,
        errpipe_rx: UnixStream,
        backchannel: &'a mut MonitorBackchannel,
        registry: &mut EventRegistry<Self>,
    ) -> io::Result<Self> {
        // Store the pgid of the monitor.
        let monitor_pgrp = getpgrp();

        // Register the callback to receive the IO error if the command fails to execute.
        registry.register_event(&errpipe_rx, PollEvent::Readable, |_| {
            MonitorEvent::ReadableErrPipe
        });

        // Enable nonblocking assertions as we will poll this inside the event loop.
        backchannel.set_nonblocking_assertions(true);

        // Register the callback to receive events from the backchannel
        registry.register_event(backchannel, PollEvent::Readable, |_| {
            MonitorEvent::ReadableBackchannel
        });

        let signal_stream = SignalStream::init()?;

        registry.register_event(signal_stream, PollEvent::Readable, |_| MonitorEvent::Signal);

        let signal_handlers = register_handlers(Self::SIGNALS)?;

        // Put the command in its own process group.
        let command_pgrp = command_pid;
        if let Err(err) = setpgid(command_pid, command_pgrp) {
            dev_warn!("cannot set process group ID for process: {err}");
        };

        Ok(Self {
            command_pid: Some(command_pid),
            command_pgrp,
            monitor_pgrp,
            pty_follower,
            errpipe_rx,
            backchannel,
            signal_stream,
            _signal_handlers: signal_handlers,
        })
    }

    /// Based on `mon_backchannel_cb`
    fn read_backchannel(&mut self, registry: &mut EventRegistry<Self>) {
        match self.backchannel.recv() {
            Err(err) => {
                // We can try later if receive is interrupted.
                if err.kind() != io::ErrorKind::Interrupted {
                    // There's something wrong with the backchannel, break the event loop.
                    dev_warn!("cannot read from backchannel: {err}");
                    registry.set_break(err);
                }
            }
            Ok(event) => {
                match event {
                    // We shouldn't receive this event more than once.
                    MonitorMessage::ExecCommand => unreachable!(),
                    // Forward signal to the command.
                    MonitorMessage::Signal(signal) => {
                        if let Some(command_pid) = self.command_pid {
                            self.send_signal(signal, command_pid, true)
                        }
                    }
                }
            }
        }
    }

    /// Reads the command's exec error code from the error pipe and forwards it to the
    /// parent over the backchannel.
    fn read_errpipe(&mut self, registry: &mut EventRegistry<Self>) {
        // Buffer sized to hold exactly one native-endian i32 error code.
        let mut buf = 0i32.to_ne_bytes();
        match self.errpipe_rx.read_exact(&mut buf) {
            Err(err) if was_interrupted(&err) => { /* Retry later */ }
            Err(err) => registry.set_break(err),
            Ok(_) => {
                // Received error code from the command, forward it to the parent.
                let error_code = i32::from_ne_bytes(buf);
                self.backchannel
                    .send(&ParentMessage::IoError(error_code))
                    .ok();
            }
        }
    }

    /// Send a signal to the command.
    fn send_signal(&self, signal: c_int, command_pid: ProcessId, from_parent: bool) {
        dev_info!(
            "sending {}{} to command",
            signal_fmt(signal),
            opt_fmt(from_parent, " from parent"),
        );
        // FIXME: We should call `killpg` instead of `kill`.
        match signal {
            SIGALRM => {
                terminate_process(command_pid, false);
            }
            SIGCONT_FG => {
                // Continue with the command as the foreground process group
                if let Err(err) = self.pty_follower.tcsetpgrp(self.command_pgrp) {
                    dev_error!(
                        "cannot set the foreground process group to {} (command): {err}",
                        self.command_pgrp
                    );
                }
                kill(command_pid, SIGCONT).ok();
            }
            SIGCONT_BG => {
                // Continue with the monitor as the foreground process group
                if let Err(err) = self.pty_follower.tcsetpgrp(self.monitor_pgrp) {
                    dev_error!(
                        "cannot set the foreground process group to {} (monitor): {err}",
                        self.monitor_pgrp
                    );
                }
                kill(command_pid, SIGCONT).ok();
            }
            signal => {
                // Send the signal to the command.
                kill(command_pid, signal).ok();
            }
        }
    }

    /// Handles a signal delivered to the monitor: reaps the command on SIGCHLD,
    /// otherwise forwards the signal unless it originated from the command itself.
    fn on_signal(&mut self, registry: &mut EventRegistry<Self>) {
        let info = match self.signal_stream.recv() {
            Ok(info) => info,
            Err(err) => {
                dev_error!("could not receive signal: {err}");
                return;
            }
        };

        dev_info!(
            "monitor received{} {} from {}",
            opt_fmt(info.is_user_signaled(), " user signaled"),
            info.signal(),
            info.pid()
        );

        // Don't do anything if the command has terminated already
        let Some(command_pid) = self.command_pid else {
            dev_info!("command was terminated, ignoring signal");
            return;
        };

        match info.signal() {
            SIGCHLD => handle_sigchld(self, registry, "command", command_pid),
            // Skip the signal if it was sent by the user and it is self-terminating.
            _ if info.is_user_signaled()
                && is_self_terminating(info.pid(), command_pid, self.command_pgrp) => {}
            signal => self.send_signal(signal, command_pid, false),
        }
    }
}

/// Decides if the signal sent by the process with `signaler_pid` PID is self-terminating.
///
/// A signal is self-terminating if `signaler_pid`:
/// - is the same PID of the command, or
/// - is in the process group of the command and the command is the leader.
fn is_self_terminating(
    signaler_pid: ProcessId,
    command_pid: ProcessId,
    command_pgrp: ProcessId,
) -> bool {
    // NOTE(review): a zero `signaler_pid` is treated as "unknown sender" and is never
    // considered self-terminating — confirm this matches the signal-info source upstream.
    if signaler_pid != 0 {
        if signaler_pid == command_pid {
            return true;
        }

        // Sender is in the command's process group and the command leads it.
        if let Ok(grp_leader) = getpgid(signaler_pid) {
            if grp_leader == command_pgrp {
                return true;
            }
        }
    }

    false
}

/// Events dispatched by the monitor's event loop.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum MonitorEvent {
    // A signal arrived on the signal stream.
    Signal,
    // The command's exec-error pipe became readable.
    ReadableErrPipe,
    // A message from the parent arrived on the backchannel.
    ReadableBackchannel,
}

impl<'a> Process for MonitorClosure<'a> {
    type Event = MonitorEvent;
    type Break = io::Error;
    type Exit = CommandStatus;

    // Dispatch each event to the matching handler on the closure.
    fn on_event(&mut self, event: Self::Event, registry: &mut EventRegistry<Self>) {
        match event {
            MonitorEvent::Signal => self.on_signal(registry),
            MonitorEvent::ReadableErrPipe => self.read_errpipe(registry),
            MonitorEvent::ReadableBackchannel => self.read_backchannel(registry),
        }
    }
}

impl<'a> HandleSigchld for MonitorClosure<'a> {
    // Also report stopped children, and never block while waiting.
    const OPTIONS: WaitOptions = WaitOptions::new().untraced().no_hang();

    // Command exited normally: record the exit code and mark it no longer running.
    fn on_exit(&mut self, exit_code: c_int, registry: &mut EventRegistry<Self>) {
        registry.set_exit(CommandStatus::Exit(exit_code));
        self.command_pid = None;
    }

    // Command was killed by a signal: record the signal and mark it no longer running.
    fn on_term(&mut self, signal: c_int, registry: &mut EventRegistry<Self>) {
        registry.set_exit(CommandStatus::Term(signal));
        self.command_pid = None;
    }

    // Command stopped (e.g. SIGTSTP): remember the pty's foreground group and tell the
    // parent, but keep the event loop running.
    fn on_stop(&mut self, signal: c_int, _registry: &mut EventRegistry<Self>) {
        // Save the foreground process group ID so we can restore it later.
        if let Ok(pgrp) = self.pty_follower.tcgetpgrp() {
            if pgrp != self.monitor_pgrp {
                self.command_pgrp = pgrp;
            }
        }
        self.backchannel
            .send(&CommandStatus::Stop(signal).into())
            .ok();
    }
}
// Proc-macro expansion helpers. Every generated expression threads the implicit `ctx`
// runtime context, and single arguments are wrapped as one-element tuples `(#arg,)` so
// callees always receive their arguments as a tuple.

use crate::new_id;

use proc_macro as pm;
use proc_macro::TokenStream;
use proc_macro2 as pm2;

/// Expands `f(a, b)` into `f((a, b), ctx)` — tuples the arguments and appends `ctx`.
pub fn call(input: syn::Expr) -> TokenStream {
    match input {
        syn::Expr::Call(e) => {
            let func = e.func;
            let args = e.args;
            // A single argument without a trailing comma needs one added to form a tuple.
            if args.len() == 1 && !args.trailing_punct() {
                quote::quote!(#func((#args,), ctx)).into()
            } else {
                quote::quote!(#func((#args), ctx)).into()
            }
        }
        _ => panic!("Expected function call expression"),
    }
}

/// Like [`call`], but awaits the result: `f(a, b)` becomes `f((a, b), ctx).await`.
pub fn call_async(input: syn::Expr) -> TokenStream {
    match input {
        syn::Expr::Call(e) => {
            let func = e.func;
            let args = e.args;
            if args.len() == 1 && !args.trailing_punct() {
                quote::quote!(#func((#args,), ctx).await).into()
            } else {
                quote::quote!(#func((#args), ctx).await).into()
            }
        }
        _ => panic!("Expected function call expression"),
    }
}

/// Like [`call`], but invokes through a function-pointer field: `(f.ptr)((a, b), ctx)`.
pub fn call_indirect(input: syn::Expr) -> TokenStream {
    match input {
        syn::Expr::Call(e) => {
            let func = e.func;
            let args = e.args;
            if args.len() == 1 && !args.trailing_punct() {
                quote::quote!((#func.ptr)((#args,), ctx)).into()
            } else {
                quote::quote!((#func.ptr)((#args), ctx)).into()
            }
        }
        _ => panic!("Expected function call expression"),
    }
}

/// `enwrap!(Path, data)` — wraps `data` in the concrete enum variant and allocates it.
pub fn enwrap(input: TokenStream) -> TokenStream {
    let mut iter = input.into_iter();
    let mut path: syn::Path = parse(&mut iter);
    concrete_enum_path(&mut path);
    let data: syn::Expr = parse(&mut iter);
    quote::quote!(#path(#data).alloc(ctx)).into()
}

/// `is!(Path, data)` — tests whether the boxed value matches the concrete enum variant.
pub fn is(input: TokenStream) -> TokenStream {
    let mut iter = input.into_iter();
    let mut path: syn::Path = parse(&mut iter);
    concrete_enum_path(&mut path);
    let data: syn::Expr = parse(&mut iter);
    quote::quote!(matches!(*#data.0.clone(), #path(_))).into()
}

/// `unwrap!(Path, expr)` — extracts the variant's payload; unreachable on mismatch.
pub fn unwrap(input: TokenStream) -> TokenStream {
    let mut iter = input.into_iter();
    let mut path: syn::Path = parse(&mut iter);
    concrete_enum_path(&mut path);
    let expr: syn::Expr = parse(&mut iter);
    quote::quote!(if let #path(v) = &*#expr.0 { v.clone() } else { unreachable!() }).into()
}

/// `new!(Struct { .. })` — builds the concrete struct and allocates it in `ctx`.
pub fn new(input: TokenStream) -> TokenStream {
    let mut iter = input.into_iter();
    let mut data: syn::ExprStruct = parse(&mut iter);
    concrete_struct_path(&mut data.path);
    quote::quote!((#data).alloc(ctx)).into()
}

/// `vector!(a, b, c)` — forwards the elements to the runtime `_vector!` constructor.
pub fn vector(input: TokenStream) -> TokenStream {
    let mut iter = input.into_iter();
    let data: Vec<syn::Expr> = parse_all(&mut iter);
    quote::quote!(_vector!([#(#data),*], ctx)).into()
}

/// `erase!(expr, Type)` — generates a type-erasure wrapper for `Type` and erases `expr`.
pub fn erase(input: TokenStream) -> TokenStream {
    let mut iter = input.into_iter();
    let expr: syn::Expr = parse(&mut iter);
    let ident: syn::Ident = parse(&mut iter);
    let (wrapper_impl, wrapper_cons) = generate_wrapper(&ident);
    let wrapper = wrapper_cons(expr);
    quote::quote!(
        {
            #wrapper_impl
            Erased::erase(#wrapper, ctx)
        }
    )
    .into()
}

/// `unerase!(expr, Type)` — inverse of [`erase`]: recovers the concrete `Type`.
pub fn unerase(input: TokenStream) -> TokenStream {
    let mut iter = input.into_iter();
    let expr: syn::Expr = parse(&mut iter);
    let id: syn::Ident = parse(&mut iter);
    let (wrapper_impl, _) = generate_wrapper(&id);
    quote::quote!(
        {
            #wrapper_impl
            Erased::unerase::<#id>(#expr, ctx)
        }
    )
    .into()
}

/// `push!(channel, data)` — awaited, fallible channel push.
pub fn push(input: TokenStream) -> TokenStream {
    let mut iter = input.into_iter();
    let channel: syn::Expr = parse(&mut iter);
    let data: syn::Expr = parse(&mut iter);
    quote::quote!(#channel.push(#data, ctx).await?).into()
}

/// `pull!(channel)` — awaited, fallible channel pull.
pub fn pull(input: TokenStream) -> TokenStream {
    let mut iter = input.into_iter();
    let channel: syn::Expr = parse(&mut iter);
    quote::quote!(#channel.pull(ctx).await?).into()
}

/// Create a future for pulling data from a channel.
pub fn pull_transition(input: TokenStream) -> TokenStream {
    let mut iter = input.into_iter();
    let future: syn::Pat = parse(&mut iter);
    let pullable: syn::Expr = parse(&mut iter);
    let state: syn::Expr = parse(&mut iter);
    quote::quote!(
        {
            // Clone so the spawned future owns its own handle to the pullable.
            let mut tmp = #pullable.clone();
            let #future = async move { tmp.pull(ctx).await }.boxed();
            transition!(#state);
        }
    )
    .into()
}

/// Create a future for pushing data into a channel.
pub fn push_transition(input: TokenStream) -> TokenStream { let mut iter = input.into_iter(); let future: syn::Pat = parse(&mut iter); let pushable: syn::Expr = parse(&mut iter); let data: syn::Expr = parse(&mut iter); let state: syn::Expr = parse(&mut iter); quote::quote!( { let mut tmp = #pushable.clone(); let #future = async move { tmp.push(#data, ctx).await }.boxed(); transition!(#state); } ) .into() } // /// Transition to a new state. pub fn transition(input: TokenStream) -> TokenStream { let mut iter = input.into_iter(); let state: syn::Expr = parse(&mut iter); quote::quote!(return (Pending, #state.into())).into() } // /// Terminate the state machine. pub fn terminate(input: TokenStream) -> TokenStream { let mut iter = input.into_iter(); let state: syn::Expr = parse(&mut iter); quote::quote!(return (Ready(()), #state.into())).into() } // /// Wait until a future completes. pub fn wait(input: TokenStream) -> TokenStream { let mut iter = input.into_iter(); let arg: syn::Expr = parse(&mut iter); let cx: syn::Expr = parse(&mut iter); let finished: syn::Expr = parse(&mut iter); let pending: syn::Expr = parse(&mut iter); quote::quote!( match #arg.as_mut().poll(#cx) { Ready(Finished) => terminate!(#finished), Ready(Continue(x)) => x, Pending => transition!(#pending), } ) .into() } fn generate_wrapper(id: &syn::Ident) -> (pm2::TokenStream, impl Fn(syn::Expr) -> pm2::TokenStream) { let span = id.span().unwrap().start(); let line = span.line; let column = span.column; let abstract_id: syn::Ident = new_id(format!("Wrapper_{}_{}", line, column)); let concrete_id: syn::Ident = new_id(format!("ConcreteWrapper_{}_{}", line, column)); let sharable_wrapper_mod_id = new_id(format!("sharable_{}", abstract_id)); let sendable_wrapper_mod_id = new_id(format!("sendable_{}", abstract_id)); let wrapper_impl = quote::quote!( mod #sharable_wrapper_mod_id { use arc_runtime::prelude::*; #[derive(Clone, Debug, Send, Sync, Unpin, From, Deref, Abstract, Collectable, Finalize, Trace)] 
#[repr(transparent)] pub struct #abstract_id(pub #concrete_id); #[derive(Clone, Debug, Collectable, Finalize, Trace)] #[repr(transparent)] pub struct #concrete_id(pub super::#id); } mod #sendable_wrapper_mod_id { use arc_runtime::prelude::*; #[derive(Clone, Debug, Deref, From, Abstract, Serialize, Deserialize)] #[repr(transparent)] pub struct #abstract_id(pub #concrete_id); #[derive(Clone, Debug, Serialize, Deserialize)] #[repr(transparent)] pub struct #concrete_id(pub <super::#id as DynSharable>::T); } impl DynSharable for #sharable_wrapper_mod_id::#abstract_id { type T = <Erased as DynSharable>::T; fn into_sendable(&self, ctx: Context) -> Self::T { Self::T::erase(#sendable_wrapper_mod_id::#abstract_id(#sendable_wrapper_mod_id::#concrete_id(self.0.0.into_sendable(ctx))), ctx) } } impl DynSendable for #sendable_wrapper_mod_id::#abstract_id { type T = Erased; fn into_sharable(&self, ctx: Context) -> Self::T { Self::T::erase(#sharable_wrapper_mod_id::#abstract_id(#sharable_wrapper_mod_id::#concrete_id(self.0.0.into_sharable(ctx))), ctx) } } ); let wrapper_cons = move |expr| quote::quote!(#sharable_wrapper_mod_id::#abstract_id(#sharable_wrapper_mod_id::#concrete_id(#expr))); (wrapper_impl, wrapper_cons) } fn concrete_enum_path(path: &mut syn::Path) { let mut x = path.segments.iter_mut(); match (x.next(), x.next(), x.next()) { (Some(_), Some(i), Some(_)) => i.ident = new_id(format!("Concrete{}", i.ident)), (Some(i), Some(_), None) => i.ident = new_id(format!("Concrete{}", i.ident)), (Some(_), None, None) => {} _ => unreachable!(), } } fn concrete_struct_path(path: &mut syn::Path) { let mut x = path.segments.iter_mut(); match (x.next(), x.next()) { (Some(_), Some(i)) => i.ident = new_id(format!("Concrete{}", i.ident)), (Some(i), None) => i.ident = new_id(format!("Concrete{}", i.ident)), _ => unreachable!(), } } fn parse<T: syn::parse::Parse>(input: &mut impl Iterator<Item = pm::TokenTree>) -> T { let mut stream = pm::TokenStream::new(); while let Some(token) = 
input.next() { match token { pm::TokenTree::Punct(t) if t.as_char() == ',' => break, _ => stream.extend([token]), } } syn::parse::<T>(stream).unwrap() } fn parse_all<T: syn::parse::Parse>(input: &mut impl Iterator<Item = pm::TokenTree>) -> Vec<T> { let mut nodes = Vec::new(); let mut stream = pm::TokenStream::new(); while let Some(token) = input.next() { match token { pm::TokenTree::Punct(t) if t.as_char() == ',' => { nodes.push(syn::parse::<T>(stream).unwrap()); stream = pm::TokenStream::new(); } _ => stream.extend([token]), } } nodes }
//! The following module provides an implementation of [UrlFormatter] for https, and the server
//! code which responds to formatted urls.
//!
//! This is the code that replies to the url tickets generated by [HtsGet], in the case of [LocalStorage].
//!

use std::fs::File;
use std::io::BufReader;
use std::net::{AddrParseError, SocketAddr};
use std::path::Path;
use std::pin::Pin;
use std::sync::Arc;

use axum::Router;
use axum_extra::routing::SpaRouter;
use futures_util::future::poll_fn;
use hyper::server::accept::Accept;
use hyper::server::conn::{AddrIncoming, Http};
use rustls_pemfile::{certs, pkcs8_private_keys};
use tokio::net::TcpListener;
use tokio_rustls::rustls::{Certificate, PrivateKey, ServerConfig};
use tokio_rustls::TlsAcceptor;
use tower::MakeService;
use tower_http::trace::TraceLayer;
use tracing::instrument;
use tracing::{info, trace};

use htsget_config::config::cors::CorsConfig;
use htsget_config::config::{CertificateKeyPair, DataServerConfig};
use htsget_config::types::Scheme;

use crate::storage::configure_cors;
use crate::storage::StorageError::{IoError, ServerError};

use super::{Result, StorageError};

/// The maximum amount of time a CORS request can be cached for.
pub const CORS_MAX_AGE: u64 = 86400;

/// Ticket server url formatter.
#[derive(Debug, Clone)]
pub struct BindDataServer {
    // Address to bind; updated to the actual bound address by `bind_data_server`.
    addr: SocketAddr,
    // `Some` implies HTTPS; taken (replaced with `None`) when the server is bound.
    cert_key_pair: Option<CertificateKeyPair>,
    scheme: Scheme,
    cors: CorsConfig,
    // Path prefix the static files are served under.
    serve_at: String,
}

impl BindDataServer {
    /// Plain-HTTP formatter (no certificate/key pair).
    pub fn new(addr: SocketAddr, cors: CorsConfig, serve_at: String) -> Self {
        Self {
            addr,
            cert_key_pair: None,
            scheme: Scheme::Http,
            cors,
            serve_at,
        }
    }

    /// HTTPS formatter using the given certificate/key pair.
    pub fn new_with_tls(
        addr: SocketAddr,
        cors: CorsConfig,
        tls: CertificateKeyPair,
        serve_at: String,
    ) -> Self {
        Self {
            addr,
            cert_key_pair: Some(tls),
            scheme: Scheme::Https,
            cors,
            serve_at,
        }
    }

    /// Get the scheme this formatter is using - either HTTP or HTTPS.
    pub fn get_scheme(&self) -> &Scheme {
        &self.scheme
    }

    /// Eagerly bind the address by returning a `DataServer`. This function also updates the
    /// address to the actual bound address, and replaces the cert_key_pair with None.
    pub async fn bind_data_server(&mut self) -> Result<DataServer> {
        let server = DataServer::bind_addr(
            self.addr,
            &self.serve_at,
            // `take` moves the pair into the server so a second bind would be plain HTTP.
            self.cert_key_pair.take(),
            self.cors.clone(),
        )
        .await?;
        self.addr = server.local_addr();
        Ok(server)
    }

    /// Get the [SocketAddr] of this formatter.
    pub fn get_addr(&self) -> SocketAddr {
        self.addr
    }
}

impl From<DataServerConfig> for BindDataServer {
    /// Returns a ticket server with TLS enabled if the tls config is not None or without TLS enabled
    /// if it is None.
    fn from(config: DataServerConfig) -> Self {
        let addr = config.addr();
        let cors = config.cors().clone();
        let serve_at = config.serve_at().to_string();
        match config.into_tls() {
            None => Self::new(addr, cors, serve_at),
            Some(tls) => Self::new_with_tls(addr, cors, tls, serve_at),
        }
    }
}

impl From<AddrParseError> for StorageError {
    fn from(err: AddrParseError) -> Self {
        StorageError::InvalidAddress(err)
    }
}

/// The local storage static http server.
#[derive(Debug)]
pub struct DataServer {
    // Already-bound listener; its local address may differ from the requested one
    // (e.g. when binding port 0).
    listener: AddrIncoming,
    serve_at: String,
    cert_key_pair: Option<CertificateKeyPair>,
    cors: CorsConfig,
}

impl DataServer {
    /// Eagerly bind the address for use with the server, returning any errors.
    #[instrument(skip(serve_at, cert_key_pair))]
    pub async fn bind_addr(
        addr: SocketAddr,
        serve_at: impl Into<String>,
        cert_key_pair: Option<CertificateKeyPair>,
        cors: CorsConfig,
    ) -> Result<DataServer> {
        let listener = TcpListener::bind(addr)
            .await
            .map_err(|err| IoError("binding data server addr".to_string(), err))?;
        let listener = AddrIncoming::from_listener(listener)?;

        info!(address = ?listener.local_addr(), "data server address bound to");
        Ok(Self {
            listener,
            serve_at: serve_at.into(),
            cert_key_pair,
            cors,
        })
    }

    /// Run the actual server, using the provided path, key and certificate.
    #[instrument(level = "trace", skip_all)]
    pub async fn serve<P: AsRef<Path>>(mut self, path: P) -> Result<()> {
        // Static-file router with CORS and request tracing layers applied.
        let mut app = Router::new()
            .merge(SpaRouter::new(&self.serve_at, path))
            .layer(configure_cors(self.cors)?)
            .layer(TraceLayer::new_for_http())
            .into_make_service_with_connect_info::<SocketAddr>();
        match self.cert_key_pair {
            // Plain HTTP: hand the listener straight to axum.
            None => axum::Server::builder(self.listener)
                .serve(app)
                .await
                .map_err(|err| ServerError(err.to_string())),
            // HTTPS: manual accept loop so each connection can complete a TLS handshake
            // before being served.
            Some(tls) => {
                let rustls_config = Self::rustls_server_config(tls.key(), tls.cert())?;
                let acceptor = TlsAcceptor::from(Arc::new(rustls_config));
                loop {
                    let stream = poll_fn(|cx| Pin::new(&mut self.listener).poll_accept(cx))
                        .await
                        .ok_or_else(|| ServerError("poll accept failed".to_string()))?
                        .map_err(|err| ServerError(err.to_string()))?;
                    let acceptor = acceptor.clone();
                    let app = app
                        .make_service(&stream)
                        .await
                        .map_err(|err| ServerError(err.to_string()))?;
                    trace!(stream = ?stream, "accepting stream");
                    // Handshake and serve each connection on its own task; handshake
                    // failures are silently dropped.
                    tokio::spawn(async move {
                        if let Ok(stream) = acceptor.accept(stream).await {
                            let _ = Http::new().serve_connection(stream, app).await;
                        }
                    });
                }
            }
        }
    }

    /// Get the local address the server has bound to.
    pub fn local_addr(&self) -> SocketAddr {
        self.listener.local_addr()
    }

    /// Load TLS server config.
    ///
    /// Reads a PKCS#8 private key and a certificate chain from PEM files and builds a
    /// rustls `ServerConfig` advertising h2 and http/1.1 via ALPN. Only the first
    /// private key in the key file is used.
    pub fn rustls_server_config<P: AsRef<Path>>(key: P, cert: P) -> Result<ServerConfig> {
        let mut key_reader = BufReader::new(
            File::open(key).map_err(|err| IoError("failed to open key file".to_string(), err))?,
        );
        let mut cert_reader = BufReader::new(
            File::open(cert).map_err(|err| IoError("failed to open cert file".to_string(), err))?,
        );

        let key = PrivateKey(
            pkcs8_private_keys(&mut key_reader)
                .map_err(|err| IoError("failed to read private keys".to_string(), err))?
                .into_iter()
                .next()
                .ok_or_else(|| ServerError("no private key found".to_string()))?,
        );
        let certs = certs(&mut cert_reader)
            .map_err(|err| IoError("failed to read certificate".to_string(), err))?
            .into_iter()
            .map(Certificate)
            .collect();

        let mut config = ServerConfig::builder()
            .with_safe_defaults()
            .with_no_client_auth()
            .with_single_cert(certs, key)
            .map_err(|err| ServerError(err.to_string()))?;
        config.alpn_protocols = vec![b"h2".to_vec(), b"http/1.1".to_vec()];

        Ok(config)
    }
}

impl From<hyper::Error> for StorageError {
    fn from(error: hyper::Error) -> Self {
        ServerError(error.to_string())
    }
}

#[cfg(test)]
mod tests {
    use std::str::FromStr;

    use async_trait::async_trait;
    use http::header::HeaderName;
    use http::{HeaderMap, HeaderValue, Method};
    use reqwest::{Client, ClientBuilder, RequestBuilder};

    use htsget_test::cors_tests::{test_cors_preflight_request_uri, test_cors_simple_request_uri};
    use htsget_test::http_tests::{
        default_cors_config, default_test_config, Header, Response as TestResponse, TestRequest,
        TestServer,
    };
    use htsget_test::util::generate_test_certificates;

    use crate::storage::local::tests::create_local_test_files;
    use crate::Config;

    use super::*;

    // Test harness server wrapper holding the shared test config.
    struct DataTestServer {
        config: Config,
    }

    // Builder-style wrapper around a reqwest request for the harness.
    struct DataTestRequest {
        client: Client,
        headers: HeaderMap,
        payload: String,
        method: Method,
        uri: String,
    }

    impl TestRequest for DataTestRequest {
        fn insert_header(mut self, header: Header<impl Into<String>>) -> Self {
            self.headers.insert(
                HeaderName::from_str(&header.name.into()).unwrap(),
                HeaderValue::from_str(&header.value.into()).unwrap(),
            );
            self
        }

        fn set_payload(mut self, payload: impl Into<String>) -> Self {
            self.payload = payload.into();
            self
        }

        fn uri(mut self, uri: impl Into<String>) -> Self {
            self.uri = uri.into().parse().unwrap();
            self
        }

        fn method(mut self, method: impl Into<String>) -> Self {
            self.method = method.into().parse().unwrap();
            self
        }
    }

    impl DataTestRequest {
        // Turn the accumulated parts into a concrete reqwest builder.
        fn build(self) -> RequestBuilder {
            self.client
                .request(self.method, self.uri)
                .headers(self.headers)
                .body(self.payload)
        }
    }

    impl Default for DataTestRequest {
        fn default() -> Self {
            Self {
                // Accept self-signed certs so the TLS tests can use generated certificates.
                client: ClientBuilder::new()
                    .danger_accept_invalid_certs(true)
                    .use_rustls_tls()
                    .build()
                    .unwrap(),
                headers: HeaderMap::default(),
                payload: "".to_string(),
                method: Method::GET,
                uri: "".to_string(),
            }
        }
    }

    impl Default for DataTestServer {
        fn default() -> Self {
            Self {
                config: default_test_config(),
            }
        }
    }

    #[async_trait(?Send)]
    impl TestServer<DataTestRequest> for DataTestServer {
        async fn get_expected_path(&self) -> String {
            "".to_string()
        }

        fn get_config(&self) -> &Config {
            &self.config
        }

        fn get_request(&self) -> DataTestRequest {
            DataTestRequest::default()
        }

        async fn test_server(&self, request: DataTestRequest, expected_path: String) -> TestResponse {
            let response = request.build().send().await.unwrap();
            let status: u16 = response.status().into();
            let headers = response.headers().clone();
            let bytes = response.bytes().await.unwrap().to_vec();

            TestResponse::new(status, headers, bytes, expected_path)
        }
    }

    #[tokio::test]
    async fn test_http_server() {
        let (_, base_path) = create_local_test_files().await;
        test_server("http", None, base_path.path().to_path_buf()).await;
    }

    #[tokio::test]
    async fn test_tls_server() {
        let (_, base_path) = create_local_test_files().await;
        let (key_path, cert_path) = generate_test_certificates(base_path.path(), "key.pem", "cert.pem");

        test_server(
            "https",
            Some(CertificateKeyPair::new(cert_path, key_path)),
            base_path.path().to_path_buf(),
        )
        .await;
    }

    #[test]
    fn http_scheme() {
        let formatter = BindDataServer::new(
            "127.0.0.1:8080".parse().unwrap(),
            CorsConfig::default(),
            "/data".to_string(),
        );
        assert_eq!(formatter.get_scheme(), &Scheme::Http);
    }

    #[test]
    fn https_scheme() {
        assert_eq!(tls_formatter().get_scheme(), &Scheme::Https);
    }

    #[tokio::test]
    async fn test_rustls_server_config() {
        let (_, base_path) = create_local_test_files().await;
        let (key_path, cert_path) = generate_test_certificates(base_path.path(), "key.pem", "cert.pem");

        let server_config = DataServer::rustls_server_config(key_path, cert_path).unwrap();
        assert_eq!(
            server_config.alpn_protocols,
            vec![b"h2".to_vec(), b"http/1.1".to_vec()]
        );
    }

    #[tokio::test]
    async fn get_addr_local_addr() {
        let mut formatter = BindDataServer::new(
            "127.0.0.1:0".parse().unwrap(),
            CorsConfig::default(),
            "/data".to_string(),
        );
        let server = formatter.bind_data_server().await.unwrap();

        assert_eq!(formatter.get_addr(), server.local_addr());
    }

    #[tokio::test]
    async fn cors_simple_response() {
        let (_, base_path) = create_local_test_files().await;
        let port = start_server(None, base_path.path().to_path_buf()).await;

        test_cors_simple_request_uri(
            &DataTestServer::default(),
            &format!("http://localhost:{port}/data/key1"),
        )
        .await;
    }

    #[tokio::test]
    async fn cors_options_response() {
        let (_, base_path) = create_local_test_files().await;
        let port = start_server(None, base_path.path().to_path_buf()).await;

        test_cors_preflight_request_uri(
            &DataTestServer::default(),
            &format!("http://localhost:{port}/data/key1"),
        )
        .await;
    }

    // Formatter with dummy (empty) cert/key paths, sufficient for scheme checks only.
    fn tls_formatter() -> BindDataServer {
        BindDataServer::new_with_tls(
            "127.0.0.1:8080".parse().unwrap(),
            CorsConfig::default(),
            CertificateKeyPair::new("".parse().unwrap(), "".parse().unwrap()),
            "/data".to_string(),
        )
    }

    // Binds on an ephemeral port, spawns the server in the background, returns the port.
    async fn start_server<P>(cert_key_pair: Option<CertificateKeyPair>, path: P) -> u16
    where
        P: AsRef<Path> + Send + 'static,
    {
        let addr = SocketAddr::from_str(&format!("{}:{}", "127.0.0.1", "0")).unwrap();
        let server = DataServer::bind_addr(addr, "/data", cert_key_pair, default_cors_config())
            .await
            .unwrap();
        let port = server.local_addr().port();
        tokio::spawn(async move { server.serve(path).await.unwrap() });

        port
    }

    // End-to-end: start a server over `scheme`, fetch a known key, check the body.
    async fn test_server<P>(scheme: &str, cert_key_pair: Option<CertificateKeyPair>, path: P)
    where
        P: AsRef<Path> + Send + 'static,
    {
        let port = start_server(cert_key_pair, path).await;

        let test_server = DataTestServer::default();
        let request = test_server
            .get_request()
            .method(Method::GET.to_string())
            .uri(format!("{scheme}://localhost:{port}/data/key1"));
        let response = test_server.test_server(request, "".to_string()).await;

        assert!(response.is_success());
        assert_eq!(response.body, b"value1");
    }
}
#![warn(clippy::dbg_macro, clippy::print_stdout, clippy::print_stderr)]

mod app;
mod assets;
mod cli;
mod config;
mod dialog;
mod markdown;
mod opener;
mod persistent;
mod renderer;
mod watcher;
#[cfg(windows)]
mod windows;
mod wry;

pub use crate::cli::Options;
#[cfg(feature = "__bench")]
pub use crate::markdown::{MarkdownContent, MarkdownParser};
#[cfg(feature = "__bench")]
pub use crate::renderer::RawMessageWriter;
#[cfg(windows)]
pub use windows::WindowsConsole;

use crate::app::Shiba;
use crate::opener::SystemOpener;
use crate::renderer::EventLoop;
use crate::watcher::NopWatcher;
use crate::wry::{WryEventLoop, WryRenderer};
use anyhow::Result;
use notify::RecommendedWatcher;
use rfd::FileDialog;

/// Builds the application and starts the wry event loop.
///
/// The watcher type parameter is chosen from `options.watch`: a real
/// `RecommendedWatcher` when file watching is enabled, a `NopWatcher` otherwise.
pub fn run(options: Options) -> Result<()> {
    let event_loop = WryEventLoop::with_user_event();

    // Early return for the non-watching configuration.
    if !options.watch {
        let app =
            Shiba::<WryRenderer, SystemOpener, NopWatcher, FileDialog>::new(options, &event_loop)?;
        return event_loop.start(app);
    }

    let app = Shiba::<WryRenderer, SystemOpener, RecommendedWatcher, FileDialog>::new(
        options,
        &event_loop,
    )?;
    event_loop.start(app)
}
#[macro_use] extern crate diesel; extern crate diesel_derive_enum; extern crate dotenv; use diesel::update; use crate::models::NewActor; use diesel::dsl::insert_into; use crate::models::Actor; use r2d2::Pool; use std::ops::Deref; use diesel::pg::PgConnection; use diesel::prelude::*; use dotenv::dotenv; use std::env; extern crate r2d2; extern crate r2d2_diesel; use std::thread; use r2d2_diesel::ConnectionManager; pub mod models; pub mod schema; use std::sync::atomic::{AtomicUsize, Ordering}; use std::time::{Duration, SystemTime}; use schema::actor::dsl::*; use schema::address::dsl::*; use schema::category::dsl::*; use schema::city::dsl::*; use schema::country::dsl::*; use schema::customer::dsl::*; use schema::film::dsl::*; use schema::film_actor::dsl::*; use schema::film_category::dsl::*; use schema::inventory::dsl::*; use schema::language::dsl::*; use schema::rental::dsl::*; use schema::staff::dsl::*; use schema::store::dsl::*; pub fn read_all_tables(conn: &PgConnection) { let results = actor.limit(50).load::<models::Actor>(conn).expect("Error loading actors"); println!("Displaying {} actors", results.len()); for post in results { println!("{:?}", post); } println!("----------\n"); let results = address.limit(50).load::<models::Address>(conn).expect("Error loading address"); println!("Displaying {} address", results.len()); for post in results { println!("{:?}", post); } println!("----------\n"); let results = category.limit(50).load::<models::Category>(conn).expect("Error loading category"); println!("Displaying {} categories .. ", results.len()); for post in results { println!("{:?}", post);} println!("----------\n"); let results = city.limit(50).load::<models::City>(conn).expect("Error loading posts"); println!("Displaying {} cities .. ", results.len()); for post in results { println!("{:?}", post);} println!("----------\n"); let results = country.limit(50).load::<models::Country>(conn).expect("Error loading posts"); println!("Displaying {} country .. 
", results.len()); for post in results { println!("{:?}", post); } println!("----------\n"); let results = customer.limit(50).load::<models::Customer>(conn).expect("Error loading posts"); println!("Displaying {} customer .. ", results.len()); for post in results { println!("{:?}", post); } println!("----------\n"); let results = film.limit(50).load::<models::Film>(conn).expect("Error loading posts"); println!("Displaying {} films .. ", results.len()); for post in results { println!("{:?}", post); } println!("----------\n"); let results = film_actor.limit(50).load::<models::FilmActor>(conn).expect("Error loading posts"); println!("Displaying {} film actors .. ", results.len()); for post in results { println!("{:?}", post); } println!("----------\n"); let results = film_category.limit(50).load::<models::FilmCategory>(conn).expect("Error loading posts"); println!("Displaying {} film categories .. ", results.len()); for post in results { println!("{:?}", post); } println!("----------\n"); let results = inventory.limit(50).load::<models::Inventory>(conn).expect("Error loading posts"); println!("Displaying {} store inventories .. ", results.len()); for post in results { println!("{:?}", post); } println!("----------\n"); let results = language.limit(50).load::<models::Language>(conn).expect("Error loading posts"); println!("Displaying {} languages .. ", results.len()); for post in results { println!("{:?}", post); } println!("----------\n"); let results = rental.limit(50).load::<models::Rental>(conn).expect("Error loading posts"); println!("Displaying {} rental records .. ", results.len()); for post in results { println!("{:?}", post); } println!("----------\n"); let results = staff.limit(50).load::<models::Staff>(conn).expect("Error loading posts"); println!("Displaying {} Staff details.. 
", results.len()); for post in results { println!("{:?}", post); } println!("----------\n"); let results = store.limit(50).load::<models::Store>(conn).expect("Error loading posts"); println!("Displaying {} Stores .. ", results.len()); for post in results { println!("{:?}", post); } println!("----------\n"); } static GLOBAL_THREAD_COUNT: AtomicUsize = AtomicUsize::new(0); pub fn read_data(pool: &Pool<ConnectionManager<diesel::PgConnection>>) { let mut vector = Vec::new(); // tweak the value below to acheive || connections to database. for _ in 0..1 { let pool = pool.clone(); let t1 = thread::spawn(move || { let connection = pool.get(); GLOBAL_THREAD_COUNT.fetch_add(1, Ordering::SeqCst); if connection.is_ok() { read_all_tables(connection.unwrap().deref()); } GLOBAL_THREAD_COUNT.fetch_sub(1, Ordering::SeqCst); }); vector.push(t1); } // Wait for other threads to finish. while GLOBAL_THREAD_COUNT.load(Ordering::SeqCst) != 0 { thread::sleep(Duration::from_millis(1)); } // Give some time for writes to finish otherwise, it would close the app without writing to stdout. thread::sleep(Duration::from_millis(1000)); } pub fn insert_find_update_value_into_table (pool: &Pool<ConnectionManager<diesel::PgConnection>>) { let connection = pool.get(); if connection.is_ok() { let unwrapped_conn = connection.unwrap(); let conn_handle = unwrapped_conn.deref(); let now = SystemTime::now().elapsed().unwrap().as_micros(); let sample_record = NewActor { actor_id: 100001, first_name: "Tom".to_string(), last_name: "Hanks".to_string(), last_update: diesel::pg::data_types::PgTimestamp((now+2) as i64) }; match insert_into(actor).values(&sample_record).get_results::<Actor>(conn_handle) { Ok(_value) => { println!("New value inserted in db"); println!("{:?}", _value); }, Err(_reason) => { println!("Error inserting (reason) -> {:?}", _reason); } } // Find the record inserted. 
let results = actor.filter(schema::actor::actor_id.eq(1001)).limit(5).load::<models::Actor>(conn_handle).expect("Error loading posts"); println!("Displaying {} actors .. ", results.len()); for post in results { println!("{:?}", post); } println!("----------\n"); let results = actor.filter(schema::actor::first_name.ilike("Arjun")).limit(5).load::<models::Actor>(conn_handle).expect("Error loading posts"); println!("Displaying {} actors .. ", results.len()); for post in results { println!("{:?}", post); } println!("----------\n"); let update_actors = actor.filter(schema::actor::actor_id.eq(1001)); match update(update_actors).set(( schema::actor::first_name.eq("Brad"), schema::actor::last_name.eq("Pitt")) ) .get_results::<Actor>(conn_handle) { Ok(_value) => { println!("Updated value in the db"); println!("{:?}", _value); }, Err(_reason) => { println!("Error updating (reason) -> {:?}", _reason); } } } } fn main() { dotenv().ok(); let database_url = env::var("DATABASE_URL").expect("DATABASE_URL must be set in the .env file."); let manager = ConnectionManager::<PgConnection>::new(database_url); let pool = r2d2::Pool::builder().build(manager).expect("Failed to create pool."); read_data(&pool); insert_find_update_value_into_table(&pool); }
/// Demonstrates `&str` vs `String`: char iteration, building a heap string,
/// and converting between the two. Output is unchanged from the original demo.
fn string_test() {
    println!("string_test --------------------");

    // utf-8, on stack
    let s: &'static str = "hello world"; // &str = string slice
    for c in s.chars() {
        print!("{} ", c); // h e l l o   w o r l d
    }
    println!(" ");
    // Idiom: `chars().next()` instead of `chars().nth(0)`.
    if let Some(first) = s.chars().next() {
        println!("First character : {}", first);
    }

    // String on heap: append "<letter>-" for every byte in 'a'..='g'.
    let mut letters = String::new();
    for b in b'a'..=b'g' {
        letters.push(b as char);
        letters.push('-');
    }
    println!("letters : {}", letters);

    // conversion &str <-> String. These bindings exist only to show the
    // conversions compile; underscore prefixes silence unused warnings.
    let _u: &str = &letters;
    let mut _newstr = "hello".to_string();
    _newstr.remove(0);
}

/// Demonstrates `format!`: positional, indexed, named, and mixed arguments.
fn str_format_test() {
    println!("str_format_test ----------------");

    let name = "Jim";
    let greeting = format!("Hi, I'm {}", name);
    println!("{}", greeting);

    // Indexed arguments may repeat: {0} is used twice here.
    let format_idx = format!("{0} {1}, {0} {2}", "the", "good", "bad");
    println!("{}", format_idx);

    // Named arguments.
    let format_ele_name = format!("I'm {first}, {first} {last}", first = "John", last = "Doe");
    println!("{}", format_ele_name);

    // Positional, indexed and named arguments can be mixed.
    let format_mix = format!("{1} {} {0} {} {value}", "a", "b", value = "c");
    println!("{}", format_mix);
}

/// Runs all string demos in this module (same order as before).
pub fn characters() {
    str_format_test();
    string_test();
}
// Crypto backend wiring: each primitive module is feature-gated and routed to
// its libsodium implementation via `#[path]`.

// Ed25519 signing (sodium backend).
#[cfg(feature = "ed25519_sign_sodium")]
#[path = "ed25519_sign/sodium.rs"]
pub mod ed25519_sign;

#[cfg(feature = "ed25519_box_sodium")]
#[path = "ed25519_box/sodium.rs"]
// TODO: The name is misleading as the operations do not happen over ed25519 curve
pub mod ed25519_box;

// Base64 helpers re-exported from indy-utils.
pub use indy_utils::crypto::base64;

#[allow(dead_code)] /* FIXME Do we really need this module? */
#[cfg(feature = "xsalsa20_sodium")]
#[path = "xsalsa20/sodium.rs"]
pub mod xsalsa20;

// ChaCha20-Poly1305 (IETF) re-exported from indy-utils.
pub use indy_utils::crypto::chacha20poly1305_ietf;

pub mod signature_serializer;
pub mod verkey_builder;

// Sealed boxes (sodium backend).
#[cfg(feature = "sealedbox_sodium")]
#[path = "sealedbox/sodium.rs"]
pub mod sealedbox;

// Random byte generation (sodium backend).
#[cfg(feature = "randombytes_sodium")]
#[path = "randombytes/sodium.rs"]
pub mod randombytes;
use super::*; use fnv::FnvHasher; struct CustomHasher { key: u64, hasher: FnvHasher } impl Clone for CustomHasher { fn clone(&self) -> CustomHasher { CustomHasher { key: self.key, hasher: FnvHasher::with_key(self.key) } } } impl core::hash::Hasher for CustomHasher { fn write(&mut self, input: &[u8]) { self.hasher.write(input); } fn finish(&self) -> u64 { self.hasher.finish() } } #[test] fn make_array_hash() { let mut ah = ArrayHashBuilder::default().build(); const MAX :usize = 1_000_000; for i in 0..MAX { // put 1 million entry with one usize for each key and value ah.put(i, i); } let mut counter = 0; for _ in ah { // iterate and count number of entry manually counter += 1; } assert_eq!(counter, MAX); } #[test] fn put_get_xx() { let mut ah = ArrayHashBuilder::default().max_load_factor(10_000).build(); const MAX :usize = 1_000_000; for i in 0..MAX { // put 1 million entry with one usize for each key and value ah.put(i, i); } let begin = std::time::Instant::now(); for j in 0..MAX { if let Some(v) = ah.get(&j) { assert_eq!(*v, j); } else { panic!("Cannot retrieve value back using existing key") } } dbg!(begin.elapsed().as_millis()); } #[test] fn put_smart_get_xx() { let mut ah = ArrayHashBuilder::default().max_load_factor(10_000).build(); const MAX :usize = 1_000_000; for i in 0..MAX { // put 1 million entry with one usize for each key and value ah.put(Box::new(i), i); // Put value using smart pointer Box } let begin = std::time::Instant::now(); for j in 0..MAX { if let Some(v) = ah.smart_get(&j) { // Get value using borrow type assert_eq!(*v, j); } else { panic!("Cannot retrieve value back using existing key") } } dbg!(begin.elapsed().as_millis()); } #[test] fn put_contractual_get_xx() { let mut ah = ArrayHashBuilder::default().build(); #[derive(Hash, PartialEq)] struct A(usize); #[derive(Hash)] struct B(usize); impl PartialEq<A> for B { fn eq(&self, rhs: &A) -> bool { self.0 == rhs.0 } } const MAX :usize = 1_000_000; for i in 0..MAX { // put 1 million entry with one 
usize for each key and value ah.put(A(i), i); // Put value using smart pointer Box } assert!(ah.get(&A(MAX + 1)).is_none()); let begin = std::time::Instant::now(); for j in 0..MAX { if let Some(v) = ah.get(&B(j)) { // Get value using borrow type assert_eq!(*v, j); } else { panic!("Cannot retrieve value back using existing key") } } dbg!(begin.elapsed().as_millis()); } #[test] fn try_put() { let mut ah = ArrayHashBuilder::default().build(); assert_eq!(ah.try_put(1u8, 1u8).unwrap(), &1u8); assert_eq!(ah.try_put(1u8, 1u8).unwrap_err(), (1u8, 1u8, &1u8)); assert_eq!(ah.try_put(0u8, 1u8).unwrap(), &1u8); assert_eq!(ah.try_put(0u8, 1u8).unwrap_err(), (0u8, 1u8, &1u8)); } #[test] fn put_coalesced_get_xx() { let mut ah = ArrayHashBuilder::default().max_load_factor(10_000).build(); const MAX :usize = 1_000_000; for i in 0..MAX { // put 1 million entry with one usize for each key and value ah.put(vec![i], i); // Put value using smart pointer Box } assert!(ah.coerce_get::<[usize]>(&[MAX + 1]).is_none()); let begin = std::time::Instant::now(); for j in 0..MAX { if let Some(v) = ah.coerce_get::<[usize]>(&[j]) { // Get value using borrow type assert_eq!(*v, j); } else { panic!("Cannot retrieve value back using existing key") } } dbg!(begin.elapsed().as_millis()); } #[test] fn contains_all() { let mut ah = ArrayHashBuilder::default().max_load_factor(10_000).build(); let mut another = ah.clone(); let mut partly_eq = ah.clone(); const MAX :usize = 1_000_000; for i in 0..MAX { // put 1 million entry with one usize for each key and value ah.put(vec![i], i); // Put value using smart pointer Box } for i in 0..(MAX / 2) { another.put(vec![i], i); partly_eq.put(vec![i], 0); } let begin = std::time::Instant::now(); assert!(ah.contains_iter(&another)); assert!(!ah.contains_iter(&partly_eq)); dbg!(begin.elapsed().as_millis()); } #[test] fn partial_eq() { let mut ah = ArrayHashBuilder::default().max_load_factor(8).buckets_size(8).build(); let mut ah_2 = 
ArrayHashBuilder::default().max_load_factor(8).buckets_size(8).build(); const MAX :usize = 16; for i in 0..MAX { // put 1 million entry with one usize for each key and value ah.put(vec![i], i); ah_2.put(vec![i], i); // Put similar key/value to see if it's eq } assert_ne!(ah, ah_2); let ah_3 = ah.clone(); assert_eq!(ah, ah_3); let mut ah_4 = ah.to_builder().build(); let mut ah_5 = ah.to_builder().build(); for i in 0..MAX { ah_4.put(vec![i], i); // Put similar key/value to see if it's eq ah_5.put(vec![i], 0); // Put similar key but different value to see if it's eq } assert_eq!(ah, ah_4); assert_ne!(ah, ah_5); } #[test] fn hasher_eq() { let ah = ArrayHashBuilder::default().max_load_factor(8).buckets_size(8).build::<u8, u8>(); let ah_2 = ArrayHashBuilder::default().max_load_factor(8).buckets_size(8).build::<u8, u8>(); assert!(!ah.is_hasher_eq(&ah_2)); let ah_3 = ah.clone(); assert!(ah.is_hasher_eq(&ah_3)); let ah_4 = ah.to_builder().build::<u8, u8>(); let ah_5 = ah.to_builder().build::<u8, u8>(); assert!(ah.is_hasher_eq(&ah_4)); assert!(ah_5.is_hasher_eq(&ah_4)); } #[test] fn make_custom_hasher() { let seed = 0u64; let hasher = CustomHasher { key: seed, hasher: FnvHasher::with_key(seed) }; let mut ah = ArrayHashBuilder::with_hasher(hasher).build(); const MAX :usize = 1_000_000; for i in 0..MAX { // put 1 million entry with one usize for each key and value ah.put(i, i); } let mut counter = 0; for _ in ah { // iterate and count number of entry manually counter += 1; } assert_eq!(counter, MAX); } #[test] fn put_get_fnv() { let seed = 0u64; let hasher = CustomHasher { key: seed, hasher: FnvHasher::with_key(seed) }; let mut ah = ArrayHashBuilder::with_hasher(hasher).build(); const MAX :usize = 1_000_000; for i in 0..MAX { // put 1 million entry with one usize for each key and value ah.put(i, i); } for j in 0..MAX { if let Some(v) = ah.get(&j) { assert_eq!(*v, j); } else { panic!("Cannot retrieve value back using existing key") } } } #[test] fn test_iter() { let mut ah = 
ArrayHashBuilder::default().build(); const MAX :usize = 100_000; for i in 0..MAX { // put 1 million entry with one usize for each key and value ah.put(i, i); } let mut ah_vec = ah.iter().map(|(k, _)| *k).collect::<Vec<usize>>(); ah_vec.sort(); for i in 0..ah_vec.len() { assert_eq!(i, ah_vec[i]); } } #[test] fn test_iter_mut() { let mut ah = ArrayHashBuilder::default().build(); const MAX :usize = 100_000; for i in 0..MAX { // put 1 million entry with one usize for each key and value ah.put(i, i); } ah.iter_mut().enumerate().for_each(|(i, entry)| {entry.1 = i;}); for (i, (_, v)) in ah.iter().enumerate() { assert_eq!(i, *v); } let mut sorted: Vec<usize> = ah.into_iter().map(|(k, _)| k).collect(); sorted.sort(); for (i, k) in sorted.into_iter().enumerate() { assert_eq!(i, k); } } #[test] fn test_drain() { let mut ah = ArrayHashBuilder::default().build(); const MAX :usize = 100_000; for i in 0..MAX { // put 1 million entry with one usize for each key and value ah.put(i, i); } let mut keys : Vec<usize> = ah.drain().map(|(k, _)| k).collect(); keys.sort_unstable(); keys.into_iter().enumerate().for_each(|(i, k)| assert_eq!(i, k)); assert_eq!(ah.len(), 0); } #[test] fn test_drain_with() { let mut ah = ArrayHashBuilder::default().build(); const MAX :usize = 100_000; for i in 0..MAX { // put 1 million entry with one usize for each key and value ah.put(i, i); } let mut keys : Vec<usize> = ah.drain_with(|(_, v)| { *v >= MAX / 2 }).map(|(k, _)| k).collect(); keys.sort_unstable(); keys.into_iter().enumerate().for_each(|(i, k)| assert_eq!(i + MAX / 2, k)); assert_eq!(ah.len(), MAX / 2); } #[test] fn test_split_with() { let mut ah = ArrayHashBuilder::default().build(); const MAX :usize = 100_000; for i in 0..MAX { // put 1 hundred thousand entry with one usize for each key and value ah.put(i, i); } let ah2 = ah.split_by(|(k, _)| { *k >= MAX / 2 }); assert_eq!(ah.len(), 50_000); assert_eq!(ah2.len(), 50_000); for i in 0..(MAX / 2) { if let Some(v) = ah.get(&i) { assert_eq!(*v, i); } 
else { panic!("Value missing for {}", i) }
    }
    for i in (MAX / 2)..MAX {
        if let Some(v) = ah2.get(&i) {
            assert_eq!(*v, i);
        } else {
            panic!("Value missing for {}", i)
        }
    }
}

/// Rough timing for 100k compound (`Vec<usize>`) keys. `#[ignore]`d so it
/// only runs on demand (`cargo test -- --ignored`).
#[test]
#[ignore]
fn bench_compound_key() {
    let start = std::time::Instant::now();
    let mut ah = ArrayHashBuilder::default().buckets_size(4096).build();
    for i in 0..100_000 {
        let v: Vec<usize> = (i..(i + 100)).collect();
        ah.put(v.clone(), v);
    }
    let created = start.elapsed().as_millis();
    println!("Created in {}s {}ms", created / 1000, created % 1000);
    for i in 0..100_000 {
        let v: Vec<usize> = (i..(i + 100)).collect();
        if let Some(value) = ah.get(&v) {
            assert_eq!(*value, v);
        }
    }
    let created = start.elapsed().as_millis();
    // BUGFIX: the second operand must be the millisecond remainder
    // (`created % 1000`), matching the "Created in" line above; the old code
    // printed the full elapsed millisecond count as the "ms" part.
    println!("Got in {}s {}ms", created / 1000, created % 1000);
}
#![no_std]
use volatile_cell::VolatileCell;

// Option registers for TIM2 and TIM5; both start at 0x50 relative to
// base of their respective timer peripherals
// (STM32F4)

// TIM2 option register: single 32-bit register with the ITR1 remap
// field in bits 10..11.
ioregs!(TIM2_OPT = {
    0x00 => reg32 or {
        10..11 => itr1_rmp,
    },
});

// TIM5 option register: single 32-bit register with the TI4 remap
// field in bits 6..7.
ioregs!(TIM5_OPT = {
    0x00 => reg32 or {
        6..7 => ti4_rmp,
    },
});
use super::Camera; use cgmath::InnerSpace; #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct Uniforms { view_proj: cgmath::Matrix4<f32>, // We use Vector4 instead of Vector3 due to GLSL block alignments // See https://stackoverflow.com/questions/35524814/ light_pos: cgmath::Vector4<f32>, light_color: cgmath::Vector4<f32>, } impl Uniforms { pub fn new() -> Self { use cgmath::SquareMatrix; Self { view_proj: cgmath::Matrix4::identity(), light_pos: cgmath::Vector4::unit_x(), light_color: (1.0, 1.0, 1.0, 1.0).into(), } } pub fn update_view_proj(&mut self, camera: &Camera) { self.view_proj = camera.build_view_projection_matrix(); let back = camera.eye(); let up = camera.up(); let left = up.cross(back); self.light_pos = (back + up + left).normalize_to(20.0).extend(1.0); } }
// Character-model submodules and their public re-exports.
mod ability_score;
mod ancestry;
mod background;
mod character;
mod class;
mod health;

// Ability-score machinery: boost choices, score sets, and score types.
pub use ability_score::AbilityBoostChoice;
pub use ability_score::AbilityBoostChoiceSet;
pub use ability_score::AbilityScore;
pub use ability_score::AbilityScoreSet;
pub use ability_score::AbilityScoreType;
// Core character-building pieces.
pub use ancestry::Ancestry;
pub use background::Background;
pub use character::Character;
pub use character::Size;
pub use class::Class;
pub use health::Health;
/// An error carrying a resolved source position (line/column) along with the
/// raw byte/character index it was derived from.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Error {
    pub line: usize,
    pub column: usize,
    pub msg: String,
    pub idx: usize,
}

impl ::std::error::Error for Error {}

impl ::std::fmt::Display for Error {
    /// Renders as `"<msg> at <line>:<column>"` (the index is not shown).
    fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
        let rendered = format!("{} at {}:{}", self.msg, self.line, self.column);
        f.write_str(&rendered)
    }
}

/// An error carrying only the raw index, before line/column resolution.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RawError {
    pub idx: usize,
    pub msg: String,
}

impl ::std::error::Error for RawError {}

impl ::std::fmt::Display for RawError {
    /// Renders as `"<msg> at <idx>"`.
    fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
        let rendered = format!("{} at {}", self.msg, self.idx);
        f.write_str(&rendered)
    }
}
use hlist::*; use ty::{ Eval, Eval1, Infer, infer, }; use ty::bit::*; /// Type-level positive natural numbers (binary) pub trait Pos {} impl Pos for _1 {} impl<P: Pos, B> Pos for (P, B) {} /// Type-level successor for positive natural numbers #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)] pub enum Succ {} impl Infer for Succ { type Arity = HC<(), HN>; type Mode = infer::mode::Constant; } /// `succ(1) ==> 1:0` impl Eval<Succ> for HC<_1, HN> { type Out = (_1, _0,); } /// `succ(p:0) ==> p:1` impl<P> Eval<Succ> for HC<(P, _0), HN> { type Out = (P, _1); } /// `p:1 ==> succ(p):0` impl<P, Rec> Eval<Succ> for HC<(P, _1), HN> where P: Eval1<Succ, Out = Rec>, { type Out = (Rec, _0); } /// Type-level addition for positive natural numbers #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)] pub enum Add {} impl Infer for Add { type Arity = HC<(), HC<(), HN>>; type Mode = infer::mode::Constant; } /// `add(1, 1) ==> 1:0` impl Eval<Add> for HC<_1, HC<_1, HN>> { type Out = (_1, _0); } /// `add(1, q:0) ==> q:1` impl<P1> Eval<Add> for HC<_1, HC<(P1, _0), HN>> { type Out = (P1, _1); } /// `add(1, q:1) ==> succ(q):0` impl<P1, Rec> Eval<Add> for HC<_1, HC<(P1, _1), HN>> where P1: Eval1<Succ, Out = Rec>, { type Out = (Rec, _0); } /// `add(p:0, 1) ==> p:1` impl<P0> Eval<Add> for HC<(P0, _0), HC<_1, HN>> { type Out = (P0, _1); } /// `add(p:0, q:0) ==> add(p, q):0` impl<P0, P1, Rec> Eval<Add> for HC<(P0, _0), HC<(P1, _0), HN>> where HC<P0, HC<P1, HN>> : Eval<Add, Out = Rec>, { type Out = (Rec, _0); } /// `add(p:0, q:1) ==> add(p, q):1` impl<P0, P1, Rec> Eval<Add> for HC<(P0, _0), HC<(P1, _1), HN>> where HC<P0, HC<P1, HN>> : Eval<Add, Out = Rec>, { type Out = (Rec, _1); } /// `add(p:1, 1) ==> succ(p):0` impl<P0, Rec> Eval<Add> for HC<(P0, _1), HC<_1, HN>> where P0: Eval1<Succ, Out = Rec>, { type Out = (Rec, _0); } /// `add(p:1, q:0) ==> add(p, q):1` impl<P0, P1, Rec> Eval<Add> for HC<(P0, _1), HC<(P1, _0), HN>> where HC<P0, HC<P1, HN>> : Eval<Add, Out = 
Rec>, { type Out = (Rec, _1); }

/// `add(p:1, q:1) ==> add_carry(p, q):0`
// BUGFIX(doc): (2p+1) + (2q+1) = 2*(p+q+1), so the low bit is 0 — the old
// comment claimed `:1`, contradicting the (correct) `(Rec, _0)` below.
impl<P0, P1, Rec> Eval<Add> for HC<(P0, _1), HC<(P1, _1), HN>>
where
    HC<P0, HC<P1, HN>>: Eval<AddCarry, Out = Rec>,
{
    type Out = (Rec, _0);
}

/// Type-level addition with carry for positive natural numbers:
/// `add_carry(p, q) = p + q + 1`.
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub enum AddCarry {}

/// ```ignore
/// p : Pos
/// q : Pos
/// ---------------------
/// add_carry(p, q) : Pos
/// ```
impl Infer for AddCarry {
    type Arity = HC<(), HC<(), HN>>;
    type Mode = infer::mode::Constant;
}

/// `add_carry(1, 1) ==> 1:1`
impl Eval<AddCarry> for HC<_1, HC<_1, HN>> {
    type Out = (_1, _1);
}

/// `add_carry(1, q:0) ==> succ(q):0`
impl<P1, Rec> Eval<AddCarry> for HC<_1, HC<(P1, _0), HN>>
where
    P1: Eval1<Succ, Out = Rec>,
{
    type Out = (Rec, _0);
}

/// `add_carry(1, q:1) ==> succ(q):1`
impl<P1, Rec> Eval<AddCarry> for HC<_1, HC<(P1, _1), HN>>
where
    P1: Eval1<Succ, Out = Rec>,
{
    type Out = (Rec, _1);
}

/// `add_carry(p:0, 1) ==> succ(p):0`
// BUGFIX: 2p + 1 + 1 = 2*(p+1). The old impl computed succ(p) as `Rec` in the
// where clause but then discarded it and returned `(P0, _0)` (= 2p).
impl<P0, Rec> Eval<AddCarry> for HC<(P0, _0), HC<_1, HN>>
where
    P0: Eval1<Succ, Out = Rec>,
{
    type Out = (Rec, _0);
}

/// `add_carry(p:0, q:0) ==> add(p, q):1`
impl<P0, P1, Rec> Eval<AddCarry> for HC<(P0, _0), HC<(P1, _0), HN>>
where
    HC<P0, HC<P1, HN>>: Eval<Add, Out = Rec>,
{
    type Out = (Rec, _1);
}

/// `add_carry(p:0, q:1) ==> add_carry(p, q):0`
impl<P0, P1, Rec> Eval<AddCarry> for HC<(P0, _0), HC<(P1, _1), HN>>
where
    HC<P0, HC<P1, HN>>: Eval<AddCarry, Out = Rec>,
{
    type Out = (Rec, _0);
}

/// `add_carry(p:1, 1) ==> succ(p):1`
impl<P0, Rec> Eval<AddCarry> for HC<(P0, _1), HC<_1, HN>>
where
    P0: Eval1<Succ, Out = Rec>,
{
    type Out = (Rec, _1);
}

/// `add_carry(p:1, q:0) ==> add_carry(p, q):0`
impl<P0, P1, Rec> Eval<AddCarry> for HC<(P0, _1), HC<(P1, _0), HN>>
where
    HC<P0, HC<P1, HN>>: Eval<AddCarry, Out = Rec>,
{
    type Out = (Rec, _0);
}

/// `add_carry(p:1, q:1) ==> add_carry(p, q):1`
impl<P0, P1, Rec> Eval<AddCarry> for HC<(P0, _1), HC<(P1, _1), HN>> where HC<P0, HC<P1, HN>> :
Eval<AddCarry, Out = Rec>, { type Out = (Rec, _1); } /// `λx. 2 * x - 1` #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)] pub enum PredDouble {} impl Infer for PredDouble { type Arity = HC<(), HN>; type Mode = infer::mode::Constant; } /// `pred_double(1) ==> 1` impl Eval<PredDouble> for HC<_1, HN> { type Out = _1; } /// `pred_double(p:0) ==> pred_double(p):1` impl<P, Rec> Eval<PredDouble> for HC<(P, _0), HN> where P: Eval1<PredDouble, Out = Rec>, { type Out = (Rec, _1); } /// `pred_double(p:1) ==> p:0:1` impl<P> Eval<PredDouble> for HC<(P, _1), HN> { type Out = ((P, _0), _1); } /// Type-level predecessor for positive natural numbers #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)] pub enum Pred {} /// ```ignore /// p : Pos /// ------------- /// pred(p) : Pos /// ``` impl Infer for Pred { type Arity = HC<(), HN>; type Mode = infer::mode::Constant; } /// `pred(p:1) ==> p:0` impl Eval<Pred> for HC<_1, HN> { type Out = _1; } /// `p:0 ==> pred_double(p)` impl<P, Rec> Eval<Pred> for HC<(P, _0), HN> where P: Eval1<PredDouble, Out = Rec>, { type Out = Rec; } /// `p:1 ==> p:0` impl<P> Eval<Pred> for HC<(P, _1), HN> { type Out = (P, _0); } /// Type-level multiplication for positive natural numbers #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)] pub enum Mul {} impl Infer for Mul { type Arity = HC<(), HC<(), HN>>; type Mode = infer::mode::Constant; } /// `mul(1, q) ==> q` impl<P1> Eval<Mul> for HC<_1, HC<P1, HN>> { type Out = P1; } /// `mul(p:0, q) ==> mul(p, q):0` impl<P0, P1, Rec> Eval<Mul> for HC<(P0, _0), HC<P1, HN>> where HC<P0, HC<P1, HN>> : Eval<Mul, Out = Rec>, { type Out = (Rec, _0); } /// `mul(p:1, q) ==> add(q, mul(p, q)):0` impl<P0, P1, Rec0, Rec1> Eval<Mul> for HC<(P0, _1), HC<P1, HN>> where // mul(p0, p1) ==> r0 HC<P0, HC<P1, HN>> : Eval<Mul, Out = Rec0>, // mul(p1, r0) ==> r1 HC<P1, HC<Rec0, HN>> : Eval<Add, Out = Rec1>, { type Out = Rec1; } #[cfg(test)] mod test { use super::*; use ty::*; #[test] fn 
pred() { let x0 = Witness::<Ap1<nat::pos::Pred, (_1, _0)>>; let x1 = Witness::<_1>; x0 == x1; } }
use derive_new::new; use ordered_float::OrderedFloat; use std::fmt; use crate::contract::Contract; use crate::errors::TarotErrorKind; use crate::game_distributed::GameDistributed; use crate::helpers::wait_input; use crate::mode::Mode; use crate::options::Options; use crate::player::Player; use crate::player_in_game::PlayerInGame; use crate::points::{HasPoints, BASE_CONTRACT_POINTS, MAX_CARDS}; use crate::role::Role; use crate::team::Team; use crate::turn::Turn; #[derive(new)] pub struct GameStarted<'a, const MODE: usize> { game_distributed: &'a mut GameDistributed<'a, MODE>, taker_index: usize, contract: Contract, options: Options, #[new(default)] petit_au_bout_for_team: Option<Team>, #[new(default)] defense_cards: usize, #[new(default)] attack_cards: usize, } impl<const MODE: usize> fmt::Display for GameStarted<'_, MODE> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { writeln!(f, "Contract : {}", self.contract)?; if let Some(team) = &self.petit_au_bout_for_team { writeln!(f, "Petit au bout? 
: {team}")?; } writeln!(f, "Defense cards : {}", self.defense_cards)?; writeln!(f, "Attack cards : {}", self.attack_cards)?; writeln!(f, "Players : ")?; for index in 0..MODE { let (player, player_in_game) = self.player_and_his_game(index); writeln!(f, "\t{player} {player_in_game}")?; } Ok(()) } } impl<'a, const MODE: usize> GameStarted<'a, MODE> { pub fn is_consistent(&mut self) -> Result<(), TarotErrorKind> { self.game_distributed.game().is_consistent() } pub fn finished(&mut self) -> bool { self.game_distributed.finished() } pub fn mode(&mut self) -> &Mode { self.game_distributed.game().mode() } #[must_use] pub fn player(&self, index: usize) -> &Player { self.game_distributed.player(index) } #[must_use] pub fn player_and_his_game(&self, index: usize) -> (&Player, &PlayerInGame) { self.game_distributed.player_and_his_game(index) } pub fn player_and_his_game_mut(&mut self, index: usize) -> (&Player, &mut PlayerInGame) { self.game_distributed.player_and_his_game_mut(index) } pub fn players_and_their_game_mut(&mut self) -> (&[Player; MODE], &mut [PlayerInGame; MODE]) { self.game_distributed.players_and_their_game_mut() } pub fn play(&mut self) -> Result<(), TarotErrorKind> { let mut turn = Turn::default(); let mut master_player_index: usize = 0; let quiet = self.options.quiet; for current_player_index in 0..MODE { let (current_player, current_player_in_game) = self.player_and_his_game_mut(current_player_index); let current_player_name = current_player.name(); if !quiet { println!("Current player {current_player_name} (index : {current_player_index})"); } let &Some(team) = current_player_in_game.team() else { return Err(TarotErrorKind::NoTeamForPlayer(current_player.name().to_string())); }; let card = current_player_in_game.play_card(current_player, &mut turn)?; if card.is_fool() { if current_player_in_game.last_turn() { // RULE: exception in the last turn, the fool is in game and can be lost turn.put(card); match team { Team::Attack => { if self.attack_cards == 
MAX_CARDS - self.mode().dog_size() { turn.master_index = Some(turn.len() - 1); master_player_index = current_player_index; } } Team::Defense => { if self.defense_cards == MAX_CARDS - self.mode().dog_size() { turn.master_index = Some(turn.len() - 1); master_player_index = current_player_index; } } } } else { // RULE: the fool is always preserved to his owner current_player_in_game.push_owned(card); turn.put(card); } } else { turn.put(card); if let Some(master) = turn.master_card() { if master.master(card) { if !quiet { let master_player_name = self.player(master_player_index).name(); println!( "Master card is {master}, so player {master_player_name} stays master", ); } } else { if !quiet { println!( "Master card is {card}, so player {current_player_name} becomes master", ); } master_player_index = current_player_index; turn.master_index = Some(turn.len() - 1); } } else { if !quiet { println!( "First card is {card}, so player {current_player_name} becomes master", ); } master_player_index = current_player_index; turn.master_index = Some(turn.len() - 1); } } if !self.options.quiet { println!("{turn}"); } } let mode = *self.mode(); let attack_near_slam = self.attack_cards == MAX_CARDS - mode.dog_size() - mode.players(); if attack_near_slam && !quiet { println!("Attack is near slam!"); } let defense_near_slam = self.defense_cards == MAX_CARDS - mode.dog_size() - mode.players(); if defense_near_slam && !quiet { println!("Defense is near slam!"); } let (players, players_in_game) = self.players_and_their_game_mut(); let master_player_name = players[master_player_index].name(); let master_player_in_game = &mut players_in_game[master_player_index]; if !quiet { println!("Player {master_player_name} has win turn"); } // RULE: petit au bout works for last turn, or before last turn if a slam is occuring let last_turn = master_player_in_game.last_turn(); if last_turn && !quiet { println!("Last turn detected"); } let before_last_turn = master_player_in_game.before_last_turn(); if 
before_last_turn && !quiet { println!("Before last turn detected"); } let Some(master_player_team) = master_player_in_game.team() else { return Err(TarotErrorKind::NoTeamForPlayer(master_player_name.to_string())); }; let turn_cards = turn.take_cards_except_fool(); let petit_au_bout_for_team = if turn_cards.has_petit() && (last_turn || (before_last_turn && (attack_near_slam || defense_near_slam))) { if !quiet { println!( "{master_player_name} (team: {master_player_team}) has Petit in last turn (Petit au bout) : +10 points", ); } wait_input(); Some(*master_player_team) } else { None }; let mut attack_cards = 0; let mut defense_cards = 0; match master_player_team { Team::Attack => attack_cards = turn_cards.len(), Team::Defense => defense_cards = turn_cards.len(), } master_player_in_game.extend_owned(&turn_cards); self.game_distributed.rotate_at(master_player_index); self.petit_au_bout_for_team = petit_au_bout_for_team; self.attack_cards += attack_cards; self.defense_cards += defense_cards; Ok(()) } pub fn count_points(&mut self) -> Result<(), TarotErrorKind> { let mut ally_index: Option<usize> = None; let mut attack: Vec<usize> = Vec::new(); let mut defense: Vec<usize> = Vec::new(); let mut owning_card_player_index: Option<usize> = None; let mut missing_card_player_index: Option<usize> = None; let mut handle_bonus = OrderedFloat(0.0); let quiet = self.options.quiet; for current_player_index in 0..MODE { let (current_player, current_player_in_game) = self.player_and_his_game_mut(current_player_index); if current_player_in_game.owe_card() { owning_card_player_index = Some(current_player_index); } if current_player_in_game.missing_card() { missing_card_player_index = Some(current_player_index); } if let Some(handle) = &current_player_in_game.handle() { handle_bonus = handle.points(); if !quiet { println!("Handle bonus: {handle_bonus}"); } } match current_player_in_game.role() { Some(Role::Taker) => { attack.push(current_player_index); } Some(Role::Ally) => { 
assert!(ally_index.is_none()); ally_index = Some(current_player_index); attack.push(current_player_index); } Some(Role::Defenser) => { defense.push(current_player_index); } None => { return Err(TarotErrorKind::NoRoleForPlayer( current_player.name().to_string(), )); } } } match self.game_distributed.game().mode() { Mode::Three => { assert_eq!(defense.len(), 2); assert_eq!(attack.len(), 1); }, Mode::Four => { assert_eq!(defense.len(), 3); assert_eq!(attack.len(), 1); }, Mode::Five => { if ally_index.is_some() { assert_eq!(defense.len(), 3); assert_eq!(attack.len(), 2); } else { assert_eq!(defense.len(), 4); assert_eq!(attack.len(), 1); } } }; // give a low card if someone owe a card to someone else if let (Some(owning_card_player_index), Some(missing_card_player_index)) = (owning_card_player_index, missing_card_player_index) { let (players, players_in_game) = self.players_and_their_game_mut(); let owning_card_player_name = players[owning_card_player_index].name(); let low_card = players_in_game[owning_card_player_index].give_low(); if let Some(low_card) = low_card { let missing_card_player_name = players[missing_card_player_index].name(); let missing_card_player_in_game = &mut players_in_game[missing_card_player_index]; missing_card_player_in_game.push_owned(low_card); if !quiet { println!("Player {owning_card_player_name} own a card to {missing_card_player_name}, giving a {low_card} in exchange"); } } else if !quiet { println!("Player {owning_card_player_name} cannot give a low card"); } } let taker_index = self.taker_index; if let Some(ally_index) = ally_index { let (players, players_in_game) = self.players_and_their_game_mut(); let ally_name = &players[ally_index].name(); let ally_in_game = &mut players_in_game[ally_index]; let ally_cards = ally_in_game.all_cards(); let taker_name = &players[taker_index].name(); let taker_in_game = &mut players_in_game[taker_index]; if !quiet { println!("{ally_name} gives his card to {taker_name}"); } 
taker_in_game.extend_owned(&ally_cards); } let (taker, taker_in_game) = self.player_and_his_game_mut(self.taker_index); let slam_bonus = taker_in_game.slam_bonus(); let taker_points = taker_in_game.points(); let points_for_oudlers = taker_in_game.points_for_oudlers()?; if !quiet { println!("Taker {taker} slam bonus: {slam_bonus}"); println!("Taker {taker} owned points: {taker_points}"); println!("Contract todo: {points_for_oudlers}"); println!("Contract base: {BASE_CONTRACT_POINTS}"); let difference = taker_points - points_for_oudlers; println!("Contract difference: {difference}"); } let contract_points = if taker_points >= points_for_oudlers { if !quiet { let total = taker_points - points_for_oudlers + BASE_CONTRACT_POINTS; println!("Contract total: {total}"); } (taker_points - points_for_oudlers + BASE_CONTRACT_POINTS) * self.contract.multiplier() } else { if !self.options.quiet { let total = taker_points - points_for_oudlers - BASE_CONTRACT_POINTS; println!("Contract total: {total}"); } (taker_points - points_for_oudlers - BASE_CONTRACT_POINTS) * self.contract.multiplier() }; if !self.options.quiet { println!( "Taker contract: {} (x{})", self.contract, self.contract.multiplier() ); println!("Taker contract points: {contract_points}"); } let points_petit_au_bout = 10.0 * self.contract.multiplier(); let petit_au_bout_bonus = match self.petit_au_bout_for_team { Some(Team::Defense) => { if !self.options.quiet { println!("Petit au bout for defense: -{points_petit_au_bout}"); } -points_petit_au_bout } Some(Team::Attack) => { if !self.options.quiet { println!("Petit au bout for attack: {points_petit_au_bout}"); } points_petit_au_bout } None => { if !self.options.quiet { println!("No petit au bout bonus"); } 0.0 } }; let ratio = self.mode().ratio(ally_index.is_some()); let points = contract_points + petit_au_bout_bonus + handle_bonus + slam_bonus; if contract_points >= OrderedFloat(0.0) { self.game_distributed .game() .player_mut(self.taker_index) .add_score(ratio * 
points); } else { handle_bonus *= -1.0; self.game_distributed .game() .player_mut(self.taker_index) .add_score(-ratio * points); } if !self.options.quiet { println!("Attack handle bonus: {}", handle_bonus.abs()); println!("Taker points: {points}"); println!( "Taker total points: {}", self.game_distributed .game() .player(self.taker_index) .score() ); } if let Some(ally_index) = ally_index { if contract_points >= OrderedFloat(0.0) { self.game_distributed .game() .player_mut(ally_index) .add_score(points); } else { self.game_distributed .game() .player_mut(ally_index) .add_score(-points); } if !self.options.quiet { println!( "Ally total points: {}", self.game_distributed.game().player(ally_index).score() ); } } for defenser_index in defense { if contract_points >= OrderedFloat(0.0) { self.game_distributed .game() .player_mut(defenser_index) .add_score(-points); } else { self.game_distributed .game() .player_mut(defenser_index) .add_score(points); } if !self.options.quiet { let defenser = self.game_distributed.game().player(defenser_index); println!("Defenser : {defenser}"); } } //if handle_bonus != 0.0 && petit_au_bout_bonus != 0.0 && slam_bonus != 0.0 && ratio == 4.0 { // helpers::wait_input(); //} self.is_consistent() } }
use std::cell::RefCell;
use std::collections::HashMap;
use std::rc::Rc;

use super::{colors::Color, keybinds::*, output::Output, request::*, state::State, *};

// Brings in the individual handler functions (bind_command, echo_command, ...)
// from their submodules.
import!(
    bind, buffer, clear, clear_history, connect, echo, exit, join, list_buffers, part, quit,
    rehash
);

// Everything a command handler can fail with.
#[derive(Debug, PartialEq)]
pub enum Error {
    InvalidArgument(String),
    InvalidBuffer(String),
    AlreadyConnected,
    NotConnected,
    ClientError(irc::Error),
    ReloadConfig,
    EmptyInput,
    UnknownCommand(String),
    ForceExit, // why is this an error? NOTE(review): looks like flow control, not a failure
}

// What a successfully handled command produces.
pub enum Response {
    Nothing,
    Output(Output),
}

// Outcome of dispatching one line of input.
type CommandResult = Result<Response, Error>;
// A command handler: a plain fn pointer taking the invocation context.
type Command = fn(&Context) -> CommandResult;

// Everything a handler needs: shared app state, the outgoing request queue,
// the config, and the space-split arguments (command name excluded).
pub(crate) struct Context<'a> {
    pub(crate) state: Rc<State>,
    pub(crate) queue: Rc<MessageQueue<Request>>,
    pub(crate) config: Rc<RefCell<Config>>,
    pub(crate) parts: &'a [&'a str],
}

impl<'a> Context<'a> {
    // Enqueues a request for the main loop to process.
    pub fn request(&self, req: Request) {
        self.queue.enqueue(req)
    }

    // Queues `output` for buffer 0 — presumably the status buffer; verify
    // against the buffer list setup.
    pub fn status(&self, output: Output) {
        self.request(Request::Queue(0, output));
    }
}

// Maps "/command" names to their handlers and dispatches user input.
pub struct CommandProcessor {
    map: HashMap<&'static str, Command>,
    state: Rc<State>,
    queue: Rc<MessageQueue<Request>>,
}

impl CommandProcessor {
    // Builds the processor and registers every built-in command.
    pub fn new(state: Rc<State>, queue: Rc<MessageQueue<Request>>) -> Self {
        let mut this = CommandProcessor {
            map: HashMap::new(),
            state,
            queue,
        };
        this.map.insert("/echo", echo_command);
        this.map.insert("/exit", exit_command);
        this.map.insert("/connect", connect_command);
        this.map.insert("/quit", quit_command);
        this.map.insert("/clear", clear_command);
        this.map.insert("/join", join_command);
        this.map.insert("/part", part_command);
        this.map.insert("/buffer", buffer_command);
        this.map.insert("/buffers", list_buffers_command);
        this.map.insert("/bind", bind_command);
        this.map.insert("/rehash", rehash_command);
        this.map.insert("/clearhistory", clear_history_command);
        this
    }

    // Dispatches a line of input: empty input is an error, non-"/" input is
    // sent as a message to the current buffer, anything else is looked up as
    // a command.
    pub fn dispatch(&mut self, input: &str) -> CommandResult {
        if input.is_empty() {
            return Err(Error::EmptyInput);
        }
        if !input.starts_with('/') {
            return self.try_send_message(input);
        }
let input = input.to_string(); let mut input = input.split(' '); let query = input.next().unwrap(); if !self.map.contains_key(query) { return Err(Error::UnknownCommand(query.into())); } trace!("query: {}", query); let parts = input.collect::<Vec<_>>(); let func = self.map[&query]; let ctx = Context { state: Rc::clone(&self.state), queue: Rc::clone(&self.queue), config: Rc::clone(&self.state.config()), parts: &parts, }; func(&ctx) } fn try_send_message(&self, data: &str) -> CommandResult { use super::irc::IrcClient; let client = self.state.client().ok_or_else(|| Error::NotConnected)?; let (_index, buffer) = self.state.buffers().current(); if buffer.is_status() { return Err(Error::InvalidBuffer(buffer.name().into())); } client.privmsg(&buffer.name(), data); let nickname = client .state() .nickname() .expect("client should have a valid nickname"); let mut output = Output::stamp(); output.fg(Color::Green).add(nickname).add(" ").add(data); Ok(Response::Output(output.build())) } } fn assume_connected(ctx: &Context) -> Result<(), Error> { if ctx.state.client().is_none() { Err(Error::NotConnected)?; } Ok(()) } fn assume_args(ctx: &Context, msg: &'static str) -> Result<(), Error> { if ctx.parts.is_empty() { Err(Error::InvalidArgument(msg.into()))?; } Ok(()) }
//! This module defines the [`Lexer`] and the [`Token`] types, //! as well as utilities related to them (such as the [`Span`] type, and associated traits). use diagnostics::*; use std::fmt::{self, Debug, Display, Formatter, Write}; use std::ops::{Deref, DerefMut, Range}; use std::str; // //==========================================================================// // // SPAN & LOCATION // // //==========================================================================// /// Represents the position of a `Token` in source code. #[derive(PartialEq, Eq, Clone, Copy)] pub struct Span { start: usize, length: usize } /// Describes an object that has a [`Span`] in source code. pub trait HasSpan { /// Returns the [`Span`] of the object in source code. fn span(&self) -> Span; } impl HasSpan for Span { fn span(&self) -> Span { *self } } impl From<Range<Span>> for Span { fn from(range: Range<Span>) -> Self { let start = range.start.start; let length = range.start.length + range.end.length; Span { start, length } } } /// A spanned `T` object. pub struct Spanned<T> { span: Span, value: T } impl<T> Spanned<T> { /// Returns a new `Spanned` structure wrapping the given value. pub fn new(value: T, span: Span) -> Self { Spanned { value, span } } /// Consumes the `Spanned` structure, returning the object it contains. pub fn value(self) -> T { self.value } } impl<T> HasSpan for Spanned<T> { fn span(&self) -> Span { self.span } } impl<T> Deref for Spanned<T> { type Target = T; fn deref(&self) -> &T { &self.value } } impl<T> DerefMut for Spanned<T> { fn deref_mut(&mut self) -> &mut T { &mut self.value } } impl<T> AsRef<T> for Spanned<T> { fn as_ref(&self) -> &T { &self.value } } impl<T: Debug> Debug for Spanned<T> { fn fmt(&self, f: &mut Formatter) -> fmt::Result { write!(f, "[{}] {:?}", self.span, self.value) } } impl Span { /// Returns a new `Span` that starts at the given index, and with a length of 1. 
#[inline] pub fn with_start(start: usize) -> Self { Span { start: start, length: 1 } } /// Returns a new `Span` that starts at the given index, and with a specified length. #[inline] pub fn with_start_and_length(start: usize, length: usize) -> Self { Span { start: start, length: length } } /// Returns a new `Span` that starts at the given index, and ends at the other one. #[inline] pub fn with_start_and_end(start: usize, end: usize) -> Self { Span { start: start, length: end - start } } /// Returns the starting index of this `Span`. #[inline] pub fn start(&self) -> usize { self.start } /// Returns the length of this `Span`. #[inline] pub fn len(&self) -> usize { self.length } /// Returns whether this span is empty. #[inline] pub fn is_empty(&self) -> bool { self.length == 0 } /// Returns the starting index of this `Span`. #[inline] pub fn end(&self) -> usize { self.start + self.length } } impl Default for Span { /// Returns a `Span` with a null starting Span, and null length. #[inline] fn default() -> Self { Span { start: 0, length: 0 } } } impl From<usize> for Span { /// Constructs a `Span` from its starting position. #[inline] fn from(start: usize) -> Self { Span { start: start, length: 1 } } } impl From<(usize, usize)> for Span { /// Constructs a `Span` from a tuple containing its starting position and length. #[inline] fn from(tup: (usize, usize)) -> Self { Span { start: tup.0, length: tup.1 } } } impl From<Range<usize>> for Span { /// Constructs a `Span` from a range going from the start to the end. 
#[inline] fn from(range: Range<usize>) -> Self { Span { start: range.start, length: range.end - range.start } } } impl Display for Span { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if self.is_empty() { Display::fmt(&self.start(), f) } else { Display::fmt(&self.start(), f).and(f.write_char(':')).and(Display::fmt(&self.end(), f)) } } } impl Debug for Span { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { Display::fmt(self, f) } } // //==========================================================================// // // TOKEN // // //==========================================================================// /// Defines a primitive token of Styx source code. #[derive(PartialEq, Clone, Debug)] pub enum Token { /// End-Of-File. EOF(usize), /// Ignored token (comment, for example). Ignored, /// A documentation comment. Doc(Span, String, bool), /// An indentation token. Indent(usize, usize), /// '(' character. LParen(usize), /// ')' character. RParen(usize), /// '{' character. LBracket(usize), /// '}' character. RBracket(usize), /// '<' character. LAngle(usize), /// '>' character. RAngle(usize), /// ',' character. Comma(usize), /// ';' character. Semicolon(usize), /// '$' character. Dollar(usize), /// An identifier; as defined by the following regular expression: [a-zA-Z_]\w* | [^\d\w\s"'<>()]+. Ident(Span, String), /// An integer. Int(Span, i64), /// A floating-point number. Real(Span, f64), /// A literal string. Str(Span, String), /// A literal character. Char(Span, char) } impl Token { /// Returns whether or not the `Token` represents the end of the file. 
pub fn is_eof(&self) -> bool { if let &Token::EOF(_) = self { true } else { false } } } impl HasSpan for Token { fn span(&self) -> Span { use self::Token::*; match self { &Ignored => Span::default(), &Indent(start, end) => Span::with_start_and_end(start, end), &EOF(start) | &Comma(start) | &Semicolon(start) | &Dollar(start) | &LAngle(start) | &RAngle(start) | &LParen(start) | &RParen(start) | &LBracket(start) | &RBracket(start) => Span::with_start(start), &Doc(loc, _, _) | &Ident(loc, _) | &Int(loc, _) | &Real(loc, _) | &Str(loc, _) | &Char(loc, _) => loc } } } impl Default for Token { fn default() -> Self { Token::Ignored } } impl fmt::Display for Token { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { Token::Comma(_) => f.write_char(','), Token::Semicolon(_) => f.write_char(';'), Token::Dollar(_) => f.write_char('$'), Token::LParen(_) => f.write_char('('), Token::RParen(_) => f.write_char(')'), Token::LAngle(_) => f.write_char('<'), Token::RAngle(_) => f.write_char('>'), Token::LBracket(_) => f.write_char('{'), Token::RBracket(_) => f.write_char('}'), Token::Ident(_, ref ident) => f.write_str(ident), Token::Int(_, n) => fmt::Display::fmt(&n, f), Token::Real(_, n) => fmt::Display::fmt(&n, f), Token::Char(_, ch) => write!(f, "'{}'", ch), Token::Str(_, ref string) => { f.write_char('"').and(f.write_str(string)).and(f.write_char('"')) }, _ => Ok(()) } } } // //==========================================================================// // // LEXER // // //==========================================================================// /// Defines the result of a lexing operation. pub type LexResult = CompileResult<Token>; /// Defines the state of the lexer. pub struct State(usize, usize); /// Represents a `Lexer`, which transforms Styx source code /// into a `Token` stream. 
#[derive(Clone)] pub struct Lexer<'a> { byte_pos: usize, char_pos: usize, source: &'a str, reached_end: bool } impl<'a> From<&'a str> for Lexer<'a> { fn from(source: &'a str) -> Self { Lexer::new(source) } } impl<'a> Lexer<'a> { /// Creates a new `Lexer`, given its source `str`. #[inline] pub fn new(source: &'a str) -> Self { Lexer { source, byte_pos: 0, char_pos: 0, reached_end: false } } /// Creates a new `Lexer`, given its source `str` and its previous state. #[allow(needless_pass_by_value)] // Passing by value by design pub fn new_with_state(source: &'a str, state: State) -> Self { Lexer { source, byte_pos: state.0, char_pos: state.1, reached_end: false } } /// Gets a new state that allows to restore the `Lexer` to a previous state. #[inline] pub fn state(&self) -> State { State(self.byte_pos, self.char_pos) } /// Restores the `Lexer` to a previous state. #[inline] #[allow(needless_pass_by_value)] // Passing by value by design pub fn restore(&mut self, state: State) { self.byte_pos = state.0; self.char_pos = state.1; } /// Gets the source `str` of the `Lexer`. #[inline] pub fn source(&self) -> &'a str { self.source } /// Gets the position of the `Lexer` in its input string. #[inline] pub fn position(&self) -> usize { self.char_pos } /// Attempts to get the character following the given byte by the given offset, /// and returns a tuple containg the parsed character and the position of the next one. #[inline] pub fn following(src: &'a str, byte: usize, offset: usize) -> Option<(char, usize)> { match src.get(byte..) { Some(slice) => { let mut remaining = offset; let mut chars = slice.chars(); let mut bytes = 0; while remaining > 0 { match chars.next() { Some(ch) => bytes += ch.len_utf8(), None => return None } remaining -= 1; } match chars.next() { Some(ch) => Some((ch, ch.len_utf8() + byte + bytes)), None => None } }, None => None } } /// Returns whether or not the lexer has reached the end of the file. 
#[inline] pub fn at_end(&self) -> bool { self.byte_pos >= self.source.len() } /// Attempts to lex a `Token` from the input code. /// If an error is encountered, it will be added to the diagnostics. If the error is fatal, /// an error will be returned, instead of a token. #[allow(needless_return, while_let_loop, cyclomatic_complexity)] pub fn lex(&mut self, diagnostics: &mut DiagnosticBag) -> LexResult { let src = self.source; let mut startc = self.char_pos; let mut startb = self.byte_pos; let mut pos = startc; let mut byte = startb; /// Advances the current character in the peekable character stream, /// and on the `pos` variable. macro_rules! next { ($e: expr) => {{ match Self::following(src, byte, 0) { Some((ch, b)) => { byte = b; pos += 1; ch }, _ => $e } }}; () => { next!(return Err(Diagnostic::unexpected_eof(span!(startc => pos)))) }; } /// Updates the current position (and returns). macro_rules! update { () => {{ self.char_pos = pos; self.byte_pos = byte; }}; ($e: expr) => {{ self.char_pos = pos; self.byte_pos = byte; return $e; }}; } macro_rules! peek { () => { match Self::following(src, byte, 0) { Some((ch, _)) => ch, _ => return Err(Diagnostic::unexpected_eof(span!(startc => pos))) } }; ($e: expr) => { match Self::following(src, byte, 0) { Some((ch, _)) => ch, _ => $e } }; } // skip whitespaces loop { { if !peek!(return Ok(Token::EOF(pos))).is_whitespace() { break; } } next!(); } startc = pos; startb = byte; let result = match next!(update!(Ok(Token::EOF(startc)))) { '(' => Ok(Token::LParen(startc)), ')' => Ok(Token::RParen(startc)), '{' => Ok(Token::LBracket(startc)), '}' => Ok(Token::RBracket(startc)), ',' => Ok(Token::Comma(startc)), ';' => Ok(Token::Semicolon(startc)), '<' => Ok(Token::LAngle(startc)), '>' => Ok(Token::RAngle(startc)), '$' => Ok(Token::Dollar(startc)), '0' ... '9' => { // Parse number literal let mut is_float = false; loop { let ch = peek!(break); // Parse float. if !ch.is_digit(16) { if ch == '.' 
{ is_float = true; } else { break; } } next!(); } let span = span!(startc => pos); Ok(match src[startb..byte].parse() { Ok(nbr) => if is_float { Token::Real(span, nbr) } else { Token::Int(span, nbr as i64) }, _ => { diagnostics.report(Diagnostic::invalid_number(span)); Token::Real(span, ::std::f64::NAN) } }) }, 'a' ... 'z' | 'A' ... 'Z' | '_' => { // Parse (word-like) identifier loop { let ch = peek!(break); // A word-like identifier only contains underscores and alphanumeric characters. if ch != '\u{a0}' && ch != '_' && !ch.is_alphanumeric() { break; } next!(); } Ok(Token::Ident(span!(startc => pos), src[startb..byte].to_string())) }, terminator @ '\'' | terminator @ '"' => { // Parse string / char literal let mut escaping = false; let mut string = String::new(); loop { let ch = peek!(); next!(); if escaping { escaping = false; string.push(match ch { 'n' => '\n', 't' => '\t', 'r' => '\r', '0' => '\0', '"' => '"' , '\\' => '\\', _ => { diagnostics.report(Diagnostic::unknown_escape_char(span!(pos - 2 => pos))); '\0' } }); } else if ch == '\\' { escaping = true; } else if ch == terminator { break; } else { string.push(ch); } } let location = span!(startc => pos); if terminator == '"' { Ok(Token::Str(location, string)) } else { if string.len() != 1 { diagnostics.report(Diagnostic::invalid_char(location)); } Ok(Token::Char(location, string.chars().next().unwrap_or('#'))) } }, ch => { // Parse (operator-like) identifier // But first, ensure this is no comment if ch == '/' { let next = peek!(); if next == '/' { // Single-line comment / doc comment. next!(); let next = next!(update!(Ok(Token::EOF(startc)))); loop { if next!(break) == '\n' { break; } } update!(match next { '/' | '!' => Ok(Token::Doc(span!(startc => pos), src[startb..byte].to_string(), ch == '!')), _ => Ok(Token::Ignored) }); } else if next == '*' { // Multi-line comment. 
loop { if next!() == '/' { break; } } update!(Ok(Token::Ignored)); } } loop { let ch = peek!(break); match ch { // Parenthesis / comma / other exception: break. '(' | ')' | ',' | ';' | '{' | '}' | '"' | '\'' => break, // Whitespace (except non-breaking space): break. ch if ch.is_whitespace() && ch != '\u{a0}' => break, // Alphanumeric character (which corresponds to a word-like identifier): break. ch if ch.is_alphanumeric() => break, // Anything else: add it to the identifier. _ => { next!(); } } } Ok(Token::Ident(span!(startc => pos), src[startb..byte].to_string())) } }; // update stored position, and return update!(result); } /// Consumes the `Lexer`, producing a vector containing all tokens of the source input. pub fn tokenize(mut self, diagnostics: &mut DiagnosticBag) -> CompileResult<Vec<Token>> { let mut vec = Vec::new(); loop { match self.lex(diagnostics) { Ok(Token::EOF(_)) => break, Ok(Token::Ignored) => continue, Ok(token) => vec.push(token), Err(error) => return Err(error) } } Ok(vec) } } #[cfg(test)] describe! lexer { describe! failures { it "fails immediately on invalid characters" { // fn should_fail(input: &str) { // assert!(Lexer::new(input).tokenize(&mut DiagnosticBag::default()).is_err()); // } // // fn should_not_fail(input: &str) { // assert!(Lexer::new(input).tokenize(&mut DiagnosticBag::default()).is_ok()); // } // unsafe { // // NOTE: This is currently not testable, as the lexer only accepts str's, // // and Rust str's are always valid. When going the unsafe route, // // the standard library will fail without our help. 
// should_fail(str::from_utf8_unchecked(&[ b'h', b'e', 0xfe ])); // should_fail(str::from_utf8_unchecked(&[ b'h', b'e', 0xc3, 0x28 ])); // should_not_fail(str::from_utf8_unchecked(&[ b'h', b'e', 0xc3, 0xb1 ])); // } } it "fails lazily on invalid tokens" { fn should_fail(input: &str) { let mut diags = DiagnosticBag::default(); assert!(Lexer::new(input).tokenize(&mut diags).is_ok()); assert!(diags.has_error()); } fn should_not_fail(input: &str) { let mut diags = DiagnosticBag::default(); assert!(Lexer::new(input).tokenize(&mut diags).is_ok()); assert!(!diags.has_error()); } should_fail("'single-quoted string'"); should_fail("'aa'"); should_fail("'\\y'"); should_not_fail("'\\n' '\\t'"); should_not_fail("'a'"); should_fail("1.11.1"); should_fail("0.1.1"); should_not_fail("1.11"); should_not_fail("0.1111"); } } before_each { fn tokenize(input: &str) -> Vec<Token> { Lexer::new(input).tokenize(&mut DiagnosticBag::default()).unwrap() } fn lex(input: &str) -> Vec<String> { tokenize(input).iter().map(|t| format!("{}", t)).collect() } } it "handles unicode characters" { assert_eq!(tokenize("👍🏼 👎🏼").len(), 2); } it "separates tokens correctly." { assert_eq!(lex("hello world everyone"), [ "hello", "world", "everyone" ]); assert_eq!(lex("hello+world+everyone"), [ "hello", "+", "world", "+", "everyone" ]); assert_eq!(lex("hello++world++everyone"), [ "hello", "++", "world", "++", "everyone" ]); assert_eq!(lex("+()<>++*$*-- $+++"), [ "+", "(", ")", "<", ">", "++*$*--", "$", "+++" ]); } it "does not separate tokens that belong together." { assert_eq!(lex("and he said \"foo bar\"!"), [ "and", "he", "said", "\"foo bar\"", "!" ]); assert_eq!(lex("non breaking space"), [ "non breaking space" ]); } }
use crate::prelude::*;

pub mod sharable {
    //! GC-heap ("sharable") string representation.
    use crate::prelude::*;

    /// A handle to a string allocated on the comet GC heap.
    #[derive(Clone, From, Deref, DerefMut, Debug, Collectable, Finalize, Send, Sync, Unpin)]
    #[from(forward)]
    pub struct String(pub Gc<ConcreteString>);

    /// The concrete comet allocation backing a sharable `String`.
    pub type ConcreteString = comet::alloc::string::String<Immix>;

    impl Alloc<String> for ConcreteString {
        // Moves this string onto the GC heap (allocated in new space).
        fn alloc(self, ctx: Context) -> String {
            String(ctx.mutator().allocate(self, AllocationSpace::New).into())
        }
    }

    unsafe impl Trace for String {
        // Delegates tracing to the inner `Gc` handle.
        fn trace(&mut self, vis: &mut dyn Visitor) {
            self.0.trace(vis)
        }
    }
}

mod sendable {
    //! Plain-owned ("sendable") string representation, usable across threads
    //! and (de)serializable.
    use crate::prelude::*;

    /// A plain owned string with no GC involvement.
    #[derive(Clone, From, Send, Serialize, Deserialize)]
    #[from(forward)]
    pub struct String(pub ConcreteString);

    pub type ConcreteString = Box<str>;
}

impl DynSharable for sharable::String {
    type T = sendable::String;

    // Copies the GC string's contents into a plain owned string.
    // NOTE(review): `ctx` is unused here — confirm the trait requires it.
    fn into_sendable(&self, ctx: Context) -> Self::T {
        self.0.to_string().into()
    }
}

impl DynSendable for sendable::String {
    type T = sharable::String;

    // Re-allocates the owned string on the GC heap of `ctx`.
    fn into_sharable(&self, ctx: Context) -> Self::T {
        String::from_str(self.0.as_ref(), ctx)
    }
}

pub use sharable::String;

// Public String API. NOTE(review): the `#[rewrite]` proc-macro presumably
// post-processes these by-value `self` signatures — see its definition.
#[rewrite]
impl String {
    /// Creates an empty GC string.
    pub fn new(ctx: Context) -> String {
        sharable::ConcreteString::new(ctx.mutator()).alloc(ctx)
    }

    /// Creates an empty GC string with room for `capacity` bytes.
    pub fn with_capacity(capacity: usize, ctx: Context) -> String {
        sharable::ConcreteString::with_capacity(ctx.mutator(), capacity).alloc(ctx)
    }

    /// Appends a single character in place.
    pub fn push_char(mut self, ch: char, ctx: Context) {
        self.0.push(ctx.mutator(), ch)
    }

    /// Appends a string slice in place.
    pub fn push_str(mut self, s: &str, ctx: Context) {
        self.0.push_str(ctx.mutator(), s)
    }

    /// Builds a GC string by copying `s` (pre-sizing to `s.len()` bytes).
    pub fn from_str(s: &str, ctx: Context) -> String {
        let mut new = sharable::ConcreteString::with_capacity(ctx.mutator(), s.len());
        new.push_str(ctx.mutator(), s);
        new.alloc(ctx)
    }

    /// Removes and returns the character at index `idx`
    /// (byte vs. char semantics depend on the comet string — confirm there).
    pub fn remove(mut self, idx: u32, _ctx: Context) -> char {
        self.0.remove(idx as usize)
    }

    /// Inserts `ch` at index `idx`.
    pub fn insert_char(mut self, idx: u32, ch: char, ctx: Context) {
        self.0.insert(ctx.mutator(), idx as usize, ch)
    }

    /// Returns `true` if the string holds no data.
    pub fn is_empty(mut self, _ctx: Context) -> bool {
        self.0.is_empty()
    }

    /// Splits the string at `at`, returning the tail as a new GC string.
    pub fn split_off(mut self, at: u32, ctx: Context) -> String {
        self.0.split_off(ctx.mutator(), at as usize).alloc(ctx)
    }

    /// Empties the string in place.
    pub fn clear(mut self, _ctx: Context) {
        self.0.clear()
    }

    /// Length (cast down to u32 — assumes strings stay below 4 GiB).
    pub fn len(self, _ctx: Context) -> u32 {
        self.0.len() as u32
    }

    /// Renders `i` in decimal as a new GC string.
    pub fn from_i32(i: i32, ctx: Context) -> String {
        let mut new = String::new(ctx);
        new.0.push_str(ctx.mutator(), &i.to_string());
        new
    }

    /// Equality via the inner strings' `eq`. NOTE(review): `ctx` is unused.
    pub fn eq(self, other: String, ctx: Context) -> bool {
        self.0.eq(&other.0)
    }

    /// Returns a new GC string that is `self` followed by `other`.
    pub fn concat(self, other: String, ctx: Context) -> String {
        let mut new = String::new(ctx);
        new.0.push_str(ctx.mutator(), &self.0);
        new.0.push_str(ctx.mutator(), &other.0);
        new
    }
}
use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering}; use std::sync::Arc; use std::thread; use std::thread::JoinHandle; use rayon::prelude::*; use crate::item::{ItemPool, MatchedItem}; use crate::spinlock::SpinLock; use crate::{CaseMatching, MatchEngineFactory}; use defer_drop::DeferDrop; use std::rc::Rc; //============================================================================== pub struct MatcherControl { stopped: Arc<AtomicBool>, processed: Arc<AtomicUsize>, matched: Arc<AtomicUsize>, items: Arc<SpinLock<Vec<MatchedItem>>>, thread_matcher: JoinHandle<()>, } impl MatcherControl { pub fn get_num_processed(&self) -> usize { self.processed.load(Ordering::Relaxed) } pub fn get_num_matched(&self) -> usize { self.matched.load(Ordering::Relaxed) } pub fn kill(self) { self.stopped.store(true, Ordering::Relaxed); let _ = self.thread_matcher.join(); } pub fn stopped(&self) -> bool { self.stopped.load(Ordering::Relaxed) } pub fn into_items(self) -> Arc<SpinLock<Vec<MatchedItem>>> { while !self.stopped.load(Ordering::Relaxed) {} self.items } } //============================================================================== pub struct Matcher { engine_factory: Rc<dyn MatchEngineFactory>, case_matching: CaseMatching, } impl Matcher { pub fn builder(engine_factory: Rc<dyn MatchEngineFactory>) -> Self { Self { engine_factory, case_matching: CaseMatching::default(), } } pub fn case(mut self, case_matching: CaseMatching) -> Self { self.case_matching = case_matching; self } pub fn build(self) -> Self { self } pub fn run<C>(&self, query: &str, item_pool: Arc<DeferDrop<ItemPool>>, callback: C) -> MatcherControl where C: Fn(Arc<SpinLock<Vec<MatchedItem>>>) + Send + 'static, { let matcher_engine = self.engine_factory.create_engine_with_case(query, self.case_matching); debug!("engine: {}", matcher_engine); let stopped = Arc::new(AtomicBool::new(false)); let stopped_clone = stopped.clone(); let processed = Arc::new(AtomicUsize::new(0)); let processed_clone = processed.clone(); 
let matched = Arc::new(AtomicUsize::new(0)); let matched_clone = matched.clone(); let matched_items = Arc::new(SpinLock::new(Vec::new())); let matched_items_clone = matched_items.clone(); let thread_matcher = thread::spawn(move || { let num_taken = item_pool.num_taken(); let items = item_pool.take(); // 1. use rayon for parallel // 2. return Err to skip iteration // check https://doc.rust-lang.org/std/result/enum.Result.html#method.from_iter trace!("matcher start, total: {}", items.len()); let result: Result<Vec<_>, _> = items .into_par_iter() .enumerate() .filter_map(|(index, item)| { processed.fetch_add(1, Ordering::Relaxed); if stopped.load(Ordering::Relaxed) { Some(Err("matcher killed")) } else if let Some(match_result) = matcher_engine.match_item(item.clone()) { matched.fetch_add(1, Ordering::Relaxed); Some(Ok(MatchedItem { item: item.clone(), rank: match_result.rank, matched_range: Some(match_result.matched_range), item_idx: (num_taken + index) as u32, })) } else { None } }) .collect(); if let Ok(items) = result { let mut pool = matched_items.lock(); *pool = items; trace!("matcher stop, total matched: {}", pool.len()); } callback(matched_items.clone()); stopped.store(true, Ordering::Relaxed); }); MatcherControl { stopped: stopped_clone, matched: matched_clone, processed: processed_clone, items: matched_items_clone, thread_matcher, } } }
use super::super::{head, navbar, scripts}; use maud::{DOCTYPE, PreEscaped, Markup}; pub fn id(id: &str) -> Markup { html! { (DOCTYPE) html .no-js lang="en" dir="ltr" { (head()) body { (navbar()) div .grid-container { div .grid-x .grid-padding-x { div class="large-12 cell" { h1 "Create a Build" } } form { div .grid-x .grid-padding-x { div class="large-2 medium-2 cell text-right" { "Primary weapon:" } div class="large-2 medium-2 cell" #weapon-primary {} } div .grid-x .grid-padding-x { div class="large-2 medium-2 cell text-right" { "Secondary weapon:" } div class="large-2 medium-2 cell" #weapon-secondary {} } hr / div .grid-x .grid-padding-x { div class="large-2 medium-2 cell text-right" { "Basic ability:" } div class="large-2 medium-2 cell" #basic-ability {} } @for i in 1..6 { div .grid-x .grid-padding-x { div class="large-2 medium-2 cell text-right" { "Active " (i) ":" } div class="large-4 medium-4 cell" id={"active-"(i)} {} } } hr / @for i in 1..6 { div .grid-x .grid-padding-x { div class="large-2 medium-2 cell text-right" { "Passive " (i) ":" } div class="large-4 medium-4 cell" id={"passive-"(i)} {} } } } } (scripts()) script src="/static/js/BigInteger.js" {} script src="/static/js/weapons.js" {} script src="/static/js/build/id.js" {} script { (PreEscaped("$(document).foundation()")) } script { (PreEscaped("$(function() { parse_index(\"")) (id) (PreEscaped("\"); });")) } } } } }
use byteorder::{ByteOrder, LittleEndian}; #[derive(Debug, Clone)] pub struct ByteStream { pub source: Vec<u8>, pub(crate) current: usize, } impl ByteStream { pub fn new(buffer: Vec<u8>) -> ByteStream { ByteStream { source: buffer, current: 0, } } pub fn get_short(&mut self) -> Vec<u8> { let mut x = vec![0; 2]; x[..2].clone_from_slice(&self.source[self.current..self.current + 2]); self.current += 2; x } pub fn read_short(&mut self) -> u16 { LittleEndian::read_u16(&self.get_short()) } }
#![deny( dead_code, nonstandard_style, unused_imports, unused_mut, unused_variables, unused_unsafe, unreachable_patterns )] #[cfg(windows)] mod exception_handling; #[cfg(windows)] pub use self::exception_handling::*;
#[tokio::test] #[cfg(not(feature = "celo"))] async fn can_stack_middlewares() { use ethers_core::{types::TransactionRequest, utils::Ganache}; use ethers_middleware::{ gas_oracle::{GasCategory, GasNow}, Client, GasOracleMiddleware, NonceManager, }; use ethers_providers::{Http, Middleware, Provider}; use ethers_signers::Wallet; use std::convert::TryFrom; let ganache = Ganache::new().block_time(5u64).spawn(); let gas_oracle = GasNow::new().category(GasCategory::SafeLow); let signer: Wallet = ganache.keys()[0].clone().into(); let address = signer.address(); // the base provider let provider = Provider::<Http>::try_from(ganache.endpoint()).unwrap(); let provider_clone = provider.clone(); // The gas price middleware MUST be below the signing middleware for things to work let provider = GasOracleMiddleware::new(provider, gas_oracle); // The signing middleware signs txs let provider = Client::new(provider, signer); // The nonce manager middleware MUST be above the signing middleware so that it overrides // the nonce and the signer does not make any eth_getTransaction count calls let provider = NonceManager::new(provider, address); let tx = TransactionRequest::new(); let mut tx_hash = None; for _ in 0..10 { tx_hash = Some(provider.send_transaction(tx.clone(), None).await.unwrap()); dbg!( provider .get_transaction(tx_hash.unwrap()) .await .unwrap() .unwrap() .gas_price ); } let receipt = provider_clone .pending_transaction(tx_hash.unwrap()) .await .unwrap(); dbg!(receipt); }
use syn::spanned::Spanned;

/// How a property's getter is specified in the attribute.
#[derive(Debug)]
pub enum PropertyGet {
    /// Bare `get` flag: use the default getter.
    Default,
    /// `get = "path"`: custom getter at the given path.
    Owned(syn::Path),
    /// `get_ref = "path"`: custom by-reference getter at the given path.
    Ref(syn::Path),
}

/// How a property's setter is specified in the attribute.
#[derive(Debug)]
pub enum PropertySet {
    /// Bare `set` flag: use the default setter.
    Default,
    /// `set = "path"`: custom setter at the given path.
    WithPath(syn::Path),
}

/// Fully parsed arguments of a property attribute.
pub struct PropertyAttrArgs {
    pub ty: syn::Type,
    pub path: Option<String>,
    pub default: Option<syn::Lit>,
    pub hint: Option<syn::Path>,
    pub get: Option<PropertyGet>,
    pub set: Option<PropertySet>,
    pub no_editor: bool,
}

/// Incremental builder for [`PropertyAttrArgs`]: arguments are fed in one at
/// a time via [`add_pair`](Self::add_pair) / [`add_path`](Self::add_path),
/// and duplicate arguments are rejected with a spanned error.
pub struct PropertyAttrArgsBuilder {
    ty: syn::Type,
    path: Option<String>,
    default: Option<syn::Lit>,
    hint: Option<syn::Path>,
    get: Option<PropertyGet>,
    set: Option<PropertySet>,
    no_editor: bool,
}

impl PropertyAttrArgsBuilder {
    /// Starts a builder for a property of the given type; all options unset.
    pub fn new(ty: &syn::Type) -> Self {
        Self {
            ty: ty.clone(),
            path: None,
            default: None,
            hint: None,
            get: None,
            set: None,
            no_editor: false,
        }
    }

    /// Consumes one `name = value` argument (e.g. `path = "..."`).
    ///
    /// Errors on: unknown names, non-string values where a string is
    /// required, values that do not parse as a Rust path (for `hint`, `get`,
    /// `get_ref`, `set`), and duplicated arguments.
    pub fn add_pair(&mut self, pair: &syn::MetaNameValue) -> Result<(), syn::Error> {
        let path_span = pair.lit.span();
        // Shared error for string values that must parse as a Rust path.
        let invalid_value_path = |_| {
            syn::Error::new(
                path_span,
                "Unexpected input, expected a double quoted string: \"path::to::something\"",
            )
        };

        let name = pair
            .path
            .get_ident()
            .expect("should be single identifier")
            .to_string();
        match name.as_str() {
            "default" => {
                // Any literal is accepted; only duplication is an error.
                if let Some(old) = self.default.replace(pair.lit.clone()) {
                    return Err(syn::Error::new(
                        pair.span(),
                        format!(
                            "there is already a 'default' attribute with value: {:?}",
                            old
                        ),
                    ));
                }
            }
            "path" => {
                let string = if let syn::Lit::Str(lit_str) = &pair.lit {
                    lit_str.value()
                } else {
                    return Err(syn::Error::new(
                        pair.span(),
                        "'path' value is not a string literal",
                    ));
                };

                if let Some(old) = self.path.replace(string) {
                    return Err(syn::Error::new(
                        pair.span(),
                        format!("there is already a 'path' attribute with value: {:?}", old),
                    ));
                }
            }
            "hint" => {
                let string = if let syn::Lit::Str(lit_str) = &pair.lit {
                    lit_str.value()
                } else {
                    return Err(syn::Error::new(
                        pair.span(),
                        "'hint' value is not a string literal",
                    ));
                };
                // The hint string must itself parse as a Rust path.
                let path =
                    syn::parse_str::<syn::Path>(string.as_str()).map_err(invalid_value_path)?;

                if let Some(old) = self.hint.replace(path) {
                    return Err(syn::Error::new(
                        pair.span(),
                        format!("there is already a 'hint' attribute with value: {:?}", old),
                    ));
                }
            }
            "get" => {
                let string = if let syn::Lit::Str(lit_str) = &pair.lit {
                    lit_str.value()
                } else {
                    return Err(syn::Error::new(
                        pair.span(),
                        "'get' value is not a string literal",
                    ));
                };
                let path =
                    syn::parse_str::<syn::Path>(string.as_str()).map_err(invalid_value_path)?;
                let get = PropertyGet::Owned(path);

                // Duplicate check is shared with `get_ref` and bare `get`:
                // all three write the same `self.get` slot.
                if let Some(old) = self.get.replace(get) {
                    return Err(syn::Error::new(
                        pair.span(),
                        format!("there is already a 'get' attribute with value: {:?}", old),
                    ));
                }
            }
            "get_ref" => {
                let string = if let syn::Lit::Str(lit_str) = &pair.lit {
                    lit_str.value()
                } else {
                    return Err(syn::Error::new(
                        pair.span(),
                        "'get_ref' value is not a string literal",
                    ));
                };
                let path =
                    syn::parse_str::<syn::Path>(string.as_str()).map_err(invalid_value_path)?;
                let get_ref = PropertyGet::Ref(path);

                if let Some(old) = self.get.replace(get_ref) {
                    return Err(syn::Error::new(
                        pair.span(),
                        format!(
                            "there is already a 'get_ref' attribute with value: {:?}",
                            old
                        ),
                    ));
                }
            }
            "set" => {
                let string = if let syn::Lit::Str(lit_str) = &pair.lit {
                    lit_str.value()
                } else {
                    return Err(syn::Error::new(
                        pair.span(),
                        "'set' value is not a string literal",
                    ));
                };
                let path =
                    syn::parse_str::<syn::Path>(string.as_str()).map_err(invalid_value_path)?;
                let set = PropertySet::WithPath(path);

                if let Some(old) = self.set.replace(set) {
                    return Err(syn::Error::new(
                        pair.span(),
                        format!("there is already a 'set' attribute with value: {:?}", old),
                    ));
                }
            }
            _ => {
                return Err(syn::Error::new(
                    pair.span(),
                    format!("unexpected argument: {}", &name),
                ))
            }
        }

        Ok(())
    }

    /// Consumes one bare (valueless) flag argument: `no_editor`, `get`, or
    /// `set`. Errors on unknown flags and on duplicated `get`/`set`.
    pub fn add_path(&mut self, path: &syn::Path) -> Result<(), syn::Error> {
        if path.is_ident("no_editor") {
            self.no_editor = true;
        } else if path.is_ident("get") {
            if let Some(get) = self.get.replace(PropertyGet::Default) {
                return Err(syn::Error::new(
                    path.span(),
                    format!("there is already a 'get' attribute with value: {:?}", get),
                ));
            }
        } else if path.is_ident("set") {
            if let Some(set) = self.set.replace(PropertySet::Default) {
                return Err(syn::Error::new(
                    path.span(),
                    format!("there is already a 'set' attribute with value: {:?}", set),
                ));
            }
        } else {
            return Err(syn::Error::new(
                path.span(),
                format!("unexpected argument: {:?}", path.get_ident()),
            ));
        }

        Ok(())
    }
}

impl PropertyAttrArgsBuilder {
    /// Finalizes the builder into the parsed argument set.
    pub fn done(self) -> PropertyAttrArgs {
        PropertyAttrArgs {
            ty: self.ty,
            path: self.path,
            default: self.default,
            hint: self.hint,
            get: self.get,
            set: self.set,
            no_editor: self.no_editor,
        }
    }
}
use exonum::storage::{Fork, MapIndex, Snapshot}; use currency::assets::AssetId; use currency::offers::OpenOffers; use currency::SERVICE_NAME; /// The schema for accessing wallets data. pub struct Schema<S>(pub S) where S: AsRef<Snapshot>; impl<S> Schema<S> where S: AsRef<Snapshot>, { /// Internal `MapIndex` with immutable access. pub fn index(self) -> MapIndex<S, AssetId, OpenOffers> { let key = SERVICE_NAME.to_string() + ".open_offers"; MapIndex::new(key, self.0) } /// Fetch state for the specified wallet from the snapshot. pub fn fetch(self, asset_id: &AssetId) -> OpenOffers { self.index() .get(asset_id) .unwrap_or_else(|| OpenOffers::new_open_offers() ) } } impl<'a> Schema<&'a mut Fork> { /// Internal `MapIndex` with mutable access. pub fn index_mut(&mut self) -> MapIndex<&mut Fork, AssetId, OpenOffers> { let key = SERVICE_NAME.to_string() + ".open_offers"; MapIndex::new(key, &mut *self.0) } /// Store the new state for a wallet in the database. pub fn store(&mut self, asset_id: &AssetId, open_offers: OpenOffers) { match (open_offers.bids().len(), open_offers.asks().len()) { (0, 0) => self.remove(asset_id), (_, _) => self.index_mut().put(asset_id, open_offers), }; } /// Remove wallet state from the database. pub fn remove(&mut self, asset_id: &AssetId) { self.index_mut().remove(asset_id); } }
//! Our custom result types. use std::result; use error::Error; pub type Result<T> = result::Result<T, Error>;
/*!
```rudra-poc
[target]
crate = "byte_struct"
version = "0.6.0"

[report]
issue_url = "https://github.com/wwylele/byte-struct-rs/issues/1"
issue_date = 2021-03-01
rustsec_url = "https://github.com/RustSec/advisory-db/pull/796"
rustsec_id = "RUSTSEC-2021-0032"

[[bugs]]
analyzer = "UnsafeDataflow"
bug_class = "PanicSafety"
rudra_report_locations = ["src/lib.rs:410:13: 422:14"]
```
!*/
#![forbid(unsafe_code)]

use byte_struct::*;

// Proof-of-concept: this program deliberately panics inside `read_bytes` to
// exercise the panic-safety bug reported at the issue_url above (in the
// target crate's array deserialization path; see rudra_report_locations).
// Do not "fix" the panic — it is the point of the PoC.

// Custom type that panics when reading bytes.
struct CustomByteStruct(u8);

impl ByteStructLen for CustomByteStruct {
    const BYTE_LEN: usize = 1;
}

impl ByteStruct for CustomByteStruct {
    // Intentionally a no-op: only deserialization matters for this PoC.
    fn write_bytes(&self, bytes: &mut [u8]) { }
    // Always panics, simulating a failing element-level read.
    fn read_bytes(bytes: &[u8]) -> Self {
        panic!("Panic when reading")
    }
}

impl Drop for CustomByteStruct {
    // Printing from `drop` makes any drop of a not-fully-initialized value
    // observable in the PoC's output.
    fn drop(&mut self) {
        println!("Dropping {}", self.0)
    }
}

// Wrapper around the type above so we can use the
// `ByteStructUnspecifiedByteOrder for [T; $x]` impl.
#[derive(ByteStruct)]
#[byte_struct_le]
struct ArrayOfCustomByteStruct {
    custom_structs: [CustomByteStruct; 2]
}

fn main() {
    let bytes = [0x01, 0x02];
    // Triggers the panic in `CustomByteStruct::read_bytes` while the derived
    // array deserializer is mid-construction.
    let deserialized = ArrayOfCustomByteStruct::read_bytes(&bytes[..]);
}
use std::cmp::Reverse;
use std::collections::BinaryHeap;
use std::u32;

/// Address-indexed memory. Slot 0 holds the program; freed slots are kept
/// as `None` and their addresses recycled through a min-heap so `alloc`
/// reuses the lowest free address first.
pub struct Mem {
    // One slot per allocated address; `None` marks a freed slot.
    data: Vec<Option<Box<[u32]>>>,
    // Min-heap (via `Reverse`) of freed addresses available for reuse.
    free_pq: BinaryHeap<Reverse<u32>>,
}

impl Mem {
    /// Creates memory with the program stored at address 0.
    pub fn init(prog: Vec<u32>) -> Self {
        Mem {
            data: vec![Some(prog.into_boxed_slice())],
            free_pq: BinaryHeap::new(),
        }
    }

    /// Copies the block at `addr` into slot 0 (the program slot).
    /// Copying from address 0 itself is a no-op.
    ///
    /// Panics if `addr` was freed or never allocated.
    pub fn copy_to_zero(&mut self, addr: u32) {
        if addr != 0 {
            self.data[0] = match self.data.get(addr as usize) {
                Some(Some(v)) => Some(v.clone()),
                Some(None) => panic!("copy_to_zero: attempt to copy from freed address {}", addr),
                None => panic!(
                    "copy_to_zero: attempt to copy from unallocated address {}",
                    addr
                ),
            }
        }
    }

    /// Number of slots ever allocated, including freed slots and slot 0.
    pub fn len(&self) -> u32 {
        self.data.len() as u32
    }

    /// Allocates a zero-initialized block of `size` words and returns its
    /// address, preferring the lowest previously-freed address.
    ///
    /// Panics when the 32-bit address space is exhausted.
    pub fn alloc(&mut self, size: u32) -> u32 {
        match self.free_pq.pop() {
            // Reuse the smallest freed address if one exists.
            Some(Reverse(addr)) => {
                let v = vec![0; size as usize];
                self.data[addr as usize] = Some(v.into_boxed_slice());
                addr
            }
            // Otherwise grow the slot vector by one.
            None => {
                if self.len() == u32::MAX {
                    panic!("alloc: memory exhausted");
                }
                let v = vec![0; size as usize];
                self.data.push(Some(v.into_boxed_slice()));
                self.len() - 1
            }
        }
    }

    /// Frees the block at `addr` and makes the address reusable.
    ///
    /// Panics on address 0 (the program), double frees, and unallocated
    /// addresses.
    pub fn free(&mut self, addr: u32) {
        if addr == 0 {
            panic!("free: tried to free memory at program location (0)");
        }
        match self.data.get_mut(addr as usize) {
            Some(v @ Some(_)) => {
                *v = None;
                self.free_pq.push(Reverse(addr));
            }
            Some(None) => panic!(
                "free: attempt to free address {} which is already free",
                addr
            ),
            None => panic!("free: attempt to free unallocated address {}", addr),
        }
    }

    /// Reads the word at `offset` within the block at `addr`.
    ///
    /// Panics on out-of-bounds offsets and freed/unallocated addresses.
    pub fn read(&self, addr: u32, offset: u32) -> &u32 {
        match self.data.get(addr as usize) {
            Some(Some(v)) => match v.get(offset as usize) {
                Some(val) => val,
                None => panic!(
                    "read: offset {} is out of bounds for address {} (len: {})",
                    offset,
                    addr,
                    v.len()
                ),
            },
            Some(None) => panic!("read: address {} has been deallocated", addr),
            None => panic!("read: address {} has not been allocated", addr),
        }
    }

    /// Writes `val` at `offset` within the block at `addr`.
    ///
    /// Panics on out-of-bounds offsets and freed/unallocated addresses.
    pub fn write(&mut self, addr: u32, offset: u32, val: u32) {
        match self.data.get_mut(addr as usize) {
            Some(Some(v)) => {
                if (offset as usize) < v.len() {
                    v[offset as usize] = val;
                } else {
panic!(
    "write: offset {} is out of bounds for address {} (len: {})",
    offset,
    addr,
    v.len()
);
}
}
Some(None) => panic!("write: address {} has been deallocated", addr),
None => panic!("write: address {} has not been allocated", addr),
}
}
}

#[cfg(test)]
mod tests {
    use super::*;

    // Fresh addresses are handed out sequentially, after program slot 0.
    #[test]
    fn alloc() {
        let mut mem = Mem::init(vec![]);
        let m0 = mem.alloc(10);
        let m1 = mem.alloc(20);
        assert_eq!(mem.len(), 3);
        assert_eq!(m0, 1);
        assert_eq!(m1, 2);
    }

    // Reading from a freed address panics (the panic comes from `read`,
    // despite this test's name).
    #[test]
    #[should_panic(expected = "1 has been deallocated")]
    fn free_err() {
        let mut mem = Mem::init(vec![]);
        let m0 = mem.alloc(10);
        mem.free(m0);
        mem.read(m0, 1);
    }

    // Freeing a never-allocated address panics.
    #[test]
    #[should_panic(expected = "attempt to free unallocated address 1")]
    fn free_err2() {
        let mut mem = Mem::init(vec![]);
        mem.free(1);
    }

    // The program slot (address 0) can never be freed.
    #[test]
    #[should_panic(expected = "tried to free memory at program location")]
    fn free_err3() {
        let mut mem = Mem::init(vec![]);
        mem.free(0);
    }

    #[test]
    #[should_panic(expected = "attempt to free address 1 which is already free")]
    fn double_free_err() {
        let mut mem = Mem::init(vec![]);
        let m0 = mem.alloc(10);
        mem.free(m0);
        mem.free(m0);
    }

    // Allocation reuses the lowest freed address first.
    #[test]
    fn alloc_lowest() {
        let mut mem = Mem::init(vec![]);
        let m0 = mem.alloc(10);
        let m1 = mem.alloc(20);
        let _m2 = mem.alloc(30);
        mem.free(m0);
        mem.free(m1);
        let m3 = mem.alloc(40);
        assert_eq!(m3, m0);
    }

    // `len` counts freed slots too: program slot + three allocations = 4.
    #[test]
    fn len4() {
        let mut mem = Mem::init(vec![]);
        let m0 = mem.alloc(10);
        let m1 = mem.alloc(20);
        let m2 = mem.alloc(30);
        mem.free(m0);
        mem.free(m1);
        mem.free(m2);
        assert_eq!(mem.len(), 4);
    }

    // Reused addresses do not grow `len`.
    #[test]
    fn len2() {
        let mut mem = Mem::init(vec![]);
        let m0 = mem.alloc(10);
        mem.free(m0);
        let m1 = mem.alloc(20);
        mem.free(m1);
        mem.alloc(30);
        assert_eq!(mem.len(), 2);
    }

    // Newly allocated memory is zero-filled.
    #[test]
    fn init_with_zero() {
        let mut mem = Mem::init(vec![]);
        let m0 = mem.alloc(10);
        for i in 0..10 {
            assert_eq!(mem.read(m0, i), &0);
        }
    }

    #[test]
    #[should_panic]
    fn read_err_offset() {
        let mut mem = Mem::init(vec![]);
        let m0 = mem.alloc(10);
        mem.read(m0, 10);
    }

    // A zero-sized block has no readable offsets at all.
    #[test]
    #[should_panic]
    fn read_err_zero() {
        let mut mem = Mem::init(vec![]);
        let m0 = mem.alloc(0);
        mem.read(m0, 0);
    }

    #[test]
    #[should_panic]
    fn read_err_addr() {
        let mem = Mem::init(vec![]);
        mem.read(1, 0);
    }

    #[test]
    fn write_and_read() {
        let mut mem = Mem::init(vec![]);
        let block0 = mem.alloc(10);
        mem.write(block0, 0, 384);
        assert_eq!(mem.read(block0, 0), &384);
    }

    // Ignored by default: exhausts the full 32-bit address space.
    #[test]
    #[ignore]
    fn fill_all_memory() {
        let mut mem = Mem::init(vec![]);
        for _ in 0..=u32::MAX {
            mem.alloc(1);
        }
        assert_eq!(mem.len(), u32::MAX);
    }
}
#![allow(unused_must_use)]
extern crate kafka;

use kafka::client::KafkaClient;
use std::env;

/// Minimal producer example: `prod <topic> <message>` sends one message to
/// the given topic on a local broker and then reports what was sent.
fn main() {
    let args: Vec<_> = env::args().collect();

    // Expect exactly two user-supplied arguments: topic and message.
    let (topic, message) = if args.len() == 3 {
        (args[1].to_string(), args[2].to_string())
    } else {
        println!("Usage: prod <topic> <message>");
        return;
    };

    let mut client = KafkaClient::new(vec!["localhost:9092".to_string()]);
    client.load_metadata_all();
    client.send_message(1, 0, topic.clone(), message.clone().into_bytes());
    println!("Sending >{}< to >{}< ...", message, topic);
}
use std::sync::Arc;

use palette::Srgba;
use ultraviolet::Vec3;
use vulkano::buffer::{BufferUsage, ImmutableBuffer, TypedBufferAccess};
use vulkano::command_buffer::{
    AutoCommandBufferBuilder, CommandBufferUsage, SecondaryAutoCommandBuffer,
};
use vulkano::descriptor_set::SingleLayoutDescSetPool;
use vulkano::device::Queue;
use vulkano::pipeline::viewport::Viewport;
use vulkano::pipeline::{GraphicsPipeline, PipelineBindPoint};
use vulkano::render_pass::Subpass;
use vulkano::sync::GpuFuture;

use crate::{
    graphics::{
        camera::CameraUBO,
        frame::object_draw::error::{ObjectDrawError, ObjectDrawSystemCreationError},
        renderer::error::DescriptorSetCreationError,
        vertex::Vertex,
    },
    window::Size,
};

pub mod error;

/// Index data: two quads of four vertices each, drawn as two triangles
/// apiece (12 indices total).
const fn indices() -> [u32; 12] {
    [0, 1, 2, 2, 3, 0, 4, 5, 6, 6, 7, 4]
}

/// Vertex data: two unit quads, one at z = 0.0 and one at z = -0.5, with a
/// distinct color per corner.
fn vertices() -> [Vertex; 8] {
    [
        Vertex::new(Vec3::new(-0.5, -0.5, 0.0), Srgba::new(1.0, 0.0, 0.0, 1.0)),
        Vertex::new(Vec3::new(0.5, -0.5, 0.0), Srgba::new(0.0, 1.0, 0.0, 1.0)),
        Vertex::new(Vec3::new(0.5, 0.5, 0.0), Srgba::new(0.0, 0.0, 1.0, 1.0)),
        Vertex::new(Vec3::new(-0.5, 0.5, 0.0), Srgba::new(1.0, 1.0, 1.0, 1.0)),
        Vertex::new(Vec3::new(-0.5, -0.5, -0.5), Srgba::new(1.0, 0.0, 0.0, 1.0)),
        Vertex::new(Vec3::new(0.5, -0.5, -0.5), Srgba::new(0.0, 1.0, 0.0, 1.0)),
        Vertex::new(Vec3::new(0.5, 0.5, -0.5), Srgba::new(0.0, 0.0, 1.0, 1.0)),
        Vertex::new(Vec3::new(-0.5, 0.5, -0.5), Srgba::new(1.0, 1.0, 1.0, 1.0)),
    ]
}

/// System that contains the necessary facilities for rendering game objects.
pub struct ObjectDrawSystem {
    /// Queue to render.
    graphics_queue: Arc<Queue>,
    /// Buffer for all vertices of game objects.
    vertex_buffer: Arc<ImmutableBuffer<[Vertex]>>,
    /// Buffer for all indices of vertices in game object.
    index_buffer: Arc<ImmutableBuffer<[u32]>>,
    /// Graphics pipeline used for rendering of game objects.
    pipeline: Arc<GraphicsPipeline>,
    /// Pool of descriptor sets of uniform buffers with data for vertex shader.
descriptor_set_pool: SingleLayoutDescSetPool,
}

impl ObjectDrawSystem {
    /// Creates new object draw system.
    ///
    /// Loads the default vertex/fragment shaders, builds the graphics
    /// pipeline for the given subpass, uploads the static vertex and index
    /// buffers, and prepares a descriptor-set pool for per-frame uniforms.
    ///
    /// Errors if the queue lacks graphics support or if any GPU resource
    /// creation fails.
    pub fn new(
        graphics_queue: Arc<Queue>,
        subpass: Subpass,
    ) -> Result<Self, ObjectDrawSystemCreationError> {
        // Check queue for graphics support.
        if !graphics_queue.family().supports_graphics() {
            return Err(ObjectDrawSystemCreationError::QueueFamilyNotSupported);
        }

        let pipeline = {
            use crate::graphics::shader::default::{fragment, vertex};

            let device = graphics_queue.device().clone();
            let vert_shader_module = vertex::Shader::load(device.clone())?;
            let frag_shader_module = fragment::Shader::load(device.clone())?;
            // Viewport is dynamic (set per draw); depth test and back-face
            // culling are enabled.
            Arc::new(
                GraphicsPipeline::start()
                    .vertex_input_single_buffer::<Vertex>()
                    .vertex_shader(vert_shader_module.main_entry_point(), ())
                    .fragment_shader(frag_shader_module.main_entry_point(), ())
                    .triangle_list()
                    .primitive_restart(false)
                    .viewports_dynamic_scissors_irrelevant(1)
                    .depth_stencil_simple_depth()
                    .cull_mode_back()
                    .render_pass(subpass)
                    .build(device)?,
            )
        };

        // Upload the static vertex data; flush so the copy completes before
        // first use.
        let vertex_buffer = {
            let (vertex_buffer, future) = ImmutableBuffer::from_iter(
                self::vertices(),
                BufferUsage::vertex_buffer(),
                graphics_queue.clone(),
            )?;
            future.flush()?;
            vertex_buffer
        };
        // Upload the static index data likewise.
        let index_buffer = {
            let (index_buffer, future) = ImmutableBuffer::from_iter(
                self::indices(),
                BufferUsage::index_buffer(),
                graphics_queue.clone(),
            )?;
            future.flush()?;
            index_buffer
        };

        // Pool built from descriptor set layout 0 of the pipeline (the
        // uniform-buffer binding used by the vertex shader).
        let descriptor_set_pool = {
            let layout = &pipeline.layout().descriptor_set_layouts()[0];
            SingleLayoutDescSetPool::new(layout.clone())
        };

        Ok(Self {
            graphics_queue,
            vertex_buffer,
            index_buffer,
            pipeline,
            descriptor_set_pool,
        })
    }

    /// Builds a secondary command buffer that draws game objects on the current subpass.
pub fn draw<B>(
    &mut self,
    viewport_size: Size,
    uniform_buffer: Arc<B>,
) -> Result<SecondaryAutoCommandBuffer, ObjectDrawError>
where
    B: TypedBufferAccess<Content = CameraUBO> + Send + Sync + 'static,
{
    // Secondary buffer recorded for this system's pipeline subpass; it is
    // meant to be executed from a primary command buffer.
    let mut builder = AutoCommandBufferBuilder::secondary_graphics(
        self.graphics_queue.device().clone(),
        self.graphics_queue.family(),
        CommandBufferUsage::OneTimeSubmit,
        self.pipeline.subpass().clone(),
    )?;

    // Bind the caller-provided camera uniform buffer into a fresh
    // descriptor set taken from the pool.
    let descriptor_sets = {
        let mut builder = self.descriptor_set_pool.next();
        builder
            .add_buffer(uniform_buffer)
            .map_err(DescriptorSetCreationError::from)?;
        let descriptor_set = builder.build().map_err(DescriptorSetCreationError::from)?;
        Arc::new(descriptor_set)
    };

    // Dynamic viewport covering the whole target surface.
    let viewport = Viewport {
        origin: [0.0, 0.0],
        dimensions: [viewport_size.width as f32, viewport_size.height as f32],
        depth_range: 0.0..1.0,
    };

    // Record a single indexed draw over the full index buffer.
    builder
        .set_viewport(0, std::iter::once(viewport))
        .bind_pipeline_graphics(self.pipeline.clone())
        .bind_vertex_buffers(0, self.vertex_buffer.clone())
        .bind_index_buffer(self.index_buffer.clone())
        .bind_descriptor_sets(
            PipelineBindPoint::Graphics,
            self.pipeline.layout().clone(),
            0,
            descriptor_sets,
        )
        .draw_indexed(self.index_buffer.len() as u32, 1, 0, 0, 0)?;

    Ok(builder.build()?)
}
}
/* * Datadog API V1 Collection * * Collection of all Datadog Public endpoints. * * The version of the OpenAPI document: 1.0 * Contact: support@datadoghq.com * Generated by: https://openapi-generator.tech */ /// UsageReportsType : The type of reports. /// The type of reports. #[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum UsageReportsType { #[serde(rename = "reports")] REPORTS, } impl ToString for UsageReportsType { fn to_string(&self) -> String { match self { Self::REPORTS => String::from("reports"), } } }
use crate::css::Color;
use crate::layout::Rect;

/// A sequence of paint commands.
pub type DisplayList = Vec<DisplayCommand>;

/// A single primitive paint operation.
#[derive(Debug)]
pub enum DisplayCommand {
    /// Fill the given rectangle with a solid color.
    SolidColor(Color, Rect),
    /// Draw within the rectangle using the given color and raw byte payload.
    // NOTE(review): the `Vec<u8>` is presumably the text/glyph data to
    // render — confirm against the renderer that consumes this command.
    Font(Color, Rect, Vec<u8>),
}