blob_id
stringlengths
40
40
language
stringclasses
1 value
repo_name
stringlengths
5
140
path
stringlengths
5
183
src_encoding
stringclasses
6 values
length_bytes
int64
12
5.32M
score
float64
2.52
4.94
int_score
int64
3
5
detected_licenses
listlengths
0
47
license_type
stringclasses
2 values
text
stringlengths
12
5.32M
download_success
bool
1 class
8e51248ea7085b5cfc691aadd0308df7a45c5f2c
Rust
davidpdrsn/dilemma
/src/query_dsl.rs
UTF-8
7,600
2.546875
3
[]
no_license
use crate::group::*; use crate::order::*; use crate::select::*; use crate::*;

/// Chainable builder methods for assembling a SQL `SELECT` query.
///
/// Blanket-implemented below for every type convertible into `Query<T>`
/// (tables, existing queries, ...), so the methods can be chained directly.
/// Every method consumes `self` and returns a new query value.
pub trait QueryDsl<T> {
    /// Set the projection and finish the builder as a `QueryWithSelect`.
    fn select(self, selectable: impl Into<Select>) -> QueryWithSelect<T>;
    /// AND a predicate onto the `WHERE` clause (first call just sets it).
    fn filter(self, filter: impl Into<Filter>) -> Query<T>;
    /// OR a predicate onto the `WHERE` clause (first call just sets it).
    fn or_filter(self, filter: impl Into<Filter>) -> Query<T>;
    /// Add a join of the default kind.
    fn join<K>(self, join: impl Into<JoinOn<K>>) -> Query<T>;
    /// Add an `INNER JOIN`.
    fn inner_join<K>(self, join: impl Into<JoinOn<K>>) -> Query<T>;
    /// Add an `OUTER JOIN`.
    fn outer_join<K>(self, join: impl Into<JoinOn<K>>) -> Query<T>;
    /// Replace any existing `GROUP BY` clause.
    fn group_by(self, group: impl Into<Group>) -> Query<T>;
    /// Combine with any existing `GROUP BY` clause (or start one).
    fn then_group_by(self, group: impl Into<Group>) -> Query<T>;
    /// Replace any existing `HAVING` clause.
    fn having(self, having: impl Into<Filter>) -> Query<T>;
    /// AND a predicate onto the `HAVING` clause.
    fn and_having(self, having: impl Into<Filter>) -> Query<T>;
    /// OR a predicate onto the `HAVING` clause.
    fn or_having(self, having: impl Into<Filter>) -> Query<T>;
    /// Replace any existing `ORDER BY` clause.
    fn order_by(self, order: impl Into<Order>) -> Query<T>;
    /// Combine a new ordering with any existing one.
    fn then_order_by(self, order: impl Into<Order>) -> Query<T>;
    /// Set `LIMIT`.
    fn limit(self, limit: impl Into<Limit>) -> Query<T>;
    /// Set `OFFSET`.
    fn offset(self, offset: impl Into<Offset>) -> Query<T>;
    /// `SELECT DISTINCT`.
    fn distinct(self) -> Query<T>;
    /// `SELECT DISTINCT ON (cols)`.
    fn distinct_on(self, cols: impl IntoColumns) -> Query<T>;
    // Row-locking modifiers; each sets one flag on the query's row_locking.
    fn for_update(self) -> Query<T>;
    fn skip_locked(self) -> Query<T>;
    fn for_key_share(self) -> Query<T>;
    fn for_no_key_update(self) -> Query<T>;
    fn for_share(self) -> Query<T>;
    fn no_wait(self) -> Query<T>;
    /// Attach common table expressions (`WITH ...`).
    fn with(self, ctes: impl Into<Ctes<T>>) -> Query<T>;
    /// Wrap the query in `EXPLAIN`.
    fn explain(self) -> Query<T>;
    /// Wrap the query in `EXPLAIN ANALYZE`.
    fn explain_analyze(self) -> Query<T>;
    /// Fold parts of `other` into this query (see the impl for exactly
    /// which clauses are merged).
    fn merge<K>(self, other: impl Into<Query<K>>) -> Query<T>;
}

impl<T, K> QueryDsl<K> for T
where
    T: Into<Query<K>>,
{
    fn select(self, selectable: impl Into<Select>) -> QueryWithSelect<K> {
        QueryWithSelect {
            query: self.into(),
            selection: selectable.into(),
        }
    }

    // AND with any previous filter; first call just sets it.
    fn filter(self, filter: impl Into<Filter>) -> Query<K> {
        let mut query = self.into();
        query.filter = if let Some(prev_filter) = query.filter.take() {
            Some(Filter::And(Box::new(prev_filter), Box::new(filter.into())))
        } else {
            Some(filter.into())
        };
        query
    }

    // OR with any previous filter; first call just sets it.
    fn or_filter(self, filter: impl Into<Filter>) -> Query<K> {
        let mut query = self.into();
        query.filter = if let Some(prev_filter) = query.filter.take() {
            Some(Filter::Or(Box::new(prev_filter), Box::new(filter.into())))
        } else {
            Some(filter.into())
        };
        query
    }

    // Joins against another table type `J` are cast into this query's
    // namespace via `cast_to::<K>()` before being recorded.
    fn inner_join<J>(self, join: impl Into<JoinOn<J>>) -> Query<K> {
        let mut query = self.into();
        query.add_join(join.into().cast_to::<K>(), JoinKind::Inner);
        query
    }

    fn join<J>(self, join: impl Into<JoinOn<J>>) -> Query<K> {
        let mut query = self.into();
        query.add_join(join.into().cast_to::<K>(), JoinKind::Default);
        query
    }

    fn outer_join<J>(self, join: impl Into<JoinOn<J>>) -> Query<K> {
        let mut query = self.into();
        query.add_join(join.into().cast_to::<K>(), JoinKind::Outer);
        query
    }

    // Overwrites any previous grouping (use `then_group_by` to combine).
    fn group_by(self, group: impl Into<Group>) -> Query<K> {
        let mut query = self.into();
        query.group = Some(group.into());
        query
    }

    fn then_group_by(self, group: impl Into<Group>) -> Query<K> {
        let mut query = self.into();
        let new_group = match query.group.take() {
            // Existing grouping becomes the lhs; the new one the rhs.
            Some(lhs) => Group::And {
                lhs: Box::new(lhs),
                rhs: Box::new(group.into()),
            },
            None => group.into(),
        };
        query.group = Some(new_group);
        query
    }

    // Overwrites any previous HAVING (use `and_having`/`or_having` to combine).
    fn having(self, having: impl Into<Filter>) -> Query<K> {
        let mut query = self.into();
        query.having = Some(having.into());
        query
    }

    fn and_having(self, having: impl Into<Filter>) -> Query<K> {
        let mut query = self.into();
        let new_having = if let Some(prev_having) = query.having.take() {
            prev_having.and(having.into())
        } else {
            having.into()
        };
        query.having = Some(new_having);
        query
    }

    fn or_having(self, having: impl Into<Filter>) -> Query<K> {
        let mut query = self.into();
        let new_having = if let Some(prev_having) = query.having.take() {
            prev_having.or(having.into())
        } else {
            having.into()
        };
        query.having = Some(new_having);
        query
    }

    // Overwrites any previous ordering (use `then_order_by` to combine).
    fn order_by(self, order: impl Into<Order>) -> Query<K> {
        let mut query = self.into();
        query.order = Some(order.into());
        query
    }

    // NOTE(review): the previous ordering is folded INTO the new one via
    // `Order::add`/`Order::extend` — confirm those prepend rather than
    // append, otherwise the newest ordering would take precedence over
    // earlier `order_by` calls.
    fn then_order_by(self, order: impl Into<Order>) -> Query<K> {
        let mut query = self.into();
        let mut new_order = order.into();
        match query.order.take() {
            None => {}
            Some(Order::Simple(ordering)) => {
                new_order.add(ordering);
            }
            Some(Order::List(ordering)) => {
                new_order.extend(ordering);
            }
        };
        query.order = Some(new_order);
        query
    }

    fn limit(self, limit: impl Into<Limit>) -> Query<K> {
        let mut query = self.into();
        query.limit = Some(limit.into());
        query
    }

    fn offset(self, offset: impl Into<Offset>) -> Query<K> {
        let mut query = self.into();
        query.offset = Some(offset.into());
        query
    }

    // Note: `distinct` and `distinct_on` overwrite each other.
    fn distinct(self) -> Query<K> {
        let mut query = self.into();
        query.distinct = Some(Distinct::EachRow);
        query
    }

    fn distinct_on(self, cols: impl IntoColumns) -> Query<K> {
        let mut query = self.into();
        query.distinct = Some(Distinct::On(cols.into_columns()));
        query
    }

    // Row-locking flags are additive; each method only sets its own flag.
    fn for_update(self) -> Query<K> {
        let mut query = self.into();
        query.row_locking.for_update = true;
        query
    }

    fn skip_locked(self) -> Query<K> {
        let mut query = self.into();
        query.row_locking.skip_locked = true;
        query
    }

    fn for_key_share(self) -> Query<K> {
        let mut query = self.into();
        query.row_locking.for_key_share = true;
        query
    }

    fn for_no_key_update(self) -> Query<K> {
        let mut query = self.into();
        query.row_locking.for_no_key_update = true;
        query
    }

    fn for_share(self) -> Query<K> {
        let mut query = self.into();
        query.row_locking.for_share = true;
        query
    }

    fn no_wait(self) -> Query<K> {
        let mut query = self.into();
        query.row_locking.no_wait = true;
        query
    }

    // CTEs accumulate; existing ones are kept.
    fn with(self, ctes: impl Into<Ctes<K>>) -> Query<K> {
        let mut query = self.into();
        query.ctes.extend(ctes.into());
        query
    }

    // `explain_analyze` overwrites a plain `explain` and vice versa.
    fn explain(self) -> Query<K> {
        let mut query = self.into();
        query.explain = Some(Explain::Default);
        query
    }

    fn explain_analyze(self) -> Query<K> {
        let mut query = self.into();
        query.explain = Some(Explain::Analyze);
        query
    }

    // Merges `other`'s filter (AND-ed), joins and CTEs into `self`.
    // NOTE(review): `other`'s group/having/order/limit/offset/distinct are
    // dropped here — confirm that is intentional.
    fn merge<J>(self, other: impl Into<Query<J>>) -> Query<K> {
        let mut lhs = self.into();
        let rhs = other.into();
        lhs.filter = match (lhs.filter, rhs.filter) {
            (Some(a), Some(b)) => Some(Filter::And(Box::new(a), Box::new(b))),
            (Some(a), None) => Some(a),
            (None, Some(b)) => Some(b),
            (None, None) => None,
        };
        lhs.joins.extend(rhs.joins.cast_to::<K>());
        lhs.ctes.extend(rhs.ctes.cast_to::<K>());
        lhs
    }
}
true
50540956f15dc69f9dc298f4f8ee986c0a6ec91f
Rust
ktomsic/xrl
/src/protocol/message.rs
UTF-8
4,594
3.140625
3
[ "MIT" ]
permissive
use std::io::Read; use serde_json::{from_reader, to_vec, Value}; use super::errors::*;

/// A decoded RPC message: a request (has an `id`), a notification (no `id`),
/// or a response (has an `id` plus `result`/`error`).
#[derive(PartialEq, Clone, Debug)]
pub enum Message {
    Request(Request),
    Response(Response),
    Notification(Notification),
}

/// A method invocation that expects an answer matched by `id`.
#[derive(Serialize, PartialEq, Clone, Debug)]
pub struct Request {
    pub id: u64,
    pub method: String,
    pub params: Value,
}

/// Answer to a request: `Ok` holds the "result" value, `Err` the "error".
// NOTE(review): the derived Serialize emits `Result` in serde's externally
// tagged form ({"Ok": ...} / {"Err": ...}), which is not symmetric with the
// "result"/"error" object keys that `Response::decode` reads — confirm the
// peer accepts this shape (or that a custom Serialize exists elsewhere).
#[derive(Serialize, PartialEq, Clone, Debug)]
pub struct Response {
    pub id: u64,
    pub result: Result<Value, Value>,
}

/// A method invocation that expects no answer (no `id` field).
#[derive(Serialize, PartialEq, Clone, Debug)]
pub struct Notification {
    pub method: String,
    pub params: Value,
}

impl Message {
    /// Reads one JSON value from `rd`, classifies it by its keys (see
    /// `get_message_type`), and decodes it into the matching variant.
    ///
    /// Returns `DecodeError::InvalidMessage` when the value fits none of the
    /// three message shapes.
    pub fn decode<R>(rd: &mut R) -> Result<Message, DecodeError>
    where
        R: Read,
    {
        let value = from_reader(rd)?;
        match get_message_type(&value) {
            ValueType::Request => Ok(Message::Request(Request::decode(value)?)),
            ValueType::Response => Ok(Message::Response(Response::decode(value)?)),
            ValueType::Notification => Ok(Message::Notification(Notification::decode(value)?)),
            ValueType::Invalid => Err(DecodeError::InvalidMessage),
        }
    }

    /// Serializes the inner message to JSON bytes.
    pub fn to_vec(&self) -> Vec<u8> {
        // According to serde_json's documentation for `to_value`:
        //
        //     The conversion [of T to Value] can fail if T's implementation of
        //     Serialize decides to fail, or if T contains a map with non-string
        //     keys.
        //
        // This should not be the case here, so I think it's safe to unwrap.
        match *self {
            Message::Request(ref request) => to_vec(request).expect("Request serialization failed"),
            Message::Response(ref response) => {
                to_vec(response).expect("Response serialization failed")
            }
            Message::Notification(ref notification) => {
                to_vec(notification).expect("Notification serialization failed")
            }
        }
    }
}

impl Notification {
    /// Extracts `method` (a string) and `params` from a JSON object,
    /// removing them from the map as it goes.
    fn decode(value: Value) -> Result<Self, DecodeError> {
        let mut value = value;
        let map = value.as_object_mut().ok_or(DecodeError::InvalidMessage)?;
        let method = map.remove("method")
            .ok_or(DecodeError::InvalidMessage)?
            .as_str()
            .ok_or(DecodeError::InvalidMessage)?
            .to_owned();
        let params = map.remove("params").ok_or(DecodeError::InvalidMessage)?;
        Ok(Notification {
            method,
            params,
        })
    }
}

impl Request {
    /// Like `Notification::decode`, but additionally requires a u64 `id`.
    fn decode(value: Value) -> Result<Self, DecodeError> {
        let mut value = value;
        let map = value.as_object_mut().ok_or(DecodeError::InvalidMessage)?;
        let method = map.remove("method")
            .ok_or(DecodeError::InvalidMessage)?
            .as_str()
            .ok_or(DecodeError::InvalidMessage)?
            .to_owned();
        let params = map.remove("params").ok_or(DecodeError::InvalidMessage)?;
        let id = map.remove("id")
            .ok_or(DecodeError::InvalidMessage)?
            .as_u64()
            .ok_or(DecodeError::InvalidMessage)?;
        Ok(Request {
            id,
            method,
            params,
        })
    }
}

impl Response {
    /// Maps a "result" key to `Ok` and an "error" key to `Err`; a message
    /// with neither key is invalid. Also requires a u64 `id`.
    fn decode(value: Value) -> Result<Self, DecodeError> {
        let mut value = value;
        let map = value.as_object_mut().ok_or(DecodeError::InvalidMessage)?;
        let result = if map.contains_key("result") {
            Ok(map.remove("result").ok_or(DecodeError::InvalidMessage)?)
        } else if map.contains_key("error") {
            Err(map.remove("error").ok_or(DecodeError::InvalidMessage)?)
        } else {
            return Err(DecodeError::InvalidMessage);
        };
        let id = map.remove("id")
            .ok_or(DecodeError::InvalidMessage)?
            .as_u64()
            .ok_or(DecodeError::InvalidMessage)?;
        Ok(Response {
            id,
            result,
        })
    }
}

/// Internal classification of a raw JSON value into one of the message kinds.
enum ValueType {
    Request,
    Response,
    Notification,
    Invalid,
}

/// Classifies by key shape: method+params+id => request, method+params
/// (no id) => notification, (result|error)+id => response, anything else
/// (including non-objects) => invalid.
fn get_message_type(value: &Value) -> ValueType {
    if let Value::Object(ref map) = *value {
        if map.contains_key("method") && map.contains_key("params") {
            if map.contains_key("id") {
                ValueType::Request
            } else {
                ValueType::Notification
            }
        } else if (map.contains_key("result") || map.contains_key("error"))
            && map.contains_key("id")
        {
            ValueType::Response
        } else {
            ValueType::Invalid
        }
    } else {
        ValueType::Invalid
    }
}
true
24c03dd7309d9a93f47abe650a605278ff3dc289
Rust
nguyenminhhieu12041996/casper-node
/node/src/components/deploy_acceptor.rs
UTF-8
8,307
2.625
3
[ "Apache-2.0" ]
permissive
mod config; mod event; use std::{convert::Infallible, fmt::Debug}; use thiserror::Error; use tracing::{debug, error, info}; use crate::{ components::Component, effect::{ announcements::DeployAcceptorAnnouncement, requests::{ContractRuntimeRequest, StorageRequest}, EffectBuilder, EffectExt, Effects, }, types::{chainspec::DeployConfig, Chainspec, Deploy, DeployValidationFailure, NodeId}, utils::Source, NodeRng, }; use casper_types::Key; use crate::effect::Responder; pub use config::Config; pub use event::Event;

/// Errors reported back to a client that submitted a deploy.
#[derive(Debug, Error)]
pub enum Error {
    /// An invalid deploy was received from the client.
    #[error("invalid deploy: {0}")]
    InvalidDeploy(DeployValidationFailure),
    /// An invalid account sent a deploy.
    #[error("invalid account")]
    InvalidAccount,
    /// A deploy was sent from account with insufficient balance.
    #[error("insufficient balance")]
    InsufficientBalance,
}

/// A helper trait constraining `DeployAcceptor` compatible reactor events.
pub trait ReactorEventT:
    From<Event>
    + From<DeployAcceptorAnnouncement<NodeId>>
    + From<StorageRequest>
    + From<ContractRuntimeRequest>
    + Send
{
}

// Blanket impl: any reactor event type satisfying the bounds qualifies.
impl<REv> ReactorEventT for REv where
    REv: From<Event>
        + From<DeployAcceptorAnnouncement<NodeId>>
        + From<StorageRequest>
        + From<ContractRuntimeRequest>
        + Send
{
}

/// The `DeployAcceptor` is the component which handles all new `Deploy`s immediately after they're
/// received by this node, regardless of whether they were provided by a peer or a client.
///
/// It validates a new `Deploy` as far as possible, stores it if valid, then announces the newly-
/// accepted `Deploy`.
#[derive(Debug)]
pub struct DeployAcceptor {
    // Chain name deploys must target (from the chainspec).
    chain_name: String,
    // Limits a deploy must satisfy (size, TTL, etc. — from the chainspec).
    deploy_config: DeployConfig,
    // Whether client-submitted deploys get account/balance verification.
    verify_accounts: bool,
}

impl DeployAcceptor {
    /// Builds the component from its config plus the chainspec it validates
    /// deploys against.
    pub(crate) fn new(config: Config, chainspec: &Chainspec) -> Self {
        DeployAcceptor {
            chain_name: chainspec.network_config.name.clone(),
            deploy_config: chainspec.deploy_config,
            verify_accounts: config.verify_accounts(),
        }
    }

    /// Handles receiving a new `Deploy` from a peer or client.
    /// In the case of a peer, there should be no responder and the variant should be `None`
    /// In the case of a client, there should be a responder to communicate the validity of the
    /// deploy and the variant will be `Some`
    fn accept<REv: ReactorEventT>(
        &mut self,
        effect_builder: EffectBuilder<REv>,
        deploy: Box<Deploy>,
        source: Source<NodeId>,
        maybe_responder: Option<Responder<Result<(), Error>>>,
    ) -> Effects<Event> {
        let mut cloned_deploy = deploy.clone();
        let mut effects = Effects::new();
        // Static validation against chain name and deploy config limits.
        let is_acceptable = cloned_deploy.is_acceptable(&self.chain_name, &self.deploy_config);
        if let Err(error) = is_acceptable {
            // The client has submitted an invalid deploy. Return an error to the RPC component via
            // the responder.
            if let Some(responder) = maybe_responder {
                effects.extend(responder.respond(Err(Error::InvalidDeploy(error))).ignore());
            }
            effects.extend(
                effect_builder
                    .announce_invalid_deploy(deploy, source)
                    .ignore(),
            );
            return effects;
        }
        let account_key = deploy.header().account().to_account_hash().into();
        // Verify account if deploy received from client and node is configured to do so.
        if source.from_client() && self.verify_accounts {
            return effect_builder
                .is_verified_account(account_key)
                .event(move |verified| Event::AccountVerificationResult {
                    deploy,
                    source,
                    account_key,
                    verified,
                    maybe_responder,
                });
        }
        // Otherwise skip verification: feed `Some(true)` straight back into
        // the same event so the follow-up path is identical.
        effect_builder
            .immediately()
            .event(move |_| Event::AccountVerificationResult {
                deploy,
                source,
                account_key,
                verified: Some(true),
                maybe_responder,
            })
    }

    /// Follow-up to `accept`: acts on the account verification outcome.
    /// `Some(true)` => store the deploy; `Some(false)` => insufficient
    /// balance; `None` => invalid account.
    fn account_verification<REv: ReactorEventT>(
        &mut self,
        effect_builder: EffectBuilder<REv>,
        deploy: Box<Deploy>,
        source: Source<NodeId>,
        account_key: Key,
        verified: Option<bool>,
        maybe_responder: Option<Responder<Result<(), Error>>>,
    ) -> Effects<Event> {
        let mut effects = Effects::new();
        match verified {
            Some(true) => {
                // Account OK: persist the deploy; storage reports via
                // `PutToStorageResult` whether it was new.
                effects.extend(effect_builder.put_deploy_to_storage(deploy.clone()).event(
                    move |is_new| Event::PutToStorageResult {
                        deploy,
                        source,
                        is_new,
                        maybe_responder,
                    },
                ));
                return effects;
            }
            Some(false) => {
                info! {
                    "Received deploy from account {} that does not have minimum balance required",
                    account_key
                };
                // The client has submitted a deploy from an account that does not have minimum
                // balance required. Return an error message to the RPC component via the responder.
                if let Some(responder) = maybe_responder {
                    effects.extend(responder.respond(Err(Error::InsufficientBalance)).ignore());
                }
            }
            None => {
                // The client has submitted an invalid deploy. Return an error message to the RPC
                // component via the responder.
                info! {
                    "Received deploy from invalid account using {}",
                    account_key
                };
                if let Some(responder) = maybe_responder {
                    effects.extend(responder.respond(Err(Error::InvalidAccount)).ignore());
                }
            }
        }
        // Both failure arms fall through to announcing the deploy as invalid.
        effects.extend(
            effect_builder
                .announce_invalid_deploy(deploy, source)
                .ignore(),
        );
        effects
    }

    /// Final step: announce a newly stored deploy (only when `is_new`) and
    /// answer the client with `Ok(())` regardless of novelty.
    fn handle_put_to_storage<REv: ReactorEventT>(
        &mut self,
        effect_builder: EffectBuilder<REv>,
        deploy: Box<Deploy>,
        source: Source<NodeId>,
        is_new: bool,
        maybe_responder: Option<Responder<Result<(), Error>>>,
    ) -> Effects<Event> {
        let mut effects = Effects::new();
        if is_new {
            effects.extend(
                effect_builder
                    .announce_new_deploy_accepted(deploy, source)
                    .ignore(),
            );
        }
        // We can now respond with the result of accepting the deploy.
        if let Some(responder) = maybe_responder {
            effects.extend(responder.respond(Ok(())).ignore());
        }
        effects
    }
}

impl<REv: ReactorEventT> Component<REv> for DeployAcceptor {
    type Event = Event;
    type ConstructionError = Infallible;

    /// Dispatches each event to the matching handler above.
    fn handle_event(
        &mut self,
        effect_builder: EffectBuilder<REv>,
        _rng: &mut NodeRng,
        event: Self::Event,
    ) -> Effects<Self::Event> {
        debug!(?event, "handling event");
        match event {
            Event::Accept {
                deploy,
                source,
                responder,
            } => self.accept(effect_builder, deploy, source, responder),
            Event::PutToStorageResult {
                deploy,
                source,
                is_new,
                maybe_responder,
            } => {
                self.handle_put_to_storage(effect_builder, deploy, source, is_new, maybe_responder)
            }
            Event::AccountVerificationResult {
                deploy,
                source,
                account_key,
                verified,
                maybe_responder,
            } => self.account_verification(
                effect_builder,
                deploy,
                source,
                account_key,
                verified,
                maybe_responder,
            ),
        }
    }
}
true
cceb4dcc0ff8e22935332f91c56dd2f53f619ff9
Rust
cbackas/hookbuffer
/src/env.rs
UTF-8
2,341
3.203125
3
[]
no_license
pub fn get_server_port() -> u16 { match std::env::var("HOOKBUFFER_PORT") { Ok(port) => { println!("[INFO] Found HOOKBUFFER_PORT: {}", port); match port.parse::<u16>() { Ok(port) => port, Err(_) => { println!("[ERROR] Custom HOOKBUFFER_PORT is not a valid port number, using default port 8000"); 8000 } } } Err(_) => { println!("[INFO] No HOOKBUFFER_PORT found, using default port 8000"); 8000 } } } pub fn get_destination_url() -> String { match std::env::var("HOOKBUFFER_DESTINATION_URL") { Ok(mut url) => { if !url.ends_with('/') { url.push('/'); } println!("[INFO] Found custom HOOKBUFFER_DESTINATION_URL: {}", url); url } Err(_) => "https://discord.com/".to_string(), } } #[cfg(test)] mod tests { use serial_test::serial; use super::*; mod get_server_port { use super::*; #[test] #[serial] fn default() { std::env::remove_var("HOOKBUFFER_PORT"); assert_eq!(get_server_port(), 8000); } #[test] #[serial] fn custom() { std::env::set_var("HOOKBUFFER_PORT", "1234"); assert_eq!(get_server_port(), 1234); } #[test] #[serial] fn custom_invalid() { std::env::set_var("HOOKBUFFER_PORT", "abc"); assert_eq!(get_server_port(), 8000); } } mod get_destination_url { use super::*; #[test] #[serial] fn default() { std::env::remove_var("HOOKBUFFER_DESTINATION_URL"); assert_eq!(get_destination_url(), "https://discord.com/"); } #[test] #[serial] fn custom() { std::env::set_var("HOOKBUFFER_DESTINATION_URL", "https://example.com"); assert_eq!(get_destination_url(), "https://example.com/"); } #[test] #[serial] fn custom_no_trailing_slash() { std::env::set_var("HOOKBUFFER_DESTINATION_URL", "https://example.com"); assert_eq!(get_destination_url(), "https://example.com/"); } } }
true
cfc2b3d27caf96f7c35dab1841553dfaa52d8b35
Rust
kolgotko/command-pattern.rs
/src/main.rs
UTF-8
774
2.84375
3
[]
no_license
extern crate command_pattern;

use std::error::Error;
use std::any::Any;

use command_pattern::*;

/// Command-pattern demo: runs two commands through an `Invoker`, downcasting
/// the boxed `Any` result of the first and propagating the deliberate error
/// of the second (so the process exits with that error).
///
/// Fix: the file mixed bare trait objects (`Box<Error>`, `Box<Any>`) with the
/// `dyn` form it also used (`Box<dyn Any>`); all trait objects now use the
/// explicit `dyn` syntax consistently (behavior-identical).
fn main() -> Result<(), Box<dyn Error>> {
    let mut inv: Invoker<Box<dyn Any>> = Invoker::new();

    // First command: succeeds and yields a boxed `&str` as its result.
    let result = exec_or_undo_all!(inv, {
        exec: move {
            println!("exec 1");
            Ok(Box::new("i am result") as Box<dyn Any>)
        },
        unexec: move {
            println!("unexec 1");
            Ok(())
        }
    })?;

    // Recover the concrete `&str` from the type-erased result.
    let result: &str = result.downcast_ref::<&str>()
        .ok_or("downcast error")?
        .to_owned();
    println!("received: {:?}", result);

    // Second command: always fails, so `?` propagates the error and the
    // final println is never reached (intentional for the demo).
    let result = exec_or_undo_all!(inv, move {
        println!("exec 2");
        Err("i am error")?
    })?;
    println!("received: {:?}", result);

    Ok(())
}
true
8b1b2f4d48def979b339b6a5590870648503b701
Rust
TGElder/rust
/frontier/src/pathfinder/pathfinder.rs
UTF-8
19,721
2.546875
3
[ "CC-BY-4.0" ]
permissive
use crate::travel_duration::*; use commons::grid::Grid; use commons::index2d::*; use commons::manhattan::ManhattanDistance; use commons::*; use network::algorithms::ClosestOrigins; use network::ClosestTargetResult as NetworkClosestTargetResult; use network::Edge as NetworkEdge; use network::Network; use std::collections::HashMap; use std::collections::HashSet; use std::convert::TryInto; use std::hash::Hash; use std::sync::Arc; use std::time::Duration; pub struct Pathfinder<T> where T: TravelDuration, { index: Index2D, travel_duration: Arc<T>, network: Network, } impl<T> Pathfinder<T> where T: TravelDuration, { pub fn new(width: usize, height: usize, travel_duration: Arc<T>) -> Pathfinder<T> { Pathfinder { index: Index2D::new(width, height), travel_duration, network: Network::new(width * height, &[]), } } pub fn travel_duration(&self) -> &Arc<T> { &self.travel_duration } fn get_network_index(&self, position: &V2<usize>) -> usize { self.index.get_index(position).unwrap() } fn get_network_indices(&self, positions: &[V2<usize>]) -> Vec<usize> { positions .iter() .map(|position| self.get_network_index(position)) .collect() } fn get_position_from_network_index( &self, network_index: usize, ) -> Result<V2<usize>, IndexOutOfBounds> { self.index.get_position(network_index) } fn get_positions_from_network_indices(&self, network_indices: &[usize]) -> Vec<V2<usize>> { network_indices .iter() .flat_map(|index| self.get_position_from_network_index(*index)) .collect() } pub fn remove_edge(&mut self, from: &V2<usize>, to: &V2<usize>) { self.network .remove_edges(self.get_network_index(from), self.get_network_index(to)); } pub fn set_edge_duration(&mut self, from: &V2<usize>, to: &V2<usize>, duration: &Duration) { self.remove_edge(from, to); let network_edge = NetworkEdge::new( self.get_network_index(from), self.get_network_index(to), duration.as_millis().try_into().unwrap(), ); self.network.add_edge(&network_edge); } pub fn manhattan_distance(&self, to: &[V2<usize>]) -> impl 
Fn(usize) -> u64 { let to = to.to_vec(); let index = self.index; let minimum_duration = self.travel_duration.min_duration(); let minimum_cost: u64 = minimum_duration.as_millis().try_into().unwrap(); move |from| { let from = index.get_position(from).unwrap(); to.iter() .map(|to| { TryInto::<u64>::try_into(from.manhattan_distance(to)).unwrap() * minimum_cost }) .min() .unwrap() } } pub fn find_path(&self, from: &[V2<usize>], to: &[V2<usize>]) -> Option<Vec<V2<usize>>> { let to_indices = &self.get_network_indices(to); if to_indices.is_empty() { return None; } let from_indices = &self.get_network_indices(from); if from_indices.is_empty() { return None; } let path = self.network .find_path(from_indices, to_indices, None, &self.manhattan_distance(to)); match path { Some(ref path) if path.is_empty() => None, Some(ref path) => { let mut out = vec![self.get_position_from_network_index(path[0].from).unwrap()]; for edge in path { out.push(self.get_position_from_network_index(edge.to).unwrap()); } Some(out) } None => None, } } pub fn in_bounds(&self, position: &V2<usize>) -> bool { self.index.get_index(position).is_ok() } pub fn positions_within( &self, positions: &[V2<usize>], duration: &Duration, ) -> HashMap<V2<usize>, Duration> { let indices = self.get_network_indices(positions); let max_cost = duration.as_millis().try_into().unwrap(); self.network .nodes_within(&indices, max_cost) .into_iter() .flat_map(|result| { let position = self.get_position_from_network_index(result.index); match position { Ok(position) => Some((position, Duration::from_millis(result.cost))), _ => None, } }) .collect() } pub fn init_targets(&mut self, name: String) { self.network.init_targets(name); } pub fn load_target(&mut self, name: &str, position: &V2<usize>, target: bool) { self.network .load_target(name, self.get_network_index(position), target) } pub fn closest_targets( &self, positions: &[V2<usize>], targets: &str, n_closest: usize, ) -> Vec<ClosestTargetResult> { let indices = 
self.get_network_indices(positions); self.network .closest_loaded_targets(&indices, targets, n_closest) .drain(..) .map(|result| self.as_closest_target_result(result)) .collect() } fn as_closest_target_result(&self, result: NetworkClosestTargetResult) -> ClosestTargetResult { ClosestTargetResult { position: self.get_position_from_network_index(result.node).unwrap(), path: self.get_positions_from_network_indices(&result.path), duration: Duration::from_millis(result.cost), } } pub fn closest_origins<U: Copy + Eq + Hash>( &self, origin_to_positions: &HashMap<U, Vec<V2<usize>>>, ) -> Vec2D<HashSet<U>> { let origin_to_indices = origin_to_positions .iter() .map(|(origin, positions)| (*origin, self.get_network_indices(positions))) .collect(); let closest_origins = self.network.closest_origins(&origin_to_indices); let mut out = Vec2D::new(self.index.columns(), self.index.rows(), hashset! {}); for (index, origins) in closest_origins.into_iter().enumerate() { let position = self.get_position_from_network_index(index).unwrap(); *out.mut_cell_unsafe(&position) = origins; } out } } #[derive(Clone, Debug, PartialEq)] pub struct ClosestTargetResult { pub position: V2<usize>, pub path: Vec<V2<usize>>, pub duration: Duration, } #[cfg(test)] mod tests { use crate::world::World; use super::*; use commons::edge::Edge; use commons::grid::Grid; use commons::M; use isometric::cell_traits::*; use std::time::Duration; struct TestTravelDuration { max: Duration, } impl TravelDuration for TestTravelDuration { fn get_duration( &self, world: &World, from: &V2<usize>, to: &V2<usize>, ) -> Option<Duration> { match world.get_cell(to) { Some(cell) => { let elevation = cell.elevation(); if world.is_road(&Edge::new(*from, *to)) { return Some(Duration::from_millis(1)); } else if elevation != 0.0 { return Some(Duration::from_millis(elevation as u64)); } } _ => return None, } None } fn min_duration(&self) -> Duration { Duration::from_millis(1) } fn max_duration(&self) -> Duration { self.max } } fn 
travel_duration() -> TestTravelDuration { TestTravelDuration { max: Duration::from_millis(4), } } #[rustfmt::skip] fn world() -> World { World::new( M::from_vec(3, 3, vec![ 4.0, 2.0, 0.0, 3.0, 3.0, 2.0, 2.0, 3.0, 4.0] ), 0.5, ) } fn pathfinder() -> Pathfinder<TestTravelDuration> { let world = &world(); let mut out = Pathfinder::new(world.width(), world.height(), Arc::new(travel_duration())); let travel_duration = travel_duration(); for x in 0..world.width() { for y in 0..world.height() { for EdgeDuration { from, to, duration } in travel_duration.get_durations_for_position(world, v2(x, y)) { if let Some(duration) = duration { out.set_edge_duration(&from, &to, &duration) } } } } out } #[test] fn test_get_network_index() { let pathfinder = pathfinder(); assert_eq!(pathfinder.get_network_index(&v2(0, 0)), 0); assert_eq!(pathfinder.get_network_index(&v2(1, 0)), 1); assert_eq!(pathfinder.get_network_index(&v2(2, 0)), 2); assert_eq!(pathfinder.get_network_index(&v2(0, 1)), 3); assert_eq!(pathfinder.get_network_index(&v2(1, 1)), 4); assert_eq!(pathfinder.get_network_index(&v2(2, 1)), 5); assert_eq!(pathfinder.get_network_index(&v2(0, 2)), 6); assert_eq!(pathfinder.get_network_index(&v2(1, 2)), 7); assert_eq!(pathfinder.get_network_index(&v2(2, 2)), 8); } #[test] fn test_get_network_indices() { let pathfinder = pathfinder(); let positions = [ v2(0, 0), v2(1, 0), v2(2, 0), v2(0, 1), v2(1, 1), v2(2, 1), v2(0, 2), v2(1, 2), v2(2, 2), ]; let actual = pathfinder.get_network_indices(&positions); assert_eq!(actual, vec![0, 1, 2, 3, 4, 5, 6, 7, 8]); } #[test] #[should_panic] fn test_get_network_indices_out_of_bounds() { let pathfinder = pathfinder(); let positions = [v2(3, 0)]; pathfinder.get_network_indices(&positions); } #[test] fn test_get_network_position() { let pathfinder = pathfinder(); assert_eq!(pathfinder.get_position_from_network_index(0), Ok(v2(0, 0))); assert_eq!(pathfinder.get_position_from_network_index(1), Ok(v2(1, 0))); 
assert_eq!(pathfinder.get_position_from_network_index(2), Ok(v2(2, 0))); assert_eq!(pathfinder.get_position_from_network_index(3), Ok(v2(0, 1))); assert_eq!(pathfinder.get_position_from_network_index(4), Ok(v2(1, 1))); assert_eq!(pathfinder.get_position_from_network_index(5), Ok(v2(2, 1))); assert_eq!(pathfinder.get_position_from_network_index(6), Ok(v2(0, 2))); assert_eq!(pathfinder.get_position_from_network_index(7), Ok(v2(1, 2))); assert_eq!(pathfinder.get_position_from_network_index(8), Ok(v2(2, 2))); } #[test] fn test_get_positions_from_network_indices() { let pathfinder = pathfinder(); let indices = [0, 1, 2, 3, 4, 5, 6, 7, 8]; let actual = pathfinder.get_positions_from_network_indices(&indices); assert_eq!( actual, vec![ v2(0, 0), v2(1, 0), v2(2, 0), v2(0, 1), v2(1, 1), v2(2, 1), v2(0, 2), v2(1, 2), v2(2, 2) ] ); } #[test] fn test_get_positions_from_network_indices_out_of_bounds() { let pathfinder = pathfinder(); let indices = [9]; let actual = pathfinder.get_positions_from_network_indices(&indices); assert!(actual.is_empty()); } #[test] fn test_find_path() { let pathfinder = pathfinder(); assert_eq!( pathfinder.find_path(&[v2(2, 2)], &[v2(1, 0)]), Some(vec![v2(2, 2), v2(2, 1), v2(1, 1), v2(1, 0),]) ); } #[test] fn test_find_path_impossible() { let pathfinder = pathfinder(); assert_eq!(pathfinder.find_path(&[v2(2, 2)], &[v2(2, 0)]), None); } #[test] fn test_find_path_length_0() { let pathfinder = pathfinder(); assert_eq!(pathfinder.find_path(&[v2(2, 2)], &[v2(2, 2)]), None); } #[test] fn test_find_path_multiple_from() { let pathfinder = pathfinder(); assert_eq!( pathfinder.find_path(&[v2(0, 0), v2(1, 0)], &[v2(1, 2)]), Some(vec![v2(1, 0), v2(1, 1), v2(1, 2)]) ); } #[test] fn test_find_path_multiple_to() { let pathfinder = pathfinder(); assert_eq!( pathfinder.find_path(&[v2(0, 0)], &[v2(2, 1), v2(0, 2)]), Some(vec![v2(0, 0), v2(0, 1), v2(0, 2)]) ); } #[test] fn test_set_edge_duration() { // Given let mut pathfinder = pathfinder(); // When 
pathfinder.set_edge_duration(&v2(0, 0), &v2(1, 0), &Duration::from_millis(0)); // Then assert_eq!( pathfinder .network .get_out(&0) .iter() .find(|edge| edge.to == 1), Some(&NetworkEdge { from: 0, to: 1, cost: 0 }) ); assert_eq!( pathfinder .network .get_in(&1) .iter() .find(|edge| edge.from == 0), Some(&NetworkEdge { from: 0, to: 1, cost: 0 }) ); } #[test] fn test_remove_edge() { // Given let mut pathfinder = pathfinder(); // When pathfinder.set_edge_duration(&v2(0, 0), &v2(1, 0), &Duration::from_millis(0)); pathfinder.remove_edge(&v2(0, 0), &v2(1, 0)); // Then assert_eq!( pathfinder .network .get_out(&0) .iter() .find(|edge| edge.to == 1), None ); assert_eq!( pathfinder .network .get_in(&1) .iter() .find(|edge| edge.from == 0), None ); } #[test] fn test_positions_within() { let pathfinder = pathfinder(); let actual = pathfinder.positions_within(&[v2(0, 0)], &Duration::from_millis(5)); let expected = [ (v2(0, 0), Duration::from_millis(0)), (v2(1, 0), Duration::from_millis(2)), (v2(1, 1), Duration::from_millis(5)), (v2(0, 1), Duration::from_millis(3)), (v2(0, 2), Duration::from_millis(5)), ] .iter() .cloned() .collect(); assert_eq!(&actual, &expected); } #[test] fn test_closest_targets() { let mut pathfinder = pathfinder(); pathfinder.init_targets("targets".to_string()); pathfinder.load_target("targets", &v2(0, 2), true); pathfinder.load_target("targets", &v2(1, 2), true); pathfinder.load_target("targets", &v2(2, 2), true); let actual = pathfinder.closest_targets(&[v2(1, 0)], "targets", 1); let expected = vec![ClosestTargetResult { position: v2(1, 2), path: vec![v2(1, 0), v2(1, 1), v2(1, 2)], duration: Duration::from_millis(6), }]; assert_eq!(&actual, &expected); } #[test] fn test_manhattan_distance_single_target() { let pathfinder = pathfinder(); let manhattan_distance = pathfinder.manhattan_distance(&[v2(1, 2)]); assert_eq!( manhattan_distance(pathfinder.get_network_index(&v2(0, 0))), 3 ); assert_eq!( manhattan_distance(pathfinder.get_network_index(&v2(1, 2))), 0 
); } #[test] fn test_manhattan_distance_multiple_targets() { let pathfinder = pathfinder(); let manhattan_distance = pathfinder.manhattan_distance(&[v2(0, 2), v2(1, 2)]); assert_eq!( manhattan_distance(pathfinder.get_network_index(&v2(0, 0))), 2 ); assert_eq!( manhattan_distance(pathfinder.get_network_index(&v2(1, 2))), 0 ); } #[test] fn test_in_bounds() { let pathfinder = pathfinder(); assert!(pathfinder.in_bounds(&v2(0, 0))); assert!(pathfinder.in_bounds(&v2(1, 0))); assert!(pathfinder.in_bounds(&v2(2, 0))); assert!(!pathfinder.in_bounds(&v2(3, 0))); assert!(pathfinder.in_bounds(&v2(0, 1))); assert!(pathfinder.in_bounds(&v2(1, 1))); assert!(pathfinder.in_bounds(&v2(2, 1))); assert!(!pathfinder.in_bounds(&v2(3, 1))); assert!(pathfinder.in_bounds(&v2(0, 2))); assert!(pathfinder.in_bounds(&v2(1, 2))); assert!(pathfinder.in_bounds(&v2(2, 2))); assert!(!pathfinder.in_bounds(&v2(3, 2))); assert!(!pathfinder.in_bounds(&v2(0, 3))); assert!(!pathfinder.in_bounds(&v2(1, 3))); assert!(!pathfinder.in_bounds(&v2(2, 3))); assert!(!pathfinder.in_bounds(&v2(3, 3))); } #[test] fn test_closest_origins() { // Given struct TestTravelDuration {} impl TravelDuration for TestTravelDuration { fn get_duration(&self, _: &World, _: &V2<usize>, _: &V2<usize>) -> Option<Duration> { panic!("Not expecting get_duration to be called!"); } fn min_duration(&self) -> Duration { Duration::from_secs(1) } fn max_duration(&self) -> Duration { Duration::from_secs(2) } } let travel_duration = Arc::new(TestTravelDuration {}); let mut pathfinder = Pathfinder::new(5, 3, travel_duration); pathfinder.set_edge_duration(&v2(0, 0), &v2(0, 2), &Duration::from_secs(1)); pathfinder.set_edge_duration(&v2(0, 0), &v2(1, 1), &Duration::from_secs(1)); pathfinder.set_edge_duration(&v2(0, 0), &v2(2, 0), &Duration::from_secs(1)); pathfinder.set_edge_duration(&v2(0, 2), &v2(1, 1), &Duration::from_secs(1)); pathfinder.set_edge_duration(&v2(0, 2), &v2(2, 2), &Duration::from_secs(1)); pathfinder.set_edge_duration(&v2(2, 0), 
&v2(3, 1), &Duration::from_secs(1)); pathfinder.set_edge_duration(&v2(2, 0), &v2(4, 1), &Duration::from_secs(1)); pathfinder.set_edge_duration(&v2(2, 2), &v2(3, 1), &Duration::from_secs(2)); pathfinder.set_edge_duration(&v2(2, 2), &v2(4, 1), &Duration::from_secs(2)); pathfinder.set_edge_duration(&v2(4, 2), &v2(4, 1), &Duration::from_secs(1)); // When let actual = pathfinder.closest_origins(&hashmap! { v2(0, 0) => vec![v2(0, 0)], v2(4, 2) => vec![v2(0, 2), v2(4, 2)], }); // Then let mut expected = Vec2D::new(5, 3, hashset! {}); expected.mut_cell_unsafe(&v2(0, 0)).insert(v2(0, 0)); expected.mut_cell_unsafe(&v2(0, 2)).insert(v2(4, 2)); expected.mut_cell_unsafe(&v2(1, 1)).insert(v2(0, 0)); expected.mut_cell_unsafe(&v2(1, 1)).insert(v2(4, 2)); expected.mut_cell_unsafe(&v2(2, 0)).insert(v2(0, 0)); expected.mut_cell_unsafe(&v2(2, 2)).insert(v2(4, 2)); expected.mut_cell_unsafe(&v2(3, 1)).insert(v2(0, 0)); expected.mut_cell_unsafe(&v2(4, 1)).insert(v2(4, 2)); expected.mut_cell_unsafe(&v2(4, 2)).insert(v2(4, 2)); assert_eq!(actual, expected); } }
true
bcaf9d3f99e0a68e63e1e63442583929a21db891
Rust
EFanZh/LeetCode
/src/problem_0594_longest_harmonious_subsequence/iterative.rs
UTF-8
947
3
3
[]
no_license
pub struct Solution; // ------------------------------------------------------ snip ------------------------------------------------------ // use std::collections::HashMap; impl Solution { pub fn find_lhs(nums: Vec<i32>) -> i32 { let mut counts = HashMap::with_capacity(nums.len()); for num in nums { counts.entry(num).and_modify(|count| *count += 1).or_insert(1); } counts .iter() .filter_map(|(num, low)| counts.get(&(num + 1)).map(|high| low + high)) .max() .unwrap_or(0) } } // ------------------------------------------------------ snip ------------------------------------------------------ // impl super::Solution for Solution { fn find_lhs(nums: Vec<i32>) -> i32 { Self::find_lhs(nums) } } #[cfg(test)] mod tests { #[test] fn test_solution() { super::super::tests::run::<super::Solution>(); } }
true
0a2ba570d42aa5f8f327f04510c432e9394662f2
Rust
randombit/botan-rs
/botan-sys/tests/tests.rs
UTF-8
2,809
2.625
3
[ "MIT" ]
permissive
extern crate botan_sys; use std::ffi::CString; use botan_sys::*; #[test] fn test_hex() { let bin = vec![0x42, 0x23, 0x45, 0x8F]; let mut out = Vec::new(); out.resize(bin.len() * 2, 0); unsafe { assert_eq!( botan_hex_encode(bin.as_ptr(), bin.len(), out.as_mut_ptr(), 0), 0 ); } assert_eq!(out[0], '4' as _); assert_eq!(out[1], '2' as _); assert_eq!(out[2], '2' as _); assert_eq!(out[3], '3' as _); assert_eq!(out[4], '4' as _); assert_eq!(out[5], '5' as _); assert_eq!(out[6], '8' as _); assert_eq!(out[7], 'F' as _); let mut decoded = vec![0; 1024]; let mut out_len = decoded.len(); unsafe { assert_eq!( botan_hex_decode(out.as_ptr(), out.len(), decoded.as_mut_ptr(), &mut out_len), 0 ); } assert_eq!(out_len, bin.len()); decoded.resize(out_len, 0); assert_eq!(bin, decoded); } #[test] fn test_hash() { unsafe { let mut hash = std::ptr::null_mut(); let hash_name = CString::new("SHA-384").unwrap(); assert_eq!(botan_hash_init(&mut hash, hash_name.as_ptr(), 0u32), 0); let input = vec![97, 98, 99]; assert_eq!(botan_hash_update(hash, input.as_ptr(), input.len()), 0); assert_eq!(botan_hash_update(hash, input.as_ptr(), input.len()), 0); let mut output_len = 0; assert_eq!(botan_hash_output_length(hash, &mut output_len), 0); assert!(output_len == 48); let mut digest = vec![0u8; output_len]; assert_eq!(botan_hash_final(hash, digest.as_mut_ptr()), 0); assert_eq!(digest[0], 0xCA); assert_eq!(digest[1], 0xF3); assert_eq!(digest[47], 0x8D); assert_eq!(botan_hash_destroy(hash), 0); } } #[test] fn test_version() { unsafe { let api_version = botan_ffi_api_version(); assert!(botan_ffi_supports_api(api_version) == 0); assert!(botan_ffi_supports_api(api_version + 1) != 0); #[cfg(feature = "botan3")] { assert_eq!(botan_version_major(), 3); } #[cfg(not(feature = "botan3"))] { if botan_version_major() == 2 { assert!(botan_version_minor() > 8); } else { assert_eq!(botan_version_major(), 3); } } } } #[test] fn test_rng() { unsafe { let mut rng = std::ptr::null_mut(); botan_rng_init(&mut rng, 
std::ptr::null()); let mut rng1 = vec![0u8; 16]; let mut rng2 = vec![0u8; 16]; assert_eq!(botan_rng_get(rng, rng1.as_mut_ptr(), rng1.len()), 0); assert_eq!(botan_rng_get(rng, rng2.as_mut_ptr(), rng2.len()), 0); assert!(rng1 != rng2); assert_eq!(botan_rng_destroy(rng), 0); } }
true
2c2f10806286ec065cb281031338e2cc84722d4f
Rust
ThomasZumsteg/adventofcode2015
/day16.rs
UTF-8
2,161
3.15625
3
[]
no_license
use common::get_input; use std::collections::HashMap; use regex::Regex; type Input = Vec<HashMap<String, usize>>; fn part1(sues: &Input) -> usize { let facts = HashMap::from([ ("children", 3), ("cats", 7), ("samoyeds", 2), ("pomeranians", 3), ("akitas", 0), ("vizslas", 0), ("goldfish", 5), ("trees", 3), ("cars", 2), ("perfumes", 1), ]); let matches: Vec<&HashMap<String, usize>> = sues.iter().filter(|sue| sue.iter().all(|(fact, value)| { fact == "Sue" || facts[fact.as_str()] == *value })).collect(); assert!(matches.len() == 1); matches[0]["Sue"] } fn part2(sues: &Input) -> usize { let facts = HashMap::from([ ("children", 3), ("cats", 7), ("samoyeds", 2), ("pomeranians", 3), ("akitas", 0), ("vizslas", 0), ("goldfish", 5), ("trees", 3), ("cars", 2), ("perfumes", 1), ]); let matches: Vec<&HashMap<String, usize>> = sues.iter().filter(|sue| sue.iter().all(|(fact, value)| { match fact.as_str() { "Sue" => true, f if f == "cats" || f == "trees" => facts[f] < *value, f if f == "pomeranians" || f == "goldfish" => facts[f] > *value, f => facts[f] == *value } })).collect(); assert!(matches.len() == 1); matches[0]["Sue"] } fn parse(text: String) -> Input { let re_ader = Regex::new(r"^Sue (\d+): (\w+): (\d+), (\w+): (\d+), (\w+): (\d+)$").unwrap(); text.trim().split("\n").map(|line| { let captures = re_ader.captures(line).unwrap(); let mut facts: HashMap<String, usize> = (1..4).map(|n| ( captures.get(2*n).unwrap().as_str().to_string(), captures.get(2*n+1).unwrap().as_str().parse::<usize>().unwrap() )).collect(); facts.insert("Sue".to_string(), captures.get(1).unwrap().as_str().parse::<usize>().unwrap()); facts }).collect() } fn main() { let input = parse(get_input(16, 2015)); println!("Part 1: {}", part1(&input)); println!("Part 2: {}", part2(&input)); }
true
14c8f3d4108b23e7ed59648d781626b8e30e6b01
Rust
Artemkaaas/indy-sdk
/vcx/dummy-cloud-agent/src/indy/wallet_plugin.rs
UTF-8
10,374
2.578125
3
[ "Apache-2.0" ]
permissive
use std::ffi::CString; use indyrs::ErrorCode; use libc::c_char; use serde_json::Value; use crate::utils::dyn_lib::load_lib; pub fn load_storage_library(library: &str, initializer: &str) -> Result<libloading::Library, String> { debug!("Loading storage plugin '{:}' as dynamic library.", library); match load_lib(library) { Ok(lib) => { unsafe { debug!("Storage library '{:}' loaded. Resolving its init function '{:}'.", library, initializer); let init_func: libloading::Symbol<unsafe extern fn() -> ErrorCode> = lib.get(initializer.as_bytes()).unwrap(); debug!("Initializing library '{:}' by calling function '{:}'.", library, initializer); match init_func() { ErrorCode::Success => debug!("Basic initialization for library '{:}' succeeded.", library), err => return Err(format!("Failed to resolve init function '{:}' for storage library '{:}'. Details {:?}.", initializer, library, err)) }; Ok(lib) } } Err(err) => Err(format!("Storage library {:} failed to load. Details: {:?}", library, err)) } } const POSTGRES_ADDITIONAL_INITIALIZER: &str = "init_storagetype"; pub fn finish_loading_postgres(storage_lib: libloading::Library, storage_config: &str, storage_credentials: &str) -> Result<(), String> { unsafe { debug!("Finishing initialization for postgre wallet plugin."); let init_storage_func: libloading::Symbol<unsafe extern fn(config: *const c_char, credentials: *const c_char) -> ErrorCode> = storage_lib.get(POSTGRES_ADDITIONAL_INITIALIZER.as_bytes()).unwrap(); let init_config = CString::new(storage_config).expect("CString::new failed"); let init_credentials = CString::new(storage_credentials).expect("CString::new failed"); match init_storage_func(init_config.as_ptr(), init_credentials.as_ptr()) { ErrorCode::Success => { debug!("Successfully completed postgre library initialization."); } err => return Err(format!("Failed to complete postgre library initialization. 
Details {:?}.", err)) } } Ok(()) } fn get_plugin_library_path(storage_type: &str, plugin_library_path: &Option<String>) -> Result<String, String> { if storage_type == "postgres_storage" { Ok(plugin_library_path.clone().unwrap_or(DEFAULT_POSTGRES_PLUGIN_PATH.into())) } else { plugin_library_path.clone() .ok_or(format!("You have to specify 'storage.plugin_library_path' in config because storage of type {} does not have known default path.", storage_type)) } } fn get_plugin_init_function(storage_type: &str, plugin_init_function: &Option<String>) -> Result<String, String> { if storage_type == "postgres_storage" { Ok(plugin_init_function.clone().unwrap_or(DEFAULT_POSTGRES_PLUGIN_INITIALIZER.into())) } else { plugin_init_function.clone() .ok_or(format!("You have to specify 'storage.plugin_init_function' in con_load_libfig because storage of type {} does not have known default path.", storage_type)) } } const DEFAULT_POSTGRES_PLUGIN_INITIALIZER: &str = "postgresstorage_init"; #[cfg(target_os = "macos")] static DEFAULT_POSTGRES_PLUGIN_PATH: &str = "/usr/local/lib/libindystrgpostgres.dylib"; #[cfg(target_os = "linux")] static DEFAULT_POSTGRES_PLUGIN_PATH: &str = "/usr/lib/libindystrgpostgres.so"; #[cfg(target_os = "windows")] static DEFAULT_POSTGRES_PLUGIN_PATH: &str = "c:\\windows\\system32\\libindystrgpostgres.dll"; pub fn serialize_storage_plugin_configuration(storage_type: &str, storage_config: &Option<Value>, storage_credentials: &Option<Value>, plugin_library_path: &Option<String>, plugin_init_function: &Option<String>) -> Result<(String, String, String, String), String> { let plugin_library_path_serialized = get_plugin_library_path(storage_type, plugin_library_path)?; let plugin_init_function_serialized = get_plugin_init_function(storage_type, plugin_init_function)?; let storage_config_serialized = serde_json::to_string(storage_config) .map_err(|err| format!("Failed to serialize 'storage_config'. 
{:?}", err))?; let storage_credentials_serialized = serde_json::to_string(storage_credentials) .map_err(|err| format!("Failed to serialize 'storage_credentials' {:?}", err))?; Ok((plugin_library_path_serialized, plugin_init_function_serialized, storage_config_serialized, storage_credentials_serialized) ) } #[cfg(test)] mod tests { use crate::domain::config::WalletStorageConfig; use super::*; #[test] fn should_load_custom_storage_settings_from_config() { let sample_config = r#" { "config": { "foo": "bar" }, "credentials": { "password": "baz" }, "type": "foo_storage_plugin", "plugin_library_path": "/some/custom/path", "plugin_init_function": "init_foo" } "#; let config: WalletStorageConfig = serde_json::from_str(sample_config).expect("Cant deserialize test data"); let result = serialize_storage_plugin_configuration(&config.xtype.expect(""), &config.config, &config.credentials, &config.plugin_library_path, &config.plugin_init_function); match result { Err(err) => panic!(format!("Failed to process configuration. Details: {}", err)), Ok((path, initializer, config, credentials)) => { assert_eq!(path, "/some/custom/path"); assert_eq!(initializer, "init_foo"); assert_eq!(config, r#"{"foo":"bar"}"#); assert_eq!(credentials, r#"{"password":"baz"}"#); } }; } #[test] #[should_panic(expected = "You have to specify 'storage.plugin_library_path' in config")] fn should_fail_if_plugin_config_is_not_provided_for_unknown_plugin() { let sample_config = r#" { "config": { "foo": "bar" }, "credentials": { "password": "baz" }, "type": "foo_storage_plugin", "plugin_library_path": null, "plugin_init_function": null } "#; let config: WalletStorageConfig = serde_json::from_str(sample_config).expect("Cant deserialize test data"); let result = serialize_storage_plugin_configuration(&config.xtype.expect(""), &config.config, &config.credentials, &config.plugin_library_path, &config.plugin_init_function); match result { Err(err) => panic!(format!("Failed to process configuration. 
Details: {}", err)), Ok(_) => {} }; } #[test] fn should_use_default_values_for_storage_of_type_postgres_storage() { let sample_config = r#" { "config": { "foo": "bar" }, "credentials": { "password": "baz" }, "type": "postgres_storage", "plugin_library_path": null, "plugin_init_function": null } "#; let config: WalletStorageConfig = serde_json::from_str(sample_config).expect("Cant deserialize test data"); let result = serialize_storage_plugin_configuration(&config.xtype.expect(""), &config.config, &config.credentials, &config.plugin_library_path, &config.plugin_init_function); match result { Err(err) => panic!(format!("Failed to process configuration. Details: {}", err)), Ok((path, initializer, config, credentials)) => { assert_eq!(path, DEFAULT_POSTGRES_PLUGIN_PATH); assert_eq!(initializer, DEFAULT_POSTGRES_PLUGIN_INITIALIZER); assert_eq!(config, r#"{"foo":"bar"}"#); assert_eq!(credentials, r#"{"password":"baz"}"#); } }; } #[test] fn should_be_possible_to_override_postgres_storage_config_defaults() { let sample_config = r#" { "config": { "foo": "bar" }, "credentials": { "password": "baz" }, "type": "postgres_storage", "plugin_library_path": "OVERRIDE1", "plugin_init_function": "OVERRIDE2" } "#; let config: WalletStorageConfig = serde_json::from_str(sample_config).expect("Cant deserialize test data"); let result = serialize_storage_plugin_configuration(&config.xtype.expect(""), &config.config, &config.credentials, &config.plugin_library_path, &config.plugin_init_function); match result { Err(err) => panic!(format!("Failed to process configuration. Details: {}", err)), Ok((path, initializer, config, credentials)) => { assert_eq!(path, "OVERRIDE1"); assert_eq!(initializer, "OVERRIDE2"); assert_eq!(config, r#"{"foo":"bar"}"#); assert_eq!(credentials, r#"{"password":"baz"}"#); } }; } }
true
54e754ffadcb5fcf6f930b7a4f2071852249f7e5
Rust
dsouzadyn/smsh
/src/main.rs
UTF-8
2,274
3.46875
3
[]
no_license
use std::io::{self, Write}; use std::process::Command; use std::collections::HashMap; #[derive(PartialEq)] struct ShellCommand { name:&'static str, command_type: CommandType, } #[derive(PartialEq)] enum CommandType { INBUILT = 0, CUSTOM = 1, } fn flush(stdout: &mut io::Stdout) { stdout.flush().expect("Error: could not flush buffer!"); } impl ShellCommand { fn new(name: &'static str, command_type: CommandType) -> ShellCommand { ShellCommand { name: name, command_type: command_type} } fn execute(&self, args: Vec<&str>) { let child = Command::new(&self.name).args(args.iter()).spawn().expect("could not spawn process"); let output = child.wait_with_output().expect("Oops something went wrong"); print!("{}", String::from_utf8_lossy(&output.stdout)); print!("{}", String::from_utf8_lossy(&output.stderr)); } } fn exit() { std::process::exit(0x0000); } fn process(buf: String, stdout: &mut io::Stdout, commands: &HashMap<&str, ShellCommand>) { let tokens: Vec<&str> = buf.trim() .split(' ').collect(); let command = commands.get(tokens[0]); if command.is_none() != true { let comm = command.unwrap(); match comm.command_type { CommandType::INBUILT => comm.execute(tokens[1..].to_vec()), CommandType::CUSTOM => { if comm.name == "exit" { exit() }; }, } } else { print!("Error: No such command exists!\n"); } flush(stdout); } fn shell_loop() { let stdin = io::stdin(); let mut stdout = io::stdout(); let mut shell_commands: HashMap<&str, ShellCommand> = HashMap::new(); shell_commands.insert("clear", ShellCommand::new("clear", CommandType::INBUILT)); shell_commands.insert("ls", ShellCommand::new("ls", CommandType::INBUILT)); shell_commands.insert("exit", ShellCommand::new("exit", CommandType::CUSTOM)); loop { let mut buffer = String::new(); stdout.write(b"><>>> ").expect("Error: could not write to stdout!"); flush(&mut stdout); stdin.read_line(&mut buffer).expect("Error: could not read into buffer!"); process(buffer, &mut stdout, &shell_commands); } } fn main() { shell_loop(); }
true
ef6c2c51eafc68bf4901f57a10fb317bd1792fb3
Rust
sgravrock/adventofcode
/2020/rust/day3p1/src/main.rs
UTF-8
1,793
3.3125
3
[ "MIT" ]
permissive
mod input; use std::collections::HashMap; fn main() { println!("{}", n_trees_visited(&Grid::parse(input::puzzle_input()))); // 153 } #[derive(PartialEq, Eq, Hash, Debug)] struct Coord { x: usize, y: usize } #[derive(PartialEq, Eq, Debug)] struct Grid { pattern: HashMap<Coord, char>, max: Coord, } impl Grid { fn parse(input: &str) -> Grid { let mut pattern: HashMap<Coord, char> = HashMap::new(); let mut max_y = 0; let mut max_x = 0; for (y, line) in input.lines().enumerate() { for (x, c) in line.chars().enumerate() { pattern.insert(Coord {x, y}, c); max_x = x; // assume a rectangle max_y = y; } } Grid { pattern, max: Coord { x: max_x, y: max_y } } } fn tree_at(&self, pos: Coord) -> bool { let adjusted = Coord { x: pos.x % (self.max.x + 1), y: pos.y }; let space = self.pattern.get(&adjusted).unwrap(); *space == '#' } } #[test] fn test_parse() { let input = "..# #.."; let mut expected = Grid { pattern: HashMap::new(), max: Coord { x: 2, y: 1 } }; expected.pattern.insert(Coord { x: 0, y: 0}, '.'); expected.pattern.insert(Coord { x: 1, y: 0}, '.'); expected.pattern.insert(Coord { x: 2, y: 0}, '#'); expected.pattern.insert(Coord { x: 0, y: 1}, '#'); expected.pattern.insert(Coord { x: 1, y: 1}, '.'); expected.pattern.insert(Coord { x: 2, y: 1}, '.'); assert_eq!(Grid::parse(input), expected); } fn n_trees_visited(grid: &Grid) -> u32 { let mut result = 0; for y in 0..=grid.max.y { if grid.tree_at(Coord { x: 3*y, y }) { result += 1; } } result } #[test] fn test_n_trees_visited() { let grid = Grid::parse("..##....... #...#...#.. .#....#..#. ..#.#...#.# .#...##..#. ..#.##..... .#.#.#....# .#........# #.##...#... #...##....# .#..#...#.#"); assert_eq!(n_trees_visited(&grid), 7); }
true
914121a5e5e80c8426a83ccb87ec963635bb9048
Rust
Jason-Cooke/gluon
/base/src/resolve.rs
UTF-8
10,096
2.75
3
[ "MIT" ]
permissive
use std::borrow::Cow; use crate::{ fnv::FnvMap, symbol::Symbol, types::{AliasRef, Type, TypeContext, TypeEnv, TypeExt}, }; quick_error! { #[derive(Debug, PartialEq)] pub enum Error { UndefinedType(id: Symbol) { description("undefined type") display("Type `{}` does not exist.", id) } SelfRecursiveAlias(id: Symbol) { description("undefined type") display("Tried to remove self recursive alias `{}`.", id) } } } #[derive(Debug)] pub struct AliasRemover<T> { reduced_aliases: Vec<Symbol>, pub named_variables: FnvMap<Symbol, T>, } impl<T> Default for AliasRemover<T> { fn default() -> Self { AliasRemover { reduced_aliases: Default::default(), named_variables: Default::default(), } } } impl<T> AliasRemover<T> { pub fn new() -> Self { Self::default() } pub fn len(&self) -> usize { self.reduced_aliases.len() } pub fn is_empty(&self) -> bool { self.reduced_aliases.is_empty() } pub fn reset(&mut self, to: usize) { self.reduced_aliases.truncate(to) } pub fn clear(&mut self) { self.reduced_aliases.clear(); self.named_variables.clear(); } pub fn canonical_alias<'t, F>( &mut self, env: &dyn TypeEnv<Type = T>, interner: &mut impl TypeContext<Symbol, T>, typ: &'t T, mut canonical: F, ) -> Result<Cow<'t, T>, Error> where F: FnMut(&AliasRef<Symbol, T>) -> bool, T: TypeExt<Id = Symbol> + Clone + ::std::fmt::Display, { Ok(match peek_alias(env, typ) { Ok(Some(alias)) => { if self.reduced_aliases.contains(&alias.name) { return Err(Error::SelfRecursiveAlias(alias.name.clone())); } self.reduced_aliases.push(alias.name.clone()); if canonical(alias) { Cow::Borrowed(typ) } else { match alias.typ(interner).apply_args( alias.params(), &typ.unapplied_args(), interner, &mut self.named_variables, ) { Some(typ) => Cow::Owned( self.canonical_alias(env, interner, &typ, canonical)? 
.into_owned(), ), None => Cow::Borrowed(typ), } } } _ => Cow::Borrowed(typ), }) } pub fn remove_aliases_to_concrete<'a>( &mut self, env: &dyn TypeEnv<Type = T>, interner: &mut impl TypeContext<Symbol, T>, mut typ: T, ) -> Result<T, Error> where T: TypeExt<Id = Symbol> + ::std::fmt::Display, { loop { typ = match self.remove_alias_to_concrete(env, interner, &typ)? { Some((typ, args)) => match *typ { Type::Builtin(..) | Type::Function(..) | Type::Function(..) | Type::Record(..) | Type::Variant(..) | Type::Effect(..) | Type::EmptyRow | Type::ExtendRow { .. } if args.is_empty() => { return Ok(typ) } _ => { let typ = typ .replace_generics(interner, &mut self.named_variables) .unwrap_or_else(|| typ); interner.app(typ, args.iter().cloned().collect()) } }, None => return Ok(typ), }; } } pub fn remove_aliases( &mut self, env: &dyn TypeEnv<Type = T>, interner: &mut impl TypeContext<Symbol, T>, mut typ: T, ) -> Result<T, Error> where T: TypeExt<Id = Symbol> + ::std::fmt::Display, { loop { typ = match self.remove_alias(env, interner, &typ)? { Some(typ) => typ, None => return Ok(typ), }; } } pub fn remove_alias( &mut self, env: &dyn TypeEnv<Type = T>, interner: &mut impl TypeContext<Symbol, T>, typ: &T, ) -> Result<Option<T>, Error> where T: TypeExt<Id = Symbol> + ::std::fmt::Display, { Ok(self.remove_alias_to_concrete(env, interner, typ)?.map( |(non_replaced_type, unapplied_args)| { let non_replaced_type = non_replaced_type .replace_generics(interner, &mut self.named_variables) .unwrap_or_else(|| non_replaced_type.clone()); interner.app(non_replaced_type, unapplied_args.iter().cloned().collect()) }, )) } pub fn remove_alias_to_concrete<'a>( &mut self, env: &'a dyn TypeEnv<Type = T>, interner: &mut impl TypeContext<Symbol, T>, typ: &'a T, ) -> Result<Option<(T, Cow<'a, [T]>)>, Error> where T: TypeExt<Id = Symbol> + ::std::fmt::Display, { match peek_alias(env, &typ)? 
{ Some(alias) => self.remove_alias_to_concrete_inner(interner, typ, alias), None => Ok(None), } } pub fn remove_alias_to_concrete_inner<'a>( &mut self, interner: &mut impl TypeContext<Symbol, T>, typ: &'a T, alias: &'a AliasRef<Symbol, T>, ) -> Result<Option<(T, Cow<'a, [T]>)>, Error> where T: TypeExt<Id = Symbol> + ::std::fmt::Display, { if self.reduced_aliases.iter().any(|name| *name == alias.name) { return Err(Error::SelfRecursiveAlias(alias.name.clone())); } self.reduced_aliases.push(alias.name.clone()); // Opaque types should only exist as the alias itself if let Type::Opaque = **alias.unresolved_type() { return Ok(None); } let unapplied_args = typ.unapplied_args(); let opt = alias.typ(interner).arg_application( alias.params(), &unapplied_args, interner, &mut self.named_variables, ); match opt { Some((t, a)) => { let l = unapplied_args.len() - a.len(); Ok(Some(( t, match unapplied_args { Cow::Borrowed(slice) => Cow::Borrowed(&slice[l..]), Cow::Owned(mut vec) => { vec.drain(l..); Cow::Owned(vec) } }, ))) } None => Ok(None), } } } /// Removes type aliases from `typ` until it is an actual type pub fn remove_aliases<T>( env: &dyn TypeEnv<Type = T>, interner: &mut impl TypeContext<Symbol, T>, mut typ: T, ) -> T where T: TypeExt<Id = Symbol> + ::std::fmt::Display, { while let Ok(Some(new)) = remove_alias(env, interner, &typ) { typ = new; } typ } pub fn remove_aliases_cow<'t, T>( env: &dyn TypeEnv<Type = T>, interner: &mut impl TypeContext<Symbol, T>, typ: &'t T, ) -> Cow<'t, T> where T: TypeExt<Id = Symbol> + ::std::fmt::Display, { match remove_alias(env, interner, typ) { Ok(Some(typ)) => Cow::Owned(remove_aliases(env, interner, typ)), _ => Cow::Borrowed(typ), } } /// Resolves aliases until `canonical` returns `true` for an alias in which case it returns the /// type that directly contains that alias pub fn canonical_alias<'t, F, T>( env: &dyn TypeEnv<Type = T>, interner: &mut impl TypeContext<Symbol, T>, typ: &'t T, mut canonical: F, ) -> Cow<'t, T> where F: 
FnMut(&AliasRef<Symbol, T>) -> bool, T: TypeExt<Id = Symbol> + Clone + ::std::fmt::Display, { match peek_alias(env, typ) { Ok(Some(alias)) => { if canonical(alias) { Cow::Borrowed(typ) } else { alias .typ(interner) .apply_args( alias.params(), &typ.unapplied_args(), interner, &mut Default::default(), ) .map(|typ| { Cow::Owned(canonical_alias(env, interner, &typ, canonical).into_owned()) }) .unwrap_or_else(|| Cow::Borrowed(typ)) } } _ => Cow::Borrowed(typ), } } /// Expand `typ` if it is an alias that can be expanded and return the expanded type. /// Returns `None` if the type is not an alias or the alias could not be expanded. pub fn remove_alias<T>( env: &dyn TypeEnv<Type = T>, interner: &mut impl TypeContext<Symbol, T>, typ: &T, ) -> Result<Option<T>, Error> where T: TypeExt<Id = Symbol> + ::std::fmt::Display, { Ok(peek_alias(env, &typ)?.and_then(|alias| { // Opaque types should only exist as the alias itself if let Type::Opaque = **alias.unresolved_type() { return None; } alias.typ(interner).apply_args( alias.params(), &typ.unapplied_args(), interner, &mut Default::default(), ) })) } pub fn peek_alias<'t, T>( env: &'t dyn TypeEnv<Type = T>, typ: &'t T, ) -> Result<Option<&'t AliasRef<Symbol, T>>, Error> where T: TypeExt<Id = Symbol> + ::std::fmt::Display, { let maybe_alias = typ.applied_alias(); match typ.alias_ident() { Some(id) => { let alias = match maybe_alias { Some(alias) => Some(alias), None => env.find_type_info(id).map(|a| &**a), }; Ok(alias) } None => Ok(None), } }
true
bb902ddcf6a6eb98e82875552ca71bc00f11473c
Rust
richard-dennehy/raytracer_rs
/src/renderer/render.rs
UTF-8
3,183
2.859375
3
[]
no_license
use super::*; use crate::core::Colour; use crate::scene::World; use smallvec::SmallVec; use std::fmt; use std::fmt::{Display, Formatter}; use std::num::NonZeroU8; use std::slice::Iter; /// # Parameters /// `show_progress`: set to `true` when using e.g. `cargo run` for real-time progress updates; /// set to `false` when running benchmarks, otherwise it'll cripple performance due to stdout locking pub fn render(world: &World, camera: &Camera, samples: &Samples, show_progress: bool) -> Canvas { let mut canvas = Canvas::new(camera.width(), camera.height()).expect("Camera dimensions are too large"); canvas.draw(show_progress, |x, y| { let mut corners = samples.corner_offsets(); let (x_offset, y_offset) = corners.next().unwrap(); let top_left = world.colour_at(camera.ray_at(x, y, *x_offset, *y_offset)); let average_samples = |acc: Colour, (x_offset, y_offset): &(f64, f64)| { let sample = world.colour_at(camera.ray_at(x, y, *x_offset, *y_offset)); acc.average(sample) }; let corner_avg = corners.fold(top_left, average_samples); if samples.inner_samples() == 0 || corner_avg.is_similar_to(&top_left) { return top_left; } samples.inner_offsets().fold(corner_avg, average_samples) }); canvas } #[derive(Debug, PartialEq)] pub struct Samples { inner: Vec<(f64, f64)>, corners: SmallVec<[(f64, f64); 4]>, } impl Samples { pub fn single() -> Self { let mut corners = SmallVec::new(); corners.push((0.5, 0.5)); Self { inner: vec![], corners, } } pub fn grid(grid_size: NonZeroU8) -> Self { let grid_size = grid_size.get(); if grid_size == 1 { return Self::single(); } let initial = 1.0 / (grid_size * 2) as f64; let increment = 1.0 / grid_size as f64; let max = initial + (increment * (grid_size - 1) as f64); let corners = SmallVec::from([ (initial, initial), (max, initial), (initial, max), (max, max), ]); let offsets = (0..grid_size) .flat_map(|y| (0..grid_size).map(move |x| (x, y))) // exclude corners .filter(|(x, y)| (*x != 0 && *x != grid_size - 1) || (*y != 0 && *y != grid_size - 1)) 
.map(|(x, y)| { ( initial + (x as f64) * increment, initial + (y as f64) * increment, ) }) .collect(); Self { inner: offsets, corners, } } pub(super) fn inner_offsets(&self) -> Iter<(f64, f64)> { self.inner.iter() } pub(super) fn corner_offsets(&self) -> Iter<(f64, f64)> { self.corners.iter() } fn samples(&self) -> usize { self.inner.len() + self.corners.len() } fn inner_samples(&self) -> usize { self.inner.len() } } impl Display for Samples { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { writeln!(f, "X{}", self.samples()) } }
true
be9ed04478088200e0e142961076db4fed79f37d
Rust
simon-auch/rust_GrandiOS
/GrandiOS/src/utils/exceptions/software_interrupt.rs
UTF-8
8,088
2.671875
3
[]
no_license
//Syscalls interface //How should a syscall look like (example read_char): // 1. reserve space for the return value of the syscall // 2. create a pointer to the reserved space for the return value // 3. reserve space for the parameters of the syscall // 4. create a pointer to the reserved space for the parameters // 5. move the pointer for the return values into r0 // 6. move the pointer for the parameters into r1 // 7. call the swi instruction with the correct number. //Important note from the docu for the push, pop operations: //"Registers are stored on the stack in numerical order, with the lowest numbered register at the lowest address." //Blocking interrupts (eg. read, sleep) must be used inside select. use swi; use core::ptr::read_volatile; use driver::interrupts::*; use driver::serial::*; use driver::led::*; use utils::vt; use utils::scheduler; use alloc::allocator::Alloc; use utils::exceptions::common_code::RegisterStack; use utils::thread::TCB; use utils::registers; use alloc::string::ToString; pub fn init(ic: &mut InterruptController) { //set the handler for the software interrupt ic.set_handler_software_interrupt(handler_software_interrupt); println!("Exception handler swi: 0x{:x}", handler_software_interrupt as u32); } #[naked] extern fn handler_software_interrupt(){ unsafe{ trampolin!(0, handler_software_interrupt_helper); } } #[inline(never)] extern fn handler_software_interrupt_helper(reg_sp: u32){ let regs = unsafe{ &mut(*(reg_sp as *mut RegisterStack)) }; let instruction = unsafe { read_volatile((regs.lr_irq - 0x4) as *mut u32) }; let immed = instruction & 0xFFFFFF; let mut sched = unsafe {scheduler::get_scheduler()}; match immed { SWITCH!() => { sched.switch(regs, scheduler::State::Ready); }, WRITE!() => { let mut input : &mut swi::write::Input = unsafe{ &mut *(regs.r1 as *mut _) }; let mut debug_unit = unsafe { DebugUnit::new(0xFFFFF200) }; write!(debug_unit, "{}", input.c as char).unwrap(); }, ALLOC!() => { let mut input : &mut 
swi::useralloc::Input = unsafe{ &mut *(regs.r1 as *mut _) }; let mut output: &mut swi::useralloc::Output = unsafe{ &mut *(regs.r0 as *mut _) }; let layout = input.l.clone(); let ptr = unsafe { Some((&mut &::GLOBAL).alloc(layout)) }; output.r = ptr.clone(); }, DEALLOC!() => { let mut input : &mut swi::userdealloc::Input = unsafe{ &mut *(regs.r1 as *mut _) }; let layout = input.l.clone(); unsafe { (&mut &::GLOBAL).dealloc(input.p, layout); } }, EXIT!() => { sched.switch(regs, scheduler::State::Terminate); }, GET_LED!() => { let mut input : &mut swi::get_led::Input = unsafe{ &mut *(regs.r1 as *mut _) }; let mut output: &mut swi::get_led::Output = unsafe{ &mut *(regs.r0 as *mut _) }; output.s = match input.l { 0 => unsafe{ PIO::new(PIO_LED_RED).is_on() }, 1 => unsafe{ PIO::new(PIO_LED_YELLOW).is_on() }, 2 => unsafe{ PIO::new(PIO_LED_GREEN).is_on() }, _ => false }; }, SET_LED!() => { let mut input : &mut swi::set_led::Input = unsafe{ &mut *(regs.r1 as *mut _) }; match input.l { 0 => unsafe{ PIO::new(PIO_LED_RED).set(input.s) }, 1 => unsafe{ PIO::new(PIO_LED_YELLOW).set(input.s) }, 2 => unsafe{ PIO::new(PIO_LED_GREEN).set(input.s) }, _ => {} }; }, SELECT!() => { let mut input : &mut swi::select::Input = unsafe{ &mut *(regs.r1 as *mut _) }; let mut output: &mut swi::select::Output= unsafe{ &mut *(regs.r0 as *mut _) }; let mut correct = true; correct = (correct) & (input.swi_numbers.len() == input.swi_inputs.len()); correct = (correct) & (input.swi_numbers.len() == output.swi_outputs.len()); if correct { sched.switch(regs, scheduler::State::Waiting(input.swi_numbers)); } } TCBS_STATISTICS!() => { let mut output: &mut swi::tcbs_statistics::Output = unsafe{ &mut *(regs.r0 as *mut _) }; output.c = sched.get_all_tcb_statistics(); }, SPAWN!() => { let mut input : &mut swi::spawn::Input = unsafe{ &mut *(regs.r1 as *mut _) }; let mut output: &mut swi::spawn::Output= unsafe{ &mut *(regs.r0 as *mut _) }; //create new tcb let mut tcb = TCB::new( "Maybe we should add the ability to 
give new threads names...".to_string(), input.pc, input.stack_size, registers::CPSR_MODE_USER | registers::CPSR_IMPRECISE_ABORT ); tcb.register_stack.r0 = input.r0; tcb.set_priority(5); sched.add_thread(tcb); sched.switch(regs, scheduler::State::Ready); }, _ => { let mut debug_unit = unsafe { DebugUnit::new(0xFFFFF200) }; write!(debug_unit, "{}Unknown SWI{}\n", &vt::CF_RED, &vt::CF_STANDARD).unwrap(); write!(debug_unit, "{}Exception{} software_interrupt at: 0x{:x}, instruction: 0x{:x}, swi value: 0x{:x}, registers:{:?}\n", &vt::CF_YELLOW, &vt::CF_STANDARD, regs.lr_irq - 0x4, instruction, immed, regs).unwrap(); } } } //contains all the function to process a syscall, given the needed inputs. Will typically be called from the scheduler if the inputs are available and the corresponding syscall was called for that thread //TODO: wenn we have a MMU we need to translate the addresses behind r0 and r1 before using them. pub mod work { use utils::thread::TCB; use swi; pub fn read(tcb: &mut TCB, c: u8){ //read should only be used inside select let mut select_input : &mut swi::select::Input = unsafe{ &mut *(tcb.register_stack.r1 as *mut _) }; let mut select_output: &mut swi::select::Output= unsafe{ &mut *(tcb.register_stack.r0 as *mut _) }; //find correct index for input and output struct for i in 0..select_input.swi_numbers.len() { match select_input.swi_numbers[i] { READ!() => { select_output.index = i as u32; let mut output : &mut swi::read::Output = unsafe{ &mut *(select_output.swi_outputs[i] as *mut _) }; output.c = c; return; }, _ => {} } } } pub fn sleep_get_ticks(tcb: &mut TCB) -> u32{ let mut select_input : &mut swi::select::Input = unsafe{ &mut *(tcb.register_stack.r1 as *mut _) }; let mut select_output: &mut swi::select::Output= unsafe{ &mut *(tcb.register_stack.r0 as *mut _) }; //find correct index for input and output struct let mut ticks = 0; for i in 0..select_input.swi_numbers.len() { match select_input.swi_numbers[i] { SLEEP!() => { let mut input : &mut 
swi::sleep::Input = unsafe{ &mut *(select_input.swi_inputs[i] as *mut _) }; ticks = input.t; break; }, _ => {} } } return ticks; } pub fn sleep(tcb: &mut TCB){ let mut select_input : &mut swi::select::Input = unsafe{ &mut *(tcb.register_stack.r1 as *mut _) }; let mut select_output: &mut swi::select::Output= unsafe{ &mut *(tcb.register_stack.r0 as *mut _) }; //find correct index for input and output struct for i in 0..select_input.swi_numbers.len() { match select_input.swi_numbers[i] { SLEEP!() => { select_output.index = i as u32; return; }, _ => {} } } } }
true
8f1b2ea97f80ed9c693c2b7060edeb2be6c3a9bc
Rust
GarmOfGnipahellir/advent-of-code
/2022/src/bin/10.rs
UTF-8
5,447
3.5625
4
[]
no_license
fn main() { println!("01: {}", part01(include_str!("../inputs/10"))); println!("02:"); part02(include_str!("../inputs/10")); } #[derive(Debug, PartialEq, Clone, Copy)] enum Instruction { NoOp, AddX(i32), } impl Instruction { fn parse(s: &str) -> Self { match s { "noop" => Self::NoOp, _ if s.starts_with("addx") => s .split_once(' ') .and_then(|(_, x)| x.parse::<i32>().ok()) .map(|x| Self::AddX(x)) .unwrap(), _ => unreachable!(), } } } #[derive(Debug, PartialEq)] struct Program(Vec<Instruction>); impl Program { fn parse(s: &str) -> Self { let mut instructions = Vec::new(); for line in s.lines() { instructions.push(Instruction::parse(line)); } Self(instructions) } } struct Cpu { program: Program, instruction_index: usize, instruction_start: usize, cycle: usize, x: i32, } impl Cpu { fn new(program: Program) -> Self { Self { program, instruction_index: 0, instruction_start: 0, cycle: 0, x: 1, } } fn tick(&mut self) { let instruction = self.program.0[self.instruction_index]; let cycles_since_instruction_start = self.cycle - self.instruction_start; match instruction { Instruction::NoOp => { if cycles_since_instruction_start >= 1 { self.instruction_index += 1; self.instruction_start = self.cycle; // println!("noop finished execution at the end of {}", self.cycle); } } Instruction::AddX(x) => { if cycles_since_instruction_start >= 2 { self.x += x; self.instruction_index += 1; self.instruction_start = self.cycle; // println!("addx {} finished execution at the end of {}", x, self.cycle); } } } self.cycle += 1; } } fn part01(input: &str) -> i32 { let mut cpu = Cpu::new(Program::parse(input)); let mut signal_strengths = Vec::new(); while cpu.instruction_index < cpu.program.0.len() { cpu.tick(); if [20, 60, 100, 140, 180, 220].contains(&cpu.cycle) { signal_strengths.push(cpu.cycle as i32 * cpu.x); } } signal_strengths.iter().sum() } fn part02(input: &str) { let mut cpu = Cpu::new(Program::parse(input)); let mut row = Vec::with_capacity(40); for _ in 0..240 { cpu.tick(); let i = 
row.len() as i32; if i >= cpu.x - 1 && i <= cpu.x + 1 { row.push("#") } else { row.push(" "); } if row.len() == 40 { println!("{}", row.join("")); row.clear(); } } } #[cfg(test)] mod tests { use super::*; #[test] fn test_parse() { assert_eq!(Instruction::parse("noop"), Instruction::NoOp); assert_eq!(Instruction::parse("addx 3"), Instruction::AddX(3)); assert_eq!(Instruction::parse("addx -5"), Instruction::AddX(-5)); let input = r#"noop addx 3 addx -5"#; assert_eq!( Program::parse(input), Program(vec![ Instruction::NoOp, Instruction::AddX(3), Instruction::AddX(-5) ]) ); } #[test] fn test_tick() { let mut cpu = Cpu::new(Program(vec![ Instruction::NoOp, Instruction::AddX(3), Instruction::AddX(-5), ])); assert_eq!(cpu.cycle, 0); assert_eq!(cpu.x, 1); cpu.tick(); assert_eq!(cpu.cycle, 1); assert_eq!(cpu.x, 1); cpu.tick(); assert_eq!(cpu.cycle, 2); assert_eq!(cpu.x, 1); cpu.tick(); assert_eq!(cpu.cycle, 3); assert_eq!(cpu.x, 1); cpu.tick(); assert_eq!(cpu.cycle, 4); assert_eq!(cpu.x, 4); cpu.tick(); assert_eq!(cpu.cycle, 5); assert_eq!(cpu.x, 4); cpu.tick(); assert_eq!(cpu.cycle, 6); assert_eq!(cpu.x, -1); } #[test] fn example01() { assert_eq!(part01(EXAMPLE), 13140); } #[test] fn example02() { part02(EXAMPLE) } const EXAMPLE: &str = r#"addx 15 addx -11 addx 6 addx -3 addx 5 addx -1 addx -8 addx 13 addx 4 noop addx -1 addx 5 addx -1 addx 5 addx -1 addx 5 addx -1 addx 5 addx -1 addx -35 addx 1 addx 24 addx -19 addx 1 addx 16 addx -11 noop noop addx 21 addx -15 noop noop addx -3 addx 9 addx 1 addx -3 addx 8 addx 1 addx 5 noop noop noop noop noop addx -36 noop addx 1 addx 7 noop noop noop addx 2 addx 6 noop noop noop noop noop addx 1 noop noop addx 7 addx 1 noop addx -13 addx 13 addx 7 noop addx 1 addx -33 noop noop noop addx 2 noop noop noop addx 8 noop addx -1 addx 2 addx 1 noop addx 17 addx -9 addx 1 addx 1 addx -3 addx 11 noop noop addx 1 noop addx 1 noop noop addx -13 addx -19 addx 1 addx 3 addx 26 addx -30 addx 12 addx -1 addx 3 addx 1 noop noop noop addx -9 addx 18 
addx 1 addx 2 noop noop addx 9 noop noop noop addx -1 addx 2 addx -37 addx 1 addx 3 noop addx 15 addx -21 addx 22 addx -6 addx 1 noop addx 2 addx 1 noop addx -10 noop noop addx 20 addx 1 addx 2 addx 2 addx -6 addx -11 noop noop noop"#; }
true
99a2b5005196f216965f23b2adeb0f29f4ef3737
Rust
ThomasdenH/casimir-fdfd
/src/greenfunctions/cosinebasis.rs
UTF-8
10,138
2.921875
3
[ "MIT" ]
permissive
use crate::config::SimulationConfig; use crate::fields::{ScalarField, VectorField}; use crate::greenfunctions::operator::{Operator, OperatorType}; use nalgebra::*; use pbr::ProgressBar; use std::f32::consts::PI; use std::io::Stdout; use std::sync::{Arc, Mutex}; /// Determines a direction in space. #[derive(Eq, PartialEq, Copy, Clone, Hash, Debug)] pub enum Direction { #[allow(missing_docs)] X, #[allow(missing_docs)] Y, #[allow(missing_docs)] Z, #[allow(missing_docs)] NegX, #[allow(missing_docs)] NegY, #[allow(missing_docs)] NegZ, } impl Direction { /// Converts a `Direction` to a normalized normal vector in that direction. fn vector(self) -> Vector3<f32> { match self { Direction::X => Vector3::new(1.0, 0.0, 0.0), Direction::Y => Vector3::new(0.0, 1.0, 0.0), Direction::Z => Vector3::new(0.0, 0.0, 1.0), Direction::NegX => Vector3::new(-1.0, 0.0, 0.0), Direction::NegY => Vector3::new(0.0, -1.0, 0.0), Direction::NegZ => Vector3::new(0.0, 0.0, -1.0), } } } /// A `CosineBasis` can compute the casimir force for a bounding plane surface by expanding the /// source as a cosine basis and measuring the response. pub struct CosineBasis<'a> { p0: Point3<usize>, p1: Point3<usize>, frequency: f32, normal: Direction, permitivity: &'a ScalarField, simulation_config: &'a SimulationConfig, progress_bar: Option<Arc<Mutex<ProgressBar<Stdout>>>>, } impl<'a> CosineBasis<'a> { /// Will construct a new cosine basis with the following points as start and end points. pub fn new( p0: Point3<usize>, p1: Point3<usize>, frequency: f32, permitivity: &'a ScalarField, simulation_config: &'a SimulationConfig, normal: Direction, ) -> CosineBasis<'a> { CosineBasis { p0, p1, frequency, permitivity, normal, simulation_config, progress_bar: None, } } /// If this method is called with a progress bar, that progress bar will be updated as the force /// is computed. If it is called with `None`, no progress bar will be used. 
pub fn with_progress_bar( mut self, progress_bar: Option<Arc<Mutex<ProgressBar<Stdout>>>>, ) -> CosineBasis<'a> { self.progress_bar = progress_bar; self } /// Compute the force due to this basis. pub fn force(&self) -> Vector3<f32> { let (amax, bmax) = match self.normal { Direction::X | Direction::NegX => (self.p1.y - self.p0.y, self.p1.z - self.p0.z), Direction::Y | Direction::NegY => (self.p1.x - self.p0.x, self.p1.z - self.p0.z), Direction::Z | Direction::NegZ => (self.p1.x - self.p0.x, self.p1.y - self.p0.y), }; let mut total_force = Vector3::new(0.0, 0.0, 0.0); let mut remaining = amax * bmax; let mut count = 0; for n_total in 0..=(amax + bmax) { let b_start = n_total.min(bmax); let b_end = 0.max(n_total as i64 - amax as i64) as usize; let mut difference = 0.0; for nb in b_start..=b_end { let na = n_total - nb; let force = self.force_for_basis(na, nb); total_force += force; difference += force.norm(); count += 1; if let Some(ref progress_bar) = self.progress_bar { progress_bar.lock().unwrap().inc(); remaining -= 1; } } if difference / ((b_start - b_end + 1) as f32) < total_force.norm() / count as f32 { if let Some(ref progress_bar) = self.progress_bar { progress_bar.lock().unwrap().add(remaining as u64); } return total_force; } } total_force } /// Generate a cosine basis source. `na` and `nb` determine the number of the expansion, and /// the `polarization` determines the component to set the source to. 
fn get_source(&self, na: usize, nb: usize, polarization: Direction) -> VectorField { let mut source_field = VectorField::new(self.permitivity.size()); match self.normal { Direction::X | Direction::NegX => { let dy = self.p1.y - self.p0.y; let dz = self.p1.z - self.p0.z; let vector = 2.0 / ((dy * dz) as f32).sqrt() * polarization.vector(); for y in self.p0.y..self.p1.y { for z in self.p0.z..self.p1.z { source_field[(self.p0.x, y, z)] = vector * (na as f32 * PI * y as f32 / dy as f32).cos() * (nb as f32 * PI * z as f32 / dz as f32).cos(); } } } Direction::Y | Direction::NegY => { let dx = self.p1.x - self.p0.x; let dz = self.p1.z - self.p0.z; let vector = 2.0 / ((dx * dz) as f32).sqrt() * polarization.vector(); for x in self.p0.x..self.p1.x { for z in self.p0.z..self.p1.z { source_field[(x, self.p0.y, z)] = vector * (na as f32 * PI * x as f32 / dx as f32).cos() * (nb as f32 * PI * z as f32 / dz as f32).cos(); } } } Direction::Z | Direction::NegZ => { let dx = self.p1.x - self.p0.x; let dy = self.p1.y - self.p0.y; let vector = 2.0 / ((dx * dy) as f32).sqrt() * polarization.vector(); for x in self.p0.x..self.p1.x { for y in self.p0.y..self.p1.y { source_field[(x, y, self.p0.z)] = vector * (na as f32 * PI * x as f32 / dx as f32).cos() * (nb as f32 * PI * y as f32 / dy as f32).cos(); } } } } source_field } /// Computes the force for a particular basis by multiplying the stress tensor with the Green /// tensor. pub fn force_for_basis(&self, na: usize, nb: usize) -> Vector3<f32> { self.stress_tensor(na, nb) * self.normal.vector() } /// Computes the stress tensor. When multiplied with a surface normal vector, this gives the /// force between both sides of that surface. 
pub fn stress_tensor(&self, na: usize, nb: usize) -> Matrix3<f32> { let electric_tensor = self.green_tensor(na, nb, OperatorType::Electric); let magnetic_tensor = self.green_tensor(na, nb, OperatorType::Magnetic); self.frequency * self.frequency / PI * ((magnetic_tensor - Matrix3::from_diagonal_element(0.5) * magnetic_tensor.trace()) * (electric_tensor - Matrix3::from_diagonal_element(0.5) * electric_tensor.trace())) } /// Computes the Green tensor, which has as columns the Green functions for different /// polarizations. pub fn green_tensor(&self, na: usize, nb: usize, operator_type: OperatorType) -> Matrix3<f32> { Matrix3::from_columns(&[ self.green_function(&self.get_source(na, nb, Direction::X), operator_type), self.green_function(&self.get_source(na, nb, Direction::Y), operator_type), self.green_function(&self.get_source(na, nb, Direction::Z), operator_type), ]) } /// Performs operator inversion using the conjugate gradient method to obtain the Green /// functions due to the incoming `source`. The `operator_type` determines the type of operator /// to use and thereby the type of Green function to compute. pub fn green_function( &self, source: &VectorField, operator_type: OperatorType, ) -> Vector3<f32> { // The operator let a = Operator::new(self.frequency, self.permitivity, operator_type); let size = self.permitivity.size(); let mut x = VectorField::new(size); let mut r = source - &a * x.clone(); let mut p = r.clone(); let mut rsold = &r * &r; // In theory the conjugate gradient method should converge in N steps. In practice,it converges // much quicker. 
let volume = size.x * size.y * size.z; let mut temp1 = VectorField::new(size); let mut temp2 = VectorField::new(size); let mut a_p = VectorField::new(size); for _ in 0..volume { let (next_a_p, next_temp1, next_temp2) = a.mul_with_temps(p.clone_to(a_p), temp1, temp2); a_p = next_a_p; temp1 = next_temp1; temp2 = next_temp2; let alpha = rsold / (&p * &a_p); x += alpha * &p; r -= alpha * &a_p; let rsnew = &r * &r; if rsnew.sqrt() < self.simulation_config.fdfd_convergence { break; } p = (rsnew / rsold) * p + &r; rsold = rsnew; } // Integrate over the surface let mut green = Vector3::new(0.0, 0.0, 0.0); match self.normal { Direction::X | Direction::NegX => { for py in self.p0.y..self.p1.y { for pz in self.p0.z..self.p1.z { green += x[(self.p0.x, py, pz)]; } } } Direction::Y | Direction::NegY => { for px in self.p0.x..self.p1.x { for pz in self.p0.z..self.p1.z { green += x[(px, self.p0.y, pz)]; } } } Direction::Z | Direction::NegZ => { for px in self.p0.x..self.p1.x { for py in self.p0.y..self.p1.y { green += x[(px, py, self.p0.z)]; } } } } green } }
true
f60306d2e952f7c5276a2a6b27bc4e5b6b17639b
Rust
iCalculated/RandomImage
/benches/number_gen_bench.rs
UTF-8
1,076
2.75
3
[ "MIT" ]
permissive
#[macro_use] extern crate criterion; extern crate rand; use criterion::Criterion; use rand::Rng; use rand::distributions::{Distribution, Uniform}; fn get_uniform() -> (u8, u8, u8) { let mut rng = rand::thread_rng(); let uniform = Uniform::from(1..255); (uniform.sample(&mut rng), uniform.sample(&mut rng), uniform.sample(&mut rng)) } fn get_thread() -> (u8, u8, u8) { let mut rng = rand::thread_rng(); rng.gen::<(u8, u8, u8)>() } pub fn random() -> u8 { unsafe { static mut STATE: u64 = 0x123456789abcdef0; STATE = STATE.wrapping_mul(2862933555777941757) .wrapping_add(3037000493); (STATE % 256) as u8 } } pub fn tri_random() -> (u8, u8, u8) { (random(), random(),random()) } fn criterion_benchmark(c: &mut Criterion) { c.bench_function("uniform random", |b| b.iter(|| get_uniform())); c.bench_function("thread random", |b| b.iter(|| get_thread())); c.bench_function("bad random", |b| b.iter(|| tri_random())); } criterion_group!(benches, criterion_benchmark); criterion_main!(benches);
true
07bb3af1cb33fc37f9b6811afa86a594842a032c
Rust
EzgiS/rust-ncc
/src/animator/mod.rs
UTF-8
3,843
2.6875
3
[ "Apache-2.0", "MIT" ]
permissive
use crate::math::v2d::V2D; use crate::world::{Cells, Snapshot}; use crate::NVERTS; use cairo::{Context, Format, ImageSurface}; use std::io::Write; use std::path::Path; use std::process::{Command, Stdio}; fn set_background(context: &Context) { context.set_source_rgb(1.0, 1.0, 1.0); context.paint(); } fn draw_cell_poly(context: &Context, cell_poly: &[V2D; NVERTS]) { context.set_source_rgb(0.0, 0.0, 0.0); context.move_to(cell_poly[0].x as f32, cell_poly[0].y as f32); cell_poly[1..].iter().for_each(|v| { context.line_to(v.x as f32, v.y as f32); }); context.close_path(); context.set_line_width(2.0); context.stroke(); } fn create_mp4( data: &DrawingData, width: i32, height: i32, framerate: i32, output_path: &Path, ) { //let frames = history.len(); let surface = ImageSurface::create(Format::ARgb32, width, height) .expect("Couldn't create surface"); let context = Context::new(&surface); let mut child = Command::new("ffmpeg") .args(&[ "-f", "rawvideo", "-pix_fmt", "bgra", "-s", &format!("{}x{}", width, height), "-i", "-", "-pix_fmt", "yuv420p", "-r", &format!("{}", framerate), "-y", output_path.to_str().unwrap(), ]) .stdin(Stdio::piped()) .spawn() .expect("failed to execute process"); { // limited borrow of stdin let child_stdin = child.stdin.as_mut().expect("Failed toget stdin"); (0..data.num_frames).for_each(|frame| { set_background(&context); for cell_poly in data.get_cell_polys(frame) { draw_cell_poly(&context, cell_poly) } surface .with_data(|buf| { child_stdin .write_all(buf) .expect("Failed to write bytes") }) .expect("Failed to get_data"); }); } child.wait().expect("child process wasn't running"); } pub struct DrawingData { num_cells: usize, num_frames: usize, //time_strings: Vec<String>, cell_polys: Vec<[V2D; NVERTS]>, } impl DrawingData { pub fn from_history( history: &[&Cells], px_w: i32, px_h: i32, px_per_micron: f32, ) -> DrawingData { let num_cells = history[0].cell_states.len(); let mut cell_polys: Vec<[V2D; NVERTS]> = vec![]; for cells in history.iter() { 
for cell in cells.cell_states.iter() { let mut transformed_vs = [V2D::default(); NVERTS]; transformed_vs .iter_mut() .zip(cell.core.poly.iter()) .for_each(|(new_v, old_v)| { *new_v = old_v.scale(px_per_micron).translate( px_w as f32 * 0.5, px_h as f32 * 0.5, ); }); cell_polys.push(transformed_vs); } } DrawingData { num_cells, num_frames: history.len(), cell_polys, } } pub fn get_cell_polys(&self, frame: usize) -> &[[V2D; NVERTS]] { let start = frame * self.num_cells; let end = start + self.num_cells; &self.cell_polys[start..end] } } pub fn create_animation(history: &[Snapshot], output_path: &Path) { let cells_history = history.iter().map(|h| &h.cells).collect::<Vec<&Cells>>(); let data = DrawingData::from_history(&cells_history, 1280, 720, 2.0); create_mp4(&data, 1000, 1000, 30, output_path); }
true
6414b316e4f6da90564f2c6741fff67c94d72a9f
Rust
muthu95/ART
/src/node48.rs
UTF-8
5,776
2.765625
3
[]
no_license
use std; use std::{mem, ptr}; use crate::constants; use crate::node256; use crate::node16; use crate::key_interface; use crate::art_node_base; use crate::art_nodes; use crate::art_node_interface; macro_rules! make_array { ($n:expr, $constructor:expr) => {{ let mut items: [_; $n] = std::mem::uninitialized(); for place in items.iter_mut() { std::ptr::write(place, $constructor); } items }} } pub struct NodeType48<K, V> { pub base_struct: art_node_base::ArtNodeBase, pub keys: [u8; 256], pub children: mem::ManuallyDrop<[art_nodes::ArtNodeEnum<K, V>; 48]>, } impl<K, V> NodeType48<K, V> { pub fn new() -> Self { NodeType48 { base_struct: art_node_base::ArtNodeBase::new(), keys: [constants::EMPTY_CELL; 256], children: unsafe {mem::ManuallyDrop::new(make_array!(48, art_nodes::ArtNodeEnum::Empty))} } } } impl<K,V> Drop for NodeType48<K,V> { fn drop(&mut self) { for i in 0..256 { if self.keys[i] != constants::EMPTY_CELL { drop(&mut self.children[self.keys[i] as usize - 1]); } } } } impl<K: key_interface::KeyInterface, V> art_node_interface::ArtNodeInterface<K, V> for NodeType48<K, V> { fn add_child(&mut self, child: art_nodes::ArtNodeEnum<K, V>, byte: u8) { let idx = get_first_empty_cell(&self.children); self.children[idx] = child; //unsafe { ptr::write(&mut self.children[idx] as *mut art_nodes::ArtNodeEnum<K,V>, child);} //+1 because indices in children arr is referred from [1, 48]. 0 is Empty cell. 
self.keys[byte as usize] = (idx+1) as u8; self.base_struct.num_children += 1; } fn is_full(&self) -> bool { self.base_struct.num_children >= 48 } fn to_art_node(self: Box<Self>) -> art_nodes::ArtNodeEnum<K,V> { art_nodes::ArtNodeEnum::Inner48(self) } fn grow_and_add(mut self, leaf: art_nodes::ArtNodeEnum<K, V>, byte: u8) -> art_nodes::ArtNodeEnum<K, V> { //println!("creating node256"); let mut new_node = Box::new(node256::NodeType256::new()); new_node.base_struct.partial_len = self.base_struct.partial_len; let mut i: usize = 0; while i < self.base_struct.partial_len && i < constants::PREFIX_LENGTH_LIMIT { new_node.base_struct.partial[i] = self.base_struct.partial[i]; i += 1; } new_node.add_child(leaf, byte); for i in 0..256 { if self.keys[i] != constants::EMPTY_CELL { let child = std::mem::replace(&mut self.children[self.keys[i] as usize - 1], art_nodes::ArtNodeEnum::Empty); new_node.add_child(child, i as u8); } } art_nodes::ArtNodeEnum::Inner256(new_node) } fn mut_base(&mut self) -> &mut art_node_base::ArtNodeBase { &mut self.base_struct } fn base(&self) -> &art_node_base::ArtNodeBase { &self.base_struct } fn find_child_mut(&mut self, byte: u8) -> Option<&mut art_nodes::ArtNodeEnum<K, V>> { if self.keys[byte as usize] != constants::EMPTY_CELL { Some(&mut self.children[self.keys[byte as usize] as usize - 1]) } else { None } } fn find_child(&self, byte: u8) -> Option<&art_nodes::ArtNodeEnum<K, V>> { if self.keys[byte as usize] == constants::EMPTY_CELL { None } else { Some(&self.children[self.keys[byte as usize] as usize - 1]) } } fn remove_child(mut self, byte: u8) -> art_nodes::ArtNodeEnum<K, V> { let curr_children_count = self.base().num_children as usize; if curr_children_count == 17 { //println!("Reducing node48 to node16"); let mut new_node = Box::new(node16::NodeType16::new()); new_node.mut_base().partial_len = self.base().partial_len; let mut i = 0; while i < self.base().partial.len() { new_node.mut_base().partial[i] = self.base().partial[i]; i += 1; } i = 0; 
while i < 256 { if i as u8 != byte && self.keys[i as usize] != constants::EMPTY_CELL { //println!("Moving {} vs {}", self.keys[i], byte); let temp = mem::replace(&mut self.children[self.keys[byte as usize] as usize - 1], art_nodes::ArtNodeEnum::Empty); new_node.add_child(temp, self.keys[i as usize]); } i += 1; } new_node.to_art_node() } else { self.children[self.keys[byte as usize] as usize - 1] = art_nodes::ArtNodeEnum::Empty; self.keys[byte as usize] = constants::EMPTY_CELL; self.base_struct.num_children -= 1; Box::new(self).to_art_node() } } fn replace_child(&mut self, byte: u8, child: art_nodes::ArtNodeEnum<K, V>) { self.children[self.keys[byte as usize] as usize - 1] = child; } fn get_minimum(&self) -> &art_nodes::ArtNodeEnum<K,V> { let idx = get_first_non_empty_cell(&self.children); &self.children[idx] } } fn get_first_empty_cell<K, V>(children_arr: &[art_nodes::ArtNodeEnum<K, V>; 48]) -> usize { let mut i: usize = 0; while i < 48 { if let art_nodes::ArtNodeEnum::Empty = children_arr[i] { break; } i += 1; } i } fn get_first_non_empty_cell<K, V>(children_arr: &[art_nodes::ArtNodeEnum<K, V>; 48]) -> usize { let mut i: usize = 0; while i < 48 { match &children_arr[i] { art_nodes::ArtNodeEnum::Empty => i += 1, _ => break, } } i }
true
cb589fc40cbce5cfabe643808e4b9c09ce8cc8f0
Rust
starblue/advent_of_code
/a2018/src/bin/a201814a.rs
UTF-8
1,647
3.5
4
[]
no_license
use std::fmt; struct State { data: Vec<usize>, i1: usize, i2: usize, } impl State { fn next(&mut self) { let d1 = self.data[self.i1]; let d2 = self.data[self.i2]; let n = d1 + d2; if n >= 10 { self.data.push(1); } self.data.push(n % 10); let len = self.data.len(); self.i1 = (self.i1 + 1 + d1) % len; self.i2 = (self.i2 + 1 + d2) % len; } } impl fmt::Display for State { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { for (i, &s) in self.data.iter().enumerate() { write!( f, "{}", if i == self.i1 { "(" } else if i == self.i2 { "[" } else { " " } )?; write!(f, "{}", s)?; write!( f, "{}", if i == self.i1 { ")" } else if i == self.i2 { "]" } else { " " } )?; } Ok(()) } } fn main() { let n = 260321; let input = vec![3, 7]; let output_len = 10; let skip = n; let data = input; let i1 = 0; let i2 = 1; let mut state = State { data, i1, i2 }; //println!("{}", state); while state.data.len() < skip + output_len { state.next(); //println!("{}", state); } for d in state.data.iter().skip(skip).take(output_len) { print!("{}", d); } println!(); }
true
2f4ae92461e11fd5287b04a0b876452a4faf32a0
Rust
thibautRe/rustrogueliketutorial
/chapter-34-vaults/src/map_builders/waveform_collapse/mod.rs
UTF-8
5,480
2.703125
3
[ "MIT" ]
permissive
use super::{MapBuilder, Map, TileType, Position, spawner, SHOW_MAPGEN_VISUALIZER, generate_voronoi_spawn_regions, remove_unreachable_areas_returning_most_distant}; use rltk::RandomNumberGenerator; use std::collections::HashMap; mod common; use common::*; mod constraints; use constraints::*; mod solver; use solver::*; /// Provides a map builder using the Wave Function Collapse algorithm. pub struct WaveformCollapseBuilder { map : Map, starting_position : Position, depth: i32, history: Vec<Map>, noise_areas : HashMap<i32, Vec<usize>>, derive_from : Option<Box<dyn MapBuilder>>, spawn_list: Vec<(usize, String)> } impl MapBuilder for WaveformCollapseBuilder { fn get_map(&self) -> Map { self.map.clone() } fn get_starting_position(&self) -> Position { self.starting_position.clone() } fn get_snapshot_history(&self) -> Vec<Map> { self.history.clone() } fn build_map(&mut self) { self.build(); } fn get_spawn_list(&self) -> &Vec<(usize, String)> { &self.spawn_list } fn take_snapshot(&mut self) { if SHOW_MAPGEN_VISUALIZER { let mut snapshot = self.map.clone(); for v in snapshot.revealed_tiles.iter_mut() { *v = true; } self.history.push(snapshot); } } } impl WaveformCollapseBuilder { /// Generic constructor for waveform collapse. /// # Arguments /// * new_depth - the new map depth /// * derive_from - either None, or a boxed MapBuilder, as output by `random_builder` #[allow(dead_code)] pub fn new(new_depth : i32, derive_from : Option<Box<dyn MapBuilder>>) -> WaveformCollapseBuilder { WaveformCollapseBuilder{ map : Map::new(new_depth), starting_position : Position{ x: 0, y : 0 }, depth : new_depth, history: Vec::new(), noise_areas : HashMap::new(), derive_from, spawn_list: Vec::new() } } /// Derives a map from a pre-existing map builder. 
/// # Arguments /// * new_depth - the new map depth /// * derive_from - either None, or a boxed MapBuilder, as output by `random_builder` #[allow(dead_code)] pub fn derived_map(new_depth: i32, builder: Box<dyn MapBuilder>) -> WaveformCollapseBuilder { WaveformCollapseBuilder::new(new_depth, Some(builder)) } fn build(&mut self) { let mut rng = RandomNumberGenerator::new(); const CHUNK_SIZE :i32 = 8; let prebuilder = &mut self.derive_from.as_mut().unwrap(); prebuilder.build_map(); self.map = prebuilder.get_map(); for t in self.map.tiles.iter_mut() { if *t == TileType::DownStairs { *t = TileType::Floor; } } self.take_snapshot(); let patterns = build_patterns(&self.map, CHUNK_SIZE, true, true); let constraints = patterns_to_constraints(patterns, CHUNK_SIZE); self.render_tile_gallery(&constraints, CHUNK_SIZE); self.map = Map::new(self.depth); loop { let mut solver = Solver::new(constraints.clone(), CHUNK_SIZE, &self.map); while !solver.iteration(&mut self.map, &mut rng) { self.take_snapshot(); } self.take_snapshot(); if solver.possible { break; } // If it has hit an impossible condition, try again } // Find a starting point; start at the middle and walk left until we find an open tile self.starting_position = Position{ x: self.map.width / 2, y : self.map.height / 2 }; let mut start_idx = self.map.xy_idx(self.starting_position.x, self.starting_position.y); while self.map.tiles[start_idx] != TileType::Floor { self.starting_position.x -= 1; start_idx = self.map.xy_idx(self.starting_position.x, self.starting_position.y); } self.take_snapshot(); // Find all tiles we can reach from the starting point let exit_tile = remove_unreachable_areas_returning_most_distant(&mut self.map, start_idx); self.take_snapshot(); // Place the stairs self.map.tiles[exit_tile] = TileType::DownStairs; self.take_snapshot(); // Now we build a noise map for use in spawning entities later self.noise_areas = generate_voronoi_spawn_regions(&self.map, &mut rng); // Spawn the entities for area in 
self.noise_areas.iter() { spawner::spawn_region(&self.map, &mut rng, area.1, self.depth, &mut self.spawn_list); } } fn render_tile_gallery(&mut self, constraints: &[MapChunk], chunk_size: i32) { self.map = Map::new(0); let mut counter = 0; let mut x = 1; let mut y = 1; while counter < constraints.len() { render_pattern_to_map(&mut self.map, &constraints[counter], chunk_size, x, y); x += chunk_size + 1; if x + chunk_size > self.map.width { // Move to the next row x = 1; y += chunk_size + 1; if y + chunk_size > self.map.height { // Move to the next page self.take_snapshot(); self.map = Map::new(0); x = 1; y = 1; } } counter += 1; } self.take_snapshot(); } }
true
72c4b9a20bd9414c020c9f066fd86640f76266bd
Rust
isgasho/cio
/cio/src/main.rs
UTF-8
7,801
2.671875
3
[ "Apache-2.0" ]
permissive
use std::{fs::File, sync::Arc}; use cio_api::{ applicants::{Applicant, Applicants}, auth_logins::{AuthUser, AuthUsers}, configs::{ Building, Buildings, ConferenceRoom, ConferenceRooms, Group, Groups, Link, Links, User, Users, }, db::Database, journal_clubs::{JournalClubMeeting, JournalClubMeetings}, mailing_list::{MailingListSubscriber, MailingListSubscribers}, repos::{GithubRepo, GithubRepos}, rfds::{RFDs, RFD}, }; use dropshot::{ endpoint, ApiDescription, ConfigDropshot, ConfigLogging, ConfigLoggingLevel, HttpError, HttpResponseOk, HttpServerStarter, RequestContext, }; #[tokio::main] async fn main() -> Result<(), String> { let service_address = "0.0.0.0:8888"; /* * We must specify a configuration with a bind address. We'll use 127.0.0.1 * since it's available and won't expose this server outside the host. We * request port 8888. */ let config_dropshot = ConfigDropshot { bind_address: service_address.parse().unwrap(), request_body_max_bytes: 100000000, }; /* * For simplicity, we'll configure an "info"-level logger that writes to * stderr assuming that it's a terminal. */ let config_logging = ConfigLogging::StderrTerminal { level: ConfigLoggingLevel::Info, }; let log = config_logging .to_logger("cio-server") .map_err(|error| format!("failed to create logger: {}", error)) .unwrap(); /* * Build a description of the API. */ let mut api = ApiDescription::new(); api.register(api_get_applicants).unwrap(); api.register(api_get_auth_users).unwrap(); api.register(api_get_buildings).unwrap(); api.register(api_get_conference_rooms).unwrap(); api.register(api_get_github_repos).unwrap(); api.register(api_get_groups).unwrap(); api.register(api_get_journal_club_meetings).unwrap(); api.register(api_get_links).unwrap(); api.register(api_get_mailing_list_subscribers).unwrap(); api.register(api_get_rfds).unwrap(); api.register(api_get_schema).unwrap(); api.register(api_get_users).unwrap(); // Print the OpenAPI Spec to stdout. 
let mut api_definition = &mut api.openapi(&"CIO API", &"0.0.1"); api_definition = api_definition .description("Internal API server for information about the company, employess, etc") .contact_url("https://oxide.computer") .contact_email("cio@oxide.computer"); let api_file = "openapi-cio.json"; println!("Writing OpenAPI spec to {}...", api_file); let mut buffer = File::create(api_file).unwrap(); let schema = api_definition.json().unwrap().to_string(); api_definition.write(&mut buffer).unwrap(); /* * The functions that implement our API endpoints will share this context. */ let api_context = Context::new(schema).await; /* * Set up the server. */ let server = HttpServerStarter::new(&config_dropshot, api, api_context, &log) .map_err(|error| format!("failed to start server: {}", error)) .unwrap() .start(); server.await } /** * Application-specific context (state shared by handler functions) */ struct Context { db: Database, schema: String, } impl Context { /** * Return a new Context. */ pub async fn new(schema: String) -> Context { Context { schema, db: Database::new(), } } } /* * HTTP API interface */ /** * Return the OpenAPI schema in JSON format. */ #[endpoint { method = GET, path = "/", }] async fn api_get_schema( rqctx: Arc<RequestContext<Context>>, ) -> Result<HttpResponseOk<String>, HttpError> { let api_context = rqctx.context(); Ok(HttpResponseOk(api_context.schema.to_string())) } /** * Fetch all auth users. */ #[endpoint { method = GET, path = "/auth/users", }] async fn api_get_auth_users( rqctx: Arc<RequestContext<Context>>, ) -> Result<HttpResponseOk<Vec<AuthUser>>, HttpError> { let api_context = rqctx.context(); let db = &api_context.db; Ok(HttpResponseOk(AuthUsers::get_from_db(db, 1).0)) } /** * Fetch all applicants. 
*/ #[endpoint { method = GET, path = "/applicants", }] async fn api_get_applicants( rqctx: Arc<RequestContext<Context>>, ) -> Result<HttpResponseOk<Vec<Applicant>>, HttpError> { let api_context = rqctx.context(); let db = &api_context.db; Ok(HttpResponseOk(Applicants::get_from_db(db, 1).0)) } /** * Fetch a list of office buildings. */ #[endpoint { method = GET, path = "/buildings", }] async fn api_get_buildings( rqctx: Arc<RequestContext<Context>>, ) -> Result<HttpResponseOk<Vec<Building>>, HttpError> { let api_context = rqctx.context(); let db = &api_context.db; Ok(HttpResponseOk(Buildings::get_from_db(db, 1).0)) } /** * Fetch a list of conference rooms. */ #[endpoint { method = GET, path = "/conference_rooms", }] #[inline] async fn api_get_conference_rooms( rqctx: Arc<RequestContext<Context>>, ) -> Result<HttpResponseOk<Vec<ConferenceRoom>>, HttpError> { let api_context = rqctx.context(); let db = &api_context.db; Ok(HttpResponseOk(ConferenceRooms::get_from_db(db, 1).0)) } /** * Fetch a list of our GitHub repositories. */ #[endpoint { method = GET, path = "/github/repos", }] async fn api_get_github_repos( rqctx: Arc<RequestContext<Context>>, ) -> Result<HttpResponseOk<Vec<GithubRepo>>, HttpError> { let api_context = rqctx.context(); let db = &api_context.db; Ok(HttpResponseOk(GithubRepos::get_from_db(db, 1).0)) } /** * Fetch a list of Google groups. */ #[endpoint { method = GET, path = "/groups", }] async fn api_get_groups( rqctx: Arc<RequestContext<Context>>, ) -> Result<HttpResponseOk<Vec<Group>>, HttpError> { let api_context = rqctx.context(); let db = &api_context.db; Ok(HttpResponseOk(Groups::get_from_db(db, 1).0)) } /** * Fetch a list of journal club meetings. 
*/ #[endpoint { method = GET, path = "/journal_club_meetings", }] async fn api_get_journal_club_meetings( rqctx: Arc<RequestContext<Context>>, ) -> Result<HttpResponseOk<Vec<JournalClubMeeting>>, HttpError> { let api_context = rqctx.context(); let db = &api_context.db; Ok(HttpResponseOk(JournalClubMeetings::get_from_db(db, 1).0)) } /** * Fetch a list of internal links. */ #[endpoint { method = GET, path = "/links", }] async fn api_get_links( rqctx: Arc<RequestContext<Context>>, ) -> Result<HttpResponseOk<Vec<Link>>, HttpError> { let api_context = rqctx.context(); let db = &api_context.db; Ok(HttpResponseOk(Links::get_from_db(db, 1).0)) } /** * Fetch a list of mailing list subscribers. */ #[endpoint { method = GET, path = "/mailing_list_subscribers", }] async fn api_get_mailing_list_subscribers( rqctx: Arc<RequestContext<Context>>, ) -> Result<HttpResponseOk<Vec<MailingListSubscriber>>, HttpError> { let api_context = rqctx.context(); let db = &api_context.db; Ok(HttpResponseOk(MailingListSubscribers::get_from_db(db, 1).0)) } /** * Fetch all RFDs. */ #[endpoint { method = GET, path = "/rfds", }] async fn api_get_rfds( rqctx: Arc<RequestContext<Context>>, ) -> Result<HttpResponseOk<Vec<RFD>>, HttpError> { let api_context = rqctx.context(); let db = &api_context.db; Ok(HttpResponseOk(RFDs::get_from_db(db, 1).0)) } /** * Fetch a list of employees. */ #[endpoint { method = GET, path = "/users", }] async fn api_get_users( rqctx: Arc<RequestContext<Context>>, ) -> Result<HttpResponseOk<Vec<User>>, HttpError> { let api_context = rqctx.context(); let db = &api_context.db; Ok(HttpResponseOk(Users::get_from_db(db, 1).0)) }
true
ecf2c2bcdc7ea91915d6d3d8dbde1d67b51b3ab5
Rust
anderspitman/nphysics
/src/volumetric/volumetric_capsule.rs
UTF-8
224
2.703125
3
[ "BSD-2-Clause" ]
permissive
use volumetric::cylinder_volume; use volumetric::ball_volume; /// Computes the volume of a capsule. pub fn capsule_volume(half_height: &N, radius: &N) -> N { cylinder_volume(half_height, radius) + ball_volume(radius) }
true
ab33bebac561d743a8446cbdcc64c78ee74ea568
Rust
tafia/rulinalg
/src/matrix/decomposition.rs
UTF-8
54,876
3.0625
3
[ "MIT" ]
permissive
//! Matrix Decompositions //! //! References: //! 1. [On Matrix Balancing and EigenVector computation] //! (http://arxiv.org/pdf/1401.5766v1.pdf), James, Langou and Lowery //! //! 2. [The QR algorithm for eigen decomposition] //! (http://people.inf.ethz.ch/arbenz/ewp/Lnotes/chapter4.pdf) //! //! 3. [Computation of the SVD] //! (http://www.cs.utexas.edu/users/inderjit/public_papers/HLA_SVD.pdf) use std::any::Any; use std::cmp; use std::ops::{Mul, Add, Div, Sub, Neg}; use std::slice; use matrix::{Matrix, MatrixSlice, MatrixSliceMut, BaseMatrix, BaseMatrixMut}; use vector::Vector; use Metric; use utils; use error::{Error, ErrorKind}; use libnum::{One, Zero, Float, Signed}; use libnum::{cast, abs}; use epsilon::MachineEpsilon; impl<T> Matrix<T> where T: Any + Float { /// Cholesky decomposition /// /// Returns the cholesky decomposition of a positive definite matrix. /// /// # Examples /// /// ``` /// use rulinalg::matrix::Matrix; /// /// let m = Matrix::new(3,3, vec![1.0,0.5,0.5,0.5,1.0,0.5,0.5,0.5,1.0]); /// /// let l = m.cholesky(); /// ``` /// /// # Panics /// /// - The matrix is not square. /// /// # Failures /// /// - Matrix is not positive definite. pub fn cholesky(&self) -> Result<Matrix<T>, Error> { assert!(self.rows == self.cols, "Matrix must be square for Cholesky decomposition."); let mut new_data = Vec::<T>::with_capacity(self.rows() * self.cols()); for i in 0..self.rows() { for j in 0..self.cols() { if j > i { new_data.push(T::zero()); continue; } let mut sum = T::zero(); for k in 0..j { sum = sum + (new_data[i * self.cols() + k] * new_data[j * self.cols() + k]); } if j == i { new_data.push((self[[i, i]] - sum).sqrt()); } else { let p = (self[[i, j]] - sum) / new_data[j * self.cols + j]; if !p.is_finite() { return Err(Error::new(ErrorKind::DecompFailure, "Matrix is not positive definite.")); } else { } new_data.push(p); } } } Ok(Matrix { rows: self.rows(), cols: self.cols(), data: new_data, }) } /// Compute the cos and sin values for the givens rotation. 
/// /// Returns a tuple (c, s). fn givens_rot(a: T, b: T) -> (T, T) { let r = a.hypot(b); (a / r, -b / r) } fn make_householder(column: &[T]) -> Result<Matrix<T>, Error> { let size = column.len(); if size == 0 { return Err(Error::new(ErrorKind::InvalidArg, "Column for householder transform cannot be empty.")); } let denom = column[0] + column[0].signum() * utils::dot(column, column).sqrt(); if denom == T::zero() { return Err(Error::new(ErrorKind::DecompFailure, "Cannot produce househoulder transform from column as first \ entry is 0.")); } let mut v = column.into_iter().map(|&x| x / denom).collect::<Vec<T>>(); // Ensure first element is fixed to 1. v[0] = T::one(); let v = Vector::new(v); let v_norm_sq = v.dot(&v); let v_vert = Matrix::new(size, 1, v.data().clone()); let v_hor = Matrix::new(1, size, v.into_vec()); Ok(Matrix::<T>::identity(size) - (v_vert * v_hor) * ((T::one() + T::one()) / v_norm_sq)) } fn make_householder_vec(column: &[T]) -> Result<Matrix<T>, Error> { let size = column.len(); if size == 0 { return Err(Error::new(ErrorKind::InvalidArg, "Column for householder transform cannot be empty.")); } let denom = column[0] + column[0].signum() * utils::dot(column, column).sqrt(); if denom == T::zero() { return Err(Error::new(ErrorKind::DecompFailure, "Cannot produce househoulder transform from column as first \ entry is 0.")); } let mut v = column.into_iter().map(|&x| x / denom).collect::<Vec<T>>(); // Ensure first element is fixed to 1. v[0] = T::one(); let v = Matrix::new(size, 1, v); Ok(&v / v.norm()) } /// Compute the QR decomposition of the matrix. /// /// Returns the tuple (Q,R). /// /// # Examples /// /// ``` /// use rulinalg::matrix::Matrix; /// /// let m = Matrix::new(3,3, vec![1.0,0.5,0.5,0.5,1.0,0.5,0.5,0.5,1.0]); /// /// let (q, r) = m.qr_decomp().unwrap(); /// ``` /// /// # Failures /// /// - Cannot compute the QR decomposition. 
pub fn qr_decomp(self) -> Result<(Matrix<T>, Matrix<T>), Error> { let m = self.rows(); let n = self.cols(); let mut q = Matrix::<T>::identity(m); let mut r = self; for i in 0..(n - ((m == n) as usize)) { let holder_transform: Result<Matrix<T>, Error>; { let lower_slice = MatrixSlice::from_matrix(&r, [i, i], m - i, 1); holder_transform = Matrix::make_householder(&lower_slice.iter().cloned().collect::<Vec<_>>()); } if !holder_transform.is_ok() { return Err(Error::new(ErrorKind::DecompFailure, "Cannot compute QR decomposition.")); } else { let mut holder_data = holder_transform.unwrap().into_vec(); // This bit is inefficient // using for now as we'll swap to lapack eventually. let mut h_full_data = Vec::with_capacity(m * m); for j in 0..m { let mut row_data: Vec<T>; if j < i { row_data = vec![T::zero(); m]; row_data[j] = T::one(); h_full_data.extend(row_data); } else { row_data = vec![T::zero(); i]; h_full_data.extend(row_data); h_full_data.extend(holder_data.drain(..m - i)); } } let h = Matrix::new(m, m, h_full_data); q = q * &h; r = h * &r; } } Ok((q, r)) } /// Converts matrix to bidiagonal form /// /// Returns (B, U, V), where B is bidiagonal and `self = U B V_T`. /// /// Note that if `self` has `self.rows() > self.cols()` the matrix will /// be transposed and then reduced - this will lead to a sub-diagonal instead /// of super-diagonal. /// /// # Failures /// /// - The matrix cannot be reduced to bidiagonal form. 
pub fn bidiagonal_decomp(mut self) -> Result<(Matrix<T>, Matrix<T>, Matrix<T>), Error> { let mut flipped = false; if self.rows < self.cols { flipped = true; self = self.transpose() } let m = self.rows; let n = self.cols; let mut u = Matrix::identity(m); let mut v = Matrix::identity(n); for k in 0..n { let h_holder: Matrix<T>; { let lower_slice = MatrixSlice::from_matrix(&self, [k, k], m - k, 1); h_holder = try!(Matrix::make_householder(&lower_slice.iter() .cloned() .collect::<Vec<_>>()) .map_err(|_| { Error::new(ErrorKind::DecompFailure, "Cannot compute bidiagonal form.") })); } { // Apply householder on the left to kill under diag. let lower_self_block = MatrixSliceMut::from_matrix(&mut self, [k, k], m - k, n - k); let transformed_self = &h_holder * &lower_self_block; lower_self_block.set_to(transformed_self.as_slice()); let lower_u_block = MatrixSliceMut::from_matrix(&mut u, [0, k], m, m - k); let transformed_u = &lower_u_block * h_holder; lower_u_block.set_to(transformed_u.as_slice()); } if k < n - 2 { let row: &[T]; unsafe { // Get the kth row from column k+1 to end. row = slice::from_raw_parts(self.data .as_ptr() .offset((k * self.cols + k + 1) as isize), n - k - 1); } let row_h_holder = try!(Matrix::make_householder(row).map_err(|_| { Error::new(ErrorKind::DecompFailure, "Cannot compute bidiagonal form.") })); { // Apply householder on the right to kill right of super diag. let lower_self_block = MatrixSliceMut::from_matrix(&mut self, [k, k + 1], m - k, n - k - 1); let transformed_self = &lower_self_block * &row_h_holder; lower_self_block.set_to(transformed_self.as_slice()); let lower_v_block = MatrixSliceMut::from_matrix(&mut v, [0, k + 1], n, n - k - 1); let transformed_v = &lower_v_block * row_h_holder; lower_v_block.set_to(transformed_v.as_slice()); } } } // Trim off the zerod blocks. 
self.data.truncate(n * n); self.rows = n; u = MatrixSlice::from_matrix(&u, [0, 0], m, n).into_matrix(); if flipped { Ok((self.transpose(), v, u)) } else { Ok((self, u, v)) } } } /// Ensures that all singular values in the given singular value decomposition /// are non-negative, making necessary corrections to the singular vectors. /// /// The SVD is represented by matrices `(b, u, v)`, where `b` is the diagonal matrix /// containing the singular values, `u` is the matrix of left singular vectors /// and v is the matrix of right singular vectors. fn correct_svd_signs<T>(mut b: Matrix<T>, mut u: Matrix<T>, mut v: Matrix<T>) -> (Matrix<T>, Matrix<T>, Matrix<T>) where T: Any + Float + Signed { // When correcting the signs of the singular vectors, we can choose // to correct EITHER u or v. We make the choice depending on which matrix has the // least number of rows. Later we will need to multiply all elements in columns by // -1, which might be significantly faster in corner cases if we pick the matrix // with the least amount of rows. { let ref mut shortest_matrix = if u.rows() <= v.rows() { &mut u } else { &mut v }; let column_length = shortest_matrix.rows(); let num_singular_values = cmp::min(b.rows(), b.cols()); for i in 0 .. 
num_singular_values { if b[[i, i]] < T::zero() { // Swap sign of singular value and column in u b[[i, i]] = b[[i, i]].abs(); // Access the column as a slice and flip sign let mut column = shortest_matrix.sub_slice_mut([0, i], column_length, 1); column *= -T::one(); } } } (b, u, v) } fn sort_svd<T>(mut b: Matrix<T>, mut u: Matrix<T>, mut v: Matrix<T>) -> (Matrix<T>, Matrix<T>, Matrix<T>) where T: Any + Float + Signed { assert!(u.cols() == b.cols() && b.cols() == v.cols()); // This unfortunately incurs two allocations since we have no (simple) // way to iterate over a matrix diagonal, only to copy it into a new Vector let mut indexed_sorted_values: Vec<_> = b.diag().into_vec() .into_iter() .enumerate() .collect(); // Sorting a vector of indices simultaneously with the singular values // gives us a mapping between old and new (final) column indices. indexed_sorted_values.sort_by(|&(_, ref x), &(_, ref y)| x.partial_cmp(y).expect("All singular values should be finite, and thus sortable.") .reverse() ); // Set the diagonal elements of the singular value matrix for (i, &(_, value)) in indexed_sorted_values.iter().enumerate() { b[[i, i]] = value; } // Assuming N columns, the simultaneous sorting of indices and singular values yields // a set of N (i, j) pairs which correspond to columns which must be swapped. However, // for any (i, j) in this set, there is also (j, i). Keeping both of these would make us // swap the columns back and forth, so we must remove the duplicates. We can avoid // any further sorting or hashsets or similar by noting that we can simply // remove any (i, j) for which j >= i. This also removes (i, i) pairs, // i.e. columns that don't need to be swapped. 
let swappable_pairs = indexed_sorted_values.into_iter() .enumerate() .map(|(new_index, (old_index, _))| (old_index, new_index)) .filter(|&(old_index, new_index)| old_index < new_index); for (old_index, new_index) in swappable_pairs { u.swap_cols(old_index, new_index); v.swap_cols(old_index, new_index); } (b, u, v) } impl<T: Any + Float + Signed + MachineEpsilon> Matrix<T> { /// Singular Value Decomposition /// /// Computes the SVD using the Golub-Reinsch algorithm. /// /// Returns Σ, U, V, such that `self` = U Σ V<sup>T</sup>. Σ is a diagonal matrix whose elements /// correspond to the non-negative singular values of the matrix. The singular values are ordered in /// non-increasing order. U and V have orthonormal columns, and each column represents the /// left and right singular vectors for the corresponding singular value in Σ, respectively. /// /// If `self` has M rows and N columns, the dimensions of the returned matrices /// are as follows. /// /// If M >= N: /// /// - `Σ`: N x N /// - `U`: M x N /// - `V`: N x N /// /// If M < N: /// /// - `Σ`: M x M /// - `U`: M x M /// - `V`: N x M /// /// Note: This version of the SVD is sometimes referred to as the 'economy SVD'. /// /// # Failures /// /// This function may fail in some cases. The current decomposition whilst being /// efficient is fairly basic. Hopefully the algorithm can be made not to fail in the near future. pub fn svd(self) -> Result<(Matrix<T>, Matrix<T>, Matrix<T>), Error> { let (b, u, v) = try!(self.svd_unordered()); Ok(sort_svd(b, u, v)) } fn svd_unordered(self) -> Result<(Matrix<T>, Matrix<T>, Matrix<T>), Error> { let (b, u, v) = try!(self.svd_golub_reinsch()); // The Golub-Reinsch implementation sometimes spits out negative singular values, // so we need to correct these. Ok(correct_svd_signs(b, u, v)) } fn svd_golub_reinsch(mut self) -> Result<(Matrix<T>, Matrix<T>, Matrix<T>), Error> { let mut flipped = false; // The algorithm assumes rows > cols. 
If this is not the case we transpose and fix later. if self.cols > self.rows { self = self.transpose(); flipped = true; } let eps = T::from(3.0).unwrap() * T::epsilon(); let n = self.cols; // Get the bidiagonal decomposition let (mut b, mut u, mut v) = try!(self.bidiagonal_decomp() .map_err(|_| Error::new(ErrorKind::DecompFailure, "Could not compute SVD."))); loop { // Values to count the size of lower diagonal block let mut q = 0; let mut on_lower = true; // Values to count top block let mut p = 0; let mut on_middle = false; // Iterate through and hard set the super diag if converged for i in (0..n - 1).rev() { let (b_ii, b_sup_diag, diag_abs_sum): (T, T, T); unsafe { b_ii = *b.get_unchecked([i, i]); b_sup_diag = b.get_unchecked([i, i + 1]).abs(); diag_abs_sum = eps * (b_ii.abs() + b.get_unchecked([i + 1, i + 1]).abs()); } if b_sup_diag <= diag_abs_sum { // Adjust q or p to define boundaries of sup-diagonal box if on_lower { q += 1; } else if on_middle { on_middle = false; p = i + 1; } unsafe { *b.get_unchecked_mut([i, i + 1]) = T::zero(); } } else { if on_lower { // No longer on the lower diagonal on_middle = true; on_lower = false; } } } // We have converged! if q == n - 1 { break; } // Zero off diagonals if needed. 
for i in p..n - q - 1 { let (b_ii, b_sup_diag): (T, T); unsafe { b_ii = *b.get_unchecked([i, i]); b_sup_diag = *b.get_unchecked([i, i + 1]); } if b_ii.abs() < eps { let (c, s) = Matrix::<T>::givens_rot(b_ii, b_sup_diag); let givens = Matrix::new(2, 2, vec![c, s, -s, c]); let b_i = MatrixSliceMut::from_matrix(&mut b, [i, i], 1, 2); let zerod_line = &b_i * givens; b_i.set_to(zerod_line.as_slice()); } } // Apply Golub-Kahan svd step unsafe { try!(Matrix::<T>::golub_kahan_svd_step(&mut b, &mut u, &mut v, p, q) .map_err(|_| Error::new(ErrorKind::DecompFailure, "Could not compute SVD."))); } } if flipped { Ok((b.transpose(), v, u)) } else { Ok((b, u, v)) } } /// This function is unsafe as it makes assumptions about the dimensions /// of the inputs matrices and does not check them. As a result if misused /// this function can call `get_unchecked` on invalid indices. unsafe fn golub_kahan_svd_step(b: &mut Matrix<T>, u: &mut Matrix<T>, v: &mut Matrix<T>, p: usize, q: usize) -> Result<(), Error> { let n = b.rows(); // C is the lower, right 2x2 square of aTa, where a is the // middle block of b (between p and n-q). // // Computed as xTx + yTy, where y is the bottom 2x2 block of a // and x are the two columns above it within a. let c: Matrix<T>; { let y = MatrixSlice::from_matrix(&b, [n - q - 2, n - q - 2], 2, 2).into_matrix(); if n - q - p - 2 > 0 { let x = MatrixSlice::from_matrix(&b, [p, n - q - 2], n - q - p - 2, 2); c = x.into_matrix().transpose() * x + y.transpose() * y; } else { c = y.transpose() * y; } } let c_eigs = try!(c.eigenvalues()); // Choose eigenvalue closes to c[1,1]. 
let lambda: T; if (c_eigs[0] - *c.get_unchecked([1, 1])).abs() < (c_eigs[1] - *c.get_unchecked([1, 1])).abs() { lambda = c_eigs[0]; } else { lambda = c_eigs[1]; } let b_pp = *b.get_unchecked([p, p]); let mut alpha = (b_pp * b_pp) - lambda; let mut beta = b_pp * *b.get_unchecked([p, p + 1]); for k in p..n - q - 1 { // Givens rot on columns k and k + 1 let (c, s) = Matrix::<T>::givens_rot(alpha, beta); let givens_mat = Matrix::new(2, 2, vec![c, s, -s, c]); { // Pick the rows from b to be zerod. let b_block = MatrixSliceMut::from_matrix(b, [k.saturating_sub(1), k], cmp::min(3, n - k.saturating_sub(1)), 2); let transformed = &b_block * &givens_mat; b_block.set_to(transformed.as_slice()); let v_block = MatrixSliceMut::from_matrix(v, [0, k], n, 2); let transformed = &v_block * &givens_mat; v_block.set_to(transformed.as_slice()); } alpha = *b.get_unchecked([k, k]); beta = *b.get_unchecked([k + 1, k]); let (c, s) = Matrix::<T>::givens_rot(alpha, beta); let givens_mat = Matrix::new(2, 2, vec![c, -s, s, c]); { // Pick the columns from b to be zerod. let b_block = MatrixSliceMut::from_matrix(b, [k, k], 2, cmp::min(3, n - k)); let transformed = &givens_mat * &b_block; b_block.set_to(transformed.as_slice()); let m = u.rows(); let u_block = MatrixSliceMut::from_matrix(u, [0, k], m, 2); let transformed = &u_block * givens_mat.transpose(); u_block.set_to(transformed.as_slice()); } if k + 2 < n - q { alpha = *b.get_unchecked([k, k + 1]); beta = *b.get_unchecked([k, k + 2]); } } Ok(()) } /// Returns H, where H is the upper hessenberg form. /// /// If the transformation matrix is also required, you should /// use `upper_hess_decomp`. /// /// # Examples /// /// ``` /// use rulinalg::matrix::Matrix; /// /// let a = Matrix::new(4,4,vec![2.,0.,1.,1.,2.,0.,1.,2.,1.,2.,0.,0.,2.,0.,1.,1.]); /// let h = a.upper_hessenberg(); /// /// println!("{:?}", h.expect("Could not get upper Hessenberg form.").data()); /// ``` /// /// # Panics /// /// - The matrix is not square. 
/// /// # Failures /// /// - The matrix cannot be reduced to upper hessenberg form. pub fn upper_hessenberg(mut self) -> Result<Matrix<T>, Error> { let n = self.rows; assert!(n == self.cols, "Matrix must be square to produce upper hessenberg."); for i in 0..n - 2 { let h_holder_vec: Matrix<T>; { let lower_slice = MatrixSlice::from_matrix(&self, [i + 1, i], n - i - 1, 1); // Try to get the house holder transform - else map error and pass up. h_holder_vec = try!(Matrix::make_householder_vec(&lower_slice.iter() .cloned() .collect::<Vec<_>>()) .map_err(|_| { Error::new(ErrorKind::DecompFailure, "Cannot compute upper Hessenberg form.") })); } { // Apply holder on the left let mut block = MatrixSliceMut::from_matrix(&mut self, [i + 1, i], n - i - 1, n - i); block -= &h_holder_vec * (h_holder_vec.transpose() * &block) * (T::one() + T::one()); } { // Apply holder on the right let mut block = MatrixSliceMut::from_matrix(&mut self, [0, i + 1], n, n - i - 1); block -= (&block * &h_holder_vec) * h_holder_vec.transpose() * (T::one() + T::one()); } } // Enforce upper hessenberg for i in 0..self.cols - 2 { for j in i + 2..self.rows { unsafe { *self.get_unchecked_mut([j, i]) = T::zero(); } } } Ok(self) } /// Returns (U,H), where H is the upper hessenberg form /// and U is the unitary transform matrix. /// /// Note: The current transform matrix seems broken... /// /// # Examples /// /// ``` /// use rulinalg::matrix::{Matrix, BaseMatrix}; /// /// let a = Matrix::new(3,3,vec![1.,2.,3.,4.,5.,6.,7.,8.,9.]); /// /// // u is the transform, h is the upper hessenberg form. /// let (u,h) = a.clone().upper_hess_decomp().expect("This matrix should decompose!"); /// /// println!("The hess : {:?}", h.data()); /// println!("Manual hess : {:?}", (u.transpose() * a * u).data()); /// ``` /// /// # Panics /// /// - The matrix is not square. /// /// # Failures /// /// - The matrix cannot be reduced to upper hessenberg form. 
pub fn upper_hess_decomp(self) -> Result<(Matrix<T>, Matrix<T>), Error> { let n = self.rows; assert!(n == self.cols, "Matrix must be square to produce upper hessenberg."); // First we form the transformation. let mut transform = Matrix::identity(n); for i in (0..n - 2).rev() { let h_holder_vec: Matrix<T>; { let lower_slice = MatrixSlice::from_matrix(&self, [i + 1, i], n - i - 1, 1); h_holder_vec = try!(Matrix::make_householder_vec(&lower_slice.iter() .cloned() .collect::<Vec<_>>()) .map_err(|_| { Error::new(ErrorKind::DecompFailure, "Could not compute eigenvalues.") })); } let mut trans_block = MatrixSliceMut::from_matrix(&mut transform, [i + 1, i + 1], n - i - 1, n - i - 1); trans_block -= &h_holder_vec * (h_holder_vec.transpose() * &trans_block) * (T::one() + T::one()); } // Now we reduce to upper hessenberg Ok((transform, try!(self.upper_hessenberg()))) } fn balance_matrix(&mut self) { let n = self.rows(); let radix = T::one() + T::one(); debug_assert!(n == self.cols(), "Matrix must be square to produce balance matrix."); let mut d = Matrix::<T>::identity(n); let mut converged = false; while !converged { converged = true; for i in 0..n { let mut c = self.select_cols(&[i]).norm(); let mut r = self.select_rows(&[i]).norm(); let s = c * c + r * r; let mut f = T::one(); while c < r / radix { c = c * radix; r = r / radix; f = f * radix; } while c >= r * radix { c = c / radix; r = r * radix; f = f / radix; } if (c * c + r * r) < cast::<f64, T>(0.95).unwrap() * s { converged = false; d.data[i * (self.cols + 1)] = f * d.data[i * (self.cols + 1)]; for j in 0..n { self.data[j * self.cols + i] = f * self.data[j * self.cols + i]; self.data[i * self.cols + j] = self.data[i * self.cols + j] / f; } } } } } fn direct_2_by_2_eigenvalues(&self) -> Result<Vec<T>, Error> { // The characteristic polynomial of a 2x2 matrix A is // λ² − (a₁₁ + a₂₂)λ + (a₁₁a₂₂ − a₁₂a₂₁); // the quadratic formula suffices. 
let tr = self.data[0] + self.data[3]; let det = self.data[0] * self.data[3] - self.data[1] * self.data[2]; let two = T::one() + T::one(); let four = two + two; let discr = tr * tr - four * det; if discr < T::zero() { Err(Error::new(ErrorKind::DecompFailure, "Matrix has complex eigenvalues. Currently unsupported, sorry!")) } else { let discr_root = discr.sqrt(); Ok(vec![(tr - discr_root) / two, (tr + discr_root) / two]) } } fn francis_shift_eigenvalues(&self) -> Result<Vec<T>, Error> { let n = self.rows(); debug_assert!(n > 2, "Francis shift only works on matrices greater than 2x2."); debug_assert!(n == self.cols, "Matrix must be square for Francis shift."); let mut h = try!(self.clone() .upper_hessenberg() .map_err(|_| Error::new(ErrorKind::DecompFailure, "Could not compute eigenvalues."))); h.balance_matrix(); // The final index of the active matrix let mut p = n - 1; let eps = cast::<f64, T>(1e-20).expect("Failed to cast value for convergence check."); while p > 1 { let q = p - 1; let s = h[[q, q]] + h[[p, p]]; let t = h[[q, q]] * h[[p, p]] - h[[q, p]] * h[[p, q]]; let mut x = h[[0, 0]] * h[[0, 0]] + h[[0, 1]] * h[[1, 0]] - h[[0, 0]] * s + t; let mut y = h[[1, 0]] * (h[[0, 0]] + h[[1, 1]] - s); let mut z = h[[1, 0]] * h[[2, 1]]; for k in 0..p - 1 { let r = cmp::max(1, k) - 1; let householder = try!(Matrix::make_householder(&[x, y, z]).map_err(|_| { Error::new(ErrorKind::DecompFailure, "Could not compute eigenvalues.") })); { // Apply householder transformation to block (on the left) let h_block = MatrixSliceMut::from_matrix(&mut h, [k, r], 3, n - r); let transformed = &householder * &h_block; h_block.set_to(transformed.as_slice()); } let r = cmp::min(k + 4, p + 1); { // Apply householder transformation to the block (on the right) let h_block = MatrixSliceMut::from_matrix(&mut h, [0, k], r, 3); let transformed = &h_block * householder.transpose(); h_block.set_to(transformed.as_slice()); } x = h[[k + 1, k]]; y = h[[k + 2, k]]; if k < p - 2 { z = h[[k + 3, k]]; } } 
let (c, s) = Matrix::givens_rot(x, y); let givens_mat = Matrix::new(2, 2, vec![c, -s, s, c]); { // Apply Givens rotation to the block (on the left) let h_block = MatrixSliceMut::from_matrix(&mut h, [q, p - 2], 2, n - p + 2); let transformed = &givens_mat * &h_block; h_block.set_to(transformed.as_slice()); } { // Apply Givens rotation to block (on the right) let h_block = MatrixSliceMut::from_matrix(&mut h, [0, q], p + 1, 2); let transformed = &h_block * givens_mat.transpose(); h_block.set_to(transformed.as_slice()); } // Check for convergence if abs(h[[p, q]]) < eps * (abs(h[[q, q]]) + abs(h[[p, p]])) { h.data[p * h.cols + q] = T::zero(); p -= 1; } else if abs(h[[p - 1, q - 1]]) < eps * (abs(h[[q - 1, q - 1]]) + abs(h[[q, q]])) { h.data[(p - 1) * h.cols + q - 1] = T::zero(); p -= 2; } } Ok(h.diag().into_vec()) } /// Eigenvalues of a square matrix. /// /// Returns a Vec of eigenvalues. /// /// # Examples /// /// ``` /// use rulinalg::matrix::Matrix; /// /// let a = Matrix::new(4,4, (1..17).map(|v| v as f64).collect::<Vec<f64>>()); /// let e = a.eigenvalues().expect("We should be able to compute these eigenvalues!"); /// println!("{:?}", e); /// ``` /// /// # Panics /// /// - The matrix is not square. /// /// # Failures /// /// - Eigenvalues cannot be computed. 
pub fn eigenvalues(&self) -> Result<Vec<T>, Error> { let n = self.rows(); assert!(n == self.cols, "Matrix must be square for eigenvalue computation."); match n { 1 => Ok(vec![self.data[0]]), 2 => self.direct_2_by_2_eigenvalues(), _ => self.francis_shift_eigenvalues(), } } fn direct_2_by_2_eigendecomp(&self) -> Result<(Vec<T>, Matrix<T>), Error> { let eigenvalues = try!(self.eigenvalues()); // Thanks to // http://www.math.harvard.edu/archive/21b_fall_04/exhibits/2dmatrices/index.html // for this characterization— if self.data[2] != T::zero() { let decomp_data = vec![eigenvalues[0] - self.data[3], eigenvalues[1] - self.data[3], self.data[2], self.data[2]]; Ok((eigenvalues, Matrix::new(2, 2, decomp_data))) } else if self.data[1] != T::zero() { let decomp_data = vec![self.data[1], self.data[1], eigenvalues[0] - self.data[0], eigenvalues[1] - self.data[0]]; Ok((eigenvalues, Matrix::new(2, 2, decomp_data))) } else { Ok((eigenvalues, Matrix::new(2, 2, vec![T::one(), T::zero(), T::zero(), T::one()]))) } } fn francis_shift_eigendecomp(&self) -> Result<(Vec<T>, Matrix<T>), Error> { let n = self.rows(); debug_assert!(n > 2, "Francis shift only works on matrices greater than 2x2."); debug_assert!(n == self.cols, "Matrix must be square for Francis shift."); let (u, mut h) = try!(self.clone().upper_hess_decomp().map_err(|_| { Error::new(ErrorKind::DecompFailure, "Could not compute eigen decomposition.") })); h.balance_matrix(); let mut transformation = Matrix::identity(n); // The final index of the active matrix let mut p = n - 1; let eps = cast::<f64, T>(1e-20).expect("Failed to cast value for convergence check."); while p > 1 { let q = p - 1; let s = h[[q, q]] + h[[p, p]]; let t = h[[q, q]] * h[[p, p]] - h[[q, p]] * h[[p, q]]; let mut x = h[[0, 0]] * h[[0, 0]] + h[[0, 1]] * h[[1, 0]] - h[[0, 0]] * s + t; let mut y = h[[1, 0]] * (h[[0, 0]] + h[[1, 1]] - s); let mut z = h[[1, 0]] * h[[2, 1]]; for k in 0..p - 1 { let r = cmp::max(1, k) - 1; let householder = 
try!(Matrix::make_householder(&[x, y, z]).map_err(|_| { Error::new(ErrorKind::DecompFailure, "Could not compute eigen decomposition.") })); { // Apply householder transformation to block (on the left) let h_block = MatrixSliceMut::from_matrix(&mut h, [k, r], 3, n - r); let transformed = &householder * &h_block; h_block.set_to(transformed.as_slice()); } let r = cmp::min(k + 4, p + 1); { // Apply householder transformation to the block (on the right) let h_block = MatrixSliceMut::from_matrix(&mut h, [0, k], r, 3); let transformed = &h_block * householder.transpose(); h_block.set_to(transformed.as_slice()); } { // Update the transformation matrix let trans_block = MatrixSliceMut::from_matrix(&mut transformation, [0, k], n, 3); let transformed = &trans_block * householder.transpose(); trans_block.set_to(transformed.as_slice()); } x = h[[k + 1, k]]; y = h[[k + 2, k]]; if k < p - 2 { z = h[[k + 3, k]]; } } let (c, s) = Matrix::givens_rot(x, y); let givens_mat = Matrix::new(2, 2, vec![c, -s, s, c]); { // Apply Givens rotation to the block (on the left) let h_block = MatrixSliceMut::from_matrix(&mut h, [q, p - 2], 2, n - p + 2); let transformed = &givens_mat * &h_block; h_block.set_to(transformed.as_slice()); } { // Apply Givens rotation to block (on the right) let h_block = MatrixSliceMut::from_matrix(&mut h, [0, q], p + 1, 2); let transformed = &h_block * givens_mat.transpose(); h_block.set_to(transformed.as_slice()); } { // Update the transformation matrix let trans_block = MatrixSliceMut::from_matrix(&mut transformation, [0, q], n, 2); let transformed = &trans_block * givens_mat.transpose(); trans_block.set_to(transformed.as_slice()); } // Check for convergence if abs(h[[p, q]]) < eps * (abs(h[[q, q]]) + abs(h[[p, p]])) { h.data[p * h.cols + q] = T::zero(); p -= 1; } else if abs(h[[p - 1, q - 1]]) < eps * (abs(h[[q - 1, q - 1]]) + abs(h[[q, q]])) { h.data[(p - 1) * h.cols + q - 1] = T::zero(); p -= 2; } } Ok((h.diag().into_vec(), u * transformation)) } /// 
Eigendecomposition of a square matrix. /// /// Returns a Vec of eigenvalues, and a matrix with eigenvectors as the columns. /// /// The eigenvectors are only gauranteed to be correct if the matrix is real-symmetric. /// /// # Examples /// /// ``` /// use rulinalg::matrix::Matrix; /// /// let a = Matrix::new(3,3,vec![3.,2.,4.,2.,0.,2.,4.,2.,3.]); /// /// let (e, m) = a.eigendecomp().expect("We should be able to compute this eigendecomp!"); /// println!("{:?}", e); /// println!("{:?}", m.data()); /// ``` /// /// # Panics /// /// - The matrix is not square. /// /// # Failures /// /// - The eigen decomposition can not be computed. pub fn eigendecomp(&self) -> Result<(Vec<T>, Matrix<T>), Error> { let n = self.rows(); assert!(n == self.cols, "Matrix must be square for eigendecomp."); match n { 1 => Ok((vec![self.data[0]], Matrix::new(1, 1, vec![T::one()]))), 2 => self.direct_2_by_2_eigendecomp(), _ => self.francis_shift_eigendecomp(), } } } impl<T> Matrix<T> where T: Any + Copy + One + Zero + Neg<Output=T> + Add<T, Output=T> + Mul<T, Output=T> + Sub<T, Output=T> + Div<T, Output=T> + PartialOrd { /// Computes L, U, and P for LUP decomposition. /// /// Returns L,U, and P respectively. /// /// # Examples /// /// ``` /// use rulinalg::matrix::Matrix; /// /// let a = Matrix::new(3,3, vec![1.0,2.0,0.0, /// 0.0,3.0,4.0, /// 5.0, 1.0, 2.0]); /// /// let (l,u,p) = a.lup_decomp().expect("This matrix should decompose!"); /// ``` /// /// # Panics /// /// - Matrix is not square. /// /// # Failures /// /// - Matrix cannot be LUP decomposed. 
pub fn lup_decomp(&self) -> Result<(Matrix<T>, Matrix<T>, Matrix<T>), Error> { let n = self.cols; assert!(self.rows == n, "Matrix must be square for LUP decomposition."); let mut l = Matrix::<T>::zeros(n, n); let mut u = Matrix::<T>::zeros(n, n); let mt = self.transpose(); let mut p = Matrix::<T>::identity(n); // Compute the permutation matrix for i in 0..n { let (row,_) = utils::argmax(&mt.data[i*(n+1)..(i+1)*n]); if row != 0 { for j in 0..n { p.data.swap(i*n + j, row*n+j) } } } let a_2 = &p * self; for i in 0..n { l.data[i*(n+1)] = T::one(); for j in 0..i+1 { let mut s1 = T::zero(); for k in 0..j { s1 = s1 + l.data[j*n + k] * u.data[k*n + i]; } u.data[j*n + i] = a_2[[j,i]] - s1; } for j in i..n { let mut s2 = T::zero(); for k in 0..i { s2 = s2 + l.data[j*n + k] * u.data[k*n + i]; } let denom = u[[i,i]]; if denom == T::zero() { return Err(Error::new(ErrorKind::DivByZero, "Singular matrix found in LUP decomposition. \ A value in the diagonal of U == 0.0.")); } l.data[j*n + i] = (a_2[[j,i]] - s2) / denom; } } Ok((l,u,p)) } } #[cfg(test)] mod tests { use matrix::{Matrix, BaseMatrix}; use vector::Vector; use super::sort_svd; fn validate_bidiag(mat: &Matrix<f64>, b: &Matrix<f64>, u: &Matrix<f64>, v: &Matrix<f64>, upper: bool) { for (idx, row) in b.iter_rows().enumerate() { let pair_start = if upper { idx } else { idx.saturating_sub(1) }; assert!(!row.iter().take(pair_start).any(|&x| x > 1e-10)); assert!(!row.iter().skip(pair_start + 2).any(|&x| x > 1e-10)); } let recovered = u * b * v.transpose(); assert_eq!(recovered.rows(), mat.rows()); assert_eq!(recovered.cols(), mat.cols()); assert!(!mat.data() .iter() .zip(recovered.data().iter()) .any(|(&x, &y)| (x - y).abs() > 1e-10)); } #[test] fn test_bidiagonal_square() { let mat = Matrix::new(5, 5, vec![1f64, 2.0, 3.0, 4.0, 5.0, 2.0, 4.0, 1.0, 2.0, 1.0, 3.0, 1.0, 7.0, 1.0, 1.0, 4.0, 2.0, 1.0, -1.0, 3.0, 5.0, 1.0, 1.0, 3.0, 2.0]); let (b, u, v) = mat.clone().bidiagonal_decomp().unwrap(); validate_bidiag(&mat, &b, &u, &v, 
true); } #[test] fn test_bidiagonal_non_square() { let mat = Matrix::new(5, 3, vec![1f64, 2.0, 3.0, 4.0, 5.0, 2.0, 4.0, 1.0, 2.0, 1.0, 3.0, 1.0, 7.0, 1.0, 1.0]); let (b, u, v) = mat.clone().bidiagonal_decomp().unwrap(); validate_bidiag(&mat, &b, &u, &v, true); let mat = Matrix::new(3, 5, vec![1f64, 2.0, 3.0, 4.0, 5.0, 2.0, 4.0, 1.0, 2.0, 1.0, 3.0, 1.0, 7.0, 1.0, 1.0]); let (b, u, v) = mat.clone().bidiagonal_decomp().unwrap(); validate_bidiag(&mat, &b, &u, &v, false); } fn validate_svd(mat: &Matrix<f64>, b: &Matrix<f64>, u: &Matrix<f64>, v: &Matrix<f64>) { // b is diagonal (the singular values) for (idx, row) in b.iter_rows().enumerate() { assert!(!row.iter().take(idx).any(|&x| x > 1e-10)); assert!(!row.iter().skip(idx + 1).any(|&x| x > 1e-10)); // Assert non-negativity of diagonal elements assert!(row[idx] >= 0.0); } let recovered = u * b * v.transpose(); assert_eq!(recovered.rows(), mat.rows()); assert_eq!(recovered.cols(), mat.cols()); assert!(!mat.data() .iter() .zip(recovered.data().iter()) .any(|(&x, &y)| (x - y).abs() > 1e-10)); // The transposition is due to the fact that there does not exist // any column iterators at the moment, and we need to simultaneously iterate // over the columns. Once they do exist, we should rewrite // the below iterators to use iter_cols() or whatever instead. let ref u_transposed = u.transpose(); let ref v_transposed = v.transpose(); let ref mat_transposed = mat.transpose(); let mut singular_triplets = u_transposed.iter_rows().zip(b.diag().into_iter()).zip(v_transposed.iter_rows()) // chained zipping results in nested tuple. Flatten it. 
.map(|((u_col, singular_value), v_col)| (Vector::new(u_col), singular_value, Vector::new(v_col))); assert!(singular_triplets.by_ref() // For a matrix M, each singular value σ and left and right singular vectors u and v respectively // satisfy M v = σ u, so we take the difference .map(|(ref u, sigma, ref v)| mat * v - u * sigma) .flat_map(|v| v.into_vec().into_iter()) .all(|x| x.abs() < 1e-10)); assert!(singular_triplets.by_ref() // For a matrix M, each singular value σ and left and right singular vectors u and v respectively // satisfy M_transposed u = σ v, so we take the difference .map(|(ref u, sigma, ref v)| mat_transposed * u - v * sigma) .flat_map(|v| v.into_vec().into_iter()) .all(|x| x.abs() < 1e-10)); } #[test] fn test_sort_svd() { let u = Matrix::new(2, 3, vec![1.0, 2.0, 3.0, 4.0, 5.0, 6.0]); let b = Matrix::new(3, 3, vec![4.0, 0.0, 0.0, 0.0, 8.0, 0.0, 0.0, 0.0, 2.0]); let v = Matrix::new(3, 3, vec![21.0, 22.0, 23.0, 24.0, 25.0, 26.0, 27.0, 28.0, 29.0]); let (b, u, v) = sort_svd(b, u, v); assert_eq!(b.data(), &vec![8.0, 0.0, 0.0, 0.0, 4.0, 0.0, 0.0, 0.0, 2.0]); assert_eq!(u.data(), &vec![2.0, 1.0, 3.0, 5.0, 4.0, 6.0]); assert_eq!(v.data(), &vec![22.0, 21.0, 23.0, 25.0, 24.0, 26.0, 28.0, 27.0, 29.0]); } #[test] fn test_svd_tall_matrix() { // Note: This matrix is not arbitrary. It has been constructed specifically so that // the "natural" order of the singular values it not sorted by default. 
let mat = Matrix::new(5, 4, vec![ 3.61833700244349288, -3.28382346228211697, 1.97968027781346501, -0.41869628192662156, 3.96046289599926427, 0.70730060716580723, -2.80552479438772817, -1.45283286109873933, 1.44435028724617442, 1.27749196276785826, -1.09858397535426366, -0.03159619816434689, 1.13455445826500667, 0.81521390274755756, 3.99123446373437263, -2.83025703359666192, -3.30895752093770579, -0.04979044289857298, 3.03248594516832792, 3.85962479743330977]); let (b, u, v) = mat.clone().svd().unwrap(); let expected_values = vec![8.0, 6.0, 4.0, 2.0]; validate_svd(&mat, &b, &u, &v); // Assert the singular values are what we expect assert!(expected_values.iter() .zip(b.diag().data().iter()) .all(|(expected, actual)| (expected - actual).abs() < 1e-14)); } #[test] fn test_svd_short_matrix() { // Note: This matrix is not arbitrary. It has been constructed specifically so that // the "natural" order of the singular values it not sorted by default. let mat = Matrix::new(4, 5, vec![ 3.61833700244349288, 3.96046289599926427, 1.44435028724617442, 1.13455445826500645, -3.30895752093770579, -3.28382346228211697, 0.70730060716580723, 1.27749196276785826, 0.81521390274755756, -0.04979044289857298, 1.97968027781346545, -2.80552479438772817, -1.09858397535426366, 3.99123446373437263, 3.03248594516832792, -0.41869628192662156, -1.45283286109873933, -0.03159619816434689, -2.83025703359666192, 3.85962479743330977]); let (b, u, v) = mat.clone().svd().unwrap(); let expected_values = vec![8.0, 6.0, 4.0, 2.0]; validate_svd(&mat, &b, &u, &v); // Assert the singular values are what we expect assert!(expected_values.iter() .zip(b.diag().data().iter()) .all(|(expected, actual)| (expected - actual).abs() < 1e-14)); } #[test] fn test_svd_square_matrix() { let mat = Matrix::new(5, 5, vec![1.0, 2.0, 3.0, 4.0, 5.0, 2.0, 4.0, 1.0, 2.0, 1.0, 3.0, 1.0, 7.0, 1.0, 1.0, 4.0, 2.0, 1.0, -1.0, 3.0, 5.0, 1.0, 1.0, 3.0, 2.0]); let expected_values = vec![ 12.1739747429271112, 5.2681047320525831, 
4.4942269799769843, 2.9279675877385123, 2.8758200827412224]; let (b, u, v) = mat.clone().svd().unwrap(); validate_svd(&mat, &b, &u, &v); // Assert the singular values are what we expect assert!(expected_values.iter() .zip(b.diag().data().iter()) .all(|(expected, actual)| (expected - actual).abs() < 1e-12)); } #[test] fn test_1_by_1_matrix_eigenvalues() { let a = Matrix::new(1, 1, vec![3.]); assert_eq!(vec![3.], a.eigenvalues().unwrap()); } #[test] fn test_2_by_2_matrix_eigenvalues() { let a = Matrix::new(2, 2, vec![1., 2., 3., 4.]); // characteristic polynomial is λ² − 5λ − 2 = 0 assert_eq!(vec![(5. - (33.0f32).sqrt()) / 2., (5. + (33.0f32).sqrt()) / 2.], a.eigenvalues().unwrap()); } #[test] fn test_2_by_2_matrix_zeros_eigenvalues() { let a = Matrix::new(2, 2, vec![0.; 4]); // characteristic polynomial is λ² = 0 assert_eq!(vec![0.0, 0.0], a.eigenvalues().unwrap()); } #[test] fn test_2_by_2_matrix_complex_eigenvalues() { // This test currently fails - complex eigenvalues would be nice though! 
let a = Matrix::new(2, 2, vec![1.0, -3.0, 1.0, 1.0]); // characteristic polynomial is λ² − λ + 4 = 0 // Decomposition will fail assert!(a.eigenvalues().is_err()); } #[test] fn test_2_by_2_matrix_eigendecomp() { let a = Matrix::new(2, 2, vec![20., 4., 20., 16.]); let (eigenvals, eigenvecs) = a.eigendecomp().unwrap(); let lambda_1 = eigenvals[0]; let lambda_2 = eigenvals[1]; let v1 = Vector::new(vec![eigenvecs[[0, 0]], eigenvecs[[1, 0]]]); let v2 = Vector::new(vec![eigenvecs[[0, 1]], eigenvecs[[1, 1]]]); let epsilon = 0.00001; assert!((&a * &v1 - &v1 * lambda_1).into_vec().iter().all(|&c| c < epsilon)); assert!((&a * &v2 - &v2 * lambda_2).into_vec().iter().all(|&c| c < epsilon)); } #[test] fn test_3_by_3_eigenvals() { let a = Matrix::new(3, 3, vec![17f64, 22., 27., 22., 29., 36., 27., 36., 45.]); let eigs = a.eigenvalues().unwrap(); let eig_1 = 90.4026; let eig_2 = 0.5973; let eig_3 = 0.0; assert!(eigs.iter().any(|x| (x - eig_1).abs() < 1e-4)); assert!(eigs.iter().any(|x| (x - eig_2).abs() < 1e-4)); assert!(eigs.iter().any(|x| (x - eig_3).abs() < 1e-4)); } #[test] fn test_5_by_5_eigenvals() { let a = Matrix::new(5, 5, vec![1f64, 2.0, 3.0, 4.0, 5.0, 2.0, 4.0, 1.0, 2.0, 1.0, 3.0, 1.0, 7.0, 1.0, 1.0, 4.0, 2.0, 1.0, -1.0, 3.0, 5.0, 1.0, 1.0, 3.0, 2.0]); let eigs = a.eigenvalues().unwrap(); let eig_1 = 12.174; let eig_2 = 5.2681; let eig_3 = -4.4942; let eig_4 = 2.9279; let eig_5 = -2.8758; assert!(eigs.iter().any(|x| (x - eig_1).abs() < 1e-4)); assert!(eigs.iter().any(|x| (x - eig_2).abs() < 1e-4)); assert!(eigs.iter().any(|x| (x - eig_3).abs() < 1e-4)); assert!(eigs.iter().any(|x| (x - eig_4).abs() < 1e-4)); assert!(eigs.iter().any(|x| (x - eig_5).abs() < 1e-4)); } #[test] #[should_panic] fn test_non_square_cholesky() { let a = Matrix::new(2, 3, vec![1.0; 6]); let _ = a.cholesky(); } #[test] #[should_panic] fn test_non_square_upper_hessenberg() { let a = Matrix::new(2, 3, vec![1.0; 6]); let _ = a.upper_hessenberg(); } #[test] #[should_panic] fn 
test_non_square_upper_hess_decomp() { let a = Matrix::new(2, 3, vec![1.0; 6]); let _ = a.upper_hess_decomp(); } #[test] #[should_panic] fn test_non_square_eigenvalues() { let a = Matrix::new(2, 3, vec![1.0; 6]); let _ = a.eigenvalues(); } #[test] #[should_panic] fn test_non_square_eigendecomp() { let a = Matrix::new(2, 3, vec![1.0; 6]); let _ = a.eigendecomp(); } #[test] #[should_panic] fn test_non_square_lup_decomp() { let a = Matrix::new(2, 3, vec![1.0; 6]); let _ = a.lup_decomp(); } #[test] fn test_lup_decomp() { use error::ErrorKind; let a: Matrix<f64> = matrix!( 1., 2., 3., 4.; 0., 0., 0., 0.; 0., 0., 0., 0.; 0., 0., 0., 0. ); match a.lup_decomp() { Err(e) => assert!(*e.kind() == ErrorKind::DivByZero), Ok(_) => panic!() } } }
true
9b005ec1bca94dd67a01471b6ac54974fe9a7fa5
Rust
Hanaasagi/kurumi
/memory/src/frame.rs
UTF-8
1,074
3.25
3
[ "MIT" ]
permissive
use super::PAGE_SIZE; use super::PhysicalAddress; #[derive(Debug, PartialEq, Eq, PartialOrd, Ord)] pub struct Frame { pub number: usize, } impl Frame { pub fn containing_address(address: usize) -> Frame { Frame { number: address / PAGE_SIZE } } pub fn start_address(&self) -> PhysicalAddress { self.number * PAGE_SIZE } pub fn clone(&self) -> Frame { Frame { number: self.number } } pub fn range_inclusive(start: Frame, end: Frame) -> FrameIter { FrameIter { start: start, end: end, } } } pub struct FrameIter { start: Frame, end: Frame, } impl Iterator for FrameIter { type Item = Frame; fn next(&mut self) -> Option<Frame> { if self.start <= self.end { let frame = self.start.clone(); self.start.number += 1; Some(frame) } else { None } } } pub trait FrameAllocator { fn allocate_frame(&mut self) -> Option<Frame>; fn deallocate_frame(&mut self, frame: Frame); }
true
b003bce2d5146df8b95093aeaffabc8c08b66d06
Rust
Naalunth/aoc_2018
/src/year2018/day01.rs
UTF-8
1,681
2.90625
3
[]
no_license
type GeneratorOut = Vec<i64>; type PartIn = [i64]; #[aoc_generator(day1)] pub fn gen(input: &str) -> GeneratorOut { input .lines() .map(|l| l.parse::<i64>().unwrap()) .collect::<Vec<_>>() } #[aoc(day1, part1)] pub fn p1(input: &PartIn) -> i64 { input.iter().sum::<i64>() } #[aoc(day1, part2)] pub fn p2(input: &PartIn) -> i64 { use std::collections::{HashMap, HashSet}; let mut freqs = std::iter::once(0) .chain(input.iter().scan(0, |state, &i| { *state += i; Some(*state) })) .collect::<Vec<_>>(); if let Some(repetition) = freqs .iter() .scan(HashSet::new(), |state, &current_frequency| { Some(if state.insert(current_frequency) { None } else { Some(current_frequency) }) }) .filter_map(|x| x) .nth(0) { return repetition; } let freq_shift = freqs.pop().unwrap(); if freq_shift == 0 { return 0; } let mut groups = HashMap::new(); for (i, &freq) in freqs.iter().enumerate() { groups .entry(freq % freq_shift) .or_insert_with(Vec::<(usize, i64)>::new) .push((i, freq)); } let mut min_diff: Option<i64> = None; let mut min_freq = 0i64; let mut min_index = 0usize; for group in groups.values_mut() { group.sort_unstable_by_key(|e| e.1); for i in 1..group.len() { let diff = group[i].1 - group[i - 1].1; let index = if freq_shift > 0 { group[i - 1].0 } else { group[i].0 }; let freq = if freq_shift > 0 { group[i].1 } else { group[i - 1].1 }; if min_diff.is_none() || diff < min_diff.unwrap() || (diff == min_diff.unwrap() && index < min_index) { min_diff = Some(diff); min_freq = freq; min_index = index; } } } min_freq }
true
dda271c5c5a7d3b6746074a764606bf2160fca54
Rust
elpiel/adex-validator-stack-rust
/validator/src/infrastructure/sentry.rs
UTF-8
2,849
2.6875
3
[]
no_license
use domain::{Channel, ValidatorId}; use futures::compat::Future01CompatExt; use futures::future::{ok, try_join_all, FutureExt, TryFutureExt}; use futures::Future; use futures_legacy::Future as LegacyFuture; use reqwest::r#async::{Client, Response}; use reqwest::Error; use serde::Deserialize; use std::iter::once; #[derive(Clone)] // @TODO: make pub(crate) pub struct SentryApi { pub sentry_url: String, pub client: Client, } impl SentryApi { pub fn all_channels( &self, validator: Option<&ValidatorId>, ) -> impl Future<Output = Result<Vec<Channel>, Error>> { let validator = validator.cloned(); // call Sentry and fetch first page, where validator = identity let first_page = self.clone().fetch_page(1, validator.clone()); let handle = self.clone(); first_page .and_then(move |response| { let first_page_future = ok(response.channels).boxed(); if response.total_pages < 2 { // if there is only 1 page, return the results first_page_future } else { // call Sentry again for the rest of tha pages let futures = (2..=response.total_pages) .map(|page| { handle .clone() .fetch_page(page, validator.clone()) .map(|response_result| { response_result.and_then(|response| Ok(response.channels)) }) .boxed() }) .chain(once(first_page_future)); try_join_all(futures) .map(|result_all| { result_all .and_then(|all| Ok(all.into_iter().flatten().collect::<Vec<_>>())) }) .boxed() } }) .boxed() } async fn fetch_page( self, page: u64, validator: Option<ValidatorId>, ) -> Result<ChannelAllResponse, reqwest::Error> { let mut query = vec![format!("page={}", page)]; if let Some(validator) = validator { query.push(format!("validator={}", validator)); } let future = self .client .get(format!("{}/channel/list?{}", self.sentry_url, query.join("&")).as_str()) .send() .and_then(|mut res: Response| res.json::<ChannelAllResponse>()); await!(future.compat()) } } #[derive(Deserialize, Debug)] #[serde(rename_all = "camelCase")] struct ChannelAllResponse { pub channels: Vec<Channel>, pub total_pages: u64, }
true
f2c5f500c4d31caa314b269e4995bf8f59e16572
Rust
sharnoff/passman
/src/subcmd/update.rs
UTF-8
1,002
2.625
3
[ "MIT" ]
permissive
//! Tools for updating a storage file use super::print_err_and_exit; use crate::version::{self, FileContent}; use std::fs::File; use std::io::{self, Write}; use std::path::PathBuf; #[derive(clap::Args)] pub struct Args { /// Sets the input file to read from #[clap(short, long)] input: PathBuf, /// Sets the output file to write to #[clap(short, long)] output: PathBuf, } pub fn run(args: Args) { let (content, _warning) = version::parse(&args.input); let pwd = rpassword::read_password_from_tty(Some("Please enter the encryption key: ")) .unwrap_or_else(print_err_and_exit); let output_content = content.to_current(pwd); let () = File::create(args.output) .and_then(|mut f| { let s = output_content .map_err(|_| io::Error::new(io::ErrorKind::Other, "decryption failed"))? .write(); write!(f, "{}", s).and_then(|_| f.flush()) }) .unwrap_or_else(print_err_and_exit); }
true
86d39efda4852600a98e7ffb414fdf7884e7439a
Rust
Knabin/Rust-Study
/4.3_slices/src/main.rs
UTF-8
2,250
4.25
4
[]
no_license
fn main_1() { let mut s = String::from("Hello world"); let word = first_word(&s); // word == 5 s.clear(); // s == "" // word는 5를 가지고 있겠지만, 5라는 값을 의미 있게 쓸 수 있는 String이 존재하지 않는다. } // word는 유효하지 않다. fn first_word(s: &String) -> usize { // String을 요소별로 보면서 공백인지 확인해야 하므로 byte 배열로 전환한다. let bytes = s.as_bytes(); // iter - 컬렉션의 각 요소를 반환하는 함수 // enumerate - iter의 결과값을 튜플의 일부로 만들어 반환, (인덱스, 참조값) for (i, &item) in bytes.iter().enumerate() { // 공백을 찾았다면 위치를 반환한다. if item == b' ' { return i; } } // 공백을 찾지 못했다면 String의 길이값을 반환한다. s.len() } fn string_slice() { let s = String::from("hello world"); let hello = &s[0..5]; let world = &s[6..11]; // slice1 == slice2 let slice1 = &s[0..2]; let slice2 = &s[..2]; // slice3 == slice4 let len = s.len(); let slice3 = &s[3..len]; let slice4 = &s[3..]; // slice5 == slice6 let slice5 = &s[0..len]; let slice6 = &s[..]; } fn first_word_slice(s: &String) -> &str { let bytes = s.as_bytes(); for(i, &item) in bytes.iter().enumerate() { if item == b' '{ return &s[0..i]; } } &s[..] } fn main_2() { let mut s = String::from("hello world"); let word = first_word_slice(&s); //s.clear(); //error!! println!("the first word is: {}", word); } fn first_word_sig(s: &str) -> &str { // do something... &s[..] } fn main() { let my_string = String::from("hello world"); // first_word_sig가 'String'의 슬라이스로 동작한다. let word = first_word_sig(&my_string[..]); let my_string_literal = "hello world"; // first_word_sig가 스트링 리터럴의 슬라이스로 동작한다. let word = first_word_sig(&my_string_literal[..]); // 스트링 리터럴은 *또한* 스트링 슬라이스이기 때문에, 슬라이스 문법 없이도 동작한다. let word = first_word_sig(my_string_literal); }
true
79a0e239140056b6310da30e8030867991f69ec6
Rust
mtratsiuk/mtratsiuk.github.io
/rustache/src/rustache.rs
UTF-8
13,395
2.78125
3
[ "Unlicense" ]
permissive
use std::collections::HashMap; use std::error::Error; use std::path::Path; use std::{fs, result}; use crate::pipe::{self}; use crate::ron; use crate::ron::Value as RonValue; pub type Result<T> = result::Result<T, Box<dyn Error>>; type TemplatePair = (u8, u8); const TEMPLATE_NAME: &str = "index.rustache"; const VARIABLES_NAME: &str = "index.ron"; const CSS_NAME: &str = "index.css"; const JS_NAME: &str = "index.js"; const LOOP_ITEM_VARIABLE: &str = "$it"; const VARIABLE_OPEN: TemplatePair = (b'{', b'{'); const VARIABLE_CLOSE: TemplatePair = (b'}', b'}'); const LOOP_OPEN: TemplatePair = (b'{', b'*'); const LOOP_CLOSE: TemplatePair = (b'*', b'}'); const OPTIONAL_OPEN: TemplatePair = (b'{', b'?'); const OPTIONAL_CLOSE: TemplatePair = (b'?', b'}'); const INLINE_OPEN: TemplatePair = (b'{', b'>'); const INLINE_CLOSE: TemplatePair = (b'<', b'}'); const BLOCK_END: TemplatePair = (b'{', b'}'); const VARIABLE_PATH_SEPARATOR: char = '.'; const PIPE_SEPARATOR: char = '|'; const BLOCK_OPENING_PAIRS: [TemplatePair; 2] = [LOOP_OPEN, OPTIONAL_OPEN]; pub fn render(input: &Path, output: &Path) -> Result<()> { let mut parser = Parser::from(input)?; parser.run()?; let result = parser.result()?; fs::write(output, result)?; Ok(()) } #[derive(Debug)] struct Parser<'a> { input: &'a Path, in_bytes: Vec<u8>, out_bytes: Vec<u8>, pos: usize, scopes: Vec<RonValue>, } impl<'a> Parser<'a> { fn from(input: &'a Path) -> Result<Self> { let template_path = input.join(TEMPLATE_NAME); let template = fs::read_to_string(&template_path)?; let variables_path = input.join(VARIABLES_NAME); let variables_string = fs::read_to_string(&variables_path)?; let variables = ron::parse(variables_string)?; let in_bytes = template.into_bytes(); let out_bytes = Vec::with_capacity(in_bytes.len()); Ok(Self { input, in_bytes, out_bytes, pos: 0, scopes: vec![variables], }) } fn __broken_from_string(template_str: String, variables_str: String) -> Result<Self> { let input = Path::new("fake.txt"); let in_bytes = 
template_str.into_bytes(); let out_bytes = Vec::with_capacity(in_bytes.len()); let variables = ron::parse(variables_str)?; Ok(Self { input, in_bytes, out_bytes, pos: 0, scopes: vec![variables], }) } fn run(&mut self) -> Result<()> { while self.pos < self.in_bytes.len() { self.run_html()?; } Ok(()) } fn result(self) -> Result<String> { Ok(String::from_utf8(self.out_bytes)?) } fn run_html(&mut self) -> Result<()> { loop { match self.peek_pair() { Some(pair) => match pair { VARIABLE_OPEN => { self.skip(2); self.run_variable()?; } LOOP_OPEN => { self.skip(2); self.run_loop()?; } OPTIONAL_OPEN => { self.skip(2); self.run_optional()?; } INLINE_OPEN => { self.skip(2); self.run_inline()?; } BLOCK_END => { if self.scopes.len() > 1 { // if we are inside the block scope, // stop and give control back to previous parser // it will take care of the closing characters break; } else { // skip otherwise self.consume(2); } } _ => self.consume(1), }, None => { // Only single char left, consume and stop // // Bound check handles the case when template ends with // block closing chars (e.g. `{}`) - then they would be already // consumed by last parser if self.in_bytes.len() > self.pos { self.consume(1); } break; } } } Ok(()) } fn run_variable(&mut self) -> Result<()> { let variable_string = self.skip_until_pair(VARIABLE_CLOSE)?; self.skip(2); let (name, apply_pipe) = self.get_name_and_pipe(&variable_string)?; let variable = self.get_value(&name)?; let value = match variable { value @ RonValue::Text(_) => match apply_pipe(value)? 
{ RonValue::Text(x) => x.clone(), _ => return Err("Expected pipe to return text")?, }, _ => return Err(format!("Expected {} to be variable", name))?, }; self.emit(&mut value.into_bytes()); Ok(()) } fn run_inline(&mut self) -> Result<()> { let name = self.skip_until_pair(INLINE_CLOSE)?; self.skip(2); match name.as_str() { "css" => { let css_path = self.input.join(CSS_NAME); let css_string = fs::read_to_string(&css_path)?; self.emit(&mut "<style>\n".to_string().into_bytes()); self.emit(&mut css_string.into_bytes()); self.emit(&mut "</style>".to_string().into_bytes()); } "js" => { let js_path = self.input.join(JS_NAME); let js_string = fs::read_to_string(&js_path)?; self.emit(&mut "<script>\n".to_string().into_bytes()); self.emit(&mut js_string.into_bytes()); self.emit(&mut "</script>".to_string().into_bytes()); } _ => return Err(format!("Unexpected inline asset: {}", name))?, } Ok(()) } fn run_loop(&mut self) -> Result<()> { let variable_string = self.skip_until_pair(LOOP_CLOSE)?; self.skip(2); let (name, apply_pipe) = self.get_name_and_pipe(&variable_string)?; let variable = self.get_value(&name)?; let items = match variable { value @ RonValue::Array(_) => match apply_pipe(value)? 
{ RonValue::Array(x) => x.clone(), _ => return Err("Expected pipe to return array")?, }, _ => return Err(format!("Expected {} to be array", name))?, }; let return_pos = self.pos; for item in items { self.pos = return_pos; let scope = RonValue::Object(HashMap::from([(LOOP_ITEM_VARIABLE.to_string(), item)])); self.scopes.push(scope); self.run_html()?; self.scopes.pop(); } self.skip(2); Ok(()) } fn run_optional(&mut self) -> Result<()> { let name = self.skip_until_pair(OPTIONAL_CLOSE)?; self.skip(2); let variable = self.get_value(&name); let mut inner_blocks = 0; match variable { Ok(_) => { self.run_html()?; self.skip(2); } Err(_) => loop { match self.peek_pair() { Some(pair) => match pair { pair if BLOCK_OPENING_PAIRS.contains(&pair) => { self.skip(2); inner_blocks += 1; } BLOCK_END => { self.skip(2); if inner_blocks > 0 { inner_blocks -= 1; } else { break; } } _ => self.skip(1), }, None => { return Err(format!("Expected {:?} closing Optional block", BLOCK_END))? } } }, } Ok(()) } fn peek_pair(&self) -> Option<TemplatePair> { return if self.pos + 2 > self.in_bytes.len() { None } else { Some((self.in_bytes[self.pos], self.in_bytes[self.pos + 1])) }; } fn skip(&mut self, n: usize) -> () { self.pos += n; } fn skip_until_pair(&mut self, pair: TemplatePair) -> Result<String> { let mut name = vec![]; while self .peek_pair() .expect(&format!("Expected closing {:?}", pair)) != pair { name.push(self.in_bytes[self.pos]); self.skip(1); } let name = String::from_utf8(name)?; Ok(name.trim().to_string()) } fn consume(&mut self, n: usize) -> () { for _ in 0..n { self.out_bytes.push(self.in_bytes[self.pos]); self.pos += 1; } } fn emit(&mut self, bytes: &mut Vec<u8>) -> () { self.out_bytes.append(bytes); } fn get_value(&mut self, key: &str) -> Result<&RonValue> { let mut path = key.split(VARIABLE_PATH_SEPARATOR).into_iter(); for scope in self.scopes.iter().rev() { let variables = match scope { RonValue::Object(x) => x, _ => { return Err(format!( "Expected root scope to be Object, 
got: {:?}", scope ))? } }; let root_key = path .next() .ok_or(format!("Unexpected variable name {}", key))?; let root_value = variables.get(root_key); if let Some(mut value) = root_value { for next_key in path { match value { RonValue::Object(object) => { value = object.get(next_key).ok_or(format!( "Property {} is undefined at {:?}", next_key, value ))?; } _ => Err(format!("Cannot read property {} of {:?}", next_key, value))?, } } return Ok(value); } } Err(format!("Variable {} is undefined", key))? } fn get_name_and_pipe( &self, var_str: &str, ) -> Result<(String, impl FnOnce(&RonValue) -> Result<RonValue>)> { let (name, pipes) = match &var_str.split(PIPE_SEPARATOR).collect::<Vec<&str>>()[..] { [name] => (name.to_string(), vec![]), [name, pipes @ ..] => ( name.trim().to_string(), pipes.iter().map(|x| pipe::parse(x.trim())).collect::<Result<Vec<_>>>()?, ), [] => Err(format!("Unexpected variable string: {:?}", var_str))? }; Ok((name, move |val: &RonValue| { pipes.iter().try_fold(val.clone(), |res, pipe| { pipe.apply(&res) }) })) } } #[cfg(test)] mod tests { use super::*; #[test] fn parser_should_handle_template_variable() { let template = "\ <div>{{ name }}</div>\ " .to_string(); let variables = " { name: Test name } " .to_string(); let mut parser = Parser::__broken_from_string(template, variables).unwrap(); parser.run().unwrap(); let result = parser.result().unwrap(); assert_eq!(result, "<div>Test name</div>"); } #[test] fn parser_should_handle_template_variable_with_reverse_pipe() { let template = "\ <div>{{ name | $reverse }}</div>\ " .to_string(); let variables = " { name: 12345 } " .to_string(); let mut parser = Parser::__broken_from_string(template, variables).unwrap(); parser.run().unwrap(); let result = parser.result().unwrap(); assert_eq!(result, "<div>54321</div>"); } #[test] fn parser_should_handle_template_variable_with_two_reverse_pipes() { let template = "\ <div>{{ name | $reverse | $reverse }}</div>\ " .to_string(); let variables = " { name: 12345 } " 
.to_string(); let mut parser = Parser::__broken_from_string(template, variables).unwrap(); parser.run().unwrap(); let result = parser.result().unwrap(); assert_eq!(result, "<div>12345</div>"); } #[test] fn parser_should_handle_template_loop_with_reverse_pipe() { let template = "\ {* items | $reverse *}<div>{{ $it.name }}</div>{}\ " .to_string(); let variables = " { items: [ { name: One } { name: Two } { name: Three } ] } " .to_string(); let mut parser = Parser::__broken_from_string(template, variables).unwrap(); parser.run().unwrap(); let result = parser.result().unwrap(); assert_eq!(result, "<div>Three</div><div>Two</div><div>One</div>"); } }
true
7f78e9997e0833c627b42655558b3056d85365a0
Rust
WillQu/woodpusher
/src/game.rs
UTF-8
45,806
2.96875
3
[ "MIT" ]
permissive
use im::Vector; use board::Board; use board::Piece; use board::PieceType; use board::Player; use board::Position; mod bishop; mod king; mod knight; mod move_list; mod pawn; mod queen; mod rook; #[derive(Clone, Debug, PartialEq)] pub struct Game { board: Board, player_turn: Player, en_passant: Option<Position>, castle_white: (bool, bool), castle_black: (bool, bool), } impl Game { pub fn new() -> Self { Self::from_board(Board::starting_position(), Player::White) } pub fn from_board(board: Board, player: Player) -> Self { Self::from_board_with_castle(board, player, true, true, true, true) } pub fn from_board_with_castle( board: Board, player: Player, castle_a_white: bool, castle_h_white: bool, castle_a_black: bool, castle_h_black: bool, ) -> Self { Self { board, player_turn: player, en_passant: Option::None, castle_white: (castle_a_white, castle_h_white), castle_black: (castle_a_black, castle_h_black), } } pub fn board(&self) -> &Board { &self.board } pub fn turn(&self) -> Player { self.player_turn } pub fn set_turn(&self, player: Player) -> Self { Self { player_turn: player, ..self.clone() } } pub fn execute_move(&self, from: Position, to: Position) -> Result<Self, String> { self.execute_promotion(from, to, None) } pub fn execute_promotion( &self, from: Position, to: Position, promotion: Option<PieceType>, ) -> Result<Self, String> { self.list_moves() .iter() .find(|mv| mv.from == from && mv.to == to && mv.promotion == promotion) .map(Move::new_game) .ok_or_else(|| "Illegal move".to_string()) } fn apply_move_with_en_passant( &self, from: Position, to: Position, en_passant: Option<Position>, promotion: Option<PieceType>, ) -> Result<Self, String> { self.get_piece_at(from).map_or_else( || Err(format!("No piece at {}", from)), |piece| { self.apply_promotion_to_piece(from, to, *piece, promotion) .map(|game| Self { en_passant, ..game }) }, ) } fn apply_promotion_to_piece( &self, from: Position, to: Position, piece: Piece, promotion: Option<PieceType>, ) -> Result<Self, 
String> { if piece.player() == self.turn() { let new_piece = promotion.map_or(piece, |piece_type| Piece::new(piece_type, piece.player())); Ok(Self { board: self.board.put(to, new_piece).remove(from), player_turn: self.turn().opponent(), en_passant: Option::None, castle_white: self.castle_white, castle_black: self.castle_black, }) } else { Err(String::from("Can’t move pieces from the other player")) } } fn get_piece_at(&self, position: Position) -> Option<&Piece> { self.board.get(position) } pub fn list_moves(&self) -> Vector<Move> { let king_position = self .board .iter() .find(|(_, piece)| { piece.piece_type() == PieceType::King && piece.player() == self.player_turn }) .map(|(position, _)| position); let castles = king_position.map_or(Vector::new(), |position| { king::list_castle_moves(self, *position, self.player_turn) }); (self.list_moves_no_check() + castles) .into_iter() .filter(|mov| { !Game { player_turn: self.player_turn, ..mov.new_game() } .is_king_check() }) .collect() } fn list_moves_no_check(&self) -> Vector<Move> { self.board .iter() .filter(move |(_, value)| value.player() == self.turn()) .flat_map(move |(key, value)| match value.piece_type() { PieceType::Pawn => pawn::list_pawn_moves(self, *key, value.player()), PieceType::Rook => rook::list_rook_moves(self, *key, value.player()), PieceType::Bishop => bishop::list_bishop_moves(self, *key, value.player()), PieceType::Queen => queen::list_queen_moves(self, *key, value.player()), PieceType::Knight => knight::list_knight_moves(self, *key, value.player()), PieceType::King => king::list_king_moves(self, *key, value.player()), }) .collect() } fn create_move(&self, from: Position, to: Position) -> Move { Move::new(self, from, to) } fn create_move_en_passant(&self, from: Position, to: Position, en_passant: Position) -> Move { Move::new_with_en_passant(self, from, to, en_passant) } fn create_move_with_promotion( &self, from: Position, to: Position, promotion: PieceType, ) -> Move { 
Move::new_with_promotion(self, from, to, promotion) } fn is_check(&self, position: Position) -> bool { let player = self.player_turn; let opp = player.opponent(); let mut result = pawn::list_pawn_moves(self, position, player) .iter() .any(|m| self.get_piece_at(m.to) == Some(&Piece::new(PieceType::Pawn, opp))); result |= rook::list_rook_moves(self, position, player) .iter() .any(|m| self.get_piece_at(m.to) == Some(&Piece::new(PieceType::Rook, opp))); result |= knight::list_knight_moves(self, position, player) .iter() .any(|m| self.get_piece_at(m.to) == Some(&Piece::new(PieceType::Knight, opp))); result |= bishop::list_bishop_moves(self, position, player) .iter() .any(|m| self.get_piece_at(m.to) == Some(&Piece::new(PieceType::Bishop, opp))); result |= queen::list_queen_moves(self, position, player) .iter() .any(|m| self.get_piece_at(m.to) == Some(&Piece::new(PieceType::Queen, opp))); result |= king::list_king_moves(self, position, player) .iter() .any(|m| self.get_piece_at(m.to) == Some(&Piece::new(PieceType::King, opp))); result } fn is_king_check(&self) -> bool { self.board .iter() .find(|(_, piece)| { piece.piece_type() == PieceType::King && piece.player() == self.player_turn }) .map_or(false, |(position, _)| self.is_check(*position)) } pub fn is_stalemate(&self) -> bool { self.list_moves().is_empty() && !self.is_king_check() } pub fn is_mate(&self) -> bool { self.list_moves().is_empty() && self.is_king_check() } fn disable_castle(&self, player: Player) -> Game { match player { Player::White => Game { castle_white: (false, false), ..self.clone() }, Player::Black => Game { castle_black: (false, false), ..self.clone() }, } } pub fn list_pieces(&self) -> Vector<Piece> { self.board.iter().map(|(_, piece)| *piece).collect() } } impl Default for Game { fn default() -> Self { Self::new() } } #[derive(Copy, Clone, PartialEq, Debug)] pub struct Move<'a> { from: Position, to: Position, en_passant: Option<Position>, game: &'a Game, promotion: Option<PieceType>, } impl<'a> 
Move<'a> { fn new(game: &Game, from: Position, to: Position) -> Move<'_> { Move { from, to, en_passant: None, game, promotion: None, } } fn new_with_en_passant( game: &Game, from: Position, to: Position, en_passant: Position, ) -> Move<'_> { Move { from, to, en_passant: Some(en_passant), game, promotion: None, } } fn new_with_promotion( game: &Game, from: Position, to: Position, promotion: PieceType, ) -> Move<'_> { Move { from, to, en_passant: None, game, promotion: Some(promotion), } } pub fn new_game(&self) -> Game { let mut result = self .game .apply_move_with_en_passant(self.from, self.to, self.en_passant, self.promotion) .unwrap_or_else(|_| panic!("Invalid move {:?}", self)); if Some(self.to) == self.game.en_passant { let position_to_remove = Position::from_chars(self.to.column() as char, self.from.row() as char).unwrap(); result = Game { board: result.board.remove(position_to_remove), ..result }; } self.finalize_castle(result) } fn detect_castle(&self) -> bool { let piece = self .game .board .get(self.from) .expect("No piece at \"from\" position"); self.from.column() == ('e' as u8) && piece.piece_type() == PieceType::King && (self.to.column() == ('g' as u8) || self.to.column() == ('c' as u8)) } fn finalize_castle(&self, game: Game) -> Game { if self.detect_castle() { let (rook_from, rook_to) = if self.to.column() == ('c' as u8) { (b'a', b'd') } else { (b'h', b'f') }; Game { board: game .board .remove(Position::from_u8(rook_from, self.to.row()).unwrap()) .put( Position::from_u8(rook_to, self.to.row()).unwrap(), Piece::new(PieceType::Rook, game.player_turn.opponent()), ), ..game.disable_castle(game.player_turn.opponent()) } } else if self.from == Position::from("h1").unwrap() { Game { castle_white: (game.castle_white.0, false), ..game } } else if self.from == Position::from("a1").unwrap() { Game { castle_white: (false, game.castle_white.1), ..game } } else if self.from == Position::from("e1").unwrap() { Game { castle_white: (false, false), ..game } } else if 
self.from == Position::from("h8").unwrap() { Game { castle_black: (game.castle_black.0, false), ..game } } else if self.from == Position::from("a8").unwrap() { Game { castle_black: (false, game.castle_black.1), ..game } } else if self.from == Position::from("e8").unwrap() { Game { castle_black: (false, false), ..game } } else { game } } } #[cfg(test)] mod tests { use im::HashSet; use spectral::prelude::*; use board::Board; use board::Piece; use board::PieceType; use board::Player::*; use board::Position; use game::*; #[test] fn new_game() { // When let result = Game::new(); // Then assert_eq!(result.board(), &Board::starting_position()); assert_eq!(result.turn(), Player::White) } #[test] fn first_move() { // Given let game = Game::new(); // When let game_after_move = game .execute_move(Position::from("e2").unwrap(), Position::from("e4").unwrap()) .unwrap(); // Then assert_eq!( game_after_move.get_piece_at(Position::from("e4").unwrap()), Some(&Piece::new(PieceType::Pawn, Player::White)) ); assert_eq!( game_after_move.get_piece_at(Position::from("e2").unwrap()), None ); assert_eq!(game_after_move.turn(), Player::Black); } #[test] fn first_move_2() { // Given let game = Game::new(); // When let game_after_move = game .execute_move(Position::from("d2").unwrap(), Position::from("d4").unwrap()) .unwrap(); // Then assert_eq!( game_after_move.get_piece_at(Position::from("d4").unwrap()), Some(&Piece::new(PieceType::Pawn, Player::White)) ); } #[test] fn second_move() { // Given let game = Game::new(); // When let game_after_move = game .execute_move(Position::from("e2").unwrap(), Position::from("e4").unwrap()) .and_then(|game| { game.execute_move(Position::from("e7").unwrap(), Position::from("e5").unwrap()) }); // Then assert_eq!(game_after_move.map(|game| game.turn()), Ok(Player::White)); } #[test] fn do_not_move_opponent_pieces() { // Given let game = Game::new(); // When let game_after_move = game.execute_move(Position::from("e7").unwrap(), Position::from("e5").unwrap()); 
// Then assert!(game_after_move.is_err()); } #[test] fn list_move_pawn_simple_white() { // Given let board = Board::empty().put( Position::from("e3").unwrap(), Piece::new(PieceType::Pawn, Player::White), ); let game = Game::from_board(board, Player::White); // When let result = game.list_moves_no_check(); // Then assert_that!(result).equals_iterator( &[game.create_move(Position::from("e3").unwrap(), Position::from("e4").unwrap())] .iter(), ) } #[test] fn list_move_pawn_simple_white2() { // Given let board = Board::empty().put( Position::from("h6").unwrap(), Piece::new(PieceType::Pawn, Player::White), ); let game = Game::from_board(board, Player::White); // When let result = game.list_moves_no_check(); // Then assert_that!(result).equals_iterator( &[game.create_move(Position::from("h6").unwrap(), Position::from("h7").unwrap())] .iter(), ) } #[test] fn list_move_pawn_simple_black() { // Given let board = Board::empty().put( Position::from("h6").unwrap(), Piece::new(PieceType::Pawn, Player::Black), ); let game = Game::from_board(board, Player::Black); // When let result = game.list_moves_no_check(); // Then assert_that!(result).equals_iterator( &[game.create_move(Position::from("h6").unwrap(), Position::from("h5").unwrap())] .iter(), ) } #[test] fn list_move_only_current_player() { // Given let board = Board::empty().put( Position::from("h6").unwrap(), Piece::new(PieceType::Pawn, Player::Black), ); let game = Game::from_board(board, Player::White); // When let result = game.list_moves_no_check(); // Then assert_that!(result.is_empty()).is_true(); } #[test] fn list_move_pawn_starting_point_white() { // Given let board = Board::empty().put( Position::from("e2").unwrap(), Piece::new(PieceType::Pawn, Player::White), ); let game = Game::from_board(board, Player::White); // When let result: Vector<Move> = game.list_moves_no_check(); // Then assert_eq!(result.len(), 2); assert_that!(result).contains_all_of(&&[ game.create_move(Position::from("e2").unwrap(), 
Position::from("e3").unwrap()), game.create_move_en_passant( Position::from("e2").unwrap(), Position::from("e4").unwrap(), Position::from("e3").unwrap(), ), ]); } #[test] fn list_move_pawn_starting_point_black_with_white() { // Given let board = Board::empty().put( Position::from("e7").unwrap(), Piece::new(PieceType::Pawn, Player::White), ); let game = Game::from_board(board, Player::White); // When let result: Vector<Move> = game.list_moves_no_check(); // Then assert_eq!(result.len(), 4); assert_that!(result).contains_all_of(&&[game.create_move_with_promotion( Position::from("e7").unwrap(), Position::from("e8").unwrap(), PieceType::Queen, )]); } #[test] fn list_move_pawn_starting_point_black() { // Given let board = Board::empty().put( Position::from("e7").unwrap(), Piece::new(PieceType::Pawn, Player::Black), ); let game = Game::from_board(board, Player::Black); // When let result: Vector<Move> = game.list_moves_no_check(); // Then assert_eq!(result.len(), 2); assert_that!(result).contains_all_of(&&[ game.create_move(Position::from("e7").unwrap(), Position::from("e6").unwrap()), game.create_move_en_passant( Position::from("e7").unwrap(), Position::from("e5").unwrap(), Position::from("e6").unwrap(), ), ]); } #[test] fn list_move_pawn_capture_white() { // Given let board = Board::empty() .put( Position::from("e3").unwrap(), Piece::new(PieceType::Pawn, Player::White), ) .put( Position::from("d4").unwrap(), Piece::new(PieceType::Pawn, Player::Black), ); let game = Game::from_board(board, Player::White); // When let result = game.list_moves_no_check(); // Then assert_eq!(result.len(), 2); assert_that!(result).contains_all_of(&&[ game.create_move(Position::from("e3").unwrap(), Position::from("e4").unwrap()), game.create_move(Position::from("e3").unwrap(), Position::from("d4").unwrap()), ]); } #[test] fn list_move_pawn_generate_en_passant() { // Given let board = Board::empty().put( Position::from("e2").unwrap(), Piece::new(PieceType::Pawn, Player::White), ); let game = 
Game::from_board(board, Player::White); // When let result = game.list_moves_no_check(); // Then assert_eq!(result.len(), 2); assert_that!(result).contains_all_of(&&[ Move::new( &game, Position::from("e2").unwrap(), Position::from("e3").unwrap(), ), Move::new_with_en_passant( &game, Position::from("e2").unwrap(), Position::from("e4").unwrap(), Position::from("e3").unwrap(), ), ]); } #[test] fn list_move_pawn_en_passant() { // Given let board = Board::empty() .put( Position::from("e4").unwrap(), Piece::new(PieceType::Pawn, Player::White), ) .put( Position::from("d4").unwrap(), Piece::new(PieceType::Pawn, Player::Black), ); let game = Game { board, player_turn: Player::White, en_passant: Some(Position::from("d5").unwrap()), castle_white: (true, true), castle_black: (true, true), }; // When let result = game.list_moves_no_check(); // Then assert_eq!(result.len(), 2); assert_that!(result).contains_all_of(&&[ Move::new( &game, Position::from("e4").unwrap(), Position::from("e5").unwrap(), ), Move::new( &game, Position::from("e4").unwrap(), Position::from("d5").unwrap(), ), ]); } #[test] fn execute_move_pawn_en_passant() { // Given let board = Board::empty() .put( Position::from("e4").unwrap(), Piece::new(PieceType::Pawn, Player::White), ) .put( Position::from("d4").unwrap(), Piece::new(PieceType::Pawn, Player::Black), ); let game = Game { board, player_turn: Player::White, en_passant: Some(Position::from("d5").unwrap()), castle_white: (true, true), castle_black: (true, true), }; // When let move_list = game.list_moves_no_check(); let result = move_list .iter() .filter(|mv| { mv.from == Position::from("e4").unwrap() && mv.to == Position::from("d5").unwrap() }) .next(); // Then let new_board = Board::empty().put( Position::from("d5").unwrap(), Piece::new(PieceType::Pawn, Player::White), ); let expected_new_game = Game { board: new_board, player_turn: Player::Black, en_passant: None, castle_white: (true, true), castle_black: (true, true), }; 
assert_eq!(result.unwrap().new_game(), expected_new_game); } #[test] fn execute_legal_move() { // Given let game = Game::new(); // When let game_after_move = game .execute_move(Position::from("e2").unwrap(), Position::from("e4").unwrap()) .unwrap(); // Then assert_eq!( game_after_move.get_piece_at(Position::from("e4").unwrap()), Some(&Piece::new(PieceType::Pawn, Player::White)) ); assert_eq!( game_after_move.get_piece_at(Position::from("e2").unwrap()), None ); assert_eq!(game_after_move.turn(), Player::Black); } #[test] fn execute_illegal_move() { // Given let game = Game::new(); // When let result = game.execute_move(Position::from("e2").unwrap(), Position::from("d3").unwrap()); // Then assert_eq!(result, Err("Illegal move".to_string())); } #[test] fn execute_illegal_move2() { // Given let game = Game::new() .execute_move(Position::from("b2").unwrap(), Position::from("b3").unwrap()) .unwrap() .execute_move(Position::from("b7").unwrap(), Position::from("b6").unwrap()) .unwrap(); // When let result = game.execute_move(Position::from("b1").unwrap(), Position::from("b2").unwrap()); // Then assert_eq!(result, Err("Illegal move".to_string())); } #[test] fn rook() { // Given let board = Board::empty().put( Position::from("a1").unwrap(), Piece::new(PieceType::Rook, Player::White), ); let game = Game::from_board(board, White); // When let result = game.list_moves_no_check(); //Then let expected: HashSet<Position> = vector![ "a2", "a3", "a4", "a5", "a6", "a7", "a8", "b1", "c1", "d1", "e1", "f1", "g1", "h1" ] .iter() .map(|pos| Position::from(pos).unwrap()) .collect(); let result_positions: HashSet<Position> = result.iter().map(|mv| mv.to).collect(); assert_that!(result_positions).is_equal_to(expected); } #[test] fn bishop() { // Given let game = Game::from_board( Board::empty().put( Position::from("a1").unwrap(), Piece::new(PieceType::Bishop, Player::White), ), White, ); // When let result = game.list_moves_no_check(); //Then let expected: HashSet<Position> = vector!["b2", 
"c3", "d4", "e5", "f6", "g7", "h8"] .iter() .map(|pos| Position::from(pos).unwrap()) .collect(); let result_positions: HashSet<Position> = result.iter().map(|mv| mv.to).collect(); assert_that!(result_positions).is_equal_to(expected); } #[test] fn queen() { // Given let game = Game::from_board( Board::empty().put( Position::from("a1").unwrap(), Piece::new(PieceType::Queen, Player::White), ), White, ); // When let result = game.list_moves_no_check(); //Then let expected: HashSet<Position> = [ "a2", "a3", "a4", "a5", "a6", "a7", "a8", "b1", "c1", "d1", "e1", "f1", "g1", "h1", "b2", "c3", "d4", "e5", "f6", "g7", "h8", ] .iter() .map(|pos| Position::from(pos).unwrap()) .collect(); let result_positions: HashSet<Position> = result.iter().map(|mv| mv.to).collect(); assert_that!(result_positions).is_equal_to(expected); } #[test] fn knight() { // Given let game = Game::from_board( Board::empty().put( Position::from("a1").unwrap(), Piece::new(PieceType::Knight, Player::White), ), White, ); // When let result = game.list_moves_no_check(); //Then let expected: HashSet<Position> = ["b3", "c2"] .iter() .map(|pos| Position::from(pos).unwrap()) .collect(); let result_positions: HashSet<Position> = result.iter().map(|mv| mv.to).collect(); assert_that!(result_positions).is_equal_to(expected); } #[test] fn king() { // Given let game = Game::from_board( Board::empty().put( Position::from("a1").unwrap(), Piece::new(PieceType::King, Player::White), ), White, ); // When let result = game.list_moves_no_check(); //Then let expected: HashSet<Position> = ["a2", "b1", "b2"] .iter() .map(|pos| Position::from(pos).unwrap()) .collect(); let result_positions: HashSet<Position> = result.iter().map(|mv| mv.to).collect(); assert_that!(result_positions).is_equal_to(expected); } #[test] fn execute_promotion() { // Given let board = Board::empty().put( Position::from("e7").unwrap(), Piece::new(PieceType::Pawn, Player::White), ); let game = Game { board, player_turn: Player::White, en_passant: None, 
castle_white: (true, true), castle_black: (true, true), }; // When let result = game.execute_promotion( Position::from("e7").unwrap(), Position::from("e8").unwrap(), Some(PieceType::Queen), ); // Then let new_board = Board::empty().put( Position::from("e8").unwrap(), Piece::new(PieceType::Queen, Player::White), ); let expected_new_game = Game { board: new_board, player_turn: Player::Black, en_passant: None, castle_white: (true, true), castle_black: (true, true), }; assert_eq!(result.unwrap(), expected_new_game); } #[test] fn is_black_check() { // Given let board = Board::empty() .put( Position::from("e3").unwrap(), Piece::new(PieceType::Pawn, Player::White), ) .put( Position::from("d4").unwrap(), Piece::new(PieceType::King, Player::Black), ); let game = Game::from_board(board, Player::Black); // When let result = game.is_check(Position::from("d4").unwrap()); // Then assert_that!(result).is_true(); } #[test] fn is_not_check() { // Given let board = Board::empty() .put( Position::from("e3").unwrap(), Piece::new(PieceType::Pawn, Player::White), ) .put( Position::from("d5").unwrap(), Piece::new(PieceType::King, Player::Black), ); let game = Game::from_board(board, Player::Black); // When let result = game.is_check(Position::from("d5").unwrap()); // Then assert_that!(result).is_false(); } #[test] fn is_white_check() { // Given let board = Board::empty() .put( Position::from("e3").unwrap(), Piece::new(PieceType::Pawn, Player::Black), ) .put( Position::from("d2").unwrap(), Piece::new(PieceType::King, Player::White), ); let game = Game::from_board(board, Player::White); // When let result = game.is_check(Position::from("d2").unwrap()); // Then assert_that!(result).is_true(); } #[test] fn dont_list_check_moves() { // Given let board = Board::empty() .put( Position::from("d4").unwrap(), Piece::new(PieceType::King, Player::Black), ) .put( Position::from("d2").unwrap(), Piece::new(PieceType::King, Player::White), ); let game = Game::from_board(board, Player::White); // When 
let result = game.list_moves(); //Then let expected: HashSet<Position> = ["c2", "e2", "c1", "d1", "e1"] .iter() .map(|pos| Position::from(pos).unwrap()) .collect(); let result_positions: HashSet<Position> = result.iter().map(|mv| mv.to).collect(); assert_that!(result_positions).is_equal_to(expected); } #[test] fn castle() { // Given let game = Game::from_board_with_castle( Board::empty() .put( Position::from("e1").unwrap(), Piece::new(PieceType::King, Player::White), ) .put( Position::from("e8").unwrap(), Piece::new(PieceType::King, Player::Black), ) .put( Position::from("h1").unwrap(), Piece::new(PieceType::Rook, Player::White), ), White, false, true, false, false, ); // When let result = game.list_moves(); //Then assert_that!(result).matching_contains(|mv| { mv.from == Position::from("e1").unwrap() && mv.to == Position::from("g1").unwrap() }); } #[test] fn execute_castle() { // Given let game = Game::from_board_with_castle( Board::empty() .put( Position::from("e1").unwrap(), Piece::new(PieceType::King, Player::White), ) .put( Position::from("e8").unwrap(), Piece::new(PieceType::King, Player::Black), ) .put( Position::from("h1").unwrap(), Piece::new(PieceType::Rook, Player::White), ), White, false, true, false, false, ); // When let result = game.execute_move(Position::from("e1").unwrap(), Position::from("g1").unwrap()); //Then let expected_game = Game::from_board_with_castle( Board::empty() .put( Position::from("g1").unwrap(), Piece::new(PieceType::King, Player::White), ) .put( Position::from("e8").unwrap(), Piece::new(PieceType::King, Player::Black), ) .put( Position::from("f1").unwrap(), Piece::new(PieceType::Rook, Player::White), ), Black, false, false, false, false, ); assert_eq!(result, Ok(expected_game)); } #[test] fn execute_castle_a() { // Given let game = Game::from_board_with_castle( Board::empty() .put( Position::from("e1").unwrap(), Piece::new(PieceType::King, Player::White), ) .put( Position::from("e8").unwrap(), Piece::new(PieceType::King, 
Player::Black), ) .put( Position::from("a1").unwrap(), Piece::new(PieceType::Rook, Player::White), ), White, true, false, false, false, ); // When let result = game.execute_move(Position::from("e1").unwrap(), Position::from("c1").unwrap()); //Then let expected_game = Game::from_board_with_castle( Board::empty() .put( Position::from("c1").unwrap(), Piece::new(PieceType::King, Player::White), ) .put( Position::from("e8").unwrap(), Piece::new(PieceType::King, Player::Black), ) .put( Position::from("d1").unwrap(), Piece::new(PieceType::Rook, Player::White), ), Black, false, false, false, false, ); assert_eq!(result, Ok(expected_game)); } #[test] fn execute_castle_black_h() { // Given let game = Game::from_board_with_castle( Board::empty() .put( Position::from("e1").unwrap(), Piece::new(PieceType::King, Player::White), ) .put( Position::from("e8").unwrap(), Piece::new(PieceType::King, Player::Black), ) .put( Position::from("h8").unwrap(), Piece::new(PieceType::Rook, Player::Black), ), Black, true, true, false, true, ); // When let result = game.execute_move(Position::from("e8").unwrap(), Position::from("g8").unwrap()); //Then let expected_game = Game::from_board_with_castle( Board::empty() .put( Position::from("e1").unwrap(), Piece::new(PieceType::King, Player::White), ) .put( Position::from("g8").unwrap(), Piece::new(PieceType::King, Player::Black), ) .put( Position::from("f8").unwrap(), Piece::new(PieceType::Rook, Player::Black), ), White, true, true, false, false, ); assert_eq!(result, Ok(expected_game)); } #[test] fn disable_castle_on_rook_move() { // Given let game = Game::from_board_with_castle( Board::empty() .put( Position::from("e1").unwrap(), Piece::new(PieceType::King, Player::White), ) .put( Position::from("e8").unwrap(), Piece::new(PieceType::King, Player::Black), ) .put( Position::from("h1").unwrap(), Piece::new(PieceType::Rook, Player::White), ), White, false, true, false, false, ); // When let result = game.execute_move(Position::from("h1").unwrap(), 
Position::from("g1").unwrap()); //Then let expected_game = Game::from_board_with_castle( Board::empty() .put( Position::from("e1").unwrap(), Piece::new(PieceType::King, Player::White), ) .put( Position::from("e8").unwrap(), Piece::new(PieceType::King, Player::Black), ) .put( Position::from("g1").unwrap(), Piece::new(PieceType::Rook, Player::White), ), Black, false, false, false, false, ); assert_eq!(result, Ok(expected_game)); } #[test] fn disable_castle_on_rook_move2() { // Given let game = Game::from_board_with_castle( Board::empty() .put( Position::from("e1").unwrap(), Piece::new(PieceType::King, Player::White), ) .put( Position::from("e8").unwrap(), Piece::new(PieceType::King, Player::Black), ) .put( Position::from("h1").unwrap(), Piece::new(PieceType::Rook, Player::White), ), White, true, true, false, false, ); // When let result = game.execute_move(Position::from("h1").unwrap(), Position::from("g1").unwrap()); //Then let expected_game = Game::from_board_with_castle( Board::empty() .put( Position::from("e1").unwrap(), Piece::new(PieceType::King, Player::White), ) .put( Position::from("e8").unwrap(), Piece::new(PieceType::King, Player::Black), ) .put( Position::from("g1").unwrap(), Piece::new(PieceType::Rook, Player::White), ), Black, true, false, false, false, ); assert_eq!(result, Ok(expected_game)); } #[test] fn disable_castle_on_rook_move_a() { // Given let game = Game::from_board_with_castle( Board::empty() .put( Position::from("e1").unwrap(), Piece::new(PieceType::King, Player::White), ) .put( Position::from("e8").unwrap(), Piece::new(PieceType::King, Player::Black), ) .put( Position::from("a1").unwrap(), Piece::new(PieceType::Rook, Player::White), ), White, true, true, false, false, ); // When let result = game.execute_move(Position::from("a1").unwrap(), Position::from("b1").unwrap()); //Then let expected_game = Game::from_board_with_castle( Board::empty() .put( Position::from("e1").unwrap(), Piece::new(PieceType::King, Player::White), ) .put( 
Position::from("e8").unwrap(), Piece::new(PieceType::King, Player::Black), ) .put( Position::from("b1").unwrap(), Piece::new(PieceType::Rook, Player::White), ), Black, false, true, false, false, ); assert_eq!(result, Ok(expected_game)); } #[test] fn disable_castle_on_rook_move_a_2() { // Given let game = Game::from_board_with_castle( Board::empty() .put( Position::from("e1").unwrap(), Piece::new(PieceType::King, Player::White), ) .put( Position::from("e8").unwrap(), Piece::new(PieceType::King, Player::Black), ) .put( Position::from("a1").unwrap(), Piece::new(PieceType::Rook, Player::White), ), White, true, false, false, false, ); // When let result = game.execute_move(Position::from("a1").unwrap(), Position::from("b1").unwrap()); //Then let expected_game = Game::from_board_with_castle( Board::empty() .put( Position::from("e1").unwrap(), Piece::new(PieceType::King, Player::White), ) .put( Position::from("e8").unwrap(), Piece::new(PieceType::King, Player::Black), ) .put( Position::from("b1").unwrap(), Piece::new(PieceType::Rook, Player::White), ), Black, false, false, false, false, ); assert_eq!(result, Ok(expected_game)); } #[test] fn disable_castle_on_rook_move_black() { // Given let game = Game::from_board_with_castle( Board::empty() .put( Position::from("e1").unwrap(), Piece::new(PieceType::King, Player::White), ) .put( Position::from("e8").unwrap(), Piece::new(PieceType::King, Player::Black), ) .put( Position::from("h8").unwrap(), Piece::new(PieceType::Rook, Player::Black), ), Black, false, false, false, true, ); // When let result = game.execute_move(Position::from("h8").unwrap(), Position::from("g8").unwrap()); //Then let expected_game = Game::from_board_with_castle( Board::empty() .put( Position::from("e1").unwrap(), Piece::new(PieceType::King, Player::White), ) .put( Position::from("e8").unwrap(), Piece::new(PieceType::King, Player::Black), ) .put( Position::from("g8").unwrap(), Piece::new(PieceType::Rook, Player::Black), ), White, false, false, false, 
false, ); assert_eq!(result, Ok(expected_game)); } #[test] fn disable_castle_on_rook_move_a_black() { // Given let game = Game::from_board_with_castle( Board::empty() .put( Position::from("e1").unwrap(), Piece::new(PieceType::King, Player::White), ) .put( Position::from("e8").unwrap(), Piece::new(PieceType::King, Player::Black), ) .put( Position::from("a8").unwrap(), Piece::new(PieceType::Rook, Player::Black), ), Black, false, false, true, true, ); // When let result = game.execute_move(Position::from("a8").unwrap(), Position::from("b8").unwrap()); //Then let expected_game = Game::from_board_with_castle( Board::empty() .put( Position::from("e1").unwrap(), Piece::new(PieceType::King, Player::White), ) .put( Position::from("e8").unwrap(), Piece::new(PieceType::King, Player::Black), ) .put( Position::from("b8").unwrap(), Piece::new(PieceType::Rook, Player::Black), ), White, false, false, false, true, ); assert_eq!(result, Ok(expected_game)); } }
true
e0592a86efb6391ec833d295b2bf93c21ea31dd4
Rust
danielCutipa/migration_csv_to_postgresql
/src/main.rs
UTF-8
14,009
2.84375
3
[]
no_license
#![feature(proc_macro_hygiene, decl_macro)] #[macro_use] extern crate rocket; extern crate chrono; extern crate migration_csv_to_postgresql; extern crate postgres; use chrono::offset::{TimeZone, Utc}; use postgres::{Connection, TlsMode}; use std::fs::File; use std::io::{BufRead, BufReader}; use migration_csv_to_postgresql::is_dot; use migration_csv_to_postgresql::is_lower; use migration_csv_to_postgresql::is_middle_dash; use migration_csv_to_postgresql::is_number; use migration_csv_to_postgresql::is_two_words; use migration_csv_to_postgresql::is_underscore; use migration_csv_to_postgresql::is_upper; use migration_csv_to_postgresql::is_upper_enie; #[get("/")] fn index() -> String { String::from("HELLO WORLD") } // struct User<'a> { // username: &'a str, // password: &'a str, // } // #[post("/", data = "<var>")] // fn hello(var: User) -> String { // format!("{} {}", var.username, var.password) // } struct Persona { id: i32, // nombre: String, // identificacion: String, tipo_identificacion: String, // genero: String, estado_civil: String, fecha_nacimiento: String, // telefono: String, tipo_telefono: String, // direccion: String, email: String, observacion: Option<String>, validado: bool, } fn main() { // rocket::ignite().mount("/", routes![index, hello]).launch(); create_table(); // READ THE FILE let filename = "registros.csv"; // Open the file in read-only mode (ignoring errors). let file = File::open(filename).unwrap(); let reader = BufReader::new(file); // Read the file line by line using the lines() iterator from std::io::BufRead. for (index, line) in reader.lines().enumerate() { let line = line.unwrap(); // Ignore errors. // Show the line and its number. println!("{}. 
{}", index + 1, line); let line_split: Vec<&str> = line.split(';').collect(); // println!("{:?}", line_split); let identificacion = line_split[0]; let nombre = line_split[1]; let genero = line_split[2]; let estado_civil = line_split[3]; let fecha_nacimiento = line_split[4]; let telefono = line_split[5]; let direccion = line_split[6]; let email = line_split[7]; let mut observacion = String::new(); let mut validado = true; let mut tipo_identificacion = String::new(); let mut tipo_telefono = String::new(); /* IDENTIFICACION */ match identificacion.len() { 10 => { if check_cedula(&identificacion) { tipo_identificacion.push_str("CEDULA"); } else { observacion.push_str("Cedula, no cumple con reglas, "); validado = false; } } len => { let result = check_passport(&identificacion, len); if result == "true" { tipo_identificacion.push_str("PASAPORTE"); } else if result == "false" { observacion.push_str("Pasaporte, no cumple con reglas, "); validado = false; } else { tipo_identificacion.push_str("CEDULA"); observacion.push_str(&result); validado = false; } } } /* NOMBRE */ let nombre = nombre.to_uppercase(); let chars: Vec<char> = nombre.chars().collect(); let mut name = String::new(); for elem in chars { if !elem.is_ascii() { let elem_string = &elem.to_string(); let ascii_word = elem_string.as_bytes(); if ascii_word.contains(&195) { if ascii_word.contains(&129) { name.push('A'); } else if ascii_word.contains(&137) { name.push('E'); } else if ascii_word.contains(&141) { name.push('I'); } else if ascii_word.contains(&147) { name.push('O'); } else if ascii_word.contains(&154) { name.push('U'); } else { name.push(elem); } } } else { name.push(elem); } } let nombre = name; if is_two_words(&nombre) { } else { observacion.push_str("nombre invalido, "); validado = false; } /* GENERO */ let genero = match genero { "M" => "M", "F" => "F", "NULL" => "NULL", _ => "NULL", }; /* ESTADO CIVIL */ const ESTADO_CIVIL: [&str; 6] = [ "SOLTERO", "CASADO", "DIVORCIADO", "VIUDO", "EN UNION DE 
HECHO", "NULL", // "UNION LIBRE", // "SEPARADO", ]; let estado_civil = estado_civil.to_uppercase(); let mut result = String::new(); for elem in ESTADO_CIVIL.iter() { if elem.to_string() == estado_civil { result.push_str(elem); } } if result.len() == 0 { result.push_str("NULL"); } let estado_civil = result; /* FECHA DE NACIMIENTO */ let date_split: Vec<&str> = fecha_nacimiento.split('-').collect(); if date_split.len() == 3 { let year: i32 = date_split[0].parse().unwrap_or(0); let month: u32 = date_split[1].parse().unwrap_or(0); let day: u32 = date_split[2].parse().unwrap_or(0); let dt1 = Utc.ymd(year, month, day); let dt2 = Utc::now().date(); let duration = dt2.signed_duration_since(dt1); let weeks = duration.num_weeks(); let years = weeks / 52; if years >= 8 && years <= 95 { } else { observacion.push_str("no esta en el rango de edad, ") } } /* TELEFONO */ let chars: Vec<char> = telefono.chars().collect(); let mut is_valid = true; let mut tel = String::new(); for elem in chars { if is_number(&elem.to_string()) { } else { is_valid = false; } } if is_valid { match telefono.len() { 9 => { let start_with = &telefono[..2]; let mut numeros_provincia: Vec<u32> = Vec::new(); for elem in 2..7 { numeros_provincia.push(elem); } let start_with: u32 = start_with.parse().unwrap(); if numeros_provincia.contains(&start_with) { tel.push_str("593"); tipo_telefono.push_str("CONVENCIONAL"); } tel.push_str(telefono); } 10 => { let start_with = &telefono[..2]; if start_with == "09" { tel.push_str("593"); tipo_telefono.push_str("CELULAR"); } tel.push_str(telefono); } len => { if len >= 6 { tel.push_str(telefono); } } } } else { observacion.push_str("telefono invalido, "); validado = false; } let telefono = tel; /* DIRECCION */ if is_two_words(&direccion) { } else { observacion.push_str("direccion invalida, "); } /* EMAIL */ let mut email = email; email = email.trim(); let mut email = email.to_string(); email.retain(|c| c != ' '); let mut email_split: Vec<&str> = 
email.split('@').collect(); let mut email = String::new(); if email_split.len() == 2 { if email_split[1].starts_with(".") { email_split[1] = &email_split[1][1..]; } if email_split[0].ends_with(".") { email_split[0] = &email_split[0][..email_split[0].len() - 1]; } let dominio_ext: Vec<&str> = email_split[1].split('.').collect(); let dominio = dominio_ext[0]; if dominio.len() >= 2 && dominio.len() <= 6 { } else { observacion.push_str("tamaño del dominio[email] es incorrecto, "); validado = false; } let chars: Vec<char> = email_split[0].chars().collect(); let mut is_valid = true; for word in chars { let word_string = word.to_string(); if is_number(&word_string) || is_lower(&word_string) || is_dot(&word_string) || is_middle_dash(&word_string) || is_underscore(&word_string) { } else { is_valid = false; } } if is_valid { email.push_str(email_split[0]); email.push_str("@"); email.push_str(email_split[1]); } else { for elem in email_split { email.push_str(elem); } observacion.push_str("email invalido"); validado = false; } } let data_to_save = Persona { id: 0, nombre: nombre.to_owned(), identificacion: identificacion.to_owned(), tipo_identificacion: tipo_identificacion.to_owned(), genero: genero.to_owned(), estado_civil: estado_civil.to_owned(), fecha_nacimiento: fecha_nacimiento.to_owned(), telefono: telefono.to_owned(), tipo_telefono: tipo_telefono.to_owned(), direccion: direccion.to_owned(), email: email.to_owned(), observacion: Some(observacion.to_owned()), validado: validado, }; save_data(data_to_save); } } fn check_cedula(identificacion: &str) -> bool { const CONSUMIDOR_FINAL: &str = "9999999999"; let mut numeros_valido_cedula: Vec<u32> = vec![30, 50, 80]; for elem in 1..25 { numeros_valido_cedula.push(elem); } if is_number(&identificacion) { let ident_slice_two = &identificacion[..2]; let ident_slice_two: u32 = ident_slice_two.parse().unwrap(); if numeros_valido_cedula.contains(&ident_slice_two) || CONSUMIDOR_FINAL == identificacion { return true; } } false } fn 
check_passport(identificacion: &str, len: usize) -> String { if len >= 5 && len <= 20 { let chars: Vec<char> = identificacion.chars().collect(); let mut is_valid = true; for word in chars { let word_string = word.to_string(); if is_number(&word_string) || is_upper(&word_string) || is_upper_enie(&word_string) { } else { is_valid = false; } } if is_valid { if len >= 10 { let num_cedula = &identificacion[..10]; if check_cedula(num_cedula) { return "este número es una cedula y no un pasaporte. ".to_string(); } } return "true".to_string(); } } "false".to_string() } fn save_data(data_to_save: Persona) { let conn = Connection::connect( "postgresql://postgres:postgres@localhost:5432/migration", TlsMode::None, ) .unwrap(); conn.execute( "INSERT INTO persona ( nombre, identificacion, tipo_identificacion, genero, estado_civil, fecha_nacimiento, telefono, tipo_telefono, direccion, email, observacion, validado ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)", &[ &data_to_save.nombre, &data_to_save.identificacion, &data_to_save.tipo_identificacion, &data_to_save.genero, &data_to_save.estado_civil, &data_to_save.fecha_nacimiento, &data_to_save.telefono, &data_to_save.tipo_telefono, &data_to_save.direccion, &data_to_save.email, &data_to_save.observacion, &data_to_save.validado, ], ) .unwrap(); } fn create_table() { let conn = Connection::connect( "postgresql://postgres:postgres@localhost:5432/migration", TlsMode::None, ) .unwrap(); conn.execute( "CREATE TABLE IF NOT EXISTS persona ( id SERIAL PRIMARY KEY, nombre VARCHAR NOT NULL, email VARCHAR NOT NULL, identificacion VARCHAR, tipo_identificacion VARCHAR, genero VARCHAR, estado_civil VARCHAR, fecha_nacimiento VARCHAR, telefono VARCHAR NOT NULL, tipo_telefono VARCHAR, direccion VARCHAR, observacion VARCHAR, validado BOOLEAN )", &[], ) .unwrap(); } // for row in &conn // .query("SELECT id, name, data FROM person", &[]) // .unwrap() // { // let person = Person { // id: row.get(0), // name: row.get(1), // data: 
row.get(2), // }; // println!("Found person {}", person.name); // }
true
a272ae72dc7345f902c30fa03e0c5217f8481821
Rust
micxjo/rust-advent
/src/day16.rs
UTF-8
2,198
2.875
3
[ "MIT" ]
permissive
use std::collections::HashMap;

/// Returns true if the observed `count` for compound `thing` (key includes the
/// trailing colon, e.g. "cats:") is consistent with the remembered analysis.
/// Part 1 requires exact equality; part 2 treats the cats/trees readings as
/// strict lower bounds and pomeranians/goldfish as strict upper bounds.
fn check_thing(analysis: &HashMap<String, u32>, thing: &str, count: u32, part: u32) -> bool {
    // `analysis` is fully populated in `process_file`, so the key must exist.
    let real_count = *analysis.get(thing).expect("unknown compound in analysis");
    if part == 2 && (thing == "cats:" || thing == "trees:") {
        count > real_count
    } else if part == 2 && (thing == "pomeranians:" || thing == "goldfish:") {
        count < real_count
    } else {
        count == real_count
    }
}

/// Scans the puzzle input at `path` for the first "Sue" whose three listed
/// readings are all consistent with the analysis, and prints her number.
fn process_part(analysis: &HashMap<String, u32>, path: &str, part: u32) {
    use std::fs::File;
    use std::io::prelude::*;
    use std::io::BufReader;

    let file = File::open(path).unwrap();
    let reader = BufReader::new(file);

    'sues: for line in reader.lines() {
        let line = line.unwrap();
        // Line shape: `Sue N: thing: c, thing: c, thing: c`
        let words: Vec<_> = line.split(' ').collect();

        // Words 2.. alternate between a compound name ("cats:") and its
        // reading ("7,"); reject this Sue as soon as one reading conflicts.
        for pair in words[2..].chunks(2) {
            let count = pair[1].trim_matches(',').parse::<u32>().unwrap();
            if !check_thing(analysis, pair[0], count, part) {
                continue 'sues;
            }
        }

        println!("Part {}: {}", part, words[1].trim_matches(':'));
        break;
    }
}

/// Entry point: builds the MFCSAM analysis from the puzzle statement and
/// solves both parts against the input file at `path`.
pub fn process_file(path: &str) {
    let analysis: HashMap<String, u32> = [
        ("children:", 3),
        ("cats:", 7),
        ("samoyeds:", 2),
        ("pomeranians:", 3),
        ("akitas:", 0),
        ("vizslas:", 0),
        ("goldfish:", 5),
        ("trees:", 3),
        ("cars:", 2),
        ("perfumes:", 1),
    ]
    .iter()
    .map(|&(k, v)| (k.to_owned(), v))
    .collect();

    process_part(&analysis, path, 1);
    process_part(&analysis, path, 2);
}
true
a1f6958c65dc00e82cc3568d5370cfd8c1629289
Rust
isgasho/algorithm-1
/tests/test_strings_alphabet.rs
UTF-8
566
2.875
3
[ "MIT" ]
permissive
use algo::strings::alphabet;

#[test]
fn t_alphabet() {
    // Round-trip property: converting a string to alphabet indices and then
    // back to characters must reproduce the original string exactly.

    // Base64 alphabet.
    let base64_input = "NowIsTheTimeForAllGoodMen";
    let indices = alphabet::BASE64.to_indices(base64_input);
    assert_eq!(base64_input, alphabet::BASE64.to_chars(&indices));

    // DNA alphabet (A/C/G/T).
    let dna_input = "AACGAACGGTTTACCCCG";
    let indices = alphabet::DNA.to_indices(dna_input);
    assert_eq!(dna_input, alphabet::DNA.to_chars(&indices));

    // Decimal-digit alphabet.
    let decimal_input = "01234567890123456789";
    let indices = alphabet::DECIMAL.to_indices(decimal_input);
    assert_eq!(decimal_input, alphabet::DECIMAL.to_chars(&indices));
}
true
3e115cb9e35f83d69291fc9a295710a8eda62630
Rust
AIT-S/solidity-rs
/solast/src/analysis/ineffectual_statements.rs
UTF-8
4,987
3.125
3
[ "MIT" ]
permissive
use solidity::ast::*; use std::io; pub struct IneffectualStatementsVisitor; impl IneffectualStatementsVisitor { fn print_message( &mut self, contract_definition: &ContractDefinition, definition_node: &ContractDefinitionNode, source_line: usize, description: &str, expression: &dyn std::fmt::Display ) { match definition_node { ContractDefinitionNode::FunctionDefinition(function_definition) => println!( "\tL{}: The {} {} in the `{}` {} contains an ineffectual {} statement: `{}`", source_line, function_definition.visibility, if let FunctionKind::Constructor = function_definition.kind { format!("{}", "constructor") } else { format!("`{}` {}", function_definition.name, function_definition.kind) }, contract_definition.name, contract_definition.kind, description, expression ), ContractDefinitionNode::ModifierDefinition(modifier_definition) => println!( "\tL{}: The `{}` modifier in the `{}` {} contains an ineffectual {} statement: `{}`", source_line, modifier_definition.name, contract_definition.name, contract_definition.kind, description, expression ), _ => {} } } } impl AstVisitor for IneffectualStatementsVisitor { fn visit_statement<'a, 'b>(&mut self, context: &mut StatementContext<'a, 'b>) -> io::Result<()> { let expression = match context.statement { Statement::ExpressionStatement(ExpressionStatement { expression }) => expression, _ => return Ok(()) }; match expression { Expression::Literal(literal) => self.print_message( context.contract_definition, context.definition_node, context.current_source_unit.source_line(literal.src.as_str())?, "literal", literal ), Expression::Identifier(identifier) => self.print_message( context.contract_definition, context.definition_node, context.current_source_unit.source_line(identifier.src.as_str())?, "identifier", identifier ), Expression::IndexAccess(index_access) => self.print_message( context.contract_definition, context.definition_node, context.current_source_unit.source_line(index_access.src.as_str())?, "index access", 
index_access ), Expression::IndexRangeAccess(index_range_access) => self.print_message( context.contract_definition, context.definition_node, context.current_source_unit.source_line(index_range_access.src.as_str())?, "index range access", index_range_access ), Expression::MemberAccess(member_access) => self.print_message( context.contract_definition, context.definition_node, context.current_source_unit.source_line(member_access.src.as_str())?, "member access", member_access ), Expression::BinaryOperation(binary_operation) => self.print_message( context.contract_definition, context.definition_node, context.current_source_unit.source_line(binary_operation.src.as_str())?, "binary operation", binary_operation ), Expression::Conditional(conditional) => self.print_message( context.contract_definition, context.definition_node, context.current_source_unit.source_line(conditional.src.as_str())?, "conditional", conditional ), Expression::TupleExpression(tuple_expression) => self.print_message( context.contract_definition, context.definition_node, context.current_source_unit.source_line(tuple_expression.src.as_str())?, "tuple expression", tuple_expression ), Expression::FunctionCallOptions(function_call_options) => self.print_message( context.contract_definition, context.definition_node, context.current_source_unit.source_line(function_call_options.src.as_str())?, "function call options", function_call_options ), _ => {} } Ok(()) } }
true
ef8d20c6319f3a590978a4a7d03395201a292e0a
Rust
rust-lang/rust
/src/tools/clippy/tests/ui/mem_replace.rs
UTF-8
3,608
2.96875
3
[ "Apache-2.0", "MIT", "LLVM-exception", "NCSA", "BSD-2-Clause", "LicenseRef-scancode-unicode", "LicenseRef-scancode-other-permissive" ]
permissive
// Clippy UI test fixture: every call below is deliberately written in a
// non-idiomatic `mem::replace(...)` form so that the lints enabled by the
// `#![warn(...)]` attribute fire (or, where noted, deliberately do not).
// NOTE(review): do not "fix" these patterns — their exact shape is the test.
#![allow(unused)]
#![warn(
    clippy::all,
    clippy::style,
    clippy::mem_replace_option_with_none,
    clippy::mem_replace_with_default
)]

use std::collections::{BTreeMap, BTreeSet, BinaryHeap, HashMap, HashSet, LinkedList, VecDeque};
use std::mem;

// Exercises `mem_replace_option_with_none`: replacing an Option with None
// through a direct `&mut` and through an existing mutable reference.
fn replace_option_with_none() {
    let mut an_option = Some(1);
    let _ = mem::replace(&mut an_option, None);
    let an_option = &mut Some(1);
    let _ = mem::replace(an_option, None);
}

// Exercises `mem_replace_with_default` across many replacement spellings
// (`T::default()`, `Default::default()`, `T::new()`, empty literals) and
// many container types, including tuples, `&str`, and slices.
fn replace_with_default() {
    let mut s = String::from("foo");
    let _ = std::mem::replace(&mut s, String::default());
    let s = &mut String::from("foo");
    let _ = std::mem::replace(s, String::default());
    let _ = std::mem::replace(s, Default::default());
    let mut v = vec![123];
    let _ = std::mem::replace(&mut v, Vec::default());
    let _ = std::mem::replace(&mut v, Default::default());
    let _ = std::mem::replace(&mut v, Vec::new());
    let _ = std::mem::replace(&mut v, vec![]);
    let mut hash_map: HashMap<i32, i32> = HashMap::new();
    let _ = std::mem::replace(&mut hash_map, HashMap::new());
    let mut btree_map: BTreeMap<i32, i32> = BTreeMap::new();
    let _ = std::mem::replace(&mut btree_map, BTreeMap::new());
    let mut vd: VecDeque<i32> = VecDeque::new();
    let _ = std::mem::replace(&mut vd, VecDeque::new());
    let mut hash_set: HashSet<&str> = HashSet::new();
    let _ = std::mem::replace(&mut hash_set, HashSet::new());
    let mut btree_set: BTreeSet<&str> = BTreeSet::new();
    let _ = std::mem::replace(&mut btree_set, BTreeSet::new());
    let mut list: LinkedList<i32> = LinkedList::new();
    let _ = std::mem::replace(&mut list, LinkedList::new());
    let mut binary_heap: BinaryHeap<i32> = BinaryHeap::new();
    let _ = std::mem::replace(&mut binary_heap, BinaryHeap::new());
    let mut tuple = (vec![1, 2], BinaryHeap::<i32>::new());
    let _ = std::mem::replace(&mut tuple, (vec![], BinaryHeap::new()));
    let mut refstr = "hello";
    let _ = std::mem::replace(&mut refstr, "");
    let mut slice: &[i32] = &[1, 2, 3];
    let _ = std::mem::replace(&mut slice, &[]);
}

// lint is disabled for primitives because in this case `take`
// has no clear benefit over `replace` and sometimes is harder to read
fn dont_lint_primitive() {
    let mut pbool = true;
    let _ = std::mem::replace(&mut pbool, false);
    let mut pint = 5;
    let _ = std::mem::replace(&mut pint, 0);
}

fn main() {
    replace_option_with_none();
    replace_with_default();
    dont_lint_primitive();
}

// The two MSRV-pinned functions below bracket the lint's minimum-version
// gate (presumably the `String::default()` suggestion requires 1.40 —
// TODO confirm against the lint's declared MSRV).
#[clippy::msrv = "1.39"]
fn msrv_1_39() {
    let mut s = String::from("foo");
    let _ = std::mem::replace(&mut s, String::default());
}

#[clippy::msrv = "1.40"]
fn msrv_1_40() {
    let mut s = String::from("foo");
    let _ = std::mem::replace(&mut s, String::default());
}

// Regression case: replacement through struct fields and through a
// `Deref`/`DerefMut` target must still be recognized by the lints.
fn issue9824() {
    struct Foo<'a>(Option<&'a str>);

    impl<'a> std::ops::Deref for Foo<'a> {
        type Target = Option<&'a str>;

        fn deref(&self) -> &Self::Target {
            &self.0
        }
    }

    impl<'a> std::ops::DerefMut for Foo<'a> {
        fn deref_mut(&mut self) -> &mut Self::Target {
            &mut self.0
        }
    }

    struct Bar {
        opt: Option<u8>,
        val: String,
    }

    let mut f = Foo(Some("foo"));
    let mut b = Bar {
        opt: Some(1),
        val: String::from("bar"),
    };

    // replace option with none
    let _ = std::mem::replace(&mut f.0, None);
    let _ = std::mem::replace(&mut *f, None);
    let _ = std::mem::replace(&mut b.opt, None);
    // replace with default
    let _ = std::mem::replace(&mut b.val, String::default());
}
70fa4f3182c5d66959edf8e5bdda919ed20faa80
Rust
wasmerio/wasmer
/lib/virtual-fs/src/builder.rs
UTF-8
6,015
2.75
3
[ "MIT" ]
permissive
use crate::random_file::RandomFile; use crate::{FileSystem, VirtualFile}; use std::path::{Path, PathBuf}; use tracing::*; use super::ZeroFile; use super::{DeviceFile, NullFile}; use crate::tmp_fs::TmpFileSystem; pub struct RootFileSystemBuilder { default_root_dirs: bool, default_dev_files: bool, add_wasmer_command: bool, stdin: Option<Box<dyn VirtualFile + Send + Sync>>, stdout: Option<Box<dyn VirtualFile + Send + Sync>>, stderr: Option<Box<dyn VirtualFile + Send + Sync>>, tty: Option<Box<dyn VirtualFile + Send + Sync>>, } impl Default for RootFileSystemBuilder { fn default() -> Self { Self { default_root_dirs: true, default_dev_files: true, add_wasmer_command: true, stdin: None, stdout: None, stderr: None, tty: None, } } } impl RootFileSystemBuilder { pub fn new() -> Self { Self::default() } pub fn with_stdin(mut self, file: Box<dyn VirtualFile + Send + Sync>) -> Self { self.stdin.replace(file); self } pub fn with_stdout(mut self, file: Box<dyn VirtualFile + Send + Sync>) -> Self { self.stdout.replace(file); self } pub fn with_stderr(mut self, file: Box<dyn VirtualFile + Send + Sync>) -> Self { self.stderr.replace(file); self } pub fn with_tty(mut self, file: Box<dyn VirtualFile + Send + Sync>) -> Self { self.tty.replace(file); self } pub fn default_root_dirs(mut self, val: bool) -> Self { self.default_root_dirs = val; self } pub fn build(self) -> TmpFileSystem { let tmp = TmpFileSystem::new(); if self.default_root_dirs { for root_dir in &["/.app", "/.private", "/bin", "/dev", "/etc", "/tmp"] { if let Err(err) = tmp.create_dir(Path::new(root_dir)) { debug!("failed to create dir [{}] - {}", root_dir, err); } } } if self.add_wasmer_command { let _ = tmp .new_open_options_ext() .insert_device_file(PathBuf::from("/bin/wasmer"), Box::<NullFile>::default()); } if self.default_dev_files { let _ = tmp .new_open_options_ext() .insert_device_file(PathBuf::from("/dev/null"), Box::<NullFile>::default()); let _ = tmp .new_open_options_ext() 
.insert_device_file(PathBuf::from("/dev/zero"), Box::<ZeroFile>::default()); let _ = tmp .new_open_options_ext() .insert_device_file(PathBuf::from("/dev/urandom"), Box::<RandomFile>::default()); let _ = tmp.new_open_options_ext().insert_device_file( PathBuf::from("/dev/stdin"), self.stdin .unwrap_or_else(|| Box::new(DeviceFile::new(DeviceFile::STDIN))), ); let _ = tmp.new_open_options_ext().insert_device_file( PathBuf::from("/dev/stdout"), self.stdout .unwrap_or_else(|| Box::new(DeviceFile::new(DeviceFile::STDOUT))), ); let _ = tmp.new_open_options_ext().insert_device_file( PathBuf::from("/dev/stderr"), self.stderr .unwrap_or_else(|| Box::new(DeviceFile::new(DeviceFile::STDERR))), ); let _ = tmp.new_open_options_ext().insert_device_file( PathBuf::from("/dev/tty"), self.tty.unwrap_or_else(|| Box::<NullFile>::default()), ); } tmp } } #[cfg(test)] mod test_builder { use crate::{FileSystem, RootFileSystemBuilder}; use tokio::io::{AsyncReadExt, AsyncWriteExt}; #[tokio::test] async fn test_root_file_system() { let root_fs = RootFileSystemBuilder::new().build(); let mut dev_null = root_fs .new_open_options() .read(true) .write(true) .open("/dev/null") .unwrap(); assert_eq!(dev_null.write(b"hello").await.unwrap(), 5); let mut buf = Vec::new(); dev_null.read_to_end(&mut buf).await.unwrap(); assert!(buf.is_empty()); assert!(dev_null.get_special_fd().is_none()); let mut dev_zero = root_fs .new_open_options() .read(true) .write(true) .open("/dev/zero") .unwrap(); assert_eq!(dev_zero.write(b"hello").await.unwrap(), 5); let mut buf = vec![1; 10]; dev_zero.read_exact(&mut buf[..]).await.unwrap(); assert_eq!(buf, vec![0; 10]); assert!(dev_zero.get_special_fd().is_none()); let mut dev_tty = root_fs .new_open_options() .read(true) .write(true) .open("/dev/tty") .unwrap(); assert_eq!(dev_tty.write(b"hello").await.unwrap(), 5); let mut buf = Vec::new(); dev_tty.read_to_end(&mut buf).await.unwrap(); assert!(buf.is_empty()); assert!(dev_tty.get_special_fd().is_none()); root_fs 
.new_open_options() .read(true) .open("/bin/wasmer") .unwrap(); let dev_stdin = root_fs .new_open_options() .read(true) .write(true) .open("/dev/stdin") .unwrap(); assert_eq!(dev_stdin.get_special_fd().unwrap(), 0); let dev_stdout = root_fs .new_open_options() .read(true) .write(true) .open("/dev/stdout") .unwrap(); assert_eq!(dev_stdout.get_special_fd().unwrap(), 1); let dev_stderr = root_fs .new_open_options() .read(true) .write(true) .open("/dev/stderr") .unwrap(); assert_eq!(dev_stderr.get_special_fd().unwrap(), 2); } }
true
bf786bc88ce7d82d97db4d68a4379aad2c40af78
Rust
hcorrada/rusty_rosalind
/approximate_matching/src/main.rs
UTF-8
1,527
2.875
3
[]
no_license
extern crate itertools; extern crate rosalind_lib; use std::env; use std::fs::File; use std::io::BufReader; use std::io::BufRead; use itertools::Itertools; use rosalind_lib::kmers::find_matches; /// read input /// pub fn read_input(filename: &str) -> (String, String, usize) { let fhandle = File::open(filename) .ok() .expect("Couldn't open file"); let mut lines = BufReader::new(fhandle).lines(); let pattern = if let Some(Ok(x)) = lines.next() { x } else { panic!("Could not read pattern"); }; let text = if let Some(Ok(x)) = lines.next() { x } else { panic!("Could not read text"); }; let dstr = if let Some(Ok(x)) = lines.next() { x } else { panic!("Could not read d"); }; let d = if let Ok(x) = dstr.parse() { x } else { panic!("Could not parse d"); }; (pattern, text, d) } fn main() { let filename = env::args().nth(1) .expect("Need input filename as argument"); let (pattern, text, d) = read_input(&filename); let res = find_matches(&pattern, &text, d); let out = res.iter().join(" "); println!("{}", out); } #[cfg(test)] mod test { #[test] fn read_input() { let res = super::read_input("test.txt"); assert_eq!(res.0, "ATTCTGGA".to_string()); assert_eq!(res.1, "CGCCCGAATCCAGAACGCATTCCCATATTTCGGGACCACTGGCCTCCACGGTACGGACGTCAATCAAATGCCTAGCGGCTTGTGGTTTCTCCTACGCTCC".to_string()); assert_eq!(res.2, 3); } }
true
aa10ec6dc22bf95af266435e804f2f745644cdd8
Rust
Liamolucko/differential-datalog
/rust/template/differential_datalog/src/program/timestamp.rs
UTF-8
2,834
2.953125
3
[ "MIT" ]
permissive
//! Datalog timestamps use abomonation::Abomonation; use differential_dataflow::lattice::Lattice; use num::One; use std::{ ops::{Add, Mul}, sync::atomic::AtomicU32, }; use timely::{ order::{PartialOrder, Product}, progress::{PathSummary, Timestamp}, }; /// 16-bit timestamp. // TODO: get rid of this and use `u16` directly when/if differential implements // `Lattice`, `Timestamp`, `PathSummary` traits for `u16`. #[derive(Copy, PartialOrd, PartialEq, Eq, Debug, Default, Clone, Hash, Ord)] pub struct TS16 { pub x: u16, } impl TS16 { pub const fn max_value() -> TS16 { TS16 { x: 0xffff } } } impl Abomonation for TS16 {} impl Mul for TS16 { type Output = TS16; fn mul(self, rhs: TS16) -> Self::Output { TS16 { x: self.x * rhs.x } } } impl Add for TS16 { type Output = TS16; fn add(self, rhs: TS16) -> Self::Output { TS16 { x: self.x + rhs.x } } } impl One for TS16 { fn one() -> Self { TS16 { x: 1 } } } impl PartialOrder for TS16 { fn less_equal(&self, other: &Self) -> bool { self.x.less_equal(&other.x) } fn less_than(&self, other: &Self) -> bool { self.x.less_than(&other.x) } } impl Lattice for TS16 { fn minimum() -> Self { TS16 { x: u16::min_value(), } } fn join(&self, other: &Self) -> Self { TS16 { x: std::cmp::max(self.x, other.x), } } fn meet(&self, other: &Self) -> Self { TS16 { x: std::cmp::min(self.x, other.x), } } } impl Timestamp for TS16 { type Summary = TS16; } impl PathSummary<TS16> for TS16 { fn results_in(&self, src: &TS16) -> Option<TS16> { self.x.checked_add(src.x).map(|y| TS16 { x: y }) } fn followed_by(&self, other: &TS16) -> Option<TS16> { self.x.checked_add(other.x).map(|y| TS16 { x: y }) } } impl From<TS16> for u64 { fn from(ts: TS16) -> Self { ts.x as u64 } } /// Outer timestamp pub type TS = u32; pub(crate) type TSAtomic = AtomicU32; /// Timestamp for the nested scope /// Use 16-bit timestamps for inner scopes to save memory #[cfg(feature = "nested_ts_32")] pub type TSNested = u32; /// Timestamp for the nested scope /// Use 16-bit timestamps for inner 
scopes to save memory #[cfg(not(feature = "nested_ts_32"))] pub type TSNested = TS16; /// `Inspect` operator expects the timestampt to be a tuple. pub type TupleTS = (TS, TSNested); pub(crate) trait ToTupleTS { fn to_tuple_ts(&self) -> TupleTS; } /// 0-extend top-level timestamp to a tuple. impl ToTupleTS for TS { fn to_tuple_ts(&self) -> TupleTS { (*self, TSNested::default()) } } impl ToTupleTS for Product<TS, TSNested> { fn to_tuple_ts(&self) -> TupleTS { (self.outer, self.inner) } }
true
b04b879215504a2d52c4c65695c2a89890f339e3
Rust
seanwallawalla-forks/nushell
/crates/nu-command/tests/commands/drop.rs
UTF-8
1,188
3.046875
3
[ "MIT" ]
permissive
use nu_test_support::{nu, pipeline}; #[test] fn columns() { let actual = nu!( cwd: ".", pipeline(r#" echo [ [arepas, color]; [3, white] [8, yellow] [4, white] ] | drop column | get | length "#) ); assert_eq!(actual.out, "1"); } #[test] fn more_columns_than_table_has() { let actual = nu!( cwd: ".", pipeline(r#" echo [ [arepas, color]; [3, white] [8, yellow] [4, white] ] | drop column 3 | get | empty? "#) ); assert_eq!(actual.out, "true"); } #[test] fn rows() { let actual = nu!( cwd: ".", pipeline(r#" echo [ [arepas]; [3] [8] [4] ] | drop 2 | get arepas | math sum "#) ); assert_eq!(actual.out, "3"); } #[test] fn more_rows_than_table_has() { let actual = nu!(cwd: ".", "date | drop 50 | length"); assert_eq!(actual.out, "0"); }
true
64bf72d314e82c2f626a5eda6850e03cc4dd3363
Rust
hawkw/mycelium
/maitake/src/task.rs
UTF-8
51,075
2.78125
3
[ "MIT" ]
permissive
//! The `maitake` task system. //! //! This module contains the code that spawns tasks on a [scheduler], and //! manages the lifecycle of tasks once they are spawned. This includes the //! in-memory representation of spawned tasks (the [`Task`] type), and the //! handle used by the scheduler and other components of the runtime to //! reference a task once it is spawned (the [`TaskRef`] type). //! //! [scheduler]: crate::scheduler #[cfg(feature = "alloc")] pub use self::storage::BoxStorage; pub use self::{ builder::Builder, id::TaskId, join_handle::{JoinError, JoinHandle}, storage::Storage, }; pub use core::task::{Context, Poll, Waker}; mod builder; mod id; pub(crate) mod join_handle; mod state; mod storage; #[cfg(test)] mod tests; use crate::{ loom::{cell::UnsafeCell, sync::atomic::Ordering}, scheduler::Schedule, trace, util::non_null, }; #[cfg(debug_assertions)] use core::any::TypeId; use core::{ any::type_name, future::Future, marker::PhantomData, mem, pin::Pin, ptr::{self, NonNull}, task::{RawWaker, RawWakerVTable}, }; use self::{ builder::Settings, state::{JoinAction, OrDrop, ScheduleAction, StartPollAction, StateCell}, }; use cordyceps::{mpsc_queue, Linked}; use mycelium_util::{fmt, mem::CheckedMaybeUninit}; /// A type-erased, reference-counted pointer to a spawned [`Task`]. /// /// Once a task has been spawned, it is generally referenced by a `TaskRef`. /// When a spawned task is placed in a scheduler's run queue, dequeuing the next /// task will yield a `TaskRef`, and a `TaskRef` may be converted into a /// [`Waker`] or used to await a spawned task's completion. /// /// `TaskRef`s are reference-counted, and the task will be deallocated when the /// last `TaskRef` pointing to it is dropped. #[derive(Eq, PartialEq)] pub struct TaskRef(NonNull<Header>); /// A task. /// /// This type contains the various components of a task: the [future][`Future`] /// itself, the task's header, and a reference to the task's [scheduler]. 
When a /// task is spawned, the `Task` type is placed on the heap (or wherever spawned /// tasks are stored), and a type-erased [`TaskRef`] that points to that `Task` /// is returned. Once a task is spawned, it is primarily interacted with via /// [`TaskRef`]s. /// /// ## Vtables and Type Erasure /// /// The `Task` struct, once spawned, is rarely interacted with directly. Because /// a system may spawn any number of different [`Future`] types as tasks, and /// may potentially also contain multiple types of [scheduler] and/or [task /// storage], the scheduler and other parts of the system generally interact /// with tasks via type-erased [`TaskRef`]s. /// /// However, in order to actually poll a task's [`Future`], or perform other /// operations such as deallocating a task, it is necessary to know the type of /// the the task's [`Future`] (and potentially, that of the scheduler and/or /// storage). Therefore, operations that are specific to the task's `S`-typed /// [scheduler], `F`-typed [`Future`], and `STO`-typed [`Storage`] are performed /// via [dynamic dispatch]. /// /// [scheduler]: crate::scheduler::Schedule /// [task storage]: Storage /// [dynamic dispatch]: https://en.wikipedia.org/wiki/Dynamic_dispatch #[repr(C)] pub struct Task<S, F: Future, STO> { /// The task's [`Header`] and [scheduler]. /// /// # Safety /// /// This must be the first field of the `Task` struct! /// /// [scheduler]: crate::scheduler::Schedule schedulable: Schedulable<S>, /// The task itself. /// /// This is either the task's [`Future`], when it is running, /// or the future's [`Output`], when the future has completed. /// /// [`Future`]: core::future::Future /// [`Output`]: core::future::Future::Output inner: UnsafeCell<Cell<F>>, /// The [`Waker`] of the [`JoinHandle`] for this task, if one exists. /// /// # Safety /// /// This field is only initialized when the [`State::JOIN_WAKER`] state /// field is set to `JoinWakerState::Waiting`. 
If the join waker state is /// any other value, this field may be uninitialized. /// /// [`State::JOIN_WAKER`]: state::State::JOIN_WAKER join_waker: UnsafeCell<CheckedMaybeUninit<Waker>>, /// The [`Storage`] type associated with this struct /// /// In order to be agnostic over container types (e.g. [`Box`], or /// other user provided types), the Task is generic over a /// [`Storage`] type. /// /// [`Box`]: alloc::boxed::Box /// [`Storage`]: crate::task::Storage storage: PhantomData<STO>, } /// The task's header. /// /// This contains the *untyped* components of the task which are identical /// regardless of the task's future, output, and scheduler types: the /// [vtable], [state cell], and [run queue links]. /// /// See the [`Vtable` documentation](Vtable#task-vtables) for more details on a /// task's vtables. /// /// The header is the data at which a [`TaskRef`] points, and will likely be /// prefetched when dereferencing a [`TaskRef`] pointer.[^1] Therefore, the /// header should contain the task's most frequently accessed data, and should /// ideally fit within a CPU cache line. /// /// # Safety /// /// The [run queue links] *must* be the first field in this type, in order for /// the [`Linked::links` implementation] for this type to be sound. Therefore, /// the `#[repr(C)]` attribute on this struct is load-bearing. /// /// [vtable]: Vtable /// [state cell]: StateCell /// [run queue links]: cordyceps::mpsc_queue::Links /// [`Linked::links` implementation]: #method.links /// /// [^1]: On CPU architectures which support spatial prefetch, at least... #[repr(C)] #[derive(Debug)] pub(crate) struct Header { /// The task's links in the intrusive run queue. /// /// # Safety /// /// This MUST be the first field in this struct. run_queue: mpsc_queue::Links<Header>, /// The task's state, which can be atomically updated. state: StateCell, /// The task vtable for this task. 
/// /// Note that this is different from the [waker vtable], which contains /// pointers to the waker methods (and depends primarily on the task's /// scheduler type). The task vtable instead contains methods for /// interacting with the task's future, such as polling it and reading the /// task's output. These depend primarily on the type of the future rather /// than the scheduler. /// /// See the [`Vtable` documentation](Vtable#task-vtables) for /// more details on a task's vtables. /// /// [waker vtable]: core::task::RawWakerVTable vtable: &'static Vtable, /// The task's ID. id: TaskId, /// The task's `tracing` span, if `tracing` is enabled. span: trace::Span, #[cfg(debug_assertions)] scheduler_type: Option<TypeId>, } #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub(crate) enum PollResult { /// The task has completed, without waking a [`JoinHandle`] waker. /// /// The scheduler can increment a counter of completed tasks, and then drop /// the [`TaskRef`]. Ready, /// The task has completed and a [`JoinHandle`] waker has been woken. /// /// The scheduler can increment a counter of completed tasks, and then drop /// the [`TaskRef`]. ReadyJoined, /// The task is pending, but not woken. /// /// The scheduler can drop the [`TaskRef`], as whoever intends to wake the /// task later is holding a clone of its [`Waker`]. Pending, /// The task has woken itself during the poll. /// /// The scheduler should re-schedule the task, rather than dropping the [`TaskRef`]. PendingSchedule, } /// The task's [`Header`] and [scheduler] reference. /// /// This is factored out into a separate type from `Task` itself so that we can /// have a target for casting a pointer to that is generic only over the /// `S`-typed [scheduler], and not the task's `Future` and `Storage` types. This /// reduces excessive monomorphization of waker vtable functions. /// /// This type knows the task's [`RawWaker`] vtable, as the raw waker methods /// need only be generic over the type of the scheduler. 
It does not know the /// task's *task* vtable, as the task vtable actually polls the future and /// deallocates the task, and must therefore know the types of the task's future /// and storage. /// /// [scheduler]: crate::scheduler::Schedule #[repr(C)] struct Schedulable<S> { /// The task's header. /// /// This contains the *untyped* components of the task which are identical /// regardless of the task's future, output, and scheduler types: the /// [vtable], [state cell], and [run queue links]. /// /// # Safety /// /// This *must* be the first field in this type, to allow casting a /// `NonNull<Task>` to a `NonNull<Header>`. /// /// [vtable]: Vtable /// [state cell]: StateCell /// [run queue links]: cordyceps::mpsc_queue::Links header: Header, /// A reference to the [scheduler] this task is spawned on, or `None` if /// this task has not yet been bound to a scheduler. /// /// This is used to schedule the task when it is woken. /// /// [scheduler]: crate::scheduler::Schedule scheduler: UnsafeCell<Option<S>>, } /// The core of a task: either the [`Future`] that was spawned, if the task /// has not yet completed, or the [`Output`] of the future, once the future has /// completed. /// /// [`Output`]: Future::Output enum Cell<F: Future> { /// The future is still pending. Pending(F), /// The future has completed, and its output is ready to be taken by a /// `JoinHandle`, if one exists. Ready(F::Output), /// The future has completed, and the task's output has been taken or is not /// needed. Joined, } /// A [virtual function pointer table][vtable] (vtable) that specifies the /// behavior of a [`Task`] instance. /// /// This is distinct from the [`RawWakerVTable`] type in [`core::task`]: that /// type specifies the vtable for a task's [`Waker`], while this vtable /// specifies functions called by the runtime to poll, join, and deallocate a /// spawned task. /// /// The first argument passed to all functions inside this vtable is a pointer /// to the task. 
/// /// The functions inside this struct are only intended to be called on a pointer /// to a spawned [`Task`]. Calling one of the contained functions using /// any other pointer will cause undefined behavior. /// /// ## Task Vtables /// /// Each spawned task has two virtual function tables, which perform dynamic /// dispatch on the type-erased type parameters of the task (the `S`-typed /// [scheduler], the `F`-typed [`Future`], and the `STO`-typed [`Storage`]). /// /// The first vtable is the [`RawWakerVTable`], which is specified by the Rust /// standard library's [`core::task`] module. This vtable contains function /// pointers to the implementations of the task's [`Waker`] operations. The /// second vtable is the **task** vtable, which contains function pointers to /// functions that are specific to the task's [`Future`] type, such as polling /// the future and deallocating the task. /// /// The [`RawWakerVTable`] is monomorphic only over the `S`-typed [`Schedule`] /// implementation, so all tasks spawned on the same type of [scheduler] share /// one instance of the [`RawWakerVTable`]. On the other hand, the task vtable /// is monomorphic over the task's `F`-typed [`Future`] and `S`-typed /// [`Storage`], so a separate monomorphization of the task vtable methods is /// generated for each spawned [`Future`] type. /// /// The task vtable is generated by the [`Task`] struct, as it requires type /// information about the task's [`Future`] and [`Storage`], while the /// [`RawWakerVTable`] is generated by the [`Schedulable`] struct, as it only /// requires type information about the [`Schedule`] type. This reduces /// unnecessary monomorphization of the waker vtable methods for each future /// type that's spawned. /// /// The methods contained in each vtable are as follows: /// /// #### [`RawWakerVTable`] /// /// * **`unsafe fn `[`clone`]`(*const ()) -> `[`RawWaker`]** /// /// Called when a task's [`Waker`] is cloned. /// /// Increments the task's reference count. 
/// /// * **`unsafe fn `[`wake`]`(*const ())`** /// /// Called when a task is woken by value. /// /// Decrements the task's reference count. /// /// * **`unsafe fn `[`wake_by_ref`]`(*const ())`** /// /// Called when a task's [`Waker`] is woken through a reference. /// /// This wakes the task but does not change the task's reference count. /// /// * **`unsafe fn `[`drop`]`(*const ())`** /// /// Called when a task's [`Waker`] is dropped. /// /// Decrements the task's reference count. /// /// #### Task `Vtable` /// /// * **`unsafe fn `[`poll`]`(`[`NonNull`]`<`[`Header`]`>) -> `[`PollResult`]** /// /// Polls the task's [`Future`]. /// /// This does *not* consume a [`TaskRef`], as the scheduler may wish to do /// additional operations on the task even if it should be dropped. Instead, /// this function returns a [`PollResult`] that indicates what the scheduler /// should do with the task after the poll. /// /// * **`unsafe fn `[`poll_join`]`(`[`NonNull`]`<`[`Header`]`>, `[`NonNull`]`<()>, /// &mut `[`Context`]`<'_>) -> `[`Poll`]`<Result<(), `[`JoinError`]`>>`** /// /// Called when a task's [`JoinHandle`] is polled. /// /// This takes a `NonNull<Header>` rather than a [`TaskRef`], as it does not /// consume a ref count. The second [`NonNull`] is an out-pointer to which the /// task's output will be written if the task has completed. The caller is /// responsible for /// ensuring that this points to a valid, if uninitialized, memory location /// for a `F::Output`. /// /// This method returns [`Poll::Ready`]`(Ok(()))` when the task has joined, /// [`Poll::Ready`]`(Err(`[`JoinError`]`))` if the task has been cancelled, or /// [`Poll::Pending`] when the task is still running. /// /// * **`unsafe fn `[`deallocate`]`(`[`NonNull`]`<`[`Header`]`>)`** /// /// Called when a task's final [`TaskRef`] is dropped and the task is ready to /// be deallocated. 
/// /// This does not take a [`TaskRef`], as dropping a [`TaskRef`] decrements the /// reference count, and the final `TaskRef` has already been dropped. /// /// [scheduler]: crate::scheduler::Schedule /// [task storage]: Storage /// [dynamic dispatch]: https://en.wikipedia.org/wiki/Dynamic_dispatch /// [vtable]: https://en.wikipedia.org/wiki/Virtual_method_table /// [`clone`]: core::task::RawWakerVTable#clone /// [`wake`]: core::task::RawWakerVTable#wake /// [`wake_by_ref`]: core::task::RawWakerVTable#wake_by_ref /// [`drop`]: core::task::RawWakerVTable#drop /// [`poll`]: Task::poll /// [`poll_join`]: Task::poll_join /// [`deallocate`]: Task::deallocate struct Vtable { /// Poll the future, returning a [`PollResult`] that indicates what the /// scheduler should do with the polled task. poll: unsafe fn(NonNull<Header>) -> PollResult, /// Poll the task's `JoinHandle` for completion, storing the output at the /// provided [`NonNull`] pointer if the task has completed. /// /// If the task has not completed, the [`Waker`] from the provided /// [`Context`] is registered to be woken when the task completes. // Splitting this up into type aliases just makes it *harder* to understand // IMO... #[allow(clippy::type_complexity)] poll_join: unsafe fn( NonNull<Header>, NonNull<()>, &mut Context<'_>, ) -> Poll<Result<(), JoinError<()>>>, /// Drops the task and deallocates its memory. deallocate: unsafe fn(NonNull<Header>), /// The `wake_by_ref` function from the task's [`RawWakerVTable`]. /// /// This is duplicated here as it's used to wake canceled tasks when a task /// is canceled by a [`TaskRef`] or [`JoinHandle`]. wake_by_ref: unsafe fn(*const ()), } // === impl Task === macro_rules! 
trace_waker_op { ($ptr:expr, $method: ident) => { trace_waker_op!($ptr, $method, op: $method) }; ($ptr:expr, $method: ident, op: $op:ident) => { #[cfg(any(feature = "tracing-01", loom))] tracing_01::trace!( target: "runtime::waker", { task.id = (*$ptr).span().tracing_01_id(), task.addr = ?$ptr, task.tid = (*$ptr).header.id.as_u64(), op = concat!("waker.", stringify!($op)), }, concat!("Task::", stringify!($method)), ); #[cfg(not(any(feature = "tracing-01", loom)))] trace!( target: "runtime::waker", { task.addr = ?$ptr, task.tid = (*$ptr).header.id.as_u64(), op = concat!("waker.", stringify!($op)), }, concat!("Task::", stringify!($method)), ); }; } impl<S, F, STO> Task<S, F, STO> where F: Future, { #[inline] fn header(&self) -> &Header { &self.schedulable.header } #[inline] fn state(&self) -> &StateCell { &self.header().state } #[inline] #[cfg(any(feature = "tracing-01", feature = "tracing-02", test))] fn span(&self) -> &trace::Span { &self.header().span } } impl<STO> Task<Stub, Stub, STO> where STO: Storage<Stub, Stub>, { /// The stub task's vtable is mostly nops, as it should never be polled, /// joined, or woken. const HEAP_STUB_VTABLE: Vtable = Vtable { poll: _maitake_header_nop, poll_join: _maitake_header_nop_poll_join, // Heap allocated stub tasks *will* need to be deallocated, since the // scheduler will deallocate its stub task if it's dropped. deallocate: Self::deallocate, wake_by_ref: _maitake_header_nop_wake_by_ref, }; loom_const_fn! { /// Create a new stub task. 
pub(crate) fn new_stub() -> Self { Task { schedulable: Schedulable { header: Header { run_queue: mpsc_queue::Links::new(), vtable: &Self::HEAP_STUB_VTABLE, state: StateCell::new(), id: TaskId::stub(), span: crate::trace::Span::none(), #[cfg(debug_assertions)] scheduler_type: None, }, scheduler: UnsafeCell::new(Some(Stub)), }, inner: UnsafeCell::new(Cell::Pending(Stub)), join_waker: UnsafeCell::new(CheckedMaybeUninit::uninit()), storage: PhantomData, } } } } impl<S, F, STO> Task<S, F, STO> where S: Schedule + 'static, F: Future, STO: Storage<S, F>, { const TASK_VTABLE: Vtable = Vtable { poll: Self::poll, poll_join: Self::poll_join, deallocate: Self::deallocate, wake_by_ref: Schedulable::<S>::wake_by_ref, }; /// Create a new (non-heap-allocated) Task. /// /// This needs to be heap allocated using an implementor of /// the [`Storage`] trait to be used with the scheduler. /// /// [`Storage`]: crate::task::Storage pub fn new(future: F) -> Self { Self { schedulable: Schedulable { header: Header { run_queue: mpsc_queue::Links::new(), vtable: &Self::TASK_VTABLE, state: StateCell::new(), id: TaskId::next(), span: crate::trace::Span::none(), #[cfg(debug_assertions)] scheduler_type: Some(TypeId::of::<S>()), }, scheduler: UnsafeCell::new(None), }, inner: UnsafeCell::new(Cell::Pending(future)), join_waker: UnsafeCell::new(CheckedMaybeUninit::uninit()), storage: PhantomData, } } /// Returns a [`TaskId`] that uniquely identifies this task. /// /// The returned ID does *not* increment the task's reference count, and may /// persist even after the task it identifies has completed and been /// deallocated. 
#[inline] #[must_use] pub fn id(&self) -> TaskId { self.header().id } pub(crate) fn bind(&mut self, scheduler: S) { self.schedulable.scheduler.with_mut(|current| unsafe { *current = Some(scheduler); }); } unsafe fn poll(ptr: NonNull<Header>) -> PollResult { trace!( task.addr = ?ptr, task.output = %type_name::<<F>::Output>(), task.tid = ptr.as_ref().id.as_u64(), "Task::poll" ); let mut this = ptr.cast::<Self>(); test_debug!(task = ?fmt::alt(this.as_ref())); // try to transition the task to the polling state let state = &this.as_ref().state(); match test_dbg!(state.start_poll()) { // transitioned successfully! StartPollAction::Poll => {} // cancel culture has gone too far! StartPollAction::Canceled { wake_join_waker } => { trace!(task.addr = ?ptr, wake_join_waker, "task canceled!"); if wake_join_waker { this.as_ref().wake_join_waker(); return PollResult::ReadyJoined; } else { return PollResult::Ready; } } // can't poll this task for some reason... StartPollAction::CantPoll => return PollResult::Ready, }; // wrap the waker in `ManuallyDrop` because we're converting it from an // existing task ref, rather than incrementing the task ref count. if // this waker is consumed during the poll, we don't want to decrement // its ref count when the poll ends. let waker = { let raw = Schedulable::<S>::raw_waker(this.as_ptr().cast()); mem::ManuallyDrop::new(Waker::from_raw(raw)) }; // actually poll the task let poll = { let cx = Context::from_waker(&waker); let pin = Pin::new_unchecked(this.as_mut()); pin.poll_inner(cx) }; // post-poll state transition let result = test_dbg!(state.end_poll(poll.is_ready())); // if the task is ready and has a `JoinHandle` to wake, wake the join // waker now. if result == PollResult::ReadyJoined { this.as_ref().wake_join_waker() } result } /// Deallocates the task pointed to by `ptr`. /// /// This is a type-erased function called through the task's [`Vtable`]. 
/// /// # Safety /// /// - `ptr` must point to the [`Header`] of a task of type `Self` (i.e. the /// pointed header must have the same `S`, `F`, and `STO` type parameters /// as `Self`) /// - the pointed task must have zero active references. unsafe fn deallocate(ptr: NonNull<Header>) { trace!( task.addr = ?ptr, task.output = %type_name::<<F>::Output>(), task.tid = ptr.as_ref().id.as_u64(), "Task::deallocate" ); let this = ptr.cast::<Self>(); debug_assert_eq!( ptr.as_ref().state.load(Ordering::Acquire).ref_count(), 0, "a task may not be deallocated if its ref count is greater than zero!" ); drop(STO::from_raw(this)); } /// Poll to join the task pointed to by `ptr`, taking its output if it has /// completed. /// /// If the task has completed, this method returns [`Poll::Ready`], and the /// task's output is stored at the memory location pointed to by `outptr`. /// This function is called by [`JoinHandle`]s o poll the task they /// correspond to. /// /// This is a type-erased function called through the task's [`Vtable`]. /// /// # Safety /// /// - `ptr` must point to the [`Header`] of a task of type `Self` (i.e. the /// pointed header must have the same `S`, `F`, and `STO` type parameters /// as `Self`). /// - `outptr` must point to a valid `MaybeUninit<F::Output>`. unsafe fn poll_join( ptr: NonNull<Header>, outptr: NonNull<()>, cx: &mut Context<'_>, ) -> Poll<Result<(), JoinError<()>>> { let task = ptr.cast::<Self>().as_ref(); trace!( task.addr = ?ptr, task.output = %type_name::<<F>::Output>(), task.tid = task.id().as_u64(), "Task::poll_join" ); match test_dbg!(task.state().try_join()) { JoinAction::Canceled { completed } => { // if the task has completed before it was canceled, also try to // read the output, so that it can be returned in the `JoinError`. if completed { unsafe { // safety: if the state transition returned `Canceled` // with `completed` set, this indicates that we have // exclusive permission to take the output. 
task.take_output(outptr); } } return JoinError::canceled(completed, task.id()); } JoinAction::TakeOutput => unsafe { // safety: if the state transition returns // `JoinAction::TakeOutput`, this indicates that we have // exclusive permission to read the task output. task.take_output(outptr); return Poll::Ready(Ok(())); }, JoinAction::Register => { task.join_waker.with_mut(|waker| unsafe { // safety: we now have exclusive permission to write to the // join waker. (*waker).write(cx.waker().clone()); }) } JoinAction::Reregister => { task.join_waker.with_mut(|waker| unsafe { // safety: we now have exclusive permission to write to the // join waker. let waker = (*waker).assume_init_mut(); let my_waker = cx.waker(); if !waker.will_wake(my_waker) { *waker = my_waker.clone(); } }); } } task.state().set_join_waker_registered(); Poll::Pending } fn poll_inner(&self, mut cx: Context<'_>) -> Poll<()> { #[cfg(any(feature = "tracing-01", feature = "tracing-02", test))] let _span = self.span().enter(); self.inner.with_mut(|cell| { let cell = unsafe { &mut *cell }; let poll = match cell { Cell::Pending(future) => unsafe { Pin::new_unchecked(future).poll(&mut cx) }, _ => unreachable!("tried to poll a completed future!"), }; match poll { Poll::Ready(ready) => { *cell = Cell::Ready(ready); Poll::Ready(()) } Poll::Pending => Poll::Pending, } }) } /// Wakes the task's [`JoinHandle`], if it has one. /// /// # Safety /// /// - The caller must have exclusive access to the task's `JoinWaker`. This /// is ensured by the task's state management. unsafe fn wake_join_waker(&self) { self.join_waker.with_mut(|join_waker| unsafe { let join_waker = (*join_waker).assume_init_read(); test_debug!(?join_waker, "waking"); join_waker.wake(); }) } /// Takes the task's output, storing it at the memory location pointed to by /// `outptr`. /// /// This function panics if the task has not completed (i.e., its `Cell` /// must be in the [`Cell::Ready`] state). 
/// /// # Safety /// /// - `outptr` *must* point to a `MaybeUninit<F::Output>`! /// - The the caller must have exclusive access to `self.inner`. unsafe fn take_output(&self, outptr: NonNull<()>) { self.inner.with_mut(|cell| { match mem::replace(&mut *cell, Cell::Joined) { Cell::Ready(output) => { // safety: the caller is responsible for ensuring that this // points to a `MaybeUninit<F::Output>`. let outptr = outptr.cast::<mem::MaybeUninit<F::Output>>().as_mut(); // that's right, it goes in the `NonNull<()>` hole! outptr.write(output) }, state => unreachable!("attempted to take join output on a task that has not completed! task: {self:?}; state: {state:?}"), } }); } } unsafe impl<S, F, STO> Send for Task<S, F, STO> where S: Send, F: Future + Send, { } unsafe impl<S, F, STO> Sync for Task<S, F, STO> where S: Sync, F: Future + Sync, { } impl<S, F, STO> fmt::Debug for Task<S, F, STO> where F: Future, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let Self { schedulable: Schedulable { header, scheduler: _, }, inner: _, join_waker: _, storage: _, } = self; f.debug_struct("Task") .field("header", header) .field("inner", &format_args!("UnsafeCell(<{}>)", type_name::<F>())) .field("join_waker", &format_args!("UnsafeCell(<Waker>)")) .field("scheduler", &fmt::display(type_name::<S>())) .field("storage", &fmt::display(type_name::<STO>())) .finish() } } impl<S, F, STO> Drop for Task<S, F, STO> where F: Future, { fn drop(&mut self) { test_debug!(task.tid = self.header().id.as_u64(), "Task::drop"); // if there's a join waker, ensure that its destructor runs when the // task is dropped. // NOTE: this *should* never happen; we don't ever expect to deallocate // a task while it still has a `JoinHandle`, since the `JoinHandle` // holds a task ref. However, let's make sure we don't leak another task // in case something weird happens, I guess... 
if self.header().state.join_waker_needs_drop() { self.join_waker.with_mut(|waker| unsafe { // safety: we now have exclusive permission to write to the // join waker. (*waker).assume_init_drop(); }); } } } // === impl Schedulable === impl<S: Schedule> Schedulable<S> { /// The task's [`Waker`] vtable. /// /// This belongs to the `Schedulable` type rather than the [`Task`] type, /// because the [`Waker`] vtable methods need only be monomorphized over the /// `S`-typed [scheduler], and not over the task's `F`-typed [`Future`] or /// the `STO`-typed [`Storage`]. /// /// [scheduler]: crate::scheduler::Schedule const WAKER_VTABLE: RawWakerVTable = RawWakerVTable::new( Self::clone_waker, Self::wake_by_val, Self::wake_by_ref, Self::drop_waker, ); #[inline(always)] unsafe fn schedule(this: TaskRef) { this.0.cast::<Self>().as_ref().scheduler.with(|current| { (*current) .as_ref() .expect("cannot schedule a task that has not been bound to a scheduler!") .schedule(this) }) } #[inline] unsafe fn drop_ref(this: NonNull<Self>) { trace!( task.addr = ?this, task.tid = this.as_ref().header.id.as_u64(), "Schedulable::drop_ref" ); if !this.as_ref().state().drop_ref() { return; } let deallocate = this.as_ref().header.vtable.deallocate; deallocate(this.cast::<Header>()) } fn raw_waker(this: *const Self) -> RawWaker { RawWaker::new(this as *const (), &Self::WAKER_VTABLE) } #[inline(always)] fn state(&self) -> &StateCell { &self.header.state } #[inline(always)] #[cfg(any(feature = "tracing-01", loom))] fn span(&self) -> &trace::Span { &self.header.span } // === Waker vtable methods === unsafe fn wake_by_val(ptr: *const ()) { let ptr = ptr as *const Self; trace_waker_op!(ptr, wake_by_val, op: wake); let this = non_null(ptr as *mut Self); match test_dbg!(this.as_ref().state().wake_by_val()) { OrDrop::Drop => Self::drop_ref(this), OrDrop::Action(ScheduleAction::Enqueue) => { // the task should be enqueued. 
// // in the case that the task is enqueued, the state // transition does *not* decrement the reference count. this is // in order to avoid dropping the task while it is being // scheduled. one reference is consumed by enqueuing the task... Self::schedule(TaskRef(this.cast::<Header>())); // now that the task has been enqueued, decrement the reference // count to drop the waker that performed the `wake_by_val`. Self::drop_ref(this); } OrDrop::Action(ScheduleAction::None) => {} } } unsafe fn wake_by_ref(ptr: *const ()) { let ptr = ptr as *const Self; trace_waker_op!(ptr, wake_by_ref); let this = non_null(ptr as *mut ()).cast::<Self>(); if test_dbg!(this.as_ref().state().wake_by_ref()) == ScheduleAction::Enqueue { Self::schedule(TaskRef(this.cast::<Header>())); } } unsafe fn clone_waker(ptr: *const ()) -> RawWaker { let this = ptr as *const Self; trace_waker_op!(this, clone_waker, op: clone); (*this).header.state.clone_ref(); Self::raw_waker(this) } unsafe fn drop_waker(ptr: *const ()) { let ptr = ptr as *const Self; trace_waker_op!(ptr, drop_waker, op: drop); let this = ptr as *mut _; Self::drop_ref(non_null(this)) } } impl<S> fmt::Debug for Schedulable<S> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let Self { header, scheduler: _, } = self; f.debug_struct("Schedulable") .field("header", header) .field("scheduler", &fmt::display(type_name::<S>())) .finish() } } // === impl TaskRef === impl TaskRef { pub(crate) const NO_BUILDER: &'static Settings<'static> = &Settings::new(); /// Returns a [`TaskId`] that uniquely identifies this task. /// /// The returned ID does *not* increment the task's reference count, and may /// persist even after the task it identifies has completed and been /// deallocated. #[inline] #[must_use] pub fn id(&self) -> TaskId { self.header().id } /// Forcibly cancel the task. /// /// Canceling a task sets a flag indicating that it has been canceled and /// should terminate. 
The next time a canceled task is polled by the /// scheduler, it will terminate instead of polling the inner [`Future`]. If /// the task has a [`JoinHandle`], that [`JoinHandle`] will complete with a /// [`JoinError`]. The task then will be deallocated once all /// [`JoinHandle`]s and [`TaskRef`]s referencing it have been dropped. /// /// This method returns `true` if the task was canceled successfully, and /// `false` if the task could not be canceled (i.e., it has already completed, /// has already been canceled, cancel culture has gone TOO FAR, et cetera). pub fn cancel(&self) -> bool { // try to set the canceled bit. let canceled = self.state().cancel(); // if the task was successfully canceled, wake it so that it can clean // up after itself. if canceled { test_debug!("woke canceled task"); self.wake_by_ref(); } canceled } /// Returns `true` if this task has completed. /// /// Tasks are considered completed when the spawned [`Future`] has returned /// [`Poll::Ready`], or if the task has been canceled by the [`cancel()`] /// method. /// /// **Note**: This method can return `false` after [`cancel()`] has /// been called. This is because calling `cancel` *begins* the process of /// cancelling a task. The task is not considered canceled until it has been /// polled by the scheduler after calling [`cancel()`]. /// /// [`cancel()`]: Self::cancel #[inline] #[must_use] pub fn is_complete(&self) -> bool { self.state() .load(Ordering::Acquire) .get(state::State::COMPLETED) } /// Wakes the task. /// /// TODO(eliza): would this be better if we just added an `Into<Waker>` impl /// for `TaskRef` or something? Should this be a public API? pub(crate) fn wake_by_ref(&self) { test_debug!(?self, "TaskRef::wake_by_ref"); let wake_by_ref = self.header().vtable.wake_by_ref; unsafe { wake_by_ref(self.0.as_ptr().cast::<()>()) } } /// Sets the task's `WOKEN` bit. /// /// This must be called when enqueueing a spawned task for the first time. 
pub(crate) fn set_woken(&self) { self.state().set_woken(); } #[track_caller] pub(crate) fn new_allocated<S, F, STO>( scheduler: S, task: STO::StoredTask, ) -> (Self, JoinHandle<F::Output>) where S: Schedule + 'static, F: Future, STO: Storage<S, F>, { let (task, join) = Self::build_allocated::<S, F, STO>(Self::NO_BUILDER, task); unsafe { task.bind_scheduler(scheduler) }; (task, join) } /// Returns a **non-owning** pointer to the referenced task's [`Header`]. /// /// This does **not** modify the task's ref count, the [`TaskRef`] on which /// this function is called still owns a reference. Therefore, this means /// the returned [`NonNull`] pointer **may not** outlive this [`TaskRef`]. /// /// # Safety /// /// The returned [`NonNull`] pointer is not guaranteed to be valid if it /// outlives the lifetime of this [`TaskRef`]. If this [`TaskRef`] is /// dropped, it *may* deallocate the task, and the [`NonNull`] pointer may /// dangle. /// /// **Do not** dereference the returned [`NonNull`] pointer unless at least /// one [`TaskRef`] referencing this task is known to exist! pub(crate) fn as_ptr(&self) -> NonNull<Header> { self.0 } /// Convert a [`NonNull`] pointer to a task's [`Header`] into a new `TaskRef` to /// that task, incrementing the reference count. pub(crate) fn clone_from_raw(ptr: NonNull<Header>) -> Self { let this = Self(ptr); this.state().clone_ref(); this } #[track_caller] pub(crate) fn build_allocated<S, F, STO>( builder: &Settings<'_>, task: STO::StoredTask, ) -> (Self, JoinHandle<F::Output>) where S: Schedule, F: Future, STO: Storage<S, F>, { #[allow(unused_mut)] let mut ptr = STO::into_raw(task); // attach the task span, if tracing is enabled. 
#[cfg(any(feature = "tracing-01", feature = "tracing-02", test))] { let loc = match builder.location { Some(ref loc) => loc, None => core::panic::Location::caller(), }; let header = &mut unsafe { ptr.as_mut() }.schedulable.header; let span = trace_span!( "runtime.spawn", kind = %builder.kind, // XXX(eliza): would be nice to not use emptystring here but // `tracing` 0.2 is missing `Option` value support :( task.name = builder.name.unwrap_or(""), task.tid = header.id.as_u64(), task.addr = ?ptr, task.output = %type_name::<F::Output>(), task.storage = %type_name::<STO>(), loc.file = loc.file(), loc.line = loc.line(), loc.col = loc.column(), ); header.span = span; trace!( task.name = builder.name.unwrap_or(""), task.addr = ?ptr, task.tid = header.id.as_u64(), task.kind = %builder.kind, task.spawn_location = %loc, "Task<..., Output = {}>::new", type_name::<F::Output>() ); } let ptr = ptr.cast::<Header>(); #[cfg(not(any(feature = "tracing-01", feature = "tracing-02", test)))] let _ = builder; let this = Self(ptr); let join_handle = unsafe { // Safety: it's fine to create a `JoinHandle` here, because we know // the task's actual output type. JoinHandle::from_task_ref(this.clone()) }; (this, join_handle) } pub(crate) fn poll(&self) -> PollResult { let poll_fn = self.header().vtable.poll; unsafe { poll_fn(self.0) } } pub(crate) unsafe fn bind_scheduler<S: Schedule + 'static>(&self, scheduler: S) { #[cfg(debug_assertions)] { if let Some(scheduler_type) = self.header().scheduler_type { assert_eq!( scheduler_type, TypeId::of::<S>(), "cannot bind {self:?} to a scheduler of type {}", type_name::<S>(), ); } } self.0 .cast::<Schedulable<S>>() .as_ref() .scheduler .with_mut(|current| *current = Some(scheduler)); } /// # Safety /// /// `T` *must* be the task's actual output type! 
unsafe fn poll_join<T>(&self, cx: &mut Context<'_>) -> Poll<Result<T, JoinError<T>>> { let poll_join_fn = self.header().vtable.poll_join; // NOTE: we can't use `CheckedMaybeUninit` here, since the vtable method // will cast this to a `MaybeUninit` and write to it; this would ignore // the initialized tracking bit. let mut slot = mem::MaybeUninit::<T>::uninit(); match test_dbg!(poll_join_fn( self.0, NonNull::from(&mut slot).cast::<()>(), cx )) { Poll::Ready(Ok(())) => { // if the poll function returned `Ok`, we get to take the // output! Poll::Ready(Ok(slot.assume_init_read())) } Poll::Ready(Err(e)) => { // if the task completed before being canceled, we can still // take its output. let output = if e.is_completed() { Some(slot.assume_init_read()) } else { None }; Poll::Ready(Err(e.with_output(output))) } Poll::Pending => Poll::Pending, } } #[inline] fn state(&self) -> &StateCell { &self.header().state } #[inline] fn header(&self) -> &Header { unsafe { self.0.as_ref() } } } impl fmt::Debug for TaskRef { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("TaskRef") .field("id", &self.id()) .field("addr", &self.0) .finish() } } impl fmt::Pointer for TaskRef { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Pointer::fmt(&self.0, f) } } impl Clone for TaskRef { #[inline] #[track_caller] fn clone(&self) -> Self { test_debug!( task.addr = ?self.0, task.tid = self.id().as_u64(), location = %core::panic::Location::caller(), "TaskRef::clone", ); self.state().clone_ref(); Self(self.0) } } impl Drop for TaskRef { #[inline] #[track_caller] fn drop(&mut self) { test_debug!( task.addr = ?self.0, task.tid = self.id().as_u64(), "TaskRef::drop", ); if !self.state().drop_ref() { return; } unsafe { Header::deallocate(self.0); } } } unsafe impl Send for TaskRef {} unsafe impl Sync for TaskRef {} // === impl Header === // See https://github.com/rust-lang/rust/issues/97708 for why // this is necessary #[no_mangle] unsafe fn 
_maitake_header_nop(_ptr: NonNull<Header>) -> PollResult { debug_assert!(_ptr.as_ref().id.is_stub()); #[cfg(debug_assertions)] unreachable!("stub task ({_ptr:?}) should never be polled!"); #[cfg(not(debug_assertions))] PollResult::Pending } // See https://github.com/rust-lang/rust/issues/97708 for why // this is necessary #[no_mangle] unsafe fn _maitake_header_nop_deallocate(ptr: NonNull<Header>) { debug_assert!(ptr.as_ref().id.is_stub()); unreachable!("stub task ({ptr:p}) should never be deallocated!"); } // See https://github.com/rust-lang/rust/issues/97708 for why // this is necessary #[no_mangle] unsafe fn _maitake_header_nop_poll_join( _ptr: NonNull<Header>, _: NonNull<()>, _: &mut Context<'_>, ) -> Poll<Result<(), JoinError<()>>> { debug_assert!(_ptr.as_ref().id.is_stub()); #[cfg(debug_assertions)] unreachable!("stub task ({_ptr:?}) should never be polled!"); #[cfg(not(debug_assertions))] Poll::Ready(Err(JoinError::stub())) } // See https://github.com/rust-lang/rust/issues/97708 for why // this is necessary #[no_mangle] unsafe fn _maitake_header_nop_wake_by_ref(_ptr: *const ()) { #[cfg(debug_assertions)] unreachable!("stub task ({_ptr:?}) should never be woken!"); } impl Header { const STATIC_STUB_VTABLE: Vtable = Vtable { poll: _maitake_header_nop, poll_join: _maitake_header_nop_poll_join, deallocate: _maitake_header_nop_deallocate, wake_by_ref: _maitake_header_nop_wake_by_ref, }; loom_const_fn! 
{ pub(crate) fn new_static_stub() -> Self { Self { run_queue: mpsc_queue::Links::new_stub(), state: StateCell::new(), vtable: &Self::STATIC_STUB_VTABLE, span: trace::Span::none(), id: TaskId::stub(), #[cfg(debug_assertions)] scheduler_type: None, } } } unsafe fn deallocate(this: NonNull<Self>) { #[cfg(debug_assertions)] { let refs = this .as_ref() .state .load(core::sync::atomic::Ordering::Acquire) .ref_count(); debug_assert_eq!(refs, 0, "tried to deallocate a task with references!"); } let deallocate = this.as_ref().vtable.deallocate; deallocate(this) } } /// # Safety /// /// A task must be pinned to be spawned. unsafe impl Linked<mpsc_queue::Links<Header>> for Header { type Handle = TaskRef; #[inline] fn into_ptr(task: Self::Handle) -> NonNull<Self> { let ptr = task.0; // converting a `TaskRef` into a pointer to enqueue it assigns ownership // of the ref count to the queue, so we don't want to run its `Drop` // impl. mem::forget(task); ptr } /// Convert a raw pointer to a `Handle`. /// /// # Safety /// /// This function is safe to call when: /// - It is valid to construct a `Handle` from a`raw pointer /// - The pointer points to a valid instance of `Self` (e.g. it does not /// dangle). #[inline] unsafe fn from_ptr(ptr: NonNull<Self>) -> Self::Handle { TaskRef(ptr) } /// Return the links of the node pointed to by `ptr`. /// /// # Safety /// /// This function is safe to call when: /// - It is valid to construct a `Handle` from a`raw pointer /// - The pointer points to a valid instance of `Self` (e.g. it does not /// dangle). #[inline] unsafe fn links(target: NonNull<Self>) -> NonNull<mpsc_queue::Links<Self>> { let target = target.as_ptr(); // Safety: using `ptr::addr_of_mut!` avoids creating a temporary // reference, which stacked borrows dislikes. 
let links = ptr::addr_of_mut!((*target).run_queue); // Safety: it's fine to use `new_unchecked` here; if the pointer that we // offset to the `links` field is not null (which it shouldn't be, as we // received it as a `NonNull`), the offset pointer should therefore also // not be null. NonNull::new_unchecked(links) } } unsafe impl Send for Header {} unsafe impl Sync for Header {} // === impl Cell === impl<F: Future> fmt::Debug for Cell<F> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Cell::Pending(_) => write!(f, "Cell::Pending({})", type_name::<F>()), Cell::Ready(_) => write!(f, "Cell::Ready({})", type_name::<F::Output>()), Cell::Joined => f.pad("Cell::Joined"), } } } // === impl Vtable === impl fmt::Debug for Vtable { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let &Self { poll, poll_join, deallocate, wake_by_ref, } = self; f.debug_struct("Vtable") .field("poll", &fmt::ptr(poll)) .field("poll_join", &fmt::ptr(poll_join as *const ())) .field("deallocate", &fmt::ptr(deallocate)) .field("wake_by_ref", &fmt::ptr(wake_by_ref)) .finish() } } // Additional types and capabilities only available with the "alloc" // feature active feature! { #![feature = "alloc"] use alloc::boxed::Box; impl TaskRef { #[track_caller] pub(crate) fn new<S, F>(scheduler: S, future: F) -> (Self, JoinHandle<F::Output>) where S: Schedule + 'static, F: Future + 'static { let mut task = Box::new(Task::<S, F, BoxStorage>::new(future)); task.bind(scheduler); Self::build_allocated::<S, F, BoxStorage>(Self::NO_BUILDER, task) } } } #[derive(Copy, Clone, Debug)] pub(crate) struct Stub; impl Future for Stub { type Output = (); fn poll(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Self::Output> { unreachable!("the stub task should never be polled!") } } impl Schedule for Stub { fn schedule(&self, _: TaskRef) { unimplemented!("stub task should never be woken!") } fn current_task(&self) -> Option<TaskRef> { None } }
true
810dbb4b9b8052816910b6735174fcbe8b410a50
Rust
robatipoor/cbs
/src/user_group.rs
UTF-8
1,335
2.9375
3
[ "Apache-2.0", "MIT" ]
permissive
use log::*; use users; #[derive(Debug)] pub struct UserGroup { user: Option<String>, group: Option<String>, } impl Default for UserGroup { /// Returns current UserGroup fn default() -> UserGroup { let user = users::get_current_username() .or_else(|| { error!("unable get user name "); None }) .and_then(|u| { u.to_str() .or_else(|| { error!("unable convert osstring to str user name"); None }) .and_then(|s| Some(s.to_owned())) }); let group = users::get_current_groupname() .or_else(|| { error!("unable get group name "); None }) .and_then(|u| { u.to_str() .or_else(|| { error!("unable convert osstring to str group name"); None }) .and_then(|s| Some(s.to_owned())) }); UserGroup { user, group } } } impl UserGroup { pub fn get_user(&self) -> Option<String> { self.user.clone() } pub fn get_group(&self) -> Option<String> { self.group.clone() } }
true
cef0dc9e0e085e0c5d4b356e31671f102e7522ba
Rust
Devolutions/siquery-rs
/siquery/src/common/services.rs
UTF-8
5,145
3.15625
3
[ "MIT", "Apache-2.0" ]
permissive
use crate::tables::{EtcServices,EtcServicesIface}; use regex::Regex; use std::str::FromStr; use std::borrow::Borrow; cfg_if! { if #[cfg(target_os = "linux")] { use crate::linux::EtcServicesReader; } else if #[cfg(target_os = "macos")] { use crate::macos::EtcServicesReader; } else if #[cfg(target_os = "windows")] { use crate::windows::EtcServicesReader; } } lazy_static! { // Regex filter: remove everything from "#" till the line break static ref SERVICES_FILE_REGEX: Regex = Regex::new(r"(?m)^([^#]*)").unwrap(); // Regex filter to extract comments: keep everything following a "#" char static ref PROTOCOLS_FILE_REGEX_COMMENTS: Regex = Regex::new(r"#\s*(.*)").unwrap(); } impl EtcServices { pub fn new() -> EtcServices { EtcServices { name: String::new(), port: 0, protocol: String::new(), aliases: String::new(), comment: String::new(), } } pub fn get_specific_ex(reader: &dyn EtcServicesIface) -> Vec<EtcServices> { let mut services: Vec<EtcServices> = Vec::new(); for line in reader .get_services_file() .unwrap_or_else(|| "".to_string()) .lines() { let mut etc_services = EtcServices::new(); let captures = SERVICES_FILE_REGEX.captures(&line); if let Some(cap) = captures { if let Some(services_group) = cap.get(0) { // Omitting empty outputs from regex if services_group.as_str().is_empty() { continue; } let v: Vec<_> = services_group .as_str() .trim() .split_whitespace() .collect(); // Check entry for validity // <service name> <port number>/<protocol> [aliases...] 
[#<comment>] if v.len() < 2 { continue; } // Split <port number>/<protocol> let v_1: Vec<_> = v[1].split('/').collect(); // Check <port number>/<protocol> format validity if v_1.len() < 2 { continue; } etc_services.name = v[0].to_string(); etc_services.port = u16::from_str(v_1[0]).unwrap_or(0); etc_services.protocol = v_1[1].to_string(); // Get aliases if they exist if let Some(_alias) = v.get(2) { etc_services.aliases = v[2..].join(" "); }; }; // Nested to extract relevant comments let comments = PROTOCOLS_FILE_REGEX_COMMENTS.captures(&line); if let Some(cap) = comments { if let Some(captured_comments) = cap.get(1) { // Omitting empty outputs from regex if captured_comments.as_str().is_empty() { continue; } etc_services.comment = captured_comments.as_str().trim().to_owned(); }; } } services.push(etc_services); } services } pub fn get_specific() -> Vec<EtcServices> { let reader: Box<dyn EtcServicesIface> = Box::new(EtcServicesReader{}); let out = EtcServices::get_specific_ex(reader.borrow()); out } } #[cfg(test)] mod tests { use super::*; pub struct EtcServicesTest {} impl EtcServicesIface for EtcServicesTest { fn get_services_file(&self) -> Option<String> { Some(String::from(include_str!("../../test_data/services.txt"))) } } #[test] fn test_etc_services() { let reader: Box<dyn EtcServicesIface> = Box::new(EtcServicesTest {}); let etc_services = EtcServices::get_specific_ex(reader.borrow()); assert_eq!(etc_services.get(0).unwrap().name, "echo"); assert_eq!(etc_services.get(0).unwrap().port, 7); assert_eq!(etc_services.get(0).unwrap().protocol, "tcp"); assert_eq!(etc_services.get(0).unwrap().aliases, ""); assert_eq!(etc_services.get(0).unwrap().comment, ""); assert_eq!(etc_services.get(2).unwrap().name, "discard"); assert_eq!(etc_services.get(2).unwrap().port, 9); assert_eq!(etc_services.get(2).unwrap().protocol, "tcp"); assert_eq!(etc_services.get(2).unwrap().aliases, "sink null"); assert_eq!(etc_services.get(2).unwrap().comment, ""); 
assert_eq!(etc_services.get(12).unwrap().name, "ftp-data"); assert_eq!(etc_services.get(12).unwrap().port, 20); assert_eq!(etc_services.get(12).unwrap().protocol, "tcp"); assert_eq!(etc_services.get(12).unwrap().aliases, ""); assert_eq!(etc_services.get(12).unwrap().comment, "FTP, data"); assert_eq!(etc_services.len(), 15); } }
true
a9cf68e96b5a87863025f41ccc702fb8dca55e41
Rust
bitex-la/tiny_ram_db
/src/lib.rs
UTF-8
4,277
3
3
[]
no_license
/* TODO: * - Make db available to all Records. * - Serialize and deserialize db from jsonapi. */ #[macro_use] extern crate error_chain; #[macro_use] extern crate serde_derive; extern crate serde; extern crate serde_json; pub extern crate hashbrown; use std::cmp::Eq; use hashbrown::{HashMap, HashSet}; use std::hash::{Hash, Hasher}; use std::sync::{Arc, RwLock}; pub mod errors; use errors::*; #[derive(Debug, Serialize, Deserialize)] pub struct Record<T> { pub id: usize, pub data: Arc<T>, } impl<T> Clone for Record<T> { fn clone(&self) -> Self { Self { id: self.id, data: Arc::clone(&self.data), } } } impl<T> PartialEq for Record<T> { fn eq(&self, other: &Record<T>) -> bool { self.id == other.id } } impl<T> Eq for Record<T> {} impl<T> Hash for Record<T> { fn hash<H: Hasher>(&self, state: &mut H) { self.id.hash(state); } } pub type HashMapRecord<T> = HashMap<usize, Record<T>>; #[derive(Debug, Serialize, Deserialize)] pub struct Table<T, Indexes> { pub data: Arc<RwLock<HashMapRecord<T>>>, pub indexes: Arc<RwLock<Indexes>>, last_id: usize } impl<T, Indexes: Indexer<Item = T>> Clone for Table<T, Indexes> { fn clone(&self) -> Self { Self { data: Arc::clone(&self.data), indexes: Arc::clone(&self.indexes), last_id: self.last_id } } } impl<T, Indexes: Indexer<Item = T>> Table<T, Indexes> { pub fn new() -> Self { Self { data: Arc::new(RwLock::new(HashMap::new())), indexes: Default::default(), last_id: 0 } } pub fn find(&self, id: usize) -> Result<Record<T>> { match self.data.read()?.get(&id) { Some(entry) => Ok(entry.clone()), _ => bail!(ErrorKind::RecordNotFound("".into())), } } pub fn insert(&mut self, value: T) -> Result<Record<T>> { let mut table = self.data.write()?; self.last_id = self.last_id + 1; let record = Record { id: self.last_id, data: Arc::new(value), }; table.insert(self.last_id, record.clone()); self.indexes.write()?.index(&record)?; Ok(record) } } /* PlainTable is a bit of duplication, but makes the API clearer. 
* Once 'never' type is stable we can implement PlainTable as an alias * of Table with a no-op indexer */ #[derive(Debug, Serialize, Deserialize)] pub struct PlainTable<T> { pub data: Arc<RwLock<HashMapRecord<T>>>, last_id: usize } impl<T> PlainTable<T> { pub fn new() -> Self { Self { data: Arc::new(RwLock::new(HashMap::new())), last_id: 0 } } pub fn find(&self, id: usize) -> Result<Record<T>> { match self.data.read()?.get(&id) { Some(entry) => Ok(entry.clone()), _ => bail!(ErrorKind::RecordNotFound("".into())), } } pub fn insert(&mut self, value: T) -> Result<Record<T>> { let mut table = self.data.write()?; self.last_id = self.last_id + 1; let record = Record { id: self.last_id, data: Arc::new(value), }; table.insert(self.last_id, record.clone()); Ok(record) } } impl<T> Clone for PlainTable<T> { fn clone(&self) -> Self { Self { data: Arc::clone(&self.data), last_id: self.last_id } } } pub trait Indexer: Default { type Item; fn index(&mut self, _item: &Record<Self::Item>) -> Result<bool> { Ok(true) } } pub struct Index<K: Eq + Hash, V> { pub data: HashMap<K, HashSet<Record<V>>>, } impl<K: Eq + Hash, V> Default for Index<K, V> { fn default() -> Self { Self { data: HashMap::new(), } } } impl<K: Eq + Hash, V> Index<K, V> { pub fn insert(&mut self, k: K, record: Record<V>) -> Result<bool> { Ok(self.data.entry(k).or_insert(HashSet::new()).insert(record)) } pub fn get<F, A>(&self, k: &K, closure: F) -> Result<A> where F: FnOnce(&HashSet<Record<V>>) -> A, { Ok(match self.data.get(k) { Some(a) => closure(a), _ => closure(&HashSet::new()), }) } }
true
089a92807b09e5768e2d07156d7c3666d151bd5c
Rust
andyrsmith/learnRust
/src/pin_cracker.rs
UTF-8
2,763
4.15625
4
[]
no_license
///Takes a random number which it calls a pin, and then tells you how fast it will take to crack it. extern crate time; //look a struct struct Point { x: int, y: int, } enum OptionalInt { Value(int), Missing, } fn main(){ //i means integer let (code, five) = (111995i, 5i); mult_value(); a_struct(); let start = time::precise_time_s(); crack_pin(code, 6); let stop = time::precise_time_s(); print_something(code); let elapsed = stop - start; println!("Looks like it can be cracked in {} seconds ", elapsed); if_else_value(five); just_a_tuple(); enum_fun(); match_maker(); for_loop(); fun_with_strings(); this_vector(); } fn crack_pin(x: int, y: int){ let mut guess = 1i; while guess != x { guess = add_one(guess); } println!("Random number {}", y); } fn add_one(x: int) -> int { x + 1 } fn next_two(x: int) -> (int, int) { (x + 1i, x+2i) } fn print_something(code: int){ if code < 1000 { println!("Wow, that is pretty weak"); } else { println!("Weak, but not as weak"); } } fn mult_value() { let first_match = (1i,2i,3i); let second_match = (1i,2i,3i); if first_match == second_match { println!("A match!"); } else { println!("nope"); } } fn a_struct() { //let's assign values from a struct let point = Point {x: 1i, y: 6i}; //calling value from struct println!("The points are at {} and {}", point.x, point.y); } fn if_else_value(five: int) { //returns either 10 or 5 and stores it in variable y let y = if five < 10 { 10i } else { 5i }; println!("Look at this number {}", y); } fn enum_fun() { let real_value = Value(5); let fake_value = Missing; match real_value { Value(n) => println!("x is {:d}", n), Missing => println!("x is missing"), } match fake_value { Value(n) => println!("x ix {:d}", n), Missing => println!("x is missing"), } } fn just_a_tuple(){ let (thing1, thing2) = next_two(5i); println!("thing1, thing2 = {}, {}", thing1, thing2); } fn match_maker(){ let matchy = 15i; match matchy { 1 => println!("1"), 2 => println!("2"), 3 => println!("3"), 4 => println!("4"), 5 => 
println!("5"), _ => println!("no match"), } } fn for_loop(){ for x in range(0i, 10i) { println!("{:d}", x); } } fn fun_with_strings() { let mut s = "Hello".to_string(); let s1 = "Hello".to_string(); println!("{}", s); s.push_str(" world."); println!("{}", s); takes_slice(s1.as_slice()); } fn takes_slice(slice: &str) { println!("Got: {}", slice); } fn this_vector() { let mut nums = vec![1i, 2i, 7i]; nums.push(4i); let slice = nums.as_slice(); println!("Here is the slice {}", slice); for i in nums.iter() { println!("{}", i); } }
true
e1ec6f9e54ca3f0c7c8abf27ee7f517afc608ca6
Rust
phil-opp/redox-kernel
/src/arch/x86_64/interrupt/syscall.rs
UTF-8
1,729
2.640625
3
[ "MIT" ]
permissive
use arch::x86_64::pti; use syscall; #[naked] pub unsafe extern fn syscall() { #[inline(never)] unsafe fn inner(stack: &mut SyscallStack) -> usize { let rbp; asm!("" : "={rbp}"(rbp) : : : "intel", "volatile"); syscall::syscall(stack.rax, stack.rbx, stack.rcx, stack.rdx, stack.rsi, stack.rdi, rbp, stack) } // Push scratch registers asm!("push rax push rbx push rcx push rdx push rdi push rsi push r8 push r9 push r10 push r11 push fs mov r11, 0x18 mov fs, r11" : : : : "intel", "volatile"); // Get reference to stack variables let rsp: usize; asm!("" : "={rsp}"(rsp) : : : "intel", "volatile"); // Map kernel pti::map(); let a = inner(&mut *(rsp as *mut SyscallStack)); // Unmap kernel pti::unmap(); asm!("" : : "{rax}"(a) : : "intel", "volatile"); // Interrupt return asm!("pop fs pop r11 pop r10 pop r9 pop r8 pop rsi pop rdi pop rdx pop rcx pop rbx add rsp, 8 iretq" : : : : "intel", "volatile"); } #[allow(dead_code)] #[repr(packed)] pub struct SyscallStack { pub fs: usize, pub r11: usize, pub r10: usize, pub r9: usize, pub r8: usize, pub rsi: usize, pub rdi: usize, pub rdx: usize, pub rcx: usize, pub rbx: usize, pub rax: usize, pub rip: usize, pub cs: usize, pub rflags: usize, } #[naked] pub unsafe extern fn clone_ret() { asm!("pop rbp" : : : : "intel", "volatile"); asm!("" : : "{rax}"(0) : : "intel", "volatile"); }
true
8dd7482cd33b82b2f6d3276daa128e368e0e7896
Rust
rgardner/bsh-rs
/src/builtins/history.rs
UTF-8
1,879
3.265625
3
[ "MIT", "LicenseRef-scancode-unknown-license-reference", "Apache-2.0" ]
permissive
use crate::{ builtins::{self, prelude::*}, editor::Editor, }; pub struct History; impl builtins::BuiltinCommand for History { const NAME: &'static str = builtins::HISTORY_NAME; const HELP: &'static str = "\ history: history [-c] [-s size] [n] Display the history list with line numbers. Argument of N says to list only the last N lines. The `-c' option causes the history list to be cleared by deleting all of the entries. The `-s' option sets the size of the history list."; fn run<T: AsRef<str>>(shell: &mut dyn Shell, args: &[T], stdout: &mut dyn Write) -> Result<()> { if args.is_empty() { write!(stdout, "{}", shell.editor()).context(ErrorKind::Io)?; return Ok(()); } match args.first().unwrap().as_ref() { "-c" => shell.editor_mut().clear_history(), "-s" => { if let Some(s) = args.get(2) { if let Ok(n) = s.as_ref().parse::<usize>() { shell.editor_mut().set_history_max_size(n); } } } s => match s.parse::<usize>() { Ok(n) => writeln!(stdout, "{}", history_display(shell.editor(), n)) .context(ErrorKind::Io)?, Err(_) => { let msg = format!("history: {}: nonnegative numeric argument required", s); return Err(Error::builtin_command(msg, 1)); } }, } Ok(()) } } pub fn history_display(state: &Editor, n_last_entries: usize) -> String { let num_to_skip = state.get_history_count().saturating_sub(n_last_entries); state .enumerate_history_entries() .skip(num_to_skip) .map(|(i, e)| format!("\t{}\t{}", i + 1, e)) .collect::<Vec<String>>() .join("\n") }
true
a59f94df228e312274836a0b165c7f9a494e3801
Rust
mathiznogoud/wasabi
/lib/wasm/src/binary.rs
UTF-8
27,492
2.6875
3
[ "MIT" ]
permissive
use std::io; use std::marker::PhantomData; use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt}; use ordered_float::OrderedFloat; use rayon::prelude::*; use wasabi_leb128::{ReadLeb128, WriteLeb128}; use crate::{BlockType, Idx, Limits, RawCustomSection, ValType}; use crate::error::{AddErrInfo, Error, ErrorKind, SetErrElem}; use crate::lowlevel::{CustomSection, Expr, Instr, Module, NameSection, NameSubSection, Parallel, Section, WithSize, ImportType, SectionOffset, Offsets}; /* Trait and impl for decoding/encoding between binary format (as per spec) and our own formats (see ast module) */ pub trait WasmBinary: Sized { fn decode<R: io::Read>(reader: &mut R, state: &mut DecodeState) -> Result<Self, Error>; fn encode<W: io::Write>(&self, writer: &mut W) -> io::Result<usize>; } /// "Global" state kept during decoding. Useful for error reporting (At which byte offset did /// parsing fail?) and mapping from our AST back to the binary (function index <-> code section offset). #[derive(Debug, Clone, Eq, PartialEq, Hash)] pub struct DecodeState { pub current_offset: usize, /// The vec of offsets is aligned with the vec of low-level sections, i.e., the index of this /// vec corresponds to the enumeration index of raw sections in the binary. section_offsets: Vec<usize>, /// The index of the vec corresponds to the index of the code element, which is the function /// index minus the number of important functions (which don't have a code/body). code_offsets: Vec<usize>, } impl DecodeState { pub fn new() -> DecodeState { DecodeState::with_offset(0) } pub fn with_offset(current_offset: usize) -> DecodeState { DecodeState { current_offset, section_offsets: Vec::new(), code_offsets: Vec::new(), } } /// Convert code offsets into an easier to understand mapping of function indices. 
pub fn into_offsets(self, module: &Module) -> Offsets { assert_eq!(self.section_offsets.len(), module.sections.len()); let sections = module.sections.iter() .map(std::mem::discriminant) .zip(self.section_offsets.into_iter()) .collect(); let imported_function_count = module.sections.iter() .filter_map(|section| if let Section::Import(WithSize(SectionOffset(imports))) = section { Some(imports) } else { None }) .flat_map(|imports| imports.iter() .filter(|import| if let ImportType::Function(_) = import.type_ { true } else { false })) .count(); let functions_code = self.code_offsets.into_iter() .enumerate() .map(|(code_idx, byte_offset)| (Idx::from(imported_function_count + code_idx), byte_offset)) .collect(); Offsets { sections, functions_code, } } } /* Primitive types */ impl WasmBinary for u8 { fn decode<R: io::Read>(reader: &mut R, state: &mut DecodeState) -> Result<Self, Error> { let byte = reader.read_u8().add_err_info::<u8>(state.current_offset)?; state.current_offset += 1; Ok(byte) } fn encode<W: io::Write>(&self, writer: &mut W) -> io::Result<usize> { writer.write_u8(*self)?; Ok(1) } } impl WasmBinary for u32 { fn decode<R: io::Read>(reader: &mut R, state: &mut DecodeState) -> Result<Self, Error> { let (value, bytes_read) = reader.read_leb128().add_err_info::<u32>(state.current_offset)?; state.current_offset += bytes_read; Ok(value) } fn encode<W: io::Write>(&self, writer: &mut W) -> io::Result<usize> { writer.write_leb128(*self) } } // The WebAssembly wasm32 spec only has 32-bit integer indices, so this usize implementation is // only for convenience when serializing Rust usize values (e.g., Rust array or vector indices). // You should always parse values with the u32 implementation since that ensures correct range. 
impl WasmBinary for usize { fn decode<R: io::Read>(_: &mut R, _: &mut DecodeState) -> Result<Self, Error> { unimplemented!("use u32 impl for parsing wasm32 indices") } fn encode<W: io::Write>(&self, writer: &mut W) -> io::Result<usize> { if *self > u32::max_value() as usize { // TODO Proper design would be an error type of its own for serialization, but that // would clutter the interface of all encode() method implementations. So for now // a custom io::Error is sufficient (since it is the only one). return Err(io::Error::new(io::ErrorKind::InvalidData, "wasm32 does not allow unsigned int (e.g., indices) larger than 32 bits")); } writer.write_leb128(*self) } } impl WasmBinary for i32 { fn decode<R: io::Read>(reader: &mut R, state: &mut DecodeState) -> Result<Self, Error> { let (value, bytes_read) = reader.read_leb128().add_err_info::<i32>(state.current_offset)?; state.current_offset += bytes_read; Ok(value) } fn encode<W: io::Write>(&self, writer: &mut W) -> io::Result<usize> { writer.write_leb128(*self) } } impl WasmBinary for i64 { fn decode<R: io::Read>(reader: &mut R, state: &mut DecodeState) -> Result<Self, Error> { let (value, bytes_read) = reader.read_leb128().add_err_info::<i64>(state.current_offset)?; state.current_offset += bytes_read; Ok(value) } fn encode<W: io::Write>(&self, writer: &mut W) -> io::Result<usize> { writer.write_leb128(*self) } } impl WasmBinary for OrderedFloat<f32> { fn decode<R: io::Read>(reader: &mut R, state: &mut DecodeState) -> Result<Self, Error> { let value = reader.read_f32::<LittleEndian>().add_err_info::<f32>(state.current_offset)?; state.current_offset += 4; Ok(value.into()) } fn encode<W: io::Write>(&self, writer: &mut W) -> io::Result<usize> { writer.write_f32::<LittleEndian>(self.into_inner())?; Ok(4) } } impl WasmBinary for OrderedFloat<f64> { fn decode<R: io::Read>(reader: &mut R, state: &mut DecodeState) -> Result<Self, Error> { let value = reader.read_f64::<LittleEndian>().add_err_info::<f64>(state.current_offset)?; 
state.current_offset += 8; Ok(value.into()) } fn encode<W: io::Write>(&self, writer: &mut W) -> io::Result<usize> { writer.write_f64::<LittleEndian>(self.into_inner())?; Ok(8) } } /* Generic "AST combinators" */ impl<T: WasmBinary> WasmBinary for WithSize<T> { fn decode<R: io::Read>(reader: &mut R, state: &mut DecodeState) -> Result<Self, Error> { // The expected size is only necessary to speed up parallel decoding. // In this (serial) case, we just use it for error checking. let offset_before_size = state.current_offset; let expected_size_bytes = u32::decode(reader, state).set_err_elem::<Self>()?; let offset_before_content = state.current_offset; let t = T::decode(reader, state)?; let actual_size_bytes = state.current_offset - offset_before_content; if actual_size_bytes != expected_size_bytes as usize { return Err(Error::new::<T>( offset_before_size, ErrorKind::Size { expected: expected_size_bytes, actual: actual_size_bytes, })); } Ok(WithSize(t)) } fn encode<W: io::Write>(&self, writer: &mut W) -> io::Result<usize> { // Write to an intermediate buffer first because we need to know the encoding size. let mut buf = Vec::new(); let encoded_size_bytes = self.0.encode(&mut buf)?; // Then write the size as LEB128 and copy all bytes from the intermediate buffer over. let mut bytes_written = encoded_size_bytes.encode(writer)?; writer.write_all(&buf)?; bytes_written += encoded_size_bytes; Ok(bytes_written) } } impl<T: WasmBinary> WasmBinary for SectionOffset<T> { fn decode<R: io::Read>(reader: &mut R, state: &mut DecodeState) -> Result<Self, Error> { // Remember at which offset this section contents (T) started. state.section_offsets.push(state.current_offset); T::decode(reader, state).map(SectionOffset) } fn encode<W: io::Write>(&self, writer: &mut W) -> io::Result<usize> { self.0.encode(writer) } } /// Do not blindly trust the decoded element_count for pre-allocating a vector, but instead limit /// the pre-allocation to some sensible size (e.g., 1 MB). 
/// Otherwise a (e.g., malicious) Wasm file could request very large amounts of memory just by /// having a large vec-size in the binary. /// (We got struck by plenty such out of memory errors when testing our Wasm parser with AFL. /// See tests/invalid/oom-large-vector-size/oom.wasm) fn limit_prealloc_capacity<T>(element_count: u32) -> usize { // Limit to 1 MB, which should be hit almost never for real (benign) Wasm binaries: // The Wasm vectors with the largest number of elements are most likely bodies of functions // (i.e., vectors of instructions), and a single function is likely not having that many // instructions. const PREALLOC_LIMIT_BYTES: usize = 1 << 20; // Vec::with_capacity() takes number of elements, not bytes; so divide bytes by element size. let element_limit = PREALLOC_LIMIT_BYTES / std::mem::size_of::<T>(); std::cmp::min(element_count as usize, element_limit) } /// Generic vectors of T. /// see https://webassembly.github.io/spec/core/binary/conventions.html#vectors impl<T: WasmBinary> WasmBinary for Vec<T> { fn decode<R: io::Read>(reader: &mut R, state: &mut DecodeState) -> Result<Self, Error> { let element_count = u32::decode(reader, state).set_err_elem::<Self>()?; let mut vec = Vec::with_capacity(limit_prealloc_capacity::<T>(element_count)); for _ in 0..element_count { vec.push(T::decode(reader, state)?); } Ok(vec) } fn encode<W: io::Write>(&self, writer: &mut W) -> io::Result<usize> { let mut bytes_written = self.len().encode(writer)?; for element in self { bytes_written += element.encode(writer)?; } Ok(bytes_written) } } // TODO This is no longer a generic wrapper for a Vec<WithSize<T>> construction // that can be parsed in parallel. It is now specific to Code section parsing. // So give it its own type, like ParallelCode /// Provide parallel decoding/encoding when explicitly requested by Parallel<...> marker struct. 
impl<T: WasmBinary + Send + Sync> WasmBinary for Parallel<Vec<WithSize<T>>> { fn decode<R: io::Read>(reader: &mut R, state: &mut DecodeState) -> Result<Self, Error> { // Treat the individual WithSize<T> parts as Vec<u8> (i.e., don't parse the content T just yet, // instead read each element first into a plain byte buffer (non-parallel, but hopefully fast). // Then decode the byte buffers in parallel to the actual T's. // NOTE This means error reporting is subtly different compared to when we use the serial // WithSize implementation: WithSize parses T first and then compares the actual size to // the expected size, whereas here we assume the size is correct and the error is generated // later during parallel parsing of the contents T (which could, e.g., yield an Eof). let element_count = u32::decode(reader, state).set_err_elem::<Self>()?; // Contains the offset of the size (for error reporting), the expected size, the offset of // of content T, and the (unparsed) bytes of contents. let mut elements: Vec<(usize, u32, usize, Vec<u8>)> = Vec::with_capacity( limit_prealloc_capacity::<(usize, u32, usize, Vec<u8>)>(element_count)); // Read each element into a buffer first (+ the information mentioned before). for _ in 0..element_count { let size_offset = state.current_offset; let content_size_bytes = u32::decode(reader, state).set_err_elem::<WithSize<T>>()?; let offset_before_content = state.current_offset; let mut buf = vec![0u8; content_size_bytes as usize]; reader.read_exact(buf.as_mut_slice()).add_err_info::<T>(offset_before_content)?; state.current_offset += content_size_bytes as usize; // Remember where each code offset started. state.code_offsets.push(offset_before_content); elements.push((size_offset, content_size_bytes, offset_before_content, buf)); } // Then, parallel decoding of each buffer to actual elements T. 
let decoded: Result<Vec<WithSize<T>>, Error> = elements.into_par_iter() .map(|(size_offset, expected_size_bytes, offset_before_content, buf)| -> Result<WithSize<T>, Error> { // Every individual code section parser gets its own state. This only works, because // the code_offsets have already been inserted during serial parsing above, and // the rest of the decode state is only for the global current_offset in bytes. let mut forked_state = DecodeState::with_offset(offset_before_content); let t = T::decode(&mut &buf[..], &mut forked_state)?; // While a too short size will result in an Eof error, we still need to check that // the size was not too long (i.e., that the whole buffer has been consumed). let actual_size_bytes = forked_state.current_offset - offset_before_content; if actual_size_bytes != expected_size_bytes as usize { return Err(Error::new::<T>( size_offset, ErrorKind::Size { expected: expected_size_bytes, actual: actual_size_bytes, })); } Ok(WithSize(t)) }) .collect(); Ok(Parallel(decoded?)) } fn encode<W: io::Write>(&self, writer: &mut W) -> io::Result<usize> { let mut bytes_written = self.0.len().encode(writer)?; // Encode elements to buffers in parallel. let encoded: io::Result<Vec<Vec<u8>>> = self.0.par_iter() .map(|element: &WithSize<T>| { let mut buf = Vec::new(); element.0.encode(&mut buf)?; Ok(buf) }) .collect(); // Write sizes and buffer contents to actual writer (non-parallel, but hopefully fast). for buf in encoded? { bytes_written += buf.encode(writer)?; } Ok(bytes_written) } } impl<T: WasmBinary> WasmBinary for Box<[T]> { fn decode<R: io::Read>(reader: &mut R, state: &mut DecodeState) -> Result<Self, Error> { // Reuse Vec implementation, and just drop capacity field to get Box<[T]>. 
Ok(Vec::<T>::decode(reader, state)?.into_boxed_slice()) } fn encode<W: io::Write>(&self, writer: &mut W) -> io::Result<usize> { // Essentially the same implementation as for Vec<T>, but we cannot reuse it, since we can't // convert a Box<[T]> to a Vec<T> without owning the box (or allocating). let mut bytes_written = self.len().encode(writer)?; for element in self.iter() { bytes_written += element.encode(writer)?; } Ok(bytes_written) } } /// UTF-8 strings. impl WasmBinary for String { fn decode<R: io::Read>(reader: &mut R, state: &mut DecodeState) -> Result<Self, Error> { // Reuse Vec<u8> implementation, then convert to UTF-8, consuming the buffer so no // re-allocation is necessary. let offset_before = state.current_offset; let buf: Vec<u8> = Vec::decode(reader, state).set_err_elem::<String>()?; Ok(String::from_utf8(buf).add_err_info::<String>(offset_before)?) } fn encode<W: io::Write>(&self, writer: &mut W) -> io::Result<usize> { // Cannot reuse implementation of Vec<u8> for writing, because we only have the string // borrowed, but conversion via into_bytes (which produces a Vec) would require owning it. let mut bytes_written = self.len().encode(writer)?; writer.write_all(self.as_bytes())?; bytes_written += self.len(); Ok(bytes_written) } } /* Special cases that cannot be derived and need a manual impl */ impl WasmBinary for Module { fn decode<R: io::Read>(reader: &mut R, state: &mut DecodeState) -> Result<Self, Error> { // Check magic number. let mut magic_number = [0u8; 4]; reader.read_exact(&mut magic_number).add_err_info::<Module>(0)?; if &magic_number != b"\0asm" { return Err(Error::new::<Module>(0, ErrorKind::MagicNumber { actual: magic_number })); } state.current_offset += 4; // Check version. let version = reader.read_u32::<LittleEndian>().add_err_info::<Module>(4)?; if version != 1 { return Err(Error::new::<Module>(4, ErrorKind::Version { actual: version })); } state.current_offset += 4; // Parse sections until EOF. 
let mut sections = Vec::new(); let mut last_section_type = None; loop { let offset_section_begin = state.current_offset; match Section::decode(reader, state) { Ok(mut section) => { // To insert custom sections at the correct place when serializing again, we // need to remember after which other non-custom section they originally came. if let Section::Custom(CustomSection::Raw(r)) = &mut section { r.after = last_section_type; } else { last_section_type = Some(std::mem::discriminant(&section)); } sections.push(section); } // If we cannot even read one more byte (the ID of the next section), we are done. Err(e) if e.kind() == &ErrorKind::Eof && e.offset() == offset_section_begin => break, // All other errors (including Eof in the _middle_ of a section, i.e., // where we read at least some bytes), are an error and will be reported. Err(e) => return Err(e) }; } Ok(Module { sections }) } fn encode<W: io::Write>(&self, writer: &mut W) -> io::Result<usize> { writer.write_all(b"\0asm")?; writer.write_all(&[1, 0, 0, 0])?; let mut bytes_written = 8; for section in &self.sections { bytes_written += section.encode(writer)?; } Ok(bytes_written) } } /// needs manual impl because of block handling: End op-code terminates body, but only if block stack is empty impl WasmBinary for Expr { fn decode<R: io::Read>(reader: &mut R, state: &mut DecodeState) -> Result<Self, Error> { let mut instructions = Vec::new(); let mut block_depth = 0; while block_depth >= 0 { let instr = Instr::decode(reader, state)?; block_depth += match instr { Instr::Block(..) | Instr::Loop(..) | Instr::If(..) 
=> 1, // Else ends a block, but also starts a new one Instr::Else => -1 + 1, Instr::End => -1, _ => 0 }; instructions.push(instr); } Ok(Expr(instructions)) } fn encode<W: io::Write>(&self, writer: &mut W) -> io::Result<usize> { let mut bytes_written = 0; for instruction in &self.0 { bytes_written += instruction.encode(writer)?; } Ok(bytes_written) } } /// Needs manual impl because of compressed format: even though BlockType is "logically" an enum, /// it has no tag, because they know that 0x40 (empty block) and ValType bytes are disjoint. impl WasmBinary for BlockType { fn decode<R: io::Read>(reader: &mut R, state: &mut DecodeState) -> Result<Self, Error> { let tag = u8::decode(reader, state).set_err_elem::<Self>()?; Ok(BlockType(match tag { 0x40 => None, byte => { // Retry, now interpreting as ValType. let buf = [byte; 1]; state.current_offset -= 1; Some(ValType::decode(&mut &buf[..], state)?) } })) } fn encode<W: io::Write>(&self, writer: &mut W) -> io::Result<usize> { match self { BlockType(None) => 0x40u8.encode(writer), BlockType(Some(ref val_type)) => val_type.encode(writer) } } } /// Needs manual impl because the tag if max is present comes at the beginning of the struct, not /// before the max field. 
impl WasmBinary for Limits { fn decode<R: io::Read>(reader: &mut R, state: &mut DecodeState) -> Result<Self, Error> { let tag = u8::decode(reader, state).set_err_elem::<Self>()?; Ok(match tag { 0x00 => Limits { initial_size: u32::decode(reader, state)?, max_size: None, }, 0x01 => Limits { initial_size: u32::decode(reader, state)?, max_size: Some(u32::decode(reader, state)?), }, byte => return Err(Error::invalid_tag::<Limits>(state.current_offset, byte)) }) } fn encode<W: io::Write>(&self, writer: &mut W) -> io::Result<usize> { let mut bytes_written = 0; match self.max_size { None => { bytes_written += 0x00u8.encode(writer)?; bytes_written += self.initial_size.encode(writer)?; } Some(ref max_size) => { bytes_written += 0x01u8.encode(writer)?; bytes_written += self.initial_size.encode(writer)?; bytes_written += max_size.encode(writer)?; } } Ok(bytes_written) } } impl<T> WasmBinary for PhantomData<T> { fn decode<R: io::Read>(_: &mut R, _: &mut DecodeState) -> Result<Self, Error> { Ok(PhantomData) } fn encode<W: io::Write>(&self, _: &mut W) -> io::Result<usize> { Ok(0) } } /* Custom sections and name subsection parsing. */ impl WasmBinary for CustomSection { fn decode<R: io::Read>(reader: &mut R, state: &mut DecodeState) -> Result<Self, Error> { let size_offset = state.current_offset; let section_size_bytes = u32::decode(reader, state).set_err_elem::<Self>()?; // Each custom section must have a name, see https://webassembly.github.io/spec/core/binary/modules.html#binary-customsec let name_offset = state.current_offset; let name = String::decode(reader, state).set_err_elem::<Self>()?; let name_size_bytes = state.current_offset - name_offset; // Remember the offset of this custom section in the binary. state.section_offsets.push(name_offset); // The size of the section includes also the bytes of the name, so we have to subtract // the size of the name to get the size of the content only. 
let content_size_bytes = (section_size_bytes as usize).checked_sub(name_size_bytes) // Check that the name alone is not already longer than the overall size of the section. .ok_or_else(|| Error::new::<Self>( size_offset, ErrorKind::Size { expected: section_size_bytes, actual: state.current_offset - size_offset, }))?; // Read to a buffer first, so that we can always fall back to returning a raw custom section. let mut content_state = DecodeState::with_offset(state.current_offset); let mut content = vec![0u8; content_size_bytes as usize]; reader.read_exact(content.as_mut_slice()).add_err_info::<Self>(state.current_offset)?; state.current_offset += content_size_bytes; let section = match name.as_str() { "name" => { // Unfortunately, some name sections are invalid (e.g., from the UE4 engine, an // early Wasm binary). But we don't want to fail completely, so downgrade to warning. match NameSection::decode(&mut content.as_slice(), &mut content_state) { Ok(name_section) => CustomSection::Name(name_section), Err(err) => { eprintln!("Warning: Wasm binary at offset 0x{:x} ({}): could not parse name section", size_offset, size_offset); eprintln!("Caused by: {}", err); // Keep the section as a raw section at least CustomSection::Raw(RawCustomSection { name, content, after: None }) } } } // Unknown custom section: parse the rest (excluding the name, which we already did.) // After is set later correctly by Module::decode(). _ => CustomSection::Raw(RawCustomSection { name, content, after: None }), }; Ok(section) } fn encode<W: io::Write>(&self, writer: &mut W) -> io::Result<usize> { // Write to an intermediate buffer first because we need to know the custom section size. 
let mut buf = Vec::new(); match self { CustomSection::Name(name_sec) => { "name".to_string().encode(&mut buf)?; name_sec.encode(&mut buf)?; } CustomSection::Raw(sec) => { sec.name.encode(&mut buf)?; buf.extend_from_slice(&sec.content); } } // Then write the size as LEB128 and copy all bytes from the intermediate buffer over. let encoded_size_bytes = buf.len(); let mut bytes_written = encoded_size_bytes.encode(writer)?; writer.write_all(&buf)?; bytes_written += encoded_size_bytes; Ok(bytes_written) } } impl WasmBinary for NameSection { fn decode<R: io::Read>(reader: &mut R, state: &mut DecodeState) -> Result<Self, Error> { // Parse subsections until EOF, cf. Module sections parsing. let mut subsections = Vec::new(); loop { let offset_section_begin = state.current_offset; match NameSubSection::decode(reader, state) { Ok(section) => subsections.push(section), // If we cannot even read one more byte (the ID of the next section), we are done. Err(e) if e.kind() == &ErrorKind::Eof && e.offset() == offset_section_begin => break, // All other errors (including Eof in the _middle_ of a section, i.e., // where we read at least some bytes), are an error and will be reported. Err(e) => return Err(e) }; } Ok(NameSection { subsections }) } fn encode<W: io::Write>(&self, writer: &mut W) -> io::Result<usize> { let mut bytes_written = 0; for section in &self.subsections { bytes_written += section.encode(writer)?; } Ok(bytes_written) } }
true
7d5844e520de44fe3dbfdb002f2d1ae7dacbca3d
Rust
delewit/rust
/src/test/run-pass/where-clause-early-bound-lifetimes.rs
UTF-8
695
2.625
3
[ "Apache-2.0", "LicenseRef-scancode-unknown-license-reference", "LicenseRef-scancode-other-permissive", "MIT", "Unlicense", "BSD-3-Clause", "bzip2-1.0.6", "NCSA", "ISC", "LicenseRef-scancode-public-domain", "BSD-2-Clause" ]
permissive
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. trait TheTrait { } impl TheTrait for &'static int { } fn foo<'a,T>(_: &'a T) where &'a T : TheTrait { } fn bar<T>(_: &'static T) where &'static T : TheTrait { } fn main() { static x: int = 1; foo(&x); bar(&x); }
true
de980d7d8e37df95f92ea5f30afd45879462ac25
Rust
vx416/solana_play
/bank/program/src/instruction.rs
UTF-8
6,518
3
3
[]
no_license
use solana_program::instruction::{AccountMeta, Instruction}; // use crate::error::{self}; use solana_program::{program_error::ProgramError, pubkey::Pubkey}; use std::convert::TryInto; use std::iter::Inspect; use std::mem::size_of; #[repr(C)] #[derive(Clone, Debug, PartialEq)] pub enum BankInstruction { InitializeBank { decimals: u8 }, InitializeAccount, Transfer { amount: u64 }, Approve { amount: u64 }, MintTo { amount: u64 }, Burn { amount: u64 }, CloseAccount, } impl BankInstruction { pub fn unpack(input: &[u8]) -> Result<Self, ProgramError> { use ProgramError::InvalidInstructionData; let (&tag, rest) = input.split_first().ok_or(InvalidInstructionData)?; Ok(match tag { 0 => { let (&decimal, _rest) = rest.split_first().ok_or(InvalidInstructionData)?; Self::InitializeBank { decimals: decimal } } 1 => Self::InitializeAccount, 2 | 3 | 4 | 5 => { let amount = rest .get(..8) .and_then(|slice| slice.try_into().ok()) .map(u64::from_le_bytes) .ok_or(InvalidInstructionData)?; match tag { 2 => Self::Transfer { amount }, 3 => Self::Approve { amount }, 4 => Self::MintTo { amount }, 5 => Self::Burn { amount }, _ => unreachable!(), } } 6 => Self::CloseAccount, _ => { return Err(InvalidInstructionData); } }) } pub fn pack(&self) -> Vec<u8> { let mut buf = Vec::with_capacity(size_of::<Self>()); match self { &Self::InitializeBank { decimals } => { buf.push(0); buf.push(decimals); } &Self::InitializeAccount => { buf.push(1); } &Self::Transfer { amount } => { buf.push(2); buf.extend_from_slice(&amount.to_le_bytes()); } &Self::Approve { amount } => { buf.push(3); buf.extend_from_slice(&amount.to_le_bytes()); } &Self::MintTo { amount } => { buf.push(4); buf.extend_from_slice(&amount.to_le_bytes()); } &Self::Burn { amount } => { buf.push(5); buf.extend_from_slice(&amount.to_le_bytes()); } &Self::CloseAccount => { buf.push(6); } }; buf } fn unpack_pubkey(input: &[u8]) -> Result<(Pubkey, &[u8]), ProgramError> { if input.len() >= 32 { let (key, rest) = input.split_at(32); let pk = 
Pubkey::new(key); return Ok((pk, rest)); } Err(ProgramError::InvalidInstructionData) } } pub fn initialize_bank( bank_program_id: &Pubkey, bank: &Pubkey, bank_owner: &Pubkey, decimals: u8, ) -> Result<Instruction, ProgramError> { let data = BankInstruction::InitializeBank { decimals }.pack(); let accounts = vec![ AccountMeta::new(*bank, false), AccountMeta::new(*bank_owner, true), ]; Ok(Instruction { program_id: *bank_program_id, accounts, data, }) } pub fn initialize_account( bank_program_id: &Pubkey, bank: &Pubkey, bank_account: &Pubkey, bank_account_owner: &Pubkey, ) -> Result<Instruction, ProgramError> { let data = BankInstruction::InitializeAccount.pack(); let accounts = vec![ AccountMeta::new(*bank, false), AccountMeta::new(*bank_account, false), AccountMeta::new(*bank_account_owner, true), ]; Ok(Instruction { program_id: *bank_program_id, accounts, data, }) } pub fn transfer( bank_program_id: &Pubkey, from_account: &Pubkey, to_account: &Pubkey, from_account_owner: &Pubkey, amount: u64, ) -> Result<Instruction, ProgramError> { let data = BankInstruction::Transfer { amount }.pack(); let accounts = vec![ AccountMeta::new(*from_account, false), AccountMeta::new(*to_account, false), AccountMeta::new(*from_account_owner, true), ]; Ok(Instruction { program_id: *bank_program_id, accounts, data, }) } pub fn approve( bank_program_id: &Pubkey, account: &Pubkey, delegated_account: &Pubkey, account_owner: &Pubkey, amount: u64, ) -> Result<Instruction, ProgramError> { let data = BankInstruction::Approve { amount }.pack(); let accounts = vec![ AccountMeta::new(*account, false), AccountMeta::new(*delegated_account, false), AccountMeta::new(*account_owner, true), ]; Ok(Instruction { program_id: *bank_program_id, accounts, data, }) } pub fn mint_to( bank_program_id: &Pubkey, bank: &Pubkey, mint_account: &Pubkey, bank_owner: &Pubkey, amount: u64, ) -> Result<Instruction, ProgramError> { let data = BankInstruction::MintTo { amount }.pack(); let accounts = vec![ 
AccountMeta::new(*bank, false), AccountMeta::new(*mint_account, false), AccountMeta::new(*bank_owner, true), ]; Ok(Instruction { program_id: *bank_program_id, accounts, data, }) } pub fn burn( bank_program_id: &Pubkey, bank: &Pubkey, burn_account: &Pubkey, bank_owner: &Pubkey, burn_account_owner: &Pubkey, amount: u64, ) -> Result<Instruction, ProgramError> { let data = BankInstruction::Burn { amount }.pack(); let accounts = vec![ AccountMeta::new(*bank, false), AccountMeta::new(*burn_account, false), AccountMeta::new(*bank_owner, true), AccountMeta::new(*burn_account_owner, true), ]; Ok(Instruction { program_id: *bank_program_id, accounts, data, }) } pub fn close_account( bank_program_id: &Pubkey, closed_account: &Pubkey, account_owner: &Pubkey, ) -> Result<Instruction, ProgramError> { let data = BankInstruction::CloseAccount.pack(); let accounts = vec![ AccountMeta::new(*closed_account, false), AccountMeta::new(*account_owner, true), ]; Ok(Instruction { program_id: *bank_program_id, accounts, data, }) }
true
079c9781152a5d88a8dfe4c9b09f58fd48ce32fa
Rust
xtremerui/gcf-resource
/src/check.rs
UTF-8
2,644
2.84375
3
[ "LicenseRef-scancode-unknown-license-reference", "Apache-2.0" ]
permissive
extern crate gcf_resource; extern crate hyper; extern crate hyper_rustls; extern crate yup_oauth2 as oauth2; extern crate google_cloudfunctions1 as cloudfunctions1; use cloudfunctions1::{Result, Error}; use std::default::Default; use oauth2::{Authenticator, DefaultAuthenticatorDelegate, ApplicationSecret, MemoryStorage}; use cloudfunctions1::CloudFunctions; use gcf_resource::Source; use gcf_resource::Version; #[derive(Debug)] struct CheckRequest { pub source: Source, pub version: Version, } fn main() { let req = CheckRequest { source: Source { json_key: String::from("test"), }, version: Version { version: String::from("some-version"), }, }; println!("Hello {:?}", req); } // Get an ApplicationSecret instance by some means. It contains the `client_id` and // `client_secret`, among other things. let secret: ApplicationSecret = Default::default(); // Instantiate the authenticator. It will choose a suitable authentication flow for you, // unless you replace `None` with the desired Flow. // Provide your own `AuthenticatorDelegate` to adjust the way it operates and get feedback about // what's going on. You probably want to bring in your own `TokenStorage` to persist tokens and // retrieve them from storage. let auth = Authenticator::new(&secret, DefaultAuthenticatorDelegate, hyper::Client::with_connector(hyper::net::HttpsConnector::new(hyper_rustls::TlsClient::new())), <MemoryStorage as Default>::default(), None); let mut hub = CloudFunctions::new(hyper::Client::with_connector(hyper::net::HttpsConnector::new(hyper_rustls::TlsClient::new())), auth); // You can configure optional parameters by calling the respective setters at will, and // execute the final call using `doit()`. // Values shown here are possibly random and not representative ! let result = hub.operations().get() .page_token("eirmod") .page_size(-48) .name("Stet") .filter("sed") .doit(); match result { Err(e) => match e { // The Error enum provides details about what exactly happened. 
// You can also just use its `Debug`, `Display` or `Error` traits Error::HttpError(_) |Error::MissingAPIKey |Error::MissingToken(_) |Error::Cancelled |Error::UploadSizeLimitExceeded(_, _) |Error::Failure(_) |Error::BadRequest(_) |Error::FieldClash(_) |Error::JsonDecodeError(_, _) => println!("{}", e), }, Ok(res) => println!("Success: {:?}", res), }
true
02fd908416c00b3631eaf09213960e81d63921ed
Rust
jay-tyler/rust_toy_raytracer
/src/lib.rs
UTF-8
4,454
2.875
3
[]
no_license
pub mod rays; pub mod vectors; pub mod hitable; pub mod sphere; pub mod camera; #[cfg(test)] mod test_vectors { // crate ray_tracer; use vectors; #[test] fn test_vector_fields() { // Stupid sanity test let v1 = vectors::ThreeVector(1.,2.,3.); assert_eq!(v1.0, 1.); } #[test] fn test_add_vectors() { let v1 = vectors::ThreeVector(1., 2.,3.); let v2 = vectors::ThreeVector(5., 7., 8.); let vsum = v1 + v2; assert_eq!(vsum.0, 6.); assert_eq!(vsum.1, 9.); assert_eq!(vsum.2, 11.); println!("{:?}{:?}", v1, v2); } #[test] fn test_sub_vectors_vector() { let v1 = vectors::ThreeVector(1., 2.,3.); let v2 = vectors::ThreeVector(5., 7., 8.); let vdiff = v2 - v1; assert_eq!(vdiff.0, 4.); assert_eq!(vdiff.1, 5.); assert_eq!(vdiff.2, 5.); } #[test] fn test_sub_vectors_f64() { let v1 = vectors::ThreeVector(1., 2.,3.); let one_and_half = 1.5_f64; let vdiff = v1 - one_and_half; // let expected = vectors::ThreeVector(-0.5, 0.5, 1.5); assert_eq!(vdiff.0, -0.5); assert_eq!(vdiff.1, 0.5); assert_eq!(vdiff.2, 1.5); } #[test] fn test_dot_vectors() { let v1 = vectors::ThreeVector(1., 2.,3.); let v2 = vectors::ThreeVector(5., 7., 8.); let vdot = v1.dot_product(&v2); // TODO: figure out why &v1.dot_product(&v2) gives back a reference assert_eq!(vdot, 43. 
as f64); } #[test] fn test_cross_vectors() { let v1 = vectors::ThreeVector(2., 3., 4.); let v2 = vectors::ThreeVector(5., 6., 7.); let vcross = v1.cross_product(&v2); let expected = vectors::ThreeVector(-3., -6., -3.); assert_eq!(vcross, expected); } #[test] fn test_magnitude_vectors() { let v = vectors::ThreeVector(4., -2., 3.); let expected= 29_f64.sqrt(); assert_eq!(v.magnitude(), expected); } #[test] fn test_multiply_vectors() { let v1 = vectors::ThreeVector(2., 3., 4.); let v2 = v1 * 3_f64; let expected = vectors::ThreeVector(6., 9., 12.); assert_eq!(v2, expected); } #[test] fn test_divide_vectors() { let v1 = vectors::ThreeVector(2., 3., 4.); let v2 = v1 / 2_f64; let expected = vectors::ThreeVector(1., 1.5, 2.); assert_eq!(v2, expected); } #[test] fn test_unit_vector() { let v = vectors::ThreeVector(4., -2., 3.); let vunit = v.as_unit_vector(); let expected = vectors::ThreeVector(4., -2., 3.) * (1_f64/29_f64.sqrt()); assert_eq!(vunit, expected); } } #[cfg(test)] mod test_rays { use rays; use vectors; #[test] fn test_ray_fields() { let ray = rays::Ray{ origin: vectors::ThreeVector(1., 1., 1.), direction: vectors::ThreeVector(3., 2., 1.) }; let expected_origin = vectors::ThreeVector(1., 1., 1.); let expected_direction = vectors::ThreeVector(3., 2., 1.); assert_eq!(ray.origin, expected_origin); assert_eq!(ray.direction, expected_direction); } #[test] fn test_point_at_parameter() { let ray = rays::Ray{ origin: vectors::ThreeVector(1., 1., 1.), direction: vectors::ThreeVector(3., 2., 1.) 
}; let zero_travel = ray.point_at_parameter(0.); let zero_expected = vectors::ThreeVector(1., 1., 1.); let one_travel = ray.point_at_parameter(1.); let one_expected = vectors::ThreeVector(4., 3., 2.); let three_travel = ray.point_at_parameter(3.); let three_expected = vectors::ThreeVector(10., 7., 4.); assert_eq!(zero_travel, zero_expected); assert_eq!(one_travel, one_expected); assert_eq!(three_travel, three_expected); } } #[cfg(test)] mod test_hitable { use hitable; use vectors; #[test] fn test_fields() { let h = hitable::HitRecord{ t: 4.5_f64, p: vectors::ThreeVector(4., 5., 6. ), normal: vectors::ThreeVector(7., 8., 9.) }; let p_expected = vectors::ThreeVector(4., 5., 6.); let n_expected = vectors::ThreeVector(7., 8., 9.); assert_eq!(h.t, 4.5_f64); assert_eq!(h.p, p_expected); assert_eq!(h.normal, n_expected); } }
true
7c55c2de3cd1c5891a2d730317d44ec6d19142dd
Rust
bickfordb/rust-euler
/one.rs
UTF-8
316
2.890625
3
[]
no_license
fn main() { println("one!"); let mut s : int = 0; let mut i : int = 1; loop { if i >= 1000 { break } if ((i % 3) == 0) { s += i; } else if ((i % 5) == 0) { s += i; } else { } i += 1; println("s:" + s.to_str()); } println("result: " + s.to_str()); }
true
476f7b0d4de9de4096a3dc87211f7ca8d7f299e6
Rust
y-usuzumi/survive-the-course
/survive-the-course-rs/src/problems/neetcode/arrays_and_hashing/Product_of_Array_Except_Self.rs
UTF-8
1,726
3.640625
4
[ "BSD-3-Clause" ]
permissive
// https://leetcode.com/problems/product-of-array-except-self/ pub struct Solution; impl Solution { // 以长度为6的数组,比如要计算index为2的值,其值为arr[0] * arr[1] * arr[3] * arr[4] * arr[5] // 亦即其前缀数组的积乘以后缀数组的积。 // 想象我们可以创建两个额外数组,其中一个存储所有到当前位置的前缀之积,另一个存储后缀之积。 // 创建前缀数组时,我们只需从前到后遍历一次;创建后缀数组时,只需反向遍历一次。 // 此题的挑战项目为:使用O(1)空间(不包含返回结果) // 那么我们可以将前缀数组与后缀数组直接融合到结果数组上,即: // 首先从前到后遍历一次,使用前缀之积填充结果,再从后到前遍历一次,在之前结果的基础上乘以后缀之积, // 注意偏移即可。 pub fn product_except_self(nums: Vec<i32>) -> Vec<i32> { let mut result = vec![1; nums.len()]; // 使用至当前位置的前缀之积填充结果数组 let mut prefix_product = 1; for idx in 0..nums.len() - 1 { prefix_product *= nums[idx]; result[idx + 1] *= prefix_product; } let mut postfix_product = 1; // 使用至当前位置的后缀之积更新结果数组 for idx in (1..nums.len()).rev() { postfix_product *= nums[idx]; result[idx - 1] *= postfix_product; } return result; } } #[cfg(test)] mod tests { use super::*; #[test] fn test_1() { assert_eq!( Solution::product_except_self(vec![1, 2, 3, 4]), vec![24, 12, 8, 6] ); } }
true
9f8c40ef78ddd72ddee338705dcffd459059b928
Rust
hortonberman/tun-driver
/examples/dump_iface.rs
UTF-8
1,537
3.546875
4
[ "LicenseRef-scancode-unknown-license-reference", "Apache-2.0", "MIT" ]
permissive
//! An example of reading from tun //! //! It creates a tun device, sets it up (using shell commands) for local use and then prints the //! raw data of the packets that arrive. //! //! You really do want better error handling than all these unwraps. extern crate tun_tap; use std::process::Command; use tun_tap::{Iface, Mode}; /// Run a shell command. Panic if it fails in any way. fn cmd(cmd: &str, args: &[&str]) { let ecode = Command::new("ip") .args(args) .spawn() .unwrap() .wait() .unwrap(); assert!(ecode.success(), "Failed to execte {}", cmd); } fn main() { // Create the tun interface. let iface = Iface::new("testtun%d", Mode::Tun).unwrap(); eprintln!("Iface: {:?}", iface); // Configure the „local“ (kernel) endpoint. cmd("ip", &["addr", "add", "dev", iface.name(), "10.107.1.2/24"]); cmd("ip", &["link", "set", "up", "dev", iface.name()]); println!("Created interface {}. Send some packets into it and see they're printed here", iface.name()); println!("You can for example ping 10.107.1.3 (it won't answer)"); // That 1500 is a guess for the IFace's MTU (we probably could configure it explicitly). 4 more // for TUN's „header“. let mut buffer = vec![0; 1504]; loop { // Every read is one packet. If the buffer is too small, bad luck, it gets truncated. let size = iface.recv(&mut buffer).unwrap(); assert!(size >= 4); println!("Packet: {:?}", &buffer[4..size]); } }
true
07a66996ee770cbb02e4082e03ebbcf730815bf9
Rust
misoton665/skylink
/src/domain/link.rs
UTF-8
409
2.890625
3
[]
no_license
#[derive(RustcDecodable, RustcEncodable, Clone, Debug)] pub struct Link { pub id: String, pub path: String, pub has_gitrep: bool, } impl Link { pub fn new(id: &'static str, path: &'static str, has_gitrep: bool) -> Link { Link{id: id.to_string(), path: path.to_string(), has_gitrep: has_gitrep} } } impl PartialEq for Link { fn eq(&self, other: &Link) -> bool { self.id == other.id } }
true
f16bf84e3607ea918b27df3d95b50f4ee17ead37
Rust
nabijaczleweli/bloguen
/tests/ops/output/wrapped_element/style_element/deserialisation/object/err.rs
UTF-8
2,029
2.6875
3
[ "MIT" ]
permissive
use toml::from_str as from_toml_str; use bloguen::ops::StyleElement; #[derive(Deserialize)] struct Data { pub data: StyleElement, } #[test] fn invalid_class() { let res: Result<Data, _> = from_toml_str("[data]\nclass = 'helnlo'\ndata = '//nabijaczleweli.xyz/kaschism/assets/column.css'\n"); assert_eq!(format!("{}", res.err().unwrap()), r#"invalid value: string "helnlo", expected "literal", "link", or "file" for key `data` at line 1 column 1"#); } #[test] fn no_class() { let res: Result<Data, _> = from_toml_str("[data]\n\ data = '//nabijaczleweli.xyz/kaschism/assets/column.css'\n"); assert_eq!(format!("{}", res.err().unwrap()), r#"missing field `class` for key `data` at line 1 column 1"#); } #[test] fn no_data() { let res: Result<Data, _> = from_toml_str("[data]\n\ class = 'link'\n"); assert_eq!(format!("{}", res.err().unwrap()), r#"missing field `data` for key `data` at line 1 column 1"#); } #[test] fn dupe_class() { let res: Result<Data, _> = from_toml_str("[data]\nclass = 'link'\nclass = 'link'\ndata = '//nabijaczleweli.xyz/kaschism/assets/column.css'\n"); assert_eq!(format!("{}", res.err().unwrap()), r#"duplicate field `class` for key `data` at line 1 column 1"#); } #[test] fn dupe_data() { let res: Result<Data, _> = from_toml_str("[data]\nclass = 'link'\ndata = '//nabijaczleweli.xyz/kaschism/assets/column.css'\ndata = \ '//nabijaczleweli.xyz/kaschism/assets/column.css'\n"); assert_eq!(format!("{}", res.err().unwrap()), r#"duplicate field `data` for key `data` at line 1 column 1"#); } #[test] fn unknown_field() { let res: Result<Data, _> = from_toml_str("[data]\n\ helnlo = 'link'\n"); assert_eq!(format!("{}", res.err().unwrap()), r#"unknown field `helnlo`, expected `class` or `data` for key `data` at line 1 column 1"#); }
true
df713293a9a82a129bf2e3636082d1259014134b
Rust
miquels/nntp-rs
/src/util/buffer.rs
UTF-8
10,587
3.546875
4
[ "MIT" ]
permissive
//! Buffer implementation like Bytes / BytesMut. //! //! It is simpler and contains less unsafe code. use std::default::Default; use std::fmt; use std::io::{self, Read, Write}; use std::marker::Unpin; use std::mem; use std::ops::{Deref, DerefMut}; use std::pin::Pin; use std::slice; use std::task::{Context, Poll}; use tokio::io::{AsyncRead, ReadBuf}; /// A buffer structure, like Bytes/BytesMut. /// /// It is not much more than a wrapper around Vec. pub struct Buffer { start_offset: usize, rd_pos: usize, data: Vec<u8>, } impl Buffer { /// Create new Buffer. pub fn new() -> Buffer { Buffer { start_offset: 0, rd_pos: 0, data: Vec::new(), } } /// Create new Buffer. pub fn with_capacity(cap: usize) -> Buffer { Buffer { start_offset: 0, rd_pos: 0, data: Vec::with_capacity(Self::round_size_up(cap)), } } /// Clear this buffer. pub fn clear(&mut self) { self.start_offset = 0; self.rd_pos = 0; self.data.truncate(0); } /// Truncate this buffer. pub fn truncate(&mut self, size: usize) { if size == 0 { self.clear(); return; } if size > self.len() { panic!("Buffer::truncate(size): size > self.len()"); } if self.rd_pos > size { self.rd_pos = size; } self.data.truncate(size + self.start_offset); } pub fn bytes(&self) -> &[u8] { if self.rd_pos >= self.len() { return &[][..]; } &self.data[self.start_offset + self.rd_pos..] } /// Split this Buffer in two parts. /// /// The first part remains in this buffer. The second part is /// returned as a new Buffer. pub fn split_off(&mut self, at: usize) -> Buffer { if at > self.len() { panic!("Buffer:split_off(size): size > self.len()"); } if self.rd_pos > at { self.rd_pos = at; } // If "header" < 32K and "body" >= 32K, use a start_offset // for "body" and copy "header". 
if self.start_offset == 0 && at < 32000 && self.len() - at >= 32000 { let mut bnew = Buffer::with_capacity(at); mem::swap(self, &mut bnew); self.extend_from_slice(&bnew[0..at]); bnew.start_offset = at; return bnew; } let mut bnew = Buffer::new(); let bytes = self.bytes(); bnew.extend_from_slice(&bytes[at..]); self.truncate(at); bnew } /// Add data to this buffer. #[inline] pub fn extend_from_slice(&mut self, extend: &[u8]) { self.reserve(extend.len()); self.data.extend_from_slice(extend); } #[inline] fn round_size_up(size: usize) -> usize { if size < 128 { 128 } else if size < 4096 { 4096 } else if size < 65536 { 65536 } else if size < 2097152 { size.next_power_of_two() } else { (1 + size / 1048576) * 1048576 } } /// Make sure at least `size` bytes are available. #[inline] pub fn reserve(&mut self, size: usize) { let end = self.data.len() + size; if end < self.data.capacity() { return; } self.data.reserve_exact(Self::round_size_up(end) - self.data.len()); } /// total length of all data in this Buffer. #[inline] pub fn len(&self) -> usize { self.data.len() - self.start_offset } /// Split this Buffer in two parts. /// /// The second part remains in this buffer. The first part is /// returned to the caller. pub fn split_to(&mut self, size: usize) -> Buffer { let mut other = self.split_off(size); mem::swap(self, &mut other); other } /// Write all data in this `Buffer` to a file. pub fn write_all(&mut self, mut file: impl Write) -> io::Result<()> { while self.rd_pos < self.len() { let bytes = self.bytes(); let size = bytes.len(); file.write_all(bytes)?; self.rd_pos += size; } Ok(()) } /// Add text data to this buffer. #[inline] pub fn push_str(&mut self, s: &str) { self.extend_from_slice(s.as_bytes()); } /// Add a string to the buffer. #[inline] pub fn put_str(&mut self, s: impl AsRef<str>) { self.extend_from_slice(s.as_ref().as_bytes()); } /// Return a reference to this Buffer as an UTF-8 string. 
#[inline] pub fn as_utf8_str(&self) -> Result<&str, std::str::Utf8Error> { std::str::from_utf8(self.bytes()) } /// Convert this buffer into a Vec<u8>. pub fn into_bytes(self) -> Vec<u8> { if self.start_offset > 0 { let mut v = Vec::with_capacity(Self::round_size_up(self.len())); v.extend_from_slice(self.bytes()); v } else { self.data } } // // ===== Begin unsafe code ===== // /// Read an exact number of bytes. pub fn read_exact(&mut self, reader: &mut std::fs::File, len: usize) -> io::Result<()> { self.reserve(len); // Safety: it is safe for a std::fs::File to read into uninitialized memory. unsafe { let buf = self.spare_capacity_mut(); reader.read_exact(&mut buf[..len])?; self.advance_mut(len); } Ok(()) } unsafe fn spare_capacity_mut<T>(&mut self) -> &mut [T] { let len = self.data.len(); let spare = self.data.capacity() - len; let ptr = self.data.as_mut_ptr().add(len) as *mut T; &mut slice::from_raw_parts_mut(ptr, spare)[..] } unsafe fn advance_mut(&mut self, cnt: usize) { if self.data.len() + cnt > self.data.capacity() { panic!("Buffer::advance_mut(cnt): would advance past end of Buffer"); } self.data.set_len(self.data.len() + cnt); } pub fn poll_read<R>(&mut self, reader: Pin<&mut R>, cx: &mut Context<'_>) -> Poll<io::Result<usize>> where R: AsyncRead + Unpin + ?Sized { // Safety: ReadBuf::uninit takes a MaybeUninit. let mut buf = ReadBuf::uninit(unsafe { self.spare_capacity_mut() }); futures::ready!(reader.poll_read(cx, &mut buf))?; let len = buf.filled().len(); // Safety: len = buf.filled().len() is guaranteed to be correct. unsafe { self.advance_mut(len); } Poll::Ready(Ok(len)) } // // ===== End unsafe code ===== // } impl bytes::Buf for Buffer { fn advance(&mut self, cnt: usize) { // advance buffer read pointer. 
self.rd_pos += cnt; if self.rd_pos > self.len() { // "It is recommended for implementations of advance to // panic if cnt > self.remaining()" panic!("read position advanced beyond end of buffer"); } } #[inline] fn chunk(&self) -> &[u8] { self.bytes() } #[inline] fn remaining(&self) -> usize { self.len() - self.rd_pos } } impl Deref for Buffer { type Target = [u8]; #[inline] fn deref(&self) -> &[u8] { self.bytes() } } impl DerefMut for Buffer { #[inline] fn deref_mut(&mut self) -> &mut [u8] { &mut self.data[self.start_offset + self.rd_pos..] } } impl fmt::Write for Buffer { fn write_str(&mut self, s: &str) -> Result<(), fmt::Error> { self.push_str(s); Ok(()) } } impl From<&[u8]> for Buffer { fn from(src: &[u8]) -> Self { let mut buffer = Buffer::new(); buffer.extend_from_slice(src); buffer } } impl From<Vec<u8>> for Buffer { fn from(src: Vec<u8>) -> Self { Buffer { start_offset: 0, rd_pos: 0, data: src, } } } impl From<&str> for Buffer { fn from(src: &str) -> Self { Buffer::from(src.as_bytes()) } } impl From<String> for Buffer { fn from(src: String) -> Self { Buffer::from(src.into_bytes()) } } impl From<bytes::Bytes> for Buffer { fn from(src: bytes::Bytes) -> Self { Buffer::from(&src[..]) } } impl Default for Buffer { fn default() -> Self { Buffer::new() } } impl fmt::Debug for Buffer { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let cap = self.data.capacity(); let len = self.len(); f.debug_struct("Buffer") .field("start_offset", &self.start_offset) .field("rd_pos", &self.rd_pos) .field("len", &len) .field("capacity", &cap) .field("data", &"[data]") .finish() } } #[cfg(test)] mod tests { use super::*; #[test] fn test_buffer() { let mut b = Buffer::new(); b.reserve(4096); b.start_offset = 23; b.data.resize(b.start_offset, 0); for _ in 0..50000 { b.put_str("xyzzyxyzzy"); } assert!(b.len() == 500000); assert!(&b[1000..1010] == &b"xyzzyxyzzy"[..]); } #[test] fn test_split() { let mut b = Buffer::new(); for _ in 0..5000 { b.put_str("xyzzyxyzzyz"); } 
assert!(b.len() == 55000); let mut n = b.split_off(4918); assert!(b.len() == 4918); assert!(n.len() == 50082); println!("1. {}", std::str::from_utf8(&b[1100..1110]).unwrap()); println!("2. {}", std::str::from_utf8(&n[1100..1110]).unwrap()); assert!(&b[1100..1110] == &b"xyzzyxyzzy"[..]); assert!(&n[1100..1110] == &b"yzzyxyzzyz"[..]); n.start_offset += 13; let x = n.split_to(20000); println!("3. n.len() {}", n.len()); println!("4. x.len() {}", x.len()); println!("5. {}", std::str::from_utf8(&n[1000..1010]).unwrap()); println!("6. {}", std::str::from_utf8(&x[1000..1010]).unwrap()); assert!(n.len() == 30069); assert!(x.len() == 20000); assert!(&n[1000..1010] == &b"yxyzzyzxyz"[..]); assert!(&x[1000..1010] == &b"zzyxyzzyzx"[..]); } #[test] fn test_spare() { let mut b = Buffer::with_capacity(194); assert!(b.data.capacity() == 4096); b.extend_from_slice(b"0123456789"); let buf: &mut [u8] = unsafe { b.spare_capacity_mut() }; assert!(buf.len() == 4086); } }
true
9e2d2c2aaf59923e4871c22d0f193487a94ec307
Rust
gbutler69/rust-exercism
/diamond/src/lib.rs
UTF-8
775
3.28125
3
[]
no_license
pub fn get_diamond(c: char) -> Vec<String> { let mut result = Vec::new(); if !('A'..='Z').contains(&c) { return result; } let max_fill = c as usize - 'A' as usize; let width = max_fill * 2 + 1; for fill in (0..=max_fill).rev().chain(1..=max_fill) { let mut line = vec![b' '; width]; line[fill] = c as u8 - fill as u8; line[width - fill - 1] = c as u8 - fill as u8; // SAFETY/SOUNDNESS: This is Safe/Sound because the above // logic guarantees that ONLY the u8 values for the characters // A through Z (ASCII) will exist in the line vec. This is // guaranteed to be valid UTF-8 as ASCII is valid UTF-8 result.push(unsafe { String::from_utf8_unchecked(line) }); } result }
true
5e82efdbd8846bc9f67007d4f49dd3c1889e646a
Rust
Michael-F-Bryan/include_dir
/include_dir/src/metadata.rs
UTF-8
1,143
3.328125
3
[ "MIT" ]
permissive
use std::time::{Duration, SystemTime}; /// Basic metadata for a file. #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub struct Metadata { accessed: Duration, created: Duration, modified: Duration, } impl Metadata { /// Create a new [`Metadata`] using the number of seconds since the /// [`SystemTime::UNIX_EPOCH`]. pub const fn new(accessed: Duration, created: Duration, modified: Duration) -> Self { Metadata { accessed, created, modified, } } /// Get the time this file was last accessed. /// /// See also: [`std::fs::Metadata::accessed()`]. pub fn accessed(&self) -> SystemTime { SystemTime::UNIX_EPOCH + self.accessed } /// Get the time this file was created. /// /// See also: [`std::fs::Metadata::created()`]. pub fn created(&self) -> SystemTime { SystemTime::UNIX_EPOCH + self.created } /// Get the time this file was last modified. /// /// See also: [`std::fs::Metadata::modified()`]. pub fn modified(&self) -> SystemTime { SystemTime::UNIX_EPOCH + self.modified } }
true
46a6df07ddeb4b922083f84c4b05a0f6b897de88
Rust
parkovski/scifiweb
/model-mem/src/cache/messaging.rs
UTF-8
7,250
2.96875
3
[]
no_license
use std::collections::{BTreeMap, HashMap}; use std::collections::hash_map::Entry as HEntry; use std::collections::btree_map::Entry as BTEntry; use model::instance::Target; use model::instance::messaging::{Mailbox, MessagingError, Message, MessageThread}; pub struct MailboxCache { // Mailbox ID to mailbox mailboxes: BTreeMap<u64, Mailbox>, /// Owner -> name -> indexes into mailboxes mailbox_owner_map: HashMap<Target, BTreeMap<String, u64>>, } impl MailboxCache { pub fn new() -> Self { MailboxCache { mailboxes: BTreeMap::new(), mailbox_owner_map: HashMap::new(), } } pub fn put_mailbox(&mut self, mailbox: Mailbox) -> Result<Mailbox, MessagingError> { trace!( "Storing mailbox #{} ({} for {})", mailbox.id(), mailbox.name(), mailbox.owner() ); let name_map_entry = self .mailbox_owner_map .entry(mailbox.owner()) .or_insert_with(BTreeMap::new) .entry(mailbox.name().to_owned()); let id_map_entry = self.mailboxes.entry(mailbox.id()); match (name_map_entry, id_map_entry) { (_, BTEntry::Occupied(_)) => Err(MessagingError::already_exists("Mailbox ID", mailbox.id())), (BTEntry::Occupied(_), _) => { Err(MessagingError::already_exists("Mailbox name", mailbox.name())) } (BTEntry::Vacant(ne), BTEntry::Vacant(ie)) => { ne.insert(mailbox.id()); ie.insert(mailbox.clone()); Ok(mailbox) } } } pub fn get_mailbox_for_owner(&self, owner: Target, name: &str) -> Option<Mailbox> { self .mailbox_owner_map .get(&owner) .and_then(|name_map| name_map.get(name)) .and_then(|id| self.mailboxes.get(id)) .and_then(|mailbox| Some(mailbox.clone())) } pub fn get_mailbox_by_id(&self, id: u64) -> Option<Mailbox> { self.mailboxes.get(&id).cloned() } pub fn get_mailbox_by_id_mut(&mut self, id: u64) -> Option<&mut Mailbox> { self.mailboxes.get_mut(&id) } pub fn get_all_mailboxes(&self, owner: Target) -> Option<Vec<Mailbox>> { self.mailbox_owner_map.get(&owner).and_then(|name_map| { let mut values = name_map .values() .map(|v| self.mailboxes.get(v)) .filter_map(|option_mailbox| option_mailbox) .cloned(); if 
values.any(|_| true) { Some(values.collect()) } else { None } }) } /// Returns thread IDs if successful pub fn delete_mailbox_for_owner( &mut self, owner: Target, name: &str, ) -> Result<Vec<u64>, MessagingError> { let mut name_map_entry = match self.mailbox_owner_map.entry(owner.clone()) { HEntry::Occupied(e) => e, HEntry::Vacant(_) => return Err(MessagingError::not_found("entry for owner", owner)), }; let result = if let BTEntry::Occupied(e) = name_map_entry.get_mut().entry(String::from(name)) { trace!("Deleting mailbox {} for {}", name, owner); let id = e.remove(); self .mailboxes .remove(&id) .map(|mb| mb.thread_ids) .ok_or_else(|| MessagingError::not_found("mailbox id", id)) } else { return Err(MessagingError::not_found("mailbox name map entry", name)); }; // If no more mailboxes are left for this owner, remove the map. if name_map_entry.get().is_empty() { name_map_entry.remove(); } result } /// Returns thread IDs if successful pub fn delete_mailbox_by_id(&mut self, id: u64) -> Result<Vec<u64>, MessagingError> { let mailbox = match self.mailboxes.remove(&id) { Some(mb) => mb, None => { trace!("Delete mailbox #{} - not found", id); return Err(MessagingError::not_found("mailbox id", id)); } }; trace!( "Deleted mailbox #{} ({} for {})", id, mailbox.name(), mailbox.owner() ); let mut name_map_entry = match self.mailbox_owner_map.entry(mailbox.owner()) { HEntry::Occupied(e) => e, HEntry::Vacant(_) => { warn!("Mailbox #{} was found but not in a name map", id); return Ok(mailbox.thread_ids); } }; if let BTEntry::Occupied(e) = name_map_entry.get_mut().entry(mailbox.name().to_owned()) { e.remove(); } if name_map_entry.get().is_empty() { name_map_entry.remove(); } Ok(mailbox.thread_ids) } /// Returns thread IDs for all mailboxes found. 
pub fn delete_all_mailboxes(&mut self, owner: Target) -> Result<Vec<u64>, MessagingError> { trace!("Deleting all mailboxes for {}", owner); let name_map_entry = match self.mailbox_owner_map.entry(owner.clone()) { HEntry::Occupied(e) => e, HEntry::Vacant(_) => { return Err(MessagingError::not_found("name map entry", owner)); } }; let mailboxes = &mut self.mailboxes; let mut ids = Vec::new(); for id in name_map_entry.get().values() { if let Some(mailbox) = mailboxes.remove(id) { ids.extend(mailbox.thread_ids); } } name_map_entry.remove(); Ok(ids) } } pub struct MessageThreadCache { threads: BTreeMap<u64, MessageThread>, } impl MessageThreadCache { pub fn new() -> Self { MessageThreadCache { threads: BTreeMap::new(), } } pub fn put_thread(&mut self, thread: MessageThread) -> Result<MessageThread, MessagingError> { match self.threads.entry(thread.id) { BTEntry::Occupied(_) => Err(MessagingError::already_exists("thread id", thread.id)), BTEntry::Vacant(e) => { e.insert(thread.clone()); Ok(thread) } } } pub fn get_thread_by_id(&self, id: u64) -> Option<MessageThread> { self.threads.get(&id).cloned() } pub fn get_thread_by_id_mut(&mut self, id: u64) -> Option<&mut MessageThread> { self.threads.get_mut(&id) } /// The returned Vec is in the same order as ids, with None /// in place of the ones not found. pub fn get_threads_by_id(&self, ids: &[u64]) -> Vec<Option<MessageThread>> { ids.iter().map(|id| self.threads.get(id).cloned()).collect() } /// Returns the message IDs of all the deleted threads. /// Ignores missing thread IDs. 
pub fn delete_threads(&mut self, ids: &[u64]) -> Vec<u64> { ids.into_iter().fold(Vec::new(), |mut ids, id| { match self.threads.remove(id) { Some(thread) => { ids.extend(&thread.message_ids); } None => { debug!("Thread id {} missing", id); } } ids }) } } pub struct MessageCache { messages: BTreeMap<u64, Message>, } impl MessageCache { pub fn new() -> Self { MessageCache { messages: BTreeMap::new(), } } pub fn put_message(&mut self, message: Message) -> Result<Message, MessagingError> { match self.messages.entry(message.id) { BTEntry::Occupied(_) => Err(MessagingError::already_exists("message id", message.id)), BTEntry::Vacant(e) => { e.insert(message.clone()); Ok(message) } } } pub fn get_messages_by_id(&self, ids: &[u64]) -> Vec<Option<Message>> { ids .iter() .map(|id| self.messages.get(id).cloned()) .collect() } /// Ignores missing messages. pub fn delete_messages(&mut self, ids: &[u64]) { for id in ids { self.messages.remove(id); } } }
true
3fea4050d05f9584c1313138a08c65f77383700e
Rust
Mackirac/image_processing
/src/intensity/pow.rs
UTF-8
431
2.9375
3
[]
no_license
use crate::{ Transformation, ImageBuffer, Pixel }; pub struct Pow(pub f64); impl <PI: Pixel<Subpixel=u8> + 'static> Transformation<PI> for Pow { type PO = PI; fn transform (&self, mut image: ImageBuffer<PI, Vec<u8>>) -> ImageBuffer<Self::PO, Vec<u8>> { for pixel in image.iter_mut() { *pixel = (255.0 * (*pixel as f64 / 255.0).powf(self.0)) as u8; } image } }
true
c06cf4d1a6bde35ee5758aa407f3e589d31cbab4
Rust
hoangpq/edit-text
/build-tools/src/mdbook_bin/preprocessors.rs
UTF-8
2,579
2.984375
3
[ "Apache-2.0", "MIT" ]
permissive
//! Svgbob preprocessing use mdbook::book::{ Book, BookItem, }; use mdbook::errors::Error; use mdbook::preprocess::*; use regex::{ Captures, Regex, }; pub struct SvgbobPreprocessor; impl Preprocessor for SvgbobPreprocessor { fn name(&self) -> &str { "svgbob" } fn run(&self, ctx: &PreprocessorContext, book: &mut Book) -> Result<(), Error> { process(&mut book.sections) } } fn process<'a, I>(items: I) -> Result<(), Error> where I: IntoIterator<Item = &'a mut BookItem> + 'a, { let re = Regex::new(r"```(?:svg)?bob\n([\S\s]+?)\n```").unwrap(); for item in items { if let BookItem::Chapter(ref mut chapter) = item { // eprintln!("svgbob: processing chapter '{}'", chapter.name); let res = re.replace_all(&chapter.content, |captures: &Captures| { let bob_source = captures.get(1).unwrap().as_str(); // eprintln!("!!!! REPLACING.... {:?}", bob_source); format!("{}", svgbob::to_svg(bob_source)).replace("\n", " ") }); // if re.is_match(&chapter.content) { // eprintln!("\n\n\nresult {}\n\n\n", res); // } chapter.content = res.to_string(); process(&mut chapter.sub_items); } } Ok(()) } pub struct TOCPreprocessor; impl Preprocessor for TOCPreprocessor { fn name(&self) -> &str { "toc" } fn run(&self, ctx: &PreprocessorContext, book: &mut Book) -> Result<(), Error> { for section in &mut book.sections { if let BookItem::Chapter(ref mut chapter) = section { if !chapter.sub_items.is_empty() { let toc: Vec<String> = chapter .sub_items .iter() .filter_map(|sub_item| { if let BookItem::Chapter(ref chapter) = sub_item { Some(format!( "1. [{}]({})", chapter.name, chapter.path.to_string_lossy() )) } else { None } }) .collect(); chapter.content = chapter .content .replace("{{#toc}}", &format!("\n\n{}\n\n", toc.join("\n"))); } } } Ok(()) } }
true
560a00bd94e5be0953cf110ceba75b73dae3db9f
Rust
AZanellato/AOC_2019
/four_2/src/main.rs
UTF-8
2,640
3.6875
4
[]
no_license
use std::collections::HashMap; fn main() { let lower_bound = 147_981; let upper_bound = 691_423; let count = (lower_bound..upper_bound) .filter(|n| check_not_decreasing(*n)) .filter(|n| check_for_double(*n)) .count(); println!("The count is: {}", count); } fn check_not_decreasing(number: u32) -> bool { let d0 = ((number / 100_000) % 10) as u8; let d1 = ((number / 10_000) % 10) as u8; let d2 = ((number / 1_000) % 10) as u8; let d3 = ((number / 100) % 10) as u8; let d4 = ((number / 10) % 10) as u8; let d5 = (number % 10) as u8; let digits = [d0, d1, d2, d3, d4, d5]; let mut sorted_digits = [d0, d1, d2, d3, d4, d5]; sorted_digits.sort_by(|a, b| a.cmp(b)); digits == sorted_digits } fn check_for_double(number: u32) -> bool { let d0 = ((number / 100_000) % 10) as u8; let d1 = ((number / 10_000) % 10) as u8; let d2 = ((number / 1_000) % 10) as u8; let d3 = ((number / 100) % 10) as u8; let d4 = ((number / 10) % 10) as u8; let d5 = (number % 10) as u8; let digits = [d0, d1, d2, d3, d4, d5]; let mut digit_counts = HashMap::new(); for digit in digits.iter() { let count = digit_counts.entry(digit).or_insert(0); *count += 1; } digit_counts.values().any(|&value| value == 2) } #[cfg(test)] mod tests { use super::{check_for_double, check_not_decreasing}; #[test] fn double_test_cases() { assert_eq!(check_for_double(111_222), false); assert_eq!(check_for_double(111_111), false); // assert_eq!(check_for_double(123444), false); // assert_eq!(check_for_double(112233), true); assert_eq!(check_for_double(111_255), true); // assert_eq!(check_for_double(111122), true); // assert_eq!(check_for_double(111223788), true); // assert_eq!(check_for_double(111223788), true); } #[test] fn all_true_cases() { assert_eq!(check_for_double(445_550), true); assert_eq!(check_for_double(446660), true); assert_eq!(check_for_double(447770), true); assert_eq!(check_for_double(448880), true); assert_eq!(check_for_double(449990), true); assert_eq!(check_for_double(556660), true); 
assert_eq!(check_for_double(557770), true); } #[test] fn decreasing_test_cases() { assert_eq!(check_not_decreasing(12), true); assert_eq!(check_not_decreasing(155), true); assert_eq!(check_not_decreasing(111111), true); assert_eq!(check_not_decreasing(111101), false); assert_eq!(check_not_decreasing(223450), false); } }
true
f94b3fb4ba415bbc9c583721bd1c65f5957a7024
Rust
j-keck/clic
/tests/selftest-runner.rs
UTF-8
1,899
2.84375
3
[]
no_license
use env_logger::Env; use log::{debug, info}; use std::process::Command; use std::{ffi::OsStr, fs, path::PathBuf}; #[test] fn selftest_runner() { env_logger::from_env(Env::default().default_filter_or("info")).init(); let spec_files_path = "tests/selftest"; let entries = fs::read_dir(spec_files_path).expect(&format!(r#"path "{}" not found"#, spec_files_path)); for entry in entries { let path = entry.unwrap().path(); // process only '.spec' files if path.as_path().extension() != Some(OsStr::new("spec")) { continue; } info!("run selftest spec: {}", path.display()); // expected stdout / stderr let expected_stdout = expected_for("stdout", &path); let expected_stderr = expected_for("stderr", &path); // execute 'clic' and collect stdout / stderr let (actual_stdout, actual_stderr) = { let output = Command::new("cargo") .arg("run") .arg("--quiet") .arg("--") .arg("--spec") .arg(&path) .arg("--quiet") .output() .unwrap(); let stdout = String::from_utf8_lossy(&output.stdout).into_owned(); debug!(r#"stdout: "{}""#, stdout); let stderr = String::from_utf8_lossy(&output.stderr).into_owned(); debug!(r#"stderr: "{}""#, stderr); (stdout, stderr) }; // validate the output assert_eq!(expected_stdout, actual_stdout); assert_eq!(expected_stderr, actual_stderr); } } fn expected_for(id: &str, path: &PathBuf) -> String { let path = path.with_extension(id); debug!("read expected {} from: {}", id, path.display()); let s = fs::read_to_string(path).unwrap_or("".to_string()); debug!(r#"expected {}: "{}""#, id, s); s }
true
edf46d8d1ded1567d1953ef9d9fe5e129af9d442
Rust
MostafaAlnasr/idolsched
/src/cards_api/cache.rs
UTF-8
1,157
2.734375
3
[]
no_license
#![allow(dead_code)] // rustc spuriously considers this dead bc of #[cfg]s use serde::{Deserialize, Serialize}; use std::collections::HashMap; use super::{Error, Cfg}; use super::json_card::JsonCard; #[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)] pub struct CardCache { pub provider: String, pub language: Option<String>, pub cards: HashMap<u32, JsonCard>, } pub fn load_cache(cfg: &Cfg) -> Result<CardCache, Error> { if let Some(p) = cfg.cache_path.as_ref() { // only try to load the cache if we think it exists if p.exists() { let cache: CardCache = serde_json::from_slice(&std::fs::read(p)?)?; if cache.provider == cfg.provider && cache.language == cfg.language { return Ok(cache); } } } Ok(CardCache { provider: cfg.provider.clone(), language: cfg.language.clone(), cards: HashMap::new() }) } pub fn save_cache(cfg: &Cfg, cache: &CardCache) -> Result<(), Error> { if let Some(p) = cfg.cache_path.as_ref() { let cache_json = serde_json::to_string_pretty(cache)?; std::fs::write(p, cache_json)?; } Ok(()) }
true
2001a5f636c767bcd880a55a82ec614b2b1d4a2c
Rust
rebo/comp_state
/src/list.rs
UTF-8
7,232
3.25
3
[]
no_license
use crate::state_access::{CloneState, StateAccess}; use crate::state_functions::use_state; use slotmap::{new_key_type, DenseSlotMap, Key}; new_key_type! { pub struct ListKey; } pub fn use_list<T, F>(initial_list_fn: F) -> ListControl<T> where F: FnOnce() -> Vec<T>, T: Clone, { let list_access = use_state(|| List::new(initial_list_fn())); ListControl::new(list_access) } #[derive(Clone)] pub struct ListControl<T> where T: Clone + 'static, { list_access: StateAccess<List<T>>, } impl<T> ListControl<T> where T: Clone + 'static, { fn new(list_access: StateAccess<List<T>>) -> ListControl<T> { ListControl { list_access } } pub fn get_list(&self) -> List<T> { self.list_access.get() } pub fn clear(&self) { self.list_access.update(|list| { list.items_map = ListKeyDenseSlotMap::new(); list.items_order = vec![]; }); } // brain always gets this messed up so I have to write it down! // 0 1 2 3 4 5 6 // a b c d e f g // I want to move c after d (which should be remove 2 put in 3) // remove(2) // 0 1 2 3 4 5 6 // a b d e f g // insert(3) // 0 1 2 3 4 5 6 // a b d e f g // // // 0 1 2 3 4 5 6 // a b c d e f g // I want to move f after d (which should be remove 5 put in 4) // remove(2) // 0 1 2 3 4 5 6 // a b d e f g // insert(3) // 0 1 2 3 4 5 6 // a b d e f g pub fn move_item_to_position(&self, old_idx: usize, new_idx: usize) { let mut list = self.list_access.get(); if new_idx > list.items_order.len() || old_idx > list.items_order.len() - 1 { return; } let old_item = list.items_order.remove(old_idx); use std::cmp::Ordering; match old_idx.cmp(&new_idx) { Ordering::Less => list.items_order.insert(new_idx - 1, old_item), Ordering::Greater => list.items_order.insert(new_idx, old_item), Ordering::Equal => {} } self.list_access.set(list); } pub fn move_item_up(&self, old_idx: usize) { if old_idx == 0 { return; } self.move_item_to_position(old_idx, old_idx - 1); } pub fn move_item_down(&self, old_idx: usize) { self.move_item_to_position(old_idx, old_idx + 2); } pub fn insert(&self, idx: 
usize, item: T) { let mut list = self.list_access.get(); let inserted_key = list.items_map.0.insert(item); list.items_order.insert(idx, inserted_key); self.list_access.set(list); } pub fn remove(&self, idx: usize) -> T { let mut list = self.list_access.get(); let removed_key = list.items_order.remove(idx); let obj = list.items_map.0.remove(removed_key).unwrap(); self.list_access.set(list); obj } pub fn replace(&self, idx: usize, item: T) -> T { let mut list = self.list_access.get(); let inserted_key = list.items_map.0.insert(item); list.items_order.insert(idx, inserted_key); let replaced_key = list.items_order.remove(idx + 1); let obj = list.items_map.0.remove(replaced_key).unwrap(); self.list_access.set(list); obj } pub fn push(&self, item: T) { let mut list = self.list_access.get(); let pushed_key = list.items_map.0.insert(item); list.items_order.push(pushed_key); self.list_access.set(list); } pub fn unselect_by_key(&self, key: ListKey) { let mut list = self.list_access.get(); list.selected_keys.retain(|k| *k != key); self.list_access.set(list); } pub fn unselect_all(&self) { let mut list = self.list_access.get(); list.selected_keys = vec![]; self.list_access.set(list); } pub fn select_all(&self) { let mut list = self.list_access.get(); for key in &list.items_order { list.selected_keys.push(*key) } self.list_access.set(list); } pub fn unselect(&self, idx: usize) { let mut list = self.list_access.get(); list.selected_keys.remove(idx); self.list_access.set(list); } pub fn select(&self, idx: usize) { let mut list = self.list_access.get(); let key = list.items_order[idx]; list.selected_keys.push(key); self.list_access.set(list); } pub fn toggle_select(&self, idx: usize) { let mut list = self.list_access.get(); let key = list.items_order[idx]; if list.selected_keys.contains(&key) { list.selected_keys.remove(idx); } else { list.selected_keys.push(key); } self.list_access.set(list); } pub fn select_only(&self, idx: usize) { let mut list = self.list_access.get(); let key 
= list.items_order[idx]; list.selected_keys = vec![]; list.selected_keys.push(key); self.list_access.set(list); } pub fn select_only_by_key(&self, key: ListKey) { let mut list = self.list_access.get(); if !key.is_null() { list.selected_keys = vec![]; list.selected_keys.push(key); } self.list_access.set(list); } pub fn select_by_key(&self, key: ListKey) { let mut list = self.list_access.get(); if !key.is_null() { list.selected_keys.push(key); } self.list_access.set(list); } } #[derive(Clone, Default)] pub struct ListKeyDenseSlotMap<T>(pub DenseSlotMap<ListKey, T>); impl<T> ListKeyDenseSlotMap<T> { pub fn new() -> ListKeyDenseSlotMap<T> { ListKeyDenseSlotMap(DenseSlotMap::<ListKey, T>::with_key()) } } #[derive(Clone, PartialEq)] pub struct List<T> where T: Clone + 'static, { pub items_map: ListKeyDenseSlotMap<T>, pub items_order: Vec<ListKey>, pub selected_keys: Vec<ListKey>, } impl<T> PartialEq for ListKeyDenseSlotMap<T> where T: Clone, { fn eq(&self, other: &Self) -> bool { let mut self_keys = self.0.keys().collect::<Vec<ListKey>>(); let mut other_keys = other.0.keys().collect::<Vec<ListKey>>(); self_keys.sort(); other_keys.sort(); self_keys == other_keys // self.isbn == other.isbn } } impl<T> List<T> where T: Clone + 'static, { fn new(mut items: Vec<T>) -> List<T> { let mut sm = DenseSlotMap::default(); for item in items.drain(..) { sm.insert(item); } let keys = sm.keys().collect::<Vec<_>>(); List { items_map: ListKeyDenseSlotMap(sm), items_order: keys, selected_keys: vec![], } } // an iterator over all items in the list pub fn items(&self) -> impl Iterator<Item = &T> { self.items_order .iter() .filter_map(move |list_key| self.items_map.0.get(*list_key)) } // an iterator over all selected items pub fn selected(&self) -> impl Iterator<Item = &T> { let items_map = &self.items_map.0; self.selected_keys .iter() .filter_map(move |key| items_map.get(*key)) } }
true
02f82a01c9fa59137e1da5d4078da1c37288326c
Rust
Arnavion/k8s-openapi
/src/v1_24/api/core/v1/http_get_action.rs
UTF-8
11,381
2.703125
3
[ "Apache-2.0" ]
permissive
// Generated from definition io.k8s.api.core.v1.HTTPGetAction /// HTTPGetAction describes an action based on HTTP Get requests. #[derive(Clone, Debug, Default, PartialEq)] pub struct HTTPGetAction { /// Host name to connect to, defaults to the pod IP. You probably want to set "Host" in httpHeaders instead. pub host: Option<String>, /// Custom headers to set in the request. HTTP allows repeated headers. pub http_headers: Option<Vec<crate::api::core::v1::HTTPHeader>>, /// Path to access on the HTTP server. pub path: Option<String>, /// Name or number of the port to access on the container. Number must be in the range 1 to 65535. Name must be an IANA_SVC_NAME. pub port: crate::apimachinery::pkg::util::intstr::IntOrString, /// Scheme to use for connecting to the host. Defaults to HTTP. /// pub scheme: Option<String>, } impl crate::DeepMerge for HTTPGetAction { fn merge_from(&mut self, other: Self) { crate::DeepMerge::merge_from(&mut self.host, other.host); crate::merge_strategies::list::atomic(&mut self.http_headers, other.http_headers); crate::DeepMerge::merge_from(&mut self.path, other.path); crate::DeepMerge::merge_from(&mut self.port, other.port); crate::DeepMerge::merge_from(&mut self.scheme, other.scheme); } } impl<'de> crate::serde::Deserialize<'de> for HTTPGetAction { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: crate::serde::Deserializer<'de> { #[allow(non_camel_case_types)] enum Field { Key_host, Key_http_headers, Key_path, Key_port, Key_scheme, Other, } impl<'de> crate::serde::Deserialize<'de> for Field { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: crate::serde::Deserializer<'de> { struct Visitor; impl<'de> crate::serde::de::Visitor<'de> for Visitor { type Value = Field; fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.write_str("field identifier") } fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: crate::serde::de::Error { Ok(match v { "host" => Field::Key_host, 
"httpHeaders" => Field::Key_http_headers, "path" => Field::Key_path, "port" => Field::Key_port, "scheme" => Field::Key_scheme, _ => Field::Other, }) } } deserializer.deserialize_identifier(Visitor) } } struct Visitor; impl<'de> crate::serde::de::Visitor<'de> for Visitor { type Value = HTTPGetAction; fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.write_str("HTTPGetAction") } fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: crate::serde::de::MapAccess<'de> { let mut value_host: Option<String> = None; let mut value_http_headers: Option<Vec<crate::api::core::v1::HTTPHeader>> = None; let mut value_path: Option<String> = None; let mut value_port: Option<crate::apimachinery::pkg::util::intstr::IntOrString> = None; let mut value_scheme: Option<String> = None; while let Some(key) = crate::serde::de::MapAccess::next_key::<Field>(&mut map)? { match key { Field::Key_host => value_host = crate::serde::de::MapAccess::next_value(&mut map)?, Field::Key_http_headers => value_http_headers = crate::serde::de::MapAccess::next_value(&mut map)?, Field::Key_path => value_path = crate::serde::de::MapAccess::next_value(&mut map)?, Field::Key_port => value_port = crate::serde::de::MapAccess::next_value(&mut map)?, Field::Key_scheme => value_scheme = crate::serde::de::MapAccess::next_value(&mut map)?, Field::Other => { let _: crate::serde::de::IgnoredAny = crate::serde::de::MapAccess::next_value(&mut map)?; }, } } Ok(HTTPGetAction { host: value_host, http_headers: value_http_headers, path: value_path, port: value_port.unwrap_or_default(), scheme: value_scheme, }) } } deserializer.deserialize_struct( "HTTPGetAction", &[ "host", "httpHeaders", "path", "port", "scheme", ], Visitor, ) } } impl crate::serde::Serialize for HTTPGetAction { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: crate::serde::Serializer { let mut state = serializer.serialize_struct( "HTTPGetAction", 1 + self.host.as_ref().map_or(0, |_| 1) + 
self.http_headers.as_ref().map_or(0, |_| 1) + self.path.as_ref().map_or(0, |_| 1) + self.scheme.as_ref().map_or(0, |_| 1), )?; if let Some(value) = &self.host { crate::serde::ser::SerializeStruct::serialize_field(&mut state, "host", value)?; } if let Some(value) = &self.http_headers { crate::serde::ser::SerializeStruct::serialize_field(&mut state, "httpHeaders", value)?; } if let Some(value) = &self.path { crate::serde::ser::SerializeStruct::serialize_field(&mut state, "path", value)?; } crate::serde::ser::SerializeStruct::serialize_field(&mut state, "port", &self.port)?; if let Some(value) = &self.scheme { crate::serde::ser::SerializeStruct::serialize_field(&mut state, "scheme", value)?; } crate::serde::ser::SerializeStruct::end(state) } } #[cfg(feature = "schemars")] impl crate::schemars::JsonSchema for HTTPGetAction { fn schema_name() -> String { "io.k8s.api.core.v1.HTTPGetAction".to_owned() } fn json_schema(__gen: &mut crate::schemars::gen::SchemaGenerator) -> crate::schemars::schema::Schema { crate::schemars::schema::Schema::Object(crate::schemars::schema::SchemaObject { metadata: Some(Box::new(crate::schemars::schema::Metadata { description: Some("HTTPGetAction describes an action based on HTTP Get requests.".to_owned()), ..Default::default() })), instance_type: Some(crate::schemars::schema::SingleOrVec::Single(Box::new(crate::schemars::schema::InstanceType::Object))), object: Some(Box::new(crate::schemars::schema::ObjectValidation { properties: [ ( "host".to_owned(), crate::schemars::schema::Schema::Object(crate::schemars::schema::SchemaObject { metadata: Some(Box::new(crate::schemars::schema::Metadata { description: Some("Host name to connect to, defaults to the pod IP. 
You probably want to set \"Host\" in httpHeaders instead.".to_owned()), ..Default::default() })), instance_type: Some(crate::schemars::schema::SingleOrVec::Single(Box::new(crate::schemars::schema::InstanceType::String))), ..Default::default() }), ), ( "httpHeaders".to_owned(), crate::schemars::schema::Schema::Object(crate::schemars::schema::SchemaObject { metadata: Some(Box::new(crate::schemars::schema::Metadata { description: Some("Custom headers to set in the request. HTTP allows repeated headers.".to_owned()), ..Default::default() })), instance_type: Some(crate::schemars::schema::SingleOrVec::Single(Box::new(crate::schemars::schema::InstanceType::Array))), array: Some(Box::new(crate::schemars::schema::ArrayValidation { items: Some(crate::schemars::schema::SingleOrVec::Single(Box::new(__gen.subschema_for::<crate::api::core::v1::HTTPHeader>()))), ..Default::default() })), ..Default::default() }), ), ( "path".to_owned(), crate::schemars::schema::Schema::Object(crate::schemars::schema::SchemaObject { metadata: Some(Box::new(crate::schemars::schema::Metadata { description: Some("Path to access on the HTTP server.".to_owned()), ..Default::default() })), instance_type: Some(crate::schemars::schema::SingleOrVec::Single(Box::new(crate::schemars::schema::InstanceType::String))), ..Default::default() }), ), ( "port".to_owned(), { let mut schema_obj = __gen.subschema_for::<crate::apimachinery::pkg::util::intstr::IntOrString>().into_object(); schema_obj.metadata = Some(Box::new(crate::schemars::schema::Metadata { description: Some("Name or number of the port to access on the container. Number must be in the range 1 to 65535. Name must be an IANA_SVC_NAME.".to_owned()), ..Default::default() })); crate::schemars::schema::Schema::Object(schema_obj) }, ), ( "scheme".to_owned(), crate::schemars::schema::Schema::Object(crate::schemars::schema::SchemaObject { metadata: Some(Box::new(crate::schemars::schema::Metadata { description: Some("Scheme to use for connecting to the host. 
Defaults to HTTP.\n\n".to_owned()), ..Default::default() })), instance_type: Some(crate::schemars::schema::SingleOrVec::Single(Box::new(crate::schemars::schema::InstanceType::String))), ..Default::default() }), ), ].into(), required: [ "port".to_owned(), ].into(), ..Default::default() })), ..Default::default() }) } }
true
d756985b193c60a7885ca309c0d6c01bbcdff58f
Rust
redsnapper2006/leetcode-cn
/C/2315-count-asterisks/main.rs
UTF-8
285
3.046875
3
[]
no_license
struct Solution {} impl Solution { pub fn count_asterisks(s: String) -> i32 { let mut ret: i32 = 0; let mut cnt: i32 = 0; for b in s.chars() { if b == '|' { cnt += 1; } else if b == '*' && cnt % 2 == 0 { ret += 1; } } ret } }
true
f37b3a800a17d90775f0a5b1f2c7b8776ce75572
Rust
AndrewGrim/NoteMaker
/src/debug.rs
UTF-8
909
2.9375
3
[ "MIT" ]
permissive
#![allow(dead_code)] pub fn info(text: &str) { let color = "\x1b[94m"; let end = "\x1b[0m"; println!("{}{}{}", color, text, end); } pub fn ok(text: &str) { let color = "\x1b[96m"; let end = "\x1b[0m"; println!("{}{}{}", color, text, end); } pub fn success(text: &str) { let color = "\x1b[92m"; let end = "\x1b[0m"; println!("{}{}{}", color, text, end); } pub fn debug(text: &str) { let color = "\x1b[95m"; let end = "\x1b[0m"; println!("{}{}{}", color, text, end); } pub fn warn(text: &str) { let color = "\x1b[93m"; let end = "\x1b[0m"; println!("{}{}{}", color, text, end); } pub fn fail(text: &str) { let color = "\x1b[91m"; let end = "\x1b[0m"; println!("{}{}{}", color, text, end); } pub fn test() { ok("ok()"); info("info()"); success("success()"); debug("debug()"); warn("warn()"); fail("fail()"); }
true
b08ba0b277a6e1a37a28dd0552f1d668f32e43df
Rust
luketchang/Rust-HTTP-Server
/src/main.rs
UTF-8
704
2.78125
3
[]
no_license
#![allow(dead_code)] use std::env; use server::Server; use site_handler::SiteHandler; mod http; mod server; mod site_handler; /* Function: main * ______________ * - gets default path as the project's director with /public appended * - looks for another file path in as user environment variable and defaults to default_path * if not found * - creates new server instance and runs it */ fn main() { let default_path = format!("{}/public", env!("CARGO_MANIFEST_DIR")); let public_path = env::var("PUBLIC_PATH").unwrap_or(default_path); println!("Path: {}", public_path); let server = Server::new("127.0.0.1:8080".to_string()); server.run(SiteHandler::new(public_path)); }
true
bf02ebfc7fb54cc9fbceddd49afc756a5e8b5801
Rust
alexcrichton/rust-central-station
/run-on-change/src/main.rs
UTF-8
2,096
3.015625
3
[]
no_license
use sha1::Sha1; use std::path::{Path, PathBuf}; use std::error::Error; static CACHE_PATH: &str = "/tmp/run-on-change"; fn cached_path(url: &str) -> PathBuf { Path::new(CACHE_PATH).join(Sha1::from(url).digest().to_string()) } fn cached_hash(url: &str) -> Result<Option<String>, Box<Error>> { let path = cached_path(url); if path.exists() { Ok(Some(std::fs::read_to_string(&path)?.trim().into())) } else { Ok(None) } } fn fetch_url_hash(url: &str) -> Result<String, Box<Error>> { let mut hash = Sha1::new(); let mut easy = curl::easy::Easy::new(); easy.url(url)?; easy.useragent("rust-lang infra tooling (https://github.com/rust-lang/rust-central-station)")?; { let mut transfer = easy.transfer(); transfer.write_function(|data| { hash.update(data); Ok(data.len()) })?; transfer.perform()?; } if easy.response_code()? != 200 { Err(format!("request to {} returned status code {}", url, easy.response_code()?).into()) } else { Ok(hash.digest().to_string()) } } fn main() -> Result<(), Box<Error>> { let args = std::env::args().collect::<Vec<_>>(); if args.len() < 3 { eprintln!("usage: {} <url> <command ...>", args[0]); std::process::exit(1); } let url = &args[1]; let url_hash = fetch_url_hash(url)?; if cached_hash(url)?.as_ref().map(|hash| hash.as_str()) != Some(&url_hash) { let status = std::process::Command::new(&args[2]) .args(&args[3..]) .status()?; if status.success() { let path = cached_path(url); if let Some(parent) = path.parent() { if !parent.exists() { std::fs::create_dir_all(&parent)?; } } std::fs::write(&cached_path(url), format!("{}\n", url_hash).as_bytes())?; } else { std::process::exit(status.code().unwrap_or(1)); } } else { eprintln!("content at {} didn't change, aborting", url); } Ok(()) }
true
373aede2f888821a9fe33a86a67a884341d9d826
Rust
TakeZNt/bitonic-sorter
/src/first.rs
UTF-8
3,144
4.0625
4
[]
no_license
/// 配列をソートする /// # 引数 /// - array : 配列。ただし、要素数は2^nでなければならない /// - asc : 昇順の場合true、降順の場合false pub fn sort(array: &mut [u32], asc: bool) { if array.len() <= 1 { return; } // バイトニック列を作成する let mid = array.len() / 2; sort(&mut array[..mid], true); // 前半を昇順でソート sort(&mut array[mid..], false); // 後半を降順でソート sub_sort(array, asc); } /// バイトニック列をソートする /// # 引数 /// - bitonic_array : バイトニック列 /// - asc : 昇順の場合true、降順の場合false fn sub_sort(bitonic_array: &mut [u32], asc: bool) { if bitonic_array.len() <= 1 { return; } // 比較&入れ替えによって並び順をascに近づける(ソートは不完全。半分にするとそれぞれがバイトニック列になる) compare_and_swap(bitonic_array, asc); let mid = bitonic_array.len() / 2; sub_sort(&mut bitonic_array[..mid], asc); sub_sort(&mut bitonic_array[mid..], asc); } /// 各要素を要素数n / 2だけ右の要素と比較し、昇順か降順かに応じて並べ替える /// 並び替えの結果、配列を半分に分けるとそれぞれがバイトニック列になる /// # 引数 /// - array : 並び替え対象の配列 /// - asc : 昇順の場合true、降順の場合false fn compare_and_swap(array: &mut [u32], asc: bool) { let mid = array.len() / 2; for i in 0..mid { if asc { // 昇順の場合 if array[i] > array[i + mid] { array.swap(i, i + mid); } } else { // 降順の場合 if array[i] < array[i + mid] { array.swap(i, i + mid); } } } } #[cfg(test)] mod tests { use super::sort; // 昇順ソートのテスト #[test] fn sort_u32_ascending() { let mut array = vec![10, 30, 11, 20, 4, 330, 21, 110]; sort(&mut array, true); assert_eq!(array, vec![4, 10, 11, 20, 21, 30, 110, 330]); } // 降順ソートのテスト #[test] fn sort_u32_descending() { let mut array = vec![10, 30, 11, 20, 4, 330, 21, 110]; sort(&mut array, false); assert_eq!(array, vec![330, 110, 30, 21, 20, 11, 10, 4]); } // 要素が0個(昇順) #[test] fn sort_ascending_zero() { let mut array = vec![]; sort(&mut array, true); assert_eq!(array, vec![]); } // 要素が0個(降順) #[test] fn sort_descending_zero() { let mut array = vec![]; sort(&mut array, false); assert_eq!(array, vec![]); } // 要素が1個(昇順) #[test] fn sort_ascending_one() { let mut array = vec![10]; sort(&mut array, true); assert_eq!(array, vec![10]); } // 要素が1個(降順) #[test] fn sort_descending_one() { let mut array = vec![45]; 
sort(&mut array, false); assert_eq!(array, vec![45]); } }
true
243d41369bc3391fe3f009dd3ce713f3ebd6542e
Rust
megascrapper/rsgames
/src/gladiator_game/army.rs
UTF-8
3,918
3.359375
3
[]
no_license
use std::collections::VecDeque; use std::io; use crate::gladiator_game::fighter::{Archer, Cavalry, Fighter, Soldier}; enum ArmyFormation<'a> { Stack(Vec<Box<dyn Fighter + 'a>>), Queue(VecDeque<Box<dyn Fighter + 'a>>), } pub enum FormationType { Stack, Queue, } pub struct Army<'a> { name: String, budget: i32, force: ArmyFormation<'a>, } impl<'a> Army<'a> { pub fn new(name: String, budget: i32, formation: &FormationType) -> Self { loop { let mut input_string = String::new(); println!("Player {} choose your army as S A C", name); println!("where S is the number of soldiers\nA is the number of archers\nC is the number of cavalries"); io::stdin().read_line(&mut input_string).expect("Cannot read line"); let input_list: Vec<&str> = input_string.trim().split_whitespace().collect(); if input_list.len() != 3 { eprintln!("Error: invalid format"); continue; } let s = match input_list[0].parse::<i32>() { Ok(n) => n, Err(_) => continue, }; let a = match input_list[1].parse::<i32>() { Ok(n) => n, Err(_) => continue, }; let c = match input_list[2].parse::<i32>() { Ok(n) => n, Err(_) => continue, }; if Self::correct_army_given(budget, s, a, c) { return Army { name, budget, force: Self::assign_army(s, a, c, formation), }; } } } pub fn name(&self) -> &String { &self.name } pub fn is_empty(&self) -> bool { match &self.force { ArmyFormation::Stack(s) => s.is_empty(), ArmyFormation::Queue(q) => q.is_empty(), } } pub fn push_unit(&mut self, unit: Box<dyn Fighter + 'a>) { match &mut self.force { ArmyFormation::Stack(s) => s.push(unit), ArmyFormation::Queue(q) => q.push_back(unit), } } pub fn pop_unit(&mut self) -> Option<Box<dyn Fighter + 'a>> { match &mut self.force { ArmyFormation::Stack(s) => s.pop(), ArmyFormation::Queue(q) => q.pop_front(), } } fn correct_army_given(budget: i32, soldiers: i32, archers: i32, cavalry: i32) -> bool { if soldiers < 0 || archers < 0 || cavalry < 0 { false } else { (1 * soldiers + 2 * archers + 3 * cavalry) <= budget } } fn assign_army(sold: i32, arch: 
i32, cav: i32, formation_type: &FormationType) -> ArmyFormation<'a> { let len = (sold + arch + cav) as usize; match formation_type { FormationType::Stack => { // Stack ordering from bottom: cavalry, archer, soldier let mut s: Vec<Box<dyn Fighter>> = Vec::with_capacity(len); for _ in 0..cav { s.push(Box::new(Cavalry::new())); } for _ in 0..arch { s.push(Box::new(Archer::new())); } for _ in 0..sold { s.push(Box::new(Soldier::new())); } ArmyFormation::Stack(s) }, FormationType::Queue => { let mut q: VecDeque<Box<dyn Fighter>> = VecDeque::with_capacity(len); for _ in 0..sold { q.push_back(Box::new(Soldier::new())); } for _ in 0..arch { q.push_back(Box::new(Archer::new())); } for _ in 0..cav { q.push_back(Box::new(Cavalry::new())); } ArmyFormation::Queue(q) } } } }
true
aa9dd4c8a642beb07a46552e4cb7d522e7976c1e
Rust
Syfaro/advent-of-code-rs
/src/bin/2015-02.rs
UTF-8
4,345
3.625
4
[]
no_license
const PROBLEM_NAME: &str = "2015-02"; /// A package's three dimensions. #[derive(Clone, Debug, PartialEq)] struct Package { length: i32, width: i32, height: i32, } #[cfg(test)] impl Package { fn new(length: i32, width: i32, height: i32) -> Self { Self { length, width, height, } } } impl Package { /// Calculate the amount of wrapping paper required for the package. fn wrapping_paper(&self) -> i32 { // Calculate base area of the paper needed. let area = (2 * self.length * self.width) + (2 * self.width * self.height) + (2 * self.height * self.length); // Calculate each side to find the side with the least area. let smallest = vec![ self.length * self.width, self.width * self.height, self.height * self.length, ] .into_iter() .min() .unwrap(); area + smallest } /// Calculate the amount of ribbon needed for the package. fn ribbon(&self) -> i32 { // Sort the dimensions to find the two smallest edges. let mut faces = vec![self.length, self.width, self.height]; faces.sort(); // These will always have 3 values. let ribbon = faces[0] * 2 + faces[1] * 2; let bow: i32 = faces.iter().product(); ribbon + bow } } fn main() { advent_of_code::init(); let input = advent_of_code::load_input(PROBLEM_NAME); let packages = decode_all_packages(&input); let total_paper = total_wrapping_paper(&packages); log::info!("Part 1 = {}", total_paper); let total_ribbon = total_ribbon(&packages); log::info!("Part 2 = {}", total_ribbon); } /// Decode each line into a package. fn decode_all_packages(input: &str) -> Vec<Package> { input.lines().map(decode_package).collect() } /// Calculate total amount of wrapping paper needed. fn total_wrapping_paper(packages: &[Package]) -> i32 { packages.iter().map(Package::wrapping_paper).sum() } /// Calculate total amount of ribbon needed. fn total_ribbon(packages: &[Package]) -> i32 { packages.iter().map(Package::ribbon).sum() } /// Decode a Package from a a length x width x height formatted line. 
/// /// # Panics /// /// Will panic if there are not 3 numeric dimensions separated by an 'x' on each /// line. fn decode_package(line: &str) -> Package { let dimensions: Vec<&str> = line.split('x').collect(); assert_eq!(dimensions.len(), 3, "Package must have 3 dimensions"); let dimensions: Vec<i32> = dimensions .into_iter() .map(|dimension| { dimension .parse() .expect("All package dimensions must be numbers") }) .collect(); // We know there are three dimensions in here because of previous assertion. Package { length: dimensions[0], width: dimensions[1], height: dimensions[2], } } #[cfg(test)] mod tests { use super::*; #[test] fn test_decode_all_packages() { let input = "2x3x4\n1x1x10\n"; let packages = decode_all_packages(&input); assert_eq!( packages, vec![Package::new(2, 3, 4), Package::new(1, 1, 10)] ); } #[test] fn test_total_wrapping_paper() { let packages = &[Package::new(2, 3, 4), Package::new(1, 1, 10)]; let total_paper = total_wrapping_paper(packages); assert_eq!(total_paper, 101); } #[test] fn test_total_ribbon() { let packages = &[Package::new(2, 3, 4), Package::new(1, 1, 10)]; let total_ribbon = total_ribbon(packages); assert_eq!(total_ribbon, 48); } #[test] fn test_decode_package() { let package = decode_package("1x2x3"); assert_eq!(package, Package::new(1, 2, 3)); } #[test] fn test_package_wrapping_paper() { let paper = Package::new(2, 3, 4).wrapping_paper(); assert_eq!(paper, 58); let paper = Package::new(1, 1, 10).wrapping_paper(); assert_eq!(paper, 43); } #[test] fn test_package_ribbon() { let ribbon = Package::new(2, 3, 4).ribbon(); assert_eq!(ribbon, 34); let ribbon = Package::new(1, 1, 10).ribbon(); assert_eq!(ribbon, 14); } }
true
a3248007712499692dd71ca22c79fc0b9cfeb433
Rust
emilio/rkv
/src/store/integermulti.rs
UTF-8
4,515
2.703125
3
[ "Apache-2.0" ]
permissive
// Copyright 2018 Mozilla // // Licensed under the Apache License, Version 2.0 (the "License"); you may not use // this file except in compliance with the License. You may obtain a copy of the // License at http://www.apache.org/licenses/LICENSE-2.0 // Unless required by applicable law or agreed to in writing, software distributed // under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR // CONDITIONS OF ANY KIND, either express or implied. See the License for the // specific language governing permissions and limitations under the License. use lmdb::{ Database, WriteFlags, }; use std::marker::PhantomData; use crate::error::StoreError; use crate::readwrite::{ Readable, Writer, }; use crate::value::Value; use crate::store::multi::{ Iter, MultiStore, }; use crate::store::integer::{ Key, PrimitiveInt, }; pub struct MultiIntegerStore<K> where K: PrimitiveInt, { inner: MultiStore, phantom: PhantomData<K>, } impl<K> MultiIntegerStore<K> where K: PrimitiveInt, { pub(crate) fn new(db: Database) -> MultiIntegerStore<K> { MultiIntegerStore { inner: MultiStore::new(db), phantom: PhantomData, } } pub fn get<'env, T: Readable>(&self, reader: &'env T, k: K) -> Result<Iter<'env>, StoreError> { self.inner.get(reader, Key::new(&k)?) } pub fn get_first<'env, T: Readable>(&self, reader: &'env T, k: K) -> Result<Option<Value<'env>>, StoreError> { self.inner.get_first(reader, Key::new(&k)?) } pub fn put(&self, writer: &mut Writer, k: K, v: &Value) -> Result<(), StoreError> { self.inner.put(writer, Key::new(&k)?, v) } pub fn put_with_flags(&self, writer: &mut Writer, k: K, v: &Value, flags: WriteFlags) -> Result<(), StoreError> { self.inner.put_with_flags(writer, Key::new(&k)?, v, flags) } pub fn delete_all(&self, writer: &mut Writer, k: K) -> Result<(), StoreError> { self.inner.delete_all(writer, Key::new(&k)?) 
} pub fn delete(&self, writer: &mut Writer, k: K, v: &Value) -> Result<(), StoreError> { self.inner.delete(writer, Key::new(&k)?, v) } pub fn clear(&self, writer: &mut Writer) -> Result<(), StoreError> { self.inner.clear(writer) } } #[cfg(test)] mod tests { extern crate tempfile; use self::tempfile::Builder; use std::fs; use super::*; use crate::*; #[test] fn test_integer_keys() { let root = Builder::new().prefix("test_integer_keys").tempdir().expect("tempdir"); fs::create_dir_all(root.path()).expect("dir created"); let k = Rkv::new(root.path()).expect("new succeeded"); let s = k.open_multi_integer("s", StoreOptions::create()).expect("open"); macro_rules! test_integer_keys { ($type:ty, $key:expr) => {{ let mut writer = k.write().expect("writer"); s.put(&mut writer, $key, &Value::Str("hello!")).expect("write"); assert_eq!(s.get_first(&writer, $key).expect("read"), Some(Value::Str("hello!"))); writer.commit().expect("committed"); let reader = k.read().expect("reader"); assert_eq!(s.get_first(&reader, $key).expect("read"), Some(Value::Str("hello!"))); }}; } test_integer_keys!(u32, std::u32::MIN); test_integer_keys!(u32, std::u32::MAX); } #[test] fn test_clear() { let root = Builder::new().prefix("test_multi_integer_clear").tempdir().expect("tempdir"); fs::create_dir_all(root.path()).expect("dir created"); let k = Rkv::new(root.path()).expect("new succeeded"); let s = k.open_multi_integer("s", StoreOptions::create()).expect("open"); { let mut writer = k.write().expect("writer"); s.put(&mut writer, 1, &Value::Str("hello!")).expect("write"); s.put(&mut writer, 1, &Value::Str("hello1!")).expect("write"); s.put(&mut writer, 2, &Value::Str("hello!")).expect("write"); writer.commit().expect("committed"); } { let mut writer = k.write().expect("writer"); s.clear(&mut writer).expect("cleared"); writer.commit().expect("committed"); let reader = k.read().expect("reader"); assert_eq!(s.get_first(&reader, 1).expect("read"), None); assert_eq!(s.get_first(&reader, 2).expect("read"), 
None); } } }
true
92b2761724d7a3fa5e1e84d422d986ba57c5e8e5
Rust
felixmc/gerrit-cli
/src/gerrit.rs
UTF-8
4,888
2.515625
3
[]
no_license
use std::env; // use std::io::Read; // use ansi_term::Colour::{Red, Green}; use serde_json; use serde_json::*; use curl; pub enum ReviewResult { Rejected, Approved, Disliked, Liked, Neutral, } impl ReviewResult { pub fn value (&self) -> &str { match *self { ReviewResult::Rejected => "rejected", ReviewResult::Disliked => "disliked", ReviewResult::Liked => "liked", ReviewResult::Approved => "approved", ReviewResult::Neutral => "neutral", } } } pub struct ChangeReview { pub author: String, pub result: ReviewResult } impl ChangeReview { fn parse_json (json: &Value) -> Option<ChangeReview> { let (review, review_type) = vec![ ReviewResult::Rejected, ReviewResult::Disliked, ReviewResult::Liked, ReviewResult::Approved, ] .into_iter() .fold((None, ReviewResult::Neutral), |(found_review, prev_key), cur_key| match found_review { None => (json.get(cur_key.value()), cur_key), _ => (found_review, prev_key) }); review.map(|json| ChangeReview { author: json["username"].as_str().unwrap().to_owned(), result: review_type, }) } } pub struct ChangeStatus { pub project: String, pub subject: String, pub owner: String, pub number: String, pub change_id: String, pub insertions: usize, pub deletions: usize, pub code_review: Option<ChangeReview>, pub qa_review: Option<ChangeReview>, pub product_review: Option<ChangeReview>, pub lint_review: Option<ChangeReview>, pub build_review: Option<ChangeReview>, pub can_merge: bool, pub is_merged: bool, pub has_conflict: bool, } impl ChangeStatus { pub fn parse_json (json: &Value) -> ChangeStatus { ChangeStatus { project: json["project"].as_str().unwrap().to_owned(), subject: json["subject"].as_str().unwrap().to_owned(), owner: json["owner"]["username"].as_str().unwrap().to_owned(), number: json["_number"].as_u64().unwrap().to_string(), change_id: json["change_id"].as_str().unwrap().to_owned(), insertions: json["insertions"].as_u64().unwrap() as usize, deletions: json["deletions"].as_u64().unwrap() as usize, code_review: 
ChangeReview::parse_json(&json["labels"]["Non-Author-Review"]), qa_review: ChangeReview::parse_json(&json["labels"]["QA-Review"]), product_review: ChangeReview::parse_json(&json["labels"]["Code-Review"]), lint_review: ChangeReview::parse_json(&json["labels"]["Lint-Review"]), build_review: ChangeReview::parse_json(&json["labels"]["Verified"]), can_merge: json["submittable"].as_bool().unwrap_or(false), is_merged: json["status"].as_str().unwrap() == "MERGED", has_conflict: !json["mergeable"].as_bool().unwrap_or(false), } } } pub struct Gerrit { user: String, pword: String, url: String, } impl Gerrit { pub fn new () -> Gerrit { Gerrit { user: env::var("GERRIT_USER").expect("Missing GERRIT_USER in env"), pword: env::var("GERRIT_PWD").expect("Missing GERRIT_PWD in env"), url: env::var("GERRIT_URL").expect("Missing GERRIT_URL in env"), } } fn get (&self, path: &str) -> Value { let url = format!("{}{}", &self.url, path); let user_pass = format!("{}:{}", self.user, self.pword); match curl::get(&url, vec!["-u", &user_pass]) { Ok(output) => match output.is_unauthorized() { true => panic!("Unauthorized response from gerrit. is your HTTP password up to date?"), false => match serde_json::from_str(&output.body_for_json()) { Ok(json) => json, Err(json_err) => { #[cfg(debug_assertions)] println!("URL: {0} \nJSON: {1}\n", path, output.body); panic!("bad json from gerrit: {}", json_err) } }, }, Err(curl_err) => panic!("cannot reach gerrit: {}", curl_err) } } pub fn get_change (&self, change_id: &str) -> ChangeStatus { let json_data = self.get(&format!("a/changes/{}?o=LABELS&o=DETAILED_ACCOUNTS", change_id)); ChangeStatus::parse_json(&json_data) } pub fn get_my_changes (&self) -> Vec<ChangeStatus> { let changes = self.get(&format!("a/changes/?q=owner:self+status:open&o=LABELS&o=DETAILED_ACCOUNTS")); changes.as_array().unwrap().iter().map(|change_json| { // println!("{:?}\n\n", change_json); ChangeStatus::parse_json(change_json) }).collect() } }
true
abcec4de682ddba6515c10253531a14f22b7c60c
Rust
alex/abscissa
/core/src/application/lock/reader.rs
UTF-8
745
3.234375
3
[ "Unlicense", "MPL-2.0", "MIT", "Apache-2.0" ]
permissive
//! Mutex guard for immutably accessing global application state use super::Application; use std::{ops::Deref, sync::RwLockReadGuard}; /// Generic `RwLockWriteGuard` for a `'static` lifetime. pub(crate) type Guard<T> = RwLockReadGuard<'static, T>; /// Wrapper around a `RwLockReadGuard` for reading global application state. pub struct Reader<A>(Guard<A>) where A: 'static + Application; impl<A> Reader<A> where A: 'static + Application, { /// Create wrapper around a read-only application mutex guard pub(super) fn new(guard: Guard<A>) -> Self { Reader(guard) } } impl<A> Deref for Reader<A> where A: 'static + Application, { type Target = A; fn deref(&self) -> &A { self.0.deref() } }
true
589de5dd7ff3dd4c1f0f3226b0d1b477643284a6
Rust
Microsvuln/IRL
/src/lang/util.rs
UTF-8
3,515
3.34375
3
[]
no_license
use std::cell::{Ref, RefCell, RefMut}; use std::cmp::Ordering; use std::collections::HashSet; use std::hash::{Hash, Hasher}; use std::iter::FromIterator; use std::ops::Deref; use std::rc::Rc; /// A auxiliary structure to make `Rc` act like pointer. /// The extended behavior include pointer-equality testing and hash. pub struct ExtRc<T>(pub Rc<T>); impl<T> ExtRc<T> { pub fn new(e: T) -> Self { ExtRc(Rc::new(e)) } } impl<T> PartialEq for ExtRc<T> { fn eq(&self, other: &Self) -> bool { Rc::ptr_eq(&self.0, &other.0) } } impl<T> Eq for ExtRc<T> {} impl<T> PartialOrd for ExtRc<T> { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { (self.0.as_ref() as *const T).partial_cmp(&(other.as_ref() as *const T)) } } impl<T> Ord for ExtRc<T> { fn cmp(&self, other: &Self) -> Ordering { (self.0.as_ref() as *const T).cmp(&(other.as_ref() as *const T)) } } impl<T> Hash for ExtRc<T> { fn hash<H: Hasher>(&self, state: &mut H) { (self.0.as_ref() as *const T).hash(state) } } impl<T> AsRef<T> for ExtRc<T> { fn as_ref(&self) -> &T { self.0.as_ref() } } impl<T> Deref for ExtRc<T> { type Target = T; fn deref(&self) -> &Self::Target { self.0.deref() } } impl<T> Clone for ExtRc<T> { fn clone(&self) -> Self { ExtRc(self.0.clone()) } } /// Extended reference counting with interior mutability pub struct MutRc<T>(pub Rc<RefCell<T>>); impl<T> MutRc<T> { pub fn new(e: T) -> Self { MutRc(Rc::new(RefCell::new(e))) } } impl<T> PartialEq for MutRc<T> { fn eq(&self, other: &Self) -> bool { Rc::ptr_eq(&self.0, &other.0) } } impl<T> Eq for MutRc<T> {} impl<T> PartialOrd for MutRc<T> { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { (self.borrow().deref() as *const T).partial_cmp(&(other.borrow().deref() as *const T)) } } impl<T> Ord for MutRc<T> { fn cmp(&self, other: &Self) -> Ordering { (self.borrow().deref() as *const T).cmp(&(other.borrow().deref() as *const T)) } } impl<T> Hash for MutRc<T> { fn hash<H: Hasher>(&self, state: &mut H) { (self.0.as_ref() as *const RefCell<T>).hash(state) 
} } impl<T> Clone for MutRc<T> { fn clone(&self) -> Self { MutRc(self.0.clone()) } } impl<T> MutRc<T> { pub fn borrow(&self) -> Ref<T> { self.0.deref().borrow() } pub fn borrow_mut(&self) -> RefMut<T> { self.0.deref().borrow_mut() } } /// Encapsulation of `HashSet` to aid work list algorithms /// A work list must allow quick testing of membership and quick extraction of an element, /// regardless of which element is moved. #[derive(Debug)] pub struct WorkList<T> where T: Eq + Hash + Clone { set: HashSet<T> } impl<T> FromIterator<T> for WorkList<T> where T: Eq + Hash + Clone { fn from_iter<I>(iter: I) -> Self where I: IntoIterator<Item=T> { WorkList { set: HashSet::from_iter(iter) } } } impl<T> WorkList<T> where T: Eq + Hash + Clone { pub fn new() -> WorkList<T> { WorkList { set: Default::default() } } pub fn insert(&mut self, item: T) { self.set.insert(item); } pub fn append<I>(&mut self, iter: I) where I: Iterator<Item=T> { iter.for_each(|e| self.insert(e)) } pub fn pick(&mut self) -> Option<T> { let mut elem = None; for x in self.set.iter() { elem = Some(x.clone()); break } elem.map(|e| { self.set.remove(&e); e }) } pub fn is_empty(&self) -> bool { self.set.is_empty() } }
true
afe0b5d35ca54712da307324ff9660cc75bd5095
Rust
kubo/rosy
/src/mixin/class/classify.rs
UTF-8
1,110
2.71875
3
[ "MIT", "Apache-2.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
use crate::{ prelude::*, string::Encoding, vm::InstrSeq, }; /// A type that can be instantiated from a typed `Class` instance. pub trait Classify: Object { /// Returns the typed class that can be used to get an instance of `self`. fn class() -> Class<Self>; } macro_rules! impl_trait { ($($t:ty, $c:ident ;)+) => { $( impl Classify for $t { #[inline] fn class() -> Class<Self> { unsafe { Class::cast_unchecked(Class::$c()) } } } )+ }; } impl<O: Object> Classify for Array<O> { #[inline] fn class() -> Class<Self> { unsafe { Class::cast_unchecked(Class::array()) } } } impl<K: Object, V: Object> Classify for Hash<K, V> { #[inline] fn class() -> Class<Self> { unsafe { Class::cast_unchecked(Class::hash()) } } } impl_trait! { AnyObject, object; Class, class; Module, module; Integer, integer; String, string; Symbol, symbol; Encoding, encoding; AnyException, exception; InstrSeq, instr_seq; }
true
8be1ae2b1679034b41684a9f489131af04e10fc8
Rust
hpatjens/hmath
/src/vector.rs
UTF-8
9,642
3.0625
3
[]
no_license
use std::{ ops::{Add,AddAssign,Sub,SubAssign,Mul,MulAssign,Div,DivAssign,Neg}, mem, }; use crate::traits::*; use num_traits::Signed; use serde::{Serialize, Deserialize}; pub use float_cmp::{Ulps,ApproxEq}; macro_rules! implement_vector { ($type:ident { dim: $dim:expr, elems: { $($num:expr => $member:ident),+ } }) => { // // DEFINE THE TYPE // #[derive(Clone, Copy, Debug, PartialEq, PartialOrd, Serialize, Deserialize)] pub struct $type<T> where T: Base { $(pub $member: T),* } // // IMPLEMENTATION WHEN BASE TRAIT // impl<T: Base> $type<T> { pub fn new($($member: T),*) -> Self { $type { $($member: $member),* } } #[inline] pub fn dims() -> usize { ($({let $member = 1; $member} +)* 0) } #[inline] pub fn zero() -> Self { $type { $($member: T::zero()),* } } #[inline] pub fn one() -> Self { $type { $($member: T::one()),* } } // TODO(henk): Explain that one can use .as_array().clone() to create a new array. #[inline] pub fn as_array(&self) -> &[T; $dim] { unsafe { mem::transmute(self) } } } // // IMPLEMENTATION WHEN BASEFLOAT TRAIT // impl<T> $type<T> where T: BaseFloat { #[inline] pub fn length(&self) -> T where T: AddAssign + Mul<Output=T> { let mut sum = T::zero(); $(sum += self.$member * self.$member;)* T::sqrt(sum) } #[inline] pub fn normalize(&self) -> $type<T> where T: Div<Output=T> + Mul<Output=T> { self / self.length() } } // // DEFAULT TRAIT // impl<T> Default for $type<T> where T: Base { fn default() -> Self { $type::new( $({ let $member = T::zero(); $member }),* ) } } // -------------------------------------------------------------------------- // Elem Trait // -------------------------------------------------------------------------- impl<T: Base> Elem<usize> for $type<T> { type Output = T; #[inline] fn elem(self, index: usize) -> Self::Output { match index { $($num => self.$member,)* _ => panic!("index out of range") } } } impl<'a, T: Base> Elem<usize> for &'a $type<T> { type Output = &'a T; #[inline] fn elem(self, index: usize) -> Self::Output { match index { 
$($num => &self.$member,)* _ => panic!("index out of range") } } } impl<'a, T: Base> Elem<usize> for &'a mut $type<T> { type Output = &'a mut T; #[inline] fn elem(self, index: usize) -> Self::Output { match index { $($num => &mut self.$member,)* _ => panic!("index out of range") } } } impl<T: BaseFloat> ApproxEq for $type<T> where T: ApproxEq<Flt=T> { type Flt = T; #[inline] fn approx_eq(&self, other: &Self, epsilon: T, ulps: <T as Ulps>::U) -> bool { $(self.$member.approx_eq(&other.$member, epsilon, ulps))&&* } } // // IMPLEMENT BINARY OPERATORS // // v + v implement_binary_operator!(Add<$type<T>> for $type<T>, fn add(lhs, rhs) -> $type<T> { $type::new( $(lhs.$member + rhs.$member),* ) } ); // v += v implement_binary_assign_operator!(AddAssign<$type<T>> for $type<T>, fn add_assign(lhs, rhs) {{ $(lhs.$member += rhs.$member;)* }} ); // v - v implement_binary_operator!(Sub<$type<T>> for $type<T>, fn sub(lhs, rhs) -> $type<T> { $type::new( $(lhs.$member - rhs.$member),* ) } ); // v -= v implement_binary_assign_operator!(SubAssign<$type<T>> for $type<T>, fn sub_assign(lhs, rhs) {{ $(lhs.$member -= rhs.$member;)* }} ); // v * s implement_binary_operator!(Mul<T> for $type<T>, fn mul(vector, scalar) -> $type<T> { $type::new( $(vector.$member * scalar),* ) } ); // v *= s implement_binary_assign_operator!(MulAssign<T> for $type<T>, fn mul_assign(vector, scalar) {{ $(vector.$member *= scalar;)* }} ); // v * v implement_binary_operator!(Mul<$type<T>> for $type<T>, fn mul(lhs, rhs) -> $type<T> { $type::new( $(lhs.$member * rhs.$member),* ) } ); // v *= v implement_binary_assign_operator!(MulAssign<$type<T>> for $type<T>, fn mul_assign(lhs, rhs) {{ $(lhs.$member *= rhs.$member;)* }} ); // v / s implement_binary_operator!(Div<T> for $type<T>, fn div(vector, scalar) -> $type<T> { $type::new( $(vector.$member / scalar),* ) } ); // v /= s implement_binary_assign_operator!(DivAssign<T> for $type<T>, fn div_assign(vector, scalar) {{ $(vector.$member /= scalar;)* }} ); // v / v 
implement_binary_operator!(Div<$type<T>> for $type<T>, fn div(lhs, rhs) -> $type<T> { $type::new( $(lhs.$member / rhs.$member),* ) } ); // v /= s implement_binary_assign_operator!(DivAssign<$type<T>> for $type<T>, fn div_assign(lhs, rhs) {{ $(lhs.$member /= rhs.$member;)* }} ); // -------------------------------------------------------------------------- // Own traits // -------------------------------------------------------------------------- // v cw_min v implement_binary_operator!(CwMin<$type<T>> for $type<T>, fn cw_min(lhs, rhs) -> $type<T> { $type::new( $(if lhs.$member < rhs.$member { lhs.$member } else { rhs.$member }),* ) } ); // v cw_max v implement_binary_operator!(CwMax<$type<T>> for $type<T>, fn cw_max(lhs, rhs) -> $type<T> { $type::new( $(if rhs.$member < lhs.$member { lhs.$member } else { rhs.$member }),* ) } ); // v dot v implement_binary_operator!(Dot<$type<T>> for $type<T>, fn dot(lhs, rhs) -> T {{ let mut sum = T::zero(); $(sum += lhs.$member * rhs.$member;)* sum }} ); implement_unary_operator!(Neg for $type<T> where T: Neg<Output=T>, fn neg(this) -> $type<T> { $type::new( $(-this.$member),* ) } ); implement_unary_operator!(CwAbs for $type<T> where T: Signed, fn cw_abs(this) -> $type<T> { $type::new( $(this.$member.abs()),* ) } ); /* implement_binary_operator!(Elem<usize> for $type<T>, fn elem(this, index) -> T {{ this.to_array()[index] }} ); */ // -------------------------------------------------------------------------- // Operators that cannot be implemented generically // -------------------------------------------------------------------------- implement_specific_operators_for_vector!($type { $($member),* } for i8 ); implement_specific_operators_for_vector!($type { $($member),* } for i16); implement_specific_operators_for_vector!($type { $($member),* } for i32); implement_specific_operators_for_vector!($type { $($member),* } for i64); implement_specific_operators_for_vector!($type { $($member),* } for u8 ); 
implement_specific_operators_for_vector!($type { $($member),* } for u16); implement_specific_operators_for_vector!($type { $($member),* } for u32); implement_specific_operators_for_vector!($type { $($member),* } for u64); implement_specific_operators_for_vector!($type { $($member),* } for f32); implement_specific_operators_for_vector!($type { $($member),* } for f64); } } implement_vector!(Vector2 { dim: 2, elems: { 0 => x, 1 => y } }); implement_vector!(Vector3 { dim: 3, elems: { 0 => x, 1 => y, 2 => z } }); implement_vector!(Vector4 { dim: 4, elems: { 0 => x, 1 => y, 2 => z, 3 => w } }); implement_binary_operator!(Cross<Vector3<T>> for Vector3<T>, fn cross(a, b) -> Vector3<T> { Vector3::new( a.y*b.z - a.z*b.y, a.z*b.x - a.x*b.z, a.x*b.y - a.y*b.x, ) } ); impl<T: Base> Vector4<T> { pub fn wdiv(&self) -> Vector3<T> { Vector3::new(self.x/self.w, self.y/self.w, self.z/self.w) } }
true
c7caeb9a37f0be38f33b37e51bfba63cdb6084d9
Rust
codyd51/axle
/rust_programs/ide/src/status_view.rs
UTF-8
2,126
2.546875
3
[ "MIT" ]
permissive
use agx_definitions::{ Color, Drawable, LayerSlice, LikeLayerSlice, NestedLayerSlice, Point, Rect, RectInsets, Size, }; use alloc::{ boxed::Box, format, rc::{Rc, Weak}, vec::Vec, }; use libgui::{ bordered::Bordered, button::Button, label::Label, ui_elements::UIElement, view::View, KeyCode, }; use libgui_derive::{Bordered, Drawable, NestedLayerSlice, UIElement}; use crate::MessageHandler; #[derive(UIElement, NestedLayerSlice, Drawable, Bordered)] pub struct StatusView { message_handler: Rc<MessageHandler>, view: Rc<View>, _run_button: Rc<Button>, status_label: Rc<Label>, } impl StatusView { pub fn new<F: 'static + Fn(&View, Size) -> Rect>( message_handler: &Rc<MessageHandler>, sizer: F, ) -> Rc<Self> { let view = Rc::new(View::new(Color::new(180, 180, 180), sizer)); let run_button = Rc::new(Button::new("Run", |_b, superview_size| { let size = Size::new(60, 30); Rect::from_parts( Point::new(10, superview_size.height - size.height - 10), size, ) })); Rc::clone(&view).add_component(Rc::clone(&run_button) as Rc<dyn UIElement>); let status_label = Rc::new(Label::new(Rect::new(10, 10, 400, 16), "", Color::black())); Rc::clone(&view).add_component(Rc::clone(&status_label) as Rc<dyn UIElement>); let ret = Rc::new(Self { message_handler: Rc::clone(message_handler), view, _run_button: Rc::clone(&run_button), status_label, }); let self_clone_for_button = Rc::clone(&ret); Rc::clone(&run_button).on_left_click(move |_b| { self_clone_for_button.set_status("Compiling..."); self_clone_for_button .message_handler .publish(crate::Message::SendCompileRequest); }); ret } pub fn set_status(&self, text: &str) { self.status_label.set_text(&format!("Status: {text}")); // Redraw the status view to reflect its new text Bordered::draw(&*self); } }
true
179f87e78a400c1e5587a3b80a87ecbf7d49d54d
Rust
mitsuhiko/minijinja
/minijinja-contrib/src/globals.rs
UTF-8
433
2.59375
3
[ "Apache-2.0" ]
permissive
#[allow(unused)] use minijinja::value::Value; /// Returns the current time in UTC as unix timestamp. /// /// To format this timestamp, use the [`datetimeformat`](crate::filters::datetimeformat) filter. #[cfg(feature = "datetime")] #[cfg_attr(docsrs, doc(cfg(feature = "datetime")))] pub fn now() -> Value { let now = time::OffsetDateTime::now_utc(); Value::from(((now.unix_timestamp_nanos() / 1000) as f64) / 1_000_000.0) }
true
35dec8d5f047b950012628f114943d1b8569a886
Rust
sirlag/leftgang
/src/filters.rs
UTF-8
1,280
2.578125
3
[]
no_license
use crate::handlers; use sqlx::{Pool, Postgres}; use warp::Filter; pub fn movers( token: String, pool: Pool<Postgres>, ) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone { move_users_to_new(token.clone(), pool.clone()) .or(move_users_to_original(token, pool)) .or(hello()) } pub fn move_users_to_original( token: String, pool: Pool<Postgres>, ) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone { warp::path!("api" / "move" / String / "original") .and(warp::get()) .and(warp::any().map(move || token.clone())) .and(warp::any().map(move || pool.clone())) .and_then(handlers::move_users_to_original) } pub fn move_users_to_new( token: String, pool: Pool<Postgres>, ) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone { warp::path!("api" / "move" / String) .and(warp::get()) .and(warp::any().map(move || token.clone())) .and(warp::any().map(move || pool.clone())) .and_then(handlers::move_users_to_group) } pub fn hello() -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone { warp::path!("api" / "hello" / String).map(|name| format!("Hello, {}!", name)) }
true
a8c9eff07bae3f1a1468322f7ec23f931b089f75
Rust
u5surf/rust_sample
/trait_boundary/trait_boundary.rs
UTF-8
573
3.4375
3
[]
no_license
trait DuckLike { fn quack(&self); fn walk(&self) { println!("walking"); } } struct Duck; impl DuckLike for Duck { fn quack(&self) { println!("quack"); } } impl DuckLike for i64 { fn quack(&self) { for _ in 0..*self { println!("quack"); } } } //generics type parameters that "parameter: trait" makes boundary fn duck_go<D: DuckLike>(duck: D) { duck.quack(); duck.walk(); } fn main() { let duck = Duck; let f = 0.0; duck_go(duck); //duck_go(f); //float does not implement DuckLile therefore, f is not be a parameter }
true
5d6d3363f3ea4a1a688d09111019618ab7eb6679
Rust
Frans-Willem/ZigbeeRustPlayground
/old_src/ieee802154/mac/frame.rs
UTF-8
23,378
2.53125
3
[]
no_license
use crate::ieee802154::{ExtendedAddress, ShortAddress, PANID}; use crate::parse_serialize::{ Deserialize, DeserializeError, DeserializeResult, DeserializeTagged, Serialize, SerializeError, SerializeResult, SerializeTagged, }; use bitfield::bitfield; #[cfg(test)] use std::convert::{TryFrom, TryInto}; /*=== Publicly accessible structures & enums ===*/ #[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)] pub enum AddressSpecification { None, Short(PANID, ShortAddress), Extended(PANID, ExtendedAddress), } #[derive(Debug, PartialEq, TryFromPrimitive, Copy, Clone, Eq, Hash)] #[TryFromPrimitiveType = "u8"] pub enum DeviceType { RFD = 0, // Reduced function device FFD = 1, // Full functioning device } #[derive(Debug, PartialEq, TryFromPrimitive, Copy, Clone, Eq, Hash)] #[TryFromPrimitiveType = "u8"] pub enum PowerSource { Battery = 0, // Not AC powered Powered = 1, // AC powered } #[derive(Debug, PartialEq, TryFromPrimitive, Copy, Clone, Eq, Hash)] #[TryFromPrimitiveType = "u8"] pub enum AssociationResponseStatus { AssociationSuccessful = 0, PANAtCapacity = 1, PANAccessDenied = 2, HoppingSequenceOffsetDuplication = 3, FastAssociationSuccessful = 0x80, } default_serialization_enum!(AssociationResponseStatus, u8); #[derive(Debug, PartialEq, Eq, Hash, Clone)] pub enum Command { AssociationRequest { /* 0x01 */ alternate_pan_coordinator: bool, device_type: DeviceType, power_source: PowerSource, receive_on_when_idle: bool, security_capability: bool, allocate_address: bool, }, AssociationResponse { /* 0x02 */ short_address: ShortAddress, status: AssociationResponseStatus, }, DataRequest, /* 0x04 */ BeaconRequest, /* 0x07 */ } #[derive(Debug, PartialEq, Eq, Hash, Clone)] pub enum FrameType { Beacon { beacon_order: usize, superframe_order: usize, final_cap_slot: usize, battery_life_extension: bool, pan_coordinator: bool, association_permit: bool, }, Data, Ack, Command(Command), Reserved, Multipurpose, Fragment, Extended, } #[derive(Debug, PartialEq, Eq, Hash, Clone)] pub struct Frame 
{ pub frame_pending: bool, pub acknowledge_request: bool, pub sequence_number: Option<u8>, pub destination: AddressSpecification, pub source: AddressSpecification, pub frame_type: FrameType, pub payload: Vec<u8>, } impl Frame { /** * TODO: Doing this manually is too slow, it should be left to hardware. * Maybe remove ? */ pub fn create_ack(&self, frame_pending: bool) -> Option<Frame> { if !self.acknowledge_request { None } else { Some(Frame { frame_pending, acknowledge_request: false, sequence_number: self.sequence_number, destination: AddressSpecification::None, source: AddressSpecification::None, frame_type: FrameType::Ack, payload: vec![], }) } } pub fn expect_ack(&self) -> Option<u8> { if !self.acknowledge_request { None } else { self.sequence_number } } } /*=== Into & From implementations */ impl From<(PANID, ShortAddress)> for AddressSpecification { fn from(item: (PANID, ShortAddress)) -> Self { AddressSpecification::Short(item.0, item.1) } } impl From<(PANID, ExtendedAddress)> for AddressSpecification { fn from(item: (PANID, ExtendedAddress)) -> Self { AddressSpecification::Extended(item.0, item.1) } } impl<A> From<Option<A>> for AddressSpecification where A: Into<AddressSpecification>, { fn from(item: Option<A>) -> Self { match item { None => AddressSpecification::None, Some(x) => x.into(), } } } impl Into<Option<PANID>> for AddressSpecification { fn into(self) -> Option<PANID> { match self { AddressSpecification::None => None, AddressSpecification::Short(panid, _) => Some(panid), AddressSpecification::Extended(panid, _) => Some(panid), } } } /*=== Bitfields for serialization & parsing ===*/ bitfield! 
{ #[derive(Serialize, Deserialize)] pub struct FrameControl(u16); impl Debug; pub frame_type, set_frame_type: 2, 0; pub security_enabled, set_security_enabled: 3, 3; pub frame_pending, set_frame_pending: 4, 4; pub acknowledge_request, set_acknowledge_request: 5, 5; pub pan_id_compression, set_pan_id_compression: 6, 6; pub reserved, set_reserved: 7, 7; pub sequence_number_supression, set_sequence_number_supression: 8, 8; pub information_elements_present, set_information_elements_present: 9, 9; pub destination_addressing_mode, set_destination_addressing_mode: 11, 10; pub frame_version, set_frame_version: 13, 12; pub source_addressing_mode, set_source_addressing_mode: 15, 14; } #[test] fn test_frame_control_parsing() { // Beacon request let input: [u8; 2] = [0x03, 0x08]; let parsed = FrameControl::deserialize_complete(&input).unwrap(); assert_eq!(parsed.frame_type(), 3); assert_eq!(parsed.security_enabled(), 0); assert_eq!(parsed.frame_pending(), 0); assert_eq!(parsed.acknowledge_request(), 0); assert_eq!(parsed.pan_id_compression(), 0); assert_eq!(parsed.reserved(), 0); assert_eq!(parsed.sequence_number_supression(), 0); assert_eq!(parsed.information_elements_present(), 0); assert_eq!(parsed.destination_addressing_mode(), 2); assert_eq!(parsed.frame_version(), 0); assert_eq!(parsed.source_addressing_mode(), 0); // Link status let input: [u8; 2] = [0x41, 0x88]; let parsed = FrameControl::deserialize_complete(&input).unwrap(); assert_eq!(parsed.frame_type(), 1); assert_eq!(parsed.security_enabled(), 0); assert_eq!(parsed.frame_pending(), 0); assert_eq!(parsed.acknowledge_request(), 0); assert_eq!(parsed.pan_id_compression(), 1); assert_eq!(parsed.reserved(), 0); assert_eq!(parsed.sequence_number_supression(), 0); assert_eq!(parsed.information_elements_present(), 0); assert_eq!(parsed.destination_addressing_mode(), 2); assert_eq!(parsed.frame_version(), 0); assert_eq!(parsed.source_addressing_mode(), 2); // Beacon let input: [u8; 2] = [0x00, 0x80]; let parsed = 
FrameControl::deserialize_complete(&input).unwrap(); assert_eq!(parsed.frame_type(), 0); assert_eq!(parsed.security_enabled(), 0); assert_eq!(parsed.frame_pending(), 0); assert_eq!(parsed.acknowledge_request(), 0); assert_eq!(parsed.pan_id_compression(), 0); assert_eq!(parsed.reserved(), 0); assert_eq!(parsed.sequence_number_supression(), 0); assert_eq!(parsed.information_elements_present(), 0); assert_eq!(parsed.destination_addressing_mode(), 0); assert_eq!(parsed.frame_version(), 0); assert_eq!(parsed.source_addressing_mode(), 2); } #[test] fn test_frame_control_serialize() { let input: [u8; 2] = [0x41, 0x88]; let parsed = FrameControl::deserialize_complete(&input).unwrap(); assert_eq!(parsed.serialize().unwrap(), input); let input: [u8; 2] = [0x00, 0x80]; let parsed = FrameControl::deserialize_complete(&input).unwrap(); assert_eq!(parsed.serialize().unwrap(), input); } bitfield! { #[derive(Serialize, Deserialize)] struct SuperframeSpecification(u16); impl Debug; pub beacon_order, set_beacon_order: 3, 0; pub superframe_order, set_superframe_order: 7, 4; pub final_cap_slot, set_final_cap_slot: 11, 8; pub battery_life_extension, set_battery_life_extension: 12, 12; pub reserved, set_reserved: 13, 13; pub pan_coordinator, set_pan_coordinator: 14, 14; pub association_permit, set_association_permit: 15, 15; } bitfield! { #[derive(Serialize, Deserialize)] struct AssociationRequest(u8); impl Debug; pub alternate_pan_coordinator, set_alternate_pan_coordinator: 0, 0; pub device_type, set_device_type: 1, 1; pub power_source, set_power_source: 2, 2; pub receive_on_when_idle, set_receive_on_when_idle: 3, 3; pub association_type, set_association_type: 4, 4; pub reserved2, set_reserved2: 5, 5; pub security_capability, set_security_capability: 6, 6; pub allocate_address, set_allocate_address: 7, 7; } /** * Not implementing Serialize & Deserialize, as these serializations take an extra parameter (PANID * handling). 
*/ impl AddressSpecification { fn serialize_to(&self, skip_panid: bool, target: &mut Vec<u8>) -> SerializeResult<()> { match self { AddressSpecification::None => Ok(()), AddressSpecification::Short(panid, address) => { if !skip_panid { panid.serialize_to(target)?; } address.serialize_to(target) } AddressSpecification::Extended(panid, address) => { if !skip_panid { panid.serialize_to(target)?; } address.serialize_to(target) } } } fn serialize_tag(&self) -> SerializeResult<u16> { Ok(match self { AddressSpecification::None => 0, AddressSpecification::Short(_, _) => 2, AddressSpecification::Extended(_, _) => 3, }) } fn deserialize(tag: u16, use_panid: Option<PANID>, input: &[u8]) -> DeserializeResult<Self> { match tag { 0 => Ok((input, AddressSpecification::None)), 1 => Err(nom::Err::Error(DeserializeError::unimplemented( input, "Unable to parse Frame with 'Reserved' address specification", ))), 2 => { let (input, panid) = if let Some(panid) = use_panid { (input, panid) } else { PANID::deserialize(input)? }; let (input, address) = ShortAddress::deserialize(input)?; Ok((input, AddressSpecification::Short(panid, address))) } 3 => { let (input, panid) = if let Some(panid) = use_panid { (input, panid) } else { PANID::deserialize(input)? 
}; let (input, address) = ExtendedAddress::deserialize(input)?; Ok((input, AddressSpecification::Extended(panid, address))) } _ => Err(nom::Err::Error(DeserializeError::unexpected_data(input))), } } } impl Deserialize for Command { fn deserialize(input: &[u8]) -> DeserializeResult<Self> { let (input, command_id) = u8::deserialize(input)?; match command_id { 1 => { let (input, bf) = AssociationRequest::deserialize(input)?; Ok(( input, Command::AssociationRequest { alternate_pan_coordinator: bf.alternate_pan_coordinator() > 0, device_type: bf.device_type().try_into().unwrap(), power_source: bf.power_source().try_into().unwrap(), receive_on_when_idle: bf.receive_on_when_idle() > 0, security_capability: bf.security_capability() > 0, allocate_address: bf.allocate_address() > 0, }, )) } 2 => { let (input, short_address) = ShortAddress::deserialize(input)?; let (input, status) = AssociationResponseStatus::deserialize(input)?; Ok(( input, Command::AssociationResponse { short_address, status, }, )) } 4 => Ok((input, Command::DataRequest)), 7 => Ok((input, Command::BeaconRequest)), _ => Err(nom::Err::Error(DeserializeError::unimplemented( input, "Command not implemented", ))), } } } impl Serialize for Command { fn serialize_to(&self, target: &mut Vec<u8>) -> SerializeResult<()> { match self { Command::AssociationResponse { short_address, status, } => (2 as u8, short_address, status).serialize_to(target), _ => Err(SerializeError::Unimplemented( "Serialization of command not implemented", )), } } } impl DeserializeTagged for FrameType { fn deserialize(frame_type: u16, input: &[u8]) -> DeserializeResult<FrameType> { match frame_type { 0 => { let (input, (superframe_spec, gts, pending_addresses)) = <(SuperframeSpecification, u8, u8)>::deserialize(input)?; if gts != 0 || pending_addresses != 0 { Err(nom::Err::Error(DeserializeError::unimplemented( input, "Beacon frame, GTS or pending addresses not empty", ))) } else { Ok(( input, FrameType::Beacon { beacon_order: 
superframe_spec.beacon_order() as usize, superframe_order: superframe_spec.superframe_order() as usize, final_cap_slot: superframe_spec.final_cap_slot() as usize, battery_life_extension: superframe_spec.battery_life_extension() != 0, pan_coordinator: superframe_spec.pan_coordinator() != 0, association_permit: superframe_spec.association_permit() != 0, }, )) } } 1 => Ok((input, FrameType::Data)), 2 => Ok((input, FrameType::Ack)), 3 => nom::combinator::map(Command::deserialize, FrameType::Command)(input), 4 => Ok((input, FrameType::Reserved)), _ => Err(nom::Err::Error(if frame_type > 7 { DeserializeError::unexpected_data(input) } else { DeserializeError::unimplemented(input, "MAC Type not implemented") })), } } } impl SerializeTagged for FrameType { type TagType = u16; fn serialize_tag(&self) -> SerializeResult<u16> { match self { FrameType::Beacon { .. } => Ok(0), FrameType::Data => Ok(1), FrameType::Ack => Ok(2), FrameType::Command(_) => Ok(3), FrameType::Reserved => Ok(4), _ => Err(SerializeError::Unimplemented("FrameType not implemented")), } } } impl Serialize for FrameType { fn serialize_to(&self, target: &mut Vec<u8>) -> SerializeResult<()> { match self { FrameType::Beacon { beacon_order, superframe_order, final_cap_slot, battery_life_extension, pan_coordinator, association_permit, } => { let mut ss = SuperframeSpecification(0); ss.set_beacon_order( (*beacon_order) .try_into() .map_err(|_| SerializeError::Unimplemented("Beacon order is too big"))?, ); ss.set_superframe_order( (*superframe_order).try_into().map_err(|_| { SerializeError::Unimplemented("Superframe order is too big") })?, ); ss.set_final_cap_slot( (*final_cap_slot) .try_into() .map_err(|_| SerializeError::Unimplemented("Final cap slot is too big"))?, ); ss.set_battery_life_extension((*battery_life_extension).into()); ss.set_reserved(0); ss.set_pan_coordinator((*pan_coordinator).into()); ss.set_association_permit((*association_permit).into()); (ss, 0 as u8, 0 as u8).serialize_to(target) } 
FrameType::Data => Ok(()), FrameType::Ack => Ok(()), FrameType::Command(cmd) => cmd.serialize_to(target), _ => Err(SerializeError::Unimplemented("Frametype not implemented")), } } } impl Deserialize for Frame { fn deserialize(input: &[u8]) -> DeserializeResult<Frame> { let (input, fsf) = FrameControl::deserialize(input)?; let frame_pending = fsf.frame_pending() > 0; let acknowledge_request = fsf.acknowledge_request() > 0; let (input, sequence_number) = nom::combinator::cond(fsf.sequence_number_supression() == 0, u8::deserialize)(input)?; let (input, destination) = AddressSpecification::deserialize(fsf.destination_addressing_mode(), None, input)?; let source_pan_compression: Option<PANID> = if fsf.pan_id_compression() != 0 { destination.into() } else { None }; let (input, source) = AddressSpecification::deserialize( fsf.source_addressing_mode(), source_pan_compression, input, )?; let (input, frame_type) = FrameType::deserialize(fsf.frame_type(), input)?; let (input, payload) = nom::combinator::rest(input)?; Ok(( input, Frame { frame_pending, acknowledge_request, sequence_number, destination, source, frame_type, payload: payload.to_vec(), }, )) } } #[test] fn test_parse_mac_frame() { // Beacon request let input: [u8; 8] = [0x03, 0x08, 0xa5, 0xFF, 0xFF, 0xFF, 0xFF, 0x07]; let parsed = Frame::deserialize_complete(&input).unwrap(); assert_eq!( parsed, Frame { frame_pending: false, acknowledge_request: false, sequence_number: Some(165), destination: (PANID::broadcast(), ShortAddress::broadcast()).into(), source: AddressSpecification::None, frame_type: FrameType::Command(Command::BeaconRequest), payload: vec![] } ); // Link Status let input: [u8; 44] = [ 0x41, 0x88, 0x01, 0x98, 0x76, 0xFF, 0xFF, 0x00, 0x00, 0x09, 0x12, 0xFC, 0xFF, 0x00, 0x00, 0x01, 0x13, 0x15, 0x68, 0x89, 0x0e, 0x00, 0x4b, 0x12, 0x28, 0x02, 0x00, 0x00, 0x00, 0x15, 0x68, 0x89, 0x0e, 0x00, 0x4b, 0x12, 0x00, 0x00, 0x71, 0x50, 0x83, 0x72, 0x0c, 0xe4, ]; let parsed = 
Frame::deserialize_complete(&input).unwrap(); assert_eq!( parsed, Frame { frame_pending: false, acknowledge_request: false, sequence_number: Some(1), destination: (PANID(0x7698), ShortAddress::broadcast()).into(), source: (PANID(0x7698), ShortAddress(0)).into(), frame_type: FrameType::Data, payload: input[9..].to_vec() } ); // Beacon let input: [u8; 26] = [ 0x00, 0x80, 0x40, 0x98, 0x76, 0x00, 0x00, 0xff, 0xcf, 0x00, 0x00, 0x00, 0x22, 0x84, 0x15, 0x68, 0x89, 0x0e, 0x00, 0x4b, 0x12, 0x00, 0xff, 0xff, 0xff, 0x00, ]; let parsed = Frame::deserialize_complete(&input).unwrap(); assert_eq!( parsed, Frame { frame_pending: false, acknowledge_request: false, sequence_number: Some(64), destination: AddressSpecification::None, source: (PANID(0x7698), ShortAddress(0)).into(), frame_type: FrameType::Beacon { beacon_order: 15, superframe_order: 15, final_cap_slot: 15, battery_life_extension: false, pan_coordinator: true, association_permit: true, }, payload: input[11..].to_vec() } ); } impl Serialize for Frame { fn serialize_to(&self, target: &mut Vec<u8>) -> SerializeResult<()> { let mut fsf = FrameControl(0); fsf.set_frame_type(self.frame_type.serialize_tag()?); fsf.set_security_enabled(0); fsf.set_frame_pending(self.frame_pending.into()); fsf.set_acknowledge_request(self.acknowledge_request.into()); let destination_pan: Option<PANID> = self.destination.into(); let source_pan: Option<PANID> = self.source.into(); let pan_id_compression = source_pan == destination_pan && source_pan.is_some(); fsf.set_pan_id_compression(pan_id_compression.into()); fsf.set_reserved(0); fsf.set_sequence_number_supression(self.sequence_number.is_none().into()); fsf.set_information_elements_present(0); fsf.set_destination_addressing_mode(self.destination.serialize_tag()?); fsf.set_frame_version(0); fsf.set_source_addressing_mode(self.source.serialize_tag()?); fsf.serialize_to(target)?; if let Some(x) = self.sequence_number { x.serialize_to(target)?; } self.destination.serialize_to(false, target)?; 
self.source.serialize_to(pan_id_compression, target)?; self.frame_type.serialize_to(target)?; target.extend_from_slice(&self.payload); Ok(()) } } #[test] fn test_serialize_mac_frame() { let input = Frame { frame_pending: false, acknowledge_request: false, sequence_number: Some(64), destination: AddressSpecification::None, source: (PANID(0x7698), ShortAddress(0)).into(), frame_type: FrameType::Beacon { beacon_order: 15, superframe_order: 15, final_cap_slot: 15, battery_life_extension: false, pan_coordinator: true, association_permit: true, }, payload: b"\x00\x22\x84\x15\x68\x89\x0e\x00\x4b\x12\x00\xff\xff\xff\x00".to_vec(), }; assert_eq!( vec![ 0x00, 0x80, 0x40, 0x98, 0x76, 0x00, 0x00, 0xFF, 0xCF, 0x00, 0x00, 0x00, 0x22, 0x84, 0x15, 0x68, 0x89, 0x0e, 0x00, 0x4b, 0x12, 0x00, 0xFF, 0xFF, 0xFF, 0x00 ], input.serialize().unwrap() ); let input = Frame { frame_pending: false, acknowledge_request: true, sequence_number: Some(10), destination: (PANID(0x7698), ExtendedAddress(0xd0cf5efffe1c6306)).into(), source: (PANID(0x7698), ExtendedAddress(0x00124b000e896815)).into(), frame_type: FrameType::Command(Command::AssociationResponse { short_address: ShortAddress(0x558b), status: AssociationResponseStatus::AssociationSuccessful, }), payload: vec![], }; assert_eq!( vec![ 0x63, 0xcc, 0x0a, 0x98, 0x76, 0x06, 0x63, 0x1c, 0xfe, 0xff, 0x5e, 0xcf, 0xd0, 0x15, 0x68, 0x89, 0x0e, 0x00, 0x4b, 0x12, 0x00, 0x02, 0x8b, 0x55, 0x00, ], input.serialize().unwrap() ); }
true
c64d4040a061289b008a4bbceeae4b153145d312
Rust
zaynetro/krokodil
/src/main.rs
UTF-8
20,592
2.640625
3
[ "MIT" ]
permissive
use std::collections::HashMap; use std::env; use std::sync::Arc; use std::time::{Duration, Instant}; use log::info; use serde::{Deserialize, Serialize}; use tokio::{ sync::{mpsc, Mutex}, time::interval, }; use uuid::Uuid; use warp::ws::Message; use warp::Filter; mod errors; mod games; use games::{CanvasSize, DrawingSegment, Game, Games, Player}; pub type App = Arc<Mutex<AppState>>; /// A reference to player connection #[derive(Clone)] pub struct PlayerConn { pub id: usize, pub tx: mpsc::UnboundedSender<Result<Message, warp::Error>>, } pub struct AppState { games: Games, /// All active websocket connections. A mapping from player id to connection reference. connections: HashMap<Uuid, PlayerConn>, /// A mapping from player id to the time when WS connection ended. exited_players: HashMap<Uuid, Instant>, } const REMOVE_PLAYER_AFTER: Duration = Duration::from_secs(60 * 5); // TODO: error handling #[tokio::main] async fn main() { if env::var_os("RUST_LOG").is_none() { // Set `RUST_LOG=backend=debug` to see debug logs, // this only shows access logs. env::set_var("RUST_LOG", "krokodil=debug"); } pretty_env_logger::init(); let (host, port) = match env::var("PORT") { Ok(port) => ([0, 0, 0, 0], port.parse().expect("PORT must be a number")), Err(_) => ([127, 0, 0, 1], 3030), }; let app = Arc::new(Mutex::new(AppState { games: Games::new(), connections: HashMap::new(), exited_players: HashMap::new(), })); tokio::spawn(remove_players_job(app.clone())); let routes = filters::index() .or(filters::static_files()) .or(filters::create_game(app.clone())) .or(filters::game(app.clone())) .or(filters::sync(app.clone())) .with(warp::compression::gzip()); info!("Listening on {:?}:{}", host, port); warp::serve(routes.with(warp::log("backend"))) .run((host, port)) .await; } /// Periodically scan for exited players and remove them from games. 
async fn remove_players_job(app: App) { loop { interval(Duration::from_secs(30)).tick().await; let mut remove_players = vec![]; { // Prepare a list of players to remove let mut app = app.lock().await; let now = Instant::now(); for (player_id, exited_at) in &mut app.exited_players { if now.duration_since(*exited_at) > REMOVE_PLAYER_AFTER { remove_players.push(player_id.clone()); } } } let mut all_modified_games = HashMap::new(); { // Remove players from the games let mut app = app.lock().await; for player_id in &remove_players { log::debug!("Removing exited player {}", player_id); let modified_games = app.games.remove_player(&player_id); for game in modified_games { all_modified_games.insert(game.id.clone(), game); } } } { // Notify all other players in the modified games let app = app.lock().await; for game in all_modified_games.values() { log::debug!( "Notifying {} players in game={} about removed player", game.players.len(), game.id ); for player in &game.players { if let Some(conn) = app.connections.get(&player.id) { let _ = conn.tx.send(message(OutgoingEvent { from_event_id: None, body: OutgoingEventBody::Game(game.clone()), })); } } } } // Remove exited players let mut app = app.lock().await; for player_id in &remove_players { app.exited_players.remove(&player_id); } } } mod filters { use std::convert::Infallible; use warp::http::header; use warp::{filters::reply, Filter}; use crate::SyncQuery; use super::{errors, handlers, App}; pub fn index() -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone { warp::get() .and(warp::path::end()) .and(warp::fs::file("./ui/static/index.html")) .with(reply::header( header::CONTENT_SECURITY_POLICY, "default-src 'self'", )) } pub fn static_files() -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone { warp::path("static").and(warp::fs::dir("./ui/dist").or(warp::fs::dir("./ui/static"))) } pub fn create_game( app: App, ) -> impl Filter<Extract = impl warp::Reply, Error = 
warp::Rejection> + Clone { warp::post() .and(warp::path::end()) .and(with_app(app.clone())) .and_then(handlers::create_game) } pub fn game( app: App, ) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone { warp::path!("game" / String) .and(warp::get()) .and(with_app(app.clone())) .and_then(handlers::game) .with(reply::header( header::CONTENT_SECURITY_POLICY, // We allow websocket connections explicitly because ios otherwise will not work "default-src 'self' ws: wss:", )) } pub fn sync( app: App, ) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone { warp::path("sync") .and(warp::query::<SyncQuery>()) .and(with_app(app.clone())) .and_then(require_game_id) .and(warp::ws()) .map(|(app, query): (App, SyncQuery), ws: warp::ws::Ws| { ws.on_upgrade(move |websocket| handlers::sync(websocket, app, query)) }) } async fn require_game_id( query: SyncQuery, app: App, ) -> Result<(App, SyncQuery), warp::Rejection> { let game_present = { let app = app.lock().await; app.games.exists(&query.game_id) }; if game_present { Ok((app, query)) } else { Err(warp::reject::custom(errors::MissingGame)) } } fn with_app(app: App) -> impl Filter<Extract = (App,), Error = Infallible> + Clone { warp::any().map(move || app.clone()) } } mod handlers { use std::{ collections::hash_map::Entry, sync::atomic::{AtomicUsize, Ordering}, time::Instant, }; use futures::{FutureExt, StreamExt}; use log::{error, info}; use tokio::sync::mpsc; use uuid::Uuid; use warp::http::Uri; use warp::ws::Message; use super::{App, PlayerConn}; use crate::{ message, IncomingEvent, IncomingEventBody, OutgoingEvent, OutgoingEventBody, SyncQuery, }; /// Our global unique conn id counter. 
static NEXT_CONN_ID: AtomicUsize = AtomicUsize::new(1); const GAME_HTML: &str = include_str!("../ui/static/game.html"); pub async fn create_game(app: App) -> Result<Box<dyn warp::Reply>, warp::Rejection> { let mut app = app.lock().await; let game_id = app.games.reserve_id(); let url = format!("/game/{}", game_id); log::debug!("Created a new game {}", url); Ok(Box::new(warp::redirect( url.parse::<Uri>().expect("Parse uri"), ))) } pub async fn game(game_id: String, app: App) -> Result<Box<dyn warp::Reply>, warp::Rejection> { let app = app.lock().await; if app.games.exists(&game_id) { log::debug!("Game found {}", game_id); Ok(Box::new(warp::reply::html(GAME_HTML))) } else { log::debug!("Unknown game {}", game_id); Ok(Box::new(warp::redirect(Uri::from_static("/")))) } } pub async fn sync(websocket: warp::filters::ws::WebSocket, app: App, query: SyncQuery) { let player_id = query.player_id.unwrap_or(Uuid::new_v4()); let conn_id = NEXT_CONN_ID.fetch_add(1, Ordering::Relaxed); if query.player_id.is_some() { info!( "Existing player {} in game {} conn={}", player_id, query.game_id, conn_id ); } else { info!( "New player {} in game {} conn={}", player_id, query.game_id, conn_id ); } // Split the socket into a sender and receive of messages. let (ws_tx, mut ws_rx) = websocket.split(); // Use an unbounded channel to handle buffering and flushing of messages // to the websocket... 
let (tx, rx) = mpsc::unbounded_channel(); tokio::task::spawn(rx.forward(ws_tx).map(|result| { if let Err(e) = result { error!("websocket send error: {}", e); } })); let mut player_lifecycle = PlayerConnLifecycle { app: app.clone(), conn: PlayerConn { id: conn_id, tx }, player_id, player_nickname: query.nickname, new_player: query.player_id.is_none(), game_id: query.game_id, }; player_lifecycle.init().await; // Read player messages while let Some(result) = ws_rx.next().await { let msg = match result { Ok(msg) => msg, Err(e) => { error!("websocket error(uid={}): {}", player_id, e); break; } }; player_lifecycle.on_message(msg).await; } // Once stream ends -> connection disconnected player_lifecycle.disconnected().await; } struct PlayerConnLifecycle { app: App, conn: PlayerConn, player_id: Uuid, player_nickname: Option<String>, new_player: bool, game_id: String, } impl PlayerConnLifecycle { /// Add our player to the game and to the known connections. Then send game info async fn init(&mut self) { let mut app = self.app.lock().await; // Replace existing connection if there were. We support running game in a single tab only. 
app.connections.insert(self.player_id, self.conn.clone()); app.exited_players.remove(&self.player_id); let (game, player) = app.games.add_player( &self.game_id, self.player_id.clone(), self.player_nickname.clone(), ); if self.new_player { // Send this player ids only if it was new self.conn .tx .send(message(OutgoingEvent { from_event_id: None, body: OutgoingEventBody::YouAre { player }, })) .expect("Send player info"); } // TODO: Send game info to all players self.conn .tx .send(message(OutgoingEvent { from_event_id: None, body: OutgoingEventBody::Game(game.clone()), })) .expect("Send game"); // Send current drawing game.iter_drawing(|segment| { self.conn .tx .send(message(OutgoingEvent { from_event_id: None, body: OutgoingEventBody::AddDrawingSegment(segment.clone()), })) .expect("Send segment"); }); log::debug!("Player {} initialized", self.player_id); } async fn on_message(&mut self, msg: Message) { let event_str = match msg.to_str() { Ok(s) => s, Err(_) => { // Skip non text messages return; } }; log::debug!("Received message {}", event_str); let event: IncomingEvent = match serde_json::from_str(event_str) { Ok(event) => event, Err(err) => { error!("Failed to read WS message: {} (event={})", err, event_str); return; } }; match event.body { IncomingEventBody::Ping => { self.conn .tx .send(message(OutgoingEvent { from_event_id: None, body: OutgoingEventBody::Pong, })) .expect("Send pong message"); log::debug!("Pong sent"); } IncomingEventBody::AddDrawingSegment(segment) => { { // Add segment to the state let mut app = self.app.lock().await; let game = app.games.find_mut(&self.game_id).expect("Game"); game.add_segment(segment.clone()); } // Let others know self.notify_others(OutgoingEvent { from_event_id: None, body: OutgoingEventBody::AddDrawingSegment(segment), }) .await; log::debug!("Added drawing segment to other players notified"); } IncomingEventBody::RemoveDrawingSegment { segment_id } => { { // Remove segment from the state let mut app = 
self.app.lock().await; let game = app.games.find_mut(&self.game_id).expect("Game"); game.remove_segment(&segment_id); } // Let others know self.notify_others(OutgoingEvent { from_event_id: None, body: OutgoingEventBody::RemoveDrawingSegment { segment_id }, }) .await; log::debug!("Removed drawing segment to other players notified"); } IncomingEventBody::SubmitWord { word, canvas } => { let game = { let mut app = self.app.lock().await; let game = app.games.find_mut(&self.game_id).expect("Game"); if !game.submit_word(&self.player_id, word, canvas) { // Return when game wasn't changed return; } game.clone() }; // Clear drawing for all self.notify_all(OutgoingEvent { from_event_id: None, body: OutgoingEventBody::ClearDrawing {}, }) .await; // Notify all players of games changes self.notify_all(OutgoingEvent { from_event_id: None, body: OutgoingEventBody::Game(game), }) .await; log::debug!("Player {} submitted a word", self.player_id); } IncomingEventBody::GuessWord { word } => { let game = { let mut app = self.app.lock().await; let game = app.games.find_mut(&self.game_id).expect("Game"); if !game.guess_word(&self.player_id, &word) { // Notify wrong guess let _ = self.conn.tx.send(message(OutgoingEvent { from_event_id: event.event_id, body: OutgoingEventBody::WrongGuess {}, })); return; } game.clone() }; // Notify all players of games changes self.notify_all(OutgoingEvent { from_event_id: None, body: OutgoingEventBody::Game(game), }) .await; log::debug!("Player {} guessed a word", self.player_id); } IncomingEventBody::AskWordTip {} => { let mut app = self.app.lock().await; let game = app.games.find_mut(&self.game_id).expect("Game"); if let Some(tip) = game.ask_word_tip() { let _ = self.conn.tx.send(message(OutgoingEvent { from_event_id: event.event_id, body: OutgoingEventBody::WordTip { tip }, })); } log::debug!("Player {} asked a tip", self.player_id); } } } async fn disconnected(&mut self) { // Remove player connection that is the same as this one let mut app = 
self.app.lock().await; if let Entry::Occupied(e) = app.connections.entry(self.player_id) { if e.get().id == self.conn.id { log::debug!("Exiting player {} conn={}", self.player_id, self.conn.id); e.remove(); } } app.exited_players.insert(self.player_id, Instant::now()); log::debug!( "Player {} disconnected conn={}", self.player_id, self.conn.id ); } async fn notify_all(&self, event: OutgoingEvent) { let app = self.app.lock().await; let game = app.games.find(&self.game_id).expect("Game"); for player in &game.players { if let Some(conn) = app.connections.get(&player.id) { let _ = conn.tx.send(message(event.clone())); } } } async fn notify_others(&self, event: OutgoingEvent) { let app = self.app.lock().await; let game = app.games.find(&self.game_id).expect("Game"); for player in &game.players { if self.player_id == player.id { // Do not send it to ourselves continue; } if let Some(conn) = app.connections.get(&player.id) { let _ = conn.tx.send(message(event.clone())); } } } } } fn message(response: impl Serialize) -> Result<Message, warp::Error> { let text = serde_json::to_string(&response).expect("Serialize WS message"); Ok(Message::text(&text)) } /// IncomingEvent represents every possible incoming message #[derive(Debug, Deserialize)] #[serde(rename_all = "camelCase")] struct IncomingEvent { pub event_id: Option<String>, pub body: IncomingEventBody, } #[derive(Debug, Deserialize)] #[serde(tag = "type")] #[serde(rename_all = "camelCase")] enum IncomingEventBody { AddDrawingSegment(DrawingSegment), #[serde(rename_all = "camelCase")] RemoveDrawingSegment { segment_id: String, }, SubmitWord { word: String, canvas: CanvasSize, }, GuessWord { word: String, }, AskWordTip {}, Ping, } /// OutgoingEvent represents every possible outgoing message #[derive(Serialize, Clone)] #[serde(rename_all = "camelCase")] struct OutgoingEvent { pub from_event_id: Option<String>, pub body: OutgoingEventBody, } #[derive(Serialize, Clone)] #[serde(tag = "type")] #[serde(rename_all = 
"camelCase")] enum OutgoingEventBody { Game(Game), AddDrawingSegment(DrawingSegment), #[serde(rename_all = "camelCase")] RemoveDrawingSegment { segment_id: String, }, #[serde(rename_all = "camelCase")] YouAre { player: Player, }, WrongGuess {}, WordTip { tip: String, }, ClearDrawing {}, Pong, } #[derive(Debug, Deserialize)] pub struct SyncQuery { pub game_id: String, pub player_id: Option<Uuid>, pub nickname: Option<String>, }
true
727f6c68cad23da0e5fe63cc829a0ebead27c1c8
Rust
hermetique/rustzx
/rustzx-core/src/zx/keys.rs
UTF-8
1,619
3.453125
3
[ "MIT" ]
permissive
//! Module with hardware key port\masks /// Struct, which contains mast and port of key #[rustfmt::skip] #[derive(Clone, Copy)] pub enum ZXKey { // Port 0xFEFE Shift, Z, X, C, V, // Port 0xFDFE A, S, D, F, G, // Port 0xFBFE Q, W, E, R, T, // Port 0xF7FE N1, N2, N3, N4, N5, // Port 0xEFFE N0, N9, N8, N7, N6, // Port 0xDFFE P, O, I, U, Y, // Port 0xBFFE Enter, L, K, J, H, // Port 0x7FFE Space, SymShift, M, N, B, } impl ZXKey { pub(crate) fn row_id(self) -> usize { match self.half_port() { 0xFE => 0, 0xFD => 1, 0xFB => 2, 0xF7 => 3, 0xEF => 4, 0xDF => 5, 0xBF => 6, 0x7F => 7, _ => unreachable!(), } } pub(crate) fn mask(&self) -> u8 { use ZXKey::*; match self { Shift | A | Q | N1 | N0 | P | Enter | Space => 0x01, Z | S | W | N2 | N9 | O | L | SymShift => 0x02, X | D | E | N3 | N8 | I | K | M => 0x04, C | F | R | N4 | N7 | U | J | N => 0x08, V | G | T | N5 | N6 | Y | H | B => 0x10, } } fn half_port(self) -> u8 { use ZXKey::*; match self { Shift | Z | X | C | V => 0xFE, A | S | D | F | G => 0xFD, Q | W | E | R | T => 0xFB, N1 | N2 | N3 | N4 | N5 => 0xF7, N0 | N9 | N8 | N7 | N6 => 0xEF, P | O | I | U | Y => 0xDF, Enter | L | K | J | H => 0xBF, Space | SymShift | M | N | B => 0x7F, } } }
true
2aeae10a67434fbb7c8bdc68f035ea7875d11ace
Rust
seungdols/practice
/rust/tuples/src/main.rs
UTF-8
309
3.28125
3
[]
no_license
fn main() { // let tup1 = (20, "Rust", 30, 35, false, 3.5, (1,4,7)); // println!("{}", tup1.5); let tup1 = (20, "Rust", 30); // let tup1 = (20, "Rust", 30, 45); // 아래 구문에서 에러 발생함. let (a, b, c) = tup1; println!("a: {}", a); println!("b: {}", b); println!("c: {}", c); }
true
16aa2af580c076c9089cd3728bde01bf3b1e1f99
Rust
wildarch/toy-compiler
/src/mips/instruction.rs
UTF-8
1,879
3.453125
3
[]
no_license
use std::fmt::{Formatter, Display, Error as FormatError}; use super::register::Register; use super::label::Label; #[derive(Debug)] pub enum Instruction { La(Register, Label), Add(Register, Register, Register), Addi(Register, Register, i32), Sub(Register, Register, Register), Seq(Register, Register, Register), Sgt(Register, Register, Register), Slt(Register, Register, Register), Move(Register, Register), Jal(Label), Li(Register, i32), Jr(Register), SysCall, // Actually just 15 bits Sw(Register, i16, Register), Lw(Register, i16, Register), B(Label), Ble(Register, Register, Label), // Not really an instruction Label(Label), } impl Display for Instruction { fn fmt(&self, f: &mut Formatter) -> Result<(), FormatError> { use self::Instruction::*; match self { La(r, l) => write!(f, "la {}, {}", r, l), Add(a, b, c) => write!(f, "add {}, {}, {}", a, b, c), Addi(a, b, c) => write!(f, "addi {}, {}, {}", a, b, c), Sub(a, b, c) => write!(f, "sub {}, {}, {}", a, b, c), Seq(a, b, c) => write!(f, "seq {}, {}, {}", a, b, c), Sgt(a, b, c) => write!(f, "sgt {}, {}, {}", a, b, c), Slt(a, b, c) => write!(f, "slt {}, {}, {}", a, b, c), Move(a, b) => write!(f, "move {}, {}", a, b), Jal(l) => write!(f, "jal {}", l), Li(r, i) => write!(f, "li {}, {}", r, i), Jr(r) => write!(f, "jr {}", r), SysCall => write!(f, "syscall"), Sw(v, i, d) => write!(f, "sw {}, {}({})", v, i, d), Lw(d, i, v) => write!(f, "lw {}, {}({})", d, i, v), B(l) => write!(f, "b {}", l), Ble(a, b, l) => write!(f, "ble {}, {}, {}", a, b, l), Label(l) => write!(f, "{}:", l), } } }
true
dac8300e9b0d5ad567fb59dc5b1256e27003a12f
Rust
pawanjay176/libp2p_test
/src/rpc/methods.rs
UTF-8
6,265
3.03125
3
[]
no_license
//! Available RPC methods types and ids. use ssz_derive::{Decode, Encode}; pub type Hash256 = String; /// Maximum number of blocks in a single request. pub const MAX_REQUEST_BLOCKS: u64 = 1024; /// Maximum length of error message. pub const MAX_ERROR_LEN: u64 = 256; /// Wrapper over SSZ List to represent error message in rpc responses. pub type ErrorType = String; /* Request/Response data structures for RPC methods */ /* Requests */ /// Identifier of a request. /// // NOTE: The handler stores the `RequestId` to inform back of responses and errors, but it's execution // is independent of the contents on this type. #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum RequestId { Router, Sync(usize), Behaviour, } /// The reason given for a `Goodbye` message. /// /// Request a number of beacon block roots from a peer. #[derive(Encode, Decode, Clone, Debug, PartialEq)] pub struct BlocksByRangeRequest { /// The starting slot to request blocks. pub start_slot: u64, /// The number of blocks from the start slot. pub count: u64, /// The step increment to receive blocks. /// /// A value of 1 returns every block. /// A value of 2 returns every second block. /// A value of 3 returns every third block and so on. pub step: u64, } /* RPC Handling and Grouping */ // Collection of enums and structs used by the Codecs to encode/decode RPC messages #[derive(Debug, Clone, PartialEq)] pub enum RPCResponse { /// A response to a get BLOCKS_BY_RANGE request. A None response signifies the end of the /// batch. BlocksByRange(Vec<u8>), } /// Indicates which response is being terminated by a stream termination response. #[derive(Debug, Clone)] pub enum ResponseTermination { /// Blocks by range stream termination. BlocksByRange, } /// The structured response containing a result/code indicating success or failure /// and the contents of the response #[derive(Debug, Clone)] pub enum RPCCodedResponse { /// The response is a successful. 
Success(RPCResponse), Error(RPCResponseErrorCode, ErrorType), /// Received a stream termination indicating which response is being terminated. StreamTermination(ResponseTermination), } /// The code assigned to an erroneous `RPCResponse`. #[derive(Debug, Clone, Copy, PartialEq)] pub enum RPCResponseErrorCode { RateLimited, InvalidRequest, ServerError, Unknown, } impl RPCCodedResponse { /// Used to encode the response in the codec. pub fn as_u8(&self) -> Option<u8> { match self { RPCCodedResponse::Success(_) => Some(0), RPCCodedResponse::Error(code, _) => Some(code.as_u8()), RPCCodedResponse::StreamTermination(_) => None, } } /// Tells the codec whether to decode as an RPCResponse or an error. pub fn is_response(response_code: u8) -> bool { matches!(response_code, 0) } /// Builds an RPCCodedResponse from a response code and an ErrorMessage pub fn from_error(response_code: u8, err: ErrorType) -> Self { let code = match response_code { 1 => RPCResponseErrorCode::InvalidRequest, 2 => RPCResponseErrorCode::ServerError, _ => RPCResponseErrorCode::Unknown, }; RPCCodedResponse::Error(code, err) } /// Specifies which response allows for multiple chunks for the stream handler. pub fn multiple_responses(&self) -> bool { match self { RPCCodedResponse::Success(resp) => match resp { RPCResponse::BlocksByRange(_) => true, }, RPCCodedResponse::Error(_, _) => true, // Stream terminations are part of responses that have chunks RPCCodedResponse::StreamTermination(_) => true, } } /// Returns true if this response always terminates the stream. 
pub fn close_after(&self) -> bool { !matches!(self, RPCCodedResponse::Success(_)) } } impl RPCResponseErrorCode { fn as_u8(&self) -> u8 { match self { RPCResponseErrorCode::InvalidRequest => 1, RPCResponseErrorCode::ServerError => 2, RPCResponseErrorCode::Unknown => 255, RPCResponseErrorCode::RateLimited => 128, } } } impl std::fmt::Display for RPCResponseErrorCode { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let repr = match self { RPCResponseErrorCode::InvalidRequest => "The request was invalid", RPCResponseErrorCode::ServerError => "Server error occurred", RPCResponseErrorCode::Unknown => "Unknown error occurred", RPCResponseErrorCode::RateLimited => "Rate limited", }; f.write_str(repr) } } impl std::fmt::Display for RPCResponse { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { RPCResponse::BlocksByRange(block) => { write!(f, "BlocksByRange: Block slot: {:?}", block) } } } } impl std::fmt::Display for RPCCodedResponse { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { RPCCodedResponse::Success(res) => write!(f, "{}", res), RPCCodedResponse::Error(code, err) => write!(f, "{}: {}", code, err.to_string()), RPCCodedResponse::StreamTermination(_) => write!(f, "Stream Termination"), } } } impl std::fmt::Display for BlocksByRangeRequest { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!( f, "Start Slot: {}, Count: {}, Step: {}", self.start_slot, self.count, self.step ) } } impl slog::Value for RequestId { fn serialize( &self, record: &slog::Record, key: slog::Key, serializer: &mut dyn slog::Serializer, ) -> slog::Result { match self { RequestId::Behaviour => slog::Value::serialize("Behaviour", record, key, serializer), RequestId::Router => slog::Value::serialize("Router", record, key, serializer), RequestId::Sync(ref id) => slog::Value::serialize(id, record, key, serializer), } } }
true
5f647f292087e6a488b2c534a24c29b73ed6dd7c
Rust
dialtone/advent
/advent11/src/solution.rs
UTF-8
3,467
3.140625
3
[ "MIT" ]
permissive
use crate::intcode; use std::collections::HashMap; use std::fmt::Display; pub fn part1(input: &str) -> impl Display { let mut computer = intcode::Computer::from(input); let mut board: HashMap<(isize, isize), isize> = HashMap::new(); // UP RIGHT DOWN LEFT let directions = vec![(0, 1), (1, 0), (0, -1), (-1, 0)]; let mut pos_x: isize = 0; let mut pos_y: isize = 0; let mut direction = 800; while !computer.has_halted() { // println!("--------------------------------"); // println!("NEW CYCLE"); // println!("position ({}, {})", pos_x, pos_y); let tile_color = *board.get(&(pos_x, pos_y)).or(Some(&0)).unwrap(); // println!("tile_color: {}", tile_color); computer.write_input(tile_color); if computer.is_waiting() { computer.resume(); } else { computer.run(); } let color_of_panel = computer.pop_output().unwrap(); let rotation = computer.pop_output().unwrap(); // println!("color_of_panel: {} rotation: {}", color_of_panel, rotation); if color_of_panel != tile_color { // println!("painted ({}, {})", pos_x, pos_y); board.insert((pos_x, pos_y), color_of_panel); } if rotation == 0 { direction -= 1 } else { direction += 1 } let (change_x, change_y) = directions[direction as usize % 4]; // println!("change_x: {} change_y: {}", change_x, change_y); pos_x += change_x; pos_y += change_y; } board.keys().len() } pub fn part2(input: &str) -> impl Display { let mut computer = intcode::Computer::from(input); let mut board: HashMap<(isize, isize), isize> = HashMap::new(); // UP RIGHT DOWN LEFT let directions = vec![(0, 1), (1, 0), (0, -1), (-1, 0)]; let mut pos_x: isize = 0; let mut pos_y: isize = 5; let mut direction = 800; board.insert((pos_x, pos_y), 1); while !computer.has_halted() { // println!("--------------------------------"); // println!("NEW CYCLE"); // println!("position ({}, {})", pos_x, pos_y); let tile_color = *board.get(&(pos_x, pos_y)).or(Some(&0)).unwrap(); // println!("tile_color: {}", tile_color); computer.write_input(tile_color); if computer.is_waiting() { 
computer.resume(); } else { computer.run(); } let color_of_panel = computer.pop_output().unwrap(); let rotation = computer.pop_output().unwrap(); // println!("color_of_panel: {} rotation: {}", color_of_panel, rotation); if color_of_panel != tile_color { // println!("painted ({}, {})", pos_x, pos_y); board.insert((pos_x, pos_y), color_of_panel); } if rotation == 0 { direction -= 1 } else { direction += 1 } let (change_x, change_y) = directions[direction as usize % 4]; // println!("change_x: {} change_y: {}", change_x, change_y); pos_x += change_x; pos_y += change_y; } let mut final_board = vec![vec![" "; 50]; 10]; for ((x, y), val) in board.iter() { if *val == 1 { final_board[*y as usize][*x as usize] = "#"; } } for row in final_board { println!("{}", row.join("")); } board.keys().len() }
true
cb4e067ff24c4dadadc9d744097894d344614cab
Rust
shelbyd/kyber
/cli/src/refactorings/parser/lex.rs
UTF-8
1,297
3.296875
3
[]
no_license
use logos::*; use std::collections::*; pub type Tokens = VecDeque<Token>; pub fn lex(contents: &str) -> Result<Tokens, String> { let mut tokens = Token::lexer(contents); std::iter::from_fn(|| { Some(match tokens.next()? { Token::Error => Err(tokens.slice().to_owned()), t => Ok(t), }) }) .collect() } #[derive(Logos, Debug, PartialEq, Clone)] pub enum Token { #[error] #[regex(r"[ \t\n\f]+", logos::skip)] Error, #[token("import")] Import, #[token("from")] From, #[token("let")] Let, #[token("for")] For, #[token("in")] In, #[token("{")] OpenBrace, #[token("}")] CloseBrace, #[token("(")] OpenParen, #[token(")")] CloseParen, #[regex("[a-zA-Z][a-zA-Z0-9_]*", |lex| lex.slice().to_string())] Ident(String), #[token(",")] Comma, #[token(";")] SemiColon, #[token(":")] Colon, #[token("..")] Concatenate, #[token(".")] Period, #[regex("\"[^\"]*\"", |lex| lex.slice()[1..(lex.slice().len()-1)].to_string())] StringLiteral(String), #[regex("/[^/]+/", |lex| lex.slice()[1..(lex.slice().len()-1)].to_string())] Regex(String), #[token("@")] Directive, #[token("=")] Equal, }
true
66c58f77afa3f1574bcf7a7ea2f3e6547fe39634
Rust
cgdilley/AdventOfCode2018
/day01_0/src/main.rs
UTF-8
2,453
4.125
4
[]
no_license
use std::fs::File; use std::io; use std::io::prelude::*; fn main() { // Read the lines from the file let mut data = read_lines("../input/day1.txt").expect("Could not load file."); // Convert the lines into integers let numbers = extract_values(&mut data).expect("Could not parse lines."); // Calculate the sum let sum = sum_vector(& numbers); println!("The sum = {}", sum); } /// Reads all lines from the file with the given filename, and returns the lines /// as a vector of strings as a Result. /// /// # Arguments /// /// * `filename` - The name of the file to read /// /// # Returns /// /// A result containing the lines of the read file as a vector of strings. /// /// # Example /// ``` /// let data : Vec<String> = read_lines("file.txt")?; /// ` fn read_lines(filename: &str) -> io::Result<Vec<String>> { let file = File::open(&filename)?; let mut reader = io::BufReader::new(file); let mut buf = String::new(); let mut results : Vec<String> = Vec::new(); while reader.read_line(&mut buf)? > 0 { results.push(buf.trim_right().to_string()); buf.clear(); } Ok(results) } /// Converts the strings in the given vector into integers, checking the /// first character in each string for a "+" or "-", indicating positive /// and negative numbers respectively, then parsing the remaining characters /// as an integer. /// /// # Arguments /// /// * `data` - A vector of strings in the form eg. "+32" /// /// # Returns /// /// A result containing a vector of all parsed integer values. 
/// /// # Example /// ``` /// let numbers: Vec<i32> = extract_values(&data)?; /// ` fn extract_values(data: &mut Vec<String>) -> Result<Vec<i32>, String> { let mut results : Vec<i32> = Vec::new(); for datum in data { let sign : &str = &datum[0..1]; let s : &str = &datum[1..]; match s.parse::<i32>() { Ok(number) => { results.push(if sign == "-" {number * -1} else {number}); }, Err(e) => return Err(e.to_string()) } } Ok(results) } /// Calculates the sum of all values in the given vector of integers /// /// # Arguments /// /// * `numbers` - A vector of integer values to sum /// /// # Returns /// /// The sum of the integer values in the given vector /// fn sum_vector(numbers: &Vec<i32>) -> i32 { let mut sum = 0; for num in numbers { sum += *num; } sum }
true
43ea0e6c45aa11ff607d25fc8d4c66f09e6fb81b
Rust
keroro520/shot
/src/utils/unspent.rs
UTF-8
1,030
2.640625
3
[]
no_license
use ckb_types::core::BlockNumber; use ckb_types::packed::{CellOutput, OutPoint}; use std::collections::HashMap; #[derive(Clone, Debug)] pub struct LiveCell { pub cell_output: CellOutput, pub out_point: OutPoint, pub tx_index: usize, pub block_number: BlockNumber, } #[derive(Clone, Debug)] pub struct Unspent { inner: HashMap<OutPoint, LiveCell>, } impl Unspent { pub fn new() -> Self { Self { inner: HashMap::new(), } } pub fn update(&mut self, dead_out_points: &[OutPoint], live_cells: Vec<LiveCell>) { for dead in dead_out_points.iter() { self.inner.remove(dead); } for live in live_cells.into_iter() { self.inner.insert(live.out_point.clone(), live); } } pub fn into_iter(self) -> impl IntoIterator<Item = (OutPoint, LiveCell)> { let mut vec = self.inner.into_iter().collect::<Vec<_>>(); vec.sort_by(|a, b| a.1.block_number.cmp(&b.1.block_number)); vec.into_iter() } }
true
0c6e0805d9eeb1c1f92c5e9219aa2f08d9f04d84
Rust
omar2535/leetcode
/rust/src/problems/p344_reverse_string.rs
UTF-8
926
3.3125
3
[]
no_license
pub struct Solution; impl Solution { pub fn reverse_string(s: &mut Vec<char>) { if s.len() == 1 { return; } let midway_point = s.len() / 2; for i in 0..midway_point { let end_point = s.len() - i - 1; s.swap(i, end_point); } } } #[cfg(test)] mod tests { use super::*; #[test] fn test_1() { let mut test_arr = vec!['a', 'b', 'c', 'd', 'e']; Solution::reverse_string(&mut test_arr); assert_eq!(vec!['e', 'd', 'c', 'b', 'a'], test_arr); } #[test] fn test_2() { let mut test_arr = vec!['a']; Solution::reverse_string(&mut test_arr); assert_eq!(vec!['a'], test_arr); } #[test] fn test_3() { let mut test_arr = vec!['a', 'b', 'c', 'd']; Solution::reverse_string(&mut test_arr); assert_eq!(vec!['d', 'c', 'b', 'a'], test_arr); } }
true
99a0b737df480f2beb7b230b5462084202e34093
Rust
Follpvosten/mudders
/src/lib.rs
UTF-8
27,514
3.578125
4
[ "MIT" ]
permissive
/*! Generate lexicographically-evenly-spaced strings between two strings from pre-defined alphabets. This is a rewrite of [mudderjs](https://github.com/fasiha/mudderjs); thanks for the original work of the author and their contributors! ## Usage Add a dependency in your Cargo.toml: ```toml mudders = "0.0.4" ``` Now you can generate lexicographically-spaced strings in a few different ways: ``` use mudders::SymbolTable; // The mudder method takes a NonZeroUsize as the amount, // so you cannot pass in an invalid value. use std::num::NonZeroUsize; // You can use the included alphabet table let table = SymbolTable::alphabet(); // SymbolTable::mudder() returns a Vec containing `amount` Strings. let result = table.mudder_one("a", "z").unwrap(); // These strings are always lexicographically placed between `start` and `end`. let one_str = result.as_str(); assert!(one_str > "a"); assert!(one_str < "z"); // You can also define your own symbol tables let table = SymbolTable::from_chars(&['a', 'b']).unwrap(); let result = table.mudder("a", "b", NonZeroUsize::new(2).unwrap()).unwrap(); assert_eq!(result.len(), 2); assert!(result[0].as_str() > "a" && result[1].as_str() > "a"); assert!(result[0].as_str() < "b" && result[1].as_str() < "b"); // The strings *should* be evenly-spaced and as short as they can be. let table = SymbolTable::alphabet(); let result = table.mudder("anhui", "azazel", NonZeroUsize::new(3).unwrap()).unwrap(); assert_eq!(result.len(), 3); assert_eq!(vec!["aq", "as", "av"], result); ``` ## Notes The most notable difference to Mudder.js is that currently, mudders only supports ASCII characters (because 127 characters ought to be enough for everyone™). Our default `::alphabet()` also only has lowercase letters. */ use core::num::NonZeroUsize; use std::{convert::TryFrom, str::FromStr}; #[macro_use] pub mod error; use error::*; /// The functionality of the crate lives here. 
/// /// A symbol table is, internally, a vector of valid ASCII bytes that are used /// to generate lexicographically evenly-spaced strings. #[derive(Clone, Debug)] pub struct SymbolTable(Vec<u8>); impl SymbolTable { /// Creates a new symbol table from the given byte slice. /// The slice is internally sorted using `.sort()`. /// /// An error is returned if one of the given bytes is out of ASCII range. pub fn new(source: &[u8]) -> Result<Self, CreationError> { ensure! { !source.is_empty(), CreationError::EmptySlice } ensure! { all_chars_ascii(&source), NonAsciiError::NonAsciiU8 } // Copy the values, we need to own them anyways... let mut vec: Vec<_> = source.iter().copied().collect(); // Sort them so they're actually in order. // (You can pass in ['b', 'a'], but that's not usable internally I think.) vec.sort(); vec.dedup(); Ok(Self(vec)) } /// Creates a new symbol table from the given characters. /// The slice is internally sorted using `.sort()`. /// /// An error is returned if one of the given characters is not ASCII. pub fn from_chars(source: &[char]) -> Result<Self, CreationError> { let inner: Box<[u8]> = source .iter() .map(|c| try_ascii_u8_from_char(*c)) .collect::<Result<_, _>>()?; Ok(Self::new(&inner)?) } /// Returns a SymbolTable which contains the lowercase latin alphabet (`[a-z]`). #[allow(clippy::char_lit_as_u8)] pub fn alphabet() -> Self { Self::new(&('a' as u8..='z' as u8).collect::<Box<[_]>>()).unwrap() } /// Generate `amount` strings that lexicographically sort between `start` and `end`. /// The algorithm will try to make them as evenly-spaced as possible. /// /// When both parameters are empty strings, `amount` new strings that are /// in lexicographical order are returned. /// /// If parameter `b` is lexicographically before `a`, they are swapped internally. 
/// /// ``` /// # use mudders::SymbolTable; /// # use std::num::NonZeroUsize; /// // Using the included alphabet table /// let table = SymbolTable::alphabet(); /// // Generate 10 strings from scratch /// let results = table.mudder("", "", NonZeroUsize::new(10).unwrap()).unwrap(); /// assert!(results.len() == 10); /// // results should look something like ["b", "d", "f", ..., "r", "t"] /// ``` pub fn mudder( &self, a: &str, b: &str, amount: NonZeroUsize, ) -> Result<Vec<String>, GenerationError> { use error::InternalError::*; use GenerationError::*; ensure! { all_chars_ascii(a), NonAsciiError::NonAsciiU8 } ensure! { all_chars_ascii(b), NonAsciiError::NonAsciiU8 } ensure! { self.contains_all_chars(a), UnknownCharacters(a.to_string()) } ensure! { self.contains_all_chars(b), UnknownCharacters(b.to_string()) } let (a, b) = if a.is_empty() || b.is_empty() { // If an argument is empty, keep the order (a, b) } else if b < a { // If they're not empty and b is lexicographically prior to a, swap them (b, a) } else { // You can't generate values between two matching strings. ensure! { a != b, MatchingStrings(a.to_string()) } // In any other case, keep the order (a, b) }; // TODO: Check for lexicographical adjacency! //ensure! { !lex_adjacent(a, b), LexAdjacentStrings(a.to_string(), b.to_string()) } // Count the characters start and end have in common. let matching_count: usize = { // Iterate through the chars of both given inputs... let (mut start_chars, mut end_chars) = (a.chars(), b.chars()); // We need to keep track of this, because: // In the case of `a` == `"a"` and `b` == `"aab"`, // we actually need to compare `""` to `"b"` later on, not `""` to `"a"`. let mut last_start_char = '\0'; // Counting to get the index. let mut i: usize = 0; loop { // Advance the iterators... match (start_chars.next(), end_chars.next()) { // As long as there's two characters that match, increment i. 
(Some(sc), Some(ec)) if sc == ec => { last_start_char = sc; i += 1; continue; } // If start_chars have run out, but end_chars haven't, check // if the current end char matches the last start char. // If it does, we still need to increment our counter. (None, Some(ec)) if ec == last_start_char => { i += 1; continue; } // break with i as soon as any mismatch happens or both iterators run out. // matching_count will either be 0, indicating that there's // no leading common pattern, or something other than 0, in // that case it's the count of common characters. (None, None) | (Some(_), None) | (None, Some(_)) | (Some(_), Some(_)) => { break i } } } }; // Count the number to add to the total requests amount. // If a or b is empty, we need one item less in the pool; // two items less if both are empty. let non_empty_input_count = [a, b].iter().filter(|s| !s.is_empty()).count(); // For convenience let computed_amount = || amount.get() + non_empty_input_count; // Calculate the distance between the first non-matching characters. // If matching_count is greater than 0, we have leading common chars, // so we skip those, but add the amount to the depth base. let branching_factor = self.distance_between_first_chars( // v--- matching_count might be higher than a.len() // vvv because we might count past a's end &a[std::cmp::min(matching_count, a.len())..], &b[matching_count..], )?; // We also add matching_count to the depth because if we're starting // with a common prefix, we have at least x leading characters that // will be the same for all substrings. let mut depth = depth_for(dbg!(branching_factor), dbg!(computed_amount())) + dbg!(matching_count); // if branching_factor == 1 { // // This should only be the case when we have an input like `"z", ""`. // // In this case, we can generate strings after the z, but we need // // to go one level deeper in any case. 
// depth += 1; // } // TODO: Maybe keeping this as an iterator would be more efficient, // but it would have to be cloned at least once to get the pool length. let pool: Vec<String> = self.traverse("".into(), a, b, dbg!(depth)).collect(); let pool = if (pool.len() as isize).saturating_sub(non_empty_input_count as isize) < amount.get() as isize { depth += depth_for(branching_factor, computed_amount() + pool.len()); dbg!(self.traverse("".into(), a, b, dbg!(depth)).collect()) } else { pool }; if (pool.len() as isize).saturating_sub(non_empty_input_count as isize) < amount.get() as isize { // We still don't have enough items, so bail panic!( "Internal error: Failed to calculate the correct tree depth! This is a bug. Please report it at: https://github.com/Follpvosten/mudders/issues and make sure to include the following information: Symbols in table: {symbols:?} Given inputs: {a:?}, {b:?}, amount: {amount} matching_count: {m_count} non_empty_input_count: {ne_input_count} required pool length (computed amount): {comp_amount} branching_factor: {b_factor} final depth: {depth} pool: {pool:?} (length: {pool_len})", symbols = self.0.iter().map(|i| *i as char).collect::<Box<[_]>>(), a = a, b = b, amount = amount, m_count = matching_count, ne_input_count = non_empty_input_count, comp_amount = computed_amount(), b_factor = branching_factor, depth = depth, pool = pool, pool_len = pool.len(), ) } Ok(if amount.get() == 1 { pool.get(pool.len() / 2) .map(|item| vec![item.clone()]) .ok_or_else(|| FailedToGetMiddle)? } else { let step = computed_amount() as f64 / pool.len() as f64; let mut counter = 0f64; let mut last_value = 0; let result: Vec<_> = pool .into_iter() .filter(|_| { counter += step; let new_value = counter.floor() as usize; if new_value > last_value { last_value = new_value; true } else { false } }) .take(amount.into()) .collect(); ensure! 
{ result.len() == amount.get(), NotEnoughItemsInPool }; result }) } /// Convenience wrapper around `mudder` to generate exactly one string. /// /// # Safety /// This function calls `NonZeroUsize::new_unchecked(1)`. pub fn mudder_one(&self, a: &str, b: &str) -> Result<String, GenerationError> { self.mudder(a, b, unsafe { NonZeroUsize::new_unchecked(1) }) .map(|mut vec| vec.remove(0)) } /// Convenience wrapper around `mudder` to generate an amount of fresh strings. /// /// `SymbolTable.generate(amount)` is equivalent to `SymbolTable.mudder("", "", amount)`. pub fn generate(&self, amount: NonZeroUsize) -> Result<Vec<String>, GenerationError> { self.mudder("", "", amount) } /// Traverses a virtual tree of strings to the given depth. fn traverse<'a>( &'a self, curr_key: String, start: &'a str, end: &'a str, depth: usize, ) -> Box<dyn Iterator<Item = String> + 'a> { if depth == 0 { // If we've reached depth 0, we don't go futher. Box::new(std::iter::empty()) } else { // Generate all possible mutations on the current depth Box::new( self.0 .iter() .filter_map(move |c| -> Option<Box<dyn Iterator<Item = String>>> { // TODO: Performance - this probably still isn't the best option. let key = { let the_char = *c as char; let mut string = String::with_capacity(curr_key.len() + the_char.len_utf8()); string.push_str(&curr_key); string.push(the_char); string }; // After the end key, we definitely do not continue. if key.as_str() > end && !end.is_empty() { None } else if key.as_str() < start { // If we're prior to the start key... // ...and the start key is a subkey of the current key... if start.starts_with(&key) { // ...only traverse the subtree, ignoring the key itself. Some(Box::new(self.traverse(key, start, end, depth - 1))) } else { None } } else { // Traverse normally, returning both the parent and sub key, // in all other cases. 
if key.len() < 2 { let iter = std::iter::once(key.clone()); Some(if key == end { Box::new(iter) } else { Box::new(iter.chain(self.traverse(key, start, end, depth - 1))) }) } else { let first = key.chars().next().unwrap(); Some(if key.chars().all(|c| c == first) { // If our characters are all the same, // don't add key to the list, only the subtree. Box::new(self.traverse(key, start, end, depth - 1)) } else { Box::new(std::iter::once(key.clone()).chain(self.traverse( key, start, end, depth - 1, ))) }) } } }) .flatten(), ) } } fn distance_between_first_chars( &self, start: &str, end: &str, ) -> Result<usize, GenerationError> { use InternalError::WrongCharOrder; // check the first character of both strings... Ok(match (start.chars().next(), end.chars().next()) { // if both have a first char, compare them. (Some(start_char), Some(end_char)) => { ensure! { start_char < end_char, WrongCharOrder(start_char, end_char) } let distance = try_ascii_u8_from_char(end_char)? - try_ascii_u8_from_char(start_char)?; distance as usize + 1 } // if only the start has a first char, compare it to our last possible symbol. (Some(start_char), None) => { let end_u8 = self.0.last().unwrap(); // In this case, we allow the start and end char to be equal. // This is because you can generate something after the last char, // but not before the first char. // vv ensure! { start_char <= *end_u8 as char, WrongCharOrder(start_char, *end_u8 as char) } let distance = end_u8 - try_ascii_u8_from_char(start_char)?; if distance == 0 { 2 } else { distance as usize + 1 } } // if only the end has a first char, compare it to our first possible symbol. (None, Some(end_char)) => { let start_u8 = self.0.first().unwrap(); ensure! { *start_u8 <= end_char as u8, WrongCharOrder(*start_u8 as char, end_char) } let distance = try_ascii_u8_from_char(end_char)? - start_u8; if distance == 0 { 2 } else { distance as usize + 1 } } // if there's no characters given, the whole symboltable is our range. 
_ => self.0.len(), }) } fn contains_all_chars(&self, chars: impl AsRef<[u8]>) -> bool { chars.as_ref().iter().all(|c| self.0.contains(c)) } } /// Calculate the required depth for the given values. /// /// `branching_factor` is used as the logarithm base, `n_elements` as the /// value, and the result is rounded up and cast to usize. fn depth_for(branching_factor: usize, n_elements: usize) -> usize { f64::log(n_elements as f64, branching_factor as f64).ceil() as usize } fn try_ascii_u8_from_char(c: char) -> Result<u8, NonAsciiError> { u8::try_from(c as u32).map_err(NonAsciiError::from) } fn all_chars_ascii(chars: impl AsRef<[u8]>) -> bool { chars.as_ref().iter().all(|i| i.is_ascii()) } impl FromStr for SymbolTable { type Err = CreationError; fn from_str(s: &str) -> Result<Self, CreationError> { Self::from_chars(&s.chars().collect::<Box<[_]>>()) } } #[cfg(test)] mod tests { use super::*; use std::num::NonZeroUsize; /// Create and unwrap a NonZeroUsize from the given usize. fn n(n: usize) -> NonZeroUsize { NonZeroUsize::new(n).unwrap() } // Public API tests: #[test] #[allow(clippy::char_lit_as_u8)] fn valid_tables_work() { assert!(SymbolTable::new(&[1, 2, 3, 4, 5]).is_ok()); assert!(SymbolTable::new(&[125, 126, 127]).is_ok()); // Possible, but to be discouraged assert!(SymbolTable::new(&['a' as u8, 'f' as u8]).is_ok()); assert!(SymbolTable::from_chars(&['a', 'b', 'c']).is_ok()); assert!(SymbolTable::from_str("0123").is_ok()); } #[test] fn invalid_tables_error() { assert!(SymbolTable::from_str("🍅😂👶🏻").is_err()); assert!(SymbolTable::from_chars(&['🍌', '🍣', '⛈']).is_err()); assert!(SymbolTable::new(&[128, 129, 130]).is_err()); assert!(SymbolTable::new(&[]).is_err()); assert!(SymbolTable::from_chars(&[]).is_err()); assert!(SymbolTable::from_str("").is_err()); } #[test] fn unknown_chars_error() { use error::GenerationError::UnknownCharacters; // You cannot pass in strings with characters not in the SymbolTable: let table = SymbolTable::alphabet(); assert_eq!( 
table.mudder_one("123", "()/"), Err(UnknownCharacters("123".into())) ); assert_eq!( table.mudder_one("a", "123"), Err(UnknownCharacters("123".into())) ); assert_eq!( table.mudder_one("0)(", "b"), Err(UnknownCharacters("0)(".into())) ); let table = SymbolTable::from_str("123").unwrap(); assert_eq!( table.mudder_one("a", "b"), Err(UnknownCharacters("a".into())) ); assert_eq!( table.mudder_one("456", "1"), Err(UnknownCharacters("456".into())) ); assert_eq!( table.mudder_one("2", "abc"), Err(UnknownCharacters("abc".into())) ); } #[test] fn equal_strings_error() { use error::GenerationError::MatchingStrings; let table = SymbolTable::alphabet(); assert_eq!( table.mudder_one("abc", "abc"), Err(MatchingStrings("abc".into())) ); assert_eq!( table.mudder_one("xyz", "xyz"), Err(MatchingStrings("xyz".into())) ); } // TODO: Make this test work. // I need to find out how to tell if two strings are lexicographically inseparable. // #[test] // fn lexicographically_adjacent_strings_error() { // assert!(SymbolTable::alphabet().mudder("ba", "baa", n(1)).is_err()); // } #[test] fn reasonable_values() { let table = SymbolTable::from_str("ab").unwrap(); let result = table.mudder_one("a", "b").unwrap(); assert_eq!(result, "ab"); let table = SymbolTable::from_str("0123456789").unwrap(); let result = table.mudder_one("1", "2").unwrap(); assert_eq!(result, "15"); } #[test] fn outputs_more_or_less_match_mudderjs() { let table = SymbolTable::from_str("abc").unwrap(); let result = table.mudder_one("a", "b").unwrap(); assert_eq!(result, "ac"); let table = SymbolTable::alphabet(); let result = table.mudder("anhui", "azazel", n(3)).unwrap(); assert_eq!(result.len(), 3); assert_eq!(vec!["aq", "as", "av"], result); } #[test] fn empty_start() { let table = SymbolTable::from_str("abc").unwrap(); let result = table.mudder("", "c", n(2)).unwrap(); assert_eq!(result.len(), 2); } #[test] fn empty_end() { let table = SymbolTable::from_str("abc").unwrap(); let result = table.mudder("b", "", n(2)).unwrap(); 
assert_eq!(result.len(), 2); } #[test] fn generate_before_ax() { // While you can't generate anything before 'a' with alphabet(), you // should be able to generate something before "a" + something else. let table = SymbolTable::alphabet(); let result = table.mudder("", "axxx", n(10)).unwrap(); assert_eq!(result.len(), 10); assert!(result.iter().all(|k| k.as_str() > "a")); // Some more to be sure assert!(table.mudder_one("", "ab").is_ok()); assert!(table.mudder_one("", "abc").is_ok()); } #[test] fn generate_after_z() { let table = SymbolTable::alphabet(); let result = table.mudder("z", "", n(10)).unwrap(); assert_eq!(result.len(), 10); assert!(result.iter().all(|k| k.as_str() > "z")); } #[test] fn only_amount() { let table = SymbolTable::alphabet(); let result = table.generate(n(10)).unwrap(); assert_eq!(result.len(), 10); } #[test] fn values_sorting_correct() { let mut iter = SymbolTable::alphabet().generate(n(12)).into_iter(); while let (Some(one), Some(two)) = (iter.next(), iter.next()) { assert!(one < two); } } #[test] fn differing_input_lengths() { let table = SymbolTable::alphabet(); let result = table.mudder_one("a", "ab").unwrap(); assert!(result.starts_with('a')); } #[test] fn values_consistently_between_start_and_end() { let table = SymbolTable::alphabet(); { // From z to a let mut right = String::from("z"); for _ in 0..500 { let new_val = dbg!(table.mudder_one("a", &right).unwrap()); assert!(new_val < right); assert!(new_val.as_str() > "a"); right = new_val; } } { // And from a to z let mut left = String::from("a"); // TODO: vv this test fails for higher numbers. FIXME! 
for _ in 0..17 { let new_val = dbg!(table.mudder_one(&left, "z").unwrap()); assert!(new_val > left); assert!(new_val.as_str() < "z"); left = new_val; } } } // Internal/private method tests: #[test] fn traverse_alphabet() { fn traverse_alphabet(a: &str, b: &str, depth: usize) -> Vec<String> { SymbolTable::alphabet() .traverse("".into(), a, b, depth) .collect() } assert_eq!(traverse_alphabet("a", "d", 1), vec!["a", "b", "c", "d"]); assert_eq!( traverse_alphabet("a", "z", 1), ('a' as u32 as u8..='z' as u32 as u8) .map(|c| (c as char).to_string()) .collect::<Vec<_>>() ); assert_eq!( traverse_alphabet("a", "b", 2), vec![ "a", "ab", "ac", "ad", "ae", "af", "ag", "ah", "ai", "aj", "ak", "al", "am", "an", "ao", "ap", "aq", "ar", "as", "at", "au", "av", "aw", "ax", "ay", "az", "b" ] ) } #[test] fn traverse_custom() { fn traverse(table: &str, a: &str, b: &str, depth: usize) -> Vec<String> { let table = SymbolTable::from_str(table).unwrap(); table.traverse("".into(), a, b, depth).collect() } assert_eq!(traverse("abc", "a", "c", 1), vec!["a", "b", "c"]); assert_eq!( traverse("abc", "a", "c", 2), vec!["a", "ab", "ac", "b", "ba", "bc", "c"] ); assert_eq!( traverse("0123456789", "1", "2", 2), vec!["1", "10", "12", "13", "14", "15", "16", "17", "18", "19", "2"] ); } #[test] fn distance_between_first_chars_correct() { let table = SymbolTable::alphabet(); assert_eq!(table.distance_between_first_chars("a", "b").unwrap(), 2); assert_eq!(table.distance_between_first_chars("a", "z").unwrap(), 26); assert_eq!(table.distance_between_first_chars("", "").unwrap(), 26); assert_eq!(table.distance_between_first_chars("n", "").unwrap(), 13); assert_eq!(table.distance_between_first_chars("", "n").unwrap(), 14); assert_eq!(table.distance_between_first_chars("y", "z").unwrap(), 2); assert_eq!(table.distance_between_first_chars("a", "y").unwrap(), 25); assert_eq!( table.distance_between_first_chars("aaaa", "zzzz").unwrap(), table.distance_between_first_chars("aa", "zz").unwrap() ); let table = 
SymbolTable::from_str("12345").unwrap(); assert_eq!(table.distance_between_first_chars("1", "2").unwrap(), 2); assert_eq!(table.distance_between_first_chars("1", "3").unwrap(), 3); assert_eq!(table.distance_between_first_chars("2", "3").unwrap(), 2); } }
true
405b2b62ef98708ef0209b57e8b8912147f1248e
Rust
UnHumbleBen/learning-todomvc
/src/controller.rs
UTF-8
7,315
2.90625
3
[]
no_license
// Controller needs access to Item, ItemQuery, and Store structs/enums. pub use crate::store::*; // Controller needs to send messages to View. pub use crate::view::ViewMessage; // Needs to add messages to the Scheduler. pub use crate::{Message, Scheduler}; // Used for generating ids. pub use js_sys::Date; pub use std::cell::RefCell; pub use std::rc::Weak; /// The controller of the application. /// /// Turns page state into functionality. pub struct Controller { /// the struct that stores item into `localStorage`. pub store: Store, /// A reference cell to the weak pointer to the scheduler. /// /// TODO(benlee12): The Option could possibly be for deallocation. pub sched: RefCell<Option<Weak<Scheduler>>>, /// TODO(benlee12): Check if the string includes "#" or not. /// /// The fragment string of the URL pub active_route: String, /// The previous `active_route`. It is used to /// determine whether the displayed list needs to be refreshed or not. pub last_active_route: String, } impl Controller { /// Initializes a new `Controller` that takes local storage maintainer /// `store` and a weak pointer to the Scheduler `sched` as its fields. pub fn new(store: Store, sched: Weak<Scheduler>) -> Controller { Controller { store, sched: RefCell::new(Some(sched)), active_route: "".into(), last_active_route: "none".into(), } } /// Used by `Scheduler` to convert a `ControllerMessage` into a function /// call on a `Controller`. pub fn call(&mut self, method_name: ControllerMessage) { // For ergonomics, remove reductant enum. use self::ControllerMessage::*; // Determining which ControllerMessage variant was passed. match method_name { AddItem(title) => self.add_item(title), SetPage(hash) => self.set_page(hash), // TODO(benlee12): Why do we need to move id and value? EditItemSave(id, value) => self.edit_item_save(id, value), // TODO(benlee12): Why do we need to move id? 
EditItemCancel(id) => self.edit_item_cancel(id), RemoveCompleted() => self.remove_completed_items(), // Note that we only need to take a string slice here, rather than // moving the entire String over. To remove an item, we just need // use the string as a key. But in the other methods, we actually // need to store the String. RemoveItem(id) => self.remove_item(&id), ToggleAll(completed) => self.toggle_all(completed), // TODO(benlee12): Why do we need to move id? ToggleItem(id, completed) => self.toggle_item(id, completed), } } /// Adds an `Item` to the `Store` with the title `title`. /// /// Signals the `View` to display it in the list. pub fn add_item(&mut self, title: String) { // Inserts item new Item to Store. self.store.insert(Item { // Uses the number of milliseconds elapsed since January 1, 1970 // 00:00:00 UTC as an id. id: Date::now().to_string(), title, // Item starts off as active. completed: false, }); // Tells View to clear the new todo input. self.add_message(ViewMessage::ClearNewTodo()); // Refreshs the list. self._filter(true); } pub fn set_page(&mut self, hash: String) {} pub fn edit_item_save(&mut self, id: String, title: String) {} pub fn edit_item_cancel(&mut self, id: String) {} pub fn remove_completed_items(&mut self) {} pub fn remove_item(&mut self, id: &String) {} pub fn toggle_all(&mut self, completed: bool) {} pub fn toggle_item(&mut self, id: String, completed: bool) {} /// Forwards `view_message` to the Scheduler. pub fn add_message(&self, view_message: ViewMessage) { // self.sched = RefCell<Option<Weak<Scheduler>>> // Unwraps RefCell if let Ok(sched) = self.sched.try_borrow() { // sched = RefMut<Option<Weak<Scheduler>> // Unwraps Option if let Some(ref sched) = *sched { // sched = Weak<Scheduler> // Converts Weak to Rc if let Some(sched) = sched.upgrade() { // sched = Rc<Scheduler> // // Yes! 
This increases the strong count, see this: // https://play.rust-lang.org/?version=stable&mode=debug&edition=2018&gist=22f9c0ff181c70f2739f43b9350df0b2 // // deref coercion -> Scheduler sched.add_message(Message::View(view_message)); } } } } /// Refresh the list based on the current route. /// /// If `force` is `true`, a refresh is guaranteed to happen. pub fn _filter(&mut self, force: bool) { // route = &String let route = &self.active_route; // If filter is forced or ... // TODO(benlee12): other conditions if force || self.last_active_route != "" || &self.last_active_route != route { // Assigns appropriate ItemQuery variant to query. let query = match route.as_str() { "completed" => ItemQuery::Completed { completed: true }, "active" => ItemQuery::Completed { completed: false }, _ => ItemQuery::EmptyItemQuery, }; let mut v = None; { // Mutably borrows the Store. let store = &mut self.store; if let Some(res) = store.find(query) { v = Some(res.into()); } } if let Some(res) = v { self.add_message(ViewMessage::ShowItem(res)); } } } } /// Messages that represent the methods to be called on the Controller /// /// Note that each of these variants signal what needs to change to the /// internal storage, not the actual representation that the users see. /// /// As such, any description explained below that describes a visible change to /// the user is not handled by the `Controller`, but instead, eventually by /// the `View`. Whenever `View` needs to update, `Controller` sends a message /// to the `Scheduler`, which will eventually forward that message to `View`. pub enum ControllerMessage { /// Add a new todo item with the provided `String` as the title. AddItem(String), /// Set the `Controller`'s `active_route` to `String`. SetPage(String), EditItemSave(String, String), EditItemCancel(String), /// Remove all completed todo items from the todo list (in the storage). RemoveCompleted(), /// Remove an item with id `String` (in the storage). 
RemoveItem(String), /// If `bool` is true, mark all as completed. If `bool` is false, mark all /// as uncompleted. Signals the `View` to toggle the checkbox for marking /// all items as completed. ToggleAll(bool), /// Updates item with id `String` in storage based on `bool`. /// /// TODO(benlee12): Why is `bool` necessary when Store has it's own field? ToggleItem(String, bool), }
true
8e3cfbf2d761322b5eca8ce6a62370bc8a6ce953
Rust
cyndis/rust-edid
/src/lib.rs
UTF-8
5,862
2.578125
3
[ "MIT" ]
permissive
// EDID (Extended Display Identification Data) parser built on the nom 3.x
// macro combinators (`named!`, `do_parse!`, `switch!`, ...).
#[macro_use]
extern crate nom;

use nom::{be_u16, le_u8, le_u16, le_u32};

// Code page 437 -> Unicode mapping, used for descriptor text fields.
mod cp437;

/// The fixed EDID header block: vendor/product identification plus the
/// EDID structure version/revision.
#[derive(Debug, PartialEq)]
pub struct Header {
    pub vendor: [char; 3],
    pub product: u16,
    pub serial: u32,
    pub week: u8,
    pub year: u8, // Starting at year 1990
    pub version: u8,
    pub revision: u8,
}

/// Unpacks the big-endian 16-bit manufacturer id into three letters,
/// each stored as 5 bits where 0x01 = 'A'.
fn parse_vendor(v: u16) -> [char; 3] {
    let mask: u8 = 0x1F; // Each letter is 5 bits
    let i0 = ('A' as u8) - 1; // 0x01 = A
    return [
        (((v >> 10) as u8 & mask) + i0) as char,
        (((v >> 5) as u8 & mask) + i0) as char,
        (((v >> 0) as u8 & mask) + i0) as char,
    ]
}

// Parses the 20-byte EDID preamble: the fixed 8-byte magic pattern
// followed by vendor/product/serial, manufacture date and version fields.
named!(parse_header<&[u8], Header>, do_parse!(
    tag!(&[0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00][..])
    >> vendor: be_u16
    >> product: le_u16
    >> serial: le_u32
    >> week: le_u8
    >> year: le_u8
    >> version: le_u8
    >> revision: le_u8
    >> (Header{vendor: parse_vendor(vendor), product, serial, week, year, version, revision})
));

/// Basic display parameters from the EDID "basic display" block.
#[derive(Debug, PartialEq)]
pub struct Display {
    pub video_input: u8,
    pub width: u8, // cm
    pub height: u8, // cm
    pub gamma: u8, // datavalue = (gamma*100)-100 (range 1.00–3.54)
    pub features: u8,
}

// Parses the 5 raw bytes of the basic display parameters block.
named!(parse_display<&[u8], Display>, do_parse!(
    video_input: le_u8
    >> width: le_u8
    >> height: le_u8
    >> gamma: le_u8
    >> features: le_u8
    >> (Display{video_input, width, height, gamma, features})
));

// The next three parsers only skip over their blocks for now (TODO fields).
named!(parse_chromaticity<&[u8], ()>, do_parse!(take!(10) >> ()));

named!(parse_established_timing<&[u8], ()>, do_parse!(take!(3) >> ()));

named!(parse_standard_timing<&[u8], ()>, do_parse!(take!(16) >> ()));

// Reads a 13-byte descriptor text payload: drops 0x0A (newline) padding
// bytes, maps CP437 bytes to Unicode and trims surrounding whitespace.
named!(parse_descriptor_text<&[u8], String>,
    map!(
        map!(take!(13), |b| {
            b.iter()
                .filter(|c| **c != 0x0A)
                .map(|b| cp437::forward(*b))
                .collect::<String>()
        }),
        |s| s.trim().to_string()
    )
);

/// An 18-byte EDID descriptor block, discriminated by its type tag.
#[derive(Debug, PartialEq)]
pub enum Descriptor {
    DetailedTiming, // TODO
    SerialNumber(String),
    UnspecifiedText(String),
    RangeLimits, // TODO
    ProductName(String),
    WhitePoint, // TODO
    StandardTiming, // TODO
    ColorManagement,
    TimingCodes,
    EstablishedTimings,
    Dummy,
    Unknown([u8; 13]),
}

// Parses one 18-byte descriptor. A leading u16 of 0 marks a display
// descriptor whose type byte selects the variant; any other leading value
// means the block is a detailed timing descriptor (payload not yet decoded).
named!(parse_descriptor<&[u8], Descriptor>,
    switch!(le_u16,
        0 => do_parse!(
            take!(1)
            >> d: switch!(le_u8,
                0xFF => do_parse!(
                    take!(1)
                    >> s: parse_descriptor_text
                    >> (Descriptor::SerialNumber(s))
                ) |
                0xFE => do_parse!(
                    take!(1)
                    >> s: parse_descriptor_text
                    >> (Descriptor::UnspecifiedText(s))
                ) |
                0xFD => do_parse!(
                    take!(1)
                    >> take!(13)
                    >> (Descriptor::RangeLimits)
                ) |
                0xFC => do_parse!(
                    take!(1)
                    >> s: parse_descriptor_text
                    >> (Descriptor::ProductName(s))
                ) |
                0xFB => do_parse!(
                    take!(1)
                    >> take!(13)
                    >> (Descriptor::WhitePoint)
                ) |
                0xFA => do_parse!(
                    take!(1)
                    >> take!(13)
                    >> (Descriptor::StandardTiming)
                ) |
                0xF9 => do_parse!(
                    take!(1)
                    >> take!(13)
                    >> (Descriptor::ColorManagement)
                ) |
                0xF8 => do_parse!(
                    take!(1)
                    >> take!(13)
                    >> (Descriptor::TimingCodes)
                ) |
                0xF7 => do_parse!(
                    take!(1)
                    >> take!(13)
                    >> (Descriptor::EstablishedTimings)
                ) |
                0x10 => do_parse!(
                    take!(1)
                    >> take!(13)
                    >> (Descriptor::Dummy)
                ) |
                _ => do_parse!(
                    take!(1)
                    >> data: count_fixed!(u8, le_u8, 13)
                    >> (Descriptor::Unknown(data))
                )
            )
            >> (d)
        ) |
        _ => do_parse!(take!(16) >> (Descriptor::DetailedTiming))
    )
);

/// A parsed 128-byte base EDID block.
#[derive(Debug, PartialEq)]
pub struct EDID {
    pub header: Header,
    pub display: Display,
    chromaticity: (), // TODO
    established_timing: (), // TODO
    standard_timing: (), // TODO
    pub descriptors: Vec<Descriptor>,
}

// Parses a complete base EDID block in field order; the extension count
// and checksum bytes at the end are consumed but not validated.
named!(parse_edid<&[u8], EDID>, do_parse!(
    header: parse_header
    >> display: parse_display
    >> chromaticity: parse_chromaticity
    >> established_timing: parse_established_timing
    >> standard_timing: parse_standard_timing
    >> descriptors: count!(parse_descriptor, 4)
    >> take!(1) // number of extensions
    >> take!(1) // checksum
    >> (EDID{header, display, chromaticity, established_timing, standard_timing, descriptors})
));

/// Parses `data` as a base EDID block, returning the nom result.
pub fn parse(data: &[u8]) -> nom::IResult<&[u8], EDID> {
    parse_edid(data)
}

#[cfg(test)]
mod tests {
    use super::*;

    // Asserts that `d` parses fully (no trailing input) into `expected`.
    fn test(d: &[u8], expected: &EDID) {
        match parse(d) {
            nom::IResult::Done(remaining, parsed) => {
                assert_eq!(remaining.len(), 0);
                assert_eq!(&parsed, expected);
            },
            nom::IResult::Error(err) => {
                panic!(format!("{}", err));
            },
            nom::IResult::Incomplete(_) => {
                panic!("Incomplete");
            },
        }
    }

    #[test]
    fn test_card0_vga_1() {
        let d = include_bytes!("../testdata/card0-VGA-1");
        let expected = EDID{
            header: Header{
                vendor: ['S', 'A', 'M'],
                product: 596,
                serial: 1146106418,
                week: 27,
                year: 17,
                version: 1,
                revision: 3,
            },
            display: Display{
                video_input: 14,
                width: 47,
                height: 30,
                gamma: 120,
                features: 42,
            },
            chromaticity: (),
            established_timing: (),
            standard_timing: (),
            descriptors: vec!(
                Descriptor::DetailedTiming,
                Descriptor::RangeLimits,
                Descriptor::ProductName("SyncMaster".to_string()),
                Descriptor::SerialNumber("HS3P701105".to_string()),
            ),
        };
        test(d, &expected);
    }

    #[test]
    fn test_card0_edp_1() {
        let d = include_bytes!("../testdata/card0-eDP-1");
        let expected = EDID{
            header: Header{
                vendor: ['S', 'H', 'P'],
                product: 5193,
                serial: 0,
                week: 32,
                year: 25,
                version: 1,
                revision: 4,
            },
            display: Display{
                video_input: 165,
                width: 29,
                height: 17,
                gamma: 120,
                features: 14,
            },
            chromaticity: (),
            established_timing: (),
            standard_timing: (),
            descriptors: vec!(
                Descriptor::DetailedTiming,
                Descriptor::Dummy,
                Descriptor::UnspecifiedText("DJCP6ÇLQ133M1".to_string()),
                Descriptor::Unknown([2, 65, 3, 40, 0, 18, 0, 0, 11, 1, 10, 32, 32]),
            ),
        };
        test(d, &expected);
    }
}
true