text
stringlengths
8
4.13M
/*
 * Copyright Stalwart Labs Ltd. See the COPYING
 * file at the top-level directory of this distribution.
 *
 * Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
 * https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
 * <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your
 * option. This file may not be copied, modified, or distributed
 * except according to those terms.
 */

use super::{Mailbox, MailboxRights, Role};
use crate::{core::get::GetObject, principal::ACL, Get, Set};
use ahash::AHashMap;

/// Read accessors for a `Mailbox` obtained from a `Mailbox/get` response.
impl Mailbox<Get> {
    /// Returns the mailbox id, if set.
    pub fn id(&self) -> Option<&str> {
        self.id.as_deref()
    }

    /// Moves the id out of this mailbox, leaving `None` behind.
    /// Returns an empty string when no id was present.
    pub fn take_id(&mut self) -> String {
        self.id.take().unwrap_or_default()
    }

    /// Returns the mailbox name, if set.
    pub fn name(&self) -> Option<&str> {
        self.name.as_deref()
    }

    /// Returns the parent mailbox id, if set.
    pub fn parent_id(&self) -> Option<&str> {
        self.parent_id.as_deref()
    }

    /// Returns the mailbox role, defaulting to `Role::None` when unset.
    pub fn role(&self) -> Role {
        // clone() on the Option directly; as_ref().cloned() was a detour.
        self.role.clone().unwrap_or(Role::None)
    }

    /// Returns the sort order, defaulting to 0.
    pub fn sort_order(&self) -> u32 {
        // Option<u32> is Copy, so it can be unwrapped without as_ref().copied().
        self.sort_order.unwrap_or(0)
    }

    /// Returns the total number of e-mails, defaulting to 0.
    pub fn total_emails(&self) -> usize {
        self.total_emails.unwrap_or(0)
    }

    /// Returns the number of unread e-mails, defaulting to 0.
    pub fn unread_emails(&self) -> usize {
        self.unread_emails.unwrap_or(0)
    }

    /// Returns the total number of threads, defaulting to 0.
    pub fn total_threads(&self) -> usize {
        self.total_threads.unwrap_or(0)
    }

    /// Returns the number of unread threads, defaulting to 0.
    pub fn unread_threads(&self) -> usize {
        self.unread_threads.unwrap_or(0)
    }

    /// Returns whether the account is subscribed to this mailbox.
    pub fn is_subscribed(&self) -> bool {
        self.is_subscribed.unwrap_or(false)
    }

    /// Returns the rights the current account has on this mailbox.
    pub fn my_rights(&self) -> Option<&MailboxRights> {
        self.my_rights.as_ref()
    }

    /// Returns the per-principal ACL map, if present.
    pub fn acl(&self) -> Option<&AHashMap<String, Vec<ACL>>> {
        self.acl.as_ref()
    }

    /// Moves the ACL map out of this mailbox, leaving `None` behind.
    pub fn take_acl(&mut self) -> Option<AHashMap<String, Vec<ACL>>> {
        self.acl.take()
    }
}

/// Accessors for the `myRights` object of a mailbox.
impl MailboxRights {
    pub fn may_read_items(&self) -> bool {
        self.may_read_items
    }

    pub fn may_add_items(&self) -> bool {
        self.may_add_items
    }

    pub fn may_remove_items(&self) -> bool {
        self.may_remove_items
    }

    pub fn may_set_seen(&self) -> bool {
        self.may_set_seen
    }

    pub fn may_set_keywords(&self) -> bool {
        self.may_set_keywords
    }

    pub fn may_create_child(&self) -> bool {
        self.may_create_child
    }

    pub fn may_rename(&self) -> bool {
        self.may_rename
    }

    pub fn may_delete(&self) -> bool {
        self.may_delete
    }

    pub fn may_submit(&self) -> bool {
        self.may_submit
    }

    /// Converts the boolean rights into a deduplicated list of `ACL` entries.
    pub fn acl_list(&self) -> Vec<ACL> {
        let mut acl_list = Vec::new();
        for (is_set, acl) in [
            (self.may_read_items, ACL::ReadItems),
            (self.may_add_items, ACL::AddItems),
            (self.may_remove_items, ACL::RemoveItems),
            // Both the seen-state and keyword rights map to ModifyItems;
            // the contains() check below prevents a duplicate entry.
            (self.may_set_seen, ACL::ModifyItems),
            (self.may_set_keywords, ACL::ModifyItems),
            (self.may_create_child, ACL::CreateChild),
            (self.may_rename, ACL::Modify),
            (self.may_delete, ACL::Delete),
            (self.may_submit, ACL::Submit),
        ] {
            if is_set && !acl_list.contains(&acl) {
                acl_list.push(acl);
            }
        }
        acl_list
    }
}

impl GetObject for Mailbox<Set> {
    type GetArguments = ();
}

impl GetObject for Mailbox<Get> {
    type GetArguments = ();
}
extern crate bincode;
extern crate ethereum_types;
extern crate serde;
extern crate serde_derive;
extern crate sha3;

use durian::state_provider::StateProvider;
use ethereum_types::{Address, H256, U256, U512};
use serde::{Deserialize, Serialize};
use sha3::{Digest, Keccak256};
use std::collections::HashMap;
use std::time::SystemTime;

pub type Hash = H256;

/// A minimal block: height, parent hash and creation time.
#[derive(Serialize, Deserialize, PartialEq, Debug)]
pub struct Block {
    num: u32,
    prev: Hash,
    time: SystemTime,
}

/// An account with a balance and optional contract code/storage.
#[derive(Debug)]
pub struct Account {
    address: Address,
    balance: u64,
    code: Option<Vec<u8>>,
    storage: HashMap<U256, Vec<u8>>,
}

/// A toy in-memory blockchain whose accounts are keyed by string aliases.
#[derive(Debug)]
pub struct Blockchain<'a> {
    blocks: Vec<Block>,
    accounts: HashMap<&'a str, Account>,
}

impl<'a> Blockchain<'a> {
    /// Creates a chain with a genesis block and four pre-funded accounts.
    pub fn new() -> Blockchain<'a> {
        let gen = Block::new(0, Hash::zero());
        let mut accounts = HashMap::new();
        for alias in ["alice", "bob", "carol", "dave"] {
            accounts.insert(alias, Account::new(Address::random(), 1_000_000));
        }
        Blockchain {
            blocks: vec![gen],
            accounts,
        }
    }

    /// Appends a new block linked to the current chain tip.
    pub fn commit(&mut self) {
        let block = Block::new(self.blocks.len() as u32, self.blocks.last().unwrap().hash());
        self.blocks.push(block);
    }

    /// Resolves an alias to its account address.
    ///
    /// # Panics
    /// Panics if the alias is unknown.
    pub fn get_address(&self, alias: &str) -> Address {
        self.accounts.get(alias).unwrap().address
    }
}

impl<'a> Default for Blockchain<'a> {
    fn default() -> Self {
        Self::new()
    }
}

impl<'a> StateProvider for Blockchain<'a> {
    /// Stub: storage lookups always return zero.
    fn storage_at(&self, _key: U256) -> U256 {
        U256::zero()
    }

    // NOTE(review): "blockhahs" must match the durian::StateProvider trait's
    // method name; it looks like an upstream typo for "blockhash" — fix there.
    fn blockhahs(&self, _num: i64) -> U512 {
        U512::zero()
    }

    /// Stub: no account is reported as existing.
    fn exist(&self, _address: Address) -> bool {
        false
    }
}

impl Block {
    /// Creates a block; the timestamp is taken at construction time.
    pub fn new(num: u32, prev: Hash) -> Block {
        Block {
            num,
            prev,
            time: SystemTime::now(),
        }
    }

    /// Keccak-256 hash of the bincode serialization of the block.
    pub fn hash(&self) -> Hash {
        let bytes = bincode::serialize(self).unwrap();
        Hash::from_slice(Keccak256::digest(&bytes).as_slice())
    }
}

impl Account {
    /// Creates an account with no code and empty storage.
    pub fn new(addr: Address, bal: u64) -> Account {
        Account {
            address: addr,
            balance: bal,
            code: None,
            storage: HashMap::new(),
        }
    }
}
use crate::blob::blob::responses::ListBlobsResponse;
use crate::blob::prelude::*;
use azure_core::headers::add_optional_header;
use azure_core::prelude::*;
use futures::stream::{unfold, Stream};
use http::method::Method;
use http::status::StatusCode;
use std::convert::TryInto;

/// Builder for the container "List Blobs" operation.
///
/// Optional parameters are set through the generated setter methods; the
/// request is issued with `execute()` (one page) or `stream()` (all pages,
/// following continuation markers).
#[derive(Debug, Clone)]
pub struct ListBlobsBuilder<'a> {
    container_client: &'a ContainerClient,
    prefix: Option<Prefix<'a>>,
    delimiter: Option<Delimiter<'a>>,
    next_marker: Option<NextMarker>,
    max_results: Option<MaxResults>,
    // Each `include_*` flag contributes one token to the `include=` query value.
    include_snapshots: bool,
    include_metadata: bool,
    include_uncommitted_blobs: bool,
    include_copy: bool,
    include_deleted: bool,
    include_tags: bool,
    include_versions: bool,
    client_request_id: Option<ClientRequestId<'a>>,
    timeout: Option<Timeout>,
}

impl<'a> ListBlobsBuilder<'a> {
    /// Creates a builder with every optional parameter unset.
    pub(crate) fn new(container_client: &'a ContainerClient) -> Self {
        Self {
            container_client,
            prefix: None,
            delimiter: None,
            next_marker: None,
            max_results: None,
            include_snapshots: false,
            include_metadata: false,
            include_uncommitted_blobs: false,
            include_copy: false,
            include_deleted: false,
            include_tags: false,
            include_versions: false,
            client_request_id: None,
            timeout: None,
        }
    }

    // Generates fluent setter methods for the optional parameters
    // (macro presumably supplied by the azure_core prelude — confirm).
    setters! {
        prefix: Prefix<'a> => Some(prefix),
        delimiter: Delimiter<'a> => Some(delimiter),
        next_marker: NextMarker => Some(next_marker),
        max_results: MaxResults => Some(max_results),
        include_snapshots: bool => include_snapshots,
        include_metadata: bool => include_metadata,
        include_uncommitted_blobs: bool => include_uncommitted_blobs,
        include_copy: bool => include_copy,
        include_deleted: bool => include_deleted,
        include_tags: bool => include_tags,
        include_versions: bool => include_versions,
        client_request_id: ClientRequestId<'a> => Some(client_request_id),
        timeout: Timeout => Some(timeout),
    }

    /// Performs a single "List Blobs" request and returns one page of results.
    ///
    /// # Errors
    /// Fails if the URL cannot be built, the HTTP request fails, the status
    /// is not 200 OK, or the response body cannot be parsed.
    pub async fn execute(
        &self,
    ) -> Result<ListBlobsResponse, Box<dyn std::error::Error + Sync + Send>> {
        let mut url = self.container_client.url_with_segments(None)?;

        // Mandatory query parameters selecting the container list operation.
        url.query_pairs_mut().append_pair("restype", "container");
        url.query_pairs_mut().append_pair("comp", "list");

        // Optional parameters append themselves only when set.
        self.prefix.append_to_url_query(&mut url);
        self.delimiter.append_to_url_query(&mut url);
        self.next_marker.append_to_url_query(&mut url);
        self.max_results.append_to_url_query(&mut url);

        // This code will construct the "include" query pair
        // attribute. It only allocates a Vec of references ('static
        // str) and, finally, a single string.
        let mut optional_includes = Vec::new();
        if self.include_snapshots {
            optional_includes.push("snapshots");
        }
        if self.include_metadata {
            optional_includes.push("metadata");
        }
        if self.include_uncommitted_blobs {
            optional_includes.push("uncommittedblobs");
        }
        if self.include_copy {
            optional_includes.push("copy");
        }
        if self.include_deleted {
            optional_includes.push("deleted");
        }
        if self.include_tags {
            optional_includes.push("tags");
        }
        if self.include_versions {
            optional_includes.push("versions");
        }
        if !optional_includes.is_empty() {
            url.query_pairs_mut()
                .append_pair("include", &optional_includes.join(","));
        }

        self.timeout.append_to_url_query(&mut url);

        trace!("list blob url = {}", url);

        let request = self.container_client.prepare_request(
            url.as_str(),
            &Method::GET,
            &|mut request| {
                request = add_optional_header(&self.client_request_id, request);
                request
            },
            None, // no request body
        )?;

        let response = self
            .container_client
            .storage_client()
            .storage_account_client()
            .http_client()
            .execute_request_check_status(request.0, StatusCode::OK)
            .await?;

        Ok((&response).try_into()?)
    }

    /// Returns a stream yielding one `ListBlobsResponse` per page, following
    /// the server's continuation (`next_marker`) until the listing ends.
    pub fn stream(
        self,
    ) -> impl Stream<Item = Result<ListBlobsResponse, Box<dyn std::error::Error + Sync + Send>>> + 'a
    {
        // Paging state: the very first request, or a continuation marker.
        #[derive(Debug, Clone, PartialEq)]
        enum States {
            Init,
            NextMarker(NextMarker),
        }

        // `None` state terminates the stream after the final page.
        unfold(Some(States::Init), move |next_marker: Option<States>| {
            let req = self.clone();
            async move {
                debug!("next_marker == {:?}", &next_marker);
                let response = match next_marker {
                    Some(States::Init) => req.execute().await,
                    Some(States::NextMarker(next_marker)) => {
                        req.next_marker(next_marker).execute().await
                    }
                    None => return None,
                };

                // the ? operator does not work in async move (yet?)
                // so we have to resort to this boilerplate
                let response = match response {
                    Ok(response) => response,
                    Err(err) => return Some((Err(err), None)),
                };

                let next_marker = response.next_marker.clone().map(States::NextMarker);
                Some((Ok(response), next_marker))
            }
        })
    }
}
//! //! ``` //! use sxd_document::Package; //! let package = Package::new(); //! let doc = package.as_document(); //! //! let hello = doc.create_element("hello"); //! hello.set_attribute_value("planet", "Earth"); //! let comment = doc.create_comment("What about other planets?"); //! let text = doc.create_text("Greetings, Earthlings!"); //! //! hello.append_child(comment); //! hello.append_child(text); //! doc.root().append_child(hello); //! ``` //! //! ### Memory and ownership //! //! The `Package` struct is responsible for owning every node in the //! document. Strings are interned, allowing repeated text to consume //! less memory. This is very useful for documents containing lots of //! the same attributes and tag names. //! //! The flip side of this decision is that allocated nodes and strings //! are not deallocated until the entire `Package` is dropped. This is //! a reasonable decision for two common cases: building up an XML //! document and reading an XML document. You may wish to perform //! large modifications to your data *before* creating a document. //! //! ### Namespaces, QNames, and Prefixes //! //! The names of elements and attributes may use namespaces. XML //! namespacing uses URIs to uniquely distinguish items with the same //! local name. A qualified name (`QName`) combines this optional URI //! with the local name. //! //! When an XML document is represented as text, namespaces are given //! a shorthand reference known as a prefix. Prefix names are //! non-authoritative, and only the URI can be used to namespace a //! name. //! //! Elements and attributes may specify a *preferred prefix*, which is //! an indication of what the user would like to be used as a //! prefix. There are times where the preferred prefix would cause a //! conflict, and so an autogenerated prefix will be used instead. //! //! ### Design decisions //! //! Try to leverage the type system as much as possible. 
#![cfg_attr(feature = "unstable", feature(pattern))] #![cfg_attr(feature = "unstable", feature(test))] extern crate typed_arena; #[macro_use] extern crate peresil; use std::fmt; mod str_ext; mod lazy_hash_map; mod string_pool; mod raw; mod str; #[doc(hidden)] pub mod thindom; pub mod dom; pub mod parser; pub mod writer; pub use str::XmlChar; static XML_NS_PREFIX: &'static str = "xml"; static XML_NS_URI: &'static str = "http://www.w3.org/XML/1998/namespace"; /// A prefixed name. This represents what is found in the string form /// of an XML document, and does not apply any namespace mapping. #[derive(Debug,Copy,Clone,PartialEq,Eq,PartialOrd,Ord)] pub struct PrefixedName<'a> { prefix: Option<&'a str>, local_part: &'a str, } impl<'a> PrefixedName<'a> { /// Create a `PrefixedName` without a prefix pub fn new(local_part: &str) -> PrefixedName { PrefixedName::with_prefix(None, local_part) } /// Create a `PrefixedName` without an optional prefix pub fn with_prefix(prefix: Option<&'a str>, local_part: &'a str) -> PrefixedName<'a> { PrefixedName { prefix: prefix, local_part: local_part, } } pub fn prefix(&self) -> Option<&str> { self.prefix } pub fn local_part(&self) -> &str { self.local_part } } /// A namespace-qualified name. This represents the name of an element /// or attribute *after* the prefix has been mapped to a specific /// namespace. 
#[derive(Debug,Copy,Clone,PartialEq)] pub struct QName<'s> { namespace_uri: Option<&'s str>, local_part: &'s str, } impl<'s> QName<'s> { /// Create a `QName` without a namespace pub fn new(local_part: &'s str) -> QName<'s> { QName::with_namespace_uri(None, local_part) } /// Create a `QName` with an optional namespace pub fn with_namespace_uri(namespace_uri: Option<&'s str>, local_part: &'s str) -> QName<'s> { QName { namespace_uri: namespace_uri, local_part: local_part, } } pub fn namespace_uri(&self) -> Option<&'s str> { self.namespace_uri } pub fn local_part(&self) -> &'s str { self.local_part } } impl<'s> From<(&'s str, &'s str)> for QName<'s> { fn from(v: (&'s str, &'s str)) -> QName<'s> { QName { namespace_uri: Some(v.0), local_part: v.1 } } } impl<'s> From<&'s str> for QName<'s> { fn from(v: &'s str) -> QName<'s> { QName { namespace_uri: None, local_part: v } } } /// The main entrypoint to an XML document /// /// This is an opaque structure that stores the internal details of /// the XML document. Modify the document via `as_document`. pub struct Package { storage: raw::Storage, connections: raw::Connections, } impl Package { pub fn new() -> Package { let s = raw::Storage::new(); let root = s.create_root(); Package { storage: s, connections: raw::Connections::new(root), } } pub fn as_document(&self) -> dom::Document { dom::Document::new(&self.storage, &self.connections) } #[doc(hidden)] pub fn as_thin_document(&self) -> (thindom::Storage, thindom::Connections) { let s = thindom::Storage::new(&self.storage); let c = thindom::Connections::new(&self.connections); (s, c) } } impl PartialEq for Package { fn eq(&self, other: &Package) -> bool { self as *const Package == other as *const Package } } impl fmt::Debug for Package { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "Package") } }
const NEED_INPUT: u32 = 2;
const HALT: u32 = 1;
const RESULT_OK: u32 = 0;
const DEBUG: bool = false;

/// An Intcode virtual machine (Advent of Code 2019).
///
/// Memory (`intcode`) grows on demand and is zero-initialized past the end
/// of the loaded program. Supports position (0), immediate (1) and
/// relative (2) parameter modes.
pub struct IntCodeComputer {
    pub intcode: Vec<i64>,
    inputs: Vec<i64>,
    pub outputs: Vec<i64>,
    ptr: usize,
    relative_base: i64,
}

impl IntCodeComputer {
    /// Creates a computer from a program and an initial input queue.
    ///
    /// Takes any slice; existing `&Vec<i64>` callers keep working via deref
    /// coercion.
    pub fn new(intcode: &[i64], default_inputs: Vec<i64>) -> IntCodeComputer {
        IntCodeComputer {
            intcode: intcode.to_vec(),
            inputs: default_inputs,
            outputs: vec![],
            ptr: 0,
            relative_base: 0,
        }
    }

    /// Runs until the program halts (`HALT`) or blocks on input
    /// (`NEED_INPUT`). Can be resumed by pushing input and calling again.
    pub fn run(&mut self) -> u32 {
        if DEBUG {
            println!("\nRunning intcode: {:?}", self.intcode);
        }
        loop {
            let result = self.next_operation();
            if result != RESULT_OK {
                return result;
            }
        }
    }

    /// Executes a single instruction and advances the instruction pointer.
    fn next_operation(&mut self) -> u32 {
        let opcode = self.intcode[self.ptr];
        let operation = self.operation_from_opcode(opcode);
        if DEBUG {
            println!("\n--Opcode {}, operation {}", opcode, operation);
        }
        match operation {
            1 => self.add(opcode),
            2 => self.multiply(opcode),
            3 => {
                let result = self.input(opcode);
                if result == NEED_INPUT {
                    // halt until input
                    return NEED_INPUT;
                }
            }
            4 => self.output(opcode),
            5 => self.jump_if_true(opcode),
            6 => self.jump_if_false(opcode),
            7 => self.less_than(opcode),
            8 => self.equals(opcode),
            9 => self.change_relative_base(opcode),
            99 => return HALT,
            _ => panic!("Unknown opcode {}. Something went wrong", opcode),
        }
        if DEBUG {
            println!("--State: ptr: {}. Intcode:", self.ptr);
            print!("--");
            for (index, item) in self.intcode.iter().enumerate() {
                print!("[{}: {}], ", index, item);
            }
            print!("\n");
        }
        RESULT_OK
    }

    /// The two low decimal digits of the opcode select the operation.
    fn operation_from_opcode(&self, opcode: i64) -> i64 {
        opcode % 100
    }

    /// Opcode 1: param1 + param2 -> param3.
    fn add(&mut self, opcode: i64) {
        let parameter_modes = self.modes_from_opcode(opcode, 3);
        let value1: i64 = self.value_from_parameter(parameter_modes[0], self.ptr + 1);
        let value2: i64 = self.value_from_parameter(parameter_modes[1], self.ptr + 2);
        let result: i64 = value1 + value2;
        if DEBUG {
            println!("--Adding {} and {} to get {}", value1, value2, result);
        }
        self.update_value_from_parameter(parameter_modes[2], self.ptr + 3, result);
        self.ptr += 4;
    }

    /// Opcode 2: param1 * param2 -> param3.
    fn multiply(&mut self, opcode: i64) {
        let parameter_modes = self.modes_from_opcode(opcode, 3);
        let value1: i64 = self.value_from_parameter(parameter_modes[0], self.ptr + 1);
        let value2: i64 = self.value_from_parameter(parameter_modes[1], self.ptr + 2);
        let result: i64 = value1 * value2;
        if DEBUG {
            println!("--Multiplying {} and {} to get {}", value1, value2, result);
        }
        self.update_value_from_parameter(parameter_modes[2], self.ptr + 3, result);
        self.ptr += 4;
    }

    /// Opcode 3: pops the next queued input into param1.
    /// Returns NEED_INPUT (without advancing) when the queue is empty.
    fn input(&mut self, opcode: i64) -> u32 {
        let parameter_modes = self.modes_from_opcode(opcode, 1);
        if self.inputs.is_empty() {
            if DEBUG {
                println!("--Need Input. Waiting...");
            }
            return NEED_INPUT;
        }
        let input = self.inputs.remove(0);
        if DEBUG {
            println!("--Inputting {}", input);
        }
        self.update_value_from_parameter(parameter_modes[0], self.ptr + 1, input);
        self.ptr += 2;
        RESULT_OK
    }

    /// Opcode 4: appends param1 to `outputs`.
    fn output(&mut self, opcode: i64) {
        let parameter_modes = self.modes_from_opcode(opcode, 1);
        let output = self.value_from_parameter(parameter_modes[0], self.ptr + 1);
        if DEBUG {
            println!("--Outputting {} from address {}", output, self.intcode[self.ptr + 1]);
        }
        self.outputs.push(output);
        self.ptr += 2;
    }

    /// Opcode 5: jump to param2 when param1 != 0.
    fn jump_if_true(&mut self, opcode: i64) {
        let parameter_modes = self.modes_from_opcode(opcode, 2);
        let value1: i64 = self.value_from_parameter(parameter_modes[0], self.ptr + 1);
        let value2: i64 = self.value_from_parameter(parameter_modes[1], self.ptr + 2);
        if DEBUG {
            println!("--JumpIfTrue: if {} != 0, Jump to {}", value1, value2);
        }
        if value1 != 0 {
            self.ptr = value2 as usize;
        } else {
            self.ptr += 3;
        }
    }

    /// Opcode 6: jump to param2 when param1 == 0.
    fn jump_if_false(&mut self, opcode: i64) {
        let parameter_modes = self.modes_from_opcode(opcode, 2);
        let value1: i64 = self.value_from_parameter(parameter_modes[0], self.ptr + 1);
        let value2: i64 = self.value_from_parameter(parameter_modes[1], self.ptr + 2);
        if DEBUG {
            println!("--JumpIfFalse: If {} == 0, Jump to {}", value1, value2);
        }
        if value1 == 0 {
            self.ptr = value2 as usize;
        } else {
            self.ptr += 3;
        }
    }

    /// Opcode 7: param3 = (param1 < param2) as 1/0.
    /// (Renamed from the original `less_then` typo; private, all callers here.)
    fn less_than(&mut self, opcode: i64) {
        let parameter_modes = self.modes_from_opcode(opcode, 3);
        let value1: i64 = self.value_from_parameter(parameter_modes[0], self.ptr + 1);
        let value2: i64 = self.value_from_parameter(parameter_modes[1], self.ptr + 2);
        if DEBUG {
            println!("--Less than: If {} is less than {} 1 else 0", value1, value2);
        }
        if value1 < value2 {
            self.update_value_from_parameter(parameter_modes[2], self.ptr + 3, 1);
        } else {
            self.update_value_from_parameter(parameter_modes[2], self.ptr + 3, 0);
        }
        self.ptr += 4;
    }

    /// Opcode 8: param3 = (param1 == param2) as 1/0.
    fn equals(&mut self, opcode: i64) {
        let parameter_modes = self.modes_from_opcode(opcode, 3);
        let value1: i64 = self.value_from_parameter(parameter_modes[0], self.ptr + 1);
        let value2: i64 = self.value_from_parameter(parameter_modes[1], self.ptr + 2);
        if DEBUG {
            println!("--Equals: If {} is equal to {} then 1 else 0", value1, value2);
        }
        if value1 == value2 {
            self.update_value_from_parameter(parameter_modes[2], self.ptr + 3, 1);
        } else {
            self.update_value_from_parameter(parameter_modes[2], self.ptr + 3, 0);
        }
        self.ptr += 4;
    }

    /// Opcode 9: adjusts the relative base by param1.
    fn change_relative_base(&mut self, opcode: i64) {
        let parameter_modes = self.modes_from_opcode(opcode, 1);
        let value1: i64 = self.value_from_parameter(parameter_modes[0], self.ptr + 1);
        self.relative_base += value1;
        if DEBUG {
            println!("--Change relative base by {} to {}", value1, self.relative_base);
        }
        self.ptr += 2;
    }

    /// Extracts `parameters` mode digits from the opcode, first parameter
    /// first. 0 = position, 1 = immediate, 2 = relative.
    fn modes_from_opcode(&self, opcode: i64, parameters: usize) -> Vec<i64> {
        let params = opcode / 100;
        let mut results = vec![];
        let base: i64 = 10;
        for i in 0..parameters {
            let power = (i + 1) as u32;
            results.push((params % base.pow(power)) / base.pow(power - 1));
        }
        results
    }

    /// Reads the value a parameter refers to, honoring its mode.
    fn value_from_parameter(&mut self, parameter: i64, ptr: usize) -> i64 {
        match parameter {
            0 => {
                // Position mode: the cell holds an address.
                let address_at_pointer = self.intcode[ptr];
                assert!(address_at_pointer >= 0);
                self.get_value(address_at_pointer as usize)
            }
            // Immediate mode: the cell holds the value itself.
            1 => self.get_value(ptr),
            2 => {
                // Relative mode: address = cell + relative base.
                let address_at_pointer = self.intcode[ptr];
                let relative = address_at_pointer + self.relative_base;
                assert!(relative >= 0);
                self.get_value(relative as usize)
            }
            _ => panic!("parameter value {} not understood", parameter),
        }
    }

    /// Writes `value` through a parameter. Write targets are always
    /// addresses, so modes 0 and 1 behave identically here.
    fn update_value_from_parameter(&mut self, parameter: i64, ptr: usize, value: i64) {
        match parameter {
            0 | 1 => {
                let address_at_pointer = self.intcode[ptr];
                assert!(address_at_pointer >= 0);
                self.replace_value(address_at_pointer as usize, value)
            }
            2 => {
                let address_at_pointer = self.intcode[ptr];
                let relative = address_at_pointer + self.relative_base;
                assert!(relative >= 0);
                self.replace_value(relative as usize, value);
            }
            _ => panic!("parameter value {} not understood", parameter),
        }
    }

    /// Stores `value` at `address`, growing memory with zeros if needed.
    fn replace_value(&mut self, address: usize, value: i64) {
        if address >= self.intcode.len() {
            self.intcode.resize(address + 1, 0);
        }
        if DEBUG {
            println!("--Update address {} to value {}", address, value);
        }
        // Plain assignment; the original used mem::replace and discarded
        // the returned previous value.
        self.intcode[address] = value;
    }

    /// Loads the value at `address`, growing memory with zeros if needed.
    fn get_value(&mut self, address: usize) -> i64 {
        if address >= self.intcode.len() {
            self.intcode.resize(address + 1, 0);
        }
        self.intcode[address]
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_day9_example1() {
        let intcode = vec![109,1,204,-1,1001,100,1,100,1008,100,16,101,1006,101,0,99];
        let expected = vec![109,1,204,-1,1001,100,1,100,1008,100,16,101,1006,101,0,99];
        let mut computer = IntCodeComputer::new(&intcode, vec![]);
        computer.run();
        assert_eq!(computer.outputs, expected);
    }

    #[test]
    fn test_day9_example2() {
        let intcode = vec![1102,34915192,34915192,7,4,7,99,0];
        let expected = 16;
        let mut computer = IntCodeComputer::new(&intcode, vec![]);
        computer.run();
        assert_eq!(format!("{}", computer.outputs[0]).len(), expected);
    }

    #[test]
    fn test_day9_example3() {
        let intcode = vec![104,1125899906842624,99];
        let expected = 1125899906842624;
        let mut computer = IntCodeComputer::new(&intcode, vec![]);
        computer.run();
        assert_eq!(computer.outputs[0], expected);
    }

    #[test]
    fn test_day9_203() {
        let intcode = vec![9, 3, 203, 6, 4, 12, 99];
        let inputs = vec![1];
        let expected = 1;
        let mut computer = IntCodeComputer::new(&intcode, inputs);
        computer.run();
        assert_eq!(computer.outputs[0], expected);
    }

    // OLD IntCode Computer tests
    #[test]
    fn test_run_intcode_example1() {
        let intcode: Vec<i64> = vec![1,0,0,0,99];
        let end = vec![2,0,0,0,99];
        let mut computer = IntCodeComputer::new(&intcode, vec![]);
        computer.run();
        assert_eq!(computer.intcode, end);
    }

    #[test]
    fn test_run_intcode_example2() {
        let intcode: Vec<i64> = vec![2,3,0,3,99];
        let end = vec![2,3,0,6,99];
        let mut computer = IntCodeComputer::new(&intcode, vec![]);
        computer.run();
        assert_eq!(computer.intcode, end);
    }

    #[test]
    fn test_run_intcode_example3() {
        let intcode: Vec<i64> = vec![2,4,4,5,99,0];
        let end = vec![2,4,4,5,99,9801];
        let mut computer = IntCodeComputer::new(&intcode, vec![]);
        computer.run();
        assert_eq!(computer.intcode, end);
    }

    #[test]
    fn test_run_intcode_example4() {
        let intcode: Vec<i64> = vec![1,1,1,4,99,5,6,0,99];
        let end = vec![30,1,1,4,2,5,6,0,99];
        let mut computer = IntCodeComputer::new(&intcode, vec![]);
        computer.run();
        assert_eq!(computer.intcode, end);
    }

    #[test]
    fn test_add() {
        let intcode: Vec<i64> = vec![1001,4,3,4,33];
        let expected = vec![1001,4,3,4,36];
        let mut computer = IntCodeComputer::new(&intcode, vec![]);
        computer.next_operation();
        assert_eq!(computer.intcode, expected);
        assert_eq!(computer.ptr, 4);
    }

    #[test]
    fn test_multiply() {
        let intcode: Vec<i64> = vec![1002,4,3,4,33];
        let expected = vec![1002,4,3,4,99];
        let mut computer = IntCodeComputer::new(&intcode, vec![]);
        computer.next_operation();
        assert_eq!(computer.intcode, expected);
        assert_eq!(computer.ptr, 4);
    }

    #[test]
    fn test_input() {
        let intcode: Vec<i64> = vec![3,5,0,0,0,0];
        let expected = vec![3,5,0,0,0,44];
        let mut computer = IntCodeComputer::new(&intcode, vec![44]);
        computer.next_operation();
        assert_eq!(computer.intcode, expected);
        assert_eq!(computer.ptr, 2);
    }

    #[test]
    fn test_output() {
        let intcode: Vec<i64> = vec![4,5,0,0,0,33];
        let expected = vec![4,5,0,0,0,33];
        let mut computer = IntCodeComputer::new(&intcode, vec![]);
        computer.next_operation();
        assert_eq!(computer.intcode, expected);
        assert_eq!(computer.ptr, 2);
    }

    #[test]
    fn test_jump_if_true_mode_position_is_true() {
        let intcode: Vec<i64> = vec![5,3,6,2,0,33,12];
        let expected = 12;
        let mut computer = IntCodeComputer::new(&intcode, vec![]);
        computer.next_operation();
        assert_eq!(computer.ptr, expected);
    }

    #[test]
    fn test_jump_if_true_mode_immidiate_is_true() {
        let intcode: Vec<i64> = vec![1105,1,6,0,0,33,12];
        let expected = 6;
        let mut computer = IntCodeComputer::new(&intcode, vec![]);
        computer.next_operation();
        assert_eq!(computer.ptr, expected);
    }

    #[test]
    fn test_jump_if_false_mode_position_is_false() {
        let intcode: Vec<i64> = vec![6,3,6,0,0,33,12];
        let expected = 12;
        let mut computer = IntCodeComputer::new(&intcode, vec![]);
        computer.next_operation();
        assert_eq!(computer.ptr, expected);
    }

    #[test]
    fn test_jump_if_false_mode_immidiate_is_false() {
        let intcode: Vec<i64> = vec![1106,0,6,0,0,33];
        let expected = 6;
        let mut computer = IntCodeComputer::new(&intcode, vec![]);
        computer.next_operation();
        assert_eq!(computer.ptr, expected);
    }

    #[test]
    fn test_less_than_mode_position() {
        let intcode: Vec<i64> = vec![7,4,5,6,6,7,7,24];
        let expected = vec![7,4,5,6,6,7,1,24];
        let expected_ptr = 4;
        let mut computer = IntCodeComputer::new(&intcode, vec![]);
        computer.next_operation();
        assert_eq!(computer.intcode, expected);
        assert_eq!(computer.ptr, expected_ptr);
    }

    #[test]
    fn test_less_than_mode_immidiate() {
        let intcode: Vec<i64> = vec![11107,3,6,5,5,33];
        let expected = vec![11107,3,6,5,5,1];
        let expected_ptr = 4;
        let mut computer = IntCodeComputer::new(&intcode, vec![]);
        computer.next_operation();
        assert_eq!(computer.intcode, expected);
        assert_eq!(computer.ptr, expected_ptr);
    }
}
#![allow(dead_code)] mod compile; mod matcher; mod matching; mod optimize; mod parse; mod repr; mod state; mod tests; use std::iter::FromIterator; /// Easily take a substring from a match tuple. pub fn substring(s: &str, (from, len): (usize, usize)) -> String { String::from_iter(s.chars().skip(from).take(len)) } /// Render the state machine generated from `re` as graphviz `dot` input. The result can be pasted /// into `visualize.sh`, which renders a PNG image from it. pub fn render_graph(re: &str) -> String { return format!( "digraph st {{ {} }}", state::dot(&compile::start_compile(parse::parse(re).as_ref().unwrap())) ); } /// Translate a regular expression string into an unoptimized `Pattern`. This is useful for /// inspecting (Pattern implements `Debug`) the parser output if there are unexpected effects. fn parse(re: &str) -> Result<repr::Pattern, String> { return parse::parse(re); } /// Compiles a parsed regular expression into the internal state graph and matches s against it. /// Returns whether the string matched as well as a list of submatches. The first submatch is the /// entire matched string. A submatch is a tuple of (start, end), where end is the index of the /// first character that isn't part of the submatch anymore (i.e. [start, end)). fn compile_and_match(re: &repr::Pattern, s: &str) -> (bool, Vec<(usize, usize)>) { let compiled = compile::start_compile(re); matching::do_match(&compiled, s) } /// Parse, compile, and match a regular expression. Not recommended for repeated use, as the /// regular expression will be compiled every time. Use `compile()` and `match_re()` to make this /// more efficient (about 3x faster). pub fn match_re_str(re: &str, s: &str) -> Result<(bool, Vec<(usize, usize)>), String> { return Ok(compile_and_match(&optimize::optimize(parse::parse(re)?), s)); } /// Optimize and compile a regular expression into a representation that can be directly used for /// matching with `match_re()`. 
pub fn compile(re: &str) -> Result<state::CompiledRE, String> { Ok(state::CompiledRE(compile::start_compile( &optimize::optimize(parse(re)?), ))) } /// Match a regular expression compiled with `compile()` against a string. Returns a tuple of a /// boolean (whether there was a match or partial match) and a vector of `(position, length)` /// tuples for all submatches, where the first element describes the match by the whole regular /// expression. pub fn match_re(re: &state::CompiledRE, s: &str) -> (bool, Vec<(usize, usize)>) { matching::do_match(&re.0, s) }
/* * Copyright (c) Meta Platforms, Inc. and affiliates. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. */ use std::str::FromStr; use serde::Deserialize; use serde::Serialize; bitflags::bitflags! { /// A namespace that may be unshared with [`Command::unshare`]. /// /// [`Command::unshare`]: super::Command::unshare #[derive(Deserialize, Serialize)] pub struct Namespace: i32 { /// Cgroup namespace. const CGROUP = libc::CLONE_NEWCGROUP; /// IPC namespace. const IPC = libc::CLONE_NEWIPC; /// Network namespace. const NETWORK = libc::CLONE_NEWNET; /// Mount namespace. const MOUNT = libc::CLONE_NEWNS; /// PID namespace. const PID = libc::CLONE_NEWPID; /// User and group namespace. const USER = libc::CLONE_NEWUSER; /// UTS namespace. const UTS = libc::CLONE_NEWUTS; } } impl Default for Namespace { fn default() -> Self { Self::empty() } } #[derive(Debug, Clone)] pub enum ParseNamespaceError { InvalidNamespace(String), } impl std::error::Error for ParseNamespaceError {} impl core::fmt::Display for ParseNamespaceError { fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { match self { ParseNamespaceError::InvalidNamespace(ns) => { write!(f, "Invalid namespace: {}", ns) } } } } impl FromStr for Namespace { type Err = ParseNamespaceError; fn from_str(s: &str) -> Result<Self, ParseNamespaceError> { s.split(',').try_fold(Namespace::empty(), |ns, s| match s { "cgroup" => Ok(ns | Namespace::CGROUP), "ipc" => Ok(ns | Namespace::IPC), "network" => Ok(ns | Namespace::NETWORK), "pid" => Ok(ns | Namespace::PID), "mount" => Ok(ns | Namespace::MOUNT), "user" => Ok(ns | Namespace::USER), "uts" => Ok(ns | Namespace::UTS), "" | "none" => Ok(ns), invalid_ns => Err(ParseNamespaceError::InvalidNamespace(invalid_ns.to_owned())), }) } }
use std::num::Wrapping;

// `Wrapping<T>` opts in to modular (wrap-around) arithmetic, avoiding the
// overflow panics plain integer operations raise in debug builds.
fn main() {
    let one = Wrapping(1u32);
    assert_eq!(one.0, 1);

    let five = Wrapping(5u32);
    let difference = five - one;
    assert_eq!(difference.0, 4);
}
#![deny(warnings)] #![deny(missing_docs)] #![allow(clippy::needless_doctest_main)] //! Provides the ability to execute async code from a sync context, //! without blocking a tokio core thread or busy looping the cpu. //! //! # Example //! //! ``` //! #[tokio::main(threaded_scheduler)] //! async fn main() { //! // we need to ensure we are in the context of a tokio task //! tokio::task::spawn(async move { //! // some library api may take a sync callback //! // but we want to be able to execute async code //! (|| { //! let r = tokio_safe::tokio_safe( //! // async code to poll synchronously //! async move { //! // simulate some async work //! tokio::time::delay_for( //! std::time::Duration::from_millis(2) //! ).await; //! //! // return our result //! "test" //! }, //! //! // timeout to allow async execution //! std::time::Duration::from_millis(10), //! ).unwrap(); //! //! // note we get the result inline with no `await` //! assert_eq!("test", r); //! })() //! }) //! .await //! .unwrap(); //! } //! ``` /// Error Type #[derive(Debug, PartialEq)] pub enum BlockOnError { /// The future did not complete within the time alloted. Timeout, } impl std::fmt::Display for BlockOnError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{:?}", self) } } impl std::error::Error for BlockOnError {} /// Provides the ability to execute async code from a sync context, /// without blocking a tokio core thread or busy looping the cpu. /// You must ensure you are within the context of a tokio::task, /// This allows `tokio::task::block_in_place` to move to a blocking thread. /// This version will never time out - you may end up binding a /// tokio background thread forever. 
pub fn tokio_safe_block_forever_on<F: std::future::Future>(f: F) -> F::Output {
    // work around pin requirements with a Box
    let f = Box::pin(f);

    // capture a handle to the current runtime so the (now blocking) thread
    // can still drive the future to completion
    let handle = tokio::runtime::Handle::current();

    // first, we need to make sure to move this thread to the background
    tokio::task::block_in_place(move || {
        // poll until we get a result
        // futures::executor::block_on(async move { f.await })
        handle.block_on(async move { f.await })
    })
}

/// Provides the ability to execute async code from a sync context,
/// without blocking a tokio core thread or busy looping the cpu.
/// You must ensure you are within the context of a tokio::task,
/// This allows `tokio::task::block_in_place` to move to a blocking thread.
///
/// # Errors
///
/// Returns `BlockOnError::Timeout` if `f` does not complete within `timeout`.
pub fn tokio_safe<F: std::future::Future>(
    f: F,
    timeout: std::time::Duration,
) -> Result<F::Output, BlockOnError> {
    // work around pin requirements with a Box
    let f = Box::pin(f);

    let handle = tokio::runtime::Handle::current();

    // first, we need to make sure to move this thread to the background
    tokio::task::block_in_place(move || {
        // poll until we get a result or a timeout
        // futures::executor::block_on(async move {
        handle.block_on(async move {
            // race the future against a delay; whichever completes first wins
            match futures::future::select(f, tokio::time::delay_for(timeout)).await {
                futures::future::Either::Left((res, _)) => Ok(res),
                futures::future::Either::Right(_) => Err(BlockOnError::Timeout),
            }
        })
    })
}

#[cfg(test)]
mod tests {
    use super::*;

    #[tokio::test(threaded_scheduler)]
    async fn it_should_execute_async_from_sync_context_forever() {
        tokio::task::spawn(async move {
            // the sync closure stands in for a library callback API
            (|| {
                let result = tokio_safe_block_forever_on(async move { "test0" });
                assert_eq!("test0", result);
            })()
        })
        .await
        .unwrap();
    }

    #[tokio::test(threaded_scheduler)]
    async fn it_should_execute_async_from_sync_context() {
        tokio::task::spawn(async move {
            (|| {
                let result = tokio_safe(
                    async move { "test1" },
                    std::time::Duration::from_millis(10),
                );
                assert_eq!("test1", result.unwrap());
            })()
        })
        .await
        .unwrap();
    }

    #[tokio::test(threaded_scheduler)]
    async fn it_should_execute_timed_async_from_sync_context() {
        tokio::task::spawn(async move {
            (|| {
                // async work (2ms) shorter than the timeout (10ms) succeeds
                let result = tokio_safe(
                    async move {
                        tokio::time::delay_for(std::time::Duration::from_millis(2)).await;
                        "test2"
                    },
                    std::time::Duration::from_millis(10),
                );
                assert_eq!("test2", result.unwrap());
            })()
        })
        .await
        .unwrap();
    }

    #[tokio::test(threaded_scheduler)]
    async fn it_should_timeout_timed_async_from_sync_context() {
        tokio::task::spawn(async move {
            (|| {
                // async work (10ms) longer than the timeout (2ms) errors out
                let result = tokio_safe(
                    async move {
                        tokio::time::delay_for(std::time::Duration::from_millis(10)).await;
                        "test3"
                    },
                    std::time::Duration::from_millis(2),
                );
                assert_eq!(BlockOnError::Timeout, result.unwrap_err());
            })()
        })
        .await
        .unwrap();
    }
}
use super::helper::new_index_transaction;
use crate::relayer::block_transactions_verifier::BlockTransactionsVerifier;
use crate::relayer::error::{Error, Misbehavior};
use ckb_types::packed::{CompactBlock, CompactBlockBuilder, IndexTransaction};
use ckb_types::prelude::*;

// block_short_ids: vec![None, Some(1), None, Some(3), Some(4), None]
// Builds a compact block where transactions 0, 2 and 5 are prefilled and
// transactions 1, 3 and 4 are referenced only by their proposal short ids.
fn build_compact_block() -> CompactBlock {
    let prefilled: Vec<IndexTransaction> = vec![0, 2, 5]
        .into_iter()
        .map(new_index_transaction)
        .collect();

    let short_ids = vec![1, 3, 4]
        .into_iter()
        .map(new_index_transaction)
        .map(|tx| tx.transaction().proposal_short_id());

    CompactBlockBuilder::default()
        .short_ids(short_ids.pack())
        .prefilled_transactions(prefilled.into_iter().pack())
        .build()
}

#[test]
fn test_invalid() {
    let block = build_compact_block();
    let indexes = vec![1, 3, 4];

    // Invalid len: supplying fewer transactions than requested indexes
    // must be reported as misbehavior with the expected/actual counts.
    let block_txs: Vec<_> = vec![1, 3]
        .into_iter()
        .map(|i| new_index_transaction(i).transaction().into_view())
        .collect();

    let ret = BlockTransactionsVerifier::verify(&block, &indexes, block_txs.as_slice());
    assert_eq!(
        ret.err(),
        Some(Error::Misbehavior(
            Misbehavior::InvalidBlockTransactionsLength {
                expected: 3,
                actual: 2
            }
        ))
    );

    // Unordered txs: transactions must arrive in the same order as their
    // short ids in the compact block (here 3 must precede 4).
    let block_txs: Vec<_> = vec![1, 4, 3]
        .into_iter()
        .map(|i| new_index_transaction(i).transaction().into_view())
        .collect();

    let expected = new_index_transaction(3).transaction().proposal_short_id();
    let actual = new_index_transaction(4).transaction().proposal_short_id();

    let ret = BlockTransactionsVerifier::verify(&block, &indexes, &block_txs);
    assert_eq!(
        ret.err(),
        Some(Error::Misbehavior(Misbehavior::InvalidBlockTransactions {
            expected,
            actual
        }))
    );
}

#[test]
fn test_ok() {
    let block = build_compact_block();
    let indexes = vec![1, 3, 4];

    // Correct length and ordering verifies cleanly.
    let block_txs: Vec<_> = vec![1, 3, 4]
        .into_iter()
        .map(|i| new_index_transaction(i).transaction().into_view())
        .collect();

    let ret = BlockTransactionsVerifier::verify(&block, &indexes, &block_txs);
    assert!(ret.is_ok());
}
extern crate regex; #[macro_use] extern crate lazy_static; use regex::Regex; /// #User module with validation email /// /// Regular expression is taken from [source]: https://habr.com/post/55820/ /// /// ## Examples /// /// Basic usage: /// /// ```rust /// use user::User; /// /// if let Some(user) = User::validate_and_set_email("mail@mail.ru") { /// if let Some(domain) = user.email_domain(){ /// assett_eq!(domain,"mail.ru"); /// } /// } /// ``` mod user { use super::*; /// Structure containing the user's email. pub struct User<'a> { pub email: &'a str, } /// Methods for verifying the user's email. impl<'a> User<'a> { /// Creates the `User` object after successfully checking its email. /// Validation email «username@hostname», /// username : latin characters, numbers, signs !#$%&'*+—/=?^_`{|}~ /// hostname : contains components and suffixes (first-level domains) and domains of countries. /// /// ## Examples /// /// Basic usage: /// /// ```rust /// use user::User; /// /// if let Some(user) = User::validate_and_set_email("mail@mail.ru") { /// assert!(true); /// } /// ``` pub fn validate_and_set_email(email: &'a str) -> Option<Self> { lazy_static! { static ref EMAIL: Regex = Regex::new(r"(?x) ^[-a-z0-9!\#$%&'*+/=?^_`{|}~]+(\.[-a-z0-9!\#$%&'*+/=?^_`{|}~]+)* # the username @([a-z0-9]([-a-z0-9]{0,61}[a-z0-9])?\.)* # components separated by a period and not exceeding 63 characters ([a-z]{2,5}) # suffixes (limited list of first level domains) \.[a-z][a-z]$ # country domains ").unwrap(); } if EMAIL.is_match(email) { return Some(User { email: email }); } return None; } /// Analyzes a portion of the domain with the user's email address with a regular expression and returns it. 
/// ## Examples /// /// Basic usage: /// /// ```rust /// use user::User; /// /// if let Some(user) = User::validate_and_set_email("mail@mail.ru") { /// if let Some(domain) = user.email_domain(){ /// assett_eq!(domain,"mail.ru"); /// } /// } /// ``` pub fn email_domain(&self) -> Option<&'a str> { lazy_static! { static ref EMAIL_DOMAIN: Regex = Regex::new(r"@").unwrap(); } EMAIL_DOMAIN.split(self.email).last() } } #[cfg(test)] mod test { use super::*; #[test] fn none_validation_email() { match User::validate_and_set_email("mailmail.ru") { Some(_) => assert!(false), None => assert!(true), } } #[test] fn none_validation_domain() { match User::validate_and_set_email("mail@mailru") { Some(_) => assert!(false), None => assert!(true), } } #[test] fn none_validation_username() { match User::validate_and_set_email("().@mail.ru") { Some(_) => assert!(false), None => assert!(true), } } #[test] fn some_validation_username() { match User::validate_and_set_email("user.user@mail.ru") { Some(_) => assert!(true), None => assert!(false), } } #[test] fn some_validation_domain() { match User::validate_and_set_email("user.user@mail.ru") { Some(user) => { if let Some(domain) = user.email_domain() { assert_eq!(domain, "mail.ru"); } } None => assert!(false), } } } } fn main() { use user::User; if let Some(user) = User::validate_and_set_email("mail@mail.ru") { if let Some(domain) = user.email_domain() { assert_eq!(domain, "mail.ru"); } } }
use chrono::{DateTime, Local}; use rusqlite::types::ToSql; use rusqlite::{Connection, Result, NO_PARAMS}; /// Standard task for things that need to be completed #[derive(Debug, Clone, PartialEq)] pub struct Task { /// Database unique id to identify instance id: i64, /// Task description of what needs to be done text: String, /// Denote that task has been completed is_completed: bool, /// Date and Time that task is completed completed_date: DateTime<Local>, } impl Task { /// Create a new instance of a Task /// /// # Example /// /// ``` /// let mgr = datamgr::DataMgr::new(String::from("./data/green-thumb-test-new_task.db")); /// let task = Task::new(&mgr.conn, String::from("Water garden")); /// assert_eq!(String::from("Water garden"), task.text); /// ``` pub fn new(conn: &Connection, text: String) -> Self { conn.execute( "INSERT INTO tasks (text, is_completed, completed_date) VALUES (?1, ?2, ?3)", &[&text, &false as &dyn ToSql, &Local::now()], ) .unwrap(); Task { id: conn.last_insert_rowid(), text, is_completed: false, // TODO: This is terrible and needs to be changed completed_date: Local::now(), } } /// Get the completed state pub fn get_completed(&self) -> bool { self.is_completed } /// Mark a task as completed pub fn set_completed(&mut self, conn: &Connection) { self.is_completed = true; self.completed_date = Local::now(); conn.execute_named("UPDATE tasks SET is_completed = :iscompleted, completed_date = :completeddate WHERE id = :id", &[(":iscompleted", &true as &dyn ToSql), (":completeddate", &Local::now()), (":id", &self.id)]).unwrap(); } /// Get the date that the task was completed. /// This cannot be trusted if set_completed returns false. 
pub fn get_completed_date(&self) -> DateTime<Local> { self.completed_date } /// Get Task text field pub fn get_text(&self) -> String { self.text.clone() } /// Update Task text field pub fn set_text(&mut self, conn: &Connection, text: String) { self.text = text; conn.execute_named( "UPDATE tasks SET text = :text WHERE id = :id", &[(":text", &self.text), (":id", &self.id)], ) .unwrap(); } /// Access all tasks pub fn get_tasks(conn: &Connection) -> Result<Vec<Task>> { let mut tasks: Vec<Task> = Vec::new(); let mut stmt = try!(conn.prepare("SELECT id, text, is_completed, completed_date FROM tasks")); let map_tasks = try!(stmt.query_map(NO_PARAMS, |row| Task { id: row.get(0), text: row.get(1), is_completed: row.get(2), completed_date: row.get(3), })); for task in map_tasks { info!("Accessing {:?}", task); tasks.push(task.unwrap()); } Ok(tasks) } pub fn get_task_by_id(conn: &Connection, id: i64) -> Result<Task> { let mut stmt = try!( conn.prepare("SELECT text, is_completed, completed_date FROM tasks WHERE id = :id") ); let task = try!(stmt.query_map(&[&id], |row| Task { id, text: row.get(0), is_completed: row.get(1), completed_date: row.get(2) })); Ok(task.last().unwrap().unwrap()) } } #[cfg(test)] mod tests { use super::*; use db::DataMgr; #[test] fn new_task() { let mgr = DataMgr::new(String::from("./data/green-thumb-test-new_task.db")); let task = Task::new(&mgr.conn, String::from("Water garden")); assert_eq!(String::from("Water garden"), task.text); } #[test] fn get_tasks() { let mgr = DataMgr::new(String::from("./data/green-thumb-test-get_task.db")); let task1 = Task::new(&mgr.conn, String::from("Water garden")); let task2 = Task::new(&mgr.conn, String::from("Weed garden")); let tasks = Task::get_tasks(&mgr.conn).unwrap(); assert_eq!(task1.text, tasks[0].text); assert_eq!(task2.text, tasks[1].text); } #[test] fn set_completed() { let mgr = DataMgr::new(String::from("./data/green-thumb-test-set_completed.db")); let mut task1 = Task::new(&mgr.conn, String::from("Test 
completion")); assert_eq!(false, task1.get_completed()); task1.set_completed(&mgr.conn); assert_eq!(true, task1.get_completed()); } #[test] fn completed_date() { let mgr = DataMgr::new(String::from("./data/green-thumb-test-completed_date.db")); let mut task = Task::new(&mgr.conn, String::from("Test completion")); // Accessing the value assigned at creation to ensure that it changes when the task is marked completed let date = task.get_completed_date(); task.set_completed(&mgr.conn); assert_ne!(date, task.get_completed_date()); } #[test] fn get_text() { let mgr = DataMgr::new(String::from("./data/green-thumb-test-completed_date.db")); let mut task = Task::new(&mgr.conn, String::from("Test completion")); assert_eq!(String::from("Test completion"), task.get_text()); task.set_text(&mgr.conn, String::from("Updated Text.")); assert_eq!(String::from("Updated Text."), task.get_text()); } #[test] fn get_task_by_id() { let mgr = DataMgr::new(String::from("./data/green-thumb-test-get_task_by_id.db")); let task = Task::new(&mgr.conn, String::from("Get by Id")); let task2 = Task::get_task_by_id(&mgr.conn, task.id); assert_eq!(task.get_text(), task2.unwrap().get_text()); } }
use sudo_test::{Command, Env, User};

use crate::{Result, HOSTNAME, OTHER_USERNAME, PASSWORD, USERNAME};

#[test]
#[ignore = "gh709"]
fn invoking_user_has_list_perms() -> Result<()> {
    // the invoking user is granted the `list` pseudo-command
    let env = Env(format!("{USERNAME} ALL=(ALL:ALL) list"))
        .user(User(USERNAME).password(PASSWORD))
        .hostname(HOSTNAME)
        .build()?;

    let output = Command::new("sudo")
        .args(["-S", "-l"])
        .stdin(PASSWORD)
        .as_user(USERNAME)
        .output(&env)?;

    assert_contains!(
        output.stdout()?,
        format!("User {USERNAME} may run the following commands on {HOSTNAME}:")
    );

    Ok(())
}

#[test]
#[ignore = "gh709"]
fn invoking_user_has_list_perms_nopasswd() -> Result<()> {
    // with NOPASSWD, `sudo -l` must succeed without supplying a password
    let env = Env(format!("{USERNAME} ALL=(ALL:ALL) NOPASSWD: list"))
        .user(USERNAME)
        .hostname(HOSTNAME)
        .build()?;

    let output = Command::new("sudo")
        .arg("-l")
        .as_user(USERNAME)
        .output(&env)?;

    assert_contains!(
        output.stdout()?,
        format!(
            "User {USERNAME} may run the following commands on {HOSTNAME}:
    (ALL : ALL) NOPASSWD: list"
        )
    );

    Ok(())
}

#[test]
fn other_user_has_list_perms_but_invoking_user_has_not() -> Result<()> {
    // only the *other* user has the list perm, so `sudo -l -U <other>` by
    // the invoking user must be denied
    let env = Env(format!("{OTHER_USERNAME} ALL=(ALL:ALL) list"))
        .user(User(USERNAME).password(PASSWORD))
        .user(OTHER_USERNAME)
        .hostname(HOSTNAME)
        .build()?;

    let output = Command::new("sudo")
        .args(["-S", "-l", "-U", OTHER_USERNAME])
        .stdin(PASSWORD)
        .as_user(USERNAME)
        .output(&env)?;

    assert!(!output.status().success());
    assert_contains!(
        output.stderr(),
        format!(
            "Sorry, user {USERNAME} is not allowed to execute 'list' as {OTHER_USERNAME} on {HOSTNAME}."
        )
    );

    Ok(())
}

#[test]
#[ignore = "gh709"]
fn invoking_user_has_list_perms_but_other_user_does_not_have_sudo_perms() -> Result<()> {
    // the invoking user may list, but the listed-for user has no sudo rights
    let env = Env(format!("{USERNAME} ALL=(ALL:ALL) NOPASSWD: list"))
        .user(User(USERNAME).password(PASSWORD))
        .user(OTHER_USERNAME)
        .hostname(HOSTNAME)
        .build()?;

    let output = Command::new("sudo")
        .args(["-S", "-l", "-U", OTHER_USERNAME])
        .stdin(PASSWORD)
        .as_user(USERNAME)
        .output(&env)?;

    assert_contains!(
        output.stdout()?,
        format!("User {OTHER_USERNAME} is not allowed to run sudo on {HOSTNAME}.")
    );

    Ok(())
}
mod json_schema; mod ui_schema; pub use json_schema::JsonSchema; pub use ui_schema::UiSchema;
use super::ClipboardError; use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt}; use druid::{Application, ClipboardFormat}; use std::io::Cursor; use std::io::Write; pub fn get_image_from_clipboard() -> Result<Option<image::DynamicImage>, ClipboardError> { let clipboard = Application::global().clipboard(); let format_id = match clipboard.preferred_format(&["CF_DIBV5"]) { Some(id) => id, None => return Ok(None), }; let mut data = match clipboard.get_format(format_id) { Some(data) => data, None => return Ok(None), }; let mut bmp_buf = compute_bmp_header(&data)?; bmp_buf.append(&mut data); Ok(Some(image::load(Cursor::new(bmp_buf), image::ImageFormat::Bmp)?)) } pub fn put_image_to_clipboard(img: &image::DynamicImage) -> Result<(), ClipboardError> { let mut clipboard = Application::global().clipboard(); let mut data = vec![]; img.write_to(&mut data, image::ImageFormat::Bmp)?; data.drain(0..FILE_HEADER_SIZE as usize); let data = upgrade_bmp_header(&mut data)?; let fmt = ClipboardFormat::new("CF_DIBV5", data); clipboard.put_formats(&[fmt]); Ok(()) } fn upgrade_bmp_header(data: &mut Vec<u8>) -> Result<Vec<u8>, std::io::Error> { let size = Cursor::new(&data).read_u32::<LittleEndian>()?; assert_eq!(size, 108); let mut remain = data.split_off(size as usize); let mut buf = std::io::BufWriter::new(Vec::new()); buf.write_u32::<LittleEndian>(size + 16)?; buf.write(&data[4..])?; buf.write_u32::<LittleEndian>(0)?; buf.write_u32::<LittleEndian>(0)?; buf.write_u32::<LittleEndian>(0)?; buf.write_u32::<LittleEndian>(0)?; let mut data = buf.into_inner()?; data.append(&mut remain); Ok(data) } // BITMAPV5HEADER // DWORD bV5Size; 4 OFFSET 0 // LONG bV5Width; 4 OFFSET 4 // LONG bV5Height; 4 OFFSET 8 // WORD bV5Planes; 2 OFFSET 12 // WORD bV5BitCount; 2 OFFSET 14 // DWORD bV5Compression; 4 OFFSET 16 // DWORD bV5SizeImage; 4 OFFSET 20 // LONG bV5XPelsPerMeter; 4 OFFSET 24 // LONG bV5YPelsPerMeter; 4 OFFSET 28 // DWORD bV5ClrUsed; 4 OFFSET 32 const V5_COMPRESSION_OFFSET: u64 = 16; const 
V5_CLR_USED_OFFSET: u64 = 32; const FILE_HEADER_SIZE: u32 = 14; // https://itnext.io/bits-to-bitmaps-a-simple-walkthrough-of-bmp-image-format-765dc6857393 fn compute_bmp_header(content: &[u8]) -> Result<Vec<u8>, std::io::Error> { let mut cursor = Cursor::new(content); let dib_header_size = cursor.read_u32::<LittleEndian>()?; cursor.set_position(V5_COMPRESSION_OFFSET); // FIXME: compute correct color table size cursor.set_position(V5_CLR_USED_OFFSET); let color_count = cursor.read_u32::<LittleEndian>()?; let sizeof_rgba = 4; let pixel_data_offset = dib_header_size + color_count * sizeof_rgba; let mut buf = std::io::BufWriter::new(Vec::new()); // File Type buf.write(b"BM")?; // File Size buf.write_u32::<LittleEndian>(content.len() as u32 + FILE_HEADER_SIZE)?; // Reserved buf.write_u16::<LittleEndian>(0)?; // Reserved buf.write_u16::<LittleEndian>(0)?; // the offset of actual pixel data in bytes buf.write_u32::<LittleEndian>(FILE_HEADER_SIZE + pixel_data_offset)?; Ok(buf.into_inner()?) }
pub mod crypto_auth; mod signin; pub mod signup;
extern crate pretty_env_logger;
#[macro_use]
extern crate log;

use warp::Filter;

use mobc_postgres::{tokio_postgres};
use tokio_postgres::NoTls;

// Convenience aliases for the mobc-managed Postgres connection pool.
type DbConnectionManager = mobc_postgres::PgConnectionManager<NoTls>;
type DbConnection = mobc::Connection<DbConnectionManager>;
type DbConnectionPool = mobc::Pool<DbConnectionManager>;

mod app;

#[tokio::main]
async fn main() {
    pretty_env_logger::init();

    let db_pool = app::database::create_pool().expect("database pool can't be created");

    // GET /monitoring/heartbeat -> 204 No Content; used as a liveness probe.
    let heartbeat = warp::path!("monitoring" / "heartbeat")
        .and(warp::get())
        .map(warp::reply)
        .map(|reply| {
            info!("GET /monitoring/heartbeat 204");
            warp::reply::with_status(
                reply,
                warp::http::StatusCode::from_u16(204).unwrap()
            )
        });

    // POST /auth with a JSON body; the handler also receives the pool so it
    // can check out a database connection.
    let auth = warp::path("auth")
        .and(warp::post())
        .and(with_db(db_pool.clone()))
        .and(warp::body::json())
        .and_then(app::handlers::auth_handler);

    let routes = heartbeat
        .or(auth)
        .recover(app::handlers::rejection_handler);

    debug!("Serving on 0.0.0.0:8080");
    warp::serve(routes).run(([0, 0, 0, 0], 8080)).await;
}

// Warp filter that clones the pool into every request.
fn with_db(
    db_pool: DbConnectionPool
) -> impl warp::Filter<Extract = (DbConnectionPool,), Error = std::convert::Infallible> + Clone {
    warp::any().map(move || db_pool.clone())
}
use crate::ast::{ Ast }; use crate::builtins::{ builtins }; use crate::env::*; use crate::lexier::{ Lexier }; use crate::object::{ Object, new_error }; use crate::parser::{ Parser }; use crate::token::{ TokenKind }; pub fn eval(node: Ast, env: &mut Env) -> Option<Object> { match node { Ast::Program { .. } => return eval_program(node, env), Ast::ExpressionStatement { expression, .. } => { match eval(*expression, env) { Some(value) => return Some(value), None => return None, } }, Ast::IntegerLiteral { value, .. } => return Some(Object::Integer{value: value}), Ast::Boolean { value, .. } => return Some(Object::Boolean{value: value}), Ast::PrefixExpression { operator, right, .. } => { let right = match eval(*right, env){ Some(value) => value, None => return Some(new_error("prefix expression has no right hand side.".to_string())), }; if is_error(&right) { return Some(right); } match eval_prefix_expression(operator, right) { Some(value) => return Some(value), None => return None, } }, Ast::InfixExpression { left, operator, right, .. } => { let left = match eval(*left, env){ Some(value) => value, None => return Some(new_error("infix expression has no left hand side.".to_string())), }; if is_error(&left) { return Some(left); } let right = match eval(*right, env){ Some(value) => value, None => return Some(new_error("infix expression has no right hand side.".to_string())), }; if is_error(&right) { return Some(right); } return Some(eval_infix_expression(operator, left, right)); }, Ast::BlockStatement { .. } => return eval_block_statement(node, env), Ast::IfExpression { .. } => return eval_if_expression(node, env), Ast::ReturnStatement { return_value, .. } => { let val = match eval(*return_value, env){ Some(value) => Box::new(value), None => Box::new(Object::Null), }; if is_error(&*val) { return Some(*val); } return Some(Object::ReturnValue { value: val }); }, Ast::LetStatement { ident, value, .. 
} => { let val = match eval(*value, env){ Some(value) => value, None => Object::Null, }; if is_error(&val) { return Some(val); } if let Ast::Identifier { value, ..} = *ident { return Some(env.set(value, val)); } return None; }, Ast::Identifier { value, .. } => return eval_identifier(value, env), Ast::FunctionLiteral { parameters, body, .. } => { return Some(Object::Function{ parameters: parameters, body: body, env: Box::new(env.clone()) }); }, Ast::CallExpression { function, arguments, ..} => { let func = match eval(*function, env) { Some(value) => value, None => Object::Null, }; if is_error(&func) { return Some(func); } let args = eval_expressions(arguments, env); if args.len() == 1 && is_error(&args[0]) { return Some(args[0].clone()); } Some(apply_function(func, args)) }, Ast::StringLiteral { value, .. } => return Some(Object::String { value: value }), Ast::ArrayLiteral { elements, token } => { let elems = eval_expressions(elements, env); if elems.len() == 1 && is_error(&elems[0]) { return Some(elems[0].clone()); } return Some(Object::Array { elements: elems}) }, Ast::IndexExpression { left, index, .. } => { let l = match eval(*left, env) { Some(value) => value, None => Object::Null, }; if is_error(&l) { return Some(l); } let i = match eval(*index, env) { Some(value) => value, None => Object::Null, }; if is_error(&i) { return Some(i); } return eval_index_expression(l, i) }, _ => return None, } } fn eval_statements(statements: Vec<Box<Ast>>, env: &mut Env) -> Option<Object> { let mut result = Object::Null; for statement in statements { result = match eval(*statement, env) { Some(value) => { match value { Object::ReturnValue { value: ret_value } => return Some(*ret_value), _ => value, } }, None => return None, } } match result { Object::Integer { .. } | Object::Boolean { .. } | Object::Null => Some(result), _ => None, } } fn eval_prefix_expression(operator: String, right: Object) -> Option<Object> { match operator.as_ref() { "!" 
=> return Some(eval_bang_operator_expression(right)), "-" => return Some(eval_minus_operator_expression(right)), _ => return None, } } fn eval_bang_operator_expression(right: Object) -> Object { match right { Object::Boolean {value} => return Object::Boolean{value: !value}, Object::Null => return Object::Boolean{value: true}, _ => return Object::Boolean{value: false}, } } fn eval_minus_operator_expression(right: Object) -> Object { match right { Object::Integer { value } => return Object::Integer{value: -value}, _ => return new_error(format!("unknown operator: -{}", right.kind())), } } fn eval_infix_expression(operator: String, left: Object, right: Object) -> Object { if left.kind() == "Integer".to_string() && right.kind() == "Integer".to_string() { return eval_integer_infix_expression(operator, left, right); } else if left.kind() == "String".to_string() && right.kind() == "String".to_string() { return eval_string_infix_expression(operator, left, right); } else if left.kind() != right.kind() { return new_error(format!("type mismatch: {} {} {}", left.kind(), operator, right.kind())); } if operator == "==".to_string() { match left { Object::Boolean { value: lvalue } => { match right { Object::Boolean { value: rvalue } => { return Object::Boolean { value: lvalue==rvalue}; }, _ => return new_error(format!("type mismatch: {} {} {}", left.kind(), operator, right.kind())), } }, _ => return new_error(format!("type mismatch: {} {} {}", left.kind(), operator, right.kind())), } } else if operator == "!=".to_string() { match left { Object::Boolean { value: lvalue } => { match right { Object::Boolean { value: rvalue } => { return Object::Boolean { value: lvalue!=rvalue}; }, _ => return new_error(format!("type mismatch: {} {} {}", left.kind(), operator, right.kind())), } }, _ => return new_error(format!("type mismatch: {} {} {}", left.kind(), operator, right.kind())), } } new_error(format!("unknown operator: {} {} {}", left.kind(), operator, right.kind())) } fn 
eval_integer_infix_expression(operator: String, left: Object, right: Object) -> Object { match operator.as_ref() { "+" => { if let Object::Integer { value: lvalue } = left { if let Object::Integer { value: rvalue } = right { return Object::Integer { value: lvalue + rvalue}; }; }; return new_error(format!("type mismatch: {} {} {}", left.kind(), operator, right.kind())); }, "-" => { if let Object::Integer { value: lvalue } = left { if let Object::Integer { value: rvalue } = right { return Object::Integer { value: lvalue - rvalue}; }; }; return new_error(format!("type mismatch: {} {} {}", left.kind(), operator, right.kind())); }, "*" => { if let Object::Integer { value: lvalue } = left { if let Object::Integer { value: rvalue } = right { return Object::Integer { value: lvalue * rvalue}; }; }; return new_error(format!("type mismatch: {} {} {}", left.kind(), operator, right.kind())); }, "/" => { if let Object::Integer { value: lvalue } = left { if let Object::Integer { value: rvalue } = right { return Object::Integer { value: lvalue / rvalue}; }; }; return new_error(format!("type mismatch: {} {} {}", left.kind(), operator, right.kind())); }, "<" => { if let Object::Integer { value: lvalue } = left { if let Object::Integer { value: rvalue } = right { return Object::Boolean { value: lvalue < rvalue}; }; }; return new_error(format!("type mismatch: {} {} {}", left.kind(), operator, right.kind())); }, ">" => { if let Object::Integer { value: lvalue } = left { if let Object::Integer { value: rvalue } = right { return Object::Boolean { value: lvalue > rvalue}; }; }; return new_error(format!("type mismatch: {} {} {}", left.kind(), operator, right.kind())); }, "==" => { if let Object::Integer { value: lvalue } = left { if let Object::Integer { value: rvalue } = right { return Object::Boolean { value: lvalue == rvalue}; }; }; return new_error(format!("type mismatch: {} {} {}", left.kind(), operator, right.kind())); }, "!=" => { if let Object::Integer { value: lvalue } = left { if 
let Object::Integer { value: rvalue } = right { return Object::Boolean { value: lvalue != rvalue}; }; }; return new_error(format!("type mismatch: {} {} {}", left.kind(), operator, right.kind())); }, _ => return new_error(format!("unknown operator: {} {} {}", left.kind(), operator, right.kind())), } } fn eval_string_infix_expression(operator: String, left: Object, right: Object) -> Object{ match operator.as_ref() { "+" => { if let Object::String { value: lvalue } = left.clone() { if let Object::String { value: rvalue } = right.clone() { return Object::String { value: lvalue + &rvalue }; }; }; return new_error(format!("type mismatch: {} {} {}", left.kind(), operator, right.kind())); }, "==" => { if let Object::String { value: lvalue } = left.clone() { if let Object::String { value: rvalue } = right.clone() { return Object::Boolean { value: lvalue == rvalue }; }; }; return new_error(format!("type mismatch: {} {} {}", left.kind(), operator, right.kind())); }, "!=" => { if let Object::String { value: lvalue } = left.clone() { if let Object::String { value: rvalue } = right.clone() { return Object::Boolean { value: lvalue != rvalue }; }; }; return new_error(format!("type mismatch: {} {} {}", left.kind(), operator, right.kind())); }, _ => new_error(format!("unknown operator: {} {} {}", left.kind(), operator, right.kind())), } } fn eval_if_expression(node: Ast, env: &mut Env) -> Option<Object> { match node { Ast::IfExpression { condition, consequence, alternative, .. } => { let condition = match eval(*condition, env) { Some(value) => value, None => return Some(Object::Null), }; if is_error(&condition) { return Some(condition); } if is_truthy(condition) { return eval(*consequence, env); } else { match *alternative { Ast::Expression { ref token, .. 
} => { if token.get_kind_literal() == "Illegal".to_string() { return Some(Object::Null); } else { return eval(*alternative, env); } }, _ => return eval(*alternative, env), } } }, _ => return Some(Object::Null), } } fn is_truthy(obj: Object) -> bool { match obj { Object::Null => return false, Object::Boolean { value } => return value, _ => return true, } } fn eval_program(program: Ast, env: &mut Env) -> Option<Object> { let mut result = Object::Null; if let Ast::Program { statements, .. } = program { for statement in statements { result = match eval(*statement, env) { Some(value) => value, None => Object::Null, }; match result { Object::ReturnValue { value } => return Some(*value), Object::Error { .. } => return Some(result), _ => (), }; } } Some(result) } fn eval_block_statement(block: Ast, env: &mut Env) -> Option<Object> { let mut result = Object::Null; if let Ast::BlockStatement { statements, .. } = block { for statement in statements { result = match eval(*statement, env){ Some(value) => value, None => Object::Null, }; if result.kind() == "ReturnValue".to_string() || result.kind() == "Error" { return Some(result); } } } Some(result) } fn eval_identifier(value: String, env: &mut Env) -> Option<Object> { let val = env.get(value.clone()); match val { Object::Null => (), _ => return Some(val), } if let Object::Builtin { function } = builtins(value.clone()) { return Some(builtins(value.clone())); } Some(new_error(format!("identifier not found: {}", value))) } fn eval_expressions(exps: Vec<Box<Ast>>, env: &mut Env) -> Vec<Object>{ let mut result = Vec::new(); for exp in exps { let evaluated = match eval(*exp, env){ Some(value) => value, None => Object::Null, }; if is_error(&evaluated) { return vec![evaluated]; } result.push(evaluated); } result } fn eval_index_expression(left: Object, index: Object) -> Option<Object> { if left.kind() == "Array".to_string() && index.kind() == "Integer" { return Some(eval_array_index_expression(left, index)); } 
Some(new_error(format!("index operator not supported: {}", left.kind()))) } fn eval_array_index_expression(array: Object, index: Object) -> Object { let idx = match index { Object::Integer { value } => value as usize, _ => return Object::Null, }; let max = match array { Object::Array { ref elements } => (&elements.len() - 1), _ => return Object::Null, }; if idx < 0 || idx > max { return Object::Null; } match array { Object::Array { ref elements } => elements[idx].clone(), _ => Object::Null, } } fn apply_function(func: Object, args: Vec<Object>) -> Object { match func { Object::Function { .. } => { let mut extend_env = extend_function_env(func.clone(), args); let evaluated = match func { Object::Function { body, ..} => match eval(*body, &mut extend_env) { Some(value) => value, None => return Object::Null, }, _ => return Object::Null, }; return unwrap_return_value(evaluated); }, Object::Builtin { function } => return function(args), _ => return new_error(format!("not a function: {}", func.kind())), } } fn extend_function_env(func: Object, args: Vec<Object>) -> Env { let mut env = match func.clone() { Object::Function { env, .. } => Env::new_enclosed_env(env), _ => return Env::new(), }; if let Object::Function { parameters, ..} = func { for (i, parameter) in parameters.iter().enumerate() { if let Ast::Identifier { value, .. } = (**parameter).clone() { env.set(value, args[i].clone()); } } } env } fn unwrap_return_value(obj: Object) -> Object { if let Object::ReturnValue { value } = obj { return *value; } obj } fn is_error(obj: &Object) -> bool { match obj { Object::Null => (), _ => return obj.kind() == "Error".to_string(), } false }
use {
    crate::{
        comp::{Body, Mass, Mounting, Ori, PhysicsState, Pos, Scale, Sticky, Vel},
        event::{EventBus, ServerEvent},
        state::DeltaTime,
        terrain::{Block, TerrainGrid},
        vol::ReadVol,
    },
    specs::{Entities, Join, Read, ReadExpect, ReadStorage, System, WriteStorage},
    sphynx::Uid,
    vek::*,
};

// Gravitational acceleration, deliberately exaggerated (~4x Earth) for game feel.
pub const GRAVITY: f32 = 9.81 * 4.0;
// Upward force factor applied while in fluid; zero means fluids currently
// provide no lift, only the reduced `downward_force` computed from it.
const BOUYANCY: f32 = 0.0;

// Friction values used for linear damping. They are unitless quantities. The
// value of these quantities must be between zero and one. They represent the
// amount an object will slow down within 1/60th of a second. E.g. if the
// friction is 0.01, and the speed is 1.0, then after 1/60th of a second the
// speed will be 0.99. After 1 second the speed will be 0.54, which is 0.99 ^ 60.
const FRIC_GROUND: f32 = 0.125;
const FRIC_AIR: f32 = 0.0125;
const FRIC_FLUID: f32 = 0.2;

// Integrates forces, calculates the new velocity based off of the old velocity
// dt = delta time
// lv = linear velocity
// damp = linear damping
// Friction is a type of damping.
fn integrate_forces(dt: f32, mut lv: Vec3<f32>, grav: f32, damp: f32) -> Vec3<f32> {
    // This is not linear damping: because it is proportional to the original
    // velocity, this "linear" damping is in fact exponential, and thus must be
    // interpolated accordingly (raised to the power of elapsed 1/60 s ticks).
    let linear_damp = (1.0 - damp.min(1.0)).powf(dt * 60.0);

    // Apply gravity, clamping fall speed to a terminal velocity of 50.
    lv.z = (lv.z - grav * dt).max(-50.0);
    lv * linear_damp
}

/// This system applies forces and calculates new positions and velocities.
pub struct Sys;

impl<'a> System<'a> for Sys {
    type SystemData = (
        Entities<'a>,
        ReadStorage<'a, Uid>,
        ReadExpect<'a, TerrainGrid>,
        Read<'a, DeltaTime>,
        Read<'a, EventBus<ServerEvent>>,
        ReadStorage<'a, Scale>,
        ReadStorage<'a, Sticky>,
        ReadStorage<'a, Mass>,
        ReadStorage<'a, Body>,
        WriteStorage<'a, PhysicsState>,
        WriteStorage<'a, Pos>,
        WriteStorage<'a, Vel>,
        WriteStorage<'a, Ori>,
        ReadStorage<'a, Mounting>,
    );

    // Per-tick physics: integrate velocities, resolve terrain collisions
    // (including step-up "block hopping" and ground snapping), detect
    // wall/ground/fluid contact, then apply entity-vs-entity pushback.
    fn run(
        &mut self,
        (
            entities,
            uids,
            terrain,
            dt,
            event_bus,
            scales,
            stickies,
            masses,
            bodies,
            mut physics_states,
            mut positions,
            mut velocities,
            mut orientations,
            mountings,
        ): Self::SystemData,
    ) {
        let mut event_emitter = event_bus.emitter();

        // Apply movement inputs (mounted entities are excluded via `!&mountings`)
        for (entity, scale, sticky, _b, mut pos, mut vel, _ori, _) in (
            &entities,
            scales.maybe(),
            stickies.maybe(),
            &bodies,
            &mut positions,
            &mut velocities,
            &mut orientations,
            !&mountings,
        )
            .join()
        {
            let mut physics_state = physics_states.get(entity).cloned().unwrap_or_default();

            // Sticky entities freeze in place once they touch a wall or the ground.
            if sticky.is_some() && (physics_state.on_wall.is_some() || physics_state.on_ground) {
                continue;
            }

            let scale = scale.map(|s| s.0).unwrap_or(1.0);

            // Basic collision with terrain
            let player_rad = 0.3 * scale; // half-width of the player's AABB
            let player_height = 1.5 * scale;

            // Probe distances
            let hdist = player_rad.ceil() as i32;
            let vdist = player_height.ceil() as i32;
            // Neighbouring blocks iterator (all block offsets the hitbox can touch)
            let near_iter = (-hdist..=hdist)
                .map(move |i| (-hdist..=hdist).map(move |j| (0..=vdist).map(move |k| (i, j, k))))
                .flatten()
                .flatten();

            let old_vel = *vel;
            // Integrate forces
            // Friction is assumed to be a constant dependent on location
            let friction = FRIC_AIR
                .max(if physics_state.on_ground { FRIC_GROUND } else { 0.0 })
                .max(if physics_state.in_fluid { FRIC_FLUID } else { 0.0 });
            let downward_force = if physics_state.in_fluid {
                (1.0 - BOUYANCY) * GRAVITY
            } else {
                GRAVITY
            };
            vel.0 = integrate_forces(dt.0, vel.0, downward_force, friction);

            // Don't move if we're not in a loaded chunk
            let pos_delta = if terrain
                .get_key(terrain.pos_key(pos.0.map(|e| e.floor() as i32)))
                .is_some()
            {
                // this is an approximation (a blend of new and old velocity)
                // that allows most framerates to behave in a similar manner.
                (vel.0 + old_vel.0 * 4.0) * dt.0 * 0.2
            } else {
                Vec3::zero()
            };

            // Function for determining whether the player at a specific position
            // collides with the ground (or, via `hit`, any other block predicate)
            let collision_with = |pos: Vec3<f32>, hit: fn(&Block) -> bool, near_iter| {
                for (i, j, k) in near_iter {
                    let block_pos = pos.map(|e| e.floor() as i32) + Vec3::new(i, j, k);

                    if terrain.get(block_pos).map(hit).unwrap_or(false) {
                        let player_aabb = Aabb {
                            min: pos + Vec3::new(-player_rad, -player_rad, 0.0),
                            max: pos + Vec3::new(player_rad, player_rad, player_height),
                        };
                        let block_aabb = Aabb {
                            min: block_pos.map(|e| e as f32),
                            max: block_pos.map(|e| e as f32) + 1.0,
                        };

                        if player_aabb.collides_with_aabb(block_aabb) {
                            return true;
                        }
                    }
                }
                false
            };

            let was_on_ground = physics_state.on_ground;
            physics_state.on_ground = false;

            let mut on_ground = false;
            let mut attempts = 0; // Don't loop infinitely here

            // Don't jump too far at once: move in sub-steps no longer than 0.3
            // blocks so fast entities can't tunnel through terrain.
            let increments = (pos_delta.map(|e| e.abs()).reduce_partial_max() / 0.3)
                .ceil()
                .max(1.0);
            let old_pos = pos.0;
            for _ in 0..increments as usize {
                pos.0 += pos_delta / increments;

                const MAX_ATTEMPTS: usize = 16;

                // While the player is colliding with the terrain...
                while collision_with(pos.0, |vox| vox.is_solid(), near_iter.clone())
                    && attempts < MAX_ATTEMPTS
                {
                    // Calculate the player's AABB
                    let player_aabb = Aabb {
                        min: pos.0 + Vec3::new(-player_rad, -player_rad, 0.0),
                        max: pos.0 + Vec3::new(player_rad, player_rad, player_height),
                    };

                    // Determine the block that we are colliding with most
                    // (based on minimum collision axis)
                    let (_block_pos, block_aabb) = near_iter
                        .clone()
                        // Calculate the block's position in world space
                        .map(|(i, j, k)| pos.0.map(|e| e.floor() as i32) + Vec3::new(i, j, k))
                        // Calculate the AABB of the block
                        .map(|block_pos| {
                            (
                                block_pos,
                                Aabb {
                                    min: block_pos.map(|e| e as f32),
                                    max: block_pos.map(|e| e as f32) + 1.0,
                                },
                            )
                        })
                        // Make sure the block is actually solid
                        .filter(|(block_pos, _)| {
                            terrain
                                .get(*block_pos)
                                .map(|vox| vox.is_solid())
                                .unwrap_or(false)
                        })
                        // Determine whether the block's AABB collides with the player's AABB
                        .filter(|(_, block_aabb)| block_aabb.collides_with_aabb(player_aabb))
                        // Find the maximum of the minimum collision axes
                        // (this bit is weird, trust me that it works)
                        .min_by_key(|(_, block_aabb)| {
                            ((block_aabb.center() - player_aabb.center() - Vec3::unit_z() * 0.5)
                                .map(|e| e.abs())
                                .sum()
                                * 1_000_000.0) as i32
                        })
                        .expect("Collision detected, but no colliding blocks found!");

                    // Find the intrusion vector of the collision
                    let dir = player_aabb.collision_vector_with_aabb(block_aabb);

                    // Determine an appropriate resolution vector (i.e: the
                    // minimum distance needed to push out of the block)
                    let max_axis = dir.map(|e| e.abs()).reduce_partial_min();
                    let resolve_dir = -dir.map(|e| {
                        // Bit-compare so only the exact minimal axis survives.
                        if e.abs().to_bits() == max_axis.to_bits() {
                            e
                        } else {
                            0.0
                        }
                    });

                    // When the resolution direction is pointing upwards, we must be on the ground
                    if resolve_dir.z > 0.0 && vel.0.z <= 0.0 {
                        on_ground = true;

                        if !was_on_ground {
                            event_emitter.emit(ServerEvent::LandOnGround { entity, vel: vel.0 });
                        }
                    }

                    // When the resolution direction is non-vertical, we must be colliding with a wall

                    // If the space above is free...
                    if !collision_with(Vec3::new(pos.0.x, pos.0.y, (pos.0.z + 0.1).ceil()), |vox| vox.is_solid(), near_iter.clone())
                        // ...and we're being pushed out horizontally...
                        && resolve_dir.z == 0.0
                        // ...and the vertical resolution direction is sufficiently great...
                        && -dir.z > 0.1
                        // ...and we're falling/standing OR there is a block *directly* beneath our current origin (note: not hitbox)...
                        && (vel.0.z <= 0.0
                            || terrain
                                .get((pos.0 - Vec3::unit_z() * 0.1).map(|e| e.floor() as i32))
                                .map(|vox| vox.is_solid())
                                .unwrap_or(false))
                        // ...and there is a collision with a block beneath our current hitbox...
                        && collision_with(
                            old_pos + resolve_dir - Vec3::unit_z() * 1.05,
                            |vox| vox.is_solid(),
                            near_iter.clone(),
                        )
                    {
                        // ...block-hop!
                        pos.0.z = (pos.0.z + 0.1).ceil();
                        on_ground = true;
                        break;
                    } else {
                        // Correct the velocity: zero out any component pointing
                        // into the surface we just resolved against.
                        vel.0 = vel.0.map2(
                            resolve_dir,
                            |e, d| if d * e.signum() < 0.0 { 0.0 } else { e },
                        );
                    }

                    // Resolve the collision normally
                    pos.0 += resolve_dir;

                    attempts += 1;
                }

                // Resolution failed: roll back to the pre-move position.
                if attempts == MAX_ATTEMPTS {
                    pos.0 = old_pos;
                    break;
                }
            }

            if on_ground {
                physics_state.on_ground = true;
            // If the space below us is free, then "snap" to the ground
            } else if collision_with(
                pos.0 - Vec3::unit_z() * 1.05,
                |vox| vox.is_solid(),
                near_iter.clone(),
            ) && vel.0.z < 0.0
                && vel.0.z > -1.5
                && was_on_ground
                && !terrain
                    .get(
                        Vec3::new(pos.0.x, pos.0.y, (pos.0.z - 0.05).floor())
                            .map(|e| e.floor() as i32),
                    )
                    .map(|vox| vox.is_solid())
                    .unwrap_or(false)
            {
                pos.0.z = (pos.0.z - 0.05).floor();
                physics_state.on_ground = true;
            }

            // Probe the four cardinal directions for wall contact; the summed
            // direction of all hits becomes the wall normal-ish vector.
            let dirs = [
                Vec3::unit_x(),
                Vec3::unit_y(),
                -Vec3::unit_x(),
                -Vec3::unit_y(),
            ];

            if let (wall_dir, true) = dirs.iter().fold((Vec3::zero(), false), |(a, hit), dir| {
                if collision_with(pos.0 + *dir * 0.01, |vox| vox.is_solid(), near_iter.clone()) {
                    (a + dir, true)
                } else {
                    (a, hit)
                }
            }) {
                physics_state.on_wall = Some(wall_dir);
            } else {
                physics_state.on_wall = None;
            }

            // Figure out if we're in water
            physics_state.in_fluid = collision_with(pos.0, |vox| vox.is_fluid(),
                near_iter.clone());

            let _ = physics_states.insert(entity, physics_state);
        }

        // Apply pushback: entities overlapping in the horizontal plane shove
        // each other apart, weighted by mass.
        for (pos, scale, mass, vel, _, _, physics) in (
            &positions,
            scales.maybe(),
            masses.maybe(),
            &mut velocities,
            &bodies,
            !&mountings,
            &mut physics_states,
        )
            .join()
        {
            let scale = scale.map(|s| s.0).unwrap_or(1.0);
            let mass = mass.map(|m| m.0).unwrap_or(scale);
            for (other, pos_other, scale_other, mass_other, _, _) in (
                &uids,
                &positions,
                scales.maybe(),
                masses.maybe(),
                &bodies,
                !&mountings,
            )
                .join()
            {
                let scale_other = scale_other.map(|s| s.0).unwrap_or(1.0);
                let mass_other = mass_other.map(|m| m.0).unwrap_or(scale_other);
                // Massless entities exert no pushback.
                if mass_other == 0.0 {
                    continue;
                }

                // Horizontal separation only; vertical overlap is gated below.
                let diff = Vec2::<f32>::from(pos.0 - pos_other.0);

                let collision_dist = 0.95 * (scale + scale_other);

                if diff.magnitude_squared() > 0.0
                    && diff.magnitude_squared() < collision_dist.powf(2.0)
                    && pos.0.z + 1.6 * scale > pos_other.0.z
                    && pos.0.z < pos_other.0.z + 1.6 * scale_other
                {
                    let force =
                        (collision_dist - diff.magnitude()) * 2.0 * mass_other / (mass + mass_other);

                    vel.0 += Vec3::from(diff.normalized()) * force;
                    physics.touch_entity = Some(*other);
                }
            }
        }
    }
}
use std::collections::HashMap;

use token::{TokenId, Token};
use lexer::Lexer;
use error::{ErrorId, Error, gen_error};
use node::{Program, Node};

// Recursive-descent parser over a pull lexer. `token_result` is the one-token
// lookahead; a lexer error is sticky and surfaces from whichever parse method
// next inspects it.
struct Parser {
    lexer: Lexer,
    token_result: Result<Token, Error>
}

impl Parser {
    // Consumes the current lookahead and pulls the next token from the lexer.
    fn advance(&mut self) {
        self.token_result = self.lexer.token();
    }

    // Parses a parenthesised operand list: `( opers )`.
    // Reached from `parse_atom`'s fallthrough, hence `ExpectedAtom` when the
    // lookahead is not `(`.
    fn parse_paren(&mut self) -> Result<Node, Error> {
        match self.token_result.clone() {
            Err(err) => Err(err),
            Ok(token) => match token.id {
                TokenId::ParenL => {
                    self.advance();
                    let node_result = self.parse_opers();
                    match node_result {
                        Err(_) => node_result,
                        Ok(_) => match self.token_result.clone() {
                            Err(err) => Err(err),
                            Ok(token) => match token.id {
                                TokenId::ParenR => {
                                    self.advance();
                                    node_result
                                },
                                _ => gen_error(ErrorId::ExpectedParen, token.clone())
                            }
                        }
                    }
                },
                _ => gen_error(ErrorId::ExpectedAtom, token.clone())
            }
        }
    }

    // Parses an atom: literal, identifier, register, `_`, a prefix operator
    // (`!x`, `-x`, `/x`), a memory reference `mem8/16/32(...)`, or a
    // parenthesised expression via the fallthrough to `parse_paren`.
    fn parse_atom(&mut self) -> Result<Node, Error> {
        match self.token_result.clone() {
            Err(err) => Err(err),
            Ok(token) => match token.id {
                // parse a number
                TokenId::Num(num) => {
                    let node = Node::Num(num as i32);
                    self.advance();
                    Ok(node)
                },
                // parse identifier
                TokenId::Iden(iden) => {
                    let node = Node::Iden(iden.clone());
                    self.advance();
                    Ok(node)
                },
                // parse register
                TokenId::Reg(reg) => {
                    let node = Node::Reg(reg);
                    self.advance();
                    Ok(node)
                },
                // parse `_`
                TokenId::Empty => {
                    self.advance();
                    Ok(Node::Empty)
                },
                // parse !x
                TokenId::Not => {
                    self.advance();
                    let node_result = self.parse_atom();
                    match node_result {
                        Err(_) => node_result,
                        Ok(node) => Ok(Node::Not(Box::new(node)))
                    }
                },
                // parse -x
                TokenId::Sub => {
                    self.advance();
                    let node_result = self.parse_atom();
                    match node_result {
                        Err(_) => node_result,
                        Ok(node) => Ok(Node::Neg(Box::new(node)))
                    }
                },
                // parse /x (reciprocal/repeat prefix — see Node::Rep)
                TokenId::Div => {
                    self.advance();
                    let node_result = self.parse_atom();
                    match node_result {
                        Err(_) => node_result,
                        Ok(node) => Ok(Node::Rep(Box::new(node)))
                    }
                },
                // parse mem8(...): an 8-bit memory operand
                TokenId::Mem8 => {
                    self.advance();
                    match self.parse_paren() {
                        Err(err) => Err(err),
                        Ok(node) => Ok(Node::Mem8(Box::new(node)))
                    }
                },
                // parse mem16(...)
                TokenId::Mem16 => {
                    self.advance();
                    match self.parse_paren() {
                        Err(err) => Err(err),
                        Ok(node) => Ok(Node::Mem16(Box::new(node)))
                    }
                },
                // parse mem32(...)
                TokenId::Mem32 => {
                    self.advance();
                    match self.parse_paren() {
                        Err(err) => Err(err),
                        Ok(node) => Ok(Node::Mem32(Box::new(node)))
                    }
                },
                _ => self.parse_paren()
            }
        }
    }

    // Parses a single operand: an atom followed by any number of left-
    // associative binary operators. All operators share one precedence level.
    fn parse_oper(&mut self) -> Result<Node, Error> {
        let node_result = self.parse_atom();
        match node_result {
            Err(_) => node_result,
            Ok(mut node) => loop {
                match self.token_result.clone() {
                    Err(err) => return Err(err),
                    Ok(token) => match token.id {
                        TokenId::Or => {
                            self.advance();
                            let right_result = self.parse_atom();
                            match right_result {
                                Err(_) => return right_result,
                                Ok(right) =>
                                    node = Node::Or(Box::new(node), Box::new(right))
                            }
                        },
                        TokenId::And => {
                            self.advance();
                            let right_result = self.parse_atom();
                            match right_result {
                                Err(_) => return right_result,
                                Ok(right) =>
                                    node = Node::And(Box::new(node), Box::new(right))
                            }
                        },
                        TokenId::Xor => {
                            self.advance();
                            let right_result = self.parse_atom();
                            match right_result {
                                Err(_) => return right_result,
                                Ok(right) =>
                                    node = Node::Xor(Box::new(node), Box::new(right))
                            }
                        },
                        TokenId::Add => {
                            self.advance();
                            let right_result = self.parse_atom();
                            match right_result {
                                Err(_) => return right_result,
                                Ok(right) =>
                                    node = Node::Add(Box::new(node), Box::new(right))
                            }
                        },
                        TokenId::Sub => {
                            self.advance();
                            let right_result = self.parse_atom();
                            match right_result {
                                Err(_) => return right_result,
                                Ok(right) =>
                                    node = Node::Sub(Box::new(node), Box::new(right))
                            }
                        },
                        TokenId::Sl => {
                            self.advance();
                            let right_result = self.parse_atom();
                            match right_result {
                                Err(_) => return right_result,
                                Ok(right) =>
                                    node = Node::Sl(Box::new(node), Box::new(right))
                            }
                        },
                        TokenId::Sr => {
                            self.advance();
                            let right_result = self.parse_atom();
                            match right_result {
                                Err(_) => return right_result,
                                Ok(right) =>
                                    node = Node::Sr(Box::new(node), Box::new(right))
                            }
                        },
                        TokenId::Mul => {
                            self.advance();
                            let right_result = self.parse_atom();
                            match right_result {
                                Err(_) => return right_result,
                                Ok(right) =>
                                    node = Node::Mul(Box::new(node), Box::new(right))
                            }
                        },
                        TokenId::Div => {
                            self.advance();
                            let right_result = self.parse_atom();
                            match right_result {
                                Err(_) => return right_result,
                                Ok(right) =>
                                    node = Node::Div(Box::new(node), Box::new(right))
                            }
                        },
                        TokenId::Eql => {
                            self.advance();
                            let right_result = self.parse_atom();
                            match right_result {
                                Err(_) => return right_result,
                                Ok(right) =>
                                    node = Node::Eql(Box::new(node), Box::new(right))
                            }
                        },
                        // `a > b` is normalised to `b < a` — there is no Gt node.
                        TokenId::Gt => {
                            self.advance();
                            let right_result = self.parse_atom();
                            match right_result {
                                Err(_) => return right_result,
                                Ok(right) =>
                                    node = Node::Lt(Box::new(right), Box::new(node))
                            }
                        },
                        TokenId::Lt => {
                            self.advance();
                            let right_result = self.parse_atom();
                            match right_result {
                                Err(_) => return right_result,
                                Ok(right) =>
                                    node = Node::Lt(Box::new(node), Box::new(right))
                            }
                        },
                        _ => return Ok(node)
                    }
                }
            }
        }
    }

    // parse operands
    // x, y, z, ..., w
    // A single operand is returned as-is; two or more become Node::Opers.
    fn parse_opers(&mut self) -> Result<Node, Error> {
        let node_result = self.parse_oper();
        match node_result {
            Err(err) => Err(err),
            Ok(node) => match self.token_result.clone() {
                Err(err) => Err(err),
                Ok(token) => match token.id {
                    TokenId::Comma => {
                        self.advance();
                        let mut nodes = vec![node];
                        let node_result = self.parse_oper();
                        match node_result {
                            Err(err) => Err(err),
                            Ok(node) => {
                                nodes.push(node);
                                loop {
                                    match self.token_result.clone() {
                                        Err(err) => return Err(err),
                                        Ok(token) => match token.id {
                                            TokenId::Comma => {
                                                self.advance();
                                                let node_result = self.parse_oper();
                                                match node_result {
                                                    Err(err) => return Err(err),
                                                    Ok(node) => nodes.push(node)
                                                }
                                            },
                                            _ => return Ok(Node::Opers(nodes))
                                        }
                                    }
                                }
                            }
                        }
                    },
                    _ => return Ok(node)
                }
            }
        }
    }

    // Parses a full instruction: `opers [-> opers [if oper]]`.
    // A trailing `if` wraps the transfer in a conditional node.
    fn parse_to(&mut self) -> Result<Node, Error> {
        let left_result = self.parse_opers();
        match left_result {
            Err(_) => left_result,
            Ok(left) => match self.token_result.clone() {
                Err(err) => Err(err),
                Ok(token) => match token.id {
                    TokenId::To => {
                        self.advance();
                        let right_result = self.parse_opers();
                        match right_result {
                            Err(_) => right_result,
                            Ok(right) => match self.token_result.clone() {
                                Err(err) => Err(err),
                                Ok(token) => match token.id {
                                    TokenId::If => {
                                        self.advance();
                                        let cond_result = self.parse_oper();
                                        match cond_result {
                                            Err(err) => Err(err),
                                            Ok(cond) =>
                                                Ok(Node::Cond(Box::new(Node::To(Box::new(left), Box::new(right))), Box::new(cond)))
                                        }
                                    },
                                    _ => Ok(Node::To(Box::new(left), Box::new(right)))
                                }
                            }
                        }
                    },
                    _ => Ok(left)
                }
            }
        }
    }

    // Top level: a program is a sequence of lines; an indented (Tab) line is
    // an instruction, a bare identifier is a label, blank lines are skipped.
    fn parse(&mut self) -> Result<Program, Error> {
        let mut nodes = Vec::new();
        loop {
            match self.token_result.clone() {
                Err(err) => return Err(err),
                Ok(token) => match token.id {
                    TokenId::Tab => {
                        self.advance();
                        let node_or_err = self.parse_to();
                        match node_or_err {
                            Err(err) => return Err(err),
                            Ok(node) => {
                                nodes.push(node);
                            }
                        }
                        // Each instruction must be terminated by a newline or EOF.
                        match self.token_result.clone() {
                            Err(err) => return Err(err.clone()),
                            Ok(token) => match token.id {
                                TokenId::Line => self.advance(),
                                TokenId::Eof => break,
                                _ => return gen_error(ErrorId::ExpectedLine, token)
                            }
                        }
                    },
                    TokenId::Iden(iden) => {
                        // Parse label
                        nodes.push(Node::Label(iden.clone()));
                        self.advance();
                        match self.token_result.clone() {
                            Err(err) => return Err(err.clone()),
                            Ok(token) => match token.id {
                                TokenId::Line | TokenId::Eof => (),
                                _ => return gen_error(ErrorId::ExpectedLine, token)
                            }
                        }
                        // NOTE(review): this advances even when the lookahead
                        // was Eof — presumably the lexer keeps yielding Eof so
                        // the outer loop still terminates; confirm.
                        self.advance();
                    },
                    TokenId::Line => {
                        self.advance();
                    },
                    TokenId::Eof => break,
                    _ => return gen_error(ErrorId::ExpectedProgram, token.clone())
                }
            }
        }
        // Binary, label table, fixup queue and address are filled in by a
        // later assembly pass; the parser only provides the AST nodes.
        Ok(Program {
            nodes: nodes,
            binary: Vec::new(),
            labels: HashMap::new(),
            queue: HashMap::new(),
            addr: 0
        })
    }
}

// Entry point: lexes `source`, primes the one-token lookahead and parses a
// whole program.
pub fn parse(source: String) -> Result<Program, Error> {
    let mut lexer = Lexer::new(source);
    let token_result = lexer.token();
    let mut parser = Parser {
        lexer: lexer,
        token_result: token_result
    };
    parser.parse()
}
use wasm_bindgen::prelude::*;

/// A 2D grid position that also remembers the previously occupied cell,
/// exported to JavaScript via wasm-bindgen.
#[wasm_bindgen]
#[derive(Debug, Copy, Clone)]
pub struct Position {
    pub x: i32,
    pub y: i32,
    pub previous_x: i32,
    pub previous_y: i32,
}

#[wasm_bindgen]
impl Position {
    /// Builds a `Position` from the current and previous coordinates.
    pub fn new(x: i32, y: i32, previous_x: i32, previous_y: i32) -> Position {
        Position {
            previous_x,
            previous_y,
            x,
            y,
        }
    }
}
use general::SemiGroup;
use general::TryReader;
use token::Token;
use sourcecode::Code;
use sourcecode::Span;
use token::Operator;
use parse::SyntaxTree;
use parse::Primary;

// A unary expression: a `Primary` optionally preceded by `+` or `-`.
// The stored `Span` covers the sign (if any) plus the primary.
pub enum Unary {
    Positive(Primary, Span),
    Negative(Primary, Span),
}

impl SyntaxTree for Unary {
    // Parses `[+|-] primary`. A missing sign yields an implicit `Positive`.
    fn parse(token_reader: &mut TryReader<Code<Token>>)
    -> Result<Unary, (Option<Span>, String)> {
        // Speculatively consume a leading sign; `try_` rewinds the reader
        // when the closure returns Err, so a non-sign token is left intact.
        let operator = token_reader.try_(|reader| {
            let maybe_token = reader.next();
            match maybe_token {
                None => Err(()),
                Some(token) => {
                    match token.value {
                        Token::Operator(Operator::Add) => Ok((Operator::Add, token.span)),
                        Token::Operator(Operator::Sub) => Ok((Operator::Sub, token.span)),
                        _ => Err(()),
                    }
                }
            }
        }).map(|(_, op)| op);
        match operator {
            // `+expr`: combine the sign's span with the primary's.
            Ok((Operator::Add, span)) => Primary::parse(token_reader).map(|primary| {
                let s = span.plus(&primary.span());
                Unary::Positive(primary, s)
            }),
            // `-expr`
            Ok((Operator::Sub, span)) => Primary::parse(token_reader).map(|primary| {
                let s = span.plus(&primary.span());
                Unary::Negative(primary, s)
            }),
            // No sign: implicit positive, span is the primary's alone.
            _ => Primary::parse(token_reader).map(|primary| {
                let span = primary.span();
                Unary::Positive(primary, span)
            }),
        }
    }
    fn span(&self) -> Span {
        match self {
            Unary::Positive(_, span) => span.clone(),
            Unary::Negative(_, span) => span.clone(),
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use token::tokenize;

    // `+3` parses as an explicit positive.
    #[test]
    fn test_parse_positive() {
        let src = "+3";
        let tokens = tokenize(&src.to_string()).unwrap();
        let mut token_reader = TryReader::new(&tokens);

        let unary = Unary::parse(&mut token_reader).unwrap();

        if let Unary::Positive(_, _) = unary {
        } else {
            panic!("正になっていません。")
        }
    }

    // A bare number parses as an implicit positive.
    #[test]
    fn test_parse_implicit_positive() {
        // 6
        let src = "6";
        let tokens = tokenize(&src.to_string()).unwrap();
        let mut token_reader = TryReader::new(&tokens);

        let unary = Unary::parse(&mut token_reader).unwrap();

        if let Unary::Positive(_, _) = unary {
        } else {
            panic!("正になっていません。")
        }
    }

    // `-5` parses as a negative.
    #[test]
    fn test_parse_negative() {
        let src = "-5";
        let tokens = tokenize(&src.to_string()).unwrap();
        let mut token_reader = TryReader::new(&tokens);

        let unary = Unary::parse(&mut token_reader).unwrap();

        if let Unary::Negative(_, _) = unary {
        } else {
            panic!("正になっています。")
        }
    }
}
use std::collections::HashMap; use std::env; use serenity::{ async_trait, model::{ channel::Channel, channel::Reaction, channel::ReactionType, gateway::Ready, }, prelude::*, }; const CHANNEL_TO_WATCH: &'static str = "channel-management"; enum Action { AddRole, RemoveRole, } struct Handler { cache: Mutex<Option<HashMap<String, u64>>>, } impl Handler { async fn handle_reaction(&self, ctx: Context, reaction: Reaction, action: Action) { // We have to have a channel so we can track where this reaction occured // We can clone http because it is an `Arc`, so it's basically free if let Ok(channel) = reaction.channel(ctx.http.clone()).await { // We only want to match on `Guild` because we don't care about private chats or anything match channel { Channel::Guild(gc) => { // We are only going to be watching this specific channel for reactions, ignore everything else if gc.name() == CHANNEL_TO_WATCH { // We want to make sure we can get the message from the channel because we use that to parse // out which roles we assign dynamically, if we can't get it, just return because there is // nothing the bot can do if let Ok(msg) = reaction.message(ctx.http.clone()).await { // Pull out `emoji`, `user_id`, and `guild_id` from a partial destructure of the reaction // These are all the things we need for later and consumes the reaction itself let Reaction { emoji, user_id, guild_id, .. } = reaction; // We want to make sure we have a `guild_id` associated with the reaction change, so we can // later retrieve roles and assign them to members if let Some(guild_id) = guild_id { // We need to make sure we have a `user_id` so that we can get the `Member` associated // with that id if let Some(user_id) = user_id { // Use the `emoji` Unicode for comparison match emoji { ReactionType::Unicode(emoji) => { // Parse the role list by taking any text the reactions are attached to // then get the last "\n\n" split. 
let role_list = match msg.content.split("\n\n").last() { Some(role_list) => role_list, None => { println!("Error while parsing the pinned message, couldn't find role list"); return; } }; let mut role = ""; // Parse the `role_list` to get the specific role by iterating over each line // comparing to see if the unicode matches, then spliting on `:`, trim the last // split, which we assume is our role name role_list.lines().filter(|line| line.contains(&emoji)).for_each(|line| { role = match line.splitn(2, ":").last() { Some(role_split) => role_split.trim(), None => { println!("Error while parsing the pinned message, couldn't find role within {}", line); return } }; }); // We get the `Member` of the `GuildId` so that we can assign or remove the role let mut member = guild_id .member(ctx.http.clone(), user_id) .await .unwrap(); // We need to get the `RoleId` by turning our `GuildId` into a `PartialGuild` // such that we can call `role_by_name` just so that we don't need to track // the rediculous ID's associated with each role let role_value = match guild_id .to_partial_guild(ctx.http.clone()) .await { Ok(pg) => { if let Some(role) = pg.role_by_name(role) { role.id } else { println!( "Error while getting role by name" ); return; } } Err(e) => { println!( "Error {} while getting PartialGuild", e ); return; } }; // Finally, we take the associated action with that Role, adding it // or removing it from the particular member match action { Action::AddRole => { println!( "AddRole for member : {:?}", member .add_role(ctx.http.clone(), role_value) .await ); } Action::RemoveRole => { println!( "RemoveRole for member : {:?}", member .remove_role(ctx.http.clone(), role_value) .await ) } } }, // This variant occurs when you just absolutely need a custom emoji in your // channel because all the possible Unicode emoji's are not enough. 
ReactionType::Custom {id, name, ..} => { let name = if let Some(name) = name { name } else { println!("Custom emoji name is None?"); return }; // Parse the role list by taking any text the reactions are attached to // then get the last "\n\n" split. let role_list = match msg.content.split("\n\n").last() { Some(role_list) => role_list, None => { println!("Error while parsing the pinned message, couldn't find role list"); return; } }; let mut role = ""; // Parse the `role_list` to get the specific role by iterating over each line // comparing to see if the unicode matches, then spliting on `:`, trim the last // split, which we assume is our role name role_list.lines().filter(|line| line.contains(&name)).for_each(|line| { role = match line.split(":").last() { Some(role_split) => role_split.trim(), None => { println!("Error while parsing the pinned message, couldn't find role within {}", line); return } }; }); // We get the `Member` of the `GuildId` so that we can assign or remove the role let mut member = guild_id .member(ctx.http.clone(), user_id) .await .unwrap(); // We need to get the `RoleId` by turning our `GuildId` into a `PartialGuild` // such that we can call `role_by_name` just so that we don't need to track // the rediculous ID's associated with each role let role_value = match guild_id .to_partial_guild(ctx.http.clone()) .await { Ok(pg) => { if let Some(role) = pg.role_by_name(role) { role.id } else { println!( "Error while getting role by name" ); return; } } Err(e) => { println!( "Error {} while getting PartialGuild", e ); return; } }; // Finally, we take the associated action with that Role, adding it // or removing it from the particular member match action { Action::AddRole => { println!( "AddRole for member : {:?}", member .add_role(ctx.http.clone(), role_value) .await ); } Action::RemoveRole => { println!( "RemoveRole for member : {:?}", member .remove_role(ctx.http.clone(), role_value) .await ) } } }, _ => { println!("Unsupported ReactionType!"); } 
} } } } } } _ => {} } } } } #[async_trait] impl EventHandler for Handler { async fn reaction_add(&self, ctx: Context, add_reaction: Reaction) { self.handle_reaction(ctx, add_reaction, Action::AddRole) .await } async fn reaction_remove(&self, ctx: Context, removed_reaction: Reaction) { self.handle_reaction(ctx, removed_reaction, Action::RemoveRole) .await } async fn ready(&self, _: Context, ready: Ready) { println!("{} is connected!", ready.user.name); } } #[tokio::main] pub async fn main() { // Configure the client with your Discord bot token in the environment. let token = env::var("DISCORD_TOKEN").expect("Expected a token in the environment"); // Build our client. let mut client = Client::builder(token) .event_handler(Handler { cache: Mutex::new(None), }) .await .expect("Error creating client"); // Finally, start a single shard, and start listening to events. // // Shards will automatically attempt to reconnect, and will perform // exponential backoff until it reconnects. if let Err(why) = client.start().await { println!("Client error: {:?}", why); } }
use std::env; use std::path::PathBuf; fn main() { // compile c code // cc crate tells link option to cargo automatically. // this means cc crate print following cargo flags from inside cc, // so programmer doesn't have to explicitly write this in build.rs. // println!("cargo:rustc-link-search=native={}", env::var("OUT_DIR").unwrap()); // println!("cargo:rustc-link-lib=fibonacci"); cc::Build::new() .warnings(true) .flag("-Wall") .flag("-Wextra") .file("src/c/fibonacci.c") .include("src/c") .compile("libfibonacci.a"); // tell libmpsse header file to bindgen let bindings = bindgen::Builder::default() .header("src/c/fibonacci.h") .generate() .expect("Unable to generate bindings!"); let out_path = PathBuf::from(env::var("OUT_DIR").unwrap()); // output file generated by bindgen bindings .write_to_file(out_path.join("bindings.rs")) .expect("Couldn't write bindings!"); }
use general::TryReader;
use::general::SemiGroup;
use sourcecode::Span;
use sourcecode::Code;
use token::Token;
use token::BracketSide;
use token::Bracket;
use token::ReservedWord;
use parse::SyntaxTree;
use parse::Statement;
use parse::Equality;

// An expression is one of: a plain equality chain, an `if/then/else`, or a
// `{ ... }` block. The variants are tried in that priority order below.
pub enum Expression {
    PureExpression(PureExpression),
    IfExpression(IfExpression),
    BlockExpression(BlockExpression),
}

impl SyntaxTree for Expression {
    // Tries each alternative speculatively: `try_` rewinds the reader on
    // failure, so a failed attempt leaves the tokens intact for the next one.
    // PureExpression is the final, non-speculative fallback.
    fn parse(mut token_reader: &mut TryReader<Code<Token>>)
    -> Result<Expression, (Option<Span>, String)> {
        match token_reader.try_(|reader| IfExpression::parse(reader)) {
            Ok((_, expr)) => return Ok(Self::IfExpression(expr)),
            _ => (),
        }
        match token_reader.try_(|reader| BlockExpression::parse(reader)) {
            Ok((_, expr)) => return Ok(Self::BlockExpression(expr)),
            _ => (),
        }
        PureExpression::parse(&mut token_reader)
        .map(Self::PureExpression)
    }
    fn span(&self) -> Span {
        match &self {
            Self::PureExpression(expr) => expr.span(),
            Self::IfExpression(expr) => expr.span(),
            Self::BlockExpression(expr) => expr.span(),
        }
    }
}

// A bare equality-expression wrapped as an Expression variant.
pub struct PureExpression {
    pub equality: Equality,
}

impl SyntaxTree for PureExpression {
    fn parse(mut token_reader: &mut TryReader<Code<Token>>)
    -> Result<PureExpression, (Option<Span>, String)> {
        Equality::parse(&mut token_reader)
        .map(|equality| Self {equality})
    }
    fn span(&self) -> Span {
        self.equality.span()
    }
}

// `if <condition> then <then> else <else_>` — all three parts are full
// expressions, and the else branch is mandatory.
pub struct IfExpression {
    pub condition: Box<Expression>,
    pub then: Box<Expression>,
    pub else_: Box<Expression>,
}

impl SyntaxTree for IfExpression {
    // Expects the exact sequence `if EXPR then EXPR else EXPR`; line breaks
    // are permitted (and dropped) before `then` and `else`.
    fn parse(mut token_reader: &mut TryReader<Code<Token>>)
    -> Result<IfExpression, (Option<Span>, String)> {
        // Require the leading `if` keyword.
        match token_reader.next() {
            Some(token) => match token.value {
                Token::ReservedWord(ReservedWord::If) => (),
                _ => return Err((Some(token.span), "ifを期待していました".to_string())),
            },
            _ => return Err((None, "ifを期待していました".to_string())),
        };
        let condition = match Expression::parse(&mut token_reader) {
            Ok(expression) => expression,
            Err(e) => return Err(e),
        };
        token_reader.drop_while(|token| token.value == Token::LineBreak);
        // Require `then`.
        match token_reader.next() {
            Some(token) => match token.value {
                Token::ReservedWord(ReservedWord::Then) => (),
                _ => return Err((Some(token.span), "thenを期待していました".to_string())),
            },
            _ => return Err((None, "thenを期待していました".to_string())),
        };
        let then = match Expression::parse(&mut token_reader) {
            Ok(expression) => expression,
            Err(e) => return Err(e),
        };
        token_reader.drop_while(|token| token.value == Token::LineBreak);
        // Require `else`.
        match token_reader.next() {
            Some(token) => match token.value {
                Token::ReservedWord(ReservedWord::Else) => (),
                _ => return Err((Some(token.span), "elseを期待していました".to_string())),
            },
            _ => return Err((None, "elseを期待していました".to_string())),
        };
        let else_ = match Expression::parse(&mut token_reader) {
            Ok(expression) => expression,
            Err(e) => return Err(e),
        };
        Ok(Self{
            condition: Box::new(condition),
            then: Box::new(then),
            else_: Box::new(else_),
        })
    }
    // TODO: the span does not include the `if` keyword itself
    fn span(&self) -> Span {
        self.condition.span()
        .plus(&self.then.span())
        .plus(&self.else_.span())
    }
}

// `{ statement* expression }` — zero or more statements followed by the
// block's outcome expression, between curly brackets.
pub struct BlockExpression {
    pub statements: Vec::<Statement>,
    pub outcome: Box<Expression>,
    // Spans of the opening and closing braces, kept so `span` covers the
    // whole bracketed region.
    open: Span,
    close: Span,
}

impl SyntaxTree for BlockExpression {
    fn parse(token_reader: &mut TryReader<Code<Token>>)
    -> Result<Self, (Option<Span>, String)> {
        // Opening `{`.
        let open = match token_reader.try_next(|token| {
            match token.value {
                Token::Bracket(BracketSide::Left(Bracket::Curly)) => Ok(token.span),
                _ => Err(token.span)
            }
        }) {
            Ok(span) => span,
            Err(e) => return Err((e, "{ を期待していました".to_string())),
        };
        token_reader.drop_while(|token| token.value == Token::LineBreak);
        // Greedily consume statements; `try_` rewinds on the first failure,
        // leaving those tokens for the outcome expression below.
        let mut statements = Vec::new();
        while let Ok((_, statement)) = token_reader.try_(|reader| Statement::parse(reader)) {
            statements.push(statement)
        }
        token_reader.drop_while(|token| token.value == Token::LineBreak);
        let outcome = Box::new(match token_reader.try_(|reader| Expression::parse(reader)) {
            Ok((_, expr)) => expr,
            Err(e) => return Err(e),
        });
        token_reader.drop_while(|token| token.value == Token::LineBreak);
        // Closing `}`.
        let close = match token_reader.try_next(|token| {
            match token.value {
                Token::Bracket(BracketSide::Right(Bracket::Curly)) => Ok(token.span),
                _ => Err((Some(token.span), "} を期待していました".to_string())),
            }
        }) {
            Ok(span) => span,
            Err(Some(e)) => return Err(e),
            _ => return Err((None, "ブロックを期待しいていました".to_string())),
        };
        Ok(Self {
            open,
            close,
            statements,
            outcome,
        })
    }
    // Folds every contained span into the brace-to-brace extent.
    fn span(&self) -> Span {
        self.statements.iter().fold(self.open, |acc, s| acc.plus(&s.span()))
        .plus(&self.outcome.span())
        .plus(&self.close)
    }
}
//! # Turbine-Scene3D //! //! Scene rendering for the Turbine game engine. //! //! <video width="320" height="240" controls> //! <source src="https://i.imgur.com/M0frz9B.mp4" type="video/mp4"> //! Your browser does not support the video tag. //! </video> //! //! ### Design //! //! - Scene object stores all resources used for rendering //! - Frame graph stores command lists //! //! This design allows flexible programming of scenes, without the need for //! a tree structure to store nodes for scene data. //! The frame graph can be used to debug the scene. #![deny(missing_docs)] extern crate piston; extern crate vecmath; extern crate wavefront_obj; extern crate image; mod gl_backend; pub use gl_backend::Scene; use std::path::Path; use std::io; use vecmath::*; /// Stores a scene command. #[derive(Copy, Clone, Debug, PartialEq)] pub enum Command { /// Use program. UseProgram(Program), /// Set model-view-projection transform. SetModelViewProjection(Matrix4Uniform), /// Set model transform. SetModel(Matrix4Uniform), /// Set view transform. SetView(Matrix4Uniform), /// Set texture. SetTexture(Texture), /// Set f32 uniform. SetF32(F32Uniform, f32), /// Set 2D vector uniform. SetVector2(Vector2Uniform, Vector2<f32>), /// Set 3D vector uniform. SetVector3(Vector3Uniform, Vector3<f32>), /// Set matrx uniform. SetMatrix4(Matrix4Uniform, Matrix4<f32>), /// Enable framebuffer sRGB. EnableFrameBufferSRGB, /// Disable framebuffer sRGB. DisableFrameBufferSRGB, /// Enable blend. EnableBlend, /// Disable blend. DisableBlend, /// Enable cull face. EnableCullFace, /// Disable cull face. DisableCullFace, /// Cull front face. CullFaceFront, /// Cull back face. CullFaceBack, /// Cull both front and back face. CullFaceFrontAndBack, /// Draw triangles. DrawTriangles(VertexArray, usize), /// Draw triangle strip. DrawTriangleStrip(VertexArray, usize), /// Draw lines. DrawLines(VertexArray, usize), /// Draw points. DrawPoints(VertexArray, usize), /// Translate model. 
Translate(Vector3<f32>), /// Translate model in global coordinates. TranslateGlobal(Vector3<f32>), /// Scale model. Scale(Vector3<f32>), /// Rotate model around x axis with degrees. RotateXDeg(f32), /// Rotate model around y axis with degrees. RotateYDeg(f32), /// Rotate model around z axis with degrees. RotateZDeg(f32), /// Rotate model around axis with degrees. RotateAxisDeg(Vector3<f32>, f32), /// Push model transform to transform stack. PushTransform, /// Pop model transform from transform stack. PopTransform, /// Draw a command list. Draw(CommandList), } /// Stores how stuff is rendered in a single frame. #[derive(Debug)] pub struct FrameGraph { command_lists: Vec<Vec<Command>>, } impl FrameGraph { /// Creates a new frame graph. pub fn new() -> FrameGraph { FrameGraph { command_lists: vec![] } } /// Create command list. pub fn command_list(&mut self, commands: Vec<Command>) -> CommandList { let id = self.command_lists.len(); self.command_lists.push(commands); CommandList(id) } } /// References a vertex shader. #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub struct VertexShader(usize); /// References a fragment shader. #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub struct FragmentShader(usize); /// References a program. #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub struct Program(usize); /// References 4D matrix uniform. #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub struct Matrix4Uniform(usize); /// References a 2D vector uniform. #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub struct Vector2Uniform(usize); /// References a 3D vector uniform. #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub struct Vector3Uniform(usize); /// References a f32 uniform. #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub struct F32Uniform(usize); /// References a vertex array object. #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub struct VertexArray(usize); /// References a color buffer object. 
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub struct ColorBuffer(usize, usize); /// References a 3D vertex buffer object. #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub struct VertexBuffer3(usize, usize); /// References a 2D vertex buffer object. #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub struct VertexBuffer2(usize, usize); /// References an UV buffer object. #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub struct UVBuffer(usize, usize); /// References a normal buffer object. #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub struct NormalBuffer(usize, usize); /// References a command list object. #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub struct CommandList(usize); /// References a texture object. #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub struct Texture(usize); impl ColorBuffer { /// Length of color buffer. pub fn len(&self) -> usize {self.1} } impl VertexBuffer3 { /// Length of vertex buffer. pub fn len(&self) -> usize {self.1} } impl VertexBuffer2 { /// Length of vertex buffer. pub fn len(&self) -> usize {self.1} } /// Stores OBJ mesh data. pub struct ObjMesh { /// Stores vertex coordinates. pub vertices: Vec<f32>, /// Stores texture coordinates. pub uvs: Vec<f32>, /// Stores normal coordinates. pub normals: Vec<f32>, } impl ObjMesh { /// Load OBJ file. 
pub fn load<P: AsRef<Path>>(path: P) -> Result<ObjMesh, io::Error> { use std::fs::File; use std::io::Read; let mut obj_file = File::open(path)?; let mut data = String::new(); obj_file.read_to_string(&mut data)?; let obj_set = wavefront_obj::obj::parse(data).unwrap(); let obj = &obj_set.objects[0]; let temp_vertices = { let mut res = vec![]; for v in &obj.vertices { res.push(v.x as f32); res.push(v.y as f32); res.push(v.z as f32); } res }; let temp_uvs = { let mut res = vec![]; for uv in &obj.tex_vertices { res.push(uv.u as f32); res.push(1.0 - uv.v as f32); } res }; let temp_normals = { let mut res = vec![]; for normal in &obj.normals { res.push(normal.x as gl::types::GLfloat); res.push(normal.y as gl::types::GLfloat); res.push(normal.z as gl::types::GLfloat); } res }; let mut vertices = vec![]; let mut uvs = vec![]; let mut normals = vec![]; for geom in &obj.geometry { for shape in &geom.shapes { use wavefront_obj::obj::Primitive; if let Primitive::Triangle( (a_v, Some(a_uv), Some(a_n)), (b_v, Some(b_uv), Some(b_n)), (c_v, Some(c_uv), Some(c_n)) ) = shape.primitive { vertices.push(temp_vertices[a_v * 3 + 0]); vertices.push(temp_vertices[a_v * 3 + 1]); vertices.push(temp_vertices[a_v * 3 + 2]); vertices.push(temp_vertices[b_v * 3 + 0]); vertices.push(temp_vertices[b_v * 3 + 1]); vertices.push(temp_vertices[b_v * 3 + 2]); vertices.push(temp_vertices[c_v * 3 + 0]); vertices.push(temp_vertices[c_v * 3 + 1]); vertices.push(temp_vertices[c_v * 3 + 2]); uvs.push(temp_uvs[a_uv * 2 + 0]); uvs.push(temp_uvs[a_uv * 2 + 1]); uvs.push(temp_uvs[b_uv * 2 + 0]); uvs.push(temp_uvs[b_uv * 2 + 1]); uvs.push(temp_uvs[c_uv * 2 + 0]); uvs.push(temp_uvs[c_uv * 2 + 1]); normals.push(temp_normals[a_n * 3 + 0]); normals.push(temp_normals[a_n * 3 + 1]); normals.push(temp_normals[a_n * 3 + 2]); normals.push(temp_normals[b_n * 3 + 0]); normals.push(temp_normals[b_n * 3 + 1]); normals.push(temp_normals[b_n * 3 + 2]); normals.push(temp_normals[c_n * 3 + 0]); normals.push(temp_normals[c_n * 3 + 
1]); normals.push(temp_normals[c_n * 3 + 2]); } } } Ok(ObjMesh { vertices, uvs, normals }) } } /// Stores scene settings. #[derive(Clone)] pub struct SceneSettings { clear_depth_buffer: bool, clear_enable_depth_test: bool, } impl SceneSettings { /// Returns new scene settings with default settings. pub fn new() -> SceneSettings { SceneSettings { clear_depth_buffer: true, clear_enable_depth_test: true, } } /// Set whether to clear depth buffer on clear. pub fn clear_depth_buffer(mut self, val: bool) -> Self { self.clear_depth_buffer = val; self } /// Set whether to enable depth test on clear. /// /// Uses depth test function `LESS` by default. pub fn clear_enable_depth_test(mut self, val: bool) -> Self { self.clear_enable_depth_test = val; self } } impl Default for SceneSettings { fn default() -> Self {SceneSettings::new()} } #[cfg(test)] mod tests { #[test] fn it_works() { assert_eq!(2 + 2, 4); } }
extern crate kravl_parser; use std::env; use std::fs::File; use std::io; use std::io::prelude::*; use kravl_parser::syntax; fn main() { let args: Vec<String> = env::args().collect(); if args.len() > 1 { let mut lexer = syntax::lexer::Lexer::new(); let path = &args[1]; let mut source = match File::open(path) { Ok(f) => f, Err(_) => panic!("failed to open: {}", path), }; let mut buffer = String::new(); source.read_to_string(&mut buffer).unwrap(); lexer.tokenize(buffer); let mut parser = syntax::ast::Parser::from(lexer); let stack = parser.parse_full(); for n in stack { for j in n { println!("{:?}", j) } } std::process::exit(0) } else { println!("the kravl language"); loop { print!(">> "); io::stdout().flush(); let mut input = String::new(); match io::stdin().read_line(&mut input) { Ok(n) => { let mut lexer = syntax::lexer::Lexer::new(); lexer.tokenize(input); let mut parser = syntax::ast::Parser::from(lexer); let stack = parser.parse_full(); for n in stack { for j in n { println!("{:?}", j) } } }, Err(e) => panic!(e) } } } }
/// An enum to represent all characters in the InscriptionalParthian block. #[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)] pub enum InscriptionalParthian { /// \u{10b40}: '𐭀' LetterAleph, /// \u{10b41}: '𐭁' LetterBeth, /// \u{10b42}: '𐭂' LetterGimel, /// \u{10b43}: '𐭃' LetterDaleth, /// \u{10b44}: '𐭄' LetterHe, /// \u{10b45}: '𐭅' LetterWaw, /// \u{10b46}: '𐭆' LetterZayin, /// \u{10b47}: '𐭇' LetterHeth, /// \u{10b48}: '𐭈' LetterTeth, /// \u{10b49}: '𐭉' LetterYodh, /// \u{10b4a}: '𐭊' LetterKaph, /// \u{10b4b}: '𐭋' LetterLamedh, /// \u{10b4c}: '𐭌' LetterMem, /// \u{10b4d}: '𐭍' LetterNun, /// \u{10b4e}: '𐭎' LetterSamekh, /// \u{10b4f}: '𐭏' LetterAyin, /// \u{10b50}: '𐭐' LetterPe, /// \u{10b51}: '𐭑' LetterSadhe, /// \u{10b52}: '𐭒' LetterQoph, /// \u{10b53}: '𐭓' LetterResh, /// \u{10b54}: '𐭔' LetterShin, /// \u{10b55}: '𐭕' LetterTaw, /// \u{10b58}: '𐭘' NumberOne, /// \u{10b59}: '𐭙' NumberTwo, /// \u{10b5a}: '𐭚' NumberThree, /// \u{10b5b}: '𐭛' NumberFour, /// \u{10b5c}: '𐭜' NumberTen, /// \u{10b5d}: '𐭝' NumberTwenty, /// \u{10b5e}: '𐭞' NumberOneHundred, } impl Into<char> for InscriptionalParthian { fn into(self) -> char { match self { InscriptionalParthian::LetterAleph => '𐭀', InscriptionalParthian::LetterBeth => '𐭁', InscriptionalParthian::LetterGimel => '𐭂', InscriptionalParthian::LetterDaleth => '𐭃', InscriptionalParthian::LetterHe => '𐭄', InscriptionalParthian::LetterWaw => '𐭅', InscriptionalParthian::LetterZayin => '𐭆', InscriptionalParthian::LetterHeth => '𐭇', InscriptionalParthian::LetterTeth => '𐭈', InscriptionalParthian::LetterYodh => '𐭉', InscriptionalParthian::LetterKaph => '𐭊', InscriptionalParthian::LetterLamedh => '𐭋', InscriptionalParthian::LetterMem => '𐭌', InscriptionalParthian::LetterNun => '𐭍', InscriptionalParthian::LetterSamekh => '𐭎', InscriptionalParthian::LetterAyin => '𐭏', InscriptionalParthian::LetterPe => '𐭐', InscriptionalParthian::LetterSadhe => '𐭑', InscriptionalParthian::LetterQoph => '𐭒', InscriptionalParthian::LetterResh => '𐭓', 
InscriptionalParthian::LetterShin => '𐭔', InscriptionalParthian::LetterTaw => '𐭕', InscriptionalParthian::NumberOne => '𐭘', InscriptionalParthian::NumberTwo => '𐭙', InscriptionalParthian::NumberThree => '𐭚', InscriptionalParthian::NumberFour => '𐭛', InscriptionalParthian::NumberTen => '𐭜', InscriptionalParthian::NumberTwenty => '𐭝', InscriptionalParthian::NumberOneHundred => '𐭞', } } } impl std::convert::TryFrom<char> for InscriptionalParthian { type Error = (); fn try_from(c: char) -> Result<Self, Self::Error> { match c { '𐭀' => Ok(InscriptionalParthian::LetterAleph), '𐭁' => Ok(InscriptionalParthian::LetterBeth), '𐭂' => Ok(InscriptionalParthian::LetterGimel), '𐭃' => Ok(InscriptionalParthian::LetterDaleth), '𐭄' => Ok(InscriptionalParthian::LetterHe), '𐭅' => Ok(InscriptionalParthian::LetterWaw), '𐭆' => Ok(InscriptionalParthian::LetterZayin), '𐭇' => Ok(InscriptionalParthian::LetterHeth), '𐭈' => Ok(InscriptionalParthian::LetterTeth), '𐭉' => Ok(InscriptionalParthian::LetterYodh), '𐭊' => Ok(InscriptionalParthian::LetterKaph), '𐭋' => Ok(InscriptionalParthian::LetterLamedh), '𐭌' => Ok(InscriptionalParthian::LetterMem), '𐭍' => Ok(InscriptionalParthian::LetterNun), '𐭎' => Ok(InscriptionalParthian::LetterSamekh), '𐭏' => Ok(InscriptionalParthian::LetterAyin), '𐭐' => Ok(InscriptionalParthian::LetterPe), '𐭑' => Ok(InscriptionalParthian::LetterSadhe), '𐭒' => Ok(InscriptionalParthian::LetterQoph), '𐭓' => Ok(InscriptionalParthian::LetterResh), '𐭔' => Ok(InscriptionalParthian::LetterShin), '𐭕' => Ok(InscriptionalParthian::LetterTaw), '𐭘' => Ok(InscriptionalParthian::NumberOne), '𐭙' => Ok(InscriptionalParthian::NumberTwo), '𐭚' => Ok(InscriptionalParthian::NumberThree), '𐭛' => Ok(InscriptionalParthian::NumberFour), '𐭜' => Ok(InscriptionalParthian::NumberTen), '𐭝' => Ok(InscriptionalParthian::NumberTwenty), '𐭞' => Ok(InscriptionalParthian::NumberOneHundred), _ => Err(()), } } } impl Into<u32> for InscriptionalParthian { fn into(self) -> u32 { let c: char = self.into(); let hex = c 
.escape_unicode() .to_string() .replace("\\u{", "") .replace("}", ""); u32::from_str_radix(&hex, 16).unwrap() } } impl std::convert::TryFrom<u32> for InscriptionalParthian { type Error = (); fn try_from(u: u32) -> Result<Self, Self::Error> { if let Ok(c) = char::try_from(u) { Self::try_from(c) } else { Err(()) } } } impl Iterator for InscriptionalParthian { type Item = Self; fn next(&mut self) -> Option<Self> { let index: u32 = (*self).into(); use std::convert::TryFrom; Self::try_from(index + 1).ok() } } impl InscriptionalParthian { /// The character with the lowest index in this unicode block pub fn new() -> Self { InscriptionalParthian::LetterAleph } /// The character's name, in sentence case pub fn name(&self) -> String { let s = std::format!("InscriptionalParthian{:#?}", self); string_morph::to_sentence_case(&s) } }
extern crate getopts; extern crate toml; use std::ffi::OsStr; use std::str::FromStr; use std::fmt::Result as FmtResult; use std::fmt::{Formatter, Display}; use std::error::Error; use std::any::TypeId; //Any // use std::result; use std::convert::From; use std::collections::HashMap; use std::collections::hash_map::Entry; use getopts::{HasArg, Occur, ParsingStyle}; use toml::Value; // #[derive(Debug)] // pub struct ConfigError { // /// Description // description: String, // /// what option caused this // option: String, // /// Which file it came from (TOML or CLI) // file: String, // linecol: Option<(u64, u64)> // } // // impl Display for ConfigError { // fn fmt(&self, fmtr: &mut Formatter) -> FmtResult { // match self { // &ConfigError{description: ref d, option: ref opt, linecol: Some((l,c)), file: ref f, ..} => // write!(fmtr, "Error in {}:{}:{} with option {}: {}", f, l, c, opt, d), // &ConfigError{description: ref d, option: ref opt, linecol: None, file: ref f, ..} => // write!(fmtr, "Error in {} with option {}: {}", f, opt, d) // } // } // } // // // impl Error for ConfigError { // fn description(&self) -> &str { // return &*self.description // } // } #[derive(Clone, Copy, PartialEq, Eq)] pub enum TomlType { String, Integer, Float, } impl TomlType { pub fn to_value(&self, s: &str) -> Result<Value, Vec<toml::ParserError>> { match *self { TomlType::String => Ok(Value::String(s.to_owned())), TomlType::Integer => { let v = Value::from_str(s); match v { Ok(Value::Integer(n)) => Ok(Value::Integer(n)), Ok(wrong_type) => unimplemented!(), Err(e) => Err(e), } } TomlType::Float => { let v = Value::from_str(s); match v { Ok(Value::Float(n)) => Ok(Value::Float(n)), Ok(wrong_type) => unimplemented!(), Err(e) => Err(e), } } } } } #[derive(Debug)] pub struct DuplicateKeyError { key: String, msg: String, } impl Display for DuplicateKeyError { fn fmt(&self, fmtr: &mut Formatter) -> FmtResult { self.msg.fmt(fmtr) } } impl Error for DuplicateKeyError { fn description(&self) -> 
&str { &self.key } } #[derive(Debug)] pub struct MissingKeyError { key: String, msg: String, } impl Display for MissingKeyError { fn fmt(&self, fmtr: &mut Formatter) -> FmtResult { self.msg.fmt(fmtr) } } impl Error for MissingKeyError { fn description(&self) -> &str { &self.key } } #[derive(Debug)] pub enum ConfigError { CliError(getopts::Fail), } impl Display for ConfigError { fn fmt(&self, fmtr: &mut Formatter) -> FmtResult { match self { &ConfigError::CliError(ref e) => e.fmt(fmtr), } } } impl Error for ConfigError { fn description(&self) -> &str { match self { &ConfigError::CliError(ref e) => e.description(), } } } impl From<getopts::Fail> for ConfigError { fn from(e: getopts::Fail) -> Self { ConfigError::CliError(e) } } /// A single option, for use in either a config file or as a command-line option #[derive(Clone, PartialEq)] pub struct ConfigOption { /// TOML option name. If `""`, not allowed in TOML toml_name: String, // Fields copied fro getopts::OptGroup /// Short name of the option, e.g. `h` for a `-h` option short_name: String, /// Long name of the option, e.g. `help` for a `--help` option long_name: String, /// Hint for argument, e.g. `FILE` for a `-o FILE` option hint: String, /// Description for usage help text desc: String, /// Whether option has an argument hasarg: HasArg, /// How often it can occur occur: Occur, /// The type we expect to extract typ: TomlType, /// Default value, if present default: Option<Value>, } impl ConfigOption { pub fn get_name(&self) -> &str { match (self.toml_name.as_ref(), self.long_name.as_ref()) { (ref tname, "") => tname, (_, ref long_name) => long_name, } } } /// The best "match" for an option, kept as a `toml::Value` pub struct Match { /// Value found so far pub value: Option<Value>, /// Current value's "precedence". 0 for CLI, std::i32::MIN for compiled-in-default. pub precedence: i32, } /// The configuration values after parsing. 
/// Kept as a map of (Group name) -> (match name) -> Match pub struct Matches { // Group name -> match name -> match /// The name of the option will be long_name or toml_name from ConfigOption, defaulting to /// long_name if both are present groups: HashMap<String, HashMap<String, Match>>, } impl Matches { /// Convert a list of ConfiguratorGroups into matches, with just the defaults so far. /// Later, we can call `update` to include information from other sources, e.g. a config /// file or from the command-line pub fn from_configs<C: IntoIterator>(config: C) -> Result<Matches, DuplicateKeyError> where C::Item: AsRef<ConfiguratorGroup> { let mut matches = Matches { groups: HashMap::new() }; for group_ref in config { let group: &ConfiguratorGroup = group_ref.as_ref(); let mut map: HashMap<String, Match> = HashMap::new(); for opt in &group.args { let name: String = opt.get_name().to_owned(); let mtch = Match { value: opt.default.clone(), precedence: std::i32::MIN, }; map.insert(name, mtch); } match matches.groups.insert(group.name.clone(), map) { None => {} Some(_) => { return Err(DuplicateKeyError { key: group.name.clone(), msg: format!("Found two groups with the same key {}", group.name), }); } } } Ok(matches) } pub fn update(&mut self, group: &str, name: &str, value: Value, precedence: i32) -> Result<(), MissingKeyError> { match self.groups .entry(group.to_owned()) { Entry::Vacant(_) => { return Err(MissingKeyError { key: group.to_owned(), msg: format!("Group {} not found", group), }) } Entry::Occupied(mut loc) => { let mut group_map: &mut HashMap<String, Match> = loc.get_mut(); match group_map.entry(name.to_owned()) { Entry::Vacant(_) => { return Err(MissingKeyError { key: name.to_owned(), msg: format!("Key {} not found in group {}", name, group), }) } Entry::Occupied(mut loc) => { let ref mut cur_match = loc.get_mut(); if cur_match.precedence > precedence { **cur_match = Match { value: Some(value), precedence: precedence, }; } } } } } Ok(()) } } /// A set of 
options that form a configuration group, as would be found in a toml file. pub struct ConfiguratorGroup { pub name: String, /// The arguments allowed pub args: Vec<ConfigOption>, pub in_toml: bool, pub in_cli: bool, } /// A class for 1) managing what options are available and what their defaults are, and /// 2) which options have been found so far. pub struct Configurator { groups: Vec<ConfiguratorGroup>, matches: Matches, } impl ConfiguratorGroup { // /// Create a group // pub fn new() -> ConfiguratorGroup<'c> { // return ConfiguratorGroup { // // } // } } impl Configurator { pub fn new(groups: Vec<ConfiguratorGroup>) -> Configurator { unimplemented!() } fn getopts(&self) -> getopts::Options { let mut opts = getopts::Options::new(); for group in &self.groups { if !group.in_cli { continue; } for opt in &group.args { opts.opt(&opt.short_name, &opt.long_name, &opt.desc, &opt.hint, opt.hasarg, opt.occur); } } opts } pub fn parse_cli<C: IntoIterator>(&mut self, args: C, p: ParsingStyle, precedence: i32) -> Result<Matches, ConfigError> where C::Item: AsRef<OsStr> { let mut getopt_struct = self.getopts(); getopt_struct.parsing_style(p); let parsed = try!(getopt_struct.parse(args)); for group in &self.groups { if !group.in_cli { continue; } for opt in &group.args { let getopt_match_str = match parsed.opt_str(&*opt.short_name) { None => continue, Some(s) => s, }; let value = try!(opt.typ.to_value(&*getopt_match_str)); self.matches.update(&*group.name, opt.get_name(), value, precedence); } } Ok(self.matches) } pub fn parse_toml_partial(&self, parsed_toml: toml::Value, precedence: i32) -> Result<Matches, ConfigError> { for group in &self.groups { if !group.in_toml { continue; } for opt in &group.args { let lookup_str = format!("{}.{}", group.name, opt.get_name()); let value = try!(parsed_toml.lookup(&*lookup_str)); self.matches.update(&*group.name, opt.get_name(), value, precedence); } } Ok(self.matches) } pub fn parse_toml(&self, parsed_toml: toml::Value, precedence: i32) 
-> Result<Matches, ConfigError> { unimplemented!(); } }
//! Textures and methods for working with images. use crate::{texture::Texture, util::clamp, vec3, vec3::Vec3}; use image as i; use std::{path::Path, sync::Arc}; /// Renders an image as a texture. pub fn image<P>(path: P) -> Texture where P: AsRef<Path>, { let img = i::open(path) .unwrap_or_else(|e| panic!("Could not open image for texture!\n{}", e)) .into_rgb(); let (width, height) = img.dimensions(); Texture(Arc::new(move |(u, v), _p| { // Clamp input texture coordinates to [0,1] x [1,0] let u = clamp(u, 0.0, 1.0); let v = 1.0 - clamp(v, 0.0, 1.0); // Flip v to image coordinates! let mut i = (u * width as f32) as u32; let mut j = (v * height as f32) as u32; // Clamp integer mapping, since actual coordinates should be less than 1.0 if i >= width { i = width - 1; } if j >= height { j = height - 1; } let color_scale = 1.0 / 255.0; let pixel = img.get_pixel(i, j); color_scale * vec3!(pixel.0[0].into(), pixel.0[1].into(), pixel.0[2].into()) })) }
use crate::config::Config; use crate::exhentai::*; use crate::telegram::Bot; use crate::trans::TRANS; use anyhow::{format_err, Error}; use futures::prelude::*; use lazy_static::lazy_static; use log::{debug, error, info}; use reqwest::{Client, Response}; use telegraph_rs::{html_to_node, Page, Telegraph, UploadResult}; use tempfile::NamedTempFile; use tokio::time::delay_for; use v_htmlescape::escape; use std::collections::HashMap; use std::env; use std::fs::{create_dir_all, File}; use std::io::Write; use std::path::Path; use std::sync::{ atomic::{AtomicU32, Ordering::SeqCst}, Arc, }; use std::time; mod config; mod exhentai; mod telegram; mod trans; mod xpath; lazy_static! { static ref CONFIG: Config = Config::new("config.toml").unwrap_or_else(|e| { eprintln!("配置文件解析失败:\n{}", e); std::process::exit(1); }); static ref DB: sled::Db = sled::open("./db").expect("无法打开数据库"); } /// 通过 URL 上传图片至 telegraph async fn upload_by_url(url: &str, path: &str) -> Result<UploadResult, Error> { let client = Client::builder() .timeout(time::Duration::from_secs(15)) .build()?; // 下载图片 debug!("下载图片: {}", url); let mut tmp = NamedTempFile::new()?; let file = if Path::new(path).exists() { Path::new(path).to_owned() } else { let bytes = client.get(url).send().and_then(Response::bytes).await?; if CONFIG.exhentai.local_cache { File::create(path).and_then(|mut file| file.write_all(bytes.as_ref()))?; Path::new(path).to_owned() } else { tmp.write_all(bytes.as_ref())?; tmp.path().to_owned() } }; let result = if CONFIG.telegraph.upload { debug!("上传图片: {:?}", file); Telegraph::upload(&[file]) .await .map_err(|e| format_err!("上传 telegraph 失败: {}", e))? 
.swap_remove(0) } else { UploadResult { src: "".to_owned() } }; Ok(result) } /// 将 tag 转换为可以直接发送至 tg 的文本格式 fn tags_to_string(tags: &HashMap<String, Vec<String>>) -> String { tags.iter() .map(|(k, v)| { let v = v .iter() .map(|s| { let trans = vec![ (" ", "_"), ("_|_", " #"), ("-", "_"), ("/", "_"), ("·", "_"), ]; let mut s = TRANS.trans(k, s).to_owned(); for (from, to) in trans { s = s.replace(from, to); } format!("#{}", s) }) .collect::<Vec<_>>() .join(" "); format!("<code>{:>5}</code>: {}", TRANS.trans("rows", k), v) }) .collect::<Vec<_>>() .join("\n") } /// 将图片地址格式化为 html fn img_urls_to_html(img_urls: &[String]) -> String { img_urls .iter() .map(|s| format!(r#"<img src="{}">"#, s)) .collect::<Vec<_>>() .join("") } /// 从图片页面地址获取图片原始地址 async fn get_img_urls<'a>(gallery: &BasicGalleryInfo<'a>, img_pages: &[String]) -> Vec<String> { let img_cnt = img_pages.len(); let idx = Arc::new(AtomicU32::new(0)); let data_path = format!("{}/{}", &CONFIG.exhentai.cache_path, &gallery.title); if CONFIG.exhentai.local_cache { create_dir_all(data_path).unwrap(); } let update_progress = || { let now = idx.load(SeqCst); idx.store(now + 1, SeqCst); info!("第 {} / {} 张图片", now + 1, img_cnt); }; let get_image_url = |i: usize, url: String| async move { let path = format!("{}/{}/{}", &CONFIG.exhentai.cache_path, &gallery.title, i); match DB.get(&url) { Ok(Some(v)) => { debug!("找到缓存!"); Ok(String::from_utf8(v.to_vec()).expect("无法转为 UTF-8")) } _ => gallery .get_image_url(&url) .and_then(|img_url| async move { upload_by_url(&img_url, &path).await }) .await .map(|result| result.src), } }; let f = img_pages .iter() .enumerate() .map(|(i, url)| { async move { update_progress(); // 最多重试五次 for _ in 0..5i32 { let img_url = get_image_url(i, url.to_owned()).await; match img_url { Ok(v) => { DB.insert(url, v.as_bytes()).expect("fail to insert"); return Some(v); } Err(e) => { error!("获取图片地址失败: {}", e); delay_for(time::Duration::from_secs(10)).await; } } } None } }) .collect::<Vec<_>>(); let ret = 
futures::stream::iter(f) .buffered(CONFIG.threads_num) .filter_map(|x| async move { x }) .collect::<Vec<_>>() .await; DB.flush_async().await.expect("无法写入数据库"); ret } struct ExLoli { config: Config, bot: Bot, exhentai: ExHentai, telegraph: Telegraph, } impl ExLoli { async fn new() -> Result<Self, Error> { let config = Config::new("config.toml").map_err(|e| format_err!("配置文件解析失败:\n{}", e))?; let bot = config.init_telegram(); let exhentai = config.init_exhentai().await?; let telegraph = config.init_telegraph().await?; Ok(ExLoli { config, bot, exhentai, telegraph, }) } async fn scan_and_upload(&self) -> Result<(), Error> { // 筛选最新本子 let galleries = self .exhentai .search_n_pages(&self.config.exhentai.keyword, CONFIG.exhentai.max_pages) .await?; // 从后往前爬, 防止半路失败导致进度记录错误 for gallery in galleries.into_iter().rev() { if DB.contains_key(gallery.url.as_bytes())? { continue; } self.upload_gallery_to_telegram(&gallery).await?; } Ok(()) } async fn upload_gallery_by_url(&self, url: &str) -> Result<(), Error> { let gallery = self.exhentai.get_gallery_by_url(url).await?; self.upload_gallery_to_telegram(&gallery).await } fn cap_img_pages<'a>(&self, img_pages: &'a [String]) -> &'a [String] { let actual_img_cnt = img_pages.len(); let allow_img_cnt = self.config.exhentai.max_img_cnt; let final_img_cnt = std::cmp::min(actual_img_cnt, allow_img_cnt); info!("保留图片数量: {}", final_img_cnt); &img_pages[..final_img_cnt] } async fn upload_gallery_to_telegram<'a>( &'a self, gallery: &BasicGalleryInfo<'a>, ) -> Result<(), Error> { info!("画廊名称: {}", gallery.title); info!("画廊地址: {}", gallery.url); let gallery_info = gallery.get_full_info().await?; let img_pages = self.cap_img_pages(&gallery_info.img_pages); let img_urls = get_img_urls(gallery, img_pages).await; if !self.config.telegraph.upload { return Ok(()); } let overflow = img_pages.len() != gallery_info.img_pages.len(); let page = self .publish_to_telegraph(&gallery_info, &img_urls, overflow) .await?; info!("文章地址: {}", page.url); // 
由于画廊会更新,这个地址不能用于判断是否重复上传了,仅用于后续查询使用 DB.insert(gallery.url.as_bytes(), page.url.as_bytes()) .expect("插入失败"); self.publish_to_telegram(&gallery_info, &page.url).await } async fn publish_to_telegraph( &self, gallery: &FullGalleryInfo, img_urls: &[String], overflow: bool, ) -> Result<Page, Error> { info!("上传到 Telegraph"); let mut content = img_urls_to_html(&img_urls); if overflow { content.push_str(r#"<p>图片数量过多, 只显示部分. 完整版请前往 E 站观看.</p>"#); } self.telegraph .create_page(&gallery.title, &html_to_node(&content), false) .await .map_err(|e| e.into()) } async fn publish_to_telegram( &self, gallery: &FullGalleryInfo, article: &str, ) -> Result<(), Error> { info!("发布到 Telegram 频道"); let tags = tags_to_string(&gallery.tags); let text = format!( "{}\n<a href=\"{}\">{}</a>", tags, article, escape(&gallery.title) ); self.bot .send_message(&self.config.telegram.channel_id, &text, &gallery.url) .await?; Ok(()) } } fn dump_db() -> Result<(), Error> { let mut map = HashMap::new(); for i in DB.iter() { let (k, v) = i?; let k = String::from_utf8(k.to_vec()).unwrap_or_default(); let v = String::from_utf8(v.to_vec()).unwrap_or_default(); map.insert(k, v); } let string = serde_json::to_string_pretty(&map)?; println!("{}", string); Ok(()) } fn load_db(file: &str) -> Result<(), Error> { let file = File::open(file)?; let map: HashMap<String, String> = serde_json::from_reader(file)?; for (k, v) in map.iter() { DB.insert(k.as_bytes(), v.as_bytes())?; } DB.flush()?; Ok(()) } #[tokio::main] async fn main() { let exloli = ExLoli::new().await.unwrap_or_else(|e| { eprintln!("{}", e); std::process::exit(1); }); let args = env::args().collect::<Vec<_>>(); env::set_var("RUST_LOG", format!("exloli={}", exloli.config.log_level)); env_logger::init(); // color_backtrace::install(); for _ in 0..3i32 { let result = match (args.len(), args.get(1).map(String::as_str).unwrap_or("")) { (3, "upload") => exloli.upload_gallery_by_url(&args[2]).await, (2, "dump") => dump_db(), (3, "load") => load_db(&args[2]), _ => 
exloli.scan_and_upload().await, }; match result { Ok(()) => { info!("任务完成!"); return; } Err(e) => { error!("任务出错: {}", e); delay_for(time::Duration::from_secs(60)).await; } } } }
/*
 * Datadog API V1 Collection
 *
 * Collection of all Datadog Public endpoints.
 *
 * The version of the OpenAPI document: 1.0
 * Contact: support@datadoghq.com
 * Generated by: https://openapi-generator.tech
 */

// NOTE(review): generated model — regenerate from the OpenAPI spec rather
// than editing by hand.

/// SyntheticsCiTest : Test configuration for Synthetics CI
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SyntheticsCiTest {
    /// Disable certificate checks in API tests.
    #[serde(rename = "allowInsecureCertificates", skip_serializing_if = "Option::is_none")]
    pub allow_insecure_certificates: Option<bool>,
    // NOTE(review): undocumented in the spec — presumably HTTP basic-auth
    // credentials for API tests; see the SyntheticsBasicAuth model.
    #[serde(rename = "basicAuth", skip_serializing_if = "Option::is_none")]
    pub basic_auth: Option<Box<crate::models::SyntheticsBasicAuth>>,
    /// Body to include in the test.
    #[serde(rename = "body", skip_serializing_if = "Option::is_none")]
    pub body: Option<String>,
    /// Type of the data sent in a synthetics API test.
    #[serde(rename = "bodyType", skip_serializing_if = "Option::is_none")]
    pub body_type: Option<String>,
    /// Cookies for the request.
    #[serde(rename = "cookies", skip_serializing_if = "Option::is_none")]
    pub cookies: Option<String>,
    /// For browser test, array with the different device IDs used to run the test.
    #[serde(rename = "deviceIds", skip_serializing_if = "Option::is_none")]
    pub device_ids: Option<Vec<crate::models::SyntheticsDeviceId>>,
    /// For API HTTP test, whether or not the test should follow redirects.
    #[serde(rename = "followRedirects", skip_serializing_if = "Option::is_none")]
    pub follow_redirects: Option<bool>,
    /// Headers to include when performing the test.
    #[serde(rename = "headers", skip_serializing_if = "Option::is_none")]
    pub headers: Option<::std::collections::HashMap<String, String>>,
    /// Array of locations used to run the test.
    #[serde(rename = "locations", skip_serializing_if = "Option::is_none")]
    pub locations: Option<Vec<String>>,
    // NOTE(review): undocumented in the spec — see SyntheticsCiTestMetadata.
    #[serde(rename = "metadata", skip_serializing_if = "Option::is_none")]
    pub metadata: Option<Box<crate::models::SyntheticsCiTestMetadata>>,
    /// The public ID of the Synthetics test to trigger.
    #[serde(rename = "public_id")]
    pub public_id: String,
    // NOTE(review): undocumented in the spec — see SyntheticsTestOptionsRetry.
    #[serde(rename = "retry", skip_serializing_if = "Option::is_none")]
    pub retry: Option<Box<crate::models::SyntheticsTestOptionsRetry>>,
    /// Starting URL for the browser test.
    #[serde(rename = "startUrl", skip_serializing_if = "Option::is_none")]
    pub start_url: Option<String>,
    /// Variables to replace in the test.
    #[serde(rename = "variables", skip_serializing_if = "Option::is_none")]
    pub variables: Option<::std::collections::HashMap<String, String>>,
}

impl SyntheticsCiTest {
    /// Test configuration for Synthetics CI
    // Only `public_id` is required; every optional field starts as `None`.
    pub fn new(public_id: String) -> SyntheticsCiTest {
        SyntheticsCiTest {
            allow_insecure_certificates: None,
            basic_auth: None,
            body: None,
            body_type: None,
            cookies: None,
            device_ids: None,
            follow_redirects: None,
            headers: None,
            locations: None,
            metadata: None,
            public_id,
            retry: None,
            start_url: None,
            variables: None,
        }
    }
}
use serde::{Deserialize, Serialize};

/// Record of a loaned item (title, author, due date, renewal status).
///
/// Derives `Debug`/`Clone`/`PartialEq`/`Eq` in addition to the serde traits so the
/// type can be logged, duplicated, and compared in tests — all fields are plain
/// `String`/`bool`, so the extra derives are free and backward compatible.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct Loan {
    /// Title of the borrowed item.
    pub title: String,
    /// Author of the borrowed item.
    pub author: String,
    /// Whether this loan can still be renewed.
    pub can_renew: bool,
    /// Due date, kept as a plain string in whatever format the source provides
    /// (not parsed into a date type here).
    pub date_due: String,
    /// Identifier of the borrowed item.
    pub item_number: String,
}
use crate::components::{TileMap, TileMapConfig};
use crate::resources::{get_screen_size, Board, Context, Game, State};
use amethyst::{
    core::Transform,
    prelude::*,
    renderer::Camera,
    ui::{UiCreator, UiFinder},
};

/// Game state entered once loading has completed.
pub struct MainState;

impl SimpleState for MainState {
    /// Flags the game as being in [`State::Main`] and tears down the
    /// "loading" UI entity left behind by the previous state, if any.
    fn on_start(&mut self, data: StateData<'_, GameData<'_, '_>>) {
        let world = data.world;
        world.write_resource::<Game>().set_state(State::Main);

        // The loading screen is looked up by its UI id; it may already be gone.
        if let Some(entity) = world.exec(|finder: UiFinder| finder.find("loading")) {
            world.delete_entity(entity).expect("deleting loading ui");
        }
    }

    /// Restores the main state flag when returning from a pushed state.
    // Fix: the parameter was bound as `mut data`, but nothing mutates the
    // binding — `write_resource` only needs a shared borrow of the world —
    // so the `mut` just produced an unused-mut warning.
    fn on_resume(&mut self, data: StateData<'_, GameData<'_, '_>>) {
        data.world.write_resource::<Game>().set_state(State::Main);
    }
}
use std::fs::File;

use chrono::prelude::Utc;

use crate::types::Sample;

/// Writes the collected CSI samples to `csi_data_<date>.csv` in the current
/// working directory.
///
/// The file name is derived from the timestamp of the first sample; an empty
/// batch falls back to the current time (and produces an empty CSV).
/// Each record holds the sample's `date`, `x`, `y` followed by the four
/// `csi[i][j]` value arrays flattened in order `[0][0], [0][1], [1][0], [1][1]`.
///
/// # Panics
/// Panics if the output file cannot be created or a record cannot be written.
// Fix: take `&[Sample]` instead of `&Vec<Sample>` — callers passing `&vec`
// still work via deref coercion, and slices are the idiomatic borrow.
pub fn save_collected(csi: &[Sample]) {
    // Name the file after the first sample's timestamp; "now" for an empty batch.
    let date = csi.first().map(|s| s.date).unwrap_or_else(Utc::now);

    let fname = format!("csi_data_{}.csv", date);
    let output = File::create(&fname).expect("failed to create CSV output file");
    let mut wtr = csv::Writer::from_writer(output);

    for r in csi {
        let record = [
            vec![
                format!("{}", r.date),
                format!("{}", r.x),
                format!("{}", r.y),
            ],
            r.csi[0][0].iter().map(ToString::to_string).collect(),
            r.csi[0][1].iter().map(ToString::to_string).collect(),
            r.csi[1][0].iter().map(ToString::to_string).collect(),
            r.csi[1][1].iter().map(ToString::to_string).collect(),
        ]
        .concat();

        wtr.write_record(record).expect("failed to write CSV record");
    }

    wtr.flush().expect("failed to flush CSV writer");
}
// use std::collections::HashMap; use std::collections::BTreeMap; use op::*; pub fn collect_stream<T: Iterator<Item = f64>>( source: T, ops: Vec<Op>, ) -> Vec<f64> { let mut v: Box<Iterator<Item = f64>> = Box::new(source); for op in ops { v = match op { Op::Map(f) => { Box::new(v.map(move |x| f(x))) } Op::Filter(f) => { Box::new(v.filter(move |x| f(x))) } Op::GroupBy(key, group) => { let mut map = BTreeMap::new(); let array = v.collect::<Vec<f64>>(); for ele in array { let k = key(ele); if !map.contains_key(&k) { let mut vec = Vec::new(); vec.push(ele); map.insert(k, vec); } else { let mut val = map.get_mut(&k).unwrap(); val.push(ele); } } let mut ret = Vec::new(); for (key, val) in map.iter() { ret.push(group(val)); } return ret; } } } v.collect() }
// SPDX-License-Identifier: MIT OR Apache-2.0
// SPDX-FileCopyrightText: Ferrous Systems GmbH

/// Entry point: prints a greeting to stdout.
fn main() {
    let greeting = "Hello world";
    println!("{}", greeting);
}
use lib::{Token, Node}; // <program> ::= <function> // <function> ::= <statement> // <statement> ::= <exp> // <exp> ::= <term> {("+" | "-") <term>} // <term> ::= <factor> {("*" | "/") <factor>} // <factor> ::= <unary_op> <factor> | <constant> | "(" <exp> ")" pub fn parse(token_vec: Vec<Token::Token>, filename: &str) -> Vec<Node::Node> { // println!(""); // println!("----------------"); // println!("[+] Parser:"); // Get the token vector let mut token_vec = token_vec; // Build AST let mut ast: Vec<Node::Node> = Vec::new(); // println!("\n[+] Start parsing.\n"); program(&mut token_vec, &mut ast, filename); // println!("\n[+] Finish parsing."); ast } fn program(mut token_vec: &mut Vec<Token::Token>, mut ast: &mut Vec<Node::Node>, filename: &str) { // first node of AST ast.push(Node::new()); ast[0]._level = String::from("Program"); ast[0]._type = String::from("FILE"); ast[0]._name = String::from(filename); if ast.len() == 0 { panic!("Parser: Unable to create AST."); } while token_vec.len() != 0 { function(&mut token_vec, &mut ast, 0); } } fn function(mut token_vec: &mut Vec<Token::Token>, mut ast: &mut Vec<Node::Node>, root: usize) { loop { let id = ast.len(); ast.push(Node::new()); ast[root].to.push(id); ast[id]._level = String::from("Function"); // set Function node's type if token_vec[0]._type == "INT_KEYWORD" { ast[id]._type = String::from(token_vec[0]._type.clone()); token_vec.remove(0); } else { panic!("Parser: Function type was invalid.\n Function type: {} {}", token_vec[0]._type, token_vec[0]._value); } // set Function node's name, value if token_vec[0]._type == "IDENTIFIER" { ast[id]._name = String::from(token_vec[0]._value.clone()); token_vec.remove(0); } else { panic!("Parser: Function name was unvalid.\n Function name: {} {}", token_vec[0]._type, token_vec[0]._value); } // set Function node's ( if token_vec[0]._type == "OPEN_PAREN" { token_vec.remove(0); } else { panic!("Parser: Function ( not found.\n Function (: {} {}", token_vec[0]._type, 
token_vec[0]._value); } // set Function node's ) if token_vec[0]._type == "CLOSE_PAREN" { token_vec.remove(0); } else { panic!("Parser: Function ) not found.\n Function ): {} {}", token_vec[0]._type, token_vec[0]._value); } // set Function node's { if token_vec[0]._type == "OPEN_BRACE" { token_vec.remove(0); } else { panic!("Parser: Function {{ not found.\n Function {{: {} {}", token_vec[0]._type, token_vec[0]._value); } statement(&mut token_vec, &mut ast, id); // set Function node's } if token_vec[0]._type == "CLOSE_BRACE" { token_vec.remove(0); break; } else { panic!("Parser: Function }} not found.\n Function }}: {} {}", token_vec[0]._type, token_vec[0]._value); } } } fn statement(mut token_vec: &mut Vec<Token::Token>, mut ast: &mut Vec<Node::Node>, root: usize) { // set Statement node's type while token_vec[0]._type != "CLOSE_BRACE" { let id = ast.len(); // push ast node ast.push(Node::new()); // set previous node's "to" ast[root].to.push(id); // set Statement node's level ast[id]._level = String::from("Statement"); match token_vec[0]._type.as_str() { "RETURN_KEYWORD" => Return(&mut token_vec, &mut ast), _ => panic!("Parser: Statement type was wrong. \n Statement type: {} {}", token_vec[0]._type, token_vec[0]._value), } if token_vec[0]._type == "SEMICOLON" { token_vec.remove(0); } else { panic!("Parser: Statement end was wrong. 
\n Statement end: {} {}", token_vec[0]._type, token_vec[0]._value); } } } fn Return(mut token_vec: &mut Vec<Token::Token>, mut ast: &mut Vec<Node::Node>) { // modify statement node to return let id = ast.len()-1; ast[id]._type = String::from(token_vec[0]._type.clone()); ast[id]._name = String::from(token_vec[0]._value.clone()); token_vec.remove(0); // push child let child: usize; child = exp(&mut token_vec, &mut ast); ast[id].to.push(child); } // fn exp(mut token_vec: &mut Vec<Token::Token>, mut ast: &mut Vec<Node::Node>) -> usize { let mut left_child: usize; left_child = term(&mut token_vec, &mut ast); while token_vec[0]._type == "ADDITION" || token_vec[0]._type == "MINUS" { let op = Token::Token { _type: "BINARY_OP".to_string(), _value: token_vec[0]._value.clone() }; token_vec.remove(0); let right_child = term(&mut token_vec, &mut ast); left_child = BinOp(&mut ast, op, left_child, right_child); } left_child } fn term(mut token_vec: &mut Vec<Token::Token>, mut ast: &mut Vec<Node::Node>) -> usize { let mut left_child: usize; left_child = factor(&mut token_vec, &mut ast); while token_vec[0]._type == "MULTIPLICATION" || token_vec[0]._type == "DIVISION" { let op = Token::Token { _type: "BINARY_OP".to_string(), _value: token_vec[0]._value.clone() }; token_vec.remove(0); let right_child = factor(&mut token_vec, &mut ast); left_child = BinOp(&mut ast, op, left_child, right_child); } left_child } fn BinOp(ast: &mut Vec<Node::Node>, op: Token::Token, left_child: usize, right_child: usize) -> usize { let id = ast.len(); ast.push(Node::new()); ast[id]._level = "Expression".to_string(); ast[id]._type = op._type; ast[id]._value = op._value; ast[id].to.push(left_child); ast[id].to.push(right_child); id } fn factor(mut token_vec: &mut Vec<Token::Token>, mut ast: &mut Vec<Node::Node>) -> usize { let ret: usize; // parse "(", ")"" if token_vec[0]._type=="OPEN_PAREN" { token_vec.remove(0); ret = exp(&mut token_vec, &mut ast); if token_vec[0]._type != "CLOSE_PAREN" { panic!("Parser 
factor \")\" invalid\nFactor type: {} {}", token_vec[0]._type, token_vec[0]._value); } token_vec.remove(0); } // pasre constant, UnOPs else { match token_vec[0]._type.as_str() { "CONSTANT" => ret = Constant(&mut token_vec, &mut ast), "MINUS" | "BIT_COMPLE" | "LOGIC_NEG" => ret = UnOp(&mut token_vec, &mut ast), _ => panic!("Parser factor: factor type invalid\n Factor type: {} {}", token_vec[0]._type, token_vec[0]._value), } } ret } fn Constant(token_vec: &mut Vec<Token::Token>, ast: &mut Vec<Node::Node>) -> usize { let id = ast.len(); ast.push(Node::new()); ast[id]._level = "Expression".to_string(); ast[id]._type = token_vec[0]._type.clone(); ast[id]._value = token_vec[0]._value.clone(); token_vec.remove(0); id } fn UnOp(mut token_vec: &mut Vec<Token::Token>, mut ast: &mut Vec<Node::Node>) -> usize { let id = ast.len(); ast.push(Node::new()); ast[id]._level = "Expression".to_string(); // modify minus to negation // if token_vec[0]._type == "MINUS" { // ast[id]._type = "NEGATION".to_string(); // } // else { // ast[id]._type = token_vec[0]._type.clone(); // } ast[id]._type = "UNARY_OP".to_string(); ast[id]._value = token_vec[0]._value.clone(); token_vec.remove(0); let child: usize; child = factor(&mut token_vec, &mut ast); ast[id].to.push(child); id }
use std::ops::{Mul, MulAssign, Div, DivAssign};

use alga::general::Real;
use alga::linear::Rotation;

use core::ColumnVector;
use core::dimension::{DimName, U1, U3, U4};
use core::storage::OwnedStorage;
use core::allocator::OwnedAllocator;

use geometry::{PointBase, RotationBase, SimilarityBase, TranslationBase, UnitQuaternionBase, IsometryBase};

// FIXME: there are several cloning of rotations that we could probably get rid of (but we didn't
// yet because that would require to add a bound like `where for<'a, 'b> &'a R: Mul<&'b R, Output = R>`
// which is quite ugly.

/*
 *
 * In this file, we provide:
 * =========================
 *
 *
 * (Operators)
 *
 * SimilarityBase × SimilarityBase
 * SimilarityBase × R
 * SimilarityBase × IsometryBase
 *
 * IsometryBase × SimilarityBase
 * IsometryBase ÷ SimilarityBase
 *
 *
 * SimilarityBase ÷ SimilarityBase
 * SimilarityBase ÷ R
 * SimilarityBase ÷ IsometryBase
 *
 * SimilarityBase × PointBase
 * SimilarityBase × ColumnVector
 *
 *
 * SimilarityBase × TranslationBase
 * TranslationBase × SimilarityBase
 *
 * NOTE: The following are provided explicitly because we can't have R × SimilarityBase.
 * RotationBase × SimilarityBase<RotationBase>
 * UnitQuaternion × SimilarityBase<UnitQuaternion>
 *
 * RotationBase ÷ SimilarityBase<RotationBase>
 * UnitQuaternion ÷ SimilarityBase<UnitQuaternion>
 *
 * (Assignment Operators)
 *
 * SimilarityBase ×= TranslationBase
 *
 * SimilarityBase ×= SimilarityBase
 * SimilarityBase ×= IsometryBase
 * SimilarityBase ×= R
 *
 * SimilarityBase ÷= SimilarityBase
 * SimilarityBase ÷= IsometryBase
 * SimilarityBase ÷= R
 *
 */

// XXX: code duplication: those macros are the same as for the isometry.

// Emits ONE binary-operator impl ($Op/$op) for the given lhs/rhs types.
// `$action` is the method body; `$lives` is an optional list of lifetimes
// used when lhs/rhs are references.
macro_rules! similarity_binop_impl(
    ($Op: ident, $op: ident;
     $lhs: ident: $Lhs: ty, $rhs: ident: $Rhs: ty, Output = $Output: ty;
     $action: expr; $($lives: tt),*) => {
        impl<$($lives ,)* N, D: DimName, S, R> $Op<$Rhs> for $Lhs
            where N: Real,
                  S: OwnedStorage<N, D, U1>,
                  R: Rotation<PointBase<N, D, S>>,
                  S::Alloc: OwnedAllocator<N, D, U1, S> {
            type Output = $Output;

            #[inline]
            fn $op($lhs, $rhs: $Rhs) -> Self::Output {
                $action
            }
        }
    }
);

// Emits all four by-value/by-reference combinations of a binary operator
// (val×val, &×val, val×&, &×&) from one invocation.
macro_rules! similarity_binop_impl_all(
    ($Op: ident, $op: ident;
     $lhs: ident: $Lhs: ty, $rhs: ident: $Rhs: ty, Output = $Output: ty;
     [val val] => $action_val_val: expr;
     [ref val] => $action_ref_val: expr;
     [val ref] => $action_val_ref: expr;
     [ref ref] => $action_ref_ref: expr;) => {
        similarity_binop_impl!(
            $Op, $op;
            $lhs: $Lhs, $rhs: $Rhs, Output = $Output;
            $action_val_val; );

        similarity_binop_impl!(
            $Op, $op;
            $lhs: &'a $Lhs, $rhs: $Rhs, Output = $Output;
            $action_ref_val; 'a);

        similarity_binop_impl!(
            $Op, $op;
            $lhs: $Lhs, $rhs: &'b $Rhs, Output = $Output;
            $action_val_ref; 'b);

        similarity_binop_impl!(
            $Op, $op;
            $lhs: &'a $Lhs, $rhs: &'b $Rhs, Output = $Output;
            $action_ref_ref; 'a, 'b);
    }
);

// Emits the by-value and by-reference impls of an assignment operator
// ($OpAssign/$op_assign) in one invocation.
macro_rules! similarity_binop_assign_impl_all(
    ($OpAssign: ident, $op_assign: ident;
     $lhs: ident: $Lhs: ty, $rhs: ident: $Rhs: ty;
     [val] => $action_val: expr;
     [ref] => $action_ref: expr;) => {
        impl<N, D: DimName, S, R> $OpAssign<$Rhs> for $Lhs
            where N: Real,
                  S: OwnedStorage<N, D, U1>,
                  R: Rotation<PointBase<N, D, S>>,
                  S::Alloc: OwnedAllocator<N, D, U1, S> {
            #[inline]
            fn $op_assign(&mut $lhs, $rhs: $Rhs) {
                $action_val
            }
        }

        impl<'b, N, D: DimName, S, R> $OpAssign<&'b $Rhs> for $Lhs
            where N: Real,
                  S: OwnedStorage<N, D, U1>,
                  R: Rotation<PointBase<N, D, S>>,
                  S::Alloc: OwnedAllocator<N, D, U1, S> {
            #[inline]
            fn $op_assign(&mut $lhs, $rhs: &'b $Rhs) {
                $action_ref
            }
        }
    }
);

// SimilarityBase × SimilarityBase
// SimilarityBase ÷ SimilarityBase
similarity_binop_impl_all!(
    Mul, mul;
    self: SimilarityBase<N, D, S, R>, rhs: SimilarityBase<N, D, S, R>, Output = SimilarityBase<N, D, S, R>;
    [val val] => &self * &rhs;
    [ref val] => self * &rhs;
    [val ref] => &self * rhs;
    [ref ref] => {
        let mut res = self * &rhs.isometry;
        res.prepend_scaling_mut(rhs.scaling());
        res
    };
);

similarity_binop_impl_all!(
    Div, div;
    self: SimilarityBase<N, D, S, R>, rhs: SimilarityBase<N, D, S, R>, Output = SimilarityBase<N, D, S, R>;
    [val val] => self * rhs.inverse();
    [ref val] => self * rhs.inverse();
    [val ref] => self * rhs.inverse();
    [ref ref] => self * rhs.inverse();
);

// SimilarityBase ×= TranslationBase
similarity_binop_assign_impl_all!(
    MulAssign, mul_assign;
    self: SimilarityBase<N, D, S, R>, rhs: TranslationBase<N, D, S>;
    [val] => *self *= &rhs;
    [ref] => {
        let shift = self.isometry.rotation.transform_vector(&rhs.vector) * self.scaling();
        self.isometry.translation.vector += shift;
    };
);

// SimilarityBase ×= SimilarityBase
// SimilarityBase ÷= SimilarityBase
similarity_binop_assign_impl_all!(
    MulAssign, mul_assign;
    self: SimilarityBase<N, D, S, R>, rhs: SimilarityBase<N, D, S, R>;
    [val] => *self *= &rhs;
    [ref] => {
        *self *= &rhs.isometry;
        self.prepend_scaling_mut(rhs.scaling());
    };
);

similarity_binop_assign_impl_all!(
    DivAssign, div_assign;
    self: SimilarityBase<N, D, S, R>, rhs: SimilarityBase<N, D, S, R>;
    [val] => *self /= &rhs;
    // FIXME: don't invert explicitly.
    [ref] => *self *= rhs.inverse();
);

// SimilarityBase ×= IsometryBase
// SimilarityBase ÷= IsometryBase
similarity_binop_assign_impl_all!(
    MulAssign, mul_assign;
    self: SimilarityBase<N, D, S, R>, rhs: IsometryBase<N, D, S, R>;
    [val] => *self *= &rhs;
    [ref] => {
        let shift = self.isometry.rotation.transform_vector(&rhs.translation.vector) * self.scaling();
        self.isometry.translation.vector += shift;
        self.isometry.rotation *= rhs.rotation.clone();
    };
);

similarity_binop_assign_impl_all!(
    DivAssign, div_assign;
    self: SimilarityBase<N, D, S, R>, rhs: IsometryBase<N, D, S, R>;
    [val] => *self /= &rhs;
    // FIXME: don't invert explicitly.
    [ref] => *self *= rhs.inverse();
);

// SimilarityBase ×= R
// SimilarityBase ÷= R
similarity_binop_assign_impl_all!(
    MulAssign, mul_assign;
    self: SimilarityBase<N, D, S, R>, rhs: R;
    [val] => self.isometry.rotation *= rhs;
    [ref] => self.isometry.rotation *= rhs.clone();
);

similarity_binop_assign_impl_all!(
    DivAssign, div_assign;
    self: SimilarityBase<N, D, S, R>, rhs: R;
    // FIXME: don't invert explicitly?
    [val] => *self *= rhs.inverse();
    [ref] => *self *= rhs.inverse();
);

// SimilarityBase × R
// SimilarityBase ÷ R
similarity_binop_impl_all!(
    Mul, mul;
    self: SimilarityBase<N, D, S, R>, rhs: R, Output = SimilarityBase<N, D, S, R>;
    [val val] => {
        let scaling = self.scaling();
        SimilarityBase::from_isometry(self.isometry * rhs, scaling)
    };
    [ref val] => SimilarityBase::from_isometry(&self.isometry * rhs, self.scaling());
    [val ref] => {
        let scaling = self.scaling();
        SimilarityBase::from_isometry(self.isometry * rhs, scaling)
    };
    [ref ref] => SimilarityBase::from_isometry(&self.isometry * rhs, self.scaling());
);

similarity_binop_impl_all!(
    Div, div;
    self: SimilarityBase<N, D, S, R>, rhs: R, Output = SimilarityBase<N, D, S, R>;
    [val val] => {
        let scaling = self.scaling();
        SimilarityBase::from_isometry(self.isometry / rhs, scaling)
    };
    [ref val] => SimilarityBase::from_isometry(&self.isometry / rhs, self.scaling());
    [val ref] => {
        let scaling = self.scaling();
        SimilarityBase::from_isometry(self.isometry / rhs, scaling)
    };
    [ref ref] => SimilarityBase::from_isometry(&self.isometry / rhs, self.scaling());
);

// SimilarityBase × IsometryBase
// SimilarityBase ÷ IsometryBase
similarity_binop_impl_all!(
    Mul, mul;
    self: SimilarityBase<N, D, S, R>, rhs: IsometryBase<N, D, S, R>, Output = SimilarityBase<N, D, S, R>;
    [val val] => &self * &rhs;
    [ref val] => self * &rhs;
    [val ref] => &self * rhs;
    [ref ref] => {
        let shift = self.isometry.rotation.transform_vector(&rhs.translation.vector) * self.scaling();
        SimilarityBase::from_parts(
            TranslationBase::from_vector(&self.isometry.translation.vector + shift),
            self.isometry.rotation.clone() * rhs.rotation.clone(),
            self.scaling())
    };
);

similarity_binop_impl_all!(
    Div, div;
    self: SimilarityBase<N, D, S, R>, rhs: IsometryBase<N, D, S, R>, Output = SimilarityBase<N, D, S, R>;
    [val val] => self * rhs.inverse();
    [ref val] => self * rhs.inverse();
    [val ref] => self * rhs.inverse();
    [ref ref] => self * rhs.inverse();
);

// IsometryBase × SimilarityBase
// IsometryBase ÷ SimilarityBase
similarity_binop_impl_all!(
    Mul, mul;
    self: IsometryBase<N, D, S, R>, rhs: SimilarityBase<N, D, S, R>, Output = SimilarityBase<N, D, S, R>;
    [val val] => {
        let scaling = rhs.scaling();
        SimilarityBase::from_isometry(self * rhs.isometry, scaling)
    };
    [ref val] => {
        let scaling = rhs.scaling();
        SimilarityBase::from_isometry(self * rhs.isometry, scaling)
    };
    [val ref] => {
        let scaling = rhs.scaling();
        SimilarityBase::from_isometry(self * &rhs.isometry, scaling)
    };
    [ref ref] => {
        let scaling = rhs.scaling();
        SimilarityBase::from_isometry(self * &rhs.isometry, scaling)
    };
);

similarity_binop_impl_all!(
    Div, div;
    self: IsometryBase<N, D, S, R>, rhs: SimilarityBase<N, D, S, R>, Output = SimilarityBase<N, D, S, R>;
    [val val] => self * rhs.inverse();
    [ref val] => self * rhs.inverse();
    [val ref] => self * rhs.inverse();
    [ref ref] => self * rhs.inverse();
);

// SimilarityBase × PointBase
similarity_binop_impl_all!(
    Mul, mul;
    self: SimilarityBase<N, D, S, R>, right: PointBase<N, D, S>, Output = PointBase<N, D, S>;
    [val val] => {
        let scaling = self.scaling();
        self.isometry.translation * (self.isometry.rotation.transform_point(&right) * scaling)
    };
    [ref val] => &self.isometry.translation * (self.isometry.rotation.transform_point(&right) * self.scaling());
    [val ref] => {
        let scaling = self.scaling();
        self.isometry.translation * (self.isometry.rotation.transform_point(right) * scaling)
    };
    [ref ref] => &self.isometry.translation * (self.isometry.rotation.transform_point(right) * self.scaling());
);

// SimilarityBase × Vector
similarity_binop_impl_all!(
    Mul, mul;
    self: SimilarityBase<N, D, S, R>, right: ColumnVector<N, D, S>, Output = ColumnVector<N, D, S>;
    [val val] => self.isometry.rotation.transform_vector(&right) * self.scaling();
    [ref val] => self.isometry.rotation.transform_vector(&right) * self.scaling();
    [val ref] => self.isometry.rotation.transform_vector(right) * self.scaling();
    [ref ref] => self.isometry.rotation.transform_vector(right) * self.scaling();
);

// SimilarityBase × TranslationBase
similarity_binop_impl_all!(
    Mul, mul;
    self: SimilarityBase<N, D, S, R>, right: TranslationBase<N, D, S>, Output = SimilarityBase<N, D, S, R>;
    [val val] => &self * &right;
    [ref val] => self * &right;
    [val ref] => &self * right;
    [ref ref] => {
        let shift = self.isometry.rotation.transform_vector(&right.vector) * self.scaling();
        SimilarityBase::from_parts(
            TranslationBase::from_vector(&self.isometry.translation.vector + shift),
            self.isometry.rotation.clone(),
            self.scaling())
    };
);

// TranslationBase × SimilarityBase
similarity_binop_impl_all!(
    Mul, mul;
    self: TranslationBase<N, D, S>, right: SimilarityBase<N, D, S, R>, Output = SimilarityBase<N, D, S, R>;
    [val val] => {
        let scaling = right.scaling();
        SimilarityBase::from_isometry(self * right.isometry, scaling)
    };
    [ref val] => {
        let scaling = right.scaling();
        SimilarityBase::from_isometry(self * right.isometry, scaling)
    };
    [val ref] => SimilarityBase::from_isometry(self * &right.isometry, right.scaling());
    [ref ref] => SimilarityBase::from_isometry(self * &right.isometry, right.scaling());
);

// Emits ONE operator impl for lhs/rhs with *different* storage parameters
// (SA/SB) — used for the explicit `R × SimilarityBase<R>` style impls below.
macro_rules! similarity_from_composition_impl(
    ($Op: ident, $op: ident;
     ($R1: ty, $C1: ty),($R2: ty, $C2: ty) $(for $Dims: ident: $DimsBound: ident),*;
     $lhs: ident: $Lhs: ty, $rhs: ident: $Rhs: ty, Output = $Output: ty;
     $action: expr; $($lives: tt),*) => {
        impl<$($lives ,)* N $(, $Dims: $DimsBound)*, SA, SB> $Op<$Rhs> for $Lhs
            where N: Real,
                  SA: OwnedStorage<N, $R1, $C1>,
                  SB: OwnedStorage<N, $R2, $C2, Alloc = SA::Alloc>,
                  SA::Alloc: OwnedAllocator<N, $R1, $C1, SA>,
                  SB::Alloc: OwnedAllocator<N, $R2, $C2, SB> {
            type Output = $Output;

            #[inline]
            fn $op($lhs, $rhs: $Rhs) -> Self::Output {
                $action
            }
        }
    }
);

// All four val/ref combinations of the above.
macro_rules! similarity_from_composition_impl_all(
    ($Op: ident, $op: ident;
     ($R1: ty, $C1: ty),($R2: ty, $C2: ty) $(for $Dims: ident: $DimsBound: ident),*;
     $lhs: ident: $Lhs: ty, $rhs: ident: $Rhs: ty, Output = $Output: ty;
     [val val] => $action_val_val: expr;
     [ref val] => $action_ref_val: expr;
     [val ref] => $action_val_ref: expr;
     [ref ref] => $action_ref_ref: expr;) => {
        similarity_from_composition_impl!(
            $Op, $op;
            ($R1, $C1),($R2, $C2) $(for $Dims: $DimsBound),*;
            $lhs: $Lhs, $rhs: $Rhs, Output = $Output;
            $action_val_val; );

        similarity_from_composition_impl!(
            $Op, $op;
            ($R1, $C1),($R2, $C2) $(for $Dims: $DimsBound),*;
            $lhs: &'a $Lhs, $rhs: $Rhs, Output = $Output;
            $action_ref_val; 'a);

        similarity_from_composition_impl!(
            $Op, $op;
            ($R1, $C1),($R2, $C2) $(for $Dims: $DimsBound),*;
            $lhs: $Lhs, $rhs: &'b $Rhs, Output = $Output;
            $action_val_ref; 'b);

        similarity_from_composition_impl!(
            $Op, $op;
            ($R1, $C1),($R2, $C2) $(for $Dims: $DimsBound),*;
            $lhs: &'a $Lhs, $rhs: &'b $Rhs, Output = $Output;
            $action_ref_ref; 'a, 'b);
    }
);

// RotationBase × SimilarityBase
similarity_from_composition_impl_all!(
    Mul, mul;
    (D, D), (D, U1) for D: DimName;
    self: RotationBase<N, D, SA>, right: SimilarityBase<N, D, SB, RotationBase<N, D, SA>>,
    Output = SimilarityBase<N, D, SB, RotationBase<N, D, SA>>;
    [val val] => &self * &right;
    [ref val] => self * &right;
    [val ref] => &self * right;
    [ref ref] => SimilarityBase::from_isometry(self * &right.isometry, right.scaling());
);

// RotationBase ÷ SimilarityBase
similarity_from_composition_impl_all!(
    Div, div;
    (D, D), (D, U1) for D: DimName;
    self: RotationBase<N, D, SA>, right: SimilarityBase<N, D, SB, RotationBase<N, D, SA>>,
    Output = SimilarityBase<N, D, SB, RotationBase<N, D, SA>>;
    // FIXME: don't call inverse explicitly?
    [val val] => self * right.inverse();
    [ref val] => self * right.inverse();
    [val ref] => self * right.inverse();
    [ref ref] => self * right.inverse();
);

// UnitQuaternion × SimilarityBase
similarity_from_composition_impl_all!(
    Mul, mul;
    (U4, U1), (U3, U1);
    self: UnitQuaternionBase<N, SA>, right: SimilarityBase<N, U3, SB, UnitQuaternionBase<N, SA>>,
    Output = SimilarityBase<N, U3, SB, UnitQuaternionBase<N, SA>>;
    [val val] => &self * &right;
    [ref val] => self * &right;
    [val ref] => &self * right;
    [ref ref] => SimilarityBase::from_isometry(self * &right.isometry, right.scaling());
);

// UnitQuaternion ÷ SimilarityBase
similarity_from_composition_impl_all!(
    Div, div;
    (U4, U1), (U3, U1);
    self: UnitQuaternionBase<N, SA>, right: SimilarityBase<N, U3, SB, UnitQuaternionBase<N, SA>>,
    Output = SimilarityBase<N, U3, SB, UnitQuaternionBase<N, SA>>;
    // FIXME: don't call inverse explicitly?
    [val val] => self * right.inverse();
    [ref val] => self * right.inverse();
    [val ref] => self * right.inverse();
    [ref ref] => self * right.inverse();
);
//! Reaction-driven pagination for bot messages: the [`Pagination`] trait adds
//! emoji reactions to a message, listens for add/remove events from the owner,
//! and edits the embed to the requested page.

mod badges;
mod bg_rankings;
mod command_count;
mod common;
mod country_snipe_list;
mod leaderboard;
mod map;
mod map_search;
mod match_compare;
mod medal_recent;
mod medals_common;
mod medals_list;
mod medals_missing;
mod most_played;
mod most_played_common;
mod nochoke;
mod osekai_medal_count;
mod osekai_medal_rarity;
mod osustats_globals;
mod osustats_list;
mod osutracker_countrytop;
mod osutracker_mappers;
mod osutracker_maps;
mod osutracker_mapsets;
mod osutracker_mods;
mod pinned;
mod player_snipe_list;
mod profile;
mod ranking;
mod ranking_countries;
mod recent_list;
mod scores;
mod sniped_difference;
mod top;
mod top_if;

use std::{borrow::Cow, time::Duration};

use eyre::Report;
use smallvec::SmallVec;
use tokio::time::sleep;
use tokio_stream::StreamExt;
use twilight_gateway::Event;
use twilight_http::error::ErrorType;
use twilight_model::{
    channel::{Message, Reaction, ReactionType},
    id::{marker::UserMarker, Id},
};

use crate::{
    embeds::EmbedData,
    error::Error,
    util::{numbers, send_reaction, Emote},
    BotResult, Context,
};

pub use self::{
    badges::BadgePagination, bg_rankings::BGRankingPagination,
    command_count::CommandCountPagination, common::CommonPagination,
    country_snipe_list::CountrySnipeListPagination, leaderboard::LeaderboardPagination,
    map::MapPagination, map_search::MapSearchPagination, match_compare::MatchComparePagination,
    medal_recent::MedalRecentPagination, medals_common::MedalsCommonPagination,
    medals_list::MedalsListPagination, medals_missing::MedalsMissingPagination,
    most_played::MostPlayedPagination, most_played_common::MostPlayedCommonPagination,
    nochoke::NoChokePagination, osekai_medal_count::MedalCountPagination,
    osekai_medal_rarity::MedalRarityPagination, osustats_globals::OsuStatsGlobalsPagination,
    osustats_list::OsuStatsListPagination, osutracker_countrytop::OsuTrackerCountryTopPagination,
    osutracker_mappers::OsuTrackerMappersPagination, osutracker_maps::OsuTrackerMapsPagination,
    osutracker_mapsets::OsuTrackerMapsetsPagination, osutracker_mods::OsuTrackerModsPagination,
    pinned::PinnedPagination, player_snipe_list::PlayerSnipeListPagination,
    profile::ProfilePagination, ranking::RankingPagination,
    ranking_countries::RankingCountriesPagination, recent_list::RecentListPagination,
    scores::ScoresPagination, sniped_difference::SnipedDiffPagination, top::TopPagination,
    top_if::TopIfPagination,
};

// Inline-allocated emote list; 7 covers the largest reaction set used below.
type ReactionVec = SmallVec<[Emote; 7]>;
type PaginationResult = Result<(), PaginationError>;

#[derive(Debug, thiserror::Error)]
#[error("pagination error")]
pub enum PaginationError {
    Bot(#[from] Error),
    Http(#[from] twilight_http::Error),
}

#[async_trait]
pub trait Pagination: Sync + Sized {
    type PageData: EmbedData;

    // Make these point to the corresponding struct fields
    fn msg(&self) -> &Message;
    fn pages(&self) -> Pages;
    fn pages_mut(&mut self) -> &mut Pages;

    // Implement this
    async fn build_page(&mut self) -> BotResult<Self::PageData>;

    // Optionally implement these
    fn reactions() -> ReactionVec {
        Self::arrow_reactions()
    }

    fn arrow_reactions() -> ReactionVec {
        smallvec![
            Emote::JumpStart,
            Emote::SingleStepBack,
            Emote::SingleStep,
            Emote::JumpEnd,
        ]
    }

    fn arrow_reactions_full() -> ReactionVec {
        smallvec![
            Emote::JumpStart,
            Emote::MultiStepBack,
            Emote::SingleStepBack,
            Emote::SingleStep,
            Emote::MultiStep,
            Emote::JumpEnd,
        ]
    }

    fn single_step(&self) -> usize {
        1
    }

    fn multi_step(&self) -> usize {
        self.pages().per_page
    }

    // Entry index to jump to for the "my_position" reaction, if supported.
    fn jump_index(&self) -> Option<usize> {
        None
    }

    fn thumbnail(&self) -> Option<String> {
        None
    }

    fn content(&self) -> Option<Cow<'_, str>> {
        None
    }

    fn process_data(&mut self, _data: &Self::PageData) {}

    async fn final_processing(mut self, _ctx: &Context) -> BotResult<()> {
        Ok(())
    }

    // Don't implement anything else

    // Runs the pagination loop: adds the reactions, handles the owner's
    // reaction events for `duration` seconds, then cleans up the reactions.
    async fn start(
        mut self,
        ctx: &Context,
        owner: Id<UserMarker>,
        duration: u64,
    ) -> PaginationResult {
        ctx.store_msg(self.msg().id);
        let reactions = Self::reactions();

        let reaction_stream = {
            let msg = self.msg();
            let msg_id = msg.id;

            for emote in &reactions {
                send_reaction(ctx, msg, *emote).await?;
            }

            // Only reactions on this message by the invoking user are considered;
            // both adding and removing a reaction trigger a page turn.
            ctx.standby
                .wait_for_event_stream(move |event: &Event| match event {
                    Event::ReactionAdd(event) => {
                        event.message_id == msg_id && event.user_id == owner
                    }
                    Event::ReactionRemove(event) => {
                        event.message_id == msg_id && event.user_id == owner
                    }
                    _ => false,
                })
                .map(|event| match event {
                    Event::ReactionAdd(add) => ReactionWrapper::Add(add.0),
                    Event::ReactionRemove(remove) => ReactionWrapper::Remove(remove.0),
                    _ => unreachable!(),
                })
                .timeout(Duration::from_secs(duration))
        };

        tokio::pin!(reaction_stream);

        // The stream ends when the timeout elapses (next() yields Err).
        while let Some(Ok(reaction)) = reaction_stream.next().await {
            if let Err(why) = self.next_page(reaction.into_inner(), ctx).await {
                warn!("{:?}", Report::new(why).wrap_err("error while paginating"));
            }
        }

        let msg = self.msg();

        if !ctx.remove_msg(msg.id) {
            return Ok(());
        }

        let delete_fut = ctx.http.delete_all_reactions(msg.channel_id, msg.id).exec();

        if let Err(why) = delete_fut.await {
            // 403: lacking permission to clear everyone's reactions — fall back
            // to removing only the bot's own reactions, one by one.
            if matches!(why.kind(), ErrorType::Response { status, .. } if status.raw() == 403) {
                sleep(Duration::from_millis(100)).await;

                for emote in &reactions {
                    let request_reaction = emote.request_reaction_type();

                    ctx.http
                        .delete_current_user_reaction(msg.channel_id, msg.id, &request_reaction)
                        .exec()
                        .await?;
                }
            } else {
                return Err(why.into());
            }
        }

        self.final_processing(ctx).await.map_err(From::from)
    }

    // Rebuilds and edits the message if the reaction changed the page index.
    async fn next_page(&mut self, reaction: Reaction, ctx: &Context) -> BotResult<()> {
        if self.process_reaction(&reaction.emoji).await == PageChange::Change {
            let data = self.build_page().await?;
            self.process_data(&data);
            let msg = self.msg();
            let mut update = ctx.http.update_message(msg.channel_id, msg.id);
            let content = self.content();

            if let Some(ref content) = content {
                update = update.content(Some(content.as_ref()))?;
            }

            let mut builder = data.into_builder();

            if let Some(thumbnail) = self.thumbnail() {
                builder = builder.thumbnail(thumbnail);
            }

            update.embeds(Some(&[builder.build()]))?.exec().await?;
        }

        Ok(())
    }

    // Maps a reaction emote name to a new page index; `None` means no change
    // (unknown emote, or already at the boundary the emote would move past).
    async fn process_reaction(&mut self, reaction: &ReactionType) -> PageChange {
        let change_result = match reaction {
            ReactionType::Custom {
                name: Some(name), ..
            } => match name.as_str() {
                // Move to start
                "jump_start" => (self.index() != 0).then(|| 0),
                // Move one page left
                "multi_step_back" => match self.index() {
                    0 => None,
                    idx => Some(idx.saturating_sub(self.multi_step())),
                },
                // Move one index left
                "single_step_back" => match self.index() {
                    0 => None,
                    idx => Some(idx.saturating_sub(self.single_step())),
                },
                // Move to specific position
                "my_position" => {
                    if let Some(index) = self.jump_index() {
                        let i = numbers::last_multiple(self.per_page(), index + 1);

                        if i != self.index() {
                            Some(i)
                        } else {
                            None
                        }
                    } else {
                        None
                    }
                }
                // Move one index right
                "single_step" => (self.index() != self.last_index())
                    .then(|| self.last_index().min(self.index() + self.single_step())),
                // Move one page right
                "multi_step" => (self.index() != self.last_index())
                    .then(|| self.last_index().min(self.index() + self.multi_step())),
                // Move to end
                "jump_end" => (self.index() != self.last_index()).then(|| self.last_index()),
                _ => None,
            },
            _ => None,
        };

        match change_result {
            Some(index) => {
                *self.index_mut() = index;

                PageChange::Change
            }
            None => PageChange::None,
        }
    }

    fn index(&self) -> usize {
        self.pages().index
    }

    fn last_index(&self) -> usize {
        self.pages().last_index
    }

    fn per_page(&self) -> usize {
        self.pages().per_page
    }

    fn total_pages(&self) -> usize {
        self.pages().total_pages
    }

    fn index_mut(&mut self) -> &mut usize {
        &mut self.pages_mut().index
    }

    // 1-based page number derived from the entry index.
    fn page(&self) -> usize {
        self.index() / self.per_page() + 1
    }
}

#[derive(Eq, PartialEq)]
pub enum PageChange {
    None,
    Change,
}

// Cursor state: `index` is the index of the first entry on the current page.
#[derive(Copy, Clone, Debug)]
pub struct Pages {
    index: usize,
    last_index: usize,
    per_page: usize,
    total_pages: usize,
}

impl Pages {
    /// `per_page`: How many entries per page
    ///
    /// `amount`: How many entries in total
    pub fn new(per_page: usize, amount: usize) -> Self {
        Self {
            index: 0,
            per_page,
            total_pages: numbers::div_euclid(per_page, amount),
            last_index: numbers::last_multiple(per_page, amount),
        }
    }
}

// Distinguishes add vs. remove events; both turn the page identically.
enum ReactionWrapper {
    Add(Reaction),
    Remove(Reaction),
}

impl ReactionWrapper {
    fn into_inner(self) -> Reaction {
        match self {
            Self::Add(r) | Self::Remove(r) => r,
        }
    }
}
use error; use clap::App; pub struct ArgValues { pub width: f32, pub height: f32, // None if using default fragment shader pub shaderpath: Option<String>, // None if using default textures pub texture0path: Option<String>, pub texture1path: Option<String>, pub texture2path: Option<String>, pub texture3path: Option<String>, // Some(name) if running an example pub examplename: Option<String>, // Some(id) if downloading a shader pub getid: Option<String>, // true if also running downloaded shader pub andrun: bool, } impl ArgValues { pub fn from_cli() -> error::Result<ArgValues> { // Load CLI matches let yaml = load_yaml!("cli.yml"); let matches = App::from_yaml(yaml).get_matches(); // Closure for converting &str to String let str_to_string = |s: &str| s.to_string(); // Window dimensions let width = matches.value_of("width").unwrap().parse()?; let height = matches.value_of("height").unwrap().parse()?; // Check to see if they want an example run let examplename = matches.value_of("example").map(&str_to_string); // Fragment shader path let shaderpath = matches.value_of("shader").map(&str_to_string); // Texture paths let texture0path = matches.value_of("texture0").map(&str_to_string); let texture1path = matches.value_of("texture1").map(&str_to_string); let texture2path = matches.value_of("texture2").map(&str_to_string); let texture3path = matches.value_of("texture3").map(&str_to_string); // Check to see if they want to download a shader (and then run it) let (getid, andrun) = if let Some(getmatches) = matches.subcommand_matches("get") { (getmatches.value_of("id").map(&str_to_string), getmatches.is_present("run")) } else { (None, false) }; Ok(ArgValues { width: width, height: height, shaderpath: shaderpath, texture0path: texture0path, texture1path: texture1path, texture2path: texture2path, texture3path: texture3path, examplename: examplename, getid: getid, andrun: andrun, }) } }
extern crate dmbc;
extern crate exonum;
extern crate exonum_testkit;
extern crate hyper;
extern crate iron;
extern crate iron_test;
extern crate mount;
extern crate serde_json;

pub mod dmbc_testkit;

use std::collections::HashMap;

use dmbc_testkit::{DmbcTestApiBuilder, DmbcTestKitApi};
use exonum::crypto;
use hyper::status::StatusCode;

use dmbc::currency::api::fees::FeesResponseBody;
use dmbc::currency::configuration::{Configuration, TransactionFees, TransactionPermissions};
use dmbc::currency::error::Error;
use dmbc::currency::transactions::builders::transaction;

// Transfer fee when the sender is NOT the asset creator: the sender owes the
// flat transaction fee plus the per-unit fixed fee for every transferred unit.
#[test]
fn fees_for_transfer() {
    let transaction_fee = 1000;
    let amount = 2;
    let fixed = 10;
    let meta_data = "asset";
    // Only the transfer fee is non-zero in this configuration.
    let config_fees = TransactionFees::with_default_key(0, 0, 0, 0, 0, transaction_fee);
    let permissions = TransactionPermissions::default();

    let (creator_key, _) = crypto::gen_keypair();
    let (recipient_key, _) = crypto::gen_keypair();
    let (sender_pub_key, sender_sec_key) = crypto::gen_keypair();

    // Asset created by a third party, then placed into the sender's wallet.
    let (asset, info) = dmbc_testkit::create_asset(
        meta_data,
        amount,
        dmbc_testkit::asset_fees(fixed, "0.0".parse().unwrap()),
        &creator_key,
    );

    let testkit = DmbcTestApiBuilder::new()
        .with_configuration(Configuration::new(config_fees, permissions))
        .add_asset_to_wallet(&sender_pub_key, (asset.clone(), info))
        .create();
    let api = testkit.api();

    let tx_transfer = transaction::Builder::new()
        .keypair(sender_pub_key, sender_sec_key)
        .tx_transfer()
        .add_asset_value(asset)
        .recipient(recipient_key)
        .seed(42)
        .build();

    let (status, response) = api.post_fee(&tx_transfer);

    // Sender pays the flat fee plus `fixed` per transferred unit.
    let mut expected = HashMap::new();
    let expected_fee = transaction_fee + amount * fixed;
    expected.insert(sender_pub_key, expected_fee);

    assert_eq!(status, StatusCode::Ok);
    assert_eq!(response, Ok(Ok(FeesResponseBody { fees: expected })));
}

// When the sender created the asset themselves, only the flat transaction
// fee is charged (no per-unit creator fee is owed to oneself).
#[test]
fn fees_for_transfer_sender_is_creator() {
    let transaction_fee = 1000;
    let amount = 2;
    let fixed = 10;
    let meta_data = "asset";
    let config_fees = TransactionFees::with_default_key(0, 0, 0, 0, 0, transaction_fee);
    let permissions = TransactionPermissions::default();

    let (recipient_key, _) = crypto::gen_keypair();
    let (sender_pub_key, sender_sec_key) = crypto::gen_keypair();

    // Note: the asset's creator key IS the sender's key here.
    let (asset, info) = dmbc_testkit::create_asset(
        meta_data,
        amount,
        dmbc_testkit::asset_fees(fixed, "0.0".parse().unwrap()),
        &sender_pub_key,
    );

    let testkit = DmbcTestApiBuilder::new()
        .with_configuration(Configuration::new(config_fees, permissions))
        .add_asset_to_wallet(&sender_pub_key, (asset.clone(), info))
        .create();
    let api = testkit.api();

    let tx_transfer = transaction::Builder::new()
        .keypair(sender_pub_key, sender_sec_key)
        .tx_transfer()
        .add_asset_value(asset)
        .recipient(recipient_key)
        .seed(42)
        .build();

    let (status, response) = api.post_fee(&tx_transfer);

    // Only the flat fee — no per-unit component.
    let mut expected = HashMap::new();
    expected.insert(sender_pub_key, transaction_fee);

    assert_eq!(status, StatusCode::Ok);
    assert_eq!(response, Ok(Ok(FeesResponseBody { fees: expected })));
}

// Fee query for a transfer of an asset the blockchain doesn't know about
// must fail with `Error::AssetNotFound` and HTTP 400.
#[test]
fn fees_for_transfer_asset_not_found() {
    let transaction_fee = 1000;
    let amount = 2;
    let fixed = 10;
    let meta_data = "asset";
    let config_fees = TransactionFees::with_default_key(0, 0, 0, 0, 0, transaction_fee);
    let permissions = TransactionPermissions::default();

    let (creator_key, _) = crypto::gen_keypair();
    let (recipient_key, _) = crypto::gen_keypair();
    let (sender_pub_key, sender_sec_key) = crypto::gen_keypair();

    let (asset, _) = dmbc_testkit::create_asset(
        meta_data,
        amount,
        dmbc_testkit::asset_fees(fixed, "0.0".parse().unwrap()),
        &creator_key,
    );

    // The asset is deliberately NOT added to any wallet.
    let testkit = DmbcTestApiBuilder::new()
        .with_configuration(Configuration::new(config_fees, permissions))
        .create();
    let api = testkit.api();

    let tx_transfer = transaction::Builder::new()
        .keypair(sender_pub_key, sender_sec_key)
        .tx_transfer()
        .add_asset_value(asset)
        .recipient(recipient_key)
        .seed(42)
        .build();

    let (status, response) = api.post_fee(&tx_transfer);

    assert_eq!(status, StatusCode::BadRequest);
    assert_eq!(response, Ok(Err(Error::AssetNotFound)));
}
use diesel::pg::PgConnection; use iron::prelude::*; use iron; use iron::middleware; use r2d2::{Pool, PooledConnection}; use r2d2_diesel::ConnectionManager; type DBPool = Pool<ConnectionManager<PgConnection>>; pub struct DatabaseMiddleware { pub pool: DBPool } impl DatabaseMiddleware { pub fn new(pool: DBPool) -> DatabaseMiddleware { DatabaseMiddleware { pool: pool } } } impl iron::typemap::Key for DatabaseMiddleware { type Value = PooledConnection<ConnectionManager<PgConnection>>; } impl middleware::BeforeMiddleware for DatabaseMiddleware { fn before(&self, req: &mut Request) -> IronResult<()> { req.extensions.insert::<DatabaseMiddleware>(self.pool.get().unwrap()); Ok(()) } } pub trait DatabaseExt { fn db_conn(&self) -> &PgConnection; } impl<'a, 'b> DatabaseExt for Request<'a, 'b> { fn db_conn(&self) -> &PgConnection { self.extensions.get::<DatabaseMiddleware>().unwrap() } } /// Hack for properly clearing session cookies pub struct DeleteCookieMiddleware; impl middleware::AfterMiddleware for DeleteCookieMiddleware { fn after(&self, _: &mut Request, mut res: Response) -> IronResult<Response> { use iron::headers::SetCookie; { let headers = &mut res.headers; if let Some(sc) = headers.get_mut::<SetCookie>() { let SetCookie(ref mut cookies) = *sc; for c in cookies { if c.starts_with("X-Liuyan-Session=; Max-Age=0;") { c.push_str(";Path=/"); } } } } Ok(res) } } /// Middleware for inserting Access-Control-Allow-Origin header pub struct CorsMiddleware { domain: String } impl CorsMiddleware { pub fn new(domain: &String) -> CorsMiddleware { CorsMiddleware { domain: domain.clone() } } } impl middleware::AfterMiddleware for CorsMiddleware { fn after(&self, _: &mut Request, mut res: Response) -> IronResult<Response> { use iron::headers::AccessControlAllowOrigin; res.headers.set(AccessControlAllowOrigin::Value(self.domain.clone())); Ok(res) } }
pub mod actors; pub mod import { pub use { futures :: { future::{ FutureExt }, SinkExt } , thespis :: { * } , log :: { * } , }; }
use std::io;
use std::str::FromStr;

use crate::base::Part;

// Advent of Code 2018, day 14: recipe scores produced by two elves walking
// over a growing scoreboard that starts as [3, 7].

pub fn part1(r: &mut dyn io::Read) -> Result<String, String> {
    solve(r, Part::One)
}

pub fn part2(r: &mut dyn io::Read) -> Result<String, String> {
    solve(r, Part::Two)
}

/// Read the puzzle input and dispatch to the requested part.
///
/// Part one: the ten scores following the first `input` recipes.
/// Part two: how many recipes precede the first occurrence of the input
/// digits in the score sequence.
fn solve(r: &mut dyn io::Read, part: Part) -> Result<String, String> {
    let mut input = String::new();
    r.read_to_string(&mut input).map_err(|e| e.to_string())?;
    input = input.trim().to_string();
    let made_recipes = parse_input(&input);
    let nr_recipes = made_recipes + 10;
    // Scoreboard starts with the two initial recipes, 3 and 7.
    let mut scores = Vec::with_capacity(nr_recipes);
    scores.extend(&[3, 7]);
    // Current positions of the two elves on the scoreboard.
    let mut indices = [0, 1];
    match part {
        Part::One => {
            generate_scores(&mut scores, &mut indices, nr_recipes);
            let following_ten = scores.iter().skip(made_recipes).take(10);
            let s = following_ten
                .map(|score| score.to_string())
                .collect::<String>();
            Ok(s)
        }
        Part::Two => {
            // For part two the raw input digits are the pattern to find.
            let pattern = input
                .chars()
                .map(|c| c.to_string())
                .map(|s| usize::from_str(&s).unwrap())
                .collect::<Vec<usize>>();
            let recipes_before = generate_until_pattern(&mut scores, &mut indices, &pattern);
            Ok(recipes_before.to_string())
        }
    }
}

/// Parse the trimmed input as a recipe count; panics on malformed input.
fn parse_input(input: &str) -> usize {
    usize::from_str(input).unwrap()
}

/// Debug helper: print the scoreboard, marking elf 0 with () and elf 1 with [].
#[allow(dead_code)]
fn print_scores(scores: &[usize], indices: &[usize]) {
    for (i, score) in scores.iter().enumerate() {
        let surround = if i == indices[0] {
            ('(', ')')
        } else if i == indices[1] {
            ('[', ']')
        } else {
            (' ', ' ')
        };
        print!("{}{}{}", surround.0, score, surround.1);
    }
    println!();
}

/// Append one round of recipes (the one or two digits of the elves' score
/// sum), then advance each elf by 1 + its current score, wrapping around.
fn add_scores_to(scores: &mut Vec<usize>, indices: &mut [usize]) {
    let sum = indices.iter().map(|&idx| scores[idx]).sum::<usize>();
    // A two-digit sum (max 9 + 9 = 18) contributes a leading 1.
    if sum >= 10 {
        scores.push(1);
    }
    scores.push(sum % 10);
    for idx in indices.iter_mut() {
        *idx += 1 + scores[*idx];
        *idx %= scores.len();
    }
}

/// Keep generating until at least `nr_recipes` scores exist.
fn generate_scores(scores: &mut Vec<usize>, indices: &mut [usize], nr_recipes: usize) {
    while scores.len() < nr_recipes {
        add_scores_to(scores, indices);
    }
}

/// Generate scores until `pattern` appears; returns the number of recipes
/// that precede the first occurrence.
fn generate_until_pattern(
    scores: &mut Vec<usize>,
    indices: &mut [usize],
    pattern: &[usize],
) -> usize {
    let n = pattern.len();
    // Need at least `n` scores before any window comparison is possible.
    while scores.len() < n {
        add_scores_to(scores, indices);
    }
    let mut found = false;
    let mut starting_from = 0;
    while !found {
        match contains_subslice_starting_from(&scores, pattern, starting_from) {
            (true, idx) => {
                found = true;
                starting_from = idx;
            }
            (false, idx) => {
                // Resume just past the last fully-checked window; each round
                // appends one or two fresh scores to search.
                starting_from = idx + 1;
                add_scores_to(scores, indices);
            }
        };
    }
    starting_from
}

/// Search `slice[starting_from..]` for `pattern`. On success returns
/// `(true, match_start)`; on failure `(false, last_window_start)` so the
/// caller can resume without rescanning old windows.
fn contains_subslice_starting_from<T>(
    slice: &[T],
    pattern: &[T],
    starting_from: usize,
) -> (bool, usize)
where
    T: PartialEq,
{
    let n = pattern.len();
    if let Some((idx, _subslice)) = slice[starting_from..]
        .windows(n)
        .enumerate()
        .find(|&(_i, subslice)| subslice == pattern)
    {
        (true, starting_from + idx)
    } else {
        (false, slice.len() - n)
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::test;

    mod part1 {
        use super::*;

        test!(example1, "9", "5158916779", part1);
        test!(example2, "5", "0124515891", part1);
        test!(example3, "18", "9251071085", part1);
        test!(example4, "2018", "5941429882", part1);
        test!(actual, file "../../../inputs/2018/14", "5371393113", part1);
    }

    mod part2 {
        use super::*;

        test!(example1, "51589", "9", part2);
        test!(example2, "01245", "5", part2);
        test!(example3, "92510", "18", part2);
        test!(example4, "59414", "2018", part2);
        test!(actual, file "../../../inputs/2018/14", "20286858", part2);
    }
}
use crate::*;

/// Register the built-in instance methods on Ruby's `Module` class.
pub fn init(globals: &mut Globals) {
    let class = globals.module_class;
    globals.add_builtin_instance_method(class, "constants", constants);
    globals.add_builtin_instance_method(class, "instance_methods", instance_methods);
    globals.add_builtin_instance_method(class, "attr_accessor", attr_accessor);
    globals.add_builtin_instance_method(class, "attr", attr_reader);
    globals.add_builtin_instance_method(class, "attr_reader", attr_reader);
    globals.add_builtin_instance_method(class, "attr_writer", attr_writer);
    globals.add_builtin_instance_method(class, "module_function", module_function);
    globals.add_builtin_instance_method(class, "singleton_class?", singleton_class);
    globals.add_builtin_instance_method(class, "const_get", const_get);
    globals.add_builtin_instance_method(class, "include", include);
    globals.add_builtin_instance_method(class, "included_modules", included_modules);
    globals.add_builtin_instance_method(class, "ancestors", ancestors);
}

/// `Module#constants`: collect the uppercase-initial variable names of the
/// receiver and its superclasses (stopping before `Object`) as symbols.
fn constants(vm: &mut VM, self_val: Value, _: &Args) -> VMResult {
    let mut v: Vec<Value> = vec![];
    let mut class = self_val;
    loop {
        v.append(
            &mut class
                .as_object()
                .var_table()
                .keys()
                // Ruby constants start with an ASCII uppercase letter.
                .filter(|x| {
                    vm.globals
                        .get_ident_name(**x)
                        .chars()
                        .nth(0)
                        .unwrap()
                        .is_ascii_uppercase()
                })
                .map(|k| Value::symbol(*k))
                .collect(),
        );
        match class.superclass() {
            Some(superclass) => {
                if superclass == vm.globals.builtins.object {
                    break;
                } else {
                    class = superclass
                };
            }
            None => break,
        }
    }
    Ok(Value::array_from(&vm.globals, v))
}

/// `Module#const_get`: look up a constant by symbol along the superclass chain.
fn const_get(vm: &mut VM, self_val: Value, args: &Args) -> VMResult {
    vm.check_args_num(args.len(), 1)?;
    let name = match args[0].as_symbol() {
        Some(symbol) => symbol,
        None => return Err(vm.error_type("1st arg must be Symbol.")),
    };
    let val = vm.get_super_const(self_val, name)?;
    Ok(val)
}

/// `Module#instance_methods`: list method names as symbols. With no argument
/// (or a truthy one) inherited methods are included.
fn instance_methods(vm: &mut VM, self_val: Value, args: &Args) -> VMResult {
    let mut class = vm.expect_module(self_val)?;
    vm.check_args_range(args.len(), 0, 1)?;
    let inherited_too = args.len() == 0 || vm.val_to_bool(args[0]);
    match inherited_too {
        false => {
            // Only the receiver's own method table.
            let v = class
                .method_table
                .keys()
                .map(|k| Value::symbol(*k))
                .collect();
            Ok(Value::array_from(&vm.globals, v))
        }
        true => {
            // Union of method names along the entire superclass chain;
            // a set removes overridden duplicates.
            let mut v = std::collections::HashSet::new();
            loop {
                v = v
                    .union(
                        &class
                            .method_table
                            .keys()
                            .map(|k| Value::symbol(*k))
                            .collect(),
                    )
                    .cloned()
                    .collect();
                match class.superclass() {
                    Some(superclass) => class = superclass,
                    None => break,
                };
            }
            Ok(Value::array_from(&vm.globals, v.iter().cloned().collect()))
        }
    }
}

/// `Module#attr_accessor`: define a reader and a writer for each symbol arg.
pub fn attr_accessor(vm: &mut VM, self_val: Value, args: &Args) -> VMResult {
    for arg in args.iter() {
        if arg.is_packed_symbol() {
            let id = arg.as_packed_symbol();
            define_reader(vm, self_val, id);
            define_writer(vm, self_val, id);
        } else {
            return Err(vm.error_name("Each of args for attr_accessor must be a symbol."));
        }
    }
    Ok(Value::nil())
}

/// `Module#attr_reader` (also bound to `attr`): define readers only.
fn attr_reader(vm: &mut VM, self_val: Value, args: &Args) -> VMResult {
    for arg in args.iter() {
        if arg.is_packed_symbol() {
            let id = arg.as_packed_symbol();
            define_reader(vm, self_val, id);
        } else {
            // Fixed copy-paste: this message used to say "attr_accessor".
            return Err(vm.error_name("Each of args for attr_reader must be a symbol."));
        }
    }
    Ok(Value::nil())
}

/// `Module#attr_writer`: define writers only.
fn attr_writer(vm: &mut VM, self_val: Value, args: &Args) -> VMResult {
    for arg in args.iter() {
        if arg.is_packed_symbol() {
            let id = arg.as_packed_symbol();
            define_writer(vm, self_val, id);
        } else {
            // Fixed copy-paste: this message used to say "attr_accessor".
            return Err(vm.error_name("Each of args for attr_writer must be a symbol."));
        }
    }
    Ok(Value::nil())
}

/// Install an `AttrReader` method `id` returning `@id`.
fn define_reader(vm: &mut VM, class: Value, id: IdentId) {
    let instance_var_id = get_instance_var(vm, id);
    let info = MethodInfo::AttrReader {
        id: instance_var_id,
    };
    let methodref = vm.globals.add_method(info);
    vm.add_instance_method(class, id, methodref);
}

/// Install an `AttrWriter` method `id=` assigning `@id`.
fn define_writer(vm: &mut VM, class: Value, id: IdentId) {
    let instance_var_id = get_instance_var(vm, id);
    let assign_id = vm.globals.ident_table.add_postfix(id, "=");
    let info = MethodInfo::AttrWriter {
        id: instance_var_id,
    };
    let methodref = vm.globals.add_method(info);
    vm.add_instance_method(class, assign_id, methodref);
}

/// Map an attribute name `foo` to its instance-variable identifier `@foo`.
fn get_instance_var(vm: &mut VM, id: IdentId) -> IdentId {
    let s = vm.globals.get_ident_name(id).to_string();
    vm.globals.get_ident_id(format!("@{}", s))
}

/// `Module#module_function` (no-arg form): switch the VM into
/// module-function definition mode.
fn module_function(vm: &mut VM, _: Value, args: &Args) -> VMResult {
    vm.check_args_num(args.len(), 0)?;
    vm.module_function(true);
    Ok(Value::nil())
}

/// `Module#singleton_class?`
fn singleton_class(vm: &mut VM, self_val: Value, _: &Args) -> VMResult {
    let class = vm.expect_module(self_val)?;
    Ok(Value::bool(class.is_singleton))
}

/// `Module#include`: append a module to the receiver's include list.
fn include(vm: &mut VM, self_val: Value, args: &Args) -> VMResult {
    vm.check_args_num(args.len(), 1)?;
    let mut class = vm.expect_module(self_val)?;
    let module = args[0];
    class.include.push(module);
    Ok(Value::nil())
}

/// `Module#included_modules`: modules mixed in anywhere along the chain.
fn included_modules(vm: &mut VM, self_val: Value, args: &Args) -> VMResult {
    vm.check_args_num(args.len(), 0)?;
    let mut class = self_val;
    let mut ary = vec![];
    loop {
        if class.is_nil() {
            break;
        }
        class = match class.as_module() {
            Some(cref) => {
                for included in &cref.include {
                    ary.push(*included);
                }
                cref.superclass
            }
            None => {
                let inspect = vm.val_inspect(class);
                return Err(
                    vm.error_internal(format!("Illegal value in superclass chain. {}", inspect))
                );
            }
        };
    }
    Ok(Value::array_from(&vm.globals, ary))
}

/// `Module#ancestors`: the receiver, its included modules, and superclasses,
/// in method-lookup order.
fn ancestors(vm: &mut VM, self_val: Value, args: &Args) -> VMResult {
    vm.check_args_num(args.len(), 0)?;
    let mut superclass = self_val;
    let mut ary = vec![];
    loop {
        if superclass.is_nil() {
            break;
        }
        ary.push(superclass);
        superclass = match superclass.as_module() {
            Some(cref) => {
                for included in &cref.include {
                    ary.push(*included);
                }
                cref.superclass
            }
            None => {
                let inspect = vm.val_inspect(superclass);
                return Err(
                    vm.error_internal(format!("Illegal value in superclass chain. {}", inspect))
                );
            }
        };
    }
    Ok(Value::array_from(&vm.globals, ary))
}

#[cfg(test)]
mod test {
    use crate::test::*;

    #[test]
    fn module_function() {
        let program = r#"
    class Foo
        module_function
        def bar
            123
        end
    end
    assert(123, Foo.bar)
    assert(123, Foo.new.bar)
    "#;
        assert_script(program);
    }

    #[test]
    fn constants() {
        let program = r#"
    class Foo
        Bar = 100
        Ker = 777
    end
    class Bar < Foo
        Doo = 555
    end
    def ary_cmp(a,b)
        return false if a - b != []
        return false if b - a != []
        true
    end
    assert(100, Foo.const_get(:Bar))
    assert(100, Bar.const_get(:Bar))
    assert(true, ary_cmp(Foo.constants, [:Bar, :Ker]))
    assert(true, ary_cmp(Bar.constants, [:Doo, :Bar, :Ker]))
    "#;
        assert_script(program);
    }

    #[test]
    fn attr_accessor() {
        let program = "
    class Foo
        attr_accessor :car, :cdr
    end
    bar = Foo.new
    assert nil, bar.car
    assert nil, bar.cdr
    bar.car = 1000
    bar.cdr = :something
    assert 1000, bar.car
    assert :something, bar.cdr
    ";
        assert_script(program);
    }

    #[test]
    fn module_methods() {
        let program = r#"
    class A
        Foo = 100
        Bar = 200
        def fn
            puts "fn"
        end
        def fo
            puts "fo"
        end
    end
    def ary_cmp(a,b)
        puts a,b
        return false if a - b != []
        return false if b - a != []
        true
    end
    assert(true, ary_cmp(A.constants, [:Bar, :Foo]))
    assert(true, ary_cmp(A.instance_methods - Class.instance_methods, [:fn, :fo]))
    "#;
        assert_script(program);
    }
}
use std::fs;
use std::convert::TryFrom;

// Advent of Code 2020, day 11: seating-area cellular automaton.

#[derive(Clone, Copy, PartialEq)]
enum Cell {
    Floor,
    Empty,
    Taken,
}

#[derive(Clone)]
struct SeatingArea {
    cells: Vec<Cell>, // row-major grid, `w * h` cells
    w: usize,
    h: usize,
}

impl SeatingArea {
    /// Parse the puzzle grid ('.' floor, 'L' empty seat, '#' taken seat).
    /// Panics on any other character; assumes all lines have equal width.
    fn from_str(s: &str) -> Self {
        let mut w = 0;
        let cells: Vec<_> = s
            .lines()
            .flat_map(|l| {
                w = l.len();
                l.chars().map(|c| match c {
                    '.' => Cell::Floor,
                    'L' => Cell::Empty,
                    '#' => Cell::Taken,
                    _ => unreachable!(),
                })
            }).collect();
        let h = cells.len() / w;
        Self { cells, w, h }
    }

    fn get(&self, x: usize, y: usize) -> Cell {
        self.cells[y * self.w + x]
    }

    fn set(&mut self, x: usize, y: usize, c: Cell) {
        self.cells[y * self.w + x] = c;
    }

    /// Walk from (x, y) in direction (dx, dy) and report whether a taken seat
    /// is "seen". `dist == 0` means unlimited range, skipping floor (part 2);
    /// a nonzero `dist` checks the cell exactly `dist` steps away (part 1).
    fn check_dir(&self, x: isize, y: isize, dx: isize, dy: isize, dist: usize) -> bool {
        // (0, 0) is not a direction — the cell itself never counts.
        if dx == 0 && dy == 0 {
            return false;
        }

        let mut cx = x;
        let mut cy = y;
        let mut steps = 0;
        loop {
            cx += dx;
            cy += dy;
            steps += 1;

            // Out of bounds: nothing visible this way.
            if cx < 0 || usize::try_from(cx).unwrap() >= self.w ||
                cy < 0 || usize::try_from(cy).unwrap() >= self.h {
                return false;
            }

            let cell = self.get(
                usize::try_from(cx).unwrap(),
                usize::try_from(cy).unwrap(),
            );

            // Bounded scan: only the final cell decides.
            if dist != 0 && steps == dist {
                return cell == Cell::Taken;
            }

            match cell {
                Cell::Empty => return false, // an empty seat blocks the view
                Cell::Taken => return true,
                Cell::Floor => continue,     // look past floor (unbounded mode)
            }
        }
    }

    /// Count taken seats visible from (x, y) over all eight directions.
    fn count(&self, x: usize, y: usize, dist: usize) -> i32 {
        let x = isize::try_from(x).unwrap();
        let y = isize::try_from(y).unwrap();

        let mut count = 0;
        for dy in -1..=1 {
            for dx in -1..=1 {
                if self.check_dir(x, y, dx, dy, dist) {
                    count += 1;
                }
            }
        }
        count
    }

    /// Advance one generation into `next`, then swap buffers so `self` holds
    /// the new state. Returns whether any seat changed.
    fn step(&mut self, next: &mut Self, dist: usize, min_taken: i32) -> bool {
        let mut changed = false;
        for y in 0..self.h {
            for x in 0..self.w {
                let new = match self.get(x, y) {
                    // Empty seat with no visible neighbours becomes taken.
                    Cell::Empty if self.count(x, y, dist) == 0 => {
                        changed = true;
                        Cell::Taken
                    }
                    // Crowded taken seat is vacated.
                    Cell::Taken if self.count(x, y, dist) >= min_taken => {
                        changed = true;
                        Cell::Empty
                    }
                    cell => cell,
                };
                next.set(x, y, new);
            }
        }

        std::mem::swap(self, next);
        changed
    }

    /// Total number of taken seats.
    fn total(&self) -> i32 {
        self.cells.iter().filter(|&&c| c == Cell::Taken).count() as i32
    }

    /// Iterate until a fixed point and return the final taken-seat count.
    fn simulate_until_stable(&mut self, dist: usize, min_taken: i32) -> i32 {
        let mut next = self.clone();
        loop {
            let changed = self.step(&mut next, dist, min_taken);
            if !changed {
                return self.total();
            }
        }
    }
}

// Part 1: adjacent seats only (dist = 1), vacate at >= 4 neighbours.
fn part1(input: &SeatingArea) {
    let mut sa = input.clone();
    println!("{}", sa.simulate_until_stable(1, 4));
}

// Part 2: line-of-sight seats (dist = 0), vacate at >= 5 neighbours.
fn part2(input: &SeatingArea) {
    let mut sa = input.clone();
    println!("{}", sa.simulate_until_stable(0, 5));
}

fn main() {
    let input = fs::read_to_string("input").unwrap();
    let sa = SeatingArea::from_str(&input);
    part1(&sa);
    part2(&sa);
}
fn main() { //println!("cargo:rustc-link-search=native=/opt/intel/compilers_and_libraries_2017/linux/mpi/intel64/lib/release_mt"); println!("cargo:rustc-link-search=native=/opt/intel/compilers_and_libraries_2017.1.132/linux/mpi/intel64/lib"); }
#![warn(rust_2018_idioms)] #![warn(clippy::all)] #![feature(portable_simd)] #![feature(test)] //#![feature(core_intrinsics)] //#![feature(vec_into_raw_parts)] mod rayon_worker; //mod beggar_pool; mod sudoku; mod helpers; //mod worker; //mod pool; use std::time::Instant; use log::{error, info, LevelFilter}; use sudoku::Sudoku; use helpers::{ IntoError, Void, Res }; fn main() -> Void { let args: Vec<String> = std::env::args().collect(); rayon::ThreadPoolBuilder::new().num_threads(32).build_global().unwrap(); // Set the log level. //simple_logger::init().unwrap(); //log::set_max_level(LevelFilter::Info); let sudoku = parse_sudoku_from_args(&args)?; println!("Entered ...\n\n{}", sudoku); let start = Instant::now(); let (done_sudoku, total_ops) = rayon_worker::solve(sudoku); let elapsed = start.elapsed().as_micros(); // Print! println!("Done!\n\n{}", done_sudoku); println!("Finished in {} μs using {} operations.", elapsed, total_ops); Ok(()) } fn parse_sudoku_from_args(args: &[String]) -> Res<Sudoku> { let sudoku_text = if args.len() == 2 { let sudoku_file = args[1].to_owned(); let sudoku_file_data = std::fs::read(sudoku_file)?; std::str::from_utf8(&sudoku_file_data)?.to_owned() } else { let e = "A sudoku file must be passed in."; error!("{}", e); return e.into_error(); }; // Parse original sudoku. Ok(Sudoku::from_str(&sudoku_text)) } #[cfg(test)] mod tests { extern crate test; use super::*; use test::Bencher; #[bench] fn bench_hard_solve(b: &mut Bencher) -> Void { b.iter(|| { let sudoku = parse_sudoku_from_args(&["dummy".to_owned(), "hard.txt".to_owned()])?; let _ = rayon_worker::solve(sudoku); Ok(()) as Void }); Ok(()) } }
struct SummaryRanges { summary: Vec<Vec<i32>>, } impl SummaryRanges { /** Initialize your data structure here. */ fn new() -> Self { Self { summary: vec![], } } fn add_num(&mut self, val: i32) { if self.summary.is_empty() { self.summary.push(vec![val, val]); } else { if val < self.summary[0][0] - 1 { self.summary.insert(0, vec![val, val]); } else if val == self.summary[0][0] - 1 { self.summary[0][0] = val; } else if val > self.summary.last().unwrap()[1] + 1 { self.summary.push(vec![val, val]); } else if val == self.summary.last().unwrap()[1] + 1 { let n = self.summary.len(); self.summary[n - 1][1] = val; } else { let mut left = 0; let mut right = self.summary.len(); if right == 1 { return; } while right - left > 1 { let mid = (left + right) / 2; if self.summary[mid][0] <= val && val <= self.summary[mid][1] { return; } if self.summary[mid][0] > val { right = mid; } else { left = mid; } } if self.summary[left][1] < val - 1 { if self.summary[left + 1][0] > val + 1 { self.summary.insert(left + 1, vec![val, val]); } else { self.summary[left + 1][0] = val; } } else if self.summary[left][1] == val - 1 { if self.summary[left + 1][0] > val + 1 { self.summary[left][1] = val; } else { let l = self.summary[left][0]; let r = self.summary[left + 1][1]; self.summary.remove(left); self.summary[left] = vec![l, r]; } } } } } fn get_intervals(&self) -> Vec<Vec<i32>> { self.summary.clone() } }
use crate::RAM_SIZE; pub struct Ram { memory: [u8; RAM_SIZE] } impl Ram { pub fn new() -> Ram { let mut ram = Ram { memory: [0; RAM_SIZE] }; let sprites: [[u8; 5]; 16] = [ [0xF0, 0x90, 0x90, 0x90, 0xF0], [0x20, 0x60, 0x20, 0x20, 0x70], [0xF0, 0x10, 0xF0, 0x80, 0xF0], [0xF0, 0x10, 0xF0, 0x10, 0xF0], [0x90 ,0x90, 0xF0, 0x10, 0x10], [0xF0, 0x80, 0xF0, 0x10, 0xF0], [0xF0, 0x80, 0xF0, 0x90, 0xF0], [0xF0, 0x10, 0x20, 0x40, 0x40], [0xF0, 0x90, 0xF0, 0x90, 0xF0], [0xF0, 0x90, 0xF0, 0x10, 0xF0], [0xF0, 0x90, 0xF0, 0x90, 0x90], [0xE0, 0x90, 0xE0, 0x90, 0xE0], [0xF0, 0x80, 0x80, 0x80, 0xF0], [0xE0, 0x90, 0x90, 0x90, 0xE0], [0xF0, 0x80, 0xF0, 0x80, 0xF0], [0xF0, 0x80, 0xF0, 0x80, 0x80] ]; let mut i = 0; for sprite in sprites.iter() { for ch in sprite { ram.memory[i] = *ch; i += 1; } } ram } pub fn read_byte(&mut self, addr: u16) -> u8 { return self.memory[addr as usize]; } pub fn write_byte(&mut self, addr: u16, value: u8) { self.memory[addr as usize] = value; } }
//! A SOCKS5 proxy server implemented in Rust //! //! Gatekeeperd is an SOCKS5 proxy built on gatekeeper crate. //! use std::io; use std::net::IpAddr; use std::path::PathBuf; use log::*; use structopt::*; use gatekeeper as gk; #[derive(StructOpt, Debug)] #[structopt(name = "gatekeeper")] struct Opt { #[structopt(short = "p", long = "port", default_value = "1080")] /// Set port to listen on port: u16, #[structopt(short = "i", long = "ip", default_value = "0.0.0.0")] /// Set ipaddress to listen on ipaddr: IpAddr, #[structopt(short = "r", long = "rule")] /// Set path to connection rule file (format: yaml) rulefile: Option<PathBuf>, } fn set_handler(signals: &[i32], handler: impl Fn(i32) + Send + 'static) -> io::Result<()> { use signal_hook::*; let signals = iterator::Signals::new(signals)?; std::thread::spawn(move || signals.forever().for_each(handler)); Ok(()) } fn main() { use signal_hook::*; pretty_env_logger::init_timed(); println!("gatekeeperd"); let opt = Opt::from_args(); debug!("option: {:?}", opt); let config = match opt.rulefile { Some(ref path) => gk::ServerConfig::with_file(opt.ipaddr, opt.port, path), None => Ok(gk::ServerConfig::new( opt.ipaddr, opt.port, gk::ConnectRule::any(), )), } .expect("server config"); let (mut server, tx) = gk::server::Server::new(config); set_handler(&[SIGTERM, SIGINT, SIGQUIT, SIGCHLD], move |_| { tx.send(gk::ServerCommand::Terminate).ok(); }) .expect("setting ctrl-c handler"); if let Err(err) = server.serve() { error!("server error: {:?}", err); } }
struct Point { x : i32, y : i32, } fn main() { let x = 2; //Completeness check by match match x { 1 | 2 => println!("1"), 3 => println!("3"), //something else _ => println!("x is not 1,2,3"), } let number = match x { 1 => "one", 2 => "two", 5 => "five", _ => "something else.", }; println!("number:{:?}",number); //destructuring let origin = Point {x : 3,y : 3}; match origin { Point{x,y} => println!("({},{})",x,y), Point{x,..} => println!("x:{}",x), Point{y,..} => println!("y:{}",y), } //reference let x = 5; let mut y = 5; match x { //ref keyword : create an reference used in pattern ref r => println!("{}",r), } match y { ref mut rm => println!("{}",rm), } //pattern matching between ... //bind value to name by @ let z = 3; match z { e @ 1 ... 2 => println!("value one to two:{}",e), e @ 3 ... 4 => println!("value three to four:{}",e), _ => println!("something else"), } //guard let x = 3; let y = false; match x { 1 | 2 if y => println!("x is one or two"), 3 | 4 if y => println!("x is three or four"), _ => println!("something else"), } }
use std::fs::read_to_string; use std::io::{Error,ErrorKind}; use crate::parser::parse_program; fn limit(x: &str, n: usize) -> &str { &x[..x.len().min(n)] } fn to_io_error(e: nom::Err<(&str,nom::error::ErrorKind)>) -> Error { let string = match e { nom::Err::Incomplete(needed) => format!("incomplete {:?}", needed), nom::Err::Error((s,k)) | nom::Err::Failure((s,k)) => format!("error {:?} at {}", k, limit(s,100)) }; Error::new(ErrorKind::InvalidData,string) } pub fn compile(path: &str) -> Result<(),Error> { let text = read_to_string(path)?; let program = parse_program(&text).map_err(to_io_error)?; println!("{:?}", program); Ok(()) }
// C-ABI wrappers for the WLC (worm-like chain) isotensional-ensemble
// thermodynamics functions in the parent module. Each wrapper simply
// forwards its scalar arguments by reference to the `super::` function of
// the same name. Symbol names are part of the exported ABI and must not
// change — including the "persistance" spelling (sic).

// Expected end-to-end length at a given force and temperature.
#[no_mangle]
pub extern fn physics_single_chain_wlc_thermodynamics_isotensional_end_to_end_length(number_of_links: u8, link_length: f64, persistance_length: f64, force: f64, temperature: f64) -> f64
{
    super::end_to_end_length(&number_of_links, &link_length, &persistance_length, &force, &temperature)
}

// Expected end-to-end length per link.
#[no_mangle]
pub extern fn physics_single_chain_wlc_thermodynamics_isotensional_end_to_end_length_per_link(number_of_links: u8, link_length: f64, persistance_length: f64, force: f64, temperature: f64) -> f64
{
    super::end_to_end_length_per_link(&number_of_links, &link_length, &persistance_length, &force, &temperature)
}

// Nondimensional end-to-end length as a function of nondimensional force.
#[no_mangle]
pub extern fn physics_single_chain_wlc_thermodynamics_isotensional_nondimensional_end_to_end_length(number_of_links: u8, nondimensional_persistance_length: f64, nondimensional_force: f64) -> f64
{
    super::nondimensional_end_to_end_length(&number_of_links, &nondimensional_persistance_length, &nondimensional_force)
}

// Nondimensional end-to-end length per link.
#[no_mangle]
pub extern fn physics_single_chain_wlc_thermodynamics_isotensional_nondimensional_end_to_end_length_per_link(number_of_links: u8, nondimensional_persistance_length: f64, nondimensional_force: f64) -> f64
{
    super::nondimensional_end_to_end_length_per_link(&number_of_links, &nondimensional_persistance_length, &nondimensional_force)
}

// Gibbs free energy of the chain.
#[no_mangle]
pub extern fn physics_single_chain_wlc_thermodynamics_isotensional_gibbs_free_energy(number_of_links: u8, link_length: f64, hinge_mass: f64, persistance_length: f64, force: f64, temperature: f64) -> f64
{
    super::gibbs_free_energy(&number_of_links, &link_length, &hinge_mass, &persistance_length, &force, &temperature)
}

// Gibbs free energy per link.
#[no_mangle]
pub extern fn physics_single_chain_wlc_thermodynamics_isotensional_gibbs_free_energy_per_link(number_of_links: u8, link_length: f64, hinge_mass: f64, persistance_length: f64, force: f64, temperature: f64) -> f64
{
    super::gibbs_free_energy_per_link(&number_of_links, &link_length, &hinge_mass, &persistance_length, &force, &temperature)
}

// Gibbs free energy relative to the zero-force reference state.
#[no_mangle]
pub extern fn physics_single_chain_wlc_thermodynamics_isotensional_relative_gibbs_free_energy(number_of_links: u8, link_length: f64, persistance_length: f64, force: f64, temperature: f64) -> f64
{
    super::relative_gibbs_free_energy(&number_of_links, &link_length, &persistance_length, &force, &temperature)
}

// Relative Gibbs free energy per link.
#[no_mangle]
pub extern fn physics_single_chain_wlc_thermodynamics_isotensional_relative_gibbs_free_energy_per_link(number_of_links: u8, link_length: f64, persistance_length: f64, force: f64, temperature: f64) -> f64
{
    super::relative_gibbs_free_energy_per_link(&number_of_links, &link_length, &persistance_length, &force, &temperature)
}

// Nondimensional Gibbs free energy.
#[no_mangle]
pub extern fn physics_single_chain_wlc_thermodynamics_isotensional_nondimensional_gibbs_free_energy(number_of_links: u8, link_length: f64, hinge_mass: f64, nondimensional_persistance_length: f64, nondimensional_force: f64, temperature: f64) -> f64
{
    super::nondimensional_gibbs_free_energy(&number_of_links, &link_length, &hinge_mass, &nondimensional_persistance_length, &nondimensional_force, &temperature)
}

// Nondimensional Gibbs free energy per link.
#[no_mangle]
pub extern fn physics_single_chain_wlc_thermodynamics_isotensional_nondimensional_gibbs_free_energy_per_link(number_of_links: u8, link_length: f64, hinge_mass: f64, nondimensional_persistance_length: f64, nondimensional_force: f64, temperature: f64) -> f64
{
    super::nondimensional_gibbs_free_energy_per_link(&number_of_links, &link_length, &hinge_mass, &nondimensional_persistance_length, &nondimensional_force, &temperature)
}

// Nondimensional relative Gibbs free energy.
#[no_mangle]
pub extern fn physics_single_chain_wlc_thermodynamics_isotensional_nondimensional_relative_gibbs_free_energy(number_of_links: u8, nondimensional_persistance_length: f64, nondimensional_force: f64) -> f64
{
    super::nondimensional_relative_gibbs_free_energy(&number_of_links, &nondimensional_persistance_length, &nondimensional_force)
}

// Nondimensional relative Gibbs free energy per link.
#[no_mangle]
pub extern fn physics_single_chain_wlc_thermodynamics_isotensional_nondimensional_relative_gibbs_free_energy_per_link(number_of_links: u8, nondimensional_persistance_length: f64, nondimensional_force: f64) -> f64
{
    super::nondimensional_relative_gibbs_free_energy_per_link(&number_of_links, &nondimensional_persistance_length, &nondimensional_force)
}
extern crate macbuild_build; fn main() { macbuild_build::go("src/main.rs"); }
use crate::ast; use crate::{Parse, Peek, Spanned, ToTokens}; /// The unit literal `()`. /// /// # Examples /// /// ```rust /// use rune::{parse_all, ast}; /// /// parse_all::<ast::LitUnit>("()").unwrap(); /// ``` #[derive(Debug, Clone, ToTokens, Parse, Spanned)] pub struct LitUnit { /// The open parenthesis. pub open: ast::OpenParen, /// The close parenthesis. pub close: ast::CloseParen, } impl Peek for LitUnit { fn peek(t1: Option<ast::Token>, t2: Option<ast::Token>) -> bool { matches! { (peek!(t1).kind, peek!(t2).kind), ( ast::Kind::Open(ast::Delimiter::Parenthesis), ast::Kind::Close(ast::Delimiter::Parenthesis), ) } } }
use glam::Vec3A;

use crate::{ray::Ray, util::random_within_unit_disc};

/// A positionable thin-lens camera that generates primary rays for rendering.
///
/// The viewport is placed on the focus plane (`focus_distance` along the view
/// direction), so rays jittered across the lens all converge there — points on
/// that plane render sharp while everything else gets defocus blur.
pub struct Camera {
    // Camera (lens) position in world space.
    pos: Vec3A,
    // Lower-left corner of the viewport rectangle on the focus plane.
    view_origin: Vec3A,
    // Orthonormal camera basis: "up" and "right" in world space.
    view_up: Vec3A,
    view_right: Vec3A,
    view_x: Vec3A, // view_right, scaled by focus distance and viewport width
    view_y: Vec3A, // view_up, scaled by focus distance and viewport height
    // Radius of the lens disc used for defocus blur; 0 gives a pinhole camera.
    lens_radius: f32,
}

/// Parameters for constructing a [`Camera`].
pub struct Config {
    // Camera position.
    pub pos: Vec3A,
    // Point the camera looks at.
    pub target: Vec3A,
    // World-space "up" hint used to orient the camera basis.
    pub vup: Vec3A,
    // Vertical field of view. NOTE(review): passed straight to `tan()`, so it is
    // assumed to be in radians — TODO confirm at call sites.
    pub vfov: f32,
    // Width / height of the viewport.
    pub aspect: f32,
    pub lens_radius: f32, // half the aperture, which is a diameter
    // Distance from the camera to the plane of perfect focus.
    pub focus_distance: f32,
}

impl Camera {
    /// Builds a camera from `config`, deriving an orthonormal basis from the
    /// look direction and `vup`, and precomputing the focus-plane viewport.
    pub fn new(config: Config) -> Camera {
        // Viewport height at unit distance, from the vertical field of view.
        let view_height = 2.0 * (config.vfov / 2.0).tan();
        let view_width = view_height * config.aspect;
        let lookdir = (config.target - config.pos).normalize();
        // Right-handed basis: right = look × up-hint, then a true "up"
        // orthogonal to both.
        let view_right = lookdir.cross(config.vup).normalize();
        let view_up = view_right.cross(lookdir);
        let pos = config.pos;
        // Scale the basis vectors so the viewport lies on the focus plane.
        let view_x = view_right * view_width * config.focus_distance;
        let view_y = view_up * view_height * config.focus_distance;
        // Lower-left corner of the viewport, centred on the view axis.
        let view_origin = pos - view_x / 2.0 - view_y / 2.0 + lookdir * config.focus_distance;
        Camera {
            pos,
            view_origin,
            view_x,
            view_y,
            view_up,
            view_right,
            lens_radius: config.lens_radius,
        }
    }

    /// Returns the ray through viewport coordinates `(u, v)` (fractions of the
    /// viewport along `view_x`/`view_y`), with the origin jittered across the
    /// lens disc for defocus blur. The returned direction is not normalized.
    pub fn ray(&self, u: f32, v: f32) -> Ray {
        // A random offset of the ray origin provides for defocus blurring.
        let offset_factor = random_within_unit_disc() * self.lens_radius;
        let offset = self.view_right * offset_factor.x + self.view_up * offset_factor.y;
        let view_origin = self.pos + offset;
        let view_pos = self.view_origin + self.view_x * u + self.view_y * v;
        Ray {
            origin: view_origin,
            dir: view_pos - view_origin,
        }
    }
}
use cpal::traits::{ DeviceTrait, StreamTrait }; use cpal::{ StreamError, InputCallbackInfo }; use std::sync::mpsc::*; //use portaudio; mod provider; fn main() -> Result<(), cpal::PlayStreamError> { let provider = provider::Provider::new(); let host = provider.get_host(); let device = provider.get_device(); let sup_config = provider.get_sup_config(); let config = sup_config.config(); //let config = cpal::StreamConfig { // channels: 2, // buffer_size: cpal::BufferSize::Default, // sample_rate: cpal::SampleRate(44100) //}; println!("Host: {}", host.id().name()); println!("Output device: {}", device.name().unwrap()); println!("Config > Channels: {}", config.channels); println!("Config > Buffer Size: {:?}", config.buffer_size); println!("Config > Sample Rate: {:?}", config.sample_rate); let (zender, receiver) = channel(); static mut sender: Option<Sender<f32>> = None; // Vector<f32> unsafe { sender = Some(zender); } fn data_cb(data: &[f32], _: &InputCallbackInfo) { //let d = data.to_owned(); for &sample in data { unsafe { sender.as_ref().unwrap().send(sample).ok(); } } /*unsafe { println!("Length: {}", d.len()); sender.as_ref().unwrap().send(d).expect("I dunno what to expect anymore."); }*/ } fn err_cb(err: StreamError) { println!("Error: {}", err); return; } let stream = device.build_input_stream( &config, data_cb, err_cb ); match stream { Ok(_) => { stream.unwrap().play()?; println!("Building stream..."); } Err(_) => { println!("Error: Stream unable to start!"); } } let data = receiver.try_recv().unwrap(); println!("{:?}", data); return Ok(()); }
#![feature(async_closure)] #![feature(result_map_or_else)] #[macro_use] extern crate if_chain; #[macro_use] extern crate lazy_static; #[macro_use] extern crate sc; #[macro_use] extern crate serde; #[macro_use] extern crate slog; #[macro_use] extern crate tokio; use std::collections::{HashMap, HashSet}; use std::ffi::{OsStr, OsString}; use std::fs::Metadata; use std::io::Read; use std::os::unix::ffi::OsStrExt; use std::path::{Path, PathBuf}; use std::sync::Arc; use async_trait::async_trait; use clap::arg_enum; use exit::{Exit, ExitDisplay}; use filetime::FileTime; use futures::stream::{self, Stream, StreamExt, TryStreamExt}; use rand::{thread_rng, Rng}; use regex::bytes::Regex; // NOTE: &[u8] Regex, not &str use serde::de::{self, Deserialize}; use slog::{Drain, LevelFilter}; use slog_async; use slog_scope_futures::FutureExt; use slog_stdlog; use slog_term; use snafu::{OptionExt, ResultExt, Snafu}; use structopt::StructOpt; use tokio::{fs, io, process::Command, runtime::Builder, sync::mpsc, task}; // NOTE list: // - If someone ever wants to implement Windows support, they'll likely need to use the // remove_dir_all crate due to https://github.com/rust-lang/rust/issues/29497 // TODO Chuck the tempdir in the ReifiedConfig as well // TODO Check for presence of a config file in the 'source' directory, prefer it above the XDG // config file // TODO Configure the ffmpeg CLI options, as well as the output format // TODO use rio for async IO once io_uring supports lstat, readdir, and rename; do copy via // sendfile? // TODO Handle symlinks in the input? // TODO smart progress reporting, rather than log output // - (jobs completed)/(running tally of jobs queued) // - change colour of the "total jobs queued" once the scan is done // TODO make the modified time of transcoded files equal to that of their source; this means that // time-based synchronisation to the target mobile device will be incremental even if the // output library has been destroyed and recreated. 
// It also means that the output is truly
// idempotent.
// TODO an option for copying additional filetypes (for lyrics, cover art, etc.)?
// - Maybe take a list of name-matching regex instead of just an extension?

// Name of the scratch directory created inside the output tree.
const TMP_DIR_NAME: &str = ".harmonise_tmp";
// How many unique-name attempts before giving up on creating a temp file.
const TMP_FILE_RETRIES: usize = 8;
// Bound on the harmonise-job channel between scanner and worker tasks.
const QUEUE_BUFFER: usize = 1024;

// Command-line options {{{
#[derive(Debug, StructOpt)]
#[structopt(
    name = "Harmonise",
    about = "Utility to prepare a music library for use on an Android phone"
)]
struct Opt {
    /// Log level.
    #[structopt(short, long, default_value = "info")]
    log_level: LogLevel,

    /// The source directory to transcode/copy files *from*.
    ///
    /// Required if not specified in the configuration file. Overrides any specified in the
    /// configuration file.
    #[structopt(short, long, parse(from_os_str))]
    source: Option<PathBuf>,

    /// The output directory to transcode/copy files *to*.
    ///
    /// Required if not specified in the configuration file. Overrides any specified in the
    /// configuration file.
    #[structopt(short, long, parse(from_os_str))]
    output: Option<PathBuf>,

    /// The config file to use, instead of the default.
    #[structopt(short, long, parse(from_os_str))]
    config_file: Option<PathBuf>,
}

arg_enum! {
    #[derive(Clone, Copy, Debug)]
    enum LogLevel { Debug, Info, Warning, Error, Critical }
}

// Bridge our CLI enum to slog's level type.
impl Into<slog::Level> for LogLevel {
    fn into(self) -> slog::Level {
        match self {
            LogLevel::Debug => slog::Level::Debug,
            LogLevel::Info => slog::Level::Info,
            LogLevel::Warning => slog::Level::Warning,
            LogLevel::Error => slog::Level::Error,
            LogLevel::Critical => slog::Level::Critical,
        }
    }
}
// }}}

// Configuration {{{
// Fully-resolved configuration: all optional fields validated and filled in.
#[derive(Debug)]
struct ReifiedConfig {
    source: PathBuf,
    output: PathBuf,
    // Union of lossless + lossy extensions; used to recognise music files.
    music_filetypes: HashSet<OsString>,
    lossy_filetypes: HashSet<OsString>,
    nice: ConfigNice,
    ionice: ConfigIoNice,
    replace_pattern: Regex,
}

impl ReifiedConfig {
    // Validates a parsed `Config`, requiring `source` and `output` to be set.
    fn from(c: Config) -> Result<Self, Error> {
        let music_filetypes: HashSet<OsString> = c
            .lossless_filetypes
            .union(&c.lossy_filetypes)
            .map(|s| s.clone())
            .collect();
        Ok(Self {
            source: c.source.context(ConfigNoSource)?,
            output: c.output.context(ConfigNoOutput)?,
            music_filetypes,
            lossy_filetypes: c.lossy_filetypes,
            nice: c.nice,
            ionice: c.ionice,
            replace_pattern: c.replace_pattern,
        })
    }
}

// Raw configuration as deserialized from the TOML config file.
#[derive(Debug, Deserialize)]
#[serde(default)]
struct Config {
    // TODO Allow ~ paths in the config file? Support other shell expansions?
    // Doing this optimally would require https://github.com/serde-rs/serde/issues/723 to be
    // resolved
    source: Option<PathBuf>,
    output: Option<PathBuf>,
    lossless_filetypes: HashSet<OsString>,
    lossy_filetypes: HashSet<OsString>,
    nice: ConfigNice,
    ionice: ConfigIoNice,
    #[serde(with = "serde_regex")]
    replace_pattern: Regex,
}

// CPU niceness settings for the ffmpeg subprocess.
#[derive(Debug, Deserialize)]
struct ConfigNice {
    enable: bool,
    #[serde(deserialize_with = "nice_level_in_range")]
    level: i8,
}

// IO-priority (ionice) settings applied to this process.
#[derive(Debug, Deserialize)]
struct ConfigIoNice {
    enable: bool,
    level: IoPrioValue,
}

impl Default for Config {
    fn default() -> Self {
        Self {
            source: None,
            output: None,
            lossless_filetypes: LOSSLESS_FILETYPES.clone(),
            lossy_filetypes: LOSSY_FILETYPES.clone(),
            nice: ConfigNice {
                enable: true,
                level: 19,
            },
            ionice: ConfigIoNice {
                enable: true,
                level: IoPrioValue::BestEffort(7),
            },
            replace_pattern: ANDROID_UNSAFE_PATTERN.clone(),
        }
    }
}

// Serde validator: rejects nice levels outside [-20, 19].
fn nice_level_in_range<'de, D>(d: D) -> Result<i8, D::Error>
where
    D: de::Deserializer<'de>,
{
    let val: i8 = Deserialize::deserialize(d)?;
    if val < NICE_MIN || val > NICE_MAX {
        Err(de::Error::invalid_value(
            de::Unexpected::Signed(val as i64),
            &"a number between -20 and 19, inclusive",
        ))
    } else {
        Ok(val)
    }
}

const NICE_MAX: i8 = 19;
const NICE_MIN: i8 = -20;
// }}}

// Can be overridden by configuration file
lazy_static! {
    static ref LOSSLESS_FILETYPES: HashSet<OsString> = vec!["ape", "flac", "wav"]
        .into_iter()
        .map(OsString::from)
        .collect();
    static ref LOSSY_FILETYPES: HashSet<OsString> = vec!["mp3", "m4a", "vob", "wma", "ogg"]
        .into_iter()
        .map(OsString::from)
        .collect();
    // On at least some Android sd cards (exfat), the following characters are invalid in
    // filenames: ?:"*|\<>
    static ref ANDROID_UNSAFE_PATTERN: Regex = Regex::new(r#"[?:"*|\\<>]"#)
        .expect("Build statically defined regex");
}

// Process entry point: parse options, build config, lower IO priority, then
// run the async `main_task` on a multithreaded Tokio runtime.
fn main() -> Exit<Error> {
    let opt = Opt::from_args();
    let _guard = setup_logging(&opt);
    let log = slog_scope::logger().new(o!("scope" => "main"));
    let config = build_config(&opt)?;
    let config = ReifiedConfig::from(config)?;
    // Change process IO priority before we create threads
    if config.ionice.enable {
        lower_own_io_priority(&config)?;
    };
    // NOTE: We effectively split the usage of these threads into three:
    // - One third are used for running ffmpeg instances
    // - One third are used for handling disk IO
    // - One third (in practice, less than) are used for handling all other
    //   computation/orchestration
    let threads = num_cpus::get() * 3;
    let mut rt = Builder::new()
        .threaded_scheduler()
        .core_threads(threads)
        .enable_io()
        .build()
        .context(CreateRuntime)?;
    // Arc-wrapped as they're referenced from within multiple Futures
    let config = Arc::new(config);
    let tmpdir = Arc::new(TempDir {
        path: PathBuf::from(&config.output).join(TMP_DIR_NAME),
    });
    rt.block_on(main_task(config, tmpdir).with_logger(log.new(o!("scope" => "main_task"))))?;
    Exit::Ok
}

// Orchestrates the two halves of the pipeline: a scanner that queues jobs and
// a harmoniser that consumes them; waits for both to complete.
async fn main_task(config: Arc<ReifiedConfig>, tmpdir: Arc<TempDir>) -> Result<(), Error> {
    let log = slog_scope::logger();
    // Ensure temporary directory exists and is empty
    ensure_tmpdir(tmpdir.as_ref()).await?;
    // 1. Create a job-queueing channel (a stream?)
    let (tx, rx) = mpsc::channel(QUEUE_BUFFER);
    // 2. Spawn a second task that is responsible for scanning the filesystem, queuing jobs, and
    //    eventually pushing the stream-end type
    info!(log, "Spawning filesytem scanner");
    let scanner = tokio::spawn(
        scan_source_files(config.clone(), tmpdir.clone(), tx)
            .with_logger(log.new(o!("scope" => "scan"))),
    );
    // 3. Spawn a task that consumes this stream using a buffer_unordered consumer, buffered to the
    //    # of hardware threads, and that quits when the stream pops an explicit end type
    info!(log, "Spawning library harmoniser");
    let harmoniser = tokio::spawn(
        harmonise_music(config, tmpdir, rx).with_logger(log.new(o!("scope" => "harmonise"))),
    );
    // 4. Block on completion of both tasks (`join!`?)
    info!(log, "Waiting for completion...");
    let (scanner, harmoniser) = try_join!(scanner, harmoniser).context(MainJoin)?;
    scanner?;
    harmoniser?;
    info!(log, "All done!");
    Ok(())
}

// Installs the global slog logger (compact format to stderr, async drain) and
// the std::log bridge; the returned guard keeps the global logger alive.
fn setup_logging(opt: &Opt) -> slog_scope::GlobalLoggerGuard {
    let log_level: slog::Level = opt.log_level.into();
    // Create logger
    let decorator = slog_term::PlainDecorator::new(std::io::stderr());
    let drain = slog_term::CompactFormat::new(decorator).build().fuse();
    let drain = LevelFilter::new(drain, log_level).fuse();
    let drain = slog_async::Async::new(drain).build().fuse();
    let log = slog::Logger::root(drain, o!("scope" => "root"));
    debug!(
        &log,
        "Logging setup, command-line options received: {:?}", opt
    );
    // std::log interop
    let guard = slog_scope::set_global_logger(log);
    slog_stdlog::init().expect("Initialise slog/std::log interop");
    guard
}

// Builds the effective configuration, merging the config file with CLI options.
fn build_config(opt: &Opt) -> Result<Config, Error> {
    let log = slog_scope::logger();
    // Config preferences order:
    // 1. Read config file passed at CLI
    // 2. Read config file from XDG directory
    // 3. Use defaults
    let mut config: Config = opt
        .config_file
        .clone()
        .map(|path| load_config(&path))
        .or_else(|| {
            dirs::config_dir().map(|path| {
                let path = path.join("harmonise").join("config.toml");
                load_config(&path)
            })
        })
        .unwrap_or_else(|| Ok(Config::default()))?;
    // Prefer source/output given at CLI to those in a configuration file
    config.source = opt.source.clone().or(config.source);
    config.output = opt.output.clone().or(config.output);
    debug!(log, "Configuration parsed and merged: {:?}", config);
    Ok(config)
}

// Reads and TOML-parses the configuration file at `path`.
fn load_config(path: &Path) -> Result<Config, Error> {
    let mut config_file_buf = vec![];
    std::fs::File::open(path)
        .context(ConfigFileOpen { path })?
        .read_to_end(&mut config_file_buf)
        .context(ConfigFileRead { path })?;
    Ok(toml::from_slice(&config_file_buf).context(ConfigParse { path })?)
}

// Guarantees a fresh, empty temporary directory, removing any stale leftover.
async fn ensure_tmpdir<P>(tmpdir: P) -> Result<(), Error>
where
    P: AsRef<Path>,
{
    let log = slog_scope::logger();
    let tmpdir = tmpdir.as_ref();
    if tmpdir.async_exists().await.context(TmpDirExist {
        tmpdir: tmpdir.clone(),
    })? {
        warn!(
            log,
            "Found left-over temporary directory at `{}`, cleaning it up...",
            tmpdir.display()
        );
        fs::remove_dir_all(&tmpdir).await.context(TmpDirCleanup {
            tmpdir: tmpdir.clone(),
        })?;
    };
    fs::create_dir_all(&tmpdir)
        .await
        .context(TmpDirCreate { tmpdir })
}

// Drives the directory-scan stream to completion with bounded concurrency.
async fn scan_source_files(
    config: Arc<ReifiedConfig>,
    tmpdir: Arc<TempDir>,
    harmonise_queue: mpsc::Sender<HarmoniseJob>,
) -> Result<(), Error> {
    // TODO can I use a bufferunordered instead of try_for_each_concurrent here...?
    let jobs = num_cpus::get();
    scan_tree(config, tmpdir, harmonise_queue)
        .try_for_each_concurrent(jobs, |_| async { Ok(()) })
        .await
}

// Unfolds a stream that walks the source/output trees depth-first, yielding
// one scan-and-compare result per directory pair; terminates (yields `None`)
// when the work stack is empty.
fn scan_tree(
    config: Arc<ReifiedConfig>,
    tmpdir: Arc<TempDir>,
    harmonise_queue: mpsc::Sender<HarmoniseJob>,
) -> impl Stream<Item = Result<(), Error>> + Send + 'static {
    let log = slog_scope::logger();
    // Initialise the stack of directories to scan/compare, and unfold a stream of futures to
    // process the scan/compare jobs
    let (source_root, output_root) = (config.source.clone(), config.output.clone());
    stream::unfold(vec![(source_root, output_root)], move |mut to_visit| {
        let log = log.clone();
        let config = config.clone();
        let tmpdir = tmpdir.clone();
        let harmonise_queue = harmonise_queue.clone();
        async move {
            // Popping `None` ends the stream: nothing left to visit.
            let (source, output) = to_visit.pop()?;
            let source_str = source.to_string_lossy().into_owned();
            let output_str = output.to_string_lossy().into_owned();
            let visit_res = scan_one(
                config,
                tmpdir,
                source,
                output,
                harmonise_queue,
                &mut to_visit,
            )
            .with_logger(log.new(o!(
                "source" => source_str,
                "output" => output_str
            )))
            .await;
            Some((visit_res, to_visit))
        }
    })
}

// Scans one source/output directory pair: queues harmonise jobs for new or
// stale files, pushes subdirectories onto `to_visit`, and removes orphans.
async fn scan_one(
    config: Arc<ReifiedConfig>,
    tmpdir: Arc<TempDir>,
    source: PathBuf,
    output: PathBuf,
    harmonise_queue: mpsc::Sender<HarmoniseJob>,
    to_visit: &mut Vec<ScanJob>,
) -> Result<(), Error> {
    let log = slog_scope::logger();
    debug!(log, "Scanning and comparing source vs output",);
    // Scan both directories and get their files' metadata
    let (source_map, mut output_map) =
        try_join!(scan_map_for_dir(&source), scan_map_for_dir(&output))?;
    // Sanitise and filter source file names to match outputs against, determine type of harmonise
    // job per source
    let match_job_map = scan_map_to_match_job_map(config, source_map);
    // Dispatch harmonise jobs based on state of source and output files
    dispatch_harmonise_jobs(
        &output,
        harmonise_queue,
        match_job_map,
        to_visit,
        &mut output_map,
    )
    .await?;
    // Remove orphaned output files
    remove_orphaned_outputs(tmpdir, output_map).await
}
// Reads one directory and returns a map of entry name -> (path, metadata).
async fn scan_map_for_dir<P>(dir: P) -> Result<ScanMap, Error>
where
    P: AsRef<Path>,
{
    let entries = fs::read_dir(dir).await.context(ScanDir)?;
    entries
        .then(async move |res| {
            let entry = res.context(ScanDirEntry)?;
            Result::<(OsString, (PathBuf, Metadata)), Error>::Ok((
                entry.file_name(),
                (
                    entry.path().into(),
                    entry.metadata().await.context(ScanDirMetadata)?,
                ),
            ))
        })
        .try_collect()
        .await
}

// Converts a raw source scan into a map keyed by the *expected output name*
// (sanitised, with lossless extensions rewritten to "ogg"), carrying the
// source path/metadata and — for files — the harmonise job type.
fn scan_map_to_match_job_map(config: Arc<ReifiedConfig>, scan_map: ScanMap) -> MatchJobMap {
    scan_map
        .into_iter()
        .map(|(name, (path, metadata))| {
            // Extract extension if file
            match metadata.is_file() {
                true => {
                    let extension = path.extension().map(|ext| ext.to_os_string());
                    (name, (path, metadata, extension))
                }
                false => (name, (path, metadata, None)),
            }
        })
        .filter(|(_name, (_path, metadata, extension))| {
            // Filter so we have directories, and files with valid music extensions
            (metadata.is_dir()
                || match extension {
                    None => false,
                    Some(extension) => config.music_filetypes.contains(extension.as_os_str()),
                })
        })
        .map(|(_name, (path, metadata, extension))| {
            // Determine output names to match against, and job type for files
            let name_path = sanitise_name(config.clone(), &path);
            if metadata.is_file() {
                let ext = extension.expect("Unwrap an extension we know is present");
                // Already-lossy inputs are copied verbatim; lossless inputs get
                // transcoded to ogg.
                let (ext, job_type) = if config.lossy_filetypes.contains(ext.as_os_str()) {
                    (ext, HarmoniseJobType::Copy)
                } else {
                    (OsString::from("ogg"), HarmoniseJobType::Transcode)
                };
                let name = name_path
                    .with_extension(ext)
                    .file_name()
                    .expect("Unwrap a filename we know is present")
                    .to_os_string();
                (name, (path, metadata, Some(job_type)))
            } else {
                let name = name_path
                    .file_name()
                    .expect("Unwrap a filename we know is present")
                    .to_os_string();
                (name, (path, metadata, None))
            }
        })
        .collect()
}

// Rewrites a path's file name, replacing Android-unsafe characters with `_`.
fn sanitise_name(config: Arc<ReifiedConfig>, source_path: &Path) -> PathBuf {
    let source_name = source_path
        .file_name()
        .expect("Get filename for known-valid path");
    // Replace unsafe characters with surrogates
    // TODO during scanning perform collision detection on the sanitised names?
    let sanitised_name = config
        .replace_pattern
        .replace_all(source_name.as_bytes(), "_".as_bytes());
    source_path.with_file_name(OsStr::from_bytes(sanitised_name.as_ref()))
}

// Compares each matched source entry against the output directory: queues
// harmonise jobs for missing/stale output files, fixes up type mismatches
// (file vs directory), creates output directories, and pushes subdirectories
// onto the scan stack. Entries left in `output_map` afterwards are orphans.
async fn dispatch_harmonise_jobs(
    output: &Path,
    mut harmonise_queue: mpsc::Sender<HarmoniseJob>,
    match_job_map: MatchJobMap,
    to_visit: &mut Vec<ScanJob>,
    output_map: &mut ScanMap,
) -> Result<(), Error> {
    let log = slog_scope::logger();
    for (match_name, (source_path, source_metadata, job_type)) in match_job_map {
        debug!(log, "Checking source path {}", source_path.display());
        let out_path = &output.join(&match_name);
        if source_metadata.is_file() {
            // We use HashMap::remove() here, meaning any remaining output entries after this for
            // loop are orphans, and can be killed
            let harmonise = match output_map.remove(&match_name) {
                // If the path wasn't in the map, then the output does not exist and we must
                // harmonise it
                None => true,
                Some((out_path, out_metadata)) => {
                    if !out_metadata.is_file() {
                        info!(
                            log,
                            "Removing unexpected output directory `{}`",
                            out_path.display()
                        );
                        fs::remove_dir_all(&out_path)
                            .await
                            .context(ScanUnexpectedPath {
                                path: out_path,
                                path_type: "directory",
                            })?;
                        true
                    } else {
                        // Otherwise, compare the modified times
                        let out_time = out_metadata
                            .modified()
                            .context(ScanOutputFileModifiedTime {
                                path: out_path.clone(),
                            })?;
                        let source_time = source_metadata
                            .modified()
                            .context(ScanSourceFileModifiedTime {
                                path: source_path.clone(),
                            })?;
                        // Any difference (either direction) triggers a re-harmonise.
                        source_time != out_time
                    }
                }
            };
            if harmonise {
                debug!(
                    log,
                    "Queueing harmonise job for path pair:\n\t{}\n\t{}",
                    source_path.display(),
                    out_path.display()
                );
                let job = HarmoniseJob {
                    source: source_path.clone(),
                    output: out_path.clone(),
                    job_type: job_type.expect("Unwrap a job type for a file to harmonise"),
                };
                harmonise_queue
                    .send(job)
                    .await
                    .context(HarmoniseQueuePush)?;
            };
        } else {
            if let Some((out_path, out_metadata)) = output_map.remove(&match_name) {
                debug!(log, "Output path exists: {}", out_path.display());
                if !out_metadata.is_dir() {
                    info!(
                        log,
                        "Removing unexpected output file `{}`",
                        out_path.display()
                    );
                    fs::remove_file(&out_path)
                        .await
                        .context(ScanUnexpectedPath {
                            path: out_path,
                            path_type: "file",
                        })?;
                };
            } else {
                debug!(
                    log,
                    "Output path does not exist, creating directory: {}",
                    out_path.display()
                );
                fs::create_dir_all(&out_path)
                    .await
                    .context(ScanOutputDirCreate {
                        path: out_path.clone(),
                    })?;
            };
            // Add to the stack of directories to scan
            to_visit.push((source_path.clone(), out_path.clone()));
        };
    }
    Ok(())
}

// Deletes output entries with no corresponding source, skipping the tmpdir.
async fn remove_orphaned_outputs(
    tmpdir: Arc<TempDir>,
    remaining_output_map: ScanMap,
) -> Result<(), Error> {
    let log = slog_scope::logger();
    for (_name, (out_path, out_metadata)) in &remaining_output_map {
        if out_path == (*tmpdir).as_ref() {
            // Don't touch the tmpdir :o
            continue;
        }
        if out_metadata.is_file() {
            info!(log, "Removing orphaned output file {}", out_path.display());
            fs::remove_file(&out_path).await.context(ScanOrphanPath {
                path: out_path,
                path_type: "file",
            })?;
        } else {
            info!(
                log,
                "Removing orphaned output directory {}",
                out_path.display()
            );
            fs::remove_dir_all(&out_path)
                .await
                .context(ScanOrphanPath {
                    path: out_path,
                    path_type: "directory",
                })?;
        }
    }
    Ok(())
}

// Consumes the harmonise-job queue with bounded concurrency.
async fn harmonise_music(
    config: Arc<ReifiedConfig>,
    tmpdir: Arc<TempDir>,
    queue: mpsc::Receiver<HarmoniseJob>,
) -> Result<(), Error> {
    let log = slog_scope::logger();
    let jobs = num_cpus::get();
    queue
        .then(async move |job| Result::<HarmoniseJob, Error>::Ok(job))
        .try_for_each_concurrent(jobs, |job| {
            harmonise(config.clone(), tmpdir.clone(), job).with_logger(log.clone())
        })
        .await
}

// Performs one harmonise job: transcode (ffmpeg -> ogg/vorbis) or copy into a
// temp file, mirror the source timestamps, then atomically rename into place.
async fn harmonise(
    config: Arc<ReifiedConfig>,
    tmpdir: Arc<TempDir>,
    job: HarmoniseJob,
) -> Result<(), Error> {
    let log = slog_scope::logger();
    debug!(log, "Got harmonise job: {:?}", job);
    // Get path for a temporary output file
    let tmp_path = temp_file_path(tmpdir).await?;
    match &job.job_type {
        HarmoniseJobType::Transcode => {
            debug!(
                log,
                "Transcoding `{}` -> `{}`",
                job.source.display(),
                tmp_path.display()
            );
            // Run ffmpeg to produce the output file
            // TODO detect whether ffmpeg binary is present before attempting to use it, error out
            //      if it is not -- should just be done once at startup really
            // TODO detect whether nice binary is present before attempting to use it, warn if not
            // TODO nice via syscall instead?
            let mut cmd = Command::new("nice");
            if config.nice.enable {
                cmd.args(&[&format!("-n{}", config.nice.level), "--"]);
            };
            let proc = cmd
                .arg("ffmpeg")
                .args(&["-v", "quiet", "-y", "-i"])
                .arg(&job.source)
                .args(&[
                    "-codec:a",
                    "libvorbis",
                    "-qscale:a",
                    "8",
                    "-map_metadata",
                    "0",
                    "-f",
                    "ogg",
                ])
                .arg(&tmp_path)
                .spawn()
                .context(HarmoniseFfmpegSpawn)?;
            let status = proc.await.context(HarmoniseFfmpegWait)?;
            if !status.success() {
                return Err(Error::HarmoniseFfmpegFailed { status, job });
            } else {
                debug!(log, "Transcoded to temporary file {}", tmp_path.display());
            };
        }
        HarmoniseJobType::Copy => {
            debug!(
                log,
                "Copying `{}` -> `{}`",
                job.source.display(),
                tmp_path.display()
            );
            fs::copy(&job.source, &tmp_path)
                .await
                .context(HarmoniseCopy {
                    from: job.source.clone(),
                    to: tmp_path.clone(),
                })?;
        }
    };
    // Ensure the timestamp of the harmonised file matches that of the original
    copy_file_times(&job.source, &tmp_path).await?;
    // Move the output file from the temp path to the final path
    fs::rename(&tmp_path, &job.output).await.map_err(|err| {
        if_chain! {
            if let Some(errno) = err.raw_os_error();
            // errno 18 is EXDEV on Linux: rename across filesystems is impossible,
            // which gets a dedicated, clearer error variant.
            if errno == 18;
            then {
                Error::HarmoniseTmpMoveMount {
                    from: tmp_path,
                    to: job.output.clone(),
                }
            } else {
                Error::HarmoniseTmpMove {
                    from: tmp_path,
                    to: job.output.clone(),
                    source: err,
                }
            }
        }
    })?;
    info!(log, "Harmonised output file {}", job.output.display());
    Ok(())
}

// Ensures the temporary file is unique by retrying up to TMP_FILE_RETRIES times on create_new()
// failure
// TODO replace by using upstream tempfile, and just calling into_path() on the resulting
// Drop-implementing TempFile object?
// Creates a uniquely-named empty file in the temp directory and returns its
// path; retries with fresh random names on name collisions.
async fn temp_file_path(tmpdir: Arc<TempDir>) -> Result<PathBuf, Error> {
    let mut opts = fs::OpenOptions::new();
    // create_new() fails with AlreadyExists on collision, guaranteeing uniqueness.
    opts.write(true).create_new(true);
    for _ in 0..TMP_FILE_RETRIES {
        let random_string: String = thread_rng()
            .sample_iter(&rand::distributions::Alphanumeric)
            .take(32)
            .collect();
        let path = (*tmpdir).as_ref().join(random_string);
        match opts.open(&path).await {
            Err(e) => {
                if e.kind() == io::ErrorKind::AlreadyExists {
                    // Name collision — try again with a new random name.
                    continue;
                } else {
                    return Err(Error::HarmoniseTmpCreate { path, source: e });
                }
            }
            // The file handle is dropped here; only the path is needed.
            Ok(_) => return Ok(path),
        };
    }
    Err(Error::HarmoniseTmpCreateRetries)
}

// A unit of work: produce `output` from `source` by copying or transcoding.
#[derive(Debug)]
struct HarmoniseJob {
    source: PathBuf,
    output: PathBuf,
    job_type: HarmoniseJobType,
}

#[derive(Debug)]
enum HarmoniseJobType {
    Copy,
    Transcode,
}

// (source dir, output dir) pair still to be scanned.
type ScanJob = (PathBuf, PathBuf);
// Directory listing: entry name -> (full path, metadata).
type ScanMap = HashMap<OsString, (PathBuf, Metadata)>;
// Expected output name -> (source path, metadata, job type for files).
type MatchJobMap = HashMap<OsString, (PathBuf, Metadata, Option<HarmoniseJobType>)>;

// Helpers {{{
// Async equivalents of Path::exists()/metadata(), via symlink_metadata.
#[async_trait]
trait AsyncPath {
    async fn async_exists(&self) -> Result<bool, io::Error>;
    async fn async_metadata(&self) -> Result<Metadata, io::Error>;
}

#[async_trait]
impl AsyncPath for Path {
    async fn async_exists(&self) -> Result<bool, io::Error> {
        // NotFound means "does not exist"; other errors are propagated.
        fs::symlink_metadata(self).await.map_or_else(
            |err| match err.kind() {
                io::ErrorKind::NotFound => Ok(false),
                _ => Err(err),
            },
            |_metadata| Ok(true),
        )
    }
    async fn async_metadata(&self) -> Result<Metadata, io::Error> {
        fs::symlink_metadata(self).await
    }
}

// Minimal self-cleaning temporary directory
struct TempDir {
    path: PathBuf,
}

impl Drop for TempDir {
    fn drop(&mut self) {
        // Best-effort cleanup; errors at teardown are deliberately ignored.
        let _ = std::fs::remove_dir_all(&self.path);
    }
}

impl AsRef<Path> for TempDir {
    fn as_ref(&self) -> &Path {
        self.path.as_ref()
    }
}

// Essentially ionices ourself
// TODO move this to its own tiny crate, along with non-io nicing?
#[cfg(all(target_os = "linux", target_arch = "x86_64"))] #[cfg(any(target_pointer_width = "32", target_pointer_width = "64"))] // safe u32 -> usize cast fn lower_own_io_priority(config: &ReifiedConfig) -> Result<(), Error> { let pid = std::process::id(); let prio = (&config.ionice.level).into(); // TODO handle error values? Probably not much point. raw_ioprio_set(IOPRIO_WHO_PROCESS, pid as usize, prio); Ok(()) } #[derive(Debug, Deserialize)] #[serde(rename_all = "kebab-case")] enum IoPrioValue { #[serde(deserialize_with = "ioprio_level_in_range")] RealTime(usize), #[serde(deserialize_with = "ioprio_level_in_range")] BestEffort(usize), Idle, } fn ioprio_level_in_range<'de, D>(d: D) -> Result<usize, D::Error> where D: de::Deserializer<'de>, { let val: usize = Deserialize::deserialize(d)?; if val < IOPRIO_LEVEL_NR { Err(de::Error::invalid_value( de::Unexpected::Unsigned(val as u64), &"a number between 0 and 7, inclusive", )) } else { Ok(val) } } impl From<&IoPrioValue> for usize { fn from(val: &IoPrioValue) -> Self { match val { IoPrioValue::RealTime(level) => ioprio_prio_value(IOPRIO_CLASS_RT, *level), IoPrioValue::BestEffort(level) => ioprio_prio_value(IOPRIO_CLASS_BE, *level), IoPrioValue::Idle => ioprio_prio_value(IOPRIO_CLASS_IDLE, 0), } } } #[inline] #[cfg(all(target_os = "linux", target_arch = "x86_64"))] const fn ioprio_prio_value(class: i32, data: usize) -> usize { ((class as usize) << IOPRIO_CLASS_SHIFT) | data } // fn foprio_realtime_vvalidation<'de, D>(d: D) -> Result<IoPrioValue> const IOPRIO_LEVEL_NR: usize = 8; const IOPRIO_CLASS_SHIFT: u8 = 13; const IOPRIO_CLASS_RT: i32 = 1; const IOPRIO_CLASS_BE: i32 = 2; const IOPRIO_CLASS_IDLE: i32 = 3; const IOPRIO_WHO_PROCESS: i32 = 1; // const IOPRIO_WHO_PGRP: i32 = 2; // const IOPRIO_WHO_USER: i32 = 3; #[cfg(all(target_os = "linux", target_arch = "x86_64"))] fn raw_ioprio_set(which: i32, who: usize, prio: usize) -> usize { unsafe { // NOTE: I'm pretty sure that if this syscall is actually unsafe, that'd mean we 
have a // kernel bug... syscall!(IOPRIO_SET, which, who, prio) as usize } } async fn copy_file_times<P>(from: P, to: P) -> Result<(), Error> where P: AsRef<Path>, { let (from, to) = (from.as_ref().to_owned(), to.as_ref()).to_owned(); let metadata = fs::metadata(&from) .await .context(HarmoniseFileTimesMetadata { path: from.clone() })?; let access = FileTime::from_last_access_time(&metadata); let modified = FileTime::from_last_modification_time(&metadata); set_file_times(to, access, modified) .await .context(HarmoniseFileTimesSet { path: to }) } async fn set_file_times<P>(path: P, access: FileTime, modified: FileTime) -> Result<(), io::Error> where P: AsRef<Path>, { let path = path.as_ref().to_owned(); async_io(move || filetime::set_file_times(path, access, modified)).await } async fn async_io<F, R>(f: F) -> tokio::io::Result<R> where F: FnOnce() -> tokio::io::Result<R> + Send + 'static, R: Send + 'static, { match task::spawn_blocking(f).await { Ok(res) => res, Err(err) => Err(tokio::io::Error::new( tokio::io::ErrorKind::Other, format!("failure in task wrapper for blocking closure: {:?}", err), )), } } // }}} // Error type and related {{{ #[derive(Debug, Snafu)] enum Error { #[snafu(display("Failed to open configuration file `{}`: {}", path.display(), source))] ConfigFileOpen { path: PathBuf, source: std::io::Error, }, #[snafu(display("Failed to read configuration file `{}`: {}", path.display(), source))] ConfigFileRead { path: PathBuf, source: std::io::Error, }, #[snafu(display("Failed to parse configuration file as TOML `{}`: {}", path.display(), source))] ConfigParse { path: PathBuf, source: toml::de::Error, }, #[snafu(display("No 'source' directory specified via CLI or configuration file"))] ConfigNoSource, #[snafu(display("No 'output' directory specified via CLI or configuration file"))] ConfigNoOutput, #[snafu(display("Failed to create Tokio runtime: {}", source))] CreateRuntime { source: tokio::io::Error, }, #[snafu(display("Failed to join main tasks: {}", 
source))] MainJoin { source: task::JoinError, }, #[snafu(display("Failed to scanner task: {}", source))] ScannerJoin { source: task::JoinError, }, #[snafu(display("Failed to check existence of temporary directory `{}`: {}", tmpdir.display(), source))] TmpDirExist { tmpdir: PathBuf, source: io::Error, }, #[snafu(display("Failed to create temporary directory `{}`: {}", tmpdir.display(), source))] TmpDirCreate { tmpdir: PathBuf, source: io::Error, }, #[snafu(display("Failed to remove temporary directory `{}` for cleanup: {}", tmpdir.display(), source))] TmpDirCleanup { tmpdir: PathBuf, source: io::Error, }, #[snafu(display("One or more scan workers failed: {}", errors))] Scan { errors: ErrorList, }, #[snafu(display("Error scanning directory: {}", source))] ScanDir { source: io::Error, }, #[snafu(display("Error reading directory entry during scan: {}", source))] ScanDirEntry { source: io::Error, }, #[snafu(display("Error reading directory entry's metadata during scan: {}", source))] ScanDirMetadata { source: io::Error, }, #[snafu(display("Failed to join directory map scan task: {}", source))] ScanDirMapJoin { source: task::JoinError, }, #[snafu(display("Failed to remove orphaned {} `{}`: {}", path_type, path.display(), source))] ScanOrphanPath { path: PathBuf, path_type: String, source: io::Error, }, #[snafu(display("Failed to remove unexpected {} `{}` in output: {}", path_type, path.display(), source))] ScanUnexpectedPath { path: PathBuf, path_type: String, source: io::Error, }, #[snafu(display("Failed to create output directory `{}` in output: {}", path.display(), source))] ScanOutputDirCreate { path: PathBuf, source: io::Error, }, #[snafu(display("Error reading output file `{}`'s last-modified time during scan: {}", path.display(), source))] ScanOutputFileModifiedTime { path: PathBuf, source: io::Error, }, #[snafu(display("Failed to check existence of output path `{}`: {}", path.display(), source))] ScanOutputPathExist { path: PathBuf, source: io::Error, }, 
#[snafu(display("Error reading source file `{}`'s last-modified time during scan: {}", path.display(), source))] ScanSourceFileModifiedTime { path: PathBuf, source: io::Error, }, #[snafu(display("Failed to queue up new harmonise job: {:?}", source))] HarmoniseQueuePush { source: mpsc::error::SendError<HarmoniseJob>, }, #[snafu(display("Failed to spawn ffmpeg subprocess: {:?}", source))] HarmoniseFfmpegSpawn { source: tokio::io::Error, }, #[snafu(display("Failure while awaiting ffmpeg subprocess: {:?}", source))] HarmoniseFfmpegWait { source: tokio::io::Error, }, #[snafu(display("ffmpeg exited with non-zero status '{}' for job {:?}", status, job))] HarmoniseFfmpegFailed { status: std::process::ExitStatus, job: HarmoniseJob, }, #[snafu(display("Failed to read metadata from `{}`: {}", path.display(), source))] HarmoniseFileTimesMetadata { path: PathBuf, source: tokio::io::Error, }, #[snafu(display("Failed to set access and modified time on `{}`: {}", path.display(), source))] HarmoniseFileTimesSet { path: PathBuf, source: tokio::io::Error, }, #[snafu(display("Failed to copy file for harmonise job:\n\t\tFrom: {}\n\t\tTo: {}\n\t{}", from.display(), to.display(), source))] HarmoniseCopy { from: PathBuf, to: PathBuf, source: io::Error, }, #[snafu(display("Failed to move temporary file for harmonise job:\n\t\tFrom: {}\n\t\tTo: {}\n\t{}", from.display(), to.display(), source))] HarmoniseTmpMove { from: PathBuf, to: PathBuf, source: io::Error, }, #[snafu(display("Failed to move temporary file for harmonise job, because the 'from' and 'to' directories are on different filesystems:\n\t\tFrom: {}\n\t\tTo: {}", from.display(), to.display()))] HarmoniseTmpMoveMount { from: PathBuf, to: PathBuf, }, #[snafu(display( "Failed to create temporary file `{}` for harmonise job: {}", path.display(), source ))] HarmoniseTmpCreate { path: PathBuf, source: io::Error, }, #[snafu(display( "Failed to create temporary file for harmonise job #{} retries failed", TMP_FILE_RETRIES, ))] 
HarmoniseTmpCreateRetries, Placeholder, } #[derive(Debug)] struct ErrorList(pub Vec<Error>); impl std::fmt::Display for ErrorList { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let mut first = true; write!(f, "[ ")?; for err in self.0.iter() { write!(f, "{}", err)?; if !first { write!(f, ", ")?; } else { first = false; } } write!(f, " ]")?; Ok(()) } } // }}} // Error -> Status code mappings {{{ impl From<Error> for i32 { fn from(err: Error) -> Self { match err { _ => exit_code::FAILURE, } } } impl ExitDisplay for Error { fn display(&self) -> String { // TODO Better error message here? Print embedded backtraces? format!("{}", self) } } // }}}
// Crate/module surface: public submodules plus re-exports of the primary
// request-building types at this level.
pub mod constants;
pub mod identifiers;
// Internal implementation of request construction; key types re-exported below.
mod request_builder;
pub mod requests;

// Re-export the main entry points so callers don't need the internal paths.
pub use request_builder::{PreparedRequest, RequestBuilder};
pub use requests::TxnAuthrAgrmtAcceptanceData;
// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0

use crate::{
    counters, metrics_safety_rules::MetricsSafetyRules, network_interface::ConsensusMsg,
    state_replication::StateComputer,
};
use channel::Receiver;
use consensus_types::{
    common::Author,
    executed_block::ExecutedBlock,
    experimental::{commit_decision::CommitDecision, commit_vote::CommitVote},
};
use diem_crypto::{ed25519::Ed25519Signature, HashValue};
use diem_infallible::Mutex;
use diem_logger::prelude::*;
use diem_metrics::monitor;
use diem_types::{
    account_address::AccountAddress,
    block_info::Round,
    ledger_info::{LedgerInfo, LedgerInfoWithSignatures},
    validator_verifier::ValidatorVerifier,
};
use executor_types::Error as ExecutionError;
use futures::{select, SinkExt, StreamExt};
use safety_rules::TSafetyRules;
use std::{
    collections::{hash_map::Entry, BTreeMap, HashMap, VecDeque},
    sync::Arc,
};

/*
Commit phase overview
=====================

The commit phase takes the executed blocks from the execution phase and
commits them. Specifically, the commit phase signs a commit vote message
containing the execution result and broadcasts it. Upon collecting a quorum
of agreeing votes on an execution result, the commit phase commits the
blocks and broadcasts a commit decision message together with the quorum of
signatures.

Data flow (see `start` for the main loop):

- Incoming `CommitVote` messages add a signature to the matching entry in
  the local cache (a `HashMap` keyed by executed state id).
- Incoming `CommitDecision` messages replace the cached signature tree
  wholesale (the decision already carries a verified quorum).
- Locally executed blocks are signed, a `CommitVote` is broadcast, and the
  blocks are enqueued on the local queue.
- After each event, the local queue front is checked against the cache: if
  its ledger info has reached quorum voting power, the blocks are committed,
  a `CommitDecision` is broadcast, and both the queue and the cache entry
  are popped.

The commit decision message helps the slower nodes to quickly catch up
without having to collect the signatures.
*/

/// A batch of executed blocks waiting in the local queue, paired with the
/// ledger info (and accumulated signatures) they will be committed under.
#[derive(Clone)]
struct PendingBlocks {
    vecblocks: Vec<ExecutedBlock>,
    ledger_info_sig: LedgerInfoWithSignatures,
}

impl PendingBlocks {
    pub fn new(vecblocks: Vec<ExecutedBlock>, ledger_info_sig: LedgerInfoWithSignatures) -> Self {
        Self {
            vecblocks,
            ledger_info_sig,
        }
    }
}

/// Outbound messages produced by the commit phase for the network layer.
#[derive(Debug)]
pub enum CommitPhaseMessage {
    CommitVote(Author, LedgerInfo, Ed25519Signature),
    CommitDecision(LedgerInfoWithSignatures),
}

/// State of the commit phase. See the module-level comment for how the
/// channels, cache, and queue interact.
pub struct CommitPhase {
    // Executed blocks arriving from the execution phase.
    commit_channel_recv: Receiver<(Vec<ExecutedBlock>, LedgerInfoWithSignatures)>,
    execution_proxy: Arc<dyn StateComputer>,
    // executed_state_id -> ledger info with the signatures collected so far.
    local_cache: HashMap<HashValue, LedgerInfoWithSignatures>,
    // Blocks waiting for a quorum, in commit order.
    local_queue: VecDeque<PendingBlocks>,
    commit_msg_sender: channel::Sender<CommitPhaseMessage>,
    commit_msg_receiver: channel::Receiver<ConsensusMsg>,
    verifier: ValidatorVerifier,
    safety_rules: Arc<Mutex<MetricsSafetyRules>>,
    author: Author,
    // Highest round committed so far; older messages are ignored.
    committed_round: Round,
}

/// Commit the given blocks through the execution proxy under `ledger_info`.
/// The block order must be preserved.
pub async fn commit(
    execution_proxy: &Arc<dyn StateComputer>,
    vecblock: &[ExecutedBlock],
    ledger_info: &LedgerInfoWithSignatures,
) -> Result<(), ExecutionError> {
    // have to maintain the order.
    execution_proxy
        .commit(
            &vecblock
                .iter()
                .map(|eb| Arc::new(eb.clone()))
                .collect::<Vec<Arc<ExecutedBlock>>>(),
            ledger_info.clone(),
        )
        .await
}

// Log (and count) an error without aborting the main loop.
macro_rules! report_err {
    ($result:expr, $error_string:literal) => {
        if let Err(err) = $result {
            counters::ERROR_COUNT.inc();
            error!(error = err.to_string(), $error_string,)
        }
    };
}

impl CommitPhase {
    pub fn new(
        commit_channel_recv: Receiver<(Vec<ExecutedBlock>, LedgerInfoWithSignatures)>,
        execution_proxy: Arc<dyn StateComputer>,
        commit_msg_sender: channel::Sender<CommitPhaseMessage>,
        commit_msg_receiver: channel::Receiver<ConsensusMsg>,
        verifier: ValidatorVerifier,
        safety_rules: Arc<Mutex<MetricsSafetyRules>>,
        author: Author,
    ) -> Self {
        Self {
            commit_channel_recv,
            execution_proxy,
            local_cache: HashMap::<HashValue, LedgerInfoWithSignatures>::new(),
            local_queue: VecDeque::<PendingBlocks>::new(),
            commit_msg_sender,
            commit_msg_receiver,
            verifier,
            safety_rules,
            author,
            committed_round: 0,
        }
    }

    /// Notified when receiving a commit vote message.
    /// Verifies the vote and folds its signature into the cached ledger info
    /// for the corresponding executed state id.
    pub async fn process_commit_vote(&mut self, commit_vote: &CommitVote) -> anyhow::Result<()> {
        let li = commit_vote.ledger_info();

        // Stale vote for an already-committed round: nothing to do.
        if li.commit_info().round() < self.committed_round {
            return Ok(()); // we ignore the message
        }

        // verify the signature
        commit_vote.verify(&self.verifier)?;

        let executed_state_hash = li.commit_info().executed_state_id();

        // add the signature to local_cache
        match self.local_cache.entry(executed_state_hash) {
            Entry::Occupied(mut ledger_info_entry) => {
                let mut_ledger_info_entry = ledger_info_entry.get_mut();
                mut_ledger_info_entry
                    .add_signature(commit_vote.author(), commit_vote.signature().clone());
            }
            Entry::Vacant(_) => {
                // First vote seen for this state: start a fresh signature set.
                let mut li_sig = LedgerInfoWithSignatures::new(
                    li.clone(),
                    BTreeMap::<AccountAddress, Ed25519Signature>::new(),
                );
                li_sig.add_signature(commit_vote.author(), commit_vote.signature().clone());
                self.local_cache.insert(executed_state_hash, li_sig);
            }
        };

        Ok(())
    }

    /// Notified when receiving a commit decision message.
    /// A decision carries a full quorum, so the cached signature tree is
    /// replaced wholesale after verification.
    pub async fn process_commit_decision(
        &mut self,
        commit_decision: CommitDecision,
    ) -> anyhow::Result<()> {
        let li = commit_decision.ledger_info();

        // Stale decision for an already-committed round: nothing to do.
        if li.ledger_info().commit_info().round() < self.committed_round {
            return Ok(()); // we ignore the message
        }

        commit_decision.verify(&self.verifier)?;

        let executed_state_hash = li.ledger_info().commit_info().executed_state_id();

        // TODO: optimization1: probe local_cache first to see if the existing already verifies,
        // TODO: otherwise we do not make changes.
        // TODO: optimization2: we can set a bit to indicate this tree of signatures are already verified,
        // TODO: we do not have to verify it again in the main loop.

        // replace the signature tree directly if it does not verify
        self.local_cache.insert(executed_state_hash, li.clone());

        Ok(())
    }

    /// Drain the front of the local queue while each pending batch has a
    /// quorum of signatures in the cache: commit the blocks, bump the
    /// committed round, broadcast a commit decision, and pop both sides.
    /// Stops at the first batch without a quorum (commit order is strict).
    pub async fn process_local_queue(&mut self) -> anyhow::Result<()> {
        let mut_local_queue = &mut self.local_queue;
        let mut_local_cache = &mut self.local_cache;
        while let Some(front) = mut_local_queue.front() {
            let front_executed_state_hash = front
                .ledger_info_sig
                .ledger_info()
                .commit_info()
                .executed_state_id();
            match mut_local_cache.entry(front_executed_state_hash) {
                Entry::Occupied(ledger_info_occupied_entry) => {
                    // cancel out an item from local_cache and an item from local_queue
                    if ledger_info_occupied_entry
                        .get()
                        .check_voting_power(&self.verifier)
                        .is_ok()
                    {
                        commit(
                            &self.execution_proxy,
                            &front.vecblocks,
                            &front.ledger_info_sig,
                        )
                        .await
                        .expect("Failed to commit the executed blocks.");
                        // Rounds must advance strictly monotonically.
                        assert!(
                            self.committed_round
                                < front.ledger_info_sig.ledger_info().commit_info().round()
                        );
                        self.committed_round =
                            front.ledger_info_sig.ledger_info().commit_info().round();
                        self.commit_msg_sender
                            .send(CommitPhaseMessage::CommitDecision(
                                ledger_info_occupied_entry.get().clone(),
                            ))
                            .await?;
                        ledger_info_occupied_entry.remove_entry();
                        mut_local_queue.pop_front();
                    } else {
                        // Front batch has no quorum yet; later batches must wait.
                        break;
                    }
                }
                Entry::Vacant(_) => {
                    break;
                }
            }
        }

        Ok(())
    }

    /// Notified when the execution phase delivers a batch of executed blocks:
    /// sign a commit vote over the last block's info, broadcast it, and
    /// enqueue the batch to await a quorum.
    pub async fn process_executed_blocks(
        &mut self,
        vecblock: Vec<ExecutedBlock>,
        ledger_info: LedgerInfoWithSignatures,
    ) -> anyhow::Result<()> {
        // The vote covers the last block of the batch plus the consensus
        // data hash carried by the ordering ledger info.
        let new_ledger_info = LedgerInfo::new(
            vecblock.last().unwrap().block_info(),
            ledger_info.ledger_info().consensus_data_hash(),
        );
        let sig = self
            .safety_rules
            .lock()
            .sign_commit_vote(ledger_info.clone(), new_ledger_info.clone())?;
        // if fails, it needs to resend, otherwise the liveness might compromise.
        self.commit_msg_sender
            .send(CommitPhaseMessage::CommitVote(
                self.author,
                new_ledger_info.clone(),
                sig,
            ))
            .await?;
        // note that this message will also reach the node itself
        self.local_queue
            .push_back(PendingBlocks::new(vecblock, ledger_info));

        Ok(())
    }

    /// Main event loop: multiplex executed-block deliveries and network
    /// commit messages, then try to drain the local queue after each event.
    pub async fn start(mut self) {
        loop {
            select! {
                (vecblock, ledger_info) = self.commit_channel_recv.select_next_some() => {
                    report_err!(self.process_executed_blocks(vecblock, ledger_info).await, "Error in processing executed blocks");
                }
                msg = self.commit_msg_receiver.select_next_some() => {
                    match msg {
                        ConsensusMsg::CommitVoteMsg(request) => {
                            monitor!(
                                "process_commit_vote",
                                report_err!(self.process_commit_vote(&*request).await, "Error in processing commit vote.")
                            );
                        }
                        ConsensusMsg::CommitDecisionMsg(request) => {
                            monitor!(
                                "process_commit_decision",
                                report_err!(self.process_commit_decision(*request).await, "Error in processing commit decision.")
                            );
                        }
                        _ => {}
                    };
                }
            }
            report_err!(
                self.process_local_queue().await,
                "Error in processing local queue"
            );
        }
    }
}
#[doc = "Register `OTG_HNPTXSTS` reader"] pub type R = crate::R<OTG_HNPTXSTS_SPEC>; #[doc = "Field `NPTXFSAV` reader - NPTXFSAV"] pub type NPTXFSAV_R = crate::FieldReader<u16>; #[doc = "Field `NPTQXSAV` reader - NPTQXSAV"] pub type NPTQXSAV_R = crate::FieldReader; #[doc = "Field `NPTXQTOP` reader - NPTXQTOP"] pub type NPTXQTOP_R = crate::FieldReader; impl R { #[doc = "Bits 0:15 - NPTXFSAV"] #[inline(always)] pub fn nptxfsav(&self) -> NPTXFSAV_R { NPTXFSAV_R::new((self.bits & 0xffff) as u16) } #[doc = "Bits 16:23 - NPTQXSAV"] #[inline(always)] pub fn nptqxsav(&self) -> NPTQXSAV_R { NPTQXSAV_R::new(((self.bits >> 16) & 0xff) as u8) } #[doc = "Bits 24:30 - NPTXQTOP"] #[inline(always)] pub fn nptxqtop(&self) -> NPTXQTOP_R { NPTXQTOP_R::new(((self.bits >> 24) & 0x7f) as u8) } } #[doc = "In device mode, this register is not valid. This read-only register contains the free space information for the non-periodic Tx FIFO and the non-periodic transmit request queue.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`otg_hnptxsts::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."] pub struct OTG_HNPTXSTS_SPEC; impl crate::RegisterSpec for OTG_HNPTXSTS_SPEC { type Ux = u32; } #[doc = "`read()` method returns [`otg_hnptxsts::R`](R) reader structure"] impl crate::Readable for OTG_HNPTXSTS_SPEC {} #[doc = "`reset()` method sets OTG_HNPTXSTS to value 0x0008_0400"] impl crate::Resettable for OTG_HNPTXSTS_SPEC { const RESET_VALUE: Self::Ux = 0x0008_0400; }
use super::api; use crate::{Error, PrefixContext}; use std::borrow::Cow; // Small thing about multiline strings: while hacking on this file I was unsure how to handle // trailing newlines in multiline strings: // - should they have one ("hello\nworld\n") // - or not? ("hello\nworld") // After considering several use cases and intensely thinking about it, I arrived at the // most mathematically sound and natural way: always have a trailing newline, except for the empty // string. This means, that there'll always be exactly as many newlines as lines, which is // mathematically sensible. It also means you can also naturally concat multiple multiline // strings, and `is_empty` will still work. // So that's how (hopefully) all semantically-multiline strings in this code work /// Returns the parsed flags and a String of parse errors. The parse error string will have a /// trailing newline (except if empty) pub fn parse_flags(mut args: poise::KeyValueArgs) -> (api::CommandFlags, String) { let mut errors = String::new(); let mut flags = api::CommandFlags { channel: api::Channel::Nightly, mode: api::Mode::Debug, edition: api::Edition::E2018, warn: false, }; if let Some(channel) = args.0.remove("channel") { match channel.parse() { Ok(x) => flags.channel = x, Err(e) => errors += &format!("{}\n", e), } } if let Some(mode) = args.0.remove("mode") { match mode.parse() { Ok(x) => flags.mode = x, Err(e) => errors += &format!("{}\n", e), } } if let Some(edition) = args.0.remove("edition") { match edition.parse() { Ok(x) => flags.edition = x, Err(e) => errors += &format!("{}\n", e), } } if let Some(warn) = args.0.remove("warn") { match warn.parse() { Ok(x) => flags.warn = x, Err(e) => errors += &format!("{}\n", e), } } for (remaining_flag, _) in args.0 { errors += &format!("unknown flag `{}`\n", remaining_flag); } (flags, errors) } pub struct GenericHelp<'a> { pub command: &'a str, pub desc: &'a str, pub mode_and_channel: bool, pub warn: bool, pub example_code: &'a str, } pub fn 
generic_help(spec: GenericHelp<'_>) -> String { let mut reply = format!( "{}. All code is executed on https://play.rust-lang.org.\n", spec.desc ); reply += "```rust\n?"; reply += spec.command; if spec.mode_and_channel { reply += " mode={} channel={}"; } reply += " edition={}"; if spec.warn { reply += " warn={}"; } reply += " ``\u{200B}`"; reply += spec.example_code; reply += "``\u{200B}`\n```\n"; reply += "Optional arguments:\n"; if spec.mode_and_channel { reply += "- mode: debug, release (default: debug)\n"; reply += "- channel: stable, beta, nightly (default: nightly)\n"; } reply += "- edition: 2015, 2018, 2021 (default: 2018)\n"; if spec.warn { reply += "- warn: true, false (default: false)\n"; } reply } /// Strip the input according to a list of start tokens and end tokens. Everything after the start /// token up to the end token is stripped. Remaining trailing or loading empty lines are removed as /// well. /// /// If multiple potential tokens could be used as a stripping point, this function will make the /// stripped output as compact as possible and choose from the matching tokens accordingly. 
// Note to self: don't use "Finished dev" as a parameter to this, because that will break in release
// compilation mode
pub fn extract_relevant_lines<'a>(
    mut stderr: &'a str,
    strip_start_tokens: &[&str],
    strip_end_tokens: &[&str],
) -> &'a str {
    // Find best matching start token: the *latest* occurrence of any start
    // token, so the kept region is as compact as possible.
    if let Some(start_token_pos) = strip_start_tokens
        .iter()
        .filter_map(|t| stderr.rfind(t))
        .max()
    {
        // Keep only lines after that
        stderr = match stderr[start_token_pos..].find('\n') {
            Some(line_end) => &stderr[(line_end + start_token_pos + 1)..],
            None => "",
        };
    }

    // Find best matching end token: the *earliest* occurrence of any end
    // token, again to keep the retained region minimal.
    if let Some(end_token_pos) = strip_end_tokens
        .iter()
        .filter_map(|t| stderr.rfind(t))
        .min()
    {
        // Keep only lines before that
        stderr = match stderr[..end_token_pos].rfind('\n') {
            Some(prev_line_end) => &stderr[..=prev_line_end],
            None => "",
        };
    }

    // Strip trailing or leading empty lines (collapse runs of blank lines at
    // the end down to a single trailing newline).
    stderr = stderr.trim_start_matches('\n');
    while stderr.ends_with("\n\n") {
        stderr = &stderr[..(stderr.len() - 1)];
    }

    stderr
}

/// How the value of the wrapped expression should be consumed by the
/// generated `fn main` (see [`maybe_wrap`]).
pub enum ResultHandling {
    /// Don't consume results at all, making rustc throw an error when the result isn't ()
    None,
    /// Consume using `let _ = { ... };`
    Discard,
    /// Print the result with `println!("{:?}")`
    Print,
}

/// Wrap `code` between `after_crate_attrs` and `after_code`, hoisting any
/// inner crate attributes (`#![...]`) to the very top of the output so they
/// remain valid. (Name is a historic typo for "hoist"; kept for callers.)
pub fn hoise_crate_attributes(code: &str, after_crate_attrs: &str, after_code: &str) -> String {
    let mut lines = code.lines().peekable();

    let mut output = String::new();

    // First go through the input lines and extract the crate attributes at the start. Those will
    // be put right at the beginning of the generated code, else they won't work (crate attributes
    // need to be at the top of the file)
    while let Some(line) = lines.peek() {
        let line = line.trim();
        if line.starts_with("#![") {
            output.push_str(line);
            output.push('\n');
        } else if line.is_empty() {
            // do nothing, maybe more crate attributes are coming
        } else {
            break;
        }
        lines.next(); // Advance the iterator
    }

    output.push_str(after_crate_attrs);

    // Write the rest of the lines that don't contain crate attributes
    for line in lines {
        output.push_str(line);
        output.push('\n');
    }

    output.push_str(after_code);

    output
}

/// Utility used by the commands to wrap the given code in a `fn main` if not already wrapped.
/// To check, whether a wrap was done, check if the return type is Cow::Borrowed vs Cow::Owned
/// If a wrap was done, also hoists crate attributes to the top so they keep working
pub fn maybe_wrap(code: &str, result_handling: ResultHandling) -> Cow<'_, str> {
    // Naive containment check: any occurrence of "fn main" (even in a
    // comment or string) suppresses wrapping.
    if code.contains("fn main") {
        return Cow::Borrowed(code);
    }

    // fn main boilerplate
    let after_crate_attrs = match result_handling {
        ResultHandling::None => "fn main() {\n",
        ResultHandling::Discard => "fn main() { let _ = {\n",
        ResultHandling::Print => "fn main() { println!(\"{:?}\", {\n",
    };

    // fn main boilerplate counterpart
    let after_code = match result_handling {
        ResultHandling::None => "}",
        ResultHandling::Discard => "}; }",
        ResultHandling::Print => "}); }",
    };

    Cow::Owned(hoise_crate_attributes(code, after_crate_attrs, after_code))
}

/// Send a Discord reply with the formatted contents of a Playground result
pub async fn send_reply(
    ctx: PrefixContext<'_>,
    result: api::PlayResult,
    code: &str,
    flags: &api::CommandFlags,
    flag_parse_errors: &str,
) -> Result<(), Error> {
    // Merge stdout and stderr, skipping whichever stream is empty.
    let result = if result.stderr.is_empty() {
        result.stdout
    } else if result.stdout.is_empty() {
        result.stderr
    } else {
        format!("{}\n{}", result.stderr, result.stdout)
    };

    // Discord displays empty code blocks weirdly if they're not formatted in a specific style,
    // so we special-case empty code blocks
    if result.trim().is_empty() {
        poise::say_reply(
            poise::Context::Prefix(ctx),
            format!("{}``` ```", flag_parse_errors),
        )
        .await?;
    } else {
        // For oversized output, fall back to a gist link.
        super::reply_potentially_long_text(
            ctx,
            &format!("{}```rust\n{}", flag_parse_errors, result),
            "```",
            &format!(
                "Output too large. Playground link: <{}>",
                api::url_from_gist(flags, &api::post_gist(ctx, code).await?),
            ),
        )
        .await?;
    }

    Ok(())
}

// This function must not break when provided non-formatted text with messed up formatting: rustfmt
// may not be installed on the host's computer!
pub fn strip_fn_main_boilerplate_from_formatted(text: &str) -> String {
    // Remove the fn main boilerplate
    let prefix = "fn main() {";
    let postfix = "}";

    // Slice between the first `fn main() {` and the last `}`; fall back to
    // the whole text if either marker is missing or the range is invalid.
    let text = match (text.find(prefix), text.rfind(postfix)) {
        (Some(prefix_pos), Some(postfix_pos)) => text
            .get((prefix_pos + prefix.len())..postfix_pos)
            .unwrap_or(text),
        _ => text,
    };
    let text = text.trim();

    // Revert the indent introduced by rustfmt
    let mut output = String::new();
    for line in text.lines() {
        output.push_str(line.strip_prefix("    ").unwrap_or(line));
        output.push('\n');
    }
    output
}

/// Split stderr into compiler output and program stderr output and format the two nicely
///
/// If the program doesn't compile, the compiler output is returned. If it did compile and run,
/// compiler output (i.e. warnings) is shown only when show_compiler_warnings is true.
pub fn format_play_eval_stderr(stderr: &str, show_compiler_warnings: bool) -> String { let compiler_output = extract_relevant_lines( stderr, &["Compiling playground"], &[ "warning emitted", "warnings emitted", "warning: `playground` (bin \"playground\") generated", "error: could not compile", "error: aborting", "Finished ", ], ); if stderr.contains("Running `target") { // Program successfully compiled, so compiler output will be just warnings let program_stderr = extract_relevant_lines(stderr, &["Running `target"], &[]); if show_compiler_warnings { // Concatenate compiler output and program stderr with a newline match (compiler_output, program_stderr) { ("", "") => String::new(), (warnings, "") => warnings.to_owned(), ("", stderr) => stderr.to_owned(), (warnings, stderr) => format!("{}\n{}", warnings, stderr), } } else { program_stderr.to_owned() } } else { // Program didn't get to run, so there must be an error, so we yield the compiler output // regardless of whether warn is enabled or not compiler_output.to_owned() } }
/* * Slack Web API * * One way to interact with the Slack platform is its HTTP RPC-based Web API, a collection of methods requiring OAuth 2.0-based user, bot, or workspace tokens blessed with related OAuth scopes. * * The version of the OpenAPI document: 1.7.0 * * Generated by: https://openapi-generator.tech */ #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ObjsResources { #[serde(rename = "excluded_ids", skip_serializing_if = "Option::is_none")] pub excluded_ids: Option<Vec<serde_json::Value>>, #[serde(rename = "ids")] pub ids: Vec<serde_json::Value>, #[serde(rename = "wildcard", skip_serializing_if = "Option::is_none")] pub wildcard: Option<bool>, } impl ObjsResources { pub fn new(ids: Vec<serde_json::Value>) -> ObjsResources { ObjsResources { excluded_ids: None, ids, wildcard: None, } } }
//! Testing artifacts. These are bits of data produced by your tests that Egress will compare with //! previously produced "reference" artifacts. use ::{ serde::{Deserialize, Serialize}, serde_json::Value, std::{ collections::BTreeMap, fmt::{self}, }, }; use crate::ErrorKind; fn diff_json(mismatches: &mut Vec<Mismatch>, prefix: String, value: &Value, reference: &Value) { use Value::*; match (value, reference) { (Object(map), Object(reference_map)) => { for (k, v) in map { let v_ref = match reference_map.get(k) { Some(it) => it, None => { mismatches.push(Mismatch::NotInReference( format!("{}.{}", prefix, k), Entry::Json(v.clone()), )); continue; } }; diff_json(&mut *mismatches, format!("{}.{}", prefix, k), v, v_ref); } for (k, v_ref) in reference_map.iter() { if !map.contains_key(k) { mismatches.push(Mismatch::NotProduced( format!("{}.{}", prefix, k), Entry::Json(v_ref.clone()), )); } } } (Array(array), Array(array_ref)) => { if array.len() != array_ref.len() { if array.len() > array_ref.len() { for (i, elem) in array.iter().enumerate().skip(array_ref.len()) { mismatches.push(Mismatch::NotInReference( format!("{}[{}]", prefix, i), Entry::Json(elem.clone()), )); } } else if array.len() < array_ref.len() { for (i, elem_ref) in array_ref.iter().enumerate().skip(array.len()) { mismatches.push(Mismatch::NotProduced( format!("{}[{}]", prefix, i), Entry::Json(elem_ref.clone()), )); } } mismatches.push(Mismatch::LengthMismatch( format!("{}.len()", prefix), array.len(), array_ref.len(), )); } else { for (i, (elem, elem_ref)) in array.iter().zip(array_ref.iter()).enumerate() { diff_json( &mut *mismatches, format!("{}[{}]", prefix, i), elem, elem_ref, ); } } } (other, other_ref) => { if other != other_ref { mismatches.push(Mismatch::NotEq( prefix, Entry::Json(other.clone()), Entry::Json(other_ref.clone()), )); } } } } /// Artifacts are maps from string keys to `Entry` objects. 
Entries in an /// artifact can be strings, JSON values, byte buffers, or - because /// artifacts are tree structured - another `Artifact`. #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] pub enum Entry { /// A string entry. Str(String), /// A JSON entry. The `Value` type comes from the `serde_json` crate. Json(Value), /// A raw byte entry. Bytes(Vec<u8>), /// An artifact entry. Artifact(Artifact), } /// An `Artifact` is the main object that Egress uses to handle and compare /// data produced from your tests. It's basically just a map from string keys /// to `Entry`s. #[serde(transparent)] #[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)] pub struct Artifact { entries: BTreeMap<String, Entry>, } #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] pub enum Mismatch { NotEq(String, Entry, Entry), NotInReference(String, Entry), NotProduced(String, Entry), LengthMismatch(String, usize, usize), } impl Artifact { /// Create an empty `Artifact`. This is useful for building tree-structured /// artifacts, but the root artifact for a given test should always come from /// `Egress::artifact`. pub fn new() -> Self { Self::default() } /// Insert an `Entry` into the artifact, with a given string name. The other /// `insert_*` methods are just convenient wrappers around this one. pub fn insert(&mut self, name: &str, entry: Entry) { if self.entries.insert(name.to_string(), entry).is_some() { panic!( "Duplicate entries under the same name (`{}`) are not allowed!", name ); } } /// Convert a value to a string via the `fmt::Debug` formatter and then insert /// that into the `Artifact` with the given string key. pub fn insert_debug<T: fmt::Debug>(&mut self, name: &str, value: &T) { self.insert(name, Entry::Str(format!("{:#?}", value))); } /// Convert a value to a string via the `fmt::Display` formatter and then insert /// that into the `Artifact` with the given string key. 
pub fn insert_display<T: fmt::Display>(&mut self, name: &str, value: &T) { self.insert(name, Entry::Str(value.to_string())); } /// Convert a value to a JSON value via `serde_json` and then insert that into /// the `Artifact` with the given string key. /// /// Egress uses `serde` to do this, so if you want to be able to have nicely formatted /// diffs between your types, you'll want them to derive `serde::{Serialize}`. pub fn insert_serialize<T: Serialize>( &mut self, name: &str, value: &T, ) -> Result<(), ErrorKind> { self.insert_json(name, serde_json::to_value(value)?); Ok(()) } /// Insert a JSON `Value` into the `Artifact` with the given string key. pub fn insert_json(&mut self, name: &str, json_value: Value) { self.insert(name, Entry::Json(json_value)); } fn compare_against_reference(&self, prefix: String, reference: &Artifact) -> Vec<Mismatch> { let mut mismatches = Vec::new(); for (k, v) in self.entries.iter() { let v_ref = match reference.entries.get(k) { Some(it) => it, None => { mismatches.push(Mismatch::NotInReference( format!("{}::{}", prefix, k), v.clone(), )); continue; } }; use Entry::*; match (v, v_ref) { (Artifact(art), Artifact(art_ref)) => { mismatches.extend( art.compare_against_reference(format!("{}::{}", prefix, k), art_ref), ); } (Json(json), Json(json_ref)) => { diff_json( &mut mismatches, format!("{}::{}", prefix, k), json, json_ref, ); } (other, other_ref) => { if other != other_ref { mismatches.push(Mismatch::NotEq( format!("{}::{}", prefix, k), other.clone(), other_ref.clone(), )); } } } } for (k_ref, v_ref) in reference.entries.iter() { if !self.entries.contains_key(k_ref) { mismatches.push(Mismatch::NotProduced( format!("{}::{}", prefix, k_ref), v_ref.clone(), )); } } mismatches } pub(crate) fn report_mismatches(&self, prefix: String, reference: &Artifact) -> Vec<Mismatch> { self.compare_against_reference(prefix, reference) } }
// Copyright 2014-2018 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. fn main() { let _ = (0..4).filter_map(|x| if x > 1 { Some(x) } else { None }); let _ = (0..4).filter_map(|x| { if x > 1 { return Some(x); }; None }); let _ = (0..4).filter_map(|x| match x { 0 | 1 => None, _ => Some(x), }); let _ = (0..4).filter_map(|x| Some(x + 1)); let _ = (0..4).filter_map(i32::checked_abs); }
use serde::Deserialize; #[derive(Debug, Default, Deserialize)] pub struct Value { #[serde(rename = "k")] pub value: f64, #[serde(rename = "x")] pub expression: Option<String>, #[serde(rename = "ix")] pub index: Option<i64>, }
fn apparently(string: &str) -> String { let ret: Vec<&str> = string .split_whitespace() .collect(); let mut r: Vec<&str> = vec![]; for (i, x) in ret.iter().enumerate() { if r.len() < 2 { r.push(x); } else if *x != "apparently" || (*x == "apparently" && (ret[i - 1] != "and" && ret[i - 2] != "but")) { r.push(x); } if *x == "and" || *x == "but" { if i < ret.len() - 1 && ret[i + 1] != "apparently" { r.push("apparently"); continue; } r.push("apparently"); } } r.join(" ") } fn main() { print!("{}", apparently("a xx and eiii but, eee but")); } #[test] fn test_apparently() { assert_eq!(apparently("It was great and I have never been on live television before but sometimes I dont watch this."), "It was great and apparently I have never been on live television before but apparently sometimes I dont watch this."); assert_eq!(apparently("and"), "and apparently"); assert_eq!(apparently("and apparently"), "and apparently"); assert_eq!(apparently("apparently"), "apparently"); }
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.

use core::ops::{BitOr, Not, BitAnd};
use core::fmt::{Debug, Formatter};
use crate::kty::{
    c_int,
    O_APPEND, O_NONBLOCK, O_DIRECT, O_NOATIME, O_ASYNC, O_SYNC, O_DSYNC, O_PATH,
    O_RDWR, O_RDONLY, O_WRONLY,
};

/// File description flags.
// Newtype over the raw `O_*` bit set; combined with `|`, `&`, and `!`.
#[derive(PartialEq, Eq)]
pub struct DescriptionFlags(pub c_int);

impl BitOr for DescriptionFlags {
    type Output = DescriptionFlags;
    fn bitor(self, other: DescriptionFlags) -> DescriptionFlags {
        DescriptionFlags(self.0 | other.0)
    }
}

impl BitAnd for DescriptionFlags {
    type Output = DescriptionFlags;
    fn bitand(self, other: DescriptionFlags) -> DescriptionFlags {
        DescriptionFlags(self.0 & other.0)
    }
}

impl Not for DescriptionFlags {
    type Output = DescriptionFlags;
    fn not(self) -> DescriptionFlags {
        DescriptionFlags(!self.0)
    }
}

/// Dummy flag with all flags unset.
pub const FD_NONE: DescriptionFlags = DescriptionFlags(0);

/// Mask containing the access flags.
///
/// = Remarks
///
/// That is, FD_READ_ONLY, FD_WRITE_ONLY, and FD_READ_WRITE.
pub const FD_ACCESS_MASK: DescriptionFlags = DescriptionFlags(3);

// Declares each flag constant and generates a `Debug` impl that prints the
// access mode (low two bits) followed by `|`-separated set flags.
macro_rules! create_flags {
    ($($(#[$meta:meta])* flag $name:ident = $val:expr;)*) => {
        $($(#[$meta])* pub const $name: DescriptionFlags = DescriptionFlags($val);)*

        impl Debug for DescriptionFlags {
            fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
                // The access mode occupies the low two bits and is exclusive.
                let rm = match self.0 & 3 {
                    O_RDONLY => "FD_READ_ONLY",
                    O_WRONLY => "FD_WRITE_ONLY",
                    _ => "FD_READ_WRITE",
                };
                core::write!(f, "{}", rm)?;
                let flags = self.0 & !3;
                $(
                    if flags & $val != 0 {
                        core::write!(f, "|")?;
                        core::write!(f, stringify!($name))?;
                    }
                )*
                Ok(())
            }
        }
    }
}

create_flags! {
    #[doc = "The file description is in read-only mode.\n"]
    #[doc = "= See also"]
    #[doc = "* link:man:open(2) and O_RDONLY therein"]
    flag FD_READ_ONLY = O_RDONLY;

    #[doc = "The file description is in write-only mode.\n"]
    #[doc = "= See also"]
    #[doc = "* link:man:open(2) and O_WRONLY therein"]
    flag FD_WRITE_ONLY = O_WRONLY;

    #[doc = "The file description is open for reading and writing.\n"]
    #[doc = "= See also"]
    #[doc = "* link:man:open(2) and O_RDWR therein"]
    flag FD_READ_WRITE = O_RDWR;

    #[doc = "Bypass kernel buffers and write directly to the disk.\n"]
    #[doc = "= See also"]
    #[doc = "* link:man:open(2) and O_DIRECT therein"]
    flag FD_BYPASS_BUFFER = O_DIRECT;

    #[doc = "Don't update the access time of the file.\n"]
    #[doc = "= See also"]
    #[doc = "* link:man:open(2) and O_NOATIME therein"]
    flag FD_NO_ACCESS_TIME_UPDATE = O_NOATIME;

    #[doc = "Perform all writes to the file at the end of the file.\n"]
    #[doc = "= See also"]
    #[doc = "* link:man:open(2) and O_APPEND therein"]
    flag FD_APPEND = O_APPEND;

    #[doc = "Send a signal to the process when the file becomes ready for reading or \
             writing.\n"]
    #[doc = "= See also"]
    #[doc = "* link:man:open(2) and O_ASYNC therein"]
    flag FD_SIGNAL_IO = O_ASYNC;

    #[doc = "Ensure that all data has been passed to the hardware after a write.\n"]
    #[doc = "= See also"]
    #[doc = "* link:man:open(2) and O_SYNC therein"]
    flag FD_SYNC = O_SYNC;

    #[doc = "Ensure that enough data has been passed to the hardware after a write so \
             that the data can be read back.\n"]
    #[doc = "= See also"]
    #[doc = "* link:man:open(2) and O_DSYNC therein"]
    flag FD_DATA_SYNC = O_DSYNC;

    #[doc = "Return an error instead of blocking.\n"]
    #[doc = "= See also"]
    #[doc = "* link:man:open(2) and O_NONBLOCK therein"]
    flag FD_DONT_BLOCK = O_NONBLOCK;

    #[doc = "The file description identifies a path in a filesystem.\n"]
    #[doc = "= See also"]
    #[doc = "* link:man:open(2) and O_PATH therein"]
    flag FD_PATH = O_PATH;
}

impl DescriptionFlags {
    /// Sets a flag.
    ///
    /// [argument, flag]
    /// The flag to be set.
    pub fn set(&mut self, flag: DescriptionFlags) {
        self.0 |= flag.0
    }

    /// Clears a flag.
    ///
    /// [argument, flag]
    /// The flag to be cleared.
    pub fn unset(&mut self, flag: DescriptionFlags) {
        self.0 &= !flag.0
    }

    /// Returns whether a flag is set.
    ///
    /// [argument, flag]
    /// The flag to be checked.
    pub fn is_set(&self, flag: DescriptionFlags) -> bool {
        self.0 & flag.0 != 0
    }
}
use core::hash::Hash; use super::Habitat; #[allow(clippy::module_name_repetitions)] pub trait LineageReference<H: Habitat>: crate::cogs::Backup + PartialEq + Eq + Hash + Clone + core::fmt::Debug { }
use hyper::{Body, Response}; use crate::db; use crate::html; use crate::http::util; pub fn handle_get(game_id_str: &str) -> Result<Response<Body>, hyper::Error> { let db = db::DB::new(); let game_id = match game_id_str.parse::<u32>() { Ok(v) => v, Err(_e) => return util::bad_request_response("must supply game id as u32"), }; let game = match db.get_game(game_id) { Ok(d) => d, Err(e) => return util::db_error_page(e), }; Ok(Response::new(Body::from(html::common::render_page( html::pages::game_single::page(game), )))) }
use serde::{Deserialize, Serialize}; use svm_layout::{FixedLayoutBuilder, Id, Layout}; use svm_types::{CodeSection, CtorsSection, DataSection, HeaderSection}; use super::{serde_types::HexBlob, JsonError, JsonSerdeUtils}; use crate::api::builder::TemplateBuilder; use crate::template; /// /// ```json /// { /// "name": "...", // string /// "svm_version": "...", // number (`u32`) /// "code_version": "...", // number (`u32`) /// "desc": "...", // string /// "code": "...", // string (represents a `blob`) /// "data": "", // string (represents a `blob`) /// "ctors": ["", ""], // string[] /// } /// ``` pub fn deploy_template(json: &str) -> Result<Vec<u8>, JsonError> { let deploy = DecodedDeploy::from_json_str(json)?; let layout = to_data_layout(deploy.data.0)?; let code = CodeSection::new_fixed(deploy.code.0, deploy.svm_version); let data = DataSection::with_layout(layout); let ctors = CtorsSection::new(deploy.ctors); let header = HeaderSection::new(deploy.code_version, deploy.name, deploy.desc); let template = TemplateBuilder::default() .with_code(code) .with_data(data) .with_ctors(ctors) .with_header(header) .build(); Ok(template::encode(&template)) } fn to_data_layout(blob: Vec<u8>) -> Result<Layout, JsonError> { if blob.len() % 4 != 0 { return Err(JsonError::InvalidField { path: "data".to_string(), }); } let data: Vec<u32> = blob .chunks_exact(4) .map(|buf| { let bytes: [u8; 4] = [buf[0], buf[1], buf[2], buf[3]]; u32::from_be_bytes(bytes) }) .collect(); // Note: `LayoutBuilder` assume that the `first var id` is zero let mut builder = FixedLayoutBuilder::with_capacity(data.len()); builder.set_first(Id(0)); builder.extend_from_slice(&data); let fixed = builder.build(); let layout = Layout::Fixed(fixed); Ok(layout) } #[derive(Debug, Clone, Deserialize, Serialize)] struct DecodedDeploy { svm_version: u32, code_version: u32, name: String, desc: String, code: HexBlob<Vec<u8>>, data: HexBlob<Vec<u8>>, ctors: Vec<String>, } impl JsonSerdeUtils for DecodedDeploy {} #[cfg(test)] 
mod tests {
    use super::*;

    use std::io::Cursor;

    use serde_json::json;
    use svm_layout::FixedLayout;

    // Each `missing_*` test below builds a JSON object that stops just short
    // of one required field and asserts that decoding fails with
    // `JsonError::MissingField` naming exactly that field. Fields are added
    // in the order serde reports them missing.

    #[test]
    fn json_deploy_template_missing_svm_version() {
        let json = json!({}).to_string();

        let err = deploy_template(&json).unwrap_err();
        assert_eq!(
            err,
            JsonError::MissingField {
                field_name: "svm_version".to_string(),
            }
        );
    }

    #[test]
    fn json_deploy_template_missing_code_version() {
        let json = json!({
            "svm_version": 1
        })
        .to_string();

        let err = deploy_template(&json).unwrap_err();
        assert_eq!(
            err,
            JsonError::MissingField {
                field_name: "code_version".to_string(),
            }
        );
    }

    #[test]
    fn json_deploy_template_missing_name() {
        let json = json!({
            "svm_version": 1,
            "code_version": 2
        })
        .to_string();

        let err = deploy_template(&json).unwrap_err();
        assert_eq!(
            err,
            JsonError::MissingField {
                field_name: "name".to_string(),
            }
        );
    }

    #[test]
    fn json_deploy_template_missing_desc() {
        let json = json!({
            "svm_version": 1,
            "code_version": 2,
            "name": "My Template",
        })
        .to_string();

        let err = deploy_template(&json).unwrap_err();
        assert_eq!(
            err,
            JsonError::MissingField {
                field_name: "desc".to_string(),
            }
        );
    }

    #[test]
    fn json_deploy_template_missing_code() {
        let json = json!({
            "svm_version": 1,
            "code_version": 2,
            "name": "My Template",
            "desc": "A few words"
        })
        .to_string();

        let err = deploy_template(&json).unwrap_err();
        assert_eq!(
            err,
            JsonError::MissingField {
                field_name: "code".to_string(),
            }
        );
    }

    #[test]
    fn json_deploy_template_missing_data() {
        let json = json!({
            "svm_version": 1,
            "code_version": 2,
            "name": "My Template",
            "desc": "A few words",
            "code": "C0DE"
        })
        .to_string();

        let err = deploy_template(&json).unwrap_err();
        assert_eq!(
            err,
            JsonError::MissingField {
                field_name: "data".to_string(),
            }
        );
    }

    #[test]
    fn json_deploy_template_missing_ctors() {
        let json = json!({
            "svm_version": 1,
            "code_version": 2,
            "name": "My Template",
            "desc": "A few words",
            "code": "C0DE",
            "data": "0000000100000003",
        })
        .to_string();

        let err = deploy_template(&json).unwrap_err();
        assert_eq!(
            err,
            JsonError::MissingField {
                field_name: "ctors".to_string(),
            }
        );
    }

    // Happy path: a fully-populated request must round-trip — encoding via
    // `deploy_template` and decoding via `template::decode` yields the same
    // template that `TemplateBuilder` produces from equivalent sections.
    // "C0DE" decodes to bytes [0xC0, 0xDE]; "0000000100000003" decodes to
    // the two big-endian u32 lengths [1, 3].
    #[test]
    fn json_deploy_template_valid() {
        let json = json!({
            "svm_version": 1,
            "code_version": 2,
            "name": "My Template",
            "desc": "A few words",
            "code": "C0DE",
            "data": "0000000100000003",
            "ctors": ["init", "start"]
        })
        .to_string();

        let bytes = deploy_template(&json).unwrap();
        let cursor = Cursor::new(&bytes[..]);
        let actual = template::decode(cursor, None).unwrap();

        let code = CodeSection::new_fixed(vec![0xC0, 0xDE], 1);
        let fixed = FixedLayout::from(vec![1, 3]);
        let data = DataSection::with_layout(Layout::Fixed(fixed));
        let ctors = CtorsSection::new(vec!["init".into(), "start".into()]);
        let header = HeaderSection::new(2, "My Template".into(), "A few words".into());

        let expected = TemplateBuilder::default()
            .with_code(code)
            .with_data(data)
            .with_ctors(ctors)
            .with_header(header)
            .build();

        assert_eq!(actual, expected);
    }
}
/// In this example we check the upgrenade package's version fn main() -> Result<(), failure::Error> { match upgrenade::check_crates_io(None, None)? { Some(v) => println!( "There's a newer version {} (currently at {})", v, env!("CARGO_PKG_VERSION") ), None => println!("This is the latest version", env!("CARGO_PKG_VERSION")), } Ok(()) }
#[macro_use] extern crate serde; extern crate serde_json; extern crate postgres; extern crate regex; extern crate toml; extern crate uuid; mod config; mod database; mod request; mod response; mod threading; use std::sync::{Arc, Mutex}; use config::init_config; use config::ConfigStruct; use database::Db; use request::handle_connection; use threading::ThreadPool; use std::net::TcpListener; fn main() { let config: ConfigStruct = init_config(); let listener = TcpListener::bind(config.server.address).unwrap(); let pool = ThreadPool::new(4); // Wrap the DB connection in a motha-flippin MUTEX because threads let conn = Arc::new(Mutex::new(Db::init(&config.postgres.connection))); for stream in listener.incoming() { let stream = stream.unwrap(); let conn = Arc::clone(&conn); pool.execute(move || { match conn.lock() { Ok(conn) => handle_connection(stream, conn), Err(poisoned) => handle_connection(stream, poisoned.into_inner()), }; }); } }
extern crate bindgen;
extern crate cc;

/// Build script: cross-compiles the Atmel START C sources for a SAMD21
/// (Cortex-M0+) into a static `libhal.a`, and generates matching Rust FFI
/// bindings into `src/bindings.rs` with bindgen.
fn main() {
    // Preprocessor defines shared by both the C build and the bindgen run.
    let defines = ["__SAMD21E18A__"];

    // GCC flags for a bare-metal Cortex-M0+ build (thumb, size-optimized,
    // no startup files). Passed only to `cc`, not to bindgen.
    let flags = [
        "-c",
        "-ffunction-sections",
        "-ggdb3",
        "-mcpu=cortex-m0plus",
        "-mlong-calls",
        "-mthumb",
        "-nostartfiles",
        "-Os",
        "-std=gnu99",
        "-Wall",
        "-Wno-cast-function-type",
        "-Wno-unused-parameter",
    ];

    // Include paths shared by the C build and the bindgen run.
    let includes = [
        "atmel-start",
        "atmel-start/CMSIS/Core/Include",
        "atmel-start/config",
        "atmel-start/hal/include",
        "atmel-start/hal/utils/include",
        "atmel-start/hpl/core",
        "atmel-start/hpl/dmac",
        "atmel-start/hpl/gclk",
        "atmel-start/hpl/pm",
        "atmel-start/hpl/port",
        "atmel-start/hpl/sysctrl",
        "atmel-start/hpl/usb",
        "atmel-start/hri",
        "atmel-start/samd21a/include",
        "atmel-start/usb",
        "atmel-start/usb/class/cdc",
        "atmel-start/usb/class/cdc/device",
        "atmel-start/usb/device",
    ];

    /* exclude main.c and driver_examples.c */
    let files = [
        "atmel-start/atmel_start.c",
        "atmel-start/driver_init.c",
        "atmel-start/hal/src/hal_atomic.c",
        "atmel-start/hal/src/hal_delay.c",
        "atmel-start/hal/src/hal_gpio.c",
        "atmel-start/hal/src/hal_init.c",
        "atmel-start/hal/src/hal_io.c",
        "atmel-start/hal/src/hal_sleep.c",
        "atmel-start/hal/src/hal_spi_m_sync.c",
        "atmel-start/hal/src/hal_usb_device.c",
        "atmel-start/hal/utils/src/utils_assert.c",
        "atmel-start/hal/utils/src/utils_event.c",
        "atmel-start/hal/utils/src/utils_list.c",
        "atmel-start/hal/utils/src/utils_syscalls.c",
        "atmel-start/hpl/core/hpl_core_m0plus_base.c",
        "atmel-start/hpl/core/hpl_init.c",
        "atmel-start/hpl/dmac/hpl_dmac.c",
        "atmel-start/hpl/gclk/hpl_gclk.c",
        "atmel-start/hpl/pm/hpl_pm.c",
        "atmel-start/hpl/sercom/hpl_sercom.c",
        "atmel-start/hpl/sysctrl/hpl_sysctrl.c",
        "atmel-start/hpl/usb/hpl_usb.c",
        "atmel-start/samd21a/gcc/gcc/startup_samd21.c",
        "atmel-start/samd21a/gcc/system_samd21.c",
        "atmel-start/usb/class/cdc/device/cdcdf_acm.c",
        "atmel-start/usb/device/usbdc.c",
        "atmel-start/usb_start.c",
        "atmel-start/usb/usb_protocol.c",
        "src/hal.c",
    ];

    // Host defaults don't apply to the cross target: disable PIC and the
    // default flag set, and point cc at the ARM cross toolchain.
    let mut builder = cc::Build::new();
    builder.pic(false);
    builder.no_default_flags(true);
    builder.compiler("arm-none-eabi-gcc");
    builder.archiver("arm-none-eabi-ar"); // adds flags "crs" by default

    /* uncomment below and use link arg "-lhal" to manually link libhal.a */
    // builder.out_dir(".");
    // builder.cargo_metadata(false);

    // Bindgen run mirrors the C build: same sysroot, headers, defines and
    // include paths, with `cty` as the C-type prefix for no_std use.
    let mut bindings = bindgen::Builder::default();
    bindings = bindings.clang_arg("--sysroot=/usr/arm-none-eabi");
    bindings = bindings.header("atmel-start/driver_init.h");
    bindings = bindings.header("atmel-start/usb_start.h");
    bindings = bindings.header("src/hal.h");
    bindings = bindings.ctypes_prefix("cty");
    bindings = bindings.use_core();
    bindings = bindings.trust_clang_mangling(false);

    // add defines
    for define in defines.iter() {
        builder.define(define, None);
        bindings = bindings.clang_arg(format!("-D{}", define));
    }

    // add compiler flags
    for flag in flags.iter() {
        builder.flag(flag);
    }

    // add include paths
    for include in includes.iter() {
        builder.include(include);
        bindings = bindings.clang_arg(format!("-I{}", include));
    }

    // add source files, rebuild if modified
    for file in files.iter() {
        builder.file(file);
        println!("cargo:rerun-if-changed={}", file);
    }

    // compile the atmel-start sources as a static library
    builder.compile("libhal.a");

    // write bindings to file
    bindings
        .generate()
        .expect("Unable to generate bindings")
        .write_to_file("src/bindings.rs")
        .expect("Couldn't write bindings");
}
pub mod analysis;
pub mod args;
pub mod blunder;
pub mod config;
pub mod database;
pub mod engine;
pub mod evaluation;
pub mod game;
pub mod game_info;

use std::fs;
use std::time::Duration;

use analysis::{AnalysisThread, AnalysisThreadHandle};
use anyhow::Result;
use args::Command;
use blunder::Blunder;
use clap::Clap;
use config::NUM_THREADS;
use counter::Counter;
use database::Database;
use game::Game;
use game_info::GameInfo;
use pgnparse::parser::parse_pgn_to_rust_struct;

use crate::args::{Args, ScanOpts};

#[derive(PartialEq, Eq)]
enum Color {
    White,
    Black,
}

impl Color {
    /// Returns whether this color is the side to move at half-move
    /// `move_num`: even indices map to White, odd to Black.
    fn to_play(&self, move_num: usize) -> bool {
        let color = match move_num.rem_euclid(2) {
            0 => Color::White,
            1 => Color::Black,
            _ => panic!("Impossible value"),
        };
        &color == self
    }
}

/// Entry point: loads the blunder database, then dispatches to `scan`
/// (analyze new games from a PGN file) or `show_blunders` (report repeats).
fn main() -> Result<()> {
    let args = Args::parse();
    let mut database = Database::read(&args.database)?;
    match args.command {
        Command::Scan(opts) => scan(&mut database, opts),
        Command::Show => show_blunders(database),
    }
}

/// Reads the PGN file, drops games that are already in the database, and
/// analyzes the remainder.
fn scan(database: &mut Database, opts: ScanOpts) -> Result<()> {
    let pgns_string = fs::read_to_string(&opts.pgn_file)?;
    let mut games: Vec<GameInfo> = split_pgns_into_games(&pgns_string);
    let (seen_games, unseen_games): (Vec<_>, Vec<_>) = games
        .into_iter()
        .partition(|game| database.game_exists(game));
    println!(
        "Skipping {} games that have already been analyzed.",
        seen_games.len()
    );
    games = unseen_games;
    run_scans(database, opts, games)
}

/// Analyzes `games` on up to `NUM_THREADS` worker threads.
///
/// Each loop iteration: spawn at most one new worker if there is capacity,
/// poll every running worker with a short timeout, persist any finished
/// result, then join and drop the finished workers. The loop ends when no
/// workers remain and the game iterator is exhausted.
fn run_scans(database: &mut Database, opts: ScanOpts, games: Vec<GameInfo>) -> Result<()> {
    let mut threads: Vec<AnalysisThreadHandle> = vec![];
    let num_games = games.len();
    let mut games = games.into_iter().enumerate();
    loop {
        // Top up the worker pool one game per iteration.
        if threads.len() < NUM_THREADS {
            if let Some((num, game_info)) = games.next() {
                println!("Analyzing {} / {}", num, num_games);
                threads.push(AnalysisThread::start(game_info, &opts));
            }
        }
        if threads.is_empty() {
            break;
        }
        // Poll each worker; a short timeout keeps the loop responsive
        // without busy-waiting.
        for mut thread in threads.iter_mut() {
            let received_result = thread.receiver.recv_timeout(Duration::from_millis(20));
            if let Ok(game) = received_result {
                handle_finished_analysis(database, game)?;
                thread.finished = true;
            }
        }
        // Join finished workers and keep only the running ones.
        let (finished_threads, running_threads): (Vec<_>, Vec<_>) =
            threads.into_iter().partition(|thread| thread.finished);
        for finished_thread in finished_threads.into_iter() {
            finished_thread.handle.join().unwrap().unwrap();
        }
        threads = running_threads;
    }
    println!("Finished analyzing.");
    Ok(())
}

/// Records a freshly-analyzed game and flushes the database to disk.
fn handle_finished_analysis(database: &mut Database, game: Game) -> Result<()> {
    println!("Finished analyzing {}", game.id);
    database.add_game(game);
    database.write()
}

/// Splits a multi-game PGN dump (games separated by two blank lines) into
/// parsed `GameInfo`s, dropping entries with no moves.
fn split_pgns_into_games(pgns: &str) -> Vec<GameInfo> {
    pgns.split("\n\n\n")
        .map(parse_pgn_to_rust_struct)
        .filter(|game| !game.moves.is_empty())
        .map(|info| GameInfo { info })
        .collect()
}

/// Prints every blunder position that occurs more than once across all
/// recorded games.
fn show_blunders(database: Database) -> Result<()> {
    let all_blunders: Vec<&Blunder> = database
        .games
        .iter()
        .flat_map(|game| game.blunders.iter())
        .collect();
    let counter = all_blunders.iter().collect::<Counter<_>>();
    for (blunder, count) in counter.iter() {
        if *count > 1 {
            println!(
                "In position: {}\n you played {}",
                blunder.position, blunder.move_
            );
        }
    }
    Ok(())
}
use config;
use config::{Config, ConfigError};
use regex::Regex;
use std::borrow::Cow;

/// Top-level application settings, deserialized from a config file.
#[derive(Debug, Deserialize)]
pub struct Settings {
    pub paths: Option<Vec<PathConfig>>,
}

/// A path rule: a regex pattern plus the environment entries that apply
/// when the pattern matches.
#[derive(Debug, Deserialize)]
pub struct PathConfig {
    #[serde(with = "serde_regex")]
    pub pattern: Regex,
    pub env: Vec<String>,
}

impl Settings {
    /// Loads settings from the configuration source named by `config`
    /// (a base name resolved by the `config` crate's file loader).
    pub fn new(config: Cow<str>) -> Result<Self, ConfigError> {
        let mut raw = Config::new();
        let file = config::File::with_name(&config);
        raw.merge(file)?;
        raw.try_into()
    }
}
use eval::Value;
pub use lexer::Op;
pub use lexer::Span;
use lexer::{Lexer, Token};

/// AST node for the expression language.
///
/// Produced by `parse` / `parse_statement` below; consumed by the `eval`
/// module.
#[derive(Clone, Debug)]
pub enum Node {
    Immediate(Value),
    MonOp(Op, Box<Node>),
    BinOp(Op, Box<Node>, Box<Node>),
    Apply(Box<Node>, Vec<Node>),
    Index(Box<Node>, Box<Node>),
    Lambda(Vec<String>, Box<Node>),
    Cond(Box<Node>, Box<Node>, Box<Node>),
    List(Vec<Node>),
    Var(String),
    VarDef(String, Box<Node>),
    FunDef(String, Vec<String>, Box<Node>),
    Range(Option<Box<Node>>, Option<Box<Node>>, Option<Box<Node>>),
}

/// The offending token and its source span, reported on parse failure.
#[derive(Debug)]
pub struct ParseError {
    pub token: Token,
    pub span: Span,
}

/// Fails with a `ParseError` describing the token currently under the
/// lexer cursor. (`raise!` is a crate-local macro for returning the error.)
fn unexpected_token(lexer: &Lexer) -> Result<Node, ParseError> {
    raise!(ParseError {
        token: lexer.peek(),
        span: lexer.span(),
    })
}

/// Steps the lexer back one token, then fails on it. Used after a `next()`
/// consumed a token that turned out to be invalid.
fn unexpected_prev_token(lexer: &mut Lexer) -> Result<Node, ParseError> {
    lexer.prev();
    unexpected_token(lexer)
}

/// Consumes the next token and fails unless it equals `token`.
fn expect_token(lexer: &mut Lexer, token: &Token) -> Result<(), ParseError> {
    if lexer.next() != *token {
        unexpected_prev_token(lexer)?;
    }
    Ok(())
}

/// Parses a comma-separated expression list up to (and including) the
/// `closing` delimiter. A trailing comma before `closing` is accepted.
fn parse_list(lexer: &mut Lexer, closing: &Token) -> Result<Vec<Node>, ParseError> {
    let mut args = vec![];
    while lexer.peek() != *closing {
        args.push(parse_expr(lexer)?);
        if lexer.peek() == Token::Comma {
            lexer.next();
        } else {
            break;
        }
    }
    expect_token(lexer, closing)?;
    Ok(args)
}

/// Parses the atoms of the grammar: literals, identifiers, parenthesized
/// expressions and bracketed list literals.
fn parse_primitive(lexer: &mut Lexer) -> Result<Node, ParseError> {
    let out = match lexer.next() {
        Token::True => Node::Immediate(Value::Boolean(true)),
        Token::False => Node::Immediate(Value::Boolean(false)),
        Token::Ident(name) => Node::Var(name),
        Token::Number(num) => {
            if let Ok(x) = num.parse() {
                Node::Immediate(Value::Number(x))
            } else {
                return unexpected_prev_token(lexer);
            }
        }
        Token::LeftParen => {
            let expr = parse_expr(lexer)?;
            expect_token(lexer, &Token::RightParen)?;
            expr
        }
        Token::LeftBracket => {
            let args = parse_list(lexer, &Token::RightBracket)?;
            Node::List(args)
        }
        _ => return unexpected_prev_token(lexer),
    };
    Ok(out)
}

/// Parses postfix forms: call `f(a, b)` and index `x[i]`, left-associative
/// and repeatable (`f(a)(b)[i]`).
fn parse_apply(lexer: &mut Lexer) -> Result<Node, ParseError> {
    let mut out = parse_primitive(lexer)?;
    loop {
        out = match lexer.next() {
            Token::LeftParen => {
                let args = parse_list(lexer, &Token::RightParen)?;
                Node::Apply(Box::new(out), args)
            }
            Token::LeftBracket => {
                let index = parse_expr(lexer)?;
                expect_token(lexer, &Token::RightBracket)?;
                Node::Index(Box::new(out), Box::new(index))
            }
            _ => {
                // Not a postfix token: push it back and stop.
                lexer.prev();
                break;
            }
        }
    }
    Ok(out)
}

/// Parses prefix unary operators `+`, `-` and `!`, right-associative
/// (`--x` parses as `-(-x)`).
fn parse_monop(lexer: &mut Lexer) -> Result<Node, ParseError> {
    if let Token::Operator(op) = lexer.peek() {
        if op == Op::Add || op == Op::Sub || op == Op::Not {
            lexer.next();
            let arg = parse_monop(lexer)?;
            return Ok(Node::MonOp(op, Box::new(arg)));
        }
    }
    parse_apply(lexer)
}

/// Binding strength for binary operators; higher binds tighter.
/// Non-binary operators return -1 so `parse_binop` never consumes them.
fn op_prec(op: Op) -> i32 {
    match op {
        Op::Or => 1,
        Op::And => 2,
        Op::Lt | Op::Gt | Op::Lte | Op::Gte => 3,
        Op::Eq | Op::Neq => 4,
        Op::Add | Op::Sub => 5,
        Op::Mul | Op::Div => 6,
        _ => -1,
    }
}

/// Precedence-climbing binary-operator parser. Only consumes operators
/// with precedence >= `prec`; recursing with `op_prec(op) + 1` makes every
/// level left-associative.
fn parse_binop(lexer: &mut Lexer, prec: i32) -> Result<Node, ParseError> {
    let mut lhs = parse_monop(lexer)?;
    loop {
        match lexer.peek() {
            Token::Operator(op) if prec <= op_prec(op) => {
                lexer.next();
                let rhs = parse_binop(lexer, op_prec(op) + 1)?;
                lhs = Node::BinOp(op, Box::new(lhs), Box::new(rhs));
            }
            _ => break Ok(lhs),
        }
    }
}

/// Parses the postfix conditional form `value if cond else other`
/// (Python-style); the condition becomes the first child of `Node::Cond`.
fn parse_cond(lexer: &mut Lexer) -> Result<Node, ParseError> {
    let mut lhs = parse_binop(lexer, 0)?;
    loop {
        if lexer.next() != Token::If {
            lexer.prev();
            break Ok(lhs);
        }
        let cond = parse_expr(lexer)?;
        expect_token(lexer, &Token::Else)?;
        let rhs = parse_expr(lexer)?;
        lhs = Node::Cond(Box::new(cond), Box::new(lhs), Box::new(rhs));
    }
}

/// Parses lambda forms by looking ahead four tokens, then rewinding the
/// lexer by exactly the number of tokens each case did not use:
///
/// * `x => body`
/// * `() => body`
/// * `(x) => body`
/// * `(x, y, ...) => body`
///
/// If none match, all four tokens are pushed back and parsing falls
/// through to `parse_cond`.
fn parse_lambda(lexer: &mut Lexer) -> Result<Node, ParseError> {
    let out = match (lexer.next(), lexer.next(), lexer.next(), lexer.next()) {
        // Case 1: x => body  (two lookahead tokens unused)
        (Token::Ident(x), Token::Arrow, _, _) => {
            lexer.prev();
            lexer.prev();
            let args = vec![x];
            let body = parse_lambda(lexer)?;
            Node::Lambda(args, Box::new(body))
        }
        // Case 2: () => body  (one lookahead token unused)
        (Token::LeftParen, Token::RightParen, Token::Arrow, _) => {
            lexer.prev();
            let args = vec![];
            let body = parse_lambda(lexer)?;
            Node::Lambda(args, Box::new(body))
        }
        // Case 3: (x) => body  (all four tokens used)
        (Token::LeftParen, Token::Ident(x), Token::RightParen, Token::Arrow) => {
            let args = vec![x];
            let body = parse_lambda(lexer)?;
            Node::Lambda(args, Box::new(body))
        }
        // Case 4: (x, y, z) => body — keep consuming `, ident` pairs until
        // the closing paren, then require the arrow.
        (Token::LeftParen, Token::Ident(x), Token::Comma, Token::Ident(y)) => {
            let mut args = vec![x, y];
            loop {
                match lexer.next() {
                    Token::Comma => (),
                    Token::RightParen => break,
                    _ => return unexpected_prev_token(lexer),
                };
                match lexer.next() {
                    Token::Ident(x) => args.push(x),
                    _ => return unexpected_prev_token(lexer),
                }
            }
            if lexer.next() != Token::Arrow {
                return unexpected_prev_token(lexer);
            }
            let body = parse_lambda(lexer)?;
            Node::Lambda(args, Box::new(body))
        }
        // Not a lambda: rewind the full 4-token lookahead.
        _ => {
            lexer.prev();
            lexer.prev();
            lexer.prev();
            lexer.prev();
            parse_cond(lexer)?
        }
    };
    Ok(out)
}

/// Entry point for a full expression (lambda is the lowest-precedence
/// production).
fn parse_expr(lexer: &mut Lexer) -> Result<Node, ParseError> {
    parse_lambda(lexer)
}

/// Parses a statement: a bare expression, a variable definition
/// `name = expr` (which becomes `FunDef` when the value is a lambda), or a
/// sugar function definition `f(a, b) = body`.
fn parse_statement(lexer: &mut Lexer) -> Result<Node, ParseError> {
    let lhs = parse_expr(lexer)?;
    Ok(match (lhs, lexer.next()) {
        (out, Token::End) => out,
        (Node::Var(var), Token::Assign) => {
            let body = parse_statement(lexer)?;
            match body {
                // `f = x => ...` is stored as a function definition.
                Node::Lambda(args, body) => Node::FunDef(var, args, body),
                body => Node::VarDef(var, Box::new(body)),
            }
        }
        (Node::Apply(lhs, args), Token::Assign) => {
            // `f(a, b) = body`: the callee must be a plain identifier and
            // every argument a plain variable name.
            let var = match *lhs {
                Node::Var(var) => var,
                _ => {
                    unexpected_prev_token(lexer)?;
                    unreachable!();
                }
            };
            let mut params = vec![];
            for arg in args {
                if let Node::Var(name) = arg {
                    params.push(name);
                } else {
                    return unexpected_prev_token(lexer);
                }
            }
            let body = parse_statement(lexer)?;
            Node::FunDef(var, params, Box::new(body))
        }
        _ => unexpected_prev_token(lexer)?,
    })
}

/// Parses one statement from `lexer` into an AST.
pub fn parse(mut lexer: Lexer) -> Result<Node, ParseError> {
    parse_statement(&mut lexer)
}
use crate::math::reducers::{reducer_for, Reduce};
use crate::math::utils::run_with_function;
use nu_protocol::ast::Call;
use nu_protocol::engine::{Command, EngineState, Stack};
use nu_protocol::{Category, Example, PipelineData, ShellError, Signature, Span, Value};

/// The `math min` command: finds the minimum within a list of numbers or
/// tables.
#[derive(Clone)]
pub struct SubCommand;

impl Command for SubCommand {
    fn name(&self) -> &str {
        "math min"
    }

    fn signature(&self) -> Signature {
        Signature::build("math min").category(Category::Math)
    }

    fn usage(&self) -> &str {
        "Finds the minimum within a list of numbers or tables"
    }

    fn search_terms(&self) -> Vec<&str> {
        vec!["minimum", "smallest"]
    }

    // Delegates to the shared math-command driver, plugging in the
    // `minimum` reducer below.
    fn run(
        &self,
        _engine_state: &EngineState,
        _stack: &mut Stack,
        call: &Call,
        input: PipelineData,
    ) -> Result<nu_protocol::PipelineData, nu_protocol::ShellError> {
        run_with_function(call, input, minimum)
    }

    fn examples(&self) -> Vec<Example> {
        vec![Example {
            description: "Get the minimum of a list of numbers",
            example: "[-50 100 25] | math min",
            result: Some(Value::test_int(-50)),
        }]
    }
}

/// Reduces `values` to their minimum via the shared `Reduce::Minimum`
/// reducer, seeded with `nothing` at span `head`.
pub fn minimum(values: &[Value], head: &Span) -> Result<Value, ShellError> {
    let min_func = reducer_for(Reduce::Minimum);
    min_func(Value::nothing(*head), values.to_vec(), *head)
}

#[cfg(test)]
mod test {
    use super::*;

    // Verifies the `examples()` entries actually evaluate to their stated
    // results.
    #[test]
    fn test_examples() {
        use crate::test_examples;

        test_examples(SubCommand {})
    }
}
extern crate sum;

// Re-export everything from the `sum` crate at this crate's root.
pub use sum::*;

#[cfg(test)]
mod tests {
    // 2015-edition path: `::*` imports the crate root's items (i.e. the
    // re-exported `sum` API) into this test module.
    use ::*;

    #[test]
    fn test_sum() {
        assert_eq!(sum(1, 1), 2);
    }
}
use std::io::{stdin, stdout, Stdout};
use std::sync::mpsc::{Receiver, RecvTimeoutError};
use std::sync::{mpsc, Arc, Mutex};
use std::{fs, thread};
use std::{io, process};

use serde::{Deserialize, Serialize};
use termion::event::Key;
use termion::input::TermRead;
use termion::raw::{IntoRawMode, RawTerminal};
use tui::backend::TermionBackend;
use tui::text::{Span, Spans};
use tui::widgets::ListItem;
use tui::Terminal;

use crate::app::{App, AppStage};
use crate::app_layout::AppLayout;
use crate::todo_item::TodoItem;
use crate::update::{update, CURRENT_APP_VERSION};
use std::path::PathBuf;
use std::time::Duration;

mod app;
mod app_layout;
mod todo_item;
mod update;
mod utils;

// On-disk shape of the todo list, serialized as JSON in `~/.rudo/todos.json`.
#[derive(Debug, Serialize, Deserialize)]
struct Data {
    items: Vec<TodoItem>,
}

// How often app updates if key even is not received.
// Required to maintain proper layout on window size change.
const APP_TICK_MS: u64 = 100;

// Serializes `data` and writes it to the todo file; panics on
// serialization or write failure.
fn dump(data: Data) {
    let (_path_to_file_dir, path_to_file) = get_file_path();
    let content = serde_json::to_string(&data).expect("Json serialization failed");
    fs::write(path_to_file, content).expect("Data cannot be saved");
}

// Events delivered from the key-listener thread to the UI loop.
enum TerminalEvent {
    Input(Key),
}

fn main() -> Result<(), io::Error> {
    // Update application to the latest release. A successful update exits
    // so the new binary can be started; "Update aborted" errors are treated
    // as "no update" and ignored.
    match update() {
        Ok(version) => {
            if version == CURRENT_APP_VERSION {
                println!("Rudo is up to date!");
            } else {
                println!("Successfully updated to version {}", version);
                process::exit(0);
            }
        }
        Err(error) if error.to_string().contains("Update aborted") => {}
        Err(error) => {
            println!("---------------------------------------");
            println!("Error occurred during update. 
Please report it here:");
            println!("https://github.com/GlebIrovich/rudo/issues");
            println!("{}", error);
            println!("---------------------------------------");
            process::exit(1);
        }
    };

    let stdout = stdout().into_raw_mode()?;
    let backend = TermionBackend::new(stdout);
    let mut terminal = Terminal::new(backend)?;

    // Application state
    let mut app = App::new(get_app_data());

    // Clean screen
    terminal.clear().expect("Terminal clean failed");

    let key_events_receiver = spawn_key_event_listener_worker(Arc::clone(&app.stage));

    // Main UI loop: redraw every tick (or sooner when a key arrives), then
    // let `key_down_handler` decide whether to quit.
    loop {
        terminal
            .draw(|frame| {
                // Render the (filtered) todo items as numbered checkbox lines.
                let items: Vec<ListItem> = app
                    .get_filtered_items()
                    .iter()
                    .map(|(index, item)| {
                        let lines = vec![Spans::from(Span::from(format!(
                            "{}. [{}] - {}",
                            index + 1,
                            if item.completed { 'X' } else { ' ' },
                            item.name.clone()
                        )))];
                        ListItem::new(lines)
                    })
                    .collect();

                let mut app_layout = AppLayout::new();
                let frame_size = frame.size();
                let (app_chunks, list_chunks) = app_layout.update_layout_chunks(&app, frame_size);

                app_layout.draw_filter_widget(frame, &app.filter_term, app_chunks[0]);
                app_layout.list_layout.draw_list_widget(
                    frame,
                    items,
                    list_chunks[0],
                    &mut app.list.state,
                );
                app_layout.draw_help_widget(frame, &*app.stage.lock().unwrap(), app_chunks[2]);

                // The item-name input box is only shown while creating or
                // editing an item.
                match &*app.stage.lock().unwrap() {
                    AppStage::CreateItem | AppStage::UpdateItem => {
                        app_layout.list_layout.draw_item_input_widget(
                            frame,
                            &app.item_name_input,
                            list_chunks[1],
                        );
                    }
                    _ => (),
                }
            })
            .expect("Terminal draw failed");

        if let true = key_down_handler(&key_events_receiver, &mut app, &mut terminal) {
            break Result::Ok(());
        };
    }
}

// Spawns a thread that forwards every key press into a channel.
//
// On 'q' in the Default stage the thread sends the key and then breaks out
// of its loop, dropping the sender — the resulting channel disconnect is
// what ultimately makes `key_down_handler` return `true` and quit the app.
// In text-entry stages ('q' is a normal character there) the thread keeps
// running.
fn spawn_key_event_listener_worker(app_stage: Arc<Mutex<AppStage>>) -> Receiver<TerminalEvent> {
    let stdin = stdin();
    let (sender, receiver) = mpsc::channel();
    thread::spawn(move || {
        //detecting keydown events
        for event in stdin.keys() {
            match event.unwrap() {
                Key::Char('q') => match *app_stage.lock().unwrap() {
                    AppStage::CreateItem | AppStage::UpdateItem | AppStage::Filter => {
                        sender.send(TerminalEvent::Input(Key::Char('q'))).unwrap()
                    }
                    _ => {
                        sender.send(TerminalEvent::Input(Key::Char('q'))).unwrap();
                        break;
                    }
                },
                key => sender.send(TerminalEvent::Input(key)).unwrap(),
            }
        }
    });
    receiver
}

// Waits up to APP_TICK_MS for a key event. Returns `true` (quit) only when
// the channel has disconnected (listener thread exited), `false` on timeout
// or after dispatching a key.
//
// NOTE(review): the bool returned by `key_action_mapper` (its quit signal)
// is discarded here — quitting actually happens via the listener thread
// breaking and disconnecting the channel (see above). Confirm this is the
// intended quit path before relying on it.
fn key_down_handler(
    receiver: &Receiver<TerminalEvent>,
    app: &mut App,
    terminal: &mut Terminal<TermionBackend<RawTerminal<Stdout>>>,
) -> bool {
    match receiver.recv_timeout(Duration::from_millis(APP_TICK_MS)) {
        Result::Ok(event) => key_action_mapper(event, app, terminal),
        Err(RecvTimeoutError::Timeout) => {
            return false;
        }
        Err(_) => {
            return true;
        }
    };
    false
}

// Dispatches a single key event according to the current app stage.
// Returns `true` after handling 'q' in the Default stage (clears the
// terminal and persists the list); `false` otherwise.
fn key_action_mapper(
    event: TerminalEvent,
    app: &mut App,
    terminal: &mut Terminal<TermionBackend<RawTerminal<Stdout>>>,
) -> bool {
    match event {
        // Printable characters: meaning depends on the stage.
        TerminalEvent::Input(Key::Char(key)) => match app.get_stage_clone() {
            AppStage::CreateItem => match key {
                '\n' => {
                    app.add_new_item();
                    app.reset_item_name_input();
                    app.set_stage(AppStage::Default);
                }
                key => app.item_input_add_character(key),
            },
            AppStage::UpdateItem => match key {
                '\n' => {
                    app.update_item();
                    app.reset_item_name_input();
                    app.set_stage(AppStage::Default);
                }
                key => app.item_input_add_character(key),
            },
            AppStage::Filter => match key {
                '\n' => {
                    app.set_stage(AppStage::Default);
                }
                key => app.filter_term_add_character(key),
            },
            // Default stage: single-key commands.
            AppStage::Default => match key {
                'n' => app.set_stage(AppStage::CreateItem),
                'f' => app.set_stage(AppStage::Filter),
                'e' => app.set_stage(AppStage::UpdateItem),
                'd' => app.remove_task(),
                ' ' | '\n' => app.toggle_task(),
                's' => app.toggle_sorting(),
                'q' => {
                    // Persist before quitting.
                    terminal.clear().unwrap();
                    dump(Data {
                        items: app.list.items.clone(),
                    });
                    return true;
                }
                _ => (),
            },
        },
        // Non-character keys (backspace, arrows, ...).
        TerminalEvent::Input(special_key) => match app.get_stage_clone() {
            AppStage::CreateItem | AppStage::UpdateItem => {
                if let Key::Backspace = special_key {
                    app.item_input_remove_character()
                }
            }
            AppStage::Filter => {
                if let Key::Backspace = special_key {
                    app.filter_term_remove_character()
                }
            }
            AppStage::Default => match special_key {
                Key::Backspace => app.remove_task(),
                Key::Down => app.list.next(),
                Key::Up => app.list.previous(),
                _ => (),
            },
        },
    };
    false
}

// Loads saved todo items from disk, creating the data directory on first
// run. Any read/parse failure of the file itself yields an empty list,
// but malformed JSON panics.
fn get_app_data() -> Vec<TodoItem> {
    let (path_to_file_dir, path_to_file) = get_file_path();

    match fs::read_dir(&path_to_file_dir) {
        Ok(_) => {}
        Err(_) => fs::create_dir_all(path_to_file_dir).unwrap(),
    }

    match fs::read_to_string(path_to_file) {
        Ok(data) => {
            let data: Data = serde_json::from_str(data.as_str()).expect("Parsing json has failed");
            data.items
        }
        Err(_) => vec![],
    }
}

// Returns `(~/.rudo, ~/.rudo/todos.json)`; panics when no home directory
// can be determined.
fn get_file_path() -> (PathBuf, PathBuf) {
    let mut path_to_file = dirs::home_dir().unwrap();
    path_to_file.push(".rudo");
    let path_to_file_dir = path_to_file.clone();
    path_to_file.push("todos.json");
    (path_to_file_dir, path_to_file)
}
use crate::{
    ast_types::{
        ast_base::AstBase,
        result::ResultExpression,
    },
    utils::Ops,
};
use serde::Serialize;
use std::any::Any;

/* IF STATEMENT */

/// Constructor contract for if-conditional AST nodes.
pub trait IfConditionalBase {
    fn new(conditions: Vec<ResultExpression>, body: Vec<Box<dyn self::AstBase>>) -> Self;
}

/// AST node for an `if` statement: the condition expressions plus the
/// statements executed when they hold.
#[derive(Clone, Debug, Serialize)]
pub struct IfConditional {
    pub conditions: Vec<ResultExpression>,
    pub body: Vec<Box<dyn self::AstBase>>,
}

impl IfConditionalBase for IfConditional {
    fn new(conditions: Vec<ResultExpression>, body: Vec<Box<dyn self::AstBase>>) -> Self {
        Self { conditions, body }
    }
}

impl AstBase for IfConditional {
    /// Identifies this node as an if-conditional in the `Ops` enum.
    fn get_type(&self) -> Ops {
        Ops::IfConditional
    }

    /// Downcast hook: exposes the concrete node behind the `AstBase`
    /// trait object.
    fn as_self(&self) -> &dyn Any {
        self
    }
}
use amethyst::ecs::prelude::{Component, NullStorage};

/// Marker component for entities that exist in the game world.
///
/// NOTE(review): the original doc here ("can be stored inside the player's
/// inventory") was a copy-paste of `TagCarriable`'s and contradicts the
/// name — presumably this tags any in-game entity; confirm at usage sites.
#[derive(Default, Debug)]
pub struct IsIngameEntity;
impl Component for IsIngameEntity {
    type Storage = NullStorage<Self,>;
}

/// This component is meant for entities that can be stored inside the player's inventory.
#[derive(Default, Debug)]
pub struct TagCarriable;
impl Component for TagCarriable {
    type Storage = NullStorage<Self,>;
}

/// This component is meant for entities that are currently stored inside the player's inventory.
#[derive(Default, Debug)]
pub struct TagInInventory;
impl Component for TagInInventory {
    type Storage = NullStorage<Self,>;
}

/// This component is meant for entities that can be bought or sold in the OreShop.
#[derive(Default, Debug)]
pub struct TagOreShopMerch;
impl Component for TagOreShopMerch {
    type Storage = NullStorage<Self,>;
}

/// This component is meant for entities that can be bought or sold in the PartsShop.
#[derive(Default, Debug)]
pub struct TagPartsShopMerch;
impl Component for TagPartsShopMerch {
    type Storage = NullStorage<Self,>;
}

/// This component is meant for ground entities the player cannot see yet.
#[derive(Default, Debug)]
pub struct TagFogOfWar;
impl Component for TagFogOfWar {
    type Storage = NullStorage<Self,>;
}

/// Marker component for item entities.
///
/// NOTE(review): the original doc said "player entities", which looks like
/// another copy-paste mismatch given the name `TagItem` — confirm against
/// the systems that read this tag.
#[derive(Default, Debug)]
pub struct TagItem;
impl Component for TagItem {
    type Storage = NullStorage<Self,>;
}
use super::{DevicesTabs, Pages};
use crate::{
    backend::Backend,
    error::error,
    page::AppRoute,
    pages::{apps::ApplicationContext, devices::DetailsSection},
    utils::url_encode,
};
use drogue_client::registry::v1::Device;
use monaco::{api::*, sys::editor::BuiltinTheme, yew::CodeEditor};
use patternfly_yew::*;
use std::rc::Rc;
use yew::{format::*, prelude::*, services::fetch::*};

/// Properties for the device details page.
#[derive(Clone, Debug, Properties, PartialEq)]
pub struct Props {
    /// Backend used to issue registry API requests.
    pub backend: Backend,
    /// Name of the application the device belongs to.
    pub app: String,
    /// Name of the device being shown.
    pub name: String,
    /// Which details tab (overview / YAML) is currently active.
    pub details: DetailsSection,
}

/// Messages driving the component's state machine.
pub enum Msg {
    // Fetch the device from the registry.
    Load,
    // Re-create the editor model from the currently loaded device.
    Reset,
    // A fetch completed; store the device.
    SetData(Device),
    // Report an error to the user.
    Error(String),
    // Persist the YAML editor contents via a PUT request.
    SaveEditor,
}

/// Details page for a single device: an overview tab plus a YAML editor.
pub struct Details {
    props: Props,
    link: ComponentLink<Self>,
    // In-flight HTTP request, if any; also used to disable the editor buttons.
    fetch_task: Option<FetchTask>,
    // The device as last fetched from the registry.
    content: Option<Device>,
    // Monaco text model backing the YAML editor.
    yaml: Option<TextModel>,
}

impl Component for Details {
    type Message = Msg;
    type Properties = Props;

    fn create(props: Self::Properties, link: ComponentLink<Self>) -> Self {
        // Kick off the initial fetch immediately on mount.
        link.send_message(Msg::Load);
        Self {
            props,
            link,
            content: None,
            yaml: None,
            fetch_task: None,
        }
    }

    fn update(&mut self, msg: Self::Message) -> ShouldRender {
        match msg {
            Msg::Load => match self.load() {
                Ok(task) => self.fetch_task = Some(task),
                Err(err) => error("Failed to load", err),
            },
            Msg::SetData(content) => {
                self.content = Some(content);
                // Refresh the editor model so it reflects the new content.
                self.reset();
                self.fetch_task = None;
            }
            Msg::Reset => {
                self.reset();
            }
            Msg::SaveEditor => {
                // Only attempt a save when an editor model exists.
                if let Some(model) = &self.yaml {
                    let new_content = model.get_value();
                    match self.update_yaml(&new_content) {
                        Ok(task) => self.fetch_task = Some(task),
                        Err(err) => error("Failed to update", err),
                    }
                }
            }
            Msg::Error(msg) => {
                error("Error", msg);
            }
        }
        // Always re-render after handling a message.
        true
    }

    fn change(&mut self, props: Self::Properties) -> ShouldRender {
        // Only re-render when the properties actually changed.
        if self.props != props {
            self.props = props;
            true
        } else {
            false
        }
    }

    fn view(&self) -> Html {
        return html! {
            <>
                <PageSection variant=PageSectionVariant::Light limit_width=true>
                    <Content>
                        <Title>{&self.props.name}</Title>
                    </Content>
                </PageSection>
                {
                    // Show the tabs once the device is loaded; an empty grid otherwise.
                    if let Some(app) = &self.content {
                        self.render_content(app)
                    } else {
                        html!{<PageSection><Grid></Grid></PageSection>}
                    }
                }
            </>
        };
    }
}

impl Details {
    /// Starts a GET request for the device; the response is delivered back
    /// to the component as `Msg::SetData` or `Msg::Error`.
    fn load(&self) -> Result<FetchTask, anyhow::Error> {
        self.props.backend.info.request(
            Method::GET,
            format!(
                "/api/registry/v1alpha1/apps/{}/devices/{}",
                url_encode(&self.props.app),
                url_encode(&self.props.name)
            ),
            Nothing,
            vec![],
            self.link.callback(
                move |response: Response<Json<Result<Device, anyhow::Error>>>| match response
                    .into_body()
                    .0
                {
                    Ok(content) => Msg::SetData(content),
                    Err(err) => Msg::Error(err.to_string()),
                },
            ),
        )
    }

    /// Starts a PUT request replacing the device in the registry.
    /// On success the device is re-loaded (`Msg::Load`).
    fn update(&self, app: Device) -> Result<FetchTask, anyhow::Error> {
        self.props.backend.info.request(
            Method::PUT,
            format!(
                "/api/registry/v1alpha1/apps/{}/devices/{}",
                url_encode(&self.props.app),
                url_encode(&self.props.name)
            ),
            Json(&app),
            vec![("Content-Type", "application/json")],
            self.link
                .callback(move |response: Response<Text>| match response.status() {
                    status if status.is_success() => Msg::Load,
                    status => Msg::Error(format!("Failed to perform update: {}", status)),
                }),
        )
    }

    /// Parses the editor's YAML into a `Device` and submits it via `update`.
    fn update_yaml(&self, yaml: &str) -> Result<FetchTask, anyhow::Error> {
        let app = serde_yaml::from_str(yaml)?;
        log::info!("Updating to: {:#?}", app);
        self.update(app)
    }

    /// Rebuilds the Monaco text model from the currently loaded device,
    /// or clears it when no device is loaded.
    fn reset(&mut self) {
        if let Some(content) = &self.content {
            let yaml = serde_yaml::to_string(content).unwrap_or_default();
            // Strip the leading YAML document marker ("---") and newlines.
            let p: &[_] = &['-', '\n', '\r'];
            let yaml = yaml.trim_start_matches(p);
            self.yaml = TextModel::create(yaml, Some("yaml"), None).ok();
        } else {
            self.yaml = None;
        }
    }

    /// Renders the tab bar plus the currently selected tab's content.
    fn render_content(&self, device: &Device) -> Html {
        let app = device.metadata.application.clone();
        let name = device.metadata.name.clone();
        // Maps between the global route and the local `DetailsSection`, so
        // the tab router can switch tabs by rewriting the route.
        let transformer = SwitchTransformer::new(
            |global| match global {
                AppRoute::Devices(Pages::Details { details, .. }) => Some(details),
                _ => None,
            },
            move |local| {
                AppRoute::Devices(Pages::Details {
                    app: ApplicationContext::Single(app.clone()),
                    name: name.clone(),
                    details: local,
                })
            },
        );
        return html! {
            <>
                <PageSection variant=PageSectionVariant::Light>
                    <DevicesTabs
                        transformer=transformer
                        >
                        <TabRouterItem<DetailsSection> to=DetailsSection::Overview label="Overview"/>
                        <TabRouterItem<DetailsSection> to=DetailsSection::Yaml label="YAML"/>
                    </DevicesTabs>
                </PageSection>
                <PageSection>
                {
                    match self.props.details {
                        DetailsSection::Overview => self.render_overview(device),
                        DetailsSection::Yaml => self.render_editor(),
                    }
                }
                </PageSection>
            </>
        };
    }

    /// Renders the "Overview" tab: application, name and labels of the device.
    fn render_overview(&self, device: &Device) -> Html {
        return html! {
            <Grid gutter=true>
                <GridItem cols=[3]>
                    <Card
                        title={html_nested!{<>{"Details"}</>}}
                        >
                        <DescriptionList>
                            <DescriptionGroup term="Application">
                                {&device.metadata.application}
                            </DescriptionGroup>
                            <DescriptionGroup term="Name">
                                {&device.metadata.name}
                            </DescriptionGroup>
                            <DescriptionGroup term="Labels">
                                // Labels without a value render as a bare key.
                                { for device.metadata.labels.iter().map(|(k,v)| if v.is_empty() {
                                    html!{ <Label label=k.clone()/>}
                                } else {
                                    html!{ <Label label=format!("{}={}", k, v)/>}
                                } ) }
                            </DescriptionGroup>
                        </DescriptionList>
                    </Card>
                </GridItem>
            </Grid>
        };
    }

    /// Renders the "YAML" tab: a Monaco editor plus Save/Reload/Cancel buttons.
    /// Buttons are disabled while a request is in flight.
    fn render_editor(&self) -> Html {
        let options = CodeEditorOptions::default()
            .with_scroll_beyond_last_line(false)
            .with_language("yaml".to_owned())
            .with_builtin_theme(BuiltinTheme::VsDark);
        let options = Rc::new(options);

        return html! {
            <>
                <Stack>
                    <StackItem fill=true>
                        <CodeEditor model=self.yaml.clone() options=options/>
                    </StackItem>
                    <StackItem>
                        <Form>
                            <ActionGroup>
                                <Button disabled=self.fetch_task.is_some() label="Save" variant=Variant::Primary onclick=self.link.callback(|_|Msg::SaveEditor)/>
                                <Button disabled=self.fetch_task.is_some() label="Reload" variant=Variant::Secondary onclick=self.link.callback(|_|Msg::Load)/>
                                <Button disabled=self.fetch_task.is_some() label="Cancel" variant=Variant::Secondary onclick=self.link.callback(|_|Msg::Reset)/>
                            </ActionGroup>
                        </Form>
                    </StackItem>
                </Stack>
            </>
        };
    }
}
mod commands; mod store; mod types; use std::collections::HashMap; use std::sync::{Arc, Mutex, RwLock}; use flux::ast::walk::Node as AstNode; use flux::ast::{self, Expression as AstExpression}; use flux::semantic::nodes::{ ErrorKind as SemanticNodeErrorKind, Package as SemanticPackage, }; use flux::semantic::sub::{Substitutable, Substituter}; use flux::semantic::types::{ BoundTvar, BoundTvarKinds, BuiltinType, CollectionType, MonoType, PolyType, Tvar, }; use flux::semantic::{walk, ErrorKind}; use lspower::{ jsonrpc::Result as RpcResult, lsp, Client, LanguageServer, }; use strum::IntoEnumIterator; use crate::{completion, composition, lang, visitors::semantic}; use self::commands::{ ClientCommandNotification, CompositionInitializeParams, LspClientCommand, LspMessageActionItem, LspServerCommand, TagValueFilterParams, ValueFilterParams, }; use self::types::LspError; const VERSION: &str = env!("CARGO_PKG_VERSION"); type Diagnostic = fn(&SemanticPackage) -> Vec<(Option<String>, lsp::Diagnostic)>; /// Convert a flux::semantic::walk::Node to a lsp::Location /// https://microsoft.github.io/language-server-protocol/specification#location fn node_to_location( node: &flux::semantic::walk::Node, uri: lsp::Url, ) -> lsp::Location { lsp::Location { uri, range: node.loc().clone().into(), } } fn find_references<'a>( uri: &lsp::Url, node: Option<flux::semantic::walk::Node<'a>>, path: Vec<flux::semantic::walk::Node<'a>>, ) -> Vec<lsp::Location> { if let Some(node) = node { let name = match node { walk::Node::Identifier(ident) => &ident.name, walk::Node::IdentifierExpr(ident) => &ident.name, _ => return Vec::new(), }; let scope: walk::Node = match path .iter() .map(|n| match n { walk::Node::FunctionExpr(f) if f.params .iter() .any(|param| &param.key.name == name) => { Some(n) } walk::Node::Package(_) | walk::Node::File(_) => { let mut visitor = semantic::DefinitionFinderVisitor::new( name.clone(), ); walk::walk(&mut visitor, *n); if visitor.node.is_some() { Some(n) } else { None } } _ 
=> None, }) .next() { Some(Some(n)) => n.to_owned(), _ => return Vec::new(), }; let mut visitor = semantic::IdentFinderVisitor::new(name.clone()); walk::walk(&mut visitor, scope); let locations: Vec<lsp::Location> = visitor .identifiers .iter() .map(|node| node_to_location(node, uri.clone())) .collect(); locations } else { Vec::new() } } #[derive(Default)] struct LspServerState { buckets: Vec<String>, compositions: HashMap<lsp::Url, composition::Composition>, } impl LspServerState { // XXX: rockstar (21 Jun 2022) - This `allow` pragma is temporary, until we can add // bucket completion, which is blocked on the completion refactor. #[allow(dead_code)] pub fn buckets(&self) -> &Vec<String> { &self.buckets } pub fn set_buckets(&mut self, buckets: Vec<String>) { self.buckets = buckets; } /// Get a composition from the state /// /// We return a copy here, as the pointer across threads isn't supported. pub fn get_mut_composition( &mut self, uri: &lsp::Url, ) -> Option<&mut composition::Composition> { self.compositions.get_mut(uri) } pub fn set_composition( &mut self, uri: lsp::Url, composition: composition::Composition, ) { self.compositions.insert(uri, composition); } pub fn drop_composition(&mut self, uri: &lsp::Url) { self.compositions.remove(uri); } } pub struct LspServer { client: Arc<Mutex<Option<Client>>>, diagnostics: Vec<Diagnostic>, store: store::Store, state: Mutex<LspServerState>, client_capabilities: RwLock<lsp::ClientCapabilities>, } impl LspServer { pub fn new(client: Option<Client>) -> Self { Self { client: Arc::new(Mutex::new(client)), diagnostics: vec![ super::diagnostics::contrib_lint, super::diagnostics::experimental_lint, super::diagnostics::no_influxdb_identifiers, super::diagnostics::prefer_camel_case, ], store: store::Store::default(), state: Mutex::new(LspServerState::default()), client_capabilities: RwLock::new( lsp::ClientCapabilities::default(), ), } } // Get the client from out of its arc and mutex. 
// Note the lspower::Client has a cheap clone method to make it easy // to pass around many instances of the client. // // We leverage that here so we do not have to keep a lock or // an extra reference to the client. fn get_client(&self) -> Option<Client> { match self.client.lock() { Ok(client) => (*client).clone(), Err(err) => { log::error!("failed to get lock on client: {}", err); None } } } fn get_document(&self, key: &lsp::Url) -> RpcResult<String> { match self.store.get(key) { Ok(contents) => Ok(contents), Err(err) => Err(err.into()), } } /// Publish any diagnostics to the client async fn publish_diagnostics(&self, key: &lsp::Url) { // If we have a client back to the editor report any diagnostics found in the document if let Some(client) = &self.get_client() { for (key, diagnostics) in self.compute_diagnostics(key).into_iter() { client .publish_diagnostics(key, diagnostics, None) .await; } } } /// Compute diagnostics for a package /// /// This function will compute all diagnostics for the same package simultaneously. This /// includes files that don't have any diagnostic messages (an empty list is generated), /// as this is the way the server will signal that previous diagnostic messages have cleared. fn compute_diagnostics( &self, key: &lsp::Url, ) -> HashMap<lsp::Url, Vec<lsp::Diagnostic>> { let mut diagnostic_map: HashMap< lsp::Url, Vec<lsp::Diagnostic>, > = self .store .get_package_urls(key) .into_iter() .map(|url| (url, Vec::new())) .collect(); let diagnostics: Vec<(Option<String>, lsp::Diagnostic)> = match self.store.get_package_errors(key) { None => { // If there are no semantic package errors, we can check for other // diagnostics. // // Note: it is important, if no diagnostics exist, that we return an empty // diagnostic list, as that will signal to the client that the diagnostics // have been cleared. 
if let Ok(package) = self.store.get_semantic_package(key) { self .diagnostics .iter() .flat_map(|func| func(&package)) .collect::<Vec<(Option<String>, lsp::Diagnostic)>>() } else { vec![] } } Some(errors) => { errors .diagnostics .errors .iter() .filter(|error| { // We will never have two files with the same name in a package, so we can // key off filename to determine whether the error exists in this file or // elsewhere in the package. if let Some(file) = &error.location.file { if let Some(segments) = key.path_segments() { if let Some(filename) = segments.last() { return file == filename; } } } false }) .map(|e| { (e.location.file.clone(), lsp::Diagnostic { range: e.location.clone().into(), severity: Some(lsp::DiagnosticSeverity::ERROR), source: Some("flux".to_string()), message: e.error.to_string(), ..lsp::Diagnostic::default() }) }) .collect() } }; diagnostics.into_iter().for_each(|(filename, diagnostic)| { // XXX: rockstar (5 June 2022) - Can this _ever_ be None? Is a blind unwrap safe? if let Some(filename) = filename { diagnostic_map .iter_mut() .filter(|(url, _)| { url.to_string().ends_with(&filename) }) .for_each(|(_, diagnostics)| { diagnostics.push(diagnostic.clone()) }); } }); diagnostic_map } fn complete_member_expression( &self, sem_pkg: &SemanticPackage, member: &ast::MemberExpr, ) -> Option<Vec<lsp::CompletionItem>> { match &member.object { AstExpression::Identifier(identifier) => { // XXX: rockstar (6 Jul 2022) - This is the last holdout from the previous // completion code. There is a bit of indirection/cruft here that can be cleaned // up when recursive support for member expressions is implemented. 
let mut list: Vec<Box<dyn completion::Completable>> = vec![]; if let Some(import) = completion::get_imports(sem_pkg) .iter() .find(|x| x.name == identifier.name) { for package in lang::STDLIB.packages() { if package.path == import.path { completion::walk_package( &package.path, &mut list, &package.exports.typ().expr, ); } } } else { for package in lang::STDLIB.packages() { if package.name == identifier.name { completion::walk_package( &package.path, &mut list, &package.exports.typ().expr, ); } } } let visitor = crate::walk_semantic_package!( completion::CompletableObjectFinderVisitor::new( &identifier.name ), sem_pkg ); let imports = completion::get_imports(sem_pkg); Some( vec![ visitor .completables .iter() .map(|completable| { completable.completion_item(&imports) }) .collect::<Vec<lsp::CompletionItem>>(), list.iter() .map(|completable| { completable.completion_item(&imports) }) .collect(), ] .into_iter() .flatten() .collect(), ) } _ => None, } } } #[lspower::async_trait] impl LanguageServer for LspServer { async fn initialize( &self, params: lsp::InitializeParams, ) -> RpcResult<lsp::InitializeResult> { match self.client_capabilities.write() { Ok(mut client_capabilities) => { *client_capabilities = params.capabilities; } Err(err) => log::error!("{}", err), } Ok(lsp::InitializeResult { capabilities: lsp::ServerCapabilities { code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)), completion_provider: Some(lsp::CompletionOptions { resolve_provider: None, trigger_characters: Some(vec![ ".".to_string(), ":".to_string(), "(".to_string(), ",".to_string(), "\"".to_string(), ]), all_commit_characters: None, work_done_progress_options: lsp::WorkDoneProgressOptions { work_done_progress: None, }, }), definition_provider: Some(lsp::OneOf::Left(true)), document_formatting_provider: Some(lsp::OneOf::Left( true, )), document_highlight_provider: Some(lsp::OneOf::Left( true, )), document_symbol_provider: Some(lsp::OneOf::Left( true, )), 
execute_command_provider: Some(lsp::ExecuteCommandOptions { commands: commands::LspServerCommand::iter().map(|command| command.into()).collect::<Vec<String>>(), work_done_progress_options: lsp::WorkDoneProgressOptions { work_done_progress: None, } }), folding_range_provider: Some( lsp::FoldingRangeProviderCapability::Simple(true), ), hover_provider: Some( lsp::HoverProviderCapability::Simple(true), ), references_provider: Some(lsp::OneOf::Left(true)), rename_provider: Some(lsp::OneOf::Left(true)), semantic_tokens_provider: Some(lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(lsp::SemanticTokensOptions{ work_done_progress_options: lsp::WorkDoneProgressOptions { work_done_progress: None }, legend: lsp::SemanticTokensLegend { token_types: crate::visitors::ast::SemanticToken::LSP_MAPPING.to_owned(), token_modifiers: vec![], }, range: None, full: None, })), signature_help_provider: Some( lsp::SignatureHelpOptions { trigger_characters: Some(vec![ "(".to_string() ]), retrigger_characters: Some(vec![ "(".to_string() ]), work_done_progress_options: lsp::WorkDoneProgressOptions { work_done_progress: None, }, }, ), text_document_sync: Some( lsp::TextDocumentSyncCapability::Options( lsp::TextDocumentSyncOptions { open_close: Some(true), change: Some(lsp::TextDocumentSyncKind::FULL), ..Default::default() } ), ), ..Default::default() }, server_info: Some(lsp::ServerInfo { name: "flux-lsp".to_string(), version: Some(VERSION.into()), }), }) } async fn shutdown(&self) -> RpcResult<()> { // XXX: rockstar (19 May 2022) - This chunk of code will no longer be needed, // when tower-lsp is added again. 
let mut client = match self.client.lock() { Ok(client) => client, Err(err) => { return Err(LspError::InternalError(format!( "{}", err )) .into()) } }; *client = None; Ok(()) } async fn did_open( &self, params: lsp::DidOpenTextDocumentParams, ) -> () { let key = params.text_document.uri; let value = params.text_document.text; self.store.put(&key, &value); self.publish_diagnostics(&key).await; } async fn did_change( &self, params: lsp::DidChangeTextDocumentParams, ) -> () { let key = params.text_document.uri; match self.store.get(&key) { Ok(value) => { // The way the spec reads, if given a list of changes to make, these changes // are made in the order that they are provided, e.g. an straight iteration, // applying each one as given, is the correct process. That means a change later // in the list could overwrite a change made earlier in the list. let new_contents = params .content_changes .iter() .fold(value, |_acc, change| change.text.clone()); self.store.put(&key, &new_contents.clone()); self.publish_diagnostics(&key).await; // let mut composition_position = None; if self.store.get_package_errors(&key).is_none() { let composition_state = match self.state.lock() { Ok(mut state) => { if let Some(composition) = state.get_mut_composition(&key) { match self.store.get_ast_file(&key) { Ok(file) => { let result = composition .resolve_with_ast(file); if result.is_err() { state.drop_composition( &key, ); Err(LspClientCommand::CompositionDropped) } else { Ok(composition.clone()) } } Err(_) => Err(LspClientCommand::CompositionNotFound), } } else { Err(LspClientCommand::CompositionNotFound) } } Err(err) => panic!("{}", err), }; if let Some(client) = self.get_client() { match composition_state { Ok(composition) => { let position = composition .get_stmt_position() .expect("Bad stmt position."); let range_action_item = lsp::MessageActionItem { title: LspMessageActionItem::CompositionRange.to_string(), properties: HashMap::from([("range".to_string(), 
lsp::MessageActionItemProperty::Object( serde_json::to_value(HashMap::from([ ("start", position.start), ("end", position.end) ])).expect("Bad stmt position") ))]) }; let composition_state_action_item = lsp::MessageActionItem { title: LspMessageActionItem::CompositionState.to_string(), properties: HashMap::from([ ("state".to_string(), lsp::MessageActionItemProperty::Object( composition.get_serialized_composition_state().expect("Bad composition state")))]) }; let params = lsp::ShowMessageRequestParams { typ: lsp::MessageType::INFO, message: LspClientCommand::UpdateComposition.to_string(), actions: Some(vec![range_action_item, composition_state_action_item]), }; client.send_custom_notification::<ClientCommandNotification>(params).await; } Err(error_type) => { let params = lsp::ShowMessageRequestParams { typ: lsp::MessageType::INFO, message: error_type .to_string(), actions: None, }; client.send_custom_notification::<ClientCommandNotification>(params).await; } }; } else { log::error!("Failed to acquire client."); }; } } Err(err) => log::error!( "Could not update key: {}\n{:?}", key, err ), } } async fn did_close( &self, params: lsp::DidCloseTextDocumentParams, ) -> () { self.store.remove(&params.text_document.uri); match self.state.lock() { Ok(mut state) => { state.drop_composition(&params.text_document.uri) } Err(err) => panic!("{}", err), } } async fn did_change_configuration( &self, params: lsp::DidChangeConfigurationParams, ) -> () { if let serde_json::value::Value::Object(map) = params.settings { if let Some(settings) = map.get("settings") { if let Some(serde_json::value::Value::Array( buckets, )) = settings.get("buckets") { match self.state.lock() { Ok(mut state) => { state.set_buckets( buckets .iter() .filter(|bucket| { bucket.is_string() }) .map(|bucket| { #[allow(clippy::unwrap_used)] String::from( bucket.as_str().unwrap(), ) }) .collect::<Vec<String>>(), ); } Err(err) => log::error!("{}", err), } } } } } async fn signature_help( &self, params: 
lsp::SignatureHelpParams, ) -> RpcResult<Option<lsp::SignatureHelp>> { let key = params.text_document_position_params.text_document.uri; let pkg = match self.store.get_semantic_package(&key) { Ok(pkg) => pkg, Err(err) => return Err(err.into()), }; let visitor = crate::walk_semantic_package!( semantic::NodeFinderVisitor::new( params.text_document_position_params.position ), pkg ); let signatures: Vec<lsp::SignatureInformation> = if let Some( node, ) = visitor.node { if let walk::Node::CallExpr(call) = node { let callee = call.callee.clone(); if let flux::semantic::nodes::Expression::Member(member) = callee.clone() { let name = member.property.clone(); if let flux::semantic::nodes::Expression::Identifier(ident) = member.object.clone() { match lang::STDLIB.package(&ident.name) { None => return Ok(None), Some(package) => match package.function(&name) { None => return Ok(None), Some(function) => function.signature_information(), } } } else { return Ok(None); } } else if let flux::semantic::nodes::Expression::Identifier(ident) = callee { match lang::UNIVERSE.function(&ident.name) { Some(function) => { function.signature_information() } None => return Ok(None), } } else { log::debug!("signature_help on non-member and non-identifier"); return Ok(None); } } else { log::debug!("signature_help on non-call expression"); return Ok(None); } } else { return Ok(None); }; let response = if signatures.is_empty() { None } else { Some(lsp::SignatureHelp { signatures, active_signature: None, active_parameter: None, }) }; Ok(response) } async fn formatting( &self, params: lsp::DocumentFormattingParams, ) -> RpcResult<Option<Vec<lsp::TextEdit>>> { let key = params.text_document.uri; let contents = self.get_document(&key)?; let mut formatted = match flux::formatter::format(&contents) { Ok(value) => value, Err(err) => { return Err(lspower::jsonrpc::Error { code: lspower::jsonrpc::ErrorCode::InternalError, message: format!( "Error formatting document: {}", err ), data: None, }) } }; if let 
Some(trim_trailing_whitespace) = params.options.trim_trailing_whitespace { if trim_trailing_whitespace { log::info!("textDocument/formatting requested trimming trailing whitespace, but the flux formatter will always trim trailing whitespace"); } } if let Some(insert_final_newline) = params.options.insert_final_newline { if insert_final_newline && formatted.chars().last().unwrap_or(' ') != '\n' { formatted.push('\n'); } } if let Some(trim_final_newlines) = params.options.trim_final_newlines { if trim_final_newlines && formatted.chars().last().unwrap_or(' ') != '\n' { log::info!("textDocument/formatting requested trimming final newlines, but the flux formatter will always trim trailing whitespace"); } } // The new text shows the range of the previously replaced section, // not the range of the new section. let lookup = line_col::LineColLookup::new(contents.as_str()); let end = lookup.get(contents.len()); let edit = lsp::TextEdit::new( lsp::Range { start: lsp::Position { line: 0, character: 0, }, end: lsp::Position { line: (end.0 - 1) as u32, character: (end.1 - 1) as u32, }, }, formatted, ); Ok(Some(vec![edit])) } async fn folding_range( &self, params: lsp::FoldingRangeParams, ) -> RpcResult<Option<Vec<lsp::FoldingRange>>> { let key = params.text_document.uri; let pkg = match self.store.get_semantic_package(&key) { Ok(pkg) => pkg, Err(err) => return Err(err.into()), }; let visitor = crate::walk_semantic_package!( semantic::FoldFinderVisitor::default(), pkg ); let results: Vec<lsp::FoldingRange> = visitor .nodes .into_iter() .map(|node| lsp::FoldingRange { start_line: node.loc().start.line, start_character: Some(node.loc().start.column), end_line: node.loc().end.line, end_character: Some(node.loc().end.column), kind: Some(lsp::FoldingRangeKind::Region), }) .collect(); Ok(if results.is_empty() { None } else { Some(results) }) } async fn document_symbol( &self, params: lsp::DocumentSymbolParams, ) -> RpcResult<Option<lsp::DocumentSymbolResponse>> { let key = 
params.text_document.uri; let pkg = match self.store.get_semantic_package(&key) { Ok(pkg) => pkg, Err(err) => return Err(err.into()), }; let visitor = crate::walk_semantic_package!( semantic::SymbolsVisitor::new(key), pkg ); let mut symbols = visitor.symbols; symbols.sort_by(|a, b| { let a_start = a.location.range.start; let b_start = b.location.range.start; if a_start.line == b_start.line { a_start.character.cmp(&b_start.character) } else { a_start.line.cmp(&b_start.line) } }); let response = if symbols.is_empty() { None } else { Some(lsp::DocumentSymbolResponse::Flat(symbols)) }; Ok(response) } async fn goto_definition( &self, params: lsp::GotoDefinitionParams, ) -> RpcResult<Option<lsp::GotoDefinitionResponse>> { let key = params.text_document_position_params.text_document.uri; let pkg = match self.store.get_semantic_package(&key) { Ok(pkg) => pkg, Err(err) => return Err(err.into()), }; let visitor = crate::walk_semantic_package!( semantic::NodeFinderVisitor::new( params.text_document_position_params.position ), pkg ); if let Some(node) = visitor.node { let node_name = match node { walk::Node::Identifier(ident) => &ident.name, walk::Node::IdentifierExpr(ident) => &ident.name, _ => return Ok(None), }; let definition_visitor = crate::walk_semantic_package!( semantic::DefinitionFinderVisitor::new( node_name.clone() ), pkg ); if let Some(node) = definition_visitor.node { let location = node_to_location(&node, key); return Ok(Some(lsp::GotoDefinitionResponse::from( location, ))); } } Ok(None) } async fn rename( &self, params: lsp::RenameParams, ) -> RpcResult<Option<lsp::WorkspaceEdit>> { let key = params.text_document_position.text_document.uri; let pkg = match self.store.get_semantic_package(&key) { Ok(pkg) => pkg, Err(err) => return Err(err.into()), }; let visitor = crate::walk_semantic_package!( semantic::NodeFinderVisitor::new( params.text_document_position.position ), pkg ); let locations = find_references(&key, visitor.node, visitor.path); let edits = 
locations .iter() .map(|location| lsp::TextEdit { range: location.range, new_text: params.new_name.clone(), }) .collect::<Vec<lsp::TextEdit>>(); Ok(Some(lsp::WorkspaceEdit { changes: Some(HashMap::from([(key, edits)])), document_changes: None, change_annotations: None, })) } async fn document_highlight( &self, params: lsp::DocumentHighlightParams, ) -> RpcResult<Option<Vec<lsp::DocumentHighlight>>> { let key = params.text_document_position_params.text_document.uri; let pkg = match self.store.get_semantic_package(&key) { Ok(pkg) => pkg, Err(err) => return Err(err.into()), }; let visitor = crate::walk_semantic_package!( semantic::NodeFinderVisitor::new( params.text_document_position_params.position ), pkg ); let refs = find_references(&key, visitor.node, visitor.path); Ok(Some( refs.iter() .map(|r| lsp::DocumentHighlight { kind: Some(lsp::DocumentHighlightKind::TEXT), range: r.range, }) .collect(), )) } async fn references( &self, params: lsp::ReferenceParams, ) -> RpcResult<Option<Vec<lsp::Location>>> { let key = params.text_document_position.text_document.uri; let pkg = match self.store.get_semantic_package(&key) { Ok(pkg) => pkg, Err(err) => return Err(err.into()), }; let visitor = crate::walk_semantic_package!( semantic::NodeFinderVisitor::new( params.text_document_position.position ), pkg ); let references = find_references(&key, visitor.node, visitor.path); Ok(if references.is_empty() { None } else { Some(references) }) } async fn hover( &self, params: lsp::HoverParams, ) -> RpcResult<Option<lsp::Hover>> { let key = params.text_document_position_params.text_document.uri; let pkg = match self.store.get_semantic_package(&key) { Ok(pkg) => pkg, Err(err) => return Err(err.into()), }; let visitor = crate::walk_semantic_package!( semantic::NodeFinderVisitor::new( params.text_document_position_params.position ), pkg ); if let Some(node) = visitor.node { let path = &visitor.path; let hover_type = node .type_of() .map(|t| include_constraints(path, t).to_string()) 
.or_else(|| match node { walk::Node::Identifier(ident) => { // We hovered over an identifier without an attached type, try to figure // it out from its context let parent = path.get(path.len() - 2)?; match parent { // The type of assigned variables is the type of the right hand side walk::Node::VariableAssgn(var) => { Some(var.init.type_of().to_string()) } walk::Node::MemberAssgn(var) => { Some(var.init.type_of().to_string()) } walk::Node::BuiltinStmt(builtin) => { Some(builtin.typ_expr.to_string()) } // The type of an property identifier is the type of the value walk::Node::Property(property) => Some( property.value.type_of().to_string(), ), // The type Function parameters can be derived from the function type // stored in the function expression walk::Node::FunctionParameter(_) => { let func = path.get(path.len() - 3)?; match func { walk::Node::FunctionExpr( func, ) => func .typ .parameter( ident.name.as_str(), ) .map(|t| t.to_string()), _ => None, } } _ => None, } } _ => None, }); if let Some(typ) = hover_type { let supports_markdown = match self .client_capabilities .read() { Ok(client_capabilities) => { if let Some(text_document) = (client_capabilities) .text_document .as_ref() { if let Some(hover) = text_document.hover.as_ref() { hover.content_format.as_ref().map_or(false, |formats| formats.contains(&lsp::MarkupKind::Markdown)) } else { false } } else { false } } Err(err) => { log::error!("{}", err); false } }; let hover_contents: lsp::HoverContents = match supports_markdown { true => lsp::HoverContents::Markup( lsp::MarkupContent { kind: lsp::MarkupKind::Markdown, value: format!( "```flux\n{}\n```", typ ), }, ), false => lsp::HoverContents::Scalar( lsp::MarkedString::String(typ), ), }; return Ok(Some(lsp::Hover { contents: hover_contents, range: None, })); } } Ok(None) } async fn completion( &self, params: lsp::CompletionParams, ) -> RpcResult<Option<lsp::CompletionResponse>> { // This is the rules for matching whether a string should be part of // the 
completion matching. let fuzzy_match = |haystack: &str, needle: &str| -> bool { return haystack .to_lowercase() .contains(needle.to_lowercase().as_str()); }; let ast_pkg = match self.store.get_ast_package( &params.text_document_position.text_document.uri, ) { Ok(pkg) => pkg, Err(err) => return Err(err.into()), }; let sem_pkg = match self.store.get_semantic_package( &params.text_document_position.text_document.uri, ) { Ok(pkg) => pkg, Err(err) => { return Err(err.into()); } }; let visitor = crate::walk_ast_package!( crate::visitors::ast::NodeFinderVisitor::new( params.text_document_position.position ), ast_pkg ); let items = match visitor.node { Some(walk_node) => match walk_node.node { AstNode::CallExpr(call) => { completion::complete_call_expr( &params, &sem_pkg, call, ) } AstNode::Identifier(identifier) => { match walk_node .parent .as_ref() .map(|node| &node.node) { // The identifier is a member property so do member completion Some(AstNode::MemberExpr(member)) if member .property .base() .location .start == identifier.base.location.start => { match self.complete_member_expression( &sem_pkg, member, ) { Some(items) => items, None => return Ok(None), } } _ => { // XXX: rockstar (6 Jul 2022) - This is helping to complete packages that // have never been imported. That's probably not a great pattern. 
let stdlib_completions: Vec< lsp::CompletionItem, > = lang::STDLIB .fuzzy_matches(&identifier.name) .map(|package| { lsp::CompletionItem { label: package.path.clone(), detail: Some("Package".into()), documentation: Some( lsp::Documentation::String( package.path.clone(), ), ), filter_text: Some( package.name.clone(), ), insert_text: Some( package.path.clone(), ), insert_text_format: Some( lsp::InsertTextFormat::PLAIN_TEXT, ), kind: Some( lsp::CompletionItemKind::MODULE, ), sort_text: Some(package.path), ..lsp::CompletionItem::default() } }) .collect(); let builtin_completions: Vec< lsp::CompletionItem, > = lang::UNIVERSE.exports.iter().filter(|(key, val)| { // Don't allow users to "discover" private-ish functionality. // Filter out irrelevent items that won't match. // Only pass expressions that have completion support. !key.starts_with('_') && fuzzy_match(key, &identifier.name) && match &val.expr { MonoType::Fun(_) | MonoType::Builtin(_) => true, MonoType::Collection(collection) => collection.collection == CollectionType::Array, _ => false } }).map(|(key, val)| { match &val.expr { MonoType::Fun(function) => { lsp::CompletionItem { label: key.to_string(), detail: Some(completion::create_function_signature(function)), filter_text: Some(key.to_string()), insert_text_format: Some(lsp::InsertTextFormat::SNIPPET), kind: Some(lsp::CompletionItemKind::FUNCTION), sort_text: Some(key.to_string()), ..lsp::CompletionItem::default() } } MonoType::Builtin(builtin) => { lsp::CompletionItem { label: format!("{} ({})", key, "prelude"), detail: Some(match *builtin { BuiltinType::String => "String".into(), BuiltinType::Int => "Integer".into(), BuiltinType::Float => "Float".into(), BuiltinType::Bool => "Boolean".into(), BuiltinType::Bytes => "Bytes".into(), BuiltinType::Duration => "Duration".into(), BuiltinType::Uint => "Uint".into(), BuiltinType::Regexp => "Regular Expression".into(), BuiltinType::Time => "Time".into(), }), documentation: Some(lsp::Documentation::String("from 
prelude".into())), filter_text: Some(key.to_string()), insert_text: Some(key.to_string()), insert_text_format: Some( lsp::InsertTextFormat::PLAIN_TEXT ), kind: Some(lsp::CompletionItemKind::VARIABLE), sort_text: Some(format!("{} prelude", key)), ..lsp::CompletionItem::default() } } _ => unreachable!("Previous filter on expression value failed. Got: {}", val.expr) } }).collect(); vec![ stdlib_completions, builtin_completions, ] .into_iter() .flatten() .collect() } } } AstNode::MemberExpr(member) => { match self .complete_member_expression(&sem_pkg, member) { Some(items) => items, None => return Ok(None), } } AstNode::ObjectExpr(_) => { let parent = walk_node .parent .as_ref() .map(|parent| &parent.node); match parent { Some(AstNode::CallExpr(call)) => { completion::complete_call_expr( &params, &sem_pkg, call, ) } Some(_) | None => return Ok(None), } } AstNode::StringLit(_) => { let parent = walk_node .parent .as_ref() .map(|parent| &parent.node); match parent { Some(AstNode::ImportDeclaration(_)) => { let imports = completion::get_imports(&sem_pkg); lang::STDLIB.packages().filter(|package| { !&imports.iter().any(|x| x.path == package.path) }).map(|package| { let trigger = if let Some(context) = & params.context { context.trigger_character.as_deref() } else { None }; let insert_text = if trigger == Some("\"") { package.path.as_str().to_string() } else { format!(r#""{}""#, package.path.as_str()) }; lsp::CompletionItem { label: insert_text.clone(), insert_text: Some(insert_text), insert_text_format: Some(lsp::InsertTextFormat::SNIPPET), kind: Some(lsp::CompletionItemKind::VALUE), ..lsp::CompletionItem::default() } }).collect() } // This is where bucket/measurement/field/tag completion will occur. 
Some(_) | None => return Ok(None), } } _ => return Ok(None), }, None => return Ok(None), }; if items.is_empty() { Ok(None) } else { Ok(Some(lsp::CompletionResponse::List( lsp::CompletionList { // XXX: rockstar (5 Jul 2022) - This should probably always be incomplete, so // we don't leave off to the client to try and figure out what completions to use. is_incomplete: false, items, }, ))) } } async fn semantic_tokens_full( &self, params: lsp::SemanticTokensParams, ) -> RpcResult<Option<lsp::SemanticTokensResult>> { let pkg = match self .store .get_ast_package(&params.text_document.uri) { Ok(pkg) => pkg, Err(err) => return Err(err.into()), }; let visitor = crate::walk_ast_package!( crate::visitors::ast::SemanticTokenVisitor::default(), pkg ); Ok(Some(lsp::SemanticTokensResult::Tokens( lsp::SemanticTokens { result_id: None, data: visitor.tokens, }, ))) } async fn code_action( &self, params: lsp::CodeActionParams, ) -> RpcResult<Option<lsp::CodeActionResponse>> { // Our code actions should all be connected with a diagnostic. The // client user experience can vary when not directly connected to // a diagnostic, which is sorta the client's fault, but we also // don't have a need for trying to support any other flows. 
if params.context.diagnostics.is_empty() { return Ok(None); } let errors = match self .store .get_package_errors(&params.text_document.uri) { Some(errors) => errors, None => return Ok(None), }; let relevant: Vec<&flux::semantic::Error> = errors .diagnostics .errors .iter() .filter(|error| { crate::lsp::ranges_overlap( &params.range, &error.location.clone().into(), ) }) .collect(); if relevant.is_empty() { return Ok(None); } let pkg = match self .store .get_semantic_package(&params.text_document.uri) { Ok(pkg) => pkg, Err(err) => unreachable!("{:?}", err), }; let visitor = crate::walk_semantic_package!( semantic::PackageNodeFinderVisitor::default(), pkg ); let import_position = match visitor.location { Some(location) => lsp::Position { line: location.start.line + 1, character: 0, }, None => lsp::Position::default(), }; let actions: Vec<lsp::CodeActionOrCommand> = relevant.iter().map(|error| { if let ErrorKind::Inference(kind) = &error.error { match kind { SemanticNodeErrorKind::UndefinedIdentifier(identifier) => { // When encountering undefined identifiers, check to see if they match any corresponding // packages available for import. 
let potential_imports: Vec<lang::Package> = lang::STDLIB.fuzzy_matches(identifier).collect(); if potential_imports.is_empty() { return None; } let inner_actions: Vec<lsp::CodeActionOrCommand> = potential_imports.iter().map(|package| { lsp::CodeAction { title: format!("Import `{}`", package.path), kind: Some(lsp::CodeActionKind::QUICKFIX), diagnostics: None, edit: Some(lsp::WorkspaceEdit { changes: Some(HashMap::from([ (params.text_document.uri.clone(), vec![ lsp::TextEdit { range: lsp::Range { start: import_position, end: import_position, }, new_text: format!("import \"{}\"\n", package.path), } ]) ])), document_changes: None, change_annotations: None, }), command: None, is_preferred: Some(true), disabled: None, data: None, }.into() }).collect(); return Some(inner_actions); }, _ => return None, } } None }).filter(|action| action.is_some()).flat_map(|action| { action.expect("Previous .filter() call failed.") }).collect(); return Ok(Some(actions)); } async fn execute_command( &self, params: lsp::ExecuteCommandParams, ) -> RpcResult<Option<serde_json::Value>> { if params.arguments.len() > 1 || (params.arguments.len() == 1 && !params.arguments[0].is_object()) { // We want, at most, a single argument, which is an object itself. This means that // positional arguments are not supported. We only want kwargs. Some commands will // take no arguments. 
return Err( LspError::InvalidArguments(params.arguments).into() ); } match LspServerCommand::try_from(params.command.clone()) { Ok(LspServerCommand::CompositionInitialize) => { let command_params: CompositionInitializeParams = match serde_json::value::from_value( params.arguments[0].clone(), ) { Ok(value) => value, Err(err) => { return Err(LspError::InternalError( format!("{:?}", err), ) .into()) } }; let file = self.store.get_ast_file( &command_params.text_document.uri, )?; let mut composition = composition::Composition::new( file.clone(), command_params.bucket, command_params.measurement, command_params.fields.unwrap_or_default(), command_params.tag_values.unwrap_or_default(), ); if composition.exists_in(&file) && composition.resolve_with_ast(file).is_ok() { let position = composition .get_stmt_position() .expect("Bad stmt position."); let range_action_item = lsp::MessageActionItem { title: LspMessageActionItem::CompositionRange .to_string(), properties: HashMap::from([( "range".to_string(), lsp::MessageActionItemProperty::Object( serde_json::to_value(HashMap::from( [ ("start", position.start), ("end", position.end), ], )) .expect("Bad stmt position"), ), )]), }; let params = lsp::ShowMessageRequestParams { typ: lsp::MessageType::INFO, message: LspClientCommand::AlreadyExists .to_string(), actions: Some(vec![range_action_item]), }; if let Some(client) = self.get_client() { client.send_custom_notification::<ClientCommandNotification>(params).await; }; } else { let edit = lsp::WorkspaceEdit { changes: Some(HashMap::from([( command_params.text_document.uri.clone(), vec![lsp::TextEdit { new_text: composition.to_string(), range: { let file = self.store.get_ast_file( &command_params .text_document .uri, )?; file.base.location.into() }, }], )])), document_changes: None, change_annotations: None, }; if let Some(client) = self.get_client() { let edit_applied = client.apply_edit(edit, None).await; if edit_applied.is_err() { let params = lsp::ShowMessageRequestParams { typ: 
lsp::MessageType::ERROR, message: LspClientCommand::ExecuteCommandFailed.to_string(), actions: None, }; client.send_custom_notification::<ClientCommandNotification>(params).await; } }; } match self.state.lock() { Ok(mut state) => state.set_composition( command_params.text_document.uri, composition, ), Err(err) => panic!("{}", err), } Ok(None) } Ok(LspServerCommand::SetMeasurementFilter) => { let command_params: ValueFilterParams = match serde_json::value::from_value( params.arguments[0].clone(), ) { Ok(value) => value, Err(err) => { return Err(LspError::InternalError( format!("{:?}", err), ) .into()) } }; let composition_text = match self.state.lock() { Ok(mut state) => match state.get_mut_composition( &command_params.text_document.uri, ) { Some(composition) => { if composition .set_measurement(command_params.value) .is_err() { return Err(LspError::InternalError( "Failed to set measurement to composition." .to_string(), ) .into()); } composition.to_string() } None => { return Err( LspError::CompositionNotFound( command_params.text_document.uri, ) .into(), ) } }, Err(err) => panic!("{}", err), }; let edit = lsp::WorkspaceEdit { changes: Some(HashMap::from([( command_params.text_document.uri.clone(), vec![lsp::TextEdit { new_text: composition_text .trim_end() .to_owned(), range: { let file = self.store.get_ast_file( &command_params.text_document.uri, )?; file.base.location.into() }, }], )])), document_changes: None, change_annotations: None, }; if let Some(client) = self.get_client() { let edit_applied = client.apply_edit(edit, None).await; if edit_applied.is_err() { let params = lsp::ShowMessageRequestParams { typ: lsp::MessageType::ERROR, message: LspClientCommand::ExecuteCommandFailed .to_string(), actions: None, }; client.send_custom_notification::<ClientCommandNotification>(params).await; } }; Ok(None) } Ok(LspServerCommand::AddFieldFilter) => { let command_params: ValueFilterParams = match serde_json::value::from_value( params.arguments[0].clone(), ) { 
Ok(value) => value, Err(err) => { return Err(LspError::InternalError( format!("{:?}", err), ) .into()) } }; let composition_text = match self.state.lock() { Ok(mut state) => match state.get_mut_composition( &command_params.text_document.uri, ) { Some(composition) => { if composition .add_field(command_params.value) .is_err() { return Err(LspError::InternalError( "Failed to add field to composition." .to_string(), ) .into()); } composition.to_string().clone() } None => { return Err( LspError::CompositionNotFound( command_params.text_document.uri, ) .into(), ) } }, Err(err) => panic!("{}", err), }; let edit = lsp::WorkspaceEdit { changes: Some(HashMap::from([( command_params.text_document.uri.clone(), vec![lsp::TextEdit { new_text: composition_text .trim_end() .to_owned(), range: { let file = self.store.get_ast_file( &command_params.text_document.uri, )?; file.base.location.into() }, }], )])), document_changes: None, change_annotations: None, }; if let Some(client) = self.get_client() { let edit_applied = client.apply_edit(edit, None).await; if edit_applied.is_err() { let params = lsp::ShowMessageRequestParams { typ: lsp::MessageType::ERROR, message: LspClientCommand::ExecuteCommandFailed .to_string(), actions: None, }; client.send_custom_notification::<ClientCommandNotification>(params).await; } }; Ok(None) } Ok(LspServerCommand::RemoveFieldFilter) => { let command_params: ValueFilterParams = match serde_json::value::from_value( params.arguments[0].clone(), ) { Ok(value) => value, Err(err) => { return Err(LspError::InternalError( format!("{:?}", err), ) .into()) } }; let composition_text = match self.state.lock() { Ok(mut state) => match state.get_mut_composition( &command_params.text_document.uri, ) { Some(composition) => { if composition .remove_field(command_params.value) .is_err() { return Err(LspError::InternalError( "Failed to remove field from composition." 
.to_string(), ) .into()); } composition.to_string().clone() } None => { return Err( LspError::CompositionNotFound( command_params.text_document.uri, ) .into(), ) } }, Err(err) => panic!("{}", err), }; let edit = lsp::WorkspaceEdit { changes: Some(HashMap::from([( command_params.text_document.uri.clone(), vec![lsp::TextEdit { new_text: composition_text .trim_end() .to_owned(), range: { let file = self.store.get_ast_file( &command_params.text_document.uri, )?; file.base.location.into() }, }], )])), document_changes: None, change_annotations: None, }; if let Some(client) = self.get_client() { let edit_applied = client.apply_edit(edit, None).await; if edit_applied.is_err() { let params = lsp::ShowMessageRequestParams { typ: lsp::MessageType::ERROR, message: LspClientCommand::ExecuteCommandFailed .to_string(), actions: None, }; client.send_custom_notification::<ClientCommandNotification>(params).await; } }; Ok(None) } Ok(LspServerCommand::AddTagValueFilter) => { let command_params: TagValueFilterParams = match serde_json::value::from_value( params.arguments[0].clone(), ) { Ok(value) => value, Err(err) => { return Err(LspError::InternalError( format!("{:?}", err), ) .into()) } }; let composition_text = match self.state.lock() { Ok(mut state) => match state.get_mut_composition( &command_params.text_document.uri, ) { Some(composition) => { if composition .add_tag_value( command_params.tag, command_params.value, ) .is_err() { return Err(LspError::InternalError( "Failed to add tagValue to composition." 
.to_string(), ) .into()); } composition.to_string().clone() } None => { return Err( LspError::CompositionNotFound( command_params.text_document.uri, ) .into(), ) } }, Err(err) => panic!("{}", err), }; let edit = lsp::WorkspaceEdit { changes: Some(HashMap::from([( command_params.text_document.uri.clone(), vec![lsp::TextEdit { new_text: composition_text .trim_end() .to_owned(), range: { let file = self.store.get_ast_file( &command_params.text_document.uri, )?; file.base.location.into() }, }], )])), document_changes: None, change_annotations: None, }; if let Some(client) = self.get_client() { let edit_applied = client.apply_edit(edit, None).await; if edit_applied.is_err() { let params = lsp::ShowMessageRequestParams { typ: lsp::MessageType::ERROR, message: LspClientCommand::ExecuteCommandFailed .to_string(), actions: None, }; client.send_custom_notification::<ClientCommandNotification>(params).await; } }; Ok(None) } Ok(LspServerCommand::RemoveTagValueFilter) => { let command_params: TagValueFilterParams = match serde_json::value::from_value( params.arguments[0].clone(), ) { Ok(value) => value, Err(err) => { return Err(LspError::InternalError( format!("{:?}", err), ) .into()) } }; let composition_text = match self.state.lock() { Ok(mut state) => match state.get_mut_composition( &command_params.text_document.uri, ) { Some(composition) => { if composition .remove_tag_value( command_params.tag, command_params.value, ) .is_err() { return Err(LspError::InternalError( "Failed to remove tagValue from composition." 
.to_string(), ) .into()); } composition.to_string().clone() } None => { return Err( LspError::CompositionNotFound( command_params.text_document.uri, ) .into(), ) } }, Err(err) => panic!("{}", err), }; let edit = lsp::WorkspaceEdit { changes: Some(HashMap::from([( command_params.text_document.uri.clone(), vec![lsp::TextEdit { new_text: composition_text .trim_end() .to_owned(), range: { let file = self.store.get_ast_file( &command_params.text_document.uri, )?; file.base.location.into() }, }], )])), document_changes: None, change_annotations: None, }; if let Some(client) = self.get_client() { let edit_applied = client.apply_edit(edit, None).await; if edit_applied.is_err() { let params = lsp::ShowMessageRequestParams { typ: lsp::MessageType::ERROR, message: LspClientCommand::ExecuteCommandFailed .to_string(), actions: None, }; client.send_custom_notification::<ClientCommandNotification>(params).await; } }; Ok(None) } Ok(LspServerCommand::GetFunctionList) => Ok(Some( lang::UNIVERSE .functions() .iter() .map(|function| function.name.clone()) .collect(), )), Err(_err) => { return Err( LspError::InvalidCommand(params.command).into() ) } } } } // `MonoType`'s extracted from a `Node` in a semantic graph do not contain the constraints directly // on them however we can locate the parent variable assignment to the type (`t`) and figure out // which constraints apply. 
fn include_constraints( path: &[walk::Node<'_>], t: MonoType, ) -> PolyType { // Get all constraints that may apply to `t` let all_constraints = path.iter().rev().find_map(|parent| match parent { walk::Node::VariableAssgn(assgn) => { Some(assgn.poly_type_of().cons) } _ => None, }); let mut constraints = BoundTvarKinds::default(); if let Some(all_constraints) = all_constraints { // Pick out the constraints that apply to `t` t.visit(&mut VisitBoundVars(|var| { if let Some(c) = all_constraints.get(&var) { constraints.entry(var).or_insert_with(|| c.clone()); } })); } PolyType { vars: Vec::new(), cons: constraints, expr: t, } } struct VisitBoundVars<F>(F); impl<F> Substituter for VisitBoundVars<F> where F: FnMut(BoundTvar), { fn try_apply(&mut self, _var: Tvar) -> Option<MonoType> { None } fn try_apply_bound( &mut self, var: BoundTvar, ) -> Option<MonoType> { (self.0)(var); None } } // Url::to_file_path doesn't exist in wasm-unknown-unknown, for kinda // obvious reasons. Ignore these tests when executing against that target. #[cfg(all(test, not(target_arch = "wasm32")))] mod tests;
//! Internal helper macros for implementing common functionality.
//!
//! NOTE(review): every macro below is expanded inside an `impl` block of one
//! of the crate's marked-pointer wrapper types; they assume `inner`,
//! `_marker`, the generics `T`/`N`, `PhantomData` and the
//! `MarkedPtr`/`MarkedNonNull` types are already in scope at the expansion
//! site — confirm against the callers.

// Implements `Default::default` by delegating to the type's `null()`
// constructor.
macro_rules! default_null {
    () => {
        #[inline]
        fn default() -> Self {
            Self::null()
        }
    };
}

// Unsafe constructor from a raw `MarkedPtr` for non-nullable wrappers:
// goes through `MarkedNonNull::new_unchecked`, so the caller must
// guarantee the pointer is non-null.
macro_rules! impl_from_ptr {
    () => {
        #[inline]
        pub unsafe fn from_marked_ptr(ptr: MarkedPtr<T, N>) -> Self {
            Self { inner: MarkedNonNull::new_unchecked(ptr), _marker: PhantomData }
        }
    };
}

// Same constructor for nullable wrappers, which store the `MarkedPtr`
// directly and therefore need no non-null conversion.
macro_rules! impl_from_ptr_for_nullable {
    () => {
        #[inline]
        pub unsafe fn from_marked_ptr(ptr: MarkedPtr<T, N>) -> Self {
            Self { inner: ptr, _marker: PhantomData }
        }
    };
}

// Unsafe constructor from a `MarkedNonNull`; `.into()` converts it to
// whatever pointer representation the wrapper stores internally.
macro_rules! impl_from_non_null {
    () => {
        #[inline]
        pub unsafe fn from_marked_non_null(ptr: MarkedNonNull<T, N>) -> Self {
            Self { inner: ptr.into(), _marker: PhantomData }
        }
    };
}

// Tag-manipulation methods shared by all wrapper types; each one forwards
// to the equivalent method on `inner` and re-wraps the result.
macro_rules! impl_common {
    () => {
        /// Returns the internal marked pointer representation.
        #[inline]
        pub fn into_marked_ptr(self) -> MarkedPtr<T, N> {
            self.inner.into()
        }

        /// Clears the tag value.
        #[inline]
        pub fn clear_tag(self) -> Self {
            Self { inner: self.inner.clear_tag(), _marker: PhantomData }
        }

        /// Splits the tag value and returns both the cleared pointer and the
        /// separated tag value.
        #[inline]
        pub fn split_tag(self) -> (Self, usize) {
            let (inner, tag) = self.inner.split_tag();
            (Self { inner, _marker: PhantomData }, tag)
        }

        /// Sets the tag value to `tag`, overwriting any previous value.
        #[inline]
        pub fn set_tag(self, tag: usize) -> Self {
            Self { inner: self.inner.set_tag(tag), _marker: PhantomData }
        }

        /// Updates the tag value to the result of `func`, which is called with
        /// the current tag value.
        #[inline]
        pub fn update_tag(self, func: impl FnOnce(usize) -> usize) -> Self {
            Self { inner: self.inner.update_tag(func), _marker: PhantomData }
        }

        /// Decomposes the marked pointer, returning only the separated tag
        /// value.
        #[inline]
        pub fn decompose_tag(self) -> usize {
            self.inner.decompose_tag()
        }
    };
}

// `fmt::Debug` body that prints the decomposed pointer and tag as a
// struct named after the wrapper type.
macro_rules! impl_fmt_debug {
    ($ty_name:ident) => {
        #[inline]
        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            let (ptr, tag) = self.inner.decompose();
            f.debug_struct(stringify!($ty_name)).field("ptr", &ptr).field("tag", &tag).finish()
        }
    };
}

// `fmt::Pointer` body that forwards to the inner pointer's implementation.
macro_rules! impl_fmt_pointer {
    () => {
        #[inline]
        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            fmt::Pointer::fmt(&self.inner, f)
        }
    };
}
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use uvll; use raw::{mod, Request, Allocated, Stream}; use UvResult; pub struct Write { handle: *mut uvll::uv_write_t, } impl Write { pub fn send<T, U>(&mut self, handle: &mut U, buf: &[u8], cb: uvll::uv_write_cb) -> UvResult<()> where T: Allocated, U: Stream<T> { unsafe { let buf = raw::slice_to_uv_buf(buf); try!(call!(uvll::uv_write(self.handle, handle.raw() as *mut _, &buf, 1, cb))); Ok(()) } } } impl Allocated for uvll::uv_write_t { fn size(_self: Option<uvll::uv_write_t>) -> uint { unsafe { uvll::uv_req_size(uvll::UV_WRITE) as uint } } } impl Request<uvll::uv_write_t> for Write { fn raw(&self) -> *mut uvll::uv_write_t { self.handle } fn from_raw(t: *mut uvll::uv_write_t) -> Write { Write { handle: t } } }
/// Returns `true` if `number` reads the same forwards and backwards in
/// base 10 (e.g. 0, 7, 1221). Works by reversing the decimal digits and
/// comparing the result against the original value.
pub fn is_palindrome(mut number: u32) -> bool {
    let copy = number;
    let mut reversed_number = 0;
    while number != 0 {
        let lsd = number % 10; // least significant digit
        reversed_number = reversed_number * 10 + lsd;
        number /= 10;
    }
    copy == reversed_number
}

/// Returns the factor pair (each in 1..=9999) whose product is the
/// largest base-10 palindrome.
///
/// Bug fix: the original never reset `num2` after the inner loop drained
/// it to 0, so only `num1 == 9999` was ever paired with candidates, and
/// it returned the *first* palindrome found rather than the pair with the
/// largest product.
pub fn get_largest_palindrome() -> (u32, u32) {
    let mut best_product = 0;
    let mut best = (0, 0);
    let mut num1 = 9999;
    while num1 > 0 {
        // Even pairing `num1` with the largest factor cannot beat the best
        // product found so far, and smaller `num1` only gets worse.
        if num1 * 9999 <= best_product {
            break;
        }
        // Only consider num2 >= num1: (a, b) and (b, a) have equal products,
        // so the symmetric half of the search space is redundant.
        let mut num2 = 9999;
        while num2 >= num1 {
            let product = num1 * num2; // max 9999*9999 fits in u32
            // Products only shrink as num2 decreases; nothing below can win.
            if product <= best_product {
                break;
            }
            if is_palindrome(product) {
                best_product = product;
                best = (num1, num2);
            }
            num2 -= 1;
        }
        num1 -= 1;
    }
    best
}

/// Prints the factor pair of the largest palindrome product.
pub fn runner() {
    let (a, b) = get_largest_palindrome();
    println!("{} {}", a, b);
}
use warp::{Filter, Rejection, Reply};

use crate::{get_user_handler, list_users_handler, put_user_handler, Database};

/// Composes every `/users` route — get one, list all, put one — into a
/// single filter, in that precedence order.
pub fn users_api(
    db: Database,
) -> impl Filter<Extract = impl Reply, Error = Rejection> + Clone {
    let fetch_one = get_user(db.clone());
    let fetch_all = list(db.clone());
    let upsert = put_user(db);
    fetch_one.or(fetch_all).or(upsert)
}

/// Matches the literal `users` path segment.
fn users() -> warp::filters::BoxedFilter<()> {
    warp::path("users").boxed()
}

/// Extracts a numeric user id from the next path segment.
fn user_id() -> warp::filters::BoxedFilter<(u64,)> {
    warp::path::param().boxed()
}

/// `GET /users` — list every user.
fn list(
    db: Database,
) -> impl Filter<Extract = impl Reply, Error = Rejection> + Clone {
    let handle = move || list_users_handler(db.clone());
    users().and(warp::get()).and_then(handle)
}

/// `GET /users/{id}` — fetch a single user by id.
fn get_user(
    db: Database,
) -> impl Filter<Extract = impl Reply, Error = Rejection> + Clone {
    let handle = move |id| get_user_handler(db.clone(), id);
    users().and(user_id()).and(warp::get()).and_then(handle)
}

/// `PUT /users/{id}` — create or replace a single user from a JSON body.
fn put_user(
    db: Database,
) -> impl Filter<Extract = impl Reply, Error = Rejection> + Clone {
    let handle = move |id, body| put_user_handler(db.clone(), id, body);
    users()
        .and(user_id())
        .and(warp::put())
        .and(warp::body::json())
        .and_then(handle)
}
use anyhow::{Context, Result};
use log::LevelFilter;

use necsim_partitioning_core::Partitioning;
use necsim_partitioning_mpi::{MpiLocalPartition, MpiPartitioning};
use necsim_plugins_core::match_any_reporter_plugin_vec;

use crate::{
    args::{CommandArgs, SimulateArgs},
    reporter::DynamicReporterContext,
};

/// Runs a simulation under MPI partitioning.
///
/// Initialises MPI, parses `simulate_args` against the resulting
/// partitioning, builds the local partition for whichever role this rank
/// has, and dispatches to `super::simulate_with_logger`.
///
/// # Errors
/// Fails if MPI initialisation, argument parsing, or local-partition
/// initialisation fails; errors from the dispatched simulation are
/// propagated as well.
///
/// NOTE(review): `info!`/`warn!` are assumed to be in scope via a
/// crate-root `#[macro_use] extern crate log` — confirm.
#[allow(clippy::module_name_repetitions)]
pub fn simulate_with_logger_mpi(simulate_args: CommandArgs) -> Result<()> {
    // Initialise the simulation partitioning
    let partitioning =
        MpiPartitioning::initialise().with_context(|| "Failed to initialise MPI.")?;

    // Only log to stdout/stderr if the partition is the root partition,
    // so parallel ranks don't emit interleaved duplicate output.
    log::set_max_level(if partitioning.is_root() {
        LevelFilter::Info
    } else {
        LevelFilter::Off
    });

    let simulate_args = SimulateArgs::try_parse(simulate_args, &partitioning)?;

    info!("Parsed simulation arguments:\n{:#?}", simulate_args);

    if let Some(event_log) = &simulate_args.event_log {
        info!(
            "The simulation will log its events to {:?}.",
            event_log.directory()
        );
        warn!("Therefore, only progress will be reported live.");
    }

    // Resolve the dynamically-loaded reporter plugins to a concrete
    // reporter type so the generic simulation entry point can be called.
    match_any_reporter_plugin_vec!(simulate_args.reporters => |reporter| {
        // Initialise the local partition and the simulation
        match partitioning
            .into_local_partition(
                DynamicReporterContext::new(reporter),
                simulate_args.event_log,
            )
            .with_context(|| "Failed to initialise the local MPI partition.")?
        {
            // Every variant carries a distinct concrete partition type, so
            // each arm dispatches to its own monomorphisation of the same
            // entry point.
            MpiLocalPartition::LiveMonolithic(partition) => {
                super::simulate_with_logger(partition, simulate_args.common, simulate_args.scenario)
            },
            MpiLocalPartition::RecordedMonolithic(partition) => {
                super::simulate_with_logger(partition, simulate_args.common, simulate_args.scenario)
            },
            MpiLocalPartition::Root(partition) => {
                super::simulate_with_logger(partition, simulate_args.common, simulate_args.scenario)
            },
            MpiLocalPartition::Parallel(partition) => {
                super::simulate_with_logger(partition, simulate_args.common, simulate_args.scenario)
            },
        }
    })
}
// vim: shiftwidth=2

mod key_codes;
mod keys;
mod key_transforms;
mod dev_input_rw;
mod struct_ser;
mod default_layouts;
mod remapping_loop;
mod layout_generation;
mod keyboard_listing;
mod udev_utils;
mod layout_loading;
mod version;
mod monitor;
mod monitor_raw;
mod struct_de;
mod tablet_mode_switch_reader;
mod monitor_tablet_mode;
mod example_hardware;

use clap::{Arg, App};
use std::borrow::Cow;
use keys::Layout;

/// CLI entry point: builds the clap `App` (subcommands: remap,
/// list_keyboards, list_default_layouts, print_default_layout, monitor,
/// monitor_raw, monitor_tablet_mode, add_systemd_service), then dispatches
/// on whichever subcommand was given. Prints errors and exits with status 1
/// on failure; prints long help when no subcommand is given.
fn main() {
  let mut app = App::new("totalmapper")
    .version(version::VERSION)
    .author("Owen Healy <owen@owenehealy.com>")
    .about("Remaps keycodes in the Linux input event system based on a simple, JSON-defined list of mappings.\n\
      \n\
      To try mapping your keyboard, run:\n\
      \n\
      totalmapper remap --default-layout caps-for-movement --all-keyboards\n\
      \n\
      (making sure you have write access to /dev/uinput).\n\
      \n\
      To see an example of how to define mappings, run:\n\
      \n\
      totalmapper print_default_layout caps-for-movement")
    .subcommand(App::new("remap")
      .about("Remap a keyboard")
      .arg(Arg::new("dev_file")
        .long("dev-file")
        .takes_value(true)
        .value_name("FILE")
        .multiple(true)
        .number_of_values(1)
        .help_heading(Some("DEVICE SELECTION"))
        .about("A path under /dev/input representing a keyboard device. To find your keyboards, run `totalmapper list_keyboards`. Repeat this option multiple times to map multiple keyboards, e.g., `totalmapper remap --dev-file /dev/input/event0 --dev-file /dev/input/event1`. Use --all-keyboards to map all keyboards currently plugged in.")
      )
      .arg(Arg::new("all_keyboards")
        .long("all-keyboards")
        .help_heading(Some("DEVICE SELECTION"))
        .about("Remap all keyboards currently plugged in. Note that this will not affect keyboards you plug in after invoking this command. To automatically remap new keyboards, see the help for `totalmapper add_udev_rule`.")
      )
      .arg(Arg::new("default_layout")
        .long("default-layout")
        .takes_value(true)
        .value_name("NAME")
        .help_heading(Some("LAYOUT SELECTION"))
        .about("Use the builtin layout named NAME. To list the builtin layouts, use `totalmapper list_default_layouts`. To get the JSON for a default layout, use `totalmapper print_default_layout <name>`.")
      )
      .arg(Arg::new("layout_file")
        .long("layout-file")
        .takes_value(true)
        .value_name("FILE")
        .help_heading(Some("LAYOUT SELECTION"))
        .about("Load a layout from json file FILE. To see an example of the form, print an example using `totalmapper print_default_layout caps-for-movement`.")
      )
      .arg(Arg::new("only_if_keyboard")
        .long("only-if-keyboard")
        .help_heading(Some("PROCESS"))
        .about("If the device selected with --dev-file is not a keyboard, exit successfully. Useful when running from udev, since there is no easy way to test in a udev rule whether an input device is a keyboard.")
      )
      .arg(Arg::new("tablet_mode_switch_device")
        .long("tablet-mode-switch-device")
        .takes_value(true)
        .value_name("FILE")
        .help_heading(Some("TABLET MODE"))
        .about("Do not emit key events when the selected device indicates the computer is in tablet mode.")
      )
    )
    .subcommand(App::new("list_keyboards")
      .about("List keyboard devices under /dev/input")
    )
    .subcommand(App::new("list_default_layouts")
      .about("List the names of the default layouts")
    )
    .subcommand(App::new("print_default_layout")
      .about("Print the JSON for one of the builtin layouts")
      .arg(Arg::new("NAME")
        .required(true)
        .index(1)
        .about("The name of the builtin layout to print. Use `totalmapper list_default_layouts` to see the list of builtin layouts.")
      )
    )
    .subcommand(App::new("monitor")
      .about("Print events from a keyboard device (without consuming them)")
      .arg(Arg::new("dev_file")
        .long("dev-file")
        .takes_value(true)
        .value_name("FILE")
        .number_of_values(1)
        .about("A path under /dev/input representing a keyboard device. To find your keyboards, run `totalmapper list_keyboards`.")
      )
    )
    .subcommand(App::new("monitor_raw")
      .about("Print all events from a any input device (without consuming them).")
      .arg(Arg::new("dev_file")
        .long("dev-file")
        .takes_value(true)
        .value_name("FILE")
        .number_of_values(1)
        .about("A path under /dev/input")
      )
    )
    .subcommand(App::new("monitor_tablet_mode")
      .about("Monitor a table mode switch device.")
      .arg(Arg::new("dev_file")
        .long("dev-file")
        .takes_value(true)
        .value_name("FILE")
        .number_of_values(1)
        .about("A path under /dev/input representing your tablet mode switch")
      )
    )
    .subcommand(App::new("add_systemd_service")
      .about("Add (or update, if one exists) a rule in /etc/udev/rules.d/ and service in /etc/systemd/system/ to start totalmapper when a new keyboard is plugged in. Does not affect keyboards already plugged in. Must be run as root.")
      .arg(Arg::new("default_layout")
        .long("default-layout")
        .takes_value(true)
        .value_name("NAME")
        .help_heading(Some("LAYOUT SELECTION"))
        .about("Use the builtin layout named NAME. To list the builtin layouts, use `totalmapper list_default_layouts`. To get the JSON for a default layout, use `totalmapper print_default_layout <name>`.")
      )
      .arg(Arg::new("layout_file")
        .long("layout-file")
        .takes_value(true)
        .value_name("FILE")
        .help_heading(Some("LAYOUT SELECTION"))
        .about("Load a layout from json file FILE. To see an example of the form, print an example using `totalmapper print_default_layout caps-for-movement`.")
      )
    );

  // `app` is cloned because `get_matches` consumes it and we still need
  // `app.print_long_help()` in the fallback branch below.
  let m = app.clone().get_matches();

  if let Some(m) = m.subcommand_matches("remap") {
    let layout = load_layout(&m.value_of("default_layout"), &m.value_of("layout_file"));
    match layout {
      Err(msg) => {
        println!("{}", msg);
        std::process::exit(1);
      },
      Ok(layout) => {
        // Exactly one of --all-keyboards / --dev-file must be given.
        match (m.occurrences_of("all_keyboards") > 0, m.values_of("dev_file")) {
          (false, None) => {
            // NOTE(review): "a least" looks like a typo for "at least", but
            // this is a runtime string and is preserved as-is here.
            println!("Error: Must specify a least one --dev-file or --all-keyboards");
          },
          (true, Some(_)) => {
            println!("Error: Must specify either --dev-file or --all-keyboards, not both");
          },
          (true, None) => {
            match remapping_loop::do_remapping_loop_all_devices(&layout) {
              Ok(_) => (),
              Err(err) => {
                println!("Error: {}", err);
                std::process::exit(1);
              }
            }
          },
          (false, Some(devs)) => {
            let devs2 = devs.collect();
            match remapping_loop::do_remapping_loop_multiple_devices(&devs2, m.occurrences_of("only_if_keyboard") > 0, &layout, &m.value_of("tablet_mode_switch_device")) {
              Ok(_) => (),
              Err(err) => {
                println!("Error: {}", err);
                std::process::exit(1);
              }
            }
          }
        }
      }
    }
  }
  else if let Some(_) = m.subcommand_matches("list_keyboards") {
    keyboard_listing::list_keyboards_to_stdout().unwrap();
  }
  else if let Some(_) = m.subcommand_matches("list_default_layouts") {
    for name in (*default_layouts::DEFAULT_LAYOUTS).keys() {
      println!("{}", name);
    }
  }
  else if let Some(m) = m.subcommand_matches("print_default_layout") {
    // NAME is a required positional arg, so unwrap cannot fail here.
    let name = m.value_of("NAME").unwrap();
    match (*default_layouts::DEFAULT_LAYOUTS).get(name) {
      None => {
        println!("Error: no builtin layout named {}", name);
        std::process::exit(1);
      },
      Some(layout) => {
        println!("{}", serde_json::to_string_pretty(layout).unwrap())
      }
    }
  }
  else if let Some(m) = m.subcommand_matches("monitor") {
    match m.value_of("dev_file") {
      None => {
        println!("Must specify --dev-file");
      },
      Some(dev_file) => {
        monitor::run_monitor(dev_file);
      }
    }
  }
  else if let Some(m) = m.subcommand_matches("monitor_raw") {
    match m.value_of("dev_file") {
      None => {
        println!("Must specify --dev-file");
      },
      Some(dev_file) => {
        monitor_raw::run_monitor_raw(dev_file);
      }
    }
  }
  else if let Some(m) = m.subcommand_matches("monitor_tablet_mode") {
    match m.value_of("dev_file") {
      None => {
        println!("Must specify --dev-file");
      },
      Some(dev_file) => {
        monitor_tablet_mode::run_monitor(dev_file);
      }
    }
  }
  else if let Some(m) = m.subcommand_matches("add_systemd_service") {
    match load_layout(&m.value_of("default_layout"), &m.value_of("layout_file")) {
      Err(s) => {
        println!("{}", s);
        std::process::exit(1);
      },
      Ok(layout) => {
        match udev_utils::add_systemd_service(&*layout) {
          Err(msg) => {
            println!("{}", msg);
            std::process::exit(1);
          },
          Ok(_) => ()
        }
      }
    }
  }
  else {
    app.print_long_help().unwrap();
  }
}

/// Resolves the layout from CLI options: exactly one of `default_layout`
/// (builtin name, borrowed) or `layout_file` (JSON file, owned after load)
/// must be supplied. Returns a human-readable error string otherwise.
fn load_layout(default_layout: &Option<&str>, layout_file: &Option<&str>) -> Result<Cow<'static, Layout>, String> {
  match (default_layout, layout_file) {
    (None, None) => {
      Err("Error: no layout specified. Use --default-layout or --layout-file.".to_string())
    },
    (Some(_), Some(_)) => {
      Err("Error: use either --default-layout or --layout-file, not both.".to_string())
    },
    (Some(name), None) => {
      match (*default_layouts::DEFAULT_LAYOUTS).get(&name.to_string()) {
        None => Err(format!("Error: no builtin layout named {}", name)),
        // Builtin layouts are 'static, so no copy is needed.
        Some(layout) => Ok(Cow::Borrowed(*layout))
      }
    },
    (None, Some(path)) => {
      match layout_loading::load_layout_from_file(path) {
        Err(err) => Err(err),
        Ok(layout) => Ok(Cow::Owned(layout))
      }
    }
  }
}
extern crate postgres;
extern crate r2d2;
extern crate r2d2_postgres;

use std::io;

use postgres::error::T_R_SERIALIZATION_FAILURE;
use postgres::transaction::Transaction;
use postgres::{Connection, Error};
use postgres::transaction::{Config, IsolationLevel};
use r2d2::{Pool, PooledConnection};
use r2d2_postgres::{PostgresConnectionManager, TlsMode};

use crate::proto::accounting::TransferComponent;
use crate::DataStore;

pub type PostgresPool = Pool<PostgresConnectionManager>;
pub type PostgresConnection = PooledConnection<PostgresConnectionManager>;
// Error details are discarded at this layer; callers only learn pass/fail.
pub type PostgresResult<T> = Result<T, ()>;

// This will work with both Postgres and CockroachDb
#[derive(Clone)]
pub struct PostgresDataStore {
    // r2d2 connection pool; cloning the store shares the pool.
    pool: PostgresPool,
}

impl PostgresDataStore {
    /// Builds a pooled datastore from a Postgres connection string.
    /// Panics (expect) if the connection or pool cannot be created.
    pub fn new(conn_string: &str) -> PostgresDataStore {
        let manager = PostgresConnectionManager::new(conn_string, TlsMode::None)
            .expect("Database Connection Error");
        let pool = Pool::new(manager)
            .expect("ThreadPool Error");
        PostgresDataStore { pool }
    }

    /// Drops and recreates the `accounts` and `transactions` tables.
    /// Destructive: all existing data is lost. Panics on any SQL failure.
    /// NOTE(review): `unique_rowid()` and the GRANT to `accountant` suggest a
    /// CockroachDB target with a pre-provisioned role — confirm before reuse.
    pub fn reset_db_state(&mut self) -> PostgresResult<()> {
        self.get_conn().execute("DROP TABLE IF EXISTS accounts;", &[]).unwrap();
        self.get_conn().execute("CREATE TABLE accounts ( id INT4 PRIMARY KEY, balance BIGINT, created_at TIMESTAMP DEFAULT now(), creation_request BIGINT, CONSTRAINT balance_check CHECK (balance >= 0) );", &[]).unwrap();
        self.get_conn().execute("GRANT ALL ON TABLE accounts TO accountant;", &[]).unwrap();
        self.get_conn().execute("DROP TABLE IF EXISTS transactions;", &[]).unwrap();
        self.get_conn().execute("CREATE TABLE transactions ( id INT PRIMARY KEY DEFAULT unique_rowid(), transaction_index INT4, req_id BIGINT, account_id INT4, amount INT, created_at TIMESTAMP DEFAULT now() );", &[]).unwrap();
        self.get_conn().execute("GRANT ALL ON TABLE transactions TO accountant;", &[]).unwrap();
        Ok(())
    }

    // Checks out a connection from the pool; panics if the pool is exhausted
    // or the database is unreachable.
    fn get_conn(&mut self) -> PostgresConnection {
        self.pool.get().unwrap()
    }
}

impl DataStore for PostgresDataStore {
    /// Inserts a new account row; Ok(1) on exactly one row inserted,
    /// Err(()) otherwise (duplicate id, constraint violation, ...).
    fn create_account(&mut self, account: u32, req_id: u64, balance: i64) -> PostgresResult<u64> {
        let res = create_account(self.get_conn(), account, req_id, balance);
        match res {
            Ok(1) => Ok(1),
            _ => Err(()),
        }
    }

    /// Reads an account balance; see `get_account_balance` below for the
    /// missing-account sentinel behaviour.
    fn get_account_balance(&mut self, account: u32) -> PostgresResult<i64> {
        let res = get_account_balance(self.get_conn(), account);
        match res {
            Ok(val) => Ok(val),
            _ => Err(()),
        }
    }

    /// Applies all transfer components atomically in one serializable
    /// transaction (see `execute_txn`).
    fn execute_transfers(
        &mut self,
        transfers: &[TransferComponent],
        req_id: i64,
    ) -> PostgresResult<()> {
        let res = execute_transfers(self.get_conn(), transfers, req_id);
        match res {
            Ok(_) => Ok(()),
            _ => Err(()),
        }
    }

    fn reset(&mut self) -> PostgresResult<()> {
        self.reset_db_state()
    }
}

/// Runs `op` inside a serializable transaction, retrying on serialization
/// failures using the CockroachDB "cockroach_restart" savepoint protocol:
/// the savepoint is rolled back and `op` retried until it commits without a
/// T_R_SERIALIZATION_FAILURE, then the outer transaction commits.
fn execute_txn<T, F>(conn: &Connection, op: F) -> Result<T, Error>
where
    F: Fn(&Transaction) -> Result<T, Error>,
{
    // Use serializable isolation to protect against concurrent writes
    let txn = conn.transaction()?;
    let mut cfg = Config::new();
    cfg.isolation_level(IsolationLevel::Serializable);
    txn.set_config(&cfg).unwrap();
    loop {
        let sp = txn.savepoint("cockroach_restart")?;
        match op(&sp).and_then(|t| sp.commit().map(|_| t)) {
            // Serialization conflict: drop the result and retry the loop.
            Err(ref err)
                if err
                    .as_db()
                    .map(|e| e.code == T_R_SERIALIZATION_FAILURE)
                    .unwrap_or(false) => {}
            // Any other outcome (success or hard error) ends the loop.
            r => break r,
        }
    }
    .and_then(|t| txn.commit().map(|_| t))
}

/// Inserts one account row; returns the number of rows inserted.
/// NOTE(review): `id` is declared INT4 but bound as i64 here — whether the
/// driver/server coerces this depends on the backend; confirm against the
/// target database.
fn create_account(conn: PostgresConnection, account: u32, req_id: u64, balance: i64) -> Result<u64, Error> {
    conn.execute(
        "INSERT INTO accounts (balance, id, creation_request) VALUES ($1, $2, $3)",
        &[&balance, &(account as i64), &(req_id as i64)],
    )
}

/// Fetches an account balance.
/// NOTE(review): a missing account yields the sentinel Ok(-1) rather than an
/// error — callers must treat -1 as "not found", and a real balance of -1 is
/// impossible only because of the table's balance >= 0 CHECK constraint.
fn get_account_balance(conn: PostgresConnection, account: u32) -> Result<i64, Error> {
    let balance = conn.query("SELECT balance FROM accounts WHERE id=$1", &[&(account as i64)])?;
    // If no rows are returned, need to inform user
    if balance.len() != 1 {
        Ok(-1)
    } else {
        Ok(balance.get(0).get(0))
    }
}

/// Applies a batch of transfers atomically via the retrying transaction
/// wrapper above.
fn execute_transfers(
    conn: PostgresConnection,
    transfers: &[TransferComponent],
    req_id: i64,
) -> Result<(), Error> {
    execute_txn(&conn, |txn| transfer_funds(txn, transfers, req_id))
}

/// Within an open transaction: applies each component's balance delta and
/// records an audit row in `transactions`. A zero-row UPDATE (unknown
/// account) or INSERT is surfaced as an io::ErrorKind::NotFound wrapped in a
/// postgres Error, which aborts (and rolls back) the transaction.
fn transfer_funds(
    txn: &Transaction,
    transfers: &[TransferComponent],
    req_id: i64,
) -> Result<(), Error> {
    for (i, transfer) in transfers.iter().enumerate() {
        let delta: i64 = transfer.get_money_delta();
        let account = transfer.get_account_id() as i64;
        let res = txn.execute(
            "UPDATE accounts SET balance = balance + $1 WHERE id = $2",
            &[&delta, &account],
        )?;
        if res == 0 {
            return Err(Error::from(io::Error::from(io::ErrorKind::NotFound)))
        }
        let res = txn.execute(
            "INSERT INTO transactions (req_id, account_id, amount, transaction_index) VALUES ($1, $2, $3, $4)",
            &[&req_id, &account, &delta, &(i as i64)],
        )?;
        if res == 0 {
            return Err(Error::from(io::Error::from(io::ErrorKind::NotFound)))
        }
    }
    Ok(())
}
#![allow(unused)]

extern crate lalr;

use lalr::*;
// Fix: the original imported `RingBuf`, which was renamed to `VecDeque`
// before Rust 1.0 and no longer exists in `std::collections` — importing it
// is a hard compile error on any modern toolchain.
use std::collections::{btree_map, BTreeMap, BTreeSet, VecDeque};

/// Builds a `BTreeMap` from `key => value` pairs.
macro_rules! map {
    ($($l: expr => $r: expr),*) => ({
        let mut r = BTreeMap::new();
        $(r.insert($l, $r);)*
        r
    })
}

/// Convenience constructor for a grammar production's right-hand side.
fn rhs<T, N, A>(syms: Vec<Symbol<T, N>>, act: A) -> Rhs<T, N, A> {
    // Field-init shorthand replaces the redundant `syms: syms, act: act`.
    Rhs { syms, act }
}

/// Exercises the lalr crate on a small toy grammar: prints the grammar, its
/// LR(0) state machine, the augmented grammar, FOLLOW sets, and the LALR(1)
/// table.
fn main() {
    let g = Grammar {
        rules: map![
            "S" => vec![
                rhs(vec![Nonterminal("N")], ()),
            ],
            "N" => vec![
                rhs(vec![Nonterminal("V"), Terminal("="), Nonterminal("E")], ()),
                rhs(vec![Nonterminal("E")], ()),
            ],
            "E" => vec![
                rhs(vec![Nonterminal("V")], ()),
            ],
            "V" => vec![
                rhs(vec![Terminal("x")], ()),
                rhs(vec![Terminal("*"), Nonterminal("E")], ()),
            ]
        ],
        start: "S"
    };
    println!("{:?}", g);
    let machine = g.lr0_state_machine();
    machine.print();
    let ag = machine.augmented_grammar();
    println!("{:?}", ag);
    println!();
    println!("{:?}", g.follow_sets(g.first_sets()));
    println!();
    println!("{:?}", ag.follow_sets(ag.first_sets()));
    println!();
    println!("{:?}", g.lalr1());
}
extern crate rustc_serialize;
#[macro_use]
extern crate log;
extern crate bitex;

use std::time::Duration;
use std::thread;

pub mod strategy;
pub use strategy::Strategy;

use bitex::{Api, Bid, Ask, StatusCode, Order};
use bitex::curs::{CursResult, CursError};

/// Simple market-making bot: pairs a Bitex API client with one pricing
/// strategy for bids and one for asks.
#[derive(Debug)]
pub struct Trader<'a> {
    pub api: Api<'a>,
    // Milliseconds to sleep after a full trade round (see `trade`).
    pub sleep_for: u64,
    // Milliseconds to pause before every API call / between retries.
    pub cooldown: u64,
    pub bids_config: Strategy,
    pub asks_config: Strategy,
}

impl<'a> Trader<'a> {
    /// Builds a Trader. Panics unless bid deltas are negative (bid below
    /// market) and ask deltas positive (ask above market).
    pub fn new(api: Api<'a>, sleep: u64, cool: u64, bids: Strategy, asks: Strategy)
        -> Trader<'a> {
        assert!(bids.price_delta.is_sign_negative(), "Bids need negative delta");
        assert!(asks.price_delta.is_sign_positive(), "Asks need positive delta");
        Trader{
            api: api,
            sleep_for: sleep,
            cooldown: cool,
            bids_config: bids,
            asks_config: asks,
        }
    }

    // Runs `func`, retrying on network errors and on any HTTP status other
    // than 422 UnprocessableEntity; all other errors are returned as-is.
    // Each attempt is preceded by a `cooldown` sleep.
    // NOTE(review): retries recurse with no depth limit, so a persistently
    // failing endpoint grows the stack indefinitely — confirm acceptable.
    fn with_retry<F: Fn() -> CursResult<A>, A>(&self, func: F) -> CursResult<A> {
        thread::sleep(Duration::from_millis(self.cooldown));
        func().or_else(|err|{
            info!("Last operation errored with \n {:?}", err);
            match err {
                CursError::Status(ref r) if r.status != StatusCode::UnprocessableEntity =>
                    self.with_retry(func),
                CursError::Network(_) => self.with_retry(func),
                e => Err(e)
            }
        })
    }

    // Maps (amount, price) pairs to orders by calling `func` for each pair
    // (with retry), collecting every per-order result.
    fn pairs_to_orders<F, A>(&self, pairs: Vec<(f64, f64)>, func: F)
        -> Vec<CursResult<A>> where F: Fn(f64, f64) -> CursResult<A> {
        pairs.into_iter().map(|(a,p)|{
            self.with_retry(|| func(a,p))
        }).collect()
    }

    /// Places one bid per (amount, price) pair; returns per-bid results.
    pub fn place_bids(&self, pairs: Vec<(f64, f64)>) -> Vec<CursResult<Bid>> {
        self.pairs_to_orders(pairs, |a,p|{
            info!("Placing Bid ${} @ ${}", a, p);
            self.api.bids().create(a,p)
        })
    }

    /// Places one ask per (amount, price) pair; returns per-ask results.
    pub fn place_asks(&self, pairs: Vec<(f64, f64)>) -> Vec<CursResult<Ask>> {
        self.pairs_to_orders(pairs, |a,p|{
            info!("Placing Ask {} BTC @ ${}", a, p);
            self.api.asks().create(a,p)
        })
    }

    /// Cancels every open order, polling (with cooldown) until the exchange
    /// reports an empty order list. Panics (unwrap) on unrecoverable API
    /// errors.
    pub fn clear_all_orders(&self){
        info!("Clearing orders");
        loop {
            let orders = self.with_retry(|| self.api.orders()).unwrap();
            if orders.is_empty() { break }
            for o in orders.into_iter() {
                match o {
                    Order::Bid(o) => {self.api.bids().cancel(o.id).unwrap();},
                    Order::Ask(o) => {self.api.asks().cancel(o.id).unwrap();}
                };
            };
            thread::sleep(Duration::from_millis(self.cooldown));
        }
    }

    /// One full trading round: cancel everything, re-quote both sides around
    /// the current best bid/ask from the order book, then sleep `sleep_for`.
    pub fn trade(&self){
        self.clear_all_orders();
        info!("Starting trade");
        let book = self.with_retry(|| self.api.orderbook()).unwrap();
        //let profile = self.with_retry(|| self.api.profile()).unwrap();
        // book.bids[0] / book.asks[0] are the top-of-book levels; the
        // strategies derive new order ladders from their prices.
        self.place_bids(self.bids_config.build_orders(book.bids[0].0));
        self.place_asks(self.asks_config.build_orders(book.asks[0].0));
        thread::sleep(Duration::from_millis(self.sleep_for));
    }
}
use rtm::Usergroup;

/// List all users in a User Group
///
/// Wraps https://api.slack.com/methods/usergroups.users.list
#[derive(Clone, Debug, Serialize, new)]
pub struct ListRequest {
    /// The encoded ID of the User Group to update.
    pub usergroup: ::UsergroupId,
    /// Allow results that involve disabled User Groups.
    #[new(default)]
    pub include_disabled: Option<bool>,
}

/// Response payload for usergroups.users.list.
#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct ListResponse {
    // Slack's success flag; kept private (not `pub`) in this crate.
    ok: bool,
    /// Member user IDs of the group, when the call succeeds.
    pub users: Option<Vec<::UserId>>,
}

/// Update the list of users for a User Group
///
/// Wraps https://api.slack.com/methods/usergroups.users.update
#[derive(Clone, Debug, Serialize, new)]
pub struct UpdateRequest<'a> {
    /// The encoded ID of the User Group to update.
    pub usergroup: ::UsergroupId,
    /// A comma separated string of encoded user IDs that represent the entire list of users for the User Group.
    #[new(default)]
    #[serde(serialize_with = "::serialize_comma_separated")]
    pub users: &'a [::UserId],
    /// Include the number of users in the User Group.
    #[new(default)]
    pub include_count: Option<bool>,
}

/// Response payload for usergroups.users.update.
#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct UpdateResponse {
    // Slack's success flag; kept private (not `pub`) in this crate.
    ok: bool,
    /// The updated User Group, when the call succeeds.
    pub usergroup: Option<Usergroup>,
}
use std::io;

use crate::{ingame::InGameLoop, Connection};
use futures::{sink::SinkExt, stream::StreamExt};
use rsc2_pb::protocol::{self, response::Response, Status};

// Sends `$req` over `$conn`, awaits one response, and filters it:
// - transport error              -> Err
// - stream closed                -> Ok(None)
// - status Quit/Unknown          -> Ok(None) (state machine interrupted)
// - wrong variant / inner error  -> Ok(None)
// - expected `$resp` variant, no inner error -> Ok(Some(raw response))
// Any per-response protocol errors are logged via `warn!`/`error!`.
macro_rules! server_call {
    ($conn:ident, $req:ident, $resp:path) => {
        async {
            // Compile-time check that `$conn` really is a `&mut Connection`.
            let _: &mut Connection = $conn;
            $conn.send($req).await?;
            let res = match $conn.next().await {
                Some(Ok(res)) => res,
                Some(Err(err)) => return Err(err),
                None => return Ok(None),
            };
            let status = res.status();
            let id = res.id();
            let $crate::protocol::Response { response, error, .. } = &res;
            error
                .iter()
                .for_each(|err| warn!("response id: {} | err: {}", id, err));
            if matches!(status, Status::Quit | Status::Unknown) {
                info!("status: {:?}, interupting state machine", status);
                return Result::<_, io::Error>::Ok(None);
            }
            Ok(match response {
                Some($resp(response)) => {
                    if response.error.is_some() {
                        error!("response id: {} | err: {:?}", id, response.error());
                        None
                    } else {
                        Some(res)
                    }
                }
                _ => None,
            })
        }
    };
}

// Generates `From<$from> for $to` for the transparent state wrappers below;
// each just re-wraps the same `&mut Core`.
macro_rules! impl_from {
    ($from:ident -> $to:ident) => {
        impl<'a> From<$from<'a>> for $to<'a> {
            #[inline]
            fn from(rhs: $from<'a>) -> Self {
                $to(rhs.0)
            }
        }
    };
}

/// Connection lifecycle state. The `Launched`/`InitGame`/... wrapper structs
/// below form a typestate API over a `&mut Core`: each wrapper only exposes
/// the requests that are legal in that state.
pub enum Core {
    Launched {},
    InitGame {},
    InGame {},
    InReplay {},
    Ended {},
}

impl Core {
    /// Initial state: `Launched`.
    pub const fn init() -> Self {
        Self::Launched {}
    }

    /// Returns the `Launched` typestate wrapper iff currently in `Launched`.
    pub fn launched(&mut self) -> Option<Launched<'_>> {
        match self {
            Self::Launched { .. } => Some(Launched::from(self)),
            _ => None,
        }
    }

    /// Swaps in a new state and returns the old one. In debug builds,
    /// asserts the transition is one of the allowed edges listed below.
    pub fn replace(&mut self, new: Self) -> Self {
        let (a, b) = (&self, &new);
        debug_assert!(
            // Launched
            matches!((a, b), (Self::Launched { .. }, Self::InitGame { .. }))
                || matches!((a, b), (Self::Launched { .. }, Self::InGame { .. }))
                || matches!((a, b), (Self::Launched { .. }, Self::InReplay { .. }))
                // InitGame
                || matches!((a, b), (Self::InitGame { .. }, Self::InGame { .. }))
                // InGame
                || matches!((a, b), (Self::InGame { .. }, Self::InGame { .. }))
                || matches!((a, b), (Self::InGame { .. }, Self::Ended { .. }))
                // InReplay
                || matches!((a, b), (Self::InReplay { .. }, Self::InReplay { .. }))
                || matches!((a, b), (Self::InReplay { .. }, Self::Ended { .. }))
                // Ended
                || matches!((a, b), (Self::Ended { .. }, Self::Launched { .. }))
                || matches!((a, b), (Self::Ended { .. }, Self::InGame { .. }))
        );
        std::mem::replace(self, new)
    }
}

impl Default for Core {
    fn default() -> Self {
        Self::init()
    }
}

// --- Typestate wrappers: each is a transparent borrow of the Core enum. ---

#[repr(transparent)]
pub struct Launched<'a>(&'a mut Core);
impl<'a> From<&'a mut Core> for Launched<'a> {
    fn from(rhs: &'a mut Core) -> Self {
        Launched(rhs)
    }
}

#[repr(transparent)]
pub struct InitGame<'a>(&'a mut Core);
impl_from!(Launched -> InitGame);

#[repr(transparent)]
pub struct InGame<'a>(&'a mut Core);
impl_from!(Launched -> InGame);
impl_from!(InitGame -> InGame);

#[repr(transparent)]
pub struct InReplay<'a>(&'a mut Core);
impl_from!(Launched -> InReplay);

#[repr(transparent)]
pub struct Ended<'a>(&'a mut Core);
impl_from!(InGame -> Ended);
impl_from!(InReplay -> Ended);

impl<'a> Launched<'a> {
    pub fn core(&mut self) -> &mut Core {
        self.0
    }

    /// CreateGame request; on success transitions Launched -> InitGame.
    /// `Ok(None)` means the server refused/ended — state is left unchanged.
    pub async fn create_game(
        self,
        framed: &mut Connection,
        data: protocol::RequestCreateGame,
    ) -> io::Result<Option<InitGame<'a>>> {
        let resp = server_call!(framed, data, Response::CreateGame).await?;
        Ok(resp.map(|_| {
            self.0.replace(Core::InitGame {});
            InitGame::from(self)
        }))
    }

    /// JoinGame request; on success transitions Launched -> InGame.
    pub async fn join_game(
        self,
        framed: &mut Connection,
        data: protocol::RequestJoinGame,
    ) -> io::Result<Option<InGame<'a>>> {
        let resp = server_call!(framed, data, Response::JoinGame).await?;
        Ok(resp.map(|_| {
            self.0.replace(Core::InGame {});
            InGame::from(self)
        }))
    }

    /// StartReplay request; on success transitions Launched -> InReplay.
    pub async fn join_replay(
        self,
        framed: &mut Connection,
        data: protocol::RequestStartReplay,
    ) -> io::Result<Option<InReplay<'a>>> {
        let resp = server_call!(framed, data, Response::StartReplay).await?;
        Ok(resp.map(|_| {
            self.0.replace(Core::InReplay {});
            InReplay::from(self)
        }))
    }
}

impl<'a> InitGame<'a> {
    pub fn core(&mut self) -> &mut Core {
        self.0
    }

    /// JoinGame request; on success transitions InitGame -> InGame.
    pub async fn join_game(
        self,
        framed: &mut Connection,
        data: protocol::RequestJoinGame,
    ) -> io::Result<Option<InGame<'a>>> {
        let resp = server_call!(framed, data, Response::JoinGame).await?;
        Ok(resp.map(|_| {
            self.0.replace(Core::InGame {});
            InGame::from(self)
        }))
    }
}

impl<'a> InGame<'a> {
    pub fn core(&mut self) -> &mut Core {
        self.0
    }

    /// Hands the connection to the in-game event loop.
    pub fn stream(self, stream: &mut Connection) -> InGameLoop<'a, '_> {
        // NOTE(review): Pin::new_unchecked requires that `stream` is never
        // moved while pinned; `&mut Connection` cannot itself move the
        // referent, but confirm `InGameLoop` upholds this — no SAFETY
        // comment was present in the original.
        let framed = unsafe { std::pin::Pin::new_unchecked(stream) };
        InGameLoop::new(self, framed)
    }
}
// Read-only accessor for the INTR_TX_MASKED register: each getter returns the
// logical AND of a TX-interrupt request bit with its mask bit.
// NOTE(review): this matches the svd2rust-generated PAC style — such files
// are normally regenerated from the SVD rather than edited by hand; confirm
// before modifying.
// Bit layout per the field docs below: 0 TRIGGER, 1 NOT_FULL, 4 EMPTY,
// 5 OVERFLOW, 6 UNDERFLOW, 7 BLOCKED, 8 UART_NACK, 9 UART_DONE,
// 10 UART_ARB_LOST (bits 2-3 have no fields here).
#[doc = "Reader of register INTR_TX_MASKED"]
pub type R = crate::R<u32, super::INTR_TX_MASKED>;
#[doc = "Reader of field `TRIGGER`"]
pub type TRIGGER_R = crate::R<bool, bool>;
#[doc = "Reader of field `NOT_FULL`"]
pub type NOT_FULL_R = crate::R<bool, bool>;
#[doc = "Reader of field `EMPTY`"]
pub type EMPTY_R = crate::R<bool, bool>;
#[doc = "Reader of field `OVERFLOW`"]
pub type OVERFLOW_R = crate::R<bool, bool>;
#[doc = "Reader of field `UNDERFLOW`"]
pub type UNDERFLOW_R = crate::R<bool, bool>;
#[doc = "Reader of field `BLOCKED`"]
pub type BLOCKED_R = crate::R<bool, bool>;
#[doc = "Reader of field `UART_NACK`"]
pub type UART_NACK_R = crate::R<bool, bool>;
#[doc = "Reader of field `UART_DONE`"]
pub type UART_DONE_R = crate::R<bool, bool>;
#[doc = "Reader of field `UART_ARB_LOST`"]
pub type UART_ARB_LOST_R = crate::R<bool, bool>;
impl R {
    #[doc = "Bit 0 - Logical and of corresponding request and mask bits."]
    #[inline(always)]
    pub fn trigger(&self) -> TRIGGER_R {
        TRIGGER_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - Logical and of corresponding request and mask bits."]
    #[inline(always)]
    pub fn not_full(&self) -> NOT_FULL_R {
        NOT_FULL_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 4 - Logical and of corresponding request and mask bits."]
    #[inline(always)]
    pub fn empty(&self) -> EMPTY_R {
        EMPTY_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 5 - Logical and of corresponding request and mask bits."]
    #[inline(always)]
    pub fn overflow(&self) -> OVERFLOW_R {
        OVERFLOW_R::new(((self.bits >> 5) & 0x01) != 0)
    }
    #[doc = "Bit 6 - Logical and of corresponding request and mask bits."]
    #[inline(always)]
    pub fn underflow(&self) -> UNDERFLOW_R {
        UNDERFLOW_R::new(((self.bits >> 6) & 0x01) != 0)
    }
    #[doc = "Bit 7 - Logical and of corresponding request and mask bits."]
    #[inline(always)]
    pub fn blocked(&self) -> BLOCKED_R {
        BLOCKED_R::new(((self.bits >> 7) & 0x01) != 0)
    }
    #[doc = "Bit 8 - Logical and of corresponding request and mask bits."]
    #[inline(always)]
    pub fn uart_nack(&self) -> UART_NACK_R {
        UART_NACK_R::new(((self.bits >> 8) & 0x01) != 0)
    }
    #[doc = "Bit 9 - Logical and of corresponding request and mask bits."]
    #[inline(always)]
    pub fn uart_done(&self) -> UART_DONE_R {
        UART_DONE_R::new(((self.bits >> 9) & 0x01) != 0)
    }
    #[doc = "Bit 10 - Logical and of corresponding request and mask bits."]
    #[inline(always)]
    pub fn uart_arb_lost(&self) -> UART_ARB_LOST_R {
        UART_ARB_LOST_R::new(((self.bits >> 10) & 0x01) != 0)
    }
}
use std::collections::HashMap;

struct Solution {}

impl Solution {
    /// LeetCode 659 "Split Array into Consecutive Subsequences": returns
    /// true iff `nums` can be split into runs of consecutive integers, each
    /// of length >= 3. Assumes `nums` is sorted ascending (the LeetCode
    /// input guarantee) — the greedy scan below relies on that order.
    ///
    /// Fix: the original matched map values with `Some(&mut (mut x))`
    /// patterns, which *copy* the counter out of the map; every increment
    /// and decrement mutated a local copy and was lost, so the frequency
    /// bookkeeping never worked. All counter updates now mutate the maps
    /// in place (entry API / `get_mut`).
    pub fn is_possible(nums: Vec<i32>) -> bool {
        // Remaining unused copies of each value.
        let mut freq: HashMap<i32, i32> = HashMap::new();
        // Number of existing runs that want this value as their next element.
        let mut need: HashMap<i32, i32> = HashMap::new();

        for &num in &nums {
            *freq.entry(num).or_insert(0) += 1;
        }

        for &num in &nums {
            if freq[&num] == 0 {
                // Already consumed by an earlier run start/extension.
                continue;
            }
            if need.get(&num).copied().unwrap_or(0) > 0 {
                // Prefer extending a run that ends at num - 1.
                *freq.get_mut(&num).unwrap() -= 1;
                *need.get_mut(&num).unwrap() -= 1;
                *need.entry(num + 1).or_insert(0) += 1;
            } else if freq.get(&(num + 1)).copied().unwrap_or(0) > 0
                && freq.get(&(num + 2)).copied().unwrap_or(0) > 0
            {
                // Otherwise start a fresh run: num, num + 1, num + 2.
                *freq.get_mut(&num).unwrap() -= 1;
                *freq.get_mut(&(num + 1)).unwrap() -= 1;
                *freq.get_mut(&(num + 2)).unwrap() -= 1;
                *need.entry(num + 3).or_insert(0) += 1;
            } else {
                // num can neither extend an existing run nor start one.
                return false;
            }
        }
        true
    }
}

fn main() {
    println!("{}", Solution::is_possible(vec![1, 2, 3, 3, 4, 4, 5, 5]));
    println!("{}", Solution::is_possible(vec![1, 2, 3, 3, 4, 5]));
    println!("{}", Solution::is_possible(vec![1, 2, 3, 4, 4, 5]));
}
/// Solves the Day 01 Part 1 puzzle with respect to the given input:
/// prints how many adjacent depth pairs increase.
pub fn part_1(input: String) {
    let depths = parse_input(input);
    // `windows(2)` yields nothing for fewer than two depths, so short or
    // empty input prints 0 instead of panicking.
    let increases = depths.windows(2).filter(|pair| pair[0] < pair[1]).count();
    println!("{}", increases);
}

/// Solves the Day 01 Part 2 puzzle with respect to the given input:
/// prints how many three-measurement sliding-window sums increase.
pub fn part_2(input: String) {
    let depths = parse_input(input);
    // Adjacent window sums share their two middle elements, so window i+1
    // exceeds window i exactly when depths[i + 3] > depths[i].
    let increases = depths
        .iter()
        .zip(depths.iter().skip(3))
        .filter(|(prev, next)| prev < next)
        .count();
    println!("{}", increases);
}

/// Parses the given newline-separated input into a vector of integers.
///
/// Fix: the original split on "\n" and unwrapped every parse, so a trailing
/// newline (or any blank line) produced `"".parse()` and panicked. Blank
/// lines are now skipped, and `str::lines` also strips Windows `\r`.
fn parse_input(input: String) -> Vec<i32> {
    input
        .lines()
        .map(str::trim)
        .filter(|line| !line.is_empty())
        .map(|line| line.parse().expect("input lines must be integers"))
        .collect()
}