text
stringlengths
8
4.13M
// Bitcoin-style P2P network message: a fixed-size header (command, payload
// length, checksum) followed by the raw payload bytes.
use crate::errors::{message::ErrorKind::*, ErrorKind, Result, ResultExt};
use crate::message_header::MessageHeader;
use cashcontracts::double_sha256;
use std::io;

#[derive(Clone, Debug)]
pub struct MessagePacket {
    header: MessageHeader,
    payload: Vec<u8>,
}

impl MessagePacket {
    /// Verifies that the first four bytes of double-SHA256(payload) match the
    /// header's checksum field; returns `InvalidChecksum` otherwise.
    fn _check_checksum(payload: &[u8], checksum: [u8; 4]) -> Result<()> {
        let hash = double_sha256(&payload);
        if hash[..4] != checksum {
            return Err(ErrorKind::Message(InvalidChecksum).into());
        }
        Ok(())
    }

    /// Builds a packet from an already-parsed header and payload, validating
    /// the header checksum against the payload first.
    pub fn from_header_payload(header: MessageHeader, payload: Vec<u8>) -> Result<Self> {
        Self::_check_checksum(&payload, header.checksum())?;
        Ok(MessagePacket { header, payload })
    }

    /// Builds a packet from a command name and payload, computing the checksum
    /// and zero-padding the command into the fixed 12-byte header field.
    pub fn from_payload(command: &[u8], payload: Vec<u8>) -> MessagePacket {
        use std::io::Write;
        let hash = double_sha256(&payload);
        let mut checksum = [0; 4];
        checksum.copy_from_slice(&hash[..4]);
        let mut command_padded = [0u8; 12];
        // The cursor write can only fail if `command` exceeds 12 bytes, which
        // would be a caller bug — hence the unwrap.
        io::Cursor::new(&mut command_padded[..])
            .write_all(command)
            .unwrap();
        let header = MessageHeader::new(command_padded, payload.len() as u32, checksum);
        MessagePacket { header, payload }
    }

    /// Serializes the header followed by the payload to the given async
    /// writer, mapping I/O failures into this crate's error type.
    pub async fn write_to_stream<W: async_std::io::Write + Unpin>(
        &self,
        write: &mut W,
    ) -> Result<()> {
        use async_std::prelude::*;
        write
            .write_all(&self.header.bytes()[..])
            .await
            .chain_err(|| IoError)?;
        write.write_all(&self.payload).await.chain_err(|| IoError)?;
        Ok(())
    }

    /// The parsed message header.
    pub fn header(&self) -> &MessageHeader {
        &self.header
    }

    /// The raw payload bytes.
    pub fn payload(&self) -> &[u8] {
        &self.payload
    }
}

impl std::fmt::Display for MessagePacket {
    fn fmt<'a>(&self, f: &mut std::fmt::Formatter<'a>) -> std::result::Result<(), std::fmt::Error> {
        write!(f, "{}", self.header)?;
        writeln!(f, "payload: {}", hex::encode(&self.payload))?;
        Ok(())
    }
}
use std::{fmt::Debug, time::Duration};

use futures::{
    future::{select_ok, try_select, Either},
    pin_mut, Future,
};
use tracing::warn;

/// Runs the future produced by `create_f`, retrying at most once.
///
/// Three outcomes are handled:
/// - The first attempt succeeds before `retry_delay`: its value is returned.
/// - The first attempt errors before `retry_delay`: the error is logged and a
///   single fresh attempt is awaited.
/// - `retry_delay` elapses first: a second attempt is started and raced
///   against the still-pending first one; the first success wins.
pub async fn single_retry<F, T, E>(
    mut create_f: impl FnMut() -> F,
    retry_delay: Duration,
) -> Result<T, E>
where
    F: Future<Output = Result<T, E>> + Send,
    E: Debug,
{
    let f = create_f();
    // The timeout future always resolves to Ok, so it can only ever surface
    // on the Ok(Either::Left) arm below — never the Err(Either::Left) arm.
    let timeout = async {
        tokio::time::sleep(retry_delay).await;
        Ok::<(), ()>(())
    };
    pin_mut!(timeout);
    pin_mut!(f);
    let response = try_select(timeout, f).await;
    match response {
        // Deadline hit before the request finished: race the original
        // request against a fresh one. `select_ok` returns the first success,
        // or the final error if both attempts fail.
        Ok(Either::Left((_, f))) => {
            let next_req = create_f();
            pin_mut!(next_req);
            select_ok([f, next_req]).await.map(|(v, _)| v)
        }
        // Request succeeded within the deadline.
        Ok(Either::Right((x, _))) => Ok(x),
        // The timeout future cannot error (see above).
        Err(Either::Left((_, _))) => unreachable!(),
        // Request failed within the deadline: log it and retry exactly once.
        Err(Either::Right((err, _))) => {
            warn!("Request errored: {:?}", err);
            Ok(create_f().await?)
        }
    }
}

#[cfg(test)]
mod tests {
    use std::{
        sync::{Arc, Mutex},
        time::Duration,
    };

    use super::single_retry;

    // First attempt fails fast -> the immediate-retry path runs and succeeds.
    #[tokio::test]
    async fn should_retry_when_failed() {
        let call_count = Arc::new(Mutex::new(0));
        let create_fut = || {
            let call_count = call_count.clone();
            async move {
                let mut mutex_guard = call_count.lock().unwrap();
                *mutex_guard += 1;
                if *mutex_guard == 1 {
                    Err(())
                } else {
                    Ok(())
                }
            }
        };
        let result = single_retry(create_fut, Duration::from_secs(10000)).await;
        let guard = call_count.lock().unwrap();
        assert_eq!(*guard, 2);
        assert!(result.is_ok());
    }

    // Both attempts fail -> the error of the second (retry) attempt is
    // propagated to the caller.
    #[tokio::test]
    async fn should_return_second_error_when_all_failed() {
        let call_count = Arc::new(Mutex::new(0));
        let create_fut = || {
            let call_count = call_count.clone();
            async move {
                let mut mutex_guard = call_count.lock().unwrap();
                *mutex_guard += 1;
                if *mutex_guard == 1 {
                    Err::<(), _>(*mutex_guard)
                } else {
                    Err(*mutex_guard)
                }
            }
        };
        let result = single_retry(create_fut, Duration::from_secs(10000)).await;
        let guard = call_count.lock().unwrap();
        assert_eq!(*guard, 2);
        assert_eq!(result, Err(2));
    }

    // First attempt hangs past the deadline -> a second attempt is raced and
    // wins while the first is still pending.
    #[tokio::test]
    async fn should_retry_after_delay() {
        let call_count = Arc::new(Mutex::new(0));
        let create_fut = || {
            let call_count = call_count.clone();
            async move {
                let call_count = {
                    let mut mutex_guard = call_count.lock().unwrap();
                    *mutex_guard += 1;
                    *mutex_guard
                };
                if call_count == 1 {
                    tokio::time::sleep(Duration::from_secs(10000000)).await;
                    Ok::<_, ()>(())
                } else {
                    Ok(())
                }
            }
        };
        let result = single_retry(create_fut, Duration::from_millis(1)).await;
        let guard = call_count.lock().unwrap();
        assert_eq!(*guard, 2);
        assert!(result.is_ok());
    }

    // Fast success -> exactly one attempt, no retry.
    #[tokio::test]
    async fn should_not_retry_when_ok() {
        let call_count = Arc::new(Mutex::new(0));
        let create_fut = || {
            let call_count = call_count.clone();
            async move {
                let call_count = {
                    let mut mutex_guard = call_count.lock().unwrap();
                    *mutex_guard += 1;
                    *mutex_guard
                };
                if call_count == 1 {
                    Ok::<_, ()>(())
                } else {
                    Ok(())
                }
            }
        };
        let result = single_retry(create_fut, Duration::from_millis(1)).await;
        let guard = call_count.lock().unwrap();
        assert_eq!(*guard, 1);
        assert!(result.is_ok());
    }
}
use hydroflow::hydroflow_syntax;

// Minimal hydroflow surface-syntax program exercising port aliasing on a
// union/tee pivot. NOTE(review): the last edge is annotated as an error by
// the original author — this appears to be a compile-fail / diagnostic
// example rather than a working dataflow; confirm before reusing.
fn main() {
    let mut df = hydroflow_syntax! {
        pivot = union() -> tee();
        x_0 = pivot[0];
        x_1 = pivot[1];
        x_0 -> [0]x_0;
        x_1[0] -> [1]x_1; // Error: `pivot[1][0]`
    };
    df.run_available();
}
use clap::{App, Arg}; use std::io::{Read, Write}; use std::net::TcpStream; use std::thread; use std::time::{Duration, Instant}; use url::Url; fn main() -> anyhow::Result<()> { let arguments = App::new("cloudflare-2020-systems-engineering-assignment") .version("1.0") .about("Tool for making HTTP/1.1 requests and measuring statistics about them.") .author("JMS55") .arg( Arg::with_name("url") .long("url") .takes_value(true) .required(true), ) .arg( Arg::with_name("number_of_requests") .long("profile") .takes_value(true), ) .get_matches(); let url = Url::parse(arguments.value_of("url").unwrap())?; let host = url .host_str() .ok_or(anyhow::Error::msg("URL has no host."))?; let path = url.path(); match arguments.value_of("number_of_requests") { None => { let response_info = make_request(host, path)?; println!("{}", String::from_utf8_lossy(&response_info.response)); } Some(i) => { let number_of_requests = i.parse::<usize>()?; let mut times = Vec::with_capacity(number_of_requests); let mut error_codes = Vec::with_capacity(number_of_requests); let mut sizes = Vec::with_capacity(number_of_requests); for _ in 0..number_of_requests { let response_info = make_request(host, path)?; times.push(response_info.time); if response_info.code / 100 != 2 { error_codes.push(response_info.code); } sizes.push(response_info.response.len()); thread::sleep(Duration::from_millis(500)); } times.sort_unstable(); println!("Number of requests: {}.", number_of_requests); println!( "Fastest time: {:?}.", times .iter() .fold(Duration::from_secs(u64::MAX), |a, b| a.min(*b)) ); println!( "Slowest time: {:?}.", times.iter().fold(Duration::from_secs(0), |a, b| a.max(*b)) ); println!( "Mean time: {:?}.", times.iter().fold(Duration::from_secs(0), |a, b| a + *b) / times.len() as u32 ); println!("Median time: {:?}.", times.get(times.len() / 2).unwrap()); println!( "Success percentage: {}.", (number_of_requests - error_codes.len()) / number_of_requests ); println!("Error codes: {:?}.", error_codes); println!( 
"Smallest response size: {} bytes.", sizes.iter().fold(usize::MAX, |a, b| a.min(*b)) ); println!( "Largest response size: {} bytes.", sizes.iter().fold(0, |a, b| a.max(*b)) ); } }; Ok(()) } fn make_request(host: &str, path: &str) -> anyhow::Result<ResponseInfo> { let start = Instant::now(); let mut response = Vec::new(); let mut stream = TcpStream::connect(format!("{}:80", host))?; write!(stream, "GET {} HTTP/1.1\r\n", path)?; write!(stream, "Host: {}\r\n", host)?; write!(stream, "Connection: close\r\n\r\n")?; stream.read_to_end(&mut response)?; let mut code = 0; let text = String::from_utf8_lossy(&response); for line in text.lines() { if line.contains("HTTP/1.1") { let line = line.split(" "); code = line.skip(1).next().unwrap().parse::<u32>()?; break; } } Ok(ResponseInfo { response, code, time: start.elapsed(), }) } struct ResponseInfo { response: Vec<u8>, code: u32, time: Duration, }
use std::os::raw::c_int; use std::os::raw::c_void; use std::os::raw::c_char; pub type argon2_context = Argon2_Context; pub type argon2_type = Argon2_type; pub type allocate_fptr = Option<unsafe extern "C" fn(memory: *mut *mut u8, bytes_to_allocate: usize) -> c_int>; pub type deallocate_fptr = Option<unsafe extern "C" fn(memory: *mut u8, bytes_to_allocate: usize)>; pub type Argon2_ErrorCodes = c_int; pub type Argon2_type = c_int; pub type Argon2_version = c_int; #[repr(C)] pub struct Argon2_Context { pub out: *mut u8, pub outlen: u32, pub pwd: *mut u8, pub pwdlen: u32, pub salt: *mut u8, pub saltlen: u32, pub secret: *mut u8, pub secretlen: u32, pub ad: *mut u8, pub adlen: u32, pub t_cost: u32, pub m_cost: u32, pub lanes: u32, pub threads: u32, pub version: u32, pub allocate_cbk: allocate_fptr, pub free_cbk: deallocate_fptr, pub flags: u32, } pub const Argon2_ErrorCodes_ARGON2_OK: Argon2_ErrorCodes = 0; pub const Argon2_ErrorCodes_ARGON2_OUTPUT_PTR_NULL: Argon2_ErrorCodes = -1; pub const Argon2_ErrorCodes_ARGON2_OUTPUT_TOO_SHORT: Argon2_ErrorCodes = -2; pub const Argon2_ErrorCodes_ARGON2_OUTPUT_TOO_LONG: Argon2_ErrorCodes = -3; pub const Argon2_ErrorCodes_ARGON2_PWD_TOO_SHORT: Argon2_ErrorCodes = -4; pub const Argon2_ErrorCodes_ARGON2_PWD_TOO_LONG: Argon2_ErrorCodes = -5; pub const Argon2_ErrorCodes_ARGON2_SALT_TOO_SHORT: Argon2_ErrorCodes = -6; pub const Argon2_ErrorCodes_ARGON2_SALT_TOO_LONG: Argon2_ErrorCodes = -7; pub const Argon2_ErrorCodes_ARGON2_AD_TOO_SHORT: Argon2_ErrorCodes = -8; pub const Argon2_ErrorCodes_ARGON2_AD_TOO_LONG: Argon2_ErrorCodes = -9; pub const Argon2_ErrorCodes_ARGON2_SECRET_TOO_SHORT: Argon2_ErrorCodes = -10; pub const Argon2_ErrorCodes_ARGON2_SECRET_TOO_LONG: Argon2_ErrorCodes = -11; pub const Argon2_ErrorCodes_ARGON2_TIME_TOO_SMALL: Argon2_ErrorCodes = -12; pub const Argon2_ErrorCodes_ARGON2_TIME_TOO_LARGE: Argon2_ErrorCodes = -13; pub const Argon2_ErrorCodes_ARGON2_MEMORY_TOO_LITTLE: Argon2_ErrorCodes = -14; pub const 
Argon2_ErrorCodes_ARGON2_MEMORY_TOO_MUCH: Argon2_ErrorCodes = -15; pub const Argon2_ErrorCodes_ARGON2_LANES_TOO_FEW: Argon2_ErrorCodes = -16; pub const Argon2_ErrorCodes_ARGON2_LANES_TOO_MANY: Argon2_ErrorCodes = -17; pub const Argon2_ErrorCodes_ARGON2_PWD_PTR_MISMATCH: Argon2_ErrorCodes = -18; pub const Argon2_ErrorCodes_ARGON2_SALT_PTR_MISMATCH: Argon2_ErrorCodes = -19; pub const Argon2_ErrorCodes_ARGON2_SECRET_PTR_MISMATCH: Argon2_ErrorCodes = -20; pub const Argon2_ErrorCodes_ARGON2_AD_PTR_MISMATCH: Argon2_ErrorCodes = -21; pub const Argon2_ErrorCodes_ARGON2_MEMORY_ALLOCATION_ERROR: Argon2_ErrorCodes = -22; pub const Argon2_ErrorCodes_ARGON2_FREE_MEMORY_CBK_NULL: Argon2_ErrorCodes = -23; pub const Argon2_ErrorCodes_ARGON2_ALLOCATE_MEMORY_CBK_NULL: Argon2_ErrorCodes = -24; pub const Argon2_ErrorCodes_ARGON2_INCORRECT_PARAMETER: Argon2_ErrorCodes = -25; pub const Argon2_ErrorCodes_ARGON2_INCORRECT_TYPE: Argon2_ErrorCodes = -26; pub const Argon2_ErrorCodes_ARGON2_OUT_PTR_MISMATCH: Argon2_ErrorCodes = -27; pub const Argon2_ErrorCodes_ARGON2_THREADS_TOO_FEW: Argon2_ErrorCodes = -28; pub const Argon2_ErrorCodes_ARGON2_THREADS_TOO_MANY: Argon2_ErrorCodes = -29; pub const Argon2_ErrorCodes_ARGON2_MISSING_ARGS: Argon2_ErrorCodes = -30; pub const Argon2_ErrorCodes_ARGON2_ENCODING_FAIL: Argon2_ErrorCodes = -31; pub const Argon2_ErrorCodes_ARGON2_DECODING_FAIL: Argon2_ErrorCodes = -32; pub const Argon2_ErrorCodes_ARGON2_THREAD_FAIL: Argon2_ErrorCodes = -33; pub const Argon2_ErrorCodes_ARGON2_DECODING_LENGTH_FAIL: Argon2_ErrorCodes = -34; pub const Argon2_ErrorCodes_ARGON2_VERIFY_MISMATCH: Argon2_ErrorCodes = -35; pub const Argon2_type_Argon2_d: Argon2_type = 0; pub const Argon2_type_Argon2_i: Argon2_type = 1; pub const Argon2_type_Argon2_id: Argon2_type = 2; pub const Argon2_version_ARGON2_VERSION_10: Argon2_version = 0x10; pub const Argon2_version_ARGON2_VERSION_13: Argon2_version = 0x13; pub const Argon2_version_ARGON2_VERSION_NUMBER: Argon2_version = 
Argon2_version_ARGON2_VERSION_13; extern "C" { pub fn argon2_ctx(context: *mut argon2_context, type_: argon2_type) -> c_int; pub fn argon2i_hash_encoded(t_cost: u32, m_cost: u32, parallelism: u32, pwd: *const c_void, pwdlen: usize, salt: *const c_void, saltlen: usize, hashlen: usize, encoded: *mut c_char, encodedlen: usize) -> c_int; pub fn argon2i_hash_raw(t_cost: u32, m_cost: u32, parallelism: u32, pwd: *const c_void, pwdlen: usize, salt: *const c_void, saltlen: usize, hash: *mut c_void, hashlen: usize) -> c_int; pub fn argon2d_hash_encoded(t_cost: u32, m_cost: u32, parallelism: u32, pwd: *const c_void, pwdlen: usize, salt: *const c_void, saltlen: usize, hashlen: usize, encoded: *mut c_char, encodedlen: usize) -> c_int; pub fn argon2d_hash_raw(t_cost: u32, m_cost: u32, parallelism: u32, pwd: *const c_void, pwdlen: usize, salt: *const c_void, saltlen: usize, hash: *mut c_void, hashlen: usize) -> c_int; pub fn argon2id_hash_encoded(t_cost: u32, m_cost: u32, parallelism: u32, pwd: *const c_void, pwdlen: usize, salt: *const c_void, saltlen: usize, hashlen: usize, encoded: *mut c_char, encodedlen: usize) -> c_int; pub fn argon2id_hash_raw(t_cost: u32, m_cost: u32, parallelism: u32, pwd: *const c_void, pwdlen: usize, salt: *const c_void, saltlen: usize, hash: *mut c_void, hashlen: usize) -> c_int; pub fn argon2_hash(t_cost: u32, m_cost: u32, parallelism: u32, pwd: *const c_void, pwdlen: usize, salt: *const c_void, saltlen: usize, hash: *mut c_void, hashlen: usize, encoded: *mut c_char, encodedlen: usize, type_: argon2_type, version: u32) -> c_int; pub fn argon2i_verify(encoded: *const c_char, pwd: *const c_void, pwdlen: usize) -> c_int; pub fn argon2d_verify(encoded: *const c_char, pwd: *const c_void, pwdlen: usize) -> c_int; pub fn argon2id_verify(encoded: *const c_char, pwd: *const c_void, pwdlen: usize) -> c_int; pub fn argon2_verify(encoded: *const c_char, pwd: *const c_void, pwdlen: usize, type_: argon2_type) -> c_int; pub fn argon2d_ctx(context: *mut 
argon2_context) -> c_int; pub fn argon2i_ctx(context: *mut argon2_context) -> c_int; pub fn argon2id_ctx(context: *mut argon2_context) -> c_int; pub fn argon2d_verify_ctx(context: *mut argon2_context, hash: *const c_char) -> c_int; pub fn argon2i_verify_ctx(context: *mut argon2_context, hash: *const c_char) -> c_int; pub fn argon2id_verify_ctx(context: *mut argon2_context, hash: *const c_char) -> c_int; pub fn argon2_verify_ctx(context: *mut argon2_context, hash: *const c_char, type_: argon2_type) -> c_int; pub fn argon2_error_message( error_code: c_int) -> *const c_char; pub fn argon2_encodedlen(t_cost: u32, m_cost: u32, parallelism: u32, saltlen: u32, hashlen: u32, type_: argon2_type) -> usize; pub fn argon2_type2string(type_: argon2_type, uppercase: c_int) -> *const c_char; }
use std::collections::HashMap; use std::io::ErrorKind::WouldBlock; use std::io::{Read, Write}; use std::ops::{Deref, DerefMut}; use std::os::unix::io::AsRawFd; use netlib::{Event, PollReactor}; use crate::codec::{Codec, Decode, Encode}; // ----------------------------------------------------------------------------- // - Connections - // ----------------------------------------------------------------------------- pub struct Connections<T, U, D> where T: AsRawFd + Read + Write, U: Codec, { inner: HashMap<u64, (PollReactor<T>, U)>, data: HashMap<u64, D>, } // ----------------------------------------------------------------------------- // - Deref - // ----------------------------------------------------------------------------- impl<T, U, D> Deref for Connections<T, U, D> where T: AsRawFd + Read + Write, U: Codec, { type Target = HashMap<u64, (PollReactor<T>, U)>; fn deref(&self) -> &Self::Target { &self.inner } } impl<T, U, D> DerefMut for Connections<T, U, D> where T: AsRawFd + Read + Write, U: Codec, { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.inner } } // ----------------------------------------------------------------------------- // - Impl - // ----------------------------------------------------------------------------- impl<T, U, D> Connections<T, U, D> where T: AsRawFd + Read + Write, U: Codec, { pub fn new() -> Self { Self { inner: HashMap::new(), data: HashMap::new(), } } pub fn update(&mut self, event: &Event) { self.inner .get_mut(&event.owner) .map(|(c, _)| c.update(event)); } pub fn associate_data(&mut self, reactor_id: u64, data: D) { self.data.insert(reactor_id, data); } pub fn data(&mut self, reactor_id: u64) -> Option<&mut D> { self.data.get_mut(&reactor_id) } pub fn recv(&mut self, reactor_id: u64) -> Vec<U::Item> { let (mut con, mut codec) = match self.remove(&reactor_id) { Some((con, codec)) => (con, codec), None => return Vec::new(), }; let mut data = Vec::new(); loop { match codec.decode(&mut con) { Decode::Value(val) => 
data.push(val), Decode::NoValue => break, Decode::Failed => return data, } } self.insert(reactor_id, (con, codec)); data } pub fn send(&mut self, reactor_id: u64, message: U::Item) -> Option<()> { let (mut con, mut codec) = self.remove(&reactor_id).unwrap(); match codec.encode(&mut con, message) { Encode::Success => { self.insert(reactor_id, (con, codec)); Some(()) } Encode::Fail => { self.data.remove(&reactor_id); None } } } }
/*
 * A sample API conforming to the draft standard OGC API - Features - Part 1: Core
 *
 * This is a sample OpenAPI definition that conforms to the conformance classes \"Core\", \"GeoJSON\", \"HTML\" and \"OpenAPI 3.0\" of the draft standard \"OGC API - Features - Part 1: Core\". This example is a generic OGC API Features definition that uses path parameters to describe all feature collections and all features. The generic OpenAPI definition does not provide any details on the collections or the feature content. This information is only available from accessing the feature collection resources. There is [another example](ogcapi-features-1-example2.yaml) that specifies each collection explicitly.
 *
 * The version of the OpenAPI document: 1.0.0
 * Contact: info@example.org
 * Generated by: https://openapi-generator.tech
 */

/// GeoJSON MultiPolygon geometry: coordinates are nested as
/// polygons -> rings -> positions -> components.
/// NOTE(review): generated code — the Serialize/Deserialize derives assume
/// serde is imported at a higher level in this crate; confirm before moving.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MultipolygonGeoJson {
    // Serialized as "type" (renamed because `type` is a Rust keyword).
    #[serde(rename = "type")]
    pub _type: Type,
    #[serde(rename = "coordinates")]
    pub coordinates: Vec<Vec<Vec<Vec<f32>>>>,
}

impl MultipolygonGeoJson {
    /// Plain constructor; performs no validation of the coordinate nesting.
    pub fn new(_type: Type, coordinates: Vec<Vec<Vec<Vec<f32>>>>) -> MultipolygonGeoJson {
        MultipolygonGeoJson {
            _type,
            coordinates,
        }
    }
}

/// The GeoJSON geometry type tag; only "MultiPolygon" is valid here.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum Type {
    #[serde(rename = "MultiPolygon")]
    MultiPolygon,
}
/// Rasterizes the segment from `(x0, y0)` towards `(x1, y1)` with Bresenham's
/// algorithm. The end coordinate is excluded (half-open range along the
/// driving axis), so a zero-length segment yields an empty vec.
pub fn get_line((mut x0, mut y0): (i32, i32), (mut x1, mut y1): (i32, i32)) -> Vec<(i32, i32)> {
    // Drive the loop along the axis with the larger delta; when the line is
    // steep we swap x/y here and swap back when emitting points.
    let steep = (y1 - y0).abs() > (x1 - x0).abs();
    if steep {
        std::mem::swap(&mut x0, &mut y0);
        std::mem::swap(&mut x1, &mut y1);
    }
    // Always walk left-to-right.
    if x0 > x1 {
        std::mem::swap(&mut x0, &mut x1);
        std::mem::swap(&mut y0, &mut y1);
    }

    let dx = x1 - x0;
    let dy = (y1 - y0).abs();
    let step = if y0 < y1 { 1 } else { -1 };

    let mut points = Vec::new();
    let mut err = 0;
    let mut y = y0;
    for x in x0..x1 {
        points.push(if steep { (y, x) } else { (x, y) });
        err += dy;
        if 2 * err >= dx {
            y += step;
            err -= dx;
        }
    }
    points
}

/// Returns the first point along the rasterized segment from `point_a` to
/// `point_b` for which `is_valid` holds, or `None` if no point qualifies.
pub fn raycast<F>(point_a: (i32, i32), point_b: (i32, i32), is_valid: F) -> Option<(i32, i32)>
where
    F: Fn((i32, i32)) -> bool,
{
    get_line(point_a, point_b)
        .into_iter()
        .find(|&point| is_valid(point))
}

/// Rounds `num` to the nearest multiple of `n`.
pub fn round_n(num: f32, n: f32) -> f32 {
    (num / n).round() * n
}
use near_sdk::borsh::{self, BorshDeserialize, BorshSerialize};
use near_sdk::{AccountId};
use near_sdk::json_types::{U128};
use near_sdk::collections::{Vector};
use near_sdk::serde::{Deserialize, Serialize};

/// A single reward entry persisted on-chain (borsh-serialized).
#[derive(BorshDeserialize, BorshSerialize)]
pub struct Reward {
    amount: u128,
    memo: String,
}

/// JSON-friendly view of a [`Reward`]: the u128 amount is wrapped in `U128`
/// so it serializes as a string for clients.
#[derive(Deserialize, Serialize)]
pub struct WrappedReward {
    amount: U128,
    memo: String,
}

/// Append-only reward log plus a separately-tracked running amount.
#[derive(BorshDeserialize, BorshSerialize)]
pub struct Rewards {
    rewards: Vector<Reward>,
    amount: u128,
}

impl Rewards {
    /// Creates an empty log; the account id bytes seed a unique storage
    /// prefix for the backing `Vector`.
    pub fn new(account_id: AccountId) -> Self {
        Self {
            rewards: Vector::new(account_id.as_bytes().to_vec()),
            amount: 0,
        }
    }

    /// Appends a reward to the persistent log.
    pub fn internal_add_new_reward(&mut self, reward: Reward) {
        self.rewards.push(&reward);
    }

    /// Overwrites the running amount (callers maintain the invariant).
    pub fn internal_set_reward_amount(&mut self, amount: u128) {
        self.amount = amount;
    }

    /// Current running amount.
    pub fn internal_reward_amount(&self) -> u128 {
        self.amount
    }

    /// Fetches a reward by index; panics with ERR_NO_REWARD when out of
    /// range. (Dropped the redundant `.into()` — `Vector::get` already
    /// yields a `Reward`.)
    pub fn get_reward(&self, reward_id: u64) -> Reward {
        self.rewards.get(reward_id).expect("ERR_NO_REWARD")
    }

    /// Number of rewards recorded so far.
    pub fn get_rewards_len(&self) -> u64 {
        self.rewards.len()
    }
}

impl Reward {
    pub fn new(amount: U128, memo: String) -> Self {
        Self {
            amount: amount.into(),
            memo,
        }
    }

    pub fn get_amount(&self) -> u128 {
        self.amount
    }

    pub fn get_memo(&self) -> String {
        self.memo.clone()
    }

    /// Converts to the JSON-serializable representation.
    pub fn to_wreward(&self) -> WrappedReward {
        WrappedReward::new(self)
    }
}

impl WrappedReward {
    pub fn new(reward: &Reward) -> Self {
        Self {
            amount: reward.get_amount().into(),
            memo: reward.get_memo(),
        }
    }
}
// Auto-generated (svd2rust-style) accessors for the TAMP status clear
// register (SCR). Every field below is a write-only flag-clear bit.

#[doc = "Register `SCR` writer"]
pub type W = crate::W<SCR_SPEC>;
#[doc = "Field `CTAMP1F` writer - CTAMP1F"]
pub type CTAMP1F_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CTAMP2F` writer - CTAMP2F"]
pub type CTAMP2F_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CTAMP3F` writer - CTAMP3F"]
pub type CTAMP3F_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CTAMP4F` writer - CTAMP4F"]
pub type CTAMP4F_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CTAMP5F` writer - CTAMP5F"]
pub type CTAMP5F_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CTAMP6F` writer - CTAMP6F"]
pub type CTAMP6F_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CTAMP7F` writer - CTAMP7F"]
pub type CTAMP7F_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CTAMP8F` writer - CTAMP8F"]
pub type CTAMP8F_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CITAMP1F` writer - CITAMP1F"]
pub type CITAMP1F_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CITAMP2F` writer - CITAMP2F"]
pub type CITAMP2F_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CITAMP3F` writer - CITAMP3F"]
pub type CITAMP3F_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CITAMP5F` writer - CITAMP5F"]
pub type CITAMP5F_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CITAMP8F` writer - CITAMP8F"]
pub type CITAMP8F_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;

impl W {
    #[doc = "Bit 0 - CTAMP1F"]
    #[inline(always)]
    #[must_use]
    pub fn ctamp1f(&mut self) -> CTAMP1F_W<SCR_SPEC, 0> {
        CTAMP1F_W::new(self)
    }
    #[doc = "Bit 1 - CTAMP2F"]
    #[inline(always)]
    #[must_use]
    pub fn ctamp2f(&mut self) -> CTAMP2F_W<SCR_SPEC, 1> {
        CTAMP2F_W::new(self)
    }
    #[doc = "Bit 2 - CTAMP3F"]
    #[inline(always)]
    #[must_use]
    pub fn ctamp3f(&mut self) -> CTAMP3F_W<SCR_SPEC, 2> {
        CTAMP3F_W::new(self)
    }
    #[doc = "Bit 3 - CTAMP4F"]
    #[inline(always)]
    #[must_use]
    pub fn ctamp4f(&mut self) -> CTAMP4F_W<SCR_SPEC, 3> {
        CTAMP4F_W::new(self)
    }
    #[doc = "Bit 4 - CTAMP5F"]
    #[inline(always)]
    #[must_use]
    pub fn ctamp5f(&mut self) -> CTAMP5F_W<SCR_SPEC, 4> {
        CTAMP5F_W::new(self)
    }
    #[doc = "Bit 5 - CTAMP6F"]
    #[inline(always)]
    #[must_use]
    pub fn ctamp6f(&mut self) -> CTAMP6F_W<SCR_SPEC, 5> {
        CTAMP6F_W::new(self)
    }
    #[doc = "Bit 6 - CTAMP7F"]
    #[inline(always)]
    #[must_use]
    pub fn ctamp7f(&mut self) -> CTAMP7F_W<SCR_SPEC, 6> {
        CTAMP7F_W::new(self)
    }
    #[doc = "Bit 7 - CTAMP8F"]
    #[inline(always)]
    #[must_use]
    pub fn ctamp8f(&mut self) -> CTAMP8F_W<SCR_SPEC, 7> {
        CTAMP8F_W::new(self)
    }
    // Note the gap: internal-tamper clear bits start at bit 16 and are
    // sparse (1, 2, 3, 5, 8), matching the hardware register layout.
    #[doc = "Bit 16 - CITAMP1F"]
    #[inline(always)]
    #[must_use]
    pub fn citamp1f(&mut self) -> CITAMP1F_W<SCR_SPEC, 16> {
        CITAMP1F_W::new(self)
    }
    #[doc = "Bit 17 - CITAMP2F"]
    #[inline(always)]
    #[must_use]
    pub fn citamp2f(&mut self) -> CITAMP2F_W<SCR_SPEC, 17> {
        CITAMP2F_W::new(self)
    }
    #[doc = "Bit 18 - CITAMP3F"]
    #[inline(always)]
    #[must_use]
    pub fn citamp3f(&mut self) -> CITAMP3F_W<SCR_SPEC, 18> {
        CITAMP3F_W::new(self)
    }
    #[doc = "Bit 20 - CITAMP5F"]
    #[inline(always)]
    #[must_use]
    pub fn citamp5f(&mut self) -> CITAMP5F_W<SCR_SPEC, 20> {
        CITAMP5F_W::new(self)
    }
    #[doc = "Bit 23 - CITAMP8F"]
    #[inline(always)]
    #[must_use]
    pub fn citamp8f(&mut self) -> CITAMP8F_W<SCR_SPEC, 23> {
        CITAMP8F_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}

#[doc = "TAMP status clear register\n\nYou can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`scr::W`](W). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct SCR_SPEC;
impl crate::RegisterSpec for SCR_SPEC {
    type Ux = u32;
}
#[doc = "`write(|w| ..)` method takes [`scr::W`](W) writer structure"]
impl crate::Writable for SCR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets SCR to value 0"]
impl crate::Resettable for SCR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
mod common;

use crate::common::random_dfa;
use cached::cached;
use proptest::prelude::*;
use valis_automata::{
    dfa::{minimization::minimize, standard::StandardDFA, DFA},
    range_set::Range,
};

// Not a real assertion test: renders a random DFA to disk so it can be
// inspected manually.
#[test]
fn fake_test_to_generate_random_dfa() {
    let dfa = random_dfa::<bool>(32, 0.1);
    common::render_dfa_to_file("random_dfa.dot", &dfa);
}

// The fixture DFAs below are memoized with `cached!` so every proptest case
// reuses one instance instead of rebuilding (and re-rendering) it each time.

cached! {
    SMALL_NON_MINIMAL;
    fn small_non_minimal_dfa() -> StandardDFA<bool, u8> = {
        let states: Range<u8> = (0..=4).into();
        let accept_states = [4].into_iter().cloned().collect();
        let dead_state = Some(2);
        let start_state = 0;
        // Transition table, one row per state: [on-false, on-true].
        let transitions = vec![
            1,3,
            2,2,
            2,2,
            4,1,
            4,4,
        ].into_boxed_slice();
        let dfa = StandardDFA::<bool, u8>::new(states, transitions, accept_states, start_state, dead_state);
        common::render_dfa_to_file("small_non_minimal.dot", &dfa);
        dfa
    }
}

cached! {
    SMALL_MINIMAL;
    fn small_minimal_dfa() -> StandardDFA<bool, u8> = {
        let dfa = small_non_minimal_dfa();
        minimize(&dfa)
    }
}

cached! {
    LARGE_NON_MINIMAL;
    fn large_non_minimal_dfa() -> StandardDFA<bool, u8> = {
        let states: Range<u8> = (0..=14).into();
        let accept_states = [5].into_iter().cloned().collect();
        let dead_state = Some(14);
        let start_state = 0;
        // Transition table, one row per state: [on-false, on-true].
        let transitions = vec![
            1, 2,
            3, 4,
            5, 6,
            10, 7,
            7, 11,
            5, 5,
            8, 9,
            10, 11,
            12, 13,
            8, 13,
            14, 14,
            14, 14,
            14, 14,
            14, 14,
            14, 14,
        ].into_boxed_slice();
        let dfa = StandardDFA::<bool, u8>::new(states, transitions, accept_states, start_state, dead_state);
        common::render_dfa_to_file("large_non_minimal.dot", &dfa);
        dfa
    }
}

cached! {
    LARGE_MINIMAL;
    fn large_minimal_dfa() -> StandardDFA<bool, u8> = {
        let dfa = large_non_minimal_dfa();
        minimize(&dfa)
    }
}

// Property tests: each fixture DFA (and its minimized form) must accept
// exactly the language "10[01]*" and reject its complement.
proptest! {
    #[test]
    fn accept_large_non_minimal_dfa(s in "10[01]*") {
        let dfa = large_non_minimal_dfa();
        let string = common::convert_string(s, common::binary_converter());
        prop_assert!(dfa.accept(string));
    }

    #[test]
    fn reject_large_non_minimal_dfa(s in r"(?x) (0[01]*) | (11[01]*) ") {
        let dfa = large_non_minimal_dfa();
        let string = common::convert_string(s, common::binary_converter());
        prop_assert!(!dfa.accept(string));
    }

    #[test]
    fn accept_large_minimal_dfa(s in "10[01]*") {
        let dfa = large_minimal_dfa();
        let string = common::convert_string(s, common::binary_converter());
        prop_assert!(dfa.accept(string));
    }

    #[test]
    fn reject_large_minimal_dfa(s in r"(?x) (0[01]*) | (11[01]*) ") {
        let dfa = large_minimal_dfa();
        let string = common::convert_string(s, common::binary_converter());
        prop_assert!(!dfa.accept(string));
    }

    #[test]
    fn accept_small_non_minimal_dfa(s in "10[01]*") {
        let dfa_1 = small_non_minimal_dfa();
        let string = common::convert_string(s, common::binary_converter());
        prop_assert!(dfa_1.accept(string));
    }

    #[test]
    fn reject_small_non_minimal_dfa(s in r"(?x) (0[01]*) | (11[01]*) ") {
        let dfa = small_non_minimal_dfa();
        let string = common::convert_string(s, common::binary_converter());
        prop_assert!(!dfa.accept(string));
    }

    #[test]
    fn accept_small_minimal_dfa(s in "10[01]*") {
        let dfa_1 = small_minimal_dfa();
        let string = common::convert_string(s, common::binary_converter());
        prop_assert!(dfa_1.accept(string));
    }

    #[test]
    fn reject_small_minimal_dfa(s in r"(?x) (0[01]*) | (11[01]*) ") {
        let dfa = small_minimal_dfa();
        let string = common::convert_string(s, common::binary_converter());
        prop_assert!(!dfa.accept(string));
    }
}
// Given two binary strings, return their sum as a binary string.
// Inputs are non-empty and contain only the digits 0 and 1.
// Example 1:
//   Input: a = "11", b = "1"    Output: "100"
// Example 2:
//   Input: a = "1010", b = "1011"    Output: "10101"
struct Solution{}

impl Solution {
    /// Adds two binary-digit strings and returns the sum in binary.
    ///
    /// BUG FIX / generalization: the previous implementation packed each
    /// operand into a `u128` via `rotate_left`, which silently wraps for
    /// inputs longer than 128 bits and can overflow on addition. This
    /// digit-wise ripple-carry version handles arbitrary lengths while
    /// producing identical output for all valid shorter inputs (leading
    /// zeros are trimmed, matching the old `format!("{:b}", ..)` behavior).
    pub fn add_binary(a: String, b: String) -> String {
        let a = a.as_bytes();
        let b = b.as_bytes();
        // Result is built least-significant digit first, then reversed.
        let mut digits = Vec::with_capacity(a.len().max(b.len()) + 1);
        let (mut i, mut j, mut carry) = (a.len(), b.len(), 0u8);
        while i > 0 || j > 0 || carry > 0 {
            let mut sum = carry;
            if i > 0 {
                i -= 1;
                sum += a[i] - b'0';
            }
            if j > 0 {
                j -= 1;
                sum += b[j] - b'0';
            }
            digits.push(b'0' + (sum & 1));
            carry = sum >> 1;
        }
        if digits.is_empty() {
            // Both inputs empty: the old code rendered 0 as "0".
            return "0".to_string();
        }
        digits.reverse();
        // Trim leading zeros but always keep at least one digit.
        let start = digits
            .iter()
            .position(|&d| d == b'1')
            .unwrap_or(digits.len() - 1);
        // Safe: `digits` contains only ASCII '0'/'1'.
        String::from_utf8(digits[start..].to_vec()).unwrap()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_add_binary() {
        assert_eq!(Solution::add_binary(String::from("11"), String::from("1")), String::from("100"));
        assert_eq!(Solution::add_binary(String::from("1010"), String::from("1011")), String::from("10101"));
        assert_eq!(Solution::add_binary(String::from("1110001"), String::from("110100100")), String::from("1000010101"));
        assert_eq!(Solution::add_binary(String::from("10100000100100110110010000010101111011011001101110111111111101000000101111001110001111100001101"), String::from("110101001011101110001111100110001010100001101011101010000011011011001011101111001100000011011110011")), String::from("110111101100010011000101110110100000011101000101011001000011011000001100011110011010010011000000000"));
    }
}
use aoc2020::aoc::{load_data, Res}; use std::collections::HashMap; use std::collections::HashSet; use std::fmt::Debug; use std::fmt::Display; use std::hash::Hash; use std::io::BufRead; use std::collections::VecDeque; fn find_second_star(numbers: &[i64], invalid: i64)-> Vec<i64>{ let mut v = Vec::new(); 'outer: for i in 0..numbers.len(){ for j in i..numbers.len(){ v.push(numbers[j]); let sum = v.iter().fold(0, |acc, el| acc + el); if sum == invalid { // found the solution, exit without cleanup println!("Solution is found!"); break 'outer; } else if sum > invalid{ break; } } v.clear(); } v } fn is_valid(preamble: &VecDeque<i64>, n: i64) -> bool{ let mut valid = false; 'outer: for (i,p) in preamble.iter().enumerate(){ for (j,q) in preamble.iter().enumerate(){ if i != j { valid = n == q + p; if valid { break 'outer; } } } } valid } fn invalid_numbers(v: &[i64], preamble_length: usize) -> Vec<i64>{ let mut invalid = Vec::new(); let mut preamble = VecDeque::with_capacity(preamble_length as usize); for (i, n) in v.iter().enumerate(){ if i >= preamble_length { if !is_valid(&preamble, *n){ invalid.push(*n); } preamble.pop_front(); } preamble.push_back(*n); println!("preamble[i={}] is {:?}", i, preamble); } invalid } fn main() -> Res<()> { let data = load_data("examples/data/day9.txt")?; let lines : Vec<_> = data.lines().filter_map(|l| l.ok().and_then(|s| s.parse::<i64>().ok())).collect(); let invalid = invalid_numbers(&lines, 25).pop().unwrap(); println!("invalid: {:?}", invalid); let mut second = find_second_star(&lines, invalid); second.sort(); println!("Second: {:?}", second); println!("Res: {}", second[0] + second[second.len() -1]); Ok(()) }
// Auto-generated (svd2rust-style) reader/writer accessors for the
// CLOCK_CTL[%s] peripheral clock divider control register array.

#[doc = "Reader of register CLOCK_CTL[%s]"]
pub type R = crate::R<u32, super::CLOCK_CTL>;
#[doc = "Writer for register CLOCK_CTL[%s]"]
pub type W = crate::W<u32, super::CLOCK_CTL>;
#[doc = "Register CLOCK_CTL[%s] `reset()`'s with value 0xff"]
impl crate::ResetValue for super::CLOCK_CTL {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        // Reset 0xff == DIV_SEL '63' + TYPE_SEL '3' (see field docs below).
        0xff
    }
}
#[doc = "Reader of field `DIV_SEL`"]
pub type DIV_SEL_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `DIV_SEL`"]
pub struct DIV_SEL_W<'a> {
    w: &'a mut W,
}
impl<'a> DIV_SEL_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // DIV_SEL occupies bits 0:5.
        self.w.bits = (self.w.bits & !0x3f) | ((value as u32) & 0x3f);
        self.w
    }
}
#[doc = "Reader of field `TYPE_SEL`"]
pub type TYPE_SEL_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `TYPE_SEL`"]
pub struct TYPE_SEL_W<'a> {
    w: &'a mut W,
}
impl<'a> TYPE_SEL_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // TYPE_SEL occupies bits 6:7.
        self.w.bits = (self.w.bits & !(0x03 << 6)) | (((value as u32) & 0x03) << 6);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:5 - Specifies one of the dividers of the divider type specified by TYPE_SEL. If DIV_SEL is '63' and TYPE_SEL is '3' (default/reset value), no divider is specified and no clock control signal(s) are generated. When transitioning a clock between two out-of-phase dividers, spurious clock control signals may be generated for one 'clk_peri' cycle during this transition. These clock control signals may cause a single clock period that is smaller than any of the two divider periods. To prevent these spurious clock signals, the clock multiplexer can be disconnected (DIV_SEL is '63' and TYPE_SEL is '3') for a transition time that is larger than the smaller of the two divider periods."]
    #[inline(always)]
    pub fn div_sel(&self) -> DIV_SEL_R {
        DIV_SEL_R::new((self.bits & 0x3f) as u8)
    }
    #[doc = "Bits 6:7 - Specifies divider type: 0: 8.0 (integer) clock dividers. 1: 16.0 (integer) clock dividers. 2: 16.5 (fractional) clock dividers. 3: 24.5 (fractional) clock dividers."]
    #[inline(always)]
    pub fn type_sel(&self) -> TYPE_SEL_R {
        TYPE_SEL_R::new(((self.bits >> 6) & 0x03) as u8)
    }
}
impl W {
    #[doc = "Bits 0:5 - Specifies one of the dividers of the divider type specified by TYPE_SEL. If DIV_SEL is '63' and TYPE_SEL is '3' (default/reset value), no divider is specified and no clock control signal(s) are generated. When transitioning a clock between two out-of-phase dividers, spurious clock control signals may be generated for one 'clk_peri' cycle during this transition. These clock control signals may cause a single clock period that is smaller than any of the two divider periods. To prevent these spurious clock signals, the clock multiplexer can be disconnected (DIV_SEL is '63' and TYPE_SEL is '3') for a transition time that is larger than the smaller of the two divider periods."]
    #[inline(always)]
    pub fn div_sel(&mut self) -> DIV_SEL_W {
        DIV_SEL_W { w: self }
    }
    #[doc = "Bits 6:7 - Specifies divider type: 0: 8.0 (integer) clock dividers. 1: 16.0 (integer) clock dividers. 2: 16.5 (fractional) clock dividers. 3: 24.5 (fractional) clock dividers."]
    #[inline(always)]
    pub fn type_sel(&mut self) -> TYPE_SEL_W {
        TYPE_SEL_W { w: self }
    }
}
use mysql_rent::Rent;

/// Renting a MySQL instance with all defaults should succeed and yield a
/// usable connection URL.
#[tokio::test]
async fn should_work_with_no_params() {
    let rented = Rent::new().await.unwrap();
    let url = rented.mysql_url();
    println!("connection URL: {}", url);
    drop(rented);
}

/// The builder path should honour explicit database, port, and password
/// options and still produce a working instance.
#[tokio::test]
async fn should_work_with_options() {
    let rented = Rent::builder()
        .database("contacts")
        .local_port(3307)
        .root_password("chupacabra111")
        .rent()
        .await
        .unwrap();
    let url = rented.mysql_url();
    println!("connection URL: {}", url);
    drop(rented);
}
use std::net::SocketAddr;

use chrono::prelude::*;
use hydroflow::hydroflow_syntax;
use hydroflow::util::{UdpLinesSink, UdpLinesStream};

use crate::helpers::{deserialize_json, serialize_json};
use crate::protocol::EchoMsg;

/// Runs the echo client: forwards each stdin line to the server as a
/// JSON-serialized `EchoMsg` (timestamped with the current UTC time) over
/// `outbound`, and prints every message received on `inbound`.
///
/// Runs forever (or until the underlying streams close); panics if the
/// dataflow terminates with an error.
pub(crate) async fn run_client(
    outbound: UdpLinesSink,
    inbound: UdpLinesStream,
    server_addr: SocketAddr,
) {
    println!("Attempting to connect to server at {:?}", server_addr);
    println!("Client live!");

    let mut flow = hydroflow_syntax! {
        // take stdin and send to server as an Echo::Message
        // (each line is paired with the server address, serialized to JSON,
        // and handed to the UDP sink)
        source_stdin() -> map(|l| (EchoMsg{ payload: l.unwrap(), ts: Utc::now(), }, server_addr) )
            -> map(|(msg, addr)| (serialize_json(msg), addr))
            -> dest_sink(outbound);

        // receive and print messages
        source_stream(inbound) -> map(deserialize_json)
            -> for_each(|(m, _a): (EchoMsg, SocketAddr) | println!("{:?}", m));
    };

    flow.run_async().await.unwrap();
}
/// Build script: link dynamically against the CUDA driver library and tell
/// rustc where to find it. The search path can be overridden through the
/// `CUDA_LIB_PATH` environment variable; otherwise the default CUDA 11.3
/// install location is used.
fn main() {
    println!("cargo:rustc-link-lib=dylib=cuda");

    // Environment override first, default install path as fallback.
    let cuda_path = match std::env::var("CUDA_LIB_PATH") {
        Ok(path) => path,
        Err(_) => "/usr/local/cuda-11.3/lib64/".to_string(),
    };
    println!("cargo:rustc-link-search=native={}", cuda_path);
}
/*
 * Copyright Stalwart Labs Ltd. See the COPYING
 * file at the top-level directory of this distribution.
 *
 * Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
 * https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
 * <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your
 * option. This file may not be copied, modified, or distributed
 * except according to those terms.
 */

use crate::core::session::URLParser;

pub mod copy;
pub mod download;
pub mod helpers;
pub mod upload;

/// URL path parameters recognised by the blob endpoints.
pub enum URLParameter {
    AccountId,
    BlobId,
    Name,
    Type,
}

impl URLParser for URLParameter {
    /// Maps a raw path-segment name to its parameter variant, or `None` for
    /// any unrecognised segment.
    fn parse(value: &str) -> Option<Self> {
        let parameter = match value {
            "accountId" => URLParameter::AccountId,
            "blobId" => URLParameter::BlobId,
            "name" => URLParameter::Name,
            "type" => URLParameter::Type,
            _ => return None,
        };
        Some(parameter)
    }
}
use crate::PemStorable;

pub mod x25519;

/// A paired private/public encryption key for mixnet use.
///
/// The `Priv`/`Pub` type parameters are tied to each other through the
/// associated types on the two key traits below, so a keypair can only be
/// formed from matching halves.
pub trait MixnetEncryptionKeyPair<Priv, Pub>
where
    Priv: MixnetEncryptionPrivateKey,
    Pub: MixnetEncryptionPublicKey,
{
    /// Generates a fresh keypair.
    fn new() -> Self;

    /// Borrows the private half of the pair.
    fn private_key(&self) -> &Priv;

    /// Borrows the public half of the pair.
    fn public_key(&self) -> &Pub;

    /// Reconstructs a keypair from raw byte encodings of both halves.
    fn from_bytes(priv_bytes: &[u8], pub_bytes: &[u8]) -> Self;

    // TODO: encryption related methods
}

/// A mixnet public encryption key, derivable from its private counterpart
/// (via the `From<&PrivateKeyMaterial>` bound).
pub trait MixnetEncryptionPublicKey:
    Sized + PemStorable + for<'a> From<&'a <Self as MixnetEncryptionPublicKey>::PrivateKeyMaterial>
{
    // we need to couple public and private keys together
    type PrivateKeyMaterial: MixnetEncryptionPrivateKey<PublicKeyMaterial = Self>;

    /// Serializes the key to raw bytes.
    fn to_bytes(&self) -> Vec<u8>;

    /// Deserializes a key from raw bytes.
    fn from_bytes(b: &[u8]) -> Self;
}

/// A mixnet private encryption key.
pub trait MixnetEncryptionPrivateKey: Sized + PemStorable {
    // we need to couple public and private keys together
    type PublicKeyMaterial: MixnetEncryptionPublicKey<PrivateKeyMaterial = Self>;

    /// Returns the associated public key
    fn public_key(&self) -> Self::PublicKeyMaterial {
        // Default implementation: derive the public key through the
        // `From<&PrivateKeyMaterial>` impl required on the public-key trait.
        self.into()
    }

    /// Serializes the key to raw bytes.
    fn to_bytes(&self) -> Vec<u8>;

    /// Deserializes a key from raw bytes.
    fn from_bytes(b: &[u8]) -> Self;
}
use std::io::prelude::*; use std::fs::File; use std::env; extern crate yaml_rust; use yaml_rust::YamlLoader; use yaml_rust::Yaml; use yaml_rust::yaml::Hash; fn merge_hashes(mut left_hash: Hash, right_hash: Hash) -> Yaml { right_hash.into_iter().for_each(|(key, value)| { left_hash.insert(key, value); }); Yaml::Hash(left_hash) } fn merge_hash(left: Hash, right: Yaml) -> Yaml { match right { Yaml::Hash(right_hash) => merge_hashes(left, right_hash), _ => Yaml::Hash(left), } } fn merge_docs(doc: Yaml, right: Yaml) -> Yaml { match doc { Yaml::Hash(left_hash) => merge_hash(left_hash, right), _ => doc, } } fn remove_first_char(s: &str) -> Option<&str> { s.chars().next().map(|c| &s[c.len_utf8()..]) } /// Loads the configuration (from the config.yaml file, cmd line args, env vars) pub fn init() -> Yaml { // Load config from file let mut file = File::open("config.yaml").expect("Unable to open the config file"); let mut contents = String::new(); file.read_to_string(&mut contents).expect("Unable to read the config file"); let config = &YamlLoader::load_from_str(&contents).unwrap()[0]; let mut hash_map = Hash::new(); // Load config from env vars for (key, value) in env::vars() { let key_str = key.to_string(); let key_yaml = Yaml::from_str(&key_str); let key_lowercase_yaml = Yaml::from_str(&key_str.to_lowercase()); if config.as_hash().unwrap().contains_key(&key_yaml) { hash_map.insert(key_yaml, Yaml::from_str(&value.to_string())); } else if config.as_hash().unwrap().contains_key(&key_lowercase_yaml) { hash_map.insert(key_lowercase_yaml, Yaml::from_str(&value.to_string())); } else { hash_map.insert(key_yaml, Yaml::from_str(&value.to_string())); } } // Load config from cmd line args let args: Vec<String> = env::args().collect(); for arg in args { if arg.contains('=') { let (key, value) = arg.split_at(arg.find('=').unwrap()); hash_map.insert(Yaml::from_str(key), Yaml::from_str(remove_first_char(&value).unwrap())); } else { hash_map.insert(Yaml::from_str(&arg), Yaml::Boolean(true)); 
} } let args_hash = Yaml::Hash(hash_map); merge_docs(config.clone(), args_hash) }
extern crate clap; use clap::{App, Arg}; fn main() { // You can get a "default value" like feature by using Option<T>'s .unwrap_or() method // // Let's assume you have -c <config> argument to allow users to specify a configuration file // but you also want to support a default file, if none is specified. let matches = App::new("myapp").about("does awesome things") .arg(Arg::with_name("CONFIG") .help("The config file to use (default is \"config.json\")") .short("c") .takes_value(true)) .get_matches(); let config_file = matches.value_of("CONFIG").unwrap_or("config.json"); // If the user passed in a -c <file> we'll see that value, if not we'll see 'config.json' println!("The config file is: {}", config_file); }
use crate::display::Display; use rand::Rng; pub struct Cpu { memory: [u8; 4096], pc: u16, i: u16, regs: [u8; 16], display: Display, delay_timer: u8, stack: [u16; 16], sp: u8, keyboard: [bool; 16], awaiting_key_press: bool, current_key_pressed: Option<u8>, } impl Cpu { pub fn new() -> Cpu { let mut cpu = Cpu { memory: [0; 4096], pc: 0x200, i: 0, regs: [0; 16], display: Display::new(), delay_timer: 0, stack: [0; 16], sp: 0, keyboard: [false; 16], awaiting_key_press: false, current_key_pressed: None, }; cpu.load_fontset(); cpu } pub fn load_binary(&mut self, binary: &Vec<u8>) { let start = 0x200; let binary_area = &mut self.memory[start..start+binary.len()]; binary_area.copy_from_slice(binary); } pub fn timer_interrupt(&mut self) { if self.delay_timer > 0 { self.delay_timer -= 1; } } pub fn key_press_interrupt(&mut self, key: u8) { if self.awaiting_key_press { self.current_key_pressed = Some(key); self.awaiting_key_press = false; } } pub fn update_keyboard(&mut self, keys: &[u8]) { for k in &mut self.keyboard { *k = false; } for key in keys { self.keyboard[*key as usize] = true; } } pub fn display_buffer(&self) -> &[bool] { self.display.buffer() } // Returns true if display needs redrawing pub fn cycle(&mut self) -> bool { let opcode = self.fetch_opcode(); self.execute(opcode); opcode & 0xF000 == 0xD000 } fn load_fontset(&mut self) { let fontset = [ 0xF0, 0x90, 0x90, 0x90, 0xF0, // 0 0x20, 0x60, 0x20, 0x20, 0x70, // 1 0xF0, 0x10, 0xF0, 0x80, 0xF0, // 2 0xF0, 0x10, 0xF0, 0x10, 0xF0, // 3 0x90, 0x90, 0xF0, 0x10, 0x10, // 4 0xF0, 0x80, 0xF0, 0x10, 0xF0, // 5 0xF0, 0x80, 0xF0, 0x90, 0xF0, // 6 0xF0, 0x10, 0x20, 0x40, 0x40, // 7 0xF0, 0x90, 0xF0, 0x90, 0xF0, // 8 0xF0, 0x90, 0xF0, 0x10, 0xF0, // 9 0xF0, 0x90, 0xF0, 0x90, 0x90, // A 0xE0, 0x90, 0xE0, 0x90, 0xE0, // B 0xF0, 0x80, 0x80, 0x80, 0xF0, // C 0xE0, 0x90, 0x90, 0x90, 0xE0, // D 0xF0, 0x80, 0xF0, 0x80, 0xF0, // E 0xF0, 0x80, 0xF0, 0x80, 0x80, // F ]; let fontset_area = &mut self.memory[0..fontset.len()]; 
fontset_area.copy_from_slice(&fontset); } fn fetch_opcode(&self) -> u16 { let pc_idx = self.pc as usize; let opcode_bytes = &self.memory[pc_idx..pc_idx+2]; (opcode_bytes[0] as u16) << 8 | opcode_bytes[1] as u16 } fn execute(&mut self, opcode: u16) { if self.awaiting_key_press { return; } // Only used for debugging let old = self.pc; self.pc += 2; match opcode & 0xF000 { 0x0000 => match opcode { 0x00E0 => { // 00E0 - CLS // Clear the display. self.display.clear(); self.print_i(old, opcode, "CLS"); }, 0x00EE => { // 00EE - RET // Return from a subroutine. self.sp -= 1; self.pc = self.stack[self.sp as usize]; self.print_i(old, opcode, "RET"); } _ => { // This instruction is SYS nnn, which calls a subroutine // only needed by older computers. Can be ignored. let addr = opcode & 0x0FFF; self.print_i(old, opcode, &format!("SYS {:04x}", addr)); }, }, 0x1000 => { // 1nnn - JP addr // Jump to location nnn. let addr = opcode & 0x0FFF; self.pc = addr; self.print_i(old, opcode, &format!("JMP {:04x}", addr)); }, 0x2000 => { // 2nnn - CALL addr // Call subroutine at nnn. let addr = opcode & 0x0FFF; self.stack[self.sp as usize] = self.pc; self.sp += 1; self.pc = addr; self.print_i(old, opcode, &format!("CALL {:04x}", addr)); }, 0x3000 => { // 3xkk - SE Vx, byte // Skip next instruction if Vx == kk. let idx = (opcode & 0x0F00) >> 8; let byte = opcode & 0x00FF; if self.regs[idx as usize] == byte as u8 { self.pc += 2; } self.print_i(old, opcode, &format!("SE V{}, {:02x}", idx, byte)); }, 0x4000 => { // 4xkk - SNE Vx, byte // Skip next instruction if Vx != kk. let idx = (opcode & 0x0F00) >> 8; let byte = opcode & 0x00FF; if self.regs[idx as usize] != byte as u8 { self.pc += 2; } self.print_i(old, opcode, &format!("SNE V{}, {:02x}", idx, byte)); }, 0x6000 => { // 6xkk - LD Vx, byte // Set Vx = kk. 
let idx = (opcode & 0x0F00) >> 8; let byte = opcode & 0x00FF; self.regs[idx as usize] = byte as u8; self.print_i(old, opcode, &format!("LD V{}, {:02x}", idx, byte)); }, 0x7000 => { // 7xkk - ADD Vx, byte // Set Vx = Vx + kk. let idx = (opcode & 0x0F00) >> 8; let byte = opcode & 0x00FF; self.regs[idx as usize] += byte as u8; self.print_i(old, opcode, &format!("ADD V{}, {:02x}", idx, byte)); } 0x5000 | 0x8000 | 0x9000 => { let x = ((opcode & 0x0F00) >> 8) as usize; let y = ((opcode & 0x00F0) >> 4) as usize; self.execute_two_reg_opcode(x, y, opcode, old); }, 0xA000 => { // Annn - LD I, addr // Set I = nnn. let addr = opcode & 0x0FFF; self.i = addr; self.print_i(old, opcode, &format!("LD I, {:04x}", addr)); }, 0xB000 => { // Bnnn - JP V0, addr // Jump to location nnn + V0. let addr = opcode & 0x0FFF; self.pc = addr + self.regs[0] as u16; self.print_i(old, opcode, &format!("JP V0, {:04x}", addr)); }, 0xC000 => { // Cxkk - RND Vx, byte // Set Vx = random byte AND kk. let idx = (opcode & 0x0F00) >> 8; let byte = (opcode & 0x00FF) as u8; let rand_byte: u8 = rand::thread_rng().gen(); self.regs[idx as usize] = rand_byte & byte; self.print_i(old, opcode, &format!("RND V{}, {:02x}", idx, byte)); }, 0xD000 => { // Dxyn - DRW Vx, Vy, nibble // Display n-byte sprite starting at memory location I at (Vx, Vy), set VF = collision. let x_idx = ((opcode & 0x0F00) >> 8) as usize; let y_idx = ((opcode & 0x00F0) >> 4) as usize; let n = (opcode & 0x000F) as usize; let start = self.i as usize; let pixel_erased = self.display.draw( self.regs[x_idx] as u32, self.regs[y_idx] as u32, &self.memory[start..start + n], ); self.regs[0xF] = pixel_erased as u8; self.print_i(old, opcode, &format!("DRW V{}, V{}, {:x}", x_idx, y_idx, n)); }, 0xE000 => match opcode & 0xF0FF { 0xE09E => { // Ex9E - SKP Vx // Skip next instruction if key with the value of Vx is pressed. 
let x = ((opcode & 0x0F00) >> 8) as usize; let vx = self.regs[x] as usize; if self.keyboard[vx] { self.pc += 2; } self.print_i(old, opcode, &format!("SKP V{}", x)); } 0xE0A1 => { // ExA1 - SKNP Vx // Skip next instruction if key with the value of Vx is not pressed. let x = ((opcode & 0x0F00) >> 8) as usize; let vx = self.regs[x] as usize; if !self.keyboard[vx] { self.pc += 2; } self.print_i(old, opcode, &format!("SKNP V{}", x)); } _ => panic!("Unknown opcode {:04x}", opcode), }, 0xF000 => match opcode & 0xF0FF { 0xF007 => { // Fx07 - LD Vx, DT // Set Vx = delay timer value. let x = ((opcode & 0x0F00) >> 8) as usize; self.regs[x] = self.delay_timer; self.print_i(old, opcode, &format!("LD V{}, DT", x)); }, 0xF00A => { // Fx0A - LD Vx, K // Wait for a key press, store the value of the key in Vx. // Since this is a blocking instruction, we will loop on this // instruction until a key is pressed. Meanwhile, the calling code // is waiting looking for key presses, communicated via // `key_press_interrupt()`. Hence when we hit this instruction, // there are two possible scenarios: // 1. This isn't the first iteration of the loop, and a key has // has already been pressed (`execute()` returns early if // there hasn't been a key press yet). In that case we can // reset the flag and progress. // 2. This is the first iteration of the loop. In that case we need // to indicate that we need to loop, and rollback the program // counter to the current instruction so that we don't progress. if let Some(key) = self.current_key_pressed { let x = ((opcode & 0x0F00) >> 8) as usize; self.regs[x] = key; self.current_key_pressed = None; } else { self.awaiting_key_press = true; self.pc -= 2; } }, 0xF015 => { // Fx15 - LD DT, Vx // Set delay timer = Vx. let x = ((opcode & 0x0F00) >> 8) as usize; self.delay_timer = self.regs[x]; self.print_i(old, opcode, &format!("LD DT V{}", x)); }, 0xF018 => { // Fx18 - LD ST, Vx // Set sound timer = Vx. // TODO: No-op for now. 
The current framebuffer library being // used does not support audio output. }, 0xF01E => { // Fx1E - ADD I, Vx // Set I = I + Vx. let idx = (opcode & 0x0F00) >> 8; self.i += self.regs[idx as usize] as u16; self.print_i(old, opcode, &format!("ADD I, V{}", idx)); }, 0xF029 => { // Fx29 - LD F, Vx // Set I = location of sprite for digit Vx. let x = ((opcode & 0x0F00) >> 8) as usize; // Each digit's sprite is 5 bytes long self.i = self.regs[x] as u16 * 5; self.print_i(old, opcode, &format!("LD F, V{}", x)); } 0xF033 => { // Fx33 - LD B, Vx // Store BCD representation of Vx in memory locations I, I+1, and I+2. let x = ((opcode & 0x0F00) >> 8) as usize; let vx = self.regs[x]; let i = self.i as usize; self.memory[i] = vx / 100; // hundreds digit self.memory[i + 1] = (vx / 10) % 10; // tens digit self.memory[i + 2] = (vx % 100) % 10; // ones digit self.print_i(old, opcode, &format!("LD B, V{}", x)); }, 0xF055 => { // Fx55 - LD [I], Vx // Store registers V0 through Vx in memory starting at location I. let idx = ((opcode & 0x0F00) >> 8) as usize; for (offset, val) in self.regs[0..idx+1].iter().enumerate() { self.memory[self.i as usize + offset] = *val; } self.print_i(old, opcode, &format!("LD [I], V{}", idx)); }, 0xF065 => { // Fx65 - LD Vx, [I] // Read registers V0 through Vx from memory starting at location I. let idx = ((opcode & 0x0F00) >> 8) as usize; let start = self.i as usize; for (i, val) in self.memory[start..start+idx+1].iter().enumerate() { self.regs[i] = *val; } self.print_i(old, opcode, &format!("LD V{}, [I]", idx)); }, _ => panic!("Unknown opcode {:04x}", opcode), }, _ => panic!("Unknown opcode {:04x}", opcode), }; } fn execute_two_reg_opcode(&mut self, x: usize, y: usize, opcode: u16, old: u16) { match opcode & 0xF00F { 0x5000 => { // 5xy0 - SE Vx, Vy // Skip next instruction if Vx == Vy. if self.regs[x] == self.regs[y] { self.pc += 2; } self.print_i(old, opcode, &format!("SE V{}, V{}", x, y)); }, 0x8000 => { // 8xy0 - LD Vx, Vy // Set Vx = Vy. 
self.regs[x] = self.regs[y]; self.print_i(old, opcode, &format!("LD V{}, V{}", x, y)); }, 0x8001 => { // 8xy1 - OR Vx, Vy // Set Vx = Vx OR Vy. self.regs[x] |= self.regs[y]; self.print_i(old, opcode, &format!("OR V{}, V{}", x, y)); }, 0x8002 => { // 8xy2 - AND Vx, Vy // Set Vx = Vx AND Vy. self.regs[x] &= self.regs[y]; self.print_i(old, opcode, &format!("AND V{}, V{}", x, y)); }, 0x8003 => { // 8xy3 - XOR Vx, Vy // Set Vx = Vx XOR Vy. self.regs[x] ^= self.regs[y]; self.print_i(old, opcode, &format!("XOR V{}, V{}", x, y)); } 0x8004 => { // 8xy4 - ADD Vx, Vy // Set Vx = Vx + Vy, set VF = carry. let vx = self.regs[x] as u32; let vy = self.regs[y] as u32; let result = vx + vy; self.regs[0xF] = (result > 0xFF) as u8; self.regs[x] = result as u8; self.print_i(old, opcode, &format!("ADD V{}, V{}", x, y)); }, 0x8005 => { // 8xy5 - SUB Vx, Vy // Set Vx = Vx - Vy, set VF = NOT borrow. let vx = self.regs[x]; let vy = self.regs[y]; // Set if NO borrow self.regs[0xF] = (vx >= vy) as u8; self.regs[x] -= vy; self.print_i(old, opcode, &format!("SUB V{}, V{}", x, y)); }, 0x8006 => { // 8xy6 - SHR Vx {, Vy} // Set Vx = Vx SHR 1. self.regs[0xF] = (self.regs[x] & 0x0001) as u8; self.regs[x] >>= 1; self.print_i(old, opcode, &format!("SHR V{}", x)); }, 0x8007 => { // 8xy7 - SUBN Vx, Vy // Set Vx = Vy - Vx, set VF = NOT borrow. let vx = self.regs[x]; let vy = self.regs[y]; // Set if NO borrow self.regs[0xF] = (vy >= vx) as u8; self.regs[x] = vy - vx; self.print_i(old, opcode, &format!("SUBN V{}, V{}", x, y)); }, 0x800E => { // 8xyE - SHL Vx {, Vy} // Set Vx = Vx SHL 1. self.regs[0xF] = ((self.regs[x] & 0x80) == 0x80) as u8; self.regs[x] <<= 1; self.print_i(old, opcode, &format!("SHL V{}", x)); }, 0x9000 => { // 9xy0 - SNE Vx, Vy // Skip next instruction if Vx != Vy. 
if self.regs[x] != self.regs[y] { self.pc += 2; } self.print_i(old, opcode, &format!("SNE V{}, V{}", x, y)); }, _ => panic!("Unknown opcode {:04x}", opcode), } } fn print_i(&self, pc: u16, opcode: u16, rep: &str) { println!("{:#03x}: ({:04x}) {}", pc, opcode, rep); } }
/// Conversion of a value into the crate's JSON representation.
// NOTE(review): `Json` is declared elsewhere in this crate — confirm which
// JSON value type it refers to.
pub trait JsonTrait {
    /// Serializes `self` into a `Json` value.
    fn to_json(&self) -> Json;
}
// ref: http://norvig.com/spell-correct.html use std::collections::HashSet; use strsim::jaro_winkler; const LETTERS: &str = "abcdefghijklmnopqrstuvwxyz"; pub struct Corrector { pub keys: HashSet<String>, } impl Corrector { pub fn correct(&self, word: &str) -> Vec<String> { // build_complex_candidates is not for non ASCII string if !word.chars().all(|it| it.is_ascii_alphabetic()) { return vec![]; } let word = word.to_lowercase(); let candidates = build_complex_candidates(&word); let mut min_d = std::f64::MAX; let mut result = vec![]; for candidate in candidates { if self.keys.contains(&candidate) { let d = jaro_winkler(&word, &candidate); // println!("{:?}→{:?}", d, candidate); result.push((d, candidate)); if d < min_d { min_d = d; } } } result.sort_by(|b, a| a.0.partial_cmp(&b.0).unwrap()); result.into_iter().take(10).map(|(_, word)| word).collect() } } fn build_complex_candidates(word: &str) -> HashSet<String> { let mut result = HashSet::new(); for candidate in build_simple_candidates(word) { let simple = build_simple_candidates(&candidate); result.insert(candidate); result.extend(simple); } result } fn build_simple_candidates(word: &str) -> HashSet<String> { let mut set = HashSet::new(); let splits = (0..word.len()).map(|it| word.split_at(it)).collect::<Vec<_>>(); for (l, r) in &splits { // deletion if !r.is_empty() { set.insert(format!("{}{}", l, &r[1..])); // replacing for c in LETTERS.chars() { set.insert(format!("{}{}{}", l, c, &r[1..])); } // transposition if 1 < r.len() { set.insert(format!("{}{}{}{}", l, &r[1..2], &r[0..1], &r[2..])); } } // insertion for c in LETTERS.chars() { set.insert(format!("{}{}{}", l, c, &r[..])); } } set }
//! Module providing the search capability using BAM/BAI files
//!

use std::str::FromStr;
use std::sync::Arc;

use async_trait::async_trait;
use noodles::bam;
use noodles::bam::bai;
use noodles::bam::bai::index::ReferenceSequence;
use noodles::bam::bai::Index;
use noodles::bgzf;
use noodles::bgzf::VirtualPosition;
use noodles::csi::index::reference_sequence::bin::Chunk;
use noodles::csi::BinningIndex;
use noodles::sam::header::record::value::map::read_group::platform::ParseError;
use noodles::sam::header::record::value::map::read_group::Platform;
use noodles::sam::Header;
use tokio::io;
use tokio::io::{AsyncRead, BufReader};
use tracing::{instrument, trace, warn};

use crate::htsget::search::{BgzfSearch, BinningIndexExt, Search, SearchAll, SearchReads};
use crate::htsget::HtsGetError;
use crate::Class::Body;
use crate::{
  htsget::{Format, Query, Result},
  storage::{BytesPosition, Storage},
};

// A BAM reader layered over an async BGZF decoder.
type AsyncReader<ReaderType> = bam::AsyncReader<bgzf::AsyncReader<ReaderType>>;

/// Allows searching through bam files.
pub struct BamSearch<S> {
  storage: Arc<S>,
}

impl BinningIndexExt for Index {
  /// Flattens the BAI index into every chunk of every bin of every
  /// reference sequence.
  #[instrument(level = "trace", skip_all)]
  fn get_all_chunks(&self) -> Vec<&Chunk> {
    trace!("getting vec of chunks");
    self
      .reference_sequences()
      .iter()
      .flat_map(|ref_seq| ref_seq.bins())
      .flat_map(|bin| bin.chunks())
      .collect()
  }
}

#[async_trait]
impl<S, ReaderType>
  BgzfSearch<S, ReaderType, ReferenceSequence, Index, AsyncReader<ReaderType>, Header>
  for BamSearch<S>
where
  S: Storage<Streamable = ReaderType> + Send + Sync + 'static,
  ReaderType: AsyncRead + Unpin + Send + Sync,
{
  /// Byte range covering unmapped (unplaced) reads: from the start of the
  /// last linear bin recorded in the index (or, when absent, from the end of
  /// the header) to the end of the file.
  #[instrument(level = "trace", skip(self, index))]
  async fn get_byte_ranges_for_unmapped(
    &self,
    query: &Query,
    index: &Index,
  ) -> Result<Vec<BytesPosition>> {
    trace!("getting byte ranges for unmapped reads");
    let last_interval = index.first_record_in_last_linear_bin_start_position();
    let start = match last_interval {
      Some(start) => start,
      None => {
        // No linear bins: fall back to a virtual position at the header end
        // (uncompressed offset 0 within that block).
        VirtualPosition::try_from((self.get_header_end_offset(index).await?, 0)).map_err(|err| {
          HtsGetError::InvalidInput(format!(
            "invalid virtual position generated from header end offset {err}."
          ))
        })?
      }
    };

    Ok(vec![BytesPosition::default()
      .with_start(start.compressed())
      .with_end(self.position_at_eof(query).await?)
      .with_class(Body)])
  }
}

#[async_trait]
impl<S, ReaderType> Search<S, ReaderType, ReferenceSequence, Index, AsyncReader<ReaderType>, Header>
  for BamSearch<S>
where
  S: Storage<Streamable = ReaderType> + Send + Sync + 'static,
  ReaderType: AsyncRead + Unpin + Send + Sync,
{
  fn init_reader(inner: ReaderType) -> AsyncReader<ReaderType> {
    AsyncReader::new(inner)
  }

  /// Reads the raw SAM header text (also consuming the reference-sequence
  /// dictionary that follows it), warning about any unrecognised read-group
  /// platform values found in the header.
  async fn read_raw_header(reader: &mut AsyncReader<ReaderType>) -> io::Result<String> {
    let header = reader.read_header().await;
    reader.read_reference_sequences().await?;

    if let Ok(header) = header.as_deref() {
      for value in header.split_whitespace() {
        if let Some(value) = value.strip_prefix("PL:") {
          if let Err(ParseError::Invalid) = Platform::from_str(value) {
            warn!(
              "invalid read group platform `{value}`, only `{}`, `{}`, `{}`, `{}`, `{}`, `{}`, \
               `{}`, `{}`, `{}`, `{}`, or `{}` is supported",
              Platform::Capillary.as_ref(),
              Platform::DnbSeq.as_ref(),
              Platform::Element.as_ref(),
              Platform::Ls454.as_ref(),
              Platform::Illumina.as_ref(),
              Platform::Solid.as_ref(),
              Platform::Helicos.as_ref(),
              Platform::IonTorrent.as_ref(),
              Platform::Ont.as_ref(),
              Platform::PacBio.as_ref(),
              Platform::Ultima.as_ref()
            );
          }
        }
      }
    }

    header
  }

  /// Parses a BAI index, skipping its magic/header first.
  async fn read_index_inner<T: AsyncRead + Unpin + Send>(inner: T) -> io::Result<Index> {
    let mut reader = bai::AsyncReader::new(BufReader::new(inner));
    reader.read_header().await?;
    reader.read_index().await
  }

  #[instrument(level = "trace", skip(self, index, header, query))]
  async fn get_byte_ranges_for_reference_name(
    &self,
    reference_name: String,
    index: &Index,
    header: &Header,
    query: &Query,
  ) -> Result<Vec<BytesPosition>> {
    trace!("getting byte ranges for reference name");
    self
      .get_byte_ranges_for_reference_name_reads(&reference_name, index, header, query)
      .await
  }

  fn get_storage(&self) -> Arc<S> {
    Arc::clone(&self.storage)
  }

  fn get_format(&self) -> Format {
    Format::Bam
  }
}

#[async_trait]
impl<S, ReaderType>
  SearchReads<S, ReaderType, ReferenceSequence, Index, AsyncReader<ReaderType>, Header> for
  BamSearch<S>
where
  S: Storage<Streamable = ReaderType> + Send + Sync + 'static,
  ReaderType: AsyncRead + Unpin + Send + Sync,
{
  /// Resolves a reference-sequence name to its positional index in the
  /// header's dictionary.
  async fn get_reference_sequence_from_name<'a>(
    &self,
    header: &'a Header,
    name: &str,
  ) -> Option<usize> {
    Some(header.reference_sequences().get_index_of(name)?)
  }

  async fn get_byte_ranges_for_unmapped_reads(
    &self,
    query: &Query,
    bai_index: &Index,
  ) -> Result<Vec<BytesPosition>> {
    self.get_byte_ranges_for_unmapped(query, bai_index).await
  }

  async fn get_byte_ranges_for_reference_sequence(
    &self,
    ref_seq_id: usize,
    query: &Query,
    index: &Index,
  ) -> Result<Vec<BytesPosition>> {
    self
      .get_byte_ranges_for_reference_sequence_bgzf(query, ref_seq_id, index)
      .await
  }
}

impl<S, ReaderType> BamSearch<S>
where
  S: Storage<Streamable = ReaderType> + Send + Sync + 'static,
  ReaderType: AsyncRead + Unpin + Send + Sync,
{
  /// Create the bam search.
  pub fn new(storage: Arc<S>) -> Self {
    Self { storage }
  }
}

#[cfg(test)]
pub(crate) mod tests {
  use std::future::Future;

  use htsget_config::storage::local::LocalStorage as ConfigLocalStorage;
  use htsget_test::util::expected_bgzf_eof_data_url;

  #[cfg(feature = "s3-storage")]
  use crate::htsget::from_storage::tests::with_aws_storage_fn;
  use crate::htsget::from_storage::tests::with_local_storage_fn;
  use crate::storage::local::LocalStorage;
  use crate::{Class::Body, Class::Header, Headers, HtsGetError::NotFound, Response, Url};

  use super::*;

  const DATA_LOCATION: &str = "data/bam";
  const INDEX_FILE_LOCATION: &str = "htsnexus_test_NA12878.bam.bai";

  // A query with no filters should return the whole file plus the BGZF EOF
  // marker.
  #[tokio::test]
  async fn search_all_reads() {
    with_local_storage(|storage| async move {
      let search = BamSearch::new(storage.clone());
      let query = Query::new("htsnexus_test_NA12878", Format::Bam);
      let response = search.search(query).await;
      println!("{response:#?}");

      let expected_response = Ok(Response::new(
        Format::Bam,
        vec![
          Url::new(expected_url())
            .with_headers(Headers::default().with_header("Range", "bytes=0-2596770")),
          Url::new(expected_bgzf_eof_data_url()),
        ],
      ));
      assert_eq!(response, expected_response)
    })
    .await;
  }

  // Reference name "*" selects unmapped reads: header bytes plus the tail of
  // the file.
  #[tokio::test]
  async fn search_unmapped_reads() {
    with_local_storage(|storage| async move {
      let search = BamSearch::new(storage.clone());
      let query = Query::new("htsnexus_test_NA12878", Format::Bam).with_reference_name("*");
      let response = search.search(query).await;
      println!("{response:#?}");

      let expected_response = Ok(Response::new(
        Format::Bam,
        vec![
          Url::new(expected_url())
            .with_headers(Headers::default().with_header("Range", "bytes=0-4667"))
            .with_class(Header),
          Url::new(expected_url())
            .with_headers(Headers::default().with_header("Range", "bytes=2060795-2596770"))
            .with_class(Body),
          Url::new(expected_bgzf_eof_data_url()).with_class(Body),
        ],
      ));
      assert_eq!(response, expected_response)
    })
    .await;
  }

  #[tokio::test]
  async fn search_reference_name_without_seq_range() {
    with_local_storage(|storage| async move {
      let search = BamSearch::new(storage.clone());
      let query = Query::new("htsnexus_test_NA12878", Format::Bam).with_reference_name("20");
      let response = search.search(query).await;
      println!("{response:#?}");

      let expected_response = Ok(Response::new(
        Format::Bam,
        vec![
          Url::new(expected_url())
            .with_headers(Headers::default().with_header("Range", "bytes=0-4667"))
            .with_class(Header),
          Url::new(expected_url())
            .with_headers(Headers::default().with_header("Range", "bytes=977196-2128165"))
            .with_class(Body),
          Url::new(expected_bgzf_eof_data_url()).with_class(Body),
        ],
      ));
      assert_eq!(response, expected_response)
    })
    .await;
  }

  #[tokio::test]
  async fn search_reference_name_with_seq_range() {
    with_local_storage(|storage| async move {
      let search = BamSearch::new(storage.clone());
      let query = Query::new("htsnexus_test_NA12878", Format::Bam)
        .with_reference_name("11")
        .with_start(5015000)
        .with_end(5050000);
      let response = search.search(query).await;
      println!("{response:#?}");

      let expected_response = Ok(Response::new(
        Format::Bam,
        vec![
          Url::new(expected_url())
            .with_headers(Headers::default().with_header("Range", "bytes=0-4667"))
            .with_class(Header),
          Url::new(expected_url())
            .with_headers(Headers::default().with_header("Range", "bytes=256721-647345"))
            .with_class(Body),
          Url::new(expected_url())
            .with_headers(Headers::default().with_header("Range", "bytes=824361-842100"))
            .with_class(Body),
          Url::new(expected_url())
            .with_headers(Headers::default().with_header("Range", "bytes=977196-996014"))
            .with_class(Body),
          Url::new(expected_bgzf_eof_data_url()).with_class(Body),
        ],
      ));
      assert_eq!(response, expected_response)
    })
    .await;
  }

  #[tokio::test]
  async fn search_reference_name_no_end_position() {
    with_local_storage(|storage| async move {
      let search = BamSearch::new(storage.clone());
      let query = Query::new("htsnexus_test_NA12878", Format::Bam)
        .with_reference_name("11")
        .with_start(5015000);
      let response = search.search(query).await;
      println!("{response:#?}");

      let expected_response = Ok(Response::new(
        Format::Bam,
        vec![
          Url::new(expected_url())
            .with_headers(Headers::default().with_header("Range", "bytes=0-4667"))
            .with_class(Header),
          Url::new(expected_url())
            .with_headers(Headers::default().with_header("Range", "bytes=256721-996014"))
            .with_class(Body),
          Url::new(expected_bgzf_eof_data_url()).with_class(Body),
        ],
      ));
      assert_eq!(response, expected_response)
    })
    .await;
  }

  // A narrow region query should produce several discontiguous ranges.
  #[tokio::test]
  async fn search_many_response_urls() {
    with_local_storage(|storage| async move {
      let search = BamSearch::new(storage.clone());
      let query = Query::new("htsnexus_test_NA12878", Format::Bam)
        .with_reference_name("11")
        .with_start(4999976)
        .with_end(5003981);
      let response = search.search(query).await;
      println!("{response:#?}");

      let expected_response = Ok(Response::new(
        Format::Bam,
        vec![
          Url::new(expected_url())
            .with_headers(Headers::default().with_header("Range", "bytes=0-273085")),
          Url::new(expected_url())
            .with_headers(Headers::default().with_header("Range", "bytes=499249-574358")),
          Url::new(expected_url())
            .with_headers(Headers::default().with_header("Range", "bytes=627987-647345")),
          Url::new(expected_url())
            .with_headers(Headers::default().with_header("Range", "bytes=824361-842100")),
          Url::new(expected_url())
            .with_headers(Headers::default().with_header("Range", "bytes=977196-996014")),
          Url::new(expected_bgzf_eof_data_url()),
        ],
      ));
      assert_eq!(response, expected_response)
    })
    .await
  }

  // Without a GZI file the ranges are coarser (single merged body range).
  #[tokio::test]
  async fn search_no_gzi() {
    with_local_storage_fn(
      |storage| async move {
        let search = BamSearch::new(storage.clone());
        let query = Query::new("htsnexus_test_NA12878", Format::Bam)
          .with_reference_name("11")
          .with_start(5015000)
          .with_end(5050000);
        let response = search.search(query).await;
        println!("{response:#?}");

        let expected_response = Ok(Response::new(
          Format::Bam,
          vec![
            Url::new(expected_url())
              .with_headers(Headers::default().with_header("Range", "bytes=0-4667"))
              .with_class(Header),
            Url::new(expected_url())
              .with_headers(Headers::default().with_header("Range", "bytes=256721-1065951"))
              .with_class(Body),
            Url::new(expected_bgzf_eof_data_url()).with_class(Body),
          ],
        ));
        assert_eq!(response, expected_response)
      },
      DATA_LOCATION,
      &["htsnexus_test_NA12878.bam", INDEX_FILE_LOCATION],
    )
    .await
  }

  #[tokio::test]
  async fn search_header() {
    with_local_storage(|storage| async move {
      let search = BamSearch::new(storage.clone());
      let query = Query::new("htsnexus_test_NA12878", Format::Bam).with_class(Header);
      let response = search.search(query).await;
      println!("{response:#?}");

      let expected_response = Ok(Response::new(
        Format::Bam,
        vec![Url::new(expected_url())
          .with_headers(Headers::default().with_header("Range", "bytes=0-4667"))
          .with_class(Header)],
      ));
      assert_eq!(response, expected_response)
    })
    .await;
  }

  // The following tests run with only the index file present, so the BAM
  // itself cannot be found.
  #[tokio::test]
  async fn search_non_existent_id_reference_name() {
    with_local_storage_fn(
      |storage| async move {
        let search = BamSearch::new(storage.clone());
        let query = Query::new("htsnexus_test_NA12878", Format::Bam);
        let response = search.search(query).await;
        assert!(matches!(response, Err(NotFound(_))));
      },
      DATA_LOCATION,
      &[INDEX_FILE_LOCATION],
    )
    .await
  }

  #[tokio::test]
  async fn search_non_existent_id_all_reads() {
    with_local_storage_fn(
      |storage| async move {
        let search = BamSearch::new(storage.clone());
        let query = Query::new("htsnexus_test_NA12878", Format::Bam).with_reference_name("20");
        let response = search.search(query).await;
        assert!(matches!(response, Err(NotFound(_))));
      },
      DATA_LOCATION,
      &[INDEX_FILE_LOCATION],
    )
    .await
  }

  #[tokio::test]
  async fn search_non_existent_id_header() {
    with_local_storage_fn(
      |storage| async move {
        let search = BamSearch::new(storage.clone());
        let query = Query::new("htsnexus_test_NA12878", Format::Bam).with_class(Header);
        let response = search.search(query).await;
        assert!(matches!(response, Err(NotFound(_))));
      },
      DATA_LOCATION,
      &[INDEX_FILE_LOCATION],
    )
    .await
  }

  #[cfg(feature = "s3-storage")]
  #[tokio::test]
  async fn search_non_existent_id_reference_name_aws() {
    with_aws_storage_fn(
      |storage| async move {
        let search = BamSearch::new(storage);
        let query = Query::new("htsnexus_test_NA12878", Format::Bam);
        let response = search.search(query).await;
        assert!(matches!(response, Err(_)));
      },
      DATA_LOCATION,
      &[INDEX_FILE_LOCATION],
    )
    .await
  }

  #[cfg(feature = "s3-storage")]
  #[tokio::test]
  async fn search_non_existent_id_all_reads_aws() {
    with_aws_storage_fn(
      |storage| async move {
        let search = BamSearch::new(storage);
        let query = Query::new("htsnexus_test_NA12878", Format::Bam).with_reference_name("20");
        let response = search.search(query).await;
        assert!(matches!(response, Err(_)));
      },
      DATA_LOCATION,
      &[INDEX_FILE_LOCATION],
    )
    .await
  }

  #[cfg(feature = "s3-storage")]
  #[tokio::test]
  async fn search_non_existent_id_header_aws() {
    with_aws_storage_fn(
      |storage| async move {
        let search = BamSearch::new(storage);
        let query = Query::new("htsnexus_test_NA12878", Format::Bam).with_class(Header);
        let response = search.search(query).await;
        assert!(matches!(response, Err(_)));
      },
      DATA_LOCATION,
      &[INDEX_FILE_LOCATION],
    )
    .await
  }

  // Runs `test` against local storage rooted at DATA_LOCATION with all files
  // present.
  pub(crate) async fn with_local_storage<F, Fut>(test: F)
  where
    F: FnOnce(Arc<LocalStorage<ConfigLocalStorage>>) -> Fut,
    Fut: Future<Output = ()>,
  {
    with_local_storage_fn(test, DATA_LOCATION, &[]).await
  }

  pub(crate) fn expected_url() -> String {
    "http://127.0.0.1:8081/data/htsnexus_test_NA12878.bam".to_string()
  }
}
// Re-export the `tabled` crate: both the crate itself (so users can write
// `this_crate::tabled::...`) and all of its public items at this level.
pub use tabled::{self, *};
fn print_string(s: &str) { println!("&str: {}", s) } fn print_second_word(s: &str) { let second_word = s.split(" ") .collect::<Vec<&str>>()[1]; println!("second: {}", second_word) } fn main() { let str = "Hello, world &str!"; let string = String::from("Hello, world String!"); print_string(str); print_string(&string); print_second_word(str) }
// NOTE(review): svd2rust-generated accessors for the HWCFGR4 (hardware
// configuration) register; changes are normally made in the SVD description
// and regenerated rather than edited by hand.
#[doc = "Register `HWCFGR4` reader"]
pub type R = crate::R<HWCFGR4_SPEC>;
#[doc = "Register `HWCFGR4` writer"]
pub type W = crate::W<HWCFGR4_SPEC>;
#[doc = "Field `CHMAP15` reader - Input channel mapping"]
pub type CHMAP15_R = crate::FieldReader;
#[doc = "Field `CHMAP15` writer - Input channel mapping"]
pub type CHMAP15_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 5, O>;
#[doc = "Field `CHMAP14` reader - Input channel mapping"]
pub type CHMAP14_R = crate::FieldReader;
#[doc = "Field `CHMAP14` writer - Input channel mapping"]
pub type CHMAP14_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 5, O>;
#[doc = "Field `CHMAP13` reader - Input channel mapping"]
pub type CHMAP13_R = crate::FieldReader;
#[doc = "Field `CHMAP13` writer - Input channel mapping"]
pub type CHMAP13_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 5, O>;
#[doc = "Field `CHMAP12` reader - Input channel mapping"]
pub type CHMAP12_R = crate::FieldReader;
#[doc = "Field `CHMAP12` writer - Input channel mapping"]
pub type CHMAP12_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 5, O>;
impl R {
    // Each CHMAP field is 5 bits wide, packed at bit offsets 0/8/16/24.
    #[doc = "Bits 0:4 - Input channel mapping"]
    #[inline(always)]
    pub fn chmap15(&self) -> CHMAP15_R {
        CHMAP15_R::new((self.bits & 0x1f) as u8)
    }
    #[doc = "Bits 8:12 - Input channel mapping"]
    #[inline(always)]
    pub fn chmap14(&self) -> CHMAP14_R {
        CHMAP14_R::new(((self.bits >> 8) & 0x1f) as u8)
    }
    #[doc = "Bits 16:20 - Input channel mapping"]
    #[inline(always)]
    pub fn chmap13(&self) -> CHMAP13_R {
        CHMAP13_R::new(((self.bits >> 16) & 0x1f) as u8)
    }
    #[doc = "Bits 24:28 - Input channel mapping"]
    #[inline(always)]
    pub fn chmap12(&self) -> CHMAP12_R {
        CHMAP12_R::new(((self.bits >> 24) & 0x1f) as u8)
    }
}
impl W {
    #[doc = "Bits 0:4 - Input channel mapping"]
    #[inline(always)]
    #[must_use]
    pub fn chmap15(&mut self) -> CHMAP15_W<HWCFGR4_SPEC, 0> {
        CHMAP15_W::new(self)
    }
    #[doc = "Bits 8:12 - Input channel mapping"]
    #[inline(always)]
    #[must_use]
    pub fn chmap14(&mut self) -> CHMAP14_W<HWCFGR4_SPEC, 8> {
        CHMAP14_W::new(self)
    }
    #[doc = "Bits 16:20 - Input channel mapping"]
    #[inline(always)]
    #[must_use]
    pub fn chmap13(&mut self) -> CHMAP13_W<HWCFGR4_SPEC, 16> {
        CHMAP13_W::new(self)
    }
    #[doc = "Bits 24:28 - Input channel mapping"]
    #[inline(always)]
    #[must_use]
    pub fn chmap12(&mut self) -> CHMAP12_W<HWCFGR4_SPEC, 24> {
        CHMAP12_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "Hardware Configuration Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`hwcfgr4::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`hwcfgr4::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct HWCFGR4_SPEC;
impl crate::RegisterSpec for HWCFGR4_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`hwcfgr4::R`](R) reader structure"]
impl crate::Readable for HWCFGR4_SPEC {}
#[doc = "`write(|w| ..)` method takes [`hwcfgr4::W`](W) writer structure"]
impl crate::Writable for HWCFGR4_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets HWCFGR4 to value 0x070b_0a09"]
impl crate::Resettable for HWCFGR4_SPEC {
    const RESET_VALUE: Self::Ux = 0x070b_0a09;
}
/*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under both the MIT license found in the
 * LICENSE-MIT file in the root directory of this source tree and the Apache
 * License, Version 2.0 found in the LICENSE-APACHE file in the root directory
 * of this source tree.
 */

use super::{get_task_id, EventId, Traced};
use crate::context::{TraceContext, TraceContextInner};
use chrome_trace::{Args, Event, Phase};
use futures::{Async, Future, Poll};
use maplit::hashmap;
use std::sync::{Arc, Weak};
use std::time::{Duration, Instant};
use time_ext::DurationExt;

/// Wraps a futures-0.1 `Future` and records chrome-trace `AsyncBegin`/
/// `AsyncEnd` events for it, plus poll-count and cumulative poll-time stats.
pub struct TracedFuture<F> {
    inner: F,
    // Weak so that tracing never keeps the trace context alive on its own;
    // if the context is gone, events are silently dropped.
    context: Weak<TraceContextInner>,
    name: String,
    id: Option<usize>,
    /// scope is used to link AsyncBegin and AsyncEnd events together
    scope: usize,
    // `Option` because the args are moved into the AsyncBegin event on first
    // poll (see `log_first_poll`).
    args: Option<Args>,
    poll_count: u64,
    poll_time: Duration,
}

impl<F> TracedFuture<F> {
    fn new(
        future: F,
        context: &TraceContext,
        name: String,
        args: Args,
        id: Option<usize>,
        scope: usize,
    ) -> Self {
        Self {
            inner: future,
            context: Arc::downgrade(&context.inner),
            name,
            id,
            scope,
            args: Some(args),
            poll_count: 0,
            poll_time: Duration::from_secs(0),
        }
    }

    /// Emits the `AsyncBegin` event; called exactly once, on the first poll.
    /// Consumes `self.args` (via `take`), so calling it twice would panic.
    fn log_first_poll(&mut self) {
        if self.id.is_none() {
            self.id = Some(get_task_id())
        }

        let context = match self.context.upgrade() {
            Some(ctx) => ctx,
            None => return, // trace context already dropped; nothing to record
        };

        context.add_event(Event {
            tid: get_task_id() as u64,
            id: self.id.map(|id| id.to_string()),
            scope: Some(self.scope.to_string()),
            args: self
                .args
                .take()
                .expect("The args for tracing were already taken"),
            ..Event::now(&self.name, Phase::AsyncBegin, &context.epoch)
        });
    }

    /// Emits the `AsyncEnd` event carrying the accumulated poll statistics.
    fn log_completion(&mut self) {
        let context = match self.context.upgrade() {
            Some(ctx) => ctx,
            None => return,
        };

        context.add_event(Event {
            tid: get_task_id() as u64,
            id: self.id.map(|id| id.to_string()),
            scope: Some(self.scope.to_string()),
            args: hashmap! {
                "poll_count".to_owned() => self.poll_count.into(),
                "poll_time".to_owned() => self.poll_time.as_micros_unchecked().into(),
            },
            ..Event::now(&self.name, Phase::AsyncEnd, &context.epoch)
        });
    }
}

impl<F: Future> Future for TracedFuture<F> {
    type Item = F::Item;
    type Error = F::Error;

    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
        // First poll (count still 0) opens the async event.
        if self.poll_count == 0 {
            self.log_first_poll();
        }

        // Time only the inner poll itself.
        let poll_start = Instant::now();
        let poll = self.inner.poll();
        self.poll_time += poll_start.elapsed();

        // Completion (ready or error) closes the async event.
        match &poll {
            Ok(Async::Ready(_)) | Err(_) => {
                self.log_completion();
            }
            Ok(Async::NotReady) => {}
        }

        self.poll_count += 1;
        poll
    }
}

/// Dummy type used for the sole purpose of preventing overlapping implementations of
/// the `Traced<T>` trait for `Future`s and `Stream`s.
pub enum FutureMarker {}

impl<F: Future> Traced<FutureMarker> for F {
    type Wrapper = TracedFuture<Self>;

    /// Wraps `self` with a freshly generated event id assigned on first poll.
    fn traced<N: ToString>(
        self,
        context: &TraceContext,
        name: N,
        args: Option<Args>,
    ) -> Self::Wrapper {
        TracedFuture::new(
            self,
            context,
            name.to_string(),
            args.unwrap_or_default(),
            None,
            EventId::new().id,
        )
    }

    /// Wraps `self` using a caller-provided `EventId` for the event id.
    fn traced_with_id<N: ToString>(
        self,
        context: &TraceContext,
        name: N,
        args: Option<Args>,
        id: EventId,
    ) -> Self::Wrapper {
        TracedFuture::new(
            self,
            context,
            name.to_string(),
            args.unwrap_or_default(),
            Some(id.id),
            EventId::new().id,
        )
    }
}
//! platform-independent traits. Submodules with backends will be selectable //! via cargo features in future mod palette; mod video_sdl; pub use palette::Palette; pub use video_sdl::VideoSdl; /// Texture id binging #[derive(PartialEq, Eq, Hash, Copy, Clone)] pub struct TextureInfo { id: usize, width: u32, height: u32, } /// Simple rect struct pub struct Rect { x: i32, y: i32, w: u32, h: u32, } impl Rect { /// Constructs new rect pub fn new(x: i32, y: i32, w: u32, h: u32) -> Rect { Rect { x, y, w, h } } } /// provides video functionality trough rela backend to emulator pub trait VideoDevice { /// generates and returns texture handle fn gen_texture(&mut self, width: u32, height: u32) -> TextureInfo; /// changes window title fn set_title(&mut self, title: &str); /// udpates texture data fn update_texture(&mut self, tex: TextureInfo, buffer: &[u8]); /// starts render block fn begin(&mut self); /// draws plain texure into destination rect fn draw_texture_2d(&mut self, tex: TextureInfo, rect: Option<Rect>); /// finishes rendering fn end(&mut self); }
use crate::gc::Gc; use crate::rerrs::{ErrorKind, SteelErr}; use crate::rvals::{Result, SteelVal}; use crate::stop; use crate::throw; use std::rc::Rc; use serde::{Deserialize, Serialize}; use crate::parser::ast::Struct; #[derive(Clone, Debug, PartialEq)] pub struct SteelStruct { name: Rc<str>, fields: Vec<SteelVal>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct StructFuncBuilder<'a> { pub name: &'a str, pub fields: Vec<&'a str>, } impl<'a> StructFuncBuilder<'a> { pub fn new(name: &'a str, fields: Vec<&'a str>) -> Self { StructFuncBuilder { name, fields } } pub fn to_struct_function_names(&self) -> Vec<String> { // collect the functions // for each field there are going to be 2 functions // add 2 for the constructor and the predicate let mut func_names = Vec::with_capacity(&self.fields.len() * 2 + 2); // generate constructor // let cons = constructor(name, field_names_as_strs.len()); // generate predicate func_names.push((&self.name).to_string()); func_names.push(format!("{}?", &self.name)); // generate getters and setters for field in &self.fields { func_names.push(format!("{}-{}", &self.name, field)); func_names.push(format!("set-{}-{}!", &self.name, field)); } func_names } // This needs to return something that can be consumed at runtime from the constant map // Effectively, we need a list with the form '(name fields ...) 
// Let's make it happen pub fn to_constant_val(&self, indices: Vec<usize>) -> SteelVal { let indices: Vec<_> = indices .into_iter() .map(|x| SteelVal::IntV(x as isize)) .collect(); let mut name = vec![ crate::primitives::ListOperations::built_in_list_normal_iter_non_result( indices.into_iter(), ), SteelVal::StringV(self.name.into()), ]; let fields: Vec<_> = self .fields .iter() .map(|x| SteelVal::StringV((*x).into())) .collect(); name.extend(fields); // TODO who knows if this actually works crate::primitives::ListOperations::built_in_list_normal_iter_non_result(name.into_iter()) } pub fn to_func_vec(&self) -> Result<Vec<(String, SteelVal)>> { SteelStruct::generate_from_name_fields(self.name, &self.fields) } } // // Housekeeping (just in case there are cyclical references) // impl Drop for SteelStruct { // fn drop(&mut self) { // self.fields.clear(); // } // } impl SteelStruct { pub fn new(name: Rc<str>, fields: Vec<SteelVal>) -> Self { SteelStruct { name, fields } } // This will blow up the stack with a sufficiently large recursive struct pub fn pretty_print(&self) -> String { format!("{}", self.name) } } impl SteelStruct { pub fn generate_from_ast(s: &Struct) -> Result<StructFuncBuilder> { let name = s.name.atom_identifier_or_else(throw!(TypeMismatch => "struct definition expected an identifier as the first argument"))?; let field_names_as_strs: Vec<&str> = s .fields .iter() .map(|x| { x.atom_identifier_or_else(throw!(TypeMismatch => "struct expected identifiers")) }) .collect::<Result<_>>()?; Ok(StructFuncBuilder::new(name, field_names_as_strs)) } pub fn generate_from_name_fields( name: &str, field_names_as_strs: &[&str], ) -> Result<Vec<(String, SteelVal)>> { // collect the functions // for each field there are going to be 2 functions // add 2 for the constructor and the predicate let mut funcs = Vec::with_capacity(field_names_as_strs.len() * 2 + 2); let name = Rc::from(name); // generate constructor let cons = constructor(Rc::clone(&name), 
field_names_as_strs.len()); funcs.push((name.to_string(), cons)); // generate predicate funcs.push((format!("{}?", name), predicate(Rc::clone(&name)))); // generate getters and setters for (idx, field) in field_names_as_strs.iter().enumerate() { funcs.push((format!("{}-{}", name, field), getter(Rc::clone(&name), idx))); funcs.push(( format!("set-{}-{}!", name, field), setter(Rc::clone(&name), idx), )); } Ok(funcs) } } // initialize hashmap to be field_names -> void // just do arity check before inserting to make sure things check out // that way field names as a vec are no longer necessary fn constructor(name: Rc<str>, len: usize) -> SteelVal { let f = move |args: &[SteelVal]| -> Result<SteelVal> { if args.len() != len { let error_message = format!( "{} expected {} arguments, found {}", name.clone(), args.len(), len ); stop!(ArityMismatch => error_message); } let mut new_struct = SteelStruct::new(Rc::clone(&name), vec![SteelVal::Void; len]); for (idx, arg) in args.iter().enumerate() { let key = new_struct .fields .get_mut(idx) .ok_or_else(throw!(TypeMismatch => "Couldn't find that field in the struct"))?; *key = arg.clone(); } Ok(SteelVal::StructV(Gc::new(new_struct))) }; SteelVal::BoxedFunction(Rc::new(f)) } fn predicate(name: Rc<str>) -> SteelVal { let f = move |args: &[SteelVal]| -> Result<SteelVal> { if args.len() != 1 { let error_message = format!("{}? 
expected one argument, found {}", name, args.len()); stop!(ArityMismatch => error_message); } match &args[0] { SteelVal::StructV(my_struct) => { if my_struct.name.as_ref() == name.as_ref() { Ok(SteelVal::BoolV(true)) } else { Ok(SteelVal::BoolV(false)) } } _ => Ok(SteelVal::BoolV(false)), } }; SteelVal::BoxedFunction(Rc::new(f)) } fn getter(name: Rc<str>, idx: usize) -> SteelVal { let f = move |args: &[SteelVal]| -> Result<SteelVal> { if args.len() != 1 { let error_message = format!( "{} getter expected one argument, found {}", name, args.len() ); stop!(ArityMismatch => error_message); } let my_struct = args[0].struct_or_else(throw!(TypeMismatch => "expected struct"))?; if let Some(ret_val) = my_struct.fields.get(idx) { Ok(ret_val.clone()) } else { stop!(TypeMismatch => "Couldn't find that field in the struct") } }; SteelVal::BoxedFunction(Rc::new(f)) } fn setter(name: Rc<str>, idx: usize) -> SteelVal { let f = move |args: &[SteelVal]| -> Result<SteelVal> { if args.len() != 2 { let error_message = format!( "{} setter expected two arguments, found {}", name, args.len() ); stop!(ArityMismatch => error_message); } let my_struct = args[0].struct_or_else(throw!(TypeMismatch => "expected struct"))?; let value = args[1].clone(); let mut new_struct = my_struct.clone(); let key = new_struct .fields .get_mut(idx) .ok_or_else(throw!(TypeMismatch => "Couldn't find that field in the struct"))?; *key = value; Ok(SteelVal::StructV(Gc::new(new_struct))) }; SteelVal::BoxedFunction(Rc::new(f)) } #[cfg(test)] mod struct_tests { use super::*; fn apply_function(func: SteelVal, args: Vec<SteelVal>) -> Result<SteelVal> { let func = func .boxed_func_or_else(throw!(BadSyntax => "string tests")) .unwrap(); func(&args) } #[test] fn constructor_normal() { let args = vec![SteelVal::IntV(1), SteelVal::IntV(2)]; let res = apply_function(constructor(Rc::from("Promise"), 2), args); let expected = SteelVal::StructV(Gc::new(SteelStruct { name: Rc::from("Promise"), fields: vec![SteelVal::IntV(1), 
SteelVal::IntV(2)], })); assert_eq!(res.unwrap(), expected) } #[test] fn setter_position_0() { let args = vec![ SteelVal::StructV(Gc::new(SteelStruct { name: Rc::from("Promise"), fields: vec![SteelVal::IntV(1), SteelVal::IntV(2)], })), SteelVal::IntV(100), ]; let res = apply_function(setter(Rc::from("Promise"), 0), args); let expected = SteelVal::StructV(Gc::new(SteelStruct { name: Rc::from("Promise"), fields: vec![SteelVal::IntV(100), SteelVal::IntV(2)], })); assert_eq!(res.unwrap(), expected); } #[test] fn setter_position_1() { let args = vec![ SteelVal::StructV(Gc::new(SteelStruct { name: Rc::from("Promise"), fields: vec![SteelVal::IntV(1), SteelVal::IntV(2)], })), SteelVal::IntV(100), ]; let res = apply_function(setter(Rc::from("Promise"), 1), args); let expected = SteelVal::StructV(Gc::new(SteelStruct { name: Rc::from("Promise"), fields: vec![SteelVal::IntV(1), SteelVal::IntV(100)], })); assert_eq!(res.unwrap(), expected); } #[test] fn getter_position_0() { let args = vec![SteelVal::StructV(Gc::new(SteelStruct { name: Rc::from("Promise"), fields: vec![SteelVal::IntV(1), SteelVal::IntV(2)], }))]; let res = apply_function(getter(Rc::from("Promise"), 0), args); let expected = SteelVal::IntV(1); assert_eq!(res.unwrap(), expected); } }
use franklin_crypto::plonk::circuit::allocated_num::Num;
use franklin_crypto::bellman::pairing::Engine;
use franklin_crypto::bellman::plonk::better_better_cs::cs::ConstraintSystem;
use rand::Rng;

/// A square `SIZE x SIZE` matrix of allocated circuit numbers.
pub struct MdsMatrix<E: Engine, const SIZE: usize> {
    data: [[Num<E>; SIZE]; SIZE],
}

impl<E: Engine, const SIZE: usize> MdsMatrix<E, SIZE> {
    /// Matrix with every entry set to `Num::zero()`.
    pub fn zero_matrix() -> Self {
        let data = [[Num::<E>::zero(); SIZE]; SIZE];
        MdsMatrix { data }
    }

    /// Returns row `n` by value.
    ///
    /// # Panics
    /// Panics if `n >= SIZE`. (The previous version used a bare `panic!()`
    /// with no message.)
    pub fn get_row(&self, n: usize) -> [Num<E>; SIZE] {
        if n >= SIZE {
            panic!("row index {} out of range for {0}x{1} matrix", n, SIZE);
        }
        self.data[n]
    }
}

/// Samples two random vectors `x`, `y` (rejection-sampling) until all
/// `2 * SIZE` entries are pairwise distinct, so every difference
/// `x_i - y_j` used by `construct_mds_matrix` is non-zero and invertible.
pub fn generate_vectors_for_matrix<E: Engine, R: Rng, const SIZE: usize>(
    rng: &mut R,
) -> [Vec<E::Fr>; 2] {
    loop {
        let x: Vec<E::Fr> = (0..SIZE).map(|_| rng.gen()).collect();
        let y: Vec<E::Fr> = (0..SIZE).map(|_| rng.gen()).collect();
        if vectors_are_distinct::<E, SIZE>(&x, &y) {
            return [x, y];
        }
    }
}

/// True when the entries of `x` are pairwise distinct, the entries of `y`
/// are pairwise distinct, and no entry of `x` equals an entry of `y`.
/// (Renamed from the misspelled private `okey_vectors`; parameters take
/// slices instead of `&Vec`.)
fn vectors_are_distinct<E: Engine, const SIZE: usize>(x: &[E::Fr], y: &[E::Fr]) -> bool {
    // x internally distinct
    for i in 0..SIZE {
        let el = x[i];
        for other in x[(i + 1)..].iter() {
            if el == *other {
                return false;
            }
        }
    }
    // y internally distinct
    for i in 0..SIZE {
        let el = y[i];
        for other in y[(i + 1)..].iter() {
            if el == *other {
                return false;
            }
        }
    }
    // x disjoint from y
    for i in 0..SIZE {
        let el = x[i];
        for other in y.iter() {
            if el == *other {
                return false;
            }
        }
    }
    true
}

/// Allocates the vector entries in the constraint system and builds the
/// matrix with entries `m[i][j] = 1 / (x_i - y_j)`.
///
/// # Panics
/// Panics if the allocated vectors are not pairwise distinct (which would
/// make some `x_i - y_j` non-invertible).
pub fn construct_mds_matrix<E: Engine, CS: ConstraintSystem<E>, const SIZE: usize>(
    cs: &mut CS,
    vectors: &mut [Vec<E::Fr>; 2],
) -> MdsMatrix<E, SIZE> {
    let mut mds_matrix = MdsMatrix::<E, SIZE>::zero_matrix();

    // Allocate the witness values (read-only: `iter` replaces the previous
    // needless `iter_mut`).
    let x: Vec<Num<E>> = vectors[0]
        .iter()
        .map(|number| Num::alloc(cs, Some(*number)).unwrap())
        .collect();
    let y: Vec<Num<E>> = vectors[1]
        .iter()
        .map(|number| Num::alloc(cs, Some(*number)).unwrap())
        .collect();

    if !verify_vectors_distinct::<E, CS, SIZE>(cs, &x, &y) {
        panic!("MDS input vectors must have pairwise distinct entries");
    }

    for (i, x) in x.into_iter().enumerate() {
        for (j, y) in y.iter().enumerate() {
            mds_matrix.data[i][j] = x.sub(cs, y).unwrap().inverse(cs).unwrap();
        }
    }

    mds_matrix
}

/// Circuit-level counterpart of `vectors_are_distinct`, comparing allocated
/// numbers via `Num::equals`. (Renamed from the misspelled private
/// `veryfy_vector_corectnes`.)
fn verify_vectors_distinct<E: Engine, CS: ConstraintSystem<E>, const SIZE: usize>(
    cs: &mut CS,
    x: &[Num<E>],
    y: &[Num<E>],
) -> bool {
    for i in 0..SIZE {
        let el = x[i];
        for other in x[(i + 1)..].iter() {
            if Num::equals(cs, &el, &other).unwrap().get_value().unwrap() {
                return false;
            }
        }
    }
    for i in 0..SIZE {
        let el = y[i];
        for other in y[(i + 1)..].iter() {
            if Num::equals(cs, &el, &other).unwrap().get_value().unwrap() {
                return false;
            }
        }
    }
    for i in 0..SIZE {
        let el = x[i];
        for other in y.iter() {
            if Num::equals(cs, &el, &other).unwrap().get_value().unwrap() {
                return false;
            }
        }
    }
    true
}

/// 3x3 determinant by explicit permutation expansion.
/// Returns `None` unless `SIZE == 3` (other sizes are not implemented).
fn compute_determinant<E: Engine, CS: ConstraintSystem<E>, const SIZE: usize>(
    cs: &mut CS,
    mds_matrix: &MdsMatrix<E, SIZE>,
) -> Option<Num<E>> {
    if SIZE != 3 {
        return None;
    }
    // The six signed products of the Leibniz formula for a 3x3 determinant.
    let s123 = mds_matrix.data[0][0].mul(cs, &mds_matrix.data[1][1]).unwrap().mul(cs, &mds_matrix.data[2][2]).unwrap();
    let s132 = mds_matrix.data[0][0].mul(cs, &mds_matrix.data[1][2]).unwrap().mul(cs, &mds_matrix.data[2][1]).unwrap();
    let s213 = mds_matrix.data[0][1].mul(cs, &mds_matrix.data[1][0]).unwrap().mul(cs, &mds_matrix.data[2][2]).unwrap();
    let s231 = mds_matrix.data[0][1].mul(cs, &mds_matrix.data[1][2]).unwrap().mul(cs, &mds_matrix.data[2][0]).unwrap();
    let s312 = mds_matrix.data[0][2].mul(cs, &mds_matrix.data[1][0]).unwrap().mul(cs, &mds_matrix.data[2][1]).unwrap();
    let s321 = mds_matrix.data[0][2].mul(cs, &mds_matrix.data[1][1]).unwrap().mul(cs, &mds_matrix.data[2][0]).unwrap();

    let result = s123.add(cs, &s231).unwrap()
        .add(cs, &s312).unwrap()
        .sub(cs, &s132).unwrap()
        .sub(cs, &s321).unwrap()
        .sub(cs, &s213).unwrap();
    Some(result)
}

/// Inverts a 3x3 matrix via the adjugate divided by the determinant.
/// Returns `None` unless `SIZE == 3`, or when the determinant is zero.
pub fn construct_inverse_matrix<E: Engine, CS: ConstraintSystem<E>, const SIZE: usize>(
    cs: &mut CS,
    mds_matrix: &MdsMatrix<E, SIZE>,
) -> Option<MdsMatrix<E, SIZE>> {
    if SIZE != 3 {
        return None;
    }
    let det = compute_determinant(cs, mds_matrix).unwrap();
    if det.is_zero(cs).unwrap().get_value().unwrap() {
        return None;
    }
    let mut mds_invert_matrix = MdsMatrix::<E, SIZE>::zero_matrix();
    for i in 0..SIZE {
        for j in 0..SIZE {
            // Cofactor of entry (i, j); note the transposed store `[j][i]`
            // (adjugate = transpose of the cofactor matrix).
            let a = mds_matrix.data[(i + 1) % 3][(j + 1) % 3].mul(cs, &mds_matrix.data[(i + 2) % 3][(j + 2) % 3]).unwrap();
            let b = mds_matrix.data[(i + 1) % 3][(j + 2) % 3].mul(cs, &mds_matrix.data[(i + 2) % 3][(j + 1) % 3]).unwrap();
            let c = a.sub(cs, &b).unwrap();
            mds_invert_matrix.data[j][i] = c.div(cs, &det).unwrap();
        }
    }
    Some(mds_invert_matrix)
}

/// Circuit dot product of two fixed-size vectors.
pub fn dot_product<E: Engine, CS: ConstraintSystem<E>, const SIZE: usize>(
    cs: &mut CS,
    x: &[Num<E>; SIZE],
    y: &[Num<E>; SIZE],
) -> Num<E> {
    let mut res = Num::<E>::zero();
    for i in 0..SIZE {
        let z = x[i].mul(cs, &y[i]).unwrap();
        res = res.add(cs, &z).unwrap();
    }
    res
}

/// Element-wise vector addition.
pub fn add_vectors<E: Engine, CS: ConstraintSystem<E>, const SIZE: usize>(
    cs: &mut CS,
    x: &[Num<E>; SIZE],
    y: &[Num<E>; SIZE],
) -> [Num<E>; SIZE] {
    let mut res = [Num::<E>::zero(); SIZE];
    for i in 0..SIZE {
        res[i] = x[i].add(cs, &y[i]).unwrap();
    }
    res
}

/// Element-wise vector subtraction (`x - y`).
pub fn sub_vectors<E: Engine, CS: ConstraintSystem<E>, const SIZE: usize>(
    cs: &mut CS,
    x: &[Num<E>; SIZE],
    y: &[Num<E>; SIZE],
) -> [Num<E>; SIZE] {
    let mut res = [Num::<E>::zero(); SIZE];
    for i in 0..SIZE {
        res[i] = x[i].sub(cs, &y[i]).unwrap();
    }
    res
}
use serde_json::{Value}; use crate::ofn_2_man::axiom_translation as axiom_translation; use crate::ofn_2_man::class_translation as class_translation; use crate::ofn_2_man::property_translation as property_translation; /// Given an OFN S-expression (encoded in JSON), /// return its corresponding representation in Manchester Syntax /// /// Examples /// /// let ofn_string = r#"["SubClassOf","obo:IAO_0000120",["ObjectSomeValuesFrom","obo:BFO_0000050","obo:OBI_0500000"]]"#; /// let ofn = util::parser::parse(&ofn_string); /// let man = ofn_2_man::translation::ofn_2_man(&ofn); /// println!("{}", man); pub fn ofn_2_man(v : &Value) -> String { match v[0].as_str() { Some("SubClassOf") => axiom_translation::translate_subclass_of_axiom(v), Some("DisjointClasses") => axiom_translation::translate_disjoint_classes_axiom(v), Some("DisjointUnionOf") => axiom_translation::translate_disjoint_union_of_axiom(v), Some("EquivalentClasses") => axiom_translation::translate_equivalent_classes_axiom(v), Some("ThinTriple") => axiom_translation::translate_thin_triple(v), Some("SomeValuesFrom") => class_translation::translate_some_values_from(v), Some("AllValuesFrom") => class_translation::translate_all_values_from(v), Some("HasValue") => class_translation::translate_has_value(v), Some("MinCardinality") => class_translation::translate_min_cardinality(v), Some("MinQualifiedCardinality") => class_translation::translate_min_qualified_cardinality(v), Some("MaxCardinality") => class_translation::translate_max_cardinality(v), Some("MaxQualifiedCardinality") => class_translation::translate_max_qualified_cardinality(v), Some("ExactCardinality") => class_translation::translate_exact_cardinality(v), Some("ExactQualifiedCardinality") => class_translation::translate_exact_qualified_cardinality(v), Some("HasSelf") => class_translation::translate_has_self(v), Some("IntersectionOf") => class_translation::translate_intersection_of(v), Some("UnionOf") => class_translation::translate_union_of(v), Some("OneOf") => 
class_translation::translate_one_of(v), Some("ComplementOf") => class_translation::translate_complement_of(v), Some("ObjectSomeValuesFrom") => class_translation::translate_some_values_from(v), Some("ObjectAllValuesFrom") => class_translation::translate_all_values_from(v), Some("ObjectHasValue") => class_translation::translate_has_value(v), Some("ObjectMinCardinality") => class_translation::translate_min_cardinality(v), Some("ObjectMinQualifiedCardinality") => class_translation::translate_min_qualified_cardinality(v), Some("ObjectMaxCardinality") => class_translation::translate_max_cardinality(v), Some("ObjectMaxQualifiedCardinality") => class_translation::translate_max_qualified_cardinality(v), Some("ObjectExactCardinality") => class_translation::translate_exact_cardinality(v), Some("ObjectExactQualifiedCardinality") => class_translation::translate_exact_qualified_cardinality(v), Some("ObjectHasSelf") => class_translation::translate_has_self(v), Some("ObjectIntersectionOf") => class_translation::translate_intersection_of(v), Some("ObjectUnionOf") => class_translation::translate_union_of(v), Some("ObjectOneOf") => class_translation::translate_one_of(v), Some("ObjectComplementOf") => class_translation::translate_complement_of(v), Some("DataSomeValuesFrom") => class_translation::translate_some_values_from(v), Some("DataAllValuesFrom") => class_translation::translate_all_values_from(v), Some("DataHasValue") => class_translation::translate_has_value(v), Some("DataMinCardinality") => class_translation::translate_min_cardinality(v), Some("DataMinQualifiedCardinality") => class_translation::translate_min_qualified_cardinality(v), Some("DataMaxCardinality") => class_translation::translate_max_cardinality(v), Some("DataMaxQualifiedCardinality") => class_translation::translate_max_qualified_cardinality(v), Some("DataExactCardinality") => class_translation::translate_exact_cardinality(v), Some("DataExactQualifiedCardinality") => 
class_translation::translate_exact_qualified_cardinality(v), Some("DataHasSelf") => class_translation::translate_has_self(v), Some("DataIntersectionOf") => class_translation::translate_intersection_of(v), Some("DataUnionOf") => class_translation::translate_union_of(v), Some("DataOneOf") => class_translation::translate_one_of(v), Some("DataComplementOf") => class_translation::translate_complement_of(v), Some("ObjectInverseOf") => property_translation::translate_inverse_of(v), Some(_) => panic!(), None => String::from(v.as_str().unwrap()), //named entity } }
//! Basic block and tokens a [template](`super::Template`) is created from. use std::fmt; use super::span::{ByteSpan, Spanned}; /// A parsed instruction from a template. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum BlockHint { /// Starts a `Text` block Text, /// Starts a `Comment` block Comment, /// Starts an escaped block Escaped, /// Starts a `Variable` block Var, /// Starts a `Print` block Print, /// Starts a `If` block IfStart, /// Continues an `If` block with an `ElIf` block ElIf, /// Continues an `If` block with an `Else` block Else, /// End an `If` block IfEnd, } impl BlockHint { /// Whether this instruction is part of a block. pub fn is_if_subblock(&self) -> bool { self == &Self::ElIf || self == &Self::Else || self == &Self::IfEnd } } /// A instruction that opens a new block. #[derive(Debug, Clone, PartialEq, Eq)] pub enum BlockKind { /// A `Text` block, that contains text that is copied to the output. Text, /// A `Comment` block, that contains text that is ignored. Comment, /// An escaped block, that contains escaped text that is copied to the output. Escaped(ByteSpan), /// A `Variable` block, that contains a variable name that is replaced with its value. Var(Var), /// A `Print` block, that contains text that is printed to the log. Print(ByteSpan), /// An `If` block, that contains a condition that is evaluated and compiles the block conditionally. If(If), } impl BlockKind { /// Returns the corresponding hint for this block. pub const fn as_hint(&self) -> BlockHint { match self { BlockKind::Text => BlockHint::Text, BlockKind::Comment => BlockHint::Comment, BlockKind::Escaped(_) => BlockHint::Escaped, BlockKind::Var(_) => BlockHint::Var, BlockKind::Print(_) => BlockHint::Print, BlockKind::If(_) => BlockHint::IfEnd, } } } /// A block can be a single construction or open up a multi-line block that contains sub-blocks. #[derive(Debug, Clone, PartialEq, Eq)] pub struct Block { /// The span of this block. 
pub span: ByteSpan, /// The type of this block. pub kind: BlockKind, } impl Block { /// Creates a new block. pub const fn new(span: ByteSpan, kind: BlockKind) -> Self { Self { span, kind } } /// Returns the span of the block. pub const fn span(&self) -> &ByteSpan { &self.span } /// Returns the type of this block. pub const fn kind(&self) -> &BlockKind { &self.kind } } /// The different types of sources for variables values #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum VarEnv { /// A variable that is defined by the system's environment. Environment, /// A variable that is defined in the profile. Profile, /// A variable that is defined for a specific dotfile. Dotfile, } impl fmt::Display for VarEnv { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Debug::fmt(&self, f) } } /// Defines a set of variables sources that can be used to resolve variables. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct VarEnvSet(pub [Option<VarEnv>; 3]); impl VarEnvSet { /// Creates an empty `VarEnvSet`. pub const fn empty() -> Self { Self([None; 3]) } /// Adds a new variable to the set if it is not already present. pub fn add(&mut self, value: VarEnv) -> bool { if self.0.contains(&Some(value)) { false } else if let Some(slot) = self.0.iter_mut().find(|x| x.is_none()) { *slot = Some(value); true } else { false } } /// Returns the set of `VarEnv`s that are defined. pub fn envs(&self) -> impl Iterator<Item = &VarEnv> { self.0.iter().filter_map(|x| x.as_ref()) } /// Returns the number of environments that are defined. pub fn len(&self) -> usize { self.envs().count() } /// Returns the capacity of the set. 
pub const fn capacity(&self) -> usize { self.0.len() } } impl Default for VarEnvSet { fn default() -> Self { Self([Some(VarEnv::Dotfile), Some(VarEnv::Profile), None]) } } impl fmt::Display for VarEnvSet { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.envs()).finish() } } /// A variable that is defined in the template. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct Var { /// The `VarEnvSet` for the variable pub envs: VarEnvSet, /// The `ByteSpan` of the variable pub name: ByteSpan, } /// Defines an if block. #[derive(Debug, Clone, PartialEq, Eq)] pub struct If { /// The head of an if statement. /// /// `{{@if {{VAR}}}}` pub head: (Spanned<IfExpr>, Vec<Block>), /// All elif statements of the if. /// /// `{{@elif {{VAR}}}}` pub elifs: Vec<(Spanned<IfExpr>, Vec<Block>)>, /// The else statement of the if. /// /// `{{@else}}` pub els: Option<(ByteSpan, Vec<Block>)>, /// The closing fi statement. /// /// `{{@fi}}` pub end: ByteSpan, } /// The different types of if expression operations. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum IfOp { /// Operand to check for equality. Eq, /// Operand to check for inequality. NotEq, } impl IfOp { /// Evaluates an if expression. pub fn eval(&self, lhs: &str, rhs: &str) -> bool { match self { Self::Eq => lhs == rhs, Self::NotEq => lhs != rhs, } } } /// The different if expression types. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum IfExpr { /// An if expression that compares two values. Compare { /// Left hand side of the compare operation. var: Var, /// Compare operand. op: IfOp, /// Right hand side of the compare operation. other: ByteSpan, }, /// An if expression that checks if a value is defined. Exists { /// Variable to check existence for. var: Var, }, /// An if expression that checks if a value is not defined. NotExists { /// Variable to check not existence for. var: Var, }, }
use std::collections::HashMap;

use crate::utils::file2vec;

/// AoC 2020 day 4: counts passports that pass the part-1 check (all required
/// fields present) and the part-2 check (field values valid), printing both.
pub fn day4(filename: &String){
    let contents = file2vec::<String>(filename);
    // NOTE(review): `x.to_owned().unwrap()` implies `file2vec` yields items
    // that carry an `unwrap` (Option/Result-like) — confirm against
    // `utils::file2vec`.
    let contents:PassPortList = PassPortList {
        passports : &contents.iter().map(|x| x.to_owned().unwrap()).collect(),
        ptr:0,
        size: contents.len()};
    let iter = contents.into_iter();
    // Tally both validity rules in one pass: acc[0] = part 1, acc[1] = part 2.
    let count = iter.fold(vec![0,0], |mut acc, pport|{
        acc[0] += if pport.is_valid1(){1}else{0};
        acc[1] += if pport.is_valid2(){1}else{0};
        acc
    });
    println!("{} valid passports pt1", count[0]);
    println!("{} valid passports pt2", count[1]);
}

// Cursor over the raw input lines; iterated to yield one PassPort per
// blank-line-separated record.
struct PassPortList<'a> {
    passports: &'a Vec<String>, // raw input lines
    ptr: usize,                 // index of the next unconsumed line
    size: usize                 // total number of lines
}

// One passport record; every field is optional because records may omit keys.
#[derive(Debug)]
struct PassPort {
    byr: Option<String>,
    iyr: Option<String>,
    eyr: Option<String>,
    hgt: Option<String>,
    hcl: Option<String>,
    ecl: Option<String>,
    pid: Option<String>,
    cid: Option<String>
}

impl PassPort{
    /// Builds a PassPort by moving the known keys out of a parsed `key -> value` map.
    fn from_dict(mut map: HashMap<String, String>)->PassPort{
        let byr:Option<String> = map.remove("byr");
        let iyr:Option<String> = map.remove("iyr");
        let eyr:Option<String> = map.remove("eyr");
        let hgt:Option<String> = map.remove("hgt");
        let hcl:Option<String> = map.remove("hcl");
        let ecl:Option<String> = map.remove("ecl");
        let pid:Option<String> = map.remove("pid");
        let cid:Option<String> = map.remove("cid");
        PassPort { byr, iyr, eyr, hgt, hcl, ecl, pid, cid}
    }

    /// Part 1: valid iff every required field is present.
    /// `cid` is deliberately not checked — it is optional per the puzzle rules.
    fn is_valid1(&self)->bool{
        true
        & match &self.byr { Some(x) => true, None=> false }
        & match &self.iyr { Some(x) => true, None=> false }
        & match &self.eyr { Some(x) => true, None=> false }
        & match &self.hgt { Some(x) => true, None=> false }
        & match &self.hcl { Some(x) => true, None=> false }
        & match &self.ecl { Some(x) => true, None=> false }
        & match &self.pid { Some(x) => true, None=> false }
    }

    /// Part 2: valid iff every required field is present AND its value is in
    /// range. Uses non-short-circuiting `&` so each arm is evaluated; the
    /// slicing in the `hgt`/`hcl` arms panics on values shorter than the
    /// sliced prefix (puzzle input is assumed well-formed enough).
    fn is_valid2(&self)->bool{
        true
        // byr: four digits, 1920..=2002
        & match &self.byr {
            Some(x) => {
                (x.len() == 4) &
                match x.parse::<i32>() {
                    Ok(val) => (1920 <= val) & (val <= 2002),
                    Err(e)=>false
                }
            },
            None=> false
        }
        // iyr: four digits, 2010..=2020
        & match &self.iyr {
            Some(x) => {
                (x.len() == 4) &
                match x.parse::<i32>() {
                    Ok(val) => (2010 <= val) & (val <= 2020),
                    Err(e)=> false
                }
            },
            None=> false
        }
        // eyr: four digits, 2020..=2030
        & match &self.eyr {
            Some(x) => {
                (x.len() == 4) &
                match x.parse::<i32>() {
                    Ok(val) => (2020 <= val) & (val <= 2030),
                    Err(e)=>false
                }
            },
            None=> false
        }
        // hgt: number followed by "cm" (150..=193) or "in" (59..=76).
        // The `&` before the parse makes the match operate on a &Result, so
        // `Ok(val)` binds `val: &i32` (hence the derefs below).
        & match &self.hgt {
            Some(x) => {
                match &x[&x.len()-2..] {
                    "cm" => match &x[..&x.len()-2].parse::<i32>() {
                        Ok(val) => (150 <= *val) & (*val <= 193),
                        Err(e)=> false
                    },
                    "in" => match &x[..&x.len()-2].parse::<i32>() {
                        Ok(val) => (59 <= *val) & (*val <= 76),
                        Err(e)=> false
                    },
                    _ => false
                }
            },
            None=> false
        }
        // hcl: '#' followed by exactly six hex digits
        & match &self.hcl {
            Some(x) => {
                (&x[0..1] == "#") &
                (x[..].len() == 7 as usize) &
                x[1..].chars().fold(true, |mut acc, c| {
                    acc &= c.is_digit(16);
                    acc
                })
            },
            None=> false
        }
        // ecl: one of the seven allowed eye colours
        & match &self.ecl {
            Some(x) => {
                (x == "amb") | (x == "blu") | (x == "brn") | (x == "gry") |
                (x == "grn") | (x == "hzl") | (x == "oth")
            },
            None=> false
        }
        // pid: exactly nine decimal digits
        & match &self.pid {
            Some(x) => {
                (x.len() == 9) &
                x.chars().fold(true, |mut acc, c| {
                    acc &= c.is_digit(10);
                    acc
                })
            },
            None=> false
        }
    }
}

impl<'a> Iterator for PassPortList<'a> {
    type Item = PassPort;

    /// Consumes lines until a blank line (or end of input), merging the
    /// `key:value` pairs of each consumed line into one record.
    fn next(&mut self) -> Option<Self::Item> {
        match self.ptr {
            x if x<self.size => {
                let mut map = HashMap::new();
                let mut row = self.passports[self.ptr].to_owned();
                while row != "" {
                    map = parse_line(row, map);
                    self.ptr += 1;
                    if self.ptr == self.size {
                        break
                    }
                    row = self.passports[self.ptr].to_owned();
                };
                // Skip past the blank separator line.
                self.ptr += 1;
                Some(PassPort::from_dict(map))
            },
            _ => None
        }
    }
}

/// Splits one input line into whitespace-separated `key:value` fields and
/// inserts them into `map`, which is threaded through and returned.
fn parse_line(line: String, mut map: HashMap<String, String>)->HashMap<String, String>{
    line.split_whitespace()
        .fold(map, |mut acc, field| -> HashMap<String, String> {
            let key_value: Vec<&str> = field.split(':').collect();
            acc.insert(key_value[0].to_owned(), key_value[1].to_owned());
            acc
        })
}
extern crate r2d2; extern crate r2d2_postgres; extern crate postgres; use std::thread; use r2d2_postgres::{TlsMode, PostgresConnectionManager}; struct Person { id: i32, username: String } fn main() { let manager = PostgresConnectionManager::new("postgres://jeka:0454@localhost/diesel_demo", TlsMode::None).unwrap(); let pool = r2d2::Pool::new(manager).unwrap(); for i in 0..10i32 { let pool = pool.clone(); thread::spawn(move || { let conn = pool.get().unwrap(); // conn.execute("INSERT INTO foo (bar) VALUES ($1)", &[&i]).unwrap(); for row in &conn.query("SELECT id, username FROM users", &[]).unwrap() { let person = Person { id: row.get(0), username: row.get(1) }; println!("Found person {}: {}", person.id, person.username); } }); } }
// Internal implementation modules; kept crate-private so callers depend on
// this module's root rather than the submodule paths.
pub(crate) mod func;
pub(crate) mod types;

// Re-export everything publicly from the root.
pub use func::*;
pub use types::*;
//
// Copyright 2021 The Project Oak Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//

use crate::{
    handshaker::{AttestationBehavior, ClientHandshaker, ServerHandshaker},
    tests::message::INVALID_MESSAGE_HEADER,
};
use assert_matches::assert_matches;

const TEE_MEASUREMENT: &str = "Test TEE measurement";
const DATA: [u8; 10] = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];

/// Builds a client/server handshaker pair, both configured for bidirectional
/// attestation against the same test TEE measurement.
fn create_handshakers() -> (ClientHandshaker, ServerHandshaker) {
    let bidirectional_attestation =
        AttestationBehavior::create_bidirectional_attestation(&[], TEE_MEASUREMENT.as_bytes())
            .unwrap();
    let client_handshaker = ClientHandshaker::new(bidirectional_attestation);

    let bidirectional_attestation =
        AttestationBehavior::create_bidirectional_attestation(&[], TEE_MEASUREMENT.as_bytes())
            .unwrap();
    let server_handshaker = ServerHandshaker::new(bidirectional_attestation);

    (client_handshaker, server_handshaker)
}

// Both attestation behaviors should be constructible from empty inputs.
#[test]
fn test_create_attestation_behavior() {
    let self_attestation = AttestationBehavior::create_self_attestation(&[]);
    assert_matches!(self_attestation, Ok(_));

    let bidirectional_attestation =
        AttestationBehavior::create_bidirectional_attestation(&[], &[]);
    assert_matches!(bidirectional_attestation, Ok(_));
}

// Happy path: full hello/identity exchange completes on both sides and the
// derived encryptors can round-trip data in both directions.
#[test]
fn test_handshake() {
    let (mut client_handshaker, mut server_handshaker) = create_handshakers();

    let client_hello = client_handshaker
        .create_client_hello()
        .expect("Couldn't create client hello message");

    let server_identity = server_handshaker
        .next_step(&client_hello)
        .expect("Couldn't process client hello message")
        .expect("Empty server identity message");

    let client_identity = client_handshaker
        .next_step(&server_identity)
        .expect("Couldn't process server identity message")
        .expect("Empty client identity message");
    assert!(client_handshaker.is_completed());

    // The final step on the server produces no response message.
    let result = server_handshaker
        .next_step(&client_identity)
        .expect("Couldn't process client identity message");
    assert_matches!(result, None);
    assert!(server_handshaker.is_completed());

    let mut client_encryptor = client_handshaker
        .get_encryptor()
        .expect("Couldn't get client encryptor");
    let mut server_encryptor = server_handshaker
        .get_encryptor()
        .expect("Couldn't get server encryptor");

    // Client -> server round trip.
    let encrypted_client_data = client_encryptor
        .encrypt(&DATA)
        .expect("Couldn't encrypt client data");
    let decrypted_client_data = server_encryptor
        .decrypt(&encrypted_client_data)
        .expect("Couldn't decrypt client data");
    assert_eq!(decrypted_client_data, DATA);

    // Server -> client round trip.
    let encrypted_server_data = server_encryptor
        .encrypt(&DATA)
        .expect("Couldn't encrypt server data");
    let decrypted_server_data = client_encryptor
        .decrypt(&encrypted_server_data)
        .expect("Couldn't decrypt server data");
    assert_eq!(decrypted_server_data, DATA);
}

// A malformed message right after construction must abort both handshakers,
// and an aborted client must refuse to create a hello message.
#[test]
fn test_invalid_message_after_initialization() {
    let (mut client_handshaker, mut server_handshaker) = create_handshakers();
    let invalid_message = vec![INVALID_MESSAGE_HEADER];

    let result = client_handshaker.next_step(&invalid_message);
    assert_matches!(result, Err(_));
    assert!(client_handshaker.is_aborted());
    let result = client_handshaker.create_client_hello();
    assert_matches!(result, Err(_));

    let result = server_handshaker.next_step(&invalid_message);
    assert_matches!(result, Err(_));
    assert!(server_handshaker.is_aborted());
}

// A malformed message after the hello exchange aborts each side; an aborted
// client also rejects the (valid) server identity that follows.
#[test]
fn test_invalid_message_after_hello() {
    let (mut client_handshaker, mut server_handshaker) = create_handshakers();
    let invalid_message = vec![INVALID_MESSAGE_HEADER];

    let client_hello = client_handshaker.create_client_hello().unwrap();

    let result = client_handshaker.next_step(&invalid_message);
    assert_matches!(result, Err(_));
    assert!(client_handshaker.is_aborted());

    let server_identity = server_handshaker.next_step(&client_hello).unwrap().unwrap();

    let result = server_handshaker.next_step(&invalid_message);
    assert_matches!(result, Err(_));
    assert!(server_handshaker.is_aborted());

    let result = client_handshaker.next_step(&server_identity);
    assert_matches!(result, Err(_));
}

// A malformed message after the identity exchange aborts each side; an
// aborted server also rejects the (valid) client identity that follows.
#[test]
fn test_invalid_message_after_identities() {
    let (mut client_handshaker, mut server_handshaker) = create_handshakers();
    let invalid_message = vec![INVALID_MESSAGE_HEADER];

    let client_hello = client_handshaker.create_client_hello().unwrap();
    let server_identity = server_handshaker.next_step(&client_hello).unwrap().unwrap();
    let client_identity = client_handshaker
        .next_step(&server_identity)
        .unwrap()
        .unwrap();

    let result = client_handshaker.next_step(&invalid_message);
    assert_matches!(result, Err(_));
    assert!(client_handshaker.is_aborted());

    let result = server_handshaker.next_step(&invalid_message);
    assert_matches!(result, Err(_));
    assert!(server_handshaker.is_aborted());

    let result = server_handshaker.next_step(&client_identity);
    assert_matches!(result, Err(_));
}

// A server identity produced for one client must not be accepted by another
// client (replay protection).
#[test]
fn test_replay_server_identity() {
    let (mut first_client_handshaker, mut first_server_handshaker) = create_handshakers();
    let (mut second_client_handshaker, _) = create_handshakers();

    let first_client_hello = first_client_handshaker.create_client_hello().unwrap();
    let first_server_identity = first_server_handshaker
        .next_step(&first_client_hello)
        .unwrap()
        .unwrap();

    let _ = second_client_handshaker.create_client_hello().unwrap();
    let result = second_client_handshaker.next_step(&first_server_identity);
    assert_matches!(result, Err(_));
    assert!(second_client_handshaker.is_aborted());
}

// A client identity produced for one server must not be accepted by another
// server (replay protection).
#[test]
fn test_replay_client_identity() {
    let (mut first_client_handshaker, mut first_server_handshaker) = create_handshakers();
    let (mut second_client_handshaker, mut second_server_handshaker) = create_handshakers();

    let first_client_hello = first_client_handshaker.create_client_hello().unwrap();
    let first_server_identity = first_server_handshaker
        .next_step(&first_client_hello)
        .unwrap()
        .unwrap();
    let first_client_identity = first_client_handshaker
        .next_step(&first_server_identity)
        .unwrap()
        .unwrap();

    let second_client_hello = second_client_handshaker.create_client_hello().unwrap();
    let _ = second_server_handshaker
        .next_step(&second_client_hello)
        .unwrap()
        .unwrap();

    let result = second_server_handshaker.next_step(&first_client_identity);
    assert_matches!(result, Err(_));
}
extern crate wasm_bindgen;

use apollo_query_planner::{QueryPlanner, QueryPlanningOptions};
use js_sys::JsString;
use wasm_bindgen::prelude::*;

// Single-slot caches for the schema string and the planner built from it.
// The planner borrows SCHEMA[0], so the schema must be kept alive for as
// long as the planner is.
// NOTE(review): `static mut` is presumably relied on being sound because wasm
// executes single-threaded — confirm. Replacing the schema in
// `get_query_planner` invalidates any planner pointer previously handed to
// JS; verify callers never reuse a stale pointer.
static mut SCHEMA: Vec<String> = vec![];
static mut DATA: Vec<QueryPlanner> = vec![];

/// Builds (or rebuilds) the cached `QueryPlanner` for `schema` and returns the
/// planner's address as an opaque handle for later `get_query_plan` calls.
#[wasm_bindgen(js_name = getQueryPlanner)]
pub fn get_query_planner(schema: JsString) -> usize {
    unsafe {
        if SCHEMA.is_empty() {
            SCHEMA.push(String::from(schema));
            DATA.push(QueryPlanner::new(&SCHEMA[0]));
        } else {
            // Reuse the single slot: overwrite schema and rebuild the planner.
            SCHEMA[0] = String::from(schema);
            DATA[0] = QueryPlanner::new(&SCHEMA[0]);
        }
        let data = &DATA[0];
        data as *const QueryPlanner as usize
    }
}

/// Plans `query` using the planner behind `planner_ptr` (a handle previously
/// returned by `get_query_planner`) and returns the plan serialized to JS.
///
/// Panics (via `unwrap`) on invalid options, planning failure, or
/// serialization failure.
#[wasm_bindgen(js_name = getQueryPlan)]
pub fn get_query_plan(planner_ptr: usize, query: &str, options: &JsValue) -> JsValue {
    let options: QueryPlanningOptions = options.into_serde().unwrap();
    unsafe {
        // Caller must pass a handle from `get_query_planner` that has not been
        // invalidated by a subsequent call.
        let planner = planner_ptr as *const QueryPlanner;
        let planner: &QueryPlanner = &*planner;
        let plan = planner.plan(query, options).unwrap();
        JsValue::from_serde(&plan).unwrap()
    }
}

#[cfg(test)]
mod tests {
    use crate::{get_query_plan, get_query_planner};
    use apollo_query_planner::model::{FetchNode, PlanNode, QueryPlan};
    use apollo_query_planner::QueryPlanningOptionsBuilder;
    use js_sys::JsString;
    use wasm_bindgen::JsValue;
    use wasm_bindgen_test::*;

    // End-to-end: obtain a planner handle once and plan a query through it.
    #[wasm_bindgen_test]
    fn getting_a_query_planner_and_using_it_multiple_times() {
        let schema = include_str!("../../query-planner/tests/features/basic/csdl.graphql");
        let planner = get_query_planner(JsString::from(schema));
        let query = "query { me { name } }";

        let expected = QueryPlan {
            node: Some(PlanNode::Fetch(FetchNode {
                service_name: String::from("accounts"),
                requires: None,
                variable_usages: vec![],
                operation: String::from("{me{name}}"),
            })),
        };
        let options = QueryPlanningOptionsBuilder::default().build().unwrap();
        let options = JsValue::from_serde(&options).unwrap();
        let result = get_query_plan(planner, query, &options);
        let plan = result.into_serde::<QueryPlan>().unwrap();
        assert_eq!(plan, expected);
    }
}
use jsonwebtoken::{
    decode, encode, get_current_timestamp, Algorithm, DecodingKey, EncodingKey, Validation,
};
use ring::signature::{Ed25519KeyPair, KeyPair};
use serde::{Deserialize, Serialize};

/// Claims carried by the demo tokens.
#[derive(Debug, Serialize, Deserialize)]
pub struct Claims {
    sub: String,
    // Expiry as a unix timestamp in seconds.
    exp: u64,
}

/// Demo: generate an Ed25519 key pair, sign a token with the private key, and
/// verify it with the public key.
fn main() {
    // The PKCS#8 document contains the private key; the public half is
    // re-derived from it for verification.
    let doc = Ed25519KeyPair::generate_pkcs8(&ring::rand::SystemRandom::new()).unwrap();
    let encoding_key = EncodingKey::from_ed_der(doc.as_ref());

    let pair = Ed25519KeyPair::from_pkcs8(doc.as_ref()).unwrap();
    let decoding_key = DecodingKey::from_ed_der(pair.public_key().as_ref());

    // Expire one hour from now. Using `get_current_timestamp()` alone made the
    // token expired at issue time, so validation only succeeded thanks to the
    // default 60-second leeway — a flaky demo.
    let claims = Claims { sub: "test".to_string(), exp: get_current_timestamp() + 3600 };

    let token =
        encode(&jsonwebtoken::Header::new(Algorithm::EdDSA), &claims, &encoding_key).unwrap();

    let validation = Validation::new(Algorithm::EdDSA);
    let _token_data = decode::<Claims>(&token, &decoding_key, &validation).unwrap();
}

#[cfg(test)]
mod tests {
    use super::*;

    /// Bundles a matched signing/verification key pair.
    struct Jot {
        encoding_key: EncodingKey,
        decoding_key: DecodingKey,
    }

    impl Jot {
        fn new() -> Jot {
            let doc = Ed25519KeyPair::generate_pkcs8(&ring::rand::SystemRandom::new()).unwrap();
            let encoding_key = EncodingKey::from_ed_der(doc.as_ref());

            let pair = Ed25519KeyPair::from_pkcs8(doc.as_ref()).unwrap();
            let decoding_key = DecodingKey::from_ed_der(pair.public_key().as_ref());
            Jot { encoding_key, decoding_key }
        }
    }

    // Round trip: a freshly signed token must decode with the matching key
    // and preserve its claims.
    #[test]
    fn test() {
        let jot = Jot::new();
        // Expiry in the future so the test does not depend on validation leeway.
        let claims = Claims { sub: "test".to_string(), exp: get_current_timestamp() + 3600 };

        let token =
            encode(&jsonwebtoken::Header::new(Algorithm::EdDSA), &claims, &jot.encoding_key)
                .unwrap();

        let validation = Validation::new(Algorithm::EdDSA);
        let token_data = decode::<Claims>(&token, &jot.decoding_key, &validation).unwrap();
        assert_eq!(token_data.claims.sub, "test");
    }
}
#![ allow( dead_code ) ]
#![ allow( clippy::suspicious_else_formatting ) ]

use
{
	futures :: { * } ,
	log     :: { * } ,
	std     :: { io, task::{ Poll, Context }, pin::Pin, collections::VecDeque } ,
};

/// One scripted outcome for a `TestStream` poll.
#[ derive( Debug, PartialEq, Eq, Clone ) ]
//
pub enum Action
{
	Pending                 ,
	Error( io::ErrorKind )  ,
	Data ( Vec<u8>       )  ,
}

impl From<Vec<u8>> for Action
{
	fn from( input: Vec<u8> ) -> Self
	{
		Action::Data( input )
	}
}

impl From<io::Error> for Action
{
	// Only the kind is kept; io::Error itself is not Clone/PartialEq.
	fn from( input: io::Error ) -> Self
	{
		Action::Error( input.kind() )
	}
}

/// A scripted `Stream` test double: replays the queued actions in order and
/// ends (returns `None`) once the queue is exhausted.
pub struct TestStream
{
	actions: VecDeque<Action> ,
	polled : usize            , // # times poll_next was called.
}

impl TestStream
{
	pub fn new( actions: VecDeque<Action> ) -> Self
	{
		Self
		{
			actions    ,
			polled : 0 ,
		}
	}

	/// Number of times `poll_next` has been called so far.
	pub fn polled( &self ) -> usize
	{
		self.polled
	}
}

impl Stream for TestStream
{
	type Item = Result< Vec<u8>, io::Error >;

	// Pops the next scripted action; note that a `Pending` action does not
	// register a waker, so the caller is responsible for re-polling.
	fn poll_next(mut self: Pin<&mut Self>, _cx: &mut Context) -> Poll<Option<Self::Item>>
	{
		self.polled += 1;

		if let Some( action ) = self.actions.pop_front()
		{
			trace!( "poll_next with: {:?}", &action );

			match action
			{
				Action::Pending     => Poll::Pending                             ,
				Action::Data(data)  => Poll::Ready( Ok ( data       ).into() )   ,
				Action::Error(err)  => Poll::Ready( Err( err.into() ).into() )   ,
			}
		}

		else
		{
			Poll::Ready( None )
		}
	}
}

/// Scripted outcome for `TestSink::poll_ready`.
#[ derive( Debug, PartialEq, Eq, Clone, Copy ) ]
//
pub enum ReadyAction
{
	Pending                ,
	Ok                     ,
	Error( io::ErrorKind ) ,
}

/// Scripted outcome for `TestSink::start_send`.
#[ derive( Debug, PartialEq, Eq, Clone, Copy ) ]
//
pub enum SendAction
{
	Ok                     ,
	Error( io::ErrorKind ) ,
}

/// Scripted outcome for `TestSink::poll_flush`.
#[ derive( Debug, PartialEq, Eq, Clone, Copy ) ]
//
pub enum FlushAction
{
	Pending                ,
	Ok                     ,
	Error( io::ErrorKind ) ,
}

/// A scripted `Sink` test double: each call consumes the next action from the
/// corresponding script (indexing panics if the script is shorter than the
/// number of calls). Successfully sent items are recorded in `items`.
pub struct TestSink
{
	pub poll_ready: usize , // # times poll_ready was called.
	pub start_send: usize , // # times start_send was called.
	pub poll_flush: usize , // # times poll_flush was called.

	pub ready_actions : Vec< ReadyAction > ,
	pub send_actions  : Vec< SendAction  > ,
	pub flush_actions : Vec< FlushAction > ,

	pub items: Vec< Vec<u8> > ,
}

impl TestSink
{
	pub fn new( ready_actions: Vec< ReadyAction>, send_actions: Vec< SendAction>, flush_actions: Vec< FlushAction> ) -> Self
	{
		Self
		{
			poll_ready: 0 ,
			start_send: 0 ,
			poll_flush: 0 ,

			ready_actions ,
			send_actions  ,
			flush_actions ,

			items: Vec::new() ,
		}
	}
}

impl Sink< Vec<u8> > for TestSink
{
	type Error = io::Error;

	fn poll_ready( mut self: Pin<&mut Self>, _cx: &mut Context ) -> Poll<Result<(), Self::Error>>
	{
		self.poll_ready += 1;

		match self.ready_actions[ self.poll_ready - 1 ]
		{
			ReadyAction::Pending  => Poll::Pending                          ,
			ReadyAction::Ok       => Poll::Ready( Ok(()) )                  ,
			ReadyAction::Error(e) => Poll::Ready( Err( io::Error::from(e) )),
		}
	}

	fn start_send( mut self: Pin<&mut Self>, item: Vec<u8> ) -> Result<(), Self::Error>
	{
		self.start_send += 1;

		match self.send_actions[ self.start_send - 1 ]
		{
			SendAction::Error(e) => Err( io::Error::from(e) ) ,

			SendAction::Ok =>
			{
				// Record the item so tests can assert on what was sent.
				self.items.push( item );
				Ok(())
			}
		}
	}

	fn poll_flush( mut self: Pin<&mut Self>, _cx: &mut Context ) -> Poll<Result<(), Self::Error>>
	{
		self.poll_flush += 1;

		match self.flush_actions[ self.poll_flush - 1 ]
		{
			FlushAction::Pending  => Poll::Pending                          ,
			FlushAction::Ok       => Poll::Ready( Ok(()) )                  ,
			FlushAction::Error(e) => Poll::Ready( Err( io::Error::from(e) )),
		}
	}

	// Closing is not scripted; it always succeeds immediately.
	fn poll_close( self: Pin<&mut Self>, _cx: &mut Context ) -> Poll<Result<(), Self::Error>>
	{
		Poll::Ready(Ok(()))
	}
}
// Copyright 2014-2018 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![warn(clippy::all)] #![warn(clippy::if_not_else)] fn bla() -> bool { unimplemented!() } fn main() { if !bla() { println!("Bugs"); } else { println!("Bunny"); } if 4 != 5 { println!("Bugs"); } else { println!("Bunny"); } }
extern crate rand; use rand::{thread_rng,Rng}; use std::env; use std::process; #[allow(dead_code)] #[allow(non_camel_case_types)] // Each die_type contains a value. // For die_type::constant this value is equal to its total value // For all the rest, this value represents the amount of dice // to be rolled enum die_type {constant(u32), d2(u32), d3(u32), d4(u32), d5(u32), d6(u32), d8(u32), d10(u32), d12(u32), d20(u32), d100(u32), } // Add Error catching!! fn parse_die_string(die_string: &Vec<&str>, die_list: &mut Vec<die_type>) { let mut die_size = 0; let mut amount = 0; for s in die_string { // Check if &str is die if s.contains("d") { let (left, right) = s.split_at(s.find('d').unwrap()); // check if no dice amount is specified if left.is_empty() == true { amount = 1; } else { amount = left.parse::<u32>().unwrap(); } die_size = right.trim_left_matches('d').parse::<u32>().unwrap(); match die_size { 2 => die_list.push(die_type::d2(amount)), 3 => die_list.push(die_type::d3(amount)), 4 => die_list.push(die_type::d4(amount)), 5 => die_list.push(die_type::d5(amount)), 6 => die_list.push(die_type::d6(amount)), 8 => die_list.push(die_type::d8(amount)), 10 => die_list.push(die_type::d10(amount)), 12 => die_list.push(die_type::d12(amount)), 20 => die_list.push(die_type::d20(amount)), 100 => die_list.push(die_type::d100(amount)), _ => { println!("Warning: {} is not a valid die size", die_size); println!("Valid die sizes are: d2, d3, d4, d5, d6, d8, d10, d12, d20, d100"); process::exit(0x0100); }, } // Check if &str is non-valid (TODO) // Check if &str is constant } else { amount = s.parse::<u32>().unwrap(); die_list.push(die_type::constant(amount)); } } } // Roll the given die equal to its internal value fn roll_die(d: &die_type) -> u32 { let mut rng = thread_rng(); let mut i = 0u32; match d { &die_type::constant(n) => n, // Constants aren't rerolled &die_type::d2(n) => { for _ in 0..n {i += rng.gen_range(1u32, 3u32)} i }, &die_type::d3(n) => { for _ in 0..n {i += 
rng.gen_range(1u32, 4u32)} i }, &die_type::d4(n) => { for _ in 0..n {i += rng.gen_range(1u32, 5u32)} i }, &die_type::d5(n) => { for _ in 0..n {i += rng.gen_range(1u32, 6u32)} i }, &die_type::d6(n) => { for _ in 0..n {i += rng.gen_range(1u32, 7u32)} i }, &die_type::d8(n) => { for _ in 0..n {i += rng.gen_range(1u32, 9u32)} i }, &die_type::d10(n) => { for _ in 0..n {i += rng.gen_range(1u32, 11u32)} i }, &die_type::d12(n) => { for _ in 0..n {i += rng.gen_range(1u32, 13u32)} i }, &die_type::d20(n) => { for _ in 0..n {i += rng.gen_range(1u32, 21u32)} i }, &die_type::d100(n) => { for _ in 0..n {i += rng.gen_range(1u32, 101u32)} i }, } } fn gen_total(die_list: &Vec<die_type>) -> u32 { let mut total = 0u32; for n in die_list { total += roll_die(n); } total } fn main() { // Establish a vector of possible error strings let mut err_vec: Vec<String> = Vec::new(); let mut n = 1u32; let args: Vec<String> = env::args().collect(); if args.len() == 1 { println!("This program requires an input string!"); println!(" e.g. '4d6+3d12+30'"); process::exit(1); } if args.len() == 3 { n = args[2].parse::<u32>().unwrap(); } let input = args[1].to_lowercase(); let string_list: Vec<&str> = input.split('+').collect(); let mut dice: Vec<die_type> = Vec::with_capacity(string_list.len()); parse_die_string(&string_list, &mut dice); while n > 0 { println!("{}", gen_total(&dice)); n -= 1; } }
extern crate linux_embedded_hal;
extern crate hd44780_driver;

use linux_embedded_hal::{Delay, Pin};
use linux_embedded_hal::sysfs_gpio::Direction;
use hd44780_driver::{HD44780, DisplayMode, Cursor, CursorBlink, Display};

/// Wires up an HD44780 character LCD in 8-bit mode on the given GPIOs and
/// prints a greeting with a blinking cursor.
fn main() {
    let rs = Pin::new(26);
    let en = Pin::new(22);

    let db0 = Pin::new(19);
    let db1 = Pin::new(13);
    let db2 = Pin::new(6);
    let db3 = Pin::new(5);
    let db4 = Pin::new(21);
    let db5 = Pin::new(20);
    let db6 = Pin::new(16);
    let db7 = Pin::new(12);

    // Export all GPIOs first, then drive each one low, in the same order the
    // pins are later handed to the driver.
    {
        let pins = [&rs, &en, &db0, &db1, &db2, &db3, &db4, &db5, &db6, &db7];

        for pin in pins.iter() {
            pin.export().unwrap();
        }

        for pin in pins.iter() {
            pin.set_direction(Direction::Low).unwrap();
        }
    }

    let mut lcd = HD44780::new_8bit(
        rs, en, db0, db1, db2, db3, db4, db5, db6, db7, Delay,
    );

    lcd.reset();
    lcd.clear();

    lcd.set_display_mode(
        DisplayMode {
            display: Display::On,
            cursor_visibility: Cursor::Visible,
            cursor_blink: CursorBlink::On,
        }
    );

    lcd.write_str("Hello, world!");
}
use aoc_2020::day_03::*;

/// Entry point: reads the puzzle input from the path given as the first CLI
/// argument and prints the answers for both parts.
fn main() {
    // A bare `unwrap()` on a missing argument gave an unhelpful panic; tell
    // the user how to invoke the program instead.
    let filename = std::env::args()
        .nth(1)
        .expect("usage: provide the input file path as the first argument");
    let input = std::fs::read_to_string(filename).expect("Couldn't read input file");
    println!("Part 1: {}", part1(&input));
    println!("Part 2: {}", part2(&input));
}
use pasture_core::nalgebra::Vector3;
use pasture_core::{
    containers::InterleavedVecPointStorage,
    layout::{
        attributes, PointAttributeDataType, PointAttributeDefinition, PointLayout, PointType,
    },
};
use pasture_derive::PointType;

/// Tutorial walkthrough of the `PointLayout` type: what it describes, how it
/// is constructed, and how it is used throughout pasture.
fn main() {
    // In this example, we will take a closer look at the `PointLayout` type. We will learn what a `PointLayout` is, how it is constructed
    // and how it is used throughout pasture. Let's dive right in:
    {
        // A `PointLayout` describes the data attributes of a single point in a point cloud. It is quite similar to what a
        // `struct` in Rust is: A description for a collection of data attributes in a single type.
        // Let's create an empty `PointLayout`:
        let empty_layout: PointLayout = Default::default();

        // The empty `PointLayout` is quite boring, as it describes a point without any attributes. Still, let's see what we can do with
        // this layout.
        // We can ask for the size in bytes that a single point in this layout takes in memory. For the empty layout, the size is of course
        // zero:
        assert_eq!(0, empty_layout.size_of_point_entry());

        // We can ask the layout if it contains a specific attribute. Attributes in pasture are identified by a unique name. Since names can
        // be confusing, pasture provides a large number of constants for default attributes that we can use. Let's look at a very common
        // attribute: POSITION_3D
        let position_attribute = attributes::POSITION_3D;

        // This is a `PointAttributeDefinition`, a sort of template that represents a specific attribute. Think of it like a member of a Rust
        // `struct`. As such, it has some expected parameters:
        println!(
            "The position attribute is called: {}",
            position_attribute.name()
        );
        println!(
            "It has the following datatype: {}",
            position_attribute.datatype()
        );

        // The `datatype` of an attribute is one of a series of default datatypes that correspond to several Rust types. All supported datatypes
        // by pasture are defined in the `PointAttributeDataType` enum. They include most of the Rust primitive types, as well as a few Vector
        // types from the `nalgebra` crate. The `POSITION_3D` attribute for example has the default datatype `PointAttributeDataType::Vec3f64`, which
        // corresponds to the Rust type `nalgebra::Vector3<f64>`.

        // Now we can ask the layout if it contains this attribute:
        assert!(!empty_layout.has_attribute(&position_attribute));

        // Let's move on to a more interesting example: A layout that contains some attributes!
    }

    {
        // We can create a custom `PointLayout` by telling it the attributes that it should contain
        let layout =
            PointLayout::from_attributes(&[attributes::POSITION_3D, attributes::INTENSITY]);
        // This layout roughly corresponds to the following Rust-type:
        // ```
        // struct Point {
        //   pub position: Vector3<f64>,
        //   pub intensity: u16,
        // }
        // ```
        assert!(layout.has_attribute(&attributes::POSITION_3D));
        assert!(layout.has_attribute(&attributes::INTENSITY));

        // Let's look at the size of a single point in this layout:
        println!(
            "A single point in this layout takes {} bytes",
            layout.size_of_point_entry()
        );

        // The result might be surprising. POSITION_3D has datatype `nalgebra::Vector3<f64>`, which takes
        // 24 bytes. INTENSITY has datatype `u16`, which takes 2 bytes. This would equal 26 bytes total, however
        // the layout tells us 32 bytes instead. This is due to alignment requirements: Vector3<f64> has an 8-byte
        // minimum alignment. Indeed, if you call `std::mem::size_of::<Point>()`, you will also get 32 bytes.
    }

    {
        // We don't have to use default attributes in a `PointLayout`. We can use custom datatypes for the builtin attributes
        // as well as custom attribute names:
        let custom_layout = PointLayout::from_attributes(&[
            attributes::POSITION_3D.with_custom_datatype(PointAttributeDataType::Vec3f32),
            PointAttributeDefinition::custom("Custom", PointAttributeDataType::U64),
        ]);

        // We can ask the layout if it contains an attribute just by its name, ignoring the datatype:
        assert!(custom_layout.has_attribute_with_name(attributes::POSITION_3D.name()));
    }

    {
        // But what is a `PointLayout` really used for? In the `basic_point_buffers.rs` example, we already saw that a `PointLayout` is required
        // to create any type of `PointBuffer`. Indeed, point data in pasture is stored in an arbitrary format that gets determined at runtime.
        // This prevents using generics, as they are compile-time, so instead pasture uses the `PointLayout` type to figure out the memory layout
        // of points at runtime. To that end, you will rarely create `PointLayout`s manually. Instead, pasture provides a `derive` macro to create
        // a `PointLayout` for a specific type:

        #[derive(PointType)]
        #[repr(C)]
        struct CustomPointType {
            #[pasture(BUILTIN_POSITION_3D)]
            pub position: Vector3<f64>,
            #[pasture(BUILTIN_INTENSITY)]
            pub intensity: u16,
            #[pasture(attribute = "CUSTOM_ATTRIBUTE")]
            pub custom_attribute: f32,
        }

        let layout = CustomPointType::layout();
        println!(
            "The CustomPointType has the following PointLayout: {}",
            layout
        );

        //With this, we can create a `PointBuffer` that stores `CustomPointType`s
        let mut buffer = InterleavedVecPointStorage::new(layout);
        buffer.push_point(CustomPointType {
            position: Vector3::new(1.0, 2.0, 3.0),
            intensity: 42,
            custom_attribute: 3.14,
        });
    }
}
use std::sync::{Arc, Weak};

use anyhow::Result;
use tokio::sync::RwLock;

use super::{progress, Host, ResourcePool, ResourceResult, Service};

/// Tracks the hosts and services of a deployment together with the resources
/// provisioned for them.
#[derive(Default)]
pub struct Deployment {
    pub hosts: Vec<Arc<RwLock<dyn Host>>>,
    // Weak handles: a service released by its owner is pruned on the next
    // `deploy`/`start` call instead of being kept alive here.
    pub services: Vec<Weak<RwLock<dyn Service>>>,
    pub resource_pool: ResourcePool,
    // Result of the most recent provisioning run, reused as the baseline for
    // incremental re-provisioning.
    last_resource_result: Option<Arc<ResourceResult>>,
    next_host_id: usize,
    next_service_id: usize,
}

impl Deployment {
    /// Drops handles to services whose owners have already released them.
    /// (Previously duplicated inline in both `deploy` and `start`.)
    fn prune_dropped_services(&mut self) {
        self.services.retain(|service| service.upgrade().is_some());
    }

    /// Provisions resources for all hosts and live services, then deploys the
    /// services and waits until every one of them reports ready.
    pub async fn deploy(&mut self) -> Result<()> {
        progress::ProgressTracker::with_group("deploy", || async {
            let mut resource_batch = super::ResourceBatch::new();

            // Only live services take part in the rest of the deployment.
            self.prune_dropped_services();

            for service in self.services.iter_mut() {
                service
                    .upgrade()
                    .unwrap() // safe: dead services were pruned above
                    .write()
                    .await
                    .collect_resources(&mut resource_batch);
            }

            for host in self.hosts.iter_mut() {
                host.write().await.collect_resources(&mut resource_batch);
            }

            let result = Arc::new(
                progress::ProgressTracker::with_group("provision", || async {
                    resource_batch
                        .provision(&mut self.resource_pool, self.last_resource_result.clone())
                        .await
                })
                .await?,
            );
            self.last_resource_result = Some(result.clone());

            // Hand the provisioning result to every host, concurrently.
            progress::ProgressTracker::with_group("provision", || {
                let hosts_provisioned =
                    self.hosts
                        .iter_mut()
                        .map(|host: &mut Arc<RwLock<dyn Host>>| async {
                            host.write().await.provision(&result).await;
                        });
                futures::future::join_all(hosts_provisioned)
            })
            .await;

            // Deploy all services concurrently.
            progress::ProgressTracker::with_group("deploy", || {
                let services_future =
                    self.services
                        .iter_mut()
                        .map(|service: &mut Weak<RwLock<dyn Service>>| async {
                            service
                                .upgrade()
                                .unwrap()
                                .write()
                                .await
                                .deploy(&result)
                                .await;
                        });
                futures::future::join_all(services_future)
            })
            .await;

            // Fail the whole deployment if any service fails to become ready.
            progress::ProgressTracker::with_group("ready", || {
                let all_services_ready =
                    self.services
                        .iter()
                        .map(|service: &Weak<RwLock<dyn Service>>| async {
                            service.upgrade().unwrap().write().await.ready().await?;
                            Ok(()) as Result<()>
                        });
                futures::future::try_join_all(all_services_ready)
            })
            .await?;

            Ok(())
        })
        .await
    }

    /// Starts all live services concurrently.
    pub async fn start(&mut self) {
        self.prune_dropped_services();

        let all_services_start =
            self.services
                .iter()
                .map(|service: &Weak<RwLock<dyn Service>>| async {
                    service.upgrade().unwrap().write().await.start().await;
                });

        futures::future::join_all(all_services_start).await;
    }

    /// Registers a new host, constructed with a fresh sequential id, and
    /// returns a strong handle to it.
    pub fn add_host<T: Host + 'static, F: FnOnce(usize) -> T>(
        &mut self,
        host: F,
    ) -> Arc<RwLock<T>> {
        let arc = Arc::new(RwLock::new(host(self.next_host_id)));
        self.next_host_id += 1;
        self.hosts.push(arc.clone());
        arc
    }

    /// Registers a new service, constructed with a fresh sequential id. The
    /// deployment only keeps a weak handle; the returned strong handle keeps
    /// the service alive.
    pub fn add_service<T: Service + 'static>(
        &mut self,
        service: impl FnOnce(usize) -> T,
    ) -> Arc<RwLock<T>> {
        let arc = Arc::new(RwLock::new(service(self.next_service_id)));
        self.next_service_id += 1;
        let dyn_arc: Arc<RwLock<dyn Service>> = arc.clone();
        self.services.push(Arc::downgrade(&dyn_arc));
        arc
    }
}
use std::error::Error; use crate::modules::intcode; pub fn run(input: &str) -> Result<String, Box<dyn Error>> { let mut machine = intcode::build_intcode_from_input(input)?; machine.run(); Ok("Done!".to_string()) }
#[cfg(feature = "client")]
use graphics::Context;
#[cfg(feature = "client")]
use opengl_graphics::Gl;

use battle_state::BattleContext;
use module;
use module::{IModule, Module, ModuleBase, ModuleRef};
use net::{InPacket, OutPacket};
use ship::{ShipRef, ShipState};
use sim::SimEventAdder;
use vec::{Vec2, Vec2f};

#[cfg(feature = "client")]
use sim_visuals::SpriteVisual;
#[cfg(feature = "client")]
use sim::{SimEffects, SimVisual};
#[cfg(feature = "client")]
use sprite_sheet::{SpriteSheet, SpriteAnimation};
#[cfg(feature = "client")]
use asset_store::AssetStore;

/// The command (bridge) module of a ship. Carries no per-instance state; all
/// module behaviour is driven through `ModuleBase` and the `IModule` hooks.
#[derive(RustcEncodable, RustcDecodable, Clone)]
pub struct CommandModule;

impl CommandModule {
    /// Wraps a fresh `CommandModule` in a `Module` with its base stats.
    /// NOTE(review): the meaning of `ModuleBase::new(1, 2, 0, 2, 4)`'s
    /// positional arguments (size/power/etc.) is not visible here — confirm
    /// against `ModuleBase::new`.
    pub fn new() -> Module<CommandModule> {
        Module {
            base: ModuleBase::new(1, 2, 0, 2, 4),
            module: CommandModule,
        }
    }
}

impl IModule for CommandModule {
    // The command module has no server-side simulation behaviour; these
    // hooks are intentionally empty.
    fn server_preprocess(&mut self, base: &mut ModuleBase, ship_state: &mut ShipState) {
    }

    fn before_simulation(&mut self, base: &mut ModuleBase, ship: &ShipRef, events: &mut SimEventAdder) {
    }

    /// Client-only: draws the command sprite, looping its animation while the
    /// module is active and holding the first frame otherwise.
    #[cfg(feature = "client")]
    fn add_plan_effects(&self, base: &ModuleBase, asset_store: &AssetStore, effects: &mut SimEffects, ship: &ShipRef) {
        let mut command_sprite = SpriteSheet::new(asset_store.get_sprite_info_str("modules/big_command_sprite.png"));

        if base.is_active() {
            command_sprite.add_animation(SpriteAnimation::Loop(0.0, 7.0, 0, 7, 0.2));
        } else {
            command_sprite.add_animation(SpriteAnimation::Stay(0.0, 7.0, 0));
        }

        effects.add_visual(ship.borrow().id, 0, box SpriteVisual {
            position: base.get_render_position().clone(),
            sprite_sheet: command_sprite,
        });
    }

    /// Client-only: the simulation visuals are identical to the plan visuals.
    #[cfg(feature = "client")]
    fn add_simulation_effects(&self, base: &ModuleBase, asset_store: &AssetStore, effects: &mut SimEffects, ship: &ShipRef) {
        self.add_plan_effects(base, asset_store, effects, ship);
    }

    fn after_simulation(&mut self, base: &mut ModuleBase, ship_state: &mut ShipState) {
    }

    fn on_activated(&mut self, base: &mut ModuleBase, ship_state: &mut ShipState, modules: &Vec<ModuleRef>) {
    }

    fn on_deactivated(&mut self, base: &mut ModuleBase, ship_state: &mut ShipState, modules: &Vec<ModuleRef>) {
    }

    // The command module cannot be targeted at anything.
    fn get_target_mode(&self, base: &ModuleBase) -> Option<module::TargetMode> {
        None
    }
}
use sdl2::event::Event; use sdl2::render::{Texture, WindowCanvas}; use sdl2::EventPump; use crate::Config; pub struct Gui { pub canvas: WindowCanvas, pub events: EventPump, } impl Gui { pub fn new(config: &Config) -> Gui { let sdl_context = sdl2::init().unwrap(); let video_subsystem = sdl_context.video().unwrap(); let mut window = video_subsystem.window("Rustboy", 480, 432); //160X144 if config.full_screen { window.fullscreen(); } let window = window .position_centered() .opengl() .build() .expect("could not initialize video subsystem"); let mut canvas = window .into_canvas() .accelerated() .present_vsync() .build() .expect("could not make a canvas"); canvas .set_scale(3.0, 3.0) .expect("Could not set canvas scale"); canvas.clear(); let event_pump = sdl_context.event_pump().unwrap(); Gui { //context: sdl_context, //video: video_subsystem, canvas: canvas, events: event_pump, } } pub fn clear(&mut self) { self.canvas.set_draw_color((255, 255, 255)); self.canvas.clear(); } pub fn update(&mut self) -> bool { if self .events .poll_iter() .any(|i| matches!(i, Event::Quit {..})) { false } else { self.canvas.present(); true } } pub fn push_matrix(&mut self, mat: &[[u8; 144]; 160], texture: &mut Texture) { let mut pixel_data: [u8; 69120] = [0; 69120]; //BGR let mut offset: u32; let mut r: u8; let mut g: u8; let mut b: u8; for i in 0..mat.len() { for j in 0..mat[i].len() { //println!("{}",mat[i][j]); match mat[i][j] { 0 => { r = 224; g = 248; b = 208; } 1 => { r = 136; g = 192; b = 112; } 2 => { r = 52; g = 104; b = 86; } _ => { r = 8; g = 24; b = 32; } } offset = (j as u32) * 480 + (i as u32) * 3; pixel_data[(offset) as usize] = r; pixel_data[(offset + 1) as usize] = g; pixel_data[(offset + 2) as usize] = b; } } texture .update(None, &pixel_data, 480 as usize) .expect("Couldn't update texture"); self.canvas .copy(&texture, None, None) .expect("Couldn't copy texture on canvas"); } }
extern crate rustc_version;

use std::env;
use std::fs::File;
use std::io::Write;
use std::path::PathBuf;
use std::ops::{Neg,Sub};

/*
 * Let me explain this hack. For the sync shell script it's easiest if every
 * line in mapping.rs looks exactly the same. This means that specifying an
 * array literal is not possible. include!() can only expand to expressions, so
 * just specifying the contents of an array is also not possible.
 *
 * This leaves us with trying to find an expression in which every line looks
 * the same. This can be done using the `-` operator. This can be a unary
 * operator (first thing on the first line), or a binary operator (later
 * lines). That is exactly what's going on here, and Neg and Sub simply build a
 * vector of the operands.
 */

// A single (compiler-commit-hash, source-commit) pair from mapping.rs.
struct Mapping(&'static str,&'static str);

// `-Mapping(..)` starts a new one-element vector…
impl Neg for Mapping {
	type Output = Vec<Mapping>;
	fn neg(self) -> Vec<Mapping> {
		vec![self.into()]
	}
}

// …and `vec - Mapping(..)` appends, so the include!'d chain of `-` lines
// folds into a Vec<Mapping>.
impl Sub<Mapping> for Vec<Mapping> {
	type Output=Vec<Mapping>;
	fn sub(mut self, rhs: Mapping) -> Vec<Mapping> {
		self.push(rhs.into());
		self
	}
}

/// Build script: resolves which vendored copy of the io sources matches the
/// current compiler (via CORE_IO_COMMIT or mapping.rs), emits version-gated
/// cfg flags, and generates $OUT_DIR/io.rs pointing `mod io` at that copy.
fn main() {
	let ver=rustc_version::version_meta();

	// Prefer an explicit override; otherwise look the compiler's commit hash
	// up in mapping.rs.
	let io_commit=match env::var("CORE_IO_COMMIT") {
		Ok(c) => c,
		Err(env::VarError::NotUnicode(_)) => panic!("Invalid commit specified in CORE_IO_COMMIT"),
		Err(env::VarError::NotPresent) => {
			let mappings=include!("mapping.rs");

			let compiler=ver.commit_hash.expect("Couldn't determine compiler version");
			mappings.iter().find(|&&Mapping(elem,_)|elem==compiler).expect("Unknown compiler version, upgrade core_io?").1.to_owned()
		}
	};

	// cfg flags keyed off the compiler's commit date: the names under which
	// unicode/alloc internals were exposed changed at these dates.
	if ver.commit_date.as_ref().map_or(false,|d| &**d<"2016-12-15") {
		println!("cargo:rustc-cfg=rustc_unicode");
	} else if ver.commit_date.as_ref().map_or(false,|d| &**d<"2017-03-03") {
		println!("cargo:rustc-cfg=std_unicode");
	}

	if ver.commit_date.as_ref().map_or(false,|d| &**d>"2017-06-15") {
		println!("cargo:rustc-cfg=collections_in_alloc");
	}

	// Generate io.rs containing `#[path="…/src/<commit>/mod.rs"] mod io;`.
	let mut dest_path=PathBuf::from(env::var_os("OUT_DIR").unwrap());
	dest_path.push("io.rs");
	let mut f=File::create(&dest_path).unwrap();

	let mut target_path=PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap());
	target_path.push("src");
	target_path.push(io_commit);
	target_path.push("mod.rs");

	f.write_all(br#"#[path=""#).unwrap();
	f.write_all(target_path.into_os_string().into_string().unwrap().as_bytes()).unwrap();
	f.write_all(br#""] mod io;"#).unwrap();
}
use std::marker::PhantomData;

use necsim_core::{
    cogs::{
        CoalescenceSampler, DispersalSampler, EmigrationExit, Habitat, ImmigrationEntry,
        LineageReference, LineageStore, MinSpeciationTrackingEventSampler,
        PeekableActiveLineageSampler, PrimeableRng, SingularActiveLineageSampler,
        SpeciationProbability, TurnoverRate,
    },
    reporter::boolean::Boolean,
};

use rust_cuda::{
    rustacuda::{
        function::{BlockSize, Function, GridSize},
        module::Module,
        stream::Stream,
    },
    rustacuda_core::DeviceCopy,
};

use rust_cuda::common::RustToCuda;

use rustcoalescence_algorithms_cuda_kernel_ptx_jit::host::compiler::PtxJITCompiler;

use super::SimulationKernelWithDimensions;

/// Final stage of the kernel-launch builder: a kernel with launch dimensions
/// that has additionally been bound to a CUDA stream (`'s`).
///
/// All generic parameters mirror the simulation "cogs"; `marker` only records
/// them at the type level — no values of those types are stored.
#[allow(clippy::type_complexity)]
pub struct SimulationKernelWithDimensionsStream<
    'k,
    's,
    H: Habitat + RustToCuda,
    G: PrimeableRng + RustToCuda,
    R: LineageReference<H> + DeviceCopy,
    S: LineageStore<H, R> + RustToCuda,
    X: EmigrationExit<H, G, R, S> + RustToCuda,
    D: DispersalSampler<H, G> + RustToCuda,
    C: CoalescenceSampler<H, R, S> + RustToCuda,
    T: TurnoverRate<H> + RustToCuda,
    N: SpeciationProbability<H> + RustToCuda,
    E: MinSpeciationTrackingEventSampler<H, G, R, S, X, D, C, T, N> + RustToCuda,
    I: ImmigrationEntry + RustToCuda,
    A: SingularActiveLineageSampler<H, G, R, S, X, D, C, T, N, E, I>
        + PeekableActiveLineageSampler<H, G, R, S, X, D, C, T, N, E, I>
        + RustToCuda,
    ReportSpeciation: Boolean,
    ReportDispersal: Boolean,
> {
    // JIT compiler state and whether PTX JIT recompilation is enabled.
    pub(super) compiler: &'k mut PtxJITCompiler,
    pub(super) ptx_jit: bool,
    // Loaded CUDA module and the kernel entry point inside it.
    pub(super) module: &'k mut Module,
    pub(super) entry_point: &'k mut Function<'k>,
    pub(super) marker: PhantomData<(
        H,
        G,
        R,
        S,
        X,
        D,
        C,
        T,
        N,
        E,
        I,
        A,
        ReportSpeciation,
        ReportDispersal,
    )>,
    // Launch configuration.
    pub(super) grid_size: GridSize,
    pub(super) block_size: BlockSize,
    pub(super) shared_mem_bytes: u32,
    // Stream the kernel will be enqueued on.
    pub(super) stream: &'s Stream,
}

impl<
        'k,
        H: Habitat + RustToCuda,
        G: PrimeableRng + RustToCuda,
        R: LineageReference<H> + DeviceCopy,
        S: LineageStore<H, R> + RustToCuda,
        X: EmigrationExit<H, G, R, S> + RustToCuda,
        D: DispersalSampler<H, G> + RustToCuda,
        C: CoalescenceSampler<H, R, S> + RustToCuda,
        T: TurnoverRate<H> + RustToCuda,
        N: SpeciationProbability<H> + RustToCuda,
        E: MinSpeciationTrackingEventSampler<H, G, R, S, X, D, C, T, N> + RustToCuda,
        I: ImmigrationEntry + RustToCuda,
        A: SingularActiveLineageSampler<H, G, R, S, X, D, C, T, N, E, I>
            + PeekableActiveLineageSampler<H, G, R, S, X, D, C, T, N, E, I>
            + RustToCuda,
        ReportSpeciation: Boolean,
        ReportDispersal: Boolean,
    >
    SimulationKernelWithDimensions<
        'k,
        H,
        G,
        R,
        S,
        X,
        D,
        C,
        T,
        N,
        E,
        I,
        A,
        ReportSpeciation,
        ReportDispersal,
    >
{
    /// Binds this dimensioned kernel to `stream`, moving all builder state
    /// into the stream-bound form (launch sizes are cloned, the rest moves).
    #[allow(clippy::type_complexity)]
    pub fn with_stream<'s>(
        self,
        stream: &'s Stream,
    ) -> SimulationKernelWithDimensionsStream<
        'k,
        's,
        H,
        G,
        R,
        S,
        X,
        D,
        C,
        T,
        N,
        E,
        I,
        A,
        ReportSpeciation,
        ReportDispersal,
    > {
        SimulationKernelWithDimensionsStream {
            compiler: self.compiler,
            ptx_jit: self.ptx_jit,
            module: self.module,
            entry_point: self.entry_point,
            marker: self.marker,
            grid_size: self.grid_size.clone(),
            block_size: self.block_size.clone(),
            shared_mem_bytes: self.shared_mem_bytes,
            stream,
        }
    }
}
use std::fs::File; use std::io::prelude::*; use std::path::PathBuf; use sloppycomp::compression::Algorithm; use sloppycomp::lz77; #[test] fn test_compression_size() { // test exists so we can monitor and commit changes in optimisations to the // compression - slow in debug mode, so run with `cargo test --release`. let mut input_file = PathBuf::from(env!("CARGO_MANIFEST_DIR")); input_file.push("benches/test-files/sloppy-compressor-bench-plaintext"); let mut file = File::open(input_file).unwrap(); let compressor = lz77::Lz77Compression {}; compressor.compress(file, "/tmp/sloppycomp-ratio-test"); let compressed_size = std::fs::metadata("/tmp/sloppycomp-ratio-test") .unwrap() .len(); assert_eq!(16143356, compressed_size); }
use crate::prelude::*;
use azure_core::prelude::*;
use http::StatusCode;
use std::convert::TryInto;

/// Builder for replacing a Cosmos DB reference attachment on a document.
///
/// The mandatory state (`attachment_client`) is captured at construction;
/// the remaining fields are optional request headers populated through the
/// `setters!` macro below.
#[derive(Debug, Clone)]
pub struct ReplaceReferenceAttachmentBuilder<'a, 'b> {
    attachment_client: &'a AttachmentClient,
    if_match_condition: Option<IfMatchCondition<'b>>,
    user_agent: Option<UserAgent<'b>>,
    activity_id: Option<ActivityId<'b>>,
    consistency_level: Option<ConsistencyLevel>,
}

impl<'a, 'b> ReplaceReferenceAttachmentBuilder<'a, 'b> {
    /// Creates a builder with all optional headers unset.
    pub(crate) fn new(attachment_client: &'a AttachmentClient) -> Self {
        Self {
            attachment_client,
            if_match_condition: None,
            user_agent: None,
            activity_id: None,
            consistency_level: None,
        }
    }
}

impl<'a, 'b> ReplaceReferenceAttachmentBuilder<'a, 'b> {
    // Generates chainable setter methods for the optional header fields.
    setters! {
        user_agent: &'b str => Some(UserAgent::new(user_agent)),
        activity_id: &'b str => Some(ActivityId::new(activity_id)),
        consistency_level: ConsistencyLevel => Some(consistency_level),
        if_match_condition: IfMatchCondition<'b> => Some(if_match_condition),
    }
}

// methods callable only when every mandatory field has been filled
impl<'a, 'b> ReplaceReferenceAttachmentBuilder<'a, 'b> {
    /// Issues the PUT request replacing the attachment: serializes the
    /// attachment reference (`id`, `contentType`, `media`) as the JSON body,
    /// attaches the configured optional headers and the partition key, and
    /// expects a `200 OK` response.
    pub async fn execute<M, C>(
        &self,
        media: M,
        content_type: C,
    ) -> crate::Result<crate::responses::ReplaceReferenceAttachmentResponse>
    where
        M: AsRef<str>,
        C: Into<ContentType<'b>>,
    {
        let mut req = self
            .attachment_client
            .prepare_request_with_attachment_name(http::Method::PUT);

        // add trait headers
        req = azure_core::headers::add_optional_header(&self.if_match_condition, req);
        req = azure_core::headers::add_optional_header(&self.user_agent, req);
        req = azure_core::headers::add_optional_header(&self.activity_id, req);
        req = azure_core::headers::add_optional_header(&self.consistency_level, req);

        // Partition key is inherited from the owning document.
        req = crate::cosmos_entity::add_as_partition_key_header_serialized(
            self.attachment_client
                .document_client()
                .partition_key_serialized(),
            req,
        );

        // create serialized request
        #[derive(Debug, Clone, Serialize)]
        struct _Request<'r> {
            pub id: &'r str,
            #[serde(rename = "contentType")]
            pub content_type: &'r str,
            pub media: &'r str,
        }

        let request = azure_core::to_json(&_Request {
            id: self.attachment_client.attachment_name(),
            content_type: content_type.into().as_str(),
            media: media.as_ref(),
        })?;

        req = req.header(http::header::CONTENT_TYPE, "application/json");
        req = req.header(http::header::CONTENT_LENGTH, request.len());
        let req = req.body(request)?;
        debug!("req == {:#?}", req);

        // Any status other than 200 OK surfaces as an error.
        Ok(self
            .attachment_client
            .http_client()
            .execute_request_check_status(req, StatusCode::OK)
            .await?
            .try_into()?)
    }
}
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Integration test for the setui client: spins up fake FIDL settings
// services in a nested environment and drives each client command against
// them, asserting the requests the client sends.

#![feature(async_await)]
#![allow(dead_code)]

use {
    failure::{Error, ResultExt},
    fidl_fuchsia_settings::*,
    fuchsia_async as fasync,
    fuchsia_component::server::ServiceFs,
    futures::prelude::*,
};

mod accessibility;
mod client;
mod display;
mod do_not_disturb;
mod intl;
mod setup;
mod system;

// One variant per FIDL service a test may serve out of its fake environment.
enum Services {
    SetUi(fidl_fuchsia_setui::SetUiServiceRequestStream),
    Accessibility(AccessibilityRequestStream),
    Display(DisplayRequestStream),
    DoNotDisturb(DoNotDisturbRequestStream),
    System(SystemRequestStream),
    Intl(IntlRequestStream),
    Setup(SetupRequestStream),
}

const ENV_NAME: &str = "setui_client_test_environment";

// Runs every validator in sequence; each creates its own fake service
// environment, so failures pinpoint the service under test.
#[fasync::run_singlethreaded]
async fn main() -> Result<(), Error> {
    println!("account mutation tests");
    validate_account_mutate(
        "autologinguest".to_string(),
        fidl_fuchsia_setui::LoginOverride::AutologinGuest,
    )
    .await?;
    validate_account_mutate("auth".to_string(), fidl_fuchsia_setui::LoginOverride::AuthProvider)
        .await?;
    validate_account_mutate("none".to_string(), fidl_fuchsia_setui::LoginOverride::None).await?;

    println!("accessibility service tests");
    println!(" client calls set accessibility watch");
    validate_accessibility(None, None, None, None, None).await?;

    println!(" client calls set audio_description");
    validate_accessibility(Some(true), None, None, None, None).await?;

    println!(" client calls set screen_reader");
    validate_accessibility(None, Some(true), None, None, None).await?;

    println!(" client calls set color_inversion");
    validate_accessibility(None, None, Some(true), None, None).await?;

    println!(" client calls set enable_magnification");
    validate_accessibility(None, None, None, Some(true), None).await?;

    println!(" client calls set color_correction");
    validate_accessibility(
        None,
        None,
        None,
        None,
        Some(fidl_fuchsia_settings::ColorBlindnessType::Protanomaly),
    )
    .await?;

    println!("display service tests");
    println!(" client calls display watch");
    validate_display(None, None).await?;

    println!(" client calls set brightness");
    validate_display(Some(0.5), None).await?;

    println!(" client calls set auto brightness");
    validate_display(None, Some(true)).await?;

    println!("do not disturb service tests");
    println!(" client calls dnd watch");
    validate_dnd(Some(false), Some(false)).await?;

    println!(" client calls set user initiated do not disturb");
    validate_dnd(Some(true), Some(false)).await?;

    println!(" client calls set night mode initiated do not disturb");
    validate_dnd(Some(false), Some(true)).await?;

    println!("intl service tests");
    println!(" client calls set temperature unit");
    validate_temperature_unit().await?;

    println!("system service tests");
    println!(" client calls set login mode");
    validate_system_override().await?;

    println!("setup service tests");
    println!(" client calls set config interfaces");
    validate_setup().await?;

    Ok(())
}

// Creates a service in an environment for a given setting type.
// Usage: create_service!(service_enum_name,
//          request_name => {code block},
//          request2_name => {code_block}
//          ... );
// Expands to a block that serves the given `Services` variant in a nested
// environment and evaluates to that environment; unmatched requests panic.
macro_rules! create_service {
    ($setting_type:path, $( $request:pat => $callback:block ),*) => {{
        let mut fs = ServiceFs::new();
        fs.add_fidl_service($setting_type);

        let env = fs.create_nested_environment(ENV_NAME)?;

        fasync::spawn(fs.for_each_concurrent(None, move |connection| {
            async move {
                #![allow(unreachable_patterns)]
                match connection {
                    $setting_type(stream) => {
                        stream
                            .err_into::<failure::Error>()
                            .try_for_each(|req| async move {
                                match req {
                                    $($request => $callback)*
                                    _ => panic!("Incorrect command to service"),
                                }
                                Ok(())
                            })
                            .unwrap_or_else(|e: failure::Error| panic!(
                                "error running setui server: {:?}", e
                            )).await;
                    }
                    _ => {
                        panic!("Unexpected service");
                    }
                }
            }
        }));
        env
    }};
}

// Verifies the system client command sends a login-override Set request.
async fn validate_system_override() -> Result<(), Error> {
    let env = create_service!(Services::System,
        SystemRequest::Set { settings, responder } => {
            if let Some(login_override) = settings.mode {
                assert_eq!(login_override, LoginOverride::AuthProvider);
                responder.send(&mut Ok(()))?;
            } else {
                panic!("Wrong call to set");
            }
        });

    let system_service =
        env.connect_to_service::<SystemMarker>().context("Failed to connect to intl service")?;

    system::command(system_service, Some("auth".to_string())).await?;

    Ok(())
}

// Verifies the intl client command sends a temperature-unit Set request.
async fn validate_temperature_unit() -> Result<(), Error> {
    let env = create_service!(Services::Intl,
        IntlRequest::Set { settings, responder } => {
            if let Some(temperature_unit) = settings.temperature_unit {
                assert_eq!(
                    temperature_unit,
                    fidl_fuchsia_intl::TemperatureUnit::Celsius
                );
                responder.send(&mut Ok(()))?;
            } else {
                panic!("Wrong call to set");
            }
        });

    let intl_service =
        env.connect_to_service::<IntlMarker>().context("Failed to connect to intl service")?;

    intl::command(intl_service, None, Some(fidl_fuchsia_intl::TemperatureUnit::Celsius), vec![])
        .await?;

    Ok(())
}

// Can only check one mutate option at once
async fn validate_display(
    expected_brightness: Option<f32>,
    expected_auto_brightness: Option<bool>,
) -> Result<(), Error> {
    let env = create_service!(
        Services::Display,
        DisplayRequest::Set { settings, responder, } => {
            if let (Some(brightness_value), Some(expected_brightness_value)) =
                (settings.brightness_value, expected_brightness) {
                assert_eq!(brightness_value, expected_brightness_value);
                responder.send(&mut Ok(()))?;
            } else if let (Some(auto_brightness), Some(expected_auto_brightness_value)) =
                (settings.auto_brightness, expected_auto_brightness) {
                assert_eq!(auto_brightness, expected_auto_brightness_value);
                responder.send(&mut Ok(()))?;
            } else {
                panic!("Unexpected call to set");
            }
        },
        DisplayRequest::Watch { responder } => {
            // Canned settings returned to the client's watch call.
            responder.send(&mut Ok(DisplaySettings {
                auto_brightness: Some(false),
                brightness_value: Some(0.5),
            }))?;
        }
    );

    let display_service = env
        .connect_to_service::<DisplayMarker>()
        .context("Failed to connect to display service")?;

    display::command(display_service, expected_brightness, expected_auto_brightness).await?;

    Ok(())
}

// Validates one accessibility setting at a time: exactly one `expected_*`
// should be Some, and the fake service asserts the matching field was set.
async fn validate_accessibility(
    expected_audio_description: Option<bool>,
    expected_screen_reader: Option<bool>,
    expected_color_inversion: Option<bool>,
    expected_enable_magnification: Option<bool>,
    expected_color_correction: Option<fidl_fuchsia_settings::ColorBlindnessType>,
) -> Result<(), Error> {
    let env = create_service!(
        Services::Accessibility,
        AccessibilityRequest::Set { settings, responder, } => {
            if let (Some(audio_description), Some(expected_audio_description_value)) =
                (settings.audio_description, expected_audio_description) {
                assert_eq!(audio_description, expected_audio_description_value);
                responder.send(&mut Ok(()))?;
            } else if let (Some(screen_reader), Some(expected_screen_reader_value)) =
                (settings.screen_reader, expected_screen_reader) {
                assert_eq!(screen_reader, expected_screen_reader_value);
                responder.send(&mut Ok(()))?;
            } else if let (Some(color_inversion), Some(expected_color_inversion_value)) =
                (settings.color_inversion, expected_color_inversion) {
                assert_eq!(color_inversion, expected_color_inversion_value);
                responder.send(&mut Ok(()))?;
            } else if let (Some(enable_magnification), Some(expected_enable_magnification_value)) =
                (settings.enable_magnification, expected_enable_magnification) {
                assert_eq!(enable_magnification, expected_enable_magnification_value);
                responder.send(&mut Ok(()))?;
            } else if let (Some(color_correction), Some(expected_color_correction_value)) =
                (settings.color_correction, expected_color_correction) {
                assert_eq!(color_correction, expected_color_correction_value);
                responder.send(&mut Ok(()))?;
            } else {
                panic!("Unexpected call to set");
            }
        },
        AccessibilityRequest::Watch { responder } => {
            // Canned settings returned to the client's watch call.
            responder.send(&mut Ok(AccessibilitySettings {
                audio_description: Some(false),
                screen_reader: Some(false),
                color_inversion: Some(false),
                enable_magnification: Some(false),
                color_correction: Some(fidl_fuchsia_settings::ColorBlindnessType::None),
                captions_settings: None,
            }))?;
        }
    );

    let accessibility_service = env
        .connect_to_service::<AccessibilityMarker>()
        .context("Failed to connect to accessibility service")?;

    accessibility::command(
        accessibility_service,
        expected_audio_description,
        expected_screen_reader,
        expected_color_inversion,
        expected_enable_magnification,
        expected_color_correction,
    )
    .await?;

    Ok(())
}

// Validates user- or night-mode-initiated do-not-disturb sets, plus watch.
async fn validate_dnd(
    expected_user_dnd: Option<bool>,
    expected_night_mode_dnd: Option<bool>,
) -> Result<(), Error> {
    let env = create_service!(Services::DoNotDisturb,
        DoNotDisturbRequest::Set { settings, responder } => {
            if let(Some(user_dnd), Some(expected_user_dnd)) =
                (settings.user_initiated_do_not_disturb, expected_user_dnd) {
                assert_eq!(user_dnd, expected_user_dnd);
                responder.send(&mut Ok(()))?;
            } else if let (Some(night_mode_dnd), Some(expected_night_mode_dnd)) =
                (settings.night_mode_initiated_do_not_disturb, expected_night_mode_dnd) {
                assert_eq!(night_mode_dnd, expected_night_mode_dnd);
                responder.send(&mut (Ok(())))?;
            } else {
                panic!("Unexpected call to set");
            }
        },
        DoNotDisturbRequest::Watch { responder } => {
            responder.send(DoNotDisturbSettings {
                user_initiated_do_not_disturb: Some(false),
                night_mode_initiated_do_not_disturb: Some(false),
            })?;
        }
    );

    let do_not_disturb_service = env
        .connect_to_service::<DoNotDisturbMarker>()
        .context("Failed to connect to do not disturb service")?;

    do_not_disturb::command(do_not_disturb_service, expected_user_dnd, expected_night_mode_dnd)
        .await?;

    Ok(())
}

// Helper building a SetupSettings with only the interface bitflags set.
fn create_setup_setting(interfaces: ConfigurationInterfaces) -> SetupSettings {
    let mut settings = SetupSettings::empty();
    settings.enabled_configuration_interfaces = Some(interfaces);

    settings
}

// Validates both the set and watch paths of the setup service, including the
// client's rendering of the watched settings.
async fn validate_setup() -> Result<(), Error> {
    let expected_set_interfaces = ConfigurationInterfaces::Ethernet;
    let expected_watch_interfaces =
        ConfigurationInterfaces::Wifi | ConfigurationInterfaces::Ethernet;
    let env = create_service!(
        Services::Setup,
        SetupRequest::Set { settings, responder, } => {
            if let Some(interfaces) = settings.enabled_configuration_interfaces {
                assert_eq!(interfaces, expected_set_interfaces);
                responder.send(&mut Ok(()))?;
            } else {
                panic!("Unexpected call to set");
            }
        },
        SetupRequest::Watch { responder } => {
            responder.send(create_setup_setting(expected_watch_interfaces))?;
        }
    );

    let setup_service =
        env.connect_to_service::<SetupMarker>().context("Failed to connect to setup service")?;

    setup::command(setup_service.clone(), Some(expected_set_interfaces)).await?;

    let watch_result = setup::command(setup_service.clone(), None).await?;

    assert_eq!(
        watch_result,
        setup::describe_setup_setting(&create_setup_setting(expected_watch_interfaces))
    );

    Ok(())
}

// Serves a legacy setui service and checks the client's account mutation
// carries the expected login override.
async fn validate_account_mutate(
    specified_type: String,
    expected_override: fidl_fuchsia_setui::LoginOverride,
) -> Result<(), Error> {
    let mut fs = ServiceFs::new();
    fs.add_fidl_service(Services::SetUi);

    let env = fs.create_nested_environment(ENV_NAME)?;

    fasync::spawn(fs.for_each_concurrent(None, move |req| {
        async move {
            match req {
                Services::SetUi(stream) => {
                    serve_check_login_override_mutate(stream, expected_override).await
                }
                _ => {}
            }
        }
    }));

    let setui = env
        .connect_to_service::<fidl_fuchsia_setui::SetUiServiceMarker>()
        .context("Failed to connect to setui service")?;

    client::mutate(setui, "login".to_string(), specified_type).await?;
    Ok(())
}

// Request handler asserting every Mutate call is an account login-override
// mutation matching `expected_override`, then acking with ReturnCode::Ok.
fn serve_check_login_override_mutate(
    stream: fidl_fuchsia_setui::SetUiServiceRequestStream,
    expected_override: fidl_fuchsia_setui::LoginOverride,
) -> impl Future<Output = ()> {
    stream
        .err_into::<failure::Error>()
        .try_for_each(move |req| {
            async move {
                match req {
                    fidl_fuchsia_setui::SetUiServiceRequest::Mutate {
                        setting_type,
                        mutation,
                        responder,
                    } => {
                        assert_eq!(setting_type, fidl_fuchsia_setui::SettingType::Account);

                        match mutation {
                            fidl_fuchsia_setui::Mutation::AccountMutationValue(
                                account_mutation,
                            ) => {
                                if let (Some(login_override), Some(operation)) =
                                    (account_mutation.login_override, account_mutation.operation)
                                {
                                    assert_eq!(login_override, expected_override);
                                    assert_eq!(
                                        operation,
                                        fidl_fuchsia_setui::AccountOperation::SetLoginOverride
                                    );
                                }
                            }
                            _ => {
                                panic!("unexpected data for account mutation");
                            }
                        }
                        responder
                            .send(&mut fidl_fuchsia_setui::MutationResponse {
                                return_code: fidl_fuchsia_setui::ReturnCode::Ok,
                            })
                            .context("sending response")?;
                    }
                    _ => {}
                };
                Ok(())
            }
        })
        .unwrap_or_else(|e: failure::Error| panic!("error running setui server: {:?}", e))
}
// NOTE(review): this file matches the svd2rust (~0.17) generated-code layout
// for the INTR_STAT register (read proxies, write proxies, bit masks).
// Do not hand-edit field offsets; regenerate from the SVD description instead.
// Some fields (RCBLL_DONE, RCB, LL, ENC) are read-only here — no W proxy.

#[doc = "Reader of register INTR_STAT"]
pub type R = crate::R<u32, super::INTR_STAT>;
#[doc = "Writer for register INTR_STAT"]
pub type W = crate::W<u32, super::INTR_STAT>;
#[doc = "Register INTR_STAT `reset()`'s with value 0"]
impl crate::ResetValue for super::INTR_STAT {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `DSM_ENTERED_INTR`"]
pub type DSM_ENTERED_INTR_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DSM_ENTERED_INTR`"]
pub struct DSM_ENTERED_INTR_W<'a> {
    w: &'a mut W,
}
impl<'a> DSM_ENTERED_INTR_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
#[doc = "Reader of field `DSM_EXITED_INTR`"]
pub type DSM_EXITED_INTR_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DSM_EXITED_INTR`"]
pub struct DSM_EXITED_INTR_W<'a> {
    w: &'a mut W,
}
impl<'a> DSM_EXITED_INTR_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
#[doc = "Reader of field `RCBLL_DONE_INTR`"]
pub type RCBLL_DONE_INTR_R = crate::R<bool, bool>;
#[doc = "Reader of field `BLERD_ACTIVE_INTR`"]
pub type BLERD_ACTIVE_INTR_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `BLERD_ACTIVE_INTR`"]
pub struct BLERD_ACTIVE_INTR_W<'a> {
    w: &'a mut W,
}
impl<'a> BLERD_ACTIVE_INTR_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
        self.w
    }
}
#[doc = "Reader of field `RCB_INTR`"]
pub type RCB_INTR_R = crate::R<bool, bool>;
#[doc = "Reader of field `LL_INTR`"]
pub type LL_INTR_R = crate::R<bool, bool>;
#[doc = "Reader of field `GPIO_INTR`"]
pub type GPIO_INTR_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `GPIO_INTR`"]
pub struct GPIO_INTR_W<'a> {
    w: &'a mut W,
}
impl<'a> GPIO_INTR_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);
        self.w
    }
}
#[doc = "Reader of field `EFUSE_INTR`"]
pub type EFUSE_INTR_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `EFUSE_INTR`"]
pub struct EFUSE_INTR_W<'a> {
    w: &'a mut W,
}
impl<'a> EFUSE_INTR_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);
        self.w
    }
}
#[doc = "Reader of field `XTAL_ON_INTR`"]
pub type XTAL_ON_INTR_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `XTAL_ON_INTR`"]
pub struct XTAL_ON_INTR_W<'a> {
    w: &'a mut W,
}
impl<'a> XTAL_ON_INTR_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);
        self.w
    }
}
#[doc = "Reader of field `ENC_INTR`"]
pub type ENC_INTR_R = crate::R<bool, bool>;
#[doc = "Reader of field `HVLDO_LV_DETECT_POS`"]
pub type HVLDO_LV_DETECT_POS_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `HVLDO_LV_DETECT_POS`"]
pub struct HVLDO_LV_DETECT_POS_W<'a> {
    w: &'a mut W,
}
impl<'a> HVLDO_LV_DETECT_POS_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);
        self.w
    }
}
#[doc = "Reader of field `HVLDO_LV_DETECT_NEG`"]
pub type HVLDO_LV_DETECT_NEG_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `HVLDO_LV_DETECT_NEG`"]
pub struct HVLDO_LV_DETECT_NEG_W<'a> {
    w: &'a mut W,
}
impl<'a> HVLDO_LV_DETECT_NEG_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);
        self.w
    }
}
impl R {
    #[doc = "Bit 0 - On a firmware request to LL to enter into state machine, working on LF clock, LL transitions into Deep Sleep Mode and asserts this interrupt. The interrupt can be cleared by writing one into this location."]
    #[inline(always)]
    pub fn dsm_entered_intr(&self) -> DSM_ENTERED_INTR_R {
        DSM_ENTERED_INTR_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - On a firmware request to LL to exit from Deep Sleep Mode, working on LF clock, LL transitions from Deep Sleep Mode and asserts this interrupt when the Deep Sleep clock gater is turned ON. The interrupt can be cleared by writing one into this location."]
    #[inline(always)]
    pub fn dsm_exited_intr(&self) -> DSM_EXITED_INTR_R {
        DSM_EXITED_INTR_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - RCB transaction Complete"]
    #[inline(always)]
    pub fn rcbll_done_intr(&self) -> RCBLL_DONE_INTR_R {
        RCBLL_DONE_INTR_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 3 - CYBLERD55 is in active mode. RF is active"]
    #[inline(always)]
    pub fn blerd_active_intr(&self) -> BLERD_ACTIVE_INTR_R {
        BLERD_ACTIVE_INTR_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bit 4 - RCB controller Interrupt - Refer to RCB_INTR_STAT register"]
    #[inline(always)]
    pub fn rcb_intr(&self) -> RCB_INTR_R {
        RCB_INTR_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 5 - LL controller interrupt - Refer to EVENT_INTR register"]
    #[inline(always)]
    pub fn ll_intr(&self) -> LL_INTR_R {
        LL_INTR_R::new(((self.bits >> 5) & 0x01) != 0)
    }
    #[doc = "Bit 6 - GPIO interrupt"]
    #[inline(always)]
    pub fn gpio_intr(&self) -> GPIO_INTR_R {
        GPIO_INTR_R::new(((self.bits >> 6) & 0x01) != 0)
    }
    #[doc = "Bit 7 - This bit when set by efuse controller logic when the efuse read/write is completed"]
    #[inline(always)]
    pub fn efuse_intr(&self) -> EFUSE_INTR_R {
        EFUSE_INTR_R::new(((self.bits >> 7) & 0x01) != 0)
    }
    #[doc = "Bit 8 - enabled crystal stable signal rising edge interrupt. The interrupt can be cleared by writing one into this location."]
    #[inline(always)]
    pub fn xtal_on_intr(&self) -> XTAL_ON_INTR_R {
        XTAL_ON_INTR_R::new(((self.bits >> 8) & 0x01) != 0)
    }
    #[doc = "Bit 9 - Encryption Interrupt Triggered"]
    #[inline(always)]
    pub fn enc_intr(&self) -> ENC_INTR_R {
        ENC_INTR_R::new(((self.bits >> 9) & 0x01) != 0)
    }
    #[doc = "Bit 10 - This interrupt is set on HVLDO LV Detector Rise edge. There is a 1cycle AHB clock glitch filter on the HVLDO LV Detector output"]
    #[inline(always)]
    pub fn hvldo_lv_detect_pos(&self) -> HVLDO_LV_DETECT_POS_R {
        HVLDO_LV_DETECT_POS_R::new(((self.bits >> 10) & 0x01) != 0)
    }
    #[doc = "Bit 11 - This interrupt is set on HVLDO LV Detector Fall edge. There is a 1cycle AHB clock glitch filter on the HVLDO LV Detector output"]
    #[inline(always)]
    pub fn hvldo_lv_detect_neg(&self) -> HVLDO_LV_DETECT_NEG_R {
        HVLDO_LV_DETECT_NEG_R::new(((self.bits >> 11) & 0x01) != 0)
    }
}
impl W {
    #[doc = "Bit 0 - On a firmware request to LL to enter into state machine, working on LF clock, LL transitions into Deep Sleep Mode and asserts this interrupt. The interrupt can be cleared by writing one into this location."]
    #[inline(always)]
    pub fn dsm_entered_intr(&mut self) -> DSM_ENTERED_INTR_W {
        DSM_ENTERED_INTR_W { w: self }
    }
    #[doc = "Bit 1 - On a firmware request to LL to exit from Deep Sleep Mode, working on LF clock, LL transitions from Deep Sleep Mode and asserts this interrupt when the Deep Sleep clock gater is turned ON. The interrupt can be cleared by writing one into this location."]
    #[inline(always)]
    pub fn dsm_exited_intr(&mut self) -> DSM_EXITED_INTR_W {
        DSM_EXITED_INTR_W { w: self }
    }
    #[doc = "Bit 3 - CYBLERD55 is in active mode. RF is active"]
    #[inline(always)]
    pub fn blerd_active_intr(&mut self) -> BLERD_ACTIVE_INTR_W {
        BLERD_ACTIVE_INTR_W { w: self }
    }
    #[doc = "Bit 6 - GPIO interrupt"]
    #[inline(always)]
    pub fn gpio_intr(&mut self) -> GPIO_INTR_W {
        GPIO_INTR_W { w: self }
    }
    #[doc = "Bit 7 - This bit when set by efuse controller logic when the efuse read/write is completed"]
    #[inline(always)]
    pub fn efuse_intr(&mut self) -> EFUSE_INTR_W {
        EFUSE_INTR_W { w: self }
    }
    #[doc = "Bit 8 - enabled crystal stable signal rising edge interrupt. The interrupt can be cleared by writing one into this location."]
    #[inline(always)]
    pub fn xtal_on_intr(&mut self) -> XTAL_ON_INTR_W {
        XTAL_ON_INTR_W { w: self }
    }
    #[doc = "Bit 10 - This interrupt is set on HVLDO LV Detector Rise edge. There is a 1cycle AHB clock glitch filter on the HVLDO LV Detector output"]
    #[inline(always)]
    pub fn hvldo_lv_detect_pos(&mut self) -> HVLDO_LV_DETECT_POS_W {
        HVLDO_LV_DETECT_POS_W { w: self }
    }
    #[doc = "Bit 11 - This interrupt is set on HVLDO LV Detector Fall edge. There is a 1cycle AHB clock glitch filter on the HVLDO LV Detector output"]
    #[inline(always)]
    pub fn hvldo_lv_detect_neg(&mut self) -> HVLDO_LV_DETECT_NEG_W {
        HVLDO_LV_DETECT_NEG_W { w: self }
    }
}
use crate::attrs::get_serde_attrs; use crate::docs::get_docs; use crate::meta::Glue; use std::collections::HashMap; use syn::Data; use syn::DataStruct; use syn::DeriveInput; use syn::Fields; pub fn process_struct( metadata: &mut Glue, input: DeriveInput, ) -> Result<(), String> { match &input.data { Data::Struct(DataStruct { fields: Fields::Named(fields), .. }) => { let fields = &fields.named; let name = &input.ident; let mut fmap = HashMap::new(); let mut typescript: Vec<String> = vec![]; let serde_attrs = get_serde_attrs(&input.attrs); for field in fields.iter() { let mut ident = field .ident .as_ref() .expect("Field without ident") .to_string(); match field.ty { syn::Type::Path(ref ty) => { let segment = &ty.path.segments.first().unwrap(); let ty = segment.ident.to_string(); fmap.insert(ident.clone(), ty); } _ => unimplemented!(), }; for attr in &serde_attrs { ident = attr.transform(&ident); } let doc_str = get_docs(&field.attrs); typescript.push(format!( "{} {}: {};", doc_str, ident, types_to_ts(&field.ty) )); } metadata.type_defs.insert(name.to_string(), fmap.clone()); let doc_str = get_docs(&input.attrs); let typescript = format!( "{}export type {} = {{\n {}\n}};", doc_str, name, typescript.join("\n") ); metadata.ts_types.insert(name.to_string(), typescript); Ok(()) } Data::Enum(syn::DataEnum { variants, .. 
}) => { let name = &input.ident; let mut typescript: Vec<String> = vec![]; for variant in variants { let mut variant_fields: Vec<String> = vec![]; let fields = &variant.fields; let serde_attrs = get_serde_attrs(&input.attrs); for field in fields { let mut ident = field .ident .as_ref() .expect("Field without ident") .to_string(); for attr in &serde_attrs { ident = attr.transform(&ident); } let doc_str = get_docs(&field.attrs); variant_fields.push(format!( "{} {}: {};", doc_str, ident, types_to_ts(&field.ty) )); } let mut ident = variant.ident.to_string(); for attr in &serde_attrs { ident = attr.transform(&ident); } let doc_str = get_docs(&variant.attrs); let variant_str = if variant_fields.len() > 0 { format!( "{} {{ {}: {{\n {}\n}} }}", doc_str, &ident, variant_fields.join("\n") ) } else { format!("{} \"{}\"", doc_str, &ident) }; typescript.push(variant_str); } // TODO: `type_defs` in favor of `ts_types` metadata.type_defs.insert(name.to_string(), HashMap::new()); let doc_str = get_docs(&input.attrs); let typescript = format!( "{}export type {} = {};", doc_str, name, typescript.join(" |\n") ); metadata.ts_types.insert(name.to_string(), typescript); Ok(()) } _ => unimplemented!(), } } fn types_to_ts(ty: &syn::Type) -> String { match ty { syn::Type::Array(_) => String::from("any"), syn::Type::Ptr(_) => String::from("any"), syn::Type::Path(ref ty) => { // std::Alloc::Vec => Vec let segment = &ty.path.segments.last().unwrap(); let ty = segment.ident.to_string(); let mut generics: Vec<String> = vec![]; let generic_params = &segment.arguments; match generic_params { &syn::PathArguments::AngleBracketed(ref args) => { for p in &args.args { let ty = match p { syn::GenericArgument::Type(ty) => types_to_ts(ty), _ => unimplemented!(), }; generics.push(ty); } } &syn::PathArguments::None => {} _ => unimplemented!(), }; match ty.as_ref() { "Option" => format!( "{} | undefined | null", rs_to_ts(generics.first().unwrap().as_ref()) ), _ => { if generics.len() > 0 { let root_ty = 
rs_to_ts(&ty); let generic_str = generics .iter() .map(|g| rs_to_ts(g)) .collect::<Vec<&str>>() .join(", "); format!("{}<{}>", root_ty, generic_str) } else { rs_to_ts(&ty).to_string() } } } } _ => unimplemented!(), } } fn rs_to_ts(ty: &str) -> &str { match ty { "i8" => "number", "i16" => "number", "i32" => "number", "i64" => "number", "u8" => "number", "u16" => "number", "u32" => "number", "u64" => "number", "usize" => "number", "bool" => "boolean", "String" => "string", "f32" => "number", "f64" => "number", "HashMap" => "Map", "Vec" => "Array", "HashSet" => "Array", "Value" => "any", a @ _ => a, } }
// Crate-internal UI modules; each submodule lives in its own file.
pub(crate) mod home;
pub(crate) mod clicky;
pub(crate) mod navbar;
extern crate graphics;
extern crate opengl_graphics;
extern crate piston_window;
extern crate piston;

use characters::Direction;
use characters::player::Player;
use locations::{Coordinates, get_by_id, Location};
use networking;
use opengl_graphics::{GlGraphics, Texture};
use piston::input::*;
use std::collections::HashMap;
use std::u16;

/// Top-level game state: the current map, the local player, and the sprite
/// atlas (sprite name -> texture) used for rendering.
pub struct Game<'a, 'b, 'c> {
    location: Location<'a>,
    player: Player<'b>,
    sprites: HashMap<&'c str, Texture>,
}

impl<'a, 'b, 'c> Game<'a, 'b, 'c> {
    /// Builds a `Game` for the given player, resolving the player's current
    /// location id into a full `Location`.
    ///
    /// Panics if `player.location_current` does not name a known location.
    pub fn new<'i, 'j, 'k>(player: Player<'j>, sprites: HashMap<&'k str, Texture>) -> Game<'i, 'j, 'k> {
        Game {
            location: get_by_id(player.location_current)
                .unwrap()
                .clone(),
            player,
            sprites,
        }
    }

    /// Processes one Piston event: keyboard movement (a key press first turns
    /// the player, a second press in the same direction moves them), portal
    /// checks, rendering, and a network position update.
    pub fn run(&mut self, e: Event, gl: &mut GlGraphics) {
        let p_x: u16 = self.player.coordinates.x;
        let p_y: u16 = self.player.coordinates.y;
        let loc = self.location.clone();

        if let Some(Button::Keyboard(key)) = e.press_args() {
            match key {
                Key::Up | Key::W => {
                    match self.player.direction {
                        Direction::North => {
                            // Moving north decreases y; at the minimum we
                            // cannot go further (guards u16 underflow below).
                            if p_y == u16::MIN {
                                return ()
                            }
                            let new_c = c![p_x, p_y - 1];
                            if loc.within_boundaries(new_c) {
                                self.player.move_north();
                            }
                        },
                        // Not yet facing north: just turn, don't move.
                        _ => self.player.face(Direction::North),
                    }
                },
                Key::Left | Key::A => {
                    match self.player.direction {
                        Direction::West => {
                            // Moving west decreases x; at the minimum we
                            // cannot go further (guards u16 underflow below).
                            if p_x == u16::MIN {
                                return ()
                            }
                            let new_c = c![p_x - 1, p_y];
                            if loc.within_boundaries(new_c) {
                                self.player.move_west();
                            }
                        },
                        _ => self.player.face(Direction::West),
                    }
                },
                Key::Right | Key::D => {
                    match self.player.direction {
                        Direction::East => {
                            // BUGFIX: this guard previously tested `p_y`, but
                            // moving east changes x — test `p_x` so the
                            // `p_x + 1` below cannot overflow.
                            if p_x == u16::MAX {
                                return ()
                            }
                            let new_c = c![p_x + 1, p_y];
                            if loc.within_boundaries(new_c) {
                                self.player.move_east();
                            }
                        },
                        _ => self.player.face(Direction::East),
                    }
                },
                Key::Down | Key::S => {
                    match self.player.direction {
                        Direction::South => {
                            // Moving south increases y; guard u16 overflow.
                            if p_y == u16::MAX {
                                return ()
                            }
                            let new_c = c![p_x, p_y + 1];
                            if loc.within_boundaries(new_c) {
                                self.player.move_south();
                            }
                        },
                        _ => self.player.face(Direction::South),
                    }
                },
                _ => {},
            };

            // Movement may have landed us on a portal tile.
            self.check_on_portal();
            println!("Pressed keyboard key '{:?}'", key);
        };

        e.text(|text| println!("{}", text));

        if let Some(args) = e.render_args() {
            gl.draw(args.viewport(), |c, gl| {
                // Tiles are 16x16 px; convert tile coordinates to pixels.
                let char_x: f64 = (self.player.coordinates.x * 16) as f64;
                let char_y: f64 = (self.player.coordinates.y * 16) as f64;
                let char_img = graphics::Image::new()
                    .rect(graphics::rectangle::square(char_x, char_y, 16.0));

                piston_window::clear([1.0; 4], gl);
                piston_window::image(self.sprites.get(self.location.clone().sprite).unwrap(), c.transform, gl);

                // Draw every remote player reported by the networking layer.
                // NOTE(review): OTHER_PLAYERS is a mutable static; this
                // unsynchronized read assumes the writer keeps entries
                // consistent — confirm against the networking module.
                for i in 0..255 {
                    let entry: [i32; 3] = unsafe { super::super::OTHER_PLAYERS[i] };
                    // A -1 coordinate marks the end of the populated entries.
                    if entry[0] == -1 || entry[1] == -1 {
                        break;
                    }
                    let new_x: f64 = ((entry[0] as f64) * 16f64) as f64;
                    let new_y: f64 = ((entry[1] as f64) * 16f64) as f64;
                    let char_img_new = graphics::Image::new()
                        .rect(graphics::rectangle::square(new_x, new_y, 16.0));
                    // entry[2] encodes the facing direction.
                    char_img_new.draw(self.sprites.get(match entry[2] {
                        0 => "characters.brendan.walk_south_still",
                        1 => "characters.brendan.walk_west_still",
                        2 => "characters.brendan.walk_north_still",
                        3 => "characters.brendan.walk_east_still",
                        _ => panic!(),
                    }).unwrap(), graphics::default_draw_state(), c.transform, gl);
                }

                // Draw the local player last, facing the current direction.
                char_img.draw(match self.player.direction {
                    Direction::West => self.sprites.get("characters.brendan.walk_west_still").unwrap(),
                    Direction::North => self.sprites.get("characters.brendan.walk_north_still").unwrap(),
                    Direction::East => self.sprites.get("characters.brendan.walk_east_still").unwrap(),
                    Direction::South => self.sprites.get("characters.brendan.walk_south_still").unwrap(),
                }, graphics::default_draw_state(), c.transform, gl);
            });
        }

        // Broadcast our (possibly updated) position to the server.
        networking::update(&self.player);
        e.update(|_| {});
    }

    /// If the player stands on a portal, teleports them to the portal's
    /// destination location and entry point.
    fn check_on_portal(&mut self) {
        let loc = self.location.clone();
        match loc.get_portal_by_coordinates(self.player.coordinates.clone()) {
            Some(portal) => {
                let new_location = get_by_id(portal.location_id).unwrap();
                let entry_coords = new_location.get_entry_coords_by_id(portal.entry_id);
                match entry_coords {
                    Some(entry) => {
                        println!("some");
                        self.player.set_coordinates(entry);
                        self.location = new_location.clone();
                    },
                    None => println!("none"),
                }
            },
            _ => {},
        }
    }
}
//! MetroRail client. Contains the client for fetching data from
//! the WMATA API and data structures returned from those endpoint calls.
pub mod responses;
mod tests;

use crate::{
    error::Error,
    rail::{
        traits::{NeedsLine, NeedsStation},
        urls::URLs,
    },
    requests::{Fetch, Request as WMATARequest},
    Line, RadiusAtLatLong, Station,
};
use std::str::FromStr;

/// MetroRail client. Used to fetch MetroRail-related information from the WMATA API.
pub struct Client {
    /// The WMATA API key to use for all requests routed through this client.
    pub key: String,
}

impl Fetch for Client {}

// Constructor
impl Client {
    /// Constructor for the MetroRail client.
    ///
    /// # Example
    /// ```
    /// use wmata::MetroRail;
    ///
    /// let client = MetroRail::new("9e38c3eab34c4e6c990828002828f5ed");
    /// ```
    pub fn new(api_key: &str) -> Self {
        Client {
            key: api_key.to_string(),
        }
    }
}

// Endpoints that take neither a Station nor a Line code.
impl Client {
    /// Basic information on all MetroRail lines.
    /// [WMATA Documentation](https://developer.wmata.com/docs/services/5476364f031f590f38092507/operations/5476364f031f5909e4fe330c)
    ///
    /// # Example
    /// ```
    /// use wmata::MetroRail;
    /// use tokio_test::block_on;
    ///
    /// let lines = block_on(async { MetroRail::new("9e38c3eab34c4e6c990828002828f5ed").lines().await });
    /// assert!(lines.is_ok());
    /// ```
    pub async fn lines(&self) -> Result<responses::Lines, Error> {
        self.fetch::<responses::Lines>(WMATARequest::new(&self.key, &URLs::Lines.to_string(), None))
            .await
    }

    /// A list of nearby station entrances based on latitude, longitude, and radius (meters).
    /// [WMATA Documentation](https://developer.wmata.com/docs/services/5476364f031f590f38092507/operations/5476364f031f5909e4fe330f?)
    ///
    /// # Example
    /// ```
    /// use wmata::{MetroRail, RadiusAtLatLong};
    /// use tokio_test::block_on;
    ///
    /// let client = MetroRail::new("9e38c3eab34c4e6c990828002828f5ed");
    /// let entrances = block_on(async { client.entrances(RadiusAtLatLong::new(1000, 38.8817596, -77.0166426)).await });
    /// assert!(entrances.is_ok());
    /// ```
    pub async fn entrances(
        &self,
        radius_at_lat_long: RadiusAtLatLong,
    ) -> Result<responses::StationEntrances, Error> {
        self.fetch(WMATARequest::new(
            &self.key,
            &URLs::Entrances.to_string(),
            Some(
                radius_at_lat_long
                    .to_query()
                    .iter()
                    .map(|(key, value)| (key.as_str(), value.clone()))
                    .collect(),
            ),
        ))
        .await
    }

    /// Uniquely identifiable trains in service and what track circuits they currently occupy.
    /// [WMATA Documentation](https://developer.wmata.com/docs/services/5763fa6ff91823096cac1057/operations/5763fb35f91823096cac1058)
    ///
    /// # Example
    /// ```
    /// use wmata::MetroRail;
    /// use tokio_test::block_on;
    ///
    /// let client = MetroRail::new("9e38c3eab34c4e6c990828002828f5ed");
    /// let positions = block_on(async { client.positions().await });
    /// assert!(positions.is_ok());
    /// ```
    pub async fn positions(&self) -> Result<responses::TrainPositions, Error> {
        self.fetch(WMATARequest::new(
            &self.key,
            &URLs::Positions.to_string(),
            Some(vec![("contentType", "json".to_string())]),
        ))
        .await
    }

    /// Returns an ordered list of mostly revenue (and some lead) track circuits, arranged by line and track number.
    /// [WMATA Documentation](https://developer.wmata.com/docs/services/5763fa6ff91823096cac1057/operations/57641afc031f59363c586dca?)
    ///
    /// # Example
    /// ```
    /// use wmata::MetroRail;
    /// use tokio_test::block_on;
    ///
    /// let client = MetroRail::new("9e38c3eab34c4e6c990828002828f5ed");
    /// let routes = block_on(async { client.routes().await });
    /// assert!(routes.is_ok());
    /// ```
    pub async fn routes(&self) -> Result<responses::StandardRoutes, Error> {
        self.fetch(WMATARequest::new(
            &self.key,
            &URLs::Routes.to_string(),
            Some(vec![("contentType", "json".to_string())]),
        ))
        .await
    }

    /// All track circuits including those on pocket tracks and crossovers. Each track circuit may include references to its right and left neighbors.
    /// [WMATA Documentation](https://developer.wmata.com/docs/services/5763fa6ff91823096cac1057/operations/57644238031f59363c586dcb?)
    ///
    /// # Example
    /// ```
    /// use wmata::MetroRail;
    /// use tokio_test::block_on;
    ///
    /// let client = MetroRail::new("9e38c3eab34c4e6c990828002828f5ed");
    /// let circuits = block_on(async { client.circuits().await });
    /// assert!(circuits.is_ok());
    /// ```
    pub async fn circuits(&self) -> Result<responses::TrackCircuits, Error> {
        self.fetch(WMATARequest::new(
            &self.key,
            &URLs::Circuits.to_string(),
            Some(vec![("contentType", "json".to_string())]),
        ))
        .await
    }
}

impl NeedsStation for Client {}

// Overwriting NeedsStation
impl Client {
    /// Distance, fare information, and estimated travel time between any two stations, including those on different lines.
    /// [WMATA Documentation](https://developer.wmata.com/docs/services/5476364f031f590f38092507/operations/5476364f031f5909e4fe3313?)
    ///
    /// # Example
    /// ```
    /// use wmata::{MetroRail, Station};
    /// use tokio_test::block_on;
    ///
    /// let client = MetroRail::new("9e38c3eab34c4e6c990828002828f5ed");
    /// let station_to_station = block_on(async { client.station_to_station(Some(Station::A01), Some(Station::A02)).await });
    /// assert!(station_to_station.is_ok());
    /// ```
    pub async fn station_to_station(
        &self,
        from_station: Option<Station>,
        to_destination_station: Option<Station>,
    ) -> Result<responses::StationToStationInfos, Error> {
        <Self as NeedsStation>::station_to_station(
            &self,
            from_station,
            to_destination_station,
            &self.key,
        )
        .await
    }

    /// List of reported elevator and escalator outages at a given station.
    /// [WMATA Documentation](https://developer.wmata.com/docs/services/54763641281d83086473f232/operations/54763641281d830c946a3d76?)
    ///
    /// # Examples
    /// ```
    /// use wmata::{MetroRail, Station};
    /// use tokio_test::block_on;
    ///
    /// let client = MetroRail::new("9e38c3eab34c4e6c990828002828f5ed");
    /// let incidents = block_on(async { client.elevator_and_escalator_incidents_at(Some(Station::A01)).await });
    /// assert!(incidents.is_ok());
    /// ```
    pub async fn elevator_and_escalator_incidents_at(
        &self,
        station: Option<Station>,
    ) -> Result<responses::ElevatorAndEscalatorIncidents, Error> {
        <Self as NeedsStation>::elevator_and_escalator_incidents_at(&self, station, &self.key).await
    }

    /// Reported rail incidents (significant disruptions and delays to normal service)
    /// [WMATA Documentation](https://developer.wmata.com/docs/services/54763641281d83086473f232/operations/54763641281d830c946a3d77)
    ///
    /// # Examples
    /// ```
    /// use wmata::{MetroRail, Station};
    /// use tokio_test::block_on;
    ///
    /// let client = MetroRail::new("9e38c3eab34c4e6c990828002828f5ed");
    /// let incidents = block_on(async { client.incidents_at(Some(Station::A01)).await });
    /// assert!(incidents.is_ok());
    /// ```
    pub async fn incidents_at(
        &self,
        station: Option<Station>,
    ) -> Result<responses::RailIncidents, Error> {
        <Self as NeedsStation>::incidents_at(&self, station, &self.key).await
    }

    /// Next train arrivals for the given station.
    /// [WMATA Documentation](https://developer.wmata.com/docs/services/547636a6f9182302184cda78/operations/547636a6f918230da855363f)
    ///
    /// # Examples
    /// ```
    /// use wmata::{MetroRail, Station};
    /// use tokio_test::block_on;
    ///
    /// let client = MetroRail::new("9e38c3eab34c4e6c990828002828f5ed");
    /// let next_trains = block_on(async { client.next_trains(Station::A01).await });
    /// assert!(next_trains.is_ok());
    /// ```
    pub async fn next_trains(
        &self,
        station_code: Station,
    ) -> Result<responses::RailPredictions, Error> {
        <Self as NeedsStation>::next_trains(&self, station_code, &self.key).await
    }

    /// Location and address information at the given station.
    /// [WMATA Documentation](https://developer.wmata.com/docs/services/5476364f031f590f38092507/operations/5476364f031f5909e4fe3310)
    ///
    /// # Examples
    /// ```
    /// use wmata::{MetroRail, Station};
    /// use tokio_test::block_on;
    ///
    /// let client = MetroRail::new("9e38c3eab34c4e6c990828002828f5ed");
    /// let station_information = block_on(async { client.station_information(Station::A01).await });
    /// assert!(station_information.is_ok());
    /// ```
    pub async fn station_information(
        &self,
        station_code: Station,
    ) -> Result<responses::StationInformation, Error> {
        <Self as NeedsStation>::station_information(&self, station_code, &self.key).await
    }

    /// Parking information for the given station.
    /// [WMATA Documentation](https://developer.wmata.com/docs/services/5476364f031f590f38092507/operations/5476364f031f5909e4fe330d)
    ///
    /// # Examples
    /// ```
    /// use wmata::{MetroRail, Station};
    /// use tokio_test::block_on;
    ///
    /// let client = MetroRail::new("9e38c3eab34c4e6c990828002828f5ed");
    /// let parking_information = block_on(async { client.parking_information(Station::A01).await });
    /// assert!(parking_information.is_ok());
    /// ```
    pub async fn parking_information(
        &self,
        station_code: Station,
    ) -> Result<responses::StationsParking, Error> {
        <Self as NeedsStation>::parking_information(&self, station_code, &self.key).await
    }

    /// Set of ordered stations and distances between two stations on the **same line**.
    /// [WMATA Documentation](https://developer.wmata.com/docs/services/5476364f031f590f38092507/operations/5476364f031f5909e4fe330e)
    ///
    /// # Examples
    /// ```
    /// use wmata::{MetroRail, Station};
    /// use tokio_test::block_on;
    ///
    /// let client = MetroRail::new("9e38c3eab34c4e6c990828002828f5ed");
    /// let path = block_on(async { client.path_from(Station::A01, Station::A02).await });
    /// assert!(path.is_ok());
    /// ```
    pub async fn path_from(
        &self,
        from_station: Station,
        to_station: Station,
    ) -> Result<responses::PathBetweenStations, Error> {
        <Self as NeedsStation>::path_from(&self, from_station, to_station, &self.key).await
    }

    /// Opening and scheduled first/last train times for the given station.
    /// [WMATA Documentation](https://developer.wmata.com/docs/services/5476364f031f590f38092507/operations/5476364f031f5909e4fe3312)
    ///
    /// # Examples
    /// ```
    /// use wmata::{MetroRail, Station};
    /// use tokio_test::block_on;
    ///
    /// let client = MetroRail::new("9e38c3eab34c4e6c990828002828f5ed");
    /// let timings = block_on(async { client.timings(Station::A01).await });
    /// assert!(timings.is_ok());
    /// ```
    pub async fn timings(&self, station_code: Station) -> Result<responses::StationTimings, Error> {
        <Self as NeedsStation>::timings(&self, station_code, &self.key).await
    }
}

impl NeedsLine for Client {}

// Overwriting NeedsLine methods
impl Client {
    /// Station location and address information for all stations on the given line.
    /// [WMATA Documentation](https://developer.wmata.com/docs/services/5476364f031f590f38092507/operations/5476364f031f5909e4fe330c)
    ///
    /// # Examples
    /// ```
    /// use wmata::{MetroRail, Line};
    /// use tokio_test::block_on;
    ///
    /// let client = MetroRail::new("9e38c3eab34c4e6c990828002828f5ed");
    /// let stations = block_on(async { client.stations_on(Some(Line::Red)).await });
    /// assert!(stations.is_ok());
    /// ```
    pub async fn stations_on(&self, line: Option<Line>) -> Result<responses::Stations, Error> {
        <Self as NeedsLine>::stations_on(&self, line, &self.key).await
    }
}

impl FromStr for Client {
    type Err = Error;

    /// Converts a string into a MetroRail Client.
    ///
    /// # Examples
    /// ```
    /// use wmata::MetroRail;
    ///
    /// let client: MetroRail = "9e38c3eab34c4e6c990828002828f5ed".parse().unwrap();
    ///
    /// assert_eq!(client.key, "9e38c3eab34c4e6c990828002828f5ed");
    /// ```
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Ok(Client { key: s.to_string() })
    }
}
use support::{decl_storage, decl_module, StorageValue, StorageMap, dispatch::Result, ensure, decl_event, traits::Currency}; use system::ensure_signed; use runtime_primitives::traits::{As, Hash, Zero}; use parity_codec::{Encode, Decode}; use rstd::cmp; #[derive(Encode, Decode, Default, Clone, PartialEq)] #[cfg_attr(feature = "std", derive(Debug))] pub struct FighterV1<Hash, Balance> { id: Hash, dna: Hash, price: Balance, strength: u64, } #[derive(Encode, Decode, Default, Clone, PartialEq)] #[cfg_attr(feature = "std", derive(Debug))] pub struct FighterV2<Hash, Balance> { id: Hash, dna: Hash, price: Balance, strength: u64, wins: u64, } pub trait Trait: balances::Trait { type Event: From<Event<Self>> + Into<<Self as system::Trait>::Event>; } type CurrentFighterVersion<T, U> = FighterV2<T, U>; type CurrentFightersStorage<T> = FightersV2<T>; decl_event!( pub enum Event<T> where <T as system::Trait>::AccountId, <T as system::Trait>::Hash, <T as balances::Trait>::Balance { Created(AccountId, Hash), PriceSet(AccountId, Hash, Balance), Transferred(AccountId, AccountId, Hash), Bought(AccountId, AccountId, Hash, Balance), VersionUpdated(u64), } ); decl_storage! { trait Store for Module<T: Trait> as FighterStorage { FightersV1: map T::Hash => FighterV1<T::Hash, T::Balance>; FightersV2 get(fighter): map T::Hash => FighterV2<T::Hash, T::Balance>; FighterOwner get(owner_of): map T::Hash => Option<T::AccountId>; AllFightersArray get(fighter_by_index): map u64 => T::Hash; AllFightersCount get(all_fighters_count): u64; AllFightersIndex: map T::Hash => u64; OwnedFightersArray get(fighter_of_owner_by_index): map (T::AccountId, u64) => T::Hash; OwnedFightersCount get(owned_fighter_count): map T::AccountId => u64; OwnedFightersIndex: map T::Hash => u64; Nonce: u64; Version get(version): u64; } } decl_module! 
{ pub struct Module<T: Trait> for enum Call where origin: T::Origin { fn deposit_event<T>() = default; fn on_initialize() { if Self::version() == 0 { for i in 0..Self::all_fighters_count() { let fighter_hash = Self::fighter_by_index(i); let fighter = <FightersV1<T>>::take(fighter_hash); let fighter_new = FighterV2 { id: fighter.id, dna: fighter.dna, price: fighter.price, strength: fighter.strength, wins: 0, }; <FightersV2<T>>::insert(fighter_hash, fighter_new); } <Version<T>>::put(2); Self::deposit_event(RawEvent::VersionUpdated(2)); } } fn create_fighter(origin) -> Result { let sender = ensure_signed(origin)?; let nonce = <Nonce<T>>::get(); let random_hash = (<system::Module<T>>::random_seed(), &sender, nonce) .using_encoded(<T as system::Trait>::Hashing::hash); let strength = random_hash.as_ref()[3]; let new_fighter = CurrentFighterVersion { id: random_hash, dna: random_hash, price: <T::Balance as As<u64>>::sa(0), strength: strength.into(), wins: 0, }; Self::mint(sender, random_hash, new_fighter)?; <Nonce<T>>::mutate(|n| *n += 1); Ok(()) } fn set_price(origin, fighter_id: T::Hash, new_price: T::Balance) -> Result { let sender = ensure_signed(origin)?; ensure!(<CurrentFightersStorage<T>>::exists(fighter_id), "This cat does not exist"); let owner = Self::owner_of(fighter_id).ok_or("No owner for this Fighter")?; ensure!(owner == sender, "You do not own this cat"); let mut fighter = Self::fighter(fighter_id); fighter.price = new_price; <CurrentFightersStorage<T>>::insert(fighter_id, fighter); Self::deposit_event(RawEvent::PriceSet(sender, fighter_id, new_price)); Ok(()) } fn transfer(origin, to: T::AccountId, fighter_id: T::Hash) -> Result { let sender = ensure_signed(origin)?; let owner = Self::owner_of(fighter_id).ok_or("No owner for this Fighter")?; ensure!(owner == sender, "You do not own this Fighter"); Self::transfer_from(sender, to, fighter_id)?; Ok(()) } fn buy_fighter(origin, fighter_id: T::Hash, max_price: T::Balance) -> Result { let sender = 
ensure_signed(origin)?; ensure!(<CurrentFightersStorage<T>>::exists(fighter_id), "This cat does not exist"); let owner = Self::owner_of(fighter_id).ok_or("No owner for this Fighter")?; ensure!(owner != sender, "You can't buy your own cat"); let mut fighter = Self::fighter(fighter_id); let fighter_price = fighter.price; ensure!(!fighter_price.is_zero(), "The cat you want to buy is not for sale"); ensure!(fighter_price <= max_price, "The cat you want to buy costs more than your max price"); <balances::Module<T> as Currency<_>>::transfer(&sender, &owner, fighter_price)?; Self::transfer_from(owner.clone(), sender.clone(), fighter_id) .expect("`owner` is shown to own the Fighter; \ `owner` must have greater than 0 Fighters, so transfer cannot cause underflow; \ `all_Fighter_count` shares the same type as `owned_fighter_count` \ and minting ensure there won't ever be more than `max()` Fighters, \ which means transfer cannot cause an overflow; \ qed"); fighter.price = <T::Balance as As<u64>>::sa(0); <CurrentFightersStorage<T>>::insert(fighter_id, fighter); Self::deposit_event(RawEvent::Bought(sender, owner, fighter_id, fighter_price)); Ok(()) } fn fight(origin, fighter_id_1: T::Hash, fighter_id_2: T::Hash) -> Result { let sender = ensure_signed(origin)?; ensure!(<CurrentFightersStorage<T>>::exists(fighter_id_1), "Fighter must exist"); ensure!(<CurrentFightersStorage<T>>::exists(fighter_id_2), "Fighter must exist"); let nonce = <Nonce<T>>::get(); let random_hash = (<system::Module<T>>::random_seed(), &sender, nonce) .using_encoded(<T as system::Trait>::Hashing::hash); let random_int: u64 = random_hash.as_ref()[8].into(); let fighter_1 = Self::fighter(fighter_id_1); let fighter_2 = Self::fighter(fighter_id_2); let fighter_power_1 = fighter_2.strength * random_int % fighter_1.strength; let fighter_power_2 = fighter_1.strength * random_int % fighter_2.strength; let winner = if fighter_power_1 > fighter_power_2 { fighter_1 } else { fighter_2 }; 
<CurrentFightersStorage<T>>::mutate(winner.id, |f| f.wins +=1); <Nonce<T>>::mutate(|n| *n += 1); Ok(()) } } } impl<T: Trait> Module<T> { fn mint(to: T::AccountId, fighter_id: T::Hash, new_fighter: CurrentFighterVersion<T::Hash, T::Balance>) -> Result { ensure!(!<FighterOwner<T>>::exists(fighter_id), "Fighter already exists"); let owned_fighter_count = Self::owned_fighter_count(&to); let new_owned_fighter_count = owned_fighter_count.checked_add(1) .ok_or("Overflow adding a new Fighter to account balance")?; let all_fighters_count = Self::all_fighters_count(); let new_all_fighters_count = all_fighters_count.checked_add(1) .ok_or("Overflow adding a new Fighter to total supply")?; <CurrentFightersStorage<T>>::insert(fighter_id, new_fighter); <FighterOwner<T>>::insert(fighter_id, &to); <AllFightersArray<T>>::insert(all_fighters_count, fighter_id); <AllFightersCount<T>>::put(new_all_fighters_count); <AllFightersIndex<T>>::insert(fighter_id, all_fighters_count); <OwnedFightersArray<T>>::insert((to.clone(), owned_fighter_count), fighter_id); <OwnedFightersCount<T>>::insert(&to, new_owned_fighter_count); <OwnedFightersIndex<T>>::insert(fighter_id, owned_fighter_count); Self::deposit_event(RawEvent::Created(to, fighter_id)); Ok(()) } fn transfer_from(from: T::AccountId, to: T::AccountId, fighter_id: T::Hash) -> Result { let owner = Self::owner_of(fighter_id).ok_or("No owner for this Fighter")?; ensure!(owner == from, "'from' account does not own this Fighter"); let owned_fighter_count_from = Self::owned_fighter_count(&from); let owned_fighter_count_to = Self::owned_fighter_count(&to); let new_owned_fighter_count_to = owned_fighter_count_to.checked_add(1) .ok_or("Transfer causes overflow of 'to' Fighter balance")?; let new_owned_fighter_count_from = owned_fighter_count_from.checked_sub(1) .ok_or("Transfer causes underflow of 'from' Fighter balance")?; // "Swap and pop" let fighter_index = <OwnedFightersIndex<T>>::get(fighter_id); if fighter_index != 
new_owned_fighter_count_from { let last_fighter_id = <OwnedFightersArray<T>>::get((from.clone(), new_owned_fighter_count_from)); <OwnedFightersArray<T>>::insert((from.clone(), fighter_index), last_fighter_id); <OwnedFightersIndex<T>>::insert(last_fighter_id, fighter_index); } <FighterOwner<T>>::insert(&fighter_id, &to); <OwnedFightersIndex<T>>::insert(fighter_id, owned_fighter_count_to); <OwnedFightersArray<T>>::remove((from.clone(), new_owned_fighter_count_from)); <OwnedFightersArray<T>>::insert((to.clone(), owned_fighter_count_to), fighter_id); <OwnedFightersCount<T>>::insert(&from, new_owned_fighter_count_from); <OwnedFightersCount<T>>::insert(&to, new_owned_fighter_count_to); Self::deposit_event(RawEvent::Transferred(from, to, fighter_id)); Ok(()) } }
use serde::Serialize;

use common::result::Result;

use crate::application::dtos::CategoryDto;
use crate::domain::category::CategoryRepository;

/// Serializable payload listing every known category.
#[derive(Serialize)]
pub struct GetAllResponse {
    pub categories: Vec<CategoryDto>,
}

/// Use case: fetch all categories from the repository and map them to DTOs.
pub struct GetAll<'a> {
    category_repo: &'a dyn CategoryRepository,
}

impl<'a> GetAll<'a> {
    /// Wires the use case to the repository it reads from.
    pub fn new(category_repo: &'a dyn CategoryRepository) -> Self {
        GetAll { category_repo }
    }

    /// Loads every category and returns it as a DTO list.
    pub async fn exec(&self) -> Result<GetAllResponse> {
        let found = self.category_repo.find_all_categories().await?;
        let categories = found.iter().map(CategoryDto::from).collect();
        Ok(GetAllResponse { categories })
    }
}
/// Contains the current board state:
/// all the pieces, empty tiles etc.
#[derive(Copy, Clone)]
struct Board {}

/// Represents a move on the board. It can be analysed.
#[derive(Ord, PartialOrd, Eq, PartialEq, Clone, Copy)]
struct Move {}

impl Board {
    /// Generates all possible moves on this board.
    /// Currently a stub that returns no moves.
    fn generate_moves(&self) -> Vec<Move> {
        vec![]
    }

    /// Applies a move to the board state and returns the modified board.
    /// Currently a stub that returns an empty board.
    fn apply_move(&mut self, _m: &Move) -> Board {
        Board {}
    }
}

/// A potential move that could be made, together with its cost; we just
/// don't know if it is the best one yet.
#[derive(Ord, PartialOrd, Eq, PartialEq, Clone, Copy)]
struct PossibleMove {
    m: Move,
    c: i32,
}

/// Recursively searches `r` plies deep and returns the lowest-cost move.
///
/// `c` is the cost accumulated so far and `o` the move that led to `board`
/// (None only at the root). If no moves are available, a sentinel with cost
/// `i32::MIN` is returned instead of panicking (the original indexed an
/// empty vector here, which aborted the whole search at the root).
fn analyse(board: Board, r: i32, c: i32, o: Option<&Move>) -> PossibleMove {
    // Bottom of the tree: report the move that got us here with its cost.
    // At the root there may be no pending move yet, so don't unwrap blindly.
    if r == 0 {
        return PossibleMove {
            m: o.copied().unwrap_or(Move {}),
            c,
        };
    }

    let mut possible_moves: Vec<PossibleMove> = Vec::new();
    for m in board.generate_moves().iter() {
        let new_c = c; // TODO: apply a cost function based on move `m`.

        // The first level of the search has no pending move to apply yet.
        // `Board` is `Copy`, so this is a fresh scratch copy, no clone needed.
        let mut new_b = board;
        if let Some(pot_m) = o {
            new_b = new_b.apply_move(pot_m);
        }
        possible_moves.push(analyse(new_b.apply_move(m), r - 1, new_c, Some(m)));
    }

    // Lowest cost wins (the original sorted ascending and took index 0).
    // With no legal moves, return a "worst possible" sentinel rather than
    // panicking on an out-of-bounds index.
    possible_moves
        .into_iter()
        .min()
        .unwrap_or(PossibleMove { m: Move {}, c: i32::MIN })
}

fn main() {
    let mut my_board = Board {};
    let search_depth = 3;
    let my_move: PossibleMove = analyse(my_board, search_depth, 0, None);
    my_board.apply_move(&my_move.m);
}
//! Franks server handson
//!
//! A simple server that accepts connections, writes "hello world\n", and closes
//! the connection.
//!
//! Start this application and in another terminal run:
//!
//!     telnet localhost 6142
//!
#![allow(warnings)]
#![allow(unused_variables)]

extern crate tokio;
extern crate futures;

use tokio::io;
use tokio::net::TcpListener;
use tokio::net::TcpStream;
use tokio::prelude::*;
use futures::sync::mpsc;
use futures::sync::mpsc::Sender;
use futures::sync::mpsc::Receiver;
use std::time::{Duration, Instant};

/// A message recorded on a port, tagged with where and when it originated.
#[derive(Debug)]
struct RawMesg {
    portname: String,             // name of the PortStream that produced this
    timestamp: Instant,           // when the message was recorded
    origin: std::net::SocketAddr, // peer address the message relates to
    mesg: Vec<u8>,                // raw payload bytes
}

/// Events sent from per-connection tasks to the central receiver task.
#[derive(Debug)]
enum PortEvent {
    RawMesg(RawMesg),
}

/// A named TCP connection plus a channel for reporting events about it.
struct PortStream {
    portname: String,
    socket: TcpStream,
    tx : Sender<PortEvent>,
}

/// Blocking `Write` is forwarded straight to the underlying socket, so
/// `PortStream` can be used with `io::write_all`.
impl io::Write for PortStream {
    fn flush(&mut self) -> std::io::Result<()> { self.socket.flush() }

    fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> { self.socket.write(buf) }
}

/// Async shutdown is forwarded to the socket via its `&TcpStream` impl.
impl io::AsyncWrite for PortStream {
    fn shutdown(&mut self) -> Poll<(), io::Error> {
        <&TcpStream>::shutdown(&mut &(self.socket))
    }
}

pub fn main() {
    let mut runtime = tokio::runtime::Runtime::new().unwrap();
    let addr = "127.0.0.1:6142".parse().unwrap();
    // Channel on which every connection task reports PortEvents back to the
    // single logging task (`f2` below).
    let (tx, rx) = mpsc::channel(1);

    // Bind a TCP listener to the socket address.
    //
    // Note that this is the Tokio TcpListener, which is fully async.
    let listener = TcpListener::bind(&addr).unwrap();

    // The server task asynchronously iterates over and processes each
    // incoming connection.
    let server = listener.incoming().for_each(move |socket| {
        let peer = socket.peer_addr().unwrap();
        println!("accepted socket; addr={:?}", peer);

        // Each connection task needs its own Sender clone.
        let tx = tx.clone();

        let portname = "Server1".to_string();
        let port = PortStream{portname, socket, tx,};

        // Greet the peer, then report a RawMesg event on the channel.
        let connection = io::write_all(port, "hello world\n")
            .then( move | res | {
                let (port, buf) = res.ok().unwrap();
                port.tx.send( PortEvent::RawMesg( RawMesg{portname: port.portname, timestamp: Instant::now(), origin: peer, mesg: vec!{0,1,2} } ) )
            } )
            .then( |res| {
                println!("wrote message; success={:?}",res.is_ok());
                Ok(())
            });

        // Spawn a new task that processes the socket:
        tokio::spawn(connection);

        Ok(())
    })
    .map_err(|err| {
        // All tasks must have an `Error` type of `()`. This forces error
        // handling and helps avoid silencing failures.
        //
        // In our example, we are only going to log the error to STDOUT.
        println!("accept error = {:?}", err);
    });

    // Logging task: prints every event received from the connection tasks.
    let f2 = rx.for_each(|event| {
        println!("Message = {:?}", event);
        // The stream will stop on `Err`, so we need to return `Ok`.
        Ok(())
    });

    println!("server running on localhost:6142");
    runtime.spawn(server);
    runtime.spawn(f2);
    // Start the Tokio runtime and block until all spawned tasks finish.
    runtime.shutdown_on_idle().wait().unwrap();
}
// svd2rust-style generated accessors for the GIC distributor register
// GICD_IGROUPR6 (32 interrupt-group bits, one per interrupt ID).
// Generated code: regenerate from the SVD description rather than editing.
#[doc = "Register `GICD_IGROUPR6` reader"]
pub type R = crate::R<GICD_IGROUPR6_SPEC>;
#[doc = "Register `GICD_IGROUPR6` writer"]
pub type W = crate::W<GICD_IGROUPR6_SPEC>;
#[doc = "Field `IGROUPR6` reader - IGROUPR6"]
pub type IGROUPR6_R = crate::FieldReader<u32>;
#[doc = "Field `IGROUPR6` writer - IGROUPR6"]
pub type IGROUPR6_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 32, O, u32>;
impl R {
    #[doc = "Bits 0:31 - IGROUPR6"]
    #[inline(always)]
    pub fn igroupr6(&self) -> IGROUPR6_R {
        // The single field spans the whole register, so the raw bits are the value.
        IGROUPR6_R::new(self.bits)
    }
}
impl W {
    #[doc = "Bits 0:31 - IGROUPR6"]
    #[inline(always)]
    #[must_use]
    pub fn igroupr6(&mut self) -> IGROUPR6_W<GICD_IGROUPR6_SPEC, 0> {
        IGROUPR6_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "For interrupts ID\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`gicd_igroupr6::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`gicd_igroupr6::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct GICD_IGROUPR6_SPEC;
impl crate::RegisterSpec for GICD_IGROUPR6_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`gicd_igroupr6::R`](R) reader structure"]
impl crate::Readable for GICD_IGROUPR6_SPEC {}
#[doc = "`write(|w| ..)` method takes [`gicd_igroupr6::W`](W) writer structure"]
impl crate::Writable for GICD_IGROUPR6_SPEC {
    // No write-1-to-clear / write-0-to-clear semantics on this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets GICD_IGROUPR6 to value 0"]
impl crate::Resettable for GICD_IGROUPR6_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
extern crate chrono;
#[macro_use]
extern crate diesel;
#[macro_use]
extern crate failure;
extern crate futures;
#[macro_use]
extern crate hyper;
extern crate reqwest;
extern crate serde;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
#[macro_use]
extern crate slog;
extern crate tokio;
extern crate tokio_core;
extern crate tokio_retry;
extern crate tokio_threadpool;
extern crate tokio_timer;
extern crate url;

mod github;
mod review_handler;
mod schema;
mod todoist_client;

use failure::Error;
use futures::future::{self, Either};
use futures::prelude::*;
use std::env;
use tokio_core::reactor::Core as TokioCore;
use url::Url;

use github::GithubClient;
use review_handler::ReviewHandler;
use todoist_client::TodoistClient;

/// Shared application configuration handed to each client constructor.
pub struct Config<'a> {
    pub logger: slog::Logger,
    pub core: &'a TokioCore, // reactor the clients register I/O with
    pub todoist_base: Url,   // base URL for the Todoist REST API
    pub github_base: Url,    // base URL for the GitHub REST API
    pub database_url: String,
}

impl<'a> Config<'a> {
    /// Builds a Config with the production API endpoints.
    ///
    /// # Panics
    ///
    /// Panics if the `DATABASE_URL` environment variable is unset or either
    /// hard-coded base URL fails to parse (the latter cannot happen for
    /// these literals).
    pub fn defaults(logger: slog::Logger, core: &TokioCore) -> Config {
        Config {
            logger,
            core,
            todoist_base: Url::parse("https://beta.todoist.com").unwrap(),
            github_base: Url::parse("https://api.github.com").unwrap(),
            database_url: env::var("DATABASE_URL").expect("DATABASE_URL must be set"),
        }
    }
}

/// Constructs the application's main future.
///
/// Client/handler construction happens eagerly; any construction error is
/// returned as an immediately-resolved error future (the `Either::A` arm)
/// rather than a panic, so the caller decides how to surface it.
pub fn run(config: Config) -> impl Future<Item = (), Error = Error> {
    // Local shorthand: unwrap a construction Result or early-return the
    // error as a completed future. A macro (not a fn) because it `return`s
    // from `run` itself.
    macro_rules! early_error {
        ($e:expr) => {
            match $e {
                Ok(res) => res,
                Err(err) => return Either::A(future::err(Error::from(err))),
            }
        };
    }

    let main_future = build_main_future(State {
        github_client: early_error!(github::new_client(&config)),
        todoist_client: early_error!(TodoistClient::new(&config)),
        handler: early_error!(review_handler::new(&config)),
    });

    Either::B(main_future)
}

/// The fully-constructed clients the main loop needs; bundled so
/// `build_main_future` takes one argument.
struct State {
    github_client: GithubClient,
    todoist_client: TodoistClient,
    handler: ReviewHandler,
}

/// For every pull request produced by the GitHub stream: skip it if closed,
/// otherwise record it via the handler and, when the handler yields a PR,
/// create a Todoist task for it.
fn build_main_future(state: State) -> impl Future<Item = (), Error = Error> {
    let State {
        github_client,
        todoist_client,
        handler,
    } = state;
    let stream = github_client.into_pull_request_stream();
    stream.for_each(move |(pull_request, logger)| {
        // Tag every log record for this item with the PR number.
        let record_logger = logger.new(o!("pull_request" => pull_request.number));
        if !pull_request.is_open() {
            debug!(record_logger, "Skipping closed pull request");
            return Either::A(future::ok(()));
        }
        // Clone so the inner `and_then` closure can own a client.
        let todoist_client = todoist_client.clone();
        let result = handler
            .record_in_task(pull_request, record_logger)
            .and_then(move |maybe_pr| match maybe_pr {
                Some(pr) => Either::A(todoist_client.create_task_for_pr(&pr)),
                None => Either::B(future::ok(())),
            });
        Either::B(result)
    })
}
use super::gl;
use super::gl::types::*;
use super::cgmath::prelude::*;
use super::cgmath::Matrix4;
use super::glutin::{GlContext, GlWindow};
use std::ffi::{CStr, CString};
use std::mem;
use std::os::raw::c_void;
use std::ptr;

// Shader sources compiled into the binary at build time.
const VERTEX_SHADER_SOURCE: &[u8] = include_bytes!("./shaders/cell.vs");
const FRAGMENT_SHADER_SOURCE: &[u8] = include_bytes!("./shaders/cell.fs");

/// Owns the GL objects (linked shader program and VAO for a full-screen
/// quad) used to draw colored squares.
pub struct GraphicsContext {
    shader_program: GLuint, // 0 until init() succeeds
    vao: GLuint,            // 0 until init() succeeds
}

impl GraphicsContext {
    /// Creates an empty context; `init` must be called before drawing.
    pub fn new() -> Self {
        GraphicsContext {
            shader_program: 0,
            vao: 0,
        }
    }

    /// Loads GL symbols from the window, compiles and links the cell
    /// shaders, and uploads a quad (VBO + EBO recorded in `self.vao`).
    ///
    /// Returns `Err` carrying the GL info log when shader compilation or
    /// program linking fails.
    pub fn init(&mut self, gl_window: &GlWindow) -> Result<(), String> {
        gl::load_with(|symbol| gl_window.get_proc_address(symbol) as *const _);

        unsafe {
            // NOTE: these will be used a number of times.
            let mut success = gl::FALSE as GLint;
            let mut info_log = Vec::with_capacity(512);

            let vertex_shader = gl::CreateShader(gl::VERTEX_SHADER);
            let c_str_vert = CString::new(VERTEX_SHADER_SOURCE).unwrap();
            gl::ShaderSource(vertex_shader, 1, &c_str_vert.as_ptr(), ptr::null());
            gl::CompileShader(vertex_shader);
            // NOTE(review): set_len over uninitialized capacity is technically
            // UB; GL writes the log before it is read, but MaybeUninit would
            // be the sound approach — TODO revisit. The -1 leaves room for a
            // trailing NUL.
            info_log.set_len(512 - 1);
            gl::GetShaderiv(vertex_shader, gl::COMPILE_STATUS, &mut success);
            if success != gl::TRUE as GLint {
                gl::GetShaderInfoLog(
                    vertex_shader,
                    512,
                    ptr::null_mut(),
                    info_log.as_mut_ptr() as *mut GLchar,
                );
                return Err(format!(
                    "vertex shader compilation failed:\n{}",
                    CStr::from_ptr(info_log.as_ptr()).to_string_lossy(),
                ));
            }

            let fragment_shader = gl::CreateShader(gl::FRAGMENT_SHADER);
            let c_str_frag = CString::new(FRAGMENT_SHADER_SOURCE).unwrap();
            gl::ShaderSource(fragment_shader, 1, &c_str_frag.as_ptr(), ptr::null());
            gl::CompileShader(fragment_shader);
            gl::GetShaderiv(fragment_shader, gl::COMPILE_STATUS, &mut success);
            if success != gl::TRUE as GLint {
                gl::GetShaderInfoLog(
                    fragment_shader,
                    512,
                    ptr::null_mut(),
                    info_log.as_mut_ptr() as *mut GLchar,
                );
                return Err(format!(
                    "fragment shader compilation failed:\n{}",
                    CStr::from_ptr(info_log.as_ptr()).to_string_lossy(),
                ));
            }

            self.shader_program = gl::CreateProgram();
            gl::AttachShader(self.shader_program, vertex_shader);
            gl::AttachShader(self.shader_program, fragment_shader);
            gl::LinkProgram(self.shader_program);
            gl::GetProgramiv(self.shader_program, gl::LINK_STATUS, &mut success);
            if success != gl::TRUE as GLint {
                gl::GetProgramInfoLog(
                    self.shader_program,
                    512,
                    ptr::null_mut(),
                    info_log.as_mut_ptr() as *mut GLchar,
                );
                return Err(format!(
                    "shader program compilation failed:\n{}",
                    CStr::from_ptr(info_log.as_ptr()).to_string_lossy(),
                ));
            }
            // Shaders are owned by the linked program now; delete the objects.
            gl::DeleteShader(vertex_shader);
            gl::DeleteShader(fragment_shader);

            // Using vertices and indices a square is drawn that covers the entire screen.
            // NOTE(review): the corner labels below look flipped relative to
            // GL NDC, where (-1,-1) is bottom-left — TODO confirm intent.
            let vertices: [f32; 12] = [
                // top left
                -1.0, -1.0, 0.0,
                // top right
                1.0, -1.0, 0.0,
                // bottom right
                1.0, 1.0, 0.0,
                // bottom left
                -1.0, 1.0, 0.0,
            ];
            let indices = [
                // first triangle
                0, 1, 2,
                // second triangle
                2, 3, 0,
            ];

            let (mut vbo, mut ebo) = (0, 0);
            gl::GenVertexArrays(1, &mut self.vao);
            gl::GenBuffers(1, &mut vbo);
            gl::GenBuffers(1, &mut ebo);
            gl::BindVertexArray(self.vao);

            gl::BindBuffer(gl::ARRAY_BUFFER, vbo);
            gl::BufferData(
                gl::ARRAY_BUFFER,
                (vertices.len() * mem::size_of::<GLfloat>()) as GLsizeiptr,
                &vertices[0] as *const f32 as *const c_void,
                gl::STATIC_DRAW,
            );

            gl::BindBuffer(gl::ELEMENT_ARRAY_BUFFER, ebo);
            // NOTE(review): sizes the index buffer with size_of::<GLfloat>()
            // although the indices are i32; both are 4 bytes so this is
            // benign, but size_of::<GLint>() would state the intent.
            gl::BufferData(
                gl::ELEMENT_ARRAY_BUFFER,
                (indices.len() * mem::size_of::<GLfloat>()) as GLsizeiptr,
                &indices[0] as *const i32 as *const c_void,
                gl::STATIC_DRAW,
            );

            // Attribute 0: three floats per vertex, tightly packed.
            gl::VertexAttribPointer(
                0,
                3,
                gl::FLOAT,
                gl::FALSE,
                3 * mem::size_of::<GLfloat>() as GLsizei,
                ptr::null(),
            );
            gl::EnableVertexAttribArray(0);

            gl::BindBuffer(gl::ARRAY_BUFFER, 0);
            gl::BindVertexArray(0);
        }
        Ok(())
    }

    /// Clears the screen to the given RGBA color. Associated function: call
    /// as `GraphicsContext::clear_color(..)`, no instance required.
    pub fn clear_color(red: f32, green: f32, blue: f32, alpha: f32) {
        unsafe {
            gl::ClearColor(red, green, blue, alpha);
            gl::Clear(gl::COLOR_BUFFER_BIT);
        }
    }

    /// Draws the unit quad transformed by `scale` and `translate` in the
    /// given RGBA `color`, via the `scale`/`translate`/`color` uniforms of
    /// the cell shader program. Requires a successful `init`.
    pub fn draw_square_with_scale_translation_color(
        &self,
        scale: Matrix4<f32>,
        translate: Matrix4<f32>,
        color: [f32; 4],
    ) {
        unsafe {
            gl::UseProgram(self.shader_program);

            // Uniform names as NUL-terminated C strings for the FFI lookups.
            let scale_str = CString::new("scale").unwrap();
            let translate_str = CString::new("translate").unwrap();
            let color_str = CString::new("color").unwrap();

            let scale_square = gl::GetUniformLocation(self.shader_program, scale_str.as_ptr());
            gl::UniformMatrix4fv(scale_square, 1, gl::FALSE, scale.as_ptr());
            let translate_square =
                gl::GetUniformLocation(self.shader_program, translate_str.as_ptr());
            gl::UniformMatrix4fv(translate_square, 1, gl::FALSE, translate.as_ptr());
            let color_square = gl::GetUniformLocation(self.shader_program, color_str.as_ptr());
            gl::Uniform4f(color_square, color[0], color[1], color[2], color[3]);

            // Two triangles (6 indices) form the square.
            gl::BindVertexArray(self.vao);
            gl::DrawElements(gl::TRIANGLES, 6, gl::UNSIGNED_INT, ptr::null());
        }
    }
}
use crate::grid::{CellValue, Grid};
use printpdf::*;
use std::fs::File;
use std::io::BufWriter;

// Page layout constants, in millimetres (printpdf's Mm unit).
const BOTTOM_LEFT_X: f64 = 10.0;
const BOTTOM_LEFT_Y: f64 = 279.0 - 200.0 - 10.0;
const GRID_DIMENSION: f64 = 190.0;
// NOTE(review): 215 x 279 mm is US Letter, not A4 (210 x 297) — TODO confirm
// the intended page size or rename the constant.
const A4: (Mm, Mm) = (Mm(215.0), Mm(279.0));

/// Renders `grid` as a one-page PDF saved to `filename`.
///
/// Fixed cell values are drawn large and centred; when
/// `print_possibilities` is true, each unknown cell's candidates are drawn
/// as small pencil marks in a 3x3 sub-grid.
///
/// Returns any error from font loading, file creation, or saving.
pub fn draw_grid(grid: &Grid, filename: &str, print_possibilities: bool) -> Result<(), Box<dyn std::error::Error>> {
    let (doc, page1, layer1) = PdfDocument::new("Sudoku Puzzle", A4.0, A4.1, "Layer 1");
    let layer = doc.get_page(page1).get_layer(layer1);
    let font = doc.add_builtin_font(BuiltinFont::HelveticaBold)?;
    let fixed_value_font_size = 45;
    let possibility_font_size = 12;

    draw_empty_grid(&layer);

    // x represents position on left-right scale
    // y represents position on up-down scale
    // Now need to add any values
    // One thing to note - higher y values are associated with the top of the page, while for my grid
    // higher row values are associated with the bottom of the page.
    // The offsets nudge glyphs toward the visual centre of each cell.
    let x_offset = 6.1;
    let y_offset = -16.5;
    for r in 0..9 {
        // (9.0 - r) flips row order: row 0 appears at the top of the page.
        let y = Mm(BOTTOM_LEFT_Y + (GRID_DIMENSION / 9.0) * (9.0 - r as f64) + y_offset);
        for c in 0..9 {
            let x = Mm(BOTTOM_LEFT_X + (GRID_DIMENSION / 9.0) * (c as f64) + x_offset);

            let cell = grid.get(r, c).unwrap();
            let value = &*cell.value.borrow();

            match value {
                CellValue::Fixed(digit) => {
                    let text = digit.to_string();
                    layer.use_text(text, fixed_value_font_size, x, y, &font);
                }
                CellValue::Unknown(possibilities) => {
                    if print_possibilities {
                        for (_, possibility) in possibilities.iter().enumerate() {
                            // Lay candidates out in a 3x3 mini-grid:
                            // 1..3 on the top row, 4..6 middle, 7..9 bottom.
                            let sub_row = (possibility - 1) / 3;
                            let sub_column = (possibility - 1) % 3;
                            // Need to adjust x & y
                            let x = Mm(x.0 - 4.0 + (GRID_DIMENSION / 27.0) * (sub_column as f64));
                            let y = Mm(y.0 - 9.5 + (GRID_DIMENSION / 27.0) * (3.0 - sub_row as f64));
                            let text = possibility.to_string();
                            layer.use_text(text, possibility_font_size, x, y, &font);
                        }
                    }
                }
            }
        }
    }

    doc.save(&mut BufWriter::new(File::create(filename)?))?;
    return Ok(());
}

/// Draws the 9x9 board outline: thick lines on box boundaries, thin lines
/// for the interior cells.
fn draw_empty_grid(layer: &PdfLayerReference) {
    // x represents position on left-right scale
    // y represents position on up-down scale

    // Thick lines first
    layer.set_outline_thickness(2.0);

    // Horizontal first
    {
        let starting_x = Mm(BOTTOM_LEFT_X);
        let ending_x = Mm(BOTTOM_LEFT_X + GRID_DIMENSION);
        let y_increment = GRID_DIMENSION / 3.0;
        for i in 0..4 {
            let y = Mm(BOTTOM_LEFT_Y + (i as f64) * y_increment);
            draw_line(layer, Point::new(starting_x, y), Point::new(ending_x, y));
        }
    }

    // Vertical lines next
    {
        let starting_y = Mm(BOTTOM_LEFT_Y);
        let ending_y = Mm(BOTTOM_LEFT_Y + GRID_DIMENSION);
        let x_increment = GRID_DIMENSION / 3.0;
        for i in 0..4 {
            let x = Mm(BOTTOM_LEFT_X + (i as f64) * x_increment);
            draw_line(layer, Point::new(x, starting_y), Point::new(x, ending_y));
        }
    }

    // Thin lines next
    layer.set_outline_thickness(0.0); // Special value to make line be 1px on all devices and zoom levels

    // Horizontal first
    {
        let starting_x = Mm(BOTTOM_LEFT_X);
        let ending_x = Mm(BOTTOM_LEFT_X + GRID_DIMENSION);
        let y_increment = GRID_DIMENSION / 9.0;
        for i in 1..9 {
            // Skip multiples of 3: those positions already have thick lines.
            if i % 3 != 0 {
                let y = Mm(BOTTOM_LEFT_Y + (i as f64) * y_increment);
                draw_line(layer, Point::new(starting_x, y), Point::new(ending_x, y));
            }
        }
    }

    // Vertical lines next
    {
        let starting_y = Mm(BOTTOM_LEFT_Y);
        let ending_y = Mm(BOTTOM_LEFT_Y + GRID_DIMENSION);
        let x_increment = GRID_DIMENSION / 9.0;
        for i in 1..9 {
            if i % 3 != 0 {
                let x = Mm(BOTTOM_LEFT_X + (i as f64) * x_increment);
                draw_line(layer, Point::new(x, starting_y), Point::new(x, ending_y));
            }
        }
    }
}

/// Strokes a straight, open line between the two points on `layer`.
fn draw_line(layer: &PdfLayerReference, point1: Point, point2: Point) {
    let points = vec![(point1, false), (point2, false)];
    let line = Line {
        points,
        is_closed: false,
        has_fill: false,
        has_stroke: true,
        is_clipping_path: false,
    };
    layer.add_shape(line);
}
use derive_more::Display;
use std::{env::JoinPathsError, error::Error, num::NonZeroI32, path::PathBuf};

/// Error types emitted by `pn` itself.
#[derive(Debug, Display)]
pub enum PnError {
    /// Script not found when running `pn run`.
    #[display(fmt = "Missing script: {name}")]
    MissingScript { name: String },

    /// Script ran by `pn run` exits with non-zero status code.
    // NOTE(review): `name` is carried but not interpolated in the display
    // message — presumably used by callers for context; TODO confirm.
    #[display(fmt = "Command failed with exit code {status}")]
    ScriptError { name: String, status: NonZeroI32 },

    /// Subprocess finishes but without a status code.
    #[display(fmt = "Command {command:?} has ended unexpectedly")]
    UnexpectedTermination { command: String },

    /// The program receives --workspace-root outside a workspace.
    #[display(fmt = "--workspace-root may only be used in a workspace")]
    NotInWorkspace,

    /// No package manifest.
    #[display(fmt = "File not found: {file:?}")]
    NoPkgManifest { file: PathBuf },

    /// Parse JSON error.
    #[display(fmt = "Failed to parse {file:?}: {message}")]
    ParseJsonError { file: PathBuf, message: String },

    /// Failed to prepend `node_modules/.bin` to `PATH`.
    #[display(fmt = "Cannot add `node_modules/.bin` to PATH: {error}")]
    NodeBinPathError { error: JoinPathsError },

    /// Other errors.
    #[display(fmt = "{error}")]
    Other { error: Box<dyn Error> },
}

/// The main error type.
#[derive(Debug, Display)]
pub enum MainError {
    /// Errors emitted by `pn` itself.
    Pn(PnError),

    /// The subprocess that takes control exits with non-zero status code.
    Sub(NonZeroI32),
}

impl MainError {
    /// Convenience conversion: boxes an arbitrary error into
    /// `MainError::Pn(PnError::Other)`.
    pub fn from_dyn(error: impl Error + 'static) -> Self {
        MainError::Pn(PnError::Other {
            error: Box::new(error),
        })
    }
}
use super::*;

/// A comment block.
///
/// # Semantics
///
/// See [`Comment`].
///
/// # Syntax
///
/// ```text
/// #+BEGIN_COMMENT
/// CONTENTS
/// #+END_COMMENT
/// ```
///
/// `CONTENTS` can contain anything except a line `#+END_COMMENT` on its own. Lines beginning
/// with stars must be quoted by a comma. `CONTENTS` will not be parsed.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct CommentBlock {
    // Affiliated keywords preceding the block (with their source span), if any.
    affiliated_keywords: Option<Spanned<AffiliatedKeywords>>,
    /// The raw, unparsed `CONTENTS` of the block.
    pub value: String,
}
use std::collections::HashMap;
use std::error::Error;
use scraper::{Html, Selector};
use chrono::{Timelike, Local, DateTime, Duration};
use clap::{Arg, App};

// Roomzilla doesn't give us an end time or duration, so we have to infer it by the width of the reservation element
// 58px = 1 hour
// 60 / 58 = 1.03448275862 (minutes per px)
// 60 / 58 * 60 = 62.0689655172 (seconds per px)
const SECONDS_PER_WIDTH_PX: f64 = 62.0689655172;

/// A meeting room scraped from the Roomzilla timeline page.
#[derive(Debug)]
struct Room {
    name: String,
    floor: i32,
    size: u32, // seat count
    reservations: Vec<Reservation>,
}

/// One booked window for a room, in seconds since local midnight.
#[derive(Debug)]
struct Reservation {
    start: u32,
    end: u32,
}

/// Parses an "HH:MM" CLI value into a `DateTime` on today's date, or returns
/// `default` when the argument was not supplied.
///
/// NOTE(review): seconds/nanoseconds of `Local::now()` are kept, so the
/// result is HH:MM:<current seconds>, which slightly shifts the overlap
/// comparisons downstream — TODO confirm whether they should be zeroed.
fn parse_time_arg(arg_value: Option<&str>, default: DateTime<Local>) -> Result<DateTime<Local>, Box<dyn Error>> {
    match arg_value {
        Some(start) => {
            let split = start.split(":").collect::<Vec<_>>();
            Ok(Local::now()
                .with_hour(split.get(0).ok_or("Could not parse hour")?.parse::<u32>()?).ok_or("Could not parse hour")?
                .with_minute(split.get(1).ok_or("Could not parse minute")?.parse::<u32>()?).ok_or("Could not parse minute")?
            )
        },
        None => Ok(default)
    }
}

/// Downloads the Roomzilla timeline page and builds a map of room name to
/// `Room`, including each room's reservations inferred from the page.
fn scrape_rooms() -> Result<HashMap<String, Room>, Box<dyn Error>> {
    let mut rooms = HashMap::new();

    let html = reqwest::get("https://industryrinostation.roomzilla.net/")?
        .text()?;
    let document = Html::parse_document(&html);

    // One table row per room; the cells carry machine-readable values in
    // their `data-sort` attributes.
    let table_selector = Selector::parse("table#timeline tbody tr").expect("Failed to parse selector");
    let name_selector = Selector::parse("td.name").expect("Failed to parse selector");
    let floor_selector = Selector::parse("td.floor").expect("Failed to parse selector");
    let size_selector = Selector::parse("td.size").expect("Failed to parse selector");

    for element in document.select(&table_selector) {
        let name_element = element.select(&name_selector).next().ok_or("Failed to find name element")?;
        let floor_element = element.select(&floor_selector).next().ok_or("Failed to find floor element")?;
        let size_element = element.select(&size_selector).next().ok_or("Failed to find size element")?;

        rooms.insert(name_element.value().attr("data-sort").ok_or("Failed to parse name from name element")?.to_owned(), Room {
            name: name_element.value().attr("data-sort").ok_or("Failed to parse name from name element")?.to_owned(),
            floor: floor_element.value().attr("data-sort").ok_or("Failed to parse floor from floor element")?.parse::<i32>()?,
            size: size_element.value().attr("data-sort").ok_or("Failed to parse size from size element")?.parse::<u32>()?,
            reservations: vec![],
        });
    }

    // Reservations are separate divs: the start comes from the `seconds`
    // attribute and the duration is inferred from the element's pixel width.
    let reserved_selector = Selector::parse("div.reserved").expect("Failed to parse selector");

    for element in document.select(&reserved_selector) {
        let room_name = element.value().attr("room_name").ok_or("Failed to parse room name for reservation")?;
        let start = element.value().attr("seconds").ok_or("Failed to parse room seconds for reservation start")?.parse::<f64>()?;
        let style = element.value().attr("style").ok_or("Failed to parse room style for reservation duration")?;
        // Extract the numeric width from an inline style such as "width: 58px;".
        // NOTE(review): the [7..] slice assumes the style string begins with
        // exactly "width: " — TODO confirm against the live markup.
        let duration = (&style[7..style.find("px;").ok_or("Failed to parse duration from room style")?].parse::<f64>()? * SECONDS_PER_WIDTH_PX).round();

        if let Some(room) = rooms.get_mut(room_name) {
            room.reservations.push(Reservation {
                start: start as u32,
                end: (start + duration) as u32,
            });
        }
    }

    Ok(rooms)
}

/// Prints every room with no reservation overlapping the [start, end) window.
///
/// NOTE(review): all comparisons are strict, so a reservation exactly equal
/// to the requested window is not detected as a conflict — TODO confirm.
fn print_free_rooms(rooms: HashMap<String, Room>, start: DateTime<Local>, end: DateTime<Local>) {
    println!("Free rooms available from {} to {}", start.format("%I:%M%P").to_string(), end.format("%I:%M%P").to_string());

    for (_, room) in rooms.iter() {
        let mut free = true;
        for reservation in &room.reservations {
            if (start.num_seconds_from_midnight() > reservation.start && start.num_seconds_from_midnight() < reservation.end) // Starts during this reservation
                || (end.num_seconds_from_midnight() > reservation.start && end.num_seconds_from_midnight() < reservation.end) // Ends during this reservation
                || (start.num_seconds_from_midnight() < reservation.start && end.num_seconds_from_midnight() > reservation.end) // This reservation is in the middle of our target
            {
                // Starts during this reservation
                free = false;
                break;
            }
        }
        if free {
            println!("{} (seats {})", room.name, room.size);
        }
    }
}

fn main() {
    // CLI: optional --start/--end in 24-hour HH:MM; defaults are "now" and
    // one hour after the chosen start.
    let cli_args = App::new("Rustzilla")
        .version("1.0")
        .author("Allan Wintersieck <awintersieck@gmail.com>")
        .about("Scrapes Industry RiNo Roomzilla")
        .arg(Arg::with_name("start")
            .short("s")
            .long("start")
            .value_name("START")
            .help("Start time in format of HH:MM (24-hour clock)")
            .takes_value(true))
        .arg(Arg::with_name("end")
            .short("e")
            .long("end")
            .value_name("END")
            .help("End time in format of HH:MM (24-hour clock)")
            .takes_value(true))
        .get_matches();

    let start = match parse_time_arg(cli_args.value_of("start"), Local::now()) {
        Ok(start) => start,
        Err(err) => panic!("Failed to parse start time argument: {}", err)
    };
    let end = match parse_time_arg(cli_args.value_of("end"), start + Duration::seconds(3600)) {
        Ok(end) => end,
        Err(err) => panic!("Failed to parse end time argument: {}", err)
    };
    let rooms = match scrape_rooms() {
        Ok(rooms) => rooms,
        Err(err) => panic!("Failed to scrape room and reservation data: {}", err)
    };

    print_free_rooms(rooms, start, end);
}
use crate::target::Endpoint;
use linkerd_app_core::{
    dns::Name,
    io,
    svc::{self, layer},
    transport_header::TransportHeader,
    Error,
};
use std::{
    future::Future,
    pin::Pin,
    str::FromStr,
    task::{Context, Poll},
};
use tracing::{debug, trace, warn};

/// A connect-layer middleware: for endpoints whose discovery metadata names
/// an opaque-transport port, it retargets the connection to that port and
/// writes a `TransportHeader` preface on the socket before returning it.
/// Endpoints without an opaque port pass through untouched.
#[derive(Clone, Debug)]
pub struct OpaqueTransport<S> {
    inner: S,
}

impl<S> OpaqueTransport<S> {
    /// Builds a tower layer wrapping an inner connect service in
    /// `OpaqueTransport`.
    pub fn layer() -> impl layer::Layer<S, Service = Self> + Copy {
        layer::mk(|inner| OpaqueTransport { inner })
    }
}

impl<S, P> svc::Service<Endpoint<P>> for OpaqueTransport<S>
where
    S: svc::Service<Endpoint<P>> + Send + 'static,
    S::Error: Into<Error>,
    S::Response: io::AsyncWrite + Send + Unpin,
    S::Future: Send + 'static,
{
    type Response = S::Response;
    type Error = Error;
    type Future = Pin<Box<dyn Future<Output = Result<S::Response, Error>> + Send + 'static>>;

    #[inline]
    fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        // Readiness is entirely delegated to the inner connect service.
        self.inner.poll_ready(cx).map_err(Into::into)
    }

    fn call(&mut self, mut ep: Endpoint<P>) -> Self::Future {
        // Determine whether an opaque header should written on the socket.
        let header = match ep.metadata.opaque_transport_port() {
            None => {
                trace!("No opaque transport configured");
                None
            }
            Some(override_port) => {
                // Update the endpoint to target the discovery-provided control
                // plane port.
                let orig_port = ep.addr.port();
                ep.addr = (ep.addr.ip(), override_port).into();

                // If there's a destination override, encode that in the opaque
                // transport (i.e. for multicluster gateways). Otherwise, simply
                // encode the original target port. Note that we prefer any port
                // specified in the override to the original destination port.
                let header = ep
                    .metadata
                    .authority_override()
                    .and_then(|auth| {
                        let port = auth.port_u16().unwrap_or(orig_port);
                        // An unparseable override name is logged and ignored,
                        // falling back to the port-only header below.
                        Name::from_str(auth.host())
                            .map_err(|error| warn!(%error, "Invalid name"))
                            .ok()
                            .map(|n| TransportHeader {
                                port,
                                name: Some(n),
                            })
                    })
                    .unwrap_or(TransportHeader {
                        port: orig_port,
                        name: None,
                    });
                debug!(?header, override_port, "Using opaque transport");
                Some(header)
            }
        };

        // Connect to the endpoint.
        let connect = self.inner.call(ep);
        Box::pin(async move {
            let mut io = connect.await.map_err(Into::into)?;

            // Once connected, write the opaque header on the socket before
            // returning it.
            if let Some(h) = header {
                let sz = h.write(&mut io).await?;
                debug!(sz, "Wrote header to transport");
            }

            Ok(io)
        })
    }
}

#[cfg(test)]
mod test {
    use super::*;
    use crate::target::{Concrete, Endpoint, Logical};
    use futures::future;
    use linkerd_app_core::{
        io::{self, AsyncWriteExt},
        proxy::api_resolve::{Metadata, ProtocolHint},
        tls,
        transport_header::TransportHeader,
    };
    use tower::util::{service_fn, ServiceExt};

    /// Builds a test endpoint at 127.0.0.2:4321 carrying the given metadata.
    fn ep(metadata: Metadata) -> Endpoint<()> {
        Endpoint {
            addr: ([127, 0, 0, 2], 4321).into(),
            identity: tls::PeerIdentity::None(
                tls::ReasonForNoPeerName::NotProvidedByServiceDiscovery,
            ),
            metadata,
            concrete: Concrete {
                resolve: None,
                logical: Logical {
                    orig_dst: ([127, 0, 0, 2], 4321).into(),
                    profile: None,
                    protocol: (),
                },
            },
        }
    }

    // No opaque port in the metadata: the original port is kept and no
    // header bytes are expected on the mock socket.
    #[tokio::test(flavor = "current_thread")]
    async fn plain() {
        let _ = tracing_subscriber::fmt().with_test_writer().try_init();

        let svc = OpaqueTransport {
            inner: service_fn(|ep: Endpoint<()>| {
                assert_eq!(ep.addr.port(), 4321);
                future::ready(Ok::<_, io::Error>(
                    tokio_test::io::Builder::new().write(b"hello").build(),
                ))
            }),
        };

        let mut io = svc
            .oneshot(ep(Metadata::default()))
            .await
            .expect("Connect must not fail");
        io.write_all(b"hello").await.expect("Write must succeed");
    }

    // Opaque port 4143, no authority override: header carries the original
    // destination port and no name.
    #[tokio::test(flavor = "current_thread")]
    async fn opaque_no_name() {
        let _ = tracing_subscriber::fmt().with_test_writer().try_init();

        let svc = OpaqueTransport {
            inner: service_fn(|ep: Endpoint<()>| {
                assert_eq!(ep.addr.port(), 4143);
                let hdr = TransportHeader {
                    port: ep.concrete.logical.orig_dst.port(),
                    name: None,
                };
                let buf = hdr.encode_prefaced_buf().expect("Must encode");
                future::ready(Ok::<_, io::Error>(
                    tokio_test::io::Builder::new()
                        .write(&buf[..])
                        .write(b"hello")
                        .build(),
                ))
            }),
        };

        let e = ep(Metadata::new(
            Default::default(),
            ProtocolHint::Unknown,
            Some(4143),
            None,
            None,
        ));
        let mut io = svc.oneshot(e).await.expect("Connect must not fail");
        io.write_all(b"hello").await.expect("Write must succeed");
    }

    // Authority override with an explicit port: header carries the override
    // name and the override's port (5555), not the original port.
    #[tokio::test(flavor = "current_thread")]
    async fn opaque_named_with_port() {
        let _ = tracing_subscriber::fmt().with_test_writer().try_init();

        let svc = OpaqueTransport {
            inner: service_fn(|ep: Endpoint<()>| {
                assert_eq!(ep.addr.port(), 4143);
                let hdr = TransportHeader {
                    port: 5555,
                    name: Some(Name::from_str("foo.bar.example.com").unwrap()),
                };
                let buf = hdr.encode_prefaced_buf().expect("Must encode");
                future::ready(Ok::<_, io::Error>(
                    tokio_test::io::Builder::new()
                        .write(&buf[..])
                        .write(b"hello")
                        .build(),
                ))
            }),
        };

        let e = ep(Metadata::new(
            Default::default(),
            ProtocolHint::Unknown,
            Some(4143),
            None,
            Some(http::uri::Authority::from_str("foo.bar.example.com:5555").unwrap()),
        ));
        let mut io = svc.oneshot(e).await.expect("Connect must not fail");
        io.write_all(b"hello").await.expect("Write must succeed");
    }

    // No authority override at all: behaves like opaque_no_name (header
    // carries the original destination port without a name).
    #[tokio::test(flavor = "current_thread")]
    async fn opaque_named_no_port() {
        let _ = tracing_subscriber::fmt().with_test_writer().try_init();

        let svc = OpaqueTransport {
            inner: service_fn(|ep: Endpoint<()>| {
                assert_eq!(ep.addr.port(), 4143);
                let hdr = TransportHeader {
                    port: ep.concrete.logical.orig_dst.port(),
                    name: None,
                };
                let buf = hdr.encode_prefaced_buf().expect("Must encode");
                future::ready(Ok::<_, io::Error>(
                    tokio_test::io::Builder::new()
                        .write(&buf[..])
                        .write(b"hello")
                        .build(),
                ))
            }),
        };

        let e = ep(Metadata::new(
            Default::default(),
            ProtocolHint::Unknown,
            Some(4143),
            None,
            None,
        ));
        let mut io = svc.oneshot(e).await.expect("Connect must not fail");
        io.write_all(b"hello").await.expect("Write must succeed");
    }
}
//! Crash Recovery Log
//!
//! This module implements a durable log for transaction and object allocation state to ensure
//! that those operations can be successfully recovered in the event of unexpected program
//! termination.
//!
//! The CRL is implemented in front-end, backend-halves where the frontend is common to all
//! CRL implementations and the back end is abstracted away behind a std::async::mpsc::Sender
//! interface.
use std::error::Error;
use std::fmt;
use std::sync;

use super::ArcDataSlice;
use super::hlc;
use super::object;
use super::paxos;
use super::store;
use super::transaction;

pub mod sweeper;
pub mod mock;
pub mod null;

/// Error raised when persisted CRL content cannot be decoded during recovery.
#[derive(Debug)]
struct DecodeError;

impl fmt::Display for DecodeError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "CRL DecodeError")
    }
}

impl Error for DecodeError {
    // NOTE: description() is deprecated in favor of Display; kept for
    // compatibility with older callers.
    fn description(&self) -> &str {
        "Invalid data encountered while decoding CRL content"
    }

    fn source(&self) -> Option<&(dyn Error + 'static)> {
        None
    }
}

// Any lower-level encoding failure surfaces as a generic DecodeError;
// the cause is intentionally not preserved.
impl From<crate::EncodingError> for DecodeError {
    fn from(_: crate::EncodingError) -> DecodeError {
        DecodeError{}
    }
}

// Allows `?` to convert a DecodeError into std::io::Error (InvalidData).
impl From<DecodeError> for std::io::Error {
    fn from(e: DecodeError) -> std::io::Error {
        std::io::Error::new(std::io::ErrorKind::InvalidData, e)
    }
}

/// Unique Identifier for a state save request made to the Crash Recovery Log
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Debug, Hash)]
pub struct TxSaveId(pub u64);

impl TxSaveId {
    /// Returns the next id in sequence.
    ///
    /// Note: plain `+ 1`, so this panics on u64::MAX in debug builds and
    /// wraps in release builds.
    pub fn next(&self) -> TxSaveId {
        TxSaveId(self.0 + 1)
    }
}

/// Completion notices delivered to a `RequestCompletionHandler` when a save
/// request finishes; `success` reports whether the state reached
/// persistent media.
pub enum Completion {
    TransactionSave {
        store_id: store::Id,
        transaction_id: transaction::Id,
        save_id: TxSaveId,
        success: bool
    },
    AllocationSave {
        store_id: store::Id,
        transaction_id: transaction::Id,
        object_id: object::Id,
        success: bool
    },
}

impl Completion {
    /// The store this completion belongs to, regardless of variant.
    pub fn store_id(&self) -> store::Id {
        match self {
            Completion::TransactionSave { store_id, ..} => *store_id,
            Completion::AllocationSave { store_id, .. } => *store_id
        }
    }
}

/// Used to notify completion of state save requests made to the Crash Recovery Log
///
/// Methods on the handler are called within the context of a CRL thread. Consequently,
/// implementations of this trait should wrap a std::sync::mpsc::Sender and use it
/// to send the completion notice back to the thread that originated the request. A
/// trait object is used rather than doing this directly to allow for flexibility in
/// the message type sent over the channel. The success arguemnt will be true if the
/// state was successfully written to persistent media, false if an error occurred
pub trait RequestCompletionHandler {
    fn complete(&self, op: Completion);
}

/// Interface to the CRL backend implementation
pub trait Backend {
    /// Stops the backend; semantics (flush, thread join) are implementation-defined.
    fn shutdown(&mut self);

    /// Creates a new Crl trait object that will notify the supplied RequestCompletionHandler
    /// when requests complete
    fn new_interface(&self,
        save_handler: sync::Arc<dyn RequestCompletionHandler + Send + Sync>) -> Box<dyn Crl>;
}

/// Represents the persistent state needed to recover a transaction after a crash
#[derive(Clone, Eq, PartialEq)]
pub struct TransactionRecoveryState {
    pub transaction_id: transaction::Id,
    pub store_id: store::Id,
    pub serialized_transaction_description: ArcDataSlice,
    pub object_updates: Vec<transaction::ObjectUpdate>,
    pub tx_disposition: transaction::Disposition,
    pub paxos_state: paxos::PersistentState
}

/// Represents the persistent state needed to recover an allocation operation after a crash
#[derive(Clone, Eq, PartialEq)]
pub struct AllocationRecoveryState {
    pub store_id: store::Id,
    pub store_pointer: store::Pointer,
    pub id: object::Id,
    pub kind: object::Kind,
    pub size: Option<u32>,
    pub data: ArcDataSlice,
    pub refcount: object::Refcount,
    pub timestamp: hlc::Timestamp,
    pub allocation_transaction_id: transaction::Id,
    pub serialized_revision_guard: ArcDataSlice
}

/// Client interface to the Crash Recovery Log
pub trait Crl {

    /// Provides the full recovery state for a store
    ///
    /// This method should only be used during data store initialization and for capturing store
    /// CRL state for transferring that store to another server.
    ///
    /// # Panics
    ///
    /// Panics if the channel to the CRL is closed
    fn get_full_recovery_state(
        &self,
        store_id: store::Id) -> (Vec<TransactionRecoveryState>, Vec<AllocationRecoveryState>);

    /// Saves transaction state into the CRL
    ///
    /// The transaction description and object updates should only be included once. When the
    /// state has been successfully stored to persistent media, the
    /// SaveCompleteHandler.transaction_state_saved method will be called with the RequestId
    /// returned from this function. If an error is encountered and/or the state cannot be
    /// saved, the completion handler will never be called.
    fn save_transaction_state(
        &self,
        store_id: store::Id,
        transaction_id: transaction::Id,
        serialized_transaction_description: ArcDataSlice,
        object_updates: Option<Vec<transaction::ObjectUpdate>>,
        tx_disposition: transaction::Disposition,
        paxos_state: paxos::PersistentState,
        save_id: TxSaveId
    );

    /// Drops transaction data from the log.
    ///
    /// Informs the CRL that object data associated with the transaction is no longer needed
    /// for recovery purposes and that it may be dropped from the log.
    fn drop_transaction_object_data(
        &self,
        store_id: store::Id,
        transaction_id: transaction::Id
    );

    /// Deletes the saved transaction state from the log.
    fn delete_transaction_state(
        &self,
        store_id: store::Id,
        transaction_id: transaction::Id
    );

    /// Saves object allocation state into the CRL
    ///
    /// Similar to transaction state saves, no completion notice will be provided if an error
    /// is encountered
    fn save_allocation_state(
        &self,
        store_id: store::Id,
        store_pointer: store::Pointer,
        id: object::Id,
        kind: object::Kind,
        size: Option<u32>,
        data: ArcDataSlice,
        refcount: object::Refcount,
        timestamp: hlc::Timestamp,
        allocation_transaction_id: transaction::Id,
        serialized_revision_guard: ArcDataSlice
    );

    /// Deletes the saved allocation state for the given transaction from the log.
    fn delete_allocation_state(
        &self,
        store_id: store::Id,
        allocation_transaction_id: transaction::Id);
}
extern crate astro; use astro::*; use std::io; use std::io::*; fn main() { // Welcome message println!("So you want to have a Julian day (Me day)?"); loop { // Declaring variables let mut year: i16 = 0; let mut month: u8 = 0; let mut day: u8 = 0; loop { // Get the year print!("Ok so, whats the year? "); io::stdout().flush().unwrap(); let mut input = String::new(); io::stdin().read_line(&mut input).unwrap(); year = input.trim().parse().unwrap(); // Get the month print!("Ah so we are time travelling to {}! But like what month? ", year); io::stdout().flush().unwrap(); let mut input = String::new(); io::stdin().read_line(&mut input).unwrap(); month = input.trim().parse().unwrap(); // Get the day print!("Really? Month {} of {}? Alright but be more specific - what day? ", month, year); io::stdout().flush().unwrap(); let mut input = String::new(); io::stdin().read_line(&mut input).unwrap(); day = input.trim().parse().unwrap(); // Funni message to confirm all the inputs print!("Alright, we will go to {}-{}-{}. Is this correct? Y/n ", day, month, year); io::stdout().flush().unwrap(); let mut input = String::new(); io::stdin().read_line(&mut input).unwrap(); let is_correct = input.trim().to_lowercase(); if is_correct == "y"{ break } else { println!("\n\nAlright, I'll let you pick another time to become a me day.n") } } // Generating the day as an obj let day_of_month = time::DayOfMonth{day: day, hr: 12, min: 0, sec: 0.0, time_zone: 0.0}; let date = time::Date{year: year, month: month, decimal_day: time::decimal_day(&day_of_month), cal_type: time::CalType::Gregorian}; // Getting the Julian Date and letting the user know thats the date let julian_day = time::julian_day(&date); println!("\n\nYour me date is: {}\n\n", julian_day); // Ask if the user want to go again print!("So erm, kind of awkward but, do you want to go again? 
Y/n "); io::stdout().flush().unwrap(); let mut input = String::new(); io::stdin().read_line(&mut input).unwrap(); let go_again = input.trim().to_lowercase(); if go_again == "y" { println!("Ok I'll let you go again...\n\n"); } else { break } } }
//! Minimal MPEG transport-stream (ISO/IEC 13818-1) packet construction and
//! header / adaptation-field parsing.
//! (The `byteorder` dependency was replaced by `u32::/u16::from_be_bytes`.)

/// One raw 188-byte MPEG-TS packet.
pub type PacketData = [u8; 188];

/// Parsed view over a packet: the raw bytes plus the cached 32-bit header
/// word (bytes 0..4, big-endian).
pub struct Packet {
    data: PacketData,
    header: u32,
}

impl Packet {
    /// Wraps raw packet bytes, caching the 4-byte header for field access.
    pub fn new(data: PacketData) -> Packet {
        let header = u32::from_be_bytes([data[0], data[1], data[2], data[3]]);
        Packet { data, header }
    }

    /// Returns a copy of `data` with the continuity counter (low nibble of
    /// byte 3) replaced by `cc`.
    pub fn with_cc(data: PacketData, cc: u8) -> PacketData {
        let mut updated = data;
        updated[3] = (data[3] & 0xF0) | (cc & 0x0F);
        updated
    }

    /// Builds a packet carrying `payload`, inserting an adaptation field with
    /// stuffing when the payload is shorter than 184 bytes.
    pub fn create_packet_with_payload(
        tei: bool,
        pusi: bool,
        priority: bool,
        pid: u16,
        tsc: u8,
        afc: u8,
        cc: u8,
        payload: &[u8],
    ) -> PacketData {
        const FULL_PAYLOAD_LEN: usize = 184;
        let mut payload_len = payload.len();
        let mut offset = 4;
        let mut data: PacketData;

        if payload_len > FULL_PAYLOAD_LEN {
            // yep! silently dropping any extra payload passed in
            // TODO: return two packets with the payload split across
            payload_len = FULL_PAYLOAD_LEN;
        }

        if payload_len == FULL_PAYLOAD_LEN {
            data = Packet::create_packet(tei, pusi, priority, pid, tsc, afc, cc);
        } else {
            if payload_len > FULL_PAYLOAD_LEN - 2 {
                // again dropping data if we can't stick it in after adding the adaptation field
                // TODO: return two packets with the payload split across
                payload_len = FULL_PAYLOAD_LEN - 2;
            }
            // Force the "adaptation field present" bit on.
            data = Packet::create_packet(tei, pusi, priority, pid, tsc, afc | 0x2, cc);
            let aflen = FULL_PAYLOAD_LEN - 1 - payload_len;
            data[4] = aflen as u8; // AF length (bytes after the length byte)
            data[5] = 0; // no additional adaptation field flags set
            offset = 4 + 1 + aflen;
        }

        data[offset..offset + payload_len].copy_from_slice(&payload[..payload_len]);
        data
    }

    /// Builds a payload-less packet: sync byte, flags, PID, TSC/AFC/CC, with
    /// the remaining bytes left as 0xFF stuffing.
    pub fn create_packet(
        tei: bool,
        pusi: bool,
        priority: bool,
        pid: u16,
        tsc: u8,
        afc: u8,
        cc: u8,
    ) -> PacketData {
        let mut pkt: PacketData = [0xFF; 188];
        pkt[0] = 0x47; // sync byte
        // Bytes start all-ones; XOR clears a flag bit when its argument is false.
        if !tei {
            pkt[1] ^= 0x80;
        }
        if !pusi {
            pkt[1] ^= 0x40;
        }
        if !priority {
            pkt[1] ^= 0x20;
        }
        // 0xE0 preserves the flag bits set above; the low 5 bits take pid[12:8].
        pkt[1] &= (0xE0 | (pid >> 8)) as u8;
        pkt[2] &= (pid & 0xFF) as u8;
        pkt[3] = ((tsc << 6) & 0xC0) + ((afc << 4) & 0x30) + (cc & 0xF);
        pkt
    }

    /// Byte offset of the first optional adaptation-field entry after the
    /// PCR/OPCR slots (AF length is byte 4, flags byte 5, first option byte 6).
    ///
    /// NOTE(review): this file consistently reserves 7 bytes per PCR/OPCR even
    /// though ISO 13818-1 defines the field as 6 bytes (33+6+9 bits) — confirm
    /// against the producer of these packets before changing the convention.
    fn af_optional_offset(&self) -> usize {
        let mut offset = 6;
        if self.has_pcr() {
            offset += 7;
        }
        if self.has_opcr() {
            offset += 7;
        }
        offset
    }
}

/// Accessors for the fixed 4-byte transport-packet header.
pub trait PacketHeader {
    fn sync(&self) -> u8;
    fn tei(&self) -> bool;
    fn pusi(&self) -> bool;
    fn priority(&self) -> bool;
    fn pid(&self) -> u16;
    fn tsc(&self) -> u8;
    fn afc(&self) -> u8;
    fn has_adaptation_field(&self) -> bool;
    fn has_payload(&self) -> bool;
    fn cc(&self) -> u8;
}

// Packet delegates all header accessors to its cached header word.
impl PacketHeader for Packet {
    fn sync(&self) -> u8 {
        self.header.sync()
    }
    fn tei(&self) -> bool {
        self.header.tei()
    }
    fn pusi(&self) -> bool {
        self.header.pusi()
    }
    fn priority(&self) -> bool {
        self.header.priority()
    }
    fn pid(&self) -> u16 {
        self.header.pid()
    }
    fn tsc(&self) -> u8 {
        self.header.tsc()
    }
    fn afc(&self) -> u8 {
        self.header.afc()
    }
    fn has_adaptation_field(&self) -> bool {
        self.header.has_adaptation_field()
    }
    fn has_payload(&self) -> bool {
        self.header.has_payload()
    }
    fn cc(&self) -> u8 {
        self.header.cc()
    }
}

// Bit layout of the big-endian header word.
impl PacketHeader for u32 {
    fn sync(&self) -> u8 {
        ((self & 0xff000000) >> 24) as u8
    }
    fn tei(&self) -> bool {
        0 != self & 0x800000
    }
    fn pusi(&self) -> bool {
        0 != self & 0x400000
    }
    fn priority(&self) -> bool {
        0 != self & 0x200000
    }
    fn pid(&self) -> u16 {
        ((self & 0x1fff00) >> 8) as u16
    }
    fn tsc(&self) -> u8 {
        ((self & 0xc0) >> 6) as u8
    }
    fn afc(&self) -> u8 {
        ((self & 0x30) >> 4) as u8
    }
    fn has_adaptation_field(&self) -> bool {
        0 != self.afc() & 0x2
    }
    fn has_payload(&self) -> bool {
        0 != self.afc() & 0x1
    }
    fn cc(&self) -> u8 {
        (self & 0xf) as u8
    }
}

/// Accessors for the optional adaptation field.
pub trait AdaptationField {
    fn aflen(&self) -> u8;
    fn is_discontinuity(&self) -> bool;
    fn is_random_access(&self) -> bool;
    fn priority_stream(&self) -> bool;
    fn has_pcr(&self) -> bool;
    fn has_opcr(&self) -> bool;
    fn has_splice_countdown(&self) -> bool;
    fn has_transport_private_data(&self) -> bool;
    fn has_extension(&self) -> bool;
    fn pcr(&self) -> u64;
    fn pcr_nanos(&self) -> u64;
    fn opcr(&self) -> u64;
    fn opcr_nanos(&self) -> u64;
    fn splice_countdown(&self) -> u8;
    fn transport_private_data_len(&self) -> u8;
    fn transport_private_data(&self) -> &[u8];
    fn extension(&self) -> &[u8];
    fn stuffing(&self) -> &[u8];
}

/// Decodes a PCR field: 33-bit base (90 kHz) and 9-bit extension, returning
/// the full 27 MHz clock value `base * 300 + extension`.
fn read_pcr_data(buf: &[u8]) -> u64 {
    let high_int = u32::from_be_bytes([buf[0], buf[1], buf[2], buf[3]]) as u64;
    let low_short = u16::from_be_bytes([buf[4], buf[5]]) as u64;
    // BUG FIX: the original wrote `high_int << 1 + (low_short & 0x8000 >> 15)`.
    // In Rust `+` binds tighter than `<<` and `>>` binds tighter than `&`, so
    // that parsed as `high_int << (1 + (low_short & (0x8000 >> 15)))` — a shift
    // by 1 or 2 — not the intended 33-bit base reconstruction below.
    let base = (high_int << 1) | ((low_short & 0x8000) >> 15);
    let extension = low_short & 0x1ff;
    (base * 300) + extension
}

/// Converts a 27 MHz PCR tick count to nanoseconds.
fn pcr_to_nanos(pcr: u64) -> u64 {
    (pcr * 1_000_000_000) / 27_000_000
}

impl AdaptationField for Packet {
    fn aflen(&self) -> u8 {
        self.data[4]
    }
    fn is_discontinuity(&self) -> bool {
        0 != self.data[5] & 0x80
    }
    fn is_random_access(&self) -> bool {
        0 != self.data[5] & 0x40
    }
    fn priority_stream(&self) -> bool {
        0 != self.data[5] & 0x20
    }
    fn has_pcr(&self) -> bool {
        0 != self.data[5] & 0x10
    }
    fn has_opcr(&self) -> bool {
        0 != self.data[5] & 0x8
    }
    fn has_splice_countdown(&self) -> bool {
        0 != self.data[5] & 0x4
    }
    fn has_transport_private_data(&self) -> bool {
        0 != self.data[5] & 0x2
    }
    fn has_extension(&self) -> bool {
        0 != self.data[5] & 0x1
    }

    fn pcr(&self) -> u64 {
        if self.has_pcr() {
            return read_pcr_data(&self.data[6..13]);
        }
        0
    }

    fn pcr_nanos(&self) -> u64 {
        pcr_to_nanos(self.pcr())
    }

    fn opcr(&self) -> u64 {
        // BUG FIX: the original tested `has_pcr()` (not `has_opcr()`) and read
        // the OPCR bytes unconditionally, falling back to `pcr()` otherwise.
        // The OPCR only exists when its own flag is set, and its offset depends
        // on whether a PCR precedes it.
        if self.has_opcr() {
            if self.has_pcr() {
                read_pcr_data(&self.data[13..20])
            } else {
                read_pcr_data(&self.data[6..13])
            }
        } else {
            0
        }
    }

    fn opcr_nanos(&self) -> u64 {
        pcr_to_nanos(self.opcr())
    }

    fn splice_countdown(&self) -> u8 {
        if self.has_splice_countdown() {
            self.data[self.af_optional_offset()]
        } else {
            0
        }
    }

    fn transport_private_data_len(&self) -> u8 {
        if self.has_transport_private_data() {
            // BUG FIX: when no splice countdown was present, the original
            // reused the splice-branch offsets (14/14/7) instead of 13/13/6,
            // skipping one byte. Computing the offset arithmetically keeps all
            // eight flag combinations consistent.
            let offset = self.af_optional_offset() + self.has_splice_countdown() as usize;
            self.data[offset]
        } else {
            0
        }
    }

    fn transport_private_data(&self) -> &[u8] {
        if self.has_transport_private_data() {
            let trans_len = self.transport_private_data_len() as usize;
            // Data starts right after the 1-byte length field (same offset fix
            // as in `transport_private_data_len`).
            let start = self.af_optional_offset() + self.has_splice_countdown() as usize + 1;
            &self.data[start..start + trans_len]
        } else {
            &[]
        }
    }

    fn extension(&self) -> &[u8] {
        // TODO: implement this once I need the data in it
        &[]
    }

    fn stuffing(&self) -> &[u8] {
        // TODO: maybe let folks grab the stuffing someday, though not sure what the purpose would be
        &[]
    }
}

/// Access to the packet payload bytes.
pub trait Payload {
    fn payload_data(&self) -> &[u8];
}

impl Payload for Packet {
    fn payload_data(&self) -> &[u8] {
        // Payload starts after the 4-byte header plus — when an adaptation
        // field is present — its 1-byte length field and `aflen` body bytes.
        // BUG FIX: the original omitted the length byte (`4 + aflen`), which
        // disagreed with `create_packet_with_payload` (`4 + 1 + aflen`).
        let mut offset = 4;
        if self.has_adaptation_field() {
            offset += 1 + self.aflen() as usize;
        }
        &self.data[offset..188]
    }
}
use std::net::SocketAddr;

use crate::common::message_type::{MsgType, UdpPacket, msg_types};

use super::RendezvousServer;

impl RendezvousServer {
    /// Handles a single datagram received on the rendezvous UDP socket.
    ///
    /// `buf` is untrusted network input: malformed, empty, or unknown packets
    /// are dropped (with a log line) instead of panicking. The original used
    /// `unwrap()`, an unchecked `buf[0]`, and `unreachable!()`, so any bad
    /// datagram could crash the whole server.
    pub fn read_udp_message(&mut self, _: usize, addr: SocketAddr, buf: &[u8]) {
        let udp_packet: UdpPacket = match bincode::deserialize(buf) {
            Ok(packet) => packet,
            Err(err) => {
                println!("Dropping malformed UDP packet from {}: {}", addr, err);
                return;
            }
        };
        let buf = udp_packet.data;

        // An empty payload carries no message-type byte.
        let msg_type = match buf.first() {
            Some(byte) => num::FromPrimitive::from_u8(*byte),
            None => return,
        };

        match msg_type {
            Some(MsgType::Announce) => {
                let announce: msg_types::AnnouncePublic = match bincode::deserialize(&buf[1..]) {
                    Ok(announce) => announce,
                    Err(err) => {
                        println!("Dropping malformed announce from {}: {}", addr, err);
                        return;
                    }
                };
                // Only peers that already announced over TCP are known here.
                if let Some(p) = self
                    .peers
                    .iter_mut()
                    .find(|p| p.public_key == announce.public_key)
                {
                    p.udp_addr = Some(addr);
                    println!("Associated UDP address ({}) with peer: ({})", addr, p.public_key);
                    // Echo an empty Announce back so the peer learns its mapping worked.
                    self.send_udp_message(addr, MsgType::Announce, &());
                }
            }
            Some(MsgType::KeepAlive) => {}
            // Unknown or unhandled message types from the network are ignored.
            _ => {}
        }
    }
}
use std::mem;

use twilight_model::application::interaction::{
    application_command::CommandDataOption, ApplicationCommand,
};

/// Extension methods for [`ApplicationCommand`].
pub trait ApplicationCommandExt {
    /// Moves the command's options out of the interaction, leaving an empty
    /// `Vec` behind.
    fn yoink_options(&mut self) -> Vec<CommandDataOption>;
}

impl ApplicationCommandExt for ApplicationCommand {
    fn yoink_options(&mut self) -> Vec<CommandDataOption> {
        // Swap the options out for a fresh empty vector (same as `mem::take`).
        mem::replace(&mut self.data.options, Vec::new())
    }
}
/* Copyright (c) 2023 Uber Technologies, Inc. <p>Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at <p>http://www.apache.org/licenses/LICENSE-2.0 <p>Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ use crate::models::concrete_syntax::get_all_matches_for_concrete_syntax; use crate::{ models::Validator, utilities::{ regex_utilities::get_all_matches_for_regex, tree_sitter_utilities::{get_all_matches_for_query, get_ts_query_parser, number_of_errors}, Instantiate, }, }; use pyo3::prelude::pyclass; use regex::Regex; use serde_derive::Deserialize; use std::collections::HashMap; use tree_sitter::{Node, Query}; #[derive(Debug)] pub struct ConcreteSyntax(pub String); use super::{ default_configs::{CONCRETE_SYNTAX_QUERY_PREFIX, REGEX_QUERY_PREFIX}, matches::Match, }; pub enum PatternType { Tsq, Regex, Unknown, } #[pyclass] #[derive(Deserialize, Debug, Clone, Default, PartialEq, Hash, Eq)] pub struct CGPattern(pub String); impl CGPattern { pub(crate) fn new(query: String) -> Self { Self(query) } pub(crate) fn pattern(&self) -> String { self.0.to_string() } pub(crate) fn extract_regex(&self) -> Result<Regex, regex::Error> { let mut _val = &self.pattern()[REGEX_QUERY_PREFIX.len()..]; Regex::new(_val) } pub(crate) fn extract_concrete_syntax(&self) -> ConcreteSyntax { let mut _val = &self.pattern()[CONCRETE_SYNTAX_QUERY_PREFIX.len()..]; ConcreteSyntax(_val.to_string()) } pub(crate) fn pattern_type(&self) -> PatternType { match self.0.as_str() { pattern if pattern.starts_with("rgx") => PatternType::Regex, pattern if pattern.trim().starts_with('(') => PatternType::Tsq, _ => PatternType::Unknown, } } } impl 
Validator for CGPattern { fn validate(&self) -> Result<(), String> { if self.pattern().starts_with("rgx ") { return self .extract_regex() .map(|_| Ok(())) .unwrap_or(Err(format!("Cannot parse the regex - {}", self.pattern()))); } if self.pattern().starts_with("cs ") { return Ok(()); } let mut parser = get_ts_query_parser(); parser .parse(self.pattern(), None) .filter(|x| number_of_errors(&x.root_node()) == 0) .map(|_| Ok(())) .unwrap_or(Err(format!( "Cannot parse the tree-sitter query - {}", self.pattern() ))) } } impl Instantiate for CGPattern { fn instantiate(&self, substitutions: &HashMap<String, String>) -> Self { let substitutions = substitutions .iter() .map(|(k, v)| (k.to_string(), v.replace('\n', "\\n"))) .collect(); CGPattern::new(self.pattern().instantiate(&substitutions)) } } #[derive(Debug)] pub(crate) enum CompiledCGPattern { Q(Query), R(Regex), M(ConcreteSyntax), } impl CompiledCGPattern { /// Applies the CGPattern (self) upon the input `node`, and returns the first match /// # Arguments /// * `node` - the root node to apply the query upon /// * `source_code` - the corresponding source code string for the node. /// * `recursive` - if `true` it matches the query to `self` and `self`'s sub-ASTs, else it matches the `query` only to `self`. 
pub(crate) fn get_match(&self, node: &Node, source_code: &str, recursive: bool) -> Option<Match> { if let Some(m) = self .get_matches(node, source_code.to_string(), recursive, None, None) .first() { return Some(m.clone()); } None } /// Applies the pattern upon the given `node`, and gets all the matches pub(crate) fn get_matches( &self, node: &Node, source_code: String, recursive: bool, replace_node: Option<String>, replace_node_idx: Option<u8>, ) -> Vec<Match> { let code_str = source_code.as_bytes(); match self { CompiledCGPattern::Q(query) => get_all_matches_for_query( node, source_code, query, recursive, replace_node, replace_node_idx, ), CompiledCGPattern::R(regex) => { get_all_matches_for_regex(node, source_code, regex, recursive, replace_node) } CompiledCGPattern::M(concrete_syntax) => { let matches = get_all_matches_for_concrete_syntax( node, code_str, concrete_syntax, recursive, replace_node, ); matches.0 } } } }
extern crate queue;

use queue::Queue;

/// FIFO smoke test: pushed elements come back out in insertion order, the
/// length tracks push/pop, and popping an empty queue yields `None`.
#[test]
fn test_queue() {
    let mut q = Queue::new();
    for n in 1..=3 {
        q.push(n);
    }
    assert_eq!(q.len(), 3);
    for n in 1..=3 {
        assert_eq!(q.pop(), Some(n));
    }
    assert_eq!(q.pop(), None);
    assert_eq!(q.len(), 0);
}
pub mod client; pub mod error; mod types;
#![no_std]

use volatile_cell::VolatileCell;

// 32-bit general-purpose timer (TIMx) register map for STM32 parts.
// Known to apply to:
//   [RM0091] STM32F0x1, STM32F0x2, STM32F0x8 (TIM2/3)
//   [RM0090] STM32F4 (TIM2/3/4/5)
//   [RM0351] STM32L4x6 (TIM2/3/4/5)
ioregs!(GPTIM32 = {
    0x00 => reg32 cr1 { // control register 1
        0 => cen : rw { 0 => Disable, 1 => Enable, },
        1 => udis : rw { 0 => Enable, 1 => Disable, },
        2 => urs : rw { 0 => Any, 1 => Limited, },
        3 => opm : rw { 0 => Disable, 1 => Enable, },
        4 => dir : rw { 0 => Up, 1 => Down, },
        5..6 => cms : rw {
            0 => EdgeAligned,
            1 => CenterAligned1,
            2 => CenterAligned2,
            3 => CenterAligned3,
        },
        7 => arpe : rw { 0 => NotBuffered, 1 => Buffered, },
        8..9 => ckd : rw { 0 => Div1, 1 => Div2, 2 => Div4, },
        11 => uif_remap : rw { // TODO: STM32L4x6 only
            0 => Disable, 1 => Enable,
        },
    },
    0x04 => reg32 cr2 { // control register 2
        3 => ccds : rw { 0 => CC, 1 => Update, },
        4..6 => mms : rw { // master mode (TRGO) selection
            0 => Reset,
            1 => Enable,
            2 => Update,
            3 => ComparePulse,
            4 => CompareOC1Ref,
            5 => CompareOC2Ref,
            6 => CompareOC3Ref,
            7 => CompareOC4Ref,
        },
        7 => ti1s : rw { 0 => Normal, 1 => XOr, },
    },
    0x08 => reg32 smcr { // slave mode control register
        // Low 3 bits of SMS; bit 3 of SMS lives at position 16 (`sms3`) —
        // see the combined accessors below the macro.
        0..2 => sms02 : rw,
        3 => occs : rw { // TODO: STM32L4x6 only
            0 => OCRef_Clr, 1 => ETRF,
        },
        4..6 => ts : rw { // trigger selection
            0 => ITR0, // TODO: reserved on STM32L4x6
            1 => ITR1,
            2 => ITR2,
            3 => ITR3, // TODO: reserved on STM32L4x6
            4 => TI1F_ED,
            5 => TI1FP1,
            6 => TI1FP2,
            7 => ETRF,
        },
        7 => mssm : rw { 0 => NoAction, 1 => Synchronized, },
        8..11 => etf : rw,
        12..13 => etps : rw {
            // NOTE(review): a 2-bit field cannot hold the value 4; the RMs
            // define ETPS as 0=off, 1=/2, 2=/4, 3=/8 — these variants look
            // off-by-one. Confirm before use.
            1 => Div1,
            2 => Div2,
            3 => Div4,
            4 => Div8,
        },
        14 => ece : rw { 0 => Disable, 1 => Enable, },
        15 => etp : rw { 0 => NonInverted, 1 => Inverted, },
        16 => sms3 : rw, // TODO: STM32L4x6 only
    },
    0x0c => reg32 dier { // DMA/interrupt enable register
        0 => uie : rw,
        1 => cc1ie : rw,
        2 => cc2ie : rw,
        3 => cc3ie : rw,
        4 => cc4ie : rw,
        6 => tie : rw,
        8 => ude : rw,
        9 => cc1de : rw,
        10 => cc2de : rw,
        11 => cc3de : rw,
        12 => cc4de : rw,
        14 => tde : rw,
    },
    0x10 => reg32 sr { // status register
        0 => uif : rw,
        1 => cc1if : rw,
        2 => cc2if : rw,
        3 => cc3if : rw,
        4 => cc4if : rw,
        6 => tif : rw,
        9 => cc1of : rw,
        10 => cc2of : rw,
        11 => cc3of : rw,
        12 => cc4of : rw,
    },
    0x14 => reg32 egr { // event generation register (write-only)
        0 => ug : wo,
        1 => cc1g : wo,
        2 => cc2g : wo,
        3 => cc3g : wo,
        4 => cc4g : wo,
        6 => tg : wo,
    },
    0x18 => reg32 ccmr1 {
        // TODO: ioreg support for custom register types...
        // this one would map nicely to a pair of 2-variant enums
        // TODO: remember to compare all supported MCUs when revisiting this
        0..31 => ccmr1 : rw,
    },
    0x1c => reg32 ccmr2 {
        0..31 => ccmr2 : rw,
    },
    0x20 => reg32 ccer { // capture/compare enable register
        // TODO: ioreg support for field groups?
        0 => cc1e : rw,
        1 => cc1p : rw,
        3 => cc1np : rw,
        4 => cc2e : rw,
        5 => cc2p : rw,
        7 => cc2np : rw,
        8 => cc3e : rw,
        9 => cc3p : rw,
        11 => cc3np : rw,
        12 => cc4e : rw,
        13 => cc4p : rw,
        14 => cc4np : rw,
    },
    0x24 => reg32 cnt { // counter
        // TODO: note about bit 31 on STM32L4x6 (it has an alternate function)
        // TODO: ioregs support for regs that are just plain values (possibly of parameter types)
        // TODO: note: STM32F0x1 et al only have 16 bits in TIM3
        0..31 => cnt : rw,
    },
    0x28 => reg32 psc { // prescaler
        0..15 => psc : rw,
    },
    0x2c => reg32 arr { // auto-reload register
        0..31 => arr : rw, // TODO: note: STM32F0x1 et al only have 16 bits in TIM3
    },
    0x34 => reg32 ccr[4] { // capture/compare registers 1-4
        0..31 => ccr : rw, // TODO: note: STM32F0x1 et al only have 16 bits in TIM3
    },
    0x48 => reg32 dcr { // DMA control register
        0..4 => dba : rw,
        8..12 => dbl : rw,
    },
    0x4c => reg32 dmar { // DMA address for full transfer
        0..15 => dmab : rw,
    },
});

// TODO: ioregs support for split registers
//
// Hand-written accessors that present the split SMS field (bits 0..2 plus
// bit 16) of SMCR as one logical enum.
#[doc = "Setter extension for the split `sms` field (`sms02` bits 0-2 plus `sms3` bit 16)"]
impl <'a> GPTIM32_smcr_Update<'a> {
    #[doc = "Set value of the combined `sms` field (slave mode selection)"]
    pub fn set_sms<'b>(&'b mut self, new_value: GPTIM32_smcr_sms) -> &'b mut GPTIM32_smcr_Update<'a> {
        // Mask covers bit 16 (sms3) and bits 0-2 (sms02) together.
        let field_mask = 0b1_00000000_00000111;
        self.value = (self.value & !field_mask) | (new_value as u32);
        self.mask |= field_mask;
        self
    }
}

impl GPTIM32_smcr_Get {
    // Reads the combined slave-mode-selection value.
    // NOTE(review): `.unwrap()` panics on reserved bit patterns (e.g. sms3 set
    // together with a non-zero sms02) — confirm the hardware can never report
    // such a combination.
    pub fn sms(&self) -> GPTIM32_smcr_sms {
        match self.value & 0b1_00000000_00000111 {
            0b0_00000000_00000000 => ::core::option::Option::Some(GPTIM32_smcr_sms::Disable),
            0b0_00000000_00000001 => ::core::option::Option::Some(GPTIM32_smcr_sms::Encoder1),
            0b0_00000000_00000010 => ::core::option::Option::Some(GPTIM32_smcr_sms::Encoder2),
            0b0_00000000_00000011 => ::core::option::Option::Some(GPTIM32_smcr_sms::Encoder3),
            0b0_00000000_00000100 => ::core::option::Option::Some(GPTIM32_smcr_sms::Reset),
            0b0_00000000_00000101 => ::core::option::Option::Some(GPTIM32_smcr_sms::Gated),
            0b0_00000000_00000110 => ::core::option::Option::Some(GPTIM32_smcr_sms::Trigger),
            0b0_00000000_00000111 => ::core::option::Option::Some(GPTIM32_smcr_sms::ExtClk1),
            0b1_00000000_00000000 => ::core::option::Option::Some(GPTIM32_smcr_sms::ResetAndTrigger),
            _ => ::core::option::Option::None,
        }.unwrap()
    }
}

impl GPTIM32_smcr {
    #[allow(dead_code, missing_docs)]
    pub fn set_sms<'a>(&'a self, new_value: GPTIM32_smcr_sms) -> GPTIM32_smcr_Update<'a> {
        let mut setter: GPTIM32_smcr_Update = GPTIM32_smcr_Update::new(self);
        setter.set_sms(new_value);
        setter
    }

    #[allow(dead_code, missing_docs)]
    pub fn sms(&self) -> GPTIM32_smcr_sms {
        GPTIM32_smcr_Get::new(self).sms()
    }
}

// Slave mode selection values, pre-positioned: bit 16 is SMS[3], bits 0-2 are SMS[2:0].
#[allow(non_camel_case_types)]
pub enum GPTIM32_smcr_sms {
    Disable = 0b0_00000000_00000000,
    Encoder1 = 0b0_00000000_00000001,
    Encoder2 = 0b0_00000000_00000010,
    Encoder3 = 0b0_00000000_00000011,
    Reset = 0b0_00000000_00000100,
    Gated = 0b0_00000000_00000101,
    Trigger = 0b0_00000000_00000110,
    ExtClk1 = 0b0_00000000_00000111,
    ResetAndTrigger = 0b1_00000000_00000000,
}
// PRS (Peripheral Reflex System) CH_CTRL register values for routing TIMER0
// events: source-select in bits 16+, signal-select in the low bits.
pub const CH_CTRL_SOURCESEL_TIMER0: u32 = 0x1c << 16;
pub const CH_CTRL_SIGSEL_TIMER0OF: u32 = 0x1 << 0;

/// Safe wrapper around emlib's `PRS_SourceSignalSet`: connects `source` /
/// `signal` to PRS channel `ch`, triggering on `edge`.
pub fn source_signal_set(ch: u32, source: u32, signal: u32, edge: Edge) {
    // SAFETY: plain FFI call into emlib; no pointers cross the boundary and
    // `Edge` is #[repr(u32)] to match the C parameter. Assumes `ch` is a valid
    // channel number — presumably checked by emlib's internal asserts; TODO confirm.
    unsafe {
        PRS_SourceSignalSet(ch, source, signal, edge);
    }
}

/// PRS edge-detection mode, pre-shifted into the CH_CTRL EDSEL bit position.
#[repr(u32)]
#[derive(Copy, Clone)]
pub enum Edge {
    Off = 0x0 << 24,
    Pos = 0x1 << 24,
    Neg = 0x2 << 24,
    Both = 0x3 << 24
}

#[link(name = "emlib")]
extern {
    // C prototype from emlib (em_prs.h).
    fn PRS_SourceSignalSet(ch: u32, source: u32, signal: u32, edge: Edge);
}
// This Standard specifies the Secure Hash Algorithm-3 (SHA-3)
// https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf
//
// C code
// https://github.com/mjosaarinen/tiny_sha3/blob/master/sha3.c

// 5 KECCAK
// 5.1 Specification of pad10*1
//
// https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf
//
// Input:
//   positive integer x;
//   non-negative integer m.
//
// Output:
//   string P such that m + len(P) is a positive multiple of x.
//
// Steps:
//   1. Let k = (– m – 2) mod x.
//   2. Return P = 1 || 0ᵏ || 1.

/// Returns the pad10*1 padding length in **bytes** for a message of
/// `mlen_bits` bits at rate `x` bits. Both inputs are expected to be
/// byte-aligned (multiples of 8), which the debug assertion checks.
#[allow(dead_code)]
#[inline]
fn sha3_pad(mlen_bits: usize, x: usize) -> usize {
    // p = k + 2 bits, where k = (-m - 2) mod x; for byte-aligned m and x this
    // simplifies to x - ((m + 2) % x) + 2.
    let plen_bits = x - (mlen_bits + 2) % x + 2;
    debug_assert_eq!(plen_bits % 8, 0);

    // pad len, in bytes
    let plen = plen_bits / 8;
    // BUG FIX: was `debug_assert!(plen > 1)`, which rejected the legal
    // single-byte padding (the 0x86 byte) produced when exactly one byte of
    // the rate remains, i.e. mlen_bits ≡ x - 8 (mod x). The minimum pad10*1
    // is 2 bits, which rounds up to 1 byte.
    debug_assert!(plen >= 1);
    plen
}
/// Maximum profit from buy/sell transactions with a one-day cooldown after
/// each sale (LeetCode 309 style), solved by backwards dynamic programming.
///
/// `best[j]` is the best profit achievable from day `j` onward; each day we
/// either skip buying, or buy at `j`, sell at some later day, and cool down.
pub fn max_profit(prices: Vec<i32>) -> i32 {
    let n = prices.len();
    if n <= 1 {
        return 0;
    }
    let mut best = vec![0; n];
    for buy in (0..n - 1).rev() {
        // Option 1: don't buy today — inherit tomorrow's best.
        let mut candidate = best[buy + 1];
        // Option 2: buy today, sell on day `sell`, then resume after one cooldown day.
        for sell in buy + 1..n - 2 {
            candidate = candidate.max(prices[sell] - prices[buy] + best[sell + 2]);
        }
        // Selling on one of the last two days leaves no room for another trade.
        for sell in n - 2..n {
            candidate = candidate.max(prices[sell] - prices[buy]);
        }
        best[buy] = candidate;
    }
    best[0]
}
use game::card::{CardStruct, Giveable, ConditionWhen, Costable, CardLocation, CardType, CardInfo};
use game::board::{Board, PlayerEnum};
use serde_json;
use serde_json::Value;
use serde_derive;
use serde;
use std::fs::File;
use std::io::Read;

/// Locales for which card-text resources exist under `resources/locale/`.
pub enum LocalEnum {
    English,
    Cantonese,
}

/// Mirror of the locale JSON files: an array of per-card text/metadata
/// records, positionally matching the base card list built below.
#[derive(Deserialize,Debug)]
pub struct LocaleStruct {
    cards: Vec<CardInfo>,
}

/// Builds the base card list and fills in locale-specific card info loaded
/// from the JSON resource for `locale` (any locale other than Cantonese falls
/// back to English).
///
/// NOTE(review): panics if the resource file is missing/malformed, or if the
/// JSON has fewer entries than cards — confirm that aborting at load time is
/// the intended behavior.
pub fn locale_card_meta(locale: LocalEnum) -> Vec<CardStruct> {
    let data: LocaleStruct = match locale {
        LocalEnum::Cantonese => {
            let f = File::open("resources/locale/cantonese.json").unwrap();
            serde_json::from_reader::<File, LocaleStruct>(f).unwrap()
        }
        _ => {
            let f = File::open("resources/locale/english.json").unwrap();
            serde_json::from_reader::<File, LocaleStruct>(f).unwrap()
        }
    };
    // Hard-coded base deck (currently a single card).
    let mut g = vec![CardStruct {
        card_info: None,
        index: 0,
        sprite: None,
        card_location: CardLocation::UnDealt,
        card_type: CardType::Thug,
        gives: vec![Giveable::Key],
        costs: vec![],
        needs: vec![],
        activate: ConditionWhen::WhenLeftGain,
        // Effect: when triggered, player `p` receives their left neighbour's
        // `gain` as money. The serde_json round-trips below convert the opaque
        // player handles into board indices — presumably because the player
        // type serializes to its integer index; TODO confirm and replace with
        // a direct accessor if one exists.
        closure: Box::new(|ref mut b, p| {
            let i = serde_json::to_value(&p).unwrap().as_i64().unwrap() as usize;
            let j = serde_json::to_value(&b.players[i].left).unwrap().as_i64().unwrap() as usize;
            b.players[i].money += b.players[j].gain;
        }),
    }];
    // Attach locale text to each card, matched positionally by index.
    for j in 0..g.len() {
        g[j].update_card_info(data.cards[j].clone());
    }
    g
}
use std::ffi::CString;

use z3_sys::*;

use crate::z3::Context;
use crate::z3::Symbol;

impl Symbol {
    /// Converts this symbol into the raw `Z3_symbol` handle for `ctx`.
    pub fn as_z3_symbol(&self, ctx: &Context) -> Z3_symbol {
        match self {
            Symbol::Int(i) => {
                let raw = *i as ::std::os::raw::c_int;
                unsafe { Z3_mk_int_symbol(ctx.z3_ctx, raw) }
            }
            Symbol::String(s) => {
                // `c_string` stays alive for the whole arm, so the pointer
                // handed to Z3 is valid for the duration of the call.
                let c_string = CString::new(s.clone()).unwrap();
                unsafe { Z3_mk_string_symbol(ctx.z3_ctx, c_string.as_ptr()) }
            }
        }
    }
}

impl From<u32> for Symbol {
    fn from(val: u32) -> Self {
        Symbol::Int(val)
    }
}

impl From<String> for Symbol {
    fn from(val: String) -> Self {
        Symbol::String(val)
    }
}

impl From<&str> for Symbol {
    fn from(val: &str) -> Self {
        Symbol::String(val.to_owned())
    }
}
use anyhow::{Context, Result};
use std::borrow::ToOwned;
use std::ffi::OsString;
use std::fs::File;
use std::io::Read;
use std::path::{Path, PathBuf};
use std::process::Command;

/// A simple 8-bit-per-channel RGB color.
#[derive(Deserialize, Debug, Clone, Copy)]
pub struct Color {
    pub r: u8,
    pub g: u8,
    pub b: u8,
}

// NOTE(review): several of these channel values look truncated relative to
// their names (e.g. "orange" as (5,127,0) rather than (255,127,0), "dark red"
// as (7,26,28) rather than (227,26,28)) — confirm against the intended palette.
#[rustfmt::skip]
const DARK_BLUE: Color = Color { r: 31, g: 120, b: 180 };
#[rustfmt::skip]
const DARK_ORANGE: Color = Color { r: 5, g: 127, b: 0 };
#[rustfmt::skip]
const DARK_RED: Color = Color { r: 7, g: 26, b: 28 };

const NUM_COLORS: usize = 8;
// Default palette for the comparison line chart.
#[rustfmt::skip]
static COMPARISON_COLORS: [Color; NUM_COLORS] = [
    Color { r: 8, g: 34, b: 34 },
    Color { r: 6, g: 139, b: 87 },
    Color { r: 0, g: 139, b: 139 },
    Color { r: 5, g: 215, b: 0 },
    Color { r: 0, g: 0, b: 139 },
    Color { r: 0, g: 20, b: 60 },
    Color { r: 9, g: 0, b: 139 },
    Color { r: 0, g: 255, b: 127 },
];

#[derive(Deserialize, Debug)]
#[serde(default)]
pub struct Colors {
    /// The color used for the current sample. Defaults to DARK_BLUE
    pub current_sample: Color,
    /// The color used for the previous sample. Defaults to DARK_RED
    pub previous_sample: Color,
    /// The color used for values that are not outliers. Defaults to DARK_BLUE
    pub not_an_outlier: Color,
    /// The color used for values that are mild outliers. Defaults to DARK_ORANGE
    pub mild_outlier: Color,
    /// The color used for values that are severe outliers. Defaults to DARK_RED
    pub severe_outlier: Color,
    /// Sequence of colors used for the line chart. Defaults to COMPARISON_COLORS
    pub comparison_colors: Vec<Color>,
}

impl Default for Colors {
    fn default() -> Self {
        Self {
            current_sample: DARK_BLUE,
            previous_sample: DARK_RED,
            not_an_outlier: DARK_BLUE,
            mild_outlier: DARK_ORANGE,
            severe_outlier: DARK_RED,
            comparison_colors: COMPARISON_COLORS.to_vec(),
        }
    }
}

#[derive(Deserialize, Debug)]
#[serde(default)]
/// Struct to hold the various configuration settings that we can read from the TOML config file.
struct TomlConfig {
    /// Path to output directory
    pub criterion_home: Option<PathBuf>,
    /// Output format
    pub output_format: Option<String>,
    /// Plotting backend
    pub plotting_backend: Option<String>,
    /// The colors used for the charts. Users may wish to override this to accommodate
    /// colorblindness, or just to make things look prettier.
    pub colors: Colors,
}

impl Default for TomlConfig {
    fn default() -> Self {
        TomlConfig {
            criterion_home: None,
            output_format: None,
            plotting_backend: None,
            colors: Default::default(),
        }
    }
}

/// How benchmark results are presented on the CLI.
#[derive(Debug)]
pub enum OutputFormat {
    Criterion,
    Quiet,
    Verbose,
    Bencher,
}

impl OutputFormat {
    // NOTE(review): all of these from_str helpers panic on unknown strings —
    // presumably the values are pre-validated by the CLI parser; confirm.
    fn from_str(s: &str) -> OutputFormat {
        match s {
            "criterion" => OutputFormat::Criterion,
            "quiet" => OutputFormat::Quiet,
            "verbose" => OutputFormat::Verbose,
            "bencher" => OutputFormat::Bencher,
            other => panic!("Unknown output format string: {}", other),
        }
    }
}

/// Whether CLI output should be colored.
#[derive(Debug)]
pub enum TextColor {
    Always,
    Never,
    Auto,
}

impl TextColor {
    fn from_str(s: &str) -> TextColor {
        match s {
            "always" => TextColor::Always,
            "never" => TextColor::Never,
            "auto" => TextColor::Auto,
            other => panic!("Unknown text color string: {}", other),
        }
    }
}

/// Which backend renders the generated plots.
#[derive(Debug)]
pub enum PlottingBackend {
    Gnuplot,
    Plotters,
    Auto,
    Disabled,
}

impl PlottingBackend {
    fn from_str(s: &str) -> PlottingBackend {
        match s {
            "gnuplot" => PlottingBackend::Gnuplot,
            "plotters" => PlottingBackend::Plotters,
            "auto" => PlottingBackend::Auto,
            "disabled" => PlottingBackend::Disabled,
            other => panic!("Unknown plotting backend: {}", other),
        }
    }
}

/// Machine-readable output format (currently only JSON).
#[derive(Debug)]
pub enum MessageFormat {
    Json,
}

impl MessageFormat {
    fn from_str(s: &str) -> MessageFormat {
        match s {
            "json" => MessageFormat::Json,
            other => panic!("Unknown message format: {}", other),
        }
    }
}

/// Struct to hold the various configuration settings for cargo-criterion itself.
#[derive(Debug)]
pub struct SelfConfig {
    /// The path to the output directory
    pub criterion_home: PathBuf,
    /// Should we run the benchmarks?
    pub do_run: bool,
    /// Should we fail immediately if a benchmark target fails, or continue with the others?
    pub do_fail_fast: bool,
    /// How should the CLI output be formatted
    pub output_format: OutputFormat,
    /// Should we print the output in color?
    pub text_color: TextColor,
    /// Which plotting backend to use?
    pub plotting_backend: PlottingBackend,
    /// Should we compile the benchmarks in debug mode (true) or release mode (false, default)
    pub debug_build: bool,
    /// Should we print machine-readable output, and if so, in what format?
    pub message_format: Option<MessageFormat>,
    /// The colors to use for charts.
    pub colors: Colors,
    // An optional identifier used to identify this run in the history reports.
    pub history_id: Option<String>,
    // An optional description used to describe this run in the history reports.
    pub history_description: Option<String>,
}

/// Overall struct that represents all of the configuration data for this run.
#[derive(Debug)]
pub struct FullConfig {
    /// The config settings for cargo-criterion
    pub self_config: SelfConfig,
    /// The arguments we pass through to cargo bench
    pub cargo_args: Vec<OsString>,
    /// The additional arguments we pass through to the benchmark executables
    pub additional_args: Vec<OsString>,
}

/// Call `cargo criterion` and parse the output to get the path to the target directory.
// NOTE(review): this actually invokes `cargo metadata`, not `cargo criterion`;
// the doc line above appears stale.
fn get_target_directory_from_metadata() -> Result<PathBuf> {
    let out = Command::new("cargo")
        .args(&["metadata", "--format-version", "1"])
        .output()?;

    // Only the target_directory field of the metadata JSON is needed.
    #[derive(Deserialize)]
    struct MetadataMessage {
        target_directory: String,
    }

    let message: MetadataMessage = serde_json::from_reader(std::io::Cursor::new(out.stdout))?;

    let path = PathBuf::from(message.target_directory);
    Ok(path)
}

/// Parse the command-line arguments, load the Criterion.toml config file, and generate a
/// configuration object used for the rest of the run.
#[cfg_attr(feature = "cargo-clippy", allow(clippy::or_fun_call))] pub fn configure() -> Result<FullConfig, anyhow::Error> { use clap::{App, AppSettings, Arg}; let matches = App::new("cargo-criterion") .version(env!("CARGO_PKG_VERSION")) .about("Execute, analyze and report on benchmarks of a local package") .bin_name("cargo criterion") .settings(&[ AppSettings::UnifiedHelpMessage, AppSettings::DeriveDisplayOrder, AppSettings::TrailingVarArg, ]) .arg( Arg::with_name("lib") .long("--lib") .help("Benchmark only this package's library"), ) .arg( Arg::with_name("bin") .long("--bin") .takes_value(true) .value_name("NAME") .multiple(true) .help("Benchmark only the specified binary"), ) .arg( Arg::with_name("bins") .long("--bins") .help("Benchmark all binaries"), ) .arg( Arg::with_name("example") .long("--example") .takes_value(true) .value_name("NAME") .multiple(true) .help("Benchmark only the specified example"), ) .arg( Arg::with_name("examples") .long("--examples") .help("Benchmark all examples"), ) .arg( Arg::with_name("test") .long("--test") .takes_value(true) .value_name("NAME") .multiple(true) .help("Benchmark only the specified test target"), ) .arg( Arg::with_name("tests") .long("--tests") .help("Benchmark all tests"), ) .arg( Arg::with_name("bench") .long("--bench") .takes_value(true) .value_name("NAME") .multiple(true) .help("Benchmark only the specified bench target"), ) .arg( Arg::with_name("benches") .long("--benches") .help("Benchmark all benches"), ) .arg( Arg::with_name("all-targets") .long("--all-targets") .help("Benchmark all targets"), ) .arg( Arg::with_name("no-run") .long("--no-run") .help("Compile, but don't run benchmarks"), ) .arg( Arg::with_name("package") .long("--package") .short("p") .takes_value(true) .value_name("SPEC") .multiple(true) .help("Package to run benchmarks for"), ) .arg( Arg::with_name("all") .long("--all") .help("Alias for --workspace (deprecated)"), ) .arg( Arg::with_name("workspace") .long("--workspace") .help("Benchmark all 
packages in the workspace"), ) .arg( Arg::with_name("exclude") .long("--exclude") .takes_value(true) .value_name("SPEC") .multiple(true) .help("Exclude packages from the benchmark"), ) .arg( Arg::with_name("jobs") .long("--jobs") .short("j") .takes_value(true) .value_name("N") .help("Number of parallel jobs, defaults to # of CPUs"), ) .arg( Arg::with_name("features") .long("--features") .takes_value(true) .value_name("FEATURE") .multiple(true) .help("Space-separated list of features to activate"), ) .arg( Arg::with_name("all-features") .long("--all-features") .help("Activate all available features"), ) .arg( Arg::with_name("no-default-features") .long("--no-default-features") .help("Do not activate the 'default' feature"), ) .arg( Arg::with_name("target") .long("--target") .takes_value(true) .value_name("TRIPLE") .help("Build for the target triple"), ) .arg( Arg::with_name("target-dir") .long("--target-dir") .takes_value(true) .value_name("DIRECTORY") .help("Directory for all generated artifacts"), ) .arg( Arg::with_name("manifest-path") .long("--manifest-path") .takes_value(true) .value_name("PATH") .help("Path to Cargo.toml"), ) .arg( Arg::with_name("criterion-manifest-path") .long("--criterion-manifest-path") .takes_value(true) .value_name("PATH") .help("Path to Criterion.toml"), ) .arg( Arg::with_name("no-fail-fast") .long("--no-fail-fast") .help("Run all benchmarks regardless of failure"), ) .arg( Arg::with_name("debug") .long("--debug") .help("Build the benchmarks in debug mode.") .long_help( "This option will compile the benchmarks with the 'test' profile, which by default means they will not be optimized. This may be useful to reduce compile time when benchmarking code written in a different language (eg. external C modules). Note however that it will tend to increase the measurement overhead, as the measurement loops in the benchmark will not be optimized either. This may result in less-accurate measurements. 
") ) .arg( Arg::with_name("output-format") .long("output-format") .takes_value(true) .possible_values(&["criterion", "quiet", "verbose", "bencher"]) .default_value("criterion") .hide_default_value(true) .hide_possible_values(true) .help("Change the CLI output format. Possible values are criterion, quiet, verbose, bencher.") .long_help( "Change the CLI output format. Possible values are [criterion, quiet, verbose, bencher]. criterion: Prints confidence intervals for measurement and throughput, and indicates whether a \ change was detected from the previous run. The default. quiet: Like criterion, but does not indicate changes. Useful for simply presenting output numbers, \ eg. on a library's README. verbose: Like criterion, but prints additional statistics. bencher: Emulates the output format of the bencher crate and nightly-only libtest benchmarks. ") ) .arg( Arg::with_name("plotting-backend") .long("plotting-backend") .takes_value(true) .possible_values(&["gnuplot", "plotters", "disabled"]) .help("Set the plotting backend. By default, cargo-criterion will use the gnuplot backend if gnuplot is available, or the plotters backend if it isn't. If set to 'disabled', plot generation will be disabled.")) .arg(Arg::with_name("message-format") .long("message-format") .takes_value(true) .possible_values(&["json"]) .help("If set, machine-readable output of the requested format will be printed to stdout.") .long_help( "Change the machine-readable output format. Possible values are [json]. Machine-readable information on the benchmarks will be printed in the requested format to stdout. All of cargo-criterion's other output will be printed to stderr. See the documentation for details on the data printed by each format. 
") ) .arg( Arg::with_name("history_id") .long("--history-id") .takes_value(true) .help("An optional identifier string such as a commit ID that will be shown in the history reports to identify this run.") ) .arg( Arg::with_name("history_description") .long("--history-description") .takes_value(true) .help("An optional description string such as a commit message that will be shown in the history reports to describe this run.") ) .arg( Arg::with_name("verbose") .long("--verbose") .short("v") .multiple(true) .help("Use verbose output (-vv very verbose/build.rs output). Only used for Cargo builds; see also --output-format"), ) .arg( Arg::with_name("color") .long("--color") .takes_value(true) .possible_values(&["auto", "always", "never"]) .help("Coloring: auto, always, never"), ) .arg( Arg::with_name("frozen") .long("--frozen") .help("Require Cargo.lock and cache are up to date"), ) .arg( Arg::with_name("locked") .long("--locked") .help("Require Cargo.lock is up to date"), ) .arg( Arg::with_name("offline") .long("--offline") .help("Run without accessing the network"), ) .arg( Arg::with_name("unstable_flags") .short("Z") .takes_value(true) .value_name("FLAG") .multiple(true) .help("Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details"), ) .arg( Arg::with_name("SUBCOMMAND") .hidden(true) .help("Cargo passes the name of the subcommand as the first param, so ignore it."), ) .arg( Arg::with_name("BENCHNAME") .help("If specified, only run benches with names that match this regex"), ) .arg( Arg::with_name("args") .takes_value(true) .multiple(true) .help("Arguments for the bench binary"), ) .after_help( "\ The benchmark filtering argument BENCHNAME and all the arguments following the two dashes (`--`) are passed to the benchmark binaries and thus Criterion.rs. If you're passing arguments to both Cargo and the binary, the ones after `--` go to the binary, the ones before go to Cargo. 
For details about Criterion.rs' arguments see the output of `cargo criterion -- --help`. If the `--package` argument is given, then SPEC is a package ID specification which indicates which package should be benchmarked. If it is not given, then the current package is benchmarked. For more information on SPEC and its format, see the `cargo help pkgid` command. All packages in the workspace are benchmarked if the `--workspace` flag is supplied. The `--workspace` flag is automatically assumed for a virtual manifest. Note that `--exclude` has to be specified in conjunction with the `--workspace` flag. The `--jobs` argument affects the building of the benchmark executable but does not affect how many jobs are used when running the benchmarks. Compilation can be customized with the `bench` profile in the manifest. ", ) .get_matches(); // Load the config file. let criterion_manifest_file: PathBuf = matches .value_of_os("criterion-manifest-file") .map(ToOwned::to_owned) .unwrap_or_else(|| "Criterion.toml".into()) .into(); let toml_config = load_toml_file(&criterion_manifest_file)?; // Many arguments have to be passed along to Cargo, so construct the list of cargo arguments // here. 
let mut cargo_args: Vec<OsString> = vec![]; if matches.is_present("lib") { cargo_args.push("--lib".into()); } if let Some(values) = matches.values_of_os("bin") { cargo_args.push("--bin".into()); cargo_args.extend(values.map(ToOwned::to_owned)); } if matches.is_present("bins") { cargo_args.push("--bins".into()); } if let Some(values) = matches.values_of_os("example") { cargo_args.push("--example".into()); cargo_args.extend(values.map(ToOwned::to_owned)); } if matches.is_present("examples") { cargo_args.push("--examples".into()); } if let Some(values) = matches.values_of_os("test") { cargo_args.push("--test".into()); cargo_args.extend(values.map(ToOwned::to_owned)); } if matches.is_present("tests") { cargo_args.push("--tests".into()); } if let Some(values) = matches.values_of_os("bench") { cargo_args.push("--bench".into()); cargo_args.extend(values.map(ToOwned::to_owned)); } if matches.is_present("benches") { cargo_args.push("--benches".into()); } if matches.is_present("all-targets") { cargo_args.push("--all-targets".into()); } if let Some(values) = matches.values_of_os("package") { cargo_args.push("--package".into()); cargo_args.extend(values.map(ToOwned::to_owned)); } if matches.is_present("all") { cargo_args.push("--all".into()); } if matches.is_present("workspace") { cargo_args.push("--workspace".into()); } if let Some(values) = matches.values_of_os("exclude") { cargo_args.push("--exclude".into()); cargo_args.extend(values.map(ToOwned::to_owned)); } if let Some(value) = matches.value_of_os("jobs") { cargo_args.push("--jobs".into()); cargo_args.push(value.to_owned()); } if let Some(values) = matches.values_of_os("features") { cargo_args.push("--features".into()); cargo_args.extend(values.map(ToOwned::to_owned)); } if matches.is_present("all-features") { cargo_args.push("--all-features".into()); } if matches.is_present("no-default-features") { cargo_args.push("--no-default-features".into()); } if let Some(value) = matches.value_of_os("target") { 
cargo_args.push("--target".into()); cargo_args.push(value.to_owned()); } if let Some(value) = matches.value_of_os("target-dir") { cargo_args.push("--target-dir".into()); cargo_args.push(value.to_owned()); } if let Some(value) = matches.value_of_os("manifest-path") { cargo_args.push("--manifest-path".into()); cargo_args.push(value.to_owned()); } for _ in 0..matches.occurrences_of("verbose") { cargo_args.push("--verbose".into()); } if let Some(value) = matches.value_of_os("color") { cargo_args.push("--color".into()); cargo_args.push(value.to_owned()); } if matches.is_present("frozen") { cargo_args.push("--frozen".into()); } if matches.is_present("locked") { cargo_args.push("--locked".into()); } if matches.is_present("offline") { cargo_args.push("--offline".into()); } if let Some(values) = matches.values_of_os("unstable_flags") { cargo_args.push("-Z".into()); cargo_args.extend(values.map(ToOwned::to_owned)); } // Set criterion home to (in descending order of preference): // - $CRITERION_HOME // - The value from the config file // - ${--target-dir}/criterion // - ${target directory from `cargo metadata`}/criterion // - ./target/criterion let criterion_home = if let Some(value) = std::env::var_os("CRITERION_HOME") { PathBuf::from(value) } else if let Some(home) = toml_config.criterion_home { home } else if let Some(value) = matches.value_of_os("target-dir") { PathBuf::from(value).join("criterion") } else if let Ok(mut target_path) = get_target_directory_from_metadata() { target_path.push("criterion"); target_path } else { PathBuf::from("target/criterion") }; let self_config = SelfConfig { output_format: (matches.value_of("output-format")) .or(toml_config.output_format.as_deref()) .map(OutputFormat::from_str) .unwrap_or(OutputFormat::Criterion), criterion_home, do_run: !matches.is_present("no-run"), do_fail_fast: !matches.is_present("no-fail-fast"), text_color: (matches.value_of("color")) .map(TextColor::from_str) .unwrap_or(TextColor::Auto), plotting_backend: 
(matches.value_of("plotting-backend")) .or(toml_config.plotting_backend.as_deref()) .map(PlottingBackend::from_str) .unwrap_or(PlottingBackend::Auto), debug_build: matches.is_present("debug"), message_format: (matches.value_of("message-format")).map(MessageFormat::from_str), colors: toml_config.colors, history_id: matches.value_of("history_id").map(|s| s.to_owned()), history_description: matches .value_of("history_description") .map(|s| s.to_owned()), }; // These are the extra arguments to be passed to the benchmark targets. let mut additional_args: Vec<OsString> = vec![]; additional_args.extend(matches.value_of_os("BENCHNAME").map(ToOwned::to_owned)); if let Some(args) = matches.values_of_os("args") { additional_args.extend(args.map(ToOwned::to_owned)); } let configuration = FullConfig { self_config, cargo_args, additional_args, }; Ok(configuration) } /// Load & parse the Criterion.toml file (if present). fn load_toml_file(toml_path: &Path) -> Result<TomlConfig, anyhow::Error> { if !toml_path.exists() { return Ok(TomlConfig::default()); }; let mut file = File::open(toml_path) .with_context(|| format!("Failed to open config file {:?}", toml_path))?; let mut str_buf = String::new(); file.read_to_string(&mut str_buf) .with_context(|| format!("Failed to read config file {:?}", toml_path))?; let config: TomlConfig = toml::from_str(&str_buf) .with_context(|| format!("Failed to parse config file {:?}", toml_path))?; Ok(config) }
use specs::*;

use server::component::channel::*;
use server::protocol::server::{GameFlag, ServerPacket};
use server::protocol::{to_bytes, FlagUpdateType};
use server::*;

use component::*;

/// ECS system that sends the current state of every flag entity to a player
/// when that player joins.
pub struct LoginUpdateSystem {
    // Reader handle for the player-join event channel; registered in `setup`.
    reader: Option<OnPlayerJoinReader>,
}

#[derive(SystemData)]
pub struct LoginUpdateSystemData<'a> {
    pub conns: Read<'a, Connections>,
    pub join_channel: Read<'a, OnPlayerJoin>,

    // These ones are for both
    pub pos: ReadStorage<'a, Position>,
    pub team: ReadStorage<'a, Team>,

    // Flag Data
    pub is_flag: ReadStorage<'a, IsFlag>,
    pub carrier: ReadStorage<'a, FlagCarrier>,
}

impl LoginUpdateSystem {
    pub fn new() -> Self {
        Self { reader: None }
    }
}

impl<'a> System<'a> for LoginUpdateSystem {
    type SystemData = LoginUpdateSystemData<'a>;

    fn setup(&mut self, res: &mut Resources) {
        Self::SystemData::setup(res);
        // Subscribe to join events before the first run so none are missed.
        self.reader = Some(res.fetch_mut::<OnPlayerJoin>().register_reader());
    }

    fn run(&mut self, data: Self::SystemData) {
        for evt in data.join_channel.read(self.reader.as_mut().unwrap()) {
            // For each entity tagged IsFlag, send a GameFlag packet to the
            // joining player (evt.0).
            (&data.pos, &data.team, &data.carrier, &data.is_flag)
                .join()
                .for_each(|(pos, team, carrier, _)| {
                    // A carried flag is reported via its carrier; otherwise
                    // its position is reported.
                    let ty = match carrier.0 {
                        Some(_) => FlagUpdateType::Carrier,
                        None => FlagUpdateType::Position,
                    };

                    let packet = GameFlag {
                        ty,
                        flag: *team,
                        pos: *pos,
                        id: carrier.0,
                        // NOTE(review): team scores are always sent as 0 here —
                        // confirm whether the real scores should be included.
                        blueteam: 0,
                        redteam: 0,
                    };

                    data.conns.send_to_player(
                        evt.0,
                        OwnedMessage::Binary(to_bytes(&ServerPacket::GameFlag(packet)).unwrap()),
                    );
                });
        }
    }
}

impl SystemInfo for LoginUpdateSystem {
    type Dependencies = ();

    fn name() -> &'static str {
        // line!() makes the name unique if several systems share a module path.
        concat!(module_path!(), "::", line!())
    }

    fn new() -> Self {
        // Delegates to the inherent constructor above (inherent impls take
        // precedence over the trait method, so this does not recurse).
        Self::new()
    }
}
use apllodb_shared_components::{NnSqlValue, SqlValue};
use apllodb_sql_parser::apllodb_ast;

use crate::ast_translator::AstTranslator;

impl AstTranslator {
    /// Translates an AST string constant into a non-null TEXT `SqlValue`.
    pub(crate) fn string_constant(ast_string_constant: apllodb_ast::StringConstant) -> SqlValue {
        let text = ast_string_constant.0;
        SqlValue::NotNull(NnSqlValue::Text(text))
    }
}
// svd2rust-generated accessors for the DMA channel-selection register (CSELR).
// Each CxS field is a 4-bit selector choosing the request source routed to DMA
// channel x; fields are packed 4 bits apart starting at bit 0.
#[doc = "Register `CSELR` reader"]
pub type R = crate::R<CSELR_SPEC>;
#[doc = "Register `CSELR` writer"]
pub type W = crate::W<CSELR_SPEC>;
#[doc = "Field `C1S` reader - DMA channel 1 selection"]
pub type C1S_R = crate::FieldReader;
#[doc = "Field `C1S` writer - DMA channel 1 selection"]
pub type C1S_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `C2S` reader - DMA channel 2 selection"]
pub type C2S_R = crate::FieldReader;
#[doc = "Field `C2S` writer - DMA channel 2 selection"]
pub type C2S_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `C3S` reader - DMA channel 3 selection"]
pub type C3S_R = crate::FieldReader;
#[doc = "Field `C3S` writer - DMA channel 3 selection"]
pub type C3S_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `C4S` reader - DMA channel 4 selection"]
pub type C4S_R = crate::FieldReader;
#[doc = "Field `C4S` writer - DMA channel 4 selection"]
pub type C4S_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `C5S` reader - DMA channel 5 selection"]
pub type C5S_R = crate::FieldReader;
#[doc = "Field `C5S` writer - DMA channel 5 selection"]
pub type C5S_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `C6S` reader - DMA channel 6 selection"]
pub type C6S_R = crate::FieldReader;
#[doc = "Field `C6S` writer - DMA channel 6 selection"]
pub type C6S_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `C7S` reader - DMA channel 7 selection"]
pub type C7S_R = crate::FieldReader;
#[doc = "Field `C7S` writer - DMA channel 7 selection"]
pub type C7S_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
impl R {
    #[doc = "Bits 0:3 - DMA channel 1 selection"]
    #[inline(always)]
    pub fn c1s(&self) -> C1S_R {
        C1S_R::new((self.bits & 0x0f) as u8)
    }
    #[doc = "Bits 4:7 - DMA channel 2 selection"]
    #[inline(always)]
    pub fn c2s(&self) -> C2S_R {
        C2S_R::new(((self.bits >> 4) & 0x0f) as u8)
    }
    #[doc = "Bits 8:11 - DMA channel 3 selection"]
    #[inline(always)]
    pub fn c3s(&self) -> C3S_R {
        C3S_R::new(((self.bits >> 8) & 0x0f) as u8)
    }
    #[doc = "Bits 12:15 - DMA channel 4 selection"]
    #[inline(always)]
    pub fn c4s(&self) -> C4S_R {
        C4S_R::new(((self.bits >> 12) & 0x0f) as u8)
    }
    #[doc = "Bits 16:19 - DMA channel 5 selection"]
    #[inline(always)]
    pub fn c5s(&self) -> C5S_R {
        C5S_R::new(((self.bits >> 16) & 0x0f) as u8)
    }
    #[doc = "Bits 20:23 - DMA channel 6 selection"]
    #[inline(always)]
    pub fn c6s(&self) -> C6S_R {
        C6S_R::new(((self.bits >> 20) & 0x0f) as u8)
    }
    #[doc = "Bits 24:27 - DMA channel 7 selection"]
    #[inline(always)]
    pub fn c7s(&self) -> C7S_R {
        C7S_R::new(((self.bits >> 24) & 0x0f) as u8)
    }
}
impl W {
    #[doc = "Bits 0:3 - DMA channel 1 selection"]
    #[inline(always)]
    #[must_use]
    pub fn c1s(&mut self) -> C1S_W<CSELR_SPEC, 0> {
        C1S_W::new(self)
    }
    #[doc = "Bits 4:7 - DMA channel 2 selection"]
    #[inline(always)]
    #[must_use]
    pub fn c2s(&mut self) -> C2S_W<CSELR_SPEC, 4> {
        C2S_W::new(self)
    }
    #[doc = "Bits 8:11 - DMA channel 3 selection"]
    #[inline(always)]
    #[must_use]
    pub fn c3s(&mut self) -> C3S_W<CSELR_SPEC, 8> {
        C3S_W::new(self)
    }
    #[doc = "Bits 12:15 - DMA channel 4 selection"]
    #[inline(always)]
    #[must_use]
    pub fn c4s(&mut self) -> C4S_W<CSELR_SPEC, 12> {
        C4S_W::new(self)
    }
    #[doc = "Bits 16:19 - DMA channel 5 selection"]
    #[inline(always)]
    #[must_use]
    pub fn c5s(&mut self) -> C5S_W<CSELR_SPEC, 16> {
        C5S_W::new(self)
    }
    #[doc = "Bits 20:23 - DMA channel 6 selection"]
    #[inline(always)]
    #[must_use]
    pub fn c6s(&mut self) -> C6S_W<CSELR_SPEC, 20> {
        C6S_W::new(self)
    }
    #[doc = "Bits 24:27 - DMA channel 7 selection"]
    #[inline(always)]
    #[must_use]
    pub fn c7s(&mut self) -> C7S_W<CSELR_SPEC, 24> {
        C7S_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "channel selection register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cselr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cselr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CSELR_SPEC;
impl crate::RegisterSpec for CSELR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`cselr::R`](R) reader structure"]
impl crate::Readable for CSELR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`cselr::W`](W) writer structure"]
impl crate::Writable for CSELR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CSELR to value 0"]
impl crate::Resettable for CSELR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
fn main() { let mut random_num1:u8=0; let mut bit1:u8 =0; let mut random_num2:u8=0; let mut temp_num:u8=0; let mut temp_num1:u8=0; let mut temp_num2:u8=0; let mut temp_num3:u8=0; let mut temp_num4:u8=0; let mut bit2:u8 =0; for x in 0..8{ bit1 = magic_num::magic_number(); random_num1 |= bit1 << x; } for x in 0..7{ bit2 = magic_num::magic_number(); temp_num |= bit2 << x; } for x in 0..6{ bit2 = magic_num::magic_number(); temp_num1 |= bit2 << x; } for x in 0..5{ bit2 = magic_num::magic_number(); temp_num2 |= bit2 << x; } for x in 0..3{ bit2 = magic_num::magic_number(); temp_num3 |= bit2 << x; } for x in 0..2{ bit2 = magic_num::magic_number(); temp_num4 |= bit2 << x; } random_num2 = temp_num + temp_num1 + temp_num2 + temp_num3 + temp_num4; println!("Random number 1:{} Random Number 2:{}", random_num1, random_num2); }
// NOTE(review): several of these imports (BorrowMut, RefCell, executor, Delay)
// appear unused in this file — consider removing them.
use std::borrow::BorrowMut;
use std::cell::RefCell;
use std::{thread, time::Duration, sync::Arc};

use futures::{executor, task::SpawnExt};
use futures::FutureExt;
use futures_timer::Delay;
use wasmtime::{Instance, Store, Engine, Config, Linker, Module};

// Per-store host state: just the WASI context the linker's host functions use.
struct State {
    wasi: wasmtime_wasi::WasiCtx,
}

impl State {
    fn new(wasi: wasmtime_wasi::WasiCtx) -> Self {
        Self { wasi }
    }
}

// A loaded wasm module instance paired with the store that owns its state.
struct WasmInstance {
    instance: Instance,
    store: Store<State>,
}

impl WasmInstance {
    /// Loads and instantiates the wasm module at the hard-coded path, with
    /// WASI stdio/stderr/env inherited from the host process.
    async fn new(engine: Arc<Engine>) -> Result<Self, Box<dyn std::error::Error>>{
        // NOTE(review): absolute machine-specific path — consider making this
        // configurable.
        let wasm_path = "/Users/jimmyhmiller/Documents/Code/PlayGround/rust/wasm-infinite-loop/target/wasm32-wasi/debug/wasm_infinite_loop.wasm";

        let wasi = wasmtime_wasi::WasiCtxBuilder::new()
            .inherit_stdio()
            .inherit_stderr()
            .inherit_env()?
            .build();

        let mut linker: Linker<State> = Linker::new(&engine);
        wasmtime_wasi::add_to_linker(&mut linker, |s| &mut s.wasi)?;

        let mut store = Store::new(&engine, State::new(wasi));
        let module = Module::from_file(&engine, wasm_path)?;
        let instance = linker.instantiate_async(&mut store, &module).await?;
        Ok(Self {
            instance,
            store,
        })
    }

    /// Calls the exported `loop_forever(label)` function. The epoch deadline
    /// makes the call yield back to the executor on each epoch tick instead of
    /// blocking the thread forever.
    async fn init(&mut self, label: i32) -> Result<(), Box<dyn std::error::Error>> {
        self.store.epoch_deadline_async_yield_and_update(1);
        let init = self.instance.get_typed_func::<i32, i32>(&mut self.store, "loop_forever")?;
        let result = init.call_async(&mut self.store, label).await?;
        println!("Result: {}", result);
        Ok(())
    }

    /// Calls the exported `print_stuff()` function, yielding on epoch ticks.
    async fn print_stuff(&mut self) -> Result<(), Box<dyn std::error::Error>> {
        self.store.epoch_deadline_async_yield_and_update(1);
        let print_stuff = self.instance.get_typed_func::<(), ()>(&mut self.store, "print_stuff")?;
        print_stuff.call_async(&mut self.store, ()).await?;
        Ok(())
    }

    /// Grows the instance's exported linear memory by 100 pages.
    fn grow_memory(&mut self) -> Result<(), Box<dyn std::error::Error>> {
        println!("growing");
        let memory = self
            .instance
            .get_export(&mut self.store, "memory")
            .unwrap()
            .into_memory()
            .unwrap();
        memory.grow(&mut self.store, 100)?;
        Ok(())
    }

    /// Writes ~1.1 MB of zeroes starting at offset 0 of the exported memory.
    fn write_over_1mb(&mut self) -> Result<(), Box<dyn std::error::Error>> {
        println!("writing");
        let memory = self
            .instance
            .get_export(&mut self.store, "memory")
            .unwrap()
            .into_memory()
            .unwrap();
        let size = 1100000;
        let data = vec![0; size];
        memory.write(&mut self.store, 0, &data)?;
        Ok(())
    }
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut config = Config::new();
    config.async_support(true);
    config.epoch_interruption(true);
    let engine = Arc::new(Engine::new(&config).unwrap());
    let engine_clone = engine.clone();
    // Background thread that drives epoch interruption: each increment lets a
    // suspended wasm call yield and be resumed by the executor.
    thread::spawn(move || {
        loop {
            engine_clone.increment_epoch();
            thread::sleep(Duration::from_nanos(1));
        }
    });

    // Runs the never-returning `loop_forever` export (kept for experiments).
    async fn run(label: i32, engine: Arc<Engine>) -> Result<(), Box<dyn std::error::Error>> {
        let mut instance = WasmInstance::new(engine).await.unwrap();
        instance.init(label).await.unwrap();
        Ok(())
    }

    // Exercises print_stuff repeatedly, then memory growth and a large write.
    async fn run2(engine: Arc<Engine>) -> Result<(), Box<dyn std::error::Error>> {
        let mut instance = WasmInstance::new(engine).await.unwrap();
        instance.print_stuff().await?;
        instance.print_stuff().await?;
        instance.print_stuff().await?;
        instance.print_stuff().await?;
        instance.print_stuff().await?;
        instance.print_stuff().await?;
        instance.grow_memory().unwrap();
        instance.write_over_1mb()?;
        instance.print_stuff().await.unwrap();
        Ok(())
    }

    // Single-threaded executor; run_until_stalled returns once all spawned
    // futures are blocked or finished.
    let mut local_pool = futures::executor::LocalPool::new();
    let local_spawner = local_pool.spawner();

    local_spawner.spawn(run2(engine.clone()).map(|_| ())).unwrap();
    // local_spawner.spawn(run(1, engine.clone()).map(|_| ())).unwrap();
    // local_spawner.spawn(run(2, engine.clone()).map(|_| ())).unwrap();

    println!("==================");
    local_pool.run_until_stalled();
    println!("done");
    Ok(())
}
extern crate omegalul;

use std::{collections::HashMap, thread};
use ::std::*;

use omegalul::server::{get_random_server, ChatEvent, Server};

#[tokio::main]
async fn main() {
    // Pick a random chat server to connect to; bail out silently if none found.
    if let Some(server_name) = get_random_server().await {
        println!("Connecting to {} server", server_name);

        let server = &mut Server::new(server_name.as_str(), vec!["minecraft".to_string()]);
        let chat = &mut server.start_chat().await;

        if let Some(chat) = chat {
            let cloned_chat = chat.clone();

            // Input thread: builds its own tokio runtime so it can issue async
            // chat calls while the main task below keeps polling chat events.
            thread::spawn(move || {
                let cloned_chat = cloned_chat.clone();
                let runtime = tokio::runtime::Builder::new_multi_thread()
                    .enable_all()
                    .build()
                    .unwrap();

                runtime.block_on(async {
                    loop {
                        // The command table is rebuilt each iteration so the
                        // closures can capture fresh clones of the chat handle.
                        let commands = &mut HashMap::<&str, Box<dyn Fn()>>::new();

                        commands.insert(
                            "disconnect",
                            Box::new(|| {
                                let cloned_chat = cloned_chat.clone();
                                runtime.spawn(async move {
                                    cloned_chat.clone().disconnect().await;
                                    println!("Disconnected, quitting program.");
                                    std::process::exit(0);
                                });
                            }),
                        );
                        commands.insert(
                            "info",
                            Box::new(|| println!("client id: {}", cloned_chat.client_id)),
                        );

                        let input = &get_input();
                        let command = commands.get(input.as_str());

                        // A known command runs its handler; any other input is
                        // sent as a chat message.
                        match command {
                            Some(function) => (function)(),
                            None => {
                                println!("You: {}", input);
                                cloned_chat.clone().send_message(input).await
                            }
                        }
                    }
                });
            });

            // Main event loop: print incoming chat events as they arrive.
            loop {
                let event = chat.fetch_event().await;

                match event {
                    ChatEvent::Message(message) => println!("Stranger: {}", &message),
                    ChatEvent::StrangerDisconnected => {
                        println!("The user has disconnected.")
                    }
                    ChatEvent::Typing => println!("Stranger is typing..."),
                    ChatEvent::Connected => println!("You have matched with someone."),
                    ChatEvent::CommonLikes(likes) => {
                        println!("Oh, you 2 seem to have some things in common! {:?}", likes)
                    }
                    ChatEvent::Waiting => {
                        println!("You are currently waiting for a person to match with.")
                    }
                    _ => (),
                }
            }
        }
    }
}

/// Reads one line from stdin and returns it with surrounding whitespace trimmed.
fn get_input() -> String {
    let mut input = String::new();

    io::stdin()
        .read_line(&mut input)
        .expect("Unable to read line");

    return input.trim().to_string();
}
extern crate num;
extern crate ndarray;
#[macro_use]
extern crate itertools;

use ndarray::{Array, Array4, Zip};
use num::complex::Complex;
use num::Zero;
use numpy::{IntoPyArray, PyArray4};
use pyo3::prelude::{pymodule, Py, PyErr, PyModule, PyResult, Python};

// Computes one coefficient in single precision: a phase sum of powers of the
// dim-th root of unity over all (p, pd, r) in [0, dim)^3, normalized by dim^2.
fn c_val_32(k: i32, kd: i32, m: i32, md: i32, n: i32, nd: i32, dim: i32) -> Complex<f32> {
    // omega = exp(2*pi*i / dim), the primitive dim-th root of unity.
    let omega: Complex<f32> =
        Complex::from_polar(&1.0, &(2.0 * std::f32::consts::PI / (dim as f32)));
    let diff = k - kd;
    iproduct!(0..dim, 0..dim, 0..dim).fold(Complex::zero(), |acc: Complex<f32>, (p, pd, r)| {
        // The fourth index rd is determined by the other three, modulo dim.
        let rd = (pd - p + r).rem_euclid(dim);
        acc + omega.powi(
            p * md - pd * nd - r * m
                + rd * n
                + diff * (p.pow(2) - pd.pow(2) - r.pow(2) + rd.pow(2)),
        )
    }) / ((dim.pow(2)) as f32)
}

// Same computation as c_val_32 but in double precision with i64 indices.
fn c_val_64(k: i64, kd: i64, m: i64, md: i64, n: i64, nd: i64, dim: i64) -> Complex<f64> {
    let omega: Complex<f64> =
        Complex::from_polar(&1.0, &(2.0 * std::f64::consts::PI / (dim as f64)));
    let diff = k - kd;
    iproduct!(0..dim, 0..dim, 0..dim).fold(Complex::zero(), |acc: Complex<f64>, (p, pd, r)| {
        let rd = (pd - p + r).rem_euclid(dim);
        // NOTE(review): the exponent is computed in i64 and then cast to i32
        // for powi; this could wrap for very large dim — confirm dim stays small.
        acc + omega.powi((
            p * md - pd * nd - r * m
                + rd * n
                + diff * (p.pow(2) - pd.pow(2) - r.pow(2) + rd.pow(2))) as i32,
        )
    }) / ((dim.pow(2)) as f64)
}

// Fills a dim^4 array with c_val_32 over all (m, md, n, nd), in parallel.
fn c_mat_32(k: i32, kd: i32, dim: i32) -> Array4<Complex<f32>> {
    let d = dim as usize;
    let mut a = Array4::zeros((d, d, d, d));
    Zip::indexed(&mut a).par_apply(|(m, md, n, nd), a| {
        *a = c_val_32(k, kd, m as i32, md as i32, n as i32, nd as i32, dim)
    });
    a
}

// Double-precision counterpart of c_mat_32.
fn c_mat_64(k: i64, kd: i64, dim: i64) -> Array4<Complex<f64>> {
    let d = dim as usize;
    let mut a = Array4::zeros((d, d, d, d));
    Zip::indexed(&mut a).par_apply(|(m, md, n, nd), a| {
        *a = c_val_64(k, kd, m as i64, md as i64, n as i64, nd as i64, dim)
    });
    a
}

// Python module exposing the coefficient-matrix builders in both precisions.
#[pymodule]
fn two_mub_coefficients(_py: Python<'_>, m: &PyModule) -> PyResult<()> {
    #[pyfn(m, "c_mat_32")]
    fn py_c_mat_32(py: Python<'_>, k: i32, kd: i32, d: i32) -> PyResult<Py<PyArray4<Complex<f32>>>> {
        Ok(c_mat_32(k, kd, d).into_pyarray(py).to_owned())
    };

    #[pyfn(m, "c_mat_64")]
    fn py_c_mat_64(py: Python<'_>, k: i64, kd: i64, d: i64) -> PyResult<Py<PyArray4<Complex<f64>>>> {
        Ok(c_mat_64(k, kd, d).into_pyarray(py).to_owned())
    };

    Ok(())
}

#[cfg(test)]
mod tests {
    use super::{c_val_32, c_val_64};
    use num::complex::Complex;
    use crate::num::Zero;

    #[test]
    fn test_one_equals_two() {
        // NOTE(review): exact equality on floats — this only passes if the sum
        // cancels to exactly zero; an epsilon comparison would be more robust.
        assert_eq!(Complex::zero(), c_val_32(2, 3, 1, 2, 5, 3, 10));
    }
}
pub mod app; pub mod app_to_game;
// Copyright 2020 IOTA Stiftung // SPDX-License-Identifier: Apache-2.0 use inccounter::*; use consts::*; use wasmlib::*; mod inccounter; mod consts; #[no_mangle] fn on_load() { let exports = ScExports::new(); exports.add_func(FUNC_CALL_INCREMENT, func_call_increment); exports.add_func(FUNC_CALL_INCREMENT_RECURSE5X, func_call_increment_recurse5x); exports.add_func(FUNC_INCREMENT, func_increment); exports.add_func(FUNC_INIT, func_init); exports.add_func(FUNC_LOCAL_STATE_INTERNAL_CALL, func_local_state_internal_call); exports.add_func(FUNC_LOCAL_STATE_POST, func_local_state_post); exports.add_func(FUNC_LOCAL_STATE_SANDBOX_CALL, func_local_state_sandbox_call); exports.add_func(FUNC_POST_INCREMENT, func_post_increment); exports.add_func(FUNC_REPEAT_MANY, func_repeat_many); exports.add_func(FUNC_WHEN_MUST_INCREMENT, func_when_must_increment); exports.add_view(VIEW_GET_COUNTER, view_get_counter); }
use pulldown_cmark::{html, Options, Parser};

/// Convert a Markdown string to an HTML string.
///
/// CommonMark plus the strikethrough extension; no other extensions are
/// enabled.
pub fn render(md: String) -> String {
    let mut opts = Options::empty();
    opts.insert(Options::ENABLE_STRIKETHROUGH);

    let mut rendered = String::new();
    html::push_html(&mut rendered, Parser::new_ext(&md, opts));
    rendered
}
/// Abstraction over an analog input channel.
///
/// The `Send` bound lets implementors be moved to, and sampled from, another
/// thread.
pub trait AnalogInput: Send {
    /// Sample the input; `None` means no reading is currently available.
    /// NOTE(review): units and range of the `f32` are implementation-defined —
    /// not visible from this declaration.
    fn get_value(&mut self) -> Option<f32>;
}
pub mod miner;
mod stratum;
#[cfg(test)]
mod test;
mod worker;

use byteorder::{LittleEndian, WriteBytesExt};
use rand::Rng;
use std::ops::Range;

/// Split the full u64 nonce space into `total` contiguous partitions and
/// return the half-open range belonging to partition `id`.
///
/// Every partition except the last has width `u64::MAX / total`; the last one
/// absorbs the remainder so the whole space is covered.
fn partition_nonce(id: u64, total: u64) -> Range<u64> {
    let span = u64::max_value() / total;
    let start = span * id;
    let end = if id == total - 1 {
        u64::max_value()
    } else if id < total - 1 {
        start + span
    } else {
        // `id` must always be a valid partition index (< total).
        unreachable!()
    };
    start..end
}

/// Build a closure that yields uniformly random nonces drawn from `range`.
fn nonce_generator(range: Range<u64>) -> impl FnMut() -> u64 {
    let mut rng = rand::thread_rng();
    move || rng.gen_range(range.start, range.end)
}

/// Return a copy of `header` whose trailing 8 bytes are replaced by `nonce`
/// encoded little-endian.  The output therefore has the same length as the
/// input.
pub fn set_header_nonce(header: &[u8], nonce: u64) -> Vec<u8> {
    let mut out = header.to_vec();
    let keep = out.len() - 8;
    out.truncate(keep);
    // Writing into a Vec cannot fail; the result is intentionally ignored.
    let _ = out.write_u64::<LittleEndian>(nonce);
    out
}
use common::event::EventPublisher;
use common::result::Result;

use crate::application::dtos::{AuthorDto, CategoryDto, PublicationDto};
use crate::domain::author::AuthorRepository;
use crate::domain::category::CategoryRepository;
use crate::domain::interaction::InteractionService;
use crate::domain::publication::{PublicationId, PublicationRepository};
use crate::domain::reader::{ReaderId, ReaderRepository};

/// Use case: fetch a single publication by id on behalf of an authenticated
/// reader, recording a view when the requester is not the author.
pub struct GetById<'a> {
    event_pub: &'a dyn EventPublisher,
    author_repo: &'a dyn AuthorRepository,
    category_repo: &'a dyn CategoryRepository,
    publication_repo: &'a dyn PublicationRepository,
    reader_repo: &'a dyn ReaderRepository,
    interaction_serv: &'a InteractionService,
}

impl<'a> GetById<'a> {
    /// Wire the use case to its repository and service collaborators.
    pub fn new(
        event_pub: &'a dyn EventPublisher,
        author_repo: &'a dyn AuthorRepository,
        category_repo: &'a dyn CategoryRepository,
        publication_repo: &'a dyn PublicationRepository,
        reader_repo: &'a dyn ReaderRepository,
        interaction_serv: &'a InteractionService,
    ) -> Self {
        GetById {
            event_pub,
            author_repo,
            category_repo,
            publication_repo,
            reader_repo,
            interaction_serv,
        }
    }

    /// Execute the use case.
    ///
    /// * `auth_id` — id of the authenticated reader making the request.
    /// * `publication_id` — id of the publication to fetch.
    ///
    /// When the requester is NOT the author: a view interaction is recorded,
    /// the publication (with its updated statistics) is saved, and any domain
    /// events it accumulated are published.  Only the author receives the
    /// publication's status in the returned DTO.
    ///
    /// Errors on invalid ids, missing entities, or a rejected interaction.
    /// NOTE(review): the tests below show a non-author viewing a draft fails —
    /// that rule is presumably enforced inside `InteractionService::add_view`,
    /// which is not visible here.
    pub async fn exec(&self, auth_id: String, publication_id: String) -> Result<PublicationDto> {
        let publication_id = PublicationId::new(publication_id)?;
        let mut publication = self.publication_repo.find_by_id(&publication_id).await?;

        let reader_id = ReaderId::new(auth_id)?;
        let reader = self.reader_repo.find_by_id(&reader_id).await?;

        // Author and category are loaded to enrich the DTO below.
        let author = self.author_repo.find_by_id(publication.author_id()).await?;
        let category = self
            .category_repo
            .find_by_id(publication.header().category_id())
            .await?;

        let is_reader_author = publication.author_id() == &reader_id;

        if !is_reader_author {
            // Record the view, persist the updated publication, and publish
            // the domain events raised in the process.
            self.interaction_serv
                .add_view(&reader, &mut publication)
                .await?;

            self.publication_repo.save(&mut publication).await?;

            self.event_pub
                .publish_all(publication.base().events()?)
                .await?;
        }

        let mut publication_dto = PublicationDto::from(&publication)
            .author(AuthorDto::from(&author))
            .category(CategoryDto::from(&category))
            .pages(&publication);

        // Status (e.g. "draft") is only exposed to the publication's author.
        if is_reader_author {
            publication_dto = publication_dto.status(&publication);
        }

        Ok(publication_dto)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    use crate::mocks;

    // Author viewing their own draft: no view recorded, no events published,
    // and the DTO includes the "draft" status.
    #[tokio::test]
    async fn owner_view_of_draft() {
        let c = mocks::container();
        let uc = GetById::new(
            c.event_pub(),
            c.author_repo(),
            c.category_repo(),
            c.publication_repo(),
            c.reader_repo(),
            c.interaction_serv(),
        );

        let mut reader = mocks::author_as_reader1();
        c.reader_repo().save(&mut reader).await.unwrap();
        let mut publication = mocks::publication1();
        c.publication_repo().save(&mut publication).await.unwrap();
        let mut author = mocks::author1();
        c.author_repo().save(&mut author).await.unwrap();
        let mut category = mocks::category1();
        c.category_repo().save(&mut category).await.unwrap();

        let res = uc
            .exec(
                reader.base().id().to_string(),
                publication.base().id().to_string(),
            )
            .await
            .unwrap();
        assert_eq!(res.id, publication.base().id().value());
        assert_eq!(res.author.unwrap().id, author.base().id().value());
        assert_eq!(res.name, publication.header().name().value());
        assert_eq!(
            res.category.unwrap().id,
            publication.header().category_id().value()
        );
        assert!(res.pages.unwrap().len() > 0);
        // No view was added, so all statistics stay at zero.
        assert_eq!(res.statistics.views, 0);
        assert_eq!(res.statistics.unique_views, 0);
        assert_eq!(res.statistics.readings, 0);
        assert_eq!(res.status.unwrap(), "draft");

        assert_eq!(c.event_pub().events().await.len(), 0);
    }

    // A non-author reader must not be able to view a draft.
    #[tokio::test]
    async fn reader_view_of_draft() {
        let c = mocks::container();
        let uc = GetById::new(
            c.event_pub(),
            c.author_repo(),
            c.category_repo(),
            c.publication_repo(),
            c.reader_repo(),
            c.interaction_serv(),
        );

        let mut reader = mocks::reader1();
        c.reader_repo().save(&mut reader).await.unwrap();
        let mut publication = mocks::publication1();
        c.publication_repo().save(&mut publication).await.unwrap();
        let mut author = mocks::author1();
        c.author_repo().save(&mut author).await.unwrap();
        let mut category = mocks::category1();
        c.category_repo().save(&mut category).await.unwrap();

        assert!(uc
            .exec(
                reader.base().id().to_string(),
                publication.base().id().to_string(),
            )
            .await
            .is_err());
    }

    // A non-author reader viewing a published publication: the view is
    // counted, events are published, and no status is exposed.
    #[tokio::test]
    async fn reader_view_of_published() {
        let c = mocks::container();
        let uc = GetById::new(
            c.event_pub(),
            c.author_repo(),
            c.category_repo(),
            c.publication_repo(),
            c.reader_repo(),
            c.interaction_serv(),
        );

        let mut reader = mocks::reader1();
        c.reader_repo().save(&mut reader).await.unwrap();
        let mut publication = mocks::published_publication1();
        c.publication_repo().save(&mut publication).await.unwrap();
        let mut author = mocks::author1();
        c.author_repo().save(&mut author).await.unwrap();
        let mut category = mocks::category1();
        c.category_repo().save(&mut category).await.unwrap();

        let res = uc
            .exec(
                reader.base().id().to_string(),
                publication.base().id().to_string(),
            )
            .await
            .unwrap();
        assert_eq!(res.id, publication.base().id().value());
        assert_eq!(res.author.unwrap().id, publication.author_id().value());
        assert_eq!(res.pages.unwrap().len(), 2);
        assert_eq!(res.statistics.views, 1);
        assert_eq!(res.statistics.unique_views, 1);
        // Status is never exposed to non-authors.
        assert!(res.status.is_none());

        assert!(c.event_pub().events().await.len() > 0);
    }

    // A malformed publication id must be rejected before any lookup succeeds.
    #[tokio::test]
    async fn invalid_id() {
        let c = mocks::container();
        let uc = GetById::new(
            c.event_pub(),
            c.author_repo(),
            c.category_repo(),
            c.publication_repo(),
            c.reader_repo(),
            c.interaction_serv(),
        );

        let mut reader = mocks::reader1();
        c.reader_repo().save(&mut reader).await.unwrap();
        let mut publication = mocks::published_publication1();
        c.publication_repo().save(&mut publication).await.unwrap();
        let mut author = mocks::author1();
        c.author_repo().save(&mut author).await.unwrap();
        let mut category = mocks::category1();
        c.category_repo().save(&mut category).await.unwrap();

        assert!(uc
            .exec(reader.base().id().to_string(), "#invalid".to_owned(),)
            .await
            .is_err());
    }
}