text
stringlengths
8
4.13M
#![allow(dead_code)]
use std::collections::BTreeSet;
use std::collections::HashSet;
use std::convert::AsRef;

// Definition for singly-linked list.
#[derive(PartialEq, Eq, Clone, Debug)]
pub struct ListNode {
    pub val: i32,
    pub next: Option<Box<ListNode>>,
}

impl ListNode {
    #[inline]
    fn new(val: i32) -> Self {
        ListNode { next: None, val }
    }

    /// Builds a linked list holding the values of `v`, preserving order.
    pub fn build(v: Vec<i32>) -> Option<Box<ListNode>> {
        let mut ret = None;
        // Construct back-to-front so each new node owns the tail built so far.
        for i in 1..=v.len() {
            ret = Some(Box::new(ListNode {
                val: v[v.len() - i],
                next: ret,
            }))
        }
        ret
    }

    /// Consumes a list and collects its values into a `Vec`.
    pub fn into_vec(head: Option<Box<ListNode>>) -> Vec<i32> {
        let mut tmp = vec![];
        let mut head = head;
        while let Some(ln) = head {
            tmp.push(ln.val);
            head = ln.next;
        }
        tmp
    }
}

use std::cell::RefCell;
use std::rc::Rc;

// Definition for a binary tree node.
#[derive(Debug, PartialEq, Eq)]
pub struct TreeNode {
    pub val: i32,
    pub left: Option<Rc<RefCell<TreeNode>>>,
    pub right: Option<Rc<RefCell<TreeNode>>>,
}

use std::collections::VecDeque;

impl TreeNode {
    #[inline]
    pub fn new(val: i32) -> Self {
        TreeNode {
            val,
            left: None,
            right: None,
        }
    }

    /// Parses a LeetCode-style level-order literal such as `"[1, null, 2, 3]"`.
    ///
    /// # Panics
    /// Panics if `s` is not bracketed, or contains an entry that is neither
    /// `null` nor a valid `i32`.
    pub fn build_with_str<T: AsRef<str>>(s: T) -> Option<Rc<RefCell<TreeNode>>> {
        let s = s.as_ref().trim();
        // `starts_with`/`ends_with` also handle the empty string, which the old
        // byte-range slicing (`&s[0..=0]`) would have panicked on.
        if !s.starts_with('[') || !s.ends_with(']') {
            // Fixed: `panic!(format!(..))` is deprecated (a hard error in
            // edition 2021); pass the format arguments to `panic!` directly.
            panic!("{} can not build to TreeNode", s);
        }
        let arr: Vec<&str> = s[1..s.len() - 1].split(',').collect();
        let v: Vec<Option<i32>> = arr
            .into_iter()
            .map(|ss| {
                let ss = ss.trim();
                if ss == "null" {
                    None
                } else {
                    Some(ss.parse::<i32>().unwrap())
                }
            })
            .collect();
        TreeNode::build(v)
    }

    /// Builds a tree from a level-order vector; `None` marks a missing child.
    pub fn build(v: Vec<Option<i32>>) -> Option<Rc<RefCell<TreeNode>>> {
        if v.is_empty() {
            return None;
        }
        let mut v_iter = v.into_iter();
        let head_v = v_iter.next().unwrap().unwrap();
        let head = Rc::new(RefCell::new(TreeNode::new(head_v)));
        let mut tmp = VecDeque::new();
        tmp.push_back(Rc::clone(&head));
        // BFS attachment: each popped node consumes the next two values as its
        // left and right children; running out of values ends construction.
        while let Some(node) = tmp.pop_front() {
            if let Some(lv) = v_iter.next() {
                match lv {
                    Some(val) => {
                        let ln = Rc::new(RefCell::new(TreeNode::new(val)));
                        tmp.push_back(Rc::clone(&ln));
                        node.borrow_mut().left = Some(ln);
                    }
                    None => {
                        node.borrow_mut().left = None;
                    }
                }
            } else {
                break;
            }
            if let Some(rv) = v_iter.next() {
                match rv {
                    Some(val) => {
                        let rn = Rc::new(RefCell::new(TreeNode::new(val)));
                        tmp.push_back(Rc::clone(&rn));
                        node.borrow_mut().right = Some(rn);
                    }
                    None => {
                        node.borrow_mut().right = None;
                    }
                }
            } else {
                break;
            }
        }
        Some(head)
    }

    /// Deep-copies a tree; the clone shares no nodes with the original.
    pub fn clone_tree(t: &Option<Rc<RefCell<TreeNode>>>) -> Option<Rc<RefCell<TreeNode>>> {
        match t {
            None => None,
            Some(rf_node) => Some(Rc::new(RefCell::new(TreeNode {
                val: rf_node.borrow().val,
                left: TreeNode::clone_tree(&rf_node.borrow().left),
                right: TreeNode::clone_tree(&rf_node.borrow().right),
            }))),
        }
    }

    /// Serializes a tree back to level-order, trimming trailing `None`s.
    pub fn into_vec(tree: Option<Rc<RefCell<TreeNode>>>) -> Vec<Option<i32>> {
        let mut queue = VecDeque::new();
        queue.push_back(tree);
        let mut ret = vec![];
        while let Some(node) = queue.pop_front() {
            match node {
                None => {
                    ret.push(None);
                }
                Some(t) => {
                    ret.push(Some(t.borrow().val));
                    if let Some(ref lt) = t.borrow().left {
                        queue.push_back(Some(Rc::clone(lt)));
                    } else {
                        queue.push_back(None);
                    }
                    if let Some(ref rt) = t.borrow().right {
                        queue.push_back(Some(Rc::clone(rt)));
                    } else {
                        queue.push_back(None);
                    }
                }
            }
        }
        // Drop the trailing `None` padding produced by the BFS above.
        while let Some(item) = ret.pop() {
            if let Some(t) = item {
                ret.push(Some(t));
                break;
            }
        }
        ret
    }
}

/// Collects a `Vec` into an ordered set (handy for order-insensitive asserts).
pub fn vec_2_set<T: std::cmp::Ord>(v: Vec<T>) -> BTreeSet<T> {
    v.into_iter().collect()
}

/// Converts `Vec<&str>` into `Vec<String>`.
pub fn build_string_array(v: Vec<&str>) -> Vec<String> {
    v.into_iter().map(|s| s.to_string()).collect()
}

/// Compares two nested `Vec`s ignoring both outer and inner ordering.
pub fn compare_nest2_vec<T: Ord + Clone + std::hash::Hash + Eq>(
    a: Vec<Vec<T>>,
    b: Vec<Vec<T>>,
) -> bool {
    let aa: HashSet<Vec<T>> = a
        .into_iter()
        .map(|mut v| {
            v.sort_unstable();
            v
        })
        .collect();
    let bb: HashSet<Vec<T>> = b
        .into_iter()
        .map(|mut v| {
            v.sort_unstable();
            v
        })
        .collect();
    aa == bb
}

/// Builds a 9x9 sudoku board of chars from one-character string literals.
pub fn build_sudo(s: [[&str; 9]; 9]) -> Vec<Vec<char>> {
    let mut ret = vec![];
    for line in s.iter() {
        ret.push(line.iter().map(|c| c.chars().next().unwrap()).collect());
    }
    ret
}

/// Prints a sudoku board row by row for debugging.
pub fn print_sudo<T: std::fmt::Debug>(sudo: Vec<Vec<T>>) {
    for v in sudo.iter() {
        println!("{:?}", v);
    }
}

#[cfg(test)]
mod test {
    use super::TreeNode;
    use std::cell::RefCell;
    use std::rc::Rc;

    #[test]
    fn build_tree_node() {
        assert_eq!(
            TreeNode::build(vec![Some(1), None, Some(2), Some(3)]),
            Some(Rc::new(RefCell::new(TreeNode {
                val: 1,
                left: None,
                right: Some(Rc::new(RefCell::new(TreeNode {
                    val: 2,
                    left: Some(Rc::new(RefCell::new(TreeNode {
                        val: 3,
                        left: None,
                        right: None
                    }))),
                    right: None
                })))
            })))
        );
    }

    #[test]
    fn build_tree_node_from_str() {
        assert_eq!(
            TreeNode::build(vec![Some(1), None, Some(2), Some(3)]),
            TreeNode::build_with_str("[1, null, 2, 3]")
        );
    }

    #[test]
    fn clone_tree() {
        let t1 = Some(Rc::new(RefCell::new(TreeNode::new(1))));
        let t2 = Some(Rc::new(RefCell::new(TreeNode {
            val: 2,
            left: TreeNode::clone_tree(&t1),
            right: None,
        })));
        t1.as_ref().unwrap().borrow_mut().val = 10;
        assert_eq!(TreeNode::build(vec![Some(2), Some(1)]), t2);
        assert_eq!(TreeNode::build(vec![Some(10)]), t1);
    }

    #[test]
    fn tree_to_vec() {
        let v1 = vec![Some(1), None, Some(2)];
        let t1 = TreeNode::build(v1.clone());
        assert_eq!(v1, TreeNode::into_vec(t1));
        let v2 = vec![Some(2), Some(1)];
        let t2 = TreeNode::build(v2.clone());
        assert_eq!(v2, TreeNode::into_vec(t2));
    }
}
// Root of a small vector/matrix math crate: submodule declarations plus a
// smoke test exercising `Index` access on the fixed-size vector types.
mod vector;
mod matrix;

#[cfg(test)]
mod tests {
    #[allow(unused_imports)]
    use vector::*;
    #[allow(unused_imports)]
    use matrix::*;

    // Prints each vector as "(x, y, ...)" by indexing every component from 0
    // to `dim()`, verifying `Index` and `dim` agree for Vec2f/Vec3f/Vec4f.
    #[test]
    fn test_index() {
        let v2: Vec2f = Vec2f::new(1.0f32, 2.0f32);
        let v3: Vec3f = Vec3f::new(1.0f32, 2.0f32, 3.0f32);
        let v4: Vec4f = Vec4f::new(1.0f32, 2.0f32, 3.0f32, 4.0f32);
        print!("v2: ({}", v2[0]);
        for i in 1..v2.dim() {
            print!(", {}", v2[i]);
        }
        println!(")");
        print!("v3: ({}", v3[0]);
        for i in 1..v3.dim() {
            print!(", {}", v3[i]);
        }
        println!(")");
        print!("v4: ({}", v4[0]);
        for i in 1..v4.dim() {
            print!(", {}", v4[i]);
        }
        println!(")");
    }
}
use crate::location::Register;
use crate::Address;

/// A CFI directive and the function offset it applies to.
///
/// Address::none() is used for directives that apply to the whole function.
pub type Cfi = (Address, CfiDirective);

/// A CFI directive.
///
/// Each variant mirrors one assembler `.cfi_*` directive; anything this enum
/// does not model is collapsed into [`CfiDirective::Other`].
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum CfiDirective {
    /// .cfi_startproc
    StartProc,
    /// .cfi_endproc
    EndProc,
    /// .cfi_personality <address>
    Personality(Address),
    /// .cfi_lsda <address>
    // TODO: encoding?
    Lsda(Address),
    /// .cfi_signal_frame
    SignalFrame,
    /// .cfi_return_column <register>
    ReturnColumn(Register),
    /// .cfi_def_cfa <register>, <offset>
    DefCfa(Register, i64),
    /// .cfi_def_cfa_register <register>
    DefCfaRegister(Register),
    /// .cfi_def_cfa_offset <offset>
    DefCfaOffset(i64),
    /// .cfi_offset <register>, <offset>
    Offset(Register, i64),
    /// .cfi_val_offset <register>, <offset>
    ValOffset(Register, i64),
    /// .cfi_register <register1>, <register2>
    Register(Register, Register),
    /// .cfi_restore <register>
    Restore(Register),
    /// .cfi_undefined <register>
    Undefined(Register),
    /// .cfi_same_value <register>
    SameValue(Register),
    /// .cfi_remember_state
    RememberState,
    /// .cfi_restore_state
    RestoreState,
    /// An unsupported instruction.
    Other,
}
// Re-exports each challenge as its own public submodule so callers can
// address them as `crate::challenge_N`.
pub mod challenge_1;
pub mod challenge_2;
pub mod challenge_3;
pub mod challenge_4;
pub mod challenge_5;
pub mod challenge_6;
pub mod challenge_7;
pub mod challenge_8;
// Placeholder functions with intentionally empty bodies; only their names and
// call order matter to the example that uses them.
fn take_order() {}
fn serve_order() {}
fn take_payment() {}
use futures_util::{stream::SplitSink, SinkExt, StreamExt};
use serde::{Deserialize, Serialize};
use std::{collections::HashMap, sync::Arc};
use tokio::{net::TcpStream, sync::Mutex, sync::RwLock};
use tokio_tungstenite::{tungstenite::protocol::Message, WebSocketStream};
use tungstenite::{
    handshake::server::{Request, Response},
    http::{StatusCode, Uri},
};
use uuid::Uuid;

/// Shared handle to the write half of one client's WebSocket connection.
type Sender = Arc<Mutex<SplitSink<WebSocketStream<TcpStream>, Message>>>;

/// Client -> server control packets (JSON, tagged by a `"type"` field).
#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "camelCase")]
pub enum RequestPacket {
    /// Join an existing room by id.
    Join { id: String },
    /// Create a room, optionally with an explicit capacity.
    Create { size: Option<usize> },
    /// Leave the current room.
    Leave,
}

/// Server -> client control packets (JSON, tagged by a `"type"` field).
#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "camelCase")]
pub enum ResponsePacket {
    /// Sent to every member when someone joins; `size` is populated only for
    /// the joining client itself (number of peers already in the room).
    Join {
        #[serde(skip_serializing_if = "Option::is_none")]
        size: Option<usize>,
    },
    /// Confirms room creation and carries the generated room id.
    Create {
        id: String,
    },
    /// Tells remaining members that the peer at `index` left.
    Leave {
        index: usize,
    },
    /// Generic error reply.
    Error {
        message: String,
    },
}

/// One signalling room: its capacity and the connected peers' send handles.
struct Room {
    size: usize,
    senders: Vec<Sender>,
}

impl Room {
    // Requested capacities must satisfy MIN < size < MAX (checked in
    // `handle_create_room`).
    const MIN_ROOM_SIZE: usize = 0;
    const MAX_ROOM_SIZE: usize = 255;
    const DEFAULT_ROOM_SIZE: usize = 2;

    fn new(size: usize) -> Room {
        Room {
            senders: Vec::new(),
            size,
        }
    }
}

/// Room registry shared (behind an `RwLock`) by all connection tasks.
pub struct Server {
    rooms: HashMap<String, Room>,
}

impl Server {
    /// Creates the shared server state handed to every connection task.
    pub fn new() -> Arc<RwLock<Server>> {
        Arc::new(RwLock::new(Server {
            rooms: HashMap::new(),
        }))
    }

    /// Accepts one WebSocket handshake and pumps its messages until the
    /// connection drops, then cleans the client out of its room.
    ///
    /// When `host` is non-empty, the handshake callback enforces that the
    /// `Origin` header's host equals `host` or is a subdomain of it.
    pub async fn handle_connection(
        tcp_stream: TcpStream,
        server: Arc<RwLock<Server>>,
        host: String,
    ) {
        let callback = |request: &Request, response: Response| {
            // Empty `host` disables the Origin check entirely.
            if host.is_empty() {
                return Ok(response);
            }
            // Missing Origin header -> reject the handshake.
            let Some(header_value) = request.headers().get("Origin") else {
                let response = Response::builder()
                    .status(StatusCode::BAD_REQUEST)
                    .body(None)
                    .unwrap();
                return Err(response);
            };
            // Non-ASCII header value -> reject.
            let Ok(origin) = header_value.to_str() else {
                let response = Response::builder()
                    .status(StatusCode::BAD_REQUEST)
                    .body(None)
                    .unwrap();
                return Err(response);
            };
            // Origin must parse as a URI.
            let Ok(origin_uri) = origin.parse::<Uri>() else {
                let response = Response::builder()
                    .status(StatusCode::BAD_REQUEST)
                    .body(None)
                    .unwrap();
                return Err(response);
            };
            // ...and must carry a host component.
            let Some(origin_host) = origin_uri.host() else {
                let response = Response::builder()
                    .status(StatusCode::BAD_REQUEST)
                    .body(None)
                    .unwrap();
                return Err(response);
            };
            // Allow exact match or any subdomain (".<host>" suffix).
            if origin_host != host && !origin_host.ends_with(format!(".{}", host).as_str()) {
                let response = Response::builder()
                    .status(StatusCode::FORBIDDEN)
                    .body(None)
                    .unwrap();
                return Err(response);
            }
            Ok(response)
        };
        if let Ok(websocket_stream) =
            tokio_tungstenite::accept_hdr_async(tcp_stream, callback).await
        {
            let (sender, mut receiver) = websocket_stream.split();
            let sender = Arc::new(Mutex::new(sender));
            let mut client = Client::new(sender.clone());
            // Message pump: one `Client` per connection for its lifetime.
            while let Some(message) = receiver.next().await {
                match message {
                    Ok(message) => client.handle_message(&server, message).await,
                    Err(error) => println!("Failed to read message: {}", error),
                }
            }
            // Connection ended; make sure the peer leaves its room.
            client.handle_close(&server).await
        }
    }
}

/// Per-connection state: the peer's send handle and (at most) one room id.
pub struct Client {
    sender: Sender,
    room_id: Option<String>,
}

impl Client {
    pub fn new(sender: Sender) -> Client {
        Client {
            sender,
            room_id: None,
        }
    }

    /// Sends a raw WebSocket message, logging (not propagating) failures.
    async fn send(&self, sender: &Sender, message: Message) {
        let mut sender = sender.lock().await;
        if let Err(error) = sender.send(message).await {
            println!("Failed to send: {}", error);
        }
    }

    /// Serializes a `ResponsePacket` to JSON and sends it as a text frame.
    async fn send_packet(&self, sender: &Sender, packet: ResponsePacket) {
        let serialized_packet = serde_json::to_string(&packet).unwrap();
        self.send(sender, Message::Text(serialized_packet)).await;
    }

    /// Convenience wrapper for sending a `ResponsePacket::Error`.
    async fn send_error_packet(&self, sender: &Sender, message: String) {
        let error_packet = ResponsePacket::Error { message };
        self.send_packet(sender, error_packet).await
    }

    /// Creates a new room and joins it, unless this client is already in one.
    async fn handle_create_room(&mut self, server: &RwLock<Server>, size_option: Option<usize>) {
        let mut server = server.write().await;
        // Silently ignore the request if this sender is already in any room.
        if server.rooms.iter().any(|(_, room)| {
            room.senders
                .iter()
                .any(|sender| Arc::ptr_eq(sender, &self.sender))
        }) {
            return;
        }
        let size = size_option.unwrap_or(Room::DEFAULT_ROOM_SIZE);
        // Valid capacities are strictly between MIN (0) and MAX (255).
        if size == Room::MIN_ROOM_SIZE || size >= Room::MAX_ROOM_SIZE {
            return self
                .send_error_packet(&self.sender, "The room size is not valid".to_string())
                .await;
        }
        let room_id = Uuid::new_v4().to_string();
        // UUIDv4 collisions are practically impossible, but guard anyway.
        if server.rooms.contains_key(&room_id) {
            return self
                .send_error_packet(
                    &self.sender,
                    "A room with that identifier already exists.".to_string(),
                )
                .await;
        }
        let mut room = Room::new(size);
        room.senders.push(self.sender.clone());
        server.rooms.insert(room_id.clone(), room);
        self.room_id = Some(room_id.clone());
        self.send_packet(&self.sender, ResponsePacket::Create { id: room_id })
            .await
    }

    /// Joins an existing room and notifies every member.
    async fn handle_join_room(&mut self, server: &RwLock<Server>, room_id: String) {
        let mut server = server.write().await;
        // Silently ignore the request if this sender is already in any room.
        if server.rooms.iter().any(|(_, room)| {
            room.senders
                .iter()
                .any(|sender| Arc::ptr_eq(sender, &self.sender))
        }) {
            return;
        }
        let Some(room) = server.rooms.get_mut(&room_id) else {
            return self.send_error_packet(&self.sender, "The room does not exist.".to_string()).await;
        };
        if room.senders.len() >= room.size {
            return self
                .send_error_packet(&self.sender, "The room is full.".to_string())
                .await;
        }
        room.senders.push(self.sender.clone());
        // The joining client gets the peer count; everyone else just gets a
        // bare Join notification.
        for sender in &room.senders {
            if Arc::ptr_eq(sender, &self.sender) {
                self.send_packet(
                    &sender,
                    ResponsePacket::Join {
                        size: Some(room.senders.len() - 1),
                    },
                )
                .await;
            } else {
                self.send_packet(&sender, ResponsePacket::Join { size: None })
                    .await;
            }
        }
        self.room_id = Some(room_id);
    }

    /// Leaves the current room (if any), notifying remaining members and
    /// deleting the room once it is empty.
    async fn handle_leave_room(&mut self, server: &RwLock<Server>) {
        let mut server = server.write().await;
        let Some(room_id) = &self.room_id else {
            return;
        };
        let Some(room) = server.rooms.get_mut(room_id) else {
            return;
        };
        let Some(index) = room.senders.iter().position(|sender| Arc::ptr_eq(sender, &self.sender)) else {
            return;
        };
        room.senders.remove(index);
        // Remaining peers learn which slot was vacated.
        for sender in &room.senders {
            self.send_packet(&sender, ResponsePacket::Leave { index })
                .await;
        }
        if room.senders.is_empty() {
            server.rooms.remove(room_id);
        }
        self.room_id = None;
    }

    /// Dispatches an incoming frame: text frames carry JSON control packets;
    /// binary frames are relayed peer-to-peer within the room.
    async fn handle_message(&mut self, server: &RwLock<Server>, message: Message) {
        if message.is_text() {
            let Ok(text) = message.into_text() else { return };
            // Unparseable packets are dropped silently.
            let Ok(packet) = serde_json::from_str(&text) else { return };
            return match packet {
                RequestPacket::Create { size } => self.handle_create_room(server, size).await,
                RequestPacket::Join { id } => self.handle_join_room(server, id).await,
                RequestPacket::Leave => self.handle_leave_room(server).await,
            };
        } else if message.is_binary() {
            let server = server.read().await;
            let Some(room_id) = &self.room_id else {
                return;
            };
            let Some(room) = server.rooms.get(room_id) else {
                return;
            };
            let Some(index) = room.senders.iter().position(|sender| Arc::ptr_eq(sender, &self.sender)) else {
                return;
            };
            let mut data = message.into_data();
            if data.is_empty() {
                return;
            }
            // Byte 0 addresses the destination peer; it is overwritten with
            // the sender's own index before forwarding so the receiver knows
            // who sent it. Room sizes are < 255, so indices fit in a u8.
            let source = u8::try_from(index).unwrap();
            let destination = usize::from(data[0]);
            data[0] = source;
            if destination < room.senders.len() {
                return self
                    .send(&room.senders[destination], Message::Binary(data))
                    .await;
            } else if destination == usize::from(u8::MAX) {
                // 255 is the broadcast address: forward to all peers but self.
                for sender in &room.senders {
                    if Arc::ptr_eq(sender, &self.sender) {
                        continue;
                    }
                    self.send(&sender, Message::Binary(data.clone())).await;
                }
            }
        }
    }

    /// Connection teardown is equivalent to leaving the room.
    async fn handle_close(&mut self, server: &RwLock<Server>) {
        self.handle_leave_room(server).await
    }
}
// Public building blocks for the elliptic-curve crate; `internal_macros`
// stays private to the crate.
pub mod field_element;
pub mod point;
pub mod s256_field;
mod internal_macros;
/// Sums all elements of `nbrs`; an empty slice sums to 0.
///
/// Takes `&[i32]` instead of `&Vec<i32>` so any slice works; existing call
/// sites passing `&Vec<i32>` still compile via deref coercion.
pub fn array_sum(nbrs: &[i32]) -> i32 {
    // `Iterator::sum` replaces the hand-rolled fold over a zero seed.
    nbrs.iter().sum()
}
use proc_macro as pm;
use quote::quote;

/// Rewrites an `impl` block: emits the block unchanged, plus one free
/// function `<Type>_<method>((args...), ctx)` per method that forwards to it.
///
/// For each method: a `self`/`&self` receiver is replaced by a by-value
/// `self_param: Type` parameter, the *last* argument is split off as a
/// separate "context" parameter, and the remaining arguments are bundled
/// into a single tuple parameter.
pub(crate) fn rewrite(_attr: syn::AttributeArgs, item: syn::ItemImpl) -> pm::TokenStream {
    use crate::new_id;
    let mut functions = Vec::new();
    // Only plain path types (`impl Foo<..>`) are supported as self types.
    let ty_name = if let syn::Type::Path(x) = item.self_ty.as_ref() {
        &x.path.segments.last().unwrap().ident
    } else {
        unreachable!("Found non-path type in impl")
    };
    let (impl_generics, ty_generics, where_clause) = item.generics.split_for_impl();
    for item in &item.items {
        if let syn::ImplItem::Method(item) = item {
            let mut inputs = item.sig.inputs.clone().into_iter().collect::<Vec<_>>();
            let method_name = &item.sig.ident;
            // Generated function name: `<Type>_<method>`.
            let name = new_id(format!("{}_{}", ty_name, method_name));
            let output = &item.sig.output;
            // Replace any receiver with an explicit typed `self_param`.
            if matches!(inputs[0], syn::FnArg::Receiver(_)) {
                inputs[0] = syn::parse_quote!(self_param: #ty_name #ty_generics);
            }
            // Split off the trailing argument; it becomes the standalone
            // context parameter of the generated wrapper.
            let ctx = match inputs.pop().unwrap() {
                syn::FnArg::Receiver(_) => unreachable!("Receiver in impl method"),
                syn::FnArg::Typed(p) => p,
            };
            let ctx_id = ctx.pat;
            let ctx_ty = ctx.ty;
            let (ids, tys): (Vec<_>, Vec<_>) = inputs
                .iter()
                .map(|i| match i {
                    syn::FnArg::Receiver(_) => unreachable!(),
                    syn::FnArg::Typed(i) => (&i.pat, &i.ty),
                })
                .unzip();
            // Wrapper signature: tuple of remaining args, then the context.
            functions.push(quote! {
                pub fn #name #impl_generics ((#(#ids,)*):(#(#tys,)*), #ctx_id: #ctx_ty) #output #where_clause {
                    #ty_name::#method_name(#(#ids,)* #ctx_id)
                }
            });
        }
    }
    // Keep the original impl and append the generated free functions.
    quote::quote!(
        #item
        #(#functions)*
    )
    .into()
}
/*
Create a resource group, similar to:
az group create --name $RESOURCE_GROUP_NAME --location $RESOURCE_GROUP_LOCATION

export RESOURCE_GROUP_NAME=azuresdkforrust
export RESOURCE_GROUP_LOCATION=southcentralus
cargo run --package azure_mgmt_resources --example group_create
*/

use azure_identity::token_credentials::AzureCliCredential;
use azure_mgmt_resources::{models::ResourceGroup, operations::resource_groups};
use std::env;

// Example entry point: authenticates via the Azure CLI, reads the target
// group name/location from the environment, and creates (or updates) the
// resource group.
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let http_client = azure_core::new_http_client();
    // Reuses the credentials of a locally logged-in `az` CLI session.
    let token_credential = AzureCliCredential {};
    let subscription_id = &AzureCliCredential::get_subscription()?;
    // Both environment variables are mandatory; missing ones become errors.
    let resource_group_name = &env::var("RESOURCE_GROUP_NAME").map_err(|_| "RESOURCE_GROUP_NAME required")?;
    let resource_group_location = env::var("RESOURCE_GROUP_LOCATION").map_err(|_| "RESOURCE_GROUP_LOCATION required")?;
    let config = &azure_mgmt_resources::config(http_client, Box::new(token_credential)).build();
    // Only `location` is required by the API; the service fills in the rest.
    let group = ResourceGroup {
        id: None,
        name: None,
        type_: None,
        properties: None,
        location: resource_group_location,
        managed_by: None,
        tags: None,
    };
    let group_created = resource_groups::create_or_update(config, resource_group_name, &group, subscription_id).await?;
    println!("group created: {:#?}", group_created);
    Ok(())
}
use log::{debug, info, warn};
use std::{
    net::{SocketAddr, TcpStream},
    sync::{Arc, RwLock},
    time::Duration,
};

use crate::tonic_ext::{GenericCodec, GenericSvc};
use crate::MockBuilder;
use rand::Rng;
use tonic::{
    codegen::{
        http::{self, HeaderMap, HeaderValue, Method},
        Body, Never, StdError,
    },
    Code,
};

/// A running gRPC server
/// You do not directly create this object instead use the
/// macro generated server to instantiate this for you.
/// ```no_run
/// mod mock_server {
///     wiremock_grpc::generate!("hello.Greeter", MyServer);
/// }
/// use mock_server::*;
/// ```
/// `MyServer` also [`Deref`](core::ops::Deref) to [`GrpcServer`](crate::grpc_server::GrpcServer).
/// Therefore you can call [`setup`](crate::grpc_server::GrpcServer::setup) / [`find`](crate::grpc_server::GrpcServer::find) functions on it.
#[derive(Clone, Debug)]
pub struct GrpcServer {
    /// Local address the mock server listens on.
    pub(crate) address: SocketAddr,
    /// `Some` once `_start` has run; owns the serve-task handle.
    inner: Arc<Option<Inner>>,
    /// Registered expectations together with their recorded invocations.
    pub(crate) rules: Arc<RwLock<Vec<RuleItem>>>,
}

/// One registered expectation plus a record of every request it matched.
#[derive(Debug)]
pub(crate) struct RuleItem {
    pub(crate) rule: MockBuilder,
    pub(crate) invocations_count: u32,
    pub(crate) invocations: Vec<RequestItem>,
}

/// Represent a single handled request to the mock server.
#[derive(Debug, Clone)]
pub struct RequestItem {
    pub headers: HeaderMap,
    pub method: Method,
    pub uri: String,
}

impl RuleItem {
    /// Records headers/method/URI of a matched request (body is not kept).
    fn record_request<B>(&mut self, r: &http::Request<B>)
    where
        B: Body + Send + 'static,
        B::Error: Into<StdError> + Send + 'static,
    {
        self.invocations_count += 1;
        self.invocations.push(RequestItem {
            headers: r.headers().clone(),
            method: r.method().clone(),
            uri: r.uri().to_string(),
        });
    }
}

/// Started-server state; dropping it abandons (does not abort) the task.
#[derive(Debug)]
struct Inner {
    #[allow(dead_code)]
    server_handle: tokio::task::JoinHandle<Result<(), tonic::transport::Error>>,
}

impl Drop for GrpcServer {
    /// On teardown of a *started* server, panics if any registered rule was
    /// never invoked, listing the unmatched paths (wiremock-style assertion).
    fn drop(&mut self) {
        debug!("dropping server {:?}", self);
        if self.inner.as_ref().is_some() {
            info!("Terminating server");
            if self.rules_len() > 0 && self.rules_unmatched() > 0 {
                let unmatched_paths = self
                    .rules
                    .read()
                    .unwrap()
                    .iter()
                    .filter(|f| f.invocations_count == 0)
                    .map(|f| f.rule.path.clone())
                    .collect::<Vec<String>>();
                // Clear rules first so the panic unwinding doesn't re-trigger.
                self.reset();
                panic!(
                    "Server terminated with unmatched rules: \n{}",
                    unmatched_paths.join("\n")
                );
            }
        }
    }
}

impl GrpcServer {
    /// Creates an unstarted server bound to `[::1]:port`.
    pub fn new(port: u16) -> Self {
        Self {
            address: format!("[::1]:{}", port).parse().unwrap(),
            inner: Arc::default(),
            rules: Arc::default(),
        }
    }

    /// Picks a random port in 50000..60000 that refuses connections
    /// (i.e. appears unused). Loops until one is found.
    pub async fn find_unused_port() -> Option<u16> {
        let mut rng = rand::thread_rng();
        loop {
            let port: u16 = rng.gen_range(50000..60000);
            let addr: SocketAddr = format!("[::1]:{}", port).parse().unwrap();
            if TcpStream::connect_timeout(&addr, std::time::Duration::from_millis(25)).is_err() {
                return Some(port);
            }
            tokio::time::sleep(Duration::from_millis(25)).await;
        }
    }

    /// Adopts an already-spawned serve task and waits (up to ~1s, 40 x 25ms)
    /// for the port to start accepting connections.
    pub async fn _start(
        &mut self,
        f: tokio::task::JoinHandle<Result<(), tonic::transport::Error>>,
    ) {
        info!("Starting gRPC started in {}", self.address());
        let thread = f;
        for _ in 0..40 {
            if TcpStream::connect_timeout(&self.address, std::time::Duration::from_millis(25))
                .is_ok()
            {
                break;
            }
            tokio::time::sleep(Duration::from_millis(25)).await;
        }
        self.inner = Arc::new(Some(Inner {
            server_handle: thread,
        }));
        info!("Server started in {}", self.address());
    }

    /// Mounts a mock rule on this server and returns the builder back.
    pub fn setup<M>(&mut self, r: M) -> MockBuilder
    where
        M: Into<MockBuilder> + Clone + crate::Mountable,
    {
        r.clone().mount(self);
        r.into()
    }

    /// Reset all mappings
    pub fn reset(&self) {
        self.rules.write().unwrap().clear();
    }

    pub fn address(&self) -> &SocketAddr {
        &self.address
    }

    /// Routes one HTTP/2 request: the first rule whose `path` matches gets
    /// recorded and answered (with its configured body/status); anything else
    /// gets an empty `Unimplemented` gRPC response.
    pub fn handle_request<B>(
        &self,
        req: http::Request<B>,
    ) -> tonic::codegen::BoxFuture<http::Response<tonic::body::BoxBody>, Never>
    where
        B: Body + Send + 'static,
        B::Error: Into<StdError> + Send + 'static,
    {
        info!("Request to {}", req.uri().path());
        let path = req.uri().path();
        let mut inner = self.rules.write().unwrap();
        if let Some(item) = inner.iter_mut().find(|x| x.rule.path == path) {
            info!("Matched rule {:?}", item);
            item.record_request(&req);
            let code = item.rule.status_code.unwrap_or(Code::Ok);
            if let Some(body) = &item.rule.result {
                debug!("Returning body ({} bytes)", body.len());
                let body = body.clone();
                let fut = async move {
                    // Serve the canned bytes through tonic's unary machinery
                    // so framing/trailers look like a real gRPC reply.
                    let method = GenericSvc(body);
                    let codec = GenericCodec::default();
                    let mut grpc = tonic::server::Grpc::new(codec);
                    let mut result = grpc.unary(method, req).await;
                    result.headers_mut().append(
                        "grpc-status",
                        HeaderValue::from_str(format!("{}", code as u32).as_str()).unwrap(),
                    );
                    Ok(result)
                };
                return Box::pin(fut);
            } else {
                // No body configured: HTTP 200 with the gRPC status header.
                let status = code as u32;
                let builder = http::Response::builder()
                    .status(200)
                    .header("content-type", "application/grpc")
                    .header("grpc-status", format!("{}", status));
                info!("Returning empty body with status {}", status);
                return Box::pin(async move {
                    let body = builder.body(tonic::body::empty_body()).unwrap();
                    Ok(body)
                });
            };
        }
        // No rule matched: reply with gRPC `Unimplemented`.
        warn!("Request unhandled");
        let builder = http::Response::builder()
            .status(200)
            .header("content-type", "application/grpc")
            .header("grpc-status", format!("{}", Code::Unimplemented as u32));
        return Box::pin(async move {
            let body = builder.body(tonic::body::empty_body()).unwrap();
            Ok(body)
        });
    }
}
/**
 * cargo new ep1
 * cd C:\Users\むずでょ\OneDrive\ドキュメント\practice-rust\concurrency\ep1
 * cargo build --example data-race-1
 * cargo run --example data-race-1
 *
 * [Concurrency (The Rust Programming Language, Japanese ed.)](https://doc.rust-jp.rs/the-rust-programming-language-ja/1.6/book/concurrency.html)
 */
use std::thread;
use std::time::Duration;

fn main() {
    let mut data = 1;
    // `move` copies `data` into the closure (i32 is `Copy`), so the child
    // thread increments its own private copy — there is no shared state and
    // therefore no actual data race here.
    let handle = thread::spawn(move || {
        data += 1;
        println!("Child | data: {}", data);
    });
    handle.join().unwrap();
    // The parent's `data` was never touched by the child: this prints 1.
    println!("Main | data: {}", data);
    thread::sleep(Duration::from_millis(50));
}
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// NOTE(review): historical pre-1.0 Rust (the libgreen runtime). It uses
// long-removed language features (`uint`/`int`, `proc()`, `box` patterns,
// `[T, ..N]` array syntax, `std::rt`) and will not compile on modern Rust;
// kept verbatim for reference.

use stack::Stack;
use std::uint;
use std::mem::transmute;
use std::rt::stack;
use std::raw;
#[cfg(target_arch = "x86_64")]
use std::simd;
use libc;

// FIXME #7761: Registers is boxed so that it is 16-byte aligned, for storing
// SSE regs. It would be marginally better not to do this. In C++ we
// use an attribute on a struct.
// FIXME #7761: It would be nice to define regs as `Box<Option<Registers>>`
// since the registers are sometimes empty, but the discriminant would
// then misalign the regs again.
pub struct Context {
    /// Hold the registers while the task or scheduler is suspended
    regs: Box<Registers>,
    /// Lower bound and upper bound for the stack
    stack_bounds: Option<(uint, uint)>,
}

// Entry point invoked on first switch into a new context; must never return.
pub type InitFn = extern "C" fn(uint, *mut (), *mut ()) -> !;

impl Context {
    /// An empty placeholder context (no registers, no stack bounds).
    pub fn empty() -> Context {
        Context {
            regs: new_regs(),
            stack_bounds: None,
        }
    }

    /// Create a new context that will resume execution by running proc()
    ///
    /// The `init` function will be run with `arg` and the `start` procedure
    /// split up into code and env pointers. It is required that the `init`
    /// function never return.
    ///
    /// FIXME: this is basically an awful the interface. The main reason for
    /// this is to reduce the number of allocations made when a green
    /// task is spawned as much as possible
    pub fn new(init: InitFn, arg: uint, start: proc():Send,
               stack: &mut Stack) -> Context {
        let sp: *const uint = stack.end();
        let sp: *mut uint = sp as *mut uint;
        // Save and then immediately load the current context,
        // which we will then modify to call the given function when restored
        let mut regs = new_regs();

        initialize_call_frame(&mut *regs,
                              init,
                              arg,
                              unsafe { transmute(start) },
                              sp);

        // Scheduler tasks don't have a stack in the "we allocated it" sense,
        // but rather they run on pthreads stacks. We have complete control over
        // them in terms of the code running on them (and hopefully they don't
        // overflow). Additionally, their coroutine stacks are listed as being
        // zero-length, so that's how we detect what's what here.
        let stack_base: *const uint = stack.start();
        let bounds = if sp as libc::uintptr_t == stack_base as libc::uintptr_t {
            None
        } else {
            Some((stack_base as uint, sp as uint))
        };
        return Context {
            regs: regs,
            stack_bounds: bounds,
        }
    }

    /* Switch contexts

    Suspend the current execution context and resume another by
    saving the registers values of the executing thread to a Context
    then loading the registers from a previously saved Context.
    */
    pub fn swap(out_context: &mut Context, in_context: &Context) {
        rtdebug!("swapping contexts");
        let out_regs: &mut Registers = match out_context {
            &Context { regs: box ref mut r, .. } => r
        };
        let in_regs: &Registers = match in_context {
            &Context { regs: box ref r, .. } => r
        };

        rtdebug!("noting the stack limit and doing raw swap");

        unsafe {
            // Right before we switch to the new context, set the new context's
            // stack limit in the OS-specified TLS slot. This also means that
            // we cannot call any more rust functions after record_stack_bounds
            // returns because they would all likely fail due to the limit being
            // invalid for the current task. Lucky for us `rust_swap_registers`
            // is a C function so we don't have to worry about that!
            match in_context.stack_bounds {
                Some((lo, hi)) => stack::record_rust_managed_stack_bounds(lo, hi),
                // If we're going back to one of the original contexts or
                // something that's possibly not a "normal task", then reset
                // the stack limit to 0 to make morestack never fail
                None => stack::record_rust_managed_stack_bounds(0, uint::MAX),
            }
            rust_swap_registers(out_regs, in_regs)
        }
    }
}

#[link(name = "context_switch", kind = "static")]
extern {
    fn rust_swap_registers(out_regs: *mut Registers, in_regs: *const Registers);
}

// Register contexts used in various architectures
//
// These structures all represent a context of one task throughout its
// execution. Each struct is a representation of the architecture's register
// set. When swapping between tasks, these register sets are used to save off
// the current registers into one struct, and load them all from another.
//
// Note that this is only used for context switching, which means that some of
// the registers may go unused. For example, for architectures with
// callee/caller saved registers, the context will only reflect the callee-saved
// registers. This is because the caller saved registers are already stored
// elsewhere on the stack (if it was necessary anyway).
//
// Additionally, there may be fields on various architectures which are unused
// entirely because they only reflect what is theoretically possible for a
// "complete register set" to show, but user-space cannot alter these registers.
// An example of this would be the segment selectors for x86.
//
// These structures/functions are roughly in-sync with the source files inside
// of src/rt/arch/$arch. The only currently used function from those folders is
// the `rust_swap_registers` function, but that's only because for now segmented
// stacks are disabled.

#[cfg(target_arch = "x86")]
#[repr(C)]
struct Registers {
    eax: u32, ebx: u32, ecx: u32, edx: u32,
    ebp: u32, esi: u32, edi: u32, esp: u32,
    cs: u16, ds: u16, ss: u16, es: u16, fs: u16, gs: u16,
    eflags: u32, eip: u32
}

#[cfg(target_arch = "x86")]
fn new_regs() -> Box<Registers> {
    box Registers {
        eax: 0, ebx: 0, ecx: 0, edx: 0,
        ebp: 0, esi: 0, edi: 0, esp: 0,
        cs: 0, ds: 0, ss: 0, es: 0, fs: 0, gs: 0,
        eflags: 0, eip: 0
    }
}

#[cfg(target_arch = "x86")]
fn initialize_call_frame(regs: &mut Registers, fptr: InitFn, arg: uint,
                         procedure: raw::Procedure, sp: *mut uint) {
    let sp = sp as *mut uint;
    // x86 has interesting stack alignment requirements, so do some alignment
    // plus some offsetting to figure out what the actual stack should be.
    let sp = align_down(sp);
    let sp = mut_offset(sp, -4);

    unsafe { *mut_offset(sp, 2) = procedure.env as uint };
    unsafe { *mut_offset(sp, 1) = procedure.code as uint };
    unsafe { *mut_offset(sp, 0) = arg as uint };
    let sp = mut_offset(sp, -1);
    unsafe { *sp = 0 }; // The final return address

    regs.esp = sp as u32;
    regs.eip = fptr as u32;

    // Last base pointer on the stack is 0
    regs.ebp = 0;
}

// windows requires saving more registers (both general and XMM), so the windows
// register context must be larger.
#[cfg(all(windows, target_arch = "x86_64"))]
#[repr(C)]
struct Registers {
    gpr: [libc::uintptr_t, ..14],
    _xmm: [simd::u32x4, ..10]
}
#[cfg(all(not(windows), target_arch = "x86_64"))]
#[repr(C)]
struct Registers {
    gpr: [libc::uintptr_t, ..10],
    _xmm: [simd::u32x4, ..6]
}

#[cfg(all(windows, target_arch = "x86_64"))]
fn new_regs() -> Box<Registers> {
    box() Registers {
        gpr: [0, ..14],
        _xmm: [simd::u32x4(0,0,0,0), ..10]
    }
}
#[cfg(all(not(windows), target_arch = "x86_64"))]
fn new_regs() -> Box<Registers> {
    box() Registers {
        gpr: [0, ..10],
        _xmm: [simd::u32x4(0,0,0,0), ..6]
    }
}

#[cfg(target_arch = "x86_64")]
fn initialize_call_frame(regs: &mut Registers, fptr: InitFn, arg: uint,
                         procedure: raw::Procedure, sp: *mut uint) {
    extern { fn rust_bootstrap_green_task(); }

    // Redefinitions from rt/arch/x86_64/regs.h
    static RUSTRT_RSP: uint = 1;
    static RUSTRT_IP: uint = 8;
    static RUSTRT_RBP: uint = 2;
    static RUSTRT_R12: uint = 4;
    static RUSTRT_R13: uint = 5;
    static RUSTRT_R14: uint = 6;
    static RUSTRT_R15: uint = 7;

    let sp = align_down(sp);
    let sp = mut_offset(sp, -1);

    // The final return address. 0 indicates the bottom of the stack
    unsafe { *sp = 0; }

    rtdebug!("creating call frame");
    rtdebug!("fptr {:#x}", fptr as libc::uintptr_t);
    rtdebug!("arg {:#x}", arg);
    rtdebug!("sp {}", sp);

    // These registers are frobbed by rust_bootstrap_green_task into the right
    // location so we can invoke the "real init function", `fptr`.
    regs.gpr[RUSTRT_R12] = arg as libc::uintptr_t;
    regs.gpr[RUSTRT_R13] = procedure.code as libc::uintptr_t;
    regs.gpr[RUSTRT_R14] = procedure.env as libc::uintptr_t;
    regs.gpr[RUSTRT_R15] = fptr as libc::uintptr_t;

    // These registers are picked up by the regular context switch paths. These
    // will put us in "mostly the right context" except for frobbing all the
    // arguments to the right place. We have the small trampoline code inside of
    // rust_bootstrap_green_task to do that.
    regs.gpr[RUSTRT_RSP] = sp as libc::uintptr_t;
    regs.gpr[RUSTRT_IP] = rust_bootstrap_green_task as libc::uintptr_t;

    // Last base pointer on the stack should be 0
    regs.gpr[RUSTRT_RBP] = 0;
}

#[cfg(target_arch = "arm")]
type Registers = [libc::uintptr_t, ..32];

#[cfg(target_arch = "arm")]
fn new_regs() -> Box<Registers> { box {[0, .. 32]} }

#[cfg(target_arch = "arm")]
fn initialize_call_frame(regs: &mut Registers, fptr: InitFn, arg: uint,
                         procedure: raw::Procedure, sp: *mut uint) {
    extern { fn rust_bootstrap_green_task(); }

    let sp = align_down(sp);
    // sp of arm eabi is 8-byte aligned
    let sp = mut_offset(sp, -2);

    // The final return address. 0 indicates the bottom of the stack
    unsafe { *sp = 0; }

    // ARM uses the same technique as x86_64 to have a landing pad for the start
    // of all new green tasks. Neither r1/r2 are saved on a context switch, so
    // the shim will copy r3/r4 into r1/r2 and then execute the function in r5
    regs[0] = arg as libc::uintptr_t;              // r0
    regs[3] = procedure.code as libc::uintptr_t;   // r3
    regs[4] = procedure.env as libc::uintptr_t;    // r4
    regs[5] = fptr as libc::uintptr_t;             // r5
    regs[13] = sp as libc::uintptr_t;                          // #52 sp, r13
    regs[14] = rust_bootstrap_green_task as libc::uintptr_t;   // #56 pc, r14 --> lr
}

#[cfg(any(target_arch = "mips", target_arch = "mipsel"))]
type Registers = [libc::uintptr_t, ..32];

#[cfg(any(target_arch = "mips", target_arch = "mipsel"))]
fn new_regs() -> Box<Registers> { box {[0, .. 32]} }

#[cfg(any(target_arch = "mips", target_arch = "mipsel"))]
fn initialize_call_frame(regs: &mut Registers, fptr: InitFn, arg: uint,
                         procedure: raw::Procedure, sp: *mut uint) {
    let sp = align_down(sp);
    // sp of mips o32 is 8-byte aligned
    let sp = mut_offset(sp, -2);

    // The final return address. 0 indicates the bottom of the stack
    unsafe { *sp = 0; }

    regs[4] = arg as libc::uintptr_t;
    regs[5] = procedure.code as libc::uintptr_t;
    regs[6] = procedure.env as libc::uintptr_t;
    regs[29] = sp as libc::uintptr_t;
    regs[25] = fptr as libc::uintptr_t;
    regs[31] = fptr as libc::uintptr_t;
}

// Rounds `sp` down to a 16-byte boundary.
fn align_down(sp: *mut uint) -> *mut uint {
    let sp = (sp as uint) & !(16 - 1);
    sp as *mut uint
}

// ptr::mut_offset is positive ints only
#[inline]
pub fn mut_offset<T>(ptr: *mut T, count: int) -> *mut T {
    use std::mem::size_of;
    (ptr as int + count * (size_of::<T>() as int)) as *mut T
}
//! `kdl` is a "document-oriented" parser and API for the [KDL Document //! Language](https://kdl.dev), a node-based, human-friendly configuration and //! serialization format. Unlike serde-based implementations, this crate //! preserves formatting when editing, as well as when inserting or changing //! values with custom formatting. This is most useful when working with //! human-maintained KDL files. //! //! You can think of this crate as //! [`toml_edit`](https://crates.io/crates/toml_edit), but for KDL. //! //! If you don't care about formatting or programmatic manipulation, you might //! check out [`knuffel`](https://crates.io/crates/knuffel) or //! [`kaydle`](https://crates.io/crates/kaydle) instead for serde (or //! serde-like) parsing. //! //! ## Example //! //! ```rust //! use kdl::KdlDocument; //! //! let doc_str = r#" //! hello 1 2 3 //! //! world prop="value" { //! child 1 //! child 2 //! } //! "#; //! //! let doc: KdlDocument = doc_str.parse().expect("failed to parse KDL"); //! //! assert_eq!( //! doc.get_args("hello"), //! vec![&1.into(), &2.into(), &3.into()] //! ); //! //! assert_eq!( //! doc.get("world").map(|node| &node["prop"]), //! Some(&"value".into()) //! ); //! //! // Documents fully roundtrip: //! assert_eq!(doc.to_string(), doc_str); //! ``` //! //! ## Controlling Formatting //! //! By default, everything is created with default formatting. You can parse //! items manually to provide custom representations, comments, etc: //! //! ```rust //! let node_str = r#" //! // indented comment //! "formatted" 1 /* comment */ \ //! 2; //! "#; //! //! let mut doc = kdl::KdlDocument::new(); //! doc.nodes_mut().push(node_str.parse().unwrap()); //! //! assert_eq!(&doc.to_string(), node_str); //! ``` //! //! [`KdlDocument`], [`KdlNode`], [`KdlEntry`], and [`KdlIdentifier`] can all //! be parsed and managed this way. //! //! ## Error Reporting //! //! [`KdlError`] implements [`miette::Diagnostic`] and can be used to display //! 
//! detailed, pretty-printed diagnostic messages when using [`miette::Result`]
//! and the `"fancy"` feature flag for `miette`:
//!
//! ```toml
//! # Cargo.toml
//! [dependencies]
//! miette = { version = "x.y.z", features = ["fancy"] }
//! ```
//!
//! ```no_run
//! fn main() -> miette::Result<()> {
//!     "foo 1.".parse::<kdl::KdlDocument>()?;
//!     Ok(())
//! }
//! ```
//!
//! This will display a message like:
//! ```text
//! Error:
//!   × Expected valid value.
//!    ╭────
//!  1 │ foo 1.
//!    ·     ─┬
//!    ·      ╰── invalid float
//!    ╰────
//!   help: Floating point numbers must be base 10, and have numbers after the decimal point.
//! ```
//!
//! ## Quirks
//!
//! ### Properties
//!
//! Multiple properties with the same name are allowed, and all duplicates
//! **will be preserved**, meaning those documents will correctly round-trip.
//! When using `node.get()`/`node["key"]` & company, the _last_ property with
//! that name's value will be returned.
//!
//! ### Numbers
//!
//! KDL itself does not specify a particular representation for numbers and
//! accepts just about anything valid, no matter how large or how small. This
//! means a few things:
//!
//! * Numbers without a decimal point are interpreted as [`u64`].
//! * Numbers with a decimal point are interpreted as [`f64`].
//! * Floating point numbers that evaluate to [`f64::INFINITY`] or
//!   [`f64::NEG_INFINITY`] or NaN will be represented as such in the values,
//!   instead of the original numbers.
//! * A similar restriction applies to overflowed [`u64`] values.
//! * The original _representation_ of these numbers will be preserved, unless
//!   you call [`KdlDocument::fmt`], in which case the original representation
//!   will be thrown away and the actual value will be used when serializing.
//!
//! ## License
//!
//! The code in this repository is covered by [the Apache-2.0
//! License](LICENSE.md).
#![deny(missing_debug_implementations, nonstandard_style)] #![warn(missing_docs, unreachable_pub, rust_2018_idioms, unreachable_pub)] #![cfg_attr(test, deny(warnings))] #![doc(html_favicon_url = "https://kdl.dev/favicon.ico")] #![doc(html_logo_url = "https://kdl.dev/logo.svg")] pub use document::*; pub use entry::*; pub use error::*; pub use identifier::*; pub use node::*; pub use value::*; mod document; mod entry; mod error; mod fmt; mod identifier; mod node; mod nom_compat; mod parser; mod value;
use crate::core; use std::time; use friday_storage; use friday_logging; use std::sync::{RwLock, Arc}; use friday_error::{FridayError, propagate, frierr}; use ureq; use pnet; // Even if local IP has not changed we will ping remote with new // ip once a day in-case that server forgets us or restarts. static REFRESH_REMOTE: u64 = 3600 * 24; // We will check for changes in local IP once an hour static REFRESH_LOCAL: u64 = 3600; use serde_derive::{Deserialize, Serialize}; #[derive(Serialize, Deserialize, Clone, Debug)] struct Config { disable: Option<bool>, local_ip: Option<String>, site_url: String, } pub struct KartaSite { config: Config, name: Arc<RwLock<String>>, port: u16, last_ip_update: time::Instant } #[derive(Serialize)] struct Message { // The URL to this machine url: String, // Name of this friday (not necessarily unique) name: String } impl KartaSite { /// Creates a Karta that will ping a server with information about this Fridays /// IP on the local network. This lets a user go visit that site and be redirected to /// the friday without having to look-up the Fridays IP on the local network itself. 
pub fn new(name: Arc<RwLock<String>>, port: u16) -> Result<KartaSite, FridayError> { friday_logging::info!("new"); return friday_storage::config::get_config("discovery/kartasite.json").map_or_else( propagate!("Failed to create KartaSite Karta"), |config: Config| match config.disable { Some(disable) => { if disable { // technically not an error, but an ok quickfix imo frierr!("discovery/kartasite.json contains field disable.") } else { Ok(KartaSite { config, name, port, last_ip_update: time::Instant::now() }) } }, None => Ok(KartaSite { config, name, port, last_ip_update: time::Instant::now() }) }); } fn update_local_ip(&mut self, ip: String) { self.last_ip_update = time::Instant::now(); self.config.local_ip = Some(ip); } /// Returns the first ipv4 address used in the network interface fn first_v4_ip(network: &pnet::datalink::NetworkInterface) -> Option<String> { network.ips .iter() .find(|n| n.is_ipv4()) .map(|i| i.ip().to_string()) } /// Returns the ip of the machine on the local network pub fn get_local_ip() -> Result<String, FridayError> { // Apparently the default interface for the machine is the one that is // 1. Up // Is not a loopback // and has an IP // So we will use this as the local URL let interfaces = pnet::datalink::interfaces(); // let default_interface = interfaces .iter() .find(|e| e.is_up() && !e.is_loopback() && !e.ips.is_empty()); match default_interface { Some(interface) => match KartaSite::first_v4_ip(interface) { Some(network) => Ok(network), // TODO: Maybe ipv6 is ok too? None => frierr!("Found no valid ipv4 interface") }, None => frierr!("Found no default interface on this machine") } } /// Returns the ip into a url that when used in a browser /// will redirect the user to the Friday web-interface of this machine. fn format_url(&self, ip: String) -> String { format!("http://{}:{}", ip, self.port) } /// Sends local IP to the remote server so that the server can redirect users to this local /// machine. 
fn send_ip(&mut self, ip: String) -> Result<(), friday_error::FridayError> { match self.name.clone().read() { Err(err) => frierr!("Failed to aquire lock for name - Reason: {}", err), Ok(name) => match serde_json::to_value(Message{ // URL that will redirect a user to this fridays local site url: self.format_url(ip.clone()), // Name of this Friday that the server can display if it wants // it is not guaranteed to be unique. name: name.clone() }) { Err(err) => frierr!("Failed to serialize to serde::value - Reason: {}", err), Ok(value) => { let response = ureq::put(self.config.site_url.as_str()).send_json(value); if response.status() == 200 { // Little logging to show that we successfully sent discovery to site friday_logging::info!( "Discovery: {} status {}", self.config.site_url, response.status()); return Ok(()); } else { return frierr!("Received status {} when sending IP to {} using 'KartaSite'", response.status(), self.config.site_url.as_str()); } } } } } } impl core::Karta for KartaSite { fn name(&self) -> String { "KartaSite".to_owned() } // Check if local IP changes every X seconds fn time_to_clue(&self) -> time::Duration { time::Duration::from_secs(REFRESH_LOCAL) } fn clue(&mut self) -> Result<(), friday_error::FridayError> { // TODO: Might want to notify remote server if our local IP changes so that it can // remove that IP from itself? match KartaSite::get_local_ip() { Err(err) => propagate!("Failed to get local IP")(err), Ok(local_ip) => match self.config.local_ip.clone() { // Tell server that this is our local ip! None => { // Update local IP self.update_local_ip(local_ip.clone()); // Tell server of IP self.send_ip(local_ip.clone()) }, Some(old_local_ip) => { // If old local ip does not match current local ip - we must // server to remove of our old IP and add our new IP! 
if old_local_ip != local_ip { // Update local IP self.update_local_ip(local_ip.clone()); // Tell server of IP self.send_ip(local_ip.clone()) } // Update remote even if local has not changed // In case remote has forgot us due to restart or something else if time::Instant::now() .duration_since(self.last_ip_update) .as_secs() > REFRESH_REMOTE { // We just None for old - so that the server // won't try to remove it - if the server removes // in wrong order it might add new ip and then remove it // since they are the same. This guarantees that even if // server is poorly coded it won't remove our ip :) self.send_ip(local_ip.clone()) } // Do nothing - all is good! :) else { Ok(()) } } } } } } #[cfg(test)] mod tests { use super::*; use std::env; #[test] fn create_ping_site() { env::set_var("FRIDAY_CONFIG", "./test-resources"); let name = Arc::new(RwLock::new("hi".to_owned())); KartaSite::new(name, 8000).expect("Failed to create KartaSite"); } #[test] fn get_local_ip() { friday_logging::info!("{}", KartaSite::get_local_ip().expect("Failed to get local URL")); } }
use std::collections::HashMap;
use std::hash::Hash;

/// A multiset ("bag"): counts how many times each key has been added.
#[derive(Default, Debug, Clone)]
pub struct Counter<T: Eq + Hash + Clone> {
    counts: HashMap<T, usize>,
}

impl<T: Clone + Eq + Hash> Counter<T> {
    /// Creates an empty counter.
    pub fn new() -> Self {
        Self {
            counts: HashMap::new(),
        }
    }

    /// Increments the count for `key` by one.
    pub fn add(&mut self, key: &T) {
        // Delegate so the entry/increment logic lives in one place.
        self.increment_count(key, 1)
    }

    /// Increments the count for `key` by `count`.
    pub fn increment_count(&mut self, key: &T, count: usize) {
        *self.counts.entry(key.clone()).or_insert(0) += count
    }

    /// Returns `true` if `key` has been counted at least once.
    ///
    /// Fix: now takes `&self` — membership testing does not mutate the
    /// counter, so callers holding only a shared reference can use it
    /// (callers with `&mut` still work via reborrow).
    pub fn contains(&self, key: &T) -> bool {
        self.counts.contains_key(key)
    }

    /// Iterates over `(key, count)` pairs in arbitrary order.
    pub fn iter(&self) -> impl Iterator<Item = (&T, &usize)> {
        self.counts.iter()
    }

    /// Iterates over the distinct keys.
    pub fn keys(&self) -> impl Iterator<Item = &T> {
        self.counts.keys()
    }

    /// Consumes the counter, yielding its distinct keys.
    pub fn into_keys(self) -> impl Iterator<Item = T> {
        self.counts.into_keys()
    }

    /// Iterates over the counts (order matches nothing in particular).
    pub fn counts(&self) -> impl Iterator<Item = &usize> {
        self.counts.values()
    }

    /// Consumes the counter, yielding its counts.
    pub fn into_counts(self) -> impl Iterator<Item = usize> {
        self.counts.into_values()
    }
}

impl<T: Clone + Eq + Hash> FromIterator<T> for Counter<T> {
    /// Builds a counter by tallying every item the iterator yields.
    fn from_iter<I: IntoIterator<Item = T>>(iterator: I) -> Self {
        let mut counter = Self::new();
        for item in iterator {
            counter.add(&item);
        }
        counter
    }
}

impl<T: Clone + Eq + Hash> IntoIterator for Counter<T> {
    type Item = (T, usize);
    type IntoIter = std::collections::hash_map::IntoIter<T, usize>;

    /// Consumes the counter, yielding `(key, count)` pairs.
    fn into_iter(self) -> Self::IntoIter {
        self.counts.into_iter()
    }
}
use super::floyd_warshall;

/// K4 (complete graph on four nodes, all edge weights 1): every node is at
/// distance 0 from itself and distance 1 from every other node.
#[test]
fn test_no_intermediate() {
    use petgraph::Graph;
    let mut graph = Graph::new_undirected();
    let a = graph.add_node(0);
    let b = graph.add_node(1);
    let c = graph.add_node(2);
    let d = graph.add_node(3);
    graph.extend_with_edges(
        &[
            (a, b, 1usize),
            (a, c, 1usize),
            (a, d, 1usize),
            (b, c, 1usize),
            (b, d, 1usize),
            (c, d, 1usize),
        ],
    );
    let m = floyd_warshall(&graph);
    println!("{:?}", m);
    for i in 0..4 {
        for j in 0..4 {
            if i == j {
                assert_eq!(m.get_path_len(i, j), 0);
            } else {
                assert_eq!(m.get_path_len(i, j), 1);
            }
        }
    }
}

/// Triangle where the direct a-c edge (weight 3) is beaten by the two-hop
/// route a-b-c (total 2): verifies intermediate nodes are considered.
#[test]
fn test_intermediate() {
    use petgraph::Graph;
    let mut graph = Graph::new_undirected();
    let a = graph.add_node(0);
    let b = graph.add_node(1);
    let c = graph.add_node(2);
    graph.extend_with_edges(&[(a, b, 1usize), (b, c, 1), (a, c, 3)]);
    let m = floyd_warshall(&graph);
    println!("{:?}", m);
    assert_eq!(m.get_path_len(0, 0), 0);
    assert_eq!(m.get_path_len(1, 1), 0);
    assert_eq!(m.get_path_len(2, 2), 0);
    assert_eq!(m.get_path_len(0, 1), 1);
    assert_eq!(m.get_path_len(1, 2), 1);
    assert_eq!(m.get_path_len(0, 2), 2);
}

// #[test]
// fn test_cache_friendliness() {
//     use petgraph::Graph;
//     let mut graph: Graph<usize, usize, _, _> = Graph::new_undirected();
//     for i in 0..10 {
//         graph.add_node(i);
//     }
//     let dists = floyd_warshall(&graph);
//     println!("{:?}", dists);
// }

/// Interactive smoke test over a random sparse 10-node graph. `#[ignore]`d
/// because it blocks on stdin (`read!`) in an endless query loop; it also
/// dumps the graph to `random.dot` for inspection with Graphviz.
#[test]
#[ignore]
fn test_random() {
    use petgraph::Graph;
    use rand;
    use rand::Rng;
    let mut graph = Graph::new_undirected();
    let mut rng = rand::thread_rng();
    let mut vec = Vec::new();
    for i in 0..10 {
        vec.push(graph.add_node(i));
    }
    // ~10% chance of an edge between each ordered pair, weight in 0..100.
    for v1 in &vec {
        for v2 in &vec {
            if v1 != v2 && rng.next_f32() < 0.1 {
                let w = (rng.next_u64() as usize) % 100;
                graph.add_edge(*v1, *v2, w);
            }
        }
    }
    let m = floyd_warshall(&graph);
    println!("{:?}", m);
    use petgraph::dot::Dot;
    use std::fs::File;
    use std::io::prelude::*;
    let mut file = File::create("random.dot").unwrap();
    let b = format!("{:?}", Dot::new(&graph));
    let b = b.as_bytes();
    file.write_all(b).unwrap();
    loop {
        let i: usize = read!();
        let j: usize = read!();
        if m.does_path_exist(i, j) {
            let len = m.get_path_len(i, j);
            let path = m.get_path_iter(i, j);
            // The stored path has a fixed orientation; reverse it when the
            // query goes the other way (i > j).
            let path: Vec<&usize> = if i > j {
                path.rev().collect()
            } else {
                path.collect()
            };
            println!("The path from {} to {} has total length {}.", i, j, len);
            println!("Path: {:?}", path);
        } else {
            println!("There is no path from {} to {}.", i, j)
        }
    }
}
extern crate argparse;
extern crate ansi_term;
extern crate rand;
extern crate random_things;
extern crate clipboard;

use rand::{Rng};
use argparse::{ArgumentParser, StoreTrue, Store};
use ansi_term::Colour;
use random_things::random_string;
use clipboard::ClipboardContext;

// Requested lengths below this trigger a (non-fatal) warning in verbose mode.
const MINIMUM_LENGTH: u32 = 10;

/// Parsed command-line options.
struct Options {
    length: u32,    // desired password length
    silently: bool  // when set, print only the password itself
}

/// Generates a random password (length from `-l`, otherwise random in
/// 10..=20), prints it, and copies it to the system clipboard.
fn main() {
    let mut options = Options {
        length: rand::thread_rng().gen_range(MINIMUM_LENGTH, 21),
        silently: false
    };
    let password: String;
    {
        // this block limits scope of borrows by ap.refer() method
        let mut ap = ArgumentParser::new();
        ap.set_description("This tool generates a random password and copies it to clipboard");
        ap.refer(&mut options.length)
            .add_option(&["-l", "--length"], Store,
                        "Sets the length of password, defaults to a random length between 10 and 20");
        ap.refer(&mut options.silently)
            .add_option(&["-s"], StoreTrue,
                        "Mutes all messages, only returns the password");
        ap.parse_args_or_exit();
    }

    password = random_string(options.length);
    print_output(options, password.clone());
    copy_to_clipboard(password);
}

/// Prints the password; in verbose mode also prints its length and, when the
/// requested length is below `MINIMUM_LENGTH`, a red warning.
fn print_output(options: Options, password: String) {
    if options.silently {
        println!("{}", Colour::Green.paint(password));
    } else {
        let warning_message = Colour::Red.paint("WARNING: Password length should be at least: ");
        if options.length < MINIMUM_LENGTH {
            println!("{}{}", warning_message, Colour::Red.paint(MINIMUM_LENGTH.to_string()));
        }
        println!("Generated password of length: {}", Colour::Green.paint(options.length.to_string()));
        println!("Password: {}", Colour::Green.paint(password));
    }
}

/// Puts `content` on the system clipboard; panics if the clipboard is
/// unavailable or the copy fails.
fn copy_to_clipboard(content: String) {
    let mut ctx = ClipboardContext::new().unwrap();
    ctx.set_contents(content).expect("Failed to copy to clipboard");
}
// Demonstrates a custom derive: the `HelloMacro` trait comes from `macros`,
// its derive implementation from the `macros_derive` proc-macro crate.
use macros::HelloMacro;
use macros_derive::HelloMacro;

#[derive(HelloMacro)]
struct Pancakes;

fn main() {
    // Calls the associated function generated by `#[derive(HelloMacro)]`.
    Pancakes::hello_macro();
}
use irc::error::IrcError;

/// Crate-level error type; currently wraps only errors from the `irc` crate.
#[derive(Debug)]
pub enum Error {
    /// An error bubbled up from the IRC layer.
    Irc(IrcError),
}

impl From<IrcError> for Error {
    // Enables `?` in functions returning `Result<_, Error>` when the
    // fallible call yields an `IrcError`.
    fn from(err: IrcError) -> Self {
        Error::Irc(err)
    }
}
#![allow(unused_variables)] //! 제네릭 (1) /// ### 에러메세지 /// /// 03.rs:2:16: 2:24 error: unable to infer enough type information about `_`; /// type annotations or generic parameter binding required [E0282] /// 03.rs:2 let list = Vec::new(); /// ^~~~~~~~ /// 03.rs:2:16: 2:24 help: run `rustc --explain E0282` to see a detailed explanation /// error: aborting due to previous error /// /// ### 설명 /// /// Vec은 구조체의 이름. vector의 약자, 동적배열의 다른이름. Vec 선언을 보면 이렇게 되어있음 /// /// struct Vec<T> { /* ... */ } /// /// C++의 템플릿에 해당하는놈. Vec은 int 자료형을 담고있는 벡터가 될수도 있고, String 자료형을 /// 담고있는 벡터가 될수도 있다. 임의의 자료형을 담을 수 있음. /// /// 그래서 Vec::new() 라고 쓰면, T에 해당하는애가 무엇이 될 지 모르기 때문에 컴파일에러가 난것. fn main() { // 아래 문장은 컴파일에러 // let list = Vec::new(); let list = Vec::<i32>::new(); }
mod replace_space;
mod reverse_left_words;

/// Dispatches to one of the exercise modules via a hard-coded selector.
pub fn main() {
    let default = 2;
    match default {
        // 1: replace spaces in a string (exercise 1)
        1 => replace_space::main(),
        // 2: rotate the leftmost part of a string
        2 => reverse_left_words::main(),
        _ => {}
    }
}
//! 负责分配 / 回收的数据结构 // 2021-3-12 /// 分配器:固定容量,每次分配 / 回收一个元素 pub trait Allocator { /// 给定容量,创建分配器 fn new(capacity: usize) -> Self; /// 分配一个元素,无法分配则返回 `None` fn alloc(&mut self) -> Option<usize>; /// 回收一个元素 fn dealloc(&mut self, index: usize); } // 栈式分配和线段树分配 algorithm/src/allocator mod stacked_allocator; mod segment_tree_allocator; pub use stacked_allocator::StackedAllocator; pub use segment_tree_allocator::SegmentTreeAllocator; /// 默认使用的分配器 //pub type AllocatorImpl = SegmentTreeAllocator; // pub type AllocatorImpl = StackedAllocator; pub type AllocatorImpl = SegmentTreeAllocator; // END
use vulkano::format::{Format, ClearValue};
use vulkano::image::{Dimensions, StorageImage};
use vulkano::instance::{Instance, InstanceExtensions, PhysicalDevice};
use vulkano::device::{Device, DeviceExtensions, Features};
use vulkano::buffer::{BufferUsage, CpuAccessibleBuffer};
use vulkano::command_buffer::{AutoCommandBufferBuilder, CommandBuffer};
use vulkano::sync::GpuFuture;
use std::sync::Arc;
use vulkano::pipeline::ComputePipeline;
use vulkano::descriptor::descriptor_set::PersistentDescriptorSet;
use vulkano::descriptor::PipelineLayoutAbstract;
use image::{ImageBuffer, Rgba};
use vulkano::framebuffer::Framebuffer;
use vulkano::pipeline::GraphicsPipeline;
use vulkano::framebuffer::Subpass;
use vulkano::command_buffer::DynamicState;
use vulkano::pipeline::viewport::Viewport;

/// End-to-end vulkano demo: picks a physical device, runs a compute shader
/// that writes a z = z² + c escape-time image into a storage image, then
/// draws a red triangle into the same image via a render pass and saves the
/// result as `triangle.png`.
pub fn graphics() {
    // --- Instance and physical device selection ----------------------------
    let instance = Instance::new(None, &InstanceExtensions::none(), None)
        .expect("failed to create instance");

    let physical = PhysicalDevice::enumerate(&instance).next().expect("no device available");
    println!("yes");
    println!("{:?}", instance);
    println!("{:?}", physical);

    for family in physical.queue_families() {
        println!("Family {:?} queue count: {:?}", family.id() , family.queues_count());
    }

    // Any queue family that supports graphics will do.
    let queue_family = physical.queue_families()
        .find(|&q| q.supports_graphics())
        .expect("couldn't find a graphical queue family");

    // --- Logical device and queue ------------------------------------------
    let (device, mut queues) = {
        Device::new(physical, &Features::none(), &DeviceExtensions::supported_by_device(physical),
                    [(queue_family, 0.5)].iter().cloned())
            .expect("failed to create device")
    };

    let queue = queues.next().unwrap();

    // Compute shader: one 8x8 work group tile per image region; each
    // invocation iterates z = z² + c and writes a grayscale shade.
    mod cs {
        vulkano_shaders::shader!{
            ty: "compute",
            src: "
#version 450

layout(local_size_x = 8, local_size_y = 8, local_size_z = 1) in;

layout(set = 0, binding = 0, rgba8) uniform writeonly image2D img;

void main() {
    vec2 norm_coordinates = (gl_GlobalInvocationID.xy + vec2(0.5)) / vec2(imageSize(img));
    vec2 c = (norm_coordinates - vec2(0.5)) * 2.0 - vec2(1.0, 0.0);

    vec2 z = vec2(0.0, 0.0);
    float i;
    for (i = 0.0; i < 1.0; i += 0.005) {
        z = vec2(
            z.x * z.x - z.y * z.y + c.x,
            z.y * z.x + z.x * z.y + c.y
        );

        if (length(z) > 4.0) {
            break;
        }
    }

    vec4 to_write = vec4(vec3(i), 1.0);
    imageStore(img, ivec2(gl_GlobalInvocationID.xy), to_write);
}
"
        }
    }

    // 1024x1024 RGBA8 image written by the compute shader and later used as
    // the render pass's color attachment.
    let image = StorageImage::new(device.clone(),
                                  Dimensions::Dim2d { width: 1024, height: 1024 },
                                  Format::R8G8B8A8Unorm, Some(queue.family())).unwrap();

    let shader = cs::Shader::load(device.clone())
        .expect("failed to create shader module");

    let compute_pipeline = Arc::new(
        ComputePipeline::new(device.clone(), &shader.main_entry_point(), &())
            .expect("failed to create compute pipeline"));

    // Bind the storage image at (set 0, binding 0) as the shader expects.
    let layout = compute_pipeline.layout().descriptor_set_layout(0).unwrap();
    let set = Arc::new(PersistentDescriptorSet::start(layout.clone())
        .add_image(image.clone()).unwrap()
        .build().unwrap()
    );

    // Host-visible buffer the image is copied back into (4 bytes per pixel).
    let buf = CpuAccessibleBuffer::from_iter(device.clone(), BufferUsage::all(), false,
                                             (0 .. 1024 * 1024 * 4).map(|_| 0u8))
        .expect("failed to create buffer");

    // Record and run the compute dispatch (128x128 groups of 8x8 threads)
    // followed by the image -> buffer copy.
    let mut builder = AutoCommandBufferBuilder::new(device.clone(), queue.family()).unwrap();
    builder
        .dispatch([1024 / 8, 1024 / 8, 1], compute_pipeline.clone(), set.clone(), ()).unwrap()
        .copy_image_to_buffer(image.clone(), buf.clone()).unwrap();
    let command_buffer = builder.build().unwrap();

    let finished = command_buffer.execute(queue.clone()).unwrap();
    finished.then_signal_fence_and_flush().unwrap()
        .wait(None).unwrap();

    //let buffer_content = buf.read().unwrap();
    //let image = ImageBuffer::<Rgba<u8>, _>::from_raw(1024, 1024, &buffer_content[..]).unwrap();
    //image.save("image.png").unwrap();

    // --- Graphics part: draw a triangle over the same image ----------------
    #[derive(Default, Copy, Clone)]
    struct Vertex {
        position: [f32; 2],
    }
    vulkano::impl_vertex!(Vertex, position);

    let vertex1 = Vertex { position: [-0.5, -0.5] };
    let vertex2 = Vertex { position: [ 0.0, 0.5] };
    let vertex3 = Vertex { position: [ 0.5, -0.25] };

    let vertex_buffer = CpuAccessibleBuffer::from_iter(device.clone(), BufferUsage::all(), false,
                                                       vec![vertex1, vertex2, vertex3].into_iter()).unwrap();

    // Pass-through vertex shader.
    mod vs {
        vulkano_shaders::shader!{
            ty: "vertex",
            src: "
#version 450

layout(location = 0) in vec2 position;

void main() {
    gl_Position = vec4(position, 0.0, 1.0);
}
"
        }
    }

    // Solid-red fragment shader.
    mod fs {
        vulkano_shaders::shader!{
            ty: "fragment",
            src: "
#version 450

layout(location = 0) out vec4 f_color;

void main() {
    f_color = vec4(1.0, 0.0, 0.0, 1.0);
}
"
        }
    }

    let vs = vs::Shader::load(device.clone()).expect("failed to create shader module");
    let fs = fs::Shader::load(device.clone()).expect("failed to create shader module");

    // Single-subpass render pass with one cleared-and-stored color attachment.
    let render_pass = Arc::new(vulkano::single_pass_renderpass!(device.clone(),
        attachments: {
            color: {
                load: Clear,
                store: Store,
                format: Format::R8G8B8A8Unorm,
                samples: 1,
            }
        },
        pass: {
            color: [color],
            depth_stencil: {}
        }
    ).unwrap());

    let framebuffer = Arc::new(Framebuffer::start(render_pass.clone())
        .add(image.clone()).unwrap()
        .build().unwrap());

    // NOTE(review): this first builder records a clear-only render pass but is
    // never built or submitted — it looks like dead code left from a tutorial
    // step; the real frame is recorded further below.
    let mut builder = AutoCommandBufferBuilder::primary_one_time_submit(device.clone(), queue.family()).unwrap();
    builder
        .begin_render_pass(framebuffer.clone(), false, vec![[0.0, 0.0, 1.0, 1.0].into()])
        .unwrap()
        .end_render_pass()
        .unwrap();

    let pipeline = Arc::new(GraphicsPipeline::start()
        // Defines what kind of vertex input is expected.
        .vertex_input_single_buffer::<Vertex>()
        // The vertex shader.
        .vertex_shader(vs.main_entry_point(), ())
        // Defines the viewport (explanations below).
        .viewports_dynamic_scissors_irrelevant(1)
        // The fragment shader.
        .fragment_shader(fs.main_entry_point(), ())
        // This graphics pipeline object concerns the first pass of the render pass.
        .render_pass(Subpass::from(render_pass.clone(), 0).unwrap())
        // Now that everything is specified, we call `build`.
        .build(device.clone())
        .unwrap());

    // Full-image viewport supplied at draw time (the pipeline above declared
    // its viewport dynamic).
    let dynamic_state = DynamicState {
        viewports: Some(vec![Viewport {
            origin: [0.0, 0.0],
            dimensions: [1024.0, 1024.0],
            depth_range: 0.0 .. 1.0,
        }]),
        .. DynamicState::none()
    };

    // Record the real frame: clear to [0,0,1,1], draw the triangle, then copy
    // the attachment into the host-visible buffer.
    let mut builder = AutoCommandBufferBuilder::primary_one_time_submit(device.clone(), queue.family()).unwrap();
    builder
        .begin_render_pass(framebuffer.clone(), false, vec![[0.0, 0.0, 1.0, 1.0].into()])
        .unwrap()
        .draw(pipeline.clone(), &dynamic_state, vertex_buffer.clone(), (), ())
        .unwrap()
        .end_render_pass()
        .unwrap()
        .copy_image_to_buffer(image.clone(), buf.clone())
        .unwrap();
    let command_buffer = builder.build().unwrap();

    let finished = command_buffer.execute(queue.clone()).unwrap();
    finished.then_signal_fence_and_flush().unwrap()
        .wait(None).unwrap();

    // Read back the rendered pixels and save them to disk.
    let buffer_content = buf.read().unwrap();
    let image = ImageBuffer::<Rgba<u8>, _>::from_raw(1024, 1024, &buffer_content[..]).unwrap();
    image.save("triangle.png").unwrap();
}
/// Axis-aligned rectangle used in the method and trait examples below.
#[derive(Debug)]
struct Rectangle {
    width: u32,
    height: u32
}

impl Rectangle {
    /// Associated constructor (called as `Rectangle::create(...)`).
    fn create(width: u32, height: u32) -> Rectangle {
        Rectangle { width, height }
    }

    /// Area = width * height.
    fn area(&self) -> u32 {
        self.width * self.height
    }

    /// True when `self` is wider than `rect`.
    fn wider(&self, rect: &Rectangle) -> bool {
        self.width > rect.width
    }
}

/// Plain free-function example: integer addition.
fn add(a: i32, b: i32) -> i32 {
    return a + b;
}

/// Minimal ordering trait: negative / zero / positive, `memcmp`-style.
trait Comparable {
    fn compare(&self, object: &Self) -> i8;
}

/// Returns a reference to the greatest element per `Comparable::compare`.
/// NOTE(review): panics on an empty slice (indexes `array[max_index]`).
fn max<T: Comparable>(array: &[T]) -> &T {
    let mut max_index = 0;
    let mut i = 1;
    while i < array.len() {
        if array[i].compare(&array[max_index]) > 0 {
            max_index = i;
        }
        i += 1;
    }
    &array[max_index]
}

impl Comparable for f64 {
    // `&self > &object` compares through the references; `PartialOrd`
    // auto-derefs down to the underlying `f64` values.
    fn compare(&self, object: &f64) -> i8 {
        if &self > &object { 1 }
        else if &self == &object { 0 }
        else { -1 }
    }
}

/// Grab-bag of Rust-basics demos: printing, shadowing, tuples and arrays,
/// block expressions, branching, loops, ownership/borrowing, slices,
/// structs with methods, and generic trait bounds.
fn main() {
    // 0- print
    let a = "Hello World";
    println!("print str, {0}, {1}", a, a);
    println!("print long long, {}", 31i64);
    println!("pi is {pi:>0width$}", pi=3.1415926, width=10);
    let x = 3.1415926535897932384626433832_f32; // 3.14159274
    println!("x= {:.3}", x);

    // Shadowing may change the variable's type.
    let b = 34;
    let b = "abcd";
    println!("b= {}", b);

    // Tuples and arrays.
    let tup: (i32, f64, u8) = (500, 6.4, 1);
    println!("tup= {}", tup.0);

    let arr = [1, 2, 3, 4, 5];
    let arr: [i32; 5] = [1, 2, 3, 4, 5];
    let arr = [3; 5]; // five copies of 3
    println!("arr= {}", arr[0]);

    let mut arr = [1, 2, 3, 4];
    arr[0] = 4;
    println!("arr_change= {}", arr[0]);

    // Blocks are expressions; their last expression is the value.
    let x = 4;
    let y = {
        let x = 6;
        x + 1
    };
    println!("y= {}", y);

    // Nested function definition.
    fn five() -> i32 {
        5
    }
    println!("five= {}", five());
    println!("add(3, 4)= {}", add(3, 4));

    // Deferred initialization through branches.
    let a = 12;
    let b;
    if a > 0 {
        b = 1;
    } else if a < 0 {
        b = 1;
    } else {
        b = 0;
    }
    println!("b is {}", b);

    // `if` as an expression.
    let a = 3;
    let number = if a > 0 { 1 } else { -1 };
    println!("number 为 {}", number);

    // `while` loop.
    let mut number = 1;
    while number != 4 {
        println!("{}", number);
        number += 1;
    }
    println!("EXIT");

    // Iterating arrays: by iterator and by index.
    let a = [10, 20, 30, 40, 50];
    for i in a.iter() {
        println!("值为 : {}", i);
    }

    let a = [10, 20, 30, 40, 50];
    for i in 0..5 {
        println!("a[{}] = {}", i, a[i]);
    }

    // `loop` with a plain `break`.
    let s = ['R', 'U', 'N', 'O', 'O', 'B'];
    let mut i = 0;
    loop {
        let ch = s[i];
        if ch == 'O' {
            break;
        }
        println!("\'{}\'", ch);
        i += 1;
    }

    // `loop` breaking with a value.
    let s = ['R', 'U', 'N', 'O', 'O', 'B'];
    let mut i = 0;
    let location = loop {
        let ch = s[i];
        if ch == 'O' {
            break i;
        }
        i += 1;
    };
    println!(" \'O\' 的索引为 {}", location);

    // let s1 = String::from("hello");
    // let s2 = s1;
    // println!("{}, world!", s1); // error! s1 was moved and is no longer valid

    let s1 = String::from("hello");
    let s2 = s1.clone();
    println!("s1 = {}, s2 = {}", s1, s2);

    // let s1 = String::from("hello");
    // let s2 = &s1;
    // let s3 = s1;
    // println!("{}", s2); // invalid: s1, which s2 borrows, moved into s3

    let s1 = String::from("hello");
    let mut s2 = &s1;
    let s3 = s1;
    s2 = &s3; // borrow again, from s3 this time
    println!("{}", s2);

    let mut s1 = String::from("run"); // s1 is mutable
    let s2 = &mut s1; // s2 is a mutable reference
    s2.push_str("oob");
    println!("{}", s2);

    // Slices borrow part of an array.
    let arr = [1, 2, 3, 4, 5];
    let part = &arr[1..3];
    for i in part.iter() {
        println!("part: {}", i);
    }

    // Struct construction and methods.
    let rect1 = Rectangle { width: 10, height: 40};
    println!("rect1: {:#?}", rect1);
    println!("rect1 area is: {}", rect1.area());
    let rect2 = Rectangle::create(100, 200);
    println!("rect2_area is: {}", rect2.area());

    let a = 5;
    if a == 5 {
        println!("a is 5");
    }

    // Generic `max` over f64 via the `Comparable` impl above.
    let arr = [1.0, 3.0, 5.0, 4.0, 2.0];
    println!("maximum of arr is {}", max(&arr));
}
use crate::packet_data::PacketData;
use crate::packet_headers::PacketHeader;

/// Protocol selector for the layer-2 payload.
pub enum Layer2Type {
    NotSet = 0,
    Ipv4,
    Ipv6,
    Arp,
}

/// A parsed packet: its stack of protocol headers plus the payload data.
pub struct PacketInfo {
    pub headers: Vec<PacketHeader>,
    pub packet_data: PacketData,
}

impl PacketInfo {
    /// Creates an empty `PacketInfo` with no headers and fresh packet data.
    pub fn new() -> PacketInfo {
        PacketInfo {
            packet_data: PacketData::new(),
            headers: vec![],
        }
    }

    // NOTE(review): stub — takes no arguments (not even `self`) and does
    // nothing yet; presumably meant to push onto `headers`.
    pub fn add_header() {}

    // NOTE(review): stub — takes no arguments and does nothing yet;
    // presumably meant to fill `packet_data`.
    pub fn store_packet_data() {}
}
// Machine-generated (svd2rust-style) accessor types for the MPCBB1_VCTR10
// register: one reader alias and one writer alias per single-bit field
// (B320..=B351, bits 0..=31).
#[doc = "Register `MPCBB1_VCTR10` reader"]
pub type R = crate::R<MPCBB1_VCTR10_SPEC>;
#[doc = "Register `MPCBB1_VCTR10` writer"]
pub type W = crate::W<MPCBB1_VCTR10_SPEC>;
#[doc = "Field `B320` reader - B320"]
pub type B320_R = crate::BitReader;
#[doc = "Field `B320` writer - B320"]
pub type B320_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B321` reader - B321"]
pub type B321_R = crate::BitReader;
#[doc = "Field `B321` writer - B321"]
pub type B321_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B322` reader - B322"]
pub type B322_R = crate::BitReader;
#[doc = "Field `B322` writer - B322"]
pub type B322_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B323` reader - B323"]
pub type B323_R = crate::BitReader;
#[doc = "Field `B323` writer - B323"]
pub type B323_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B324` reader - B324"]
pub type B324_R = crate::BitReader;
#[doc = "Field `B324` writer - B324"]
pub type B324_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B325` reader - B325"]
pub type B325_R = crate::BitReader;
#[doc = "Field `B325` writer - B325"]
pub type B325_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B326` reader - B326"]
pub type B326_R = crate::BitReader;
#[doc = "Field `B326` writer - B326"]
pub type B326_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B327` reader - B327"]
pub type B327_R = crate::BitReader;
#[doc = "Field `B327` writer - B327"]
pub type B327_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B328` reader - B328"]
pub type B328_R = crate::BitReader;
#[doc = "Field `B328` writer - B328"]
pub type B328_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B329` reader - B329"]
pub type B329_R = crate::BitReader;
#[doc = "Field `B329` writer - B329"]
pub type B329_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B330` reader - B330"]
pub type B330_R = crate::BitReader;
#[doc = "Field `B330` writer - B330"]
pub type B330_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B331` reader - B331"]
pub type B331_R = crate::BitReader;
#[doc = "Field `B331` writer - B331"]
pub type B331_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B332` reader - B332"]
pub type B332_R = crate::BitReader;
#[doc = "Field `B332` writer - B332"]
pub type B332_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B333` reader - B333"]
pub type B333_R = crate::BitReader;
#[doc = "Field `B333` writer - B333"]
pub type B333_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B334` reader - B334"]
pub type B334_R = crate::BitReader;
#[doc = "Field `B334` writer - B334"]
pub type B334_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B335` reader - B335"]
pub type B335_R = crate::BitReader;
#[doc = "Field `B335` writer - B335"]
pub type B335_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B336` reader - B336"]
pub type B336_R = crate::BitReader;
#[doc = "Field `B336` writer - B336"]
pub type B336_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B337` reader - B337"]
pub type B337_R = crate::BitReader;
#[doc = "Field `B337` writer - B337"]
pub type B337_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B338` reader - B338"]
pub type B338_R = crate::BitReader;
#[doc = "Field `B338` writer - B338"]
pub type B338_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B339` reader - B339"]
pub type B339_R = crate::BitReader;
#[doc = "Field `B339` writer - B339"]
pub type B339_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B340` reader - B340"]
pub type B340_R = crate::BitReader;
#[doc = "Field `B340` writer - B340"]
pub type B340_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B341` reader - B341"]
pub type B341_R = crate::BitReader;
#[doc = "Field `B341` writer - B341"]
pub type B341_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B342` reader - B342"]
pub type B342_R = crate::BitReader;
#[doc = "Field `B342` writer - B342"]
pub type B342_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B343` reader - B343"]
pub type B343_R = crate::BitReader;
#[doc = "Field `B343` writer - B343"]
pub type B343_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B344` reader - B344"]
pub type B344_R = crate::BitReader;
#[doc = "Field `B344` writer - B344"]
pub type B344_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B345` reader - B345"]
pub type B345_R = crate::BitReader;
#[doc = "Field `B345` writer - B345"]
pub type B345_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B346` reader - B346"]
pub type B346_R = crate::BitReader;
#[doc = "Field `B346` writer - B346"]
pub type B346_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B347` reader - B347"]
pub type B347_R = crate::BitReader;
#[doc = "Field `B347` writer - B347"]
pub type B347_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B348` reader - B348"]
pub type B348_R = crate::BitReader;
#[doc = "Field `B348` writer - B348"]
pub type B348_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B349` reader - B349"]
pub type B349_R = crate::BitReader;
#[doc = "Field `B349` writer - B349"]
pub type B349_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B350` reader - B350"]
pub type B350_R = crate::BitReader;
#[doc = "Field `B350` writer - B350"]
pub type B350_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B351` reader - B351"]
pub type B351_R = crate::BitReader;
#[doc = "Field `B351` writer - B351"]
pub type B351_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
impl R { #[doc = "Bit 0 - B320"] #[inline(always)] pub fn b320(&self) -> B320_R { B320_R::new((self.bits & 1) != 0) } #[doc = "Bit 1 - B321"] #[inline(always)] pub fn b321(&self) -> B321_R { B321_R::new(((self.bits >> 1) & 1) != 0) } #[doc = "Bit 2 - B322"] #[inline(always)] pub fn b322(&self) -> B322_R { B322_R::new(((self.bits >> 2) & 1) != 0) } #[doc = "Bit 3 - B323"] #[inline(always)] pub fn b323(&self) -> B323_R { B323_R::new(((self.bits >> 3) & 1) != 0) } #[doc = "Bit 4 - B324"] #[inline(always)] pub fn b324(&self) -> B324_R { B324_R::new(((self.bits >> 4) & 1) != 0) } #[doc = "Bit 5 - B325"] #[inline(always)] pub fn b325(&self) -> B325_R { B325_R::new(((self.bits >> 5) & 1) != 0) } #[doc = "Bit 6 - B326"] #[inline(always)] pub fn b326(&self) -> B326_R { B326_R::new(((self.bits >> 6) & 1) != 0) } #[doc = "Bit 7 - B327"] #[inline(always)] pub fn b327(&self) -> B327_R { B327_R::new(((self.bits >> 7) & 1) != 0) } #[doc = "Bit 8 - B328"] #[inline(always)] pub fn b328(&self) -> B328_R { B328_R::new(((self.bits >> 8) & 1) != 0) } #[doc = "Bit 9 - B329"] #[inline(always)] pub fn b329(&self) -> B329_R { B329_R::new(((self.bits >> 9) & 1) != 0) } #[doc = "Bit 10 - B330"] #[inline(always)] pub fn b330(&self) -> B330_R { B330_R::new(((self.bits >> 10) & 1) != 0) } #[doc = "Bit 11 - B331"] #[inline(always)] pub fn b331(&self) -> B331_R { B331_R::new(((self.bits >> 11) & 1) != 0) } #[doc = "Bit 12 - B332"] #[inline(always)] pub fn b332(&self) -> B332_R { B332_R::new(((self.bits >> 12) & 1) != 0) } #[doc = "Bit 13 - B333"] #[inline(always)] pub fn b333(&self) -> B333_R { B333_R::new(((self.bits >> 13) & 1) != 0) } #[doc = "Bit 14 - B334"] #[inline(always)] pub fn b334(&self) -> B334_R { B334_R::new(((self.bits >> 14) & 1) != 0) } #[doc = "Bit 15 - B335"] #[inline(always)] pub fn b335(&self) -> B335_R { B335_R::new(((self.bits >> 15) & 1) != 0) } #[doc = "Bit 16 - B336"] #[inline(always)] pub fn b336(&self) -> B336_R { B336_R::new(((self.bits >> 16) & 1) != 0) } #[doc = 
"Bit 17 - B337"] #[inline(always)] pub fn b337(&self) -> B337_R { B337_R::new(((self.bits >> 17) & 1) != 0) } #[doc = "Bit 18 - B338"] #[inline(always)] pub fn b338(&self) -> B338_R { B338_R::new(((self.bits >> 18) & 1) != 0) } #[doc = "Bit 19 - B339"] #[inline(always)] pub fn b339(&self) -> B339_R { B339_R::new(((self.bits >> 19) & 1) != 0) } #[doc = "Bit 20 - B340"] #[inline(always)] pub fn b340(&self) -> B340_R { B340_R::new(((self.bits >> 20) & 1) != 0) } #[doc = "Bit 21 - B341"] #[inline(always)] pub fn b341(&self) -> B341_R { B341_R::new(((self.bits >> 21) & 1) != 0) } #[doc = "Bit 22 - B342"] #[inline(always)] pub fn b342(&self) -> B342_R { B342_R::new(((self.bits >> 22) & 1) != 0) } #[doc = "Bit 23 - B343"] #[inline(always)] pub fn b343(&self) -> B343_R { B343_R::new(((self.bits >> 23) & 1) != 0) } #[doc = "Bit 24 - B344"] #[inline(always)] pub fn b344(&self) -> B344_R { B344_R::new(((self.bits >> 24) & 1) != 0) } #[doc = "Bit 25 - B345"] #[inline(always)] pub fn b345(&self) -> B345_R { B345_R::new(((self.bits >> 25) & 1) != 0) } #[doc = "Bit 26 - B346"] #[inline(always)] pub fn b346(&self) -> B346_R { B346_R::new(((self.bits >> 26) & 1) != 0) } #[doc = "Bit 27 - B347"] #[inline(always)] pub fn b347(&self) -> B347_R { B347_R::new(((self.bits >> 27) & 1) != 0) } #[doc = "Bit 28 - B348"] #[inline(always)] pub fn b348(&self) -> B348_R { B348_R::new(((self.bits >> 28) & 1) != 0) } #[doc = "Bit 29 - B349"] #[inline(always)] pub fn b349(&self) -> B349_R { B349_R::new(((self.bits >> 29) & 1) != 0) } #[doc = "Bit 30 - B350"] #[inline(always)] pub fn b350(&self) -> B350_R { B350_R::new(((self.bits >> 30) & 1) != 0) } #[doc = "Bit 31 - B351"] #[inline(always)] pub fn b351(&self) -> B351_R { B351_R::new(((self.bits >> 31) & 1) != 0) } } impl W { #[doc = "Bit 0 - B320"] #[inline(always)] #[must_use] pub fn b320(&mut self) -> B320_W<MPCBB1_VCTR10_SPEC, 0> { B320_W::new(self) } #[doc = "Bit 1 - B321"] #[inline(always)] #[must_use] pub fn b321(&mut self) -> 
B321_W<MPCBB1_VCTR10_SPEC, 1> { B321_W::new(self) } #[doc = "Bit 2 - B322"] #[inline(always)] #[must_use] pub fn b322(&mut self) -> B322_W<MPCBB1_VCTR10_SPEC, 2> { B322_W::new(self) } #[doc = "Bit 3 - B323"] #[inline(always)] #[must_use] pub fn b323(&mut self) -> B323_W<MPCBB1_VCTR10_SPEC, 3> { B323_W::new(self) } #[doc = "Bit 4 - B324"] #[inline(always)] #[must_use] pub fn b324(&mut self) -> B324_W<MPCBB1_VCTR10_SPEC, 4> { B324_W::new(self) } #[doc = "Bit 5 - B325"] #[inline(always)] #[must_use] pub fn b325(&mut self) -> B325_W<MPCBB1_VCTR10_SPEC, 5> { B325_W::new(self) } #[doc = "Bit 6 - B326"] #[inline(always)] #[must_use] pub fn b326(&mut self) -> B326_W<MPCBB1_VCTR10_SPEC, 6> { B326_W::new(self) } #[doc = "Bit 7 - B327"] #[inline(always)] #[must_use] pub fn b327(&mut self) -> B327_W<MPCBB1_VCTR10_SPEC, 7> { B327_W::new(self) } #[doc = "Bit 8 - B328"] #[inline(always)] #[must_use] pub fn b328(&mut self) -> B328_W<MPCBB1_VCTR10_SPEC, 8> { B328_W::new(self) } #[doc = "Bit 9 - B329"] #[inline(always)] #[must_use] pub fn b329(&mut self) -> B329_W<MPCBB1_VCTR10_SPEC, 9> { B329_W::new(self) } #[doc = "Bit 10 - B330"] #[inline(always)] #[must_use] pub fn b330(&mut self) -> B330_W<MPCBB1_VCTR10_SPEC, 10> { B330_W::new(self) } #[doc = "Bit 11 - B331"] #[inline(always)] #[must_use] pub fn b331(&mut self) -> B331_W<MPCBB1_VCTR10_SPEC, 11> { B331_W::new(self) } #[doc = "Bit 12 - B332"] #[inline(always)] #[must_use] pub fn b332(&mut self) -> B332_W<MPCBB1_VCTR10_SPEC, 12> { B332_W::new(self) } #[doc = "Bit 13 - B333"] #[inline(always)] #[must_use] pub fn b333(&mut self) -> B333_W<MPCBB1_VCTR10_SPEC, 13> { B333_W::new(self) } #[doc = "Bit 14 - B334"] #[inline(always)] #[must_use] pub fn b334(&mut self) -> B334_W<MPCBB1_VCTR10_SPEC, 14> { B334_W::new(self) } #[doc = "Bit 15 - B335"] #[inline(always)] #[must_use] pub fn b335(&mut self) -> B335_W<MPCBB1_VCTR10_SPEC, 15> { B335_W::new(self) } #[doc = "Bit 16 - B336"] #[inline(always)] #[must_use] pub fn b336(&mut self) -> 
B336_W<MPCBB1_VCTR10_SPEC, 16> { B336_W::new(self) } #[doc = "Bit 17 - B337"] #[inline(always)] #[must_use] pub fn b337(&mut self) -> B337_W<MPCBB1_VCTR10_SPEC, 17> { B337_W::new(self) } #[doc = "Bit 18 - B338"] #[inline(always)] #[must_use] pub fn b338(&mut self) -> B338_W<MPCBB1_VCTR10_SPEC, 18> { B338_W::new(self) } #[doc = "Bit 19 - B339"] #[inline(always)] #[must_use] pub fn b339(&mut self) -> B339_W<MPCBB1_VCTR10_SPEC, 19> { B339_W::new(self) } #[doc = "Bit 20 - B340"] #[inline(always)] #[must_use] pub fn b340(&mut self) -> B340_W<MPCBB1_VCTR10_SPEC, 20> { B340_W::new(self) } #[doc = "Bit 21 - B341"] #[inline(always)] #[must_use] pub fn b341(&mut self) -> B341_W<MPCBB1_VCTR10_SPEC, 21> { B341_W::new(self) } #[doc = "Bit 22 - B342"] #[inline(always)] #[must_use] pub fn b342(&mut self) -> B342_W<MPCBB1_VCTR10_SPEC, 22> { B342_W::new(self) } #[doc = "Bit 23 - B343"] #[inline(always)] #[must_use] pub fn b343(&mut self) -> B343_W<MPCBB1_VCTR10_SPEC, 23> { B343_W::new(self) } #[doc = "Bit 24 - B344"] #[inline(always)] #[must_use] pub fn b344(&mut self) -> B344_W<MPCBB1_VCTR10_SPEC, 24> { B344_W::new(self) } #[doc = "Bit 25 - B345"] #[inline(always)] #[must_use] pub fn b345(&mut self) -> B345_W<MPCBB1_VCTR10_SPEC, 25> { B345_W::new(self) } #[doc = "Bit 26 - B346"] #[inline(always)] #[must_use] pub fn b346(&mut self) -> B346_W<MPCBB1_VCTR10_SPEC, 26> { B346_W::new(self) } #[doc = "Bit 27 - B347"] #[inline(always)] #[must_use] pub fn b347(&mut self) -> B347_W<MPCBB1_VCTR10_SPEC, 27> { B347_W::new(self) } #[doc = "Bit 28 - B348"] #[inline(always)] #[must_use] pub fn b348(&mut self) -> B348_W<MPCBB1_VCTR10_SPEC, 28> { B348_W::new(self) } #[doc = "Bit 29 - B349"] #[inline(always)] #[must_use] pub fn b349(&mut self) -> B349_W<MPCBB1_VCTR10_SPEC, 29> { B349_W::new(self) } #[doc = "Bit 30 - B350"] #[inline(always)] #[must_use] pub fn b350(&mut self) -> B350_W<MPCBB1_VCTR10_SPEC, 30> { B350_W::new(self) } #[doc = "Bit 31 - B351"] #[inline(always)] #[must_use] pub fn 
b351(&mut self) -> B351_W<MPCBB1_VCTR10_SPEC, 31> { B351_W::new(self) } #[doc = "Writes raw bits to the register."] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } } #[doc = "MPCBBx vector register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mpcbb1_vctr10::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`mpcbb1_vctr10::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."] pub struct MPCBB1_VCTR10_SPEC; impl crate::RegisterSpec for MPCBB1_VCTR10_SPEC { type Ux = u32; } #[doc = "`read()` method returns [`mpcbb1_vctr10::R`](R) reader structure"] impl crate::Readable for MPCBB1_VCTR10_SPEC {} #[doc = "`write(|w| ..)` method takes [`mpcbb1_vctr10::W`](W) writer structure"] impl crate::Writable for MPCBB1_VCTR10_SPEC { const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0; const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0; } #[doc = "`reset()` method sets MPCBB1_VCTR10 to value 0"] impl crate::Resettable for MPCBB1_VCTR10_SPEC { const RESET_VALUE: Self::Ux = 0; }
use aoc_utils::read_file;
use day04::FIND_GUARD_REGEX;
use day04::{Action, Guard, Time};
use std::collections::HashMap;

/// Advent of Code 2018 day 4 (strategy 1): parse the guard-shift log, collect
/// the minutes each guard spends asleep, and print
/// `guard_id * most-slept-minute` for the guard with the highest count at any
/// single minute.
fn main() {
    if let Ok(contents) = read_file("./input") {
        // Parse each log line into a `Guard` event (begin shift / asleep / awake).
        let mut guards: Vec<Guard> = contents
            .lines()
            .filter_map(|line| {
                if let Some(caps) = FIND_GUARD_REGEX.captures(line) {
                    let mut time = Time::new(
                        caps["year"].parse::<i32>().unwrap(),
                        caps["month"].parse::<i32>().unwrap(),
                        caps["day"].parse::<i32>().unwrap(),
                        caps["hour"].parse::<i32>().unwrap(),
                        caps["minute"].parse::<i32>().unwrap(),
                    );
                    // Events logged before midnight (e.g. 23:5x shift starts)
                    // are attributed to the next day's midnight hour.
                    if time.get_hour() != 0 {
                        time.set_to_next_day();
                    }
                    return Some(Guard::new(
                        time,
                        // Only "begins shift" lines carry a guard id; sleep and
                        // wake lines yield `None` here.
                        caps.name("guard_id")
                            .and_then(|id| id.as_str().parse::<i32>().ok()),
                        Action::parse_str_to_action(&caps["action"]).unwrap(),
                    ));
                }
                None
            })
            .collect();
        // Sort chronologically so asleep/awake pairs line up per guard.
        guards.sort();
        // Fold state: (current guard id,
        //              (previous action, minute it happened),
        //              guard id -> date string -> minutes spent asleep).
        let guard_sleepy_table: HashMap<i32, HashMap<String, Vec<i32>>> = guards
            .into_iter()
            .fold((0, (Action::Begin, 0), HashMap::new()), |mut acc, guard| {
                // A "begins shift" event switches the guard we attribute sleep to.
                if let Some(id) = guard.get_guard_id() {
                    acc.0 = id;
                }
                let time = guard.get_time();
                {
                    let cache = acc.2.entry(acc.0).or_insert_with(HashMap::new);
                    let minutes_vec = cache.entry(time.get_date_string()).or_insert_with(Vec::new);
                    let previous_action = acc.1;
                    // An Asleep -> Awake transition closes a nap: record every
                    // minute in [fell_asleep, woke_up).
                    if let (Action::Asleep, Action::Awake) = (previous_action.0, guard.get_action()) {
                        for n in previous_action.1..time.get_minute() {
                            minutes_vec.push(n);
                        }
                    }
                }
                acc.1 = (guard.get_action(), time.get_minute());
                acc
            })
            .2;
        // For each guard build a minute -> occurrence-count histogram, take the
        // maximum entry, then pick the guard with the highest such count.
        let sleepiest_guard: (i32, (i32, i32)) = guard_sleepy_table
            .into_iter()
            .map(|(id, x)| {
                (
                    id,
                    x.into_iter()
                        .filter(|(_, v)| !v.is_empty())
                        // Flatten per-day minute lists into one list.
                        .fold(vec![], |acc, (_, v)| [acc, v].concat())
                        .iter()
                        .fold(HashMap::new(), |mut acc, n| {
                            acc.entry(*n)
                                .and_modify(|counter| *counter += 1)
                                .or_insert(1);
                            acc
                        })
                        .into_iter()
                        // (minute, count) pair with the highest count; `None`
                        // when the guard never slept.
                        .max_by(|(_, x), (_, y)| x.cmp(y)),
                )
            })
            .filter_map(|(id, max)| match max {
                Some(n) => Some((id, n)),
                None => None,
            })
            .max_by(|(_, x), (_, y)| x.1.cmp(&y.1))
            .unwrap();
        // Answer format: guard id multiplied by the minute they slept most.
        println!("{:?}", sleepiest_guard.0 * (sleepiest_guard.1).0);
    }
}
use crate::lib::canister_info::{CanisterInfo, CanisterInfoFactory}; use crate::lib::error::DfxResult; use anyhow::bail; use std::path::{Path, PathBuf}; pub struct AssetsCanisterInfo { input_root: PathBuf, source_paths: Vec<PathBuf>, output_wasm_path: PathBuf, output_idl_path: PathBuf, output_assets_path: PathBuf, } impl AssetsCanisterInfo { pub fn get_source_paths(&self) -> &Vec<PathBuf> { &self.source_paths } pub fn get_output_wasm_path(&self) -> &Path { self.output_wasm_path.as_path() } pub fn get_output_idl_path(&self) -> &Path { self.output_idl_path.as_path() } pub fn get_output_assets_path(&self) -> &Path { self.output_assets_path.as_path() } pub fn assert_source_paths(&self) -> DfxResult<()> { let source_paths = &self.source_paths; let input_root = &self.input_root; let source_paths: Vec<PathBuf> = source_paths.iter().map(|x| input_root.join(x)).collect(); for source_path in &source_paths { let canonical = source_path.canonicalize()?; if !canonical.starts_with(input_root) { bail!( "Directory at '{}' is outside the workspace root.", source_path.to_path_buf().display() ); } } Ok(()) } } impl CanisterInfoFactory for AssetsCanisterInfo { fn supports(info: &CanisterInfo) -> bool { info.get_type() == "assets" } fn create(info: &CanisterInfo) -> DfxResult<AssetsCanisterInfo> { let build_root = info.get_build_root(); let name = info.get_name(); let input_root = info.get_workspace_root().to_path_buf(); // If there are no "source" field, we just ignore this. let source_paths = if info.has_extra("source") { info.get_extra::<Vec<PathBuf>>("source")? } else { vec![] }; let output_root = build_root.join(name); let output_wasm_path = output_root.join(Path::new("assetstorage.wasm")); let output_idl_path = output_wasm_path.with_extension("did"); let output_assets_path = output_root.join(Path::new("assets")); Ok(AssetsCanisterInfo { input_root, source_paths, output_wasm_path, output_idl_path, output_assets_path, }) } }
use franklin_crypto::bellman::Engine;

/// The algebraic sponge constructions supported by this crate.
#[derive(Debug, PartialEq, Eq)]
pub enum HashFamily {
    Rescue,
    Poseidon,
    RescuePrime,
}

/// Which specialized circuit gate (if any) to use when evaluating the sbox.
#[derive(Copy, Clone, Debug)]
pub enum CustomGate {
    QuinticWidth4,
    QuinticWidth3,
    None,
}

/// Sbox description: a forward power map `x^alpha` with a small exponent, or
/// an inverse power map whose exponent is stored as four 64-bit limbs.
#[derive(Clone, PartialEq, Eq)]
pub enum Sbox {
    Alpha(u64),
    AlphaInverse([u64; 4]),
    // TODO
}

impl std::fmt::Debug for Sbox {
    // NOTE(review): hard-codes "quintic" regardless of the stored exponent —
    // confirm all current instantiations really use alpha = 5.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Alpha(_) => write!(f, "quintic sbox"),
            Self::AlphaInverse(_) => write!(f, "quintic inverse sbox"),
        }
    }
}

/// Parameters shared by all supported sponge hashes over field `E::Fr`, with
/// a sponge rate of `RATE` and a state width of `WIDTH` elements.
pub trait HashParams<E: Engine, const RATE: usize, const WIDTH: usize>: Clone + Send + Sync {
    /// Which construction (Rescue / Poseidon / RescuePrime) these parameters describe.
    fn hash_family(&self) -> HashFamily;
    /// Round constants for round `round`; one constant per state element.
    fn constants_of_round(&self, round: usize) -> [E::Fr; WIDTH];
    /// The WIDTH x WIDTH mixing (MDS) matrix.
    fn mds_matrix(&self) -> [[E::Fr; WIDTH]; WIDTH];
    fn number_of_full_rounds(&self) -> usize;
    fn number_of_partial_rounds(&self) -> usize;
    /// Forward sbox exponent.
    fn alpha(&self) -> &Sbox;
    /// Inverse sbox exponent.
    fn alpha_inv(&self) -> &Sbox;
    /// Round constants rearranged for the optimized evaluation order.
    fn optimized_round_constants(&self) -> &[[E::Fr; WIDTH]];
    /// Matrices used by the optimized evaluation: the dense first matrix plus
    /// one matrix per subsequent round.
    fn optimized_mds_matrixes(&self) -> (&[[E::Fr; WIDTH]; WIDTH], &[[[E::Fr; WIDTH]; WIDTH]]);
    fn custom_gate(&self) -> CustomGate;
    /// Select which specialized gate the circuit implementation should use.
    fn use_custom_gate(&mut self, gate: CustomGate);
}
use std::borrow::Cow; use std::marker; use std::ops::Bound; use crate::*; use super::{advance_key, retreat_key}; fn move_on_range_end<'txn>( cursor: &mut RoCursor<'txn>, end_bound: &Bound<Vec<u8>>, ) -> Result<Option<(&'txn [u8], &'txn [u8])>> { match end_bound { Bound::Included(end) => { match cursor.move_on_key_greater_than_or_equal_to(end) { Ok(Some((key, data))) if key == &end[..] => Ok(Some((key, data))), Ok(_) => cursor.move_on_prev(), Err(e) => Err(e), } }, Bound::Excluded(end) => { cursor .move_on_key_greater_than_or_equal_to(end) .and_then(|_| cursor.move_on_prev()) }, Bound::Unbounded => cursor.move_on_last(), } } fn move_on_range_start<'txn>( cursor: &mut RoCursor<'txn>, start_bound: &mut Bound<Vec<u8>>, ) -> Result<Option<(&'txn [u8], &'txn [u8])>> { match start_bound { Bound::Included(start) => { cursor.move_on_key_greater_than_or_equal_to(start) }, Bound::Excluded(start) => { advance_key(start); let result = cursor.move_on_key_greater_than_or_equal_to(start); retreat_key(start); result }, Bound::Unbounded => cursor.move_on_first(), } } pub struct RoRange<'txn, KC, DC> { cursor: RoCursor<'txn>, move_on_start: bool, start_bound: Bound<Vec<u8>>, end_bound: Bound<Vec<u8>>, _phantom: marker::PhantomData<(KC, DC)>, } impl<'txn, KC, DC> RoRange<'txn, KC, DC> { pub(crate) fn new( cursor: RoCursor<'txn>, start_bound: Bound<Vec<u8>>, end_bound: Bound<Vec<u8>>, ) -> RoRange<'txn, KC, DC> { RoRange { cursor, move_on_start: true, start_bound, end_bound, _phantom: marker::PhantomData, } } /// Change the codec types of this iterator, specifying the codecs. pub fn remap_types<KC2, DC2>(self) -> RoRange<'txn, KC2, DC2> { RoRange { cursor: self.cursor, move_on_start: self.move_on_start, start_bound: self.start_bound, end_bound: self.end_bound, _phantom: marker::PhantomData::default(), } } /// Change the key codec type of this iterator, specifying the new codec. 
pub fn remap_key_type<KC2>(self) -> RoRange<'txn, KC2, DC> { self.remap_types::<KC2, DC>() } /// Change the data codec type of this iterator, specifying the new codec. pub fn remap_data_type<DC2>(self) -> RoRange<'txn, KC, DC2> { self.remap_types::<KC, DC2>() } /// Wrap the data bytes into a lazy decoder. pub fn lazily_decode_data(self) -> RoRange<'txn, KC, LazyDecode<DC>> { self.remap_types::<KC, LazyDecode<DC>>() } } impl<'txn, KC, DC> Iterator for RoRange<'txn, KC, DC> where KC: BytesDecode<'txn>, DC: BytesDecode<'txn>, { type Item = Result<(KC::DItem, DC::DItem)>; fn next(&mut self) -> Option<Self::Item> { let result = if self.move_on_start { self.move_on_start = false; move_on_range_start(&mut self.cursor, &mut self.start_bound) } else { self.cursor.move_on_next() }; match result { Ok(Some((key, data))) => { let must_be_returned = match &self.end_bound { Bound::Included(end) => key <= end, Bound::Excluded(end) => key < end, Bound::Unbounded => true, }; if must_be_returned { match (KC::bytes_decode(key), DC::bytes_decode(data)) { (Ok(key), Ok(data)) => Some(Ok((key, data))), (Err(e), _) | (_, Err(e)) => Some(Err(Error::Decoding(e))), } } else { None } } Ok(None) => None, Err(e) => Some(Err(e)), } } fn last(mut self) -> Option<Self::Item> { let result = if self.move_on_start { move_on_range_end(&mut self.cursor, &self.end_bound) } else { match (self.cursor.current(), move_on_range_end(&mut self.cursor, &self.end_bound)) { (Ok(Some((ckey, _))), Ok(Some((key, data)))) if ckey != key => { Ok(Some((key, data))) }, (Ok(_), Ok(_)) => Ok(None), (Err(e), _) | (_, Err(e)) => Err(e), } }; match result { Ok(Some((key, data))) => { let must_be_returned = match &self.start_bound { Bound::Included(start) => key >= start, Bound::Excluded(start) => key > start, Bound::Unbounded => true, }; if must_be_returned { match (KC::bytes_decode(key), DC::bytes_decode(data)) { (Ok(key), Ok(data)) => Some(Ok((key, data))), (Err(e), _) | (_, Err(e)) => Some(Err(Error::Decoding(e))), } } 
else { None } }, Ok(None) => None, Err(e) => Some(Err(e)), } } } pub struct RwRange<'txn, KC, DC> { cursor: RwCursor<'txn>, move_on_start: bool, start_bound: Bound<Vec<u8>>, end_bound: Bound<Vec<u8>>, _phantom: marker::PhantomData<(KC, DC)>, } impl<'txn, KC, DC> RwRange<'txn, KC, DC> { pub(crate) fn new( cursor: RwCursor<'txn>, start_bound: Bound<Vec<u8>>, end_bound: Bound<Vec<u8>>, ) -> RwRange<'txn, KC, DC> { RwRange { cursor, move_on_start: true, start_bound, end_bound, _phantom: marker::PhantomData, } } pub fn del_current(&mut self) -> Result<bool> { self.cursor.del_current() } pub fn put_current(&mut self, key: &KC::EItem, data: &DC::EItem) -> Result<bool> where KC: BytesEncode, DC: BytesEncode, { let key_bytes: Cow<[u8]> = KC::bytes_encode(&key).map_err(Error::Encoding)?; let data_bytes: Cow<[u8]> = DC::bytes_encode(&data).map_err(Error::Encoding)?; self.cursor.put_current(&key_bytes, &data_bytes) } /// Change the codec types of this iterator, specifying the codecs. pub fn remap_types<KC2, DC2>(self) -> RwRange<'txn, KC2, DC2> { RwRange { cursor: self.cursor, move_on_start: self.move_on_start, start_bound: self.start_bound, end_bound: self.end_bound, _phantom: marker::PhantomData::default(), } } /// Change the key codec type of this iterator, specifying the new codec. pub fn remap_key_type<KC2>(self) -> RwRange<'txn, KC2, DC> { self.remap_types::<KC2, DC>() } /// Change the data codec type of this iterator, specifying the new codec. pub fn remap_data_type<DC2>(self) -> RwRange<'txn, KC, DC2> { self.remap_types::<KC, DC2>() } /// Wrap the data bytes into a lazy decoder. 
pub fn lazily_decode_data(self) -> RwRange<'txn, KC, LazyDecode<DC>> { self.remap_types::<KC, LazyDecode<DC>>() } } impl<'txn, KC, DC> Iterator for RwRange<'txn, KC, DC> where KC: BytesDecode<'txn>, DC: BytesDecode<'txn>, { type Item = Result<(KC::DItem, DC::DItem)>; fn next(&mut self) -> Option<Self::Item> { let result = if self.move_on_start { self.move_on_start = false; move_on_range_start(&mut self.cursor, &mut self.start_bound) } else { self.cursor.move_on_next() }; match result { Ok(Some((key, data))) => { let must_be_returned = match self.end_bound { Bound::Included(ref end) => key <= end, Bound::Excluded(ref end) => key < end, Bound::Unbounded => true, }; if must_be_returned { match (KC::bytes_decode(key), DC::bytes_decode(data)) { (Ok(key), Ok(data)) => Some(Ok((key, data))), (Err(e), _) | (_, Err(e)) => Some(Err(Error::Decoding(e))), } } else { None } } Ok(None) => None, Err(e) => Some(Err(e)), } } fn last(mut self) -> Option<Self::Item> { let result = if self.move_on_start { move_on_range_end(&mut self.cursor, &self.end_bound) } else { match (self.cursor.current(), move_on_range_end(&mut self.cursor, &self.end_bound)) { (Ok(Some((ckey, _))), Ok(Some((key, data)))) if ckey != key => { Ok(Some((key, data))) }, (Ok(_), Ok(_)) => Ok(None), (Err(e), _) | (_, Err(e)) => Err(e), } }; match result { Ok(Some((key, data))) => { let must_be_returned = match &self.start_bound { Bound::Included(start) => key >= start, Bound::Excluded(start) => key > start, Bound::Unbounded => true, }; if must_be_returned { match (KC::bytes_decode(key), DC::bytes_decode(data)) { (Ok(key), Ok(data)) => Some(Ok((key, data))), (Err(e), _) | (_, Err(e)) => Some(Err(Error::Decoding(e))), } } else { None } }, Ok(None) => None, Err(e) => Some(Err(e)), } } } pub struct RoRevRange<'txn, KC, DC> { cursor: RoCursor<'txn>, move_on_end: bool, start_bound: Bound<Vec<u8>>, end_bound: Bound<Vec<u8>>, _phantom: marker::PhantomData<(KC, DC)>, } impl<'txn, KC, DC> RoRevRange<'txn, KC, DC> { pub(crate) 
fn new( cursor: RoCursor<'txn>, start_bound: Bound<Vec<u8>>, end_bound: Bound<Vec<u8>>, ) -> RoRevRange<'txn, KC, DC> { RoRevRange { cursor, move_on_end: true, start_bound, end_bound, _phantom: marker::PhantomData, } } /// Change the codec types of this iterator, specifying the codecs. pub fn remap_types<KC2, DC2>(self) -> RoRevRange<'txn, KC2, DC2> { RoRevRange { cursor: self.cursor, move_on_end: self.move_on_end, start_bound: self.start_bound, end_bound: self.end_bound, _phantom: marker::PhantomData::default(), } } /// Change the key codec type of this iterator, specifying the new codec. pub fn remap_key_type<KC2>(self) -> RoRevRange<'txn, KC2, DC> { self.remap_types::<KC2, DC>() } /// Change the data codec type of this iterator, specifying the new codec. pub fn remap_data_type<DC2>(self) -> RoRevRange<'txn, KC, DC2> { self.remap_types::<KC, DC2>() } /// Wrap the data bytes into a lazy decoder. pub fn lazily_decode_data(self) -> RoRevRange<'txn, KC, LazyDecode<DC>> { self.remap_types::<KC, LazyDecode<DC>>() } } impl<'txn, KC, DC> Iterator for RoRevRange<'txn, KC, DC> where KC: BytesDecode<'txn>, DC: BytesDecode<'txn>, { type Item = Result<(KC::DItem, DC::DItem)>; fn next(&mut self) -> Option<Self::Item> { let result = if self.move_on_end { self.move_on_end = false; move_on_range_end(&mut self.cursor, &self.end_bound) } else { self.cursor.move_on_prev() }; match result { Ok(Some((key, data))) => { let must_be_returned = match &self.start_bound { Bound::Included(start) => key >= start, Bound::Excluded(start) => key > start, Bound::Unbounded => true, }; if must_be_returned { match (KC::bytes_decode(key), DC::bytes_decode(data)) { (Ok(key), Ok(data)) => Some(Ok((key, data))), (Err(e), _) | (_, Err(e)) => Some(Err(Error::Decoding(e))), } } else { None } } Ok(None) => None, Err(e) => Some(Err(e)), } } fn last(mut self) -> Option<Self::Item> { let result = if self.move_on_end { move_on_range_start(&mut self.cursor, &mut self.start_bound) } else { let current = 
self.cursor.current(); let start = move_on_range_start(&mut self.cursor, &mut self.start_bound); match (current, start) { (Ok(Some((ckey, _))), Ok(Some((key, data)))) if ckey != key => { Ok(Some((key, data))) }, (Ok(_), Ok(_)) => Ok(None), (Err(e), _) | (_, Err(e)) => Err(e), } }; match result { Ok(Some((key, data))) => { let must_be_returned = match &self.end_bound { Bound::Included(end) => key <= end, Bound::Excluded(end) => key < end, Bound::Unbounded => true, }; if must_be_returned { match (KC::bytes_decode(key), DC::bytes_decode(data)) { (Ok(key), Ok(data)) => Some(Ok((key, data))), (Err(e), _) | (_, Err(e)) => Some(Err(Error::Decoding(e))), } } else { None } }, Ok(None) => None, Err(e) => Some(Err(e)), } } } pub struct RwRevRange<'txn, KC, DC> { cursor: RwCursor<'txn>, move_on_end: bool, start_bound: Bound<Vec<u8>>, end_bound: Bound<Vec<u8>>, _phantom: marker::PhantomData<(KC, DC)>, } impl<'txn, KC, DC> RwRevRange<'txn, KC, DC> { pub(crate) fn new( cursor: RwCursor<'txn>, start_bound: Bound<Vec<u8>>, end_bound: Bound<Vec<u8>>, ) -> RwRevRange<'txn, KC, DC> { RwRevRange { cursor, move_on_end: true, start_bound, end_bound, _phantom: marker::PhantomData, } } pub fn del_current(&mut self) -> Result<bool> { self.cursor.del_current() } pub fn put_current(&mut self, key: &KC::EItem, data: &DC::EItem) -> Result<bool> where KC: BytesEncode, DC: BytesEncode, { let key_bytes: Cow<[u8]> = KC::bytes_encode(&key).map_err(Error::Encoding)?; let data_bytes: Cow<[u8]> = DC::bytes_encode(&data).map_err(Error::Encoding)?; self.cursor.put_current(&key_bytes, &data_bytes) } /// Change the codec types of this iterator, specifying the codecs. pub fn remap_types<KC2, DC2>(self) -> RwRevRange<'txn, KC2, DC2> { RwRevRange { cursor: self.cursor, move_on_end: self.move_on_end, start_bound: self.start_bound, end_bound: self.end_bound, _phantom: marker::PhantomData::default(), } } /// Change the key codec type of this iterator, specifying the new codec. 
pub fn remap_key_type<KC2>(self) -> RwRevRange<'txn, KC2, DC> { self.remap_types::<KC2, DC>() } /// Change the data codec type of this iterator, specifying the new codec. pub fn remap_data_type<DC2>(self) -> RwRevRange<'txn, KC, DC2> { self.remap_types::<KC, DC2>() } /// Wrap the data bytes into a lazy decoder. pub fn lazily_decode_data(self) -> RwRevRange<'txn, KC, LazyDecode<DC>> { self.remap_types::<KC, LazyDecode<DC>>() } } impl<'txn, KC, DC> Iterator for RwRevRange<'txn, KC, DC> where KC: BytesDecode<'txn>, DC: BytesDecode<'txn>, { type Item = Result<(KC::DItem, DC::DItem)>; fn next(&mut self) -> Option<Self::Item> { let result = if self.move_on_end { self.move_on_end = false; move_on_range_end(&mut self.cursor, &self.end_bound) } else { self.cursor.move_on_prev() }; match result { Ok(Some((key, data))) => { let must_be_returned = match &self.start_bound { Bound::Included(start) => key >= start, Bound::Excluded(start) => key > start, Bound::Unbounded => true, }; if must_be_returned { match (KC::bytes_decode(key), DC::bytes_decode(data)) { (Ok(key), Ok(data)) => Some(Ok((key, data))), (Err(e), _) | (_, Err(e)) => Some(Err(Error::Decoding(e))), } } else { None } } Ok(None) => None, Err(e) => Some(Err(e)), } } fn last(mut self) -> Option<Self::Item> { let result = if self.move_on_end { move_on_range_start(&mut self.cursor, &mut self.start_bound) } else { let current = self.cursor.current(); let start = move_on_range_start(&mut self.cursor, &mut self.start_bound); match (current, start) { (Ok(Some((ckey, _))), Ok(Some((key, data)))) if ckey != key => { Ok(Some((key, data))) }, (Ok(_), Ok(_)) => Ok(None), (Err(e), _) | (_, Err(e)) => Err(e), } }; match result { Ok(Some((key, data))) => { let must_be_returned = match &self.end_bound { Bound::Included(end) => key <= end, Bound::Excluded(end) => key < end, Bound::Unbounded => true, }; if must_be_returned { match (KC::bytes_decode(key), DC::bytes_decode(data)) { (Ok(key), Ok(data)) => Some(Ok((key, data))), (Err(e), _) 
| (_, Err(e)) => Some(Err(Error::Decoding(e))), } } else { None } }, Ok(None) => None, Err(e) => Some(Err(e)), } } }
/// Constant computed from a simple arithmetic expression.
const b: usize = 4 / 2;
// The trailing comment previously used `///` (an outer doc comment), which
// cannot attach to an expression and left the declaration malformed; a plain
// line comment is the correct form here.
const a: usize = 1; // ok
// Crate module layout: `grid`, `rules`, and `solver` form the public API;
// `io` is crate-internal.
pub mod grid;
pub mod rules;
pub mod solver;
mod io;
// Public submodules of this crate.
pub mod lifetime_elision;
pub mod scope;
use std::fmt;
use std::ops::AddAssign;

/// Counters accumulated while converting OSM data: how many elements were
/// converted and how many referenced ids could not be resolved.
#[derive(Debug, Default)]
pub struct Stats {
    pub num_nodes: usize,
    pub num_ways: usize,
    pub num_relations: usize,
    pub num_unresolved_node_ids: usize,
    pub num_unresolved_way_ids: usize,
    pub num_unresolved_rel_ids: usize,
}

impl AddAssign for Stats {
    /// Accumulate `other` into `self`, field by field.
    #[inline]
    fn add_assign(&mut self, other: Self) {
        // Destructure so the compiler flags any field this impl forgets.
        let Stats {
            num_nodes,
            num_ways,
            num_relations,
            num_unresolved_node_ids,
            num_unresolved_way_ids,
            num_unresolved_rel_ids,
        } = other;
        self.num_nodes += num_nodes;
        self.num_ways += num_ways;
        self.num_relations += num_relations;
        self.num_unresolved_node_ids += num_unresolved_node_ids;
        self.num_unresolved_way_ids += num_unresolved_way_ids;
        self.num_unresolved_rel_ids += num_unresolved_rel_ids;
    }
}

impl fmt::Display for Stats {
    /// Human-readable summary of the converted counts and unresolved ids.
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        write!(
            f,
            r#"Converted: nodes: {} ways: {} relations: {} Unresolved ids: nodes: {} ways: {} relations: {}"#,
            self.num_nodes,
            self.num_ways,
            self.num_relations,
            self.num_unresolved_node_ids,
            self.num_unresolved_way_ids,
            self.num_unresolved_rel_ids
        )
    }
}
#![feature(test)]
extern crate test;

extern crate byteio;
extern crate byteorder;

use test::black_box;

/// Benchmark byteio reading u16s out of a 1 MB buffer in 2-byte chunks.
#[bench]
fn bench_byteio_vec(b: &mut test::Bencher) {
    use byteio::ReadBytesExt;
    let vec = vec![0u8; 1_000_000];
    b.iter(|| {
        let data = black_box(&vec[..]);
        for mut val in data.chunks(2) {
            let _: Result<u16, _> = black_box(val.read_as::<byteio::LittleEndian>());
        }
    });
    b.bytes = vec.len() as u64;
}

/// Benchmark byteio converting a fixed 2-byte array to u16, NITER times.
#[bench]
fn bench_byteio(b: &mut test::Bencher) {
    use byteio::ByteIo;
    const NITER: i32 = 100_000;
    b.iter(|| {
        // Fixed: was `1..NITER`, which ran NITER-1 iterations while
        // `b.bytes` claimed NITER, skewing the reported throughput.
        for _ in 0..NITER {
            let data = black_box([1, 2]);
            let _: u16 = black_box(byteio::LittleEndian::from_bytes(data));
        }
    });
    b.bytes = 2 * NITER as u64;
}

/// Benchmark byteorder reading u16s out of a 1 MB buffer in 2-byte chunks.
#[bench]
fn bench_byteorder_vec(b: &mut test::Bencher) {
    use byteorder::ReadBytesExt;
    let vec = vec![0u8; 1_000_000];
    b.iter(|| {
        let data = black_box(&vec[..]);
        for mut val in data.chunks(2) {
            let _: Result<u16, _> = black_box(val.read_u16::<byteorder::LittleEndian>());
        }
    });
    b.bytes = vec.len() as u64;
}

/// Benchmark byteorder converting a fixed 2-byte array to u16, NITER times.
#[bench]
fn bench_byteorder(b: &mut test::Bencher) {
    use byteorder::ByteOrder;
    const NITER: i32 = 100_000;
    b.iter(|| {
        // Fixed: was `1..NITER` (NITER-1 iterations); now matches `b.bytes`.
        for _ in 0..NITER {
            let data = black_box([1, 2]);
            let _: u16 = black_box(byteorder::LittleEndian::read_u16(&data));
        }
    });
    b.bytes = 2 * NITER as u64;
}
mod instruction; use self::instruction::{Instruction, OpCode}; use chip8::audio::{AudioEvent, AudioSink}; use chip8::keyboard::{HexKey, Keyboard}; use chip8::memory::Memory; use chip8::stack::Stack; use chip8::vram::{VideoSink, Vram}; use chip8::Address; use chip8::DWord; use rand::{thread_rng, Rng}; use std::fmt; pub struct Cpu { v: [u8; 0x10], i: u16, delay_timer: u8, sound_timer: u8, pc: u16, stack: Stack, } impl fmt::Debug for Cpu { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "[pc] {:04x}", self.pc)?; write!(f, " [i] {:04x}", self.i)?; write!(f, " [v0]")?; write!(f, " {:02x}", self.v[0])?; write!(f, " {:02x}", self.v[1])?; write!(f, " {:02x}", self.v[2])?; write!(f, " {:02x}", self.v[3])?; write!(f, " [v4]")?; write!(f, " {:02x}", self.v[4])?; write!(f, " {:02x}", self.v[5])?; write!(f, " {:02x}", self.v[6])?; write!(f, " {:02x}", self.v[7])?; write!(f, " [v8]")?; write!(f, " {:02x}", self.v[8])?; write!(f, " {:02x}", self.v[9])?; write!(f, " [va]")?; write!(f, " {:02x}", self.v[10])?; write!(f, " {:02x}", self.v[11])?; write!(f, " {:02x}", self.v[12])?; write!(f, " {:02x}", self.v[13])?; write!(f, " {:02x}", self.v[14])?; write!(f, " [vf] {:02x}", self.v[15])?; write!(f, " [dt] {:02x}", self.delay_timer)?; write!(f, " [st] {:02x}", self.sound_timer)?; write!(f, "") } } impl Cpu { pub fn new() -> Cpu { Cpu { v: [0; 0x10], i: 0, delay_timer: 0, sound_timer: 0, pc: 0x200, stack: Stack::new(), } } pub fn tick( &mut self, memory: &mut Memory, vram: &mut Vram, keyboard: &Keyboard, video_sink: &mut VideoSink, audio_sink: &mut AudioSink, ) { // println!(" ? {:?}", self); // println!(" ? 
{:?}", self.stack); let data = self.fetch(memory); let opcode = self.decode(data); // println!("> {:?}", opcode); let old_sound_timer = self.sound_timer; self.execute(&opcode, memory, vram, keyboard, video_sink); self.delay_timer = self.delay_timer.saturating_sub(1); self.sound_timer = self.sound_timer.saturating_sub(1); if old_sound_timer == 0 && self.sound_timer > 1 { audio_sink.event = Some(AudioEvent::Play); } else if old_sound_timer > 0 && self.sound_timer == 0 { audio_sink.event = Some(AudioEvent::Stop); } } pub fn fetch(&self, memory: &Memory) -> DWord { memory.read_dword(self.pc) } pub fn decode(&self, data: DWord) -> OpCode { let instruction = Instruction::new(data); instruction.decode() } pub fn execute( &mut self, opcode: &OpCode, memory: &mut Memory, vram: &mut Vram, keyboard: &Keyboard, video_sink: &mut VideoSink, ) { match *opcode { OpCode::Set(vx, value) => { self.v[vx as usize] = value; self.pc += 2; } OpCode::Copy(vx, vy) => { self.v[vx as usize] = self.v[vy as usize]; self.pc += 2; } OpCode::Add(vx, value) => { let (result, overflow) = self.v[vx as usize].overflowing_add(value); self.v[vx as usize] = result; self.v[0xF as usize] = if overflow { 1 } else { 0 }; self.pc += 2; } OpCode::AddVy(vx, vy) => { let value_x = self.v[vx as usize]; let value_y = self.v[vy as usize]; let (result, overflow) = value_x.overflowing_add(value_y); self.v[vx as usize] = result; self.v[0xF as usize] = if overflow { 1 } else { 0 }; self.pc += 2; } OpCode::SubVx(vx, vy) => { let value_x = self.v[vx as usize]; let value_y = self.v[vy as usize]; self.v[0xF as usize] = if value_x > value_y { 0 } else { 1 }; self.v[vx as usize] = value_y.saturating_sub(value_x); self.pc += 2; } OpCode::SubVy(vx, vy) => { let value_x = self.v[vx as usize]; let value_y = self.v[vy as usize]; self.v[0xF as usize] = if value_y > value_x { 0 } else { 1 }; self.v[vx as usize] = value_x.saturating_sub(value_y); self.pc += 2; } OpCode::And(vx, vy) => { self.v[vx as usize] &= self.v[vy as usize]; 
self.pc += 2; } OpCode::Or(vx, vy) => { self.v[vx as usize] |= self.v[vy as usize]; self.pc += 2; } OpCode::Xor(vx, vy) => { self.v[vx as usize] ^= self.v[vy as usize]; self.pc += 2; } OpCode::ShiftRight(vx, _vy) => { let value = self.v[vx as usize]; self.v[0xF as usize] = value & 0x1; // self.v[vy as usize] = value >> 1; self.v[vx as usize] = value >> 1; self.pc += 2; } OpCode::ShiftLeft(vx, _vy) => { let value = self.v[vx as usize]; self.v[0xF as usize] = value >> 7; // self.v[vy as usize] = value << 1; self.v[vx as usize] = value << 1; self.pc += 2; } OpCode::Jmp(address) => { self.pc = address; } OpCode::JmpV0(address) => { self.pc = address + self.v[0] as Address; } OpCode::Jeq(vx, value) => { if self.v[vx as usize] == value { self.pc += 2; } self.pc += 2; } OpCode::JeqVy(vx, vy) => { if self.v[vx as usize] == self.v[vy as usize] { self.pc += 2; } self.pc += 2; } OpCode::Jneq(vx, value) => { if self.v[vx as usize] != value { self.pc += 2; } self.pc += 2; } OpCode::JneqVy(vx, vy) => { if self.v[vx as usize] != self.v[vy as usize] { self.pc += 2; } self.pc += 2; } OpCode::JmpK(vx) => { if keyboard.get_pressed(HexKey::from(self.v[vx as usize])) { self.pc += 2; } self.pc += 2; } OpCode::JmpNK(vx) => { if !keyboard.get_pressed(HexKey::from(self.v[vx as usize])) { self.pc += 2; } self.pc += 2; } OpCode::WaitForKey(vx) => { let mut key_pressed = false; for k in 0..0x10 { if keyboard.get_pressed(HexKey::from(k)) { self.v[vx as usize] = k; key_pressed = true; } } // println!("{:?}", keyboard); if key_pressed { self.pc += 2; } } OpCode::Store(vx) => { let mut r = 0; for addr in self.i..self.i + (vx as u16) + 1 { memory.write(addr, self.v[r as usize]); r += 1; } self.i += r; self.pc += 2; } OpCode::Load(vx) => { let mut r = 0; for addr in self.i..self.i + (vx as u16) + 1 { self.v[r as usize] = memory.read(addr); r += 1; } self.i += r; self.pc += 2; } OpCode::Call(address) => { self.stack.push(self.pc); self.pc = address; } OpCode::Return() => { let addr = 
self.stack.pop(); self.pc = addr; self.pc += 2; } OpCode::SetDelayTimer(vx) => { self.delay_timer = self.v[vx as usize]; self.pc += 2; } OpCode::LdDelayTimer(vx) => { self.v[vx as usize] = self.delay_timer; self.pc += 2; } OpCode::SetSoundTimer(vx) => { if self.v[vx as usize] > 2 { self.sound_timer = self.v[vx as usize]; } self.pc += 2; } OpCode::SetI(value) => { self.i = value; self.pc += 2; } OpCode::AddIVx(vx) => { let value_x = self.v[vx as usize]; self.v[0xF as usize] = if self.i + value_x as u16 > 0xFFF { 1 } else { 0 }; //TODO, wrap around 0xFFF self.i += self.v[vx as usize] as Address; self.pc += 2; } OpCode::DrawSprite(vx, vy, value) => { let value_x = self.v[vx as usize]; let value_y = self.v[vy as usize]; self.v[0xF as usize] = if vram.draw_sprite(memory, self.i, value_x, value_y, value, video_sink) { 1 } else { 0 }; self.pc += 2; } OpCode::Font(vx) => { self.i = (self.v[vx as usize] * 5) as Address; self.pc += 2; } OpCode::ClearScreen() => { vram.clear(); self.pc += 2; } OpCode::BCD(vx) => { let mut x = self.v[vx as usize]; const DECIMAL_LENGTH: usize = 3; let mut digits = vec![0 as u8; DECIMAL_LENGTH]; for digit_count in 0..3 { digits[DECIMAL_LENGTH - digit_count - 1] = x % 10; x /= 10; } let i = self.i; memory.write(i, digits[0]); memory.write(i + 1, digits[1]); memory.write(i + 2, digits[2]); self.pc += 2; } OpCode::Random(vx, mask) => { let mut rng = thread_rng(); let random = rng.gen::<u8>(); self.v[vx as usize] = random & mask; self.pc += 2; } } } }
// @lint-ignore LICENSELINT /* * MIT License * * Copyright (c) 2021 Daniel Prilik * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ use bytes::Bytes; use bytes::BytesMut; use num_traits::CheckedAdd; use num_traits::CheckedMul; use num_traits::FromPrimitive; use num_traits::Zero; use serde::de; use serde::de::Visitor; use serde::ser; use serde::ser::SerializeSeq; use serde::Deserialize; use serde::Deserializer; use serde::Serialize; use serde::Serializer; use thiserror::Error; /// Decode gdb hex error code #[derive(Debug, Error, PartialEq)] pub enum GdbHexError { /// Invalid hex digit #[error("Input contains non-ASCII chars")] NotAscii, /// Input is empty #[error("Input is empty")] Empty, /// Output is too small: overflowed #[error("Output is too small/overflowed")] Overflow, /// Invalid Hex input #[error("Gdb hex is malformed")] InvalidGdbHex, /// Invalid binary inpput #[error("Gdb binary is malformed")] InvalidGdbBinary, /// Invalid Output (num) type. 
#[error("Invalid output num type")] InvalidOutput, } #[derive(PartialEq, Debug)] pub struct GdbHexString { bytes: Bytes, } impl From<Bytes> for GdbHexString { fn from(bytes: Bytes) -> Self { GdbHexString { bytes } } } impl From<BytesMut> for GdbHexString { fn from(bytes: BytesMut) -> Self { GdbHexString { bytes: bytes.freeze(), } } } impl GdbHexString { /// decode gdb hex encoded binary data into a slice. #[cfg(test)] pub fn decode(&self) -> Result<Vec<u8>, GdbHexError> { let serialized: Vec<u8> = bincode::serialize(self).map_err(|_| GdbHexError::InvalidGdbHex)?; bincode::deserialize(&serialized).map_err(|_| GdbHexError::InvalidGdbHex) } /// encode slice into gdb hex encoded data #[cfg(test)] pub fn encode(bytes: &[u8]) -> Result<Self, GdbHexError> { let serialized: Vec<u8> = bincode::serialize(bytes).map_err(|_| GdbHexError::InvalidGdbHex)?; bincode::deserialize(&serialized).map_err(|_| GdbHexError::InvalidGdbHex) } } impl Serialize for GdbHexString { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { if self.bytes.is_empty() || self.bytes.len() % 2 != 0 { return Err(ser::Error::custom(GdbHexError::InvalidGdbHex)); } let mut seq = serializer.serialize_seq(Some(self.bytes.len() / 2))?; let mut j = 0; while j < self.bytes.len() { let val: u8 = from_hex(self.bytes[j]) .ok_or_else(|| ser::Error::custom(GdbHexError::NotAscii))? 
* 16 + from_hex(self.bytes[j + 1]) .ok_or_else(|| ser::Error::custom(GdbHexError::NotAscii))?; seq.serialize_element(&val)?; j += 2; } seq.end() } } struct HexStringVisitor; impl<'de> Visitor<'de> for HexStringVisitor { type Value = Vec<u8>; fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { write!(formatter, "a &[u8] slice") } fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E> where E: de::Error, { let mut res: Vec<u8> = Vec::new(); for ch in v { let hi = to_hex(*ch >> 4).unwrap(); let lo = to_hex(*ch & 0xf).unwrap(); res.push(hi); res.push(lo); } Ok(res) } } impl<'de> Deserialize<'de> for GdbHexString { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { let bytes = deserializer.deserialize_bytes(HexStringVisitor)?; Ok(GdbHexString { bytes: Bytes::from(bytes), }) } } #[derive(PartialEq, Debug)] pub struct GdbBinaryString { bytes: Bytes, } impl From<Bytes> for GdbBinaryString { fn from(bytes: Bytes) -> Self { GdbBinaryString { bytes } } } impl From<BytesMut> for GdbBinaryString { fn from(bytes: BytesMut) -> Self { GdbBinaryString { bytes: bytes.freeze(), } } } impl GdbBinaryString { /// decode gdb binary encoded binary data into a slice. 
#[cfg(test)] pub fn decode(&self) -> Result<Vec<u8>, GdbHexError> { let serialized: Vec<u8> = bincode::serialize(self).map_err(|_| GdbHexError::InvalidGdbHex)?; bincode::deserialize(&serialized).map_err(|_| GdbHexError::InvalidGdbHex) } /// encode slice into gdb binary encoded data #[cfg(test)] pub fn encode(bytes: &[u8]) -> Result<Self, GdbHexError> { let serialized: Vec<u8> = bincode::serialize(bytes).map_err(|_| GdbHexError::InvalidGdbHex)?; bincode::deserialize(&serialized).map_err(|_| GdbHexError::InvalidGdbHex) } } impl Serialize for GdbBinaryString { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { let mut res: Vec<u8> = Vec::new(); let mut j = 0; while j < self.bytes.len() { let ch = self.bytes[j]; match ch { b'}' => { if j == self.bytes.len() - 1 { return Err(ser::Error::custom(GdbHexError::InvalidGdbBinary)); } res.push(self.bytes[1 + j] ^ 0x20); j += 2; } _ => { res.push(ch); j += 1; } } } serializer.serialize_bytes(&res) } } struct BinaryStringisitor; impl<'de> Visitor<'de> for BinaryStringisitor { type Value = Vec<u8>; fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { write!(formatter, "a &[u8] slice") } fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E> where E: de::Error, { let mut res: Vec<u8> = Vec::new(); for &ch in v { match ch { b'#' | b'$' | b'}' | b'*' => { res.push(b'}'); res.push(ch ^ 0x20) } _ => res.push(ch), } } Ok(res) } } impl<'de> Deserialize<'de> for GdbBinaryString { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { let bytes = deserializer.deserialize_bytes(BinaryStringisitor)?; Ok(GdbBinaryString { bytes: Bytes::from(bytes), }) } } fn from_hex(c: u8) -> Option<u8> { if b"0123456789".contains(&c) { Some(c - b'0') } else if b"abcdef".contains(&c) { Some(c - b'a' + 10) } else if b"ABCDEF".contains(&c) { Some(c - b'A' + 10) } else if b"xX".contains(&c) { Some(0) } else { None } } fn to_hex(c: u8) -> Option<u8> { if c > 
15 { None } else if c < 10 { Some(c + b'0') } else { Some(c + b'A') } } /// Decode a GDB dex string into the specified integer. /// /// GDB hex strings may include "xx", which represent "missing" data. This /// method simply treats "xx" as 00. pub fn decode_hex<I>(buf: &[u8]) -> Result<I, GdbHexError> where I: FromPrimitive + Zero + CheckedAdd + CheckedMul, { if buf.is_empty() { return Err(GdbHexError::Empty); } let radix = I::from_u8(16).ok_or(GdbHexError::InvalidOutput)?; let mut result = I::zero(); for &digit in buf { let x = I::from_u8(from_hex(digit).ok_or(GdbHexError::NotAscii)?) .ok_or(GdbHexError::InvalidOutput)?; result = result.checked_mul(&radix).ok_or(GdbHexError::Overflow)?; result = result.checked_add(&x).ok_or(GdbHexError::Overflow)? } Ok(result) } /// Decode a GDB hex string into a u8 Vector. /// /// GDB hex strings may include "xx", which represent "missing" data. This /// method simply treats "xx" as 00. pub fn decode_hex_string(buf: &[u8]) -> Result<Vec<u8>, GdbHexError> { let mut res = Vec::new(); let mut i = 0; if buf.len() % 2 != 0 { return Err(GdbHexError::InvalidGdbHex); } while i < buf.len() - 1 { let x = from_hex(buf[i]).ok_or(GdbHexError::NotAscii)?; let x = 16 * x + from_hex(buf[i + 1]).ok_or(GdbHexError::NotAscii)?; res.push(x); i += 2; } Ok(res) } /// Decode a GDB binary string into a u8 Vector. /// /// GDB hex strings may include "xx", which represent "missing" data. This /// method simply treats "xx" as 00. 
pub fn decode_binary_string(buf: &[u8]) -> Result<Vec<u8>, GdbHexError> { let mut res = Vec::new(); let mut i = 0; while i < buf.len() { match buf[i] { b'}' => { if i >= buf.len() - 1 { return Err(GdbHexError::InvalidGdbBinary); } res.push(buf[i + 1] ^ 0x20); i += 2; } _ => { res.push(buf[i]); i += 1; } } } Ok(res) } #[cfg(test)] mod test { use super::*; #[test] fn serde_sanity() { let test1 = Bytes::from(&[4, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3, 4][..]); let test2 = GdbHexString { bytes: Bytes::from("01020304"), }; let bytes: Vec<u8> = bincode::serialize(&test2).unwrap(); assert_eq!(bytes, test1); let encoded: GdbHexString = bincode::deserialize(&bytes).unwrap(); assert_eq!(encoded, test2); let bytes: Vec<u8> = bincode::deserialize(&bytes).unwrap(); assert_eq!(bytes, vec![1, 2, 3, 4]); } #[test] fn encode_decode_sanity() { let test1 = vec![1, 2, 3, 4]; let hex = GdbHexString { bytes: Bytes::from("01020304"), }; let test2 = vec![b'1', b'2', b'$', b'{']; let bin = GdbBinaryString { bytes: Bytes::from(&b"12}\x04{"[..]), }; assert_eq!(GdbHexString::encode(&test1).unwrap(), hex); assert_eq!(GdbHexString::decode(&hex).unwrap(), test1); assert_eq!(GdbBinaryString::encode(&test2).unwrap(), bin); assert_eq!(GdbBinaryString::decode(&bin).unwrap(), test2); } #[test] fn decode_gdb_hex_test() { assert_eq!( decode_hex_string(b"31323334"), Ok::<_, GdbHexError>(b"1234".to_vec()) ); assert_eq!( decode_hex_string(b"12345"), Err::<Vec<u8>, _>(GdbHexError::InvalidGdbHex) ); } #[test] fn decode_gdb_binary_test() { assert_eq!( decode_binary_string(b"12345"), Ok::<_, GdbHexError>(b"12345".to_vec()) ); assert_eq!( decode_binary_string(b"1234}"), Err::<Vec<u8>, _>(GdbHexError::InvalidGdbBinary) ); assert_eq!( decode_binary_string(b"1234}A"), Ok::<_, GdbHexError>(b"1234a".to_vec()) ); } }
use crate::hex::render::{
    area::AreaRenderer,
    area_edge::AreaEdgeRenderer,
    edge::EdgeRenderer,
    multi::MultiRenderer,
    square::{SquareRenderer, SquareScale},
    tile::{HexScale, TileRenderer},
};

pub mod bumpy_builder;
pub mod cellular;
pub mod cubic_range_shape;
pub mod custom;
pub mod directions;
pub mod flat_builder;
pub mod pointer;
pub mod render;
pub mod ring;
pub mod rooms_and_mazes;
pub mod shape;
pub mod snake;

const HEX_SCALE_HORIZONTAL: f32 = 0.8;
const GROUND_HEX_SCALE_VERTICAL: f32 = 0.1;
const WALL_HEX_SCALE_VERTICAL: f32 = 1.0;

/// Builds a [`TileRenderer`] with low-profile ground hexes and
/// full-height wall hexes, both sharing the same horizontal scale.
pub fn new_tile_renderer() -> TileRenderer {
    let ground_scale = HexScale {
        horizontal: HEX_SCALE_HORIZONTAL,
        vertical: GROUND_HEX_SCALE_VERTICAL,
    };
    let wall_scale = HexScale {
        horizontal: HEX_SCALE_HORIZONTAL,
        vertical: WALL_HEX_SCALE_VERTICAL,
    };
    TileRenderer::new(ground_scale, wall_scale)
}

const SQUARE_SCALE_HORIZONTAL: f32 = 0.7;

/// Builds a [`SquareRenderer`] that uses the same horizontal scale for
/// both of its scale slots.
pub fn new_square_renderer() -> SquareRenderer {
    let first = SquareScale {
        horizontal: SQUARE_SCALE_HORIZONTAL,
    };
    let second = SquareScale {
        horizontal: SQUARE_SCALE_HORIZONTAL,
    };
    SquareRenderer::new(first, second)
}

/// Builds a default [`EdgeRenderer`].
pub fn new_edge_renderer() -> EdgeRenderer {
    EdgeRenderer::new()
}

/// Builds a default [`AreaRenderer`].
pub fn new_area_renderer() -> AreaRenderer {
    AreaRenderer::new()
}

/// Builds a default [`AreaEdgeRenderer`].
pub fn new_area_edge_renderer() -> AreaEdgeRenderer {
    AreaEdgeRenderer::new()
}

/// Combines two renderers into a [`MultiRenderer`] that draws both.
pub fn new_multi_renderer<R1, R2>(r1: R1, r2: R2) -> MultiRenderer<R1, R2> {
    MultiRenderer::new(r1, r2)
}
use std::fs::File; use std::io::Read; use std::path::Path; use std::error::Error; use de::deserialise; use node::StoryNode; use feed::StoryFeed; pub fn load_story(filepath: &str) -> Result<StoryFeed, String> { let path = Path::new(filepath); let display = path.display(); // Open file let mut file = match File::open(path) { Ok(f) => f, Err(err) => return Err(format!("Could not open file {}: {}", display, err.description())), }; // Load file content into a string let mut content = String::new(); match file.read_to_string(&mut content) { Ok(_) => {} Err(err) => { return Err(format!("Could not read from file {}: {}", display, err.description())) } }; // Deserialize story let nodes: Vec<StoryNode> = match deserialise(content.as_str()) { Ok(nodes) => nodes, Err(err) => return Err(format!("Could not deserialise {}: {}", display, err.description())), }; Ok(StoryFeed::new(nodes)) }
#[doc = "Reader of register DATACNT"]
pub type R = crate::R<u32, super::DATACNT>;
#[doc = "Reader of field `DATACNT`"]
pub type DATACNT_R = crate::R<u32, u32>;
impl R {
    #[doc = "Bits 0:24 - Data count value"]
    #[inline(always)]
    pub fn datacnt(&self) -> DATACNT_R {
        // Mask for the 25-bit field occupying bits 0..=24.
        const MASK: u32 = 0x01ff_ffff;
        DATACNT_R::new((self.bits & MASK) as u32)
    }
}
#[doc = "Reader of register REGION_BASE_LOW0"]
pub type R = crate::R<u32, super::REGION_BASE_LOW0>;
#[doc = "Reader of field `BASE_ADDRESS_LOW`"]
pub type BASE_ADDRESS_LOW_R = crate::R<u32, u32>;
impl R {
    #[doc = "Bits 12:31 - base address bits"]
    #[inline(always)]
    pub fn base_address_low(&self) -> BASE_ADDRESS_LOW_R {
        // The 20-bit field occupies bits 12..=31 of the register.
        const OFFSET: u32 = 12;
        const MASK: u32 = 0x000f_ffff;
        BASE_ADDRESS_LOW_R::new(((self.bits >> OFFSET) & MASK) as u32)
    }
}
use async_graphql::SimpleObject;
use chrono::{DateTime, Utc};

use super::schema::configs;

/// A guild-scoped key/value configuration row from the `configs` table.
///
/// NOTE: diesel's `Queryable` maps fields by position, so the field
/// order here must match the column order in `super::schema::configs`.
#[derive(Debug, Queryable, SimpleObject)]
pub struct Config {
    // Primary key.
    pub id: i32,
    // Configuration key name.
    pub name: String,
    // Configuration value.
    pub value: String,
    // Guild this entry belongs to.
    pub guild_id: i64,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}

/// Insertable form of [`Config`]: omits `id` and the timestamps, which
/// the database supplies on insert.
#[derive(Insertable)]
#[table_name = "configs"]
pub struct NewConfig {
    pub name: String,
    pub value: String,
    pub guild_id: i64,
}
/// A collection of Expressions.
// NOTE(review): this file is an unfinished sketch (see the "Todo" below)
// and does not compile as written; the notes below flag each problem so
// they can be resolved deliberately rather than papered over.
struct Function {
    name: Option<String>,
    fields: Vec<Expression>
}

impl Function {
    // NOTE(review): `fields` is declared above as Vec<Expression>, but the
    // constructors and accessor use Vec<Arc<Data>> - one of the two types
    // must change. `Arc` and `Data` are also not imported in this file.
    fn new_function(name: &str, fields: Vec<Arc<Data>>) -> Function {
        Function {
            // NOTE(review): stores a `&str` in an `Option<String>`;
            // needs `Some(name.to_string())`.
            name: Some(name),
            fields: fields,
        }
    }

    fn new_lambda(fields: Vec<Arc<Data>>) -> Function {
        Function {
            name: None,
            fields: fields,
        }
    }

    // NOTE(review): moves `self.name` out of a shared borrow; should
    // return `Option<&str>` (via `self.name.as_deref()`) or clone.
    fn name(&self) -> Option<String> {
        self.name
    }

    // NOTE(review): same move-out-of-borrow problem, and the return type
    // disagrees with the `fields: Vec<Expression>` field declaration.
    fn fields(&self) -> Vec<Arc<Data>> {
        self.fields
    }
}

// Todo: This.
// NOTE(review): a recursive enum must box its recursive payloads (e.g.
// `Assignment(Box<Expression>, Box<Expression>)`) or it has infinite
// size; `Left` and `Right` are not defined anywhere visible here.
enum Expression {
    Assignment(Expression, Expression),
    Addition(Left, Right)
}

impl Data for Function {
    fn data_type(&self) -> DataType {
        DataType::Function
    }
}
// NOTE(review): machine-generated register block in the svd2rust style
// (see the doc links below). Keep edits mechanical; offsets in the field
// docs must stay consistent with the `_reservedN` padding.
#[doc = r"Register block"]
#[repr(C)]
pub struct BANK {
    #[doc = "0x00 - FLASH key register for bank 1"]
    pub keyr: KEYR,
    _reserved1: [u8; 0x04],
    #[doc = "0x08 - FLASH control register for bank 1"]
    pub cr: CR,
    #[doc = "0x0c - FLASH status register for bank 1"]
    pub sr: SR,
    #[doc = "0x10 - FLASH clear control register for bank 1"]
    pub ccr: CCR,
    _reserved4: [u8; 0x10],
    #[doc = "0x24 - FLASH protection address for bank 1"]
    pub prar_cur: PRAR_CUR,
    #[doc = "0x28 - FLASH protection address for bank 1"]
    pub prar_prg: PRAR_PRG,
    #[doc = "0x2c - FLASH secure address for bank 1"]
    pub scar_cur: SCAR_CUR,
    #[doc = "0x30 - FLASH secure address for bank 1"]
    pub scar_prg: SCAR_PRG,
    #[doc = "0x34 - FLASH write sector protection for bank 1"]
    pub wpsn_curr: WPSN_CURR,
    #[doc = "0x38 - FLASH write sector protection for bank 1"]
    pub wpsn_prgr: WPSN_PRGR,
    _reserved10: [u8; 0x10],
    #[doc = "0x4c - FLASH CRC control register for bank 1"]
    pub crccr: CRCCR,
    #[doc = "0x50 - FLASH CRC start address register for bank 1"]
    pub crcsaddr: CRCSADDR,
    #[doc = "0x54 - FLASH CRC end address register for bank 1"]
    pub crceaddr: CRCEADDR,
    _reserved13: [u8; 0x04],
    #[doc = "0x5c - FLASH ECC fail address for bank 1"]
    pub far: FAR,
    _reserved_end: [u8; 0xa0],
}
#[doc = "KEYR (rw) register accessor: FLASH key register for bank 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`keyr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`keyr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`keyr`] module"]
pub type KEYR = crate::Reg<keyr::KEYR_SPEC>;
#[doc = "FLASH key register for bank 1"]
pub mod keyr;
#[doc = "CR (rw) register accessor: FLASH control register for bank 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cr`] module"]
pub type CR = crate::Reg<cr::CR_SPEC>;
#[doc = "FLASH control register for bank 1"]
pub mod cr;
#[doc = "SR (rw) register accessor: FLASH status register for bank 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`sr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`sr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`sr`] module"]
pub type SR = crate::Reg<sr::SR_SPEC>;
#[doc = "FLASH status register for bank 1"]
pub mod sr;
#[doc = "CCR (rw) register accessor: FLASH clear control register for bank 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ccr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ccr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ccr`] module"]
pub type CCR = crate::Reg<ccr::CCR_SPEC>;
#[doc = "FLASH clear control register for bank 1"]
pub mod ccr;
#[doc = "PRAR_CUR (r) register accessor: FLASH protection address for bank 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`prar_cur::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`prar_cur`] module"]
pub type PRAR_CUR = crate::Reg<prar_cur::PRAR_CUR_SPEC>;
#[doc = "FLASH protection address for bank 1"]
pub mod prar_cur;
#[doc = "PRAR_PRG (rw) register accessor: FLASH protection address for bank 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`prar_prg::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`prar_prg::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`prar_prg`] module"]
pub type PRAR_PRG = crate::Reg<prar_prg::PRAR_PRG_SPEC>;
#[doc = "FLASH protection address for bank 1"]
pub mod prar_prg;
#[doc = "SCAR_CUR (rw) register accessor: FLASH secure address for bank 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`scar_cur::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`scar_cur::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`scar_cur`] module"]
pub type SCAR_CUR = crate::Reg<scar_cur::SCAR_CUR_SPEC>;
#[doc = "FLASH secure address for bank 1"]
pub mod scar_cur;
#[doc = "SCAR_PRG (rw) register accessor: FLASH secure address for bank 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`scar_prg::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`scar_prg::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`scar_prg`] module"]
pub type SCAR_PRG = crate::Reg<scar_prg::SCAR_PRG_SPEC>;
#[doc = "FLASH secure address for bank 1"]
pub mod scar_prg;
#[doc = "WPSN_CURR (r) register accessor: FLASH write sector protection for bank 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`wpsn_curr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`wpsn_curr`] module"]
pub type WPSN_CURR = crate::Reg<wpsn_curr::WPSN_CURR_SPEC>;
#[doc = "FLASH write sector protection for bank 1"]
pub mod wpsn_curr;
#[doc = "WPSN_PRGR (rw) register accessor: FLASH write sector protection for bank 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`wpsn_prgr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`wpsn_prgr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`wpsn_prgr`] module"]
pub type WPSN_PRGR = crate::Reg<wpsn_prgr::WPSN_PRGR_SPEC>;
#[doc = "FLASH write sector protection for bank 1"]
pub mod wpsn_prgr;
#[doc = "CRCCR (rw) register accessor: FLASH CRC control register for bank 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`crccr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`crccr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`crccr`] module"]
pub type CRCCR = crate::Reg<crccr::CRCCR_SPEC>;
#[doc = "FLASH CRC control register for bank 1"]
pub mod crccr;
#[doc = "CRCSADDR (rw) register accessor: FLASH CRC start address register for bank 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`crcsaddr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`crcsaddr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`crcsaddr`] module"]
pub type CRCSADDR = crate::Reg<crcsaddr::CRCSADDR_SPEC>;
#[doc = "FLASH CRC start address register for bank 1"]
pub mod crcsaddr;
#[doc = "CRCEADDR (rw) register accessor: FLASH CRC end address register for bank 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`crceaddr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`crceaddr::W`]. 
You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`crceaddr`] module"]
pub type CRCEADDR = crate::Reg<crceaddr::CRCEADDR_SPEC>;
#[doc = "FLASH CRC end address register for bank 1"]
pub mod crceaddr;
#[doc = "FAR (r) register accessor: FLASH ECC fail address for bank 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`far::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`far`] module"]
pub type FAR = crate::Reg<far::FAR_SPEC>;
#[doc = "FLASH ECC fail address for bank 1"]
pub mod far;
use std::fs::File;
use std::io::{BufRead, BufReader};

/// Applies one FFT phase (Advent of Code 2019, day 16).
///
/// Output digit `j` (1-based) is `|sum_k pattern[((k+1)/j) % 4] * d_k| % 10`,
/// where the base pattern is `[0, 1, 0, -1]`, each element repeated `j`
/// times with the very first element skipped.
fn fft_phase(digits: &[u8]) -> Vec<u8> {
    const BASE_PATTERN: [i64; 4] = [0, 1, 0, -1];
    (1..=digits.len())
        .map(|j| {
            let sum: i64 = digits
                .iter()
                .enumerate()
                .map(|(k, &d)| BASE_PATTERN[((k + 1) / j) % 4] * d as i64)
                .sum();
            // Keep only the ones digit of the absolute value.
            (sum.abs() % 10) as u8
        })
        .collect()
}

/// Runs `phases` successive FFT phases over `digits`.
fn fft(digits: &[u8], phases: usize) -> Vec<u8> {
    let mut current = digits.to_vec();
    for _ in 0..phases {
        current = fft_phase(&current);
    }
    current
}

/// Reads the puzzle input (a line of ASCII digits from `src/input`),
/// runs 100 FFT phases and prints the resulting digit vector.
fn main() {
    let filename = "src/input";
    // Open the file in read-only mode (ignoring errors).
    let file = File::open(filename).unwrap();
    let reader = BufReader::new(file);

    let mut digits = Vec::new();
    for line in reader.lines() {
        let line = line.unwrap(); // Ignore errors.
        if line.trim().is_empty() {
            break;
        }
        // Each ASCII digit becomes its numeric value 0..=9.
        digits.extend(line.as_bytes().iter().map(|b| b - b'0'));
    }
    println!("Digits: {:?}", digits);

    let result = fft(&digits, 100);
    println!("{} -> {:?}", 100, result);
}
use bytes::{Buf, BufMut, Bytes, BytesMut};

use crate::utils::Writeable;

/// A protocol version number, written on the wire as two big-endian
/// u16 values: major followed by minor.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct Version {
    major: u16,
    minor: u16,
}

impl Default for Version {
    /// The default protocol version is 1.0.
    fn default() -> Version {
        Version::new(1, 0)
    }
}

impl Writeable for Version {
    fn write_to(&self, bf: &mut BytesMut) {
        let Version { major, minor } = *self;
        bf.put_u16(major);
        bf.put_u16(minor);
    }

    fn len(&self) -> usize {
        // Two u16 fields on the wire.
        4
    }
}

impl Version {
    pub fn new(major: u16, minor: u16) -> Version {
        Version { major, minor }
    }

    pub fn get_major(self) -> u16 {
        self.major
    }

    pub fn get_minor(self) -> u16 {
        self.minor
    }
}
fn main() {
    // SAFETY: `print_long_double` is a foreign (C) function, so calling
    // it requires `unsafe`; we rely on the C implementation upholding
    // its own contract (it takes no arguments and returns nothing).
    unsafe { print_long_double() };
}

// Functions implemented in C and linked into this binary.
extern "C" {
    fn print_long_double();
}
use syn::{Attribute, DataStruct, Error}; use syn::{NestedMeta, Result}; #[derive(Debug)] pub(crate) enum StructMemberLayout { C, Packed(u64), } /// Is the given Attribute a #[repr(...)] attribute? fn is_repr_attribute(attribute: &Attribute) -> bool { attribute .path .get_ident() .map(|path| path == "repr") .unwrap_or(false) } fn arg_is_c(arg: &NestedMeta) -> bool { match arg { NestedMeta::Meta(meta) => match meta { syn::Meta::Path(path) => path.get_ident().map(|ident| ident == "C").unwrap_or(false), _ => false, }, _ => false, } } fn arg_is_packed(arg: &NestedMeta) -> bool { match arg { NestedMeta::Meta(meta) => match meta { syn::Meta::Path(path) => path .get_ident() .map(|ident| ident == "packed") .unwrap_or(false), syn::Meta::List(list) => list .path .get_ident() .map(|ident| ident == "packed") .unwrap_or(false), _ => false, }, _ => false, } } fn get_packing_from_arg(arg: &NestedMeta) -> Result<u64> { match arg { NestedMeta::Meta(meta) => { match meta { syn::Meta::Path(_) => Ok(1), syn::Meta::List(list) => { let packed_value = match list.nested.first() { Some(f) => f, None => return Err(Error::new_spanned(arg, "Expected #[repr(packed(N))]. packed List attribute has no nested members")), }; match packed_value { NestedMeta::Meta(meta) => match meta { syn::Meta::Path(path) => { let val_str = path .get_ident() .map(|ident| ident.to_string()) .ok_or_else(|| { Error::new_spanned( arg, "Expected #[repr(packed(N))]. Could not get ident", ) })?; val_str.parse::<u64>().map_err(|_| { Error::new_spanned( arg, "Expected #[repr(packed(N))]. Could not parse number", ) }) } _ => Err(Error::new_spanned( arg, "Expected #[repr(packed(N))]. 
Meta is no Path", )), }, NestedMeta::Lit(literal) => match literal { syn::Lit::Int(int_literal) => Ok(int_literal.base10_parse::<u64>()?), _ => Err(Error::new_spanned( arg, "Expected #[repr(packed(N))], but argument N is no integer literal", )), }, } } _ => Err(Error::new_spanned(arg, "Expected #[repr(packed(N))]")), } } _ => Err(Error::new_spanned(arg, "Expected #[repr(packed(N))]")), } } pub(crate) fn get_struct_member_layout( struct_attributes: &[Attribute], data_struct: &DataStruct, ) -> Result<StructMemberLayout> { // Do we have a #[repr(...)] attribute? let maybe_repr_attribute = struct_attributes.iter().find(|a| is_repr_attribute(a)); if maybe_repr_attribute.is_none() { //return Ok(StructMemberLayout::Rust); return Err(Error::new_spanned(data_struct.struct_token, "derive(PointType) is only valid for structs that are either #[repr(C)] or #[repr(packed)]")); } let repr_attribute = maybe_repr_attribute.unwrap(); let attribute_as_meta = repr_attribute.parse_meta()?; match &attribute_as_meta { syn::Meta::List(list) => { // There are several possible arguments for #[repr] on structs: // - C // - transparent (on single-value structs) // - packed // - packed(N) // - align(N) // Several combinations of those are possible! So first we look for either // the 'C' or 'packed'/'packed(N)' arguments let maybe_packed = list.nested.iter().find(|arg| arg_is_packed(arg)); if let Some(packed_arg) = maybe_packed { let packing = get_packing_from_arg(packed_arg)?; return Ok(StructMemberLayout::Packed(packing)); } let maybe_repr_c = list.nested.iter().find(|arg| arg_is_c(arg)); if maybe_repr_c.is_some() { return Ok(StructMemberLayout::C); } //Ok(StructMemberLayout::Rust) Err(Error::new_spanned( repr_attribute, "Unrecongized repr attribute", )) } _ => Err(Error::new_spanned( repr_attribute, "Could not parse repr attribute", )), } }
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};

// NOTE(review): generated (AutoRust) serde model types for an Azure
// StoragePool / DiskPool REST API. `rename` attributes mirror the JSON wire
// names; `default` + `skip_serializing_if` mark fields that are optional in
// the payload.

/// Display strings for a resource-provider operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct StoragePoolOperationDisplay {
    pub provider: String,
    pub resource: String,
    pub operation: String,
    pub description: String,
}

/// A single operation exposed by the StoragePool resource provider.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct StoragePoolRpOperation {
    pub name: String,
    #[serde(rename = "isDataAction")]
    pub is_data_action: bool,
    #[serde(rename = "actionType", default, skip_serializing_if = "Option::is_none")]
    pub action_type: Option<String>,
    pub display: StoragePoolOperationDisplay,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub origin: Option<String>,
}

/// Paged list of resource-provider operations.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct StoragePoolOperationListResult {
    pub value: Vec<StoragePoolRpOperation>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}

/// Paged list of disk pools.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskPoolListResult {
    pub value: Vec<DiskPool>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}

/// A disk pool (tracked resource with pool properties).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskPool {
    #[serde(flatten)]
    pub tracked_resource: TrackedResource,
    pub properties: DiskPoolProperties,
    #[serde(rename = "systemData", default, skip_serializing_if = "Option::is_none")]
    pub system_data: Option<SystemMetadata>,
}

/// Request payload for creating a disk pool.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskPoolCreate {
    pub properties: DiskPoolCreateProperties,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    pub location: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
}

/// Request payload for updating a disk pool.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskPoolUpdate {
    pub properties: DiskPoolUpdateProperties,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}

// Availability zone identifier (string alias).
pub type AvailabilityZone = String;

/// Read-side properties of a disk pool.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskPoolProperties {
    #[serde(rename = "provisioningState")]
    pub provisioning_state: ProvisioningState,
    #[serde(rename = "availabilityZones")]
    pub availability_zones: Vec<AvailabilityZone>,
    pub status: OperationalStatus,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub disks: Vec<Disk>,
    #[serde(rename = "subnetId")]
    pub subnet_id: String,
    #[serde(rename = "additionalCapabilities", default, skip_serializing_if = "Vec::is_empty")]
    pub additional_capabilities: Vec<AdditionalCapability>,
    pub tier: DiskPoolTier,
}

/// Properties supplied when creating a disk pool.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskPoolCreateProperties {
    #[serde(rename = "availabilityZones")]
    pub availability_zones: Vec<AvailabilityZone>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub disks: Vec<Disk>,
    #[serde(rename = "subnetId")]
    pub subnet_id: String,
    #[serde(rename = "additionalCapabilities", default, skip_serializing_if = "Vec::is_empty")]
    pub additional_capabilities: Vec<AdditionalCapability>,
    pub tier: DiskPoolTier,
}

/// Properties supplied when updating a disk pool.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiskPoolUpdateProperties {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub disks: Vec<Disk>,
}

/// Reference to a disk by resource id.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Disk {
    pub id: String,
}

// Additional capability name (string alias).
pub type AdditionalCapability = String;

/// Paged list of iSCSI targets.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IscsiTargetList {
    pub value: Vec<IscsiTarget>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}

/// An iSCSI target (proxy resource with target properties).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IscsiTarget {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    pub properties: IscsiTargetProperties,
}

/// Request payload for creating an iSCSI target.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IscsiTargetCreate {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    pub properties: IscsiTargetCreateProperties,
}

/// Request payload for updating an iSCSI target.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IscsiTargetUpdate {
    pub properties: IscsiTargetUpdateProperties,
}

/// Read-side properties of an iSCSI target.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IscsiTargetProperties {
    #[serde(rename = "provisioningState")]
    pub provisioning_state: ProvisioningState,
    pub status: OperationalStatus,
    pub tpgs: Vec<TargetPortalGroup>,
    #[serde(rename = "targetIqn")]
    pub target_iqn: String,
}

/// Properties supplied when creating an iSCSI target.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IscsiTargetCreateProperties {
    pub tpgs: Vec<TargetPortalGroupCreate>,
    #[serde(rename = "targetIqn", default, skip_serializing_if = "Option::is_none")]
    pub target_iqn: Option<String>,
}

/// Properties supplied when updating an iSCSI target.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IscsiTargetUpdateProperties {
    pub tpgs: Vec<TargetPortalGroupUpdate>,
}

/// A target portal group (read side).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TargetPortalGroup {
    pub luns: Vec<IscsiLun>,
    pub acls: Vec<Acl>,
    pub attributes: Attributes,
    pub endpoints: Vec<String>,
    pub tag: i32,
    pub port: i32,
}

/// Portal-group fields accepted on creation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TargetPortalGroupCreate {
    pub luns: Vec<IscsiLun>,
    pub acls: Vec<Acl>,
    pub attributes: Attributes,
}

/// Portal-group fields accepted on update.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TargetPortalGroupUpdate {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub luns: Vec<IscsiLun>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub acls: Vec<Acl>,
}

/// Access-control entry mapping an initiator IQN to LUNs.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Acl {
    #[serde(rename = "initiatorIqn")]
    pub initiator_iqn: String,
    #[serde(rename = "mappedLuns")]
    pub mapped_luns: Vec<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub credentials: Option<IscsiTargetCredentials>,
}

/// Target portal group attributes.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Attributes {
    pub authentication: bool,
    #[serde(rename = "prodModeWriteProtect")]
    pub prod_mode_write_protect: bool,
}

/// Username/password credentials for an iSCSI target.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IscsiTargetCredentials {
    pub username: String,
    pub password: String,
}

/// A LUN backed by an Azure managed disk.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IscsiLun {
    pub name: String,
    #[serde(rename = "managedDiskAzureResourceId")]
    pub managed_disk_azure_resource_id: String,
}

/// Top-level error wrapper returned by the service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Error {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ErrorResponse>,
}

/// Provisioning lifecycle state of a resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ProvisioningState {
    Invalid,
    Succeeded,
    Failed,
    Canceled,
    Pending,
    Creating,
    Updating,
    Deleting,
}

/// Operational status of a resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum OperationalStatus {
    Invalid,
    Unknown,
    Healthy,
    Unhealthy,
    Updating,
    Running,
    Stopped,
    #[serde(rename = "Stopped (deallocated)")]
    StoppedDeallocated,
}

/// Performance tier of a disk pool.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum DiskPoolTier {
    Basic,
    Standard,
    Premium,
}

/// Audit metadata: who/when created and last modified the resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SystemMetadata {
    #[serde(rename = "createdBy", default, skip_serializing_if = "Option::is_none")]
    pub created_by: Option<String>,
    #[serde(rename = "createdByType", default, skip_serializing_if = "Option::is_none")]
    pub created_by_type: Option<String>,
    #[serde(rename = "createdAt", default, skip_serializing_if = "Option::is_none")]
    pub created_at: Option<String>,
    #[serde(rename = "lastModifiedBy", default, skip_serializing_if = "Option::is_none")]
    pub last_modified_by: Option<String>,
    #[serde(rename = "lastModifiedByType", default, skip_serializing_if = "Option::is_none")]
    pub last_modified_by_type: Option<String>,
    #[serde(rename = "lastModifiedAt", default, skip_serializing_if = "Option::is_none")]
    pub last_modified_at: Option<String>,
}

/// A resource that also carries location and tags.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TrackedResource {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    pub location: String,
}

/// Common resource identity fields (id/name/type).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
}

/// A resource without location/tags of its own.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProxyResource {
    #[serde(flatten)]
    pub resource: Resource,
}

/// Extra service-specific error information.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorAdditionalInfo {
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub info: Option<serde_json::Value>,
}

/// Standard error body: code/message/target plus nested details.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub details: Vec<ErrorResponse>,
    #[serde(rename = "additionalInfo", default, skip_serializing_if = "Vec::is_empty")]
    pub additional_info: Vec<ErrorAdditionalInfo>,
}
pub mod args;
pub mod logging;
pub mod subcommands;

use clap::AppSettings;
use color_eyre::{Report, Result};

// Trailer appended to clap's generated help output.
static AFTER_HELP: &str = "Thank you for using Spideog. Please send any feedback, bug report or feature request to the project's github page: https://github.com/jeanmanguy/spideog";

/// Top-level command-line options: a single required subcommand.
// NOTE(review): the `Clap` derive and the `crate_version!` macro used below
// are not imported in this file — presumably brought in via `#[macro_use]`
// in the crate root; confirm before moving this module.
#[derive(Debug, Clap)]
#[clap(author, about, version)]
#[clap(global_setting = AppSettings::ColoredHelp)]
#[clap(global_setting = AppSettings::ColorAuto)]
#[clap(global_setting = AppSettings::DeriveDisplayOrder)]
#[clap(global_setting = AppSettings::DontCollapseArgsInUsage)]
#[clap(global_setting = AppSettings::GlobalVersion)]
#[clap(global_setting = AppSettings::ArgRequiredElseHelp)]
#[clap(global_setting = AppSettings::HelpRequired)]
#[clap(global_setting = AppSettings::UnifiedHelpMessage)]
#[clap(after_help = AFTER_HELP)]
pub struct Opts {
    #[clap(subcommand)]
    pub command: subcommands::Command,
    // #[clap(flatten)]
    // pub logging: logging::Logging,
}

/// Installs the color-eyre error/panic report handler.
///
/// Reports carry version/architecture/OS metadata and a "file an issue" URL;
/// the issue prompt is only shown for non-recoverable errors.
pub fn setup_error_hook() -> Result<(), Report> {
    color_eyre::config::HookBuilder::default()
        .add_default_filters()
        .issue_url(concat!(env!("CARGO_PKG_REPOSITORY"), "/issues/new"))
        .add_issue_metadata("version", crate_version!())
        .add_issue_metadata("architecture", std::env::consts::ARCH)
        .add_issue_metadata("OS", std::env::consts::OS)
        // Only suggest opening a GitHub issue for non-recoverable errors.
        .issue_filter(|kind| match kind {
            color_eyre::ErrorKind::NonRecoverable(_) => true,
            color_eyre::ErrorKind::Recoverable(_) => false,
        })
        .install()
}

// Boilerplate: https://github.com/yaahc/color-eyre/blob/master/examples/usage.rs
// TODO: adjust for use
// TODO: move to logging.rs?
/// Sets up tracing-subscriber: an env filter (RUST_LOG, defaulting to
/// "debug"), a plain fmt layer without targets, and tracing-error's
/// ErrorLayer for span traces in error reports.
pub fn install_tracing() {
    use tracing_error::ErrorLayer;
    use tracing_subscriber::prelude::*;
    use tracing_subscriber::{fmt, EnvFilter};

    let fmt_layer = fmt::layer().with_target(false);
    let filter_layer = EnvFilter::try_from_default_env()
        .or_else(|_| EnvFilter::try_new("debug"))
        .unwrap();

    tracing_subscriber::registry()
        .with(filter_layer)
        .with(fmt_layer)
        .with(ErrorLayer::default())
        .init();
}
#[cfg(target_os = "windows")] extern crate winapi; #[macro_use] extern crate quickcheck; extern crate registry_pol; mod v1;
#[cfg(test)]
#[path = "../../../tests/unit/solver/evolution/evolution_test.rs"]
mod evolution_test;

use crate::construction::heuristics::InsertionContext;
use crate::solver::hyper::HyperHeuristic;
use crate::solver::telemetry::Telemetry;
use crate::solver::termination::*;
use crate::solver::{Metrics, Population, RefinementContext};
use crate::utils::Timer;

mod config;
pub use self::config::*;

mod run_simple;
pub use self::run_simple::RunSimple;

/// Defines evolution result type.
pub type EvolutionResult = Result<(Box<dyn Population + Send + Sync>, Option<Metrics>), String>;

/// An evolution algorithm strategy.
pub trait EvolutionStrategy {
    /// Runs evolution for given `refinement_ctx`.
    fn run(
        &self,
        refinement_ctx: RefinementContext,
        hyper: Box<dyn HyperHeuristic + Send + Sync>,
        termination: &(dyn Termination + Send + Sync),
        telemetry: Telemetry,
    ) -> EvolutionResult;
}

/// An entity which simulates evolution process.
pub struct EvolutionSimulator {
    config: EvolutionConfig,
}

impl EvolutionSimulator {
    /// Creates a simulator from `config`.
    ///
    /// Fails when no initial-solution method is configured, since the
    /// simulator would have no way to seed a population.
    pub fn new(config: EvolutionConfig) -> Result<Self, String> {
        if config.population.initial.methods.is_empty() {
            return Err("at least one initial method has to be specified".to_string());
        }
        Ok(Self { config })
    }

    /// Runs evolution for given `problem` using evolution `config`.
    /// Returns populations filled with solutions.
    pub fn run(mut self) -> EvolutionResult {
        // Build the initial population first; the configured strategy then
        // drives the actual evolution loop.
        let refinement_ctx = self.create_refinement_ctx();
        let strategy = self.config.strategy.clone();
        strategy.run(refinement_ctx, self.config.hyper, self.config.termination.as_ref(), self.config.telemetry)
    }

    /// Creates refinement context with population containing initial individuals.
    fn create_refinement_ctx(&mut self) -> RefinementContext {
        // `variation` and `quota` are moved out of the config (leaving None),
        // so this method can only be invoked once per simulator instance.
        let mut refinement_ctx = RefinementContext::new(
            self.config.problem.clone(),
            std::mem::replace(&mut self.config.population.variation, None).unwrap(),
            self.config.environment.clone(),
            std::mem::replace(&mut self.config.quota, None),
        );

        self.config.telemetry.log(
            format!(
                "problem has total jobs: {}, actors: {}",
                self.config.problem.jobs.size(),
                self.config.problem.fleet.actors.len()
            )
            .as_str(),
        );

        self.config.telemetry.log("preparing initial solution(-s)");
        // Phase 1: seed the population with any pre-built individuals from
        // the config (capped at max_size and gated by the quota).
        std::mem::take(&mut self.config.population.initial.individuals)
            .into_iter()
            .zip(0_usize..)
            .take(self.config.population.initial.max_size)
            .for_each(|(ctx, idx)| {
                if should_add_solution(&refinement_ctx) {
                    refinement_ctx.population.add(ctx);
                    self.config.telemetry.on_initial(
                        idx,
                        self.config.population.initial.max_size,
                        Timer::start(),
                        self.config.termination.estimate(&refinement_ctx),
                    );
                } else {
                    self.config.telemetry.log(format!("skipping provided initial solution {}", idx).as_str())
                }
            });

        // Selection weights used once the configured methods have each been
        // tried in order.
        let weights = self.config.population.initial.methods.iter().map(|(_, weight)| *weight).collect::<Vec<_>>();
        let empty_ctx = InsertionContext::new(self.config.problem.clone(), refinement_ctx.environment.clone());

        let initial_time = Timer::start();
        // Phase 2: build the remaining individuals with the configured
        // methods. `try_for_each` with Err(()) is used purely for early exit.
        let _ = (refinement_ctx.population.size()..self.config.population.initial.max_size).try_for_each(|idx| {
            let item_time = Timer::start();

            let is_overall_termination = self.config.termination.is_termination(&mut refinement_ctx);
            let is_initial_quota_reached = self.config.termination.estimate(&refinement_ctx) > self.config.population.initial.quota;

            if is_initial_quota_reached || is_overall_termination {
                self.config.telemetry.log(
                    format!(
                        "stop building initial solutions due to initial quota reached ({}) or overall termination ({}).",
                        is_initial_quota_reached, is_overall_termination
                    )
                    .as_str(),
                );
                return Err(());
            }

            // Use each configured method once in order, then fall back to
            // weighted random selection.
            let method_idx = if idx < self.config.population.initial.methods.len() { idx } else {
                self.config.environment.random.weighted(weights.as_slice())
            };

            // TODO consider initial quota limit
            let insertion_ctx = self.config.population.initial.methods[method_idx].0.run(&refinement_ctx, empty_ctx.deep_copy());

            if should_add_solution(&refinement_ctx) {
                refinement_ctx.population.add(insertion_ctx);
                self.config.telemetry.on_initial(idx, self.config.population.initial.max_size, item_time, self.config.termination.estimate(&refinement_ctx));
            } else {
                self.config.telemetry.log(format!("skipping built initial solution {}", idx).as_str())
            }

            Ok(())
        });

        if refinement_ctx.population.size() > 0 {
            on_generation(
                &mut refinement_ctx,
                &mut self.config.telemetry,
                self.config.termination.as_ref(),
                initial_time,
                true,
            );
        } else {
            self.config.telemetry.log("created an empty population");
        }

        refinement_ctx
    }
}

/// Decides whether another solution may still be added: always allow the very
/// first one, otherwise stop once the quota is reached.
fn should_add_solution(refinement_ctx: &RefinementContext) -> bool {
    let is_quota_reached = refinement_ctx.quota.as_ref().map_or(false, |quota| quota.is_reached());
    let is_population_empty = refinement_ctx.population.size() == 0;

    // NOTE when interrupted, population can return solution with worse primary objective fitness values as first
    is_population_empty || !is_quota_reached
}

/// Returns true when the evolution loop should stop: the termination
/// criterion fired or the quota was exhausted.
fn should_stop(refinement_ctx: &mut RefinementContext, termination: &dyn Termination) -> bool {
    let is_terminated = termination.is_termination(refinement_ctx);
    let is_quota_reached = refinement_ctx.quota.as_ref().map_or(false, |q| q.is_reached());

    is_terminated || is_quota_reached
}

/// Forwards per-generation statistics to telemetry and notifies the
/// population that a generation finished.
fn on_generation(
    refinement_ctx: &mut RefinementContext,
    telemetry: &mut Telemetry,
    termination: &dyn Termination,
    generation_time: Timer,
    is_improved: bool,
) {
    let termination_estimate = termination.estimate(refinement_ctx);

    telemetry.on_generation(refinement_ctx, termination_estimate, generation_time, is_improved);
    refinement_ctx.population.on_generation(&refinement_ctx.statistics);
}
use core;

use kalloc::{HEAP_SIZE, HEAP_START};

use super::frame_allocator::{frame_alloc, PAGE_SIZE};

// Mask selecting the physical-frame address bits of a page-table entry
// (bits 12..51); everything outside is flags/reserved.
pub const PTE_ADDR_MASK: usize = 0x000f_ffff_ffff_f000;
// Masks selecting the 9-bit table index for each paging level inside a
// virtual address: level N occupies bits (N-1)*9+12 .. (N-1)*9+20.
pub const PT1_INDEX: usize = 0x1ff << (0 * 9 + 12);
pub const PT2_INDEX: usize = 0x1ff << (1 * 9 + 12);
pub const PT3_INDEX: usize = 0x1ff << (2 * 9 + 12);
pub const PT4_INDEX: usize = 0x1ff << (3 * 9 + 12);

// x86_64 page-table entry flag bits (old bitflags 0.x macro syntax).
bitflags! {
    pub flags PageFlags: usize {
        const NONE = 0,
        const PRESENT = 1 << 0,
        const WRITE = 1 << 1,
        const USER = 1 << 2,
        const WRITE_THROUGH = 1 << 3,
        const NO_CACHE = 1 << 4,
        const ACCESSED = 1 << 5,
        const DIRTY = 1 << 6,
        const HUGE = 1 << 7,
        const GLOBAL = 1 << 8,
        const NO_EXECUTE = 1 << 63,
    }
}

// A single entry of a level-`L` page table: physical address plus flag bits
// packed into one usize.
struct PageEntry<L: PageLevel> {
    pub value: usize,
    // Zero-sized marker tying this entry to its paging level.
    level: core::marker::PhantomData<L>,
}

pub const NUM_ENTRIES: usize = 512;

// A page table: 512 entries (one 4 KiB frame).
struct PageTable<L: PageLevel> {
    entries: [PageEntry<L>; NUM_ENTRIES],
}

// Type safety magic
// Uninhabited marker types — one per paging level, used only at type level.
enum Level1 { }
enum Level2 { }
enum Level3 { }
enum Level4 { }

trait PageLevel {
    const LEVEL: usize;
    // Only level-2 and level-3 entries may carry the HUGE bit.
    fn can_be_huge() -> bool {
        Self::LEVEL == 2 || Self::LEVEL == 3
    }
}

impl PageLevel for Level1 { const LEVEL: usize = 1; }
impl PageLevel for Level2 { const LEVEL: usize = 2; }
impl PageLevel for Level3 { const LEVEL: usize = 3; }
impl PageLevel for Level4 { const LEVEL: usize = 4; }

// Levels whose entries may map memory directly (level 1 always; levels 2/3
// via huge pages).
trait MappableLevel: PageLevel { }
impl MappableLevel for Level1 { }
impl MappableLevel for Level2 { }
impl MappableLevel for Level3 { }

// Levels whose entries may point at a next-lower table.
trait NextPageLevel: PageLevel {
    type Next: MappableLevel;
}
impl NextPageLevel for Level2 { type Next = Level1; }
impl NextPageLevel for Level3 { type Next = Level2; }
impl NextPageLevel for Level4 { type Next = Level3; }

impl<L: PageLevel> PageEntry<L> {
    // Stores a physical address, discarding any previous flag bits.
    fn set_addr(&mut self, addr: usize) {
        self.value = addr & PTE_ADDR_MASK;
    }
    fn get_addr(&self) -> usize {
        self.value & PTE_ADDR_MASK
    }
    fn flags(&self) -> PageFlags {
        PageFlags::from_bits_truncate(self.value)
    }
    // A terminal entry maps memory directly rather than pointing to another
    // table: always at level 1, or at levels 2/3 when HUGE is set.
    fn terminal(&self) -> bool {
        if L::LEVEL == 1 {
            true
        } else if L::can_be_huge() {
            self.flags().contains(HUGE)
        } else {
            false
        }
    }
    fn present(&self) -> bool {
        self.flags().contains(PRESENT)
    }
    fn points_to_table(&self) -> bool {
        self.present() && !self.terminal()
    }
}

impl<L: PageLevel> PageTable<L> {
    // Allocates a zeroed physical frame and reinterprets it as an empty table.
    fn new() -> *mut PageTable<L> {
        let mut frame = frame_alloc();
        frame.clear();
        frame.addr() as *mut PageTable<L>
    }
}

impl<L: MappableLevel> PageTable<L> {
    // Points entry `index` directly at physical memory `paddr`; PRESENT is
    // always set, HUGE is added on levels where it is required.
    fn map_mem(&mut self, index: usize, paddr: usize, flags: PageFlags) {
        self.entries[index].set_addr(paddr);
        self.entries[index].value |= flags.bits();
        self.entries[index].value |= PRESENT.bits();
        if L::can_be_huge() {
            // allow 2MB / 1GB pages
            self.entries[index].value |= HUGE.bits();
        }
    }
}

impl<L: NextPageLevel> PageTable<L> {
    fn map_table<'a>(&mut self, index: usize, table: *const PageTable<L::Next>) {
        self.entries[index].set_addr(table as usize);
        // if the entry in PT4 is not marked USER, then none of the pages mapped
        // in any lower tables (PT3-1) can be USER. Thus, mark all entries
        // pointing to tables as USER. Similar problem for WRITE.
        // Note: ring0 ignores WRITE flag unless CR0.WP is set
        self.entries[index].value |= (PRESENT | USER | WRITE).bits();
    }
    // Returns the next-lower table at `index`, or None when the entry is
    // absent or maps memory directly (huge page).
    fn get_table_mut(&mut self, index: usize) -> Option<&mut PageTable<L::Next>> {
        let ref entry = self.entries[index];
        if !entry.points_to_table() {
            return None;
        }
        unsafe { Some(&mut *(entry.get_addr() as *mut PageTable<_>)) }
    }
    // Returns the next-lower table at `index`, allocating and linking a fresh
    // one when the entry is empty; panics when the entry already maps memory.
    fn get_new_table(&mut self, index: usize) -> &mut PageTable<L::Next> {
        if self.entries[index].present() {
            self.get_table_mut(index).expect("Memory already mapped to")
        } else {
            let pt = PageTable::new();
            self.map_table(index, pt);
            self.get_table_mut(index).unwrap()
        }
    }
}

// Builds the initial address space: the first 2 GiB of physical memory
// mapped at KERNEL_BASE via 1 GiB pages plus a 4 KiB-paged heap, then loads
// it into CR3.
pub unsafe fn initialize() -> PT4 {
    use super::KERNEL_BASE;
    const G: usize = 0x40000000;
    let mut pt4 = PT4::new();
    pt4.map_to_1g(KERNEL_BASE, 0, USER | WRITE);
    pt4.map_to_1g(KERNEL_BASE + 1*G, 1*G, USER | WRITE);
    // map heap
    for i in 0..HEAP_SIZE / PAGE_SIZE {
        let addr = i * PAGE_SIZE + HEAP_START;
        pt4.map_4k(addr, WRITE);
    }
    pt4.activate(); // flushes TLB
    pt4
}

// Owning handle to a top-level (level-4) page table.
pub struct PT4 {
    table: core::ptr::Unique<PageTable<Level4>>,
}

impl PT4 {
    pub fn new() -> PT4 {
        PT4 {
            table: unsafe { core::ptr::Unique::new_unchecked(PageTable::new()) },
        }
    }
    fn get(&self) -> &PageTable<Level4> {
        unsafe { self.table.as_ref() }
    }
    fn get_mut(&mut self) -> &mut PageTable<Level4> {
        unsafe { self.table.as_mut() }
    }
    // Maps `vaddr` to a freshly allocated 4 KiB frame.
    pub fn map_4k(&mut self, vaddr: usize, flags: PageFlags) {
        self.map_to_4k(vaddr, frame_alloc().addr(), flags)
    }
    // Maps a 4 KiB page: walks (creating as needed) PT4 -> PT3 -> PT2, then
    // fills the PT1 slot.
    pub fn map_to_4k(&mut self, vaddr: usize, paddr: usize, flags: PageFlags) {
        self.get_mut()
            .get_new_table(get_pt4_index(vaddr))
            .get_new_table(get_pt3_index(vaddr))
            .get_new_table(get_pt2_index(vaddr))
            .map_mem(get_pt1_index(vaddr), paddr, flags);
    }
    // Maps a 2 MiB huge page at the PT2 level.
    pub fn map_to_2m(&mut self, vaddr: usize, paddr: usize, flags: PageFlags) {
        self.get_mut()
            .get_new_table(get_pt4_index(vaddr))
            .get_new_table(get_pt3_index(vaddr))
            .map_mem(get_pt2_index(vaddr), paddr, flags);
    }
    // Maps a 1 GiB huge page at the PT3 level.
    pub fn map_to_1g(&mut self, vaddr: usize, paddr: usize, flags: PageFlags) {
        self.get_mut()
            .get_new_table(get_pt4_index(vaddr))
            .map_mem(get_pt3_index(vaddr), paddr, flags);
    }
    // Loads this table into CR3, which also flushes the TLB.
    // NOTE(review): this writes the table's *virtual* address into CR3; only
    // correct while that address is identity-mapped — confirm.
    pub fn activate(&self) {
        unsafe { asm!("mov cr3, $0" :: "r"(self.get()) :: "intel"); }
    }
}

// Extract the per-level 9-bit table index from a virtual address.
pub fn get_pt1_index(val: usize) -> usize { (val & PT1_INDEX) >> 12 }
pub fn get_pt2_index(val: usize) -> usize { (val & PT2_INDEX) >> 21 }
pub fn get_pt3_index(val: usize) -> usize { (val & PT3_INDEX) >> 30 }
pub fn get_pt4_index(val: usize) -> usize { (val & PT4_INDEX) >> 39 }
use serde::{Deserialize, Serialize};
use serde_json::Result;
use std::time::{SystemTime, Instant};

/// What an inode refers to.
#[derive(Serialize, Deserialize, Debug)]
pub enum FileType {
    Free,
    File,
    Directory,
}

/// An inode record: identity, kind, extent and creation time.
// NOTE(review): field names are intentionally non-snake-case and kept as-is
// because they are part of the (de)serialization interface.
#[derive(Serialize, Deserialize, Debug)]
pub struct Inode {
    pub number: i128,
    pub typee: FileType,
    pub startBlock: i128,
    pub size: i128,
    pub cTime: SystemTime,
}

impl Inode {
    /// Assembles an `Inode` from its individual fields.
    pub fn buildInode(number: i128, typee: FileType, startBlock: i128, size: i128, cTime: SystemTime) -> Inode {
        // Field-init shorthand: every parameter name matches its field.
        Inode { number, typee, startBlock, size, cTime }
    }

    /// Serializes this inode as pretty-printed JSON and prints it.
    pub fn to_JSON(&mut self) -> Result<()> {
        let j = serde_json::to_string_pretty(&self)?;
        println!("{}", j);
        Ok(())
    }

    /// Parses an inode back from its JSON string (panicking on invalid
    /// input), echoing the parsed value.
    pub fn from_JSON(s: String) -> Inode {
        let back: Inode = serde_json::from_str(&s).unwrap();
        println!("{:?}", back);
        back
    }
}

fn main() {
    println!("Hello, world!");
}
use crate::{ clap_handler::{ app::{GeneralOptions, PrimenetOptions}, p95_work::PrimenetWorkType, }, util::*, }; use regex::RegexBuilder; use reqwest::blocking::{Client, ClientBuilder}; use std::fs::{remove_file, File, OpenOptions}; use std::io::{BufWriter, Write}; use std::path::Path; use std::str::from_utf8; use std::thread::sleep; use std::time::{Duration, Instant}; // Work validation regex const WVR: &str = r"((DoubleCheck|Test|PRP)\s*=\s*([0-9A-F]){32}(,[0-9]+){3}((,-?[0-9]+){3,5})?)$"; const P95_LOGIN_ADDR: &str = "https://www.mersenne.org/"; const P95_REQUEST_ADDR: &str = "https://www.mersenne.org/manual_assignment/?"; const P95_REPORT_ADDR: &str = "https://www.mersenne.org/manual_result/?"; pub fn primenet_login(client: &Client, username: &str, password: &str) -> Result<(), String> { let result = client .post(P95_LOGIN_ADDR) .form(&[("user_login", username), ("user_password", password)]) .send() .map_err(|e| format!("Failed to send login attempt to Primenet. Error: {}", e))?; let status = result.status().as_u16(); if status == 200 { let url = result.url().clone(); let result_text = result .text() .map_err(|e| format!("Failed to read response text. 
Error: {}", e))?; if result_text.contains(&format!("{}<br>logged in", username)) { Ok(()) } else { println!("Failed to log in to Primenet."); println!("Login URL: {}", url); println!("Request status code: {}", status); println!("Login response: {}", result_text); Err("Login failed.".to_string()) } } else { println!( "Login attempt at address '{}' returned bad status: {}", result.url(), status ); println!("Failure response: {:?}", result.text()); Err("Login failed.".to_string()) } } fn primenet_request( client: &Client, num_to_cache: usize, worktodo_path: &Path, worktodo_lock_path: &Path, work_info: PrimenetWorkType, ) -> Result<(), String> { while worktodo_lock_path.exists() { sleep(Duration::from_secs(1)); } let workfile_contents = read_list_lock(worktodo_path, worktodo_lock_path) .map_err(|e| format!("Failed to read worktodo file. Error: {}", e))?; if num_to_cache <= workfile_contents.len() { println!( "Already have {} assignment(s) cached of the requested {}. Not requesting more.", workfile_contents.len(), num_to_cache ); unlock_file(worktodo_lock_path) .map_err(|e| format!("Failed to unlock worktodo file. Error: {}", e)) } else { let worktype = work_info.as_str(); let num_to_get = format!("{}", num_to_cache - workfile_contents.len()); let response = client .get(P95_REQUEST_ADDR) .query(&[ ("cores", "1"), ("num_to_get", &num_to_get), ("pref", worktype), ("exp_lo", ""), ("exp_hi", ""), ("B1", "Get+Assignments"), ]) .send() .map_err(|e| format!("Failed to make work request to Primenet. Error: {}", e))?; let status = response.status().as_u16(); let response_text = response .text() .map_err(|e| format!("Failed to read response text from Primenet. 
Error: {}", e))?; if status == 200 { println!("Got work request response."); let work_validation_regex = RegexBuilder::new(WVR) .multi_line(true) .build() .expect("Failed to build regex for task validation"); let validated_jobs = work_validation_regex .captures_iter(&response_text) .map(|captures| captures[0].to_string()) .collect::<Vec<_>>(); println!("Validated jobs: {:?}", validated_jobs); if validated_jobs.len() == 0 { println!("WARNING!"); println!( "Received work request response but failed to find any valid jobs in it. You \n\ may want to check your Primenet account to see if any work has been \n\ reserved, and if so, add it to your worktodo file manually." ); return Ok(()); } // On any errors below until everything is written, show what hasn't yet been written and // ask the user to add it themselves. let mut list_file = BufWriter::new( OpenOptions::new() .append(true) .open(worktodo_path) .map_err(|e| { error_msg_with_jobs(e, "Failed to open worktodo file.", &validated_jobs) })?, ); for i in 0..validated_jobs.len() { list_file .write_all(validated_jobs[i].as_bytes()) .map_err(|e| { error_msg_with_jobs( e, "Failed to write to worktodo file.", &validated_jobs[i..], ) })?; list_file.write_all(&[b'\n']).map_err(|e| { error_msg_with_jobs( e, "Failed to write to worktodo file.", &validated_jobs[i..], ) })?; } list_file.flush().map_err(|e| { error_msg_with_unwritten( e, "Failed to flush buffered reader to worktodo file.", from_utf8(list_file.buffer()).unwrap(), ) })?; // Everything should be written to the file now, so we should be safe not to include it in // the error message. remove_file(worktodo_lock_path).map_err(|e| { format!( "Failed to remove lockfile after writing new jobs to it. Error: {}", e ) })?; } else { println!("Failed to request work from Primenet. 
Status: {}", status); println!("Response text: {}", response_text); } Ok(()) } } fn writeback_on_failure(results_bufwriter: &mut BufWriter<File>, unsent_result: String) { // If submission fails, write the result back to the results file. } pub fn primenet_submit( client: &Client, worktodo_path: &Path, worktodo_lock_path: &Path, results_path: &Path, results_lock_path: &Path, results_sent_path: &Path, results_sent_lock_path: &Path, ) -> Result<(), String> { let worktodo_contents = read_list_lock(worktodo_path, worktodo_lock_path) .map_err(|e| format!("Could not lock and read worktodo file. Error: {}", e))?; let mut results_contents = read_list_lock(results_path, results_lock_path) .map_err(|e| format!("Could not lock and read results file. Error: {}", e))?; lock_file(results_sent_lock_path)?; let mut results_file = BufWriter::new( OpenOptions::new() .write(true) .append(false) .open(results_path) .map_err(|e| { format!( "Failed to open results file with write privileges. Error: {}", e ) })?, ); let mut results_sent_file = BufWriter::new( OpenOptions::new() .write(true) .append(true) .open(results_sent_path) .map_err(|e| { format!( "Failed to open sent results file with write privileges. Error: {}", e ) })?, ); let mut collisions = Vec::new(); // Only jobs that are completed are allowed to be submitted. // Could handle this with hashset collisions, but then I have to sort out ordering when writing // back to the file on submission errors. Resolve this. for job in &worktodo_contents { if let Some(pos) = results_contents.iter().position(|j| j == job) { collisions.push(results_contents.remove(pos)); } } println!("Found the following incomplete jobs in results.txt:"); for collision in collisions { println!(" {}", collision); } if results_contents.len() > 0 { while let Some(completed_job) = results_contents.pop() { let response_text = client .post(P95_REPORT_ADDR) .form(&[("data", "completed_job")]) .send() .map_err(|e| format!("Failed to send work submission to primenet. 
Error: {}", e))? .text() .map_err(|e| { format!( "Failed to read response text from work submission to Primenet. Error: {}", e ) })?; if response_text.contains("Error") { let e_start = response_text.find("Error").unwrap(); let e_end = response_text[e_start..].find("</div>").unwrap(); println!( "Submission failed. Error message from Primenet: {}", &response_text[e_start..e_end] ); writeback_on_failure(&mut results_file, completed_job); } else if response_text.contains("Accepted") { // Submission was accepted by Primenet - write result to results.sent.txt } else { // Unknown failure case - write failed submission back to results.txt } } } unlock_file(worktodo_lock_path).map_err(|e| { format!( "Could not remove lockfile {}. Error: {}", worktodo_lock_path.display(), e ) })?; unlock_file(results_lock_path).map_err(|e| { format!( "Could not remove lockfile {}. Error: {}", results_lock_path.display(), e ) })?; unlock_file(results_sent_lock_path).map_err(|e| { format!( "Could not remove lockfile {}. Error: {}", results_sent_lock_path.display(), e ) })?; Ok(()) } pub fn primenet_runtime(primenet_options: PrimenetOptions) -> Result<(), String> { let PrimenetOptions { credentials: (username, password), work_type, general_options: GeneralOptions { work_directory, num_cache, timeout, }, } = primenet_options; let client = ClientBuilder::default() .cookie_store(true) .build() .map_err(|e| format!("Failed to build web client. 
Error: {}", e))?; primenet_login(&client, &username, &password)?; println!("Successfully logged into Primenet."); let worktodo_txt_path = Path::new(&work_directory).join(Path::new("worktodo.txt")); let worktodo_ini_path = Path::new(&work_directory).join(Path::new("worktodo.ini")); let (worktodo_path, worktodo_lock_path) = if worktodo_txt_path.exists() { (worktodo_txt_path, worktodo_txt_path.join(".lck")) } else { (worktodo_ini_path, worktodo_ini_path.join(".lck")) }; let results_path = Path::new(&work_directory).join(Path::new("results.txt")); let results_lock_path = results_path.join(".lck"); let results_sent_path = Path::new(&work_directory).join(Path::new("results.sent")); let results_sent_lock_path = results_sent_path.join(".lck"); println!("Using worktodo path: {}", worktodo_path.display()); println!("Using worktodo_lock path: {}", worktodo_lock_path.display()); println!("Using results path: {}", results_path.display()); if timeout == 0 { primenet_request( &client, num_cache, &worktodo_path, &worktodo_lock_path, work_type, )?; primenet_submit( &client, &worktodo_path, &worktodo_lock_path, &results_path, &results_lock_path, &results_sent_path, &results_sent_lock_path, )?; } else { loop { let start = Instant::now(); if let Err(e) = primenet_request( &client, num_cache, &worktodo_path, &worktodo_lock_path, work_type, ) { println!("{}", e); } else { println!("Successfully requested and cached jobs."); } if let Err(e) = primenet_submit( &client, &worktodo_path, &worktodo_lock_path, &results_path, &results_lock_path, &results_sent_path, &results_sent_lock_path, ) { println!("{}", e); } else { println!( "Successfully submitted cached results to Primenet. Submitted results can be" ); println!("found in $WORDKDIR/results.sent until next submission."); } let sleep_duration = Duration::from_secs(timeout as u64) - start.elapsed(); sleep(sleep_duration); } } Ok(()) } pub fn primenet_cleanup(primenet_options: PrimenetOptions) {}
//! Implements `StdTtable`.

use libc;
use libc::c_void;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::marker::PhantomData;
use std::isize;
use std::cell::Cell;
use std::cmp::max;
use std::mem;
use ttable::*;
use moves::MoveDigest;

/// Represents a record in the transposition table.
///
/// Consists of a transposition table entry plus the highest 32 bits
/// of the key. The key is split into two `u16` values to allow more
/// flexible alignment.
#[derive(Copy, Clone)]
struct Record<T: TtableEntry> {
    // The highest 32 bits of the original `u64` key (see `chop_key`).
    key: (u16, u16),
    data: T,
}

/// A handle to a set of records (a bucket) in the transposition
/// table.
///
/// `R` gives records' type. Each bucket can hold up to 6 records,
/// depending on their size. A 5-bit generation number is stored for
/// each record.
struct Bucket<R> {
    // Pointer to the first record slot in the bucket.
    first: *mut R,

    // This field is laid out the following way: 30 of the bits are
    // used to store records' generation numbers (6 slots, 5 bits
    // each); 1 bit is not used; 1 bit is used as a locking flag.
    //
    // **Important note:** Because `AtomicU32` is unstable at the time
    // of writing, we use `AtomicUsize` for the `info` field. So, on
    // 64-bit platforms the `info` field may overlap with the last
    // record in the bucket. But that is OK, because we mind machine's
    // endianness and read and manipulate only the last 4 bytes of the
    // `info` field.
    info: *mut AtomicUsize,
}

/// The size of each bucket in bytes.
///
/// `64` is the most common cache line size.
const BUCKET_SIZE: usize = 64;

impl<R> Bucket<R> {
    /// Creates a new instance from a raw pointer.
    ///
    /// Acquires the bucket's spin lock; the lock is released when the
    /// returned handle is dropped (see the `Drop` impl below).
    #[inline]
    pub unsafe fn new(p: *mut c_void) -> Bucket<R> {
        // The `info` word lives in the last `usize` of the bucket.
        let byte_offset = BUCKET_SIZE - mem::size_of::<usize>();
        let info = (p.offset(byte_offset as isize) as *mut AtomicUsize)
            .as_mut()
            .unwrap();

        // Acquire the lock for the bucket.
        // Spin until the locking flag is observed clear and a CAS
        // setting it succeeds (`compare_exchange_weak` may fail
        // spuriously, hence the retry loop).
        loop {
            let old = info.load(Ordering::Relaxed);
            if old & BUCKET_LOCKING_FLAG == 0 {
                let new = old | BUCKET_LOCKING_FLAG;
                if info.compare_exchange_weak(old, new, Ordering::Acquire, Ordering::Relaxed)
                    .is_ok() {
                    break;
                }
            }
        }

        Bucket {
            first: p as *mut R,
            info: info as *mut AtomicUsize,
        }
    }

    /// Returns the number of slots in the bucket.
    //
    // 4 bytes are reserved for the generation/lock word, the rest is
    // divided among record slots.
    #[inline]
    pub fn len() -> usize {
        (BUCKET_SIZE - 4) / mem::size_of::<R>()
    }

    /// Returns a raw pointer to the record in a given slot.
    #[inline]
    pub fn get(&self, slot: usize) -> *mut R {
        assert!(slot < Self::len());
        unsafe { self.first.offset(slot as isize) }
    }

    /// Returns the generation number for a given slot.
    //
    // NOTE(review): unlike `get`, no bounds check on `slot` here —
    // callers always pass `slot < Self::len()`; confirm before reuse.
    #[inline]
    pub fn get_generation(&self, slot: usize) -> usize {
        let info = unsafe { self.info.as_mut().unwrap() };
        info.load(Ordering::Relaxed) >> GENERATION_SHIFTS[slot] & 31
    }

    /// Sets the generation number for a given slot.
    //
    // Read-modify-write is not atomic as a whole, but the bucket lock
    // held by this handle serializes access.
    #[inline]
    pub fn set_generation(&self, slot: usize, generation: usize) {
        debug_assert!(generation <= 31);
        let info = unsafe { self.info.as_mut().unwrap() };
        let mut v = info.load(Ordering::Relaxed);
        v &= GENERATION_MASKS[slot];
        v |= generation << GENERATION_SHIFTS[slot];
        info.store(v, Ordering::Relaxed);
    }
}

impl<R> Drop for Bucket<R> {
    #[inline]
    fn drop(&mut self) {
        // Release the lock for the bucket.
        let info = unsafe { self.info.as_mut().unwrap() };
        let old = info.load(Ordering::Relaxed);
        let new = old & !BUCKET_LOCKING_FLAG;
        info.store(new, Ordering::Release);
    }
}

/// Implements the `Ttable` trait.
///
/// `StdTtable` provides a generic transposition table implementation
/// that can efficiently pack in memory a wide range of transposition
/// table entry types. The only condition is that `T` has a size
/// between 6 and 16 bytes, and alignment requirements of 4 bytes or
/// less.
pub struct StdTtable<T: TtableEntry> {
    entries: PhantomData<T>,

    /// The current generation number.
    ///
    /// A generation number is assigned to each record, so as to be
    /// able to determine which records are from the current search,
    /// and which are from previous searches. Records from previous
    /// searches will be replaced before records from the current
    /// search. The generation number is always between 1 and
    /// 31. Generation `0` is reserved for empty records.
    generation: Cell<usize>,

    /// The number of buckets in the table.
    ///
    /// Each bucket can hold 3 to 6 records, depending on their size.
    /// `bucket_count` should always be a power of 2.
    bucket_count: usize,

    /// The raw pointer obtained from `libc::calloc`.
    ///
    /// This pointer will be passed to `libc::free` before the
    /// transposition table is dropped.
    alloc_ptr: *mut c_void,

    /// Optimally aligned raw pointer to the transposition table.
    ///
    /// Aligning table's buckets to machine's cache lines may in some
    /// cases improve performance.
    table_ptr: *mut c_void,
}

impl<T: TtableEntry> Ttable for StdTtable<T> {
    type Entry = T;

    fn new(size_mb: Option<usize>) -> StdTtable<T> {
        // Assert our basic premises.
        assert_eq!(mem::size_of::<c_void>(), 1);
        assert_eq!(BUCKET_SIZE, 64);
        assert!(mem::align_of::<T>() <= 4,
                format!("too restrictive transposition table entry alignment: {} bytes",
                        mem::align_of::<T>()));
        assert!(Bucket::<Record<T>>::len() >= 3,
                format!("too big transposition table entry: {} bytes", mem::size_of::<T>()));
        assert!(Bucket::<Record<T>>::len() <= 6,
                format!("too small transposition table entry: {} bytes", mem::size_of::<T>()));
        let size_mb = size_mb.unwrap_or(16);
        let bucket_count = {
            // Make sure that the number of buckets is a power of 2.
            // (Rounds *down* to the nearest power of two.)
            let n = max(1, ((size_mb * 1024 * 1024) / BUCKET_SIZE) as u64);
            1 << (63 - n.leading_zeros())
        };
        let alloc_ptr;
        let table_ptr = unsafe {
            // Make sure that the first bucket is optimally aligned.
            // One extra bucket is allocated so the pointer can be
            // rounded up to a BUCKET_SIZE boundary without running
            // past the allocation.
            alloc_ptr = libc::calloc(bucket_count + 1, BUCKET_SIZE);
            let mut addr = mem::transmute::<*mut c_void, usize>(alloc_ptr);
            addr += BUCKET_SIZE;
            addr &= !(BUCKET_SIZE - 1);
            mem::transmute::<usize, *mut c_void>(addr)
        };
        StdTtable {
            entries: PhantomData,
            generation: Cell::new(1),
            bucket_count: bucket_count,
            alloc_ptr: alloc_ptr,
            table_ptr: table_ptr,
        }
    }

    fn new_search(&self) {
        // Number of buckets sampled to estimate staleness.
        const N: usize = 128;
        loop {
            // Increment the generation number (with wrapping).
            self.generation
                .set(match self.generation.get() {
                    n @ 1...30 => n + 1,
                    31 => 1,
                    _ => unreachable!(),
                });
            debug_assert!(self.generation.get() > 0);
            debug_assert!(self.generation.get() < 32);

            // Count how many staled records from this generation
            // there are among the first `N` buckets.
            let mut staled = 0;
            for bucket in self.buckets().take(N) {
                for slot in 0..Bucket::<Record<T>>::len() {
                    if bucket.get_generation(slot) == self.generation.get() {
                        staled += 1;
                    }
                }
            }

            // If the staled records from this generation are too
            // many, we should continue to increment the generation
            // number. (This may happen if a very long search was
            // executed long time ago.)
            if staled < N {
                break;
            }
        }
    }

    #[inline]
    fn store(&self, key: u64, mut data: Self::Entry) {
        let bucket = self.bucket(key);
        let key = chop_key(key);

        // Choose a bucket slot to which to write the data.
        let mut replace_slot = 0;
        let mut replace_score = isize::MAX;
        for slot in 0..Bucket::<Record<T>>::len() {
            let record = unsafe { &mut *bucket.get(slot) };
            let generation = bucket.get_generation(slot);

            // Use this slot if it is empty.
            if generation == 0 {
                replace_slot = slot;
                break;
            }

            // Use this slot if it contains an old record for the same key.
            if record.key == key {
                if record.data.bound() == BOUND_EXACT &&
                   record.data.importance() > data.importance() {
                    // Keep the old record if we are certain that it
                    // is more important than the new one.
                    //
                    // **Note:** We do not keep old records with
                    // inexact bounds because they can be useless,
                    // regardless of their depth.
                    data = record.data;
                } else if data.move_digest() == MoveDigest::invalid() {
                    // Keep the move from the old record if the new
                    // record has no move.
                    data = data.set_move_digest(record.data.move_digest());
                }
                replace_slot = slot;
                break;
            }

            // Calculate the score for the record in this slot. The
            // replaced record will be the one with the lowest score.
            let mut score = record.data.importance() as isize;
            if generation == self.generation.get() {
                // Positions from the current generation are always
                // scored higher than positions from older generations.
                score += 1 << 16;
            };
            if score < replace_score {
                replace_slot = slot;
                replace_score = score;
            }
        }

        // Write the data to the chosen slot.
        unsafe {
            *bucket.get(replace_slot) = Record {
                key: key,
                data: data,
            };
            bucket.set_generation(replace_slot, self.generation.get());
        }
    }

    #[inline]
    fn probe(&self, key: u64) -> Option<Self::Entry> {
        let bucket = self.bucket(key);
        let key = chop_key(key);
        for slot in 0..Bucket::<Record<T>>::len() {
            if bucket.get_generation(slot) != 0 {
                let record = unsafe { &mut *bucket.get(slot) };
                if record.key == key {
                    // Refresh the record's generation on a hit so it
                    // survives replacement longer.
                    bucket.set_generation(slot, self.generation.get());
                    return Some(record.data);
                }
            }
        }
        None
    }

    fn clear(&self) {
        // Marking every slot as generation 0 (empty) is enough; the
        // record bytes themselves are left in place.
        for bucket in self.buckets() {
            for slot in 0..Bucket::<Record<T>>::len() {
                bucket.set_generation(slot, 0);
            }
        }
        self.generation.set(1);
    }
}

impl<T: TtableEntry> StdTtable<T> {
    /// Returns the bucket for a given key.
    //
    // `bucket_count` is a power of two, so the mask selects the low
    // bits of the key as the bucket index.
    #[inline]
    fn bucket(&self, key: u64) -> Bucket<Record<T>> {
        unsafe {
            let byte_offset = (key as usize & (self.bucket_count - 1)) * BUCKET_SIZE;
            Bucket::new(self.table_ptr.offset(byte_offset as isize))
        }
    }

    /// Returns an iterator over the buckets in the table.
    #[inline]
    fn buckets(&self) -> Iter<T> {
        Iter {
            entries: PhantomData,
            table_ptr: self.table_ptr,
            bucket_count: self.bucket_count,
            iterated: 0,
        }
    }
}

impl<T: TtableEntry> Drop for StdTtable<T> {
    fn drop(&mut self) {
        // Free the original (unaligned) allocation, not `table_ptr`.
        unsafe {
            libc::free(self.alloc_ptr);
        }
    }
}

unsafe impl<T: TtableEntry> Sync for StdTtable<T> {}

unsafe impl<T: TtableEntry> Send for StdTtable<T> {}

/// A helper type for `StdTtable`. It iterates over the buckets in the
/// table.
struct Iter<T: TtableEntry> {
    entries: PhantomData<T>,
    table_ptr: *mut c_void,
    bucket_count: usize,
    iterated: usize,
}

impl<T: TtableEntry> Iterator for Iter<T> {
    type Item = Bucket<Record<T>>;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        debug_assert!(self.iterated <= self.bucket_count);
        if self.iterated == self.bucket_count {
            None
        } else {
            let byte_offset = (self.iterated * BUCKET_SIZE) as isize;
            // Each yielded `Bucket` acquires that bucket's lock until
            // it is dropped.
            let bucket = unsafe { Bucket::new(self.table_ptr.offset(byte_offset)) };
            self.iterated += 1;
            Some(bucket)
        }
    }
}

/// A helper function for `StdTtable`. It takes the highest 32 bits of
/// an `u64` value and splits them into two `u16` values.
//
// NOTE(review): the (u16, u16) halves depend on machine endianness;
// that is fine because keys are only ever compared, never recombined.
#[inline]
fn chop_key(key: u64) -> (u16, u16) {
    unsafe { mem::transmute::<u32, (u16, u16)>((key >> 32) as u32) }
}

// The locking flag and the generation bit-fields live in the last 4
// bytes of the bucket, so their positions within the `usize` `info`
// word depend on pointer width and endianness.
#[cfg(any(target_pointer_width = "32", target_endian = "big"))]
const BUCKET_LOCKING_FLAG: usize = 1 << 31;

#[cfg(all(target_pointer_width = "64", target_endian = "little"))]
const BUCKET_LOCKING_FLAG: usize = 1 << 63;

#[cfg(any(target_pointer_width = "32", target_endian = "big"))]
const GENERATION_SHIFTS: [usize; 6] = [0, 5, 10, 15, 20, 25];

#[cfg(all(target_pointer_width = "64", target_endian = "little"))]
const GENERATION_SHIFTS: [usize; 6] = [32, 37, 42, 47, 52, 57];

#[cfg(any(target_pointer_width = "32", target_endian = "big"))]
const GENERATION_MASKS: [usize; 6] = [!(31 << 0),
                                      !(31 << 5),
                                      !(31 << 10),
                                      !(31 << 15),
                                      !(31 << 20),
                                      !(31 << 25)];

#[cfg(all(target_pointer_width = "64", target_endian = "little"))]
const GENERATION_MASKS: [usize; 6] = [!(31 << 32),
                                      !(31 << 37),
                                      !(31 << 42),
                                      !(31 << 47),
                                      !(31 << 52),
                                      !(31 << 57)];

#[cfg(test)]
mod tests {
    use libc;
    use super::*;
    use super::{Bucket, Record};
    use depth::*;
    use value::*;
    use moves::*;
    use stock::std_ttable_entry::*;

    #[test]
    fn bucket() {
        unsafe {
            let p = libc::calloc(1, 64);
            let b = Bucket::<Record<StdTtableEntry>>::new(p);
            assert_eq!(b.get_generation(0), 0);
            assert_eq!(b.get_generation(1), 0);
            let mut record = b.get(0).as_mut().unwrap();
            let entry = StdTtableEntry::new(0, BOUND_NONE, 10);
            *record = Record {
                key: (0, 0),
                data: entry,
            };
            b.set_generation(0, 12);
            b.set_generation(1, 13);
            assert_eq!(record.data.depth(), 10);
            assert_eq!(b.get_generation(0), 12);
            assert_eq!(b.get_generation(1), 13);
            assert_eq!(Bucket::<Record<StdTtableEntry>>::len(), 5);
            libc::free(p);
        }
    }

    #[test]
    fn bucket_endianness() {
        // Writing a record in the last slot and generations for every
        // slot must not corrupt each other (the `info` word overlaps
        // the last record on 64-bit little-endian platforms).
        unsafe {
            let p = libc::calloc(1, 64);
            let b = Bucket::<Record<StdTtableEntry>>::new(p);
            let mut record = b.get(4).as_mut().unwrap();
            let entry = StdTtableEntry::new(0, BOUND_NONE, 10);
            *record = Record {
                key: (0, 0),
                data: entry,
            };
            b.set_generation(0, 12);
            b.set_generation(1, 12);
            b.set_generation(2, 12);
            b.set_generation(3, 12);
            b.set_generation(4, 12);
            assert_eq!(record.data.static_eval(), VALUE_UNKNOWN);
            libc::free(p);
        }
    }

    #[test]
    fn store_and_probe() {
        let tt = StdTtable::<StdTtableEntry>::new(None);
        assert!(tt.probe(1).is_none());
        let data = StdTtableEntry::new(0, 0, 50);
        assert_eq!(data.depth(), 50);
        assert_eq!(data.move_digest(), MoveDigest::invalid());
        tt.store(1, data);
        assert_eq!(tt.probe(1).unwrap().depth(), 50);
        tt.store(1, StdTtableEntry::new(0, 0, 50));
        assert_eq!(tt.probe(1).unwrap().depth(), 50);
        assert_eq!(tt.probe(1).unwrap().move_digest(), MoveDigest::invalid());
        for i in 2..50 {
            tt.store(i, StdTtableEntry::new(i as i16, 0, i as Depth));
        }
        assert_eq!(tt.probe(1).unwrap().depth(), 50);
        assert_eq!(tt.probe(49).unwrap().depth(), 49);
        assert_eq!(tt.probe(48).unwrap().depth(), 48);
        assert_eq!(tt.probe(47).unwrap().depth(), 47);
        tt.clear();
        assert!(tt.probe(1).is_none());
        tt.store(1, data);
        tt.new_search();
        tt.probe(1);
        assert!(tt.probe(1).is_some());
    }

    #[test]
    fn new_search() {
        let tt = StdTtable::<StdTtableEntry>::new(None);
        assert_eq!(tt.generation.get(), 1);
        tt.new_search();
        assert_eq!(tt.generation.get(), 2);
        for _ in 3..34 {
            tt.new_search();
        }
        // 31 more increments wrap 31 -> 1 and land back on 2.
        assert_eq!(tt.generation.get(), 2);
    }
}
/// Entry point: walks through basic expressions, conditionals, loops,
/// and string mutation, printing results along the way.
fn main() {
    println!("Hello, World!!");
    another_func(10, 6);

    let x = 5;
    // A block is an expression: its tail expression (no semicolon) is
    // its value. Statements end with `;` and yield no value.
    let y = {
        let x = 3;
        x + 1
    };
    println!("The value of y is: {}", y);

    let z = five();
    println!("The value of z is: {}", z);

    let number = 3;
    // `if` is an expression too — pick the message, then print it once.
    let verdict = if number < 5 {
        "condition was true"
    } else {
        "condition was false"
    };
    println!("{}", verdict);

    // Bound with `;` because this whole `let` is a statement.
    let answer = if number < 4 { 2 } else { 6 };
    println!("The value of number is: {}", answer);

    // Countdown: equivalent to the classic `while number != 0` loop.
    for n in (1..=3).rev() {
        println!("{}!", n);
    }
    println!("LIFTOFF!!!");

    let readings = [12, 23, 34, 45, 56, 67];
    // `.rev()` walks the array back to front.
    for element in readings.iter().rev() {
        println!("the value is: {}", element);
    }

    // Same countdown as `for down in (1..6).rev()`, written as a while.
    let mut down = 5;
    while down >= 1 {
        println!("{}", down);
        down -= 1;
    }
    println!("LIFTOFF!!!");

    ///////////////////////////////////
    let mut greeting = String::from("Hello");
    greeting += ", World";
    println!("{}", greeting);
}

/// Prints both of its arguments, one per line.
fn another_func(x: i32, y: i32) {
    for v in &[x, y] {
        println!("Another function here!!!: {}", v);
    }
}

/// Always returns five (demonstrates an expression-bodied function).
fn five() -> i32 {
    2 + 3
}

/// Returns its argument plus one.
fn plus_one(x: i32) -> i32 {
    1 + x
}
use pcap::*; fn main() { let name = pcap_lookupdev().unwrap(); match pcap_open_live(&name, 1000, 1, 1000) { Ok(handle) => { while let res = pcap_next_ex(&handle) { match res { Ok(packet) => { println!("{:#?}", packet) } Err(err) => { println!("{}", err) } } } pcap_close(&handle) } Err(err) => { println!("{}", err) } } }
// svd2rust-style generated accessor for the read-only `DIR`
// (Debug Information) register: two 13-bit threshold fields.
#[doc = "Register `DIR` reader"]
pub type R = crate::R<DIR_SPEC>;
#[doc = "Field `THI` reader - Threshold HIGH"]
pub type THI_R = crate::FieldReader<u16>;
#[doc = "Field `TLO` reader - Threshold LOW"]
pub type TLO_R = crate::FieldReader<u16>;
impl R {
    #[doc = "Bits 0:12 - Threshold HIGH"]
    #[inline(always)]
    pub fn thi(&self) -> THI_R {
        // 0x1fff keeps the low 13 bits (bits 0..=12).
        THI_R::new((self.bits & 0x1fff) as u16)
    }
    #[doc = "Bits 16:28 - Threshold LOW"]
    #[inline(always)]
    pub fn tlo(&self) -> TLO_R {
        // Shift bits 16..=28 down to bit 0, then mask to 13 bits.
        TLO_R::new(((self.bits >> 16) & 0x1fff) as u16)
    }
}
#[doc = "Debug Information register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dir::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct DIR_SPEC;
impl crate::RegisterSpec for DIR_SPEC {
    // The register is 32 bits wide.
    type Ux = u32;
}
#[doc = "`read()` method returns [`dir::R`](R) reader structure"]
impl crate::Readable for DIR_SPEC {}
#[doc = "`reset()` method sets DIR to value 0"]
impl crate::Resettable for DIR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
// use crate::layout::Layout; extern crate yewapp; use yewapp::pages::{webgpu::WebGPUPage, todo::TodoPage}; use yew::prelude::*; use yew_router::prelude::*; #[derive(Debug, Clone, Copy, PartialEq, Routable)] pub enum MyRoute { #[at("/")] Home, #[at("/web-gpu")] WebGpu, #[at("/todo")] Todo, #[not_found] #[at("/404")] NotFound, } pub struct Route { _link: ComponentLink<Self>, } impl Component for Route { type Message = (); type Properties = (); fn create(_props: Self::Properties, _link: ComponentLink<Self>) -> Self { Self { _link } } fn update(&mut self, _msg: Self::Message) -> ShouldRender { false } fn change(&mut self, _props: Self::Properties) -> ShouldRender { false } fn view(&self) -> Html { html! { // <Layout> <Router<MyRoute> render=Router::render(switch) /> // </Layout> } } } fn switch(routes: &MyRoute) -> Html { match routes { MyRoute::Home => { html! {<div>{"home"}</div>} } MyRoute::WebGpu => { html! { <WebGPUPage/> } } MyRoute::Todo => { html! { <TodoPage /> } } _ => { html! {<div>{"404 Not Found"}</div>} } } }
use std::io;
use std::cmp::Ordering;
use rand::Rng;
use std::fmt;
use std::fmt::Formatter;
use std::collections::HashMap;
use std::io::Read;

/// Interactive number-guessing game: reads guesses from stdin and
/// loops until the secret number is hit. Blocks on stdin.
fn _guessing_game(){
    println!("Guess the number between 1 and 100");
    let secret_number = rand::thread_rng().gen_range(1..101);
    loop{
        println!("Please input your guess:\n");
        let mut guess = String::new();
        io::stdin().read_line(&mut guess).expect("Failed to read line!");
        //let guess: u32 = guess.trim().parse().expect("Please type a number!");
        // Non-numeric input silently restarts the loop.
        let guess: u32 = match guess.trim().parse(){
            Ok(num) => num,
            Err(_) => continue,
        };
        match guess.cmp(&secret_number){
            Ordering::Less => println!("Too small!\n"),
            Ordering::Greater => println!("Too big!\n"),
            Ordering::Equal => {
                println!("Das right!");
                break;
            }
        }
    }
}

/// Demonstrates rebinding (shadowing) the same name with new values.
fn _shadowing(){
    let x = 5;
    println!("x = {}", x);
    let x = x + 1;
    println!("x = {}", x);
    let x = x * 2;
    println!("x = {}", x);
    let x = 69;
    println!("x = {}", x);
}

/// Demonstrates tuple destructuring/indexing and fixed-size arrays.
fn _tuples_and_arrays(){
    //tuples are fixed size/length collections of possibly different types
    let tup = (420, 69, 6.10);
    let (x, y, z) = tup;
    println!("x: {}, y: {}, z:{}", x, y, z);
    let ex: (i32, u8, f64) = (500, 1, 6.4);
    let five_hundo = ex.0;
    println!("{}", five_hundo);
    let months = ["January", "February", "March", "April", "May", "June", "July",
                  "August", "September", "October", "November", "December"];
    let array: [f64; 4] = [1.1, 2.2, 3.3, 4.4];
    let same_value = [3; 5]; // [value, size]
    for element in same_value.iter(){
        println!("same_value: {}", element);
    }
}

/// Returns the sum of the two arguments (expression-tail return).
fn _addition(x: i32, y: i32) -> i32{
    //notice lack of semicolon, semicolon would make it a statement instead of expression
    x + y //same as "return x + y;"
}

/// Demonstrates `loop` with a break value and `for` over a range/array.
fn _looping(){
    let mut counter = 0;
    let result = loop{
        counter += 1;
        if counter == 10{
            break counter * 2;
        }
    };
    println!("the result is: {}", result); //20
    /*
    let mut number = 3;
    while number != 0{
        println!("{}!", number);
        number -= 1;
    }
    println!("LIFTOFF!");*/
    for number in (1..4).rev(){
        println!("{}!", number);
    }
    println!("LIFTOFF!");
    let a = [10, 20, 30, 40, 50];
    for element in a.iter(){
        println!("the value is {}", element);
    }
}

/// Demonstrates byte-index string slices.
fn _string_slicing(){
    let s = String::from("Hello World!");
    let hello = &s[0..5]; //&s[..5]; works as well (no need for the first 0)
    let world = &s[6..12];//&s[6..]; works here too!
    println!("{} {}", hello, world);
}

// Simple width × height rectangle used by the struct examples below.
#[derive(Debug)]
struct Rectangle{
    width: u32,
    height: u32,
}

//related functions
impl Rectangle {
    // Associated constructor (no `self`), called as `Rectangle::new`.
    fn new(width: u32, height: u32) -> Rectangle {
        Rectangle {
            width,
            height,
        }
    }
}

//methods
impl Rectangle {
    // Area in the same units as the sides.
    fn area(&self) -> u32{
        self.width * self.height
    }
    // Prints dimensions and area on one line.
    fn show(&self){
        println!("{}x{} with area {}", self.width, self.height, self.area());
    }
}

impl fmt::Display for Rectangle{
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        write!(f, "({}, {}) and Area: {}", self.width, self.height, self.area())
    }
}

/// Demonstrates struct construction, Debug printing and Display.
fn _struct_example(){
    let r = Rectangle{
        width: 32,
        height: 64,
    };
    let r2 = Rectangle::new(128, 128);
    r.show();
    r2.show();
    //#[derive(Debug)] gives access to printing structs with debug "{:?} or pretty debug "{:#?}"
    println!("{:?}", r);
    println!("{:#?}", r2);
    //impl Display trait gives access to println!ing struct without debug
    println!("{}", r);
}

/// Demonstrates match guards on a tuple pattern.
fn _match_example(){
    let pair = (4, -5);
    match pair{
        (x, y) if x == y => println!("Equal"),
        (x, y) if x + y == 0 => println!("Equal 0"),
        (x, _) if x % 2 == 0 => println!("X is even"),
        _ => println!("no match"),
    }
}

// Enum with struct-like, tuple and single-value variants.
#[derive(Debug)]
enum Shape{
    Rectangle {width: u32, height: u32},
    Square(u32),
    Circle(f64),
}

impl Shape{
    // Area of any variant, widened to f64.
    fn area(&self) -> f64{
        match *self{
            Shape::Rectangle {width, height} => (width * height) as f64,
            Shape::Square(ref s) => (s * s) as f64,
            Shape::Circle(ref r) => 3.14 * (r * r),
        }
    }
}

// enum Option<T>{
//     Some(T),
//     None,
// }

/// Divides `x` by `y`, returning `None` for division by zero.
fn division(x: f64, y: f64) -> Option<f64>{
    if y == 0.0 {
        None
    } else {
        Some(x / y)
    }
}

/// Demonstrates enum variants and handling an `Option` result.
fn _enums_and_options(){
    let r = Shape::Rectangle {width: 10, height: 70};
    let s = Shape::Square(10);
    let c = Shape::Circle(4.5);
    // println!("{}", r.area());
    // println!("{}", s.area());
    //
    println!("{}", c.area());
    let result = division(5.0, 7.0);
    match result{
        Some(x) => println!("{}", x),
        None => println!("cannot divide by 0!"),
    }
}

/// Demonstrates `vec!`, push/iterate, and vectors of enum values.
fn _vectors(){
    let x = vec![1, 2, 3, 4];
    let mut v: Vec<i32> = Vec::new();
    v.push(5);
    v.push(6);
    v.push(7);
    v.push(8);
    v.push(9);
    for i in &v{
        println!("{}", i);
    }
    //debug print
    println!("{:?} {} {}", &v, v.len(), v.capacity());
    let enum_vector = vec![
        Shape::Circle(4.2),
        Shape::Square(69),
        Shape::Rectangle {width: 6, height: 10},
    ];
    println!("{:?}", &enum_vector);
}

/// Demonstrates HashMap insert, iteration and lookup.
fn _hash_maps(){
    // NOTE(review): this local import is unused — HashMap comes from
    // the top-of-file `use std::collections::HashMap;`.
    use std::collections::hash_map;
    let mut hm = HashMap::new();
    hm.insert(String::from("random"), 12);
    hm.insert(String::from("strings"), 49);
    for (k, v) in &hm{
        println!("{}: {}", k, v);
    }
    match hm.get(&String::from("random")){
        Some(&n) => println!("{}", n),
        None => println!("no match"),
    }
}

/// Demonstrates matching on a `Result`; panics if text.txt is absent.
fn _result_example(){
    use std::fs::File;
    let f = File::open("text.txt");
    let f = match f{
        Ok(file) => file,
        Err(error) => {
            panic!("There was a problem opening file: {:#?}", error)
        },
    };
}

// Entry point: runs the guessing game; the other exercises are left
// commented out for manual experimentation.
fn main() {
    _guessing_game();
    //shadowing();
    //tuples_and_arrays();
    //println!("5 + 6 = {}", addition(5, 6));
    //looping();
    //string_slicing();
    //_struct_example();
    //_match_example();
    //_enums_and_options();
    //_vectors();
    //_hash_maps();
    //_result_example();
}
use demo; use http::StatusCode; use now_lambda::{error::NowError, lambda, IntoResponse, Request, Response}; use std::error::Error; fn main() -> Result<(), Box<dyn Error>> { Ok(lambda!(handler)) } fn handler(_request: Request) -> Result<impl IntoResponse, NowError> { let message = match demo::connect() { Err(error) => error.to_string(), Ok(db) => match demo::users::find(&db, 1) { Err(error) => error.to_string(), Ok(user) => user.email, }, }; Response::builder() .status(StatusCode::OK) .body(message.to_string()) .map_err(|error| NowError::from(error)) }
// Copyright 2019 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//

// Tests for the URI percent-escape/unescape helpers defined elsewhere
// in this crate (`escape_uri`, `unescape_uri`, `skip_slashes`, the
// prefix-matching `starts_with`, and the `to_cow`/`try_to_cow` sinks).

use super::*;
use crate::*;

// With `skip_slashes`, %2F is NOT decoded to '/', so an escaped slash
// still matches a literal "%2F" in the needle.
#[test]
fn escaped_path_starts_with_1() {
    let s = "bl%C3%A5b%C3%A6r/%2F/syltet%C3%B8y/and/on/and/on";
    assert_eq!(
        s.unescape_uri()
            .skip_slashes()
            .starts_with("blåbær/%2F/syltetøy"),
        Some("bl%C3%A5b%C3%A6r/%2F/syltet%C3%B8y".len())
    );
}

// A leading '/' in the haystack defeats the match.
#[test]
fn escaped_path_starts_with_2() {
    let s = "/bl%C3%A5b%C3%A6r/%2F/syltet%C3%B8y/and/on/and/on";
    assert_eq!(
        s.unescape_uri()
            .skip_slashes()
            .starts_with("blåbær/%2F/syltetøy"),
        None
    );
}

#[test]
fn escaped_path_starts_with_3() {
    let s = "/1/2/3/";
    assert_eq!(s.unescape_uri().skip_slashes().starts_with("/1/"), Some(3));
}

#[test]
fn escaped_path_starts_with_4() {
    let s = "/1/";
    assert_eq!(s.unescape_uri().skip_slashes().starts_with("/1/"), Some(3));
}

// Without `skip_slashes`, %2F decodes to '/', so a literal "%2F"
// needle no longer matches...
#[test]
fn escaped_starts_with_0() {
    let s = "bl%C3%A5b%C3%A6r/%2F/syltet%C3%B8y/and/on/and/on";
    assert_eq!(s.unescape_uri().starts_with("blåbær/%2F/syltetøy"), None);
}

// ...but a needle with the decoded slashes does.
#[test]
fn escaped_starts_with_1() {
    let s = "bl%C3%A5b%C3%A6r/%2F/syltet%C3%B8y/and/on/and/on";
    assert_eq!(
        s.unescape_uri().starts_with("blåbær///syltetøy"),
        Some("bl%C3%A5b%C3%A6r/%2F/syltet%C3%B8y".len())
    );
}

#[test]
fn escaped_starts_with_2() {
    let s = "/bl%C3%A5b%C3%A6r/%2F/syltet%C3%B8y/and/on/and/on";
    assert_eq!(s.unescape_uri().starts_with("blåbær///syltetøy"), None);
}

#[test]
fn escaped_starts_with_3() {
    let s = "/1/2/3/";
    assert_eq!(s.unescape_uri().starts_with("/1/"), Some(3));
}

#[test]
fn escaped_starts_with_4() {
    let s = "/1/";
    assert_eq!(s.unescape_uri().starts_with("/1/"), Some(3));
}

// `to_cow` should borrow when no escaping is needed...
#[test]
fn escape_uri_cow_1() {
    let s = "needs-no-escaping";
    let cow = s.escape_uri().to_cow();
    assert_eq!(cow, s);
}

// ...and produce a new escaped string when it is.
#[test]
fn escape_uri_cow_2() {
    let s = "needs escaping";
    let cow = s.escape_uri().to_cow();
    assert_ne!(cow, s);
    assert_eq!(cow, "needs%20escaping");
}

#[test]
fn unescape_uri_cow_1() {
    let s = "needs-no-unescaping";
    let cow = s.unescape_uri().to_cow();
    assert_eq!(cow, s);
}

#[test]
fn unescape_uri_cow_2() {
    let s = "needs%20unescaping";
    let cow = s.unescape_uri().to_cow();
    assert_ne!(cow, s);
    assert_eq!(cow, "needs unescaping");
}

#[test]
fn unescape_uri_path_cow_1() {
    let s = "needs/no/unescaping";
    let cow = s.unescape_uri().skip_slashes().to_cow();
    assert_eq!(cow, s);
}

// `skip_slashes` leaves %2F intact while decoding %20.
#[test]
fn unescape_uri_path_cow_2() {
    let s = "this/%20does%20/need%2Funescaping";
    let cow = s.unescape_uri().skip_slashes().to_cow();
    assert_ne!(cow, s);
    assert_eq!(cow, "this/ does /need%2Funescaping");
}

#[test]
fn try_unescape_uri_cow_1() {
    let s = "needs-no-unescaping";
    let cow = s.unescape_uri().try_to_cow();
    assert_eq!(cow, Ok(Cow::from(s)));
}

#[test]
fn try_unescape_uri_cow_2() {
    let s = "needs%20unescaping";
    let cow = s.unescape_uri().try_to_cow();
    assert_ne!(cow, Ok(Cow::from(s)));
    assert_eq!(cow, Ok(Cow::from("needs unescaping")));
}

// %10 is an ASCII control character: `try_to_cow` reports the byte
// index of the offending escape.
#[test]
fn try_unescape_uri_cow_3() {
    let s = "bad%10escaping";
    let cow = s.unescape_uri().try_to_cow();
    assert_eq!(cow.unwrap_err().index, 6);
}

// Asserts that escaping $UNESCAPED yields $ESCAPED and unescaping
// $ESCAPED yields $UNESCAPED (a round-trip pair).
macro_rules! test_escape_unescape {
    ( $NAME:ident, $UNESCAPED:expr, $ESCAPED:expr ) => {
        #[test]
        fn $NAME() {
            assert_eq!(
                &$UNESCAPED.escape_uri().to_string(),
                $ESCAPED,
                "Failed on escape_uri().to_string()"
            );
            assert_eq!(
                &$ESCAPED.unescape_uri().to_string(),
                $UNESCAPED,
                "Failed on unescape_uri().to_string()"
            );
        }
    };
}

// One-way check: lossily unescaping $ESCAPED (possibly malformed)
// yields $UNESCAPED; no round-trip is expected for garbage input.
macro_rules! test_unescape_garbage {
    ( $NAME:ident, $UNESCAPED:expr, $ESCAPED:expr ) => {
        #[test]
        fn $NAME() {
            let escaped = $ESCAPED;
            assert_eq!(
                &escaped.unescape_uri().to_string(),
                $UNESCAPED,
                "Failed on uri_unescape_to_string_lossy({:?})",
                escaped
            );
        }
    };
}

test_escape_unescape!(test_ascii_1, "a-simple-test", "a-simple-test");
test_escape_unescape!(test_ascii_2, "a?simple?test", "a%3Fsimple%3Ftest");
test_escape_unescape!(test_ascii_3, "\u{20AC}", "%E2%82%AC");
test_escape_unescape!(
    test_ascii_4,
    "blåbærsyltetøy",
    "bl%C3%A5b%C3%A6rsyltet%C3%B8y"
);
test_escape_unescape!(test_ascii_5, "f/scen?create", "f%2Fscen%3Fcreate");

// Control characters and broken UTF-8 sequences are replaced with
// control pictures / U+FFFD replacement characters.
test_unescape_garbage!(ascii_control_percent_escape, "␀␁␂␃␄", "%00%01%02%03%04");
test_unescape_garbage!(bad_utf8_spaces, "� � �", "%E2 %82 %AC");
test_unescape_garbage!(bad_utf8_3b, "�", "%E2%F2%AC");
test_unescape_garbage!(truncated_utf8_1, "fan�say", "fan%E2%8say");
test_unescape_garbage!(truncated_utf8_2, "fan�say", "fan%E2%82say");
test_unescape_garbage!(truncated_utf8_3, "fan�say", "fan%E2%82%say");
test_unescape_garbage!(bad_percent_escape, "bloat%1zface", "bloat%1zface");
use crate::{
    Affine2, Affine3A, DAffine2, DAffine3, DMat2, DMat3, DMat4, DQuat, DVec2, DVec3, DVec4,
    I64Vec2, I64Vec3, I64Vec4, IVec2, IVec3, IVec4, Mat2, Mat3, Mat3A, Mat4, Quat, U64Vec2,
    U64Vec3, U64Vec4, UVec2, UVec3, UVec4, Vec2, Vec3, Vec3A, Vec4,
};
use bytemuck::{AnyBitPattern, Pod, Zeroable};

// SAFETY notes applying to the impls below:
// - `Pod` is implemented for the scalar-math types, which is sound only if
//   they are tightly packed `f32`/`f64`/integer fields with no padding
//   bytes — established by the type definitions elsewhere in this crate.
// - The SIMD-aligned types (`Affine3A`, `Mat3A`, `Vec3A`) get only
//   `AnyBitPattern`, not `Pod` — presumably because their extra alignment
//   introduces padding, which `Pod` forbids; TODO confirm against the
//   corresponding type definitions.
// - `Zeroable` is sound for all of them: the all-zero bit pattern is a
//   valid value of every field type.

unsafe impl Pod for Affine2 {}
unsafe impl Zeroable for Affine2 {}

unsafe impl AnyBitPattern for Affine3A {}
unsafe impl Zeroable for Affine3A {}

unsafe impl Pod for Mat2 {}
unsafe impl Zeroable for Mat2 {}

unsafe impl Pod for Mat3 {}
unsafe impl Zeroable for Mat3 {}

unsafe impl AnyBitPattern for Mat3A {}
unsafe impl Zeroable for Mat3A {}

unsafe impl Pod for Mat4 {}
unsafe impl Zeroable for Mat4 {}

unsafe impl Pod for Quat {}
unsafe impl Zeroable for Quat {}

unsafe impl Pod for Vec2 {}
unsafe impl Zeroable for Vec2 {}

unsafe impl Pod for Vec3 {}
unsafe impl Zeroable for Vec3 {}

unsafe impl AnyBitPattern for Vec3A {}
unsafe impl Zeroable for Vec3A {}

unsafe impl Pod for Vec4 {}
unsafe impl Zeroable for Vec4 {}

unsafe impl Pod for DAffine2 {}
unsafe impl Zeroable for DAffine2 {}

unsafe impl Pod for DAffine3 {}
unsafe impl Zeroable for DAffine3 {}

unsafe impl Pod for DMat2 {}
unsafe impl Zeroable for DMat2 {}

unsafe impl Pod for DMat3 {}
unsafe impl Zeroable for DMat3 {}

unsafe impl Pod for DMat4 {}
unsafe impl Zeroable for DMat4 {}

unsafe impl Pod for DQuat {}
unsafe impl Zeroable for DQuat {}

unsafe impl Pod for DVec2 {}
unsafe impl Zeroable for DVec2 {}

unsafe impl Pod for DVec3 {}
unsafe impl Zeroable for DVec3 {}

unsafe impl Pod for DVec4 {}
unsafe impl Zeroable for DVec4 {}

unsafe impl Pod for IVec2 {}
unsafe impl Zeroable for IVec2 {}

unsafe impl Pod for IVec3 {}
unsafe impl Zeroable for IVec3 {}

unsafe impl Pod for IVec4 {}
unsafe impl Zeroable for IVec4 {}

unsafe impl Pod for UVec2 {}
unsafe impl Zeroable for UVec2 {}

unsafe impl Pod for UVec3 {}
unsafe impl Zeroable for UVec3 {}

unsafe impl Pod for UVec4 {}
unsafe impl Zeroable for UVec4 {}

unsafe impl Pod for I64Vec2 {}
unsafe impl Zeroable for I64Vec2 {}

unsafe impl Pod for I64Vec3 {}
unsafe impl Zeroable for I64Vec3 {}

unsafe impl Pod for I64Vec4 {}
unsafe impl Zeroable for I64Vec4 {}

unsafe impl Pod for U64Vec2 {}
unsafe impl Zeroable for U64Vec2 {}

unsafe impl Pod for U64Vec3 {}
unsafe impl Zeroable for U64Vec3 {}

unsafe impl Pod for U64Vec4 {}
unsafe impl Zeroable for U64Vec4 {}

#[cfg(test)]
mod test {
    use crate::{
        Affine2, Affine3A, DAffine2, DAffine3, DMat2, DMat3, DMat4, DQuat, DVec2, DVec3, DVec4,
        I64Vec2, I64Vec3, I64Vec4, IVec2, IVec3, IVec4, Mat2, Mat3, Mat3A, Mat4, Quat, U64Vec2,
        U64Vec3, U64Vec4, UVec2, UVec3, UVec4, Vec2, Vec3, Vec3A, Vec4,
    };
    use core::mem;

    // For `Pod` types: `bytes_of` must be a zero-copy reinterpretation —
    // same address and same size as the value itself.
    macro_rules! test_pod_t {
        ($name:ident, $t:ty) => {
            #[test]
            fn $name() {
                let t = <$t>::default();
                let b = bytemuck::bytes_of(&t);
                // should be the same address
                assert_eq!(&t as *const $t as usize, b.as_ptr() as usize);
                // should be the same size
                assert_eq!(b.len(), mem::size_of_val(&t));
            }
        };
    }

    // For `AnyBitPattern` types: casting an all-zero byte array must yield
    // the type's ZERO constant.
    macro_rules! test_any_bit_pattern_t {
        ($name:ident, $t:ident) => {
            #[test]
            fn $name() {
                let b = [0_u8; mem::size_of::<$t>()];
                let t: $t = bytemuck::cast(b);
                // should be the same size
                assert_eq!(b.len(), mem::size_of_val(&t));
                // should be zero
                assert_eq!(t, $t::ZERO);
            }
        };
    }

    test_pod_t!(affine2, Affine2);
    test_any_bit_pattern_t!(affine3a, Affine3A);
    test_pod_t!(mat2, Mat2);
    test_pod_t!(mat3, Mat3);
    test_any_bit_pattern_t!(mat3a, Mat3A);
    test_pod_t!(mat4, Mat4);
    test_pod_t!(quat, Quat);
    test_pod_t!(vec2, Vec2);
    test_pod_t!(vec3, Vec3);
    test_any_bit_pattern_t!(vec3a, Vec3A);
    test_pod_t!(vec4, Vec4);
    test_pod_t!(daffine2, DAffine2);
    test_pod_t!(daffine3, DAffine3);
    test_pod_t!(dmat2, DMat2);
    test_pod_t!(dmat3, DMat3);
    test_pod_t!(dmat4, DMat4);
    test_pod_t!(dquat, DQuat);
    test_pod_t!(dvec2, DVec2);
    test_pod_t!(dvec3, DVec3);
    test_pod_t!(dvec4, DVec4);
    test_pod_t!(ivec2, IVec2);
    test_pod_t!(ivec3, IVec3);
    test_pod_t!(ivec4, IVec4);
    test_pod_t!(uvec2, UVec2);
    test_pod_t!(uvec3, UVec3);
    test_pod_t!(uvec4, UVec4);
    test_pod_t!(i64vec2, I64Vec2);
    test_pod_t!(i64vec3, I64Vec3);
    test_pod_t!(i64vec4, I64Vec4);
    test_pod_t!(u64vec2, U64Vec2);
    test_pod_t!(u64vec3, U64Vec3);
    test_pod_t!(u64vec4, U64Vec4);
}
mod qr;
mod roms;
mod transform;

use embedded_graphics::{
    geometry::Size, prelude::*, primitives::PrimitiveStyleBuilder, primitives::Rectangle,
};
use embedded_hal::prelude::*;
use epd_waveshare::color::OctColor;
use epd_waveshare::graphics::OctDisplay;
use epd_waveshare::{epd5in65f::*, prelude::*};
use rand::seq::SliceRandom;

mod octimage;

use anyhow::{anyhow, Result};
use display::create;
use image::imageops::FilterType;
use image::io::Reader as ImageReader;
use image::DynamicImage;
use image::GenericImageView;
use image::ImageBuffer;
use imageproc::geometric_transformations::*;
use octimage::OctDither;
use qr::QrCode;
use rand::Rng;
use roms::{get_frames, Config, GameboyImage};
use std::path::PathBuf;
use transform::{Transform, Transformable};

// The full palette of the 7-color (plus HiZ) ACeP e-paper panel.
static COLORS: [OctColor; 8] = [
    OctColor::HiZ,
    OctColor::White,
    OctColor::Black,
    OctColor::Red,
    OctColor::Green,
    OctColor::Orange,
    OctColor::Blue,
    OctColor::Yellow,
];

// Compile-time backend selection: real SPI e-ink hardware vs. a desktop
// simulator; exactly one of the two features must be enabled.
cfg_if::cfg_if! {
    if #[cfg(feature="spi")] {
        mod eink;
        use eink as display;
    } else if #[cfg(feature="sim")] {
        mod sim;
        use sim as display;
    } else {
        compile_error!("Wrong feature");
    }
}

/// Draws a test pattern: full-height vertical bars cycling through the
/// palette (rotated left by `offset`) plus a "Hello Rust!" caption.
fn bars<DISP, E>(display: &mut DISP, offset: usize)
where
    DISP: OctDisplay<Error = E>,
    E: std::fmt::Debug,
{
    let width = WIDTH as usize / COLORS.len();
    // `cycle().skip(offset).take(len)` rotates the palette, so successive
    // calls with increasing `offset` shift the bars by one color.
    for (c, l) in COLORS
        .iter()
        .cycle()
        .skip(offset)
        .take(COLORS.len())
        .zip((0..WIDTH as i32).step_by(width))
    {
        let style = PrimitiveStyleBuilder::new()
            .stroke_color(*c)
            .stroke_width(3)
            .fill_color(*c)
            .build();
        Rectangle::new(
            Point::new(l, 0),
            Size {
                width: width as u32,
                height: HEIGHT as u32,
            },
        )
        .into_styled(style)
        .draw(display)
        .expect("Valid rect");
    }
    {
        use embedded_graphics::{
            mono_font::iso_8859_16::FONT_10X20, mono_font::MonoTextStyle, prelude::*,
            text::{Text, TextStyleBuilder},
        };
        let character_style = MonoTextStyle::new(&FONT_10X20, OctColor::White);
        // Create a new text style
        let text_style = TextStyleBuilder::new().build();
        // Create a text at position (20, 30) and draw it using the previously defined style
        Text::with_text_style(
            "Hello Rust!",
            Point::new(WIDTH as i32 / 7 * 3, HEIGHT as i32 / 2),
            character_style,
            text_style,
        )
        .draw(display)
        .expect("Wrote Text");
    }
}

/// Composites a rendered game `screen` into a photo of a Gameboy shell:
/// projects the screen rectangle onto the shell photo's screen quad
/// (`img.screen`) and overlays the warped result on the shell.
fn place(
    img: &GameboyImage,
    screen: &ImageBuffer<image::Rgba<u8>, Vec<u8>>,
) -> ImageBuffer<image::Rgba<u8>, Vec<u8>> {
    use imageproc::geometric_transformations::*;
    let mut gb = ImageReader::open(&img.path)
        .unwrap()
        .decode()
        .unwrap()
        .to_rgba8();
    let (x, y) = screen.dimensions();
    let (x, y) = (x as f32, y as f32);
    // Source control points: the four corners of the screen image.
    let dim = [(0.0, 0.0), (x, 0.0), (x, y), (0.0, y)];
    let proj = Projection::from_control_points(dim, img.screen).unwrap();
    let screen = DynamicImage::ImageRgba8(screen.clone()).to_rgba8();
    let mut gb_scratch = gb.clone();
    // Fully transparent fill: only the projected screen pixels land when
    // overlaying the scratch buffer back onto the shell photo.
    warp_into(
        &screen,
        &proj,
        Interpolation::Bicubic,
        image::Rgba([0, 0, 0, 0]),
        &mut gb_scratch,
    );
    image::imageops::overlay(&mut gb, &gb_scratch, 0, 0);
    gb
}

use std::marker::PhantomData;

/// Owns the e-paper device, its SPI bus, the delay source and the off-screen
/// framebuffer, plus a counter of frames drawn since the last full wipe.
struct Controller<SPI, CS, BUSY, DC, RST, DELAY, DISP>
where
    DISP: WaveshareDisplay<SPI, CS, BUSY, DC, RST, DELAY, DisplayColor = OctColor>,
    SPI: Write<u8>,
    CS: OutputPin,
    BUSY: InputPin,
    DC: OutputPin,
    RST: OutputPin,
    DELAY: DelayMs<u8>,
    <SPI as _embedded_hal_blocking_spi_Write<u8>>::Error: std::fmt::Debug,
{
    display: Display5in65f,
    epd: DISP,
    spi: SPI,
    pub delay: DELAY,
    frames_since_clear: usize,
    // The pin types only appear in the `WaveshareDisplay` bound; PhantomData
    // keeps the type parameters used.
    _phantom: PhantomData<(RST, CS, DC, BUSY)>,
}

use embedded_hal::{
    blocking::delay::*,
    blocking::spi::Write,
    digital::v2::{InputPin, OutputPin},
};

impl<SPI, CS, BUSY, DC, RST, DELAY, DISP> Controller<SPI, CS, BUSY, DC, RST, DELAY, DISP>
where
    DISP: WaveshareDisplay<SPI, CS, BUSY, DC, RST, DELAY, DisplayColor = OctColor>,
    SPI: Write<u8>,
    CS: OutputPin,
    BUSY: InputPin,
    DC: OutputPin,
    RST: OutputPin,
    DELAY: DelayMs<u8>,
    <SPI as _embedded_hal_blocking_spi_Write<u8>>::Error:
        std::error::Error + Send + Sync + 'static,
{
    /// Wraps an initialized panel and starts from a freshly wiped screen.
    fn new(epd: DISP, spi: SPI, delay: DELAY) -> Result<Self> {
        let mut display = Display5in65f::default();
        display.set_rotation(DisplayRotation::Rotate270);
        let mut new = Self {
            display,
            epd,
            spi,
            delay,
            frames_since_clear: 0,
            _phantom: PhantomData,
        };
        new.wipe()?;
        Ok(new)
    }

    /// Clears the physical panel and resets the frames-since-clear counter.
    fn wipe(&mut self) -> Result<()> {
        self.epd.set_background_color(OctColor::HiZ);
        self.epd.clear_frame(&mut self.spi, &mut self.delay)?;
        self.frames_since_clear = 0;
        Ok(())
    }

    /// Lets `f` paint into the framebuffer, then pushes it to the panel.
    /// After 10 frames the panel is wiped first — presumably to counter
    /// e-paper ghosting; confirm against the panel datasheet.
    fn draw<F>(&mut self, f: F) -> Result<()>
    where
        F: FnOnce(&mut Display5in65f) -> Result<()>,
    {
        self.frames_since_clear += 1;
        if self.frames_since_clear > 10 {
            self.wipe()?;
            self.frames_since_clear = 0;
        }
        self.epd.set_background_color(OctColor::White);
        f(&mut self.display)?;
        self.epd
            .update_and_display_frame(&mut self.spi, self.display.buffer(), &mut self.delay)?;
        Ok(())
    }
}

impl<SPI, CS, BUSY, DC, RST, DELAY, DISP> Drop for Controller<SPI, CS, BUSY, DC, RST, DELAY, DISP>
where
    DISP: WaveshareDisplay<SPI, CS, BUSY, DC, RST, DELAY, DisplayColor = OctColor>,
    SPI: Write<u8>,
    CS: OutputPin,
    BUSY: InputPin,
    DC: OutputPin,
    RST: OutputPin,
    DELAY: DelayMs<u8>,
    <SPI as _embedded_hal_blocking_spi_Write<u8>>::Error: std::fmt::Debug,
{
    // Put the panel to sleep whenever the controller goes away, however
    // the program exits.
    fn drop(&mut self) {
        self.epd
            .sleep(&mut self.spi, &mut self.delay)
            .expect("Couldn't sleep device");
    }
}

/// Optional web server that exposes the generated PNGs under /gameboy.
#[cfg(feature = "web")]
#[rocket::main]
async fn rocket() {
    println!("Rocket Launching");
    rocket::build()
        .mount("/gameboy", rocket::fs::FileServer::from("gameboy"))
        .launch()
        .await;
}

/// Entry point: shows a bar test pattern, then loops forever picking a
/// random ROM, rendering emulator frames into Gameboy-shell photos,
/// collaging them with random transforms, dithering to the 8-color
/// palette, saving a PNG and drawing it (plus a QR link) on the panel.
fn main() -> Result<()> {
    let path = PathBuf::from("gameboy");
    if !path.exists() {
        std::fs::create_dir(&path).expect("Directory created");
    }
    let host = gethostname::gethostname();
    let port = 7777;
    // NOTE(review): `child` is never joined; the web server thread is
    // intentionally detached for the lifetime of the process.
    #[cfg(feature = "web")]
    let child = std::thread::spawn(move || {
        println!("Rocket Launching");
        rocket();
    });
    println!("Roms searching!");
    let toml_path = PathBuf::from("assets.toml");
    let cfg: Config = toml::from_str(&std::fs::read_to_string(&toml_path).unwrap()).unwrap();
    let roms = cfg
        .romdata
        .iter()
        .flat_map(|x| x.roms())
        .collect::<Vec<_>>();
    println!(
        "Boxart found: {}",
        roms.iter().filter(|x| x.boxart.is_some()).count()
    );
    println!("Total Roms: {}", roms.len());
    let (spi, delay, epd) = create();
    let mut controller = Controller::new(epd, spi, delay)?;
    // Start-up animation: shift the color bars one slot per second.
    for skip in 0..8 {
        controller.draw(|display| {
            display.set_rotation(DisplayRotation::Rotate0);
            bars(display, skip);
            Ok(())
        })?;
        controller.delay.delay_ms(1_000u32);
    }
    let mut rng = rand::thread_rng();
    loop {
        // Keep sampling ROMs until one yields all 10 requested frames;
        // `catch_unwind` turns emulator panics into an empty frame list.
        let (rom, frames) = 'has_frames: loop {
            let rom = roms.choose(&mut rng).unwrap();
            let frame_count = 10;
            let frames = std::panic::catch_unwind(|| {
                get_frames(
                    &rom.path,
                    None,
                    // One frame per second of emulated time (60 ticks).
                    &(0usize..frame_count).map(|f| f * 60).collect::<Vec<_>>(),
                )
                .unwrap()
            })
            .unwrap_or_else(|_| vec![]);
            if frames.len() == frame_count {
                break 'has_frames (rom, frames);
            }
        };
        controller.draw(|display| {
            display.set_rotation(DisplayRotation::Rotate270);
            let mut base = DynamicImage::new_rgba8(HEIGHT, WIDTH);
            // Random gradient background, vertical or horizontal.
            let bg = if rng.gen() {
                image::imageops::vertical_gradient
            } else {
                image::imageops::horizontal_gradient
            };
            let start = transform::rgba(&mut rng, Some(0xff));
            let end = transform::rgba(&mut rng, Some(0xff));
            bg(&mut base, &start, &end);
            // Each frame goes into a randomly chosen shell photo; the
            // boxart (if any) is appended as one more image.
            let mut images = frames
                .iter()
                .map(|f| {
                    let f = f.clone();
                    let img = place(&cfg.gameboy[rng.gen_range(0..cfg.gameboy.len())], &f);
                    let img = DynamicImage::ImageRgba8(img);
                    img.resize(HEIGHT, WIDTH, FilterType::Gaussian)
                })
                .chain(
                    rom.boxart
                        .as_ref()
                        .map(|boxart| -> Result<DynamicImage> {
                            ImageReader::new(std::io::Cursor::new(std::fs::read(boxart).unwrap()))
                                .with_guessed_format()
                                .map_err(|e| anyhow!("{}", e))?
                                .decode()
                                .map_err(|e| anyhow!("{}", e))
                        })
                        .transpose()
                        .ok()
                        .flatten()
                        .into_iter(),
                )
                .collect::<Vec<_>>();
            images.shuffle(&mut rng);
            for img in images.into_iter() {
                // Apply 1..10 random transforms, then a random projective
                // warp, and stack the result onto the collage.
                let transforms = (0..rng.gen_range(1..10))
                    .map(|_| Transform::random(&mut rng, HEIGHT, WIDTH))
                    .collect::<Vec<_>>();
                let mut transformable = Transformable::new(img);
                for t in transforms {
                    transformable.transform(t);
                }
                let img = transformable.into_inner();
                let projection = transform::projection(&mut rng, img.dimensions(), (HEIGHT, WIDTH));
                use image::Rgba;
                let mut scratch = base.clone();
                imageproc::geometric_transformations::warp_into(
                    &img.into_rgba8(),
                    &projection,
                    Interpolation::Bicubic,
                    Rgba([0, 0, 0, 0]),
                    scratch.as_mut_rgba8().unwrap(),
                );
                image::imageops::overlay(&mut base, &scratch, 0, 0);
            }
            // Content-addressed filename: SHA-256 of the raw pixels.
            use sha2::Digest;
            let mut sha = sha2::Sha256::new();
            sha.update(base.as_bytes());
            let result = sha.finalize();
            let png_name = format!("{:x}.png", result);
            let output = path.join(&png_name);
            let uri = format!(
                "http://{}:{}/{}",
                host.to_string_lossy(),
                port,
                output.display()
            );
            println!("Target URL {}", uri);
            // Quantize to the panel's 8-color palette via dithering.
            let dither = OctDither::new_default(base, Point::zero());
            let image = dither.output();
            use std::os::unix::fs::symlink;
            image.save(&output)?;
            let symlink_file = path.join("latest.png");
            // NOTE(review): this errors out on the very first run, when
            // "latest.png" does not exist yet — NotFound is presumably
            // acceptable here and could be ignored; confirm intent.
            std::fs::remove_file(&symlink_file)?;
            symlink(&png_name, &symlink_file)?;
            dither.iter().draw(display).unwrap();
            // QR code linking to the served PNG, drawn in the corner.
            let code = QrCode::new(
                Point::new(0, 0),
                2,
                OctColor::Black,
                OctColor::White,
                uri.as_bytes(),
            );
            Drawable::draw(&code, display).unwrap();
            Ok(())
        })?;
        // Show each collage for 30 minutes.
        controller.delay.delay_ms(30 * 60_000u32);
    }
}
// use std::io; use std::env; use std::io::{self, Write}; use rand::Rng; fn meme_text(text: String) -> String { let mut to_return: String = "".to_owned(); let mut rng = rand::thread_rng(); for i in 0..text.len() { let num: f32 = rng.gen::<f32>(); if num < 0.5 { to_return.push_str(&text.chars().nth(i).unwrap().to_string().to_uppercase()); } else { to_return.push_str(&text.chars().nth(i).unwrap().to_string().to_lowercase()); } } return to_return; } fn args_to_string(args: &Vec<String>) -> String { let mut to_return: String = "".to_owned(); for i in 1..args.len() { to_return.push_str(&args[i]); if i < args.len() - 1 { to_return.push_str(" "); } } return to_return; } fn main() -> io::Result<()> { let stdout = io::stdout(); let mut handle = stdout.lock(); let args: Vec<String> = env::args().collect(); // println!("{}", meme_text(args_to_string(&args))); handle.write_all(meme_text(args_to_string(&args)).as_bytes())?; Ok(()) }
pub(crate) trait MakingList { /// generates a list from elements of any structure used. fn make_required_list(&self) -> Vec<&str>; }
pub mod rythme_flag; pub mod chord_name; pub mod inner_graphic; pub mod route_rect; pub mod whole_note_label;
use crate::custom_types::ASCII_COMMA;
use crate::looping::{IterAttrs, IterResult, NativeIterator};
use crate::runtime::Runtime;
use crate::std_type::Type;
use crate::string_var::{MaybeString, StringVar};
use crate::variable::{FnResult, Variable};
use ascii::{AsciiChar, AsciiStr};
use once_cell::sync::Lazy;
use std::cell::{Cell, Ref};
use std::cmp::{max, min};
use std::fmt::Debug;
use std::iter::FusedIterator;
use std::mem::{replace, take};
use std::rc::Rc;

/// Anything that can lend out its `InnerDict` (dict- and set-like types).
pub(super) trait DictLike: Debug {
    fn borrow(&self) -> Ref<'_, InnerDict>;
}

/// Open-addressed hash table mapping `Variable` keys to `Variable` values.
///
/// Probing follows the CPython-style scheme implemented in `rehash`
/// (`5*bucket + 1 + perturb`, with `perturb` decaying). Deletions leave
/// `Entry::Removed` tombstones so probe chains remain intact.
#[derive(Debug, Clone)]
pub(super) struct InnerDict {
    // Number of live entries.
    size: usize,
    // Live entries plus tombstones; used for resize decisions, because
    // tombstones still occupy probe slots.
    size_w_deleted: usize,
    entries: Vec<Entry>,
}

/// One bucket: never used, tombstone, or occupied. `None` terminates a
/// probe chain; `Removed` does not.
#[derive(Debug, Clone)]
pub(super) enum Entry {
    None,
    Removed,
    Some(InnerEntry),
}

/// An occupied bucket. The hash is cached so lookups can short-circuit
/// before the (possibly user-defined, fallible) `equals` call, and so
/// resizing never re-hashes.
#[derive(Debug, Clone)]
pub(super) struct InnerEntry {
    key: Variable,
    value: Variable,
    hash: usize,
}

/// Mutable handle on a single bucket together with the dict's counters,
/// so mutations keep `size`/`size_w_deleted` consistent.
#[derive(Debug)]
pub(super) struct EntryMut<'a> {
    size: &'a mut usize,
    size_w_deleted: &'a mut usize,
    entry: &'a mut Entry,
}

impl InnerDict {
    // Probe-sequence perturbation shift (as in CPython's dict).
    const PERTURB_SHIFT: u32 = 5;
    // Smallest non-empty table size; capacities are powers of two.
    const MIN_SIZE: usize = 8;

    /// Creates an empty dict without allocating a bucket array.
    pub fn new() -> InnerDict {
        InnerDict {
            size: 0,
            size_w_deleted: 0,
            entries: Vec::new(),
        }
    }

    /// Raw bucket access for sibling modules.
    pub(super) fn entries_raw_mut(&mut self) -> &mut [Entry] {
        &mut self.entries
    }

    pub(super) fn size_mut(&mut self) -> &mut usize {
        &mut self.size
    }

    /// Builds a dict from parallel key/value vectors.
    ///
    /// Hashing and equality can call user code through `runtime`, hence
    /// the `Result`; `Err(())` propagates a runtime-level failure.
    pub fn from_args(
        keys: Vec<Variable>,
        values: Vec<Variable>,
        runtime: &mut Runtime,
    ) -> Result<InnerDict, ()> {
        debug_assert!(keys.len() == values.len());
        if keys.is_empty() {
            Result::Ok(InnerDict::new())
        } else {
            // Pre-size the table so the inserts below don't trigger resizes.
            let vec_capacity = Self::new_cap(0, keys.len());
            let mut value = InnerDict {
                size: 0,
                size_w_deleted: 0,
                entries: vec![Entry::None; vec_capacity],
            };
            for (x, y) in keys.into_iter().zip(values) {
                value.set(x, y, runtime)?;
            }
            Result::Ok(value)
        }
    }

    /// Number of live entries.
    pub fn size(&self) -> usize {
        self.size
    }

    /// Looks up `key`, returning a clone of its value if present.
    pub fn get(&self, key: Variable, runtime: &mut Runtime) -> Result<Option<Variable>, ()> {
        if self.entries.is_empty() {
            Result::Ok(Option::None)
        } else if let Entry::Some(e) = self.entry(key, runtime)? {
            Result::Ok(Option::Some(e.value.clone()))
        } else {
            Result::Ok(Option::None)
        }
    }

    /// Like `get`, but also returns the *stored* key (useful where key
    /// identity matters, e.g. for set-like wrappers).
    pub fn get_pair(
        &self,
        key: Variable,
        runtime: &mut Runtime,
    ) -> Result<Option<(Variable, Variable)>, ()> {
        if self.entries.is_empty() {
            Result::Ok(Option::None)
        } else if let Entry::Some(e) = self.entry(key, runtime)? {
            Result::Ok(Option::Some((e.key.clone(), e.value.clone())))
        } else {
            Result::Ok(Option::None)
        }
    }

    /// Inserts or replaces; returns the previous value if the key existed.
    pub fn set(
        &mut self,
        key: Variable,
        val: Variable,
        runtime: &mut Runtime,
    ) -> Result<Option<Variable>, ()> {
        let hash = key.clone().hash(runtime)?;
        // Guarantee a free slot before probing for the insertion point.
        self.resize(1);
        assert!(!self.entries.is_empty());
        Result::Ok(self.entry_mut(key.clone(), runtime)?.put(key, val, hash))
    }

    /// Structural equality: same live size, and every (key, value) pair of
    /// `self` present with an equal value in `other`.
    pub fn equals(&self, other: &InnerDict, runtime: &mut Runtime) -> Result<bool, ()> {
        if self.size != other.size {
            return Result::Ok(false);
        }
        for (key, value) in self {
            if !match other.get(key.clone(), runtime)? {
                Option::Some(val) => val.equals(value.clone(), runtime)?,
                Option::None => false,
            } {
                return Result::Ok(false);
            }
        }
        Result::Ok(true)
    }

    /// Removes the entry for `value` (tombstoning its bucket); returns the
    /// old value if it was present.
    pub fn del(&mut self, value: Variable, runtime: &mut Runtime) -> Result<Option<Variable>, ()> {
        Result::Ok(self.entry_mut(value, runtime)?.remove())
    }

    /// Empties the dict but keeps the allocated bucket array.
    pub fn clear(&mut self) {
        self.size = 0;
        self.size_w_deleted = 0;
        for entry in &mut self.entries {
            entry.take();
        }
    }

    pub fn is_empty(&self) -> bool {
        self.size == 0
    }

    /// `{key: value, ...}` representation; the empty dict prints as `{:}`
    /// (distinct from the empty-set literal `{}` used by `key_repr`).
    pub fn true_repr(&self, runtime: &mut Runtime) -> Result<StringVar, ()> {
        static ASCII_COLON: Lazy<&AsciiStr> = Lazy::new(|| AsciiStr::from_ascii(": ").unwrap());
        if self.is_empty() {
            static EMPTY_DICT: Lazy<&AsciiStr> = Lazy::new(|| AsciiStr::from_ascii("{:}").unwrap());
            return Result::Ok((*EMPTY_DICT).into());
        }
        let mut result = MaybeString::new();
        result.push_ascii(AsciiChar::CurlyBraceOpen);
        let mut first = true;
        for entry in &self.entries {
            if let Entry::Some(e) = entry {
                if !first {
                    result += *ASCII_COMMA;
                }
                first = false;
                result += e.key.clone().str(runtime)?;
                result += *ASCII_COLON;
                result += e.value.clone().str(runtime)?;
            }
        }
        result.push_ascii(AsciiChar::CurlyBraceClose);
        Result::Ok(result.into())
    }

    /// `{key, ...}` representation of the keys only (set-style).
    pub fn key_repr(&self, runtime: &mut Runtime) -> Result<StringVar, ()> {
        if self.is_empty() {
            static EMPTY_SET: Lazy<&AsciiStr> = Lazy::new(|| AsciiStr::from_ascii("{}").unwrap());
            return Result::Ok((*EMPTY_SET).into());
        }
        let mut result = MaybeString::new();
        result.push_ascii(AsciiChar::CurlyBraceOpen);
        let mut first = true;
        for entry in &self.entries {
            if let Entry::Some(e) = entry {
                if !first {
                    result += *ASCII_COMMA;
                }
                first = false;
                result += e.key.clone().str(runtime)?;
            }
        }
        result.push_ascii(AsciiChar::CurlyBraceClose);
        Result::Ok(result.into())
    }

    /// Probes for `key`, returning either its occupied bucket or the bucket
    /// an insert should use (the first tombstone seen, else the terminating
    /// empty slot). Panics if the table is unallocated.
    fn entry(&self, key: Variable, runtime: &mut Runtime) -> Result<&Entry, ()> {
        assert!(!self.entries.is_empty());
        let len = self.entries.len();
        let hash = key.clone().hash(runtime)?;
        let mut perturb = hash;
        let mut bucket = hash % len;
        let mut first_removed = Option::None;
        loop {
            match &self.entries[bucket] {
                // An empty slot terminates the probe chain: key is absent.
                e @ Entry::None => return Result::Ok(first_removed.unwrap_or(e)),
                e @ Entry::Removed => {
                    // Remember the first tombstone so it can be reused.
                    first_removed.get_or_insert(e);
                    bucket = Self::rehash(&mut perturb, bucket) % len;
                }
                Entry::Some(e) => {
                    // Compare cached hashes first; `equals` may run user code.
                    if e.hash == hash && e.key.clone().equals(key.clone(), runtime)? {
                        return Result::Ok(&self.entries[bucket]);
                    }
                    bucket = Self::rehash(&mut perturb, bucket) % len;
                }
            }
        }
    }

    /// Mutable twin of `entry`: probes by index (rather than by reference)
    /// so the final `&mut` borrow happens only once, after the loop.
    pub(crate) fn entry_mut(
        &mut self,
        key: Variable,
        runtime: &mut Runtime,
    ) -> Result<EntryMut<'_>, ()> {
        assert!(!self.entries.is_empty());
        let len = self.entries.len();
        let hash = key.clone().hash(runtime)?;
        let mut perturb = hash;
        let mut bucket = hash % len;
        let mut first_removed = Option::None;
        let bucket: usize = loop {
            match &mut self.entries[bucket] {
                Entry::None => {
                    break first_removed.unwrap_or(bucket);
                }
                Entry::Removed => {
                    first_removed.get_or_insert(bucket);
                    bucket = Self::rehash(&mut perturb, bucket) % len;
                }
                Entry::Some(e) => {
                    if e.hash == hash && e.key.clone().equals(key.clone(), runtime)? {
                        break bucket;
                    } else {
                        bucket = Self::rehash(&mut perturb, bucket) % len;
                    }
                }
            }
        };
        Result::Ok(EntryMut {
            size: &mut self.size,
            size_w_deleted: &mut self.size_w_deleted,
            entry: &mut self.entries[bucket],
        })
    }

    /// Grows the table if adding `additional` entries would push occupancy
    /// (live + tombstones) past the load factor.
    pub(super) fn resize(&mut self, additional: usize) {
        let new_size = self.size_w_deleted + additional;
        let new_capacity = self.new_capacity(new_size);
        let current_size = self.entries.len();
        if current_size < new_capacity {
            // Resize ignoring the deleted values b/c they'll all disappear in resizing
            let new_cap = max(current_size, self.new_capacity(self.size + additional));
            self.resize_exact(new_cap);
        }
    }

    /// Rebuilds the bucket array at exactly `new_size` (a power of two),
    /// re-inserting only live entries — all tombstones are discarded.
    fn resize_exact(&mut self, new_size: usize) {
        debug_assert!(new_size.is_power_of_two());
        let old_vec = replace(&mut self.entries, vec![Entry::None; new_size]);
        let new_vec = &mut self.entries;
        let len = new_vec.len();
        for entry in old_vec {
            // Doesn't use self.set here b/c we already know all elements are unique, and we know
            // the hash of each element already
            if let Entry::Some(entry) = entry {
                let hash = entry.hash;
                let mut bucket = hash % len;
                let mut perturb = hash;
                while let Entry::Some(_) = new_vec[bucket] {
                    bucket = Self::rehash(&mut perturb, bucket) % len;
                }
                new_vec[bucket] = Entry::Some(entry);
            }
        }
        self.size_w_deleted = self.size;
    }

    fn new_capacity(&self, new_size: usize) -> usize {
        Self::new_cap(self.entries.len(), new_size)
    }

    fn requires_resize(current_cap: usize, new_size: usize) -> bool {
        // Equivalent to load factor of 0.75, but without loss of precision from floats
        current_cap - (current_cap / 4) < new_size
    }

    /// Smallest acceptable power-of-two capacity for `new_size` entries,
    /// or `current_cap` unchanged if it is already sufficient.
    fn new_cap(current_cap: usize, new_size: usize) -> usize {
        if !Self::requires_resize(current_cap, new_size) {
            return current_cap;
        }
        let new_cap = max(Self::MIN_SIZE, new_size.next_power_of_two());
        if Self::requires_resize(current_cap, new_cap) {
            new_cap << 1
        } else {
            new_cap
        }
    }

    /// Next bucket in the probe sequence: `5*bucket + 1 + perturb`, with
    /// `perturb` decaying to zero so the high hash bits eventually steer
    /// the walk (CPython's scheme).
    fn rehash(perturb: &mut usize, bucket: usize) -> usize {
        let result = 5 * bucket + 1 + *perturb;
        *perturb >>= Self::PERTURB_SHIFT;
        result
    }
}

impl Entry {
    /// Replaces this bucket with `Entry::None`, returning the old state.
    pub fn take(&mut self) -> Self {
        take(self)
    }

    /// Replaces this bucket with a tombstone, returning the old state.
    pub fn remove(&mut self) -> Self {
        replace(self, Entry::Removed)
    }

    /// Panics unless the bucket is occupied.
    pub fn unwrap(self) -> InnerEntry {
        match self {
            Entry::None => panic!(),
            Entry::Removed => panic!(),
            Entry::Some(e) => e,
        }
    }
}

impl Default for Entry {
    fn default() -> Self {
        Entry::None
    }
}

impl InnerEntry {
    pub fn clone_key(&self) -> Variable {
        self.key.clone()
    }

    pub fn clone_value(&self) -> Variable {
        self.value.clone()
    }

    pub fn value_mut(&mut self) -> &mut Variable {
        &mut self.value
    }
}

/// Runtime-facing iterator over a dict-like object. The position is a
/// bucket index kept in a `Cell`, since `next` takes `self: Rc<Self>`.
///
/// NOTE(review): iteration is in bucket order; a mutation that resizes the
/// table mid-iteration would scramble the walk — presumably the runtime
/// forbids that; confirm at call sites.
#[derive(Debug)]
pub(super) struct DictIter<T: DictLike> {
    parent: Rc<T>,
    bucket_no: Cell<usize>,
}

impl<T: DictLike> DictIter<T> {
    pub fn new(parent: Rc<T>) -> DictIter<T> {
        DictIter {
            parent,
            bucket_no: Cell::new(0),
        }
    }

    /// Skips empty/tombstoned buckets and returns the next live pair,
    /// leaving `bucket_no` pointing just past it.
    fn true_next(self: Rc<Self>) -> Option<(Variable, Variable)> {
        let parent = self.parent.borrow();
        let len = parent.entries.len();
        let mut bucket = self.bucket_no.get();
        if bucket >= len {
            return Option::None;
        }
        loop {
            if bucket >= len {
                self.bucket_no.set(bucket);
                return Option::None;
            } else if let Entry::Some(e) = &parent.entries[bucket] {
                bucket += 1;
                self.bucket_no.set(bucket);
                return Option::Some((e.key.clone(), e.value.clone()));
            } else {
                bucket += 1;
            }
        }
    }
}

impl<'a> EntryMut<'a> {
    /// Tombstones an occupied bucket and returns its value. Note that
    /// `size_w_deleted` is deliberately untouched: the tombstone still
    /// occupies a probe slot.
    pub fn remove(&mut self) -> Option<Variable> {
        match &mut self.entry {
            Entry::None => Option::None,
            Entry::Removed => Option::None,
            e @ Entry::Some(_) => {
                let entry = e.remove().unwrap();
                *self.size -= 1;
                Option::Some(entry.value)
            }
        }
    }

    /// Fills an empty/tombstoned bucket (returning `None`) or replaces the
    /// value of an occupied one (returning the old value).
    pub fn put(&mut self, key: Variable, val: Variable, hash: usize) -> Option<Variable> {
        match &mut self.entry {
            e @ Entry::None | e @ Entry::Removed => {
                let old_entry = replace(
                    *e,
                    Entry::Some(InnerEntry {
                        key,
                        hash,
                        value: val,
                    }),
                );
                // Reusing a tombstone doesn't increase slot occupancy.
                if let Entry::None = old_entry {
                    *self.size_w_deleted += 1;
                }
                *self.size += 1;
                Option::None
            }
            Entry::Some(e) => Option::Some(replace(&mut e.value, val)),
        }
    }

    /// Converts into the inner entry, or gives `self` back if the bucket
    /// is not occupied.
    pub fn into_value(self) -> Result<&'a mut InnerEntry, Self> {
        match self.entry {
            Entry::Some(e) => Result::Ok(e),
            _ => Result::Err(self),
        }
    }
}

impl<T: DictLike + 'static> IterAttrs for DictIter<T> {
    /// `next()` as exposed to the language: returns the pair as two
    /// separate optional values via `return_n`.
    fn next_fn(self: Rc<Self>, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
        debug_assert!(args.is_empty());
        match self.true_next() {
            Option::None => runtime.return_n([Option::None.into(), Option::None.into()]),
            Option::Some(val) => {
                runtime.return_n([Option::Some(val.0).into(), Option::Some(val.1).into()])
            }
        }
    }

    fn get_type() -> Type {
        unimplemented!()
    }
}

impl<T: DictLike + 'static> NativeIterator for DictIter<T> {
    fn next(self: Rc<Self>, _runtime: &mut Runtime) -> IterResult {
        IterResult::Ok(self.true_next().map(|(a, b)| vec![a, b]).into())
    }
}

/// Borrowing iterator used by `for (k, v) in &inner_dict` in native code.
pub(super) struct InnerDictIter<'a> {
    parent: &'a InnerDict,
    i: usize,
}

impl<'a> Iterator for InnerDictIter<'a> {
    type Item = (&'a Variable, &'a Variable);

    fn next(&mut self) -> Option<Self::Item> {
        let len = self.parent.entries.len();
        loop {
            if self.i >= len {
                return Option::None;
            } else if let Entry::Some(e) = &self.parent.entries[self.i] {
                self.i += 1;
                return Option::Some((&e.key, &e.value));
            } else {
                self.i += 1;
            }
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        // Upper bound: no more than the remaining buckets or the dict's
        // live size, whichever is smaller. Lower bound is 0 since the
        // remaining buckets might all be empty/tombstoned.
        let min_max = min(self.parent.size, self.parent.entries.len() - self.i);
        (0, Option::Some(min_max))
    }
}

impl<'a> IntoIterator for &'a InnerDict {
    type Item = (&'a Variable, &'a Variable);
    type IntoIter = InnerDictIter<'a>;

    fn into_iter(self) -> Self::IntoIter {
        InnerDictIter { parent: self, i: 0 }
    }
}

// Once exhausted, `InnerDictIter` keeps returning `None`.
impl FusedIterator for InnerDictIter<'_> {}
extern crate hangeul; fn main() { // literally: pikachu transliterated let subject = "피카츄"; // Korean marks the topic of the sentence with a post position // particle: 이 follows consonants, and 가 follows vowels. let post_position = match hangeul::ends_in_consonant(subject).unwrap() { true => "이", false => "가", }; // -> A wild pikachu has appeared! let sentence = format!("야생의 {}{} 나타났다!", subject, post_position); println!("{}", sentence); // 야생의 피카츄가 나타났다! // get_lead is an alias of get_choseong, to get the first character // of a Hangeul syllable. let sentence_in_choseong = sentence .chars() .map(|c| hangeul::get_lead(&c).unwrap_or(c)) .collect::<String>(); println!("{}", sentence_in_choseong); // ㅇㅅㅇ ㅍㅋㅊㄱ ㄴㅌㄴㄷ! }
use std::ffi::CStr; use std::fmt; use std::os::raw::c_char; /* typedef struct APerson { const char * name; const char * long_name; } APerson ; APerson *get_person(const char * name, const char * long_name); void free_person(APerson *person); */ #[repr(C)] pub struct APerson { name: *const c_char, long_name: *const c_char, } impl APerson { fn new(name: *const c_char, long_name: *const c_char) -> APerson { let result = APerson { name, long_name }; println!("Created {}", result); result } } impl Drop for APerson { fn drop(&mut self) { println!("Dropping {}", self); } } impl fmt::Display for APerson { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let name = unsafe { CStr::from_ptr(self.name) }; let long_name = unsafe { CStr::from_ptr(self.long_name) }; write!( formatter, "APerson: name: {:?}, long_name: {:?}", &name, &long_name ) } } // See https://doc.rust-lang.org/std/boxed/index.html#memory-layout #[no_mangle] pub fn get_person(name: *const c_char, long_name: *const c_char) -> Box<APerson> { // Box::new allocates memory from the heap and places the Person object in it. Box::new(APerson::new(name, long_name)) } #[no_mangle] pub extern "C" fn free_person(_person: Option<Box<APerson>>) { // When the Box is dropped at the end of this function, the memory is released. } #[cfg(test)] mod tests { use super::*; use std::ffi::CString; #[test] fn test_person() { // NOTE: Rust literals are not null terminated, however, a CString is. let name = CString::new("Name").unwrap(); let long_name = CString::new("This is a long name").unwrap(); let person = get_person(name.as_ptr(), long_name.as_ptr()); free_person(Some(person)); } }
// #![feature(trait_alias)] use async_std::io; use async_std::task; use serde::{Deserialize, Serialize}; use tide::StatusCode; use tide_validator::{HttpField, ValidatorMiddleware}; #[derive(Deserialize, Serialize)] struct Cat { name: String, } fn main() -> io::Result<()> { task::block_on(async { let mut app = tide::new(); let mut validator_middleware = ValidatorMiddleware::new(); // 'age' is the parameter name inside the route '/test/:name' validator_middleware.add_validator(HttpField::Param("age"), is_number); // You can also add multiple validators on a single parameter to check different things validator_middleware.add_validator(HttpField::Param("age"), is_required); // You can assign different middleware for each routes so different validators for each routes app.at("/test/:age").middleware(validator_middleware).get( |_: tide::Request<()>| async move { let cat = Cat { name: "Gribouille".into(), }; Ok(tide::Response::new(StatusCode::Ok).body_json(&cat).unwrap()) }, ); app.listen("127.0.0.1:8080").await?; Ok(()) }) } #[inline] fn is_number(field_name: &str, field_value: Option<&str>) -> Result<(), String> { if let Some(field_value) = field_value { if field_value.parse::<i64>().is_err() { return Err(format!( "field '{}' = '{}' is not a valid number", field_name, field_value )); } } Ok(()) } #[inline] fn is_required(field_name: &str, field_value: Option<&str>) -> Result<(), String> { if field_value.is_none() { Err(format!("'{}' is required", field_name)) } else { Ok(()) } }
use hacspec_dev::prelude::*;
use hacspec_lib::prelude::*;

use hacspec_aes::*;

/// Round-trips `m` through AES-128-CTR with (`key`, `iv`, `ctr`): encrypts,
/// decrypts, and asserts the decryption matches `m`. When `ctxt` is given,
/// also asserts the exact ciphertext (known-answer test).
fn aes_128_enc_dec_test(m: &ByteSeq, key: Key128, iv: AesNonce, ctr: U32, ctxt: Option<&ByteSeq>) {
    let c = aes128_encrypt(key, iv, ctr, m);
    let m_dec = aes128_decrypt(key, iv, ctr, &c);
    assert_bytes_eq!(m, m_dec);
    // Idiom fix: `if let` instead of `is_some()` followed by `unwrap()`.
    if let Some(expected) = ctxt {
        assert_bytes_eq!(c, expected);
    }
}

/// Round-trip with a random key, nonce and 40-byte message; no known
/// answer is available, so only decrypt(encrypt(m)) == m is checked.
#[test]
fn test_enc_dec() {
    let key = Key128::from_public_slice(&random_byte_vec(Key128::length()));
    let iv = AesNonce::from_public_slice(&random_byte_vec(AesNonce::length()));
    let m = ByteSeq::from_public_slice(&random_byte_vec(40));
    aes_128_enc_dec_test(&m, key, iv, U32(0), None);
}

/// Single-block known-answer test; the key/counter-block/ciphertext match
/// the CTR-AES128 example in NIST SP 800-38A — TODO confirm the source.
#[test]
fn test_kat1() {
    let msg = ByteSeq::from_public_slice(&[
        0x6b, 0xc1, 0xbe, 0xe2, 0x2e, 0x40, 0x9f, 0x96, 0xe9, 0x3d, 0x7e, 0x11, 0x73, 0x93, 0x17,
        0x2a,
    ]);
    let key = Key128::from_public_slice(&[
        0x2b, 0x7e, 0x15, 0x16, 0x28, 0xae, 0xd2, 0xa6, 0xab, 0xf7, 0x15, 0x88, 0x09, 0xcf, 0x4f,
        0x3c,
    ]);
    let nonce = AesNonce::from_public_slice(&[
        0xf0, 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8, 0xf9, 0xfa, 0xfb,
    ]);
    let ctr = U32(0xfcfdfeff);
    let ctxt = ByteSeq::from_public_slice(&[
        0x87, 0x4d, 0x61, 0x91, 0xb6, 0x20, 0xe3, 0x26, 0x1b, 0xef, 0x68, 0x64, 0x99, 0x0d, 0xb6,
        0xce,
    ]);
    aes_128_enc_dec_test(&msg, key, nonce, ctr, Some(&ctxt));
}

/// Two-block known-answer test (presumably an RFC 3686-style AES-CTR
/// vector with an initial counter of 1 — TODO confirm the source).
#[test]
fn test_kat2() {
    let msg = ByteSeq::from_public_slice(&[
        0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E,
        0x0F, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1A, 0x1B, 0x1C, 0x1D,
        0x1E, 0x1F,
    ]);
    let key = Key128::from_public_slice(&[
        0x7E, 0x24, 0x06, 0x78, 0x17, 0xFA, 0xE0, 0xD7, 0x43, 0xD6, 0xCE, 0x1F, 0x32, 0x53, 0x91,
        0x63,
    ]);
    let nonce = AesNonce::from_public_slice(&[
        0x00, 0x6C, 0xB6, 0xDB, 0xC0, 0x54, 0x3B, 0x59, 0xDA, 0x48, 0xD9, 0x0B,
    ]);
    // Consistency fix: wrap the counter in `U32` at the declaration, as in
    // `test_kat1`, rather than at the call site.
    let ctr = U32(0x00000001);
    let ctxt = ByteSeq::from_public_slice(&[
        0x51, 0x04, 0xA1, 0x06, 0x16, 0x8A, 0x72, 0xD9, 0x79, 0x0D, 0x41, 0xEE, 0x8E, 0xDA, 0xD3,
        0x88, 0xEB, 0x2E, 0x1E, 0xFC, 0x46, 0xDA, 0x57, 0xC8, 0xFC, 0xE6, 0x30, 0xDF, 0x91, 0x41,
        0xBE, 0x28,
    ]);
    aes_128_enc_dec_test(&msg, key, nonce, ctr, Some(&ctxt));
}
pub mod client; pub mod listener; pub mod status;
use std::fmt; use std::error; use reqwest::StatusCode; use reqwest::blocking::{Client, Body}; #[derive(Debug)] pub enum Error { WrongStatusCode } impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "Wrong status code") } } impl error::Error for Error { fn source(&self) -> Option<&(dyn error::Error + 'static)> { None } } pub fn remote_delete(remote: String) -> Result<(), Box<dyn error::Error>> { let resp = Client::new().delete(&remote).body(Body::from("")).send()?; if resp.status() != StatusCode::NO_CONTENT { // 204 return Err(Box::new(Error::WrongStatusCode)); } return Ok(()); } pub fn remote_put(remote: &String, _length: usize, body: &String) -> Result<(), Box<dyn std::error::Error>> { let resp = Client::new().put(remote).body::<String>(body.into()).send()?; if resp.status() != StatusCode::CREATED && resp.status() != StatusCode::NO_CONTENT { // 201 && 204 return Err(Box::new(Error::WrongStatusCode)); } return Ok(()); } pub fn remote_get(remote: &String) -> Result<String, Box<dyn std::error::Error>> { let mut resp = Client::new().get(remote).body(Body::from("")).send()?; if resp.status() != StatusCode::OK { return Err(Box::new(Error::WrongStatusCode)); } let mut buffer = Vec::<u8>::new(); resp.copy_to(&mut buffer)?; return Ok(String::from_utf8(buffer)?); } pub fn remote_head(remote: &String) -> bool { let resp = Client::new().head(remote).body(Body::from("")).send().expect("remote_head: error while sending request"); return resp.status() == 200; }
use crate::sqlite::database::SqliteDatabase; use apllodb_shared_components::DatabaseName; use glob::glob; /// Removes sqlite3 database file on drop(). /// /// Use this in "panic guard" pattern. #[derive(Debug)] pub struct SqliteDatabaseCleaner(DatabaseName); impl SqliteDatabaseCleaner { pub fn new(database_name: DatabaseName) -> Self { Self(database_name) } } impl Drop for SqliteDatabaseCleaner { fn drop(&mut self) { let sqlite3_path = SqliteDatabase::sqlite_db_path(&self.0); let sqlite3_files_pattern = format!("{}*", sqlite3_path.as_os_str().to_str().unwrap()); for path in glob(&sqlite3_files_pattern).unwrap().flatten() { log::debug!( "SqliteDatabaseCleaner: found {}. removing...", path.as_os_str().to_str().unwrap() ); std::fs::remove_file(&path) .or_else(|ioerr| match ioerr.kind() { std::io::ErrorKind::NotFound => Ok(()), _ => Err(ioerr), }) .unwrap(); log::debug!("SqliteDatabaseCleaner: done"); } } }
use crate::features::syntax::StatementFeature;
use crate::parse::visitor::tests::assert_no_stmt_feature;
use crate::parse::visitor::tests::assert_stmt_feature;

// NOTE: the original test names (`continue_no_label` / `continue_with_label`)
// were copy-paste leftovers from the `continue`-statement tests; these tests
// exercise the *return*-statement features, so they are renamed accordingly.

/// A bare `return;` is detected as `ReturnNothingStatement`.
#[test]
fn return_nothing() {
    assert_stmt_feature(
        "function a() { return; }",
        StatementFeature::ReturnNothingStatement,
    );
}

/// `return <expr>;` is detected as `ReturnExprStatement`.
#[test]
fn return_expr() {
    assert_stmt_feature(
        "const a = function() { return 123; }",
        StatementFeature::ReturnExprStatement,
    );
}

/// The two return features do not bleed into each other: a bare `return` is
/// not an expression return, and `return undefined;` is not a bare return.
#[test]
fn distinct() {
    assert_no_stmt_feature(
        "const abc = () => { return };",
        StatementFeature::ReturnExprStatement,
    );
    assert_no_stmt_feature(
        "function foo() { return undefined; }",
        StatementFeature::ReturnNothingStatement,
    );
}
// Demonstrations of explicit `A: 'a` outlives-bounds on generic items
// (type alias, tuple struct, named struct, enum, trait, impl).

type TFoo<'a, A: 'a> = (&'a A, u64);

struct SFoo<'a, A: 'a>(&'a A);

struct SBar<'a, A: 'a> { x: &'a A }

enum EFoo<'a, A: 'a> {
    X { x: &'a A },
    Y { y: &'a A },
}

struct SBaz<'a, 'b, A: 'a, B: 'b> {
    a: &'a A,
    b: &'b B,
}

trait TBaz<'a, 'b, A: 'a, B: 'b> {
    fn baz(&self);
}

impl<'a, 'b, A: 'a, B: 'b> TBaz<'a, 'b, A, B> for SBaz<'a, 'b, A, B> {
    fn baz(&self){}
}

// Two traits that both define a method named `f`, both implemented for `Baz`,
// to demonstrate method-name disambiguation.
trait Foo { fn f(&self); }
trait Bar { fn f(&self); }

struct Baz;

impl Foo for Baz {
    fn f(&self) { println!("Baz’s impl of Foo"); }
}
impl Bar for Baz {
    fn f(&self) { println!("Baz’s impl of Bar"); }
}

fn main() {
    let b = Baz;
    // Disambiguation via trait-qualified call syntax...
    Foo::f(&b);
    Bar::f(&b);
    // ...and via fully-qualified (UFCS) syntax; each pair calls the same impl.
    <Baz as Foo>::f(&b);
    <Baz as Bar>::f(&b);
}
use serde::{Deserialize, Serialize}; use core::pos::{ BiPos, Position, }; pub mod hir; pub mod mir; #[repr(u8)] #[derive(Debug, Clone, PartialEq)] pub enum Constant<'a>{ Str(&'a str), Int(i32), Float(f32), Bool(bool), } use std::cell::RefCell; #[derive(Clone, Debug, Serialize, Deserialize)] pub struct Chunk{ pub code: Vec<u8>, pub ins_ptr: RefCell<usize> } impl Chunk{ pub fn new() -> Self{ Self{ code: Vec::with_capacity(4), ins_ptr: RefCell::new(0) } } pub fn set_ins_ptr(&self, index: usize){ self.ins_ptr.replace(index); } pub fn jump_to(&self, index: usize) -> Result<(),String>{ if index >= self.length(){ return Err(format!("Cannot jump beyond the length of the bytecode: {}", index)) } self.set_ins_ptr(index); Ok(()) } pub fn advance(&self){ self.inc_ins_ptr(1); } pub fn can_read(&self) -> bool{ self.ins_ptr.clone().into_inner() < self.code.len() } pub fn peek(&self) -> Option<&u8>{ self.code.get(self.ins_ptr.clone().into_inner() + 1) } pub fn length(&self) -> usize{ self.code.len() } pub fn get_current(&self) -> u8{ self.code[self.ins_ptr.clone().into_inner()] } pub fn get_at_index(&self, index: usize) -> u8{ self.code[index] } pub fn read_int(&self) -> i32{ self.read_int_at(self.ins_ptr.clone().into_inner()) } pub fn read_int_at(&self, idx: usize) -> i32{ let int = i32::from_be_bytes(unsafe { *(self.code[idx..idx+4].as_ptr() as *const [u8; 4]) }); self.inc_ins_ptr(4); return int; } pub fn write_pos(&mut self, bipos: BiPos){ self.write_usize(bipos.start.0); self.write_usize(bipos.start.1); self.write_usize(bipos.end.0); self.write_usize(bipos.end.1); self.write_usize(bipos.offset.0); self.write_usize(bipos.offset.1); self.write_usize(bipos.line_region.0); self.write_usize(bipos.line_region.1); } pub fn read_pos(&self) -> Result<BiPos, String>{ let start_line = match self.read_usize(){ Ok(start_line) => start_line, Err(msg) => return Err(msg) }; let start_col = match self.read_usize(){ Ok(start_col) => start_col, Err(msg) => return Err(msg) }; let end_line = match 
self.read_usize(){ Ok(end_line) => end_line, Err(msg) => return Err(msg) }; let end_col = match self.read_usize(){ Ok(end_col) => end_col, Err(msg) => return Err(msg) }; let offset_start = match self.read_usize(){ Ok(offset_start) => offset_start, Err(msg) => return Err(msg) }; let offset_end = match self.read_usize(){ Ok(offset_end) => offset_end, Err(msg) => return Err(msg) }; let line_region_start = match self.read_usize(){ Ok(line_region_start) => line_region_start, Err(msg) => return Err(msg) }; let line_region_end = match self.read_usize(){ Ok(line_region_end) => line_region_end, Err(msg) => return Err(msg) }; Ok(BiPos{ start: Position(start_line, start_col), end: Position(end_line, end_col), offset: Position(offset_start, offset_end), line_region: Position(line_region_start, line_region_end) }) } pub fn read_float(&self) -> f32{ self.read_float_at(self.ins_ptr.clone().into_inner()) } pub fn read_float_at(&self, idx: usize) -> f32{ let float = f32::from_be_bytes(unsafe { *(self.code[idx..idx+4].as_ptr() as *const [u8; 4]) }); self.inc_ins_ptr(5); return float } pub fn read_string(&self) -> &str{ self.read_string_at(self.ins_ptr.clone().into_inner()) } pub fn read_string_at(&self, idx: usize) -> &str{ let length = self.read_int() as usize; let start = idx+4; let string= &self.code[start..start+length]; self.inc_ins_ptr(length as usize); return std::str::from_utf8(string).unwrap() } pub fn write_string(&mut self, str: String){ self.write_int(str.len() as i32); self.code.append(str.as_bytes().to_vec().as_mut()) } pub fn write_str(&mut self, str: &str){ self.write_int(str.len() as i32); self.code.append(str.as_bytes().to_vec().as_mut()) } pub fn read_usize(&self) -> Result<usize, String>{ self.read_usize_at(self.ins_ptr.clone().into_inner()) } pub fn inc_ins_ptr(&self, amount: usize){ let old = self.ins_ptr.clone().into_inner(); self.ins_ptr.replace(old + amount); } pub fn dec_ins_ptr(&self, amount: usize){ let old = self.ins_ptr.clone().into_inner(); 
self.ins_ptr.replace(old - amount); } pub fn read_usize_at(&self, idx: usize) -> Result<usize, String>{ if self.code.len() < idx || self.code.len() < idx+8{ return Err(format!("Cannot read usize from chunk cause chunk length is less than given idx: {}", idx)) } let float = usize::from_be_bytes(unsafe { *(self.code[idx..idx+8].as_ptr() as *const [u8; 8]) }); self.inc_ins_ptr(std::mem::size_of::<usize>()); return Ok(float) } pub fn write_usize(&mut self, size: usize){ self.code.extend(size.to_be_bytes().iter()) } pub fn write_int(&mut self, int: i32){ self.code.extend(int.to_be_bytes().iter()) } pub fn write_float(&mut self, float: f32){ self.code.extend(float.to_be_bytes().iter()) } pub fn write_double(&mut self, double: f64){ self.code.extend(double.to_be_bytes().iter()) } pub fn write_bool(&mut self, boolean: bool){ self.code.push(boolean as u8) } pub fn read_bool(&self) -> bool{ let value = self.read_byte(); value == b'\x01' } pub fn write_byte(&mut self, byte: u8){ self.code.push(byte) } pub fn read_byte(&self) -> u8{ let b = self.get_current(); self.advance(); b } pub fn write_chunk(&mut self, chunk: Self){ self.code.extend(chunk.code) } } impl Iterator for Chunk{ type Item = u8; fn next(&mut self) -> Option<Self::Item> { if !self.can_read(){ return None } self.advance(); Some(self.get_current()) } } #[derive(Debug, Clone, Serialize, Deserialize)] pub struct Module { /// The name of the module pub name: String, ///The module's bytecode chunk pub chunk: Chunk } impl Module { pub fn new(name: String) -> Self { Module { name: name.to_string(), chunk: Chunk::new() } } }
use auto_impl::auto_impl;

// NOTE(review): this looks like a compile-test fixture for the `auto_impl`
// macro — `keep_default_for` applied to an associated type (which has no
// default body) is presumably expected to be rejected with a diagnostic.
// Confirm against the crate's test harness before changing.
#[auto_impl(&)]
trait Foo {
    #[auto_impl(keep_default_for(&))]
    type Foo;
}

fn main() {}
//! The updater pulls down the necessary data to populate the repository. The canonical repository //! is stored in git for its flexibility of version control. use config::Config; use std::env; use std::fs; use std::fs::Path; pub struct Updater { config: Config } impl Updater { pub fn new(config: Config) -> Updater { return Updater { config: config } } /// This pulls data from github and updates the repository with it. pub fn update() { // } } fn update() { let repo = Path::new("."); if !repo.exists() || !repo.is_dir() { if !repo.is_dir() { fs::remove_file(repo.to_string())?; } fs::create_dir(repo.to_string())?; } let hash = repo.join("hash"); let hash_remote = unimplemented!(); // Download from github if !hash.exists() || hash != hash_remote { // Download zip from github master. // Unpack zip into directory. } }
use sdl2::{video::Window, render::Canvas};

use crate::{input::GameInput};

/// A game state (menu, level, ...) driven by the main loop.
pub trait Scene {
    /// Advances the scene's simulation.
    /// `inputs` are the inputs gathered this frame, `t` is the total elapsed
    /// time and `dt` the delta for this step.
    /// NOTE(review): units of `t`/`dt` (ms? seconds?) are not visible here —
    /// confirm against the caller.
    fn update(&mut self, inputs: Vec<GameInput>, t: u128, dt: f64);

    /// Draws the scene onto the SDL2 window canvas.
    fn render(&mut self, canvas: &mut Canvas<Window>);
}
//! Run a BuildLoop for `shell.nix`, watching for input file changes. //! Can be used together with `direnv`. use crate::daemon::Daemon; use crate::ops::error::{ok, OpResult}; use crate::socket::SocketPath; use slog_scope::info; /// See the documentation for lorri::cli::Command::Daemon for details. pub fn main() -> OpResult { let (daemon, build_rx) = Daemon::new(); let build_handle = std::thread::spawn(|| { for msg in build_rx { info!("build status"; "message" => ?msg); } }); info!("ready"); let paths = crate::ops::get_paths()?; daemon.serve( SocketPath::from(paths.daemon_socket_file()), paths.gc_root_dir().to_path_buf(), paths.cas_store().clone(), )?; build_handle .join() .expect("failed to join build status thread"); ok() }
use std::cmp::{max, Ordering};
use std::collections::{BinaryHeap, HashMap, HashSet};
use std::fmt;
use std::fmt::Formatter;

use crate::util::time;

/// Advent of Code day 16 entry point: runs both parts on the input file,
/// timing each.
pub fn day16() {
    println!("== Day 16 ==");
    let input = "src/day16/input.txt";
    time(part_a, input, "A");
    time(part_b, input, "B");
}

/// Search node for Dijkstra: accumulated cost plus current valve.
#[derive(Copy, Clone, Hash, Eq, PartialEq, Debug)]
struct State {
    cost: u32,
    position: ValveId,
}

impl Ord for State {
    fn cmp(&self, other: &Self) -> Ordering {
        // Flip to make min-heap
        other.cost.cmp(&self.cost)
    }
}

impl PartialOrd for State {
    // Same as for ord
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

/// Two-letter valve name, e.g. "AA", stored as a pair of chars.
#[derive(Debug, Hash, Eq, PartialEq, Copy, Clone)]
// #[display=(fmt = "{}{}", )]
struct ValveId(char, char);

impl ValveId {
    // Panics if `id` has fewer than two characters.
    fn new(id: &str) -> Self {
        let chars = id.chars().collect::<Vec<char>>();
        Self(chars[0], chars[1])
    }
}

/// One valve: its flow rate and the tunnels leaving it.
#[derive(Debug, Hash, Eq, PartialEq, Clone)]
struct Valve {
    id: ValveId,
    flow_rate: u32,
    connected: Vec<ValveId>,
}

impl Valve {
    fn new(id: ValveId, flow_rate: u32, connected: Vec<ValveId>) -> Self {
        Self { id, flow_rate, connected }
    }
}

/// The whole cave: every valve, the subset with non-zero flow
/// ("pressurized"), and a shortest-distance table from each valve to every
/// pressurized valve.
struct System {
    valves: HashMap<ValveId, Valve>,
    pressurized: HashSet<ValveId>,
    paths: HashMap<ValveId, HashMap<ValveId, u32>>,
}

impl System {
    /// Parses puzzle-input lines of the form
    /// "Valve XX has flow rate=N; tunnels lead to valves AA, BB" by splitting
    /// on "; ", "=" and spaces (positional — relies on the exact wording).
    fn parse(input: &str) -> Self {
        let mut valves = HashMap::new();
        let mut pressurized = HashSet::new();
        for line in input.lines() {
            let split = line.split("; ").collect::<Vec<&str>>();
            let s0 = split[0].split("=").collect::<Vec<&str>>();
            let this_valve = ValveId::new(s0[0].split(" ").collect::<Vec<&str>>()[1]);
            let flow_rate = s0[1].parse::<u32>().unwrap();
            let vec = split[1].split(" ").collect::<Vec<&str>>();
            // Skip the 4 words "tunnels lead to valves"; the rest are ids
            // (possibly with trailing commas, which ValveId::new ignores by
            // only reading the first two chars).
            let connected = vec.split_at(4).1.iter().map(|s| ValveId::new(s)).collect::<Vec<ValveId>>();
            valves.insert(this_valve, Valve::new(this_valve, flow_rate, connected));
            if flow_rate > 0 {
                pressurized.insert(this_valve);
            }
        }
        let paths = System::generate_paths(&valves);
        Self {
            valves,
            pressurized,
            paths,
        }
    }

    /// Builds, for every valve, the shortest distance to each valve with a
    /// non-zero flow rate (zero-flow valves are never worth walking to).
    fn generate_paths(valves: &HashMap<ValveId, Valve>) -> HashMap<ValveId, HashMap<ValveId, u32>> {
        let mut paths = HashMap::new();
        for (valve, _connections) in valves.iter() {
            let mut p = HashMap::new();
            for (v, _c) in valves.iter() {
                if v == valve {
                    continue;
                }
                if valves.get(v).unwrap().flow_rate == 0 {
                    continue;
                }
                p.insert(*v, System::djikstra(valve, v, valves).unwrap());
            }
            paths.insert(*valve, p);
        }
        paths
    }

    /// Standard Dijkstra over the tunnel graph (every edge costs 1 minute);
    /// returns the distance from `start` to `end`, or None if unreachable.
    fn djikstra(start: &ValveId, end: &ValveId, valves: &HashMap<ValveId, Valve>) -> Option<u32> {
        let mut dist: HashMap<ValveId, u32> = HashMap::from_iter(valves.iter().map(|(k, _v)| (*k, u32::MAX)));
        let mut heap = BinaryHeap::new();

        *dist.entry(*start).or_default() = 0;
        heap.push(State { cost: 0, position: *start });

        while let Some(State { cost, position }) = heap.pop() {
            if position == *end {
                return Some(cost);
            }
            // Stale heap entry: a shorter path to `position` was found later.
            if cost > *dist.get(&position).unwrap() {
                continue;
            }
            for valve in &valves.get(&position).unwrap().connected {
                let valve = *valve;
                let next = State { cost: cost + 1, position: valve };
                if next.cost < *dist.get(&next.position).unwrap() {
                    heap.push(next);
                    *dist.entry(next.position).or_default() = next.cost;
                }
            }
        }
        None
    }

    /// Exhaustive DFS (currently unused; superseded by `calc_paths`): best
    /// total pressure released by visiting/opening valves within `time`.
    fn _find_path(&self, pos: &ValveId, time: u32, curr_time: u32, opened: HashSet<ValveId>) -> u32 {
        // println!("pos: {:?}, time: {}, curr_time: {:?}, opened: {:?}", pos, time, curr_time, opened);
        if opened.len() == self.pressurized.len() {
            // All valves opened
            return 0;
        }

        let mut pressure = 0;
        for valve_id in self.pressurized.iter() {
            if opened.contains(valve_id) {
                continue;
            }
            // Time at which this valve would start releasing: walk + 1 to open.
            let release_time = curr_time + self.paths.get(pos).unwrap().get(valve_id).unwrap() + 1;
            if release_time <= time {
                let release_pressure = self.valves.get(valve_id).unwrap().flow_rate * (time - release_time);
                let mut open = opened.clone();
                open.insert(*valve_id);
                let sub = self._find_path(valve_id, time, release_time, open);
                pressure = max(pressure, release_pressure + sub);
            }
        }
        pressure
    }

    /// Exhaustive two-agent DFS (currently unused): at each step the agent
    /// who would reach the candidate valve first takes it.
    fn _find_path_with_elephant(&self, me_pos: &ValveId, el_pos: &ValveId, time: u32, me_time: u32, el_time: u32, opened: HashSet<ValveId>) -> u32 {
        // println!("pos: {:?}, time: {}, curr_time: {:?}, opened: {:?}", pos, time, curr_time, opened);
        if opened.len() == self.pressurized.len() {
            // All valves opened
            return 0;
        }

        let mut pressure = 0;
        for valve_id in self.pressurized.iter() {
            if opened.contains(valve_id) {
                continue;
            }
            let me_rt = me_time + self.paths.get(me_pos).unwrap().get(valve_id).unwrap() + 1;
            let el_rt = el_time + self.paths.get(el_pos).unwrap().get(valve_id).unwrap() + 1;
            if me_rt <= el_rt {
                // Me first!
                if me_rt <= time {
                    let release_pressure = self.valves.get(valve_id).unwrap().flow_rate * (time - me_rt);
                    let mut open = opened.clone();
                    open.insert(*valve_id);
                    let sub = self._find_path_with_elephant(valve_id, el_pos, time, me_rt, el_time, open);
                    pressure = max(pressure, release_pressure + sub);
                }
            } else {
                // Elephants turn
                if el_rt <= time {
                    let release_pressure = self.valves.get(valve_id).unwrap().flow_rate * (time - el_rt);
                    let mut open = opened.clone();
                    open.insert(*valve_id);
                    let sub = self._find_path_with_elephant(me_pos, valve_id, time, me_time, el_rt, open);
                    pressure = max(pressure, release_pressure + sub);
                }
            }
        }
        pressure
    }

    /// Recursive best-path search. Returns the best additional pressure
    /// obtainable from `curr_valve` with `time_left` minutes, plus the
    /// per-valve value map of the winning path (used by the elephant variant
    /// to run a second pass that only counts improvements over the first).
    fn calc_paths(&self, curr_valve: &ValveId, time_left: u32, visited: HashMap<ValveId, u32>) -> (u32, Option<HashMap<ValveId, u32>>) {
        if time_left < 1 {
            return (0, Some(visited))
        }

        // self.valves.keys()//.iter()
        self.pressurized.iter()
            .filter(|target| *target != curr_valve)
            .filter_map(|target| {
                let distance = self.paths.get(curr_valve).unwrap().get(target);
                if distance.is_none() {
                    // Should be impossible: every pressurized valve has an
                    // entry in the distance table.
                    println!("No path from {:?} to {:?}", curr_valve, target);
                    println!("{:?}", self.paths);
                    panic!();
                    // return None;
                }
                let distance = *distance.unwrap();
                // Minutes of release remaining after walking there + opening.
                let time_left_after_open = time_left as i32 - distance as i32 - 1;
                if time_left_after_open < 0 {
                    return None;
                }
                let time_left_after_open = time_left_after_open as u32;
                let flow_rate = self.valves.get(target).unwrap().flow_rate;
                let valve_value = time_left_after_open * flow_rate;
                // Value this valve already contributed in a previous pass;
                // only a strict improvement is worth pursuing.
                let current_target_value = *visited.get(target).unwrap_or(&0);
                if valve_value <= current_target_value {
                    return None
                }
                let mut visited = visited.to_owned();
                visited.insert(*target, valve_value);
                let mut result = self.calc_paths(
                    target,
                    time_left_after_open,
                    visited,
                );
                // Count only the improvement over the previous contribution.
                result.0 += valve_value - current_target_value;
                result.1 = result.1.to_owned();
                Some(result)
            })
            .max_by(|a, b| a.0.cmp(&b.0))
            .unwrap_or((0, Some(visited)))
    }

    /// Part A answer: best pressure release for a single agent.
    fn max_release(&self, start: &ValveId, time: u32) -> u32 {
        // self.find_path(start, time, 0, HashSet::new())
        let x = self.calc_paths(start, time, HashMap::new());
        x.0
    }

    /// Part B answer: two sequential passes of `calc_paths` — the second
    /// (elephant) pass re-uses the first pass's value map so it only counts
    /// improvements. NOTE(review): this is a heuristic approximation of the
    /// true joint optimum, not an exhaustive two-agent search (that would be
    /// `_find_path_with_elephant`); it evidently matches on the checked-in
    /// inputs per the tests below.
    fn max_release_with_elephant(&self, start: &ValveId, time: u32) -> u32 {
        // let valve_values = Vec::with_capacity(self.paths.len());
        // println!("{:?}", self.pressurized);
        let x = self.calc_paths(start, time, HashMap::new());
        // println!("{:?}", x);
        let y = self.calc_paths(start, time, x.1.unwrap());
        // println!("{:?}", y);
        x.0 + y.0
        // self.find_path_with_elephant(start, start, time, 0, 0, HashSet::new())
    }
}

impl fmt::Debug for System {
    // Dumps each valve id with its distance table, one per line.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        for (id, _valve) in self.valves.iter() {
            writeln!(f, "Id: {}{} :: {:?}", id.0, id.1, self.paths.get(id).unwrap())?;
        }
        Ok(())
    }
}

/// Part A: best single-agent release in 30 minutes, starting at "AA".
fn part_a(input: &str) -> u32 {
    let open = std::fs::read_to_string(input.to_string()).expect("Could not read file");
    let system = System::parse(open.as_str());
    // println!("{:?}", system);
    system.max_release(&ValveId::new("AA"), 30)
}

/// Part B: best two-agent release in 26 minutes, starting at "AA".
fn part_b(input: &str) -> u32 {
    let open = std::fs::read_to_string(input.to_string()).expect("Could not read file");
    let system = System::parse(open.as_str());
    // println!("{:?}", system);
    system.max_release_with_elephant(&ValveId::new("AA"), 26)
}

#[cfg(test)]
mod tests {
    use super::*;

    #[ignore]
    #[test]
    fn runday() {
        day16();
    }

    // Regression tests against the real puzzle input (ignored by default
    // since the input file is not always present).
    #[ignore]
    #[test]
    fn real_a() {
        let input = "src/day16/input.txt";
        assert_eq!(1728, part_a(input));
    }

    #[ignore]
    #[test]
    fn real_b() {
        let input = "src/day16/input.txt";
        assert_eq!(2304, part_b(input));
    }

    #[test]
    fn part_a_test_input() {
        let input = "src/day16/test-input.txt";
        let result = part_a(input);
        assert_eq!(1651, result);
    }

    #[test]
    fn part_b_test_input() {
        let input = "src/day16/test-input.txt";
        let result = part_b(input);
        assert_eq!(1707, result);
    }
}
use http::{HeaderMap, Uri};
use http::header::HeaderValue;
use http::header::{ACCESS_CONTROL_ALLOW_ORIGIN, CONTENT_TYPE};
use hyper::{Body, Request as HyperRequest, Response as HyperResponse};
use hyper::rt::{Future, Stream};
use pact_matching::models::{OptionalBody, Request, Response, HttpPart};
use pact_matching::models::parse_query_string;
use std::collections::HashMap;

/// Parses the query component of a URI into pact's multi-valued query map;
/// None when the URI has no query string.
fn extract_query_string(uri: &Uri) -> Option<HashMap<String, Vec<String>>> {
    match uri.query() {
        Some(q) => parse_query_string(&s!(q)),
        None => None
    }
}

/// Converts a hyper header map into pact's String->String map; None when
/// there are no headers. Non-UTF-8 header values become empty strings.
fn extract_headers(headers: &HeaderMap<HeaderValue>) -> Option<HashMap<String, String>> {
    if headers.len() > 0 {
        Some(headers.iter().map(|(h, v)| (h.as_str().into(), v.to_str().unwrap_or("").to_string())).collect())
    } else {
        None
    }
}

/// Synchronously drains the request body (hyper 0.12 futures `.wait()` —
/// blocks the current thread). Read failures are logged and treated as an
/// empty body rather than an error.
fn extract_body(req: &mut Body) -> OptionalBody {
    match req.by_ref().concat2().wait() {
        Ok(chunk) => if chunk.is_empty() {
            OptionalBody::Empty
        } else {
            OptionalBody::Present(chunk.iter().cloned().collect())
        },
        Err(err) => {
            warn!("Failed to read request body: {}", err);
            OptionalBody::Empty
        }
    }
}

/// Converts an incoming hyper request into a pact `Request`, consuming the
/// request body in the process.
pub fn hyper_request_to_pact_request(req: &mut HyperRequest<Body>) -> Request {
    Request {
        method: req.method().to_string(),
        path: req.uri().path().to_string(),
        query: extract_query_string(req.uri()),
        headers: extract_headers(req.headers()),
        body: extract_body(req.body_mut()),
        .. Request::default_request()
    }
}

/// Builds a hyper response from a pact `Response`:
/// - copies status and headers,
/// - adds `Access-Control-Allow-Origin: *` unless already set,
/// - adds a `Content-Type` (inferred by pact) when a body is present without one.
pub fn pact_response_to_hyper_response(response: &Response) -> HyperResponse<Body> {
    info!("<=== Sending response {:?}", response);
    info!(" body '{}'\n\n", response.body.str_value());

    let mut res = HyperResponse::builder();

    {
        res.status(response.status);

        match response.headers {
            Some(ref headers) => {
                for (k, v) in headers.clone() {
                    res.header(k.as_str(), v);
                }
            },
            None => ()
        }

        if !response.has_header(&ACCESS_CONTROL_ALLOW_ORIGIN.as_str().into()) {
            res.header(ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        }

        // The final `body(...)` consumes the builder and yields the response;
        // unwrap: the builder only fails on invalid parts, which were all
        // derived from an already-valid pact Response.
        match response.body {
            OptionalBody::Present(ref body) => {
                if !response.has_header(&CONTENT_TYPE.as_str().into()) {
                    res.header(CONTENT_TYPE, response.content_type());
                }
                res.body(Body::from(body.clone()))
            },
            _ => res.body(Body::empty())
        }.unwrap()
    }
}

#[cfg(test)]
mod test {
    use expectest::prelude::*;
    use super::*;
    use pact_matching::models::{OptionalBody, Response};
    use http::status::StatusCode;
    use http::header::HeaderValue;

    // CORS wildcard header is added when no headers are set at all.
    #[test]
    fn test_response() {
        let response = Response {
            status: 201,
            headers: Some(hashmap! { }),
            .. Response::default_response()
        };
        let hyper_response = pact_response_to_hyper_response(&response);

        expect!(hyper_response.status()).to(be_equal_to(StatusCode::CREATED));
        expect!(hyper_response.headers().len()).to(be_equal_to(1));
        expect!(hyper_response.headers().get("Access-Control-Allow-Origin")).to(be_some().value(HeaderValue::from_static("*")));
    }

    // An explicit Content-Type header is passed through untouched.
    #[test]
    fn test_response_with_content_type() {
        let response = Response {
            status: 201,
            headers: Some(hashmap! { s!("Content-Type") => s!("text/dizzy") }),
            body: OptionalBody::Present("{\"a\": 1, \"b\": 4, \"c\": 6}".as_bytes().into()),
            .. Response::default_response()
        };
        let hyper_response = pact_response_to_hyper_response(&response);

        expect!(hyper_response.status()).to(be_equal_to(StatusCode::CREATED));
        expect!(hyper_response.headers().is_empty()).to(be_false());
        expect!(hyper_response.headers().get("content-type")).to(be_some().value(HeaderValue::from_static("text/dizzy")));
    }

    #[test]
    fn adds_a_content_type_if_there_is_not_one_and_there_is_a_body() {
        let response = Response {
            body: OptionalBody::Present("{\"a\": 1, \"b\": 4, \"c\": 6}".as_bytes().into()),
            .. Response::default_response()
        };
        let hyper_response = pact_response_to_hyper_response(&response);

        expect!(hyper_response.headers().is_empty()).to(be_false());
        expect!(hyper_response.headers().get("content-type")).to(be_some().value(HeaderValue::from_static("application/json")));
    }

    #[test]
    fn only_add_a_cors_origin_header_if_one_has_not_already_been_provided() {
        let response = Response {
            headers: Some(hashmap! { s!("Access-Control-Allow-Origin") => s!("dodgy.com") }),
            .. Response::default_response()
        };
        let hyper_response = pact_response_to_hyper_response(&response);

        expect!(hyper_response.headers().len()).to(be_equal_to(1));
        expect!(hyper_response.headers().get("Access-Control-Allow-Origin")).to(be_some().value(HeaderValue::from_static("dodgy.com")));
    }
}
#[macro_use]
extern crate log;

use azure_core::prelude::*;
use azure_storage::core::prelude::*;
use azure_storage::queue::prelude::*;
use std::collections::HashMap;
use std::error::Error;

/// Example: create an Azure Storage queue (with metadata), then delete it.
///
/// Credentials come from the STORAGE_ACCOUNT / STORAGE_MASTER_KEY environment
/// variables; the queue name is the first CLI argument.
#[tokio::main]
async fn main() -> Result<(), Box<dyn Error>> {
    // First we retrieve the account name and master key from environment variables.
    let account = std::env::var("STORAGE_ACCOUNT").expect("Set env variable STORAGE_ACCOUNT first!");
    let master_key = std::env::var("STORAGE_MASTER_KEY").expect("Set env variable STORAGE_MASTER_KEY first!");

    let queue_name = std::env::args()
        .nth(1)
        .expect("Please pass the queue name as first parameter");

    let queue_client = QueueAccountClient::new(client::with_access_key(&account, &master_key))
        .into_queue_client(&queue_name);

    trace!("creating queue");

    // this step is optional but here we show
    // how to add metadata to a new queue.
    let mut hm = HashMap::new();
    hm.insert("source", "azure-sdk-for-rust");

    let response = queue_client
        .create_queue()
        .with_metadata(&hm)
        .execute()
        .await?;
    println!("response == {:#?}", response);

    // now let's delete it
    let response = queue_client
        .delete_queue()
        .with_client_request_id("myclientid")
        .execute()
        .await?;
    println!("response == {:#?}", response);

    Ok(())
}
#![no_main]
use libfuzzer_sys::fuzz_target;

pub use tf_demo_parser::{Demo, DemoParser, Parse, ParseError, ParserState, Stream};

/// Drives the demo parser over arbitrary fuzzer-supplied bytes.
/// Parse errors are expected and discarded — only crashes/panics count.
fn fuzz(data: &[u8]) {
    let demo = Demo::new(data);
    let outcome = DemoParser::new_all(demo.get_stream()).parse();
    let _ = outcome;
}

fuzz_target!(|data: &[u8]| { fuzz(data) });
//! `rust-peg` is a simple yet flexible parser generator based on the [Parsing Expression //! Grammar][wikipedia-peg] formalism. It provides the `parser!{}` macro that builds a recursive //! descent parser from a concise definition of the grammar. //! //! [wikipedia-peg]: https://en.wikipedia.org/wiki/Parsing_expression_grammar //! //! The `parser!{}` macro encloses a `grammar` definition containing a set of `rule`s which match //! components of your language. It expands to a Rust `mod` containing functions corresponding to //! each `rule` marked `pub`. //! //! ```rust //! peg::parser!{ //! grammar list_parser() for str { //! rule number() -> u32 //! = n:$(['0'..='9']+) { n.parse().unwrap() } //! //! pub rule list() -> Vec<u32> //! = "[" l:number() ** "," "]" { l } //! } //! } //! //! pub fn main() { //! assert_eq!(list_parser::list("[1,1,2,3,5,8]"), Ok(vec![1, 1, 2, 3, 5, 8])); //! } //! ``` //! //! ## Expressions //! //! * `"keyword"` - _Literal:_ match a literal string. //! * `['0'..='9']` - _Pattern:_ match a single element that matches a Rust `match`-style //! pattern. [(details)](#match-expressions) //! * `some_rule()` - _Rule:_ match a rule defined elsewhere in the grammar and return its //! result. //! * `e1 e2 e3` - _Sequence:_ match expressions in sequence (`e1` followed by `e2` followed by //! `e3`). //! * `e1 / e2 / e3` - _Ordered choice:_ try to match `e1`. If the match succeeds, return its //! result, otherwise try `e2`, and so on. //! * `expression?` - _Optional:_ match one or zero repetitions of `expression`. Returns an //! `Option`. //! * `expression*` - _Repeat:_ match zero or more repetitions of `expression` and return the //! results as a `Vec`. //! * `expression+` - _One-or-more:_ match one or more repetitions of `expression` and return the //! results as a `Vec`. //! * `expression*<n,m>` - _Range repeat:_ match between `n` and `m` repetitions of `expression` //! return the results as a `Vec`. [(details)](#repeat-ranges) //! 
* `expression ** delim` - _Delimited repeat:_ match zero or more repetitions of `expression` //! delimited with `delim` and return the results as a `Vec`. //! * `&expression` - _Positive lookahead:_ Match only if `expression` matches at this position, //! without consuming any characters. //! * `!expression` - _Negative lookahead:_ Match only if `expression` does not match at this //! position, without consuming any characters. //! * `a:e1 b:e2 c:e3 { rust }` - _Action:_ Match `e1`, `e2`, `e3` in sequence. If they match //! successfully, run the Rust code in the block and return its return value. The variable //! names before the colons in the preceding sequence are bound to the results of the //! corresponding expressions. //! * `a:e1 b:e2 c:e3 {? rust }` - Like above, but the Rust block returns a `Result<T, &str>` //! instead of a value directly. On `Ok(v)`, it matches successfully and returns `v`. On //! `Err(e)`, the match of the entire expression fails and it tries alternatives or reports a //! parse error with the `&str` `e`. //! * `$(e)` - _Slice:_ match the expression `e`, and return the `&str` slice of the input //! corresponding to the match. //! * `position!()` - return a `usize` representing the current offset into the input, and //! consumes no characters. //! * `quiet!{ e }` - match expression, but don't report literals within it as "expected" in //! error messages. //! * `expected!("something")` - fail to match, and report the specified string as an expected //! symbol at the current location. //! * `precedence!{ ... }` - Parse infix, prefix, or postfix expressions by precedence climbing. //! [(details)](#precedence-climbing) //! //! ### Match expressions //! //! The `[pat]` syntax expands into a [Rust `match` //! pattern](https://doc.rust-lang.org/book/ch18-03-pattern-syntax.html) against the next character //! (or element) of the input. //! //! This is commonly used for matching sets of characters with Rust's `..=` inclusive range pattern //! 
syntax and `|` to match multiple patterns. For example `['a'..='z' | 'A'..='Z']` matches an //! upper or lower case ASCII alphabet character. //! //! If your input type is a slice of an enum type, a pattern could match an enum variant like //! `[Token::Operator('+')]`. //! //! `[_]` matches any single element. As this always matches except at end-of-file, combining it //! with negative lookahead as `![_]` is the idiom for matching EOF in PEG. //! //! ### Repeat ranges //! //! The repeat operators `*` and `**` can be followed by an optional range specification of the //! form `<n>` (exact), `<n,>` (min), `<,m>` (max) or `<n,m>` (range), where `n` and `m` are either //! integers, or a Rust `usize` expression enclosed in `{}`. //! //! ### Precedence climbing //! //! `precedence!{ rules... }` provides a convenient way to parse infix, prefix, and postfix //! operators using the [precedence //! climbing](http://eli.thegreenplace.net/2012/08/02/parsing-expressions-by-precedence-climbing) //! algorithm. //! //! ```rust,no_run //! # peg::parser!{grammar doc() for str { //! # pub rule number() -> i64 = "..." { 0 } //! pub rule arithmetic() -> i64 = precedence!{ //! x:(@) "+" y:@ { x + y } //! x:(@) "-" y:@ { x - y } //! -- //! x:(@) "*" y:@ { x * y } //! x:(@) "/" y:@ { x / y } //! -- //! x:@ "^" y:(@) { x.pow(y as u32) } //! -- //! n:number() { n } //! } //! # }} //! # fn main() {} //! ``` //! //! Each `--` introduces a new precedence level that binds more tightly than previous precedence //! levels. The levels consist of one or more operator rules each followed by a Rust action //! expression. //! //! The `(@)` and `@` are the operands, and the parentheses indicate associativity. An operator //! rule beginning and ending with `@` is an infix expression. Prefix and postfix rules have one //! `@` at the beginning or end, and atoms do not include `@`. //! //! ## Custom input types //! //! 
`rust-peg` handles input types through a series of traits, and comes with implementations for //! `str`, `[u8]`, and `[T]`. //! //! * `Parse` is the base trait for all inputs. The others are only required to use the //! corresponding expressions. //! * `ParseElem` implements the `[_]` pattern operator, with a method returning the next item of //! the input to match. //! * `ParseLiteral` implements matching against a `"string"` literal. //! * `ParseSlice` implements the `$()` operator, returning a slice from a span of indexes. //! //! ### Error reporting //! //! When a match fails, position information is automatically recorded to report a set of //! "expected" tokens that would have allowed the parser to advance further. //! //! Some rules should never appear in error messages, and can be suppressed with `quiet!{e}`: //! ```rust,no_run //! # peg::parser!{grammar doc() for str { //! rule whitespace() = quiet!{[' ' | '\n' | '\t']+} //! # }} //! # fn main() {} //! ``` //! //! If you want the "expected" set to contain a more helpful string instead of character sets, you //! can use `quiet!{}` and `expected!()` together: //! //! ```rust,no_run //! # peg::parser!{grammar doc() for str { //! rule identifier() //! = quiet!{[ 'a'..='z' | 'A'..='Z']['a'..='z' | 'A'..='Z' | '0'..='9' ]+} //! / expected!("identifier") //! # }} //! # fn main() {} //! ``` //! //! ## Imports //! //! ```rust,no_run //! mod ast { //! pub struct Expr; //! } //! //! peg::parser!{grammar doc() for str { //! use self::ast::Expr; //! }} //! # fn main() {} //! ``` //! //! The grammar may begin with a series of `use` declarations, just like in Rust, which are //! included in the generated module. Unlike normal `mod {}` blocks, `use super::*` is inserted by //! default, so you don't have to deal with this most of the time. //! //! ## Rustdoc comments //! //! `rustdoc` comments with `///` before a `grammar` or `pub rule` are propagated to the resulting //! function: //! //! ```rust,no_run //! 
# peg::parser!{grammar doc() for str { //! /// Parse an array expression. //! pub rule array() -> Vec<i32> = "[...]" { vec![] } //! # }} //! # fn main() {} //! ``` //! //! As with all procedural macros, non-doc comments are ignored by the lexer and can be used like //! in any other Rust code. //! //! ## Tracing //! //! If you pass the `peg/trace` feature to Cargo when building your project, a trace of the parsing //! will be printed to stdout when parsing. For example, //! ```sh //! $ cargo run --features peg/trace //! ... //! [PEG_TRACE] Matched rule type at 8:5 //! [PEG_TRACE] Attempting to match rule ident at 8:12 //! [PEG_TRACE] Attempting to match rule letter at 8:12 //! [PEG_TRACE] Failed to match rule letter at 8:12 //! ... //! ``` extern crate peg_macros; extern crate peg_runtime as runtime; pub use peg_macros::parser; pub use runtime::*;
use std::fs;
use std::collections::HashMap;

/// Walks a wire path (e.g. `["R8", "U5", ...]`) from the origin and returns
/// every grid cell the wire occupies, mapped to the number of steps taken to
/// FIRST reach that cell. Cells revisited later keep their original step
/// count (required for part 2's "fewest combined steps" metric).
///
/// Panics on a malformed segment (unknown direction letter or a distance
/// that does not parse) — acceptable for puzzle input.
fn get_positions(wire: Vec<&str>) -> HashMap<(isize, isize), usize> {
    let mut last = (0, 0);
    let mut step = 0;
    let mut positions = HashMap::new();
    for direction in &wire {
        // Segment format: one direction letter followed by a distance, e.g. "R75".
        let (dir, dist) = direction.split_at(1);
        let distance = dist.parse::<isize>().unwrap();
        // Unit vector for the direction letter.
        let (dx, dy) = match dir {
            "U" => (0, 1),
            "D" => (0, -1),
            "R" => (1, 0),
            "L" => (-1, 0),
            other => panic!("unknown direction: {:?}", other),
        };
        for i in 1..=distance {
            step += 1;
            // entry() does a single lookup and only records the FIRST visit.
            positions.entry((last.0 + i * dx, last.1 + i * dy)).or_insert(step);
        }
        last = (last.0 + distance * dx, last.1 + distance * dy);
    }
    positions
}

/// Manhattan (taxicab) distance of a grid position from the origin.
fn manhattan_from_origin((x, y): (isize, isize)) -> usize {
    (x.abs() + y.abs()) as usize
}

/// Part 1: prints the Manhattan distance from the origin to the closest
/// point where both wires cross. Prints `usize::max_value()` if the wires
/// never intersect (matches the original sentinel behavior).
fn part1(w1pos: &HashMap<(isize, isize), usize>, w2pos: &HashMap<(isize, isize), usize>) {
    let min_dist = w1pos
        .keys()
        .filter(|p| w2pos.contains_key(*p))
        .map(|p| manhattan_from_origin(*p))
        .min()
        .unwrap_or(usize::max_value());
    println!("Closest intersection is at a distance {:?}", min_dist);
}

/// Part 2: prints the minimal combined number of steps both wires take to
/// reach a common intersection point.
fn part2(w1pos: &HashMap<(isize, isize), usize>, w2pos: &HashMap<(isize, isize), usize>) {
    let min_step = w1pos
        .iter()
        .filter_map(|(p, s1)| w2pos.get(p).map(|s2| s1 + s2))
        .min()
        .unwrap_or(usize::max_value());
    println!("Minimal number of steps is {}", min_step);
}

fn main() {
    let contents = fs::read_to_string("input.txt").expect("Something went wrong reading the file");
    let wires: Vec<&str> = contents.split("\n").collect();
    // NOTE(review): indexing wires[1] and the slices below panic if the input
    // has fewer than two lines / five segments per wire — fine for known
    // puzzle input, worth guarding for anything else.
    let wire1: Vec<&str> = wires[0].split(",").collect();
    let wire2: Vec<&str> = wires[1].split(",").collect();
    println!("Content: \n{:?} \n{:?}", &wire1[1..5], &wire2[1..5]);
    let (w1pos, w2pos) = (get_positions(wire1), get_positions(wire2));
    part1(&w1pos, &w2pos);
    part2(&w1pos, &w2pos);
}
/// Demonstrates iterating a `Vec` by reference and by value, filtering a
/// range, and turning ranges into concrete values via `collect`/`sum`.
fn iterators() {
    let scores = vec![100, 90, 85];

    // Borrowing iteration: `scores` remains usable afterwards.
    for s in scores.iter() {
        println!("score: {}", s);
    }

    // Consuming iteration: `scores` is moved into the loop here.
    for s in scores.into_iter() {
        println!("score: {}", s);
    }

    // Keep only the even numbers from the range.
    (0..10).filter(|n| n % 2 == 0).for_each(|n| println!("{}", n));

    // Ranges can be collected into collections or folded into a sum; the
    // leading underscores mark these as intentionally unused examples.
    let _values: Vec<i32> = (0..10).collect();
    let _sum: i32 = (0..10).sum();
}
fn main(){ // very typical if-elseastatements. We dont need to put the boolean // in parenthesises though. // if 3 > 2 { println!("Branch1A"); } else if 4 > 3 { println!("Branch2A"); } else { println!("Branch3A"); } if 2 > 2 { println!("Branch1B"); } else if 4 > 3 { println!("Branch2B"); } else { println!("Branch3B"); } if 2 > 2 { println!("Branch1C"); } else if 4 > 5 { println!("Branch2C"); } else { println!("Branch3C"); } }
pub mod vector; pub mod matrix; const EPSILON: f32 = 0.00001; /// Clamps a float value between [min, max] pub fn clamp(value: f32, min: f32, max: f32) -> f32 { if value > max { max } else if value < min { min } else { value } } /// Clamps a float value between [0.0, 1.0] pub fn clamp01(value: f32) -> f32 { clamp(value, 0.0, 1.0) }
pub mod pieces; /* use std::clone::Clone; use self::bitboard::BitBoard; use color::Color; use shape::Shape; use pieces; use point::Point; use direction::Direction; use piece::Orientation; use corner::Corner; use rand; #[derive(Default)] pub struct Board { pub occupied: [BitBoard; 4], pub auras: [BitBoard; 4], pub corners: [Vec<Corner>; 4], } impl Board { pub fn new() -> Self { // The initial corners a player may play at. let corners = vec![Corner::new(Point::new(-1, -1), Direction::SE), Corner::new(Point::new(20, -1), Direction::SW), Corner::new(Point::new(-1, 20), Direction::NE), Corner::new(Point::new(20, 20), Direction::NW)]; Board { occupied: Default::default(), auras: Default::default(), corners: [corners.clone(), corners.clone(), corners.clone(), corners.clone()], } } // Makes appropriate board changes for a turn, but only if the move is // legal. Returns a boolean which represents whether or not the move was // made. #[inline] pub fn do_move_if_legal(&mut self, orientation: &Orientation, corner_index: usize, connect: Point, color: Color) -> bool { let corner = self.get_corner(corner_index, color); let index = Board::get_index(corner, connect); if self.is_illegal(index, &orientation.shape, color) { false } else { self.do_move(corner_index, orientation, index, color); true } } #[inline] pub fn do_move(&mut self, corner_index: usize, orientation: &Orientation, index: usize, color: Color) { self.occupied[color as usize].place_shape(&orientation.shape, index); self.auras[color as usize].place_shape(&orientation.shape, index); let corners = &mut self.corners[color as usize]; corners.extend(orientation.corners().map(|c| Corner::new(c.coordinates + Point::from_index(corner_index), c.direction))); corners.swap_remove(corner_index); } #[inline] pub fn monomino_fits(&self, corner: usize, color: Color) -> bool { let corner = self.get_corner(corner, color); let orientation = &pieces::piece(0).orientation(0); let connect = 
orientation.get_corners(corner.direction.opposite())[0]; let index = Board::get_index(corner, connect); !self.is_illegal(index, &orientation.shape, color) } #[inline] fn is_illegal(&self, index: usize, shape: &Shape, color: Color) -> bool { // Tests if piece intersects one of its own pieces or one of its // own pieces' aura. let auras = &self.auras[color as usize]; let aura_intersect = auras.shape_intersects(shape, index); aura_intersect || self.intersects_others(index, shape, color) } #[inline] fn intersects_others(&self, index: usize, shape: &Shape, color: Color) -> bool { let intersects = |bitboard: &BitBoard| bitboard.shape_intersects(shape, index); let mut iter = self.occupied.iter(); if iter.by_ref().take(color as usize).any(&intersects) { true } else if let Some(_) = iter.next() { iter.any(intersects) } else { false } } fn get_index(corner: Corner, connect: Point) -> usize { /* if corner.direction.up() { (corner.coordinates - connect.block_position()).to_index() } else { (corner.coordinates + connect).to_index() } */ let connect = Corner::new(connect, corner.direction.opposite()); (corner.coordinates + connect.block_position()).to_index() } pub fn random_corner<R: rand::Rng>(&self, color: Color, rng: &mut R) -> Option<usize> { if self.corners[color as usize].is_empty() { None } else { Some(rng.gen_range(0, self.corners[color as usize].len())) } } pub fn place_monomino(&mut self, corner_index: usize, color: Color) -> Turn { let corner = self.get_corner(corner_index, color); let orientation = pieces::piece(0).orientation(0); let connect = orientation.get_corners(corner.direction.opposite())[0]; let index = Board::get_index(corner, connect); let orientation = pieces::piece(0).orientation(0); self.do_move(corner_index, orientation, index, color); Turn::new(0, 0, corner_index, connect, color) } pub fn get_corner(&self, index: usize, color: Color) -> Corner { self.corners[color as usize][index] } pub fn print(&self) { for y in 0..20 { for x in 0..20 { let index = 
y*20 + x; if self.occupied[0].index(index) { print!("\x1B[34m*"); } else if self.occupied[1].index(index) { print!("\x1B[93m*"); } else if self.occupied[2].index(index) { print!("\x1B[92m*"); } else if self.occupied[3].index(index) { print!("\x1B[91m*"); } else { print!("\x1B[39m-"); } } println!("\x1B[39m"); } } pub fn print_auras(&self) { for y in 0..20 { for x in 0..20 { let index = y*20 + x; if self.auras[0].index(index) { print!("\x1B[34m*"); } else if self.auras[1].index(index) { print!("\x1B[93m*"); } else if self.auras[2].index(index) { print!("\x1B[92m*"); } else if self.auras[3].index(index) { print!("\x1B[91m*"); } else { print!("\x1B[39m-"); } } println!("\x1B[39m"); } } } impl Clone for Board { fn clone(&self) -> Self { // Ew. Board { occupied: self.occupied.clone(), auras: self.auras.clone(), corners: [self.corners[0].clone(), self.corners[1].clone(), self.corners[2].clone(), self.corners[3].clone()], } } } */
#[macro_use]
extern crate actix_web;

use actix_cors::Cors;
use actix_files as fs;
use actix_session::Session;
use std::{env, io};

use actix_web::http::StatusCode;
use actix_web::{middleware, web, App, HttpRequest, HttpResponse, HttpServer, Result};

mod city_list;
use city_list::get_city_list;
mod search_city_list;
use search_city_list::get_search_city_list;

/// favicon handler: serves the static favicon file from disk.
#[get("/favicon")]
async fn favicon() -> Result<fs::NamedFile> {
    Ok(fs::NamedFile::open("static/favicon.ico")?)
}

/// simple index handler: counts visits in the session and serves a static
/// welcome page (embedded into the binary at compile time).
#[get("/welcome")]
async fn welcome(session: Session, req: HttpRequest) -> Result<HttpResponse> {
    println!("{:?}", req);

    // session: read the per-session visit counter; defaults to 1 on the
    // first visit (no "counter" key stored yet).
    let mut counter = 1;
    if let Some(count) = session.get::<i32>("counter")? {
        println!("SESSION value: {}", count);
        counter = count + 1;
    }

    // set counter to session (persisted by the session middleware)
    session.set("counter", counter)?;

    // response
    Ok(HttpResponse::build(StatusCode::OK)
        .content_type("text/html; charset=utf-8")
        .body(include_str!("../static/welcome.html")))
}

/// 404 handler: serves the static not-found page with a 404 status code.
async fn p404() -> Result<fs::NamedFile> {
    Ok(fs::NamedFile::open("static/404.html")?.set_status_code(StatusCode::NOT_FOUND))
}

#[actix_rt::main]
async fn main() -> io::Result<()> {
    // Enable request/server logging for the Logger middleware below.
    env::set_var("RUST_LOG", "actix_web=debug,actix_server=info");
    env_logger::init();
    println!("listening http://localhost:8000");

    HttpServer::new(|| {
        App::new()
            // NOTE(review): no allowed_origin is set, so this CORS config
            // presumably permits any origin — confirm that is intended.
            .wrap(
                Cors::new()
                    .allowed_methods(vec!["GET", "POST"])
                    // .allowed_headers(vec![header::AUTHORIZATION, header::ACCEPT])
                    // .allowed_header(header::CONTENT_TYPE)
                    // .max_age(3600)
                    .finish(),
            )
            .wrap(middleware::Logger::default())
            .service(get_city_list)
            .service(get_search_city_list)
            // default
            .default_service(
                // 404 for GET request
                web::resource("").route(web::get().to(p404)),
            )
    })
    .bind("0.0.0.0:8000")?
    .run()
    .await
}
// NOTE(review): this file follows the svd2rust generated-code pattern —
// presumably regenerated from an SVD description; confirm before hand-editing.

#[doc = "Register `FDCAN_TDCR` reader"]
pub type R = crate::R<FDCAN_TDCR_SPEC>;
#[doc = "Field `TDCF` reader - Transmitter Delay Compensation Filter Window Length"]
pub type TDCF_R = crate::FieldReader;
#[doc = "Field `TDCO` reader - Transmitter Delay Compensation Offset"]
pub type TDCO_R = crate::FieldReader;
impl R {
    #[doc = "Bits 0:6 - Transmitter Delay Compensation Filter Window Length"]
    #[inline(always)]
    pub fn tdcf(&self) -> TDCF_R {
        // Field occupies bits 0..=6: mask 7 bits, no shift needed.
        TDCF_R::new((self.bits & 0x7f) as u8)
    }
    #[doc = "Bits 8:14 - Transmitter Delay Compensation Offset"]
    #[inline(always)]
    pub fn tdco(&self) -> TDCO_R {
        // Field occupies bits 8..=14: shift down by 8, then mask 7 bits.
        TDCO_R::new(((self.bits >> 8) & 0x7f) as u8)
    }
}
#[doc = "FDCAN Transmitter Delay Compensation Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`fdcan_tdcr::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct FDCAN_TDCR_SPEC;
impl crate::RegisterSpec for FDCAN_TDCR_SPEC {
    // 32-bit register.
    type Ux = u32;
}
#[doc = "`read()` method returns [`fdcan_tdcr::R`](R) reader structure"]
impl crate::Readable for FDCAN_TDCR_SPEC {}
#[doc = "`reset()` method sets FDCAN_TDCR to value 0"]
impl crate::Resettable for FDCAN_TDCR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
use structopt::StructOpt; use airhobot::prelude::*; use std::{ path::PathBuf, }; #[derive(StructOpt, Debug)] #[structopt(name = "AirHoBot")] pub struct Args { /// Use the cam with the given id as input source #[structopt(short, long, conflicts_with = "image")] pub cam_id: Option<i32>, /// Use the image as input source #[structopt(short, long, conflicts_with = "cam_id")] pub image: Option<PathBuf>, /// Use the video as input source #[structopt(short, long, conflicts_with = "image, cam_id")] pub video: Option<PathBuf>, } impl Args { pub fn source(&self) -> Result<Source> { self.cam_id .map(Source::cam) .or(self.video.as_ref().map(Source::video)) .or(self.image.as_ref().map(Source::image)) .unwrap_or(Err(Error::Arguments { msg: "input source missing".into(), })) } }
use serde::{Deserialize, Serialize};
use serde_with::skip_serializing_none;

/// Use this method to specify a url and receive incoming updates via an outgoing webhook. Whenever there is an update for the bot, we will send an HTTPS POST request to the specified url, containing a JSON-serialized Update. In case of an unsuccessful request, we will give up after a reasonable amount of attempts. Returns True on success.
///
/// If you'd like to make sure that the Webhook request comes from Telegram, we recommend using a secret path in the URL, e.g. https://www.example.com/<token>. Since nobody else knows your bot's token, you can be pretty sure it's us.
#[skip_serializing_none]
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct SetWebhook {
    /// HTTPS url to send updates to. Use an empty string to remove webhook integration.
    pub url: String,
    /// Upload your public key certificate so that the root certificate in use can be checked. See our self-signed guide for details.
    // NOTE(review): modeled here as a plain string; confirm this matches how
    // the surrounding client uploads certificate files.
    pub certificate: Option<String>,
    /// Maximum allowed number of simultaneous HTTPS connections to the webhook for update delivery, 1-100. Defaults to 40. Use lower values to limit the load on your bot's server, and higher values to increase your bot's throughput.
    pub max_connections: Option<i32>,
    /// List the types of updates you want your bot to receive. For example, specify ["message", "edited_channel_post", "callback_query"] to only receive updates of these types. See Update for a complete list of available update types. Specify an empty list to receive all updates regardless of type (default). If not specified, the previous setting will be used.
    pub allowed_updates: Option<Vec<String>>,
}

/// Use this method to remove webhook integration if you decide to switch back to getUpdates. Returns True on success. Takes no parameters.
#[skip_serializing_none]
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct DeleteWebhook;

/// Use this method to get current webhook status. Requires no parameters. On success, returns a WebhookInfo object. If the bot is using getUpdates, will return an object with the url field empty.
#[skip_serializing_none]
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct GetWebhookInfo;

/// The method for receiving incoming updates using long polling.
#[skip_serializing_none]
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct GetUpdates {
    /// Identifier of the first update to be returned. Must be greater by one than the highest among the identifiers of previously received updates. By default, updates starting with the earliest unconfirmed update are returned. An update is considered confirmed as soon as getUpdates is called with an offset higher than its update_id. The negative offset can be specified to retrieve updates starting from -offset update from the end of the updates queue. All previous updates will be forgotten.
    pub offset: Option<i32>,
    /// Limits the number of updates to be retrieved. Values between 1-100 are accepted. Defaults to 100.
    pub limit: Option<i32>,
    /// Timeout in seconds for long polling. Defaults to 0, i.e. usual short polling. Should be positive; short polling should be used for testing purposes only.
    pub timeout: Option<i32>,
    /// List the types of updates you want your bot to receive. For example, specify ["message", "edited_channel_post", "callback_query"] to only receive updates of these types. See Update for a complete list of available update types. Specify an empty list to receive all updates regardless of type (default). If not specified, the previous setting will be used.
    ///
    /// Please note that this parameter doesn't affect updates created before the call to the getUpdates, so unwanted updates may be received for a short period of time.
    pub allowed_updates: Option<Vec<String>>,
}
mod util; pub mod asset; pub mod data_backend;
// NOTE(review): this file follows the svd2rust generated-code pattern —
// presumably regenerated from an SVD description; confirm before hand-editing.

#[doc = "Register `CTR` reader"]
pub type R = crate::R<CTR_SPEC>;
#[doc = "Field `_IminLine` reader - IminLine"]
pub type _IMIN_LINE_R = crate::FieldReader;
#[doc = "Field `DMinLine` reader - DMinLine"]
pub type DMIN_LINE_R = crate::FieldReader;
#[doc = "Field `ERG` reader - ERG"]
pub type ERG_R = crate::FieldReader;
#[doc = "Field `CWG` reader - CWG"]
pub type CWG_R = crate::FieldReader;
#[doc = "Field `Format` reader - Format"]
pub type FORMAT_R = crate::FieldReader;
impl R {
    #[doc = "Bits 0:3 - IminLine"]
    #[inline(always)]
    pub fn _imin_line(&self) -> _IMIN_LINE_R {
        // Field occupies bits 0..=3: mask 4 bits, no shift needed.
        _IMIN_LINE_R::new((self.bits & 0x0f) as u8)
    }
    #[doc = "Bits 16:19 - DMinLine"]
    #[inline(always)]
    pub fn dmin_line(&self) -> DMIN_LINE_R {
        // Bits 16..=19: shift down by 16, then mask 4 bits.
        DMIN_LINE_R::new(((self.bits >> 16) & 0x0f) as u8)
    }
    #[doc = "Bits 20:23 - ERG"]
    #[inline(always)]
    pub fn erg(&self) -> ERG_R {
        // Bits 20..=23: shift down by 20, then mask 4 bits.
        ERG_R::new(((self.bits >> 20) & 0x0f) as u8)
    }
    #[doc = "Bits 24:27 - CWG"]
    #[inline(always)]
    pub fn cwg(&self) -> CWG_R {
        // Bits 24..=27: shift down by 24, then mask 4 bits.
        CWG_R::new(((self.bits >> 24) & 0x0f) as u8)
    }
    #[doc = "Bits 29:31 - Format"]
    #[inline(always)]
    pub fn format(&self) -> FORMAT_R {
        // Bits 29..=31: shift down by 29, then mask 3 bits.
        FORMAT_R::new(((self.bits >> 29) & 7) as u8)
    }
}
#[doc = "Cache Type register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ctr::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CTR_SPEC;
impl crate::RegisterSpec for CTR_SPEC {
    // 32-bit register.
    type Ux = u32;
}
#[doc = "`read()` method returns [`ctr::R`](R) reader structure"]
impl crate::Readable for CTR_SPEC {}
#[doc = "`reset()` method sets CTR to value 0x8303_c003"]
impl crate::Resettable for CTR_SPEC {
    const RESET_VALUE: Self::Ux = 0x8303_c003;
}